code stringlengths 281 23.7M |
|---|
class SieveExtrapolator(Extrapolator):
def __init__(self, extrapolator: Optional[Union[(PolynomialExtrapolator, DifferentialExtrapolator)]]=None, window: int=2, filter_before: bool=True, filter_after: bool=True) -> None:
self._extrapolator = WindowExtrapolator(extrapolator=extrapolator, window=window)
... |
class FedAVGServerTimer(ServerTimer):
def __init__(self, args, global_num_iterations, local_num_iterations_dict, global_epochs_per_round, local_num_epochs_per_comm_round_dict):
super().__init__(args, global_num_iterations, local_num_iterations_dict)
self.global_epochs_per_round = global_epochs_per_r... |
class TransitionLog(models.GenericTransitionLog):
    # Extends the generic xworkflows transition log with the acting user.
    # Tuple format appears to be (attribute, db field, default) — the 'user'
    # attribute is stored on the 'user' FK below; TODO confirm against
    # GenericTransitionLog's contract.
    EXTRA_LOG_ATTRIBUTES = (('user', 'user', None),)
    # FK to the project's user model; falls back XWORKFLOWS_USER_MODEL ->
    # AUTH_USER_MODEL -> 'auth.User'. Nullable so system/automated transitions
    # can be logged without an author.
    user = django_models.ForeignKey(getattr(settings, 'XWORKFLOWS_USER_MODEL', getattr(settings, 'AUTH_USER_MODEL', 'auth.User')), blank=True, null=True, on_delete=django_models.CASCADE, verbose_name=_('author'))
def calculate_storage_specific_size_data_provider():
return ({'sharding_type': ShardingType.TABLE_ROW_WISE, 'optimizer_class': torch.optim.SGD, 'expected_storage': [50, 50]}, {'sharding_type': ShardingType.COLUMN_WISE, 'optimizer_class': torch.optim.Adam, 'expected_storage': [150, 150]}, {'sharding_type': ShardingT... |
class Regex(MessageFilter):
__slots__ = ('pattern',)
def __init__(self, pattern: Union[(str, Pattern[str])]):
if isinstance(pattern, str):
pattern = re.compile(pattern)
self.pattern: Pattern[str] = pattern
super().__init__(name=f'filters.Regex({self.pattern})', data_filter=Tr... |
class SponsorshipResource(resources.ModelResource):
sponsor_name = Field(attribute='sponsor__name', column_name='Company Name')
contact_name = Field(column_name='Contact Name(s)')
contact_email = Field(column_name='Contact Email(s)')
contact_phone = Field(column_name='Contact phone number')
contact_... |
class OwnedPickupLocation(NamedTuple):
player_name: (str | None)
location: PickupLocation
def export(self, namer: HintNamer) -> str:
hint = namer.format_location(self.location, with_region=True, with_area=True, with_color=False)
if (self.player_name is not None):
hint = f"{namer.... |
class TestGetPolygonPointIntersect():
def test_get_polygon_point_intersect(self):
poly = Polygon([Point((0, 0)), Point((1, 0)), Point((1, 1)), Point((0, 1))])
pt = Point((0.5, 0.5))
assert (pt == get_polygon_point_intersect(poly, pt))
def test_get_polygon_point_intersect_on_edge(self):
... |
class NullOutput(Output):
def verbosity(self) -> Verbosity:
return Verbosity.QUIET
def is_decorated(self) -> bool:
return False
def decorated(self, decorated: bool=True) -> None:
pass
def supports_utf8(self) -> bool:
return True
def set_verbosity(self, verbosity: Verb... |
class DIControllerManager(ControllerManager):
def __init__(self, display=None):
self._display = display
self._controllers: Dict[(DirectInputDevice, base.Controller)] = {}
for device in _di_manager.devices:
self._add_controller(device)
_di_manager.event
def on_conn... |
def process_doc_file(code_file, add_new_line=True):
with open(code_file, 'r', encoding='utf-8', newline='\n') as f:
code = f.read()
splits = code.split('```')
if ((len(splits) % 2) != 1):
raise ValueError('The number of occurrences of ``` should be an even number.')
splits = [(s if ((i %... |
def assert_valid_process(process_id):
try:
process_handle = win32api.OpenProcess(win32con.MAXIMUM_ALLOWED, 0, process_id)
except win32gui.error as exc:
raise ProcessNotFoundError(((str(exc) + ', pid = ') + str(process_id)))
if (not process_handle):
message = ("Process with ID '%d' co... |
def train(train_loader, model, criterion, optimizer, epoch, args, log, tf_writer, CE):
batch_time = AverageMeter('Time', ':6.3f')
data_time = AverageMeter('Data', ':6.3f')
losses = AverageMeter('Loss', ':.4e')
top1 = AverageMeter('', ':6.2f')
top5 = AverageMeter('', ':6.2f')
model.train()
en... |
class ElementCollection():
def __init__(self, elements: [Element]) -> None:
self._elements = elements
self.style = StyleCollection(self)
def __getitem__(self, key):
if isinstance(key, int):
return self._elements[key]
elif isinstance(key, slice):
return Ele... |
class Effect5629(BaseEffect):
    """Passive hull effect: boosts the thermal damage of loaded cruise
    missile charges by the ship's 'shipBonusMB' attribute (scaled by the
    Minmatar Battleship skill)."""

    type = 'passive'

    def handler(fit, ship, context, projectionRange, **kwargs):
        # Only modules whose loaded charge requires the Cruise Missiles skill.
        def uses_cruise_missiles(mod):
            return mod.charge.requiresSkill('Cruise Missiles')

        fit.modules.filteredChargeBoost(
            uses_cruise_missiles,
            'thermalDamage',
            ship.getModifiedItemAttr('shipBonusMB'),
            skill='Minmatar Battleship',
            **kwargs,
        )
class GitLab(Extension):
def augment_cli(self, parser: ArgumentParser):
parser.add_argument(self.flag, help=self.help_text, nargs=0, action=include(PreCommit(), self))
return self
def activate(self, actions: List[Action]) -> List[Action]:
return self.register(actions, add_files, after='d... |
class KohnShamDFT(mol_ks.KohnShamDFT):
_keys = {'xc', 'nlc', 'grids', 'nlcgrids', 'small_rho_cutoff'}
get_rho = get_rho
density_fit = _patch_df_beckegrids(pbchf.RHF.density_fit)
rs_density_fit = _patch_df_beckegrids(pbchf.RHF.rs_density_fit)
mix_density_fit = _patch_df_beckegrids(pbchf.RHF.mix_densi... |
def set_user_repo_permission(username, namespace_name, repository_name, role_name):
if (username == namespace_name):
raise DataModelException('Namespace owner must always be admin.')
try:
user = User.get((User.username == username))
except User.DoesNotExist:
raise DataModelException(... |
class EoctBottleneck(nn.Module):
def __init__(self, in_channels, num_channels, stride=1, downsample=None, res_scale=1, **kwargs):
super(EoctBottleneck, self).__init__()
self.num_channels = num_channels
self.stride = stride
self.downsample = downsample
self.res_scale = res_sca... |
class CacheConfig():
cache_type: Optional[CacheType] = CacheType.AUTO
mapping: Dict[(str, CacheType)] = field(default_factory=dict)
key_extractors: Union[(KeyExtractorType, Dict[(str, KeyExtractorType)])] = field(default_factory=dict)
batch_size: Optional[int] = 32
num_workers: Optional[int] = None
... |
class Mxp(object):
def __init__(self, protocol):
self.protocol = protocol
self.protocol.protocol_flags['MXP'] = False
self.protocol.will(MXP).addCallbacks(self.do_mxp, self.no_mxp)
def no_mxp(self, option):
self.protocol.protocol_flags['MXP'] = False
self.protocol.handsha... |
class ModelTagFieldCountTest(TagTestManager, TestCase):
manage_models = [test_models.TagFieldModel]
def setUpExtra(self):
self.tag_model = test_models.TagFieldModel.tags.tag_model
self.tag_field = test_models.TagFieldModel.tags
def test_count_0_create(self):
self.tag_model.objects.cr... |
class PylintLinter():
last_diags = collections.defaultdict(list)
def lint(cls, document, is_saved, flags=''):
if (not is_saved):
return cls.last_diags[document.path]
cmd = ([sys.executable, '-c', 'import sys; from pylint.lint import Run; Run(sys.argv[1:])', '-f', 'json', document.pat... |
(python=PYTHON_ALL_VERSIONS)
def create_test_package_list(session: nox.Session) -> None:
session.run('python', '-m', 'pip', 'install', '--upgrade', 'pip')
output_dir = (session.posargs[0] if session.posargs else str(PIPX_TESTS_PACKAGE_LIST_DIR))
primary = str((PIPX_TESTS_PACKAGE_LIST_DIR / 'primary_packages... |
def general(eri_or_mol, mo_coeffs, erifile=None, dataname='eri_mo', intor='int2e', *args, **kwargs):
if isinstance(eri_or_mol, numpy.ndarray):
return incore.general(eri_or_mol, mo_coeffs, *args, **kwargs)
else:
if ('_spinor' in intor):
mod = r_outcore
else:
mod = ... |
class CachedDataset(Dataset):
def __init__(self, data_loader: DataLoader, num_batches: int, path: str):
if data_loader:
if (len(data_loader) < num_batches):
raise ValueError(f'Can not fetch {num_batches} batches from a data loader of length {len(data_loader)}.')
self.... |
_guvectorize(['void(int8[:,:], uint64[:], uint64[:])', 'void(int16[:,:], uint64[:], uint64[:])', 'void(int32[:,:], uint64[:], uint64[:])', 'void(int64[:,:], uint64[:], uint64[:])'], '(n, k),(g)->(g)')
def _count_sorted_genotypes(genotypes: ArrayLike, _: ArrayLike, out: ArrayLike) -> ArrayLike:
out[:] = 0
for i ... |
def _preprocess_kwargs_kv_pairs(items: Sequence[KVPair], ctx: CheckCallContext) -> Optional[Tuple[(Dict[(str, Tuple[(bool, Value)])], Optional[Value])]]:
out_items = {}
possible_values = []
covered_keys: Set[Value] = set()
for pair in reversed(items):
if (not pair.is_many):
if isinst... |
class Schema():
unknown = '<default>'
def __init__(self, name: str=unknown):
self.raw_name = escape_identifier_name(name)
def __str__(self):
return self.raw_name
def __repr__(self):
return ('Schema: ' + str(self))
def __eq__(self, other):
return (isinstance(other, Sch... |
def move_schedule_item_submissions_to_talk(apps, schema_editor):
    """Data migration: relabel ScheduleItem rows of type 'submission' as 'talk'.

    Uses the historical model from the migration state (``apps.get_model``),
    as Django migrations require.
    """
    ScheduleItem = apps.get_model('schedule', 'ScheduleItem')
    # Filter in the database instead of loading and testing every row in
    # Python. save() is still called per row, preserving the original
    # per-instance behavior (unlike a bulk .update()).
    for schedule_item in ScheduleItem.objects.filter(type='submission'):
        schedule_item.type = 'talk'
        schedule_item.save()
_required
def leave_org(request, orgslugname):
try:
org = Organization.objects.get(slugname=orgslugname)
except Organization.DoesNotExist:
raise Http404(_('not found'))
pytitionuser = get_session_user(request)
if (pytitionuser not in org.members.all()):
raise Http404(_('not found... |
class TestTableReader():
.parametrize('database, table', [('database', 123), (123, None)])
def test_init_invalid_params(self, database, table):
with pytest.raises(ValueError):
TableReader('id', table, database)
def test_consume(self, spark_client, target_df):
database = 'test_dat... |
class LibvirtKVMCollector(diamond.collector.Collector):
blockStats = {'read_reqs': 0, 'read_bytes': 1, 'write_reqs': 2, 'write_bytes': 3}
vifStats = {'rx_bytes': 0, 'rx_packets': 1, 'rx_errors': 2, 'rx_drops': 3, 'tx_bytes': 4, 'tx_packets': 5, 'tx_errors': 6, 'tx_drops': 7}
def get_default_config_help(self... |
def _make_handshake_rejection(status_code: int, body: Optional[bytes]=None) -> List[Event]:
client = WSConnection(CLIENT)
server = h11.Connection(h11.SERVER)
server.receive_data(client.send(Request(host='localhost', target='/')))
headers = []
if (body is not None):
headers.append(('Content-L... |
def depth_bn_point_bn_relu(x, kernel_size, point_filters, strides=1, dilation=1):
with tf.variable_scope(None, 'depth_bn_point_bn_relu'):
x = slim.separable_conv2d(x, None, kernel_size, depth_multiplier=1, stride=strides, rate=dilation, activation_fn=None, biases_initializer=None)
x = slim.batch_nor... |
class StringType(Type[str]):
def clone(self, **kwargs):
return type(self)()
def filter(self, x, strict=False, allow_downcast=None):
if isinstance(x, str):
return x
else:
raise TypeError('Expected a string!')
def __str__(self):
return 'string'
def m... |
class PlateauLRScheduler(Scheduler):
def __init__(self, optimizer, decay_rate=0.1, patience_t=10, verbose=True, threshold=0.0001, cooldown_t=0, warmup_t=0, warmup_lr_init=0, lr_min=0, mode='max', noise_range_t=None, noise_type='normal', noise_pct=0.67, noise_std=1.0, noise_seed=None, initialize=True):
super... |
class ShardedTensorIOPreparer():
def subdivide_shard(shard: torch.Tensor, offsets: List[int], sizes: List[int], dim: int, max_shard_sz_bytes: int) -> List[Tuple[(torch.Tensor, List[int], List[int])]]:
if (max_shard_sz_bytes <= 0):
raise ValueError(f'max_shard_sz_bytes must be a positive integer ... |
.supported(only_if=(lambda backend: backend.cipher_supported(algorithms._IDEAInternal((b'\x00' * 16)), modes.OFB((b'\x00' * 8)))), skip_message='Does not support IDEA OFB')
class TestIDEAModeOFB():
test_ofb = generate_encrypt_test(load_nist_vectors, os.path.join('ciphers', 'IDEA'), ['idea-ofb.txt'], (lambda key, **... |
class Free(Bloq):
n: int
_property
def signature(self) -> Signature:
return Signature([Register('free', bitsize=self.n, side=Side.LEFT)])
def on_classical_vals(self, free: int) -> Dict[(str, 'ClassicalValT')]:
if (free != 0):
raise ValueError(f'Tried to free a non-zero regist... |
def test_duplicate_sysname():
sys = ct.rss(4, 1, 1)
sys = ct.NonlinearIOSystem(sys.updfcn, sys.outfcn, inputs=sys.ninputs, outputs=sys.noutputs, states=sys.nstates)
with warnings.catch_warnings():
warnings.simplefilter('error')
warnings.filterwarnings('ignore', 'the matrix subclass', categor... |
def calculate_General(material, x, i, T):
parent1 = data[material][4]
parent2 = data[material][5]
p0 = data[parent1][i]
p1 = data[parent2][i]
newData = {}
newData['muMin'] = interpolate_parameter_linear(x, p0['muMin'], p1['muMin'])
newData['muMax'] = interpolate_parameter_linear(x, p0['muMax... |
def set_hook(ctx: MethodSigContext) -> CallableType:
    """Return a replacement method signature taking (__main__.Cls, builtins.int).

    Everything except the argument types (kinds, names, return type, fallback)
    is copied from the context's default signature.
    """
    default = ctx.default_signature
    arg_types = [
        ctx.api.named_generic_type('__main__.Cls', []),
        ctx.api.named_generic_type('builtins.int', []),
    ]
    return CallableType(
        arg_types,
        default.arg_kinds,
        default.arg_names,
        default.ret_type,
        default.fallback,
    )
class TestFillPoly(EndianTest):
def setUp(self):
self.req_args_0 = {'coord_mode': 0, 'drawable': , 'gc': , 'points': [{'y': (- 9194), 'x': (- 10262)}, {'y': (- 8456), 'x': (- 1958)}, {'y': (- 10793), 'x': (- 8617)}], 'shape': 1}
self.req_bin_0 = b'E\x00\x00\x07\x199r}.\x9f\xcf+\x01\x00\x00\x00\xd7\x... |
class TokenApprovalAllItem(scrapy.Item):
    """Scraped record of a token approval event.

    Field names suggest an EVM-style approval log entry (transaction hash,
    log index, block number, addresses) — TODO confirm against the spider
    that populates this item.
    """
    transaction_hash = scrapy.Field()
    log_index = scrapy.Field()
    block_number = scrapy.Field()
    timestamp = scrapy.Field()
    contract_address = scrapy.Field()
    address_from = scrapy.Field()
    address_to = scrapy.Field()
    approved = scrapy.Field()
_bp.route('/repositories/<repopath:repository>/tags/<tag>', methods=['DELETE'])
_auth
_protect
_repository_name()
_repository_state
_v1_push_enabled()
_readonly
def delete_tag(namespace_name, repo_name, tag):
permission = ModifyRepositoryPermission(namespace_name, repo_name)
repository_ref = registry_model.look... |
class IncrementalEncoder(codecs.IncrementalEncoder):
def __init__(self, errors='strict'):
self.errors = errors
self.state = 0
def reset(self):
self.state = 0
def getstate(self):
return self.state
def setstate(self, state):
self.state = state
def encode(self, d... |
class CallCodeEIP150(CallCode):
def compute_msg_gas(self, computation: ComputationAPI, gas: int, to: Address, value: int) -> Tuple[(int, int)]:
extra_gas = self.compute_msg_extra_gas(computation, gas, to, value)
return compute_eip150_msg_gas(computation=computation, gas=gas, extra_gas=extra_gas, val... |
def main():
lcd = LCD(False)
def idle():
lcd.poll()
if lcd.glutkeytime:
(k, t) = lcd.glutkeytime
dt = (gettime() - t)
if (dt > 0.5):
lcd.keypad[k].update(False)
lcd.glutkeytime = False
time.sleep(0.1)
if lcd.use_glut... |
def rollout(model, dataset, opts):
set_decode_type(model, 'greedy')
model.eval()
def eval_model_bat(bat):
with torch.no_grad():
(cost, _) = model(move_to(bat, opts.device))
return cost.data.cpu()
return torch.cat([eval_model_bat(bat) for bat in tqdm(DataLoader(dataset, batch_... |
class RowWiseThreshold(AffinityRefinementOperation):
def __init__(self, p_percentile: float=0.95, thresholding_soft_multiplier: float=0.01, thresholding_type: ThresholdType=ThresholdType.RowMax, thresholding_with_binarization: bool=False, thresholding_preserve_diagonal: bool=False):
self.p_percentile = p_pe... |
def create_token(user, title, expiration=_default_expiration_duration_opt):
if (expiration == _default_expiration_duration_opt):
duration = _default_expiration_duration()
expiration = ((duration + datetime.now()) if duration else None)
token_code = random_string_generator((TOKEN_NAME_PREFIX_LENG... |
class XskipperTestCase(unittest.TestCase):
def setUp(self):
self._old_sys_path = list(sys.path)
class_name = self.__class__.__name__
self.spark = SparkSession.builder.master('local[4]').config('spark.ui.enabled', 'false').appName(class_name).getOrCreate()
def tearDown(self):
self... |
class DownloadDir():
def __init__(self, tmp_path, config):
self._tmp_path = tmp_path
self._config = config
self.location = str(tmp_path)
def read_file(self):
files = list(self._tmp_path.iterdir())
assert (len(files) == 1)
return files[0].read_text(encoding='utf-8'... |
def command_dstandard(command, args):
def setup(parser):
add_source_options(parser)
add_double_options(parser)
(parser, opts, args) = cl_parse(command, args, setup=setup)
(dir1, dir2, smin, smax) = verify_arguements('dstandard', 4, args)
out_filename = opts.pop('output')
gfts = gftes... |
class CompositionClassifier(nn.Module):
def __init__(self, input_dim, num_classes, normalization_sign=False):
super().__init__()
half_input_dim = int((input_dim / 2))
self.mlp = nn.Linear(input_dim, half_input_dim)
self.fc = nn.Linear(half_input_dim, num_classes)
self.normali... |
class TestJSONBasicIO():
def test_write_and_read(self, tmp_path, rng):
file_name = str((tmp_path / 'test.ga.json'))
basis_names = np.array(layout.basis_names, dtype=str)
mv_array = ConformalMVArray([random_point_pair(rng=rng) for i in range(1000)]).value
write_json_file(file_name, mv... |
_required('assets.add_assets', raise_exception=True)
def import_assets(request):
if (request.method == 'POST'):
file = request.FILES.get('file')
filename = os.path.join(settings.BASE_DIR, 'upload', file.name)
if (not os.path.exists(os.path.dirname(filename))):
os.makedirs(os.path... |
class CharOffsetTokenizer(Tokenizer):
def __init__(self, **kwargs):
if (len(kwargs.get('annotators', {})) > 0):
logger.warning(('%s only tokenizes! Skipping annotators: %s' % (type(self).__name__, kwargs.get('annotators'))))
self.annotators = set()
def tokenize(self, text, offsets):
... |
class MEGALock(tm):
def __init__(self, flufl_fp, lifetime):
self.flufl_fp = flufl_fp
self.lifetime = lifetime
self.re_entries = 0
self.thread_lock = threading.Lock()
self.flufl_lock = flufl.lock.Lock(flufl_fp, lifetime=lifetime)
def __enter__(self):
if (self.re_en... |
class FingerPrintPool(GObject.GObject):
__gsignals__ = {'fingerprint-done': (GObject.SignalFlags.RUN_LAST, None, (object,)), 'fingerprint-started': (GObject.SignalFlags.RUN_LAST, None, (object,)), 'fingerprint-error': (GObject.SignalFlags.RUN_LAST, None, (object, object))}
def __init__(self, max_workers=None):
... |
def command_redeploy(args):
(parser, options, args) = cl_parse('redeploy', args)
if (not (len(args) == 2)):
sys.exit(parser.format_help())
(source_store_dir, dest_store_dir) = args
try:
source = gf.Store(source_store_dir)
except gf.StoreError as e:
die(e)
try:
gf.... |
class TestMetastoreMigration():
def test_queries(self, fs_schema, db_schema):
metastore_migration = MetastoreMigration()
expected_query = ['ALTER TABLE test.table_name ADD IF NOT EXISTS columns (new_feature FloatType);', 'ALTER TABLE table_name DROP IF EXISTS (feature1__avg_over_2_days_rolling_windo... |
def javascript_prompt(url, js_msg, default, abort_on):
log.js.debug('prompt: {}'.format(js_msg))
if config.val.content.javascript.modal_dialog:
raise CallSuper
if (not config.val.content.javascript.prompt):
return (False, '')
msg = '<b>{}</b> asks:<br/>{}'.format(html.escape(url.toDispla... |
class SponsorshipQuerySet(QuerySet):
def in_progress(self):
status = [self.model.APPLIED, self.model.APPROVED]
return self.filter(status__in=status)
def approved(self):
return self.filter(status=self.model.APPROVED)
def visible_to(self, user):
contacts = user.sponsorcontact_s... |
_vectorize_node.register(Unbroadcast)
def _vectorize_unbroadcast(op: Unbroadcast, node: Apply, x: TensorVariable) -> Apply:
batched_ndims = (x.type.ndim - node.inputs[0].type.ndim)
old_axes = op.axes
new_axes = ((old_axis + batched_ndims) for old_axis in old_axes)
return cast(Apply, unbroadcast(x, *new_... |
class Datalist(Container):
def __init__(self, options=None, *args, **kwargs):
super(Datalist, self).__init__(*args, **kwargs)
self.type = 'datalist'
self.css_display = 'none'
if options:
self.append(options)
def append(self, options, key=''):
if (type(options)... |
class SA(nn.Module):
def __init__(self, __C):
super(SA, self).__init__()
self.mhatt = MHAtt(__C)
self.ffn = FFN(__C)
self.dropout1 = nn.Dropout(__C['fusion']['mca_DROPOUT_R'])
self.norm1 = LayerNorm(__C['fusion']['mca_HIDDEN_SIZE'])
self.dropout2 = nn.Dropout(__C['fus... |
class UnauthenticatedTests(AuthenticatedAPITestCase):
def setUp(self):
super().setUp()
self.client.force_authenticate(user=None)
def test_detail_lookup_returns_401(self):
url = reverse('api:bot:infraction-detail', args=(6,))
response = self.client.get(url)
self.assertEqua... |
def _build_has_subcmd_parser() -> cmd2.Cmd2ArgumentParser:
has_subcmds_parser = cmd2.Cmd2ArgumentParser(description='Tests as_subcmd_to decorator')
has_subcmds_subparsers = has_subcmds_parser.add_subparsers(dest='subcommand', metavar='SUBCOMMAND')
has_subcmds_subparsers.required = True
return has_subcmd... |
def format_document(client_config, document, range=None):
text = document.source
config = load_config(document.path, client_config)
lines = ([((range['start']['line'] + 1), range['end']['line'])] if range else ())
try:
formatted_text = format_text(text=text, config=config, lines=lines)
excep... |
class InstallCommand(InstallerCommand):
name = 'install'
description = 'Installs the project dependencies.'
options = [*InstallerCommand._group_dependency_options(), option('no-dev', None, 'Do not install the development dependencies. (<warning>Deprecated</warning>)'), option('sync', None, 'Synchronize the ... |
def create_scene(material, x_pos):
scene = gfx.Scene()
m = meshes[0]
m.geometry.texcoords1 = texcoords1
material.light_map = light_map_tex
mesh = gfx.Mesh(m.geometry, material)
scene.add(mesh)
t = gfx.Text(gfx.TextGeometry(material.__class__.__name__, screen_space=True, font_size=20), gfx.Te... |
.parametrize('test_mda', [{'a': np.array([1, 2, 3]), 'd': 123}, {'a': {'b': np.array([4, 5, 6]), 'c': 1.0}, 'd': 'foo'}, {'a': {'b': np.array([1, 2, 3]), 'c': 2.0}, 'd': 'foo'}, {'a': {'b': np.array([1, 2, 3]), 'c': 1.0}, 'd': 'bar'}, np.array([1, 2, 3]), {'a': {'b': 'baz', 'c': 1.0}, 'd': 'foo'}, {'a': {'b': np.array(... |
(short_help='Create environments')
('env_name', default='default')
_obj
def create(app, env_name):
app.ensure_environment_plugin_dependencies()
root_env_name = env_name
project_config = app.project.config
if ((root_env_name not in project_config.envs) and (root_env_name not in project_config.matrices)):... |
def main(argv: List[str]) -> None:
args = parse_args(argv)
input_dir_labels_and_dense = args.input_dir_labels_and_dense
input_dir_sparse = args.input_dir_sparse
output_dir_full_set = args.output_dir_full_set
output_dir_shuffled = args.output_dir_shuffled
rows_per_file = Manager().dict()
proc... |
class Constructor(object):
def __init__(self, add_namespace_maps=False, strict=False, ns_hints=None, ns_ignore=False):
self.stack = []
self.queue = []
self.namespaces = defaultdict(list)
self.add_namespace_maps = add_namespace_maps
self.strict = strict
self.ns_hints =... |
class Effect3496(BaseEffect):
    """Early-run passive implant-set effect: multiplies the agility bonus of
    applied Cyberimplant-group implants by this implant's
    'implantSetThukker' attribute."""

    runTime = 'early'
    type = 'passive'

    def handler(fit, implant, context, projectionRange, **kwargs):
        # Restrict the multiplier to implants in the Cyberimplant group.
        def is_cyberimplant(mod):
            return mod.item.group.name == 'Cyberimplant'

        fit.appliedImplants.filteredItemMultiply(
            is_cyberimplant,
            'agilityBonus',
            implant.getModifiedItemAttr('implantSetThukker'),
            **kwargs,
        )
def test_python_exist(tmp_path, capfd):
if (utils.platform != 'linux'):
pytest.skip('the test is only relevant to the linux build')
machine = platform.machine()
if (machine not in ['x86_64', 'i686']):
pytest.skip('this test is currently only possible on x86_64/i686 due to availability of alt... |
class Checkpointer(object):
def __init__(self, model: nn.Module, save_dir: str='', *, save_to_disk: bool=True, **checkpointables: object):
if isinstance(model, (DistributedDataParallel, DataParallel)):
model = model.module
self.model = model
self.checkpointables = copy.copy(check... |
class CUDACallback(Callback):
def on_train_epoch_start(self, trainer, pl_module):
torch.cuda.reset_peak_memory_stats(trainer.root_gpu)
torch.cuda.synchronize(trainer.root_gpu)
self.start_time = time.time()
def on_train_epoch_end(self, trainer, pl_module, outputs):
torch.cuda.sync... |
class ChatAdministratorRights(TelegramObject):
__slots__ = ('is_anonymous', 'can_manage_chat', 'can_delete_messages', 'can_manage_video_chats', 'can_restrict_members', 'can_promote_members', 'can_change_info', 'can_invite_users', 'can_post_messages', 'can_edit_messages', 'can_pin_messages', 'can_manage_topics', 'ca... |
class MaterialGroup(AutoIndex):
SLABS = auto()
WALLS = auto()
COLUMNS = auto()
FRAME = auto()
WINDOW = auto()
WINDOW_BARS = auto()
WINDOW_PANES = auto()
WINDOW_LOUVERS = auto()
DOOR = auto
DOOR_PANES = auto()
DOOR_PANELS = auto()
DOOR_LOUVERS = auto()
STAIRS = auto()
... |
def remove_erhua(text):
new_str = ''
while re.search('', text):
a = re.search('', text).span()
remove_er_flag = 0
if ER_WHITELIST_PATTERN.search(text):
b = ER_WHITELIST_PATTERN.search(text).span()
if (b[0] <= a[0]):
remove_er_flag = 1
if (r... |
class TestInstruments(InstLibTests):
.parametrize('inst_dict', instruments['download'])
.parametrize('kwarg,output', [(None, 0.0), (dt.timedelta(hours=1), 3600.0)])
def test_inst_start_time(self, inst_dict, kwarg, output):
(_, date) = cls_inst_lib.initialize_test_inst_and_date(inst_dict)
if ... |
def _validate_delegate(line_num: int, line: str) -> MessageIterator:
    """Validate a Delegate header value, yielding messages from ``_email``.

    An empty value yields nothing. A comma-separated value (``", "``
    delimited, with any single trailing comma dropped) is validated entry
    by entry; otherwise the whole line is validated as one address.
    """
    if line == '':
        return
    if ', ' not in line:
        # Single delegate address.
        yield from _email(line_num, line, 'Delegate')
        return
    for entry in line.removesuffix(',').split(', '):
        yield from _email(line_num, entry, 'Delegate')
def test_no_valid_routes():
channels = make_channel_set([NettingChannelStateProperties(canonical_identifier=make_canonical_identifier(channel_identifier=1), partner_state=NettingChannelEndStateProperties(balance=UNIT_TRANSFER_AMOUNT, address=UNIT_TRANSFER_SENDER)), NettingChannelStateProperties(make_canonical_ident... |
class MultiPlotItem(GraphicsLayout.GraphicsLayout):
def __init__(self, *args, **kwds):
GraphicsLayout.GraphicsLayout.__init__(self, *args, **kwds)
self.plots = []
def plot(self, data, **plotArgs):
if (hasattr(data, 'implements') and data.implements('MetaArray')):
if (data.ndi... |
class Logger():
def __init__(self, console_logger):
self.console_logger = console_logger
self.use_tb = False
self.use_sacred = False
self.use_hdf = False
self.stats = defaultdict((lambda : []))
def setup_tb(self, directory_name):
from tensorboard_logger import con... |
class ADE20KSegmentation(SegmentationDataset):
BASE_DIR = 'ADEChallengeData2016'
NUM_CLASS = 150
def __init__(self, root='datasets/ade', split='test', mode=None, transform=None, **kwargs):
super(ADE20KSegmentation, self).__init__(root, split, mode, transform, **kwargs)
root = os.path.join(se... |
def learncurve_with_transforms(previous_run_path, min_segment_dur, logger=None, to_annot=False):
from vak import config
from vak.core.learncurve import train_dur_csv_paths as _train_dur_csv_paths
from vak.logging import log_or_print
previous_run_path = Path(previous_run_path)
toml_path = sorted(prev... |
def _find_compiler_bindir():
patterns = ['C:/Program Files (x86)/Microsoft Visual Studio/*/Professional/VC/Tools/MSVC/*/bin/Hostx64/x64', 'C:/Program Files (x86)/Microsoft Visual Studio/*/BuildTools/VC/Tools/MSVC/*/bin/Hostx64/x64', 'C:/Program Files (x86)/Microsoft Visual Studio/*/Community/VC/Tools/MSVC/*/bin/Hos... |
def preview_contract_view(ModelAdmin, request, pk):
contract = get_object_or_404(ModelAdmin.get_queryset(request), pk=pk)
format = request.GET.get('format', 'pdf')
if (format == 'docx'):
response = render_contract_to_docx_response(request, contract)
else:
response = render_contract_to_pd... |
def get_orders_status(orders: List[str]):
if settings.SIMULATE_PRETIX_DB:
return {}
with connections['pretix'].cursor() as cursor:
cursor.execute('SELECT code, status FROM pretixbase_order WHERE code = ANY(%s)', [orders])
statuses = cursor.fetchall()
return {status[0]: status[1] for ... |
class HNSWFaissSearch(DenseRetrievalFaissSearch):
def __init__(self, model, batch_size: int=128, corpus_chunk_size: int=50000, hnsw_store_n: int=512, hnsw_ef_search: int=128, hnsw_ef_construction: int=200, similarity_metric=faiss.METRIC_INNER_PRODUCT, **kwargs):
super(HNSWFaissSearch, self).__init__(model, ... |
def fix_type_var_tuple_argument(any_type: Type, t: Instance) -> None:
if t.type.has_type_var_tuple_type:
args = list(t.args)
assert (t.type.type_var_tuple_prefix is not None)
tvt = t.type.defn.type_vars[t.type.type_var_tuple_prefix]
assert isinstance(tvt, TypeVarTupleType)
ar... |
_fixtures(WebFixture, PageMenuFixture)
def test_active_state_of_page_links(web_fixture, page_menu_fixture):
fixture = page_menu_fixture
fixture.number_of_pages = 30
fixture.max_page_links = 5
web_fixture.reahl_server.set_app(fixture.wsgi_app)
web_fixture.driver_browser.open('/')
with web_fixture... |
class CMN(nn.Module):
def __init__(self, emodict, worddict, embedding, args):
super(CMN, self).__init__()
self.num_classes = emodict.n_words
self.embeddings = embedding
self.gpu = args.gpu
self.hops = args.hops
self.wind_1 = args.wind1
self.utt_cnn = CNNencode... |
def setup_routes_from_config(app: Bottle, config: RunConfig) -> Bottle:
def _setup_health_endpoint(app, config):
if (config.health_endpoint in [route.rule for route in app.routes]):
raise RuntimeError('Provided health endpoint overlaps with existing routes')
app.route(config.health_endpo... |
class UserRoles(Base):
def export_user_roles(self, format_type: Literal[('json', 'csv', 'xml', 'df')]='json', df_kwargs: Optional[Dict[(str, Any)]]=None):
payload = self._initialize_payload(content='userRole', format_type=format_type)
return_type = self._lookup_return_type(format_type, request_type=... |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.