code (string, lengths 281 to 23.7M) |
|---|
def read_data(f, h, endianness='>'):
e = endianness
data = read(f, (1024 - 16))
first = struct.unpack((e + 'i'), data[0:4])[0]
dtype = {1: (e + 'i4'), 2: (e + 'i2'), 4: (e + 'i1')}
if (h.compression not in dtype):
raise GCFLoadError(('Unsupported compression code: %i' % h.compression))
n... |
class VarMaskedFastLSTM(nn.Module):
def __init__(self, input_size: int, hidden_size: int, num_layers: int=1, bias: bool=True, batch_first: bool=False, dropout: Tuple[(float, float)]=(0.0, 0.0), bidirectional: bool=False, initializer: Callable[([Tensor], None)]=None) -> None:
super(VarMaskedFastLSTM, self)._... |
def test_contextsetf_tuple():
context = Context({'ctx1': 'ctxvalue1', 'ctx2': 'ctxvalue2', 'ctx3': 'ctxvalue3', 'contextSetf': {'output': ('k1', 'k2', '{ctx3}', True, False, 44)}})
pypyr.steps.contextsetf.run_step(context)
output = context['output']
assert (output[0] == 'k1')
assert (output[1] == 'k... |
def train_td(dataloader, model, loss_fn, optimizer):
size = len(dataloader.dataset)
model.train()
for (batch, data) in enumerate(dataloader):
(X, y) = (data['images'].contiguous(), data['targets'].contiguous())
pred = model(X)
loss = loss_fn(pred, y)
optimizer.zero_grad()
... |
def main(_):
if (not os.path.exists(FLAGS.checkpoint_dir)):
os.makedirs(FLAGS.checkpoint_dir)
if (not os.path.exists((FLAGS.checkpoint_dir + '/train'))):
os.makedirs((FLAGS.checkpoint_dir + '/train'))
if (not os.path.exists((FLAGS.checkpoint_dir + '/val'))):
os.makedirs((FLAGS.checkp... |
def test_assert_keys_type_value_passes():
info1 = ContextItemInfo(key='key1', key_in_context=True, expected_type=str, is_expected_type=True, has_value=True)
info2 = ContextItemInfo(key='key2', key_in_context=True, expected_type=str, is_expected_type=True, has_value=True)
info3 = ContextItemInfo(key='key3', ... |
class GraphOptimizationApplication(OptimizationApplication):
def __init__(self, graph: Union[(nx.Graph, np.ndarray, List)]) -> None:
self._graph = nx.Graph(graph).copy(as_view=True)
    @_optionals.HAS_MATPLOTLIB.require_in_call
def draw(self, result: Optional[Union[(OptimizationResult, np.ndarray)]]=Non... |
def first_opened_window() -> 'mainwindow.MainWindow':
if (not window_registry):
raise NoWindow()
for idx in range(0, (len(window_registry) + 1)):
window = _window_by_index(idx)
if (not window.tabbed_browser.is_shutting_down):
return window
raise utils.Unreachable() |
def load_base_models(opts):
ckpt = opts.stylegan_path
g_ema = Generator(1024, 512, 8)
g_ema.load_state_dict(torch.load(ckpt)['g_ema'], strict=False)
g_ema.eval()
g_ema = g_ema.cuda()
mean_latent = torch.load(ckpt)['latent_avg'].unsqueeze(0).unsqueeze(0).repeat(1, 18, 1).clone().detach().cuda()
... |
class PlotWidget(GraphicsView):
def __init__(self, **kwds):
super().__init__(**kwds)
plotItem = graphicsItems.PlotItem.PlotItem(enableMenu=False)
self.gfxView.setCentralItem(plotItem)
connect_viewbox_redraw(plotItem.getViewBox(), self.request_draw)
self.plotItem = plotItem
... |
class TestStates(EvenniaTest):
def setUp(self):
super().setUp()
self.room = utils.create_evscaperoom_object('evscaperoom.room.EvscapeRoom', key='Testroom', home=self.room1)
self.roomtag = 'evscaperoom_#{}'.format(self.room.id)
def tearDown(self):
self.room.delete()
def _get_a... |
def load_xml_info(gt_file, img_info):
obj = ET.parse(gt_file)
root = obj.getroot()
anno_info = []
for obj in root.iter('object'):
x = max(0, int(obj.find('bndbox').find('xmin').text))
y = max(0, int(obj.find('bndbox').find('ymin').text))
xmax = int(obj.find('bndbox').find('xmax')... |
class SecretRegistry():
def __init__(self, jsonrpc_client: JSONRPCClient, secret_registry_address: SecretRegistryAddress, contract_manager: ContractManager, block_identifier: BlockIdentifier) -> None:
if (not is_binary_address(secret_registry_address)):
raise ValueError('Expected binary address ... |
def bench_once(client, args, write_profile=None):
n_workers = len(client.scheduler_info()['workers'])
args.base_chunks = (args.base_chunks or n_workers)
args.other_chunks = (args.other_chunks or n_workers)
ddf_base = get_random_ddf(args.chunk_size, args.base_chunks, args.frac_match, 'build', args).persi... |
def setup_everything():
parser = argparse.ArgumentParser()
parser.add_argument('--train_args_file', type=str, default='train_args/baichuan-sft-qlora.json', help='')
args = parser.parse_args()
train_args_file = args.train_args_file
parser = HfArgumentParser((QLoRAArguments, TrainingArguments))
(a... |
class PipelineContainerGroup():
def __init__(self):
self.compute_containers = None
self.render_containers = None
def update(self, wobject, environment, changed):
if ('create' in changed):
self.compute_containers = []
self.render_containers = []
renderf... |
class ManagedWindow(ManagedWindowBase):
def __init__(self, procedure_class, x_axis=None, y_axis=None, linewidth=1, log_fmt=None, log_datefmt=None, **kwargs):
self.x_axis = x_axis
self.y_axis = y_axis
self.log_widget = LogWidget('Experiment Log', fmt=log_fmt, datefmt=log_datefmt)
self... |
def _assert_column_lineage(lr: LineageRunner, column_lineages=None):
expected = set()
if column_lineages:
for (src, tgt) in column_lineages:
src_col: Column = Column(src.column)
if (src.qualifier is not None):
src_col.parent = Table(src.qualifier)
tgt_... |
class PromptTuningConfig(PromptLearningConfig):
prompt_tuning_init: Union[(PromptTuningInit, str)] = field(default=PromptTuningInit.RANDOM, metadata={'help': 'How to initialize the prompt tuning parameters'})
prompt_tuning_init_text: Optional[str] = field(default=None, metadata={'help': 'The text to use for pro... |
def test_create_elevators_field_no_elevator(empty_patches, echoes_game_description):
with pytest.raises(InvalidConfiguration, match='Invalid elevator count. Expected 22, got 0.'):
patch_data_factory._create_elevators_field(empty_patches, echoes_game_description, echoes_game_description.dock_weakness_databas... |
def test_create_proxy_cache_config_with_defaults(initialized_db):
upstream_registry = 'quay.io'
org = create_org(user_name='test', user_email='', org_name='foobar', org_email='')
result = create_proxy_cache_config(org.username, upstream_registry)
assert (result.organization_id == org.id)
assert (res... |
class Effect11398(BaseEffect):
type = 'passive'
def handler(fit, ship, context, projectionRange, **kwargs):
fit.modules.filteredItemBoost((lambda mod: mod.item.requiresSkill('Small Projectile Turret')), 'damageMultiplier', ship.getModifiedItemAttr('shipBonusNavyDestroyerMinmatar1'), skill='Minmatar Dest... |
class TestDERSerialization():
    @pytest.mark.parametrize(('key_path', 'password'), [(['DER_Serialization', 'enc-rsa-pkcs8.der'], b'foobar'), (['DER_Serialization', 'enc2-rsa-pkcs8.der'], b'baz'), (['DER_Serialization', 'unenc-rsa-pkcs8.der'], None), (['DER_Serialization', 'testrsa.der'], None)])
def test_load_der_rsa_private... |
@document_request_params(docs._search_query, docs._project_id, docs._pagination)
def get_users_autocomplete(q: str, **params) -> JsonResponse:
response = get(f'{API_V1}/users/autocomplete', q=q, **params)
users = response.json()
users['results'] = convert_all_timestamps(users['results'])
return users |
class TextureOptions():
def __init__(self):
self.name = 'default'
self.blendu = 'on'
self.blendv = 'on'
self.bm = 1.0
self.boost = 0.0
self.cc = 'off'
self.clamp = 'off'
self.imfchan = 'l'
self.mm = (0.0, 1.0)
self.o = (0.0, 0.0, 0.0)
... |
def sample_embeddings(mean, std, mean_coef, num_objects, embedding_dim):
size = (num_objects, embedding_dim)
x = torch.normal(mean=mean, std=std, size=size)
y = torch.normal(mean=mean, std=std, size=size)
z = torch.normal(mean=(mean * mean_coef), std=std, size=size)
same_dist_embeddings = torch.cat(... |
def test_implode_roundtrip_simple():
segments = FinTS3Parser.explode_segments(TEST_MESSAGES['basic_simple'])
assert (FinTS3Serializer.implode_segments(segments) == TEST_MESSAGES['basic_simple'])
message = FinTS3Parser().parse_message(segments)
assert (FinTS3Serializer().serialize_message(message) == TES... |
class MNLI(Task):
VERSION = 0
DATASET_PATH = 'glue'
DATASET_NAME = 'mnli'
def has_training_docs(self):
return True
def has_validation_docs(self):
return True
def has_test_docs(self):
return False
def training_docs(self):
if (self._training_docs is None):
... |
def test_unicode_params():
    res = substitute_params('SELECT * FROM Δ WHERE name = %s', 'Ψ')
eq_(res, b"SELECT * FROM \xce\x94 WHERE name = N'\xce\xa8'")
res = substitute_params(u"testing ascii (ace) 1=%d 'one'=%s", (1, 'str'))
eq_(res, b"testing ascii (\xc4\x85\xc4\x8d\xc4\x99) 1=1 'one'=N'str'") |
def _command_features_from_confidence_results(split_key: str, feature_names: List[str], dataset_type: str, protocol_name: str, run_name: str, results_dir: str, features_dir: str) -> Command:
concatenated_confidence_features_dir = _concatenated_confidence_features_dir(protocol_name, feature_names, features_dir)
... |
class PoolFormerDropPath(nn.Module):
def __init__(self, drop_prob: Optional[float]=None) -> None:
super().__init__()
self.drop_prob = drop_prob
def forward(self, hidden_states: torch.Tensor) -> torch.Tensor:
return drop_path(hidden_states, self.drop_prob, self.training)
def extra_rep... |
class CriterionCWD(nn.Module):
def __init__(self, s_channels, t_channels, norm_type='none', divergence='mse', temperature=1.0):
super(CriterionCWD, self).__init__()
if (norm_type == 'channel'):
self.normalize = ChannelNorm()
elif (norm_type == 'spatial'):
self.normali... |
def DenseUNet(nb_dense_block=4, growth_rate=48, nb_filter=96, reduction=0.0, dropout_rate=0.0, weight_decay=0.0001, weights_path=None, args=None):
eps = 1.1e-05
compression = (1.0 - reduction)
global concat_axis
if (K.image_dim_ordering() == 'tf'):
concat_axis = 3
img_input = Input(batch... |
class CSRNet_DM(nn.Module):
def __init__(self, load_weights=True):
super(CSRNet_DM, self).__init__()
self.seen = 0
self.frontend_feat = [64, 64, 'M', 128, 128, 'M', 256, 256, 256, 'M', 512, 512, 512]
self.backend_feat = [512, 512, 512, 256, 128, 64]
self.frontend = make_layer... |
def load_checkpoint(model, filename, map_location='cpu', strict=False, logger=None):
checkpoint = _load_checkpoint(filename, map_location)
if (not isinstance(checkpoint, dict)):
raise RuntimeError(f'No state_dict found in checkpoint file {filename}')
if ('state_dict' in checkpoint):
state_di... |
def main_worker(local_rank, args):
rank = local_rank
args.local_rank = local_rank
args.global_rank = local_rank
args.distributed = (args.ngpus_per_node > 1)
if (args.ngpus_per_node > 1):
from torch.distributed import init_process_group
torch.cuda.set_device(local_rank)
init_p... |
class TestNullModem():
    @pytest.fixture(name='use_port')
def get_port_in_class(base_ports):
base_ports[__class__.__name__] += 2
return base_ports[__class__.__name__]
def test_init(self, dummy_protocol):
prot = dummy_protocol()
NullModem(prot)
prot.connection_made.assert_not_called()
... |
def extract_constant(code, symbol, default=(- 1)):
if (symbol not in code.co_names):
return None
name_idx = list(code.co_names).index(symbol)
STORE_NAME = dis.opmap['STORE_NAME']
STORE_GLOBAL = dis.opmap['STORE_GLOBAL']
LOAD_CONST = dis.opmap['LOAD_CONST']
const = default
for byte_co... |
class CentroidCorners():
def __init__(self, gdf, verbose=True):
self.gdf = gdf
results_list = []
results_list_sd = []
def true_angle(a, b, c):
ba = (a - b)
bc = (c - b)
cosine_angle = (np.dot(ba, bc) / (np.linalg.norm(ba) * np.linalg.norm(bc)))
... |
def check_accumulator_overflow(sess: tf.compat.v1.Session, quant_bw: int, accum_bw: int):
most_accum_range_used = 0
most_accum_range_used_layer = None
for op in sess.graph.get_operations():
if (op.type == 'Conv2D'):
weights = utils.op.conv.WeightTensorUtils.get_tensor_as_numpy_data(sess,... |
@scratchpad_config
def test_focus_lost_hide(manager):
manager.c.group['SCRATCHPAD'].dropdown_reconfigure('dd-c')
manager.c.group['SCRATCHPAD'].dropdown_reconfigure('dd-d')
manager.test_window('one')
assert_focused(manager, 'one')
manager.c.group['SCRATCHPAD'].dropdown_toggle('dd-c')
is_spawned(manager, 'dd... |
@parametrize(all_backends)
def test_diagonal(backend):
xnp = get_xnp(backend)
dtype = xnp.float32
diag = xnp.array([0.1, 0.2, 3.0, 4.0], dtype=dtype, device=None)
C = xnp.diag((diag ** 0.5))
B = sqrt(Diagonal(diag=diag), Auto())
rel_error = relative_error(C, B.to_dense())
assert (rel_error < _tol) |
def model_dist(w_1, w_2):
assert (w_1.keys() == w_2.keys()), 'Error: cannot compute distance between dict with different keys'
dist_total = torch.zeros(1).float()
for key in w_1:
dist = torch.norm((w_1[key].cpu() - w_2[key].cpu()))
dist_total += dist.cpu()
return dist_total.cpu().item() |
class PaymentSchema(BaseSchema):
initiator_address = AddressField(missing=None)
target_address = AddressField(missing=None)
token_address = AddressField(missing=None)
amount = IntegerToStringField(required=True)
identifier = IntegerToStringField(missing=None)
secret = SecretField(missing=None)
... |
class TestCacheEnabled(BaseTestCase):
async def test_cache_enable_disable(self):
responses = {}
def set_response(res):
responses[res.url.split('/').pop()] = res
self.page.on('response', set_response)
(await self.page.goto((self.url + 'static/cached/one-style.html'), waitU... |
class LikeFile():
mode = 'rb'
maker = None
def __init__(self, infile, need_seek=None):
self._check_file(infile, need_seek)
self.infile = infile
self.closed = self.infile_closed = None
self.inbuf = b''
self.outbuf = array.array('b')
self.eof = self.infile_eof =... |
class QuantifierEliminator(object):
def __init__(self):
self._destroyed = False
def eliminate_quantifiers(self, formula):
raise NotImplementedError
def __enter__(self):
return self
def __exit__(self, exc_type, exc_val, exc_tb):
self.exit()
def exit(self):
if (... |
def apply_constraints(operator, n_fermions):
n_orbitals = count_qubits(operator)
constraints = constraint_matrix(n_orbitals, n_fermions)
(n_constraints, n_terms) = constraints.get_shape()
vectorized_operator = operator_to_vector(operator)
initial_bound = (numpy.sum(numpy.absolute(vectorized_operator... |
def inception_arg_scope(weight_decay=4e-05, use_batch_norm=True, batch_norm_decay=0.9997, batch_norm_epsilon=0.001):
batch_norm_params = {'decay': batch_norm_decay, 'epsilon': batch_norm_epsilon, 'updates_collections': tf.GraphKeys.UPDATE_OPS}
if use_batch_norm:
normalizer_fn = slim.batch_norm
n... |
class MenuItem():
name: str
description: str
vegetarian: bool
price: float
def __init__(self, name: str, description: str, vegetarian: bool, price: float):
self.name = name
self.description = description
self.vegetarian = vegetarian
self.price = price
def getName(... |
def generate_data_zz(filename):
(backend_result, xdata, qubits, spectators, zz_value, omega) = zz_circuit_execution()
data = {'backend_result': backend_result.to_dict(), 'xdata': xdata.tolist(), 'qubits': qubits, 'spectators': spectators, 'zz': zz_value, 'omega': omega}
with open(filename, 'w') as handle:
... |
class DockerComposeSetup():
def __init__(self, namespace_name, release_name, image_tag_details, runtime_props, image_script_dir, command):
self.namespace_name = namespace_name
self.release_name = release_name
self.image_tag_details = image_tag_details
self.runtime_props = (runtime_pr... |
def _repair_names_unique(names: Iterable[str], quiet: bool=False, sanitizer: Callable=None) -> List[str]:
min_names = _repair_names_minimal(names)
neat_names = [re.sub('(?:(?<!_)_{1,2}\\d+|(?<!_)__)+$', '', name) for name in min_names]
if callable(sanitizer):
neat_names = [sanitizer(name) for name i... |
class CtrlModAddK(Bloq):
k: Union[(int, sympy.Expr)]
mod: Union[(int, sympy.Expr)]
bitsize: Union[(int, sympy.Expr)]
    @cached_property
def signature(self) -> 'Signature':
return Signature([Register('ctrl', bitsize=1), Register('x', bitsize=self.bitsize)])
def build_call_graph(self, ssa: 'SympySy... |
def main():
parser = HfArgumentParser((ModelArguments, DataTrainingArguments, TrainingArguments))
(model_args, data_args, training_args) = parser.parse_args_into_dataclasses()
if (os.path.exists(training_args.output_dir) and os.listdir(training_args.output_dir) and training_args.do_train and (not training_a... |
class Binarizer():
def binarize(filename, dict, consumer, tokenize=tokenize_line, append_eos=True, reverse_order=False, offset=0, end=(- 1), already_numberized=False):
(nseq, ntok) = (0, 0)
replaced = Counter()
def replaced_consumer(word, idx):
if ((idx == dict.unk_index) and (wo... |
class RevocationStore():
START_INDEX = ((2 ** 48) - 1)
def __init__(self, storage):
if (len(storage) == 0):
storage['index'] = self.START_INDEX
storage['buckets'] = {}
self.storage = storage
self.buckets = storage['buckets']
def add_next_entry(self, hsh):
... |
def test_linke_turbidity_corners():
months = pd.DatetimeIndex((('%d/1/2016' % (m + 1)) for m in range(12)))
def monthly_lt_nointerp(lat, lon, time=months):
return clearsky.lookup_linke_turbidity(time, lat, lon, interp_turbidity=False)
assert np.allclose(monthly_lt_nointerp(90, (- 180)), [1.9, 1.9, 1... |
def test_stackednested(tmpdir):
runner = CliRunner()
result = runner.invoke(yadage.steering.main, [os.path.join(str(tmpdir), 'workdir'), 'workflow.yml', '-t', 'tests/testspecs/stackednestings', 'tests/testspecs/stackednestings/input.yml', '-d', 'initdir={}'.format(os.path.abspath('tests/testspecs/stackednesting... |
def get_expected_system_site_packages(session):
base_prefix = session.creator.pyenv_cfg['base-prefix']
base_exec_prefix = session.creator.pyenv_cfg['base-exec-prefix']
old_prefixes = site.PREFIXES
site.PREFIXES = [base_prefix, base_exec_prefix]
system_site_packages = site.getsitepackages()
site.... |
def test_upload_mixin_with_filedata(gl):
class TestClass(UploadMixin, FakeObject):
_upload_path = '/tests/{id}/uploads'
url = '
responses.add(method=responses.POST, url=url, json={'id': 42, 'file_name': 'test.txt', 'file_content': 'testing contents'}, status=200, match=[responses.matchers.query_para... |
def test_update(dict_tmp_path, monkeypatch):
monkeypatch.setattr(dictcli, 'download_dictionary', (lambda _url, dest: pathlib.Path(dest).touch()))
(dict_tmp_path / 'pl-PL-2-0.bdic').touch()
assert (polish().local_version < polish().remote_version)
dictcli.update(langs())
assert (polish().local_versio... |
_module()
class MixVisionTransformer(BaseModule):
def __init__(self, img_size=224, patch_size=16, in_chans=3, num_classes=1000, embed_dims=[64, 128, 256, 512], num_heads=[1, 2, 4, 8], mlp_ratios=[4, 4, 4, 4], qkv_bias=False, qk_scale=None, drop_rate=0.0, attn_drop_rate=0.0, drop_path_rate=0.1, norm_layer=nn.LayerNo... |
@pytest.mark.parametrize('ra, rb', [([11, 20, 14], [11, 20, 14]), ([11, 20, 14], [14, 16, 15]), ([11, 20, 14], [15, 19, 12]), ([14, 16, 15], [15, 19, 12])])
def test_dominance(ra, rb):
result = rank.dominance(ra, rb, reverse=False)
assert (result.eq == np.equal(ra, rb).sum())
assert (result.aDb == np.greater(ra, rb).su... |
def test_opt_in_args(pm: PluginManager) -> None:
class Api():
def hello(self, arg1, arg2, common_arg):
class Plugin1():
def hello(self, arg1, common_arg):
return (arg1 + common_arg)
class Plugin2():
def hello(self, arg2, common_arg):
return (arg2 + common_arg)... |
@pytest.mark.isolated
def test_build_with_dep_on_console_script(tmp_path, demo_pkg_inline, capfd, mocker):
toml = textwrap.dedent('\n [build-system]\n requires = ["demo_pkg_inline"]\n build-backend = "build"\n backend-path = ["."]\n\n [project]\n description = "Factory A code generato... |
def get_org_latent(image_path):
model_path = 'restyle_encoder/pretrained_models/restyle_psp_ffhq_encode.pt'
transform = transforms.Compose([transforms.Resize((256, 256)), transforms.ToTensor(), transforms.Normalize([0.5, 0.5, 0.5], [0.5, 0.5, 0.5])])
with torch.no_grad():
ckpt = torch.load(model_pat... |
class nnUNetTrainer_4000epochs(nnUNetTrainer):
def __init__(self, plans: dict, configuration: str, fold: int, dataset_json: dict, unpack_dataset: bool=True, device: torch.device=torch.device('cuda')):
super().__init__(plans, configuration, fold, dataset_json, unpack_dataset, device)
self.num_epochs ... |
class UniformMultiHeadAttention(nn.Module):
def __init__(self, h, d_model, attn_p=0.1):
super(UniformMultiHeadAttention, self).__init__()
self.h = h
self.d = d_model
assert ((d_model % h) == 0)
self.d_head = (d_model // h)
self.fc_query = Bottle(Linear(d_model, (h * s... |
def classification_report(data, model, session, sample=False):
(_, _, _, a, x, _, f, _, _) = data.next_validation_batch()
(n, e) = session.run(([model.nodes_gumbel_argmax, model.edges_gumbel_argmax] if sample else [model.nodes_argmax, model.edges_argmax]), feed_dict={model.edges_labels: a, model.nodes_labels: x... |
@pytest.mark.functions
def test_logit():
s = pd.Series([0, 0.1, 0.2, 0.3, 0.5, 0.9, 1, 2])
inside = ((0 < s) & (s < 1))
valid = np.array([0.1, 0.2, 0.3, 0.5, 0.9])
ans = np.log((valid / (1 - valid)))
with pytest.raises(RuntimeError):
s.logit(error='raise')
with pytest.warns(RuntimeWarning):
... |
class LatentCodesPool():
def __init__(self, pool_size):
self.pool_size = pool_size
if (self.pool_size > 0):
self.num_ws = 0
self.ws = []
def query(self, ws):
if (self.pool_size == 0):
return ws
return_ws = []
for w in ws:
if... |
class SlowLockMock():
default_delay_time = 3
def __init__(self, client, lock, delay_time=None):
self._client = client
self._lock = lock
self.delay_time = (self.default_delay_time if (delay_time is None) else delay_time)
def acquire(self, timeout=None):
sleep = self._client.ha... |
class Config(object):
SECRET_KEY = os.environ.get('SECRET_KEY')
MAIL_SERVER = os.environ.get('MAIL_SERVER')
MAIL_PORT = int((os.environ.get('MAIL_PORT') or 25))
MAIL_USE_TLS = (os.environ.get('MAIL_USE_TLS') is not None)
MAIL_USERNAME = os.environ.get('MAIL_USERNAME')
MAIL_PASSWORD = os.environ.... |
def DataList(items, filter_by_priority=None, sort_by_priority=False):
if (filter_by_priority is not None):
items = [i for i in items if (i['priority'] <= filter_by_priority)]
if sort_by_priority:
items = sorted(items, key=(lambda i: i['priority']))
list_item_elements = [html.li(i['text']) fo... |
def pylsp_lint(workspace, document):
with workspace.report_progress('lint: pycodestyle'):
config = workspace._config
settings = config.plugin_settings('pycodestyle', document_path=document.path)
log.debug('Got pycodestyle settings: %s', settings)
opts = {'exclude': settings.get('excl... |
def read_ecdc_header(fo: tp.IO[bytes]):
header_bytes = _read_exactly(fo, _encodec_header_struct.size)
(magic, version, meta_size) = _encodec_header_struct.unpack(header_bytes)
if (magic != _ENCODEC_MAGIC):
raise ValueError('File is not in ECDC format.')
if (version != 0):
raise ValueErro... |
def _p(solver, partInfo, subname, shape, retAll=False):
if (not solver):
if (not utils.hasCenter(shape)):
return 'a vertex or circular edge/face'
if utils.isDraftWire(partInfo):
if (utils.draftWireVertex2PointIndex(partInfo, subname) is None):
raise RuntimeErr... |
def get_wheels_for_support_versions(folder):
downloader = WheelDownloader((folder / 'wheel-store'))
downloader.run(HERE.parent, VERSIONS)
packages = defaultdict((lambda : defaultdict((lambda : defaultdict(WheelForVersion)))))
for (version, collected) in downloader.collected.items():
for (pkg, pl... |
class NCLLexer(RegexLexer):
name = 'NCL'
aliases = ['ncl']
filenames = ['*.ncl']
mimetypes = ['text/ncl']
url = '
version_added = '2.2'
flags = re.MULTILINE
tokens = {'root': [(';.*\\n', Comment), include('strings'), include('core'), ('[a-zA-Z_]\\w*', Name), include('nums'), ('[\\s]+', T... |
class ReadabilityOAuth(BaseOAuth1):
name = 'readability'
ID_KEY = 'username'
AUTHORIZATION_URL = f'{READABILITY_API}/oauth/authorize/'
REQUEST_TOKEN_URL = f'{READABILITY_API}/oauth/request_token/'
ACCESS_TOKEN_URL = f'{READABILITY_API}/oauth/access_token/'
EXTRA_DATA = [('date_joined', 'date_joi... |
class KeatingStreamFlowParameter(Parameter):
def __init__(self, model, storage_node, levels, transmissivity, coefficient=1.0, **kwargs):
super(KeatingStreamFlowParameter, self).__init__(model, **kwargs)
self.storage_node = storage_node
if (len(levels) != len(transmissivity)):
rai... |
def test_overriding_generated_unstructure():
converter = Converter()
class Inner():
a: int
class Outer():
i: Inner
inst = Outer(Inner(1))
converter.unstructure(inst)
converter.register_unstructure_hook(Inner, (lambda _: {'a': 2}))
r = converter.structure(converter.unstructure... |
def init(model_s, model_t, init_modules, criterion, train_loader, logger, opt):
model_t.eval()
model_s.eval()
init_modules.train()
if torch.cuda.is_available():
model_s.cuda()
model_t.cuda()
init_modules.cuda()
cudnn.benchmark = True
if ((opt.model_s in ['resnet8', 'r... |
def get_parser():
parser = argparse.ArgumentParser(description='Feature extraction with reid models')
parser.add_argument('--config-file', metavar='FILE', help='path to config file')
parser.add_argument('--parallel', action='store_true', help='If use multiprocess for feature extraction.')
parser.add_arg... |
def adjust_lr(args, optimizer, epoch):
if ('cifar' in args.dataset):
change_points = [80, 120, 160]
elif ('indoor' in args.dataset):
change_points = [60, 80, 100]
elif ('dog' in args.dataset):
change_points = [60, 80, 100]
elif ('voc' in args.dataset):
change_points = [30... |
class RemoteSendEvent(ModbusEvent):
def __init__(self, **kwargs):
self.read = kwargs.get('read', False)
self.slave_abort = kwargs.get('slave_abort', False)
self.slave_busy = kwargs.get('slave_busy', False)
self.slave_nak = kwargs.get('slave_nak', False)
self.write_timeout = k... |
class RAW_Loss(Loss):
def __init__(self, mode, **kwargs):
super().__init__()
assert (mode in ['l1', 'l2', 'mse'])
self.criterion = (f.l1_loss if (mode == 'l1') else f.mse_loss)
def compute(self, model, mixture_signal, target_signal):
target_signal_hat = model.separate(mixture_sig... |
def test_020_parseStation_legal():
assert (Metar.Metar('KEWR').station_id == 'KEWR')
assert (Metar.Metar('METAR KEWR').station_id == 'KEWR')
assert (Metar.Metar('METAR COR KEWR').station_id == 'KEWR')
assert (Metar.Metar('BIX1').station_id == 'BIX1')
assert (Metar.Metar('K256').station_id == 'K256') |
def _parse_atomic(source, info):
saved_flags = info.flags
saved_ignore = source.ignore_space
try:
subpattern = _parse_pattern(source, info)
finally:
source.ignore_space = saved_ignore
info.flags = saved_flags
source.expect(u')')
return make_atomic(info, subpattern) |
@pytest.mark.parametrize('rast_name', ['py_satellite', 'py_semantic', 'box_debug', 'satellite_debug'])
@pytest.mark.parametrize('dataset_cls', [EgoDataset, AgentDataset])
def test_dataset_rasterizer(rast_name: str, dataset_cls: Callable, zarr_dataset: ChunkedDataset, dmg: LocalDataManager, cfg: dict) -> None:
rasterizer = build_rasterizer... |
class TerminalDef(Serialize):
__serialize_fields__ = ('name', 'pattern', 'priority')
__serialize_namespace__ = (PatternStr, PatternRE)
name: str
pattern: Pattern
priority: int
def __init__(self, name: str, pattern: Pattern, priority: int=TOKEN_DEFAULT_PRIORITY) -> None:
assert isinstance... |
class StripTrailingSpaceFormatter(Formatter):
patterns = ('*.c', '*.cpp', '*.h', '*.hpp', '*.py', 'CMakelists.txt')
def format(cls, filename, data):
lines = data.split('\n')
for i in range(len(lines)):
lines[i] = (lines[i].rstrip() + '\n')
return ''.join(lines) |
class XLMRobertaXLConfig(PretrainedConfig):
model_type = 'xlm-roberta-xl'
def __init__(self, vocab_size=250880, hidden_size=2560, num_hidden_layers=36, num_attention_heads=32, intermediate_size=10240, hidden_act='gelu', hidden_dropout_prob=0.1, attention_probs_dropout_prob=0.1, max_position_embeddings=514, type... |
@macos_kernel_api(params={'from_kernel': BOOL, 'string_is_canonical': BOOL, 'namestring': POINTER, 'namestringlen': SIZE_T, 'name': POINTER, 'namelen': INT, 'req': POINTER})
def hook__sysctl_root(ql, address, params):
if (params['string_is_canonical'] == 'True'):
ev_name = ql.mem.read(params['namestring'], params... |
class Compute(Resource):
def __init__(self, id=None, session=None, _adaptor=None, _adaptor_state={}, _ttype=None):
self._resrc = super(Compute, self)
self._resrc.__init__(id, session, _adaptor, _adaptor_state, _ttype)
if (self.rtype != c.COMPUTE):
raise se.BadParameter(('Cannot i... |
def trivially_double_commutes_dual_basis(term_a, term_b, term_c):
(modes_acted_on_by_term_b,) = term_b.terms.keys()
(modes_acted_on_by_term_c,) = term_c.terms.keys()
modes_touched_c = [modes_acted_on_by_term_c[0][0], modes_acted_on_by_term_c[1][0]]
if (not ((modes_acted_on_by_term_b[0][0] in modes_touch... |
class TestHeaderIndexing(object):
    example_request_headers = [HeaderTuple(u':authority', u'example.com'), HeaderTuple(u':path', u'/'), HeaderTuple(u':scheme', u'https'), HeaderTuple(u':method', u'GET')]
bytes_example_request_headers = [HeaderTuple(b':authority', b'example.com'), HeaderTuple(b':path', b'/'), HeaderTuple(... |
class Lumped(BaseThermal):
def __init__(self, param, options=None):
super().__init__(param, options=options)
pybamm.citations.register('Timms2021')
def get_fundamental_variables(self):
T_vol_av = pybamm.Variable('Volume-averaged cell temperature [K]', scale=self.param.T_ref, print_name='... |
def test_profile_function():
x = [(- 5), (- 1), 0, 1, 3, 5, 7, 9, 10, 11, 15]
centre = 5
field_width = 10
penumbra_width = 2
expected_profile_values = [0, 0.2, 0.5, 0.8, 1, 1, 1, 0.8, 0.5, 0.2, 0]
profile = create_profile_function(centre, field_width, penumbra_width)
np.testing.assert_allclo... |