| code (string, lengths 281 – 23.7M characters) |
|---|
def test_default_values_with_default() -> None:
instance = printer.Dummy()
instance.set_with_default()
expected_sequence = (TXT_NORMAL, TXT_STYLE['size']['normal'], TXT_STYLE['flip'][False], TXT_STYLE['smooth'][False], TXT_STYLE['bold'][False], TXT_STYLE['underline'][0], SET_FONT(b'\x00'), TXT_STYLE['align'... |
def find_linear(c: Constraint) -> tuple[(bool, (TypeVarId | None))]:
if isinstance(c.origin_type_var, TypeVarType):
if isinstance(c.target, TypeVarType):
return (True, c.target.id)
if isinstance(c.origin_type_var, ParamSpecType):
if (isinstance(c.target, ParamSpecType) and (not c.tar... |
def load_dataset_ProbaV(**kws):
kws.setdefault('input_size', (128, 128))
kws.setdefault('output_size', (384, 384))
kws.setdefault('chip_size', kws['input_size'])
kws.setdefault('normalize_lr', True)
kws.setdefault('interpolation', InterpolationMode.BICUBIC)
kws.setdefault('scene_classification_f... |
def test_no_spoiler_encode(obfuscator_no_secret, multiworld_rdvgame):
input_data = copy.deepcopy(multiworld_rdvgame)
input_data = description_migration.convert_to_current_version(input_data)
input_data['info']['has_spoiler'] = False
layout = LayoutDescription.from_json_dict(input_data)
encoded = lay... |
def _spin_symmetric_gaussian_circuit(qubits: Sequence[cirq.Qid], quadratic_hamiltonian: 'openfermion.QuadraticHamiltonian', occupied_orbitals: Tuple[(Sequence[int], Sequence[int])], initial_state: Union[(int, Sequence[int])]) -> cirq.OP_TREE:
n_qubits = len(qubits)
if isinstance(initial_state, int):
ini... |
(frozen=True)
class OverloadedSignature():
signatures: Tuple[(Signature, ...)]
def __init__(self, sigs: Sequence[Signature]) -> None:
object.__setattr__(self, 'signatures', tuple(sigs))
def check_call(self, args: Iterable[Argument], visitor: 'NameCheckVisitor', node: Optional[ast.AST]) -> Value:
... |
class DeeplightAgent(NetworkAgent):
def __init__(self, dic_agent_conf, dic_traffic_env_conf, dic_path, cnt_round=None, best_round=None):
super(DeeplightAgent, self).__init__(dic_agent_conf, dic_traffic_env_conf, dic_path)
self.num_actions = len(self.dic_sumo_env_conf['PHASE'])
self.num_phase... |
def write_data(augmented_data_dir, ori_path, all_new_progs, namedir='withoutconds'):
sub_dir = ori_path.split('/')[(- 2)]
old_name = ori_path.split('/')[(- 1)].split('.')[0]
new_dir = os.path.join(augmented_data_dir, namedir, sub_dir, old_name)
try:
os.makedirs(new_dir)
except:
pass
... |
class RobustTest(unittest.TestCase):
def test_check_common_error(self):
def cause_catchable_error(a):
os.lstat('aoenuthaoeu/aosutnhcg.4fpr,38p')
def cause_uncatchable_error():
ansoethusaotneuhsaotneuhsaontehuaou
result = robust.check_common_error(None, cause_catchable... |
def test_tensor_qobjevo_multiple():
N = 5
t = 1.5
left = QobjEvo([basis(N, 0), [basis(N, 1), 't']])
center = QobjEvo([basis(2, 0).dag(), [basis(2, 1).dag(), 't']])
right = QobjEvo([sigmax()])
as_QobjEvo = tensor(left, center, right)(t)
as_Qobj = tensor(left(t), center(t), right(t))
asser... |
class Effect6652(BaseEffect):
runTime = 'late'
type = ('projected', 'active')
def handler(fit, module, context, projectionRange, **kwargs):
if ('projected' not in context):
return
if fit.ship.getModifiedItemAttr('disallowAssistance'):
return
amount = module.ge... |
def test_project_create_restricted_get(db, client, settings):
settings.PROJECT_CREATE_RESTRICTED = True
settings.PROJECT_CREATE_GROUPS = ['projects']
group = Group.objects.create(name='projects')
user = User.objects.get(username='user')
user.groups.add(group)
client.login(username='user', passwo... |
def pass_calibration_data(session: tf.Session, _):
data_loader = ImageNetDataPipeline.get_val_dataloader()
batch_size = data_loader.batch_size
input_label_tensors = [session.graph.get_tensor_by_name('input_1:0'), session.graph.get_tensor_by_name('labels:0')]
train_tensors = [session.graph.get_tensor_by_... |
class NameConstraints(ExtensionType):
oid = ExtensionOID.NAME_CONSTRAINTS
def __init__(self, permitted_subtrees: (typing.Iterable[GeneralName] | None), excluded_subtrees: (typing.Iterable[GeneralName] | None)) -> None:
if (permitted_subtrees is not None):
permitted_subtrees = list(permitted_... |
def example_resistive_tunnel_junction(show=True):
from solcore.structure import TunnelJunction
from solcore.solar_cell_solver import default_options
import matplotlib.pyplot as plt
my_tunnel = TunnelJunction(R=0.05)
resistive_tunnel_junction(my_tunnel, default_options)
if show:
v = my_tu... |
class BaseCall(Opcode, ABC):
def compute_msg_extra_gas(self, computation: ComputationAPI, gas: int, to: Address, value: int) -> int:
raise NotImplementedError('Must be implemented by subclasses')
def get_call_params(self, computation: ComputationAPI) -> CallParams:
raise NotImplementedError('Mus... |
def test_str_predicates_at_params():
retort1 = Retort(recipe=[name_mapping(skip=['a', 'c'])])
assert (retort1.dump(Foo()) == {'b': 0})
retort2 = Retort(recipe=[name_mapping(skip=P[('a', 'c')])])
assert (retort2.dump(Foo()) == {'b': 0})
retort3 = Retort(recipe=[name_mapping(only=(~ P[('a', 'c')]))])
... |
def test_system_accessors(s):
cut_cause = KCut(Direction.CAUSE, KPartition(Part((0, 2), (0, 1)), Part((1,), (2,))))
cs_cause = compute.subsystem.ConceptStyleSystem(s, Direction.CAUSE, cut_cause)
assert (cs_cause.cause_system.cut == cut_cause)
assert (not cs_cause.effect_system.is_cut)
cut_effect = K... |
def lookup_target(manager: BuildManager, target: str) -> tuple[(list[FineGrainedDeferredNode], (TypeInfo | None))]:
def not_found() -> None:
manager.log_fine_grained(f"Can't find matching target for {target} (stale dependency?)")
modules = manager.modules
items = split_target(modules, target)
if... |
def solve_with_dependent(vars: list[TypeVarId], constraints: list[Constraint], original_vars: list[TypeVarId], originals: dict[(TypeVarId, TypeVarLikeType)]) -> tuple[(Solutions, list[TypeVarLikeType])]:
(graph, lowers, uppers) = transitive_closure(vars, constraints)
dmap = compute_dependencies(vars, graph, low... |
def train():
(train_sentences, dico, char_to_id, id_to_char) = load_sentence(FLAGS.train_file)
if (not os.path.isfile(FLAGS.map_file)):
if FLAGS.pre_emb:
(dico_chars, char_to_id, id_to_char) = augment_with_pretrained(dico.copy(), FLAGS.emb_file)
else:
(sentences, dico, ch... |
class SawyerStickPushV2Policy(Policy):
_fully_parsed
def _parse_obs(obs):
return {'hand_pos': obs[:3], 'unused_1': obs[3], 'stick_pos': obs[4:7], 'unused_2': obs[7:11], 'obj_pos': obs[11:14], 'unused_3': obs[14:(- 3)], 'goal_pos': obs[(- 3):]}
def get_action(self, obs):
o_d = self._parse_obs... |
def test_the_trio_scheduler_is_deterministic_if_seeded(monkeypatch: pytest.MonkeyPatch) -> None:
monkeypatch.setattr(trio._core._run, '_ALLOW_DETERMINISTIC_SCHEDULING', True)
traces = []
for _ in range(10):
state = trio._core._run._r.getstate()
try:
trio._core._run._r.seed(0)
... |
def test_set_catch_exceptions(app: Application, environ: dict[(str, str)]) -> None:
app.auto_exits(False)
os.environ['COLUMNS'] = '120'
tester = ApplicationTester(app)
app.catch_exceptions(True)
assert app.are_exceptions_caught()
tester.execute('foo', decorated=False)
assert (tester.io.fetch... |
class Signals():
def __init__(self, amplitude=1.0, frequency=1.0, start_time=0.0, duration=0.01, dc_offset=0.0):
self.amplitude = amplitude
self.frequency = frequency
self.period = (1.0 / frequency)
self.start_time = start_time
self.duration = duration
self.dc_offset ... |
('os.remove', side_effect=ValueError('test remove failed'))
def test_move_temp_file_err_removing_src(mock_remove, temp_dir, temp_file_creator):
file1 = temp_file_creator()
file2 = temp_dir.joinpath('blah', 'file2')
with patch_logger('pypyr.utils.filesystem', logging.ERROR) as mock_logger_err:
with p... |
def upgrade(saveddata_engine):
try:
saveddata_engine.execute('SELECT ignoreRestrictions FROM fits LIMIT 1')
except sqlalchemy.exc.DatabaseError:
saveddata_engine.execute('ALTER TABLE fits ADD COLUMN ignoreRestrictions BOOLEAN')
saveddata_engine.execute('UPDATE fits SET ignoreRestrictions... |
def evaluate_one_epoch():
stat_dict = {}
ap_calculator_list = [APCalculator(iou_thresh, DATASET_CONFIG.class2type) for iou_thresh in AP_IOU_THRESHOLDS]
net.eval()
for (batch_idx, batch_data_label) in enumerate(TEST_DATALOADER):
for key in batch_data_label:
batch_data_label[key] = bat... |
class LiquidLexer(RegexLexer):
name = 'liquid'
url = '
aliases = ['liquid']
filenames = ['*.liquid']
version_added = '2.0'
tokens = {'root': [('[^{]+', Text), ('(\\{%)(\\s*)', bygroups(Punctuation, Whitespace), 'tag-or-block'), ('(\\{\\{)(\\s*)([^\\s}]+)', bygroups(Punctuation, Whitespace, using... |
class ResponseCloseVerifier(CloseVerifier):
    """A CloseVerifier whose ``open`` hands out wrapped test responses.

    Every call to :meth:`open` records the open event and returns a
    ``ResponseCloseWrapper`` around a fresh ``'spam'`` test response, wired
    to this verifier's close callback.
    """

    def __init__(self, read=None):
        super().__init__()
        # Optional read callable forwarded to each wrapper this verifier creates.
        self._read = read

    def open(self):
        self.opened()
        return ResponseCloseWrapper(
            _response.test_response('spam'), self.closed, self._read)
.usefixtures('mock_plugin_installation')
def test_plugin_dependencies_met(hatch, config_file, helpers, temp_dir):
config_file.model.template.plugins['default']['tests'] = False
config_file.save()
project_name = 'My.App'
with temp_dir.as_cwd():
result = hatch('new', project_name)
assert (resu... |
def load_optimizer_state(optimizer: torch.optim.Optimizer, flat_metadata: Dict, flat_tensors: Sequence[torch.Tensor]):
flat_optimizer_state = []
for elem in flat_metadata:
if ((elem.get('type') == 'tensor') and isinstance(elem.get('index'), int)):
flat_optimizer_state.append(flat_tensors[ele... |
class Solution():
def maxNumberOfBalloons(self, text: str) -> int:
text = text.lower()
char_count = (lambda text, char: text.count(char))
dict_num = dict([(char, char_count(text, char)) for char in text])
try:
b_num = dict_num['b']
a_num = dict_num['a']
... |
class Synchronizer(SynchronizerBase):
def __init__(self, wallet: 'AddressSynchronizer'):
self.wallet = wallet
SynchronizerBase.__init__(self, wallet.network)
def _reset(self):
super()._reset()
self.requested_tx = {}
self.requested_histories = set()
self.requested_... |
(params=['docker', 'podman'], scope='module')
def container_engine(request):
if ((request.param == 'docker') and (not request.config.getoption('--run-docker'))):
pytest.skip('need --run-docker option to run')
if ((request.param == 'podman') and (not request.config.getoption('--run-podman'))):
py... |
def test_defaults(converter: BaseConverter) -> None:
union = Union[(A, B)]
configure_tagged_union(union, converter)
assert (converter.unstructure(A(1), union) == {'_type': 'A', 'a': 1})
assert (converter.unstructure(B('1'), union) == {'_type': 'B', 'a': '1'})
assert (converter.structure({'_type': 'A... |
def test_setting_world_transform():
root = gfx.WorldObject()
child = gfx.WorldObject()
child2 = gfx.WorldObject()
root.add(child)
child.add(child2)
root.local.position = (1, 2, 3)
child.local.position = (4, 4, 4)
child2.local.position = (10, 0, 0)
assert np.allclose(child.local.posit... |
class Effect5917(BaseEffect):
runTime = 'early'
type = ('projected', 'passive')
def handler(fit, beacon, context, projectionRange, **kwargs):
fit.modules.filteredChargeMultiply((lambda mod: mod.charge.requiresSkill('Bomb Deployment')), 'kineticDamage', beacon.getModifiedItemAttr('smartbombDamageMult... |
def init():
ssl_config = QSslConfiguration.defaultConfiguration()
default_ciphers = ssl_config.ciphers()
log.init.vdebug('Default Qt ciphers: {}'.format(', '.join((c.name() for c in default_ciphers))))
good_ciphers = []
bad_ciphers = []
for cipher in default_ciphers:
if _is_secure_cipher... |
class Path(object):
def db_root_dir(database=''):
db_root = '/esat/rat/wvangans/Datasets/'
db_names = ['VOCSegmentation']
if (database == ''):
return db_root
if (database == 'VOCSegmentation'):
return os.path.join(db_root, database)
else:
r... |
class DenseConv2dLayer_5C(nn.Module):
def __init__(self, in_channels, latent_channels, kernel_size=3, stride=1, padding=1, dilation=1, pad_type='zero', activation='lrelu', norm='none', sn=False):
super(DenseConv2dLayer_5C, self).__init__()
self.conv1 = Conv2dLayer(in_channels, latent_channels, kerne... |
class LinearExpression(QuadraticProgramElement):
def __init__(self, quadratic_program: Any, coefficients: Union[(ndarray, spmatrix, List[float], Dict[(Union[(int, str)], float)])]) -> None:
super().__init__(quadratic_program)
self.coefficients = coefficients
def __getitem__(self, i: Union[(int, ... |
class ActivationStatsHook():
def __init__(self, model, hook_fn_locs, hook_fns):
self.model = model
self.hook_fn_locs = hook_fn_locs
self.hook_fns = hook_fns
if (len(hook_fn_locs) != len(hook_fns)):
raise ValueError('Please provide `hook_fns` for each `hook_fn_locs`, ... |
def all_zeros(modules):
    """Return True if a module's weight — and bias, when one exists — are all zero.

    Args:
        modules: An object exposing ``weight`` (a tensor-holding parameter) and
            optionally ``bias``. A missing or ``None`` bias counts as zero.

    Returns:
        bool: True when every element of ``weight`` (and ``bias``, if present)
        equals zero.
    """
    weight_zero = torch.equal(modules.weight.data, torch.zeros_like(modules.weight.data))
    # hasattr() alone is not enough: modules built with bias=False (e.g.
    # nn.Linear/nn.Conv2d) still HAVE a `bias` attribute — it is just None,
    # and `.data` on None would raise AttributeError.
    bias = getattr(modules, 'bias', None)
    if bias is not None:
        bias_zero = torch.equal(bias.data, torch.zeros_like(bias.data))
    else:
        bias_zero = True
    return (weight_zero and bias_zero)
def sa_cns_mpo() -> GoalDirectedBenchmark:
specification = uniform_specification(1, 10, 100)
benchmark_object = cns_mpo()
sa_biased = ScoringFunctionSAWrapper(benchmark_object.objective, SAScoreModifier())
return GoalDirectedBenchmark(name='SA_CNS', objective=sa_biased, contribution_specification=specif... |
class CompleteDirs(zipfile.ZipFile):
def _implied_dirs(names):
parents = itertools.chain.from_iterable(map(_parents, names))
as_dirs = ((p + posixpath.sep) for p in parents)
return _dedupe(_difference(as_dirs, names))
def namelist(self):
names = super(CompleteDirs, self).namelist... |
class CatalogSection(models.Model):
catalog = models.ForeignKey('Catalog', on_delete=models.CASCADE, related_name='catalog_sections')
section = models.ForeignKey('Section', on_delete=models.CASCADE, related_name='section_catalogs')
order = models.IntegerField(default=0)
class Meta():
ordering = ... |
def get_qpmad_demo_problem():
P = np.eye(20)
q = np.ones((20,))
G = np.vstack([np.ones((1, 20)), (- np.ones((1, 20)))])
h = np.hstack([1.5, 1.5])
lb = np.array([1.0, 2.0, 3.0, 4.0, (- 5.0), (- 5.0), (- 5.0), (- 5.0), (- 5.0), (- 5.0), (- 5.0), (- 5.0), (- 5.0), (- 5.0), (- 5.0), (- 5.0), (- 5.0), (-... |
def create_english_spanish_sentences(rstfilename):
def get_paragraph(fd):
lines = []
paragraph = []
for line in fd.read().splitlines():
if any([line.startswith('.. '), line.startswith('==='), line.startswith('---'), line.startswith('***')]):
continue
i... |
def default_blas_ldflags():
def check_required_file(paths, required_regexs):
libs = []
for req in required_regexs:
found = False
for path in paths:
m = re.search(req, path.name)
if m:
libs.append((str(path.parent), m.string[... |
class DeserializationError(JsonsError):
def __init__(self, message: str, source: object, target: Optional[type]):
JsonsError.__init__(self, message)
self._source = source
self._target = target
def source(self) -> object:
return self._source
def target(self) -> Optional[type]:... |
def _calculate_collection_text(pickup: PickupEntry, visual_pickup: PickupEntry, model_style: PickupModelStyle, memo_data: dict[(str, str)]) -> list[str]:
if (model_style == PickupModelStyle.HIDE_ALL):
hud_text = _get_all_hud_text(_conditional_resources_for_pickup(visual_pickup), memo_data)
num_condi... |
class CmdAttack(Command):
key = 'attack'
help_category = 'combat'
def func(self):
if (not is_in_combat(self.caller)):
self.caller.msg('You can only do that in combat. (see: help fight)')
return
if (not is_turn(self.caller)):
self.caller.msg('You can only d... |
_on_failure
.parametrize('number_of_nodes', [2])
.parametrize('channels_per_node', [1])
def test_automatic_secret_registration(raiden_chain: List[RaidenService], token_addresses: List[TokenAddress]) -> None:
(app0, app1) = raiden_chain
token_address = token_addresses[0]
token_network_address = views.get_tok... |
def get_product_types(exp_dir: Path):
pts = []
for pt_file in Path(exp_dir).glob('*.emb.pkl'):
pt_name = pt_file.stem[:(- len('.emb'))]
pts.append(pt_name)
if (not pts):
raise RuntimeError('No embedding file found from exp_dir! Run embedding first.')
else:
logger.warning(... |
class TReplayGain(PluginTestCase):
TIMEOUT = 20
def setUpClass(cls):
cls.mod = cls.modules['ReplayGain']
cls.kind = cls.plugins['ReplayGain'].cls
def tearDownClass(cls):
del cls.mod
del cls.kind
def setUp(self):
self.song = AudioFile({'artist': 'foo', 'album': 'th... |
def create_user(strategy, details, backend, user=None, *args, **kwargs):
if user:
return {'is_new': False}
fields = {name: kwargs.get(name, details.get(name)) for name in backend.setting('USER_FIELDS', USER_FIELDS)}
if (not fields):
return
return {'is_new': True, 'user': strategy.create_... |
class TotalVariationLoss(RegularizationLoss):
def __init__(self, *, exponent: float=2.0, input_guide: Optional[torch.Tensor]=None, score_weight: float=1.0):
super().__init__(input_guide=input_guide, score_weight=score_weight)
self.exponent = exponent
def input_enc_to_repr(self, image: torch.Tens... |
def random_password(length=DEFAULT_PASSWORD_LENGTH, chars=DEFAULT_PASSWORD_CHARS, seed=None):
if (not isinstance(chars, str)):
raise Exception(('%s (%s) is not a text_type' % (chars, type(chars))))
if (seed is None):
random_generator = random.SystemRandom()
else:
random_generator = r... |
def _get_valid_files_to_check(module_name: Union[(List[str], str)]) -> Generator[(AnyStr, None, None)]:
if (module_name == ''):
m = sys.modules['__main__']
spec = importlib.util.spec_from_file_location(m.__name__, m.__file__)
module_name = [spec.origin]
elif isinstance(module_name, str):... |
class testUtils(unittest.TestCase):
def setUp(self):
os.chdir(tests_dir)
os.chdir('dataset01')
pynag.Model.config = None
pynag.Model.cfg_file = './nagios/nagios.cfg'
pynag.Model.ObjectDefinition.objects.get_all()
self.tmp_dir = tempfile.mkdtemp()
os.environ['L... |
def _normalise_autoapi_dirs(autoapi_dirs, srcdir):
normalised_dirs = []
if isinstance(autoapi_dirs, str):
autoapi_dirs = [autoapi_dirs]
for path in autoapi_dirs:
if os.path.isabs(path):
normalised_dirs.append(path)
else:
normalised_dirs.append(os.path.normpath... |
def get_squad_titles(file_name):
    """Add every normalized document title from a SQuAD JSON file to the
    module-level ``squad_titles`` set (side effect; returns nothing).

    Each title is lower-cased, has underscores replaced with spaces, and is
    stripped of surrounding whitespace before being added.

    Args:
        file_name: Path to a SQuAD-format JSON file with a top-level
            ``'data'`` list of documents, each carrying a ``'title'``.
    """
    # Context manager guarantees the handle is closed; the previous
    # codecs.open call leaked it. json.load decodes the UTF-8 text itself.
    with open(file_name, 'r', encoding='utf-8') as f:
        data = json.load(f)
    for doc in tqdm(data['data']):
        title = doc['title'].lower().replace('_', ' ').strip()
        squad_titles.add(title)
class WeightAllocationMixin(object):
def allocate_equal_weights(self, signals: pd.DataFrame, cap: float=1.0) -> pd.DataFrame:
signals_count = signals.abs().sum(axis=1)
divisor = np.where((signals_count != 0), signals_count, 1)
return ((signals.div(divisor, axis=0) * cap) / 1.0)
def alloc... |
def test_clear_list(brew_info):
brew_info.brew_list_opt.update({'abc': 'abc'})
brew_info.brew_list.extend(['abc', 'efg'])
brew_info.brew_full_list.extend(['abc', 'efg'])
brew_info.tap_list.extend(['abc', 'efg'])
brew_info.cask_list.extend(['abc', 'efg'])
brew_info.appstore_list.extend(['abc', 'e... |
def test_PlotItem_maxTraces():
item = pg.PlotItem()
curve1 = pg.PlotDataItem(np.random.normal(size=10))
item.addItem(curve1)
assert curve1.isVisible(), 'curve1 should be visible'
item.ctrl.maxTracesCheck.setChecked(True)
item.ctrl.maxTracesSpin.setValue(0)
assert (not curve1.isVisible()), 'c... |
class ELF32_Rel(ELF_Rel):
Rel_SIZE = (4 * 2)
def __init__(self, buf, endian=0, ptr=None):
if (len(buf) != self.Rel_SIZE):
raise
self.ptr = ptr
self.fmt = ('<II' if (endian == 0) else '>II')
(r_offset, r_info) = struct.unpack(self.fmt, buf)
super(ELF32_Rel, sel... |
class RPNModule(torch.nn.Module):
def __init__(self, cfg):
super(RPNModule, self).__init__()
self.cfg = cfg.clone()
anchor_generator = make_anchor_generator(cfg)
in_channels = cfg.MODEL.BACKBONE.OUT_CHANNELS
rpn_head = registry.RPN_HEADS[cfg.MODEL.RPN.RPN_HEAD]
head =... |
def normalize(text, clean_value=True):
text = text.lower()
text = re.sub('^\\s*|\\s*$', '', text)
text = re.sub('b&b', 'bed and breakfast', text)
text = re.sub('b and b', 'bed and breakfast', text)
if clean_value:
ms = re.findall('\\(?(\\d{3})\\)?[-.\\s]?(\\d{3})[-.\\s]?(\\d{4,5})', text)
... |
def test_plain_stopall(testdir: Any) -> None:
testdir.makeini('\n [pytest]\n asyncio_mode=auto\n ')
testdir.makepyfile('\n import random\n\n def get_random_number():\n return random.randint(0, 100)\n\n def test_get_random_number(mocker):\n patcher ... |
def test_can_access_fixture_from_nested_scope(testdir):
testdir.makepyfile('\n import pytest\n\n def describe_something():\n \n def thing():\n return 42\n\n def describe_a_nested_scope():\n def thing_is_42(thing):\n asse... |
class RepoMirrorConfig(BaseModel):
repository = ForeignKeyField(Repository, index=True, unique=True, backref='mirror')
creation_date = DateTimeField(default=datetime.utcnow)
is_enabled = BooleanField(default=True)
mirror_type = ClientEnumField(RepoMirrorType, default=RepoMirrorType.PULL)
internal_ro... |
_attention('mlp')
class MLPAttention(BaseAttention):
def __init__(self, decoder_hidden_state_dim, context_dim, **kwargs):
super().__init__(decoder_hidden_state_dim, context_dim)
self.context_dim = context_dim
self.attention_dim = kwargs.get('attention_dim', context_dim)
self.encoder_... |
class InvertedResidual(nn.Module):
def __init__(self, in_chs, out_chs, dw_kernel_size=3, stride=1, dilation=1, pad_type='', act_layer=nn.ReLU, noskip=False, exp_ratio=1.0, exp_kernel_size=1, pw_kernel_size=1, se_ratio=0.0, se_kwargs=None, norm_layer=nn.BatchNorm2d, norm_kwargs=None, conv_kwargs=None, drop_path_rate... |
def wrapper_func(eval_func: EvalFunction):
def save_and_reload_tf_sess(*args, **kwargs):
args = list(args)
if ((not args) or (not isinstance(args[0], tf.compat.v1.Session))):
raise ValueError('First argument to eval function should be Session!')
updated_sess = save_and_load_graph... |
class ShardingEnv():
def __init__(self, world_size: int, rank: int, pg: Optional[dist.ProcessGroup]=None) -> None:
self.world_size = world_size
self.rank = rank
self.process_group: Optional[dist.ProcessGroup] = pg
def from_process_group(cls, pg: dist.ProcessGroup) -> 'ShardingEnv':
... |
def test_select_column_using_case_when():
sql = "INSERT INTO tab1\nSELECT CASE WHEN col1 = 1 THEN 'V1' WHEN col1 = 2 THEN 'V2' END\nFROM tab2"
assert_column_lineage_equal(sql, [(ColumnQualifierTuple('col1', 'tab2'), ColumnQualifierTuple("CASE WHEN col1 = 1 THEN 'V1' WHEN col1 = 2 THEN 'V2' END", 'tab1'))])
... |
def test_idle_threads_exit(monkeypatch: pytest.MonkeyPatch) -> None:
monkeypatch.setattr(_thread_cache, 'IDLE_TIMEOUT', 0.0001)
q: Queue[threading.Thread] = Queue()
start_thread_soon((lambda : None), (lambda _: q.put(threading.current_thread())))
seen_thread = q.get()
time.sleep(1)
assert (not s... |
def test_scheduled_ptr_query_dunder_methods():
query75 = _ScheduledPTRQuery('zoomy._hap._tcp.local.', '_hap._tcp.local.', 120, 120, 75)
query80 = _ScheduledPTRQuery('zoomy._hap._tcp.local.', '_hap._tcp.local.', 120, 120, 80)
query75_2 = _ScheduledPTRQuery('zoomy._hap._tcp.local.', '_hap._tcp.local.', 120, 1... |
.wrap
def apply_mc_method_to_jt_dict(method: str, features_dict: Dict[(str, JaggedTensor)], table_to_features: Dict[(str, List[str])], managed_collisions: nn.ModuleDict) -> Dict[(str, JaggedTensor)]:
mc_output: Dict[(str, JaggedTensor)] = features_dict.copy()
for (table, features) in table_to_features.items():
... |
def align_vols__multiple_rotations(v1, m1, v2, m2, L):
if (m1 is None):
m1 = MU.sphere_mask(v1.shape)
if (m2 is None):
m2 = MU.sphere_mask(v2.shape)
assert (v1.shape == m1.shape)
assert (v2.shape == m2.shape)
if (v1.shape != v2.shape):
print(v1.shape)
print(v2.shape)
... |
def AddBN(net, f_name):
    """Register a BatchNorm layer plus a learnable Scale layer after ``f_name``.

    Both layers run in place on the output of ``net[f_name]`` and are stored
    in ``net`` under the keys ``'<f_name>_bn'`` and ``'<f_name>_sc'``.
    """
    norm_key = ('%s_bn' % f_name)
    sc_key = ('%s_sc' % f_name)
    net[norm_key] = L.BatchNorm(net[f_name], in_place=True)
    # Pairing BatchNorm with a separate Scale layer is the usual caffe
    # normalize-then-rescale idiom; the filler seeds the scale at 0.1.
    net[sc_key] = L.Scale(
        net[f_name],
        scale_param={'filler': {'type': 'constant', 'value': 0.1}, 'bias_term': True},
        in_place=True)
class Function(pybamm.Symbol):
def __init__(self, function, *children, name=None, derivative='autograd', differentiated_function=None):
children = list(children)
for (idx, child) in enumerate(children):
if isinstance(child, numbers.Number):
children[idx] = pybamm.Scalar(c... |
class Workflow(base.Workflow):
implementation_class = DjangoImplementationWrapper
log_model = get_default_log_model()
log_model_class = None
def __init__(self, *args, **kwargs):
log_model = kwargs.pop('log_model', self.log_model)
log_model_class = kwargs.pop('log_model_class', self.log_m... |
_model_architecture('fconv_lm', 'fconv_lm_dauphin_gbw')
def fconv_lm_dauphin_gbw(args):
layers = '[(512, 5)]'
layers += ' + [(128, 1, 0), (128, 5, 0), (512, 1, 3)] * 3'
layers += ' + [(512, 1, 0), (512, 5, 0), (1024, 1, 3)] * 3'
layers += ' + [(1024, 1, 0), (1024, 5, 0), (2048, 1, 3)] * 6'
layers +=... |
def CreateDeviceStructure(name=None, role='device', T=293, layers=None, comments='', repeat=1, substrate=DefaultMaterial, reflection=None):
    # NOTE(review): this body looks incomplete — `layers`, `substrate` and
    # `reflection` are accepted but never used, and the populated `output`
    # dict is never returned. Confirm against the upstream source before
    # relying on this function.
    output = {}
    output['name'] = name  # human-readable identifier for the structure
    output['role'] = role  # structure role tag; semantics defined by callers
    output['T'] = T  # temperature — presumably kelvin (293 ≈ room temp); TODO confirm
    output['numlayers'] = 0  # layer counter, initialised before any layers are added
    output['comments'] = comments
    output['repeat'] = repeat  # number of times the layer stack repeats
def test_normalize_rank():
assert (normalize_rank('species') == 'species')
assert (normalize_rank('Sub-Species') == 'subspecies')
assert (normalize_rank('spp') == 'species')
assert (normalize_rank('fam') == 'family')
assert (normalize_rank('sub') == 'sub')
assert (normalize_rank('myrank') == 'my... |
class BatchNormFlow(nn.Module):
def __init__(self, num_inputs, momentum=0.0, eps=1e-05):
super(BatchNormFlow, self).__init__()
self.log_gamma = nn.Parameter(torch.zeros(num_inputs))
self.beta = nn.Parameter(torch.zeros(num_inputs))
self.momentum = momentum
self.eps = eps
... |
def visualize_gt(self, gt_point, gt_instance, ground_angle, image):
image = np.rollaxis(image, axis=2, start=0)
image = (np.rollaxis(image, axis=2, start=0) * 255.0)
image = image.astype(np.uint8).copy()
for y in range(self.p.grid_y):
for x in range(self.p.grid_x):
if (gt_point[0][y]... |
class OriginChecker():
def check_origin(cls, origin, allowed_origins, host):
if cls.is_same_site(origin, host):
return True
return any((fnmatch.fnmatch(origin, pattern) for pattern in allowed_origins))
def is_same_site(origin, host):
' origin host origin host
parse... |
def get_resize_output_image_size(input_image: np.ndarray, output_size: Union[(int, Iterable[int])], keep_aspect_ratio: bool, multiple: int) -> Tuple[(int, int)]:
def constraint_to_multiple_of(val, multiple, min_val=0, max_val=None):
x = (round((val / multiple)) * multiple)
if ((max_val is not None) ... |
def save_knn(neighbors: dict[(str, Tensor)], distances: dict[(str, Tensor)], path: Path) -> None:
assert path.is_dir()
for part in neighbors:
np.save((path / f'distances_{part}.npy'), distances[part].cpu().numpy())
np.save((path / f'neighbors_{part}.npy'), neighbors[part].cpu().numpy().astype('i... |
def load_txt_info(gt_file, img_info):
with open(gt_file, 'r', encoding='utf-8') as f:
anno_info = []
for line in f:
line = line.strip('\n')
ann = line.split(',')
bbox = ann[0:8]
word = line[(len(','.join(bbox)) + 1):]
bbox = [int(coord) for... |
class SharedArgs():
def __init__(self) -> None:
self.results_dir = Defaults.RESULTS_DIR
self.pretrained_path = Defaults.PRETRAINED_PATH
self.features_dir = Defaults.FEATURES_DIR
self.labels_dir = Defaults.LABELS_DIR
self.splits_dir = Defaults.SPLITS_DIR
self.dataset_t... |
def processWindowClause(c, table, prior_lcs, prior_globs):
schema = None
new_schema = None
var_mapping = {}
for v in c.vars:
var_mapping[v] = c.vars[v]
for t in table:
if (not schema):
schema = t.schema
new_schema = dict(t.schema)
for v in c.vars:
... |
class TestDiscovery(object):
_CONTEXT = pyudev.Context()
_DISCOVER = Discovery()
_DISCOVER.setup(_CONTEXT)
(strategies.sampled_from(_DEVICES).filter((lambda x: x.device_number)), strategies.text(':, -/+=').filter((lambda x: x)))
(max_examples=NUM_TESTS)
def test_device_number(self, a_device, a_s... |
class Bottleneck(nn.Module):
expansion: int = 4
def __init__(self, inplanes: int, planes: int, stride: int=1, downsample: Optional[nn.Module]=None, groups: int=1, base_width: int=64, dilation: int=1, norm_layer: Optional[Callable[(..., nn.Module)]]=None) -> None:
super().__init__()
if (norm_laye... |
def evaluate(model: TransformerModelWrapper, eval_data: List[InputExample], config: EvalConfig) -> Dict:
metrics = (config.metrics if config.metrics else ['acc'])
results = model.eval(eval_data=eval_data, per_gpu_eval_batch_size=config.per_gpu_eval_batch_size, n_gpu=config.n_gpu)
predictions = np.argmax(res... |
def convert_norm_act_type(norm_layer, act_layer, norm_kwargs=None):
assert isinstance(norm_layer, (type, str, types.FunctionType, functools.partial))
assert ((act_layer is None) or isinstance(act_layer, (type, str, types.FunctionType, functools.partial)))
norm_act_args = (norm_kwargs.copy() if norm_kwargs e... |
class Parser():
def __init__(self, lexer=Lexer):
self.lex = Lexer()
self.lex.build()
self.tokens = self.lex.tokens
self.parser = yacc.yacc(module=self, start='file_input', tabmodule='pythonql.parser.parsertab', debug=True)
def parse(self, text):
return self.parser.parse(t... |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.