code stringlengths 281 23.7M |
|---|
def test_standalone_mock(testdir: Any) -> None:
pytest.importorskip('mock')
testdir.makepyfile('\n import mock\n\n def test_foo(mocker):\n assert mock.MagicMock is mocker.MagicMock\n ')
testdir.makeini('\n [pytest]\n mock_use_standalone_module = true\n ')
res... |
.parametrize('x, exc', [(set_test_value(pt.dmatrix(), (lambda x: x.T.dot(x))(x)), None), (set_test_value(pt.dmatrix(), (lambda x: x.T.dot(x))(y)), None), (set_test_value(pt.lmatrix(), (lambda x: x.T.dot(x))(rng.integers(1, 10, size=(3, 3)).astype('int64'))), None)])
def test_Eig(x, exc):
g = nlinalg.Eig()(x)
if... |
class Parameters():
def __init__(self, params):
self.params = params
def __getattr__(self, x):
return self.params[x.lower()]
def __str__(self):
return json.dumps(self.params, indent=2)
def __repr__(self):
return str(self)
def __getstate__(self):
return self.pa... |
(unsafe_hash=True, init=False)
class SequenceValue(GenericValue):
members: Tuple[(Tuple[(bool, Value)], ...)]
def __init__(self, typ: Union[(type, str)], members: Sequence[Tuple[(bool, Value)]]) -> None:
if members:
args = (unite_values(*[typ for (_, typ) in members]),)
else:
... |
class CommentForm(Form):
def __init__(self, view):
super().__init__(view, 'myform')
comment = Comment()
self.use_layout(FormLayout())
self.layout.add_input(TextInput(self, comment.fields.greyed_out_field))
self.define_event_handler(comment.events.greyed_out_event)
sel... |
class Stack(object):
def __init__(self, mode='3D'):
assert (mode in ['3D', 'TSN+2D', '2D', 'TSN+3D']), 'Unsupported mode: {}'.format()
self.mode = mode
def __call__(self, img_group):
assert (img_group[0].mode == 'RGB'), 'Must read images in RGB mode.'
if ('3D' in self.mode):
... |
_model(name='ncsnv2_256')
class NCSNv2_256(nn.Module):
config: ml_collections.ConfigDict
def __call__(self, x, labels, train=True):
config = self.config
nf = config.model.nf
act = get_act(config)
normalizer = get_normalization(config)
sigmas = get_sigmas(config)
i... |
class WindowUndoManager(QObject):
def __init__(self, parent=None):
super().__init__(parent)
self._undos: MutableSequence[_WindowUndoEntry] = collections.deque()
objects.qapp.window_closing.connect(self._on_window_closing)
config.instance.changed.connect(self._on_config_changed)
_... |
class VGGBlock(nn.Module):
def __init__(self, in_channels, middle_channels, out_channels):
super().__init__()
self.relu = nn.ReLU(inplace=True)
self.conv1 = nn.Conv2d(in_channels, middle_channels, 3, padding=1)
self.bn1 = nn.BatchNorm2d(middle_channels)
self.conv2 = nn.Conv2d... |
def test_no_rerun_on_class_setup_error_without_reruns(testdir):
testdir.makepyfile('\n class TestFoo(object):\n \n def setup_class(cls):\n assert False\n\n def test_pass():\n pass')
result = testdir.runpytest('--reruns', '0')
assert_outco... |
class SendBookmark(discord.ui.View):
def __init__(self, author: discord.Member, channel: discord.TextChannel, target_message: discord.Message, title: str):
super().__init__()
self.clicked = []
self.channel = channel
self.target_message = target_message
self.title = title
... |
def _interpolate_no_angles(clons, clats, src_resolution, dst_resolution):
interpolation_functions = {}
try:
from geotiepoints.simple_modis_interpolator import modis_1km_to_250m as simple_1km_to_250m
from geotiepoints.simple_modis_interpolator import modis_1km_to_500m as simple_1km_to_500m
ex... |
class MaxCliqueLib():
def __init__(self, args):
dir_path = os.path.dirname(os.path.realpath(__file__))
try:
self.lib = ctypes.CDLL('{}/build/lib_max_clique.so'.format(dir_path))
except OSError:
self.lib = ctypes.CDLL('{}/build/lib_max_clique.dylib'.format(dir_path))
... |
class AccelerationCondition(_EntityTriggerType):
def __init__(self, value, rule, direction=None):
self.value = convert_float(value)
self.rule = convert_enum(rule, Rule)
self.direction = convert_enum(direction, DirectionalDimension, True)
def __eq__(self, other):
if isinstance(oth... |
class _UnicodeCompiler(_Compiler):
compile_is = _binary_compiler('%s is %s')
compile_isnot = _binary_compiler('%s is not %s')
compile_and = _binary_compiler('%s and %s')
compile_or = _binary_compiler('%s or %s')
compile_mod = _binary_compiler('%s mod %s')
def compile_not(self, relation):
... |
def get_model_list(dirname, key):
if (os.path.exists(dirname) is False):
print(('no dir: %s' % dirname))
return None
gen_models = [os.path.join(dirname, f) for f in os.listdir(dirname) if (os.path.isfile(os.path.join(dirname, f)) and (key in f) and ('.pth' in f))]
if (gen_models is None):
... |
def phase_markers(events, stations, phase_names='Pg,P,PKP,PKIKP,S,PP,SS'.split(',')):
mod = cake.load_model()
phases = []
for name in phase_names:
for phase in cake.PhaseDef.classic(name):
phase.name = name
phases.append(phase)
markers = []
for event in events:
... |
.parametrize('use_chromium', [True, False], ids=['chromium', 'legacy'])
def test_launch(use_chromium, testdir):
file_test = testdir.makepyfile("\n import pytest\n\n .nondestructive\n def test_pass(webtext):\n assert webtext == u'Success!'\n\n \n def edge_options(edge_op... |
.parametrize('status_code', (200, 201))
.parametrize('mock_release_id', range(3))
def test_upload_asset_succeeds(default_gitea_client, example_changelog_md, status_code, mock_release_id):
urlparams = {'name': example_changelog_md.name}
with requests_mock.Mocker(session=default_gitea_client.session) as m:
... |
class GeneratorBlock(chainer.Chain):
def __init__(self, in_ch, out_ch):
super(GeneratorBlock, self).__init__()
with self.init_scope():
w = chainer.initializers.Normal(0.02)
self.c0 = L.Convolution2D(in_ch, out_ch, 3, 1, 1, initialW=w)
self.c1 = L.Convolution2D(out... |
def load_weights(var_list, weights_file):
with open(weights_file, 'rb') as fp:
np.fromfile(fp, dtype=np.int32, count=5)
weights = np.fromfile(fp, dtype=np.float32)
ptr = 0
i = 0
assign_ops = []
while (i < (len(var_list) - 1)):
var1 = var_list[i]
var2 = var_list[(i + 1... |
class BaseTracker(object):
def __init__(self, **kwargs):
self._prev_instances = None
self._matched_idx = set()
self._matched_ID = set()
self._untracked_prev_idx = set()
self._id_count = 0
def from_config(cls, cfg: CfgNode_):
raise NotImplementedError('Calling Base... |
def test_get_schema1_manifest_no_matching_list():
manifestlist = DockerSchema2ManifestList(Bytes.for_string_or_unicode(NO_AMD_MANIFESTLIST_BYTES))
assert (len(manifestlist.manifests(retriever)) == 1)
assert (manifestlist.media_type == 'application/vnd.docker.distribution.manifest.list.v2+json')
assert (... |
class OverlayControlMixin():
STYLE_SHEET_COMMON = '\n QPushButton { border-width: 1px; padding: 0px; margin: 0px; }\n '
STYLE_SHEET_LIGHT = '\n QPushButton { border: 1px solid transparent; }\n QPushButton:hover { border: 1px solid #3daee9; }\n '
def __init__(self, middle: bool=False):
... |
def upload_script(session, specs_json, args):
script_path = args.script_path
s3_dest = args.s3_dest
if (not s3_dest):
s3_bucket = specs_json['s3_bucket']
s3_prefix = os.path.join(specs_json['s3_prefix'], getpass.getuser())
else:
s3_bucket = urlparse(s3_dest).netloc
s3_pre... |
def test_get_rooms(client, mocker):
room_available = Room(number='A', room_status=RoomStatus.AVAILABLE, image_url='img1')
room_available.id = 1
display_query = mocker.MagicMock()
display_query.get_rooms.return_value = [room_available]
with client.app.container.display.query.override(display_query):
... |
class AdType(models.Model):
title = models.CharField(max_length=255, verbose_name=_('Title'))
name = models.CharField(max_length=255, verbose_name=_('ID'))
template = models.CharField(max_length=255, verbose_name=_('Template'), help_text=_('Path to template'))
class Meta():
unique_together = ('n... |
def resume_model(resume_path, model, optimizer, scheduler):
print(f"=> loading checkpoint '{resume_path}'")
checkpoint = torch.load(resume_path)
start_epoch = checkpoint['epoch']
best_acc1 = checkpoint['best_acc1']
best_epoch = checkpoint['best_epoch']
model.load_state_dict(checkpoint['model'])
... |
class noisy_cifar10(CIFAR10):
def __init__(self, root, train=True, transform=None, mode='label', label_path=None, indices=None, probs=None, psl=None, **kwargs):
assert (mode in ['warmup', 'eval_train', 'relabel', 'label', 'unlabel', 'test'])
super().__init__(root, train=train, transform=transform, *... |
class TestLogging(object):
def setup_method(self):
self.testInst = pysat.Instrument('pysat', 'testing', num_samples=10, clean_level='clean', update_files=False, use_header=True)
self.out = ''
return
def teardown_method(self):
del self.testInst, self.out
return
def tes... |
def split_shot_query(data, way, shot, query, ep_per_batch=1):
img_shape = data.shape[1:]
data = data.view(ep_per_batch, way, (shot + query), *img_shape)
(x_shot, x_query) = data.split([shot, query], dim=2)
x_shot = x_shot.contiguous()
x_query = x_query.contiguous().view(ep_per_batch, (way * query), ... |
class Config(object):
__beginning_of_object = re.compile('^\\s*define\\s+(\\w+)\\s*\\{?(.*)$')
def __init__(self, cfg_file=None, strict=False):
self.cfg_file = cfg_file
self.strict = strict
if (self.cfg_file is None):
self.cfg_file = self.guess_cfg_file()
self.data = ... |
class CoroutineBasedSession(Session):
event_loop_thread_id = None
def get_current_session(cls) -> 'CoroutineBasedSession':
if ((_context.current_session is None) or (cls.event_loop_thread_id != threading.current_thread().ident)):
raise SessionNotFoundException('No session found in current co... |
class ConvModule(nn.Module):
def __init__(self, in_channels, out_channels, kernel_size, stride=1, padding=0, dilation=1, groups=1, bias='auto', conv_cfg=None, norm_cfg=None, act_cfg=dict(type='ReLU'), inplace=True, order=('conv', 'norm', 'act')):
super(ConvModule, self).__init__()
assert ((conv_cfg ... |
(frozen=True)
class FuncFixtureInfo():
__slots__ = ('argnames', 'initialnames', 'names_closure', 'name2fixturedefs')
argnames: Tuple[(str, ...)]
initialnames: Tuple[(str, ...)]
names_closure: List[str]
name2fixturedefs: Dict[(str, Sequence['FixtureDef[Any]'])]
def prune_dependency_tree(self) -> ... |
def get_date_range(client: SparkClient, start_date: Union[(str, datetime)], end_date: Union[(str, datetime)], step: int=None) -> DataFrame:
day_in_seconds = ((60 * 60) * 24)
step = (step or day_in_seconds)
start_date = (start_date if isinstance(start_date, str) else start_date.strftime('%Y-%m-%d'))
end_... |
class _Header():
def __init__(self, name, revMajor, revMinor):
self.name = name
self.revMajor = revMajor
self.revMinor = revMinor
def __eq__(self, other):
if isinstance(other, _Header):
if ((self.name == other.name) and (self.revMajor == other.revMajor) and (self.revM... |
class ResNetCam(nn.Module):
def __init__(self, block, layers, args, num_classes=1000, large_feature_map=True):
super(ResNetCam, self).__init__()
self.args = args
stride_l3 = (1 if large_feature_map else 2)
self.inplanes = 64
self.conv1 = nn.Conv2d(3, self.inplanes, kernel_siz... |
def scheduler(function=None, impl='entangle.scheduler.DefaultScheduler', cpus=12, algorithm='first_available', max_time=(60 * 60)) -> Callable:
_scheduler = import_string(impl)()
logging.debug('scheduler: Requesting %s cpus', cpus)
def decorator(func, cpus=12) -> Callable:
_func = func
if is... |
def cov_nonDiaHess_max(value_at_max, sigma, l):
d = len(value_at_max)
cov_matrix = np.zeros(int(((d * (d - 1)) / 2)))
index = 0
for i in range(d):
for j in range((i + 1), d):
cov_matrix[index] = cov_x_devdevY(value_at_max, value_at_max, sigma, l, i, j)
index = (index + 1)... |
def apply_net_policy(mod: str, node: str, ips: typing.List[str], job_template, pod_template, network_params: typing.Dict[(str, str)], duration: str, bridge_name: str, kubecli: KrknKubernetes, test_execution: str) -> typing.List[str]:
job_list = []
for pod_ip in set(ips):
pod_inf = get_pod_interface(node... |
class Time(IObservable):
def __init__(self, current_time=None, format_time='YYYY-MM-DD HH:mm:ss', future=True):
super().__init__()
self.format = format_time
self.current_time = (arrow.get(current_time) if current_time else arrow.now())
self.update_time = self.current_time
sel... |
_ephem
def test_calc_time():
import pytz
import math
epoch = datetime.datetime(1970, 1, 1)
epoch_dt = pytz.utc.localize(epoch)
loc = tus
loc.pressure = 0
actual_time = pytz.timezone(loc.tz).localize(datetime.datetime(2014, 10, 10, 8, 30))
lb = pytz.timezone(loc.tz).localize(datetime.date... |
def sqa_get_constructed_history_and_golden_response(question_and_history):
reversed_utterance_head = [question.strip() for question in reversed(question_and_history[:(- 1)])]
reversed_utterance_head_str = ' | '.join(reversed_utterance_head)
return (((question_and_history[(- 1)].strip() + ' || ') + reversed_... |
def stat_all_batch(tmp_datetime):
datetime_str = tmp_datetime.strftime('%Y-%m-%d')
datetime_int = tmp_datetime.strftime('%Y%m%d')
print('datetime_str:', datetime_str)
print('datetime_int:', datetime_int)
try:
del_sql = (' DELETE FROM `stock_data`.`%s` WHERE `date`= %s ' % (table_name, dateti... |
def visualize_recon(batch_1, batch_2, frame_idx, obj_id, dataset_name, save_dir):
if (len(batch_1.shape) == 4):
if (batch_1.shape[3] == 2):
batchshow_1 = [flow_to_image(batch_1[j]) for j in range(batch_1.shape[0])]
else:
batchshow_1 = [batch_1[j] for j in range(batch_1.shape[... |
class ManifestBackfillWorker(Worker):
def __init__(self):
super(ManifestBackfillWorker, self).__init__()
self.add_operation(self._backfill_manifests, WORKER_FREQUENCY)
def _backfill_manifests(self):
try:
Manifest.select().where((Manifest.layers_compressed_size >> None)).get()... |
def work_stealing_partition(tasks, interval=0.0001):
tasks = static_partition(tasks)
out_of_task = [False]
def task_daemon():
while True:
time.sleep(interval)
while comm.Iprobe(source=MPI.ANY_SOURCE, tag=INQUIRY):
(src, req) = comm.recv(source=MPI.ANY_SOURCE, ... |
def _block_contiguity(regimes, ids=None):
regimes = pandas.Series(regimes, index=ids)
rids = regimes.unique()
neighbors = {}
for rid in rids:
members = regimes.index[(regimes == rid)].values
for member in members:
neighbors[member] = members[(members != member)]
return ne... |
class PSBTSection():
def _populate_psbt_fields_from_fd(self, fd=None):
if (not fd):
return
while True:
try:
(key_type, key, val) = self.get_next_kv_from_fd(fd)
except StopIteration:
break
self.parse_psbt_section_kv(key_t... |
def _model(arch: str, pretrained: bool, quantize=False):
print(f'[*] Load {arch}')
model: nn.Module = model_map[arch]()
if pretrained:
state_dict = torch.utils.model_zoo.load_url(url_map[arch])
if quantize:
model.quantize()
model.load_state_dict(state_dict)
return mod... |
class CategoryMismatch(ValueError):
def __init__(self, left, right):
(mismatches,) = np.where((left != right))
assert len(mismatches), 'Not actually a mismatch!'
super(CategoryMismatch, self).__init__("LabelArray categories don't match:\nMismatched Indices: {mismatches}\nLeft: {left}\nRight:... |
def direction(diagonal_angle, hit_x, hit_y, hit_area, landing_x, landing_y, landing_area):
if (type(hit_area) == float):
return ''
if (((hit_area[0] == 'C') or (hit_area[0] == 'E')) and ((landing_area[0] == 'C') or (landing_area[0] == 'E'))):
return 2
if (((landing_area[0] == 'B') or (landin... |
.parametrize('case', [{'source': '<div/>', 'model': {'tagName': 'div'}}, {'source': "<div style='background-color:blue'/>", 'model': {'tagName': 'div', 'attributes': {'style': {'background_color': 'blue'}}}}, {'source': '<div>Hello!</div>', 'model': {'tagName': 'div', 'children': ['Hello!']}}, {'source': '<div>Hello!<p... |
class ADC_CR1(IntEnum):
    """Bit masks for the fields of an ADC control register 1 (CR1).

    Each member is the full mask of its field at its bit offset, so
    multi-bit fields (AWDCH, DISCNUM, DALMOD) span their whole width.
    Register identity (presumably an STM32-style ADC CR1) should be
    confirmed against the MCU reference manual.
    """

    AWDCH = 0x0000001F    # bits 0-4 (5-bit field)
    EOSIE = 0x00000020    # bit 5
    AWDIE = 0x00000040    # bit 6
    JEOSIE = 0x00000080   # bit 7
    SCAN = 0x00000100     # bit 8
    AWDSGL = 0x00000200   # bit 9
    JAUTO = 0x00000400    # bit 10
    DISCEN = 0x00000800   # bit 11
    JDISCEN = 0x00001000  # bit 12
    DISCNUM = 0x0000E000  # bits 13-15 (3-bit field)
    DALMOD = 0x000F0000   # bits 16-19 (4-bit field)
    JAWDEN = 0x00400000   # bit 22
    AWDEN = 0x00800000    # bit 23
def model_with_multiple_downsamples():
inputs = tf.keras.Input(shape=(8, 8, 10))
with tf.name_scope('downsample'):
gather_1 = tf.gather(inputs, [1, 2, 3, 4, 5, 6, 7, 8], axis=(- 1))
conv2d = tf.keras.layers.Conv2D(16, [2, 2])(gather_1)
gather_2 = tf.gather(conv2d, [1, 2, 3, 4, 5, 6], axi... |
class MixtureOfExperts(nn.Module):
enc_max_len = {'max_len': None, 'capacity_fp': 0, 'need_update': True}
dec_max_len = {'max_len': None, 'capacity_fp': 0, 'need_update': True}
def __del__(self):
self.reset_moe_state()
def reset_moe_encoder_state(cls):
cls.enc_max_len['need_update'] = Tr... |
def solve_path(prob_data_iter, **kws):
for (g1, g2, A1, A2) in prob_data_iter:
(soln, opt_data, admm_data) = solve(g1=g1, g2=g2, A1=A1, A2=A2, **kws)
(yield (soln, opt_data, admm_data))
kws['dual_init'] = admm_data['dual_vars']
kws['rho'] = admm_data['rho']
kws['D_mat'] = adm... |
def main(data_dir, client, c, config):
benchmark(read_tables, config, c)
query_1 = f'''
SELECT d_date_sk
FROM date_dim
WHERE CAST(d_date as date) IN (date '{q08_STARTDATE}',
date '{q08_ENDDATE}')
ORDER BY CAST(d_date as date) asc
'''
... |
class TestLogging(QiskitNatureTestCase):
def setUp(self):
super().setUp()
self._test_handler = TestHandler()
self._logging_dict = {'qiskit_nature': logging.DEBUG, 'qiskit': logging.DEBUG}
self._old_logging_dict = nature_logging.get_levels_for_names(self._logging_dict.keys())
def ... |
class HTMLPage(LinkSource):
def __init__(self, url: str, content: str) -> None:
super().__init__(url=url)
parser = HTMLPageParser()
parser.feed(content)
self._parsed = parser.anchors
self._base_url: (str | None) = parser.base_url
_property
def _link_cache(self) -> Lin... |
class SimpleStringBuffer(io.BytesIO):
def __init__(self):
super().__init__()
def seek(self, offset: int, origin: int=0) -> int:
raise OSError('Illega Seek')
def seekable(self) -> bool:
return False
def write(self, buf: bytes) -> int:
pos = super().tell()
super().s... |
class TypeInfo(SymbolNode):
__slots__ = ('_fullname', 'module_name', 'defn', 'mro', '_mro_refs', 'bad_mro', 'is_final', 'declared_metaclass', 'metaclass_type', 'names', 'is_abstract', 'is_protocol', 'runtime_protocol', 'abstract_attributes', 'deletable_attributes', 'slots', 'assuming', 'assuming_proper', 'inferring... |
def get_win_folder_from_env_vars(csidl_name: str) -> str:
if (csidl_name == 'CSIDL_PERSONAL'):
return os.path.join(os.path.normpath(os.environ['USERPROFILE']), 'Documents')
env_var_name = {'CSIDL_APPDATA': 'APPDATA', 'CSIDL_COMMON_APPDATA': 'ALLUSERSPROFILE', 'CSIDL_LOCAL_APPDATA': 'LOCALAPPDATA'}.get(c... |
def test_dataset_frames_subset_invalid(zarr_dataset: ChunkedDataset) -> None:
with pytest.raises(ValueError):
get_frames_subset(zarr_dataset, 0, 10)
with pytest.raises(ValueError):
get_frames_subset(zarr_dataset, (- 1), 10)
with pytest.raises(ValueError):
get_frames_subset(zarr_datas... |
class GaussianDiffusion():
def __init__(self, *, betas, model_mean_type, model_var_type, loss_type, rescale_timesteps=False):
self.model_mean_type = model_mean_type
self.model_var_type = model_var_type
self.loss_type = loss_type
self.rescale_timesteps = rescale_timesteps
beta... |
class TestLocalImageCollection():
def test_len(self):
num_images = 3
images = {str(idx): data.LocalImage(str(idx)) for idx in range(num_images)}
collection = data.LocalImageCollection(images)
actual = len(collection)
desired = num_images
assert (actual == desired)
... |
class Concat(nn.Module):
    """Channel-wise concatenation of two 4-D feature maps.

    ``y`` is zero-padded so its spatial size matches ``x`` (the extra
    pixel goes on the bottom/right when the difference is odd), then the
    two tensors are joined along the channel dimension (dim 1).
    """

    def forward(self, x, y):
        (_, _, target_h, target_w) = x.size()
        (_, _, src_h, src_w) = y.size()
        gap_h = target_h - src_h
        gap_w = target_w - src_w
        # F.pad ordering: (left, right, top, bottom) for the last two dims.
        left = gap_w // 2
        top = gap_h // 2
        y = F.pad(y, (left, gap_w - left, top, gap_h - top))
        return torch.cat((x, y), dim=1)
def add_help_to_file(item: str, outfile: TextIO, is_command: bool) -> None:
if is_command:
label = 'COMMAND'
else:
label = 'TOPIC'
header = '{}\n{}: {}\n{}\n'.format(ASTERISKS, label, item, ASTERISKS)
outfile.write(header)
result = app('help {}'.format(item))
outfile.write(result... |
def pytask_execute_task_process_report(session: Session, report: ExecutionReport) -> (bool | None):
if (report.exc_info and isinstance(report.exc_info[1], Persisted)):
report.outcome = TaskOutcome.PERSISTENCE
update_states_in_database(session, report.task.signature)
return True
return No... |
def build_conv_layer(cfg, *args, **kwargs):
if (cfg is None):
cfg_ = dict(type='Conv2d')
else:
if (not isinstance(cfg, dict)):
raise TypeError('cfg must be a dict')
if ('type' not in cfg):
raise KeyError('the cfg dict must contain the key "type"')
cfg_ = c... |
def run(train_batch_size, epochs, lr, weight_decay, config, exp_id, log_dir, disable_gpu=False):
if (config['test_ratio'] is not None):
(train_loader, val_loader, test_loader) = get_data_loaders(config, train_batch_size, exp_id)
else:
(train_loader, val_loader) = get_data_loaders(config, train_b... |
class RealFileSystemAccessTest(RealFsTestCase):
def setUp(self):
self.filesystem = fake_filesystem.FakeFilesystem()
self.fake_open = fake_filesystem.FakeFileOpen(self.filesystem)
self.pyfakefs_path = os.path.split(os.path.dirname(os.path.abspath(__file__)))[0]
self.root_path = os.pat... |
class PetitionTemplateTest(TestCase):
def setUp(self):
User = get_user_model()
u = User.objects.create_user('julia', password='julia')
org = Organization.objects.create(name='RAP')
def test_createUserPetitionTemplate(self):
self.assertEqual(PetitionTemplate.objects.count(), 0)
... |
class _Once():
def __init__(self, afn: Callable[(..., Awaitable[object])], *args: object) -> None:
self._afn = afn
self._args = args
self.started = False
self._done = _sync.Event()
async def ensure(self, *, checkpoint: bool) -> None:
if (not self.started):
sel... |
def _render_subtree(name, rendered_children):
lines = []
lines.append(name)
for child_lines in rendered_children:
if (child_lines is rendered_children[(- 1)]):
first_prefix = END_PREFIX
rest_prefix = END_CONTINUE
else:
first_prefix = MID_PREFIX
... |
def get_catalog(name):
if (name == 'geofon'):
from pyrocko.client.geofon import Geofon
return Geofon()
elif (name == 'gcmt'):
from pyrocko.client.globalcmt import GlobalCMT
return GlobalCMT()
elif (name == 'isc'):
from pyrocko.client.isc import ISC
return ISC(... |
class Space(Cog):
def __init__(self, bot: Bot):
self. = bot.
self.bot = bot
self.rovers = {}
self.get_rovers.start()
def cog_unload(self) -> None:
self.get_rovers.cancel()
(hours=24)
async def get_rovers(self) -> None:
data = (await self.fetch_from_nasa('m... |
def _load_factory(url: str, default_name: Optional[str]=None) -> Callable:
(module_name, sep, func_name) = url.partition(':')
if (not sep):
if (not default_name):
raise ValueError('no name and no default specified')
func_name = default_name
module = importlib.import_module(module... |
def get_loaders(opt):
logging.info('STARTING Dataset Creation')
(train_full_load, val_full_load) = full_path_loader(opt.dataset_dir)
train_dataset = CDDloader(train_full_load, aug=opt.augmentation)
val_dataset = CDDloader(val_full_load, aug=False)
logging.info('STARTING Dataloading')
train_loade... |
class SampleRequestProvider(StaticProvider):
    """Provider that delegates handling of a ``SampleRequest`` to a stored callable.

    The callable receives the :class:`Mediator` and returns a ``MySchema``.
    """

    def __init__(self, provide_action: Callable[[Mediator], MySchema]) -> None:
        # Stored here and invoked later by _provide_overlay.
        self.provide_action = provide_action

    # NOTE(review): the source dump had dropped the '@' from this line,
    # leaving a bare-name statement; restored as a decorator. Presumably it
    # registers this method as the provision action for SampleRequest --
    # confirm against the original upstream source.
    @_provision_action
    def _provide_overlay(self, mediator: Mediator, request: SampleRequest):
        return self.provide_action(mediator)
class OutputLogger():
def __init__(self, file=None, trace_level=False):
super().__init__()
self.file = (file or sys.stderr)
self.trace_level = trace_level
def debug(self, fmtstr, *args):
print(f'h2 (debug): {(fmtstr % args)}', file=self.file)
def trace(self, fmtstr, *args):
... |
class TestCLI():
def setup_class(cls) -> None:
with open(CONFIGFILE, 'w') as config:
config.write(CONFIG_YAML)
def teardown_class(cls) -> None:
os.remove(CONFIGFILE)
shutil.rmtree(TEST_DIR)
def setup_method(self) -> None:
self.env = TFE(base_path=TEST_DIR, cwd=os.... |
class XlibEventLoop(PlatformEventLoop):
def __init__(self):
super(XlibEventLoop, self).__init__()
self._notification_device = NotificationDevice()
self.select_devices = set()
self.select_devices.add(self._notification_device)
def notify(self):
self._notification_device.se... |
class DiffusionUNet(nn.Module):
def __init__(self, unet_params, vq_conf=None, conditioning_key=None):
super().__init__()
self.diffusion_net = UNet3DModel(**unet_params)
self.conditioning_key = conditioning_key
def forward(self, x, t, c_concat: list=None, c_crossattn: list=None):
... |
.parametrize('retriever, documents, k', [pytest.param(retriever, documents(), k, id=f'Multiple fields retriever: {retriever.__class__.__name__}, k: {k}') for k in [None, 2, 4] for retriever in cherche_retrievers(on=['article', 'title', 'author'])])
def test_fields_retriever(retriever, documents: list, k: int):
retr... |
class BackboneEncoderUsingLastLayerIntoWPlus(Module):
def __init__(self, num_layers, mode='ir', opts=None):
super(BackboneEncoderUsingLastLayerIntoWPlus, self).__init__()
print('Using BackboneEncoderUsingLastLayerIntoWPlus')
assert (num_layers in [50, 100, 152]), 'num_layers should be 50,100... |
class FedAVGAggregator(Aggregator):
def __init__(self, train_global, test_global, all_train_data_num, train_data_local_dict, test_data_local_dict, train_data_local_num_dict, worker_num, device, args, model_trainer, perf_timer=None, metrics=None, traindata_cls_counts=None):
super().__init__(train_global, tes... |
def create_model(opt, data_size=None):
if (opt.model == 'AE_maskgen_twostream'):
from TwoStreamAE_mask import TwoStreamAE_mask
model = TwoStreamAE_mask(opt)
elif (opt.model == 'pix2pixHD_condImg'):
from pix2pixHD_condImg_model import Pix2PixHDModel_condImg
model = Pix2PixHDModel_... |
def full_plotting(_fileTitle, _trainingLength, _currentRewardList):
scores = []
scores_std = []
scores_avg = []
scores_window = deque(maxlen=100)
reward_list = _currentRewardList
for i_episode in range(_trainingLength):
score = reward_list[i_episode]
scores_window.append(score)
... |
class PositionwiseFeedForward(nn.Module):
def __init__(self, d_in, d_hid, dropout=0.1):
super().__init__()
self.w_1 = nn.Linear(d_in, d_hid)
self.w_2 = nn.Linear(d_hid, d_in)
self.layer_norm = nn.LayerNorm(d_in, eps=1e-06)
self.dropout = nn.Dropout(dropout)
def forward(se... |
def nearest_next_idx(d1, d2, k, ties=None):
d1e = d1.End.sort_values()
d2s = d2.Start.sort_values()
ix = np.searchsorted(d2s, d1e, side='left')
valid = (ix < len(d2s))
ix = ix[valid]
d1e = d1e.iloc[valid]
(lidx, ridx_pos, dist) = k_nearest_next_nonoverlapping(d1e.values, d2s.values, d1e.inde... |
class _DefaultContext(Context):
def __init__(self, visitor: 'NameCheckVisitor', node: Optional[ast.AST], globals: Optional[Mapping[(str, object)]]=None, use_name_node_for_error: bool=False) -> None:
super().__init__()
self.visitor = visitor
self.node = node
self.globals = globals
... |
def pblock_057(content):
stage_number = int(get1(content, b'03'))
deci = sxml.Decimation(input_sample_rate=sxml.Frequency(value=float(get1(content, b'04'))), factor=int(get1(content, b'05')), offset=int(get1(content, b'06')), delay=sxml.FloatWithUnit(value=float(get1(content, b'07'))), correction=sxml.FloatWith... |
.parametrize('udn,wemo_class', [('uuid:Socket-1_0-SERIALNUMBER', discovery.Switch), ('uuid:Lightswitch-1_0-SERIALNUMBER', discovery.LightSwitchLongPress), ('uuid:Lightswitch-2_0-SERIALNUMBER', discovery.LightSwitchLongPress), ('uuid:Lightswitch-3_0-SERIALNUMBER', discovery.LightSwitchLongPress), ('uuid:Lightswitch-9_0-... |
def parse_options():
try:
(opts, args) = getopt.getopt(sys.argv[1:], 'de:hs:v', ['dcalls', 'enum=', 'help', 'solver=', 'verbose'])
except getopt.GetoptError as err:
sys.stderr.write((str(err).capitalize() + '\n'))
usage()
sys.exit(1)
dcalls = False
to_enum = 1
solver ... |
class SpatialSelfAttention(nn.Module):
def __init__(self, in_channels):
super().__init__()
self.in_channels = in_channels
self.norm = Normalize(in_channels)
self.q = torch.nn.Conv2d(in_channels, in_channels, kernel_size=1, stride=1, padding=0)
self.k = torch.nn.Conv2d(in_chan... |
def pytest(context, junit=False, pty=True, append_cov=False):
ROOT_PATH = TASK_ROOT.parent.parent
with context.cd(str(ROOT_PATH)):
command_str = 'pytest --cov=cmd2_myplugin --cov-report=term --cov-report=html'
if append_cov:
command_str += ' --cov-append'
if junit:
... |
.functions
def test_drop_constant_columns(df_constant_columns):
processed_df = df_constant_columns.drop_constant_columns()
expected_col_list = ['Bell__Chart', 'decorated-elephant', 'cities']
assert (processed_df.columns.to_list() == expected_col_list)
data = {'Bell__Chart': ([1., 2.456234, 3.2346125] * ... |
class DeliveryLogfile(DeliveryBase):
def from_trf(cls, filepath):
(_, dataframe) = read_trf(filepath)
delivery = cls._from_pandas(dataframe)
return delivery
_deprecated(reason='`pymedphys.Delivery.from_logfile` has been replaced with `pymedphys.Delivery.from_trf`')
def from_logfile(c... |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.