code stringlengths 281 23.7M |
|---|
class QuadTree():
def __init__(self, df, x_column, y_column, x_min=None, x_max=None, y_min=None, y_max=None, scale=(- 1), max_depth=15, min_size=12):
self.x_min = (df[x_column].min() if (not x_min) else x_min)
self.x_max = (df[x_column].max() if (not x_max) else x_max)
self.y_min = (df[y_col... |
class TestHRSC2016R3DetBCD(TestHRSC2016):
def eval(self):
r3det_bcd = build_whole_network.DetectionNetworkR3DetBCD(cfgs=self.cfgs, is_training=False)
all_boxes_r = self.eval_with_plac(img_dir=self.args.img_dir, det_net=r3det_bcd, image_ext=self.args.image_ext)
imgs = os.listdir(self.args.img... |
class DataCubeTestCase(unittest.TestCase):
def test_load(self):
fpath = common.test_data_file('test1.cube')
traces_h = io.load(fpath, getdata=False, format='detect')
traces_d = io.load(fpath, getdata=True, format='detect')
mimas = [(24464, 88087), (42794, 80074), (53039, 73741)]
... |
def get_label(labels_path):
with torch.no_grad():
label_list = []
with open(labels_path, 'r') as f:
data = f.readlines()
for line in data:
label_list.append(line.strip())
text = torch.cat([clip.tokenize(f'there is {c} in scene') for c in label_list]).cuda()
... |
class IPTablesRoleTest(ProvyTestCase):
def setUp(self):
super(IPTablesRoleTest, self).setUp()
self.role = IPTablesRole(prov=None, context={'cleanup': []})
def installs_necessary_packages_to_provision(self):
with self.using_stub(AptitudeRole) as aptitude, self.execute_mock():
... |
def _conflict_bail(VC_err, version):
conflict_tmpl = textwrap.dedent("\n The required version of setuptools (>={version}) is not available,\n and can't be installed while this script is running. Please\n install a more recent version first, using\n 'easy_install -U setuptools'.\n\n ... |
def virtualenv(python: Path, venv_path: Path, dependency_constraint_flags: Sequence[PathOrStr]) -> dict[(str, str)]:
assert python.exists()
virtualenv_app = _ensure_virtualenv()
constraints = _parse_constraints_for_virtualenv(dependency_constraint_flags)
additional_flags = [f'--{package}={version}' for ... |
class _usb_interface_descriptor(Structure, _PackPolicy):
_fields_ = [('bLength', c_uint8), ('bDescriptorType', c_uint8), ('bInterfaceNumber', c_uint8), ('bAlternateSetting', c_uint8), ('bNumEndpoints', c_uint8), ('bInterfaceClass', c_uint8), ('bInterfaceSubClass', c_uint8), ('bInterfaceProtocol', c_uint8), ('iInter... |
def test_resample_inexact():
driven_control = DrivenControl(rabi_rates=np.array([0, 2]), azimuthal_angles=np.array([1.5, 0.5]), detunings=np.array([1.3, 2.3]), durations=np.array([1, 1]), name='control')
new_driven_control = driven_control.resample(0.3)
assert (len(new_driven_control.durations) == 7)
as... |
class RandomDepth(RandomModule):
def __init__(self, *layers, depth_min=None, depth_max=None):
super().__init__()
self.layers = nn.ModuleList(layers)
self.depth_min = depth_min
self.depth_max = depth_max
def random_sample(self):
if (self.depth_min is not None):
... |
def navigate_beeline(game, goal):
(player_x, player_y, player_angle) = game.get_agent_location()
goal_angle_rad = math.atan2((goal[1] - player_y), (goal[0] - player_x))
goal_angle = math.degrees(goal_angle_rad)
if (goal_angle < 0):
goal_angle = (360 + goal_angle)
if (abs((goal_angle - player... |
class CRITIC(SKCWeighterABC):
CORRELATION = ('pearson', 'spearman', 'kendall')
_skcriteria_parameters = ['correlation', 'scale']
def __init__(self, correlation='pearson', scale=True):
if (not ((correlation in self.CORRELATION) or callable(correlation))):
corr_keys = ', '.join((f"'{c}'" f... |
.parametrize('color, border_color, expected_alpha', [((0, 0, 0), (3, 3, 3), 255), ((2, 2, 2, 5), (7, 7, 7), 5), ((17, 17, 17), (23, 23, 23, 0), 0), ((1, 1, 1, 1), (2, 2, 2, 1), 1)])
def test_init_sets_opacity_for_valid_color_and_border_color_alphas(color, border_color, expected_alpha):
rect = BorderedRectangle(0, 0... |
_env('AssemblingKits-v0', max_episode_steps=200)
class AssemblingKitsEnv(StationaryManipulationEnv):
def __init__(self, asset_root='{ASSET_DIR}/assembling_kits', **kwargs):
self.asset_root = Path(format_path(asset_root))
self._kit_dir = (self.asset_root / 'kits')
self._models_dir = (self.ass... |
def _default_start_debug_action(instring: str, loc: int, expr: 'ParserElement', cache_hit: bool=False):
    """Print the default "attempting match" debug trace for *expr* at position *loc*.

    The trace shows the expression and location, the source line being matched,
    and a caret under the match column. Cache hits are prefixed with ``*``.
    """
    marker = '*' if cache_hit else ''
    column = col(loc, instring)
    header = f'{marker}Match {expr} at loc {loc}({lineno(loc, instring)},{column})'
    caret = (' ' * (column - 1)) + '^'
    print(f'{header}\n  {line(loc, instring)}\n  {caret}')
class CocoParser(Parser):
def __init__(self, cfg: CocoParserCfg):
super().__init__(bbox_yxyx=cfg.bbox_yxyx, has_labels=cfg.has_labels, include_masks=cfg.include_masks, include_bboxes_ignore=cfg.include_bboxes_ignore, ignore_empty_gt=(cfg.has_labels and cfg.ignore_empty_gt), min_img_size=cfg.min_img_size)
... |
class TestQuantizationSimTransformers(unittest.TestCase):
def test_word_langauge_model(self):
from transformer_models.word_language_model import TransformerModel
n_layers = 2
model = TransformerModel(33278, 200, 2, 200, n_layers)
model.eval()
get_quantizable_pt_transformer_mo... |
class PageMenu(HTMLWidget):
def __init__(self, view, css_id, page_index, paged_panel):
self.page_index = page_index
self.paged_panel = paged_panel
super().__init__(view)
self.menu = self.add_child(Menu(view))
self.create_items(self.menu)
self.set_html_representation(s... |
def generate_header(info_fields, format_fields, vcf_samples):
output = io.StringIO()
print('##fileformat=VCFv4.3', file=output)
print('##contig=<ID=1>', file=output)
print('##contig=<ID=2>', file=output)
for info_field in info_fields:
print(info_field.get_header(), file=output)
for forma... |
class SearchFilter(LineEditWithToolButtons):
filterChanged = QtCore.Signal()
def __init__(self, parent):
LineEditWithToolButtons.__init__(self, parent)
self._menuBut = self.addButtonRight(pyzo.icons['magnifier'], True)
self._menu = QtWidgets.QMenu(self._menuBut)
self._menu.trigge... |
class TestQueueThreads(QueueTestCase):
def test_queue_threads(self):
count = [20]
for i in range(count[0]):
self.queue.put([str(i)], self.TEST_MESSAGE_1)
lock = Lock()
def get(lock, count, queue):
item = queue.get()
if (item is None):
... |
class NTXent(nn.Module):
def __init__(self, tau, kernel='dot'):
super(NTXent, self).__init__()
self.tau = tau
self.kernel = kernel
self.l2_norm = Normalize()
def dot_pos(self, z_1, z_2):
z_1 = self.l2_norm(z_1)
z_2 = self.l2_norm(z_2)
batch_size = z_1.size... |
class Migration(migrations.Migration):
dependencies = [('domain', '0025_refactoring')]
operations = [migrations.AlterModelOptions(name='verbosename', options={'verbose_name': 'Verbose name', 'verbose_name_plural': 'Verbose names'}), migrations.AddField(model_name='attributeentity', name='uri_prefix', field=mode... |
def get_max_blocksize_from_mem(array_size, mem_per_block, mem, priority_list=None):
nindices = len(array_size)
if (priority_list is None):
_priority_list = ([1] * nindices)
else:
_priority_list = priority_list
cmem = (mem / mem_per_block)
_priority_list = numpy.array(_priority_list)
... |
def create_temporary_tag_outside_timemachine(manifest):
tag_name = ('$temp-%s' % str(uuid.uuid4()))
now_ms = get_epoch_timestamp_ms()
return Tag.create(name=tag_name, repository=manifest.repository_id, lifetime_start_ms=now_ms, lifetime_end_ms=0, reversion=False, hidden=True, manifest=manifest, tag_kind=Tag... |
def clean(html: str, tags: Optional[Set[str]]=None, attributes: Optional[Dict[(str, Set[str])]]=None) -> Optional[str]:
if (tags is None):
tags = ALLOWED_TAGS
if (attributes is None):
attributes = ALLOWED_ATTRIBUTES
try:
cleaned = nh3.clean(html, tags=ALLOWED_TAGS, attributes=ALLOWED... |
class FiftLexer(RegexLexer):
name = 'Fift'
aliases = ['fift', 'fif']
filenames = ['*.fif']
url = '
version_added = ''
tokens = {'root': [('\\s+', Whitespace), include('comments'), ('[\\.+]?\\"', String, 'string'), ('0x[0-9a-fA-F]+', Number.Hex), ('0b[01]+', Number.Bin), ('-?[0-9]+("/"-?[0-9]+)?'... |
def test_pickling_issue_5090():
    """Regression test for issue 5090: a model's initial point must be
    unchanged after a cloudpickle round trip."""
    with pm.Model() as model:
        pm.Normal('x', initval='prior')
    ip_before = model.initial_point(random_seed=5090)
    # Serialize and restore the model, then recompute with the same seed.
    restored = cloudpickle.loads(cloudpickle.dumps(model))
    ip_after = restored.initial_point(random_seed=5090)
    assert ip_before['x'] == ip_after['x']
def test_build_plugin_layer():
with pytest.raises(TypeError):
cfg = 'Plugin'
build_plugin_layer(cfg)
with pytest.raises(KeyError):
cfg = dict()
build_plugin_layer(cfg)
with pytest.raises(KeyError):
cfg = dict(type='FancyPlugin')
build_plugin_layer(cfg)
wit... |
class YAMLObject(object):
__metaclass__ = YAMLObjectMetaclass
__slots__ = ()
yaml_loader = Loader
yaml_dumper = Dumper
yaml_tag = None
yaml_flow_style = None
def from_yaml(cls, loader, node):
return loader.construct_yaml_object(node, cls)
from_yaml = classmethod(from_yaml)
de... |
def brighten(cur_path, init_dirname, is_gamma=True, method='Ying_CAIP', gamma=1.0, print_timespan=False):
method_path = method
if (not os.path.exists(method_path)):
os.mkdir(method_path)
new_path = os.path.join(cur_path, method_path)
if is_gamma:
transform = adjust_gamma_table(gamma)
... |
class RepeatedTimer(object):
def __init__(self, interval, dump_func, outfile):
self._timer = None
self.interval = interval
self.dump_func = dump_func
self.outfile = outfile
self.is_running = False
self.next_call = time.time()
self.start()
def _run(self):
... |
class EncHead(nn.Module):
def __init__(self, in_channels, out_channels, se_loss=True, jpu=True, lateral=False, norm_layer=None, up_kwargs=None):
super(EncHead, self).__init__()
self.se_loss = se_loss
self.lateral = lateral
self.up_kwargs = up_kwargs
self.conv5 = (nn.Sequentia... |
class ImageList(object):
def __init__(self, root, list_file, memcached=False, mclient_path=None):
with open(list_file, 'r') as f:
lines = f.readlines()
self.has_labels = (len(lines[0].split()) == 2)
if self.has_labels:
(self.fns, self.labels) = zip(*[l.strip().split()... |
class Publisher(TimeStampedModel, IndestructibleModel):
name = models.CharField(_('Name'), max_length=200)
slug = models.SlugField(_('Publisher Slug'), max_length=200, unique=True)
revenue_share_percentage = models.FloatField(default=70.0, validators=[MinValueValidator(0), MaxValueValidator(100)], help_text... |
class Effect5431(BaseEffect):
    """Passive effect: boosts trackingSpeed of modules requiring the
    Large Energy Turret skill by the ship's shipBonusAB attribute,
    scaled per level of the Amarr Battleship skill."""
    type = 'passive'

    def handler(fit, ship, context, projectionRange, **kwargs):
        # Predicate selecting the modules this bonus applies to.
        def uses_large_energy_turret(mod):
            return mod.item.requiresSkill('Large Energy Turret')

        fit.modules.filteredItemBoost(
            uses_large_energy_turret,
            'trackingSpeed',
            ship.getModifiedItemAttr('shipBonusAB'),
            skill='Amarr Battleship',
            **kwargs,
        )
def topoSort(roots, getParents):
results = []
visited = set()
stack = [(node, 0) for node in roots]
while stack:
(current, state) = stack.pop()
if (state == 0):
if (current not in visited):
visited.add(current)
stack.append((current, 1))
... |
_hook('precise_bn')
class PreciseBatchNormHook(ClassyHook):
on_end = ClassyHook._noop
def __init__(self, num_samples: int, cache_samples: bool=False) -> None:
super().__init__()
if (num_samples <= 0):
raise ValueError('num_samples has to be a positive integer')
self.num_sampl... |
def string_to_date(obj_datetime):
obj_datetime = parse_string_date(obj_datetime)
try:
date_time_obj = datetime.datetime.strptime(obj_datetime, '%a %b %d %H:%M:%S %Z %Y')
return date_time_obj
except Exception:
logging.info("Couldn't parse string to datetime object")
return dat... |
def calibration_data_to_graph(calib_dict: cg.Calibration) -> nx.Graph:
err_graph = nx.Graph()
for ((q1, q2), err) in calib_dict['two_qubit_sycamore_gate_xeb_cycle_total_error'].items():
err_graph.add_edge(q1, q2, weight=err[0])
for ((q,), err) in calib_dict['single_qubit_readout_p0_error'].items():
... |
class TestFDDBDCL(TestFDDB):
def eval(self):
dcl = build_whole_network.DetectionNetworkDCL(cfgs=self.cfgs, is_training=False)
all_boxes_r = self.eval_with_plac(img_dir=self.args.img_dir, det_net=dcl, image_ext=self.args.image_ext)
imgs = os.listdir(self.args.img_dir)
real_test_imgnam... |
.fast
def test_diluents_molefraction(verbose=True, plot=False, *args, **kwargs):
from radis.misc.warning import MoleFractionError
sf = SpectrumFactory(wavelength_min=4300, wavelength_max=4500, wstep=0.01, cutoff=1e-30, pressure=1, isotope=[1], verbose=verbose, diluent={'CO2': 0.4, 'air': 0.2})
sf.load_datab... |
class FileList():
def __init__(self, warn=None, debug_print=None):
self.allfiles = None
self.files = []
def set_allfiles(self, allfiles):
self.allfiles = allfiles
def findall(self, dir=os.curdir):
self.allfiles = findall(dir)
def debug_print(self, msg):
from distu... |
(params=[True, False])
def freezer(request, monkeypatch):
if (request.param and (not getattr(sys, 'frozen', False))):
monkeypatch.setattr(sys, 'frozen', True, raising=False)
monkeypatch.setattr(sys, 'executable', qutebrowser.__file__)
elif ((not request.param) and getattr(sys, 'frozen', False)):... |
class Migration(migrations.Migration):
dependencies = [('adserver', '0065_flight_invoices')]
operations = [migrations.AddField(model_name='historicalpublisher', name='disabled', field=models.BooleanField(default=False, help_text='Completely disable this publisher')), migrations.AddField(model_name='publisher', ... |
class FCHead(nn.Module):
def __init__(self, input_shape, fc_dim, num_fc):
super().__init__()
fc_dims = [fc_dim for _ in range(num_fc)]
assert (len(fc_dims) > 0)
self._output_size = (input_shape.channels, input_shape.height, input_shape.width)
self.fcs = []
for (k, x) ... |
def _find_image_files(labels_file, name):
    """Load the fashion sets listed in *labels_file* and return them shuffled.

    Args:
        labels_file: Path to a JSON file whose top-level value is a list of sets.
        name: Unused here; kept for interface compatibility with callers.

    Returns:
        The list of sets from the file, shuffled with a fixed seed (12345) so
        the order is deterministic across runs.
    """
    # Use a context manager so the file handle is closed (original leaked it).
    with open(labels_file) as f:
        all_sets = json.load(f)
    # Bug fix: random.shuffle requires a mutable sequence. In Python 3,
    # `range` is immutable, so shuffling it raised TypeError — materialize
    # the indices as a list first.
    shuffled_index = list(range(len(all_sets)))
    random.seed(12345)
    random.shuffle(shuffled_index)
    all_sets = [all_sets[i] for i in shuffled_index]
    print('Found %d fashion sets.' % len(all_sets))
    return all_sets
def conv2d_same(inputs, num_outputs, kernel_size, stride, rate=1, scope=None):
if (stride == 1):
return slim.conv2d(inputs, num_outputs, kernel_size, stride=1, rate=rate, padding='SAME', scope=scope)
else:
kernel_size_effective = (kernel_size + ((kernel_size - 1) * (rate - 1)))
pad_total... |
class RBFitterBase(ABC):
def raw_data(self):
return
def cliff_lengths(self):
return
def ydata(self):
return
def fit(self):
return
def rb_fit_fun(self):
return
def seeds(self):
return
def results(self):
return
def add_data(self):
... |
class FreqEncoder(nn.Module):
def __init__(self, input_dim, max_freq_log2, N_freqs, log_sampling=True, include_input=True, periodic_fns=(torch.sin, torch.cos)):
super().__init__()
self.input_dim = input_dim
self.include_input = include_input
self.periodic_fns = periodic_fns
s... |
class PathDistribution(Distribution):
def __init__(self, path: SimplePath) -> None:
self._path = path
def read_text(self, filename: (str | os.PathLike[str])) -> Optional[str]:
with suppress(FileNotFoundError, IsADirectoryError, KeyError, NotADirectoryError, PermissionError):
return s... |
def main(argv):
from mutagen.ogg import OggPage
parser = OptionParser(usage='%prog [options] filename.ogg ...', description='Split Ogg logical streams using Mutagen.', version=('Mutagen %s' % '.'.join(map(str, mutagen.version))))
parser.add_option('--extension', dest='extension', default='ogg', metavar='ext... |
class TestObjection():
def testHookMethodWithMethodObject():
obj = Objection('127.0.0.1:8888')
method = Method(None, MethodObject('Lcom/google/progress/WifiCheckTask;', 'checkWifiCanOrNotConnectServer', '([Ljava/lang/String;)Z'))
with patch('requests.post') as mocked_post:
obj.ho... |
def test_TVRegDiffPoint():
n = 800
x = np.linspace((- 10), 10, n)
np.random.seed(1)
noise = (np.random.normal(0, np.std(x), x.shape) * 0.05)
y_clean = np.sin(x)
y_grad = np.cos(x)
y_noise = (y_clean + noise)
dx = (x[1] - x[0])
ux_list_true = []
(ux_list, uxx_list) = ([], [])
... |
def detect_outlier_channels(data_frame_with_relative_ranges):
Q1 = data_frame_with_relative_ranges.quantile(0.25)
Q3 = data_frame_with_relative_ranges.quantile(0.75)
IQR = (Q3 - Q1)
v = (data_frame_with_relative_ranges > (Q3 + (1.5 * IQR)))
v_df = v.to_frame()
keep_only_outliers = v_df.loc[v_df[... |
def guess_lexer(_text, **options):
if (not isinstance(_text, str)):
inencoding = options.get('inencoding', options.get('encoding'))
if inencoding:
_text = _text.decode((inencoding or 'utf8'))
else:
(_text, _) = guess_decode(_text)
ft = get_filetype_from_buffer(_te... |
class panTab(QWidget):
def __init__(self):
super().__init__()
self.layout = QFormLayout()
self.setLayout(self.layout)
self.label = QLabel('Pan and Zoom Controls\nTo zoom in/out, scroll up/down in the display below\nTo pan, click and drag in the display below', alignment=Qt.AlignCente... |
class MakeTableTests(unittest.TestCase):
baked_solutions = {"self.rows=[['Apple', 5]]self.labels=Noneself.centered=False": '\n Apple 5 \n', "self.rows=[['Apple', 5], ['Banana', 3], ['Cherry', 7]]self.labels=Noneself.centered=False": '\n Apple 5 \n Banana 3 \n Cherry 7 \n', "self.rows=[['Apple', 5], ['Banana', ... |
def _get_decorator_optional_bool_argument(ctx: mypy.plugin.ClassDefContext, name: str, default: (bool | None)=None) -> (bool | None):
if isinstance(ctx.reason, CallExpr):
attr_value = _get_argument(ctx.reason, name)
if attr_value:
if isinstance(attr_value, NameExpr):
if (... |
()
_options
_options
def shell(metadir, accept_metadir, controller, ctrlopt, modelsetup, modelopt, backend, local, verbosity):
handle_common_options(verbosity)
ys = handle_connection_options(metadir, accept_metadir, controller, ctrlopt, modelsetup, modelopt, backend, local)
click.secho('launching yadage she... |
def make_sockaddr_in():
class in_addr(ctypes.BigEndianStructure):
_fields_ = (('s_b1', ctypes.c_uint8), ('s_b2', ctypes.c_uint8), ('s_b3', ctypes.c_uint8), ('s_b4', ctypes.c_uint8))
class sockaddr_in(ctypes.BigEndianStructure):
_fields_ = (('sin_family', ctypes.c_int16), ('sin_port', ctypes.c_ui... |
class VersionToString_TestCase(CommandTest):
def runTest(self):
highest = 0
for (ver_str, ver_num) in list(versionMap.items()):
if ver_str.startswith('RHEL'):
continue
highest = max(ver_num, highest)
self.assertEqual(highest, DEVEL)
self.assert... |
class _ValueConditionFactory():
def parse_value_condition(element):
if (element.find('ParameterCondition') is not None):
return ParameterCondition.parse(element.find('ParameterCondition'))
elif (element.find('VariableCondition') is not None):
return VariableCondition.parse(el... |
def test_node_rejects_received_withdraw_expiry_invalid_total_withdraw():
pseudo_random_generator = random.Random()
(our_model1, _) = create_model(balance=70)
(partner_model1, privkey2) = create_model(balance=100)
channel_state = create_channel_from_models(our_model1, partner_model1, privkey2)
block_... |
def genetic_algorithm(ratio):
imgs = {}
masks = {}
img_set = []
mask = []
t = []
measure = []
true_image_set = []
resolution = []
img_true = img('true__out__images.pickle')[0]
mask_true = img('true__out__images.pickle')[1]
img_rotate = img('rotation_var__out__images.pickle')[... |
class PersonCastLink(KinopoiskPage):
xpath = {'id': './/p[="stars"]/', 'name': './/div[="name"]/a/text()', 'name_en': './/div[="name"]/span[="gray"]/text()'}
def parse(self):
self.instance.id = self.extract('id', to_int=True)
self.instance.name = self.extract('name', to_str=True)
self.in... |
class KnownValues(unittest.TestCase):
def test_chkfile_k_point(self):
cell = pbcgto.Cell()
cell.a = (np.eye(3) * 6)
cell.mesh = ([21] * 3)
cell.unit = 'B'
cell.atom = 'He 2. 2. 3.\n He 3. 2. 3.'
cell.basis = {'He': 'sto3g... |
def upgrade(op, tables, tester):
op.create_table('manifest', sa.Column('id', sa.Integer(), nullable=False), sa.Column('repository_id', sa.Integer(), nullable=False), sa.Column('digest', sa.String(length=255), nullable=False), sa.Column('media_type_id', sa.Integer(), nullable=False), sa.Column('manifest_bytes', sa.T... |
def solve(size):
problem = Problem()
cols = range(size)
rows = range(size)
problem.addVariables(cols, rows)
for col1 in cols:
for col2 in cols:
if (col1 < col2):
problem.addConstraint((lambda row1, row2: (row1 != row2)), (col1, col2))
solutions = problem.getSo... |
def merge_with(func, *dicts, **kwargs):
if ((len(dicts) == 1) and (not isinstance(dicts[0], Mapping))):
dicts = dicts[0]
factory = _get_factory(merge_with, kwargs)
values = collections.defaultdict((lambda : [].append))
for d in dicts:
for (k, v) in d.items():
values[k](v)
... |
def test_bose_hubbard_3x2():
hubbard_model = bose_hubbard(3, 2, 1.0, 4.0, chemical_potential=0.5, dipole=0.3)
assert (str(hubbard_model).strip() == '\n-1.0 [0 1^] +\n-1.0 [0 2^] +\n-1.0 [0 3^] +\n-2.5 [0^ 0] +\n2.0 [0^ 0 0^ 0] +\n0.3 [0^ 0 1^ 1] +\n0.3 [0^ 0 2^ 2] +\n0.3 [0^ 0 3^ 3] +\n-1.0 [0^ 1] +\n-1.0 [0^ 2... |
class ClassEventConnectorEditor(gui.ClassEventConnector):
editor_listener_callback = None
def __call__(self, *args, **kwargs):
callback_params = self.event_method_bound(*args, **kwargs)
if (not (self.editor_listener_callback is None)):
self.editor_listener_callback(self.event_source_... |
def attack():
is_training = False
with tf.Graph().as_default():
with tf.device(('/gpu:' + str(GPU_INDEX))):
(pointclouds_pl, labels_pl) = MODEL.placeholder_inputs(BATCH_SIZE, NUM_POINT)
is_training_pl = tf.placeholder(tf.bool, shape=())
pert = tf.get_variable(name='pe... |
def get_flat_grads(net, grad_grad=False):
grads = []
for param in net.parameters():
if (not param.requires_grad):
continue
grad = (param.grad.grad if grad_grad else param.grad)
if (grad is None):
grad = torch.zeros_like(param.data)
else:
grad =... |
class brelu(Function):
def forward(ctx, input):
ctx.save_for_backward(input)
input_shape = input.shape[0]
even_indices = [i for i in range(0, input_shape, 2)]
odd_indices = [i for i in range(1, input_shape, 2)]
output = input.clone()
output[even_indices] = output[even... |
def main(args):
utils.import_user_module(args)
if (args.buffer_size < 1):
args.buffer_size = 1
if ((args.max_tokens is None) and (args.max_sentences is None)):
args.max_sentences = 1
assert ((not args.sampling) or (args.nbest == args.beam)), '--sampling requires --nbest to be equal to --... |
class LogLossMetricsCheckpointHook(ClassyHook):
on_start = ClassyHook._noop
on_phase_start = ClassyHook._noop
on_forward = ClassyHook._noop
on_loss_and_meter = ClassyHook._noop
on_backward = ClassyHook._noop
on_end = ClassyHook._noop
on_update = ClassyHook._noop
def on_forward(self, task... |
def _warn_rvs_in_inferred_graph(graph: Union[(TensorVariable, Sequence[TensorVariable])]):
rvs_in_graph = _find_unallowed_rvs_in_graph(graph)
if rvs_in_graph:
warnings.warn(f'''RandomVariables {rvs_in_graph} were found in the derived graph. These variables are a clone and do not match the original ones ... |
def training_scope(is_training=True, weight_decay=4e-05, stddev=0.09, dropout_keep_prob=0.8, bn_decay=0.997):
batch_norm_params = {'is_training': is_training, 'decay': bn_decay}
if (stddev < 0):
weight_intitializer = slim.initializers.xavier_initializer()
else:
weight_intitializer = tf.trunc... |
def write_inp_section(file_object, allheaders, sectionheader, section_data, pad_top=True, na_fill=''):
f = file_object
add_str = ''
sectionheader = format_inp_section_header(sectionheader)
if (not section_data.empty):
if pad_top:
f.write((('\n\n' + sectionheader) + '\n'))
els... |
def create_generators(args):
common_args = {'batch_size': args.batch_size, 'image_size': args.image_size}
if args.random_transform:
misc_effect = MiscEffect()
visual_effect = VisualEffect()
else:
misc_effect = None
visual_effect = None
if (args.dataset_type == 'pascal'):
... |
def test_extract_method_global_with_similar(config, workspace, code_action_context):
document = create_document(workspace, 'method.py')
line = 6
start_col = document.lines[line].index('sys.stdin.read()')
end_col = document.lines[line].index(')\n')
selection = Range((line, start_col), (line, end_col)... |
def bch_expand(*ops, **kwargs):
order = kwargs.get('order', 6)
if ((not isinstance(order, int)) or (order < 0)):
raise ValueError('Invalid order parameter.')
if (len(ops) < 2):
raise ValueError('Input must consist of at least 2 operators.')
if (len(set((type(op) for op in ops))) != 1):
... |
class ExecuteContractUseCaseTests(TestCase):
def setUp(self):
self.notifications = [Mock()]
self.use_case = use_cases.ExecuteContractUseCase(self.notifications)
self.user = baker.make(settings.AUTH_USER_MODEL)
self.file = SimpleUploadedFile('contract.txt', b'Contract content')
... |
class SuperNetwork(nn.Module):
def __init__(self, init_channels, classes=10, shadow_bn=True):
super(SuperNetwork, self).__init__()
self.init_channels = init_channels
self.stem = nn.Sequential(nn.Conv2d(3, self.init_channels, kernel_size=3, stride=1, padding=1, bias=False), nn.BatchNorm2d(sel... |
def sample_grads(z, y):
idx = (z < 0)
out = np.zeros_like(z)
exp_z = np.exp(z[idx])
y_idx = y[idx]
out[idx] = ((((1 - y_idx) * exp_z) - y_idx) / (1 + exp_z))
exp_nx = np.exp((- z[(~ idx)]))
y_nidx = y[(~ idx)]
out[(~ idx)] = (((1 - y_nidx) - (y_nidx * exp_nx)) / (1 + exp_nx))
return ... |
def restoreCheckedStates(fit, stateInfo, ignoreModPoss=()):
if (stateInfo is None):
return
(changedMods, changedProjMods, changedProjDrones) = stateInfo
for (pos, state) in changedMods.items():
if (pos in ignoreModPoss):
continue
fit.modules[pos].state = state
for (po... |
def batch_padding(x_data, answerData, lengthData, x_char_data):
maxlen = 0
x_datapad = x_data[:]
answerDatapad = answerData[:]
x_char_datapad = x_char_data[:]
for length in lengthData:
if (length > maxlen):
maxlen = length
for idx in range(len(x_datapad)):
for i in ra... |
class UnpairedDataset(data.Dataset):
def initialize(self, sfiles, tfiles, sdataset_sizes, tdataset_sizes, preprocess_mode='resize_and_crop', load_size=286, crop_size=256):
self.source_paths = []
self.target_paths = []
assert ((len(sfiles) == len(tfiles)) and (len(sfiles) == len(sdataset_size... |
def synthesize(species, ksynth):
def synthesize_name_func(rule_expression):
cps = rule_expression.product_pattern.complex_patterns
return '_'.join((_complex_pattern_label(cp) for cp in cps))
if isinstance(species, Monomer):
species = species()
species = as_complex_pattern(species)
... |
_criterion('cross_entropy', dataclass=CrossEntropyCriterionConfig)
class CrossEntropyCriterion(FairseqCriterion):
def __init__(self, task, sentence_avg):
super().__init__(task)
self.sentence_avg = sentence_avg
def forward(self, model, sample, reduce=True):
net_output = model(**sample['ne... |
_function
def report(name, value):
if (name not in _counters):
_counters[name] = dict()
elems = torch.as_tensor(value)
if (elems.numel() == 0):
return value
elems = elems.detach().flatten().to(_reduce_dtype)
moments = torch.stack([torch.ones_like(elems).sum(), elems.sum(), elems.squa... |
def test_match_raises_error(pytester: Pytester) -> None:
pytester.makepyfile("\n import pytest\n def test_division_zero():\n with pytest.raises(ZeroDivisionError) as excinfo:\n 0 / 0\n excinfo.match(r'[123]+')\n ")
result = pytester.runpytest('--tb=short')
... |
class MAnet(SegmentationModel):
def __init__(self, encoder_name: str='resnet34', encoder_depth: int=5, encoder_weights: Optional[str]='imagenet', decoder_use_batchnorm: bool=True, decoder_channels: List[int]=(256, 128, 64, 32, 16), decoder_pab_channels: int=64, in_channels: int=3, classes: int=1, activation: Option... |
class GroupEpicIssue(ObjectDeleteMixin, SaveMixin, RESTObject):
_id_attr = 'epic_issue_id'
manager: 'GroupEpicIssueManager'
def save(self, **kwargs: Any) -> None:
updated_data = self._get_updated_data()
if (not updated_data):
return
obj_id = self.encoded_id
self.m... |
class RequestCallbackAnswer():
async def request_callback_answer(self: 'pyrogram.Client', chat_id: Union[(int, str)], message_id: int, callback_data: Union[(str, bytes)], timeout: int=10):
data = (bytes(callback_data, 'utf-8') if isinstance(callback_data, str) else callback_data)
return (await self.... |
class SharedRMSprop(optim.Optimizer):
def __init__(self, params, args):
lr = args.lr
alpha = 0.99
eps = 0.1
weight_decay = 0
momentum = 0
centered = False
defaults = defaultdict(lr=lr, alpha=alpha, eps=eps, weight_decay=weight_decay, momentum=momentum, centere... |
def evaluate(dataloader, model, criterion, postprocessors, confusion, config, args, thresh):
model.eval()
criterion.eval()
my_save_list_prob = dict()
my_save_list_coef = dict()
logging.error('VALIDATION')
for (i, batch) in enumerate(tqdm(dataloader)):
(seq_images, targets, _) = batch
... |
class ConcatShuffleDataset(Dataset):
def __init__(self, datasets):
self.concat_dataset = ConcatDataset(datasets)
self.index2old_index = list(range(len(self.concat_dataset)))
np.random.shuffle(self.index2old_index)
def __getitem__(self, index):
old_index = self.index2old_index[ind... |
class Effect6614(BaseEffect):
type = 'passive'
def handler(fit, src, context, projectionRange, **kwargs):
fit.modules.filteredItemBoost((lambda mod: mod.item.requiresSkill('Hull Upgrades')), 'armorHPBonusAdd', src.getModifiedItemAttr('shipBonusRole2'), **kwargs)
fit.modules.filteredItemBoost((la... |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.