code stringlengths 281 23.7M |
|---|
class Decoder(nn.Module):
def __init__(self, vocab_size, hidden_size):
super().__init__()
self.embed = nn.Embedding(vocab_size, hidden_size)
self.rnn = nn.LSTM(hidden_size, hidden_size, 1, batch_first=True)
self.linear = nn.Linear(hidden_size, vocab_size)
self.logsoftmax = nn... |
def _get_config_path():
repo_dir = osp.dirname(osp.dirname(osp.dirname(__file__)))
config_dpath = osp.join(repo_dir, 'configs')
if (not osp.exists(config_dpath)):
raise Exception('Cannot find config path')
config_fpaths = list(glob.glob(osp.join(config_dpath, '*.py')))
config_names = [os.pat... |
.parametrize('read_until_end', [True, False])
.parametrize('max_chunk_size', [, 10, 5, 2, 1])
.parametrize('chunks', [[b'this', b'is', b'some', b'chunked', b'data', b''], [b'this is a very large chunk of data', b''], [b'h', b'e', b'l', b'l', b'o', b'']])
.parametrize('retry_count', [0, 5])
def test_chunked_upload(chunk... |
class AdaptiveCompressionBase(CompressionBase, ABC):
def choose_compression(self, info: CompressionInfo) -> CompressionBase:
...
def estimate_compression_ratio(self, info: CompressionInfo) -> float:
return self.choose_compression(info).estimate_compression_ratio(info)
def compress(self, tens... |
def get_file_list(basedir, all_files, filterfunc):
    """Collect files from *all_files* whose joined path passes *filterfunc*.

    Args:
        basedir: Directory prefixed onto each filename via ``os.path.join``.
        all_files: Iterable of file names relative to *basedir*.
        filterfunc: Predicate called with the joined absolute name; an entry
            is kept only when it returns a truthy value.

    Returns:
        List of ``{'name': ..., 'absname': ...}`` dicts, sorted
        case-insensitively by ``name``.
    """
    # Pair each name with its joined path once, then filter — replaces the
    # manual append loop with an idiomatic comprehension.
    joined = ((name, os.path.join(basedir, name)) for name in all_files)
    items = [
        {'name': name, 'absname': absname}
        for name, absname in joined
        if filterfunc(absname)
    ]
    return sorted(items, key=lambda v: v['name'].lower())
_traceback
def extract_semantic_single_core(proc_id, annotations_set, segmentations_folder, output_json_file, semantic_seg_folder, categories, save_as_png, things_other):
annotation_semantic_seg = []
for (working_idx, annotation) in enumerate(annotations_set):
if ((working_idx % 100) == 0):
... |
class PhysicalObject(pyglet.sprite.Sprite):
def __init__(self, *args, **kwargs):
super(PhysicalObject, self).__init__(*args, **kwargs)
(self.velocity_x, self.velocity_y) = (0.0, 0.0)
self.reacts_to_bullets = True
self.is_bullet = False
self.dead = False
self.new_objec... |
class StochasticTMLE():
def __init__(self, df, exposure, outcome, alpha=0.05, continuous_bound=0.0005, verbose=False):
self.exposure = exposure
self.outcome = outcome
self._missing_indicator = '__missing_indicator__'
(self.df, self._miss_flag, self._continuous_outcome) = check_input_... |
def alazy_constant(ttl=0):
def decorator(fn):
()
(fn)
def wrapper():
if ((wrapper.alazy_constant_refresh_time == 0) or ((ttl != 0) and (wrapper.alazy_constant_refresh_time < (utime() - ttl)))):
wrapper.alazy_constant_cached_value = (yield fn.asynq())
... |
def create_learning_rate_scheduler(factors='constant * linear_warmup * rsqrt_decay', base_learning_rate=0.5, warmup_steps=1000, decay_factor=0.5, steps_per_decay=20000, steps_per_cycle=100000):
factors = [n.strip() for n in factors.split('*')]
def step_fn(step):
ret = 1.0
for name in factors:
... |
def emulate_device(device=''):
print('Emulate device: "{device}"'.format(device=device))
for param in DEVICES.get(device, []):
peripheral = OPTIONS.get(param, {'install': empty_func})
print(' - Install function "{param}"'.format(param=param))
install = peripheral['install']
insta... |
def register_function(ctx: PluginContext, singledispatch_obj: Instance, func: Type, options: Options, register_arg: (Type | None)=None) -> None:
func = get_proper_type(func)
if (not isinstance(func, CallableType)):
return
metadata = get_singledispatch_info(singledispatch_obj)
if (metadata is Non... |
class Customer(Resource):
def __init__(self, client=None):
super(Customer, self).__init__(client)
self.base_url = (URL.V1 + URL.CUSTOMER_URL)
def fetch(self, customer_id, data={}, **kwargs):
return super(Customer, self).fetch(customer_id, data, **kwargs)
def create(self, data={}, **k... |
.bedtools
.parametrize('strandedness', no_opposite)
(max_examples=max_examples, deadline=deadline, print_blob=True, suppress_health_check=HealthCheck.all())
(gr=dfs_min(), gr2=dfs_min())
def test_set_union(gr, gr2, strandedness):
set_union_command = 'cat {f1} {f2} | bedtools sort | bedtools merge {strand} -c 4,5,6 ... |
class MLP(torch.nn.Sequential):
def __init__(self, input_dim, feature_dim, depth, hidden_dims):
if isinstance(hidden_dims, str):
hidden_dims = [int(d) for d in hidden_dims.split(',')]
modules = [torch.nn.Linear(input_dim, hidden_dims[0]), torch.nn.ReLU()]
for i in range(1, depth)... |
class TestDataset(Dataset):
def __init__(self, raw_data, batch_size, num_steps):
self.raw_data = np.array(raw_data, dtype=np.int64)
self.num_steps = num_steps
self.batch_size = batch_size
self.num_steps = num_steps
self.data_len = len(self.raw_data)
self.sample_len = ... |
def convert_markers(marker: BaseMarker) -> ConvertedMarkers:
from poetry.core.version.markers import MarkerUnion
from poetry.core.version.markers import MultiMarker
from poetry.core.version.markers import SingleMarker
requirements: ConvertedMarkers = {}
marker = dnf(marker)
conjunctions = (marke... |
class Fence(OpenIdConnectAuth):
name = 'fence'
OIDC_ENDPOINT = '
ID_KEY = 'username'
ACCESS_TOKEN_METHOD = 'POST'
DEFAULT_SCOPE = ['openid', 'user']
JWT_DECODE_OPTIONS = {'verify_at_hash': False}
def _url(self, path):
return urljoin(append_slash(self.OIDC_ENDPOINT), path)
def aut... |
class WebhookRequestBodyUnmarshaller(WebhookRequestValidator, BaseWebhookRequestUnmarshaller):
def unmarshal(self, request: WebhookRequest) -> RequestUnmarshalResult:
try:
(path, operation, _, path_result, _) = self._find_path(request)
except PathError as exc:
return RequestU... |
class TestBrute(TestCase):
def test_brute_length_default(self):
last_str = ''
for pw in brute():
last_str = pw
self.assertEqual(len(last_str), 3)
def test_brute_returns_generator(self):
self.assertIsInstance(brute(), GeneratorType)
def test_letters_numbers_symbols... |
class ImagePaginator(Paginator):
def __init__(self, prefix: str='', suffix: str=''):
super().__init__(prefix, suffix)
self._current_page = [prefix]
self.images = []
self._pages = []
def add_line(self, line: str='', *, empty: bool=False) -> None:
if line:
self.... |
def profile_sdn(model, input_size, device):
inp = (1, 3, input_size, input_size)
model.eval()
def add_hooks(m):
if (len(list(m.children())) > 0):
return
m.register_buffer('total_ops', torch.zeros(1))
m.register_buffer('total_params', torch.zeros(1))
for p in m.par... |
def iterate_with_weights(items: Iterable[T], item_weights: Mapping[(T, float)], rng: Random) -> Iterator[T]:
item_list = list(items)
weights = [max(item_weights[action], 0) for action in item_list]
while (item_list and any(((weight > 0) for weight in weights))):
pickup_node = rng.choices(item_list, ... |
def _yaml_object_in_list(eql_event, yaml_object, obj_type):
idx = 0
for obj in yaml_object[obj_type]:
match = True
for (k, v) in eql_event.items():
if (((k in obj) and (obj[k] == v)) or ((k == 'score_logbook') and (obj_type in ['visibility', 'detection'])) or ((k == 'applicable_to') ... |
def l1_inverse(depth1, depth2):
assert np.all((((np.isfinite(depth1) & np.isfinite(depth2)) & (depth1 > 0)) & (depth2 > 0)))
diff = (np.reciprocal(depth1) - np.reciprocal(depth2))
num_pixels = float(diff.size)
if (num_pixels == 0):
return np.nan
else:
return (np.sum(np.absolute(diff)... |
class TestBatchBySize(unittest.TestCase):
def batch_by_size_baseline(cls, indices, num_tokens_vec, max_tokens, max_sentences, bsz_mult):
batches = []
start = 0
while (start < len(indices)):
for end in range((start + 1), (len(indices) + 1)):
max_val = max((num_toke... |
def test_get_manifest_labels():
labels = dict(foo='bar', baz='meh')
retriever = ContentRetrieverForTesting.for_config({'config': {'Labels': labels}, 'rootfs': {'type': 'layers', 'diff_ids': []}, 'history': []}, CONFIG_DIGEST, CONFIG_SIZE)
manifest = DockerSchema2Manifest(Bytes.for_string_or_unicode(MANIFEST... |
def create_stub_hrit(filename, open_fun=open, meta=mda):
nbits = meta['number_of_bits_per_pixel']
lines = meta['number_of_lines']
cols = meta['number_of_columns']
total_bits = ((lines * cols) * nbits)
arr = np.random.randint(0, 256, size=int((total_bits / 8)), dtype=np.uint8)
with open_fun(filen... |
def run_step(context):
    """Remove every key named in ``context['contextClear']`` from context.

    Requires ``contextClear`` to exist and have a value; missing keys listed
    inside it are ignored (``pop`` with a default).
    """
    logger.debug('started')
    context.assert_key_has_value(key='contextClear', caller=__name__)
    keys_to_remove = context['contextClear']
    for key in keys_to_remove:
        logger.debug('removing %s from context', key)
        # Default of None makes removal idempotent for already-absent keys.
        context.pop(key, None)
        logger.info('removed %s from context', key)
    logger.debug('done')
class Tsp(GraphOptimizationApplication):
def to_quadratic_program(self) -> QuadraticProgram:
mdl = Model(name='TSP')
n = self._graph.number_of_nodes()
x = {(i, k): mdl.binary_var(name=f'x_{i}_{k}') for i in range(n) for k in range(n)}
tsp_func = mdl.sum((((self._graph.edges[(i, j)]['... |
class InputInvoiceMessageContent(InputMessageContent):
__slots__ = ('title', 'description', 'payload', 'provider_token', 'currency', 'prices', 'max_tip_amount', 'suggested_tip_amounts', 'provider_data', 'photo_url', 'photo_size', 'photo_width', 'photo_height', 'need_name', 'need_phone_number', 'need_email', 'need_s... |
class TestPrettyprint(QiskitOptimizationTestCase):
def _convert(out: str):
print('"\\n".join([')
for line in out.split('\n'):
print(f'"{line}",')
print('])')
def test_prettyprint(self):
with self.subTest('empty'):
q_p = QuadraticProgram()
expec... |
def setup_context(setup_dir):
temp_dir = os.path.join(setup_dir, 'temp')
with save_pkg_resources_state():
with save_modules():
with save_path():
hide_setuptools()
with save_argv():
with override_temp(temp_dir):
with ... |
def main(args):
assert (args.dataset in ['mnist', 'svhn', 'cifar-10', 'cifar-100']), "dataset parameter must be either 'mnist', 'svhn', 'cifar-10', 'cifar-100'"
assert (args.model_name in ['ce', 'forward', 'backward', 'boot_hard', 'boot_soft', 'd2l']), "dataset parameter must be either 'ce', 'forward', 'backwar... |
def test_lanelinking_roads_suc_suc():
road1 = pyodrx.create_road(pyodrx.Line(10), 0, 1, 1)
road2 = pyodrx.create_road(pyodrx.Line(10), 1, 1, 1)
road1.add_successor(pyodrx.ElementType.road, 1, pyodrx.ContactPoint.end)
road2.add_successor(pyodrx.ElementType.road, 0, pyodrx.ContactPoint.end)
odr = pyod... |
def test_infer_str() -> None:
ast_nodes = astroid.extract_node("\n str(s) #\n str('a') #\n str(some_object()) #\n ")
for node in ast_nodes:
inferred = next(node.infer())
assert isinstance(inferred, astroid.Const)
node = astroid.extract_node("\n str(s='') #\n ")
inferred... |
def test_get_am15g():
e = spectrum.get_am15g()
assert_equal(len(e), 2002)
assert_equal(np.sum(e.index), 2761442)
assert_approx_equal(np.sum(e), 1002.88, significant=6)
wavelength = [270, 850, 950, 1200, 4001]
expected = [0.0, 0.89372, 0.14726, 0.44825, 0.0]
e = spectrum.get_am15g(wavelength)... |
def assert_is_leaf(leaf_cert: x509.Certificate) -> None:
bc = leaf_cert.extensions.get_extension_for_class(x509.BasicConstraints)
assert (bc.value.ca is False)
assert (bc.critical is True)
ku = leaf_cert.extensions.get_extension_for_class(x509.KeyUsage)
assert (ku.value.digital_signature is True)
... |
class TOrderWeighted(TestCase):
def test_weighted(self):
pl = PlaylistModel()
pl.set([r3, r1, r2, r0])
order = OrderWeighted()
scores = defaultdict(int)
for _i in range(500):
order.reset(pl)
cur = pl.current_iter
for j in range(3, (- 1), (-... |
def l2_afr_schema(settings=None):
settings = (settings or {})
nobs = settings.get('num_obs', 120)
nacc = settings.get('num_accumulations', 20)
return {'providers': settings.get('providers', {}), 'variable_path': settings.get('variable_path', ''), 'dimensions': accumulation_dimensions(nacc, nobs), 'varia... |
(description='Create speaker vouchers on Pretix')
def create_speaker_vouchers_on_pretix(modeladmin, request, queryset):
is_filtered_by_conference = (queryset.values_list('conference_id').distinct().count() == 1)
if (not is_filtered_by_conference):
messages.error(request, 'Please select only one conferen... |
class BaseParse(object):
__model__ = None
__request__ = request
by = frozenset(['by'])
query = frozenset(['gt', 'ge', 'lt', 'le', 'ne', 'eq', 'ic', 'ni', 'in'])
def __init__(self):
self._operator_funcs = {'gt': self.__gt_model, 'ge': self.__ge_model, 'lt': self.__lt_model, 'le': self.__le_mo... |
def process_config(json_file):
(config, _) = get_config_from_json(json_file)
paths = json_file.split('/')[1:(- 1)]
summary_dir = ((['./runs/pruning'] + paths) + [config.exp_name, 'summary/'])
ckpt_dir = ((['./runs/pruning'] + paths) + [config.exp_name, 'checkpoint/'])
config.summary_dir = os.path.jo... |
class TestSetClipRectangles(EndianTest):
def setUp(self):
self.req_args_0 = {'gc': , 'ordering': 1, 'rectangles': [{'x': (- 14422), 'y': (- 3797), 'width': 57581, 'height': 26888}, {'x': (- 858), 'y': (- 12431), 'width': 49373, 'height': 10384}], 'x_origin': (- 27444), 'y_origin': (- 780)}
self.req_... |
class TestListAndDeleteTag(ApiTestCase):
def test_invalid_tags(self):
self.login(ADMIN_ACCESS_USER)
json = self.getJsonResponse(ListRepositoryTags, params=dict(repository=(ADMIN_ACCESS_USER + '/complex'), specificTag='staging', onlyActiveTags=True))
staging_images = json['tags']
self... |
class Effect5107(BaseEffect):
    """Passive ship effect: adjusts 'capacitorNeed' on fitted modules.

    Targets modules whose item requires the 'Remote Armor Repair Systems'
    skill and applies the source's 'shipBonusGF' attribute;
    skill='Gallente Frigate' presumably scales the boost per skill level —
    framework behaviour not visible here, confirm against BaseEffect.
    """
    type = 'passive'
    def handler(fit, src, context, projectionRange, **kwargs):
        # NOTE(review): handler has no 'self' — the effect framework appears
        # to call it as a plain function; confirm against BaseEffect.
        fit.modules.filteredItemBoost((lambda mod: mod.item.requiresSkill('Remote Armor Repair Systems')), 'capacitorNeed', src.getModifiedItemAttr('shipBonusGF'), skill='Gallente Frigate', **kwargs) |
def plot(latents, name=''):
(fig, ax) = plt.subplots(1, 1, figsize=(3.6, 4), dpi=300)
fig.set_tight_layout(True)
fourier_latents = []
for latent in latents:
if (len(latent.shape) == 3):
(b, n, c) = latent.shape
(h, w) = (int(math.sqrt(n)), int(math.sqrt(n)))
l... |
def main():
try:
print('Installing needed scripts')
home = (os.environ['HOME'] + '/')
find_users_bash_config(home)
check_already_installed(home)
setup_bashhub_files(home)
except Exception as err:
sys.stderr.write(('Setup Error:\n%s\n' % str(err)))
sys.exit... |
class DataProcessor(object):
def get_train_examples(self, data_dir):
raise NotImplementedError()
def get_dev_examples(self, data_dir):
raise NotImplementedError()
def get_test_examples(self, data_dir):
raise NotImplementedError()
def get_labels(self):
raise NotImplemented... |
class Effect6059(BaseEffect):
    """Passive ship effect: boosts 'hp' on drones.

    Targets drones whose item requires the 'Medium Drone Operation' skill
    and applies the ship's 'shipBonusGC2' attribute;
    skill='Gallente Cruiser' presumably scales the boost per skill level —
    framework behaviour not visible here, confirm against BaseEffect.
    """
    type = 'passive'
    def handler(fit, ship, context, projectionRange, **kwargs):
        # NOTE(review): handler has no 'self' — the effect framework appears
        # to call it as a plain function; confirm against BaseEffect.
        fit.drones.filteredItemBoost((lambda drone: drone.item.requiresSkill('Medium Drone Operation')), 'hp', ship.getModifiedItemAttr('shipBonusGC2'), skill='Gallente Cruiser', **kwargs) |
def _resolve_dot_segments(path):
segs = []
for seg in path:
if (seg == u'.'):
pass
elif (seg == u'..'):
if segs:
segs.pop()
else:
segs.append(seg)
if (list(path[(- 1):]) in ([u'.'], [u'..'])):
segs.append(u'')
return seg... |
def build_finished(app, exception):
if ((not app.config.autoapi_keep_files) and app.config.autoapi_generate_api_docs):
normalized_root = os.path.normpath(os.path.join(app.srcdir, app.config.autoapi_root))
if (app.verbosity > 1):
LOGGER.info((colorize('bold', '[AutoAPI] ') + colorize('dar... |
def assertTableData(table, data):
assert (len(data) == table.rowCount())
rows = list(range(table.rowCount()))
columns = list(range(table.columnCount()))
for r in rows:
assert (len(data[r]) == table.columnCount())
row = []
for c in columns:
item = table.item(r, c)
... |
('qf_lib.backtesting.broker.broker.Broker', autospec=True)
('qf_lib.backtesting.order.order_factory.OrderFactory', autospec=True)
('qf_lib.containers.futures.future_tickers.future_ticker.FutureTicker', autospec=True)
class TestFuturesRollingOrdersGenerator(unittest.TestCase):
def setUp(self) -> None:
self.c... |
class ReportDialog(_CrashDialog):
def __init__(self, pages, cmdhist, qobjects, parent=None):
super().__init__(False, parent)
self.setAttribute(Qt.WidgetAttribute.WA_DeleteOnClose)
self._pages = pages
self._cmdhist = cmdhist
self._qobjects = qobjects
self._set_crash_in... |
def test_connection_batch_write_item():
items = []
conn = Connection()
table_name = 'Thread'
for i in range(10):
items.append({'ForumName': 'FooForum', 'Subject': 'thread-{}'.format(i)})
with pytest.raises(ValueError):
conn.batch_write_item(table_name)
conn.add_meta_table(MetaTab... |
class StoppableHTTPServer(socketserver.TCPServer):
    def server_bind(self):
        """Bind the listening socket and record the server's address.

        Extends ``TCPServer.server_bind`` by mirroring the bound address into
        ``server_name``/``server_port`` and arming a 1-second socket timeout.
        """
        socketserver.TCPServer.server_bind(self)
        # server_address may carry extra fields (e.g. IPv6 flowinfo/scope);
        # only host and port are used here.
        (host, port) = self.server_address[:2]
        self.server_name = host
        self.server_port = port
        # Short timeout keeps accept from blocking indefinitely — presumably
        # so the serve loop can notice self.run being cleared (the rest of
        # get_request is not visible here; confirm).
        self.socket.settimeout(1)
        self.run = True
def get_request(self... |
def get_parser():
parser = argparse.ArgumentParser(description='Convert Pytorch to Caffe model')
parser.add_argument('--config-file', metavar='FILE', help='path to config file')
parser.add_argument('--name', default='baseline', help='name for converted model')
parser.add_argument('--output', default='ca... |
class DelayedLinearWarmup(object):
def __init__(self, delay: int=2000, inc: float=0.005, t_max: float=1.0):
self.t = 0.0
self.t_max = t_max
self.inc = inc
self.delay = delay
self.counter = 0
def __iter__(self):
return self
def __next__(self):
self.coun... |
class CvtDropPath(nn.Module):
def __init__(self, drop_prob: Optional[float]=None) -> None:
super().__init__()
self.drop_prob = drop_prob
def forward(self, hidden_states: torch.Tensor) -> torch.Tensor:
return drop_path(hidden_states, self.drop_prob, self.training)
def extra_repr(self)... |
(frozen=True)
class ContractSendChannelSettle(ContractSendEvent):
canonical_identifier: CanonicalIdentifier
def token_network_address(self) -> TokenNetworkAddress:
return self.canonical_identifier.token_network_address
def channel_identifier(self) -> ChannelID:
return self.canonical_identifi... |
def open_url(url: str, cache_dir: str=None, num_attempts: int=10, verbose: bool=True, return_filename: bool=False, cache: bool=True) -> Any:
assert (num_attempts >= 1)
assert (not (return_filename and (not cache)))
if (not re.match('^[a-z]+://', url)):
return (url if return_filename else open(url, '... |
def l2_lfl_schema(settings=None):
settings = (settings or {})
nobs = settings.get('num_obs', 1234)
epoch = datetime(2000, 1, 1)
stime = (datetime(2019, 1, 1) - epoch).total_seconds()
etime = (datetime(2019, 1, 2) - epoch).total_seconds()
return {'providers': settings.get('providers', {}), 'varia... |
def get_datasets(args):
print(args.dataset)
if (args.dataset == 'owod'):
train_set = args.train_set
test_set = args.test_set
dataset_train = OWDetection(args, args.owod_path, ['2007'], image_sets=[args.train_set], transforms=make_coco_transforms(args.train_set))
dataset_val = OWD... |
('pytube.captions.Caption.generate_srt_captions')
def test_download_with_output_path(srt):
open_mock = mock_open()
captions.target_directory = MagicMock(return_value='/target')
with patch('builtins.open', open_mock):
srt.return_value = ''
caption = Caption({'url': 'url1', 'name': {'simpleTex... |
class SketchToImageTransforms(TransformsConfig):
def __init__(self, opts):
super(SketchToImageTransforms, self).__init__(opts)
def get_transforms(self):
transforms_dict = {'transform_gt_train': transforms.Compose([transforms.Resize((256, 256)), transforms.ToTensor(), transforms.Normalize([0.5, 0... |
class MBart50Tokenizer(PreTrainedTokenizer):
vocab_files_names = VOCAB_FILES_NAMES
max_model_input_sizes = PRETRAINED_POSITIONAL_EMBEDDINGS_SIZES
pretrained_vocab_files_map = PRETRAINED_VOCAB_FILES_MAP
model_input_names = ['input_ids', 'attention_mask']
prefix_tokens: List[int] = []
suffix_token... |
def test_context_block():
imgs = torch.randn(2, 16, 20, 20)
gen_attention_block = GeneralizedAttention(16, attention_type='1000')
assert (gen_attention_block.query_conv.in_channels == 16)
assert (gen_attention_block.key_conv.in_channels == 16)
assert (gen_attention_block.key_conv.in_channels == 16)
... |
class HomeWithFlaskTests(unittest.TestCase):
def setUp(self):
os.environ[CONFIGMAP_FILE_ENVIRONMENT] = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'config-tests-flask.yml')
ms = MyMicroservice()
ms.reload_conf()
self.app = ms.create_app()
self.client = self.app.t... |
class CT_Num(BaseOxmlElement):
abstractNumId = OneAndOnlyOne('w:abstractNumId')
lvlOverride = ZeroOrMore('w:lvlOverride')
numId = RequiredAttribute('w:numId', ST_DecimalNumber)
def add_lvlOverride(self, ilvl):
return self._add_lvlOverride(ilvl=ilvl)
def new(cls, num_id, abstractNum_id):
... |
_datapipe('shard_expand')
class ShardExpanderIterDataPipe(IterDataPipe[str]):
def __init__(self, source_datapipe: IterDataPipe[str]) -> None:
super().__init__()
self.source_datapipe: IterDataPipe[str] = source_datapipe
def __iter__(self) -> Iterator[str]:
for path in self.source_datapipe... |
def parse_args():
parser = argparse.ArgumentParser()
parser.add_argument('--cfg', type=str, help='cfg file path')
parser.add_argument('--pretrained', type=str, help='stage 1 checkpoint file path', default='')
parser.add_argument('--resume', type=str, help='resume', default='')
parser.add_argument('-... |
def create_transaction(order, dt, price, amount):
    """Build a Transaction for *order* executed at *dt* for *price*.

    Args:
        order: Order supplying ``asset`` and ``id`` for the transaction.
        dt: Execution datetime.
        price: Execution price.
        amount: Signed share count; truncated toward zero via ``int()``.

    Returns:
        A ``Transaction`` populated from the arguments.

    Raises:
        ValueError: If the truncated magnitude of *amount* is below 1.
    """
    amount_magnitude = int(abs(amount))
    if amount_magnitude < 1:
        # ValueError is more precise than the previous bare Exception and,
        # as a subclass of Exception, keeps existing callers working.
        raise ValueError('Transaction magnitude must be at least 1.')
    return Transaction(
        asset=order.asset,
        amount=int(amount),
        dt=dt,
        price=price,
        order_id=order.id,
    )
.parametrize('platform_specific', [False, True])
def test_config_settings(platform_specific, platform, intercepted_build_args, monkeypatch):
config_settings = 'setting=value setting=value2 other="something else"'
if platform_specific:
monkeypatch.setenv(('CIBW_CONFIG_SETTINGS_' + platform.upper()), conf... |
def random_scale_and_translate(bm, middle_edge):
verts = list(middle_edge.verts)
length = middle_edge.calc_length()
median = calc_edge_median(middle_edge)
axis = (VEC_RIGHT if (verts[0].co.y == verts[1].co.y) else VEC_FORWARD)
scale_factor = clamp((random.random() * 3), 1, 2.95)
bmesh.ops.scale(... |
def test_escaping_prompt():
from cmd2.rl_utils import rl_escape_prompt, rl_unescape_prompt
prompt = '(Cmd) '
assert (rl_escape_prompt(prompt) == prompt)
color = ansi.Fg.CYAN
prompt = ansi.style('InColor', fg=color)
escape_start = '\x01'
escape_end = '\x02'
escaped_prompt = rl_escape_prom... |
def get_inversions_on_batch(inputs, net, avg_image, opts):
(result_batch, result_latents) = run_on_batch(inputs, net, opts, avg_image)
y_hat = [result_batch[idx][(- 1)] for idx in range(len(result_batch))]
latents = [torch.from_numpy(result_latents[idx][(- 1)]).cuda() for idx in range(len(result_batch))]
... |
class DeepScene(BaseDataLoader):
def __init__(self, data_dir, batch_size, split, crop_size=None, base_size=None, scale=True, num_workers=1, val=False, shuffle=False, flip=False, rotate=False, blur=False, augment=False, val_split=None, return_id=False):
self.MEAN = [0.485, 0.456, 0.406]
self.STD = [0... |
class FilesystemMetadataStore(MetadataStore):
def __init__(self, cache_dir_prefix: str) -> None:
if cache_dir_prefix.startswith(os.devnull):
self.cache_dir_prefix = None
else:
self.cache_dir_prefix = cache_dir_prefix
def getmtime(self, name: str) -> float:
if (not... |
def init(out: ModelDir, model: Model, override=False):
for dir in [out.save_dir, out.log_dir]:
if os.path.exists(dir):
if (len(os.listdir(dir)) > 0):
if override:
print(('Clearing %d files/dirs that already existed in %s' % (len(os.listdir(dir)), dir)))
... |
def to_tensor(tensor):
    """Convert a list or numpy array to a torch Tensor, on GPU if available.

    Args:
        tensor: A list, ``np.ndarray``, or ``torch.Tensor``.

    Returns:
        A ``torch.Tensor`` (moved to CUDA when a GPU is available). The
        previous ``torch.autograd.Variable`` wrapper is dropped: Variable has
        been a no-op alias for Tensor since PyTorch 0.4, so behavior is
        unchanged.
    """
    if isinstance(tensor, list):
        tensor = np.asarray(tensor)
    if isinstance(tensor, np.ndarray):
        tensor = torch.from_numpy(tensor)
    if torch.cuda.is_available():
        return tensor.cuda()
    return tensor
def render_script_from_path(comm, path_executable_file, path_graph, render_args):
scene_id = obtain_scene_id_from_path(path_graph)
(title, description, script) = parse_exec_script_file(path_executable_file)
with open(path_graph, 'r') as f:
content = json.load(f)
init_graph = content['init_gr... |
def deconv_flops_counter_hook(conv_module: nn.Module, input: tuple, output: torch.Tensor) -> None:
batch_size = input[0].shape[0]
(input_height, input_width) = input[0].shape[2:]
(kernel_height, kernel_width) = conv_module.kernel_size
in_channels = conv_module.in_channels
out_channels = conv_module.... |
def parse_docstring(obj):
raw = getdoc(obj)
summary = (raw.strip(' \n').split('\n')[0].split('.')[0] if raw else None)
raises = {}
details = (raw.replace(summary, '').lstrip('. \n').strip(' \n') if raw else None)
for match in RE_RAISES.finditer((raw or '')):
raises[match.group('name')] = mat... |
def _evp_cipher_cipher_name(cipher: _AEADTypes) -> bytes:
from cryptography.hazmat.primitives.ciphers.aead import AESCCM, AESGCM, ChaCha20Poly1305
if isinstance(cipher, ChaCha20Poly1305):
return b'chacha20-poly1305'
elif isinstance(cipher, AESCCM):
return f'aes-{(len(cipher._key) * 8)}-ccm'.... |
class ProxyNCALoss(WeightRegularizerMixin, NCALoss):
def __init__(self, num_classes, embedding_size, **kwargs):
super().__init__(**kwargs)
self.proxies = torch.nn.Parameter(torch.Tensor(num_classes, embedding_size))
self.weight_init_func(self.proxies)
self.proxy_labels = torch.arange... |
.parametrize('runner', ['pytest', 'unittest'])
def test_unittest_expected_failure_for_failing_test_is_xfail(pytester: Pytester, runner) -> None:
script = pytester.makepyfile("\n import unittest\n class MyTestCase(unittest.TestCase):\n \n def test_failing_test_is_xfail(self):\n ... |
class MCL_Loss(nn.Module):
def __init__(self, args):
super(MCL_Loss, self).__init__()
self.embed_list = nn.ModuleList([])
self.args = args
for i in range(args.num_branches):
self.embed_list.append(Embed(args.rep_dim, args.feat_dim))
self.contrast = ContrastMemory(... |
def _load_trainer(args: SharedArgs, model_path: str, label_maps: Dict[(Task, LabelMap)], training_set: Dataset, validation_set: Optional[Dataset]) -> TrainerInterface:
if (args.detector in {DETECTOR_DENSE, DETECTOR_DENSE_DELTA}):
trainer_heads = _dense_trainer_heads(args, label_maps, training_set)
p... |
class Net(nn.Module):
def __init__(self, embeddings, lstm_hid_dim, num_classes=30, norm=True, scale=True):
super(Net, self).__init__()
self.extractor = Extractor(embeddings, lstm_hid_dim)
self.embedding = Embedding()
self.classifier = Classifier(num_classes)
self.s = nn.Param... |
def bulk_move_node_logic(args):
game = RandovaniaGame(args.game)
(path, data) = default_data.read_json_then_binary(game)
gd = data_reader.decode_data(data)
editor = Editor(gd)
region = gd.region_list.region_with_name(args.region)
source_area = region.area_by_name(args.source_area)
target_are... |
def test_concat_branches():
(a, b, c, d) = get_pseudo_nodes(4)
g = Graph()
c0 = ((g.orphan() >> a) >> b)
c1 = ((g >> c) >> d)
c2 = (c1 >> c0)
assert (c0.first == g.index_of(a))
assert (c2.first == BEGIN)
assert (c2.last == g.index_of(b))
assert (g.outputs_of(BEGIN) == g.indexes_of(c)... |
def state_bind_combobox_color(owner, state, path, widget):
def make_funcs():
def update_state(widget, state):
value = str(widget.currentText())
state.set(path, Color(value))
def update_widget(state, widget):
widget.blockSignals(True)
val = str(state.ge... |
class TestLoadCheckpoint(unittest.TestCase):
def setUp(self):
self.args_mock = MagicMock()
self.args_mock.optimizer_overrides = '{}'
self.args_mock.reset_dataloader = False
self.args_mock.reset_meters = False
self.args_mock.reset_optimizer = False
self.patches = {'os.... |
class TestExp():
def test_grad_0(self):
utt.verify_grad(exp, [np.asarray([[1.5089518, 1., (- 4.7820262)], [2., 0., (- 1.)]])])
def test_int(self):
x = ivector()
f = function([x], exp(x))
exp_3 = f([3])
assert (exp_3.dtype == 'float64')
def test_complex(self):
... |
def get_miou(pred: 'tensor (point_num, )', target: 'tensor (point_num, )', valid_labels: list):
(pred, target) = (pred.cpu().numpy(), target.cpu().numpy())
part_ious = []
for part_id in valid_labels:
pred_part = (pred == part_id)
target_part = (target == part_id)
I = np.sum(np.logica... |
class Random_Sampler():
def __init__(self, num_samples):
self.num_samples = num_samples
def sample(self, depth):
mask_keep = (depth > 0)
n_keep = np.count_nonzero(mask_keep)
if (n_keep == 0):
return mask_keep
else:
depth_sampled = np.zeros(depth.sh... |
class RecordEncoder():
def __init__(self) -> None:
self._record_seq = count()
def set_first_record_number(self, n: int) -> None:
self._record_seq = count(n)
def encode_volley(self, messages: Iterable[_AnyHandshakeMessage], mtu: int) -> list[bytearray]:
packets = []
packet = b... |
def calculate_non_kinematic_rescale_params(sim_dataset: SimulationDataset) -> NonKinematicActionRescaleParams:
x_component_frames = []
y_component_frames = []
yaw_component_frames = []
for index in range(1, (len(sim_dataset) - 1)):
ego_input = sim_dataset.rasterise_frame_batch(index)
x_c... |
def test_expand_packed_triangular():
with pytest.raises(ValueError):
x = pt.matrix('x')
x.tag.test_value = np.array([[1.0]], dtype=pytensor.config.floatX)
expand_packed_triangular(5, x)
N = 5
packed = pt.vector('packed')
packed.tag.test_value = floatX(np.zeros(((N * (N + 1)) // 2... |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.