code stringlengths 281 23.7M |
|---|
class RichardsonGaudin(DOCIHamiltonian):
def __init__(self, g, n_qubits):
hc = numpy.zeros((n_qubits,))
hr1 = numpy.zeros((n_qubits, n_qubits))
hr2 = numpy.zeros((n_qubits, n_qubits))
for p in range(n_qubits):
hc[p] = (2 * (p + 1))
for q in range(n_qubits):
... |
def _get_layer_control_string(control: folium.LayerControl, map: folium.Map) -> str:
control._id = 'layer_control'
control.add_to(map)
control.render()
control_string = generate_leaflet_string(control, base_id='layer_control')
m_id = get_full_id(map)
control_string = control_string.replace(m_id,... |
class RHEL6_RaidData(F13_RaidData):
removedKeywords = F13_RaidData.removedKeywords
removedAttrs = F13_RaidData.removedAttrs
def __init__(self, *args, **kwargs):
F13_RaidData.__init__(self, *args, **kwargs)
self.cipher = kwargs.get('cipher', '')
def _getArgsAsStr(self):
retval = F... |
class TWXXX(TestCase):
def test_default(self):
frame = WXXX()
self.assertEqual(frame.encoding, 1)
self.assertEqual(frame.desc, u'')
self.assertEqual(frame.url, u'')
def test_hash(self):
self.assert_(isinstance(WXXX(url='durl'), WXXX))
frame = WXXX(encoding=0, desc... |
class DebertaTokenizer(GPT2Tokenizer):
vocab_files_names = VOCAB_FILES_NAMES
pretrained_vocab_files_map = PRETRAINED_VOCAB_FILES_MAP
max_model_input_sizes = PRETRAINED_POSITIONAL_EMBEDDINGS_SIZES
model_input_names = ['input_ids', 'attention_mask', 'token_type_ids']
def __init__(self, vocab_file, mer... |
def eval_det_cls(pred, gt, classname, ovthresh=0.25, use_07_metric=False, get_iou_func=get_iou):
class_recs = {}
npos = 0
for img_id in gt.keys():
bbox = np.array(gt[img_id])
det = ([False] * len(bbox))
npos += len(bbox)
class_recs[img_id] = {'bbox': bbox, 'det': det}
for... |
class UniformControlPolicy(Policy, Serializable):
def __init__(self, env_spec):
Serializable.quick_init(self, locals())
super(UniformControlPolicy, self).__init__(env_spec=env_spec)
def vectorized(self):
return True
def get_action(self, observation):
return (self.action_space... |
class EmailIndex(GlobalSecondaryIndex):
    """Global secondary index keyed on email, projecting all table attributes."""

    class Meta():
        # Explicit index name used on the DynamoDB side (overrides the default).
        index_name = 'custom_idx_name'
        # Provisioned throughput settings for this index.
        read_capacity_units = 2
        write_capacity_units = 1
        # Project every attribute of the base table into the index.
        projection = AllProjection()
    # Index hash key: queried by email address.
    email = UnicodeAttribute(hash_key=True)
    # Index range key; stored under the attribute name 'numbers' in DynamoDB.
    alt_numbers = NumberSetAttribute(range_key=True, attr_name='numbers')
def build_dataloader(dataset, samples_per_gpu, workers_per_gpu, num_gpus=1, dist=True, shuffle=True, seed=None, **kwargs):
(rank, world_size) = get_dist_info()
if dist:
if shuffle:
sampler = DistributedGroupSampler(dataset, samples_per_gpu, world_size, rank, seed=seed)
else:
... |
class AcRepertoireIrreducibilityAnalysis(cmp.Orderable):
def __init__(self, alpha, state, direction, mechanism, purview, partition, probability, partitioned_probability, node_labels=None):
self.alpha = alpha
self.state = state
self.direction = direction
self.mechanism = mechanism
... |
def test_sequential_rom_rtl():
run_test_vector_sim(SequentialROMRTL(Bits32, 8, [8, 7, 6, 5, 4, 3, 2, 1], num_ports=2), [('raddr[0]', 'rdata[0]*', 'raddr[1]', 'rdata[1]*'), [1, '?', 5, '?'], [2, 7, 7, 3], [0, 6, 0, 1]])
run_test_vector_sim(SequentialROMRTL(Bits32, 8, [8, 7, 6, 5, 4, 3, 2, 1], num_ports=2), [('ra... |
class ByteBufferV2():
def __init__(self):
self._deque = collections.deque()
self._size = 0
def append(self, data):
if (not isinstance(data, bytes)):
raise ValueError('Expected bytes')
if data:
self._deque.append(data)
self._size += len(data)
... |
class _Pooling2D(Layer):
def __init__(self, pool_size=(2, 2), strides=None, padding='valid', data_format=None, **kwargs):
super(_Pooling2D, self).__init__(**kwargs)
data_format = conv_utils.normalize_data_format(data_format)
if (strides is None):
strides = pool_size
self.... |
class Effect6037(BaseEffect):
type = 'passive'
def handler(fit, ship, context, projectionRange, **kwargs):
fit.modules.filteredItemBoost((lambda mod: mod.item.requiresSkill('Small Projectile Turret')), 'damageMultiplier', ship.getModifiedItemAttr('shipBonusTacticalDestroyerMinmatar1'), skill='Minmatar T... |
class TriangleWindow(OpenGLWindow):
vertexShaderSource = '\nattribute highp vec4 posAttr;\nattribute lowp vec4 colAttr;\nvarying lowp vec4 col;\nuniform highp mat4 matrix;\nvoid main() {\n col = colAttr;\n gl_Position = matrix * posAttr;\n}\n'
fragmentShaderSource = '\nvarying lowp vec4 col;\nvoid main() ... |
_model
def test_model_repr():
monomers = [Monomer(f'A{i}') for i in range(1, 7)]
parameters = [Parameter(f'P{i}') for i in range(1, 5)]
rules = [Rule(f'R{i}', (m() >> None), parameters[0]) for (i, m) in enumerate(monomers[:5], 1)]
expressions = [Expression(f'E{i}', (p + 1)) for (i, p) in enumerate(param... |
def run_command_factory(args):
nlp = pipeline(task=args.task, model=(args.model if args.model else None), config=args.config, tokenizer=args.tokenizer, device=args.device)
format = (try_infer_format_from_ext(args.input) if (args.format == 'infer') else args.format)
reader = PipelineDataFormat.from_str(forma... |
def bench_telco(loops, filename):
getcontext().rounding = ROUND_DOWN
rates = list(map(Decimal, ('0.0013', '0.00894')))
twodig = Decimal('0.01')
Banker = Context(rounding=ROUND_HALF_EVEN)
basictax = Decimal('0.0675')
disttax = Decimal('0.0341')
with open(filename, 'rb') as infil:
data... |
class QuadOperatorTest(unittest.TestCase):
def test_is_normal_ordered_empty(self):
op = (QuadOperator() * 2)
self.assertTrue(op.is_normal_ordered())
def test_is_normal_ordered_number(self):
op = (QuadOperator('q2 p2') * (- 1j))
self.assertTrue(op.is_normal_ordered())
def test... |
.documentation
def test_docs_general_functions_present():
os.system('mkdocs build --clean')
rendered_correctly = False
with open('./site/api/functions/index.html', 'r+') as f:
for line in f.readlines():
if (('add_columns' in line) or ('update_where' in line)):
rendered_co... |
class _CustomEncoder(json.JSONEncoder):
def encode(self, o):
encoded = super(_CustomEncoder, self).encode(o)
if isinstance(o, str):
encoded = encoded.replace('<', '\\u003c')
encoded = encoded.replace('>', '\\u003e')
encoded = encoded.replace('&', '\\u0026')
... |
class Effect7039(BaseEffect):
type = 'passive'
def handler(fit, src, context, projectionRange, **kwargs):
groups = ('Structure Anti-Subcapital Missile', 'Structure Anti-Capital Missile')
for dmgType in ('em', 'kinetic', 'explosive', 'thermal'):
fit.modules.filteredChargeMultiply((lam... |
def iao(mol, orbocc, minao=MINAO, kpts=None, lindep_threshold=1e-08):
if (mol.has_ecp() and (minao == 'minao')):
logger.warn(mol, 'ECP/PP is used. MINAO is not a good reference AO basis in IAO.')
pmol = reference_mol(mol, minao)
has_pbc = (getattr(mol, 'dimension', 0) > 0)
if has_pbc:
fr... |
class EncodeDescription(json.JSONEncoder):
def default(self, obj):
if isinstance(obj, np.integer):
return int(obj)
elif isinstance(obj, np.dtype):
return str(obj)
elif isinstance(obj, np.floating):
return float(obj)
elif isinstance(obj, np.bool_):
... |
def get_config_from_root(root):
setup_cfg = os.path.join(root, 'setup.cfg')
parser = configparser.ConfigParser()
parser.read(setup_cfg)
VCS = parser.get('versioneer', 'VCS')
def get(parser, name):
if parser.has_option('versioneer', name):
return parser.get('versioneer', name)
... |
class DCUN_TFC_FiLM_TDF(DenseCUNet_FiLM):
def __init__(self, n_fft, input_channels, internal_channels, n_blocks, n_internal_layers, first_conv_activation, last_activation, t_down_layers, f_down_layers, kernel_size_t, kernel_size_f, bn_factor, min_bn_units, tfc_tdf_bias, tfc_tdf_activation, control_vector_type, cont... |
def test_memory_file_gdal_error_message(capsys):
memfile = MemoryFile()
data = numpy.array([[1, 2, 3, 4], [5, 6, 7, 8], [9, 10, 11, 12], [13, 14, 15, 16]]).astype('uint8')
west_bound = 0
north_bound = 2
cellsize = 0.5
nodata = (- 9999)
driver = 'AAIGrid'
dtype = data.dtype
shape = da... |
class FragmentationTests(ProtocolTestCase):
def test_client_send_ping_pong_in_fragmented_message(self):
client = Protocol(CLIENT)
client.send_text(b'Spam', fin=False)
self.assertFrameSent(client, Frame(OP_TEXT, b'Spam', fin=False))
client.send_ping(b'Ping')
self.assertFrameSe... |
class TabToolButtonWithCloseButton(TabToolButton):
SIZE = (22, 16)
CROSS_OFFSET = (0, 2)
def __init__(self, *args):
TabToolButton.__init__(self, *args)
self._icon = None
self._cross = self.getCrossPixmap1()
self.setMouseTracking(True)
self._overCross = False
def _... |
class ConferenceSettingConstants():
ALLOW_PUBLIC_VOTING_ON_PROPOSALS = {'name': 'allow_public_voting_on_proposals', 'value': True, 'description': 'Allow public to vote on proposals'}
DISPLAY_PROPOSALS_IN_PUBLIC = {'name': 'display_proposals_in_public', 'value': True, 'description': 'Display proposals in public'... |
def get_mask(in_features, out_features, in_flow_features, mask_type=None):
if (mask_type == 'input'):
in_degrees = (torch.arange(in_features) % in_flow_features)
else:
in_degrees = (torch.arange(in_features) % (in_flow_features - 1))
if (mask_type == 'output'):
out_degrees = ((torch.... |
def split_and_write(path, output_dir_1, output_dir_2):
with open(path, 'r') as f:
d = json.load(f)
keys = list(d.keys())
keys.sort()
m = int((len(keys) / 2))
d1 = {k: d[k][:25] for k in keys[:m]}
d2 = {k: d[k][:25] for k in keys[m:]}
base = osp.basename(path)
output_path = osp.jo... |
class LoginCommand(BaseUserCommand):
def run(self):
print(ANSI.red('WARNING! `transformers-cli login` is deprecated and will be removed in v5. Please use `huggingface-cli login` instead.'))
print('\n _| _| _| _| _|_|_| _|_|_| _|_|_| _| _| _|_|_| _|_|_|_| _|_| ... |
class TAudioFileGroup(TestCase):
def test_multiple_values(self):
group = AudioFileGroup([GroupSong(True), GroupSong(True)])
self.assertTrue((group.can_multiple_values() is True))
self.assertTrue((group.can_multiple_values('foo') is True))
group = AudioFileGroup([GroupSong(['ha']), Gr... |
def test_assert_not_in():
seq = ((('a' * 1000) + 'bbb') + ('a' * 1000))
with AssertRaises(AssertionError) as ar:
assert_not_in('bbb', seq)
e = ar.expected_exception_found
assert_eq(("'bbb' is in '(truncated) ...%sbbb%s... (truncated)'" % (('a' * 50), ('a' * 50))), str(e))
seq = ('a' * 1000)
... |
class TestURIVariable(TestCase):
def setUp(self):
self.v = variable.URIVariable('{foo}')
def test_post_parse(self):
v = self.v
self.assertEqual(v.join_str, ',')
self.assertEqual(v.operator, '')
self.assertEqual(v.safe, '')
self.assertEqual(v.start, '')
def tes... |
def _unzip_with_bz2(filename, tmpfilepath):
with bz2.BZ2File(filename) as bz2file:
try:
content = bz2file.read()
except IOError:
LOGGER.debug('Failed to unzip bzipped file %s', str(filename))
os.remove(tmpfilepath)
raise
return content |
def _matches(node1: (nodes.NodeNG | bases.Proxy), node2: nodes.NodeNG) -> bool:
if (isinstance(node1, nodes.Name) and isinstance(node2, nodes.Name)):
return (node1.name == node2.name)
if (isinstance(node1, nodes.Attribute) and isinstance(node2, nodes.Attribute)):
return ((node1.attrname == node2... |
def get_channelnet(model_name=None, pretrained=False, root=os.path.join('~', '.torch', 'models'), **kwargs):
channels = [[[32, 64]], [[128, 128]], [[256, 256]], [[512, 512], [512, 512]], [[1024, 1024]]]
block_names = [[['channet_conv3x3', 'channet_dws_conv_block']], [['channet_dws_conv_block', 'channet_dws_conv... |
class ParticleNumber():
def __init__(self, num_spatial_orbitals: int) -> None:
self.num_spatial_orbitals = num_spatial_orbitals
def second_q_ops(self) -> Mapping[(str, FermionicOp)]:
num_spin_orbitals = (2 * self.num_spatial_orbitals)
op = FermionicOp({f'+_{o} -_{o}': 1.0 for o in range(... |
class ArgumentSchema(marshmallow.Schema):
param_decls = ArgumentNameField(data_key='name', metadata={'description': 'Name of the argument.'})
type = TypeField(default=str, metadata={'description': f"Name of the type. {', '.join(TYPES)} accepted."})
required = fields.Boolean(default=True, metadata={'descript... |
def demo(printer: escpos.Escpos, **kwargs) -> None:
    """Run each selected demo routine on *printer*, then cut the paper.

    Each keyword name selects an entry in DEMO_FUNCTIONS; its parameter sets
    are fed to the printer method of the same name.
    """
    for choice in kwargs:
        # Both barcode demo groups map onto the printer's single `barcode` method.
        method_name = choice.replace('barcodes_a', 'barcode').replace('barcodes_b', 'barcode')
        method = getattr(printer, method_name)
        for call_params in DEMO_FUNCTIONS[choice]:
            method(**call_params)
    printer.cut()
class PacketGeneration(unittest.TestCase):
def test_parse_own_packet_simple(self):
generated = r.DNSOutgoing(0)
r.DNSIncoming(generated.packets()[0])
def test_parse_own_packet_simple_unicast(self):
generated = r.DNSOutgoing(0, False)
r.DNSIncoming(generated.packets()[0])
def ... |
class TwPooledEmbeddingDist(BaseEmbeddingDist[(EmbeddingShardingContext, torch.Tensor, torch.Tensor)]):
def __init__(self, pg: dist.ProcessGroup, dim_sum_per_rank: List[int], emb_dim_per_rank_per_feature: List[List[int]], device: Optional[torch.device]=None, callbacks: Optional[List[Callable[([torch.Tensor], torch.... |
class MonteLexer(RegexLexer):
name = 'Monte'
url = '
aliases = ['monte']
filenames = ['*.mt']
version_added = '2.2'
tokens = {'root': [('#[^\\n]*\\n', Comment), ('/\\*\\*.*?\\*/', String.Doc), ('\\bvar\\b', Keyword.Declaration, 'var'), ('\\binterface\\b', Keyword.Declaration, 'interface'), (word... |
def fuzzy_compare_filter(t, col, val, type):
t[col] = t[col].astype('str')
if (len(re.findall(pat_month, val)) > 0):
year_list = t[col].str.extract(pat_year, expand=False)
day_list = t[col].str.extract(pat_day, expand=False)
month_list = t[col].str.extract(pat_month, expand=False)
... |
def resnet_v2_101(inputs, num_classes=None, is_training=True, global_pool=True, output_stride=None, spatial_squeeze=True, reuse=None, scope='resnet_v2_101'):
blocks = [resnet_v2_block('block1', base_depth=64, num_units=3, stride=2), resnet_v2_block('block2', base_depth=128, num_units=4, stride=2), resnet_v2_block('... |
def configure_training(net_type, opt, lr, clip_grad, lr_decay, batch_size):
assert (opt in ['adam'])
assert (net_type in ['ff', 'rnn'])
opt_kwargs = {}
opt_kwargs['lr'] = lr
train_params = {}
train_params['optimizer'] = (opt, opt_kwargs)
train_params['clip_grad_norm'] = clip_grad
train_p... |
def electrolyte_conductivity_Nyman2008_arrhenius(c_e, T):
    """Electrolyte conductivity with an Arrhenius temperature correction.

    Applies a cubic-polynomial fit in the (scaled) concentration, then scales
    by an Arrhenius factor referenced to 298.15 K.

    Args:
        c_e: Electrolyte concentration (divided by 1000 before the fit —
            presumably mol.m-3 converted to mol.l; confirm against callers).
        T: Temperature.

    Returns:
        The temperature-corrected conductivity expression.
    """
    c = c_e / 1000
    sigma_e = 0.1297 * c**3 - 2.51 * c**1.5 + 3.329 * c
    # Activation energy for the Arrhenius correction.
    E_sigma_e = 17000
    arrhenius = pybamm.exp((E_sigma_e / pybamm.constants.R) * (1 / 298.15 - 1 / T))
    return sigma_e * arrhenius
class TreeNode():
def __init__(self):
self.children = []
self.parent = None
self.expanded = True
self._children_top = None
self._children_bot = None
def add_client(self, node, hint=None):
node.parent = self
if (hint is not None):
try:
... |
class ExceptionTool(BaseTool):
name = '_Exception'
description = 'Exception tool'
def _run(self, query: str, run_manager: Optional[CallbackManagerForToolRun]=None) -> str:
return query
async def _arun(self, query: str, run_manager: Optional[AsyncCallbackManagerForToolRun]=None) -> str:
r... |
def test_make_route_state_address_to_metadata_serialization_regression():
'Test that the address keys in address_to_metadata are deserialized.\n See:
addresses = [encode_hex(factories.make_address()) for _ in range(3)]
test_data = dict(path=addresses, address_metadata={address: {} for address in address... |
class TestAssertEqual(TestCase):
def test_you(self):
self.assertRegex(abc, 'xxx')
def test_me(self):
self.assertRegex(123, (xxx + y))
def test_everybody(self):
self.assertRegex('abc', 'def')
def test_message(self):
self.assertRegex((123 + z), (xxx + y), msg='This is wrong... |
class Parser():
def __init__(self, rules):
super(Parser, self).__init__()
self.orig_rules = {rule: rule for rule in rules}
rules = [self._to_rule(rule) for rule in rules]
self.grammar = to_cnf(Grammar(rules))
def _to_rule(self, lark_rule):
assert isinstance(lark_rule.orig... |
def check_all_contracts(*mod_names: str, decorate_main: bool=True) -> None:
if (not ENABLE_CONTRACT_CHECKING):
return
modules = []
if decorate_main:
mod_names = (mod_names + ('__main__',))
if RENAME_MAIN_TO_PYDEV_UMD:
mod_names = (mod_names + (_PYDEV_UMD_NAME,))
for m... |
def sa_pioglitazone_mpo() -> GoalDirectedBenchmark:
specification = uniform_specification(1, 10, 100)
benchmark_object = pioglitazone_mpo()
sa_biased = ScoringFunctionSAWrapper(benchmark_object.objective, SAScoreModifier())
return GoalDirectedBenchmark(name='SA_pioglitazone', objective=sa_biased, contri... |
def encoding_title(title, entities):
local_map = get_local_word2entity(entities)
array = title.split(' ')
word_encoding = (['0'] * MAX_TITLE_LENGTH)
entity_encoding = (['0'] * MAX_TITLE_LENGTH)
point = 0
for s in array:
if (s in word2index):
word_encoding[point] = str(word2in... |
.requires_user_action
class WINDOW_SET_MOUSE_CURSOR(InteractiveTestCase):
def on_mouse_motion(self, x, y, dx, dy):
print(('on_mousemotion(x=%f, y=%f, dx=%f, dy=%f)' % (x, y, dx, dy)))
def test_set_mouse_cursor(self):
(self.width, self.height) = (200, 200)
self.w = w = Window(self.width, ... |
def gen_clang_include_args(builddir):
includes = []
def _impl(dir: Path):
includes.append(dir)
for child in dir.iterdir():
if (child.is_dir() and (child not in includes)):
_impl(child)
_impl(((Path(builddir) / 'include') / 'libr'))
return [f'-I{str(p.resolve()... |
def run_preprocess():
parser = argparse.ArgumentParser()
parser.add_argument('--data_path', type=str, default=os.path.join('dataset', 'ljspeech.txt'))
parser.add_argument('--save_path', type=str, default=os.path.join('dataset', 'processed'))
parser.add_argument('--audio_index_path', type=str, default=os... |
def mat_to_laplacian(mat, normalized):
if sps.issparse(mat):
if np.all((mat.diagonal() >= 0)):
if np.all(((mat - sps.diags(mat.diagonal())).data <= 0)):
return mat
elif np.all((np.diag(mat) >= 0)):
if np.all(((mat - np.diag(mat)) <= 0)):
return mat
deg... |
def plot_learner_and_save(learner, fname):
(fig, ax) = plt.subplots()
tri = learner.interpolator(scaled=True).tri
triang = mtri.Triangulation(*tri.points.T, triangles=tri.vertices)
ax.triplot(triang, c='k', lw=0.8)
data = learner.interpolated_on_grid()
ax.imshow(np.vstack(data), extent=((- 0.5),... |
def generate_fswap_pairs(depth: int, dimension: int):
swap_list = []
for i in range(0, depth):
if ((i % 2) == 0):
swap_list.append([(i, (i + 1)) for i in range(0, (dimension - 1), 2)])
else:
swap_list.append([(i, (i + 1)) for i in range(1, (dimension - 1), 2)])
return... |
class ProvidedTextAssetConfiguration(AssetConfigurationMixin, BaseProvidedTextAsset, BenefitFeatureConfiguration):
class Meta(BaseProvidedTextAsset.Meta, BenefitFeatureConfiguration.Meta):
verbose_name = 'Provided Text Configuration'
verbose_name_plural = 'Provided Text Configurations'
const... |
class DescribeRun():
def it_knows_its_bool_prop_states(self, bool_prop_get_fixture):
(run, prop_name, expected_state) = bool_prop_get_fixture
assert (getattr(run, prop_name) == expected_state)
def it_can_change_its_bool_prop_settings(self, bool_prop_set_fixture):
(run, prop_name, value, ... |
class TestTranslation(unittest.TestCase):
def setUp(self):
logging.disable(logging.CRITICAL)
def tearDown(self):
logging.disable(logging.NOTSET)
def test_fconv(self):
with contextlib.redirect_stdout(StringIO()):
with tempfile.TemporaryDirectory('test_fconv') as data_dir:
... |
(whitelist=['batch_decoder'])
def process_batch(example_strings, class_ids, image_size, batch_decoder=None):
if isinstance(batch_decoder, decoder.ImageDecoder):
log_data_augmentation(batch_decoder.data_augmentation, 'batch')
batch_decoder.image_size = image_size
images = tf.map_fn(batch_decoder,... |
def confirm_timebased_sqli(base, parameter, payload_detected, injected_sleep_time, detected_response_time, url='', data='', headers='', injection_type='', proxy='', with_status_code=200, is_different_status_code_injectable=False, is_multipart=False, timeout=30, delay=0, timesec=5, is_boolean_confirmed=False, is_read_ti... |
(((TORCH_VERSION == (1, 8)) and torch.cuda.is_available()), 'This test fails under cuda11 + torch1.8.')
class DeformableTest(unittest.TestCase):
((not torch.cuda.is_available()), 'Deformable not supported for cpu')
def test_forward_output(self):
device = torch.device('cuda')
(N, C, H, W) = shape... |
def window_partition(x, window_size: List[int]):
(B, H, W, C) = x.shape
_assert(((H % window_size[0]) == 0), f'height ({H}) must be divisible by window ({window_size[0]})')
_assert(((W % window_size[1]) == 0), '')
x = x.view(B, (H // window_size[0]), window_size[0], (W // window_size[1]), window_size[1]... |
def _timed_hash_bucket(annotated_delta: DeltaAnnotated, round_completion_info: Optional[RoundCompletionInfo], primary_keys: List[str], sort_keys: List[SortKey], num_buckets: int, num_groups: int, enable_profiler: bool, read_kwargs_provider: Optional[ReadKwargsProvider]=None, object_store: Optional[IObjectStore]=None, d... |
class encoder(nn.Module):
def __init__(self, dim, nc=1):
super(encoder, self).__init__()
self.dim = dim
self.c1 = nn.Sequential(vgg_layer(nc, 64), vgg_layer(64, 64))
self.c2 = nn.Sequential(vgg_layer(64, 128), vgg_layer(128, 128))
self.c3 = nn.Sequential(vgg_layer(128, 256), ... |
def read_tables(config, c=None):
table_reader = build_reader(data_format=config['file_format'], basepath=config['data_dir'], split_row_groups=config['split_row_groups'], backend=config['backend'])
ss_cols = ['ss_customer_sk', 'ss_sold_date_sk', 'ss_ticket_number', 'ss_net_paid']
ws_cols = ['ws_bill_customer... |
class ContextManagers():
def __init__(self, context_managers: List[ContextManager]):
self.context_managers = context_managers
self.stack = ExitStack()
def __enter__(self):
for context_manager in self.context_managers:
self.stack.enter_context(context_manager)
def __exit__... |
def visualise_ldamallet_topics(dataset, alpha, num_topic):
ldamallet_dir = 'data/topic_models/basic/{}_alpha{}_{}/ldamallet'.format(dataset, alpha, num_topic)
convertedLDAmallet = convertLDAmallet(dataDir=ldamallet_dir, filename='state.mallet.gz')
pyLDAvis.enable_notebook()
vis = pyLDAvis.prepare(**conv... |
def main(args):
input_color = args.video
(kids, comb) = availabe_kindata(input_color, kinect_count=4)
print('Available kinects for sequence {}: {}'.format(basename(input_color), kids))
kinect_count = len(kids)
video_prefix = basename(input_color).split('.')[0]
video_folder = dirname(input_color)... |
class Preferences(qltk.UniqueWindow, EditDisplayPatternMixin):
_DEFAULT_PATTERN = DEFAULT_PATTERN_TEXT
_PREVIEW_ITEM = FakeDisplayItem({'date': '2010-10-31', '~length': util.format_time_display(6319), '~long-length': util.format_time_long(6319), '~tracks': numeric_phrase('%d track', '%d tracks', 5), '~discs': n... |
def flatten_dict(d: MutableMapping, parent_key: str='', delimiter: str='.'):
def _flatten_dict(d, parent_key='', delimiter='.'):
for (k, v) in d.items():
key = (((str(parent_key) + delimiter) + str(k)) if parent_key else k)
if (v and isinstance(v, MutableMapping)):
(y... |
def optimize_acqf_and_get_observation(acq_func, bounds, test_function_bounds, batch_size, test_function):
(candidates, _) = optimize_acqf(acq_function=acq_func, bounds=bounds, q=batch_size, num_restarts=10, raw_samples=512, options={'batch_limit': 5, 'maxiter': 200})
new_x = candidates.detach()
new_x_unboun... |
def get_section_links(soup, section_id, filter_text, contains=False):
links = []
x = soup.find(id=section_id)
if (x is None):
return links
for i in x.find_all(name='li'):
for link in i.find_all('a', href=True):
links.append(link['href'])
break
cleaned_links = ... |
def list_join_clause(segment: BaseSegment) -> List[BaseSegment]:
if (from_expression := segment.get_child('from_expression')):
if (bracketed := from_expression.get_child('bracketed')):
join_clauses = bracketed.get_children('join_clause')
if (inner_bracket := bracketed.get_child('brac... |
_REGISTRY.register()
class LargeVehicleID(VehicleID):
def __init__(self, root='datasets', **kwargs):
dataset_dir = osp.join(root, self.dataset_dir)
self.test_list = osp.join(dataset_dir, 'train_test_split/test_list_2400.txt')
super(LargeVehicleID, self).__init__(root, self.test_list, **kwarg... |
class GoalDirectedBenchmarkResult():
def __init__(self, benchmark_name: str, score: float, optimized_molecules: List[Tuple[(str, float)]], execution_time: float, number_scoring_function_calls: int, metadata: Dict[(str, Any)]) -> None:
self.benchmark_name = benchmark_name
self.score = score
s... |
def get_packet_type(p) -> PacketType:
if (p.quic.header_form == '0'):
return PacketType.ONERTT
if (p.quic.version == '0x'):
return PacketType.VERSIONNEGOTIATION
if (p.quic.version == QUIC_V2):
for (t, num) in WIRESHARK_PACKET_TYPES_V2.items():
if (p.quic.long_packet_type_... |
class Cub200_2011Dataset(H5PYDataset):
_filename = 'cub200_2011/cub200_2011.hdf5'
def __init__(self, which_sets, **kwargs):
try:
path = '/home/zwz/zwz/DAML/chainer/lib/datasets/data/cub200_2011/cub200_2011.hdf5'
except IOError as e:
msg = (str(e) + '.\n You need t... |
def dr_relation(C, trans, nullable):
dr_set = {}
(state, N) = trans
terms = []
g = lr0_goto(C[state], N)
for p in g:
if (p.lr_index < (p.len - 1)):
a = p.prod[(p.lr_index + 1)]
if Terminals.has_key(a):
if (a not in terms):
terms.app... |
class KnownValues(unittest.TestCase):
def test_aft_get_pp_high_cost(self):
cell = pgto.Cell()
cell.verbose = 0
cell.atom = 'C 0 0 0; C 1 1 1'
cell.a = numpy.diag([4, 4, 4])
cell.basis = 'gth-szv'
cell.pseudo = 'gth-pade'
cell.build()
v1 = df.DF(cell).g... |
class CC_WEB_VIDEO(object):
def __init__(self):
with open('datasets/cc_web_video.pickle', 'rb') as f:
dataset = pk.load(f)
self.database = dataset['vid2index']
self.queries = dataset['queries']
self.ground_truth = dataset['ground_truth']
self.excluded = dataset['e... |
(coderize=True)
def accuracy(pred, target, topk=1, thresh=None):
assert isinstance(topk, (int, tuple))
if isinstance(topk, int):
topk = (topk,)
return_single = True
else:
return_single = False
maxk = max(topk)
if (pred.size(0) == 0):
accu = [pred.new_tensor(0.0) for i... |
def test_schlick():
f0_cuda = torch.rand(1, RES, RES, 3, dtype=DTYPE, device='cuda', requires_grad=True)
f0_ref = f0_cuda.clone().detach().requires_grad_(True)
f90_cuda = torch.rand(1, RES, RES, 3, dtype=DTYPE, device='cuda', requires_grad=True)
f90_ref = f90_cuda.clone().detach().requires_grad_(True)
... |
(init=False)
class TypedDictValue(GenericValue):
items: Dict[(str, Tuple[(bool, Value)])]
extra_keys: Optional[Value] = None
def __init__(self, items: Dict[(str, Tuple[(bool, Value)])], extra_keys: Optional[Value]=None) -> None:
value_types = []
if items:
value_types += [val for ... |
def load_model(base_model: str='decapoda-research/llama-7b-hf', lora_r: int=8, lora_alpha: int=16, lora_dropout: float=0.05, lora_target_modules: Tuple=('q_proj', 'k_proj', 'v_proj', 'o_proj'), resume_from_checkpoint: str='pretrained_model/', load_in_8bit: bool=True):
world_size = int(os.environ.get('WORLD_SIZE', 1... |
def _get_saving_handler(version):
pdf = pdfium.PdfDocument.new()
size = (612, 792)
pdf.new_page(*size)
kwargs = {}
if version:
kwargs['version'] = version
saved_pdf = (yield (pdf, kwargs))
if version:
(saved_pdf.get_version() == version)
assert (len(saved_pdf) == 1)
a... |
class _DefaultVizCallback(object):
def __init__(self):
self.train_vals = {}
self.train_emas = {}
self.ema_beta = 0.25
def __call__(self, viz, mode, it, k, v):
if (mode == 'train'):
self.train_emas[k] = ((self.ema_beta * v) + ((1.0 - self.ema_beta) * self.train_emas.ge... |
class ComponentLevel1(NamedObject):
def __new__(cls, *args, **kwargs):
inst = super().__new__(cls, *args, **kwargs)
inst._dsl.name_upblk = {}
inst._dsl.upblks = set()
inst._dsl.upblk_order = []
inst._dsl.U_U_constraints = set()
return inst
def _collect_vars(s, m):... |
class FairseqLRScheduler(object):
def __init__(self, cfg, optimizer):
super().__init__()
if ((optimizer is not None) and (not isinstance(optimizer, FairseqOptimizer))):
raise ValueError('optimizer must be an instance of FairseqOptimizer')
self.cfg = cfg
self.optimizer = o... |
class Awaitable(abc.ABC, Generic[W]):
def __init__(self) -> None:
self._callbacks: List[Callable[([W], W)]] = []
def _wait_impl(self) -> W:
pass
def wait(self) -> W:
with record_function(f'## {self.__class__.__name__} wait() ##'):
ret: W = self._wait_impl()
fo... |
class TestCanAssign():
def can(self, left: ConcreteSignature, right: ConcreteSignature) -> None:
tv_map = left.can_assign(right, CTX)
assert isinstance(tv_map, dict), f'cannot assign {right} to {left} due to {tv_map}'
def cannot(self, left: ConcreteSignature, right: ConcreteSignature) -> None:
... |
def make_predictions(all_examples, all_features, all_results, n_best_size, max_answer_length, larger_than_cls):
example_id_to_features = collections.defaultdict(list)
for feature in all_features:
example_id_to_features[feature.example_id].append(feature)
example_id_to_results = collections.defaultdi... |
def test_windows_compact(runner, path_rgb_byte_tif):
result = runner.invoke(main_group, ['blocks', path_rgb_byte_tif, '--compact'])
assert (result.exit_code == 0)
assert (result.output.count('"FeatureCollection') == 1)
assert (result.output.count('"Feature"') == 240)
assert (result.output.count('", ... |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.