code
stringlengths
101
5.91M
class IntersectionScenario(ScenarioGenScenario):
    """Scenario that procedurally generates an OpenDRIVE-style intersection map.

    NOTE(review): this block appears to have been flattened/mangled during
    extraction; it is reformatted here with comments, but no code tokens were
    changed (including the incomplete `(1 / )` expressions flagged below).
    """

    def _generate_map(self, parameters, road_id=1, internal_id=100, junction_id=1):
        # `parameters` is the tuple produced by sample_map_parameters().
        (n_roads, radius, lengths, cs_outer, cs_inner, n_lanes, angles) = parameters
        junction = Junction(f'junction {junction_id}', junction_id)
        roads = []
        junction_roads = []
        zero_cloth = False
        roadIDs = []
        # One approach road per intersection arm, placed on a circle of
        # per-arm radius, each with a spiral (clothoid) geometry.
        for i in range(len(n_lanes)):
            geometry = [Spiral(cs_outer[i], cs_inner[i], lengths[i])]
            road = create_road_start(road_id, n_lanes[i], (radius[i] * np.cos(angles[i])), (radius[i] * np.sin(angles[i])), angles[i], geometry=geometry)
            road.add_predecessor(xodr.ElementType.junction, 1)
            roads.append(road)
            roadIDs.append(road_id)
            road_id += 1
        # Connect every ordered pair of distinct arms with junction roads.
        for (i, _) in enumerate(roadIDs):
            for (j, _) in enumerate(roadIDs):
                if (i == j):
                    continue
                # Heading of arm j relative to arm i, wrapped into (-pi, pi].
                an1 = ((angles[j] - angles[i]) - np.pi)
                if (an1 > np.pi):
                    an1 = (- ((2 * np.pi) - an1))
                n_in = n_lanes[i][0]
                n_out = n_lanes[j][1]
                n_conn = min(n_in, n_out)  # number of lane-to-lane links to create
                in_offset = 0
                out_offset = 0
                # NOTE(review): this branch computes nothing (both arms are
                # `pass`); presumably dead or unfinished code - confirm upstream.
                if (((i - j) == 1) or ((i == 0) and (j == (len(n_lanes) - 1)))):
                    if (n_in >= n_out):
                        pass
                    else:
                        pass
                # Shift lane indices when lane counts differ between the arms.
                if (((i - j) == (- 1)) or ((i == (len(n_lanes) - 1)) and (j == 0))):
                    if (n_in >= n_out):
                        in_offset = int((n_in - n_out))
                    else:
                        out_offset = int((n_out - n_in))
                for k in range(n_conn):
                    # Endpoints of the connecting curve, offset per lane
                    # (3.6 m lane width).
                    from_x = (- radius[i])
                    from_y = ((- (k + in_offset)) * 3.6)
                    to_x = ((radius[j] * np.cos(an1)) + ((np.sin(an1) * (k + out_offset)) * 3.6))
                    to_y = ((radius[j] * np.sin(an1)) - ((np.cos(an1) * (k + out_offset)) * 3.6))
                    # NOTE(review): the two `(1 / )` curvature arguments are
                    # syntactically incomplete in this source - the intended
                    # denominators were lost in extraction; restore them from
                    # upstream before this code can run.
                    clothoids = pcloth.SolveG2(from_x, from_y, 0, (1 / ), to_x, to_y, an1, (1 / ))
                    # Build the 3-clothoid junction road from the solved G2 spline.
                    (junc_road, zero_cloth_) = create_3cloths_right_lane(clothoids[0].KappaStart, clothoids[0].KappaEnd, clothoids[0].length, clothoids[1].KappaStart, clothoids[1].KappaEnd, clothoids[1].length, clothoids[2].KappaStart, clothoids[2].KappaEnd, clothoids[2].length, internal_id, x_start=((radius[i] * np.cos(angles[i])) - ((np.sin(angles[i]) * (k + in_offset)) * 3.6)), y_start=((radius[i] * np.sin(angles[i])) + ((np.cos(angles[i]) * (k + in_offset)) * 3.6)), h_start=(angles[i] - np.pi), n_lanes=1, lane_offset=3.6, junction=1)
                    internal_id += 1
                    zero_cloth = (zero_cloth or zero_cloth_)
                    # Link the junction road to the incoming/outgoing arms.
                    junc_road.add_successor(xodr.ElementType.road, roadIDs[j], xodr.ContactPoint.start, lane_offset=((- k) - out_offset))
                    junc_road.add_predecessor(xodr.ElementType.road, roadIDs[i], xodr.ContactPoint.start, lane_offset=(k + in_offset))
                    junction_roads.append(junc_road)
                    # Register both directions of the lane link in the junction.
                    conne1 = Connection(junc_road.successor.element_id, junc_road.id, ContactPoint.end)
                    conne1.add_lanelink((- 1), (- ((1 + k) + out_offset)))
                    conne2 = Connection(junc_road.predecessor.element_id, junc_road.id, ContactPoint.start)
                    conne2.add_lanelink(((1 + k) + in_offset), (- 1))
                    junction.add_connection(conne1)
                    junction.add_connection(conne2)
                    internal_id += 1
        return ((roads + junction_roads), [junction], zero_cloth)

    def sample_map_parameters(self, rng):
        """Draw random intersection parameters using numpy Generator `rng`."""
        n_roads = rng.choice([3, 4, 5], p=[0.3, 0.5, 0.2])
        radius = [rng.integers(15, 31) for _ in range(n_roads)]
        lengths = [rng.integers(40, 80) for _ in range(n_roads)]
        cs_outer = [rng.uniform(low=(- 0.02), high=0.02) for _ in range(n_roads)]
        cs_inner = [rng.uniform(low=(- 0.02), high=0.02) for _ in range(n_roads)]
        # [incoming, outgoing] lane counts per arm.
        n_lanes = [[rng.choice([1, 2, 3], p=[0.45, 0.45, 0.1]), rng.choice([1, 2, 3], p=[0.45, 0.45, 0.1])] for _ in range(n_roads)]
        # NOTE(review): 365 (not 360) degrees divided over the arms - possible
        # typo; confirm against upstream before changing.
        angles = [np.radians((((i * 365) / n_roads) + rng.integers((((- 250.0) / n_roads) / 2.0), ((250.0 / n_roads) / 2.0)))) for i in range(n_roads)]
        return (n_roads, radius, lengths, cs_outer, cs_inner, n_lanes, angles)
class LevelFusion(nn.Module):
    """Fuse multi-stage 3D features: per-stage DownSample, channel concat,
    then a 1x1x1 fusion convolution.

    Args:
        in_channels: per-stage input channel counts.
        mid_channels: per-stage channel counts after downsampling.
        out_channels: channels of the fused output.
        downsample_scales: per-stage spatial/temporal downsample factors.
    """

    def __init__(self, in_channels, mid_channels, out_channels, downsample_scales=((1, 1, 1), (1, 1, 1))):
        super().__init__()
        # One DownSample per stage; all share the 1x1x1 grouped-conv config.
        self.downsamples = nn.ModuleList()
        for in_ch, mid_ch, scale in zip(in_channels, mid_channels, downsample_scales):
            self.downsamples.append(
                DownSample(
                    in_ch,
                    mid_ch,
                    kernel_size=(1, 1, 1),
                    stride=(1, 1, 1),
                    bias=False,
                    padding=(0, 0, 0),
                    groups=32,
                    norm_cfg=dict(type='BN3d', requires_grad=True),
                    act_cfg=dict(type='ReLU', inplace=True),
                    downsample_position='before',
                    downsample_scale=scale,
                )
            )
        # Fuse the concatenated mid-channel features down to out_channels.
        self.fusion_conv = ConvModule(
            sum(mid_channels),
            out_channels,
            1,
            stride=1,
            padding=0,
            bias=False,
            conv_cfg=dict(type='Conv3d'),
            norm_cfg=dict(type='BN3d', requires_grad=True),
            act_cfg=dict(type='ReLU', inplace=True),
        )

    def forward(self, x):
        """Downsample each stage's feature, concat on channel dim, fuse."""
        downsampled = [down(feat) for down, feat in zip(self.downsamples, x)]
        fused = torch.cat(downsampled, 1)
        return self.fusion_conv(fused)
def getAppResPhs(nsemdata):
    """Compute (apparent resistivity, phase in degrees) per source of an
    NSEM data object, from the sum of its first two receiver impedances."""

    def _app_res_phs(freq, z):
        # rho_a = |Z|^2 / (8e-7 * pi^2 * f); phase from the complex argument.
        res = (((1.0 / (8e-07 * (np.pi ** 2))) / freq) * (np.abs(z) ** 2))
        phs = (np.arctan2(z.imag, z.real) * (180 / np.pi))
        return (res, phs)

    rows = []
    for src in nsemdata.survey.source_list:
        row = [src.frequency]
        for rx in src.receiver_list:
            # Imaginary-part receivers contribute as 1j * value.
            factor = 1j if ('i' in rx.rxType) else 1
            row.append(factor * nsemdata[(src, rx)])
        rows.append(row)
    # Sum the first two receiver contributions (indices 1:3) per source.
    return [_app_res_phs(row[0], np.sum(row[1:3])) for row in rows]
def dataset_split_replay(dataset, split_x, terminate_on_end=False):
    """Split a D4RL-style dataset into an 'expert' part and an 'other' part.

    Trajectories are cut at terminal/timeout steps, ranked by return, and a
    thin slice (`[1::split_x]`) of the top 5% forms dataset_e; every other
    trajectory forms dataset_o. Returns (dataset_e, dataset_o) as dicts of
    concatenated per-step arrays.
    """
    total = dataset['rewards'].shape[0]
    returns = []
    obs_lists = [[]]
    next_obs_lists = [[]]
    act_lists = [[]]
    rew_lists = [[]]
    done_lists = [[]]
    for t in range(total - 1):
        # Append the current transition to the in-progress trajectory.
        obs_lists[-1].append(dataset['observations'][t].astype(np.float32))
        next_obs_lists[-1].append(dataset['observations'][t + 1].astype(np.float32))
        act_lists[-1].append(dataset['actions'][t].astype(np.float32))
        rew_lists[-1].append(dataset['rewards'][t].astype(np.float32))
        done_lists[-1].append(bool(dataset['terminals'][t]))
        ended = (dataset['timeouts'][t] | dataset['terminals'][t])
        if (not terminate_on_end) and ended:
            # Close this trajectory and start a fresh one.
            returns.append(np.sum(rew_lists[-1]))
            for lst in (obs_lists, next_obs_lists, act_lists, rew_lists, done_lists):
                lst.append([])
    # Rank trajectories by return, best first.
    order = np.argsort(returns)[::-1]
    top_count = int(len(order) * 0.05)
    top = order[:top_count]
    # Thin the top 5% (skipping the very best) to build the expert index set.
    inds_e = list(top[1::split_x])
    inds_o = list(set(list(order)) - set(inds_e))
    print('# select {} trajs in mixed dataset as D_e, mean is {}'.format(len(inds_e), np.array(returns)[inds_e].mean()))
    print('# select {} trajs in mixed dataset as D_o, mean is {}'.format(len(inds_o), np.array(returns)[inds_o].mean()))

    def _gather(lists, idxs):
        # Concatenate the selected trajectories' per-step entries along axis 0.
        return np.concatenate([lists[i] for i in idxs], 0)

    dataset_e = {
        'observations': _gather(obs_lists, inds_e),
        'actions': _gather(act_lists, inds_e),
        'next_observations': _gather(next_obs_lists, inds_e),
        'rewards': _gather(rew_lists, inds_e),
        'terminals': _gather(done_lists, inds_e),
    }
    dataset_o = {
        'observations': _gather(obs_lists, inds_o),
        'actions': _gather(act_lists, inds_o),
        'next_observations': _gather(next_obs_lists, inds_o),
        'rewards': _gather(rew_lists, inds_o),
        'terminals': _gather(done_lists, inds_o),
    }
    return (dataset_e, dataset_o)
def get_cluster_config(attrs):
    """Build a cluster resource configuration from user attributes.

    Keys may be given with or without the 'train.' prefix (e.g. 'num_workers'
    or 'train.num_workers'); all values must be ints. Returns a dict with
    'ps', 'worker' and (optionally) 'evaluator' resource specs.

    Raises SQLFlowDiagnostic on non-int values or num_evaluator > 1.
    """
    default_map = {'train.num_ps': 0, 'train.num_workers': 1, 'train.worker_cpu': 400, 'train.worker_gpu': 0, 'train.ps_cpu': 200, 'train.ps_gpu': 0, 'train.num_evaluator': 0, 'train.evaluator_cpu': 200, 'train.evaluator_gpu': 0}
    # Normalize user keys onto the canonical 'train.'-prefixed names.
    update = {}
    for (k, v) in attrs.items():
        if (k in default_map):
            update[k] = v
        elif (('train.' + k) in default_map):
            update[('train.' + k)] = v
    if (not all((isinstance(v, int) for v in update.values()))):
        raise SQLFlowDiagnostic('value for cluster config should be int')
    # BUG FIX: previously `default_map.update(attrs)` was applied, discarding
    # the normalized/validated `update` map, so un-prefixed keys such as
    # 'num_workers' were silently ignored.
    default_map.update(update)
    ps = {'count': default_map['train.num_ps'], 'cpu': default_map['train.ps_cpu'], 'gpu': default_map['train.ps_gpu']}
    worker = {'count': default_map['train.num_workers'], 'cpu': default_map['train.worker_cpu'], 'gpu': default_map['train.worker_gpu']}
    # Multiple workers require at least one parameter server.
    if ((worker['count'] > 1) and (ps['count'] < 1)):
        ps['count'] = 1
    if (default_map['train.num_evaluator'] == 0):
        evaluator = None
    elif (default_map['train.num_evaluator'] == 1):
        evaluator = {'count': default_map['train.num_evaluator'], 'cpu': default_map['train.evaluator_cpu'], 'gpu': default_map['train.evaluator_gpu']}
    else:
        raise SQLFlowDiagnostic('train.num_evaluator should only be 1 or 0')
    conf = {'ps': ps, 'worker': worker}
    if (evaluator is not None):
        conf['evaluator'] = evaluator
    return conf
def get_exact(n_eigs, box_size, dim):
    """Return exact hydrogen-like (Z=1) eigenvalues for a 2D or 3D problem.

    The values are repeated according to the tabulated level degeneracies.
    `n_eigs` and `box_size` are accepted for interface compatibility but are
    not used by the exact formulas.

    Raises ValueError for any dim other than 2 or 3 (previously this fell
    through and raised UnboundLocalError).
    """
    Z = 1
    if (dim == 2):
        # 2D hydrogen: E_n = -(Z^2/2) / (n - 1/2)^2 / 4, with the tabulated
        # degeneracies 1, 3, 5, 8, 15 for n = 1..5.
        shells = (([1] + ([2] * 3)) + ([3] * 5)) + ([4] * 8) + ([5] * 15)
        return [((((- (float(Z) ** 2)) / 2) / ((n - 0.5) ** 2)) / 4) for n in shells]
    if (dim == 3):
        # 3D hydrogen: E_n = -(Z^2/2) / n^2 with degeneracy n^2.
        shells = ([1] + ([2] * (2 ** 2))) + ([3] * (3 ** 2))
        return [(((- (float(Z) ** 2)) / 2) / (n ** 2)) for n in shells]
    raise ValueError('dim must be 2 or 3, got {!r}'.format(dim))
class RowColTransformer(nn.Module):
    """Transformer over tabular features, attending column-wise, row-wise,
    or both.

    style='colrow' alternates per-feature (column) attention at width `dim`
    with attention over the flattened (nfeats * dim) sample representation;
    any other style applies only the flattened row-wise pass.
    """

    def __init__(self, num_tokens, dim, nfeats, depth, heads, dim_head, attn_dropout, ff_dropout, style='col'):
        super().__init__()
        self.embeds = nn.Embedding(num_tokens, dim)
        self.layers = nn.ModuleList([])
        self.mask_embed = nn.Embedding(nfeats, dim)
        self.style = style
        for _ in range(depth):
            if (self.style == 'colrow'):
                # Column attention (width dim) followed by row attention over
                # the flattened (dim * nfeats) representation.
                # NOTE(review): the flattened attention hard-codes dim_head=64
                # instead of using the `dim_head` argument - possibly
                # intentional; confirm against upstream.
                self.layers.append(nn.ModuleList([PreNorm(dim, Residual(Attention(dim, heads=heads, dim_head=dim_head, dropout=attn_dropout))), PreNorm(dim, Residual(FeedForward(dim, dropout=ff_dropout))), PreNorm((dim * nfeats), Residual(Attention((dim * nfeats), heads=heads, dim_head=64, dropout=attn_dropout))), PreNorm((dim * nfeats), Residual(FeedForward((dim * nfeats), dropout=ff_dropout)))]))
            else:
                self.layers.append(nn.ModuleList([PreNorm((dim * nfeats), Residual(Attention((dim * nfeats), heads=heads, dim_head=64, dropout=attn_dropout))), PreNorm((dim * nfeats), Residual(FeedForward((dim * nfeats), dropout=ff_dropout)))]))

    def forward(self, x, x_cont=None, mask=None):
        # NOTE(review): `mask` is accepted but never used in this body - confirm.
        # Optionally append continuous-feature embeddings along the token axis.
        if (x_cont is not None):
            x = torch.cat((x, x_cont), dim=1)
        (_, n, _) = x.shape
        if (self.style == 'colrow'):
            for (attn1, ff1, attn2, ff2) in self.layers:
                # Column-wise attention over the n feature tokens.
                x = attn1(x)
                x = ff1(x)
                # Flatten each sample's tokens so attention runs across rows.
                x = rearrange(x, 'b n d -> 1 b (n d)')
                x = attn2(x)
                x = ff2(x)
                x = rearrange(x, '1 b (n d) -> b n d', n=n)
        else:
            for (attn1, ff1) in self.layers:
                x = rearrange(x, 'b n d -> 1 b (n d)')
                x = attn1(x)
                x = ff1(x)
                x = rearrange(x, '1 b (n d) -> b n d', n=n)
        return x
# NOTE(review): the line below looks like a mangled decorator - upstream is
# presumably `@..._utils.test(arch=[ti.opengl, ti.vulkan])`; as written it is
# a bare call whose result is discarded. Confirm against upstream.
_utils.test(arch=[ti.opengl, ti.vulkan])
def test_sequential_dispatch():
    """Build a sequential graph with one dispatch, compile it, save it as an
    AOT module, and verify the saved metadata.json parses as JSON."""
    g_init_builder = ti.graph.GraphBuilder()
    g_init_substep = g_init_builder.create_sequential()
    ivec3 = ti.types.vector(3, ti.i32)

    def init_data(test_vec: ivec3):
        # Intentionally empty body - only the dispatch plumbing is exercised.
        # NOTE(review): a kernel decorator may have been lost here; confirm.
        pass
    sym_args = ti.graph.Arg(ti.graph.ArgKind.MATRIX, 'test_arg', ti.types.vector(3, ti.i32))
    g_init_substep.dispatch(init_data, sym_args)
    g_init_builder.append(g_init_substep)
    g_init = g_init_builder.compile()
    with tempfile.TemporaryDirectory() as tmpdir:
        m = ti.aot.Module()
        m.add_graph('g_init', g_init)
        m.save(tmpdir)
        # The saved module must contain valid JSON metadata.
        with open(os.path.join(tmpdir, 'metadata.json'), 'r') as json_file:
            json.load(json_file)
def apply_to_mask(f, clip, *a, **k):
    """Apply `f` to `clip` and, if the result carries a truthy mask, apply the
    same transformation to that mask as well. Returns the transformed clip."""
    transformed = f(clip, *a, **k)
    mask = getattr(transformed, 'mask', None)
    if mask:
        transformed.mask = f(mask, *a, **k)
    return transformed
class AllGather(torch.autograd.Function):
    """Differentiable all_gather: forward gathers this tensor from every rank
    and concatenates on dim 0; backward returns only this rank's slice of the
    incoming gradient (no gradient for `args`).

    NOTE(review): forward/backward carry no @staticmethod decorators here -
    presumably lost in extraction; confirm against upstream.
    """

    def forward(ctx, tensor, args):
        # `args` must expose world_size and rank (e.g. a parsed argparse namespace).
        output = [torch.empty_like(tensor) for _ in range(args.world_size)]
        torch.distributed.all_gather(output, tensor)
        # Remember where this rank's rows live in the concatenated result.
        ctx.rank = args.rank
        ctx.batch_size = tensor.shape[0]
        return torch.cat(output, dim=0)

    def backward(ctx, grad_output):
        # Route back only the gradient rows belonging to this rank.
        return (grad_output[(ctx.batch_size * ctx.rank):(ctx.batch_size * (ctx.rank + 1))], None)
class MPolynomialIdeal_magma_repr():
    """Mixin giving multivariate polynomial ideals a Magma representation and
    a Magma-backed Groebner basis computation."""

    def _magma_init_(self, magma):
        # Magma literal of the form ideal<ring | generators>.
        P = magma(self.ring())
        G = magma(self.gens())
        return ('ideal<%s|%s>' % (P.name(), G._ref()))
    # NOTE(review): the bare name below is almost certainly a mangled
    # decorator (`@_gb_standard_options` upstream); as written it is a no-op
    # expression statement on the class body. Confirm against upstream.
    _gb_standard_options

    def _groebner_basis_magma(self, deg_bound=None, prot=False, magma=magma_default):
        """Compute a Groebner basis via Magma.

        deg_bound limits the computation degree; prot enables protocol
        (verbose) output, in 'magma' or 'sage' style. Returns an immutable
        PolynomialSequence over this ideal's ring.
        """
        R = self.ring()
        # A degree-bounded run needs the plain generator list, not the ideal.
        if (not deg_bound):
            mself = magma(self)
        else:
            mself = magma(list(self.gens()))
        if (get_verbose() >= 2):
            prot = True
        from sage.interfaces.magma import MagmaGBLogPrettyPrinter
        if prot:
            log_parser = MagmaGBLogPrettyPrinter(verbosity=(get_verbose() + 1), style=('sage' if (prot == 'sage') else 'magma'))
        else:
            log_parser = None
        # Route Magma's stdout through the pretty-printer (or silence it).
        ctx = StdOutContext(magma, silent=(not prot), stdout=log_parser)
        if prot:
            magma.SetVerbose('Groebner', 1)
        with ctx:
            if deg_bound:
                mgb = mself.GroebnerBasis(deg_bound)
            else:
                mgb = mself.GroebnerBasis()
        if (prot == 'sage'):
            print('')
            print(('Highest degree reached during computation: %2d.' % log_parser.max_deg))
        # Magma sequences are 1-indexed.
        mgb = [str(mgb[(i + 1)]) for i in range(len(mgb))]
        if (R.base_ring().degree() > 1):
            # Rename Magma's generator '$.1' to the Sage base-ring generator.
            a = str(R.base_ring().gen())
            mgb = [e.replace('$.1', a) for e in mgb]
        from sage.rings.polynomial.multi_polynomial_sequence import PolynomialSequence
        B = PolynomialSequence([R(e) for e in mgb], R, immutable=True)
        return B
def test_nested_child_body(i, ready_queue, nested_child_sleep): ready_queue.put(None) time.sleep(nested_child_sleep)
def profile(n):
    """Decorator factory: wrap a function so each call runs inside the
    profile_kv(n) timing context, recorded under the key `n`."""

    def _decorate(func):
        def _wrapped(*args, **kwargs):
            # Time the wrapped call under key `n`.
            with profile_kv(n):
                return func(*args, **kwargs)
        return _wrapped
    return _decorate
class GradientClippingTest(unittest.TestCase):
    """Tests for the GradientClipping net modifier.

    Each test builds the same tiny MLP, applies a differently configured
    GradientClipping modifier, runs the nets once, and checks the operator
    count of the resulting net proto (and, for by_value, the clipped range).
    Refactored: the previously copy-pasted model construction and execution
    now live in the _build_model/_run_model helpers (behavior unchanged).
    """

    def _build_model(self):
        # Build data -> fc1 -> fc2 -> sigmoid -> squared-L2-vs-label -> loss,
        # returning the model and the grad map restricted to the fc weights.
        model = model_helper.ModelHelper(name='test')
        data = model.net.AddExternalInput('data')
        fc1 = brew.fc(model, data, 'fc1', dim_in=4, dim_out=2)
        fc2 = brew.fc(model, fc1, 'fc2', dim_in=2, dim_out=1)
        sigm = model.net.Sigmoid(fc2, 'sigm')
        sq = model.net.SquaredL2Distance([sigm, 'label'], 'sq')
        loss = model.net.SumElements(sq, 'loss')
        grad_map = model.AddGradientOperators([loss])
        grad_map_for_param = {key: grad_map[key] for key in ['fc1_w', 'fc2_w']}
        return (model, grad_map_for_param)

    def _run_model(self, model):
        # Feed random inputs and execute the init and train nets once.
        workspace.FeedBlob('data', np.random.rand(10, 4).astype(np.float32))
        workspace.FeedBlob('label', np.random.rand(10, 1).astype(np.float32))
        workspace.RunNetOnce(model.param_init_net)
        workspace.RunNetOnce(model.net)

    def test_gradient_clipping_by_norm(self):
        (model, grad_map_for_param) = self._build_model()
        net_modifier = GradientClipping(grad_clip_method='by_norm', clip_norm_type='l2_norm', clip_threshold=0.1)
        net_modifier(model.net, grad_map=grad_map_for_param)
        self._run_model(model)
        self.assertEqual(len(model.net.Proto().op), 17)

    def test_gradient_clipping_by_norm_l1_norm(self):
        (model, grad_map_for_param) = self._build_model()
        net_modifier = GradientClipping(grad_clip_method='by_norm', clip_norm_type='l1_norm', clip_threshold=0.1)
        net_modifier(model.net, grad_map=grad_map_for_param)
        self._run_model(model)
        self.assertEqual(len(model.net.Proto().op), 15)

    def test_gradient_clipping_by_norm_using_param_norm(self):
        (model, grad_map_for_param) = self._build_model()
        net_modifier = GradientClipping(grad_clip_method='by_norm', clip_norm_type='l2_norm', clip_threshold=0.1, use_parameter_norm=True)
        net_modifier(model.net, grad_map=grad_map_for_param)
        self._run_model(model)
        self.assertEqual(len(model.net.Proto().op), 21)

    def test_gradient_clipping_by_norm_compute_norm_ratio(self):
        (model, grad_map_for_param) = self._build_model()
        net_modifier = GradientClipping(grad_clip_method='by_norm', clip_norm_type='l2_norm', clip_threshold=0.1, use_parameter_norm=True, compute_norm_ratio=True)
        net_modifier(model.net, grad_map=grad_map_for_param)
        self._run_model(model)
        self.assertEqual(len(model.net.Proto().op), 23)

    def test_gradient_clipping_by_value(self):
        (model, grad_map_for_param) = self._build_model()
        clip_max = 1e-08
        clip_min = 0
        net_modifier = GradientClipping(grad_clip_method='by_value', clip_max=clip_max, clip_min=clip_min)
        net_modifier(model.net, grad_map=grad_map_for_param)
        self._run_model(model)
        self.assertEqual(len(model.net.Proto().op), 13)
        # Value clipping must bound the actual gradient values.
        fc1_w_grad = workspace.FetchBlob('fc1_w_grad')
        self.assertLessEqual(np.amax(fc1_w_grad), clip_max)
        self.assertGreaterEqual(np.amin(fc1_w_grad), clip_min)

    def test_gradient_clipping_by_norm_including_blobs(self):
        (model, grad_map_for_param) = self._build_model()
        net_modifier = GradientClipping(grad_clip_method='by_norm', clip_norm_type='l2_norm', clip_threshold=0.1, blobs_to_include=['fc1_w'], blobs_to_exclude=None)
        net_modifier(model.net, grad_map=grad_map_for_param)
        self._run_model(model)
        self.assertEqual(len(model.net.Proto().op), 14)

    def test_gradient_clipping_by_norm_excluding_blobs(self):
        (model, grad_map_for_param) = self._build_model()
        net_modifier = GradientClipping(grad_clip_method='by_norm', clip_norm_type='l2_norm', clip_threshold=0.1, blobs_to_include=None, blobs_to_exclude=['fc1_w', 'fc2_w'])
        net_modifier(model.net, grad_map=grad_map_for_param)
        self._run_model(model)
        self.assertEqual(len(model.net.Proto().op), 11)
def _init_qqbar():
    """Initialize module-level algebraic-number constants (called once, lazily).

    NOTE(review): several `ZZ()` calls below take no argument and therefore
    produce 0; upstream Sage passes explicit integers here (hash offsets).
    The argument values appear to have been lost in extraction - confirm
    against upstream before relying on these constants.
    """
    global ZZX_x, AA_0, QQbar_I, AA_hash_offset, QQbar_hash_offset, QQbar_I_generator, QQbar_I_nf
    global QQ_0, QQ_1, QQ_1_2, QQ_1_4, RR_1_10
    RR_1_10 = (RR(1) / 10)
    QQ_0 = QQ.zero()
    QQ_1 = QQ.one()
    QQ_1_2 = QQ((1, 2))
    QQ_1_4 = QQ((1, 4))
    AA_0 = AA.zero()
    # The imaginary unit as an algebraic number: root of x^2 + 1 in the
    # Gaussian number field, selected near CIF(0, 1).
    QQbar_I_nf = GaussianField()
    QQbar_I_generator = AlgebraicGenerator(QQbar_I_nf, ANRoot(((AAPoly.gen() ** 2) + 1), CIF(0, 1)))
    QQbar_I = AlgebraicNumber(ANExtensionElement(QQbar_I_generator, QQbar_I_nf.gen()))
    AA_hash_offset = AA((~ ZZ()))
    QQbar_hash_offset = AlgebraicNumber(ANExtensionElement(QQbar_I_generator, ((~ ZZ()) + (QQbar_I_nf.gen() / ZZ()))))
    ZZX_x = ZZ['x'].gen()
class PretrainDataset(Dataset):
    """Abstract base for pretraining datasets backed by an HDF5 file.

    Subclasses must implement __getitem__ and __len__; hydrate() opens the
    backing file and optionally loads precomputed language tensors.
    """

    def __init__(self) -> None:
        super().__init__()
        self.epoch = 0
        # Backing storage, populated by hydrate().
        self.h5 = None
        self.vid = None
        self.states = None
        # Optional language-cache configuration/state.
        self.index_path = None
        self.language_path = None
        self.language = None

    def hydrate(self, path: Path) -> None:
        """Open the HDF5 file at `path` and load optional language tensors."""
        self.h5 = h5py.File(path, 'r')
        self.vid = self.h5['vid'].asstr()
        self.states = self.h5['states'].asstr()
        if (self.language_path is not None):
            self.language = torch.load((self.index_path / self.language_path))

    def set_epoch(self, epoch: int) -> None:
        """Record the current training epoch (e.g. for epoch-seeded sampling)."""
        self.epoch = epoch

    def __getitem__(self, idx: int) -> Tuple[(torch.Tensor, ...)]:
        raise NotImplementedError('PretrainDataset is an abstract class; should never be initialized directly!')

    def __len__(self) -> int:
        raise NotImplementedError('PretrainDataset is an abstract class; should never be initialized directly!')
def test_measure():
    """Measuring marks qubits as measured and preserves the unitary cache."""
    qc = Circuit(1)
    assert len(qc.measured_qubits) == 0
    qc.measure(0)
    assert len(qc.measured_qubits) == 1
    assert 0 in qc.measured_qubits
    # Applying a gate to an already-measured qubit must be rejected.
    with raises(AssertionError):
        qc.h(0)
    # Cache lifecycle: built by get_unitary_matrix, invalidated by a new
    # gate, rebuilt, and left intact by a measurement.
    qc = Circuit(1)
    qc.h(0)
    qc.get_unitary_matrix()
    assert qc._cache is not None
    qc.x(0)
    assert qc._cache is None
    qc.get_unitary_matrix()
    qc.measure(0)
    assert qc._cache is not None
def distutils_scheme(dist_name, user=False, home=None, root=None, isolated=False, prefix=None):
    """Return the distutils install scheme for `dist_name`.

    Mirrors `distutils.command.install` option handling for the given
    user/home/root/prefix combination; `isolated` suppresses per-user
    distutils config files. Returns a dict keyed by SCHEME_KEYS
    (e.g. purelib, platlib, headers, scripts, data).
    """
    from distutils.dist import Distribution
    dist_args = {'name': dist_name}
    if isolated:
        # --no-user-cfg keeps per-user distutils configuration from leaking in.
        dist_args['script_args'] = ['--no-user-cfg']
    d = Distribution(dist_args)
    d.parse_config_files()
    # Dead `obj = None` pre-assignment removed; get_command_obj(create=True)
    # is the only assignment that matters.
    obj = d.get_command_obj('install', create=True)
    assert (obj is not None)
    i = cast(distutils_install_command, obj)
    # user/prefix and home/prefix are mutually exclusive install modes.
    assert (not (user and prefix)), 'user={} prefix={}'.format(user, prefix)
    assert (not (home and prefix)), 'home={} prefix={}'.format(home, prefix)
    i.user = (user or i.user)
    if (user or home):
        i.prefix = ''
    i.prefix = (prefix or i.prefix)
    i.home = (home or i.home)
    i.root = (root or i.root)
    i.finalize_options()
    scheme = {}
    for key in SCHEME_KEYS:
        scheme[key] = getattr(i, ('install_' + key))
    # An explicit install_lib in config overrides both purelib and platlib.
    if ('install_lib' in d.get_option_dict('install')):
        scheme.update(dict(purelib=i.install_lib, platlib=i.install_lib))
    if running_under_virtualenv():
        # Virtualenvs get headers under <prefix>/include/site/pythonX.Y/<name>.
        scheme['headers'] = os.path.join(i.prefix, 'include', 'site', 'python{}'.format(get_major_minor_version()), dist_name)
    if (root is not None):
        # Re-root the headers path, dropping any drive letter (Windows).
        path_no_drive = os.path.splitdrive(os.path.abspath(scheme['headers']))[1]
        scheme['headers'] = os.path.join(root, path_no_drive[1:])
    return scheme
def load_corrupted_imagenet(severity, data_dir='data', batch_size=128, cuda=True, workers=1):
    """Build a DataLoader over all ImageNet-C corruption types at `severity`.

    Each directory <data_dir>/ImageNet-C/<corruption>/<severity> is loaded as
    an ImageFolder with the standard ImageNet eval transform; the datasets
    are concatenated in a fixed corruption order, unshuffled.
    """
    normalize = transforms.Normalize(mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225])
    transform = transforms.Compose([transforms.Resize(256), transforms.CenterCrop(224), transforms.ToTensor(), normalize])
    corruption_types = ['brightness', 'contrast', 'defocus_blur', 'elastic_transform', 'fog', 'frost', 'gaussian_blur', 'gaussian_noise', 'glass_blur', 'impulse_noise', 'jpeg_compression', 'motion_blur', 'pixelate', 'saturate', 'shot_noise', 'snow', 'spatter', 'speckle_noise', 'zoom_blur']
    # Idiom fix: build each per-corruption path with os.path.join instead of
    # manual '/' string concatenation.
    dsets = [datasets.ImageFolder(os.path.join(data_dir, 'ImageNet-C', c, str(severity)), transform=transform) for c in corruption_types]
    dataset = data_utils.ConcatDataset(dsets)
    loader = torch.utils.data.DataLoader(dataset, batch_size=batch_size, shuffle=False, num_workers=workers, pin_memory=cuda)
    return loader
def Canberra_calc(TP, FP, FN, TN):
    """Canberra distance from confusion-matrix counts.

    Returns (FP + FN) / ((TP + FP) + (TP + FN)); on any failure (e.g.
    division by zero) returns the string 'None', per this module's
    error-reporting convention. `TN` is unused by the formula.
    """
    try:
        numerator = FP + FN
        denominator = (TP + FP) + (TP + FN)
        return numerator / denominator
    except Exception:
        return 'None'
class ReshapeModule(nn.Module):
    """Reshape a batch of inputs from (N, ...) to (N, *shape).

    The per-sample element count implied by `shape` must match the input's.
    """

    def __init__(self, shape):
        super().__init__()
        self.shape = shape

    def forward(self, x):
        # The target shape must preserve the element count per sample.
        target = self.shape
        assert np.prod(x.shape[1:]) == np.prod(target)
        return x.reshape(-1, *target)
def worksheet2rst(s, images_dir=''):
    """Convert Sage worksheet text `s` to reStructuredText.

    Runs a line-by-line state machine with COMMENT (HTML prose), CODE (input
    cells) and RESULT (cell output) sections, converting each section with
    its own parser; `images_dir` prefixes the image paths emitted for results.
    """
    s = add_title_if_there_is_none(s)
    state = States.COMMENT
    result = ['.. -*- coding: utf-8 -*-\n']
    ls = []  # lines accumulated for the current section
    for line in s.splitlines():
        # `transitions` maps current state -> (delimiter regex, next state).
        (regex, next_state) = transitions[state]
        m = regex.match(line)
        if m:
            # Section delimiter hit: flush the accumulated section, then switch.
            if (state == States.COMMENT):
                last_cell_id = m.group(1)
                img_path = (images_dir + os.path.sep)
                result.append(html2rst('\n'.join(ls), img_path))
            elif (state == States.RESULT):
                img_path = os.path.join(images_dir, ('cell_%s_' % last_cell_id))
                result.append(results2rst('\n'.join(ls), img_path))
                result.append('')
                result.append('.. end of output')
            elif (state == States.CODE):
                if (ls and any(ls)):
                    result.append(code_parser('\n'.join(ls)))
                else:
                    # Empty code cell: skip its (empty) result section too.
                    next_state = States.RESULT_TO_BE_DROPPED
            ls = []
            state = next_state
        else:
            ls.append(line)
    # Flush whatever section is still open at end of input.
    # NOTE(review): this tail uses result_parser for RESULT whereas the loop
    # above uses results2rst - possibly an inconsistency; confirm upstream.
    if (state == States.COMMENT):
        img_path = (images_dir + os.path.sep)
        result.append(html2rst('\n'.join(ls), img_path))
    elif (state == States.RESULT):
        img_path = os.path.join(images_dir, ('cell_%s_' % last_cell_id))
        result.append(result_parser('\n'.join(ls), img_path))
        result.append('')
        result.append('.. end of output')
    elif (state == States.CODE):
        result.append(code_parser('\n'.join(ls)))
    return '\n'.join(result)
def _do_cleanup(input_queue, output_queue, num_workers, sentinels_received, num_outstanding):
    """Drain a worker pool during shutdown (Linux-only: uses select.epoll).

    Sends one EndSentinel per worker that has not yet acknowledged while
    simultaneously draining the output queue; returns the payloads of any
    still-outstanding results collected along the way.
    """
    input_fd = input_queue.put_fd()
    output_fd = output_queue.get_fd()
    poller = select.epoll()
    poller.register(input_fd, select.EPOLLOUT)
    poller.register(output_fd, select.EPOLLIN)
    remaining_outputs = []
    end_sentinels_to_send = (num_workers - sentinels_received)
    while (sentinels_received < num_workers):
        # Block indefinitely while results are outstanding; otherwise give
        # stragglers 10 s before bailing out.
        evts = dict(poller.poll(((- 1) if (num_outstanding > 0) else 10.0)))
        if (not evts):
            break  # timed out with nothing outstanding
        if (output_fd in evts):
            (_, maybe_sentinel) = output_queue.get()
            if (maybe_sentinel is EndSentinel):
                sentinels_received += 1
            else:
                # A real result: keep its payload (second tuple element).
                remaining_outputs.append(maybe_sentinel[1])
                num_outstanding -= 1
        if (input_fd in evts):
            if (end_sentinels_to_send > 0):
                input_queue.put_nowait(EndSentinel)
                end_sentinels_to_send -= 1
            else:
                # All sentinels sent; stop watching input for writability.
                poller.modify(input_fd, 0)
    assert (sentinels_received == num_workers), (sentinels_received, num_workers)
    assert output_queue.empty()
    return remaining_outputs
def PercentMxScc(tspec, *args):
    """Dispatch PercentMxScc to the implementation matching the exact
    (not subclassed) graph type of `tspec`.

    Raises TypeError when `tspec` is none of the supported graph types.
    """
    dispatch = (
        (PUNGraph, PercentMxScc_PUNGraph),
        (PUndirNet, PercentMxScc_PUndirNet),
        (PDirNet, PercentMxScc_PDirNet),
        (PNGraph, PercentMxScc_PNGraph),
        (PNEANet, PercentMxScc_PNEANet),
        (PNGraphMP, PercentMxScc_PNGraphMP),
        (PNEANetMP, PercentMxScc_PNEANetMP),
    )
    for graph_type, impl in dispatch:
        # Exact type match (== type, not isinstance) mirrors the SWIG wrappers.
        if type(tspec) == graph_type:
            return impl(tspec, *args)
    raise TypeError('First argument has invalid type')
def push_prefix(prefix):
    """Push `prefix` onto the module-level prefix stack and rebuild the
    cached joined prefix string."""
    global _prefix_str
    _prefixes.append(prefix)
    _prefix_str = ''.join(_prefixes)
def test_get_item_1d():
    """1-D datasets: dataset1 targets equal inputs; dataset2 targets are
    negated, and negative indices wrap like ordinary sequence indexing."""
    for idx, expected in ((0, 1), (5, 6)):
        x, y = mamoDataset1.__getitem__(idx)
        assert x == expected
        assert y == expected
    for idx, expected in ((0, 1), (5, 6), (-5, 6)):
        x, y = mamoDataset2.__getitem__(idx)
        assert x == expected
        assert y == -expected
def test_obs_shape():
    """A 5-ray flat-lidar env wrapped with these 3 options yields a
    36-dimensional observation on reset."""
    option_set = [(0, 5), (5, 5), (10, 5)]
    wrapped = OptionsEnv(IntersimpleLidarFlat(n_rays=5), option_set)
    first_obs = wrapped.reset()
    assert first_obs.shape == (36,)
def load_data(prompt_file, continuation_file):
    """Load prompt scores and per-prompt continuation scores from text files.

    `prompt_file` has one float per line; `continuation_file` has one
    tab-separated list of floats per line. Returns (prompts, continuations);
    asserts the two files have the same number of lines.
    """
    print('Reading lines...')
    prompts = []
    # BUG FIX: file handles were previously opened without `with` and never
    # closed (resource leak).
    with open(prompt_file, 'r') as prompt_f:
        for line in prompt_f:
            # Strip trailing newline and any BOM before parsing.
            prompts.append(float(line.strip('\n').strip('\ufeff')))
    continuations = []
    with open(continuation_file, 'r') as cont_f:
        for line in cont_f:
            conts = line.strip('\n').strip('\ufeff').split('\t')
            continuations.append([float(c) for c in conts])
    assert (len(prompts) == len(continuations))
    # BUG FIX: the count was previously passed as a second print() argument
    # ("Loaded: %d", n) instead of being formatted into the message.
    print('Loaded: %d' % len(prompts))
    return (prompts, continuations)
def _remove_and_clear_zip_directory_cache_data(normalized_path):
    """Drop zipimport's cached directory data for `normalized_path`.

    Each matching cache entry is cleared in place (so any shared references
    observe the wipe) before the cache is updated; the updater returns None,
    which presumably signals _update_zipimporter_cache to also remove the
    entry - confirm against that helper's contract.
    """

    def clear_and_remove_cached_zip_archive_directory_data(path, old_entry):
        # Mutate the existing entry so aliased references are emptied too.
        old_entry.clear()
    _update_zipimporter_cache(normalized_path, zipimport._zip_directory_cache, updater=clear_and_remove_cached_zip_archive_directory_data)
def cli_main():
    """CLI entry point: parse validation arguments twice (once normally, once
    with defaults suppressed for overrides) and run main()."""
    args = options.parse_args_and_arch(options.get_validation_parser())
    # A second parser with suppressed defaults yields only explicitly-set args.
    override_args = options.parse_args_and_arch(options.get_validation_parser(), suppress_defaults=True)
    main(args, override_args)
def last_n_checkpoints(paths, n, update_based, upper_bound=None):
    """Return absolute paths of the `n` most recent checkpoints in a directory.

    `paths` must contain exactly one directory. update_based selects the
    'checkpoint_<epoch>_<update>.pt' naming (sorted by update) over the
    'checkpoint<epoch>.pt' naming; `upper_bound` optionally caps the sort key.

    Raises Exception if fewer than `n` matching checkpoints exist.
    """
    assert (len(paths) == 1)
    path = paths[0]
    if update_based:
        pt_regexp = re.compile('checkpoint_\\d+_(\\d+)\\.pt')
    else:
        pt_regexp = re.compile('checkpoint(\\d+)\\.pt')
    files = os.listdir(path)
    entries = []
    for f in files:
        m = pt_regexp.fullmatch(f)
        if (m is not None):
            sort_key = int(m.group(1))
            if ((upper_bound is None) or (sort_key <= upper_bound)):
                entries.append((sort_key, m.group(0)))
    if (len(entries) < n):
        # BUG FIX: the format arguments were previously passed as extra
        # Exception args, so the '{}' placeholders were never substituted.
        raise Exception('Found {} checkpoint files but need at least {}'.format(len(entries), n))
    return [os.path.join(path, x[1]) for x in sorted(entries, reverse=True)[:n]]
class TypeInferenceConfiguration():
    # Configuration options for type inference. NOTE(review): upstream this is
    # very likely a @dataclass - the decorator appears to have been lost in
    # extraction; confirm before use.

    # Strategy used to obtain parameter/return types (defaults to PEP 484 hints).
    type_inference_strategy: TypeInferenceStrategy = TypeInferenceStrategy.TYPE_HINTS
    # Whether runtime type tracing is enabled.
    type_tracing: bool = False
    # Whether the Type4Py inference service is used.
    type4py: bool = False
    # NOTE(review): the next line is garbled in this source - the default URI
    # value and its help text were fused together during extraction, leaving
    # an unterminated construct. Restore the original default and the separate
    # `type4py_timeout: int = 10` field from upstream; kept verbatim here.
    type4py_uri: str = ' 'URI of the Type4Py server.\n Currently only for the module under test.\n For example: See type4py_timeout: int = 10
class ESR(nn.Module):
    """Ensemble with Shared Representations: a shared base network feeding 9
    convolutional branches, each producing (emotion, affect) outputs.

    All weights are loaded from PATH_TO_SAVED_NETWORK at construction and the
    whole model is moved to `device` and put in eval mode.
    """

    INPUT_IMAGE_SIZE = (96, 96)
    INPUT_IMAGE_NORMALIZATION_MEAN = [0.0, 0.0, 0.0]
    INPUT_IMAGE_NORMALIZATION_STD = [1.0, 1.0, 1.0]
    PATH_TO_SAVED_NETWORK = './model/ml/trained_models/esr_9'
    FILE_NAME_BASE_NETWORK = 'Net-Base-Shared_Representations.pt'
    FILE_NAME_CONV_BRANCH = 'Net-Branch_{}.pt'

    def __init__(self, device):
        super(ESR, self).__init__()
        # Shared lower layers.
        self.base = Base()
        self.base.load_state_dict(torch.load(path.join(ESR.PATH_TO_SAVED_NETWORK, ESR.FILE_NAME_BASE_NETWORK), map_location=device))
        self.base.to(device)
        # BUG FIX: the branches were previously held in a plain Python list,
        # so they were never registered as submodules - self.to(device) and
        # self.eval() below did not reach them (BatchNorm/Dropout inside the
        # branches stayed in training mode). nn.ModuleList registers them.
        self.convolutional_branches = nn.ModuleList()
        for i in range(1, (len(self) + 1)):
            branch = ConvolutionalBranch()
            branch.load_state_dict(torch.load(path.join(ESR.PATH_TO_SAVED_NETWORK, ESR.FILE_NAME_CONV_BRANCH.format(i)), map_location=device))
            branch.to(device)
            self.convolutional_branches.append(branch)
        self.to(device)
        self.eval()

    def forward(self, x):
        """Return (emotions, affect_values): per-branch emotion and affect
        outputs computed from the shared base representation."""
        emotions = []
        affect_values = []
        x_shared_representations = self.base(x)
        for branch in self.convolutional_branches:
            (output_emotion, output_affect) = branch(x_shared_representations)
            emotions.append(output_emotion)
            affect_values.append(output_affect)
        return (emotions, affect_values)

    def __len__(self):
        # Number of ensemble branches.
        return 9
def bjac(y, t):
    """Banded Jacobian (4 x n, Fortran-ordered) for the banded5x5 ODE system
    at state `y` and time `t`, filled in place by the Fortran routine."""
    n = len(y)
    jac = np.zeros((4, n), order='F')
    # The two 1s are presumably the lower/upper band widths (ml, mu) - confirm
    # against the banded5x5 Fortran interface.
    banded5x5.banded5x5_bjac(t, y, 1, 1, jac)
    return jac
def get_layer_id_for_convnext(var_name, max_layer_id):
    """Map a ConvNeXt backbone parameter name to its layer-decay layer id.

    Embedding-like tokens get id 0; downsample layers and stage blocks get
    stage-dependent ids up to `max_layer_id`; anything outside the backbone
    (e.g. the head) gets max_layer_id + 1.
    """
    if var_name in ('backbone.cls_token', 'backbone.mask_token', 'backbone.pos_embed'):
        return 0
    if var_name.startswith('backbone.downsample_layers'):
        stage = int(var_name.split('.')[2])
        if stage == 0:
            return 0
        if stage == 1:
            return 2
        if stage == 2:
            return 3
        if stage == 3:
            return max_layer_id
    if var_name.startswith('backbone.stages'):
        parts = var_name.split('.')
        stage = int(parts[2])
        block = int(parts[3])
        if stage == 0:
            return 1
        if stage == 1:
            return 2
        if stage == 2:
            # Stage 2 is deep: group every 3 blocks into one decay layer.
            return 3 + block // 3
        if stage == 3:
            return max_layer_id
    return max_layer_id + 1
class _ScaledGradient(torch.autograd.Function): def forward(ctx, x: torch.Tensor, scale: float) -> torch.Tensor: ctx.scale = scale return x def backward(ctx, grad_output): return ((grad_output * ctx.scale), None)
def get_door_ids(modelCategoryFile):
    """Return the class ids of door-like categories (door, fence, arch)."""
    door_like_categories = ['door', 'fence', 'arch']
    return get_class_ids(door_like_categories, modelCategoryFile)
class TestMultiModelNumberTransformer(TestCase):
    """Unit tests for MultiModalNumberTransformer."""

    def test___init__(self):
        """num_modes passed to the constructor is stored on the instance."""
        num_modes = 10
        instance = MultiModalNumberTransformer(num_modes=num_modes)
        assert (instance.num_modes == 10)

    def test_transform(self):
        """transform fits a GMM and returns features, probabilities, means, stds.

        BUG FIX: the ``@patch('tgan.data.GaussianMixture', autospec=True)``
        decorator had degenerated into a bare tuple expression, leaving the
        ``gaussian_mock`` parameter unbound and the test uncollectable; the
        GaussianMixture is now patched with a context manager instead.
        """
        from unittest.mock import patch

        data = np.array([[0.1], [0.5], [1.0]])
        num_modes = 2
        instance = MultiModalNumberTransformer(num_modes)
        model_mock_spec = {
            'fit.return_value': None,
            'means_': np.array([[0.0], [1.0]]),
            'covariances_': np.array([[[4.0], [1.0]]]),
            'predict_proba.return_value': np.array([[0.1, 0.2], [0.2, 0.1], [0.1, 0.2]]),
        }
        model_mock = MagicMock(**model_mock_spec)
        expected_features = np.array([[(- 0.45)], [0.125], [0.0]])
        expected_probabilities = np.array([[0.1, 0.2], [0.2, 0.1], [0.1, 0.2]])
        expected_means = np.array([0.0, 1.0])
        expected_stds = np.array([2.0, 1.0])

        with patch('tgan.data.GaussianMixture', autospec=True) as gaussian_mock:
            gaussian_mock.return_value = model_mock
            result = instance.transform(data)

        (features, probabilities, means, stds) = result
        assert_equal(features, expected_features)
        assert_equal(probabilities, expected_probabilities)
        assert_equal(means, expected_means)
        assert_equal(stds, expected_stds)
        gaussian_mock.assert_called_once_with(2)
        model_mock.fit.assert_called_once_with(data)
        model_mock.predict_proba.assert_called_once_with(data)

    def test_inverse_transform(self):
        """inverse_transform reverses the value encoding using means and stds."""
        data = np.array([[(- 0.45), 0.1, 0.2], [0.125, 0.2, 0.1], [0.0, 0.1, 0.2]])
        info = {'type': 'value', 'means': np.array([0.0, 1.0]), 'stds': np.array([2.0, 1.0])}
        instance = MultiModalNumberTransformer()
        expected_result = np.array([0.1, 0.5, 1.0])

        result = instance.inverse_transform(data, info)

        assert_allclose(result, expected_result)
def run_sharded_shared_clustering(args, shards, nexpr=0):
    """Run the selection on every shard using a shared clustering.

    Selected indices are shifted back into the global (unsharded) index space
    using the shard size.  Returns ``(selected_indices, experiment_name)``.
    """
    shard_size = shards['sharded'][0][2]
    clusterings = get_shared_clusterings(args, shards['unsharded'], shards['sharded'], shards['sharded_ids'])
    expr_name = 'expr_{}_shard_shared_clustering_'.format(nexpr)
    selected = []
    for shard_idx, shard in enumerate(shards['sharded']):
        offset = shard_size * shard_idx
        picks = _run(args, *shard, expr_name='', clustering=clusterings[shard_idx], if_save=False, verbose=False)
        # Map shard-local indices into the global index space.
        selected.extend((local + offset) for local in picks)
    return (selected, expr_name)
def fbn_resblock(x, maps, kernel=(3, 3), pad=(1, 1), stride=(1, 1), test=False, name='fbn-convblock'):
    """Residual conv block built on fused batch normalization.

    Convolves ``x`` (bias-free), then applies fused BN with ``x`` as the
    second (residual) input; batch statistics are used unless ``test``.
    """
    with nn.parameter_scope(name):
        conv = PF.convolution(x, maps, kernel=kernel, pad=pad, stride=stride, with_bias=False)
        out = PF.fused_batch_normalization(conv, x, batch_stat=(not test))
    return out
class RobertaForTokenClassification():
    """Placeholder object used when PyTorch is not installed.

    Any attempt to construct or load this model raises a helpful error via
    ``requires_pytorch`` instead of a bare ImportError.
    """

    def __init__(self, *args, **kwargs):
        requires_pytorch(self)

    def from_pretrained(self, *args, **kwargs):
        requires_pytorch(self)
class DiscoveryProcessor(DataProcessor):
    """Data processor for the Discovery discourse-marker TSV dataset."""

    def get_train_examples(self, data_dir):
        """Read ``train.tsv`` under *data_dir* and build InputExamples."""
        train_path = os.path.join(data_dir, 'train.tsv')
        logger.info('LOOKING AT {}'.format(train_path))
        return self._create_examples(self._read_tsv(train_path), 'train')

    def get_dev_examples(self, data_dir):
        """Read ``dev.tsv`` under *data_dir* and build InputExamples."""
        dev_path = os.path.join(data_dir, 'dev.tsv')
        return self._create_examples(self._read_tsv(dev_path), 'dev')

    def get_labels(self):
        """Return the list of discourse-marker labels."""
        return list(disc_label.keys())

    def _create_examples(self, lines, set_type):
        """Turn TSV rows into InputExamples, skipping the header row.

        Column layout: row[1] is the first (text_a) sentence, row[0] the
        second (text_b), row[2] the label; both texts are lower-cased.
        """
        examples = []
        for row_idx, row in enumerate(lines):
            if row_idx == 0:
                # Skip the TSV header.
                continue
            examples.append(InputExample(
                guid=('%s-%s' % (set_type, row_idx)),
                text_a=row[1].lower(),
                text_b=row[0].lower(),
                label=row[2],
            ))
        return examples
class FPN(nn.Module):
    """Feature Pyramid Network neck.

    Fuses multi-scale backbone features: 1x1 lateral convs unify channels, a
    top-down pathway adds upsampled coarser levels into finer ones, and a 3x3
    conv smooths each fused level.  When ``num_outs`` exceeds the number of
    backbone levels used, extra outputs come from stride-2 convs (configured
    via ``add_extra_convs``) or max pooling.
    """

    def __init__(self, in_channels, out_channels, num_outs, start_level=0, end_level=(- 1), add_extra_convs=False, extra_convs_on_inputs=False, relu_before_extra_convs=False, no_norm_on_lateral=False, conv_cfg=None, norm_cfg=None, act_cfg=None, upsample_cfg=dict(mode='nearest')):
        super(FPN, self).__init__()
        assert isinstance(in_channels, list)
        self.in_channels = in_channels
        self.out_channels = out_channels
        self.num_ins = len(in_channels)
        self.num_outs = num_outs
        self.relu_before_extra_convs = relu_before_extra_convs
        self.no_norm_on_lateral = no_norm_on_lateral
        self.fp16_enabled = False
        # Copy so the shared default dict is never mutated.
        self.upsample_cfg = upsample_cfg.copy()
        if (end_level == (- 1)):
            # Use every backbone level from start_level upward.
            self.backbone_end_level = self.num_ins
            assert (num_outs >= (self.num_ins - start_level))
        else:
            # A fixed end level implies no extra output levels.
            self.backbone_end_level = end_level
            assert (end_level <= len(in_channels))
            assert (num_outs == (end_level - start_level))
        self.start_level = start_level
        self.end_level = end_level
        self.add_extra_convs = add_extra_convs
        assert isinstance(add_extra_convs, (str, bool))
        if isinstance(add_extra_convs, str):
            assert (add_extra_convs in ('on_input', 'on_lateral', 'on_output'))
        elif add_extra_convs:
            # Legacy bool form: translate to the string form.
            if extra_convs_on_inputs:
                self.add_extra_convs = 'on_input'
            else:
                self.add_extra_convs = 'on_output'
        self.lateral_convs = nn.ModuleList()
        self.fpn_convs = nn.ModuleList()
        for i in range(self.start_level, self.backbone_end_level):
            # 1x1 lateral conv (optionally norm-free) + 3x3 smoothing conv.
            l_conv = ConvModule(in_channels[i], out_channels, 1, conv_cfg=conv_cfg, norm_cfg=(norm_cfg if (not self.no_norm_on_lateral) else None), act_cfg=act_cfg, inplace=False)
            fpn_conv = ConvModule(out_channels, out_channels, 3, padding=1, conv_cfg=conv_cfg, norm_cfg=norm_cfg, act_cfg=act_cfg, inplace=False)
            self.lateral_convs.append(l_conv)
            self.fpn_convs.append(fpn_conv)
        # Stride-2 convs producing levels beyond the backbone's.
        extra_levels = ((num_outs - self.backbone_end_level) + self.start_level)
        if (self.add_extra_convs and (extra_levels >= 1)):
            for i in range(extra_levels):
                if ((i == 0) and (self.add_extra_convs == 'on_input')):
                    # First extra conv reads the raw top backbone feature.
                    in_channels = self.in_channels[(self.backbone_end_level - 1)]
                else:
                    in_channels = out_channels
                extra_fpn_conv = ConvModule(in_channels, out_channels, 3, stride=2, padding=1, conv_cfg=conv_cfg, norm_cfg=norm_cfg, act_cfg=act_cfg, inplace=False)
                self.fpn_convs.append(extra_fpn_conv)

    def init_weights(self):
        """Xavier-initialize every Conv2d in the module."""
        for m in self.modules():
            if isinstance(m, nn.Conv2d):
                xavier_init(m, distribution='uniform')

    def forward(self, inputs):
        """Fuse the input feature maps; returns a tuple of ``num_outs`` maps."""
        assert (len(inputs) == len(self.in_channels))
        # 1x1 lateral projections of the selected backbone levels.
        laterals = [lateral_conv(inputs[(i + self.start_level)]) for (i, lateral_conv) in enumerate(self.lateral_convs)]
        used_backbone_levels = len(laterals)
        # Top-down pathway: add the upsampled coarser level into the finer one.
        for i in range((used_backbone_levels - 1), 0, (- 1)):
            if ('scale_factor' in self.upsample_cfg):
                laterals[(i - 1)] += F.interpolate(laterals[i], **self.upsample_cfg)
            else:
                # Fix the target size explicitly to avoid rounding mismatches.
                prev_shape = laterals[(i - 1)].shape[2:]
                laterals[(i - 1)] += F.interpolate(laterals[i], size=prev_shape, **self.upsample_cfg)
        # 3x3 smoothing conv on each fused level.
        outs = [self.fpn_convs[i](laterals[i]) for i in range(used_backbone_levels)]
        if (self.num_outs > len(outs)):
            if (not self.add_extra_convs):
                # Extra output levels via max pooling.
                for i in range((self.num_outs - used_backbone_levels)):
                    outs.append(F.max_pool2d(outs[(- 1)], 1, stride=2))
            else:
                # Extra output levels via the stride-2 convs built in __init__.
                if (self.add_extra_convs == 'on_input'):
                    extra_source = inputs[(self.backbone_end_level - 1)]
                elif (self.add_extra_convs == 'on_lateral'):
                    extra_source = laterals[(- 1)]
                elif (self.add_extra_convs == 'on_output'):
                    extra_source = outs[(- 1)]
                else:
                    raise NotImplementedError
                outs.append(self.fpn_convs[used_backbone_levels](extra_source))
                for i in range((used_backbone_levels + 1), self.num_outs):
                    if self.relu_before_extra_convs:
                        outs.append(self.fpn_convs[i](F.relu(outs[(- 1)])))
                    else:
                        outs.append(self.fpn_convs[i](outs[(- 1)]))
        return tuple(outs)
class Normal(_SimpleDistributionMixin):
    """Normal (Gaussian) distribution term backed by the active tensor backend."""

    def __init__(self, loc, scale):
        """Build the backend distribution object for the given mean and width.

        :param loc: mean of the distribution.
        :param scale: standard deviation of the distribution.
        """
        (tensorlib, _) = get_backend()
        self.scale = scale
        self.loc = loc
        # Backend-specific distribution object used by the mixin.
        self._pdf = tensorlib.normal_dist(loc, scale)

    def expected_data(self):
        """The expected value of a normal distribution is its mean."""
        return self.loc
def create_shufflenet(model, data, num_input_channels, num_labels, label=None, is_test=False, no_loss=False):
    """Build a ShuffleNetV2 on *model* and optionally attach a softmax loss.

    Returns the raw output blob when ``no_loss`` is set, the (softmax, loss)
    pair when a label blob is supplied, and None otherwise.
    """
    builder = ShuffleNetV2Builder(model, data, num_input_channels, num_labels, is_test=is_test)
    builder.create()
    if no_loss:
        return builder.last_out
    if label is None:
        # NOTE(review): no label and loss requested — nothing to return,
        # matching the original implicit None.
        return None
    (softmax, loss) = model.SoftmaxWithLoss([builder.last_out, label], ['softmax', 'loss'])
    return (softmax, loss)
def test_simple_control_dependency_4():
    """Slicing at the return of an if/elif/else function must keep the
    variable initialisation, both conditions, the taken (else) assignment and
    the return itself."""

    def func() -> int:
        foo = 1
        bar = 2
        if (foo == bar):
            result = 1
        elif (foo > bar):
            result = 2
        else:
            result = 3
        return result
    # Expected slice, expressed as basic blocks of bytecode instructions;
    # built bottom-up so jump instructions can reference their target blocks.
    return_block = BasicBlock([Instr('LOAD_FAST', arg='result'), Instr('RETURN_VALUE')])
    else_block = BasicBlock([Instr('LOAD_CONST', arg=3), Instr('STORE_FAST', arg='result')])
    elif_cond = BasicBlock([Instr('LOAD_FAST', arg='foo'), Instr('LOAD_FAST', arg='bar'), Instr('COMPARE_OP', arg=Compare.GT), Instr('POP_JUMP_IF_FALSE', arg=else_block)])
    if_cond = BasicBlock([Instr('LOAD_FAST', arg='foo'), Instr('LOAD_FAST', arg='bar'), Instr('COMPARE_OP', arg=Compare.EQ), Instr('POP_JUMP_IF_FALSE', arg=elif_cond)])
    init_block = BasicBlock([Instr('LOAD_CONST', arg=1), Instr('STORE_FAST', arg='foo'), Instr('LOAD_CONST', arg=2), Instr('STORE_FAST', arg='bar')])
    # Flatten the blocks in execution order; only the taken else-branch store
    # is part of the expected slice.
    expected_instructions = []
    expected_instructions.extend(init_block)
    expected_instructions.extend(if_cond)
    expected_instructions.extend(elif_cond)
    expected_instructions.extend(else_block)
    expected_instructions.extend(return_block)
    sliced_instructions = slice_function_at_return(func)
    assert (len(sliced_instructions) == len(expected_instructions))
    assert compare(sliced_instructions, expected_instructions)
def save_camera_ply(ply_file, images, scale): points3D = (scale * np.array(((0.0, 0.0, 0.0), ((- 1.0), (- 1.0), 1.0), ((- 1.0), 1.0, 1.0), (1.0, (- 1.0), 1.0), (1.0, 1.0, 1.0)))) faces = np.array(((0, 2, 1), (0, 4, 2), (0, 3, 4), (0, 1, 3), (1, 2, 4), (1, 4, 3))) r = np.linspace(0, 255, len(images), dtype=np.uint8) g = (255 - r) b = (r - np.linspace(0, 128, len(images), dtype=np.uint8)) color = np.column_stack((r, g, b)) with open(ply_file, 'w') as fid: ((print >> fid), 'ply') ((print >> fid), 'format ascii 1.0') ((print >> fid), 'element vertex', (len(points3D) * len(images))) ((print >> fid), 'property float x') ((print >> fid), 'property float y') ((print >> fid), 'property float z') ((print >> fid), 'property uchar red') ((print >> fid), 'property uchar green') ((print >> fid), 'property uchar blue') ((print >> fid), 'element face', (len(faces) * len(images))) ((print >> fid), 'property list uchar int vertex_index') ((print >> fid), 'end_header') for (image, c) in zip(images, color): for p3D in (points3D.dot(image.R()) + image.C()): ((print >> fid), p3D[0], p3D[1], p3D[2], c[0], c[1], c[2]) for i in xrange(len(images)): for f in (faces + (len(points3D) * i)): ((print >> fid), '3 {} {} {}'.format(*f))
class CustomImageFolder(VisionDataset):
    """ImageFolder-style dataset that also returns each sample's index and
    relative path, supports subsampling via an index list, and can apply an
    optional perturbation after the transform."""

    def __init__(self, root, transform, perturbation_fn=None, idx_subsample_list=None):
        super().__init__(root, transform=transform, target_transform=None)
        classes, class_to_idx = self._find_classes(self.root)
        samples = make_dataset(self.root, class_to_idx)
        if len(samples) == 0:
            raise RuntimeError('Found 0 images in subfolders of: ' + self.root)
        if idx_subsample_list is not None:
            # Keep only the requested subset, in the given order.
            samples = [samples[idx] for idx in idx_subsample_list]
        self.loader = pil_loader
        self.classes = classes
        self.class_to_idx = class_to_idx
        self.samples = samples
        self.targets = [target for (_, target) in samples]
        self.perturbation_fn = perturbation_fn

    def _find_classes(self, dir):
        """Return sorted class (subfolder) names and a name->index mapping."""
        if sys.version_info >= (3, 5):
            classes = [entry.name for entry in os.scandir(dir) if entry.is_dir()]
        else:
            classes = [name for name in os.listdir(dir) if os.path.isdir(os.path.join(dir, name))]
        classes.sort()
        class_to_idx = {name: idx for idx, name in enumerate(classes)}
        return (classes, class_to_idx)

    def __getitem__(self, index):
        path, target = self.samples[index]
        rel_path = os.path.relpath(path, self.root)
        return [index, rel_path, self.load_img(path), target]

    def load_img(self, path):
        """Load, transform and (optionally) perturb the image at *path*."""
        img = self.loader(path)
        img = self.transform(img)
        if self.perturbation_fn is not None:
            img = self.perturbation_fn(img)
        return img

    def __len__(self):
        return len(self.samples)
def get_performance_limits(memory_info, throughput_info):
    """Combine memory- and throughput-based limits into one PerformanceLimits.

    Whichever constraint binds at the smaller batch size wins (ties keep the
    throughput limit).
    """
    # Batch size at which device memory is exhausted, and the throughput there.
    mem_batch = memory_info.usage_model_mb.inverse(memory_info.max_capacity_mb)
    mem_thpt = (mem_batch / throughput_info.runtime_model_ms.evaluate(mem_batch)) * 1000
    # Batch size at which throughput saturates.
    thpt_batch = throughput_info.batch_from_throughput(throughput_info.max_throughput)
    binding = min(
        (thpt_batch, throughput_info.max_throughput),
        (mem_batch, mem_thpt),
        key=(lambda limit: limit[0]),
    )
    return PerformanceLimits(max_batch_size=binding[0], throughput_limit=binding[1])
def recreate_folder(path):
    """Delete *path* (if it exists) and recreate it as an empty directory.

    BUG FIX: the original used a bare ``except:``, which also swallows
    ``KeyboardInterrupt`` and ``SystemExit``; only OS-level errors (e.g. the
    folder not existing yet) are ignored now.
    """
    try:
        shutil.rmtree(path)
        # Give the filesystem a moment to release the deleted entries before
        # recreating the directory (kept from the original implementation).
        time.sleep(2)
    except OSError:
        pass
    os.makedirs(path)
def timeSince(since, percent):
    """Format elapsed and estimated-remaining time.

    :param since: start timestamp (seconds, as from ``time.time()``).
    :param percent: fraction of the work completed (0 < percent <= 1); the
        remaining time is extrapolated linearly.
    """
    elapsed = time.time() - since
    estimated_total = elapsed / percent
    remaining = estimated_total - elapsed
    return '%s (- %s)' % (asMinutes(elapsed), asMinutes(remaining))
def load_kernel(filename, cache=True):
    """Parse a kernel-profile text file into a DataFrame, with pickle caching.

    Each line is expected to hold whitespace-separated triples of tokens
    where the first token of a triple is a column name and the third its
    value; column names are taken from the first line.
    NOTE(review): the first line is also processed by the data loop, so its
    values are ingested as a data row — confirm this is intended.

    :param filename: path of the profile dump to parse.
    :param cache: when True, read/write a ``<filename>.pkl`` cache next to it.
    """
    if cache:
        cache_filename = (filename + '.pkl')
        if os.path.exists(cache_filename):
            # Fast path: reuse the previously parsed DataFrame.
            return pd.read_pickle(cache_filename)
    with open(filename, 'r') as f:
        lines = f.readlines()
    # Column names come from every third token of the first line.
    parts = lines[0].split(' ')
    columns = [parts[i].lower() for i in range(0, (len(parts) - 1), 3)]
    data = {c: [] for c in columns}
    for line in lines:
        parts = line.split(' ')
        for i in range(0, (len(parts) - 1), 3):
            data[parts[i].lower()].append(parts[(i + 2)].lower())
    # Scale time by 1000 (presumably seconds -> milliseconds).
    data['time'] = [(float(t) * 1000) for t in data['time']]
    # Columns listed in the module-level _int_kernel_cols hold integers.
    for col in _int_kernel_cols:
        if (col in data):
            data[col] = [int(x) for x in data[col]]
    df = pd.DataFrame(data=data)

    def translate_layouts(x):
        # Map every 'n' to 'i' in string cells — presumably normalizing
        # layout names; verify against the producers of these files.
        if isinstance(x, str):
            x = x.translate({ord('n'): 'i'})
        return x
    df = df.applymap(translate_layouts)
    if cache:
        df.to_pickle(cache_filename)
    return df
def main_worker(args):
    """Evaluate a trained audio-tagging model on the ECALS test split.

    Loads the best checkpoint, extracts per-track audio embeddings and tag
    embeddings, computes single-query and multi-query retrieval metrics, and
    writes embeddings plus metrics into the experiment directory.
    """
    test_dataset = ECALS_Dataset(args.data_dir, 'TEST', args.sr, args.duration, args.num_chunks, False)
    # NOTE(review): shuffle=None (rather than False) is forwarded to the
    # DataLoader — confirm this is intended.
    test_loader = torch.utils.data.DataLoader(test_dataset, batch_size=args.batch_size, shuffle=None, num_workers=args.workers, pin_memory=True, sampler=None, drop_last=False)
    # Mel-spectrogram front end with a 10 ms hop.
    audio_preprocessr = TFRep(sample_rate=args.sr, f_min=0, f_max=int((args.sr / 2)), n_fft=args.n_fft, win_length=args.win_length, hop_length=int((0.01 * args.sr)), n_mels=args.mel_dim)
    frontend = ResFrontEnd(input_size=(args.mel_dim, (int((100 * args.duration)) + 1)), conv_ndim=128, attention_ndim=args.attention_ndim, mix_type=args.mix_type)
    audio_encoder = MusicTransformer(audio_representation=audio_preprocessr, frontend=frontend, audio_rep=args.audio_rep, attention_nlayers=args.attention_nlayers, attention_ndim=args.attention_ndim)
    model = ClassificationModel(audio_encoder=audio_encoder, audio_dim=args.attention_ndim, mlp_dim=args.mlp_dim, num_classes=len(test_dataset.list_of_label))
    save_dir = f'exp/{args.arch}_{args.frontend}_{args.mix_type}_{args.audio_rep}/{args.text_rep}_{args.text_type}/'
    pretrained_object = torch.load(f'{save_dir}/best.pth', map_location='cpu')
    state_dict = pretrained_object['state_dict']
    # Strip the 'module.' key prefix (presumably from a DataParallel-wrapped
    # training run) so the plain model can load the checkpoint.
    for k in list(state_dict.keys()):
        if k.startswith('module.'):
            state_dict[k[len('module.'):]] = state_dict[k]
            del state_dict[k]
    model.load_state_dict(state_dict)
    torch.cuda.set_device(args.gpu)
    model = model.cuda(args.gpu)
    cudnn.benchmark = True
    model.eval()
    (audio_dict, predictions, groudturths, track_ids) = ({}, [], [], [])
    for batch in tqdm(test_loader):
        x = batch['audio']
        y = batch['binary']
        track_id = batch['track_id']
        if (args.gpu is not None):
            x = x.cuda(args.gpu, non_blocking=True)
            y = y.cuda(args.gpu, non_blocking=True)
        with torch.no_grad():
            # squeeze(0) drops the batch dim; mean(0) below averages over the
            # remaining first dim (presumably audio chunks — verify).
            z_audio = model.encode_audio(x.squeeze(0))
            predict = model.forward_eval(x.squeeze(0))
        audio_dict[track_id[0]] = z_audio.mean(0).detach().cpu()
        predictions.append(predict.mean(0, True).detach().cpu())
        groudturths.append(y.detach().cpu())
        track_ids.append(track_id)
    # Tag embeddings are the classifier head's per-class weight vectors.
    tag_dict = {}
    for (tag, centorid) in zip(test_dataset.list_of_label, model.head.fc_cls.weight):
        tag_dict[tag] = centorid.detach().cpu()
    torch.save(audio_dict, os.path.join(save_dir, 'audio_embs.pt'))
    torch.save(tag_dict, os.path.join(save_dir, 'tag_embs.pt'))
    logits = torch.cat(predictions, dim=0).numpy()
    targets = torch.cat(groudturths, dim=0).numpy()
    # Binarize predictions at the best-F1 operating point.
    (_, _, _, bestF1_decisions) = get_binary_decisions(targets, logits)
    multi_query_gt = json.load(open(os.path.join(args.data_dir, 'ecals_annotation/multiquery_samples.json'), 'r'))
    (gt_items, pred_items) = multi_query_annotation(bestF1_decisions, track_ids, test_dataset.list_of_label, multi_query_gt)
    logits = pd.DataFrame(logits, columns=test_dataset.list_of_label)
    targets = pd.DataFrame(targets, columns=test_dataset.list_of_label)
    # Single-query metrics over the full label set and the fixed TAGNAMES set.
    single_query_evaluation(targets, logits, save_dir, test_dataset.list_of_label)
    single_query_evaluation(targets, logits, save_dir, TAGNAMES)
    mq_results = rank_eval(gt_items, pred_items)
    with open(os.path.join(save_dir, f'mq_results.json'), mode='w') as io:
        json.dump(mq_results, io, indent=4)
def chebval(x, c):
    """Evaluate a Chebyshev series at *x* via the low-level ``_chebval`` kernel.

    Mirrors ``numpy.polynomial.chebyshev.chebval``'s input handling, then
    allocates the output and scratch buffers the kernel fills in.
    """
    coeffs = np.array(c, ndmin=1, copy=True)
    # Promote boolean/integer coefficient dtypes to double.
    if coeffs.dtype.char in '?bBhHiIlLqQpP':
        coeffs = coeffs.astype(np.double)
    if isinstance(x, int):
        x = float(x)
    if isinstance(x, (tuple, list)):
        x = np.asarray(x)
    # Output plus scratch arrays used by the low-level routine.
    y = np.zeros_like(x, dtype=coeffs.dtype)
    c0 = np.zeros_like(x, dtype=coeffs.dtype)
    c1 = np.zeros_like(x, dtype=coeffs.dtype)
    tmp = np.zeros_like(x, dtype=coeffs.dtype)
    x2 = np.zeros_like(x)
    return _chebval(x, coeffs, y, c0, c1, tmp, x2)
class Jitter(torch.nn.Module):
    """Perturbation network: adds a small, bounded correction to an action
    while keeping the result inside the action bounds."""

    def __init__(self, state_dim, action_dim, jitter_features, jitter_layers, max_action=1.0, phi=0.05):
        super().__init__()
        self.state_dim = state_dim
        self.action_dim = action_dim
        self.max_action = max_action
        # phi bounds the relative magnitude of the perturbation.
        self.phi = phi
        self.jitter_net = MLP((self.state_dim + self.action_dim), self.action_dim, jitter_features, jitter_layers, hidden_activation='relu')

    def forward(self, state, action):
        """Return the perturbed action, clamped to [-max_action, max_action]."""
        joint = torch.cat([state, action], dim=-1)
        raw = self.jitter_net(joint)
        perturbation = self.phi * self.max_action * torch.tanh(raw)
        perturbed = action + perturbation
        return torch.clamp(perturbed, -self.max_action, self.max_action)
def max_memory_param():
    """Describe the ``max_memory`` tuning parameter.

    Returns a tuple of (name, type, limit, help text); ``UINT`` and
    ``UINT_MAX`` are module-level constants defined elsewhere in this file.
    NOTE(review): confirm the third field is the maximum/default value
    expected by the consumers of this descriptor.
    """
    return ('max_memory', UINT, UINT_MAX, 'maximum amount of memory in megabytes')
def find_argument(rel_name, rel_tokens, matched_event, sent_entities, sent_obj, is_gold, srl_obj):
    """Try to attach an SRL argument to *matched_event*.

    Resolves the argument token span, searches the sentence entities for a
    string match (with mention-type constraints for temporal and locative
    roles) and registers the first match on the event.  Returns True when a
    matching entity was attached and is usable (gold side, or a predicted
    entity aligned with a gold mention), False otherwise.
    """
    start_ix = rel_tokens[0]
    end_ix = rel_tokens[1] if len(rel_tokens) > 1 else rel_tokens[0]
    if end_ix >= len(sent_obj.get_tokens()):
        # Diagnostics only — the span is still used below, as before.
        print('argument bound mismatch with sentence length')
        print('arg start index - {}'.format(start_ix))
        print('arg end index - {}'.format(end_ix))
        print('sentence length - {}'.format(len(sent_obj.get_tokens())))
        print('raw sentence: {}'.format(sent_obj.get_raw_sentence()))
        print('matched event: {}'.format(str(matched_event)))
        print('srl obj - {}'.format(str(srl_obj)))
    arg_str, arg_tokens = sent_obj.fetch_mention_string(start_ix, end_ix)
    matched_entity = None
    for entity in sent_entities:
        if not have_string_match(entity, arg_str, start_ix, end_ix):
            continue
        # Temporal/locative roles must agree with the entity's mention type.
        if rel_name == 'AM-TMP' and entity.mention_type != 'TIM':
            continue
        if rel_name == 'AM-LOC' and entity.mention_type != 'LOC':
            continue
        matched_entity = entity
        break
    if matched_entity is None:
        return False
    add_arg_to_event(matched_entity, matched_event, rel_name)
    return is_gold or (matched_entity.gold_mention_id is not None)
def get_func(m, f):
    """Resolve the attribute named *f* on *m*, falling back to ``m.module``
    (the conventional wrapper attribute); raises NotImplementedError when
    neither object provides it."""
    try:
        return getattr(m, f)
    except AttributeError:
        pass
    if hasattr(m.module, f):
        return getattr(m.module, f)
    raise NotImplementedError
class DoxyGroup(DoxyCompound):
    """Representation of a doxygen 'group' compound (a documentation module)."""

    __module__ = 'gnuradio.utils.doxyxml'
    kind = 'group'

    def _parse(self):
        """Lazily parse the group's XML: title, nested groups, classes, members."""
        if self._parsed:
            return
        super(DoxyGroup, self)._parse()
        self.retrieve_data()
        if self._error:
            # XML retrieval failed; leave the group unparsed/empty.
            return
        cdef = self._retrieved_data.compounddef
        self._data['title'] = description(cdef.title)
        # Nested sub-groups become DoxyGroup members.
        grps = cdef.innergroup
        for grp in grps:
            converted = DoxyGroup.from_refid(grp.refid, top=self.top)
            self._members.append(converted)
        # Contained classes become DoxyClass members.
        klasses = cdef.innerclass
        for kls in klasses:
            converted = DoxyClass.from_refid(kls.refid, top=self.top)
            self._members.append(converted)
        self.process_memberdefs()

    # Parsed title of the group; accessing it triggers parsing via data().
    title = property((lambda self: self.data()['title']))
def clean_string(string): string = re.sub('[^A-Za-z0-9(),!?\\\'\\`\\"]', ' ', string) string = re.sub("\\'s", " 's", string) string = re.sub("\\'ve", " 've", string) string = re.sub("n\\'t", " n't", string) string = re.sub("\\'re", " 're", string) string = re.sub("\\'d", " 'd", string) string = re.sub("\\'ll", " 'll", string) string = re.sub(',', ' , ', string) string = re.sub('!', ' ! ', string) string = re.sub('\\(', ' \\( ', string) string = re.sub('\\"\\"', ' " ', string) string = re.sub('\\)', ' \\) ', string) string = re.sub('\\?', ' \\? ', string) string = re.sub('\\s{2,}', ' ', string) return string.lower().strip()
class Ebgp(Layer, Graphable):
    """The eBGP layer: configures BGP peerings between autonomous systems.

    Supports three peering styles: private peerings at an internet exchange
    (IX), route-server (RS) peerings at an IX, and direct cross-connect (XC)
    peerings.  Each session carries a PeerRelationship (Peer / Provider /
    Unfiltered) that selects the generated BIRD import/export filters and
    local-preference values.
    """

    # (ix, a, b) -> relationship: private peering between AS a and AS b at IX ix.
    __peerings: Dict[(Tuple[(int, int, int)], PeerRelationship)]
    # (ix, asn) pairs: ASes peered with the route server of IX ix.
    __rs_peers: List[Tuple[(int, int)]]
    # (a, b) -> relationship: cross-connect peering between AS a and AS b.
    __xc_peerings: Dict[(Tuple[(int, int)], PeerRelationship)]

    def __init__(self):
        """Create a new eBGP layer (depends on the Routing layer)."""
        super().__init__()
        self.__peerings = {}
        self.__xc_peerings = {}
        self.__rs_peers = []
        self.addDependency('Routing', False, False)

    def __createPeer(self, nodeA: Router, nodeB: Router, addrA: str, addrB: str, rel: PeerRelationship) -> None:
        """Emit the BIRD protocol blocks for one BGP session.

        Bootstraps each (non-RS) router's common BGP configuration on first
        use.  If one of the two nodes is a route server, an RS-client session
        is created; otherwise *rel* selects the filters/preferences on both
        sides.  NOTE(review): the RS branch uses addrA for the RS side and
        addrB for the client — it assumes nodeA is the route server.
        """
        rsNode: Router = None
        routerA: Router = None
        routerB: Router = None
        for node in [nodeA, nodeB]:
            if (node.getRole() == NodeRole.RouteServer):
                rsNode = node
                continue
            if (routerA == None):
                routerA = node
            elif (routerB == None):
                routerB = node
            if (not node.getAttribute('__bgp_bootstrapped', False)):
                # First BGP session on this router: install the shared BGP
                # config plus the t_bgp table plumbing.
                self._log('Bootstrapping as{}/{} for BGP...'.format(node.getAsn(), node.getName()))
                node.setAttribute('__bgp_bootstrapped', True)
                node.appendFile('/etc/bird/bird.conf', EbgpFileTemplates['bgp_commons'].format(localAsn=node.getAsn()))
                node.addTable('t_bgp')
                node.addTablePipe('t_bgp')
                node.addTablePipe('t_direct', 't_bgp', exportFilter='filter { bgp_large_community.add(LOCAL_COMM); bgp_local_pref = 40; accept; }')
        assert (routerA != None), 'both nodes are RS node. cannot setup peering.'
        assert (routerA != routerB), 'cannot peer with oneself.'
        if (rsNode != None):
            # Route-server session: the client only exports local/customer
            # routes and tags imports with the PEER community.
            rsNode.addProtocol('bgp', 'p_as{}'.format(routerA.getAsn()), EbgpFileTemplates['rs_bird_peer'].format(localAddress=addrA, localAsn=rsNode.getAsn(), peerAddress=addrB, peerAsn=routerA.getAsn()))
            routerA.addProtocol('bgp', 'p_rs{}'.format(rsNode.getAsn()), EbgpFileTemplates['rnode_bird_peer'].format(localAddress=addrB, localAsn=routerA.getAsn(), peerAddress=addrA, peerAsn=rsNode.getAsn(), exportFilter='where bgp_large_community ~ [LOCAL_COMM, CUSTOMER_COMM]', importCommunity='PEER_COMM', bgpPref=20))
            return
        if (rel == PeerRelationship.Peer):
            # Symmetric peering: both sides export local/customer routes only.
            routerA.addProtocol('bgp', 'p_as{}'.format(routerB.getAsn()), EbgpFileTemplates['rnode_bird_peer'].format(localAddress=addrA, localAsn=routerA.getAsn(), peerAddress=addrB, peerAsn=routerB.getAsn(), exportFilter='where bgp_large_community ~ [LOCAL_COMM, CUSTOMER_COMM]', importCommunity='PEER_COMM', bgpPref=20))
            routerB.addProtocol('bgp', 'p_as{}'.format(routerA.getAsn()), EbgpFileTemplates['rnode_bird_peer'].format(localAddress=addrB, localAsn=routerB.getAsn(), peerAddress=addrA, peerAsn=routerA.getAsn(), exportFilter='where bgp_large_community ~ [LOCAL_COMM, CUSTOMER_COMM]', importCommunity='PEER_COMM', bgpPref=20))
        if (rel == PeerRelationship.Provider):
            # A is the provider (exports everything, 'c_' customer session);
            # B is the customer (exports local/customer routes, 'u_' upstream).
            routerA.addProtocol('bgp', 'c_as{}'.format(routerB.getAsn()), EbgpFileTemplates['rnode_bird_peer'].format(localAddress=addrA, localAsn=routerA.getAsn(), peerAddress=addrB, peerAsn=routerB.getAsn(), exportFilter='all', importCommunity='CUSTOMER_COMM', bgpPref=30))
            routerB.addProtocol('bgp', 'u_as{}'.format(routerA.getAsn()), EbgpFileTemplates['rnode_bird_peer'].format(localAddress=addrB, localAsn=routerB.getAsn(), peerAddress=addrA, peerAsn=routerA.getAsn(), exportFilter='where bgp_large_community ~ [LOCAL_COMM, CUSTOMER_COMM]', importCommunity='PROVIDER_COMM', bgpPref=10))
        if (rel == PeerRelationship.Unfiltered):
            # Both sides export everything ('x_' sessions).
            routerA.addProtocol('bgp', 'x_as{}'.format(routerB.getAsn()), EbgpFileTemplates['rnode_bird_peer'].format(localAddress=addrA, localAsn=routerA.getAsn(), peerAddress=addrB, peerAsn=routerB.getAsn(), exportFilter='all', importCommunity='CUSTOMER_COMM', bgpPref=30))
            routerB.addProtocol('bgp', 'x_as{}'.format(routerA.getAsn()), EbgpFileTemplates['rnode_bird_peer'].format(localAddress=addrB, localAsn=routerB.getAsn(), peerAddress=addrA, peerAsn=routerA.getAsn(), exportFilter='all', importCommunity='PROVIDER_COMM', bgpPref=10))

    def getName(self) -> str:
        """Layer name used for registration/dependencies."""
        return 'Ebgp'

    def addPrivatePeering(self, ix: int, a: int, b: int, abRelationship: PeerRelationship = PeerRelationship.Peer) -> Ebgp:
        """Register a private peering between AS a and AS b at IX ix.

        :returns: self, for chaining.
        """
        assert ((ix, a, b) not in self.__peerings), '{} <-> {} already peered at IX{}'.format(a, b, ix)
        assert ((ix, b, a) not in self.__peerings), '{} <-> {} already peered at IX{}'.format(b, a, ix)
        assert ((abRelationship == PeerRelationship.Peer) or (abRelationship == PeerRelationship.Provider) or (abRelationship == PeerRelationship.Unfiltered)), 'unknown peering relationship {}'.format(abRelationship)
        self.__peerings[(ix, a, b)] = abRelationship
        return self

    def addPrivatePeerings(self, ix: int, a_asns: List[int], b_asns: List[int], abRelationship: PeerRelationship = PeerRelationship.Peer) -> Ebgp:
        """Register private peerings for the cartesian product of two AS lists."""
        for a in a_asns:
            for b in b_asns:
                self.addPrivatePeering(ix, a, b, abRelationship)
        return self

    def getPrivatePeerings(self) -> Dict[(Tuple[(int, int, int)], PeerRelationship)]:
        """Return the (ix, a, b) -> relationship private-peering map."""
        return self.__peerings

    def addCrossConnectPeering(self, a: int, b: int, abRelationship: PeerRelationship = PeerRelationship.Peer) -> Ebgp:
        """Register a cross-connect peering between AS a and AS b.

        :returns: self, for chaining.
        """
        assert ((a, b) not in self.__xc_peerings), '{} <-> {} already configured as XC peer'.format(a, b)
        assert ((b, a) not in self.__xc_peerings), '{} <-> {} already configured as XC peer'.format(b, a)
        assert ((abRelationship == PeerRelationship.Peer) or (abRelationship == PeerRelationship.Provider) or (abRelationship == PeerRelationship.Unfiltered)), 'unknown peering relationship {}'.format(abRelationship)
        self.__xc_peerings[(a, b)] = abRelationship
        return self

    def getCrossConnectPeerings(self) -> Dict[(Tuple[(int, int)], PeerRelationship)]:
        """Return the (a, b) -> relationship cross-connect peering map."""
        return self.__xc_peerings

    def addRsPeer(self, ix: int, peer: int) -> Ebgp:
        """Register AS *peer* as a route-server client at IX *ix*."""
        assert ((ix, peer) not in self.__rs_peers), '{} already peered with RS at IX{}'.format(peer, ix)
        self.__rs_peers.append((ix, peer))
        return self

    def addRsPeers(self, ix: int, peers: List[int]):
        """Register several ASes as route-server clients at IX *ix*."""
        for peer in peers:
            self.addRsPeer(ix, peer)
        return self

    def getRsPeers(self) -> List[Tuple[(int, int)]]:
        """Return the list of (ix, asn) route-server peerings."""
        return self.__rs_peers

    def configure(self, emulator: Emulator) -> None:
        """Resolve all registered peerings against the emulator registry and
        create the corresponding BGP sessions."""
        reg = emulator.getRegistry()
        # 1) Route-server peerings: find the peer's router on the IX network.
        for (ix, peer) in self.__rs_peers:
            ix_reg = ScopedRegistry('ix', reg)
            p_reg = ScopedRegistry(str(peer), reg)
            ix_net: Network = ix_reg.get('net', 'ix{}'.format(ix))
            ix_rs: Router = ix_reg.get('rs', 'ix{}'.format(ix))
            rs_ifs = ix_rs.getInterfaces()
            assert (len(rs_ifs) == 1), '??? ix{} rs has {} interfaces.'.format(ix, len(rs_ifs))
            rs_if = rs_ifs[0]
            p_rnodes: List[Router] = p_reg.getByType('rnode')
            p_ixnode: Router = None
            p_ixif: Interface = None
            # Find the peer AS router with an interface on the IX network.
            for node in p_rnodes:
                if (p_ixnode != None):
                    break
                for iface in node.getInterfaces():
                    if (iface.getNet() == ix_net):
                        p_ixnode = node
                        p_ixif = iface
                        break
            assert (p_ixnode != None), 'cannot resolve peering: as{} not in ix{}'.format(peer, ix)
            self._log('adding peering: {} as {} (RS) <-> {} as {}'.format(rs_if.getAddress(), ix, p_ixif.getAddress(), peer))
            self.__createPeer(ix_rs, p_ixnode, rs_if.getAddress(), p_ixif.getAddress(), PeerRelationship.Peer)
        # 2) Cross-connect peerings: match XC interfaces on both routers.
        for ((a, b), rel) in self.__xc_peerings.items():
            a_reg = ScopedRegistry(str(a), reg)
            b_reg = ScopedRegistry(str(b), reg)
            a_router: Router = None
            b_router: Router = None
            a_addr: str = None
            b_addr: str = None
            hit = False
            for node in a_reg.getByType('rnode'):
                router: Router = node
                for ((peername, peerasn), (localaddr, _)) in router.getCrossConnects().items():
                    if (peerasn != b):
                        continue
                    if (not b_reg.has('rnode', peername)):
                        continue
                    hit = True
                    a_router = node
                    b_router = b_reg.get('rnode', peername)
                    a_addr = str(localaddr.ip)
                    (b_ifaddr, _) = b_router.getCrossConnect(a, a_router.getName())
                    b_addr = str(b_ifaddr.ip)
                    break
                if hit:
                    break
            assert hit, 'cannot find XC to configure peer AS{} <--> AS{}'.format(a, b)
            self._log('adding XC peering: {} as {} <-({})-> {} as {}'.format(a_addr, a, rel, b_addr, b))
            self.__createPeer(a_router, b_router, a_addr, b_addr, rel)
        # 3) IX private peerings: locate each AS's router on the IX network.
        for ((ix, a, b), rel) in self.__peerings.items():
            ix_reg = ScopedRegistry('ix', reg)
            a_reg = ScopedRegistry(str(a), reg)
            b_reg = ScopedRegistry(str(b), reg)
            ix_net: Network = ix_reg.get('net', 'ix{}'.format(ix))
            a_rnodes: List[Router] = a_reg.getByType('rnode')
            b_rnodes: List[Router] = b_reg.getByType('rnode')
            a_ixnode: Router = None
            a_ixif: Interface = None
            for node in a_rnodes:
                if (a_ixnode != None):
                    break
                for iface in node.getInterfaces():
                    if (iface.getNet() == ix_net):
                        a_ixnode = node
                        a_ixif = iface
                        break
            assert (a_ixnode != None), 'cannot resolve peering: as{} not in ix{}'.format(a, ix)
            b_ixnode: Router = None
            b_ixif: Interface = None
            for node in b_rnodes:
                if (b_ixnode != None):
                    break
                for iface in node.getInterfaces():
                    if (iface.getNet() == ix_net):
                        b_ixnode = node
                        b_ixif = iface
                        break
            assert (b_ixnode != None), 'cannot resolve peering: as{} not in ix{}'.format(b, ix)
            self._log('adding IX peering: {} as {} <-({})-> {} as {}'.format(a_ixif.getAddress(), a, rel, b_ixif.getAddress(), b))
            self.__createPeer(a_ixnode, b_ixnode, a_ixif.getAddress(), b_ixif.getAddress(), rel)

    def render(self, emulator: Emulator) -> None:
        """All work happens in configure(); nothing to render."""
        pass

    def _doCreateGraphs(self, emulator: Emulator):
        """Build the 'All Peering Sessions' graph plus one graph per IX."""
        full_graph = self._addGraph('All Peering Sessions', False)
        # Collect every IX that has RS or private peerings.
        ix_list = set()
        for (i, _) in self.__rs_peers:
            ix_list.add(i)
        for ((i, _, _), _) in self.__peerings.items():
            ix_list.add(i)
        for ix in ix_list:
            self._log('Creating RS peering sessions graph for IX{}...'.format(ix))
            ix_graph = self._addGraph('IX{} Peering Sessions'.format(ix), False)
            mesh_ases = set()
            for (i, a) in self.__rs_peers:
                if (i == ix):
                    mesh_ases.add(a)
            self._log('IX{} RS-mesh: {}'.format(ix, mesh_ases))
            # RS clients form a full mesh; pop-and-connect draws each edge once.
            while (len(mesh_ases) > 0):
                a = mesh_ases.pop()
                if (not full_graph.hasVertex('AS{}'.format(a), 'IX{}'.format(ix))):
                    full_graph.addVertex('AS{}'.format(a), 'IX{}'.format(ix))
                if (not ix_graph.hasVertex('AS{}'.format(a), 'IX{}'.format(ix))):
                    ix_graph.addVertex('AS{}'.format(a), 'IX{}'.format(ix))
                for b in mesh_ases:
                    if (not full_graph.hasVertex('AS{}'.format(b), 'IX{}'.format(ix))):
                        full_graph.addVertex('AS{}'.format(b), 'IX{}'.format(ix))
                    if (not ix_graph.hasVertex('AS{}'.format(b), 'IX{}'.format(ix))):
                        ix_graph.addVertex('AS{}'.format(b), 'IX{}'.format(ix))
                    full_graph.addEdge('AS{}'.format(a), 'AS{}'.format(b), 'IX{}'.format(ix), 'IX{}'.format(ix), style='dashed', alabel='R', blabel='R')
                    ix_graph.addEdge('AS{}'.format(a), 'AS{}'.format(b), 'IX{}'.format(ix), 'IX{}'.format(ix), style='dashed', alabel='R', blabel='R')
        # Private peerings: edge labels encode the relationship
        # (P=peer, U/C=provider/customer, X=unfiltered).
        for ((i, a, b), rel) in self.__peerings.items():
            self._log('Creating private peering sessions graph for IX{} AS{} <-> AS{}...'.format(i, a, b))
            ix_graph = self._addGraph('IX{} Peering Sessions'.format(i), False)
            if (not full_graph.hasVertex('AS{}'.format(a), 'IX{}'.format(i))):
                full_graph.addVertex('AS{}'.format(a), 'IX{}'.format(i))
            if (not ix_graph.hasVertex('AS{}'.format(a), 'IX{}'.format(i))):
                ix_graph.addVertex('AS{}'.format(a), 'IX{}'.format(i))
            if (not full_graph.hasVertex('AS{}'.format(b), 'IX{}'.format(i))):
                full_graph.addVertex('AS{}'.format(b), 'IX{}'.format(i))
            if (not ix_graph.hasVertex('AS{}'.format(b), 'IX{}'.format(i))):
                ix_graph.addVertex('AS{}'.format(b), 'IX{}'.format(i))
            if (rel == PeerRelationship.Peer):
                full_graph.addEdge('AS{}'.format(a), 'AS{}'.format(b), 'IX{}'.format(i), 'IX{}'.format(i), alabel='P', blabel='P')
                ix_graph.addEdge('AS{}'.format(a), 'AS{}'.format(b), 'IX{}'.format(i), 'IX{}'.format(i), alabel='P', blabel='P')
            if (rel == PeerRelationship.Provider):
                full_graph.addEdge('AS{}'.format(a), 'AS{}'.format(b), 'IX{}'.format(i), 'IX{}'.format(i), alabel='U', blabel='C')
                ix_graph.addEdge('AS{}'.format(a), 'AS{}'.format(b), 'IX{}'.format(i), 'IX{}'.format(i), alabel='U', blabel='C')
            if (rel == PeerRelationship.Unfiltered):
                full_graph.addEdge('AS{}'.format(a), 'AS{}'.format(b), 'IX{}'.format(i), 'IX{}'.format(i), alabel='X', blabel='X')
                ix_graph.addEdge('AS{}'.format(a), 'AS{}'.format(b), 'IX{}'.format(i), 'IX{}'.format(i), alabel='X', blabel='X')
        # Link vertices that represent the same AS at different IXes ('I' edges).
        es = list(full_graph.vertices.values())
        while (len(es) > 0):
            a = es.pop()
            for b in es:
                if (a.name == b.name):
                    full_graph.addEdge(a.name, b.name, a.group, b.group, style='dotted', alabel='I', blabel='I')

    def print(self, indent: int) -> str:
        """Render a human-readable summary of all configured peerings."""
        out = (' ' * indent)
        out += 'EbgpLayer:\n'
        indent += 4
        for (i, a) in self.__rs_peers:
            out += (' ' * indent)
            out += 'IX{}: RS <-> AS{}\n'.format(i, a)
        for ((i, a, b), rel) in self.__peerings.items():
            out += (' ' * indent)
            out += 'IX{}: AS{} <--({})--> AS{}\n'.format(i, a, rel, b)
        return out
def _array_select(ar, arg):
    """Build a Z3 `select` expression reading array `ar` at index/indices `arg`.

    A tuple argument selects over a multi-domain array (one cast per domain);
    anything else is cast against the single domain.
    """
    if not isinstance(arg, tuple):
        # Single-index case: cast against the array's only domain.
        idx = ar.domain().cast(arg)
        return _to_expr_ref(Z3_mk_select(ar.ctx_ref(), ar.as_ast(), idx.as_ast()), ar.ctx)
    # Multi-index case: cast each component against its matching domain sort.
    cast_indices = [ar.domain_n(pos).cast(value) for pos, value in enumerate(arg)]
    ast_array, count = _to_ast_array(cast_indices)
    return _to_expr_ref(Z3_mk_select_n(ar.ctx_ref(), ar.as_ast(), count, ast_array), ar.ctx)
@pytest.mark.parametrize('algorithm', ['SAMME', 'SAMME.R'])
def test_rusboost_sample_weight(imbalanced_dataset, algorithm):
    """Check RUSBoost's handling of `sample_weight`.

    Unit weights must reproduce the unweighted fit exactly, while random
    weights are expected to change the predictions.

    Fix: the parametrize decorator had lost its `@pytest.mark.` prefix, leaving
    a bare `.parametrize(...)` fragment.
    """
    X, y = imbalanced_dataset
    sample_weight = np.ones_like(y)
    rusboost = RUSBoostClassifier(algorithm=algorithm, random_state=0)
    # Unit weights behave exactly like passing no weights at all.
    y_pred_sample_weight = rusboost.fit(X, y, sample_weight).predict(X)
    y_pred_no_sample_weight = rusboost.fit(X, y).predict(X)
    assert_array_equal(y_pred_sample_weight, y_pred_no_sample_weight)
    # Non-trivial random weights should alter the fitted model's predictions.
    rng = np.random.RandomState(42)
    sample_weight = rng.rand(y.shape[0])
    y_pred_sample_weight = rusboost.fit(X, y, sample_weight).predict(X)
    with pytest.raises(AssertionError):
        assert_array_equal(y_pred_no_sample_weight, y_pred_sample_weight)
.script
# NOTE(review): `.script` above looks like a truncated decorator (presumably
# `@torch.jit.script`) lost in extraction — confirm against the original file.
def rpc_async_call_future_ret(dst_worker_name: str, args: Tuple[(Tensor, Tensor)], kwargs: Dict[(str, Tensor)]):
    """Issue an async RPC of `two_args_two_kwargs` to `dst_worker_name` and
    return the resulting Future object itself (not its awaited value)."""
    fut = rpc.rpc_async(dst_worker_name, two_args_two_kwargs, args, kwargs)
    return fut
def regnetx_800mf(in_channels, *args, **kwargs):
    """Build a RegNetX-800MF model.

    Uses the published RegNetX-800MF design parameters (depth 16, w_0=56,
    w_a=35.73, w_m=2.28, group width 16).

    Fix: `*args`/`**kwargs` were accepted but silently dropped; they are now
    forwarded to `regnet` so callers can pass extra construction options.

    Args:
        in_channels: number of input image channels.
        *args, **kwargs: forwarded verbatim to `regnet`.
    """
    d = 16
    w_0 = 56
    w_a = 35.73
    w_m = 2.28
    return regnet(in_channels, w_a, w_0, w_m, d, *args, gw=16, **kwargs)
class T5TokenizationTest(TokenizerTesterMixin, unittest.TestCase):
    """Tests for the sentencepiece-backed T5 tokenizer, driven by the shared
    SAMPLE_VOCAB fixture model."""

    tokenizer_class = T5Tokenizer

    def setUp(self):
        super(T5TokenizationTest, self).setUp()
        # Save a tokenizer built from the sample vocab so get_tokenizer() can
        # reload it via from_pretrained below.
        tokenizer = T5Tokenizer(SAMPLE_VOCAB)
        tokenizer.save_pretrained(self.tmpdirname)

    def get_tokenizer(self, **kwargs):
        """Reload the tokenizer that setUp saved to the temp directory."""
        return T5Tokenizer.from_pretrained(self.tmpdirname, **kwargs)

    def get_input_output_texts(self):
        # Round-trip sample used by the shared tester mixin.
        input_text = 'This is a test'
        output_text = 'This is a test'
        return (input_text, output_text)

    def test_full_tokenizer(self):
        """Check tokenize() and token<->id conversion against hard-coded
        expectations for the sample vocab."""
        tokenizer = T5Tokenizer(SAMPLE_VOCAB)
        tokens = tokenizer.tokenize('This is a test')
        self.assertListEqual(tokens, ['This', 'is', 'a', 't', 'est'])
        self.assertListEqual(tokenizer.convert_tokens_to_ids(tokens), [285, 46, 10, 170, 382])
        tokens = tokenizer.tokenize('I was born in 92000, and this is false.')
        self.assertListEqual(tokens, [(SPIECE_UNDERLINE + 'I'), (SPIECE_UNDERLINE + 'was'), (SPIECE_UNDERLINE + 'b'), 'or', 'n', (SPIECE_UNDERLINE + 'in'), (SPIECE_UNDERLINE + ''), '9', '2', '0', '0', '0', ',', (SPIECE_UNDERLINE + 'and'), (SPIECE_UNDERLINE + 'this'), (SPIECE_UNDERLINE + 'is'), (SPIECE_UNDERLINE + 'f'), 'al', 's', 'e', '.'])
        ids = tokenizer.convert_tokens_to_ids(tokens)
        # Pieces outside the sample vocab ('9', 'e') map to id 0 (<unk>).
        self.assertListEqual(ids, [8, 21, 84, 55, 24, 19, 7, 0, 602, 347, 347, 347, 3, 12, 66, 46, 72, 80, 6, 0, 4])
        back_tokens = tokenizer.convert_ids_to_tokens(ids)
        # Converting back replaces out-of-vocab ids with the '<unk>' piece.
        self.assertListEqual(back_tokens, [(SPIECE_UNDERLINE + 'I'), (SPIECE_UNDERLINE + 'was'), (SPIECE_UNDERLINE + 'b'), 'or', 'n', (SPIECE_UNDERLINE + 'in'), (SPIECE_UNDERLINE + ''), '<unk>', '2', '0', '0', '0', ',', (SPIECE_UNDERLINE + 'and'), (SPIECE_UNDERLINE + 'this'), (SPIECE_UNDERLINE + 'is'), (SPIECE_UNDERLINE + 'f'), 'al', 's', '<unk>', '.'])
@pytest.mark.parametrize('tensorlib', ['numpy_backend', 'jax_backend', 'pytorch_backend', 'tensorflow_backend'])
@pytest.mark.parametrize('precision', ['64b', '32b'])
def test_set_tensor_precision(tensorlib, precision):
    """Check that constructing a backend with a given precision sets its dtypes.

    Fixes: the two parametrize decorators had lost their `@pytest.mark.`
    prefix, and `precision[:1]` compared only the first digit ('float6' is a
    substring of 'float64', so the old check was accidentally weak) — strip the
    trailing 'b' instead to compare the full bit width.
    """
    tb = getattr(pyhf.tensor, tensorlib)(precision=precision)
    assert tb.precision == precision
    bits = precision[:-1]  # '64b' -> '64', '32b' -> '32'
    assert f'float{bits}' in str(tb.dtypemap['float'])
    assert f'int{bits}' in str(tb.dtypemap['int'])
def test_yaml_script_consistency(recipe_folder='tests/recipes'):
    """Walk every recipe CSV in `recipe_folder` and assert that each listed
    hyperparameter YAML file is consistent with its training script."""
    # NOTE(review): `avoid_check` is read below but never appended to, so the
    # skip is currently dead code — presumably it was meant to cache
    # already-checked hparam files. Confirm intent before removing or wiring up.
    avoid_check = []
    for recipe_csvfile in os.listdir(recipe_folder):
        # Recipes known to be exempt from this consistency check.
        if (recipe_csvfile in __skip_list):
            continue
        with open(os.path.join(recipe_folder, recipe_csvfile), newline='') as csvfile:
            check = True
            reader = csv.DictReader(csvfile, delimiter=',', skipinitialspace=True)
            for row in reader:
                if (row['Hparam_file'] in avoid_check):
                    continue
                # Record failures but keep scanning so every row is exercised
                # before the assertion fires.
                if (not check_yaml_vs_script(row['Hparam_file'], row['Script_file'])):
                    check = False
            assert check
class SigmoidFocalLoss(nn.Module):
    """Sigmoid focal loss module.

    Dispatches to the CUDA or CPU kernel depending on where the logits live
    and returns the summed per-element loss.
    """

    def __init__(self, gamma, alpha):
        super(SigmoidFocalLoss, self).__init__()
        # Focal-loss focusing parameter and class-balance weight.
        self.gamma = gamma
        self.alpha = alpha

    def forward(self, logits, targets):
        device = logits.device  # unused; kept from the original implementation
        compute = sigmoid_focal_loss_cuda if logits.is_cuda else sigmoid_focal_loss_cpu
        per_element = compute(logits, targets, self.gamma, self.alpha)
        return per_element.sum()

    def __repr__(self):
        return '{}(gamma={}, alpha={})'.format(
            self.__class__.__name__, str(self.gamma), str(self.alpha)
        )
def list_available_packages(*, session):
    """Print the name and size of every package available for download,
    marking third-party packages."""
    # Fetch first (same call order as before any output is produced).
    packages = get_available_packages(session=session)
    print('The following packages are available for download.')
    print('Please refer to for additional packages and instructions on properly citing third party packages.')
    for pkg in packages:
        line = ' {} -> {}'.format(pkg['name'], pkg['size'])
        if pkg['thirdparty'] == '1':
            line += ' (third party)'
        print(line)
def clip_loss(similarity: torch.Tensor) -> torch.Tensor:
    """Symmetric CLIP loss: mean of the contrastive loss over captions
    (rows) and over images (columns, via the transpose)."""
    caption_term = contrastive_loss(similarity)
    image_term = contrastive_loss(similarity.T)
    return 0.5 * (caption_term + image_term)
class _CheckNoPythonCalls():
    """Context manager asserting that no Python-level call is traced inside
    the `with` block.

    Installs a trace function via sys.settrace; every traced event not on the
    ignore list increments `num_calls`, and __exit__ asserts the count is zero.
    """

    def __init__(self):
        self.num_calls = 0
        # Pre-existing trace function, restored on exit.
        self.old_tracefunc = None

    def _tracefunc(self, frame, event, arg):
        # Trace callback active for the duration of the `with` block.
        print('*** trace:', frame, event, arg)
        if (frame.f_globals is vars(typing)):
            # Frames executing inside the typing module are not counted.
            print(' (ignore typing module)')
            return
        if (frame.f_code is Tensor.__init__.__code__):
            print(' (ignoring Tensor.__init__ for now, remains to be implemented...)')
            return
        if (frame.f_code is _CheckNoPythonCalls.__exit__.__code__):
            # Leaving the `with` block necessarily fires a call event for
            # __exit__ itself; don't count it.
            print(' (ignoring _CheckNoPythonCalls.__exit__)')
            return
        self.num_calls += 1

    def __enter__(self):
        # Remember any previously installed trace function so nesting works.
        self.old_tracefunc = sys.gettrace()
        sys.settrace(self._tracefunc)
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        sys.settrace(self.old_tracefunc)
        assert (self.num_calls == 0)
class ClusterGraph(UndirectedGraph):
    """Undirected graph whose nodes are cliques (tuples of variables) and
    whose edges connect cliques sharing at least one variable (a sepset).
    Factors are attached to the graph, one expected per clique."""

    def __init__(self, ebunch=None):
        # `ebunch`: optional iterable of edges (pairs of cliques) to seed the graph.
        super(ClusterGraph, self).__init__()
        if ebunch:
            self.add_edges_from(ebunch)
        # Factors attached to the model; see add_factors/check_model.
        self.factors = []

    def add_node(self, node, **kwargs):
        """Add one clique node; `node` must be a collection of variable names."""
        if (not isinstance(node, (list, set, tuple))):
            raise TypeError('Node can only be a list, set or tuple of nodes forming a clique')
        # Normalize to a tuple so the node is hashable.
        node = tuple(node)
        super(ClusterGraph, self).add_node(node, **kwargs)

    def add_nodes_from(self, nodes, **kwargs):
        """Add multiple clique nodes (see add_node)."""
        for node in nodes:
            self.add_node(node, **kwargs)

    def add_edge(self, u, v, **kwargs):
        """Add an edge between cliques `u` and `v`; they must share a variable."""
        set_u = set(u)
        set_v = set(v)
        if set_u.isdisjoint(set_v):
            raise ValueError('No sepset found between these two edges.')
        # NOTE(review): **kwargs is accepted but not forwarded to the
        # superclass — confirm whether that is intentional.
        super(ClusterGraph, self).add_edge(u, v)

    def add_factors(self, *factors):
        """Attach factors; each factor's scope must exactly match some clique."""
        for factor in factors:
            factor_scope = set(factor.scope())
            nodes = [set(node) for node in self.nodes()]
            if (factor_scope not in nodes):
                raise ValueError('Factors defined on clusters of variable notpresent in model')
            self.factors.append(factor)

    def get_factors(self, node=None):
        """Return all factors, or the first factor whose scope equals `node`."""
        if (node is None):
            return self.factors
        else:
            nodes = [set(n) for n in self.nodes()]
            if (set(node) not in nodes):
                raise ValueError('Node not present in Cluster Graph')
            factors = filter((lambda x: (set(x.scope()) == set(node))), self.factors)
            return next(factors)

    def remove_factors(self, *factors):
        """Detach the given factors from the model."""
        for factor in factors:
            self.factors.remove(factor)

    def get_cardinality(self, node=None):
        """Return the cardinality of variable `node`, or a dict mapping every
        variable seen in a factor to its cardinality.

        NOTE(review): when `node` is given but appears in no factor this falls
        through and returns None — confirm callers expect that.
        """
        if node:
            for factor in self.factors:
                for (variable, cardinality) in zip(factor.scope(), factor.cardinality):
                    if (node == variable):
                        return cardinality
        else:
            cardinalities = defaultdict(int)
            for factor in self.factors:
                for (variable, cardinality) in zip(factor.scope(), factor.cardinality):
                    cardinalities[variable] = cardinality
            return cardinalities

    def get_partition_function(self):
        """Sum over all states of the product of all factors (after validating
        the model via check_model)."""
        if self.check_model():
            factor = self.factors[0]
            factor = factor_product(factor, *[self.factors[i] for i in range(1, len(self.factors))])
            return np.sum(factor.values)

    def check_model(self):
        """Validate the model: every clique has a factor, every variable has a
        factor-consistent cardinality. Raises ValueError otherwise."""
        for clique in self.nodes():
            factors = filter((lambda x: (set(x.scope()) == set(clique))), self.factors)
            if (not any(factors)):
                raise ValueError('Factors for all the cliques or clusters not defined.')
        cardinalities = self.get_cardinality()
        # Every variable appearing in a clique must appear in some factor.
        if (len(set((x for clique in self.nodes() for x in clique))) != len(cardinalities)):
            raise ValueError('Factors for all the variables not defined.')
        for factor in self.factors:
            for (variable, cardinality) in zip(factor.scope(), factor.cardinality):
                if (cardinalities[variable] != cardinality):
                    raise ValueError(f'Cardinality of variable {variable} not matching among factors')
        return True

    def copy(self):
        """Return a copy with the same edges and copied factors."""
        copy = ClusterGraph(self.edges())
        if self.factors:
            factors_copy = [factor.copy() for factor in self.factors]
            copy.add_factors(*factors_copy)
        return copy
def attention(idx, cnt):
    """Return (HuggingFace key, original checkpoint key) rename pairs for one
    CvT attention block.

    Covers, in order: the q/k/v convolutional projections (conv weight plus
    the five batch-norm params/buffers each), the linear q/k/v projections,
    the attention output projection, the two MLP linears, and both layer
    norms — 34 pairs total.

    Fix: the original was 34 near-identical hand-written `append` lines;
    this builds the same list (same order, same strings) from data-driven
    loops so a missing/typo'd key is impossible.

    Args:
        idx: stage index.
        cnt: block index within the stage.
    """
    hf_prefix = f'cvt.encoder.stages.{idx}.layers.{cnt}'
    orig_prefix = f'stage{idx}.blocks.{cnt}'
    attention_weights = []
    # Convolutional q/k/v projections: conv weight + 5 batch-norm entries each.
    bn_params = ('weight', 'bias', 'running_mean', 'running_var', 'num_batches_tracked')
    for name, short in (('query', 'q'), ('key', 'k'), ('value', 'v')):
        hf_conv = f'{hf_prefix}.attention.attention.convolution_projection_{name}.convolution_projection'
        orig_conv = f'{orig_prefix}.attn.conv_proj_{short}'
        attention_weights.append((f'{hf_conv}.convolution.weight', f'{orig_conv}.conv.weight'))
        for param in bn_params:
            attention_weights.append((f'{hf_conv}.normalization.{param}', f'{orig_conv}.bn.{param}'))
    # Linear q/k/v projections (weight and bias each).
    for name, short in (('query', 'q'), ('key', 'k'), ('value', 'v')):
        for param in ('weight', 'bias'):
            attention_weights.append((f'{hf_prefix}.attention.attention.projection_{name}.{param}', f'{orig_prefix}.attn.proj_{short}.{param}'))
    # Attention output projection, MLP linears, and both layer norms.
    simple_pairs = (
        ('attention.output.dense', 'attn.proj'),
        ('intermediate.dense', 'mlp.fc1'),
        ('output.dense', 'mlp.fc2'),
        ('layernorm_before', 'norm1'),
        ('layernorm_after', 'norm2'),
    )
    for hf_name, orig_name in simple_pairs:
        for param in ('weight', 'bias'):
            attention_weights.append((f'{hf_prefix}.{hf_name}.{param}', f'{orig_prefix}.{orig_name}.{param}'))
    return attention_weights
def _get_fig_filename(ebase, images_dir, suffix):
    """Return the path of the PNG figure derived from example base name
    `ebase`, with `suffix` appended before the extension."""
    fig_base = ebase2fbase(ebase)
    filename = fig_base + suffix + '.png'
    return os.path.join(images_dir, filename)
_utils.test()
# NOTE(review): the line above looks like a truncated decorator (e.g.
# `@test_utils.test()`), and `foo` below likely lost a `@ti.kernel` decorator
# too — confirm against the original file.
def test_check_dual_vector_field_not_placed():
    """A field created with needs_dual=True must raise if its dual field is
    never placed before a kernel runs."""
    b = ti.Vector.field(3, ti.f32, needs_dual=True)
    # Place only the primal field; the dual counterpart is deliberately left unplaced.
    ti.root.dense(ti.i, 1).place(b)

    def foo():
        pass

    # The typo 'requrie' is in the library's own error message.
    with pytest.raises(RuntimeError, match='These field\\(s\\) requrie `needs_dual=True`, however their dual field\\(s\\) are not placed.*'):
        foo()
((not workspace.C.use_mkldnn), 'No MKLDNN support.')
# NOTE(review): the bare tuple above and the bare argument lists before each
# test method look like decorators (`@unittest.skipIf(...)`, `@given(...)`,
# `@settings(...)`) whose names were lost in extraction — confirm against the
# original file.
class TestConcatSplitOps(hu.HypothesisTestCase):
    """Device and gradient checks for the caffe2 Concat and Split operators."""

    (tensor_splits=_tensor_splits(), **mu.gcs)
    (deadline=10000)
    def test_concat(self, tensor_splits, gc, dc):
        """Concat along `axis` must pass device and gradient checks."""
        (axis, _, splits) = tensor_splits
        op = core.CreateOperator('Concat', ['X_{}'.format(i) for i in range(len(splits))], ['concat_result', 'split_info'], axis=axis)
        self.assertDeviceChecks(dc, op, splits, [0, 1])
        self.assertGradientChecks(gc, op, splits, 0, [0])

    (tensor_splits=_tensor_splits(), split_as_arg=st.booleans(), **mu.gcs)
    (deadline=10000)
    def test_split(self, tensor_splits, split_as_arg, gc, dc):
        """Split must pass device/gradient checks with split sizes given either
        as an operator argument or as a second input blob."""
        (axis, split_info, splits) = tensor_splits
        # NOTE(review): this override forces the argument path and makes the
        # drawn `split_as_arg` parameter dead — looks like a debug leftover.
        split_as_arg = True
        if split_as_arg:
            input_names = ['input']
            input_tensors = [np.concatenate(splits, axis=axis)]
            kwargs = dict(axis=axis, split=split_info)
        else:
            input_names = ['input', 'split']
            input_tensors = [np.concatenate(splits, axis=axis), split_info]
            kwargs = dict(axis=axis)
        op = core.CreateOperator('Split', input_names, ['X_{}'.format(i) for i in range(len(split_info))], **kwargs)

        def split_ref(input, split=split_info):
            # Reference implementation: cumulative offsets, then slice per split.
            # NOTE(review): defined but never passed to a reference check —
            # confirm whether an assertReferenceChecks call went missing.
            s = np.cumsum(([0] + list(split)))
            return [np.array(input.take(np.arange(s[i], s[(i + 1)]), axis=axis)) for i in range(len(split))]
        outputs_with_grad = range(len(split_info))
        self.assertDeviceChecks(dc, op, input_tensors, outputs_with_grad)
        self.assertGradientChecks(gc, op, input_tensors, 0, outputs_with_grad)

    (tensor_splits=_tensor_splits(add_axis=True), **mu.gcs)
    (deadline=10000)
    def test_concat_add_axis(self, tensor_splits, gc, dc):
        """Concat with add_axis=1 must pass device checks and a gradient check
        per input."""
        (axis, _, splits) = tensor_splits
        op = core.CreateOperator('Concat', ['X_{}'.format(i) for i in range(len(splits))], ['concat_result', 'split_info'], axis=axis, add_axis=1)
        self.assertDeviceChecks(dc, op, splits, [0, 1])
        for i in range(len(splits)):
            self.assertGradientChecks(gc, op, splits, i, [0])

    (tensor_splits=_tensor_splits(add_axis=True), **mu.gcs)
    def test_concat_with_TensorCPU(self, tensor_splits, gc, dc):
        """Run the same Concat on both devices in `dc` and require
        bit-identical results and split info."""
        (axis, _, splits) = tensor_splits
        op0 = core.CreateOperator('Concat', ['X_{}'.format(i) for i in range(len(splits))], ['concat_result0', 'split_info0'], axis=axis, add_axis=1, device_option=dc[0])
        op1 = core.CreateOperator('Concat', ['X_{}'.format(i) for i in range(len(splits))], ['concat_result1', 'split_info1'], axis=axis, add_axis=1, device_option=dc[1])
        for (i, X) in enumerate(splits):
            workspace.FeedBlob('X_{}'.format(i), X, dc[0])
        workspace.RunOperatorOnce(op0)
        res0 = workspace.FetchBlob('concat_result0')
        inf0 = workspace.FetchBlob('split_info0')
        workspace.RunOperatorOnce(op1)
        res1 = workspace.FetchBlob('concat_result1')
        inf1 = workspace.FetchBlob('split_info1')
        # atol=rtol=0 -> exact equality required across devices; dump the
        # mismatch before failing to aid debugging.
        if (not np.allclose(res0, res1, atol=0.0, rtol=0.0)):
            print(res1.flatten())
            print(res0.flatten())
            print(np.max(np.abs((res1 - res0))))
            self.assertTrue(False)
        if (not np.allclose(inf0, inf1, atol=0.0, rtol=0.0)):
            print(inf1.flatten())
            print(inf0.flatten())
            print(np.max(np.abs((inf1 - inf0))))
            self.assertTrue(False)
def device_count():
    """Return the number of visible CUDA devices, or 0 when CUDA is
    unavailable."""
    if not is_available():
        return 0
    return torch._C._cuda_getDeviceCount()
class LogSummary(object):
    """Thin wrapper around a tensorboard SummaryWriter for scalar and
    parameter-histogram logging."""

    def __init__(self, log_path):
        # Make sure the log directory exists before the writer opens it.
        mkdirs(log_path)
        self.writer = SummaryWriter(log_path)

    def write_scalars(self, scalar_dict, n_iter, tag=None):
        """Log each named scalar at step `n_iter`, optionally under `tag/`."""
        for name, value in scalar_dict.items():
            full_name = name if tag is None else '/'.join([tag, name])
            self.writer.add_scalar(full_name, value, n_iter)

    def write_hist_parameters(self, net, n_iter):
        """Log a histogram of every named parameter of `net` at step `n_iter`."""
        for name, param in net.named_parameters():
            self.writer.add_histogram(name, param.clone().cpu().numpy(), n_iter)
def empty_function():
    """Build a BranchCoverageTestFitness wired to mock executor and goal."""
    executor_mock = MagicMock(TestCaseExecutor)
    goal_mock = MagicMock()
    return bg.BranchCoverageTestFitness(executor_mock, goal_mock)
class TestFireReset():
    """Check that AtariEnv wrapping converts observations to numpy arrays."""

    def test_atari_env(self):
        raw_env = DummyDiscretePixelEnvBaselines()
        wrapped_env = AtariEnv(raw_env)
        # reset(): raw env yields a non-array observation, wrapped env an ndarray.
        raw_obs = raw_env.reset()
        wrapped_obs = wrapped_env.reset()
        assert not isinstance(raw_obs, np.ndarray)
        assert isinstance(wrapped_obs, np.ndarray)
        # step(): same conversion guarantee.
        raw_obs, _, _, _ = raw_env.step(1)
        wrapped_obs, _, _, _ = wrapped_env.step(1)
        assert not isinstance(raw_obs, np.ndarray)
        assert isinstance(wrapped_obs, np.ndarray)
def mask_tokens(inputs, labels, tokenizer, mlm_probability, mlm_ignore_index):
    """Prepare masked-language-modeling inputs/labels in place.

    Selects positions with probability `mlm_probability` (never special
    tokens); unselected label positions get `mlm_ignore_index`. Of the
    selected positions, 80% become the mask token, half of the remainder
    (10% overall) become random vocabulary ids, and the rest keep their
    original token. Returns the mutated (inputs, labels).

    Note: the order of the random draws below is preserved exactly so results
    match the original under a fixed torch seed.
    """
    mask_probs = torch.full(labels.shape, mlm_probability)
    # Never mask special tokens (CLS/SEP/pad, etc.).
    special_mask = [
        tokenizer.get_special_tokens_mask(ids, already_has_special_tokens=True)
        for ids in labels.tolist()
    ]
    mask_probs.masked_fill_(torch.tensor(special_mask, dtype=torch.bool), value=0.0)
    masked_positions = torch.bernoulli(mask_probs).bool()
    # Loss is computed only on masked positions.
    labels[~masked_positions] = mlm_ignore_index
    # 80% of masked positions -> [MASK].
    replace_with_mask = torch.bernoulli(torch.full(labels.shape, 0.8)).bool() & masked_positions
    inputs[replace_with_mask] = tokenizer.convert_tokens_to_ids(tokenizer.mask_token)
    # Half of the remaining 20% -> a random token (10% overall).
    replace_with_random = (
        torch.bernoulli(torch.full(labels.shape, 0.5)).bool()
        & masked_positions
        & ~replace_with_mask
    )
    random_tokens = torch.randint(len(tokenizer), labels.shape, dtype=torch.long)
    inputs[replace_with_random] = random_tokens[replace_with_random]
    return (inputs, labels)
def f1_avg(rec, prec):
    """Average F1 over the keys of `rec`/`prec`.

    Each value is a list whose first element is the score (an empty list
    counts as 0). Keys where both scores are 0 contribute an F1 of 0.
    """
    scores = []
    for key, rec_list in rec.items():
        recall = rec_list[0] if rec_list else 0
        precision = prec[key][0] if prec[key] else 0
        if precision == 0 and recall == 0:
            scores.append(0)  # avoid division by zero
        else:
            scores.append((2 * (recall * precision)) / (recall + precision))
    return np.mean(scores)
class DictionaryTagger(Tagger):
    """Tag sentences in a document by matching n-grams against dictionaries."""

    def __init__(self, dictionaries, min_length=2, longest_match_only=True, stopwords=None, split_on=None):
        """
        Args:
            dictionaries: term dictionaries to match against.
            min_length: minimum match length passed to the matcher.
            longest_match_only: prefer the longest of overlapping matches.
            stopwords: terms to ignore (default: none). Fix: the default was a
                shared mutable `{}`; use a None sentinel and build a fresh
                dict per instance instead.
            split_on: token-split pattern forwarded to the n-gram generator.
        """
        self.dictionaries = dictionaries
        self.longest_match_only = longest_match_only
        self.min_length = min_length
        self.stopwords = {} if stopwords is None else stopwords
        self.split_on = split_on

    def tag(self, document, ngrams=5):
        """Annotate `document` in place with dictionary matches per sentence
        (keyed by sentence position)."""
        candgen = Ngrams(n_max=ngrams, split_on=self.split_on)
        for sent in document.sentences:
            m = dict_matcher(sent, candgen, self.dictionaries, min_length=self.min_length, stopwords=self.stopwords)
            if m:
                if (sent.position not in document.annotations):
                    # Defensive: annotation slots should exist for every sentence.
                    print('ERROR - sent.position not in doc annotations', sent.position, dict(m))
                    continue
                document.annotations[sent.position].update(dict(m))
def convert_2_sec(fname, outname):
    """Convert a transfer CSV with string datetimes into one with epoch seconds.

    Reads `fname` (columns: timestamp, user_address, token_address, value,
    IsSender), drops the header, truncates each timestamp to second
    resolution, skips zero-value rows, and writes the result to `outname`.

    Fixes: both files are now opened with newline='' as the csv module
    requires (prevents blank/mangled rows on Windows); the manual header
    counter is replaced with next(); the local `format` no longer shadows
    the builtin.

    Args:
        fname: path of the input CSV file.
        outname: path of the output CSV file (overwritten).
    """
    time_format = '%Y-%m-%d %H:%M:%S'
    with open(fname, 'r', newline='') as csv_file:
        csv_reader = csv.reader(csv_file, delimiter=',')
        with open(outname, 'w', newline='') as out_file:
            csv_writer = csv.writer(out_file, delimiter=',')
            csv_writer.writerow(['timestamp', 'user_address', 'token_address', 'value', 'IsSender'])
            next(csv_reader, None)  # skip the input header row
            for row in csv_reader:
                # Keep only 'YYYY-mm-dd HH:MM:SS' (drop fractional seconds / tz suffix).
                date_object = datetime.datetime.strptime(row[0][:19], time_format)
                timestamp_sec = int(date_object.timestamp())
                src = row[1]
                dst = row[2]
                value = float(row[3])
                is_sender = int(row[4])
                if value != 0:  # drop zero-value transfers
                    csv_writer.writerow([timestamp_sec, src, dst, value, is_sender])
def write_exports(exports, stream):
    """Serialize export entries to `stream` in ini format.

    `exports` maps section name -> {key: entry}; each entry contributes a
    'name = prefix[:suffix] [flags...]' line in its section.
    """
    if sys.version_info[0] >= 3:
        # On Python 3 the caller hands us a binary stream; wrap for utf-8 text.
        stream = codecs.getwriter('utf-8')(stream)
    cp = configparser.ConfigParser()
    for section, entries in exports.items():
        cp.add_section(section)
        for entry in entries.values():
            if entry.suffix is None:
                value = entry.prefix
            else:
                value = '%s:%s' % (entry.prefix, entry.suffix)
            if entry.flags:
                value = '%s [%s]' % (value, ', '.join(entry.flags))
            cp.set(section, entry.name, value)
    cp.write(stream)
def _get_int_replacement(tok: Token) -> List[Token]:
    """Produce alternative integer-literal tokens near the value of `tok`.

    For an integer literal v, draws NUM_ALTERNATIVES random ints from
    [-|v|-1, |v|] (keeping those != v) and always adds v+1 and v-1, then
    deduplicates. Non-integer tokens yield an empty list.
    """
    candidates = []
    if tok.ttype == tokens.Token.Literal.Number.Integer:
        value = int(tok.value)
        drawn = np.random.randint((-np.abs(value)) - 1, np.abs(value) + 1, NUM_ALTERNATIVES)
        for sample in drawn:
            if sample != value:
                candidates.append(sample)
        candidates.append(value + 1)
        candidates.append(value - 1)
    return [Token(tok.ttype, str(alt)) for alt in set(candidates)]
class NullContextManager(object):
    """A no-op context manager that just hands back a preset resource
    (same idea as contextlib.nullcontext)."""

    def __init__(self, dummy_resource=None):
        # The object to yield from __enter__; defaults to None.
        self.dummy_resource = dummy_resource

    def __enter__(self):
        return self.dummy_resource

    def __exit__(self, *args):
        # Nothing to clean up; exceptions propagate (implicit falsy return).
        pass
class SpeechEncoderDecoderConfig(PretrainedConfig):
    """Composite configuration holding an `encoder` and a `decoder`
    sub-configuration for speech encoder-decoder models."""

    model_type = 'speech-encoder-decoder'
    is_composition = True

    def __init__(self, **kwargs):
        """Build from `encoder` and `decoder` sub-config dicts (both required;
        each must carry a `model_type` key for AutoConfig dispatch).

        Raises:
            ValueError: if either sub-configuration is missing.
        """
        super().__init__(**kwargs)
        if ('encoder' not in kwargs) or ('decoder' not in kwargs):
            raise ValueError(f'A configuraton of type {self.model_type} cannot be instantiated because not both `encoder` and `decoder` sub-configurations are passed, but only {kwargs}')
        encoder_config = kwargs.pop('encoder')
        encoder_model_type = encoder_config.pop('model_type')
        decoder_config = kwargs.pop('decoder')
        decoder_model_type = decoder_config.pop('model_type')
        self.encoder = AutoConfig.for_model(encoder_model_type, **encoder_config)
        self.decoder = AutoConfig.for_model(decoder_model_type, **decoder_config)
        self.is_encoder_decoder = True

    @classmethod
    def from_encoder_decoder_configs(cls, encoder_config: PretrainedConfig, decoder_config: PretrainedConfig, **kwargs) -> PretrainedConfig:
        """Alternate constructor from two configs; marks the decoder as a
        cross-attending decoder.

        Fix: this method takes `cls` and is an alternate constructor, but the
        `@classmethod` decorator was missing — without it the method could only
        be called on an instance with a shifted signature.
        """
        logger.info('Setting `config.is_decoder=True` and `config.add_cross_attention=True` for decoder_config')
        decoder_config.is_decoder = True
        decoder_config.add_cross_attention = True
        return cls(encoder=encoder_config.to_dict(), decoder=decoder_config.to_dict(), **kwargs)

    def to_dict(self):
        """Serialize to a dict, expanding both sub-configs and recording the
        composite model_type."""
        output = copy.deepcopy(self.__dict__)
        output['encoder'] = self.encoder.to_dict()
        output['decoder'] = self.decoder.to_dict()
        output['model_type'] = self.__class__.model_type
        return output
def register_types(module): root_module = module.get_root() module.add_enum('TypeOfStation', ['STA', 'AP', 'ADHOC_STA', 'MESH', 'HT_STA', 'HT_AP', 'HT_ADHOC_STA', 'OCB'], import_from_module='ns.wifi') module.add_enum('WifiMacType', ['WIFI_MAC_CTL_CTLWRAPPER', 'WIFI_MAC_CTL_RTS', 'WIFI_MAC_CTL_CTS', 'WIFI_MAC_CTL_ACK', 'WIFI_MAC_CTL_BACKREQ', 'WIFI_MAC_CTL_BACKRESP', 'WIFI_MAC_CTL_END', 'WIFI_MAC_CTL_END_ACK', 'WIFI_MAC_MGT_BEACON', 'WIFI_MAC_MGT_ASSOCIATION_REQUEST', 'WIFI_MAC_MGT_ASSOCIATION_RESPONSE', 'WIFI_MAC_MGT_DISASSOCIATION', 'WIFI_MAC_MGT_REASSOCIATION_REQUEST', 'WIFI_MAC_MGT_REASSOCIATION_RESPONSE', 'WIFI_MAC_MGT_PROBE_REQUEST', 'WIFI_MAC_MGT_PROBE_RESPONSE', 'WIFI_MAC_MGT_AUTHENTICATION', 'WIFI_MAC_MGT_DEAUTHENTICATION', 'WIFI_MAC_MGT_ACTION', 'WIFI_MAC_MGT_ACTION_NO_ACK', 'WIFI_MAC_MGT_MULTIHOP_ACTION', 'WIFI_MAC_DATA', 'WIFI_MAC_DATA_CFACK', 'WIFI_MAC_DATA_CFPOLL', 'WIFI_MAC_DATA_CFACK_CFPOLL', 'WIFI_MAC_DATA_NULL', 'WIFI_MAC_DATA_NULL_CFACK', 'WIFI_MAC_DATA_NULL_CFPOLL', 'WIFI_MAC_DATA_NULL_CFACK_CFPOLL', 'WIFI_MAC_QOSDATA', 'WIFI_MAC_QOSDATA_CFACK', 'WIFI_MAC_QOSDATA_CFPOLL', 'WIFI_MAC_QOSDATA_CFACK_CFPOLL', 'WIFI_MAC_QOSDATA_NULL', 'WIFI_MAC_QOSDATA_NULL_CFPOLL', 'WIFI_MAC_QOSDATA_NULL_CFACK_CFPOLL'], import_from_module='ns.wifi') module.add_enum('AcIndex', ['AC_BE', 'AC_BK', 'AC_VI', 'AC_VO', 'AC_BE_NQOS', 'AC_UNDEF'], import_from_module='ns.wifi') module.add_enum('WifiPreamble', ['WIFI_PREAMBLE_LONG', 'WIFI_PREAMBLE_SHORT', 'WIFI_PREAMBLE_HT_MF', 'WIFI_PREAMBLE_HT_GF', 'WIFI_PREAMBLE_VHT', 'WIFI_PREAMBLE_HE_SU', 'WIFI_PREAMBLE_HE_ER_SU', 'WIFI_PREAMBLE_HE_MU', 'WIFI_PREAMBLE_HE_TB', 'WIFI_PREAMBLE_NONE'], import_from_module='ns.wifi') module.add_enum('BlockAckType', ['BASIC_BLOCK_ACK', 'COMPRESSED_BLOCK_ACK', 'EXTENDED_COMPRESSED_BLOCK_ACK', 'MULTI_TID_BLOCK_ACK'], import_from_module='ns.wifi') module.add_enum('WifiModulationClass', ['WIFI_MOD_CLASS_UNKNOWN', 'WIFI_MOD_CLASS_IR', 'WIFI_MOD_CLASS_FHSS', 'WIFI_MOD_CLASS_DSSS', 
'WIFI_MOD_CLASS_HR_DSSS', 'WIFI_MOD_CLASS_ERP_PBCC', 'WIFI_MOD_CLASS_DSSS_OFDM', 'WIFI_MOD_CLASS_ERP_OFDM', 'WIFI_MOD_CLASS_OFDM', 'WIFI_MOD_CLASS_HT', 'WIFI_MOD_CLASS_VHT', 'WIFI_MOD_CLASS_HE'], import_from_module='ns.wifi') module.add_enum('WifiCodeRate', ['WIFI_CODE_RATE_UNDEFINED', 'WIFI_CODE_RATE_3_4', 'WIFI_CODE_RATE_2_3', 'WIFI_CODE_RATE_1_2', 'WIFI_CODE_RATE_5_6'], import_from_module='ns.wifi') module.add_enum('WifiPhyStandard', ['WIFI_PHY_STANDARD_80211a', 'WIFI_PHY_STANDARD_80211b', 'WIFI_PHY_STANDARD_80211g', 'WIFI_PHY_STANDARD_80211_10MHZ', 'WIFI_PHY_STANDARD_80211_5MHZ', 'WIFI_PHY_STANDARD_holland', 'WIFI_PHY_STANDARD_80211n_2_4GHZ', 'WIFI_PHY_STANDARD_80211n_5GHZ', 'WIFI_PHY_STANDARD_80211ac', 'WIFI_PHY_STANDARD_80211ax_2_4GHZ', 'WIFI_PHY_STANDARD_80211ax_5GHZ', 'WIFI_PHY_STANDARD_UNSPECIFIED'], import_from_module='ns.wifi') module.add_enum('HtProtectionType', ['NO_PROTECTION', 'NON_MEMBER_PROTECTION', 'TWENTY_MHZ_PROTECTION', 'MIXED_MODE_PROTECTION'], import_from_module='ns.wifi') module.add_class('Address', import_from_module='ns.network') module.add_enum('MaxSize_e', ['MAX_SIZE'], outer_class=root_module['ns3::Address'], import_from_module='ns.network') module.add_class('AttributeConstructionList', import_from_module='ns.core') module.add_class('Item', import_from_module='ns.core', outer_class=root_module['ns3::AttributeConstructionList']) typehandlers.add_type_alias(u'std::list< ns3::AttributeConstructionList::Item > const_iterator', u'ns3::AttributeConstructionList::CIterator') typehandlers.add_type_alias(u'std::list< ns3::AttributeConstructionList::Item > const_iterator*', u'ns3::AttributeConstructionList::CIterator*') typehandlers.add_type_alias(u'std::list< ns3::AttributeConstructionList::Item > const_iterator&', u'ns3::AttributeConstructionList::CIterator&') module.add_class('Bar', import_from_module='ns.wifi') module.add_class('BlockAckAgreement', import_from_module='ns.wifi') module.add_class('Buffer', import_from_module='ns.network') 
module.add_class('Iterator', import_from_module='ns.network', outer_class=root_module['ns3::Buffer']) module.add_class('ByteTagIterator', import_from_module='ns.network') module.add_class('Item', import_from_module='ns.network', outer_class=root_module['ns3::ByteTagIterator']) module.add_class('ByteTagList', import_from_module='ns.network') module.add_class('Iterator', import_from_module='ns.network', outer_class=root_module['ns3::ByteTagList']) module.add_class('Item', import_from_module='ns.network', outer_class=root_module['ns3::ByteTagList::Iterator']) module.add_class('CallbackBase', import_from_module='ns.core') module.add_class('CapabilityInformation', import_from_module='ns.wifi') module.add_class('DataRate', import_from_module='ns.network') module.add_class('DefaultDeleter', import_from_module='ns.core', template_parameters=['ns3::AttributeAccessor']) module.add_class('DefaultDeleter', import_from_module='ns.core', template_parameters=['ns3::AttributeChecker']) module.add_class('DefaultDeleter', import_from_module='ns.core', template_parameters=['ns3::AttributeValue']) module.add_class('DefaultDeleter', import_from_module='ns.core', template_parameters=['ns3::CallbackImplBase']) module.add_class('DefaultDeleter', import_from_module='ns.core', template_parameters=['ns3::EventImpl']) module.add_class('DefaultDeleter', import_from_module='ns.core', template_parameters=['ns3::Hash::Implementation']) module.add_class('DefaultDeleter', template_parameters=['ns3::MeshWifiInterfaceMacPlugin']) module.add_class('DefaultDeleter', import_from_module='ns.core', template_parameters=['ns3::NixVector']) module.add_class('DefaultDeleter', import_from_module='ns.core', template_parameters=['ns3::Packet']) module.add_class('DefaultDeleter', import_from_module='ns.core', template_parameters=['ns3::TraceSourceAccessor']) module.add_class('DefaultDeleter', import_from_module='ns.core', template_parameters=['ns3::WifiInformationElement']) module.add_class('DefaultDeleter', 
template_parameters=['ns3::dot11s::DestinationAddressUnit']) module.add_class('DefaultDeleter', template_parameters=['ns3::dot11s::IeBeaconTimingUnit']) module.add_class('EventId', import_from_module='ns.core') module.add_class('Hasher', import_from_module='ns.core') module.add_class('Ipv4Address', import_from_module='ns.network') root_module['ns3::Ipv4Address'].implicitly_converts_to(root_module['ns3::Address']) module.add_class('Ipv4Mask', import_from_module='ns.network') module.add_class('Ipv6Address', import_from_module='ns.network') root_module['ns3::Ipv6Address'].implicitly_converts_to(root_module['ns3::Address']) module.add_class('Ipv6Prefix', import_from_module='ns.network') module.add_class('Mac48Address', import_from_module='ns.network') typehandlers.add_type_alias(u'void ( * ) ( ns3::Mac48Address )', u'ns3::Mac48Address::TracedCallback') typehandlers.add_type_alias(u'void ( * ) ( ns3::Mac48Address )*', u'ns3::Mac48Address::TracedCallback*') typehandlers.add_type_alias(u'void ( * ) ( ns3::Mac48Address )&', u'ns3::Mac48Address::TracedCallback&') root_module['ns3::Mac48Address'].implicitly_converts_to(root_module['ns3::Address']) module.add_class('Mac8Address', import_from_module='ns.network') root_module['ns3::Mac8Address'].implicitly_converts_to(root_module['ns3::Address']) module.add_class('MacLowTransmissionParameters', import_from_module='ns.wifi') module.add_class('MeshHelper') module.add_enum('ChannelPolicy', ['SPREAD_CHANNELS', 'ZERO_CHANNEL'], outer_class=root_module['ns3::MeshHelper']) module.add_class('MeshWifiBeacon') module.add_class('ObjectBase', allow_subclassing=True, import_from_module='ns.core') module.add_class('ObjectDeleter', import_from_module='ns.core') module.add_class('ObjectFactory', import_from_module='ns.core') module.add_class('OriginatorBlockAckAgreement', import_from_module='ns.wifi', parent=root_module['ns3::BlockAckAgreement']) module.add_enum('State', ['PENDING', 'ESTABLISHED', 'INACTIVE', 'NO_REPLY', 'RESET', 'REJECTED'], 
outer_class=root_module['ns3::OriginatorBlockAckAgreement'], import_from_module='ns.wifi') module.add_class('PacketMetadata', import_from_module='ns.network') module.add_class('Item', import_from_module='ns.network', outer_class=root_module['ns3::PacketMetadata']) module.add_enum('ItemType', ['PAYLOAD', 'HEADER', 'TRAILER'], outer_class=root_module['ns3::PacketMetadata::Item'], import_from_module='ns.network') module.add_class('ItemIterator', import_from_module='ns.network', outer_class=root_module['ns3::PacketMetadata']) module.add_class('PacketTagIterator', import_from_module='ns.network') module.add_class('Item', import_from_module='ns.network', outer_class=root_module['ns3::PacketTagIterator']) module.add_class('PacketTagList', import_from_module='ns.network') module.add_class('TagData', import_from_module='ns.network', outer_class=root_module['ns3::PacketTagList']) module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::Object', 'ns3::ObjectBase', 'ns3::ObjectDeleter'], parent=root_module['ns3::ObjectBase'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount')) module.add_class('StatusCode', import_from_module='ns.wifi') module.add_class('Tag', import_from_module='ns.network', parent=root_module['ns3::ObjectBase']) module.add_class('TagBuffer', import_from_module='ns.network') module.add_class('TimeWithUnit', import_from_module='ns.core') module.add_class('TracedValue', import_from_module='ns.core', template_parameters=['unsigned int']) module.add_class('TypeId', import_from_module='ns.core') module.add_enum('AttributeFlag', ['ATTR_GET', 'ATTR_SET', 'ATTR_CONSTRUCT', 'ATTR_SGC'], outer_class=root_module['ns3::TypeId'], import_from_module='ns.core') module.add_enum('SupportLevel', ['SUPPORTED', 'DEPRECATED', 'OBSOLETE'], outer_class=root_module['ns3::TypeId'], import_from_module='ns.core') 
module.add_class('AttributeInformation', import_from_module='ns.core', outer_class=root_module['ns3::TypeId']) module.add_class('TraceSourceInformation', import_from_module='ns.core', outer_class=root_module['ns3::TypeId']) typehandlers.add_type_alias(u'uint32_t', u'ns3::TypeId::hash_t') typehandlers.add_type_alias(u'uint32_t*', u'ns3::TypeId::hash_t*') typehandlers.add_type_alias(u'uint32_t&', u'ns3::TypeId::hash_t&') module.add_class('WifiMode', import_from_module='ns.wifi') module.add_class('WifiModeFactory', import_from_module='ns.wifi') module.add_class('WifiRemoteStation', import_from_module='ns.wifi') module.add_class('WifiRemoteStationInfo', import_from_module='ns.wifi') module.add_class('WifiRemoteStationState', import_from_module='ns.wifi') module.add_enum('', ['BRAND_NEW', 'DISASSOC', 'WAIT_ASSOC_TX_OK', 'GOT_ASSOC_TX_OK'], outer_class=root_module['ns3::WifiRemoteStationState'], import_from_module='ns.wifi') module.add_class('empty', import_from_module='ns.core') module.add_class('int64x64_t', import_from_module='ns.core') module.add_enum('impl_type', ['int128_impl', 'cairo_impl', 'ld_impl'], outer_class=root_module['ns3::int64x64_t'], import_from_module='ns.core') module.add_class('Chunk', import_from_module='ns.network', parent=root_module['ns3::ObjectBase']) module.add_class('Header', import_from_module='ns.network', parent=root_module['ns3::Chunk']) module.add_class('MgtAddBaRequestHeader', import_from_module='ns.wifi', parent=root_module['ns3::Header']) module.add_class('MgtAddBaResponseHeader', import_from_module='ns.wifi', parent=root_module['ns3::Header']) module.add_class('MgtAssocRequestHeader', import_from_module='ns.wifi', parent=root_module['ns3::Header']) module.add_class('MgtAssocResponseHeader', import_from_module='ns.wifi', parent=root_module['ns3::Header']) module.add_class('MgtDelBaHeader', import_from_module='ns.wifi', parent=root_module['ns3::Header']) module.add_class('MgtProbeRequestHeader', import_from_module='ns.wifi', 
parent=root_module['ns3::Header']) module.add_class('MgtProbeResponseHeader', import_from_module='ns.wifi', parent=root_module['ns3::Header']) module.add_class('MgtReassocRequestHeader', import_from_module='ns.wifi', parent=root_module['ns3::Header']) module.add_class('Object', import_from_module='ns.core', parent=root_module['ns3::SimpleRefCount< ns3::Object, ns3::ObjectBase, ns3::ObjectDeleter >']) module.add_class('AggregateIterator', import_from_module='ns.core', outer_class=root_module['ns3::Object']) module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::AttributeAccessor', 'ns3::empty', 'ns3::DefaultDeleter<ns3::AttributeAccessor>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount')) module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::AttributeChecker', 'ns3::empty', 'ns3::DefaultDeleter<ns3::AttributeChecker>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount')) module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::AttributeValue', 'ns3::empty', 'ns3::DefaultDeleter<ns3::AttributeValue>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount')) module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::CallbackImplBase', 'ns3::empty', 'ns3::DefaultDeleter<ns3::CallbackImplBase>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount')) 
module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::EventImpl', 'ns3::empty', 'ns3::DefaultDeleter<ns3::EventImpl>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount')) module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::Hash::Implementation', 'ns3::empty', 'ns3::DefaultDeleter<ns3::Hash::Implementation>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount')) module.add_class('SimpleRefCount', automatic_type_narrowing=True, template_parameters=['ns3::MeshWifiInterfaceMacPlugin', 'ns3::empty', 'ns3::DefaultDeleter<ns3::MeshWifiInterfaceMacPlugin>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount')) module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::NixVector', 'ns3::empty', 'ns3::DefaultDeleter<ns3::NixVector>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount')) module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::Packet', 'ns3::empty', 'ns3::DefaultDeleter<ns3::Packet>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount')) module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::TraceSourceAccessor', 'ns3::empty', 
'ns3::DefaultDeleter<ns3::TraceSourceAccessor>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount')) module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::WifiInformationElement', 'ns3::empty', 'ns3::DefaultDeleter<ns3::WifiInformationElement>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount')) module.add_class('SimpleRefCount', automatic_type_narrowing=True, template_parameters=['ns3::dot11s::DestinationAddressUnit', 'ns3::empty', 'ns3::DefaultDeleter<ns3::dot11s::DestinationAddressUnit>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount')) module.add_class('SimpleRefCount', automatic_type_narrowing=True, template_parameters=['ns3::dot11s::IeBeaconTimingUnit', 'ns3::empty', 'ns3::DefaultDeleter<ns3::dot11s::IeBeaconTimingUnit>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount')) module.add_class('Time', import_from_module='ns.core') module.add_enum('Unit', ['Y', 'D', 'H', 'MIN', 'S', 'MS', 'US', 'NS', 'PS', 'FS', 'LAST'], outer_class=root_module['ns3::Time'], import_from_module='ns.core') typehandlers.add_type_alias(u'void ( * ) ( ns3::Time )', u'ns3::Time::TracedCallback') typehandlers.add_type_alias(u'void ( * ) ( ns3::Time )*', u'ns3::Time::TracedCallback*') typehandlers.add_type_alias(u'void ( * ) ( ns3::Time )&', u'ns3::Time::TracedCallback&') root_module['ns3::Time'].implicitly_converts_to(root_module['ns3::int64x64_t']) module.add_class('TraceSourceAccessor', import_from_module='ns.core', 
parent=root_module['ns3::SimpleRefCount< ns3::TraceSourceAccessor, ns3::empty, ns3::DefaultDeleter<ns3::TraceSourceAccessor> >']) module.add_class('Trailer', import_from_module='ns.network', parent=root_module['ns3::Chunk']) module.add_class('Txop', import_from_module='ns.wifi', parent=root_module['ns3::Object']) typehandlers.add_type_alias(u'ns3::Callback< void, ns3::WifiMacHeader const &, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', u'ns3::Txop::TxOk') typehandlers.add_type_alias(u'ns3::Callback< void, ns3::WifiMacHeader const &, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >*', u'ns3::Txop::TxOk*') typehandlers.add_type_alias(u'ns3::Callback< void, ns3::WifiMacHeader const &, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >&', u'ns3::Txop::TxOk&') typehandlers.add_type_alias(u'ns3::Callback< void, ns3::WifiMacHeader const &, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', u'ns3::Txop::TxFailed') typehandlers.add_type_alias(u'ns3::Callback< void, ns3::WifiMacHeader const &, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >*', u'ns3::Txop::TxFailed*') typehandlers.add_type_alias(u'ns3::Callback< void, ns3::WifiMacHeader const &, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >&', u'ns3::Txop::TxFailed&') typehandlers.add_type_alias(u'ns3::Callback< void, ns3::Ptr< ns3::Packet const >, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', u'ns3::Txop::TxDropped') typehandlers.add_type_alias(u'ns3::Callback< void, ns3::Ptr< ns3::Packet const >, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >*', u'ns3::Txop::TxDropped*') typehandlers.add_type_alias(u'ns3::Callback< void, ns3::Ptr< 
ns3::Packet const >, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >&', u'ns3::Txop::TxDropped&') module.add_class('WifiActionHeader', import_from_module='ns.wifi', parent=root_module['ns3::Header']) module.add_enum('CategoryValue', ['BLOCK_ACK', 'MESH', 'MULTIHOP', 'SELF_PROTECTED', 'VENDOR_SPECIFIC_ACTION'], outer_class=root_module['ns3::WifiActionHeader'], import_from_module='ns.wifi') module.add_enum('SelfProtectedActionValue', ['PEER_LINK_OPEN', 'PEER_LINK_CONFIRM', 'PEER_LINK_CLOSE', 'GROUP_KEY_INFORM', 'GROUP_KEY_ACK'], outer_class=root_module['ns3::WifiActionHeader'], import_from_module='ns.wifi') module.add_enum('MultihopActionValue', ['PROXY_UPDATE', 'PROXY_UPDATE_CONFIRMATION'], outer_class=root_module['ns3::WifiActionHeader'], import_from_module='ns.wifi') module.add_enum('MeshActionValue', ['LINK_METRIC_REPORT', 'PATH_SELECTION', 'PORTAL_ANNOUNCEMENT', 'CONGESTION_CONTROL_NOTIFICATION', 'MDA_SETUP_REQUEST', 'MDA_SETUP_REPLY', 'MDAOP_ADVERTISMENT_REQUEST', 'MDAOP_ADVERTISMENTS', 'MDAOP_SET_TEARDOWN', 'TBTT_ADJUSTMENT_REQUEST', 'TBTT_ADJUSTMENT_RESPONSE'], outer_class=root_module['ns3::WifiActionHeader'], import_from_module='ns.wifi') module.add_enum('BlockAckActionValue', ['BLOCK_ACK_ADDBA_REQUEST', 'BLOCK_ACK_ADDBA_RESPONSE', 'BLOCK_ACK_DELBA'], outer_class=root_module['ns3::WifiActionHeader'], import_from_module='ns.wifi') module.add_class('ActionValue', import_from_module='ns.wifi', outer_class=root_module['ns3::WifiActionHeader']) typehandlers.add_type_alias(u'ns3::WifiActionHeader::ActionValue', u'ns3::WifiActionHeader::ActionValue') typehandlers.add_type_alias(u'ns3::WifiActionHeader::ActionValue*', u'ns3::WifiActionHeader::ActionValue*') typehandlers.add_type_alias(u'ns3::WifiActionHeader::ActionValue&', u'ns3::WifiActionHeader::ActionValue&') module.add_typedef(root_module['ns3::WifiActionHeader::ActionValue'], 'ActionValue') module.add_class('WifiInformationElement', import_from_module='ns.wifi', 
parent=root_module['ns3::SimpleRefCount< ns3::WifiInformationElement, ns3::empty, ns3::DefaultDeleter<ns3::WifiInformationElement> >']) module.add_class('WifiInformationElementVector', import_from_module='ns.wifi', parent=root_module['ns3::Header']) typehandlers.add_type_alias(u'std::vector< ns3::Ptr< ns3::WifiInformationElement > > iterator', u'ns3::WifiInformationElementVector::Iterator') typehandlers.add_type_alias(u'std::vector< ns3::Ptr< ns3::WifiInformationElement > > iterator*', u'ns3::WifiInformationElementVector::Iterator*') typehandlers.add_type_alias(u'std::vector< ns3::Ptr< ns3::WifiInformationElement > > iterator&', u'ns3::WifiInformationElementVector::Iterator&') module.add_class('WifiMac', import_from_module='ns.wifi', parent=root_module['ns3::Object']) module.add_class('WifiMacHeader', import_from_module='ns.wifi', parent=root_module['ns3::Header']) module.add_enum('QosAckPolicy', ['NORMAL_ACK', 'NO_ACK', 'NO_EXPLICIT_ACK', 'BLOCK_ACK'], outer_class=root_module['ns3::WifiMacHeader'], import_from_module='ns.wifi') module.add_enum('AddressType', ['ADDR1', 'ADDR2', 'ADDR3', 'ADDR4'], outer_class=root_module['ns3::WifiMacHeader'], import_from_module='ns.wifi') typehandlers.add_type_alias(u'void ( * ) ( ns3::WifiMacHeader const & )', u'ns3::WifiMacHeader::TracedCallback') typehandlers.add_type_alias(u'void ( * ) ( ns3::WifiMacHeader const & )*', u'ns3::WifiMacHeader::TracedCallback*') typehandlers.add_type_alias(u'void ( * ) ( ns3::WifiMacHeader const & )&', u'ns3::WifiMacHeader::TracedCallback&') module.add_class('WifiRemoteStationManager', import_from_module='ns.wifi', parent=root_module['ns3::Object']) module.add_enum('ProtectionMode', ['RTS_CTS', 'CTS_TO_SELF'], outer_class=root_module['ns3::WifiRemoteStationManager'], import_from_module='ns.wifi') typehandlers.add_type_alias(u'std::vector< ns3::WifiRemoteStation * >', u'ns3::WifiRemoteStationManager::Stations') typehandlers.add_type_alias(u'std::vector< ns3::WifiRemoteStation * >*', 
u'ns3::WifiRemoteStationManager::Stations*') typehandlers.add_type_alias(u'std::vector< ns3::WifiRemoteStation * >&', u'ns3::WifiRemoteStationManager::Stations&') typehandlers.add_type_alias(u'std::vector< ns3::WifiRemoteStationState * >', u'ns3::WifiRemoteStationManager::StationStates') typehandlers.add_type_alias(u'std::vector< ns3::WifiRemoteStationState * >*', u'ns3::WifiRemoteStationManager::StationStates*') typehandlers.add_type_alias(u'std::vector< ns3::WifiRemoteStationState * >&', u'ns3::WifiRemoteStationManager::StationStates&') typehandlers.add_type_alias(u'void ( * ) ( double, double, ns3::Mac48Address )', u'ns3::WifiRemoteStationManager::PowerChangeTracedCallback') typehandlers.add_type_alias(u'void ( * ) ( double, double, ns3::Mac48Address )*', u'ns3::WifiRemoteStationManager::PowerChangeTracedCallback*') typehandlers.add_type_alias(u'void ( * ) ( double, double, ns3::Mac48Address )&', u'ns3::WifiRemoteStationManager::PowerChangeTracedCallback&') typehandlers.add_type_alias(u'void ( * ) ( ns3::DataRate, ns3::DataRate, ns3::Mac48Address )', u'ns3::WifiRemoteStationManager::RateChangeTracedCallback') typehandlers.add_type_alias(u'void ( * ) ( ns3::DataRate, ns3::DataRate, ns3::Mac48Address )*', u'ns3::WifiRemoteStationManager::RateChangeTracedCallback*') typehandlers.add_type_alias(u'void ( * ) ( ns3::DataRate, ns3::DataRate, ns3::Mac48Address )&', u'ns3::WifiRemoteStationManager::RateChangeTracedCallback&') module.add_class('AttributeAccessor', import_from_module='ns.core', parent=root_module['ns3::SimpleRefCount< ns3::AttributeAccessor, ns3::empty, ns3::DefaultDeleter<ns3::AttributeAccessor> >']) module.add_class('AttributeChecker', allow_subclassing=False, automatic_type_narrowing=True, import_from_module='ns.core', parent=root_module['ns3::SimpleRefCount< ns3::AttributeChecker, ns3::empty, ns3::DefaultDeleter<ns3::AttributeChecker> >']) module.add_class('AttributeValue', allow_subclassing=False, automatic_type_narrowing=True, 
import_from_module='ns.core', parent=root_module['ns3::SimpleRefCount< ns3::AttributeValue, ns3::empty, ns3::DefaultDeleter<ns3::AttributeValue> >']) module.add_class('BlockAckManager', import_from_module='ns.wifi', parent=root_module['ns3::Object']) typehandlers.add_type_alias(u'ns3::Callback< void, ns3::WifiMacHeader const &, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', u'ns3::BlockAckManager::TxOk') typehandlers.add_type_alias(u'ns3::Callback< void, ns3::WifiMacHeader const &, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >*', u'ns3::BlockAckManager::TxOk*') typehandlers.add_type_alias(u'ns3::Callback< void, ns3::WifiMacHeader const &, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >&', u'ns3::BlockAckManager::TxOk&') typehandlers.add_type_alias(u'ns3::Callback< void, ns3::WifiMacHeader const &, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', u'ns3::BlockAckManager::TxFailed') typehandlers.add_type_alias(u'ns3::Callback< void, ns3::WifiMacHeader const &, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >*', u'ns3::BlockAckManager::TxFailed*') typehandlers.add_type_alias(u'ns3::Callback< void, ns3::WifiMacHeader const &, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >&', u'ns3::BlockAckManager::TxFailed&') typehandlers.add_type_alias(u'void ( * ) ( ns3::Time, ns3::Mac48Address, uint8_t, ns3::OriginatorBlockAckAgreement::State )', u'ns3::BlockAckManager::AgreementStateTracedCallback') typehandlers.add_type_alias(u'void ( * ) ( ns3::Time, ns3::Mac48Address, uint8_t, ns3::OriginatorBlockAckAgreement::State )*', u'ns3::BlockAckManager::AgreementStateTracedCallback*') typehandlers.add_type_alias(u'void ( * ) ( ns3::Time, ns3::Mac48Address, uint8_t, 
ns3::OriginatorBlockAckAgreement::State )&', u'ns3::BlockAckManager::AgreementStateTracedCallback&') module.add_class('BooleanChecker', import_from_module='ns.core', parent=root_module['ns3::AttributeChecker']) module.add_class('BooleanValue', import_from_module='ns.core', parent=root_module['ns3::AttributeValue']) module.add_class('CallbackChecker', import_from_module='ns.core', parent=root_module['ns3::AttributeChecker']) module.add_class('CallbackImplBase', import_from_module='ns.core', parent=root_module['ns3::SimpleRefCount< ns3::CallbackImplBase, ns3::empty, ns3::DefaultDeleter<ns3::CallbackImplBase> >']) module.add_class('CallbackValue', import_from_module='ns.core', parent=root_module['ns3::AttributeValue']) module.add_class('CfParameterSet', import_from_module='ns.wifi', parent=root_module['ns3::WifiInformationElement']) module.add_class('Channel', import_from_module='ns.network', parent=root_module['ns3::Object']) module.add_class('DataRateChecker', import_from_module='ns.network', parent=root_module['ns3::AttributeChecker']) module.add_class('DataRateValue', import_from_module='ns.network', parent=root_module['ns3::AttributeValue']) module.add_class('DoubleValue', import_from_module='ns.core', parent=root_module['ns3::AttributeValue']) module.add_class('DsssParameterSet', import_from_module='ns.wifi', parent=root_module['ns3::WifiInformationElement']) module.add_class('EdcaParameterSet', import_from_module='ns.wifi', parent=root_module['ns3::WifiInformationElement']) module.add_class('EmptyAttributeAccessor', import_from_module='ns.core', parent=root_module['ns3::AttributeAccessor']) module.add_class('EmptyAttributeChecker', import_from_module='ns.core', parent=root_module['ns3::AttributeChecker']) module.add_class('EmptyAttributeValue', import_from_module='ns.core', parent=root_module['ns3::AttributeValue']) module.add_class('EnumChecker', import_from_module='ns.core', parent=root_module['ns3::AttributeChecker']) module.add_class('EnumValue', 
import_from_module='ns.core', parent=root_module['ns3::AttributeValue']) module.add_class('ErpInformation', import_from_module='ns.wifi', parent=root_module['ns3::WifiInformationElement']) module.add_class('EventImpl', import_from_module='ns.core', parent=root_module['ns3::SimpleRefCount< ns3::EventImpl, ns3::empty, ns3::DefaultDeleter<ns3::EventImpl> >']) module.add_class('ExtendedCapabilities', import_from_module='ns.wifi', parent=root_module['ns3::WifiInformationElement']) module.add_class('ExtendedSupportedRatesIE', import_from_module='ns.wifi', parent=root_module['ns3::WifiInformationElement']) module.add_class('HeCapabilities', import_from_module='ns.wifi', parent=root_module['ns3::WifiInformationElement']) module.add_class('HeOperation', import_from_module='ns.wifi', parent=root_module['ns3::WifiInformationElement']) module.add_class('HtCapabilities', import_from_module='ns.wifi', parent=root_module['ns3::WifiInformationElement']) module.add_class('HtOperation', import_from_module='ns.wifi', parent=root_module['ns3::WifiInformationElement']) module.add_class('IntegerValue', import_from_module='ns.core', parent=root_module['ns3::AttributeValue']) module.add_class('Ipv4AddressChecker', import_from_module='ns.network', parent=root_module['ns3::AttributeChecker']) module.add_class('Ipv4AddressValue', import_from_module='ns.network', parent=root_module['ns3::AttributeValue']) module.add_class('Ipv4MaskChecker', import_from_module='ns.network', parent=root_module['ns3::AttributeChecker']) module.add_class('Ipv4MaskValue', import_from_module='ns.network', parent=root_module['ns3::AttributeValue']) module.add_class('Ipv6AddressChecker', import_from_module='ns.network', parent=root_module['ns3::AttributeChecker']) module.add_class('Ipv6AddressValue', import_from_module='ns.network', parent=root_module['ns3::AttributeValue']) module.add_class('Ipv6PrefixChecker', import_from_module='ns.network', parent=root_module['ns3::AttributeChecker']) 
module.add_class('Ipv6PrefixValue', import_from_module='ns.network', parent=root_module['ns3::AttributeValue']) module.add_class('Mac48AddressChecker', import_from_module='ns.network', parent=root_module['ns3::AttributeChecker']) module.add_class('Mac48AddressValue', import_from_module='ns.network', parent=root_module['ns3::AttributeValue']) module.add_class('MeshInformationElementVector', parent=root_module['ns3::WifiInformationElementVector']) module.add_class('MeshL2RoutingProtocol', parent=root_module['ns3::Object']) typehandlers.add_type_alias(u'ns3::Callback< void, bool, ns3::Ptr< ns3::Packet >, ns3::Mac48Address, ns3::Mac48Address, unsigned short, unsigned int, ns3::empty, ns3::empty, ns3::empty >', u'ns3::MeshL2RoutingProtocol::RouteReplyCallback') typehandlers.add_type_alias(u'ns3::Callback< void, bool, ns3::Ptr< ns3::Packet >, ns3::Mac48Address, ns3::Mac48Address, unsigned short, unsigned int, ns3::empty, ns3::empty, ns3::empty >*', u'ns3::MeshL2RoutingProtocol::RouteReplyCallback*') typehandlers.add_type_alias(u'ns3::Callback< void, bool, ns3::Ptr< ns3::Packet >, ns3::Mac48Address, ns3::Mac48Address, unsigned short, unsigned int, ns3::empty, ns3::empty, ns3::empty >&', u'ns3::MeshL2RoutingProtocol::RouteReplyCallback&') module.add_class('MeshStack', parent=root_module['ns3::Object']) module.add_class('MeshWifiInterfaceMacPlugin', parent=root_module['ns3::SimpleRefCount< ns3::MeshWifiInterfaceMacPlugin, ns3::empty, ns3::DefaultDeleter<ns3::MeshWifiInterfaceMacPlugin> >']) module.add_class('MgtBeaconHeader', import_from_module='ns.wifi', parent=root_module['ns3::MgtProbeResponseHeader']) module.add_class('NetDevice', import_from_module='ns.network', parent=root_module['ns3::Object']) module.add_enum('PacketType', ['PACKET_HOST', 'NS3_PACKET_HOST', 'PACKET_BROADCAST', 'NS3_PACKET_BROADCAST', 'PACKET_MULTICAST', 'NS3_PACKET_MULTICAST', 'PACKET_OTHERHOST', 'NS3_PACKET_OTHERHOST'], outer_class=root_module['ns3::NetDevice'], import_from_module='ns.network') 
typehandlers.add_type_alias(u'void ( * ) ( )', u'ns3::NetDevice::LinkChangeTracedCallback') typehandlers.add_type_alias(u'void ( * ) ( )*', u'ns3::NetDevice::LinkChangeTracedCallback*') typehandlers.add_type_alias(u'void ( * ) ( )&', u'ns3::NetDevice::LinkChangeTracedCallback&') typehandlers.add_type_alias(u'ns3::Callback< bool, ns3::Ptr< ns3::NetDevice >, ns3::Ptr< ns3::Packet const >, unsigned short, ns3::Address const &, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', u'ns3::NetDevice::ReceiveCallback') typehandlers.add_type_alias(u'ns3::Callback< bool, ns3::Ptr< ns3::NetDevice >, ns3::Ptr< ns3::Packet const >, unsigned short, ns3::Address const &, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >*', u'ns3::NetDevice::ReceiveCallback*') typehandlers.add_type_alias(u'ns3::Callback< bool, ns3::Ptr< ns3::NetDevice >, ns3::Ptr< ns3::Packet const >, unsigned short, ns3::Address const &, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >&', u'ns3::NetDevice::ReceiveCallback&') typehandlers.add_type_alias(u'ns3::Callback< bool, ns3::Ptr< ns3::NetDevice >, ns3::Ptr< ns3::Packet const >, unsigned short, ns3::Address const &, ns3::Address const &, ns3::NetDevice::PacketType, ns3::empty, ns3::empty, ns3::empty >', u'ns3::NetDevice::PromiscReceiveCallback') typehandlers.add_type_alias(u'ns3::Callback< bool, ns3::Ptr< ns3::NetDevice >, ns3::Ptr< ns3::Packet const >, unsigned short, ns3::Address const &, ns3::Address const &, ns3::NetDevice::PacketType, ns3::empty, ns3::empty, ns3::empty >*', u'ns3::NetDevice::PromiscReceiveCallback*') typehandlers.add_type_alias(u'ns3::Callback< bool, ns3::Ptr< ns3::NetDevice >, ns3::Ptr< ns3::Packet const >, unsigned short, ns3::Address const &, ns3::Address const &, ns3::NetDevice::PacketType, ns3::empty, ns3::empty, ns3::empty >&', u'ns3::NetDevice::PromiscReceiveCallback&') module.add_class('NixVector', import_from_module='ns.network', parent=root_module['ns3::SimpleRefCount< ns3::NixVector, 
ns3::empty, ns3::DefaultDeleter<ns3::NixVector> >']) module.add_class('Node', import_from_module='ns.network', parent=root_module['ns3::Object']) typehandlers.add_type_alias(u'ns3::Callback< void, ns3::Ptr< ns3::NetDevice >, ns3::Ptr< ns3::Packet const >, unsigned short, ns3::Address const &, ns3::Address const &, ns3::NetDevice::PacketType, ns3::empty, ns3::empty, ns3::empty >', u'ns3::Node::ProtocolHandler') typehandlers.add_type_alias(u'ns3::Callback< void, ns3::Ptr< ns3::NetDevice >, ns3::Ptr< ns3::Packet const >, unsigned short, ns3::Address const &, ns3::Address const &, ns3::NetDevice::PacketType, ns3::empty, ns3::empty, ns3::empty >*', u'ns3::Node::ProtocolHandler*') typehandlers.add_type_alias(u'ns3::Callback< void, ns3::Ptr< ns3::NetDevice >, ns3::Ptr< ns3::Packet const >, unsigned short, ns3::Address const &, ns3::Address const &, ns3::NetDevice::PacketType, ns3::empty, ns3::empty, ns3::empty >&', u'ns3::Node::ProtocolHandler&') typehandlers.add_type_alias(u'ns3::Callback< void, ns3::Ptr< ns3::NetDevice >, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', u'ns3::Node::DeviceAdditionListener') typehandlers.add_type_alias(u'ns3::Callback< void, ns3::Ptr< ns3::NetDevice >, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >*', u'ns3::Node::DeviceAdditionListener*') typehandlers.add_type_alias(u'ns3::Callback< void, ns3::Ptr< ns3::NetDevice >, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >&', u'ns3::Node::DeviceAdditionListener&') module.add_class('ObjectFactoryChecker', import_from_module='ns.core', parent=root_module['ns3::AttributeChecker']) module.add_class('ObjectFactoryValue', import_from_module='ns.core', parent=root_module['ns3::AttributeValue']) module.add_class('Packet', import_from_module='ns.network', parent=root_module['ns3::SimpleRefCount< ns3::Packet, ns3::empty, ns3::DefaultDeleter<ns3::Packet> >']) 
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ptr< ns3::Packet const > )', u'ns3::Packet::TracedCallback') typehandlers.add_type_alias(u'void ( * ) ( ns3::Ptr< ns3::Packet const > )*', u'ns3::Packet::TracedCallback*') typehandlers.add_type_alias(u'void ( * ) ( ns3::Ptr< ns3::Packet const > )&', u'ns3::Packet::TracedCallback&') typehandlers.add_type_alias(u'void ( * ) ( ns3::Ptr< ns3::Packet const >, ns3::Address const & )', u'ns3::Packet::AddressTracedCallback') typehandlers.add_type_alias(u'void ( * ) ( ns3::Ptr< ns3::Packet const >, ns3::Address const & )*', u'ns3::Packet::AddressTracedCallback*') typehandlers.add_type_alias(u'void ( * ) ( ns3::Ptr< ns3::Packet const >, ns3::Address const & )&', u'ns3::Packet::AddressTracedCallback&') typehandlers.add_type_alias(u'void ( * ) ( ns3::Ptr< ns3::Packet const > const, ns3::Address const &, ns3::Address const & )', u'ns3::Packet::TwoAddressTracedCallback') typehandlers.add_type_alias(u'void ( * ) ( ns3::Ptr< ns3::Packet const > const, ns3::Address const &, ns3::Address const & )*', u'ns3::Packet::TwoAddressTracedCallback*') typehandlers.add_type_alias(u'void ( * ) ( ns3::Ptr< ns3::Packet const > const, ns3::Address const &, ns3::Address const & )&', u'ns3::Packet::TwoAddressTracedCallback&') typehandlers.add_type_alias(u'void ( * ) ( ns3::Ptr< ns3::Packet const >, ns3::Mac48Address )', u'ns3::Packet::Mac48AddressTracedCallback') typehandlers.add_type_alias(u'void ( * ) ( ns3::Ptr< ns3::Packet const >, ns3::Mac48Address )*', u'ns3::Packet::Mac48AddressTracedCallback*') typehandlers.add_type_alias(u'void ( * ) ( ns3::Ptr< ns3::Packet const >, ns3::Mac48Address )&', u'ns3::Packet::Mac48AddressTracedCallback&') typehandlers.add_type_alias(u'void ( * ) ( uint32_t, uint32_t )', u'ns3::Packet::SizeTracedCallback') typehandlers.add_type_alias(u'void ( * ) ( uint32_t, uint32_t )*', u'ns3::Packet::SizeTracedCallback*') typehandlers.add_type_alias(u'void ( * ) ( uint32_t, uint32_t )&', u'ns3::Packet::SizeTracedCallback&') 
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ptr< ns3::Packet const >, double )', u'ns3::Packet::SinrTracedCallback') typehandlers.add_type_alias(u'void ( * ) ( ns3::Ptr< ns3::Packet const >, double )*', u'ns3::Packet::SinrTracedCallback*') typehandlers.add_type_alias(u'void ( * ) ( ns3::Ptr< ns3::Packet const >, double )&', u'ns3::Packet::SinrTracedCallback&') module.add_class('QosTxop', import_from_module='ns.wifi', parent=root_module['ns3::Txop']) module.add_class('RegularWifiMac', import_from_module='ns.wifi', parent=root_module['ns3::WifiMac']) typehandlers.add_type_alias(u'ns3::Callback< void, ns3::Ptr< ns3::Packet >, ns3::Mac48Address, ns3::Mac48Address, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', u'ns3::RegularWifiMac::ForwardUpCallback') typehandlers.add_type_alias(u'ns3::Callback< void, ns3::Ptr< ns3::Packet >, ns3::Mac48Address, ns3::Mac48Address, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >*', u'ns3::RegularWifiMac::ForwardUpCallback*') typehandlers.add_type_alias(u'ns3::Callback< void, ns3::Ptr< ns3::Packet >, ns3::Mac48Address, ns3::Mac48Address, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >&', u'ns3::RegularWifiMac::ForwardUpCallback&') module.add_class('Ssid', import_from_module='ns.wifi', parent=root_module['ns3::WifiInformationElement']) module.add_class('SsidChecker', import_from_module='ns.wifi', parent=root_module['ns3::AttributeChecker']) module.add_class('SsidValue', import_from_module='ns.wifi', parent=root_module['ns3::AttributeValue']) module.add_class('SupportedRates', import_from_module='ns.wifi', parent=root_module['ns3::WifiInformationElement']) module.add_class('TimeValue', import_from_module='ns.core', parent=root_module['ns3::AttributeValue']) module.add_class('TypeIdChecker', import_from_module='ns.core', parent=root_module['ns3::AttributeChecker']) module.add_class('TypeIdValue', import_from_module='ns.core', 
parent=root_module['ns3::AttributeValue']) module.add_class('UintegerValue', import_from_module='ns.core', parent=root_module['ns3::AttributeValue']) module.add_class('VhtCapabilities', import_from_module='ns.wifi', parent=root_module['ns3::WifiInformationElement']) module.add_class('VhtOperation', import_from_module='ns.wifi', parent=root_module['ns3::WifiInformationElement']) module.add_class('WifiModeChecker', import_from_module='ns.wifi', parent=root_module['ns3::AttributeChecker']) module.add_class('WifiModeValue', import_from_module='ns.wifi', parent=root_module['ns3::AttributeValue']) module.add_class('AddressChecker', import_from_module='ns.network', parent=root_module['ns3::AttributeChecker']) module.add_class('AddressValue', import_from_module='ns.network', parent=root_module['ns3::AttributeValue']) module.add_class('BridgeChannel', import_from_module='ns.bridge', parent=root_module['ns3::Channel']) module.add_class('CallbackImpl', import_from_module='ns.core', template_parameters=['bool', 'ns3::Ptr<ns3::NetDevice>', 'ns3::Ptr<const ns3::Packet>', 'unsigned short', 'const ns3::Address &', 'const ns3::Address &', 'ns3::NetDevice::PacketType', 'ns3::empty', 'ns3::empty', 'ns3::empty'], parent=root_module['ns3::CallbackImplBase']) module.add_class('CallbackImpl', import_from_module='ns.core', template_parameters=['bool', 'ns3::Ptr<ns3::NetDevice>', 'ns3::Ptr<const ns3::Packet>', 'unsigned short', 'const ns3::Address &', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'], parent=root_module['ns3::CallbackImplBase']) module.add_class('CallbackImpl', import_from_module='ns.core', template_parameters=['ns3::ObjectBase *', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'], parent=root_module['ns3::CallbackImplBase']) module.add_class('CallbackImpl', import_from_module='ns.core', template_parameters=['void', 'bool', 'ns3::Ptr<ns3::Packet>', 'ns3::Mac48Address', 
'ns3::Mac48Address', 'unsigned short', 'unsigned int', 'ns3::empty', 'ns3::empty', 'ns3::empty'], parent=root_module['ns3::CallbackImplBase']) module.add_class('CallbackImpl', import_from_module='ns.core', template_parameters=['void', 'const ns3::WifiMacHeader &', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'], parent=root_module['ns3::CallbackImplBase']) module.add_class('CallbackImpl', import_from_module='ns.core', template_parameters=['void', 'ns3::Mac48Address', 'ns3::Mac48Address', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'], parent=root_module['ns3::CallbackImplBase']) module.add_class('CallbackImpl', import_from_module='ns.core', template_parameters=['void', 'ns3::Mac48Address', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'], parent=root_module['ns3::CallbackImplBase']) module.add_class('CallbackImpl', import_from_module='ns.core', template_parameters=['void', 'ns3::Ptr<const ns3::Packet>', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'], parent=root_module['ns3::CallbackImplBase']) module.add_class('CallbackImpl', import_from_module='ns.core', template_parameters=['void', 'ns3::Ptr<ns3::NetDevice>', 'ns3::Ptr<const ns3::Packet>', 'unsigned short', 'const ns3::Address &', 'const ns3::Address &', 'ns3::NetDevice::PacketType', 'ns3::empty', 'ns3::empty', 'ns3::empty'], parent=root_module['ns3::CallbackImplBase']) module.add_class('CallbackImpl', import_from_module='ns.core', template_parameters=['void', 'ns3::Ptr<ns3::NetDevice>', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'], parent=root_module['ns3::CallbackImplBase']) module.add_class('CallbackImpl', import_from_module='ns.core', template_parameters=['void', 'ns3::Time', 'ns3::Mac48Address', 
'unsigned char', 'ns3::OriginatorBlockAckAgreement::State', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'], parent=root_module['ns3::CallbackImplBase']) module.add_class('CallbackImpl', import_from_module='ns.core', template_parameters=['void', 'ns3::Time', 'ns3::Time', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'], parent=root_module['ns3::CallbackImplBase']) module.add_class('CallbackImpl', import_from_module='ns.core', template_parameters=['void', 'ns3::Time', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'], parent=root_module['ns3::CallbackImplBase']) module.add_class('CallbackImpl', template_parameters=['void', 'ns3::dot11s::RouteChange', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'], parent=root_module['ns3::CallbackImplBase']) module.add_class('CallbackImpl', import_from_module='ns.core', template_parameters=['void', 'unsigned int', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'], parent=root_module['ns3::CallbackImplBase']) module.add_class('CallbackImpl', import_from_module='ns.core', template_parameters=['void', 'unsigned int', 'unsigned int', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'], parent=root_module['ns3::CallbackImplBase']) module.add_class('Dot11sStack', parent=root_module['ns3::MeshStack']) module.add_class('FlameStack', parent=root_module['ns3::MeshStack']) module.add_class('MeshPointDevice', parent=root_module['ns3::NetDevice']) module.add_class('MeshWifiInterfaceMac', parent=root_module['ns3::RegularWifiMac']) module.add_container('ns3::WifiModeList', 'ns3::WifiMode', container_type=u'vector') module.add_container('std::vector< ns3::WifiRemoteStation * >', 'ns3::WifiRemoteStation *', container_type=u'vector') 
module.add_container('std::vector< ns3::WifiRemoteStationState * >', 'ns3::WifiRemoteStationState *', container_type=u'vector') module.add_container('std::map< ns3::Mac48Address, bool >', ('ns3::Mac48Address', 'bool'), container_type=u'map') module.add_container('std::vector< ns3::Ptr< ns3::NetDevice > >', 'ns3::Ptr< ns3::NetDevice >', container_type=u'vector') typehandlers.add_type_alias(u'std::vector< ns3::WifiMode >', u'ns3::WifiModeList') typehandlers.add_type_alias(u'std::vector< ns3::WifiMode >*', u'ns3::WifiModeList*') typehandlers.add_type_alias(u'std::vector< ns3::WifiMode >&', u'ns3::WifiModeList&') typehandlers.add_type_alias(u'std::vector< ns3::WifiMode > const_iterator', u'ns3::WifiModeListIterator') typehandlers.add_type_alias(u'std::vector< ns3::WifiMode > const_iterator*', u'ns3::WifiModeListIterator*') typehandlers.add_type_alias(u'std::vector< ns3::WifiMode > const_iterator&', u'ns3::WifiModeListIterator&') typehandlers.add_type_alias(u'uint8_t', u'ns3::WifiInformationElementId') typehandlers.add_type_alias(u'uint8_t*', u'ns3::WifiInformationElementId*') typehandlers.add_type_alias(u'uint8_t&', u'ns3::WifiInformationElementId&') nested_module = module.add_cpp_namespace('FatalImpl') register_types_ns3_FatalImpl(nested_module) nested_module = module.add_cpp_namespace('Hash') register_types_ns3_Hash(nested_module) nested_module = module.add_cpp_namespace('TracedValueCallback') register_types_ns3_TracedValueCallback(nested_module) nested_module = module.add_cpp_namespace('dot11s') register_types_ns3_dot11s(nested_module) nested_module = module.add_cpp_namespace('flame') register_types_ns3_flame(nested_module) nested_module = module.add_cpp_namespace('internal') register_types_ns3_internal(nested_module)
def _prepare_output_docstrings(output_type, config_class, min_indent=None):
    """Build the "Returns" section of a model docstring from `output_type`'s docstring.

    Args:
        output_type: The output class whose docstring lists the return fields
            (a TF class if its name starts with ``TF``, a PyTorch one otherwise).
        config_class: Name of the configuration class, interpolated into the
            return introduction template.
        min_indent: If given, re-indent the result so every non-empty line has
            at least this many leading spaces.

    Returns:
        The formatted return-documentation string.

    Raises:
        ValueError: If `output_type.__doc__` contains no ``Args``/``Parameters``
            section.
    """
    output_docstring = output_type.__doc__

    # Drop everything up to (and including) the "Args:"/"Parameters:" header so
    # only the list of return fields remains.
    lines = output_docstring.split('\n')
    i = 0
    while (i < len(lines)) and (re.search('^\\s*(Args|Parameters):\\s*$', lines[i]) is None):
        i += 1
    if i < len(lines):
        params_docstring = '\n'.join(lines[(i + 1):])
        params_docstring = _convert_output_args_doc(params_docstring)
    else:
        # BUG FIX: previously `params_docstring` stayed unbound on this path and
        # the function crashed below with an opaque NameError; fail fast instead.
        raise ValueError(
            f'No `Args` or `Parameters` section found in the docstring of `{output_type.__name__}`.'
        )

    # Prepend the framework-appropriate introduction.
    full_output_type = f'{output_type.__module__}.{output_type.__name__}'
    intro = TF_RETURN_INTRODUCTION if output_type.__name__.startswith('TF') else PT_RETURN_INTRODUCTION
    intro = intro.format(full_output_type=full_output_type, config_class=config_class)
    result = intro + params_docstring

    if min_indent is not None:
        lines = result.split('\n')
        # Measure the indentation of the first non-empty line.
        i = 0
        while len(lines[i]) == 0:
            i += 1
        indent = len(_get_indent(lines[i]))
        # If under-indented, shift every non-empty line right by the difference.
        if indent < min_indent:
            to_add = ' ' * (min_indent - indent)
            lines = [(f'{to_add}{line}' if len(line) > 0 else line) for line in lines]
            result = '\n'.join(lines)
    return result
class NorthwestDiagrams(Diagrams):
    """Parent class of all combinatorial northwest diagrams."""

    def _repr_(self):
        # Short description used by Sage's repr machinery.
        return 'Combinatorial northwest diagrams'

    def _an_element_(self):
        # A small fixed diagram serving as a sample element.
        return self([(0, 1), (0, 2), (1, 1), (2, 3)])

    def rothe_diagram(self, w):
        """Return the Rothe diagram of the permutation ``w``."""
        return RotheDiagram(w)

    # Alias: the Rothe diagram is how a permutation maps to a diagram.
    from_permutation = rothe_diagram

    def from_partition(self, mu):
        """Build a northwest diagram from the cells of the partition ``mu``."""
        if not isinstance(mu, Partition):
            raise ValueError('mu must be a Partition')
        return self.element_class(self, mu.cells(), check=False)

    def from_skew_partition(self, s):
        """Build a northwest diagram by reflecting the cells of ``s`` horizontally.

        Columns are flipped within the bounding box of the outer shape so the
        resulting cell set has the northwest property.
        """
        if not isinstance(s, SkewPartition):
            raise ValueError('mu must be a SkewPartition')
        width = s.outer()[0]
        height = len(s.outer())
        reflected = [(row, (width - 1) - col) for (row, col) in s.cells()]
        return self.element_class(self, reflected, height, width, check=False)

    def from_parallelogram_polyomino(self, p):
        """Build a northwest diagram from the 0/1 array of a parallelogram polyomino."""
        from sage.matrix.constructor import Matrix
        return self.from_zero_one_matrix(Matrix(p.get_array()))

    Element = NorthwestDiagram
def train_baseline_multiple(data, model_type, device, retrain_iters=5, suffix='', verbose=True):
    """Train a baseline defense model several times and average its test metrics.

    Args:
        data: Dataset object exposing ``adj``, ``features``, ``labels`` and the
            ``idx_train`` / ``idx_val`` / ``idx_test`` splits.
        model_type: Either ``'GCNJaccard'`` or ``'RGCN'``.
        device: Torch device to train on; models are moved back to CPU afterwards.
        retrain_iters: Number of independent training runs to average over.
        suffix: Appended to every metric key in the returned dict.
        verbose: Whether to print per-iteration progress.

    Returns:
        ``(models, accs)`` — the list of trained (CPU) models and a dict of
        averaged metrics (``test_loss``, ``test_acc``, ``epochs``, each + suffix).

    Raises:
        ValueError: If ``model_type`` is not one of the supported baselines.

    NOTE(review): hyperparameters (``num_layers``, ``latent_size``, ``epochs``,
    ``weight_decay``, ``lr``, ``threshold``, ``gamma``, ``beta1``) are read from
    module-level globals — confirm they are set before calling.
    """
    models = []
    accs = {}
    adj, features, labels = data.adj, data.features, data.labels
    idx_train, idx_val, idx_test = data.idx_train, data.idx_val, data.idx_test
    for i in range(retrain_iters):
        if verbose:
            print('\nRetrain iter {}:'.format(i))
        if model_type == 'GCNJaccard':
            model = GCNJaccard(nfeat=features.shape[1], nclass=(labels.max() + 1),
                               num_layers=num_layers, nhid=latent_size, device=device,
                               weight_decay=(weight_decay if weight_decay is not None else 0.0005),
                               lr=(lr if lr is not None else 0.01))
            model = model.to(device)
            model.fit(features, adj, labels, idx_train, idx_val,
                      train_iters=epochs, threshold=threshold, verbose=verbose)
        elif model_type == 'RGCN':
            model = RGCN(nnodes=adj.shape[0], nfeat=features.shape[1],
                         nclass=(labels.max() + 1), num_layers=num_layers,
                         nhid=latent_size, device=device,
                         gamma=(gamma if gamma is not None else 0.5),
                         beta1=(beta1 if beta1 is not None else 0.0005),
                         beta2=(weight_decay if weight_decay is not None else 0.0005))
            model = model.to(device)
            model.fit(features, adj, labels, idx_train, idx_val,
                      train_iters=epochs, verbose=verbose)
        else:
            # BUG FIX: an unsupported model_type previously fell through and
            # crashed below with `NameError: model`; fail fast with a clear error.
            raise ValueError(f'Unknown model_type: {model_type!r}')
        model.eval()
        output = model.test(idx_test)
        # Park the trained model on CPU so repeated runs do not exhaust GPU memory.
        model.to(torch.device('cpu'))
        model.device = torch.device('cpu')
        models.append(model)
        record_data(accs, (output + [epochs]), ['test_loss', 'test_acc', 'epochs'])
    # Average each metric over the retrain iterations and tag keys with `suffix`.
    accs = {(key + suffix): np.mean(value) for (key, value) in accs.items()}
    return (models, accs)
class AdaptiveMaxPool3d(_AdaptiveMaxPoolNd):
    """Applies a 3D adaptive max pooling over an input signal.

    Thin module wrapper around :func:`F.adaptive_max_pool3d`; the target
    ``output_size`` and the ``return_indices`` flag are stored by the base
    class constructor.
    """

    def forward(self, input):
        # Delegate directly to the functional implementation.
        return F.adaptive_max_pool3d(input, self.output_size,
                                     return_indices=self.return_indices)
# NOTE(review): the extracted source shows `_toolkit()` immediately before the
# class with the newline lost; it is reconstructed here as a class decorator
# (the standard toolkit-registration pattern) — confirm against the original.
@_toolkit()
class RealTerminal(FunctionToolkit):
    # Toolkit exposing real terminal command execution as a tool.
    # Name/description shown to the human user.
    name_for_human = 'Terminal command executor'
    description_for_human = 'Executes commands in a terminal.'
    # Name/description given to the model for tool selection.
    name_for_model = 'Terminal'
    description_for_model = "Executes commands in a terminal on the user's local system. Use it to run valid terminal commands for tasks such as file management, system control, and more"
    # Concrete tool implementations bundled in this toolkit.
    tool_classes = [RealTerminalExecute]
class DropPath(nn.Module):
    """Per-sample path dropping (Stochastic Depth).

    Wraps the ``drop_path`` helper as an ``nn.Module`` so it can sit in the
    main path of a residual block; dropping is only active in training mode.
    """

    def __init__(self, drop_prob=0.0, scale_by_keep=True):
        super().__init__()
        self.drop_prob = drop_prob          # probability of zeroing a sample's path
        self.scale_by_keep = scale_by_keep  # rescale kept samples by 1/keep_prob

    def forward(self, x):
        # Delegate to the functional helper; self.training toggles the behavior.
        return drop_path(x, self.drop_prob, self.training, self.scale_by_keep)

    def extra_repr(self):
        # Shown in the module's repr, e.g. "drop_prob=0.100".
        probability = round(self.drop_prob, 3)
        return f'drop_prob={probability:0.3f}'
class TestOptimizer(Enum):
    # Enumeration of the optimizers under test; each member's value is the
    # optimizer class itself, so members can construct their optimizer directly.
    SMB = SMB
    SLS = Sls
    ADAM = Adam
    SGD = SGD

    def config_from_dict(self, params, data: Dict):
        # Instantiate this member's optimizer class with the model parameters,
        # unpacking the remaining constructor options from `data`.
        return self.value(params, **data)
def Reals(names, ctx=None):
    """Return a list of real constants.

    ``names`` is either a single space-separated string of identifiers or an
    iterable of identifiers; ``ctx`` defaults to the global context.
    """
    ctx = _get_ctx(ctx)
    # A single space-separated string is treated as several names.
    name_list = names.split(' ') if isinstance(names, str) else names
    return [Real(n, ctx) for n in name_list]
def save_image(stl_file, output_dir='', angle=np.radians(45.0), direction=None, resolution=None):
    """Render a rotated view of an STL mesh and save it as a PNG.

    Args:
        stl_file: Path to the STL file to render; the PNG takes its basename.
        output_dir: Directory for the PNG (created if non-empty and missing).
        angle: Camera rotation angle in radians (default 45 degrees).
        direction: Rotation axis as a 3-vector; defaults to [1, 1, 1].
        resolution: Image size as [width, height]; defaults to [640, 480].
    """
    if resolution is None:
        resolution = [640, 480]
    if direction is None:
        direction = [1, 1, 1]
    if output_dir:
        make_dir(output_dir)
    mesh = load_mesh(stl_file)
    base = os.path.basename(stl_file)
    filename, ext = splitext(base)
    scene = mesh.scene()
    scene.set_camera()
    # Rotate the camera about the scene centroid rather than moving the mesh.
    rotate = trimesh.transformations.rotation_matrix(angle=angle, direction=direction,
                                                     point=scene.centroid)
    # BUG FIX: the original logged the literal string 'Saving image %d' — a
    # placeholder with no argument; log the actual file name lazily instead.
    trimesh.constants.log.info('Saving image %s', filename)
    camera_old, _geometry = scene.graph['camera']
    camera_new = np.dot(camera_old, rotate)
    scene.graph['camera'] = camera_new
    try:
        png = scene.save_image(resolution=resolution, visible=True)
        # `with` closes the file; the original's explicit f.close() was redundant.
        with open(os.path.join(output_dir, filename + '.png'), 'wb') as f:
            f.write(png)
    except BaseException as E:
        # Best-effort: off-screen rendering can fail on headless systems;
        # BaseException is kept deliberately so a render crash never aborts a batch.
        print('unable to save image', str(E))
class TestMultiInputs(unittest.TestCase):
    """Smoke test for MultiInputs combining an image with a text sample."""

    def setUp(self) -> None:
        # Load the fixture image relative to this test file and pair it
        # with a short caption.
        here = os.path.dirname(os.path.abspath(__file__))
        dog_image = Image(PilImage.open(os.path.join(here, '../datasets/images/dog.jpg')))
        caption = Text('A dog.')
        self.inputs = MultiInputs(image=dog_image, text=caption)

    def test(self):
        # A single (image, text) pair counts as one sample.
        self.assertEqual(self.inputs.num_samples(), 1)
        # Both attribute access and indexed access expose the two modalities.
        print(self.inputs.image)
        print(self.inputs.text)
        print(self.inputs[0].image)
        print(self.inputs[0].text)