# (extraction artifact: dataset table header "code stringlengths 101 5.91M | |---|" — not part of the code)
def register_all_mapillary_vistas(root):
    """Register the Mapillary Vistas train/val semantic-segmentation splits
    in the Detectron2 dataset and metadata catalogs."""
    base = os.path.join(root, 'mapillary_vistas')
    meta = _get_mapillary_vistas_meta()
    for split, split_dir in (('train', 'training'), ('val', 'validation')):
        img_root = os.path.join(base, split_dir, 'images')
        label_root = os.path.join(base, split_dir, 'labels')
        key = f'mapillary_vistas_sem_seg_{split}'
        # Bind the directories as lambda defaults so each registered split
        # keeps its own paths (late-binding closure pitfall).
        DatasetCatalog.register(key, (lambda x=img_root, y=label_root: load_sem_seg(y, x, gt_ext='png', image_ext='jpg')))
        MetadataCatalog.get(key).set(image_root=img_root, sem_seg_root=label_root, evaluator_type='sem_seg', ignore_label=65, **meta)
class MultiRPN(RPN):
    """Aggregates several DepthwiseRPN heads, one per backbone feature level.

    Submodules are registered as ``rpn2``, ``rpn3``, ... (offset by 2 to
    match the feature-level naming used in ``forward``).
    """

    def __init__(self, anchor_num, in_channels, weighted=False):
        # in_channels: one channel count per feature level; weighted: learn
        # per-level fusion weights instead of a plain average.
        super(MultiRPN, self).__init__()
        self.weighted = weighted
        for i in range(len(in_channels)):
            self.add_module(('rpn' + str((i + 2))), DepthwiseRPN(anchor_num, in_channels[i], in_channels[i]))
        if self.weighted:
            # Learnable fusion weights, normalized with softmax in forward().
            self.cls_weight = nn.Parameter(torch.ones(len(in_channels)))
            self.loc_weight = nn.Parameter(torch.ones(len(in_channels)))

    def forward(self, z_fs, x_fs):
        """Run each per-level head on its (template, search) feature pair and
        fuse the classification and localization outputs across levels."""
        cls = []
        loc = []
        for (idx, (z_f, x_f)) in enumerate(zip(z_fs, x_fs), start=2):
            rpn = getattr(self, ('rpn' + str(idx)))
            (c, l) = rpn(z_f, x_f)
            cls.append(c)
            loc.append(l)
        if self.weighted:
            cls_weight = F.softmax(self.cls_weight, 0)
            loc_weight = F.softmax(self.loc_weight, 0)

        def avg(lst):
            # Uniform average over feature levels.
            return (sum(lst) / len(lst))

        def weighted_avg(lst, weight):
            # Softmax-weighted sum over feature levels.
            s = 0
            for i in range(len(weight)):
                s += (lst[i] * weight[i])
            return s
        if self.weighted:
            return (weighted_avg(cls, cls_weight), weighted_avg(loc, loc_weight))
        else:
            return (avg(cls), avg(loc))
# NOTE(review): garbled line "_method" — likely the tail of a stripped decorator; restore from upstream.
class RubiksCube(SageObject):
    """A 3x3x3 Rubik's cube represented as an element of the cube group.

    ``state`` may be None (solved cube), a face string, or anything accepted
    by ``CubeGroup.legal``; ``history`` is the list of moves that produced
    this state; ``colors`` are the six face colors (a seventh hidden-face
    color is appended on demand by ``plot3d``).
    """

    def __init__(self, state=None, history=None, colors=None):
        # BUG FIX: 'history' and 'colors' used mutable default arguments.
        # plot3d() appends to self.colors, so the shared default list was
        # silently mutated across *all* cubes; None-sentinels plus defensive
        # copies avoid that while keeping the call signature compatible.
        if colors is None:
            colors = [lpurple, yellow, red, green, orange, blue]
        self.colors = list(colors)
        self._history = list(history) if history is not None else []
        self._group = CubeGroup()
        if state is None:
            self._state = self._group.identity()
        else:
            if isinstance(state, str):
                state = self._group.faces(state)
            if not isinstance(state, PermutationGroupElement):
                legal, state = self._group.legal(state, mode='gimme_group_element')
                if not legal:
                    raise ValueError('not a legal cube')
            self._state = state

    def move(self, g):
        """Return a new cube with move ``g`` applied and recorded."""
        if not isinstance(g, self._group.element_class):
            g = self._group.move(g)[0]
        return RubiksCube(self._state * g, self._history + [g], self.colors)

    def undo(self):
        """Return a new cube with the most recent move undone.

        Raises ValueError if there is no recorded history.
        """
        if not self._history:
            raise ValueError('no moves to undo')
        g = self._history[-1]
        return RubiksCube(self._state * ~g, self._history[:-1], self.colors)

    def _repr_(self):
        return self._group.repr2d(self._state)

    def facets(self):
        """Return the facet permutation of the current state."""
        return self._group.facets(self._state)

    def plot(self):
        """2-D plot of the cube."""
        return self._group.plot_cube(self._state)

    def show(self):
        self.plot().show()

    def cubie(self, size, gap, x, y, z, colors, stickers=True):
        """Return the 3-D graphics object for the cubie at grid position
        (x, y, z); with ``stickers`` a dark box plus colored stickers,
        otherwise a plain colored cube."""
        sides = cubie_face_list[(x, y, z)]
        t = 2 * size + gap
        my_colors = [colors[sides[i] + 6] for i in range(6)]
        if stickers:
            B = Box(size, size, size, color=(0.1, 0.1, 0.1))
            S = B + B.stickers(my_colors, size * 0.1, size * 0.01)
            return S.translate(-t * x, -t * z, -t * y)
        return ColorCube(size, my_colors).translate(-t * x, -t * z, -t * y)

    def plot3d(self, stickers=True):
        """Return a 3-D plot of the cube."""
        # Pad this instance's color list (not a shared default) to seven
        # entries: the last color is the hidden-face / core color.
        while len(self.colors) < 7:
            self.colors.append((0.1, 0.1, 0.1))
        side_colors = [Texture(color=c, ambient=0.75) for c in self.colors]
        start_colors = sum([[c] * 8 for c in side_colors], [])
        facets = self._group.facets(self._state)
        facet_colors = [0] * 48
        for i in range(48):
            facet_colors[facets[i] - 1] = start_colors[i]
        all_colors = side_colors + facet_colors
        pm = [-1, 0, 1]
        C = sum([self.cubie(0.15, 0.025, x, y, z, all_colors, stickers)
                 for x in pm for y in pm for z in pm],
                Box(0.35, 0.35, 0.35, color=self.colors[-1]))
        return C.rotateZ(1.5)

    def show3d(self):
        return self.plot3d().show()

    def __richcmp__(self, other, op):
        """Compare cubes by their group-element state."""
        if not isinstance(other, RubiksCube):
            return NotImplemented
        return richcmp(self._state, other._state, op)

    def solve(self, algorithm='hybrid', timeout=15):
        """Solve the cube using one of 'hybrid', 'kociemba', 'dietz',
        'optimal' or 'gap'; falls back to GAP when the optional external
        rubiks solvers are not installed."""
        from sage.features.rubiks import Rubiks
        if Rubiks().is_present():
            import sage.interfaces.rubik
        else:
            algorithm = 'gap'
        if algorithm == 'default':
            algorithm = 'hybrid'
        if algorithm == 'hybrid':
            # Try the fast Kociemba-style solver first, then Cubex.
            try:
                solver = sage.interfaces.rubik.DikSolver()
                return solver.solve(self.facets(), timeout=timeout)
            except RuntimeError:
                solver = sage.interfaces.rubik.CubexSolver()
                return solver.solve(self.facets())
        elif algorithm == 'kociemba':
            solver = sage.interfaces.rubik.DikSolver()
            return solver.solve(self.facets(), timeout=timeout)
        elif algorithm == 'dietz':
            solver = sage.interfaces.rubik.CubexSolver()
            return solver.solve(self.facets())
        elif algorithm == 'optimal':
            solver = sage.interfaces.rubik.OptimalSolver()
            return solver.solve(self.facets())
        elif algorithm == 'gap':
            solver = CubeGroup()
            return solver.solve(self._state, algorithm='gap')
        else:
            raise ValueError(f'Unrecognized algorithm: {algorithm}')

    def scramble(self, moves=30):
        """Return a cube scrambled by ``moves`` random moves, never moving
        the same face twice in a row."""
        last_move = move = ' '
        chosen = []  # renamed from 'all' to avoid shadowing the builtin
        for _ in range(moves):
            while move[0] == last_move[0]:
                move = 'RLUDBF'[randint(0, 5)] + " '2"[randint(0, 2)]
            last_move = move
            chosen.append(move)
        return self.move(' '.join(chosen))
def ConsonniTodeschiniI_calc(TP, FP, FN, TN):
try:
n = (((TP + FP) + FN) + TN)
return (math.log(((1 + TP) + TN)) / math.log((1 + n)))
except Exception:
return 'None' |
def load_real_images(path, N=100):
    """Load up to ``N`` ground-truth images named '{:04d}_gt.png' from
    ``path`` and stack them into a single tensor.

    Returns None (bare ``return``) as soon as an expected file is missing,
    so callers must handle the None case.
    """
    images = []
    for i in range(N):
        f = os.path.join(path, '{:04d}_gt.png'.format(i))
        if (not os.path.exists(f)):
            return
        # 'trn' is a transform defined elsewhere — presumably PIL-to-tensor; confirm.
        images.append(trn(Image.open(f)))
    return torch.stack(images)
# NOTE(review): garbled line "_properties" — likely a stripped "@make_properties"-style decorator for the class below; confirm against upstream.
class Pipeline(Pass):
    """A Pass that composes multiple passes, executing them in dependency
    order and re-applying passes whose results were invalidated by later
    modifications."""
    CATEGORY: str = 'Helper'
    passes = properties.ListProperty(element_type=Pass, default=[], category='(Debug)', desc='List of passes that this pipeline contains')

    def __init__(self, passes: List[Pass]):
        self.passes = []
        self._pass_names = set((type(p).__name__ for p in passes))
        self.passes.extend(passes)
        self._add_dependencies(passes)
        # Dependency graph is built lazily on first iteration.
        self._depgraph: Optional[gr.OrderedDiGraph[(Pass, None)]] = None
        self._modified: Modifies = Modifies.Nothing

    def _add_dependencies(self, passes: List[Pass]):
        """Recursively pull declared dependencies into ``self.passes``,
        enforcing that each Pass type appears at most once.

        Raises NameError on duplicates and TypeError on invalid dependency
        entries (anything that is neither a Pass instance nor a Pass class).
        """
        unique_pass_types = set((type(p) for p in passes))
        check_if_unique: Set[Type[Pass]] = unique_pass_types
        if (len(check_if_unique) != len(passes)):
            pass_types = [type(p) for p in passes]
            dups = set([x for x in pass_types if (pass_types.count(x) > 1)])
            raise NameError(f'Duplicate pass types found in pipeline. Please use unique Pass type objects within one Pipeline. Duplicates: {dups}')
        passes_to_check = passes
        # Breadth-first expansion of dependencies until no new passes appear.
        while (len(passes_to_check) > 0):
            new_passes = []
            for p in passes_to_check:
                deps = p.depends_on()
                for dep in deps:
                    if isinstance(dep, Pass):
                        # Pass *objects* as dependencies must be unique.
                        if (type(dep) in check_if_unique):
                            raise NameError(f'Duplicate dependency passes given: "{type(dep).__name__}" is a Pass object dependency that is already a dependency of a pass or used directly in the pipeline. Please use a class instead of an object in the `depends_on` method.')
                        check_if_unique.add(type(dep))
                        self.passes.append(dep)
                        new_passes.append(dep)
                    elif isinstance(dep, type):
                        # Pass *classes* are instantiated once, on demand.
                        if (dep not in check_if_unique):
                            check_if_unique.add(dep)
                            dep_obj = dep()
                            self.passes.append(dep_obj)
                            new_passes.append(dep_obj)
                    else:
                        raise TypeError(f'Invalid pass type {type(dep).__name__} given to pipeline')
            passes_to_check = new_passes

    def modifies(self) -> Modifies:
        """Union of the modification flags of all contained passes."""
        result = Modifies.Nothing
        for p in self.passes:
            result |= p.modifies()
        return result

    def should_reapply(self, modified: Modifies) -> bool:
        """True if any contained pass wants to rerun after ``modified``."""
        return any((p.should_reapply(modified) for p in self.passes))

    def depends_on(self) -> Set[Type[Pass]]:
        """Union of the dependencies of all contained passes."""
        result = set()
        for p in self.passes:
            result.update(p.depends_on())
        return result

    def _make_dependency_graph(self) -> gr.OrderedDiGraph:
        """Build a digraph with an edge dep -> pass for every dependency."""
        result = gr.OrderedDiGraph()
        ptype_to_pass = {type(p): p for p in self.passes}
        for p in self.passes:
            if (p not in result._nodes):
                result.add_node(p)
            for dep in p.depends_on():
                # Class dependencies are resolved to their single instance.
                if isinstance(dep, type):
                    dep = ptype_to_pass[dep]
                result.add_edge(dep, p)
        return result

    def iterate_over_passes(self, sdfg: SDFG) -> Iterator[Pass]:
        """Yield passes in topological dependency order, re-yielding
        dependencies invalidated by intervening modifications.

        The caller must set ``self._modified`` after applying each yielded
        pass (``apply_pass`` does this).
        """
        if (self._depgraph is None):
            self._depgraph = self._make_dependency_graph()
        # Per applied pass: what has been modified since it last ran.
        applied_passes: Dict[(Pass, Modifies)] = {}

        def reapply_recursive(p: Pass):
            # Skip passes that already ran and are unaffected by later changes.
            if ((p in applied_passes) and (not p.should_reapply(applied_passes[p]))):
                return
            for dep in self._depgraph.predecessors(p):
                (yield from reapply_recursive(dep))
            (yield p)
        for p in sdutil.dfs_topological_sort(self._depgraph):
            p: Pass
            for pass_to_apply in reapply_recursive(p):
                self._modified = Modifies.Nothing
                (yield pass_to_apply)
                if (self._modified != Modifies.Nothing):
                    # Accumulate new modifications onto all previously applied passes.
                    for old_pass in applied_passes.keys():
                        applied_passes[old_pass] |= self._modified
                applied_passes[pass_to_apply] = Modifies.Nothing

    def apply_subpass(self, sdfg: SDFG, p: Pass, state: Dict[(str, Any)]) -> Optional[Any]:
        """Apply a single contained pass (subclass hook)."""
        return p.apply_pass(sdfg, state)

    def apply_pass(self, sdfg: SDFG, pipeline_results: Dict[(str, Any)]) -> Optional[Dict[(str, Any)]]:
        """Apply all passes in order; returns {pass name: result} for passes
        that produced a result, or None if none did. Results are also shared
        with subsequent passes via ``pipeline_results`` (aliased as state).
        """
        state = pipeline_results
        retval = {}
        self._modified = Modifies.Nothing
        for p in self.iterate_over_passes(sdfg):
            r = self.apply_subpass(sdfg, p, state)
            if (r is not None):
                state[type(p).__name__] = r
                retval[type(p).__name__] = r
            # Record what this pass modified for the re-application logic.
            self._modified = p.modifies()
        if retval:
            return retval
        return None

    def to_json(self, parent=None) -> Dict[(str, Any)]:
        """Serialize the pipeline (type, category, and all properties)."""
        props = serialize.all_properties_to_json(self)
        return {'type': 'Pipeline', 'transformation': type(self).__name__, 'CATEGORY': type(self).CATEGORY, **props}
class Command(Node):
    """AST node describing one external command invocation: its argv and
    its stream redirections."""

    class PIPE(object):
        """Marker class used as a redirection sentinel value."""
        pass

    class STDOUT(object):
        """Marker class used as a redirection sentinel value."""
        pass

    def __init__(self, name):
        super(Command, self).__init__()
        self.name = name
        self.argv = [name]  # argv[0] is the command name itself
        self.stdin = None
        self.stdout = None
        self.stderr = None
        self.env_vars = None

    def __repr__(self):
        return (f'Command({self.name!r}, argv={self.argv!r}, '
                f'stdin={self.stdin!r}, stdout={self.stdout!r}, stderr={self.stderr!r}')[:-1] + ')'
class LegalDataset(Dataset):
    """Dataset over pre-tokenized encodings, yielding input-id tensors."""

    def __init__(self, text):
        # 'text' is expected to support len() and .iloc indexing
        # (pandas-style) — confirm against the caller.
        self.encodings = text

    def __len__(self):
        return len(self.encodings)

    def __getitem__(self, index):
        return {'input_ids': torch.tensor(self.encodings.iloc[index])}
class Classification_data(Dataset):
    """Dataset wrapping feature rows with optional labels; yields bare
    features in unlabeled mode (``label is None``)."""

    def __init__(self, data, label=None):
        super(Classification_data, self).__init__()
        self.data = data
        self.label = label

    def __getitem__(self, index):
        sample = self.data[index]
        if self.label is None:
            return sample
        return (sample, self.label[index])

    def __len__(self):
        return len(self.data)
def _return_counts(input, sorted=True, return_inverse=False, return_counts=False, dim=None):
    """torch.unique helper that returns only (output, counts).

    Follows torch's internal overload-dispatch pattern: outside TorchScript,
    tensor-likes implementing __torch_function__ are routed straight through
    _unique_impl; otherwise the inverse-indices element is discarded.
    """
    if (not torch.jit.is_scripting()):
        if ((type(input) is not Tensor) and has_torch_function((input,))):
            return _unique_impl(input, sorted, return_inverse, return_counts, dim)
    (output, _, counts) = _unique_impl(input, sorted, return_inverse, return_counts, dim)
    return (output, counts)
def _expand_dollars(m):
match = m.group(1)
parts = match.split('.')
if (len(parts) > 2):
return (match + ' dollars')
dollars = (int(parts[0]) if parts[0] else 0)
cents = (int(parts[1]) if ((len(parts) > 1) and parts[1]) else 0)
if (dollars and cents):
dollar_unit = ('dollar' if (dollars == 1) else 'dollars')
cent_unit = ('cent' if (cents == 1) else 'cents')
return ('%s %s, %s %s' % (dollars, dollar_unit, cents, cent_unit))
elif dollars:
dollar_unit = ('dollar' if (dollars == 1) else 'dollars')
return ('%s %s' % (dollars, dollar_unit))
elif cents:
cent_unit = ('cent' if (cents == 1) else 'cents')
return ('%s %s' % (cents, cent_unit))
else:
return 'zero dollars' |
def test_vectorizer():
    """End-to-end check of CountVectorizer / TfidfTransformer /
    TfidfVectorizer on the toy food-document corpus."""
    train_data = iter(ALL_FOOD_DOCS[:-1])
    test_data = [ALL_FOOD_DOCS[-1]]
    n_train = len(ALL_FOOD_DOCS) - 1
    # Fit without a fixed vocabulary.
    v1 = CountVectorizer(max_df=0.5)
    counts_train = v1.fit_transform(train_data)
    if hasattr(counts_train, 'tocsr'):
        counts_train = counts_train.tocsr()
    assert counts_train[0, v1.vocabulary_['pizza']] == 2
    # Second vectorizer sharing the learned vocabulary must behave the same.
    v2 = CountVectorizer(vocabulary=v1.vocabulary_)
    for v in (v1, v2):
        counts_test = v.transform(test_data)
        if hasattr(counts_test, 'tocsr'):
            counts_test = counts_test.tocsr()
        vocabulary = v.vocabulary_
        assert counts_test[0, vocabulary['salad']] == 1
        assert counts_test[0, vocabulary['tomato']] == 1
        assert counts_test[0, vocabulary['water']] == 1
        # Stop words / rare words must be excluded from the vocabulary.
        assert 'the' not in vocabulary
        assert 'copyright' not in vocabulary
        # Words absent from the test document count zero.
        assert counts_test[0, vocabulary['coke']] == 0
        assert counts_test[0, vocabulary['burger']] == 0
        assert counts_test[0, vocabulary['beer']] == 0
        assert counts_test[0, vocabulary['pizza']] == 0
    # TF-IDF with idf enabled.
    t1 = TfidfTransformer(norm='l1')
    tfidf = t1.fit(counts_train).transform(counts_train).toarray()
    assert len(t1.idf_) == len(v1.vocabulary_)
    assert tfidf.shape == (n_train, len(v1.vocabulary_))
    tfidf_test = t1.transform(counts_test).toarray()
    assert tfidf_test.shape == (len(test_data), len(v1.vocabulary_))
    # TF only (use_idf=False) must not expose idf_.
    t2 = TfidfTransformer(norm='l1', use_idf=False)
    tf = t2.fit(counts_train).transform(counts_train).toarray()
    assert not hasattr(t2, 'idf_')
    # Transforming before fit must raise.
    t3 = TfidfTransformer(use_idf=True)
    with pytest.raises(ValueError):
        t3.transform(counts_train)
    # L1 normalization: each row of tf sums to one.
    assert_array_almost_equal(np.sum(tf, axis=1), [1.0] * n_train)
    # TfidfVectorizer must reproduce CountVectorizer + TfidfTransformer.
    train_data = iter(ALL_FOOD_DOCS[:-1])
    tv = TfidfVectorizer(norm='l1')
    tv.max_df = v1.max_df
    tfidf2 = tv.fit_transform(train_data).toarray()
    assert not tv.fixed_vocabulary_
    assert_array_almost_equal(tfidf, tfidf2)
    tfidf_test2 = tv.transform(test_data).toarray()
    assert_array_almost_equal(tfidf_test, tfidf_test2)
    # Transforming with an unfitted vectorizer must raise.
    v3 = CountVectorizer(vocabulary=None)
    with pytest.raises(ValueError):
        v3.transform(train_data)
    # ASCII accent stripping in the preprocessor.
    v3.set_params(strip_accents='ascii', lowercase=False)
    processor = v3.build_preprocessor()
    text = "J'ai mange du kangourou ce midi, c'etait pas tres bon."
    expected = strip_accents_ascii(text)
    result = processor(text)
    assert expected == result
    # Invalid strip_accents value must raise when building the preprocessor.
    v3.set_params(strip_accents='_gabbledegook_', preprocessor=None)
    with pytest.raises(ValueError):
        v3.build_preprocessor()
    # BUG FIX: the original did `v3.set_params = '_invalid_analyzer_type_'`,
    # overwriting the bound method instead of calling it — build_analyzer()
    # would then never see the invalid analyzer and never raise.
    v3.set_params(analyzer='_invalid_analyzer_type_')
    with pytest.raises(ValueError):
        v3.build_analyzer()
class CrossEvalQueueConf(BaseQueueConf):
    """Structured config for the cross-evaluation launcher queue; Hydra
    instantiates the class named by ``_target_``."""
    _target_: str = 'hydra_plugins.hydra_drill_launcher.drill_launcher.CrossEvalLauncher'
class HistologyShardDescriptor(ShardDescriptor):
    """Shard descriptor for the Kather 2016 colorectal histology dataset:
    downloads, hash-verifies and extracts the archive, then exposes
    rank/worldsize-sharded train/validation datasets."""
    # BUG FIX: the URL literal was truncated to "URL = '" (a syntax error);
    # restored from the dataset's Zenodo record.
    URL = 'https://zenodo.org/record/53169/files/Kather_texture_2016_image_tiles_5000.zip'
    FILENAME = 'Kather_texture_2016_image_tiles_5000.zip'
    ZIP_SHA384 = '7d86abe1d04e68b77c055820c2a4c582a1d25d2983e38ab724eac75affce8b7cb2cbf5ba68848dcfd9d84005d87d6790'
    DEFAULT_PATH = ((Path.home() / '.openfl') / 'data')

    def __init__(self, data_folder: Path = DEFAULT_PATH, rank_worldsize: str = '1,1', **kwargs):
        # rank_worldsize: 'rank,worldsize' string, e.g. '1,2'.
        self.data_folder = (Path.cwd() / data_folder)
        self.download_data()
        (self.rank, self.worldsize) = tuple((int(num) for num in rank_worldsize.split(',')))

    def download_data(self):
        """Download the archive if missing, verify its SHA-384, extract it."""
        os.makedirs(self.data_folder, exist_ok=True)
        filepath = (self.data_folder / HistologyShardDescriptor.FILENAME)
        if (not filepath.exists()):
            reporthook = tqdm_report_hook()
            urlretrieve(HistologyShardDescriptor.URL, filepath, reporthook)  # nosec: pinned URL + hash check below
            validate_file_hash(filepath, HistologyShardDescriptor.ZIP_SHA384)
            with ZipFile(filepath, 'r') as f:
                f.extractall(self.data_folder)

    def get_dataset(self, dataset_type):
        """Return the shard dataset for 'train'/'val'-style ``dataset_type``."""
        return HistologyShardDataset(data_folder=self.data_folder, data_type=dataset_type, rank=self.rank, worldsize=self.worldsize)

    # NOTE(review): in the upstream ShardDescriptor API these three are
    # @property accessors; decorators appear stripped here. Kept as plain
    # methods to preserve the visible call interface — confirm upstream.
    def sample_shape(self):
        """Shape of one sample image, as a list of strings."""
        shape = self.get_dataset('train')[0][0].size
        return [str(dim) for dim in shape]

    def target_shape(self):
        """Shape of one target label, as a list of strings."""
        target = self.get_dataset('train')[0][1]
        shape = np.array([target]).shape
        return [str(dim) for dim in shape]

    def dataset_description(self) -> str:
        return f'Histology dataset, shard number {self.rank} out of {self.worldsize}'
# NOTE(review): garbled line "_class" — likely a stripped decorator (e.g. "@persistence.persistent_class"); confirm against upstream.
class EDMLoss():
    """EDM-style denoising loss: per-image log-normal noise levels with the
    corresponding sigma-dependent weighting."""

    def __init__(self, P_mean=(-1.2), P_std=1.2, sigma_data=0.5):
        self.P_mean = P_mean
        self.P_std = P_std
        self.sigma_data = sigma_data

    def __call__(self, net, images, labels=None, augment_pipe=None):
        # One noise level per image: sigma = exp(N(P_mean, P_std^2)).
        log_sigma = torch.randn([images.shape[0], 1, 1, 1], device=images.device)
        sigma = (log_sigma * self.P_std + self.P_mean).exp()
        weight = (sigma ** 2 + self.sigma_data ** 2) / ((sigma * self.sigma_data) ** 2)
        # Optional augmentation pipeline returns (augmented images, labels).
        if augment_pipe is not None:
            y, augment_labels = augment_pipe(images)
        else:
            y, augment_labels = images, None
        noise = torch.randn_like(y) * sigma
        denoised = net(y + noise, sigma, labels, augment_labels=augment_labels)
        return weight * ((denoised - y) ** 2)
class SquadReader(BaseReader):
    """Reads SQuAD-format JSON into per-question training instances."""

    def __init__(self, fine_grained=False):
        self.tokenizer = SpacyTokenizer(fine_grained)

    def read(self, file_path):
        """Materialize all instances from ``file_path`` (with a progress bar)."""
        logging.info('Reading file at %s', file_path)
        logging.info('Processing the dataset.')
        instances = self._read(file_path)
        instances = [instance for instance in tqdm(instances)]
        return instances

    def _read(self, file_path, context_limit=(- 1)):
        """Yield one instance per question.

        With a positive ``context_limit``, questions whose answer lies past
        the limit are skipped and remaining long contexts are truncated.
        """
        with open(file_path) as dataset_file:
            dataset_json = json.load(dataset_file)
            dataset = dataset_json['data']
        for article in dataset:
            for paragraph in article['paragraphs']:
                context = paragraph['context']
                (context_tokens, context_token_spans) = self.tokenizer.word_tokenizer(context)
                for question_answer in paragraph['qas']:
                    question = question_answer['question'].strip()
                    (question_tokens, _) = self.tokenizer.word_tokenizer(question)
                    # Answers may be absent (e.g. hidden test sets).
                    (answers, span_starts, span_ends) = ([], [], [])
                    if ('answers' in question_answer):
                        answers = [answer['text'] for answer in question_answer['answers']]
                        span_starts = [answer['answer_start'] for answer in question_answer['answers']]
                        span_ends = [(start + len(answer)) for (start, answer) in zip(span_starts, answers)]
                    answer_char_spans = (zip(span_starts, span_ends) if ((len(span_starts) > 0) and (len(span_ends) > 0)) else None)
                    answers = (answers if (len(answers) > 0) else None)
                    qid = question_answer['id']
                    instance = self._make_instance(context, context_tokens, context_token_spans, question, question_tokens, answer_char_spans, answers, qid)
                    if ((len(instance['context_tokens']) > context_limit) and (context_limit > 0)):
                        if ((instance['answer_start'] > context_limit) or (instance['answer_end'] > context_limit)):
                            continue
                        else:
                            instance['context_tokens'] = instance['context_tokens'][:context_limit]
                    (yield instance)

    def _make_instance(self, context, context_tokens, context_token_spans, question, question_tokens, answer_char_spans=None, answers=None, qid=None):
        """Map character-level answer spans to token indices and bundle all
        instance fields into an OrderedDict."""
        (answer_token_starts, answer_token_ends) = ([], [])
        if (answers is not None):
            for (answer_char_start, answer_char_end) in answer_char_spans:
                answer_token_span = []
                for (idx, span) in enumerate(context_token_spans):
                    # Token overlaps the answer unless it ends before the
                    # answer starts or starts after the answer ends.
                    if (not ((answer_char_end <= span[0]) or (answer_char_start >= span[1]))):
                        answer_token_span.append(idx)
                # NOTE(review): assumes the tokenizer covers every answer span.
                assert (len(answer_token_span) > 0)
                answer_token_starts.append(answer_token_span[0])
                answer_token_ends.append(answer_token_span[(- 1)])
        # Only the first answer is used for the start/end supervision.
        return OrderedDict({'context': context, 'context_tokens': context_tokens, 'context_token_spans': context_token_spans, 'context_word_len': [len(word) for word in context_tokens], 'question_word_len': [len(word) for word in question_tokens], 'question': question, 'qid': qid, 'question_tokens': question_tokens, 'answer': (answers[0] if (answers is not None) else None), 'answer_start': (answer_token_starts[0] if (answers is not None) else None), 'answer_end': (answer_token_ends[0] if (answers is not None) else None)})
class BarChart(GraphicPrimitive):
    """Graphics primitive drawing a bar chart of ``datalist`` at the
    x-positions ``ind``."""

    def __init__(self, ind, datalist, options):
        self.datalist = datalist
        self.ind = ind
        GraphicPrimitive.__init__(self, options)

    def get_minmax_data(self):
        # Bounding box: x spans the bar index range, y spans the data values.
        return minmax_data([0, len(self.datalist)], self.datalist, dict=True)

    def _allowed_options(self):
        """Options this primitive accepts, mapped to their descriptions."""
        return {'rgbcolor': 'The color as an RGB tuple.', 'hue': 'The color given as a hue.', 'legend_label': 'The label for this item in the legend.', 'width': 'The width of the bars', 'zorder': 'The layer level in which to draw'}

    def _repr_(self):
        return ('BarChart defined by a %s datalist' % len(self.datalist))

    def _render_on_subplot(self, subplot):
        """Draw the bars onto a matplotlib subplot."""
        options = self.options()
        color = options['rgbcolor']
        width = float(options['width'])
        import numpy
        ind = numpy.array(self.ind, dtype=float)
        datalist = numpy.array(self.datalist, dtype=float)
        subplot.bar(ind, datalist, color=color, width=width, label=options['legend_label'])
def test_enc_dec_model_seq_at_a_time(test_dl, model, scaler, output_sequence_length):
    """Autoregressively decode one step at a time on CUDA and return
    (inputs, ground truth, predictions), inverse-transformed to the
    original scale.

    NOTE(review): requires a CUDA device. ``outputs`` is a clone of x with
    no extra slots, so step i *overwrites* position i of the input copy
    rather than appending — confirm this decoding scheme is intended.
    """
    x_input = []
    truth = []
    predicted = []
    with torch.no_grad():
        model.eval()
        step = 0
        for (x, y, mask) in test_dl:
            x = x.to('cuda')
            y = y.unsqueeze((- 1)).to('cuda')
            tgt_mask = mask.to('cuda')
            memory = model.encoder(model.positional_encoding(x))
            outputs = torch.clone(x)
            for i in range(1, (output_sequence_length + 1), 1):
                # NOTE(review): a 1x1 triu mask is always a single 0.0 entry,
                # so this recomputed tgt_mask never masks anything — confirm.
                mask = (torch.triu(torch.ones(1, 1)) == 1).transpose(0, 1)
                mask = mask.float().masked_fill((mask == 0), float('-inf')).masked_fill((mask == 1), float(0.0))
                tgt_mask = mask.to('cuda')
                if (i == 1):
                    dec_in = outputs[:i].unsqueeze((- 1))
                else:
                    dec_in = outputs[i:(i + 1)]
                    dec_in = dec_in.unsqueeze((- 1)).to('cuda')
                out = model.out(model.decoder(model.positional_encoding(dec_in), memory, tgt_mask))
                outputs[i] = out.view((- 1)).squeeze((- 1))
            x = x.to('cpu')
            y = y.to('cpu')
            out = out.to('cpu')
            # Only the first element of each batch is collected for reporting.
            x_input.append(scaler.inverse_transform(np.reshape(np.array(x[0].view((- 1)).numpy()), (x.shape[1], 1))))
            truth.append(scaler.inverse_transform(np.reshape(np.array(y[0].view((- 1)).numpy()), (y.shape[1], 1))))
            predicted.append(scaler.inverse_transform(np.reshape(np.array(out[0].view((- 1)).numpy()), (out.shape[1], 1))))
    return (x_input, truth, predicted)
class GeneratorHyperParameters():
    """Bundle of hyper-parameters for the generator network and its training."""

    def __init__(self):
        # --- activation / normalization ---
        self.leaky_relu_coeff = 0.05
        self.with_batchnorm = True
        self.batchnorm_decay = 0.98
        # --- input noise ---
        self.input_noise_size = 300
        self.input_noise_bound = 1
        # --- layer widths ---
        self.e_layer_sizes = [300, 300]
        # All four latent codes share the same width.
        self.code1_size = self.code2_size = self.code3_size = self.code4_size = 15
        self.w1_layer_sizes = [40, 40]
        self.w2_layer_sizes = [100, 100]
        self.w3_layer_sizes = [100, 100]
        self.w4_layer_sizes = [60, 60]
        # --- numerics ---
        self.fix_gauge = True
        self.zero_fixer = 1e-08
        self.initialization_std = 0.1
        # --- batching / optimization ---
        self.noise_batch_size = 32
        self.images_batch_size = 32
        self.noise_batch_size_for_validation = 200
        self.learning_rate = 0.0003
        self.learning_rate_rate = 0.99998
        self.lamBda = 1000.0
        self.lambda_rate = 1.0
def url_unescape(data):
    """Replace %XX hex escapes in ``data`` with their characters.

    BUG FIX: used Python 2's ``unichr``, which raises NameError on
    Python 3; replaced with ``chr``. (The stdlib ``urllib.parse.unquote``
    handles the full quoting scheme, including multi-byte sequences, if
    broader behavior is ever needed.)
    """
    return re.sub('%([0-9a-fA-F]{2})', (lambda m: chr(int(m.group(1), 16))), data)
def test_offset_not_none():
    """Grayscale reconstruction by dilation with an explicit footprint
    offset of 0 (left-anchored 1-D footprint of ones)."""
    seed = np.array([0, 3, 6, 2, 1, 1, 1, 4, 2, 0])
    mask = np.array([0, 8, 6, 8, 8, 8, 8, 4, 4, 0])
    expected = np.array([0, 3, 6, 6, 6, 6, 6, 4, 4, 0])
    assert_array_almost_equal(reconstruction(seed, mask, method='dilation', footprint=np.ones(3), offset=np.array([0])), expected)
def save_module_to_file(module: ast.Module, target: Path, format_with_black: bool=True) -> None:
    """Unparse ``module`` and write it to ``target`` (creating parent
    directories), prefixed with the Pynguin file header; optionally
    formatted with black."""
    target.parent.mkdir(parents=True, exist_ok=True)
    with target.open(mode='w', encoding='UTF-8') as file:
        file.write(_PYNGUIN_FILE_HEADER)
        output = ast.unparse(ast.fix_missing_locations(module))
        if format_with_black:
            # Local import: black is an optional formatting dependency.
            import black
            output = black.format_str(output, mode=black.FileMode())
        file.write(output)
def evaluate_conll(conll_scorer, gold_path, predictions, subtoken_maps, prediction_path, all_metrics=False, official_stdout=False):
    """Write predictions in CoNLL format alongside the gold file and run the
    official scorer for the muc/bcub/ceafe coreference metrics.

    Returns a dict mapping metric name to the scorer's result. The scorer
    is invoked with file *names* (``.name`` remains valid after the files
    are closed and flushed).
    """
    with open(prediction_path, 'w') as prediction_file:
        with open(gold_path, 'r') as gold_file:
            output_conll(gold_file, prediction_file, predictions, subtoken_maps)
    result = {metric: official_conll_eval(conll_scorer, gold_file.name, prediction_file.name, metric, official_stdout) for metric in ('muc', 'bcub', 'ceafe')}
    return result
def _is_punctuation(char):
cp = ord(char)
if (((cp >= 33) and (cp <= 47)) or ((cp >= 58) and (cp <= 64)) or ((cp >= 91) and (cp <= 96)) or ((cp >= 123) and (cp <= 126))):
return True
cat = unicodedata.category(char)
if cat.startswith('P'):
return True
return False |
class TransformerBlocks(nn.Module):
    """Stack of ``nlayers`` cloned TransformerBlock modules applied in
    sequence."""

    def __init__(self, d_model=768, nlayers=3):
        super(TransformerBlocks, self).__init__()
        self.nlayers = nlayers
        block = TransformerBlock(d_model=d_model)
        # _get_clones produces independent copies (no weight sharing).
        self.h = _get_clones(block, nlayers)

    def forward(self, inp):
        for i in range(self.nlayers):
            inp = self.h[i](inp)
        # NOTE(review): collapses the final output to a scalar sum — confirm
        # callers expect the reduction rather than the full tensor.
        return inp.sum()
class InferenceHost():
    """Fans inference requests out to a pool of remote ModelWorker actors
    through a Hoplite object store, restarting workers that fail."""

    def __init__(self, object_directory_address, scale=1):
        self.store = hoplite.HopliteClient(object_directory_address)
        self.object_directory_address = object_directory_address
        # NOTE(review): 'input_shape' and 'served_models' are module-level
        # globals — confirm they are defined before instantiation.
        self.images = torch.rand(input_shape)
        self.models = []
        for _ in range(scale):
            for model_name in served_models:
                self.models.append(ModelWorker.remote(model_name, object_directory_address))
        self.request_id = 0
        # worker index -> (replacement actor handle, its poll() ref)
        self.rebooting_tasks = {}

    def __call__(self, request):
        # Publish the input batch to the object store under a fresh id.
        x = self.images.numpy()
        object_id = hoplite.object_id_from_int(self.request_id)
        buffer = hoplite.Buffer.from_buffer(x)
        self.store.put(buffer, object_id)
        self.request_id += 1
        results = []
        refs = [m.inference.remote(object_id) for m in self.models]
        event = 'ok'
        for (i, f) in enumerate(refs):
            try:
                results.append(ray.get(f))
            except Exception:
                # BUG FIX: was a bare 'except:', which also swallowed
                # KeyboardInterrupt/SystemExit.
                if i not in self.rebooting_tasks:
                    print(f'task {i} failed, restarting...')
                    event = 'fail'
                    # NOTE(review): restarted workers always load 'alexnet'
                    # regardless of the original model — confirm intended.
                    handle = ModelWorker.remote('alexnet', self.object_directory_address)
                    self.rebooting_tasks[i] = (handle, handle.poll.remote())
                else:
                    print(f'waiting failed task {i} to restart...')
        # Re-admit replacement workers whose poll() has completed.
        ready_ones = set()
        for (i, v) in self.rebooting_tasks.items():
            (rd, _) = ray.wait([v[1]], timeout=0)
            if rd:
                ready_ones.add(i)
                self.models[i] = v[0]
                print(f'failed task {i} recovered!')
                event = 'rejoin'
            else:
                print(f'failed task {i} still initializing!')
        for i in ready_ones:
            del self.rebooting_tasks[i]
        # Ensemble by summing logits across workers, then argmax per sample.
        cls = np.argmax(sum(results), 1)
        return {'id': self.request_id, 'data': cls.tolist(), 'event': event}
def ButterflyGraph():
    """Return the butterfly (bowtie) graph: two triangles sharing the
    central vertex 4, with fixed plotting positions."""
    adjacency = {0: [3, 4], 1: [2, 4], 2: [4], 3: [4]}
    positions = {0: [-1, 1], 1: [1, 1], 2: [1, -1], 3: [-1, -1], 4: [0, 0]}
    return Graph(adjacency, pos=positions, name='Butterfly graph')
class LayoutLMv3Model(metaclass=DummyObject):
    """Dummy placeholder (transformers pattern): any use raises a helpful
    error when the 'torch' backend is unavailable."""
    _backends = ['torch']

    def __init__(self, *args, **kwargs):
        requires_backends(self, ['torch'])
class QuantizedGRU(QuantizedRNNBase):
    """Quantized GRU; dispatches between packed-sequence and plain-tensor
    forward paths via TorchScript method overloads."""
    __overloads__ = {'forward': ['forward_packed', 'forward_tensor']}

    # BUG FIX: the three '@torch.jit.script_method' decorators below had been
    # mangled to bare '.script_method' lines (syntax errors); restored.
    @torch.jit.script_method
    def forward_impl(self, input, hx, batch_sizes, max_batch_size, sorted_indices):
        """Shared forward core; ``batch_sizes`` is None for the tensor path."""
        if (hx is None):
            # Default hidden state: zeros of (layers*directions, batch, hidden).
            num_directions = (2 if self.bidirectional else 1)
            hx = torch.zeros((self.num_layers * num_directions), max_batch_size, self.hidden_size, dtype=input.dtype, device=input.device)
        else:
            hx = self.permute_hidden(hx, sorted_indices)
        self.check_forward_args(input, hx, batch_sizes)
        if (batch_sizes is None):
            result = torch.quantized_gru(input, hx, self.all_weights, self.bias, self.num_layers, float(self.dropout), self.training, self.bidirectional, self.batch_first)
        else:
            result = torch.quantized_gru(input, batch_sizes, hx, self.all_weights, self.bias, self.num_layers, float(self.dropout), self.training, self.bidirectional)
        output = result[0]
        hidden = result[1]
        return (output, hidden)

    @torch.jit.script_method
    def forward_tensor(self, input, hx=None):
        """Forward path for a plain tensor input."""
        batch_sizes = None
        max_batch_size = (input.size(0) if self.batch_first else input.size(1))
        sorted_indices = None
        unsorted_indices = None
        (output, hidden) = self.forward_impl(input, hx, batch_sizes, max_batch_size, sorted_indices)
        return (output, self.permute_hidden(hidden, unsorted_indices))

    @torch.jit.script_method
    def forward_packed(self, input, hx=None):
        """Forward path for a PackedSequence input."""
        (input, batch_sizes, sorted_indices, unsorted_indices) = input
        max_batch_size = batch_sizes[0]
        max_batch_size = int(max_batch_size)
        (output, hidden) = self.forward_impl(input, hx, batch_sizes, max_batch_size, sorted_indices)
        output = PackedSequence(output, batch_sizes, sorted_indices, unsorted_indices)
        return (output, self.permute_hidden(hidden, unsorted_indices))

    def forward(self, input, hx=None):
        if isinstance(input, PackedSequence):
            return self.forward_packed(input, hx)
        else:
            return self.forward_tensor(input, hx)
class MeshRelationAccessProxy():
    """Proxy exposing the relation from one mesh element (``from_index``)
    to elements of ``to_element_type``: its size and per-entry access."""

    def __init__(self, mesh: MeshInstance, from_index: impl.Expr, to_element_type: MeshElementType):
        self.mesh = mesh
        self.from_index = from_index
        self.to_element_type = to_element_type

    def size(self):
        """Number of related elements for this source element."""
        return impl.Expr(self.mesh.get_relation_size(self.from_index, self.to_element_type))

    def subscript(self, *indices):
        """Return a field proxy for the i-th related element."""
        # Relation access takes exactly one index.
        assert (len(indices) == 1)
        entry_expr = self.mesh.get_relation_access(self.from_index, self.to_element_type, impl.Expr(indices[0]).ptr)
        entry_expr.type_check(impl.get_runtime().prog.config())
        return MeshElementFieldProxy(self.mesh, self.to_element_type, entry_expr)
def _scope_path(sdict: ScopeDictType, scope: NodeType) -> List[NodeType]:
result = []
curnode = scope
while (curnode is not None):
curnode = sdict[scope]
result.append(curnode)
return result |
class InstanceNorm2d(torch.nn.InstanceNorm2d):
    """Quantized InstanceNorm2d: carries a fixed output scale/zero_point and
    dispatches to the quantized kernel in forward()."""

    def __init__(self, num_features, weight, bias, scale, zero_point, eps=1e-05, momentum=0.1, affine=False, track_running_stats=False):
        super(InstanceNorm2d, self).__init__(num_features, eps, momentum, affine, track_running_stats)
        self.weight = weight
        self.bias = bias
        self.scale = scale
        self.zero_point = zero_point

    def forward(self, input):
        return torch.ops.quantized.instance_norm(input, self.weight, self.bias, self.eps, self.scale, self.zero_point)

    def _get_name(self):
        return 'QuantizedInstanceNorm2d'

    # BUG FIX: the '@classmethod' decorator was missing, so
    # InstanceNorm2d.from_float(mod) bound 'mod' to 'cls' and failed.
    @classmethod
    def from_float(cls, mod):
        """Build a quantized module from a float module carrying an observer."""
        activation_post_process = mod.activation_post_process
        (scale, zero_point) = mod.activation_post_process.calculate_qparams()
        # BUG FIX: mod.affine was previously passed *positionally* into the
        # 'momentum' slot, silently dropping the affine flag.
        new_mod = cls(mod.num_features, mod.weight, mod.bias, float(scale), int(zero_point), mod.eps, affine=mod.affine)
        return new_mod
class SparqlParse():
    """Container for the parsed pieces of a SPARQL query (select, prefix,
    where and query statements)."""

    def __init__(self):
        # BUG FIX: these were plain local variables, not attribute
        # assignments, so instances ended up with no attributes at all.
        self.select_stmt = None
        self.prefix_stmts = None
        self.where_stmts = None
        self.query_stmts = None
def _gross_pitch_error_frames(true_t, true_f, est_t, est_f, eps=1e-08):
    """Boolean mask of voiced frames whose relative pitch error exceeds 20%."""
    voiced_frames = _true_voiced_frames(true_t, true_f, est_t, est_f)
    # Add eps to avoid division by zero on unvoiced (0 Hz) reference frames.
    true_f_p_eps = [(x + eps) for x in true_f]
    pitch_error_frames = (np.abs(((est_f / true_f_p_eps) - 1)) > 0.2)
    return (voiced_frames & pitch_error_frames)
def compute_bits_per_dim(x, model):
    """Negative log-likelihood of ``x`` under normalizing-flow ``model``,
    in bits per dimension (log 256 accounts for 8-bit pixel quantization)."""
    zero = torch.zeros(x.shape[0], 1).to(x)
    # The flow returns the latent code and the accumulated log-det term.
    (z, delta_logp) = model(x, zero)
    logpz = standard_normal_logprob(z).view(z.shape[0], (- 1)).sum(1, keepdim=True)
    logpx = (logpz - delta_logp)
    logpx_per_dim = (torch.sum(logpx) / x.nelement())
    bits_per_dim = ((- (logpx_per_dim - np.log(256))) / np.log(2))
    return bits_per_dim
def test_luminosity_density_nu(spectrum):
    """Luminosity density per frequency must equal luminosity divided by
    the frequency-bin widths."""
    expected = (spectrum.luminosity / np.diff(spectrum._frequency))
    test_helper.assert_quantity_allclose(spectrum.luminosity_density_nu, expected)
class EpicFHIRManageAppointments(VirtualFunctionTool):
    """Declarative tool spec (virtual FHIR appointment CRUD): name, summary,
    parameter/return schemas, and the exceptions the tool may raise."""
    name = 'EpicFHIRManageAppointments'
    summary = 'List, access, create, update, and delete patient appointments.'
    parameters: List[ArgParameter] = [{'name': 'patient_id', 'type': 'string', 'description': 'The unique identifier of the patient. The identifier should be a string of alphanumeric characters.', 'required': True}, {'name': 'action', 'type': 'string', 'description': "The action to perform on the appointment. The value should be one of ['list', 'create', 'update', 'delete'].", 'required': True}, {'name': 'appointment_id', 'type': 'string', 'description': 'The unique identifier of the appointment, required for update and delete actions.', 'required': False}, {'name': 'appointment_data', 'type': 'object', 'description': "The appointment data, required for create and update actions. The object includes fields such as 'date', 'time', 'location', and 'doctor_id'.", 'required': False}, {'name': 'max_results', 'type': 'integer', 'description': 'The maximum number of results to return for the list action, default is 10.', 'required': False}]
    returns: List[ArgReturn] = [{'name': 'success', 'type': 'boolean', 'description': 'Whether the operation was successful.'}, {'name': 'appointments', 'type': 'array', 'description': "An array of objects each containing the 'appointment_id' and 'appointment_data' (including fields such as 'date', 'time', 'location', 'doctor_id', and 'doctor_name'). Returned the created appointment for the create action and listed appointments for the list action, otherwise empty."}]
    exceptions: List[ArgException] = [{'name': 'InvalidRequestException', 'description': "The 'patient_id' does not exist or the 'action' is not one of ['list', 'create', 'update', 'delete']."}, {'name': 'NotFoundException', 'description': "The 'appointment_id' does not exist for update or delete actions."}]
class vJoy(object):
    """Thin ctypes wrapper around the vJoy virtual-joystick DLL."""

    def __init__(self, reference=1):
        # 'reference' is the vJoy device id this wrapper drives.
        self.handle = None
        self.dll = ctypes.CDLL(CONST_DLL_VJOY)
        self.reference = reference
        self.acquired = False

    def open(self):
        """Acquire the device; returns True on success."""
        if not self.dll.AcquireVJD(self.reference):
            return False
        self.acquired = True
        return True

    def close(self):
        """Release the device; returns True on success."""
        if not self.dll.RelinquishVJD(self.reference):
            return False
        self.acquired = False
        return True

    def generateJoystickPosition(self, wThrottle=0, wRudder=0, wAileron=0, wAxisX=16393, wAxisY=16393, wAxisZ=0, wAxisXRot=16393, wAxisYRot=16393, wAxisZRot=0, wSlider=0, wDial=0, wWheel=0, wAxisVX=0, wAxisVY=0, wAxisVZ=0, wAxisVBRX=0, wAxisVBRY=0, wAxisVBRZ=0, lButtons=0, bHats=0, bHatsEx1=0, bHatsEx2=0, bHatsEx3=0):
        """Pack a JOYSTICK_POSITION struct for UpdateVJD (device id first)."""
        fields = (self.reference, wThrottle, wRudder, wAileron, wAxisX, wAxisY,
                  wAxisZ, wAxisXRot, wAxisYRot, wAxisZRot, wSlider, wDial,
                  wWheel, wAxisVX, wAxisVY, wAxisVZ, wAxisVBRX, wAxisVBRY,
                  wAxisVBRZ, lButtons, bHats, bHatsEx1, bHatsEx2, bHatsEx3)
        return struct.pack('BlllllllllllllllllllIIII', *fields)

    def update(self, joystickPosition):
        """Send a packed position to the device; returns True on success."""
        return bool(self.dll.UpdateVJD(self.reference, joystickPosition))

    def sendButtons(self, bState):
        """Set the full 32-bit button bitmask in one update."""
        return self.update(self.generateJoystickPosition(lButtons=bState))

    def setButton(self, index, state):
        """Set a single button; returns True on success."""
        return bool(self.dll.SetBtn(state, self.reference, index))
# NOTE(review): the decorator below had been garbled to a bare ``.parametrize(...)``
# line (a syntax error); restored to the standard pytest parametrize form.
@pytest.mark.parametrize('a, feat_idxs, expected', [(B, [0], []), (B, [0, 1], [[0, 1, 0, 1, 1, 0]]), (B, [0, 1, 2, 3, 4], [[0, 1, 0, 1, 1, 0]]), (B, [0, 1, 2, 3, 4, 5], [[0, 1, 0, 1, 1, 0], [1, 0, 0, 1, 0, 1]])])
def test_expand_collection_unset(a, feat_idxs, expected):
    """Expanding fixture ``B`` over increasing feature-index sets yields the expected children."""
    children = expand_collection_unset(a, feat_idxs)
    assert np.array_equal(np.array(children), np.array(expected))
def validate(model, data_loader):
    """Evaluate multilabel classification loss over ``data_loader``.

    Each batch is the concatenation of the original and augmented images
    (and labels); the averaged loss is printed. The model is restored to
    train mode before returning. Returns None.
    """
    print('validating ... ', flush=True, end='')
    meter = pyutils.AverageMeter('loss1', 'loss2')
    model.eval()
    with torch.no_grad():
        for batch in data_loader:
            # Stack plain and augmented views into a single forward pass.
            images = torch.cat([batch['img'], batch['aug_img']], 0)
            labels = torch.cat([batch['label'].cuda(non_blocking=True),
                                batch['aug_label'].cuda(non_blocking=True)], 0)
            logits = model(images)
            loss = F.multilabel_soft_margin_loss(logits, labels)
            meter.add({'loss1': loss.item()})
    model.train()
    print('loss: %.4f' % meter.pop('loss1'))
    return
def bw_dynamic_rnn(cell, inputs, sequence_length=None, initial_state=None, dtype=None, parallel_iterations=None, swap_memory=False, time_major=False, scope=None):
    """Run ``cell`` backwards over ``inputs`` (batch-major) and return the
    outputs in original time order, plus the final state.

    The input is reversed along the time axis, fed to ``tf.nn.dynamic_rnn``,
    and the outputs are reversed back, so position t of the result is the
    backward-RNN output for position t of the input.
    """
    # Only batch-major input is supported.
    assert (not time_major)
    # flatten/reconstruct are helpers defined elsewhere in this file;
    # presumably they collapse leading dims to rank-3 for dynamic_rnn and
    # restore them afterwards — TODO confirm against their definitions.
    flat_inputs = flatten(inputs, 2)
    flat_len = (None if (sequence_length is None) else tf.cast(flatten(sequence_length, 0), 'int64'))
    # Reverse along time (axis 1): a plain reverse when no lengths are given,
    # otherwise a per-sequence reverse so padding stays in place.
    flat_inputs = (tf.reverse(flat_inputs, 1) if (sequence_length is None) else tf.reverse_sequence(flat_inputs, sequence_length, 1))
    (flat_outputs, final_state) = tf.nn.dynamic_rnn(cell, flat_inputs, sequence_length=flat_len, initial_state=initial_state, dtype=dtype, parallel_iterations=parallel_iterations, swap_memory=swap_memory, time_major=time_major, scope=scope)
    # Undo the time reversal so outputs align with the original order.
    flat_outputs = (tf.reverse(flat_outputs, 1) if (sequence_length is None) else tf.reverse_sequence(flat_outputs, sequence_length, 1))
    outputs = reconstruct(flat_outputs, inputs, 2)
    return (outputs, final_state)
def add_rulebased_arguments(parser):
    """Register command-line options used by the rule-based agent."""
    for flag, help_text in (('--templates', 'Path to templates (.pkl)'),
                            ('--policy', 'Path to manager model (.pkl)')):
        parser.add_argument(flag, help=help_text)
class ActivationsTestModel(torch.nn.Module):
    """Tiny model for exercising activation quantization: quant -> Hardswish -> ELU -> dequant."""

    def __init__(self):
        super().__init__()
        # fbgemm (x86 server) qconfig; picked up by prepare/convert workflows.
        self.qconfig = torch.quantization.get_default_qconfig('fbgemm')
        self.quant = torch.quantization.QuantStub()
        self.hardswish = torch.nn.Hardswish().to(dtype=torch.float)
        self.elu = torch.nn.ELU().to(dtype=torch.float)
        self.dequant = torch.quantization.DeQuantStub()

    def forward(self, x):
        # Apply the pipeline stages in order; the stubs are identity
        # until the model is converted to a quantized one.
        for stage in (self.quant, self.hardswish, self.elu, self.dequant):
            x = stage(x)
        return x
class DirichletCharacter(MultiplicativeGroupElement):
    """A Dirichlet character, i.e. an element of a ``DirichletGroup``.

    The character is stored in one (or both) of two cached forms:

    - ``element()``: the vector of exponents on the unit-group generators,
      modulo the parent's zeta order;
    - ``values_on_gens()``: the tuple of base-ring values taken on those
      generators.

    NOTE(review): the ``@cached_method`` decorators below had been garbled
    in this file to bare ``_method`` lines; they are restored here — the
    code relies on the cached_method API (``set_cache``, ``is_in_cache``)
    throughout, e.g. in ``__init__``, ``__call__`` and ``__setstate__``.
    """

    def __init__(self, parent, x, check=True):
        """Create the character from ``x``: either an exponent vector
        (a free-module element) or a sequence of values on the generators.
        With ``check=True`` the length and orders of ``x`` are validated.
        """
        MultiplicativeGroupElement.__init__(self, parent)
        if check:
            orders = parent.integers_mod().unit_group().gens_orders()
            if (len(x) != len(orders)):
                raise ValueError('wrong number of values (= {}) on generators (want {})'.format(x, len(orders)))
            if free_module_element.is_FreeModuleElement(x):
                x = parent._module(x)
                if any(((u * v) for (u, v) in zip(x, orders))):
                    raise ValueError('values (= {} modulo {}) must have additive orders dividing {}, respectively'.format(x, parent.zeta_order(), orders))
                self.element.set_cache(x)
            else:
                R = parent.base_ring()
                x = tuple(map(R, x))
                if (R.is_exact() and any((((u ** v) != 1) for (u, v) in zip(x, orders)))):
                    raise ValueError('values (= {}) must have multiplicative orders dividing {}, respectively'.format(x, orders))
                self.values_on_gens.set_cache(x)
        elif free_module_element.is_FreeModuleElement(x):
            self.element.set_cache(x)
        else:
            self.values_on_gens.set_cache(x)

    @cached_method
    def __eval_at_minus_one(self):
        """Value of the character at -1, computed from its decomposition
        into prime-power-modulus factors."""
        D = self.decomposition()
        val = self.base_ring()(1)
        for e in D:
            if ((e.modulus() % 2) == 0):
                if ((e.modulus() % 4) == 0):
                    val *= e.values_on_gens()[0]
            elif ((euler_phi(e.parent().modulus()) / e.order()) % 2):
                val *= (- 1)
        return val

    def __call__(self, m):
        """Evaluate the character at the integer ``m``."""
        N = self.modulus()
        m = (m % N)
        # -1 (i.e. N-1) has a cheap dedicated evaluation; anything else
        # goes through the full table of values.
        if (self.values.is_in_cache() or (m != (N - 1))):
            return self.values()[m]
        else:
            return self.__eval_at_minus_one()

    def change_ring(self, R):
        """Return this character with values coerced into the ring ``R``."""
        if (self.base_ring() is R):
            return self
        G = self.parent().change_ring(R)
        return G.element_class(G, [R(x) for x in self.values_on_gens()])

    def _richcmp_(self, other, op):
        """Compare by the tuples of values on generators."""
        return richcmp(self.values_on_gens(), other.values_on_gens(), op)

    def __hash__(self):
        return hash(self.values_on_gens())

    def __invert__(self):
        """Return the inverse character (complex conjugate in the unitary case)."""
        G = self.parent()
        # Work additively on exponents when the root of unity is cached,
        # multiplicatively on values otherwise.
        if G.zeta.is_in_cache():
            x = (- self.element())
        else:
            x = tuple(((~ z) for z in self.values_on_gens()))
        return G.element_class(G, x, check=False)

    def _mul_(self, other):
        """Pointwise product of two characters of the same group."""
        G = self.parent()
        if G.zeta.is_in_cache():
            x = (self.element() + other.element())
        else:
            x = tuple(((y * z) for (y, z) in zip(self.values_on_gens(), other.values_on_gens())))
        return G.element_class(G, x, check=False)

    def __copy__(self):
        G = self.parent()
        return G.element_class(G, self.values_on_gens(), check=False)

    def __pow__(self, n):
        """Return the ``n``-th power of this character."""
        G = self.parent()
        if G.zeta.is_in_cache():
            x = (n * self.element())
        else:
            x = tuple(((z ** n) for z in self.values_on_gens()))
        return G.element_class(G, x, check=False)

    def _repr_short_(self):
        """Compact representation: just the list of values on generators."""
        return str(list(self.values_on_gens()))

    def _repr_(self):
        s = ('Dirichlet character modulo %s of conductor %s' % (self.modulus(), self.conductor()))
        r = len(self.values_on_gens())
        if r:
            s += ' mapping '
        for i in range(r):
            if i:
                s += ', '
            s += ((str(self.parent().unit_gens()[i]) + ' |--> ') + str(self.values_on_gens()[i]))
        return s

    def _latex_(self):
        s = ('\\hbox{Dirichlet character modulo } %s \\hbox{ of conductor } %s' % (self.modulus(), self.conductor()))
        r = len(self.values_on_gens())
        if (r != 0):
            s += ' \\hbox{ mapping } '
        for i in range(r):
            if (i != 0):
                s += ',\\ '
            s += ((self.parent().unit_gens()[i]._latex_() + ' \\mapsto ') + self.values_on_gens()[i]._latex_())
        return s

    def base_ring(self):
        """The ring in which this character takes its values."""
        return self.parent().base_ring()

    def bar(self):
        """The complex conjugate (= inverse) of this character."""
        return (~ self)

    def bernoulli(self, k, algorithm='recurrence', cache=True, **opts):
        """Return the generalized Bernoulli number B_{k,chi}.

        ``algorithm`` is 'recurrence' (default) or 'definition' (via the
        generating power series). Results are memoized per ``k`` when
        ``cache=True``.
        """
        if cache:
            try:
                self.__bernoulli
            except AttributeError:
                self.__bernoulli = {}
            if (k in self.__bernoulli):
                return self.__bernoulli[k]
        N = self.modulus()
        K = self.base_ring()
        if (N == 1):
            # Trivial character: ordinary Bernoulli numbers, except B_1 = 1/2.
            ber = ((K.one() / 2) if (k == 1) else K(bernoulli(k)))
        elif (self((- 1)) != K(((- 1) ** k))):
            # Parity mismatch forces B_{k,chi} = 0.
            ber = K.zero()
        elif (algorithm == 'recurrence'):
            v = self.values()
            def S(n):
                # Power sum of the character values.
                return sum(((v[r] * (r ** n)) for r in range(1, N)))
            ber = sum(((((binomial(k, j) * bernoulli(j, **opts)) * (N ** (j - 1))) * S((k - j))) for j in range((k + 1))))
        elif (algorithm == 'definition'):
            # Extract coefficient k of the generating function
            # sum_a chi(a) t e^{at} / (e^{Nt} - 1), scaled by k!.
            prec = (k + 2)
            R = PowerSeriesRing(QQ, 't')
            t = R.gen()
            g = (t / ((N * t).exp(prec) - 1))
            h = ([0] + [(g * (n * t).exp(prec)) for n in range(1, (N + 1))])
            ber = (sum([(self(a) * h[a][k]) for a in range(1, (N + 1))]) * factorial(k))
        else:
            raise ValueError(f"algorithm = '{algorithm}' unknown")
        if cache:
            self.__bernoulli[k] = ber
        return ber

    def lfunction(self, prec=53, algorithm='pari'):
        """Return the L-function of this character ('pari' or 'lcalc' backend)."""
        if (algorithm is None):
            algorithm = 'pari'
        if (algorithm == 'pari'):
            from sage.lfunctions.pari import lfun_character, LFunction
            Z = LFunction(lfun_character(self), prec=prec)
            Z.rename(('PARI L-function associated to %s' % self))
            return Z
        elif (algorithm == 'lcalc'):
            from sage.libs.lcalc.lcalc_Lfunction import Lfunction_from_character
            return Lfunction_from_character(self)
        raise ValueError('algorithm must be "pari" or "lcalc"')

    @cached_method
    def conductor(self):
        """The conductor: the smallest modulus this character factors through."""
        if ((self.modulus() == 1) or self.is_trivial()):
            return Integer(1)
        F = factor(self.modulus())
        if (len(F) > 1):
            # Multiplicative over the prime-power decomposition.
            return prod([d.conductor() for d in self.decomposition()])
        p = F[0][0]
        cond = (p ** (valuation(self.order(), p) + 1))
        # Extra factor of 2 for certain characters of 2-power modulus >= 8.
        if ((p == 2) and (F[0][1] > 2) and (self.values_on_gens()[1].multiplicative_order() != 1)):
            cond *= 2
        return Integer(cond)

    @cached_method
    def fixed_field_polynomial(self, algorithm='pari'):
        """A polynomial generating the fixed field of the kernel of this
        character ('pari' via galoissubcyclo, or a 'sage' implementation
        restricted to prime conductor)."""
        from sage.rings.polynomial.polynomial_ring_constructor import PolynomialRing
        from sage.matrix.constructor import matrix
        if (algorithm == 'sage'):
            n = ZZ(self.conductor())
            if (not n.is_prime()):
                raise NotImplementedError(('the conductor %s is supposed to be prime' % n))
            d = self.order()
            if ((euler_phi(n) % d) != 0):
                raise ValueError(('No field exists because %s does not divide %s=phi(%s)' % (d, euler_phi(n), n)))
            f = (euler_phi(n) // d)
            S = PolynomialRing(ZZ, 'x')
            if (f == 1):
                from sage.misc.functional import cyclotomic_polynomial
                return cyclotomic_polynomial(n, S.gen())
            if (d == 2):
                # Quadratic case: x^2 + x + (1 -+ n)/4 by n mod 4.
                if (n.mod(4) == 1):
                    s = (- 1)
                else:
                    s = 1
                return S([((s * (n + s)) / 4), 1, 1])
            # General case: build the Gaussian-period matrix and take its
            # characteristic polynomial.
            R = IntegerModRing(n)
            g = R.unit_gens()[0]
            gen_index = {}
            eta = []
            for i in range(d):
                eta.append([])
                for j in range(f):
                    r = (g ** (i + (d * j)))
                    eta[i].append(r)
                    gen_index[r] = i
            V = FreeModule(ZZ, d)
            eta_zero = V(([(- f)] * d))
            m = []
            for j in range(d):
                v = 0
                for e in eta[j]:
                    try:
                        s = V.gen(gen_index[(1 + e)])
                    except KeyError:
                        s = eta_zero
                    v += s
                m.append(v)
            m = matrix(m)
            xx = S.gen()
            return m.charpoly(xx)
        elif (algorithm == 'pari'):
            (G, chi) = self._pari_init_()
            K = pari.charker(G, chi)
            H = pari.galoissubcyclo(G, K)
            P = PolynomialRing(QQ, 'x')
            x = P.gen()
            return H.sage({'x': x})
        else:
            raise NotImplementedError("algorithm must be one of 'pari' or 'sage'")

    def fixed_field(self):
        """The number field fixed by the kernel of this character."""
        return NumberField(self.fixed_field_polynomial(), 'a')

    @cached_method
    def decomposition(self):
        """Factor this character into characters of prime-power modulus."""
        D = self.parent().decomposition()
        vals = [[z] for z in self.values_on_gens()]
        if ((self.modulus() % 8) == 0):
            # Two generators at the prime 2: merge them into one factor.
            vals[0].append(vals[1][0])
            del vals[1]
        elif ((self.modulus() % 4) == 2):
            # modulus == 2 (mod 4): the unit group mod 2 is trivial,
            # so prepend a trivial entry for that factor.
            vals = ([1] + vals)
        return [D[i](vals[i]) for i in range(len(D))]

    def extend(self, M):
        """Extend this character to the multiple modulus ``M``."""
        if (M % self.modulus()):
            raise ArithmeticError(('M(=%s) must be a multiple of the modulus(=%s)' % (M, self.modulus())))
        H = DirichletGroup(M, self.base_ring())
        return H(self)

    def _pari_init_(self):
        """Return (G, v): the PARI znstar of the modulus and this character's
        exponent vector on PARI's generators, normalized to PARI's orders."""
        G = pari.znstar(self.modulus(), 1)
        pari_orders = G[1][1]
        pari_gens = G[1][2]
        values_on_gens = (self(x) for x in pari_gens)
        P = self.parent()
        if isinstance(P.base_ring(), sage.rings.abc.ComplexField):
            # Numerically recover exponents from arguments of the values.
            zeta = P.zeta()
            zeta_argument = zeta.argument()
            v = [int((x.argument() / zeta_argument)) for x in values_on_gens]
        else:
            dlog = P._zeta_dlog
            v = [dlog[x] for x in values_on_gens]
        m = P.zeta_order()
        # Rescale from exponents mod zeta_order to exponents mod each
        # generator's own order, as PARI expects.
        v = [((vi * oi) // m) for (vi, oi) in zip(v, pari_orders)]
        return (G, v)

    def conrey_number(self):
        """The Conrey label of this character (via PARI's znconreyexp)."""
        if (self.modulus() == 1):
            return 1
        (G, v) = self._pari_init_()
        return pari.znconreyexp(G, v).sage()

    def lmfdb_page(self):
        """Open this character's page on the LMFDB in a web browser."""
        import webbrowser
        # BUG FIX(review): this URL template had been truncated to a bare,
        # unterminated quote; restored to the LMFDB Dirichlet character
        # page pattern filled with (modulus, Conrey number) below.
        lmfdb_url = 'https://www.lmfdb.org/Character/Dirichlet/{}/{}'
        url = lmfdb_url.format(self.modulus(), self.conrey_number())
        webbrowser.open(url)

    def galois_orbit(self, sort=True):
        """The orbit of this character under the Galois action on values."""
        if (not self.base_ring().is_integral_domain()):
            raise TypeError('Galois orbits only defined if base ring is an integral domain')
        k = self.order()
        if (k <= 2):
            return [self]
        P = self.parent()
        z = self.element()
        o = int(z.additive_order())
        Auts = {(m % o) for m in P._automorphisms()}
        v = [P.element_class(P, (m * z), check=False) for m in Auts]
        if sort:
            v.sort()
        return v

    def gauss_sum(self, a=1):
        """The Gauss sum g_a(chi) = sum_r chi(r) zeta^(ar) over an exact
        cyclotomic (or QQbar) base ring; delegates to the numerical version
        for complex base rings."""
        G = self.parent()
        K = G.base_ring()
        chi = self
        m = G.modulus()
        if isinstance(K, sage.rings.abc.ComplexField):
            return self.gauss_sum_numerical(a=a)
        elif isinstance(K, sage.rings.abc.AlgebraicField):
            L = K
            zeta = L.zeta(m)
        elif (isinstance(K, sage.rings.abc.NumberField_cyclotomic) or is_RationalField(K)):
            chi = chi.minimize_base_ring()
            # Need a common cyclotomic field containing both chi's values
            # and the m-th roots of unity.
            n = lcm(m, G.zeta_order())
            L = CyclotomicField(n)
            zeta = (L.gen(0) ** (n // m))
        else:
            raise NotImplementedError('Gauss sums only currently implemented when the base ring is a cyclotomic field, QQ, QQbar, or a complex field')
        zeta = (zeta ** a)
        g = L(chi(0))
        z = L.one()
        for c in chi.values()[1:]:
            z *= zeta
            g += (L(c) * z)
        return g

    def gauss_sum_numerical(self, prec=53, a=1):
        """Numerical Gauss sum in a complex field of precision ``prec``."""
        G = self.parent()
        K = G.base_ring()
        if isinstance(K, sage.rings.abc.ComplexField):
            def phi(t):
                return t
            CC = K
        elif isinstance(K, sage.rings.abc.AlgebraicField):
            from sage.rings.complex_mpfr import ComplexField
            CC = ComplexField(prec)
            phi = CC.coerce_map_from(K)
        elif (isinstance(K, sage.rings.abc.NumberField_cyclotomic) or is_RationalField(K)):
            phi = K.complex_embedding(prec)
            CC = phi.codomain()
        else:
            raise NotImplementedError('Gauss sums only currently implemented when the base ring is a cyclotomic field, QQ, QQbar, or a complex field')
        zeta = (CC.zeta(G.modulus()) ** a)
        g = phi(self(0))
        z = CC.one()
        for c in self.values()[1:]:
            z *= zeta
            g += (phi(c) * z)
        return g

    def jacobi_sum(self, char, check=True):
        """The Jacobi sum J(chi, char) = sum_x chi(x) char(1-x)."""
        if check:
            if (self.parent() != char.parent()):
                raise NotImplementedError('Characters must be from the same Dirichlet Group.')
        return sum([(self(x) * char((1 - x))) for x in IntegerModRing(self.modulus())])

    def kloosterman_sum(self, a=1, b=0):
        """The twisted Kloosterman sum K(a, b, chi) over a cyclotomic field."""
        G = self.parent()
        zo = G.zeta_order()
        m = G.modulus()
        g = 0
        L = CyclotomicField(m.lcm(zo))
        zeta = L.gen(0)
        try:
            # Probe that character values coerce into L before summing.
            (self(1) * (zeta ** (a + b)))
        except TypeError:
            raise NotImplementedError('Kloosterman sums not implemented over this ring')
        n = zeta.multiplicative_order()
        zeta = (zeta ** (n // m))
        for c in m.coprime_integers(m):
            e = Mod(c, m)
            g += (self(c) * (zeta ** int(((a * e) + (b * (e ** (- 1)))))))
        return g

    def kloosterman_sum_numerical(self, prec=53, a=1, b=0):
        """Numerical twisted Kloosterman sum at precision ``prec``."""
        G = self.parent()
        K = G.base_ring()
        if (not (isinstance(K, sage.rings.abc.NumberField_cyclotomic) or is_RationalField(K))):
            raise NotImplementedError('Kloosterman sums only currently implemented when the base ring is a cyclotomic field or QQ.')
        phi = K.complex_embedding(prec)
        CC = phi.codomain()
        g = 0
        m = G.modulus()
        zeta = CC.zeta(m)
        for c in m.coprime_integers(m):
            e = Mod(c, m)
            z = (zeta ** int(((a * e) + (b * (e ** (- 1))))))
            g += (phi(self(c)) * z)
        return g

    @cached_method
    def is_even(self):
        """True if chi(-1) == 1."""
        R = self.base_ring()
        if (not R.is_exact()):
            # Inexact ring: compare numerically with a generous tolerance.
            return (abs((self((- 1)) - R.one())) < 0.5)
        return (self((- 1)) == R.one())

    @cached_method
    def is_odd(self):
        """True if chi(-1) == -1."""
        R = self.base_ring()
        if (not R.is_exact()):
            return (abs((self((- 1)) - R((- 1)))) < 0.5)
        return (self((- 1)) == R((- 1)))

    @cached_method
    def is_primitive(self):
        """True if the conductor equals the modulus."""
        return (self.conductor() == self.modulus())

    @cached_method
    def is_trivial(self):
        """True if this is the trivial character."""
        if self.element.is_in_cache():
            # Trivial iff the exponent vector is zero.
            return (not self.element())
        one = self.base_ring().one()
        return all(((x == one) for x in self.values_on_gens()))

    def kernel(self):
        """The list of residues where the character takes the value 1."""
        one = self.base_ring().one()
        return [x for x in range(self.modulus()) if (self(x) == one)]

    def maximize_base_ring(self):
        """Coerce into the cyclotomic field containing all characters of
        this modulus (no-op when the base ring already suffices)."""
        g = IntegerModRing(self.modulus()).unit_group_exponent()
        if (g == 1):
            g = 2
        z = self.base_ring().zeta()
        n = z.multiplicative_order()
        m = lcm(g, n)
        if (n == m):
            return self
        K = CyclotomicField(m)
        return self.change_ring(K)

    def minimize_base_ring(self):
        """Coerce into the smallest sensible base ring; returns self when
        no smaller ring works."""
        R = self.base_ring()
        if R.is_prime_field():
            return self
        p = R.characteristic()
        if p:
            K = IntegerModRing(p)
        elif (self.order() <= 2):
            K = QQ
        elif (isinstance(R, NumberField_generic) and (euler_phi(self.order()) < R.absolute_degree())):
            K = CyclotomicField(self.order())
        else:
            return self
        try:
            return self.change_ring(K)
        except (TypeError, ValueError, ArithmeticError):
            # Best effort: keep the current ring if coercion fails.
            return self

    def modulus(self):
        """The modulus of this character."""
        return self.parent().modulus()

    def level(self):
        """Synonym for :meth:`modulus`."""
        return self.modulus()

    @cached_method
    def multiplicative_order(self):
        """The order of this character in its Dirichlet group."""
        if self.parent().zeta.is_in_cache():
            return self.element().additive_order()
        return lcm([z.multiplicative_order() for z in self.values_on_gens()])

    def primitive_character(self):
        """The primitive character inducing this one."""
        return self.restrict(self.conductor())

    def restrict(self, M):
        """Restrict to the divisor modulus ``M`` (which the conductor must divide)."""
        M = int(M)
        if (self.modulus() % M):
            raise ValueError(('M(=%s) must divide the modulus(=%s)' % (M, self.modulus())))
        if (M % self.conductor()):
            raise ValueError(('conductor(=%s) must divide M(=%s)' % (self.conductor(), M)))
        H = DirichletGroup(M, self.base_ring())
        return H(self)

    @cached_method
    def values(self):
        """The full list of values [chi(0), chi(1), ..., chi(N-1)]
        (0 at non-units), built by walking the unit group in mixed-radix
        order over the generator exponents."""
        G = self.parent()
        R = G.base_ring()
        mod = self.parent().modulus()
        if (mod == 1):
            return [R.one()]
        elif (mod == 2):
            return [R.zero(), R.one()]
        result_list = ([R.zero()] * mod)
        gens = G.unit_gens()
        orders = G.integers_mod().unit_group().gens_orders()
        R_values = G._zeta_powers
        val_on_gen = self.element()
        exponents = ([0] * len(orders))
        n = G.integers_mod().one()
        value = val_on_gen.base_ring().zero()
        while True:
            result_list[n] = R_values[value]
            # Odometer-style increment of the exponent vector; the running
            # residue n and the zeta exponent are updated incrementally.
            i = 0
            while True:
                try:
                    exponents[i] += 1
                except IndexError:
                    # All exponent combinations exhausted.
                    return result_list
                value += val_on_gen[i]
                n *= gens[i]
                if (exponents[i] < orders[i]):
                    break
                exponents[i] = 0
                i += 1

    @cached_method(do_pickle=True)
    def values_on_gens(self):
        """The tuple of values on the unit-group generators, derived from
        the cached exponent vector."""
        pows = self.parent()._zeta_powers
        return tuple([pows[i] for i in self.element()])

    @cached_method(do_pickle=True)
    def element(self):
        """The (immutable) exponent vector of this character on the unit
        generators, derived from the cached values when necessary."""
        P = self.parent()
        M = P._module
        if isinstance(P.base_ring(), sage.rings.abc.ComplexField):
            # Numerically recover exponents from the arguments of the values.
            zeta = P.zeta()
            zeta_argument = zeta.argument()
            v = M([int(round((x.argument() / zeta_argument))) for x in self.values_on_gens()])
        else:
            dlog = P._zeta_dlog
            v = M([dlog[x] for x in self.values_on_gens()])
        v.set_immutable()
        return v

    def __setstate__(self, state):
        """Unpickle, migrating pre-cached_method attribute names into the
        cached_method caches."""
        values_on_gens_key = '_DirichletCharacter__values_on_gens'
        values_on_gens = None
        state_dict = state[1]
        if (values_on_gens_key in state_dict):
            values_on_gens = state_dict[values_on_gens_key]
            del state_dict[values_on_gens_key]
        element_key = '_DirichletCharacter__element'
        element = None
        if (element_key in state_dict):
            element = state_dict[element_key]
            del state_dict[element_key]
        super().__setstate__(state)
        if (values_on_gens is not None):
            self.values_on_gens.set_cache(values_on_gens)
        if (element is not None):
            self.element.set_cache(element)
class Function_log_integral_offset(BuiltinFunction):
    """The offset logarithmic integral Li(x) = li(x) - li(2)
    (converted to SymPy's ``Li``)."""

    def __init__(self):
        BuiltinFunction.__init__(self, 'log_integral_offset', nargs=1, latex_name='\\operatorname{log\\_integral\\_offset}', conversions=dict(sympy='Li'))

    def _eval_(self, z):
        """Symbolic evaluation: Li(2) = 0 exactly; otherwise li(z) - li(2)."""
        if (z == 2):
            return SR(0)
        return (li(z) - li(2))

    def _evalf_(self, z, parent=None, algorithm=None):
        """Numerical evaluation via mpmath's li with offset=True."""
        return _mpmath_utils_call(_mpmath_li, z, offset=True, parent=parent)

    def _derivative_(self, z, diff_param=None):
        """d/dz Li(z) = 1/log(z)."""
        return (1 / log(z))
def test_random_public_method(executor):
    """The RANDOM algorithm picks only callable accessible objects."""
    config.configuration.algorithm = config.Algorithm.RANDOM
    factory = gaf.TestSuiteGenerationAlgorithmFactory(executor, MagicMock(ModuleTestCluster))
    algorithm = factory.get_search_algorithm()
    assert isinstance(algorithm, RandomAlgorithm)
    callable_a = MagicMock(GenericCallableAccessibleObject)
    non_callable = MagicMock(GenericAccessibleObject)
    callable_b = MagicMock(GenericCallableAccessibleObject)
    chosen = algorithm._random_public_method({callable_a, non_callable, callable_b})
    # The non-callable mock must never be selected.
    assert chosen in (callable_a, callable_b)
# NOTE(review): the decorator below had been garbled to a bare ``.parametrize(...)``
# line (a syntax error); restored to the standard pytest parametrize form.
@pytest.mark.parametrize('backend', ['numpy', 'tensorflow', 'pytorch', 'jax'])
def test_cls_backend_option(tmp_path, script_runner, backend):
    """`pyhf cls --backend <backend>` produces a JSON result with CLs values."""
    temp = tmp_path.joinpath('parsed_output.json')
    command = f'pyhf xml2json validation/xmlimport_input/config/example.xml --basedir validation/xmlimport_input/ --output-file {temp}'
    ret = script_runner.run(shlex.split(command))
    # BUG FIX(review): the result of the conversion step was silently
    # discarded; assert it so a failure here is reported at its source.
    assert ret.success
    command = f'pyhf cls --backend {backend:s} {temp}'
    ret = script_runner.run(shlex.split(command))
    assert ret.success
    d = json.loads(ret.stdout)
    assert d
    assert ('CLs_obs' in d)
    assert ('CLs_exp' in d)
def test_allowable_amino_acid_locations_do_not_contain_amino_acids_we_cant_create(esm_sampler_fixture):
    """Allowed tokens must exclude non-standard / ambiguous amino-acid symbols."""
    allowed_tokens = map_aa_idx_to_tok_set(esm_sampler_fixture)
    forbidden = frozenset('XBUXZO.-')
    # Disjointness check written as an empty intersection.
    assert not (allowed_tokens & forbidden)
def run_local(local_rank, num_proc, func, init_method, shard_id, num_shards, backend, cfg):
    """Per-process entry point for distributed training.

    Computes the global rank from the shard layout, joins the process
    group, binds this process to its local GPU, then runs ``func``.

    Args:
        local_rank: rank of this process within its shard (and its GPU index).
        num_proc: processes per shard.
        func: callable ``func(local_rank, cfg)`` doing the actual work.
        init_method: torch.distributed rendezvous URL.
        shard_id / num_shards: this machine's shard index and the shard count.
        backend: torch.distributed backend name (e.g. 'nccl').
        cfg: configuration object forwarded to ``func``.
    """
    world_size = num_proc * num_shards
    rank = shard_id * num_proc + local_rank
    # BUG FIX(review): the original wrapped this call in
    # ``try/except Exception as e: raise e``, a no-op that only degraded
    # the traceback; let failures propagate directly.
    torch.distributed.init_process_group(backend=backend, init_method=init_method, world_size=world_size, rank=rank)
    torch.cuda.set_device(local_rank)
    func(local_rank, cfg)
class BasicStage(nn.Module):
    """A stage of ``m_blocks`` BasicBlocks; only the first block of the
    stage downsamples (with the given stride) and changes channel count.
    """

    def __init__(self, in_channels, out_channels, ratio, kernel_size, stride, groups, i_stage, m_blocks, use_bn=True, use_do=True, verbose=False):
        super(BasicStage, self).__init__()
        self.in_channels = in_channels
        self.out_channels = out_channels
        self.ratio = ratio
        self.kernel_size = kernel_size
        self.groups = groups
        self.i_stage = i_stage
        self.m_blocks = m_blocks
        self.use_bn = use_bn
        self.use_do = use_do
        self.verbose = verbose
        self.block_list = nn.ModuleList()
        for i_block in range(self.m_blocks):
            # The very first block of the whole network is special-cased.
            self.is_first_block = (self.i_stage == 0 and i_block == 0)
            if i_block == 0:
                # First block of the stage: downsample and switch channels.
                self.downsample = True
                self.stride = stride
                self.tmp_in_channels = self.in_channels
            else:
                self.downsample = False
                self.stride = 1
                self.tmp_in_channels = self.out_channels
            self.block_list.append(BasicBlock(in_channels=self.tmp_in_channels, out_channels=self.out_channels, ratio=self.ratio, kernel_size=self.kernel_size, stride=self.stride, groups=self.groups, downsample=self.downsample, is_first_block=self.is_first_block, use_bn=self.use_bn, use_do=self.use_do))

    def forward(self, x):
        out = x
        for i_block, net in enumerate(self.block_list):
            out = net(out)
            if self.verbose:
                # Shape / layer tracing for debugging.
                print('stage: {}, block: {}, in_channels: {}, out_channels: {}, outshape: {}'.format(self.i_stage, i_block, net.in_channels, net.out_channels, list(out.shape)))
                print('stage: {}, block: {}, conv1: {}->{} k={} s={} C={}'.format(self.i_stage, i_block, net.conv1.in_channels, net.conv1.out_channels, net.conv1.kernel_size, net.conv1.stride, net.conv1.groups))
                print('stage: {}, block: {}, convk: {}->{} k={} s={} C={}'.format(self.i_stage, i_block, net.conv2.in_channels, net.conv2.out_channels, net.conv2.kernel_size, net.conv2.stride, net.conv2.groups))
                print('stage: {}, block: {}, conv1: {}->{} k={} s={} C={}'.format(self.i_stage, i_block, net.conv3.in_channels, net.conv3.out_channels, net.conv3.kernel_size, net.conv3.stride, net.conv3.groups))
        return out
def is_para_break(index, text):
    """Return (is_break, break_length) for a paragraph break at ``index``.

    A break requires a newline at ``index`` and a match of the module's
    PARAGRAPH_BREAK regex starting there; ``break_length`` is the length
    of the matched text (0 when there is no break).
    """
    if text[index] != '\n':
        return (False, 0)
    match = PARAGRAPH_BREAK.match(text, index)
    if match:
        return (True, len(match.group(0)))
    return (False, 0)
class Completions():
    """Column-oriented container of model completions.

    Accepts either a list of per-completion dicts (rows) or a dict of
    equal-length lists (columns); always stored as columns. Fields are
    reachable by attribute, by string key, or as a Prediction by integer
    index.
    """

    def __init__(self, list_or_dict, signature=None):
        self.signature = signature
        if isinstance(list_or_dict, list):
            # Pivot a list of row-dicts into a dict of columns.
            columns = {}
            for row in list_or_dict:
                for field, value in row.items():
                    columns.setdefault(field, []).append(value)
        else:
            columns = list_or_dict
        assert all(isinstance(v, list) for v in columns.values()), 'All values must be lists'
        if columns:
            first_len = len(next(iter(columns.values())))
            assert all(len(v) == first_len for v in columns.values()), 'All lists must have the same length'
        self._completions = columns

    def items(self):
        """Iterate over (field, column) pairs."""
        return self._completions.items()

    def __getitem__(self, key):
        # Integer index -> one Prediction row; string key -> whole column.
        if isinstance(key, int):
            if not (0 <= key < len(self)):
                raise IndexError('Index out of range')
            return Prediction(**{field: column[key] for field, column in self._completions.items()})
        return self._completions[key]

    def __getattr__(self, name):
        if name in self._completions:
            return self._completions[name]
        raise AttributeError(f"'{type(self).__name__}' object has no attribute '{name}'")

    def __len__(self):
        # All columns share one length; inspect any of them.
        return len(next(iter(self._completions.values())))

    def __contains__(self, key):
        return key in self._completions

    def __repr__(self):
        items_repr = ',\n    '.join(f'{k}={repr(v)}' for k, v in self._completions.items())
        return f'''Completions(
    {items_repr}
)'''

    def __str__(self):
        return self.__repr__()
def register_types(module):
root_module = module.get_root()
module.add_enum('MpduType', ['NORMAL_MPDU', 'MPDU_IN_AGGREGATE', 'LAST_MPDU_IN_AGGREGATE'], import_from_module='ns.wifi')
module.add_enum('ChannelAccess', ['ContinuousAccess', 'AlternatingAccess', 'ExtendedAccess', 'DefaultCchAccess', 'NoAccess'])
module.add_enum('VsaTransmitInterval', ['VSA_TRANSMIT_IN_CCHI', 'VSA_TRANSMIT_IN_SCHI', 'VSA_TRANSMIT_IN_BOTHI'])
module.add_enum('QueueSizeUnit', ['PACKETS', 'BYTES'], import_from_module='ns.network')
module.add_enum('LogLevel', ['LOG_NONE', 'LOG_ERROR', 'LOG_LEVEL_ERROR', 'LOG_WARN', 'LOG_LEVEL_WARN', 'LOG_DEBUG', 'LOG_LEVEL_DEBUG', 'LOG_INFO', 'LOG_LEVEL_INFO', 'LOG_FUNCTION', 'LOG_LEVEL_FUNCTION', 'LOG_LOGIC', 'LOG_LEVEL_LOGIC', 'LOG_ALL', 'LOG_LEVEL_ALL', 'LOG_PREFIX_FUNC', 'LOG_PREFIX_TIME', 'LOG_PREFIX_NODE', 'LOG_PREFIX_LEVEL', 'LOG_PREFIX_ALL'], import_from_module='ns.core')
module.add_enum('TypeOfStation', ['STA', 'AP', 'ADHOC_STA', 'MESH', 'HT_STA', 'HT_AP', 'HT_ADHOC_STA', 'OCB'], import_from_module='ns.wifi')
module.add_enum('WifiMacType', ['WIFI_MAC_CTL_CTLWRAPPER', 'WIFI_MAC_CTL_RTS', 'WIFI_MAC_CTL_CTS', 'WIFI_MAC_CTL_ACK', 'WIFI_MAC_CTL_BACKREQ', 'WIFI_MAC_CTL_BACKRESP', 'WIFI_MAC_CTL_END', 'WIFI_MAC_CTL_END_ACK', 'WIFI_MAC_MGT_BEACON', 'WIFI_MAC_MGT_ASSOCIATION_REQUEST', 'WIFI_MAC_MGT_ASSOCIATION_RESPONSE', 'WIFI_MAC_MGT_DISASSOCIATION', 'WIFI_MAC_MGT_REASSOCIATION_REQUEST', 'WIFI_MAC_MGT_REASSOCIATION_RESPONSE', 'WIFI_MAC_MGT_PROBE_REQUEST', 'WIFI_MAC_MGT_PROBE_RESPONSE', 'WIFI_MAC_MGT_AUTHENTICATION', 'WIFI_MAC_MGT_DEAUTHENTICATION', 'WIFI_MAC_MGT_ACTION', 'WIFI_MAC_MGT_ACTION_NO_ACK', 'WIFI_MAC_MGT_MULTIHOP_ACTION', 'WIFI_MAC_DATA', 'WIFI_MAC_DATA_CFACK', 'WIFI_MAC_DATA_CFPOLL', 'WIFI_MAC_DATA_CFACK_CFPOLL', 'WIFI_MAC_DATA_NULL', 'WIFI_MAC_DATA_NULL_CFACK', 'WIFI_MAC_DATA_NULL_CFPOLL', 'WIFI_MAC_DATA_NULL_CFACK_CFPOLL', 'WIFI_MAC_QOSDATA', 'WIFI_MAC_QOSDATA_CFACK', 'WIFI_MAC_QOSDATA_CFPOLL', 'WIFI_MAC_QOSDATA_CFACK_CFPOLL', 'WIFI_MAC_QOSDATA_NULL', 'WIFI_MAC_QOSDATA_NULL_CFPOLL', 'WIFI_MAC_QOSDATA_NULL_CFACK_CFPOLL'], import_from_module='ns.wifi')
module.add_enum('AcIndex', ['AC_BE', 'AC_BK', 'AC_VI', 'AC_VO', 'AC_BE_NQOS', 'AC_UNDEF'], import_from_module='ns.wifi')
module.add_enum('BlockAckType', ['BASIC_BLOCK_ACK', 'COMPRESSED_BLOCK_ACK', 'EXTENDED_COMPRESSED_BLOCK_ACK', 'MULTI_TID_BLOCK_ACK'], import_from_module='ns.wifi')
module.add_enum('WifiPhyStandard', ['WIFI_PHY_STANDARD_80211a', 'WIFI_PHY_STANDARD_80211b', 'WIFI_PHY_STANDARD_80211g', 'WIFI_PHY_STANDARD_80211_10MHZ', 'WIFI_PHY_STANDARD_80211_5MHZ', 'WIFI_PHY_STANDARD_holland', 'WIFI_PHY_STANDARD_80211n_2_4GHZ', 'WIFI_PHY_STANDARD_80211n_5GHZ', 'WIFI_PHY_STANDARD_80211ac', 'WIFI_PHY_STANDARD_80211ax_2_4GHZ', 'WIFI_PHY_STANDARD_80211ax_5GHZ', 'WIFI_PHY_STANDARD_UNSPECIFIED'], import_from_module='ns.wifi')
module.add_enum('WifiPreamble', ['WIFI_PREAMBLE_LONG', 'WIFI_PREAMBLE_SHORT', 'WIFI_PREAMBLE_HT_MF', 'WIFI_PREAMBLE_HT_GF', 'WIFI_PREAMBLE_VHT', 'WIFI_PREAMBLE_HE_SU', 'WIFI_PREAMBLE_HE_ER_SU', 'WIFI_PREAMBLE_HE_MU', 'WIFI_PREAMBLE_HE_TB', 'WIFI_PREAMBLE_NONE'], import_from_module='ns.wifi')
module.add_enum('WifiModulationClass', ['WIFI_MOD_CLASS_UNKNOWN', 'WIFI_MOD_CLASS_IR', 'WIFI_MOD_CLASS_FHSS', 'WIFI_MOD_CLASS_DSSS', 'WIFI_MOD_CLASS_HR_DSSS', 'WIFI_MOD_CLASS_ERP_PBCC', 'WIFI_MOD_CLASS_DSSS_OFDM', 'WIFI_MOD_CLASS_ERP_OFDM', 'WIFI_MOD_CLASS_OFDM', 'WIFI_MOD_CLASS_HT', 'WIFI_MOD_CLASS_VHT', 'WIFI_MOD_CLASS_HE'], import_from_module='ns.wifi')
module.add_enum('WifiCodeRate', ['WIFI_CODE_RATE_UNDEFINED', 'WIFI_CODE_RATE_3_4', 'WIFI_CODE_RATE_2_3', 'WIFI_CODE_RATE_1_2', 'WIFI_CODE_RATE_5_6'], import_from_module='ns.wifi')
module.add_class('Address', import_from_module='ns.network')
module.add_enum('MaxSize_e', ['MAX_SIZE'], outer_class=root_module['ns3::Address'], import_from_module='ns.network')
module.add_class('ApplicationContainer', import_from_module='ns.network')
typehandlers.add_type_alias(u'std::vector< ns3::Ptr< ns3::Application > > const_iterator', u'ns3::ApplicationContainer::Iterator')
typehandlers.add_type_alias(u'std::vector< ns3::Ptr< ns3::Application > > const_iterator*', u'ns3::ApplicationContainer::Iterator*')
typehandlers.add_type_alias(u'std::vector< ns3::Ptr< ns3::Application > > const_iterator&', u'ns3::ApplicationContainer::Iterator&')
module.add_class('AsciiTraceHelper', import_from_module='ns.network')
module.add_class('AsciiTraceHelperForDevice', allow_subclassing=True, import_from_module='ns.network')
module.add_class('AsciiTraceHelperForIpv4', allow_subclassing=True, import_from_module='ns.internet')
module.add_class('AsciiTraceHelperForIpv6', allow_subclassing=True, import_from_module='ns.internet')
module.add_class('AttributeConstructionList', import_from_module='ns.core')
module.add_class('Item', import_from_module='ns.core', outer_class=root_module['ns3::AttributeConstructionList'])
typehandlers.add_type_alias(u'std::list< ns3::AttributeConstructionList::Item > const_iterator', u'ns3::AttributeConstructionList::CIterator')
typehandlers.add_type_alias(u'std::list< ns3::AttributeConstructionList::Item > const_iterator*', u'ns3::AttributeConstructionList::CIterator*')
typehandlers.add_type_alias(u'std::list< ns3::AttributeConstructionList::Item > const_iterator&', u'ns3::AttributeConstructionList::CIterator&')
module.add_class('Bar', import_from_module='ns.wifi')
module.add_class('BlockAckAgreement', import_from_module='ns.wifi')
module.add_class('BlockAckCache', import_from_module='ns.wifi')
module.add_class('Buffer', import_from_module='ns.network')
module.add_class('Iterator', import_from_module='ns.network', outer_class=root_module['ns3::Buffer'])
module.add_class('ByteTagIterator', import_from_module='ns.network')
module.add_class('Item', import_from_module='ns.network', outer_class=root_module['ns3::ByteTagIterator'])
module.add_class('ByteTagList', import_from_module='ns.network')
module.add_class('Iterator', import_from_module='ns.network', outer_class=root_module['ns3::ByteTagList'])
module.add_class('Item', import_from_module='ns.network', outer_class=root_module['ns3::ByteTagList::Iterator'])
module.add_class('CallbackBase', import_from_module='ns.core')
module.add_class('DataRate', import_from_module='ns.network')
module.add_class('DefaultDeleter', import_from_module='ns.core', template_parameters=['ns3::AttributeAccessor'])
module.add_class('DefaultDeleter', import_from_module='ns.core', template_parameters=['ns3::AttributeChecker'])
module.add_class('DefaultDeleter', import_from_module='ns.core', template_parameters=['ns3::AttributeValue'])
module.add_class('DefaultDeleter', import_from_module='ns.core', template_parameters=['ns3::CallbackImplBase'])
module.add_class('DefaultDeleter', template_parameters=['ns3::ChannelCoordinationListener'])
module.add_class('DefaultDeleter', import_from_module='ns.core', template_parameters=['ns3::Event'])
module.add_class('DefaultDeleter', import_from_module='ns.core', template_parameters=['ns3::EventImpl'])
module.add_class('DefaultDeleter', import_from_module='ns.core', template_parameters=['ns3::Hash::Implementation'])
module.add_class('DefaultDeleter', import_from_module='ns.core', template_parameters=['ns3::NixVector'])
module.add_class('DefaultDeleter', import_from_module='ns.core', template_parameters=['ns3::OutputStreamWrapper'])
module.add_class('DefaultDeleter', import_from_module='ns.core', template_parameters=['ns3::Packet'])
module.add_class('DefaultDeleter', import_from_module='ns.core', template_parameters=['ns3::QueueItem'])
module.add_class('DefaultDeleter', import_from_module='ns.core', template_parameters=['ns3::TraceSourceAccessor'])
module.add_class('DefaultDeleter', import_from_module='ns.core', template_parameters=['ns3::WifiInformationElement'])
module.add_class('DefaultDeleter', import_from_module='ns.core', template_parameters=['ns3::WifiMacQueueItem'])
module.add_class('EdcaParameter')
module.add_class('EventId', import_from_module='ns.core')
module.add_class('Hasher', import_from_module='ns.core')
module.add_class('HePreambleParameters', import_from_module='ns.wifi')
module.add_class('Inet6SocketAddress', import_from_module='ns.network')
root_module['ns3::Inet6SocketAddress'].implicitly_converts_to(root_module['ns3::Address'])
module.add_class('InetSocketAddress', import_from_module='ns.network')
root_module['ns3::InetSocketAddress'].implicitly_converts_to(root_module['ns3::Address'])
module.add_class('InterferenceHelper', import_from_module='ns.wifi')
module.add_class('SnrPer', import_from_module='ns.wifi', outer_class=root_module['ns3::InterferenceHelper'])
module.add_class('Ipv4Address', import_from_module='ns.network')
root_module['ns3::Ipv4Address'].implicitly_converts_to(root_module['ns3::Address'])
module.add_class('Ipv4InterfaceAddress', import_from_module='ns.internet')
module.add_enum('InterfaceAddressScope_e', ['HOST', 'LINK', 'GLOBAL'], outer_class=root_module['ns3::Ipv4InterfaceAddress'], import_from_module='ns.internet')
module.add_class('Ipv4InterfaceContainer', import_from_module='ns.internet')
typehandlers.add_type_alias(u'std::vector< std::pair< ns3::Ptr< ns3::Ipv4 >, unsigned int > > const_iterator', u'ns3::Ipv4InterfaceContainer::Iterator')
typehandlers.add_type_alias(u'std::vector< std::pair< ns3::Ptr< ns3::Ipv4 >, unsigned int > > const_iterator*', u'ns3::Ipv4InterfaceContainer::Iterator*')
typehandlers.add_type_alias(u'std::vector< std::pair< ns3::Ptr< ns3::Ipv4 >, unsigned int > > const_iterator&', u'ns3::Ipv4InterfaceContainer::Iterator&')
module.add_class('Ipv4Mask', import_from_module='ns.network')
module.add_class('Ipv6Address', import_from_module='ns.network')
root_module['ns3::Ipv6Address'].implicitly_converts_to(root_module['ns3::Address'])
module.add_class('Ipv6InterfaceAddress', import_from_module='ns.internet')
module.add_enum('State_e', ['TENTATIVE', 'DEPRECATED', 'PREFERRED', 'PERMANENT', 'HOMEADDRESS', 'TENTATIVE_OPTIMISTIC', 'INVALID'], outer_class=root_module['ns3::Ipv6InterfaceAddress'], import_from_module='ns.internet')
module.add_enum('Scope_e', ['HOST', 'LINKLOCAL', 'GLOBAL'], outer_class=root_module['ns3::Ipv6InterfaceAddress'], import_from_module='ns.internet')
module.add_class('Ipv6InterfaceContainer', import_from_module='ns.internet')
typehandlers.add_type_alias(u'std::vector< std::pair< ns3::Ptr< ns3::Ipv6 >, unsigned int > > const_iterator', u'ns3::Ipv6InterfaceContainer::Iterator')
typehandlers.add_type_alias(u'std::vector< std::pair< ns3::Ptr< ns3::Ipv6 >, unsigned int > > const_iterator*', u'ns3::Ipv6InterfaceContainer::Iterator*')
typehandlers.add_type_alias(u'std::vector< std::pair< ns3::Ptr< ns3::Ipv6 >, unsigned int > > const_iterator&', u'ns3::Ipv6InterfaceContainer::Iterator&')
module.add_class('Ipv6Prefix', import_from_module='ns.network')
module.add_class('LogComponent', import_from_module='ns.core')
typehandlers.add_type_alias(u'std::map< std::string, ns3::LogComponent * >', u'ns3::LogComponent::ComponentList')
typehandlers.add_type_alias(u'std::map< std::string, ns3::LogComponent * >*', u'ns3::LogComponent::ComponentList*')
typehandlers.add_type_alias(u'std::map< std::string, ns3::LogComponent * >&', u'ns3::LogComponent::ComponentList&')
module.add_class('Mac48Address', import_from_module='ns.network')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Mac48Address )', u'ns3::Mac48Address::TracedCallback')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Mac48Address )*', u'ns3::Mac48Address::TracedCallback*')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Mac48Address )&', u'ns3::Mac48Address::TracedCallback&')
root_module['ns3::Mac48Address'].implicitly_converts_to(root_module['ns3::Address'])
module.add_class('Mac8Address', import_from_module='ns.network')
root_module['ns3::Mac8Address'].implicitly_converts_to(root_module['ns3::Address'])
module.add_class('MacLowTransmissionParameters', import_from_module='ns.wifi')
module.add_class('MpduInfo', import_from_module='ns.wifi')
module.add_class('NetDeviceContainer', import_from_module='ns.network')
typehandlers.add_type_alias(u'std::vector< ns3::Ptr< ns3::NetDevice > > const_iterator', u'ns3::NetDeviceContainer::Iterator')
typehandlers.add_type_alias(u'std::vector< ns3::Ptr< ns3::NetDevice > > const_iterator*', u'ns3::NetDeviceContainer::Iterator*')
typehandlers.add_type_alias(u'std::vector< ns3::Ptr< ns3::NetDevice > > const_iterator&', u'ns3::NetDeviceContainer::Iterator&')
module.add_class('NodeContainer', import_from_module='ns.network')
typehandlers.add_type_alias(u'std::vector< ns3::Ptr< ns3::Node > > const_iterator', u'ns3::NodeContainer::Iterator')
typehandlers.add_type_alias(u'std::vector< ns3::Ptr< ns3::Node > > const_iterator*', u'ns3::NodeContainer::Iterator*')
typehandlers.add_type_alias(u'std::vector< ns3::Ptr< ns3::Node > > const_iterator&', u'ns3::NodeContainer::Iterator&')
module.add_class('ObjectBase', allow_subclassing=True, import_from_module='ns.core')
module.add_class('ObjectDeleter', import_from_module='ns.core')
module.add_class('ObjectFactory', import_from_module='ns.core')
module.add_class('OrganizationIdentifier')
module.add_enum('OrganizationIdentifierType', ['OUI24', 'OUI36', 'Unknown'], outer_class=root_module['ns3::OrganizationIdentifier'])
module.add_class('OriginatorBlockAckAgreement', import_from_module='ns.wifi', parent=root_module['ns3::BlockAckAgreement'])
module.add_enum('State', ['PENDING', 'ESTABLISHED', 'INACTIVE', 'NO_REPLY', 'RESET', 'REJECTED'], outer_class=root_module['ns3::OriginatorBlockAckAgreement'], import_from_module='ns.wifi')
module.add_class('PacketMetadata', import_from_module='ns.network')
module.add_class('Item', import_from_module='ns.network', outer_class=root_module['ns3::PacketMetadata'])
module.add_enum('ItemType', ['PAYLOAD', 'HEADER', 'TRAILER'], outer_class=root_module['ns3::PacketMetadata::Item'], import_from_module='ns.network')
module.add_class('ItemIterator', import_from_module='ns.network', outer_class=root_module['ns3::PacketMetadata'])
module.add_class('PacketTagIterator', import_from_module='ns.network')
module.add_class('Item', import_from_module='ns.network', outer_class=root_module['ns3::PacketTagIterator'])
module.add_class('PacketTagList', import_from_module='ns.network')
module.add_class('TagData', import_from_module='ns.network', outer_class=root_module['ns3::PacketTagList'])
module.add_class('ParameterLogger', import_from_module='ns.core')
module.add_class('PcapFile', import_from_module='ns.network')
module.add_class('PcapHelper', import_from_module='ns.network')
module.add_enum('DataLinkType', ['DLT_NULL', 'DLT_EN10MB', 'DLT_PPP', 'DLT_RAW', 'DLT_IEEE802_11', 'DLT_LINUX_SLL', 'DLT_PRISM_HEADER', 'DLT_IEEE802_11_RADIO', 'DLT_IEEE802_15_4', 'DLT_NETLINK'], outer_class=root_module['ns3::PcapHelper'], import_from_module='ns.network')
module.add_class('PcapHelperForDevice', allow_subclassing=True, import_from_module='ns.network')
module.add_class('PcapHelperForIpv4', allow_subclassing=True, import_from_module='ns.internet')
module.add_class('PcapHelperForIpv6', allow_subclassing=True, import_from_module='ns.internet')
module.add_class('QueueSize', import_from_module='ns.network')
module.add_class('SchInfo')
module.add_class('SignalNoiseDbm', import_from_module='ns.wifi')
module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::Object', 'ns3::ObjectBase', 'ns3::ObjectDeleter'], parent=root_module['ns3::ObjectBase'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
module.add_class('Simulator', destructor_visibility='private', import_from_module='ns.core')
module.add_enum('', ['NO_CONTEXT'], outer_class=root_module['ns3::Simulator'], import_from_module='ns.core')
module.add_class('Tag', import_from_module='ns.network', parent=root_module['ns3::ObjectBase'])
module.add_class('TagBuffer', import_from_module='ns.network')
module.add_class('TimeWithUnit', import_from_module='ns.core')
module.add_class('TracedValue', import_from_module='ns.core', template_parameters=['unsigned int'])
module.add_class('TxInfo')
module.add_class('TxProfile')
module.add_class('TypeId', import_from_module='ns.core')
module.add_enum('AttributeFlag', ['ATTR_GET', 'ATTR_SET', 'ATTR_CONSTRUCT', 'ATTR_SGC'], outer_class=root_module['ns3::TypeId'], import_from_module='ns.core')
module.add_enum('SupportLevel', ['SUPPORTED', 'DEPRECATED', 'OBSOLETE'], outer_class=root_module['ns3::TypeId'], import_from_module='ns.core')
module.add_class('AttributeInformation', import_from_module='ns.core', outer_class=root_module['ns3::TypeId'])
module.add_class('TraceSourceInformation', import_from_module='ns.core', outer_class=root_module['ns3::TypeId'])
typehandlers.add_type_alias(u'uint32_t', u'ns3::TypeId::hash_t')
typehandlers.add_type_alias(u'uint32_t*', u'ns3::TypeId::hash_t*')
typehandlers.add_type_alias(u'uint32_t&', u'ns3::TypeId::hash_t&')
module.add_class('Vector2D', import_from_module='ns.core')
module.add_class('Vector3D', import_from_module='ns.core')
module.add_class('VendorSpecificContentManager')
module.add_class('VsaInfo')
module.add_class('WaveBsmHelper')
module.add_class('WaveHelper', allow_subclassing=True)
module.add_class('WifiHelper', allow_subclassing=True, import_from_module='ns.wifi')
typehandlers.add_type_alias(u'std::function< unsigned long long ( ns3::Ptr< ns3::QueueItem > ) >', u'ns3::WifiHelper::SelectQueueCallback')
typehandlers.add_type_alias(u'std::function< unsigned long long ( ns3::Ptr< ns3::QueueItem > ) >*', u'ns3::WifiHelper::SelectQueueCallback*')
typehandlers.add_type_alias(u'std::function< unsigned long long ( ns3::Ptr< ns3::QueueItem > ) >&', u'ns3::WifiHelper::SelectQueueCallback&')
module.add_class('WifiMacHelper', allow_subclassing=True, import_from_module='ns.wifi')
module.add_class('WifiMode', import_from_module='ns.wifi')
module.add_class('WifiModeFactory', import_from_module='ns.wifi')
module.add_class('WifiPhyHelper', import_from_module='ns.wifi', parent=[root_module['ns3::PcapHelperForDevice'], root_module['ns3::AsciiTraceHelperForDevice']])
module.add_enum('SupportedPcapDataLinkTypes', ['DLT_IEEE802_11', 'DLT_PRISM_HEADER', 'DLT_IEEE802_11_RADIO'], outer_class=root_module['ns3::WifiPhyHelper'], import_from_module='ns.wifi')
module.add_class('WifiRemoteStation', import_from_module='ns.wifi')
module.add_class('WifiRemoteStationInfo', import_from_module='ns.wifi')
module.add_class('WifiRemoteStationState', import_from_module='ns.wifi')
module.add_enum('', ['BRAND_NEW', 'DISASSOC', 'WAIT_ASSOC_TX_OK', 'GOT_ASSOC_TX_OK'], outer_class=root_module['ns3::WifiRemoteStationState'], import_from_module='ns.wifi')
module.add_class('WifiTxVector', import_from_module='ns.wifi')
module.add_class('YansWifiChannelHelper', import_from_module='ns.wifi')
module.add_class('YansWifiPhyHelper', import_from_module='ns.wifi', parent=root_module['ns3::WifiPhyHelper'])
module.add_class('empty', import_from_module='ns.core')
module.add_class('int64x64_t', import_from_module='ns.core')
module.add_enum('impl_type', ['int128_impl', 'cairo_impl', 'ld_impl'], outer_class=root_module['ns3::int64x64_t'], import_from_module='ns.core')
module.add_class('Chunk', import_from_module='ns.network', parent=root_module['ns3::ObjectBase'])
module.add_class('Header', import_from_module='ns.network', parent=root_module['ns3::Chunk'])
module.add_class('HigherLayerTxVectorTag', parent=root_module['ns3::Tag'])
module.add_class('InternetStackHelper', import_from_module='ns.internet', parent=[root_module['ns3::PcapHelperForIpv4'], root_module['ns3::PcapHelperForIpv6'], root_module['ns3::AsciiTraceHelperForIpv4'], root_module['ns3::AsciiTraceHelperForIpv6']])
module.add_class('Ipv4Header', import_from_module='ns.internet', parent=root_module['ns3::Header'])
module.add_enum('DscpType', ['DscpDefault', 'DSCP_CS1', 'DSCP_AF11', 'DSCP_AF12', 'DSCP_AF13', 'DSCP_CS2', 'DSCP_AF21', 'DSCP_AF22', 'DSCP_AF23', 'DSCP_CS3', 'DSCP_AF31', 'DSCP_AF32', 'DSCP_AF33', 'DSCP_CS4', 'DSCP_AF41', 'DSCP_AF42', 'DSCP_AF43', 'DSCP_CS5', 'DSCP_EF', 'DSCP_CS6', 'DSCP_CS7'], outer_class=root_module['ns3::Ipv4Header'], import_from_module='ns.internet')
module.add_enum('EcnType', ['ECN_NotECT', 'ECN_ECT1', 'ECN_ECT0', 'ECN_CE'], outer_class=root_module['ns3::Ipv4Header'], import_from_module='ns.internet')
module.add_class('Ipv6Header', import_from_module='ns.internet', parent=root_module['ns3::Header'])
module.add_enum('DscpType', ['DscpDefault', 'DSCP_CS1', 'DSCP_AF11', 'DSCP_AF12', 'DSCP_AF13', 'DSCP_CS2', 'DSCP_AF21', 'DSCP_AF22', 'DSCP_AF23', 'DSCP_CS3', 'DSCP_AF31', 'DSCP_AF32', 'DSCP_AF33', 'DSCP_CS4', 'DSCP_AF41', 'DSCP_AF42', 'DSCP_AF43', 'DSCP_CS5', 'DSCP_EF', 'DSCP_CS6', 'DSCP_CS7'], outer_class=root_module['ns3::Ipv6Header'], import_from_module='ns.internet')
module.add_enum('NextHeader_e', ['IPV6_EXT_HOP_BY_HOP', 'IPV6_IPV4', 'IPV6_TCP', 'IPV6_UDP', 'IPV6_IPV6', 'IPV6_EXT_ROUTING', 'IPV6_EXT_FRAGMENTATION', 'IPV6_EXT_CONFIDENTIALITY', 'IPV6_EXT_AUTHENTIFICATION', 'IPV6_ICMPV6', 'IPV6_EXT_END', 'IPV6_EXT_DESTINATION', 'IPV6_SCTP', 'IPV6_EXT_MOBILITY', 'IPV6_UDP_LITE'], outer_class=root_module['ns3::Ipv6Header'], import_from_module='ns.internet')
module.add_enum('EcnType', ['ECN_NotECT', 'ECN_ECT1', 'ECN_ECT0', 'ECN_CE'], outer_class=root_module['ns3::Ipv6Header'], import_from_module='ns.internet')
module.add_class('NqosWaveMacHelper', parent=root_module['ns3::WifiMacHelper'])
module.add_class('Object', import_from_module='ns.core', parent=root_module['ns3::SimpleRefCount< ns3::Object, ns3::ObjectBase, ns3::ObjectDeleter >'])
module.add_class('AggregateIterator', import_from_module='ns.core', outer_class=root_module['ns3::Object'])
module.add_class('PcapFileWrapper', import_from_module='ns.network', parent=root_module['ns3::Object'])
module.add_class('QosWaveMacHelper', parent=root_module['ns3::WifiMacHelper'])
module.add_class('QueueBase', import_from_module='ns.network', parent=root_module['ns3::Object'])
module.add_class('RandomVariableStream', import_from_module='ns.core', parent=root_module['ns3::Object'])
module.add_class('SequentialRandomVariable', import_from_module='ns.core', parent=root_module['ns3::RandomVariableStream'])
module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::AttributeAccessor', 'ns3::empty', 'ns3::DefaultDeleter<ns3::AttributeAccessor>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::AttributeChecker', 'ns3::empty', 'ns3::DefaultDeleter<ns3::AttributeChecker>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::AttributeValue', 'ns3::empty', 'ns3::DefaultDeleter<ns3::AttributeValue>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::CallbackImplBase', 'ns3::empty', 'ns3::DefaultDeleter<ns3::CallbackImplBase>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
module.add_class('SimpleRefCount', automatic_type_narrowing=True, template_parameters=['ns3::ChannelCoordinationListener', 'ns3::empty', 'ns3::DefaultDeleter<ns3::ChannelCoordinationListener>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::Event', 'ns3::empty', 'ns3::DefaultDeleter<ns3::Event>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::EventImpl', 'ns3::empty', 'ns3::DefaultDeleter<ns3::EventImpl>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::Hash::Implementation', 'ns3::empty', 'ns3::DefaultDeleter<ns3::Hash::Implementation>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::Ipv4MulticastRoute', 'ns3::empty', 'ns3::DefaultDeleter<ns3::Ipv4MulticastRoute>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::Ipv4Route', 'ns3::empty', 'ns3::DefaultDeleter<ns3::Ipv4Route>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::NixVector', 'ns3::empty', 'ns3::DefaultDeleter<ns3::NixVector>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::OutputStreamWrapper', 'ns3::empty', 'ns3::DefaultDeleter<ns3::OutputStreamWrapper>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::Packet', 'ns3::empty', 'ns3::DefaultDeleter<ns3::Packet>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::QueueItem', 'ns3::empty', 'ns3::DefaultDeleter<ns3::QueueItem>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::TraceSourceAccessor', 'ns3::empty', 'ns3::DefaultDeleter<ns3::TraceSourceAccessor>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::WifiInformationElement', 'ns3::empty', 'ns3::DefaultDeleter<ns3::WifiInformationElement>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::WifiMacQueueItem', 'ns3::empty', 'ns3::DefaultDeleter<ns3::WifiMacQueueItem>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
module.add_class('Socket', import_from_module='ns.network', parent=root_module['ns3::Object'])
module.add_enum('SocketErrno', ['ERROR_NOTERROR', 'ERROR_ISCONN', 'ERROR_NOTCONN', 'ERROR_MSGSIZE', 'ERROR_AGAIN', 'ERROR_SHUTDOWN', 'ERROR_OPNOTSUPP', 'ERROR_AFNOSUPPORT', 'ERROR_INVAL', 'ERROR_BADF', 'ERROR_NOROUTETOHOST', 'ERROR_NODEV', 'ERROR_ADDRNOTAVAIL', 'ERROR_ADDRINUSE', 'SOCKET_ERRNO_LAST'], outer_class=root_module['ns3::Socket'], import_from_module='ns.network')
module.add_enum('SocketType', ['NS3_SOCK_STREAM', 'NS3_SOCK_SEQPACKET', 'NS3_SOCK_DGRAM', 'NS3_SOCK_RAW'], outer_class=root_module['ns3::Socket'], import_from_module='ns.network')
module.add_enum('SocketPriority', ['NS3_PRIO_BESTEFFORT', 'NS3_PRIO_FILLER', 'NS3_PRIO_BULK', 'NS3_PRIO_INTERACTIVE_BULK', 'NS3_PRIO_INTERACTIVE', 'NS3_PRIO_CONTROL'], outer_class=root_module['ns3::Socket'], import_from_module='ns.network')
module.add_enum('Ipv6MulticastFilterMode', ['INCLUDE', 'EXCLUDE'], outer_class=root_module['ns3::Socket'], import_from_module='ns.network')
module.add_class('SocketIpTosTag', import_from_module='ns.network', parent=root_module['ns3::Tag'])
module.add_class('SocketIpTtlTag', import_from_module='ns.network', parent=root_module['ns3::Tag'])
module.add_class('SocketIpv6HopLimitTag', import_from_module='ns.network', parent=root_module['ns3::Tag'])
module.add_class('SocketIpv6TclassTag', import_from_module='ns.network', parent=root_module['ns3::Tag'])
module.add_class('SocketPriorityTag', import_from_module='ns.network', parent=root_module['ns3::Tag'])
module.add_class('SocketSetDontFragmentTag', import_from_module='ns.network', parent=root_module['ns3::Tag'])
module.add_class('Time', import_from_module='ns.core')
module.add_enum('Unit', ['Y', 'D', 'H', 'MIN', 'S', 'MS', 'US', 'NS', 'PS', 'FS', 'LAST'], outer_class=root_module['ns3::Time'], import_from_module='ns.core')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Time )', u'ns3::Time::TracedCallback')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Time )*', u'ns3::Time::TracedCallback*')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Time )&', u'ns3::Time::TracedCallback&')
root_module['ns3::Time'].implicitly_converts_to(root_module['ns3::int64x64_t'])
module.add_class('TraceSourceAccessor', import_from_module='ns.core', parent=root_module['ns3::SimpleRefCount< ns3::TraceSourceAccessor, ns3::empty, ns3::DefaultDeleter<ns3::TraceSourceAccessor> >'])
module.add_class('Trailer', import_from_module='ns.network', parent=root_module['ns3::Chunk'])
module.add_class('TriangularRandomVariable', import_from_module='ns.core', parent=root_module['ns3::RandomVariableStream'])
module.add_class('Txop', import_from_module='ns.wifi', parent=root_module['ns3::Object'])
typehandlers.add_type_alias(u'ns3::Callback< void, ns3::WifiMacHeader const &, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', u'ns3::Txop::TxOk')
typehandlers.add_type_alias(u'ns3::Callback< void, ns3::WifiMacHeader const &, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >*', u'ns3::Txop::TxOk*')
typehandlers.add_type_alias(u'ns3::Callback< void, ns3::WifiMacHeader const &, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >&', u'ns3::Txop::TxOk&')
typehandlers.add_type_alias(u'ns3::Callback< void, ns3::WifiMacHeader const &, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', u'ns3::Txop::TxFailed')
typehandlers.add_type_alias(u'ns3::Callback< void, ns3::WifiMacHeader const &, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >*', u'ns3::Txop::TxFailed*')
typehandlers.add_type_alias(u'ns3::Callback< void, ns3::WifiMacHeader const &, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >&', u'ns3::Txop::TxFailed&')
typehandlers.add_type_alias(u'ns3::Callback< void, ns3::Ptr< ns3::Packet const >, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', u'ns3::Txop::TxDropped')
typehandlers.add_type_alias(u'ns3::Callback< void, ns3::Ptr< ns3::Packet const >, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >*', u'ns3::Txop::TxDropped*')
typehandlers.add_type_alias(u'ns3::Callback< void, ns3::Ptr< ns3::Packet const >, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >&', u'ns3::Txop::TxDropped&')
module.add_class('UniformRandomVariable', import_from_module='ns.core', parent=root_module['ns3::RandomVariableStream'])
module.add_class('VendorSpecificActionHeader', parent=root_module['ns3::Header'])
module.add_class('VsaManager', parent=root_module['ns3::Object'])
module.add_class('WaveBsmStats', parent=root_module['ns3::Object'])
module.add_class('WeibullRandomVariable', import_from_module='ns.core', parent=root_module['ns3::RandomVariableStream'])
module.add_class('Wifi80211pHelper', parent=root_module['ns3::WifiHelper'])
module.add_class('WifiInformationElement', import_from_module='ns.wifi', parent=root_module['ns3::SimpleRefCount< ns3::WifiInformationElement, ns3::empty, ns3::DefaultDeleter<ns3::WifiInformationElement> >'])
module.add_class('WifiMac', import_from_module='ns.wifi', parent=root_module['ns3::Object'])
module.add_class('WifiMacHeader', import_from_module='ns.wifi', parent=root_module['ns3::Header'])
module.add_enum('QosAckPolicy', ['NORMAL_ACK', 'NO_ACK', 'NO_EXPLICIT_ACK', 'BLOCK_ACK'], outer_class=root_module['ns3::WifiMacHeader'], import_from_module='ns.wifi')
module.add_enum('AddressType', ['ADDR1', 'ADDR2', 'ADDR3', 'ADDR4'], outer_class=root_module['ns3::WifiMacHeader'], import_from_module='ns.wifi')
typehandlers.add_type_alias(u'void ( * ) ( ns3::WifiMacHeader const & )', u'ns3::WifiMacHeader::TracedCallback')
typehandlers.add_type_alias(u'void ( * ) ( ns3::WifiMacHeader const & )*', u'ns3::WifiMacHeader::TracedCallback*')
typehandlers.add_type_alias(u'void ( * ) ( ns3::WifiMacHeader const & )&', u'ns3::WifiMacHeader::TracedCallback&')
module.add_class('WifiMacQueueItem', import_from_module='ns.wifi', parent=root_module['ns3::SimpleRefCount< ns3::WifiMacQueueItem, ns3::empty, ns3::DefaultDeleter<ns3::WifiMacQueueItem> >'])
module.add_class('WifiPhy', import_from_module='ns.wifi', parent=root_module['ns3::Object'])
typehandlers.add_type_alias(u'std::pair< unsigned char, ns3::WifiPhyStandard >', u'ns3::WifiPhy::ChannelNumberStandardPair')
typehandlers.add_type_alias(u'std::pair< unsigned char, ns3::WifiPhyStandard >*', u'ns3::WifiPhy::ChannelNumberStandardPair*')
typehandlers.add_type_alias(u'std::pair< unsigned char, ns3::WifiPhyStandard >&', u'ns3::WifiPhy::ChannelNumberStandardPair&')
typehandlers.add_type_alias(u'std::pair< unsigned short, unsigned short >', u'ns3::WifiPhy::FrequencyWidthPair')
typehandlers.add_type_alias(u'std::pair< unsigned short, unsigned short >*', u'ns3::WifiPhy::FrequencyWidthPair*')
typehandlers.add_type_alias(u'std::pair< unsigned short, unsigned short >&', u'ns3::WifiPhy::FrequencyWidthPair&')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ptr< ns3::Packet const >, uint16_t, ns3::WifiTxVector, ns3::MpduInfo, ns3::SignalNoiseDbm )', u'ns3::WifiPhy::MonitorSnifferRxCallback')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ptr< ns3::Packet const >, uint16_t, ns3::WifiTxVector, ns3::MpduInfo, ns3::SignalNoiseDbm )*', u'ns3::WifiPhy::MonitorSnifferRxCallback*')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ptr< ns3::Packet const >, uint16_t, ns3::WifiTxVector, ns3::MpduInfo, ns3::SignalNoiseDbm )&', u'ns3::WifiPhy::MonitorSnifferRxCallback&')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ptr< ns3::Packet const > const, uint16_t, ns3::WifiTxVector, ns3::MpduInfo )', u'ns3::WifiPhy::MonitorSnifferTxCallback')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ptr< ns3::Packet const > const, uint16_t, ns3::WifiTxVector, ns3::MpduInfo )*', u'ns3::WifiPhy::MonitorSnifferTxCallback*')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ptr< ns3::Packet const > const, uint16_t, ns3::WifiTxVector, ns3::MpduInfo )&', u'ns3::WifiPhy::MonitorSnifferTxCallback&')
typehandlers.add_type_alias(u'void ( * ) ( ns3::HePreambleParameters )', u'ns3::WifiPhy::EndOfHePreambleCallback')
typehandlers.add_type_alias(u'void ( * ) ( ns3::HePreambleParameters )*', u'ns3::WifiPhy::EndOfHePreambleCallback*')
typehandlers.add_type_alias(u'void ( * ) ( ns3::HePreambleParameters )&', u'ns3::WifiPhy::EndOfHePreambleCallback&')
module.add_class('WifiPhyStateHelper', import_from_module='ns.wifi', parent=root_module['ns3::Object'])
typehandlers.add_type_alias(u'void ( * ) ( ns3::Time, ns3::Time, WifiPhyState )', u'ns3::WifiPhyStateHelper::StateTracedCallback')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Time, ns3::Time, WifiPhyState )*', u'ns3::WifiPhyStateHelper::StateTracedCallback*')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Time, ns3::Time, WifiPhyState )&', u'ns3::WifiPhyStateHelper::StateTracedCallback&')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ptr< ns3::Packet const >, double, ns3::WifiMode, ns3::WifiPreamble )', u'ns3::WifiPhyStateHelper::RxOkTracedCallback')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ptr< ns3::Packet const >, double, ns3::WifiMode, ns3::WifiPreamble )*', u'ns3::WifiPhyStateHelper::RxOkTracedCallback*')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ptr< ns3::Packet const >, double, ns3::WifiMode, ns3::WifiPreamble )&', u'ns3::WifiPhyStateHelper::RxOkTracedCallback&')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ptr< ns3::Packet const >, double )', u'ns3::WifiPhyStateHelper::RxEndErrorTracedCallback')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ptr< ns3::Packet const >, double )*', u'ns3::WifiPhyStateHelper::RxEndErrorTracedCallback*')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ptr< ns3::Packet const >, double )&', u'ns3::WifiPhyStateHelper::RxEndErrorTracedCallback&')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ptr< ns3::Packet const >, ns3::WifiMode, ns3::WifiPreamble, uint8_t )', u'ns3::WifiPhyStateHelper::TxTracedCallback')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ptr< ns3::Packet const >, ns3::WifiMode, ns3::WifiPreamble, uint8_t )*', u'ns3::WifiPhyStateHelper::TxTracedCallback*')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ptr< ns3::Packet const >, ns3::WifiMode, ns3::WifiPreamble, uint8_t )&', u'ns3::WifiPhyStateHelper::TxTracedCallback&')
module.add_class('WifiRemoteStationManager', import_from_module='ns.wifi', parent=root_module['ns3::Object'])
module.add_enum('ProtectionMode', ['RTS_CTS', 'CTS_TO_SELF'], outer_class=root_module['ns3::WifiRemoteStationManager'], import_from_module='ns.wifi')
typehandlers.add_type_alias(u'std::vector< ns3::WifiRemoteStation * >', u'ns3::WifiRemoteStationManager::Stations')
typehandlers.add_type_alias(u'std::vector< ns3::WifiRemoteStation * >*', u'ns3::WifiRemoteStationManager::Stations*')
typehandlers.add_type_alias(u'std::vector< ns3::WifiRemoteStation * >&', u'ns3::WifiRemoteStationManager::Stations&')
typehandlers.add_type_alias(u'std::vector< ns3::WifiRemoteStationState * >', u'ns3::WifiRemoteStationManager::StationStates')
typehandlers.add_type_alias(u'std::vector< ns3::WifiRemoteStationState * >*', u'ns3::WifiRemoteStationManager::StationStates*')
typehandlers.add_type_alias(u'std::vector< ns3::WifiRemoteStationState * >&', u'ns3::WifiRemoteStationManager::StationStates&')
typehandlers.add_type_alias(u'void ( * ) ( double, double, ns3::Mac48Address )', u'ns3::WifiRemoteStationManager::PowerChangeTracedCallback')
typehandlers.add_type_alias(u'void ( * ) ( double, double, ns3::Mac48Address )*', u'ns3::WifiRemoteStationManager::PowerChangeTracedCallback*')
typehandlers.add_type_alias(u'void ( * ) ( double, double, ns3::Mac48Address )&', u'ns3::WifiRemoteStationManager::PowerChangeTracedCallback&')
typehandlers.add_type_alias(u'void ( * ) ( ns3::DataRate, ns3::DataRate, ns3::Mac48Address )', u'ns3::WifiRemoteStationManager::RateChangeTracedCallback')
typehandlers.add_type_alias(u'void ( * ) ( ns3::DataRate, ns3::DataRate, ns3::Mac48Address )*', u'ns3::WifiRemoteStationManager::RateChangeTracedCallback*')
typehandlers.add_type_alias(u'void ( * ) ( ns3::DataRate, ns3::DataRate, ns3::Mac48Address )&', u'ns3::WifiRemoteStationManager::RateChangeTracedCallback&')
module.add_class('YansWavePhyHelper', parent=root_module['ns3::YansWifiPhyHelper'])
module.add_class('ZetaRandomVariable', import_from_module='ns.core', parent=root_module['ns3::RandomVariableStream'])
module.add_class('ZipfRandomVariable', import_from_module='ns.core', parent=root_module['ns3::RandomVariableStream'])
module.add_class('Application', import_from_module='ns.network', parent=root_module['ns3::Object'])
typehandlers.add_type_alias(u'void ( * ) ( ns3::Time const &, ns3::Address const & )', u'ns3::Application::DelayAddressCallback')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Time const &, ns3::Address const & )*', u'ns3::Application::DelayAddressCallback*')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Time const &, ns3::Address const & )&', u'ns3::Application::DelayAddressCallback&')
typehandlers.add_type_alias(u'void ( * ) ( std::string const &, std::string const & )', u'ns3::Application::StateTransitionCallback')
typehandlers.add_type_alias(u'void ( * ) ( std::string const &, std::string const & )*', u'ns3::Application::StateTransitionCallback*')
typehandlers.add_type_alias(u'void ( * ) ( std::string const &, std::string const & )&', u'ns3::Application::StateTransitionCallback&')
module.add_class('AttributeAccessor', import_from_module='ns.core', parent=root_module['ns3::SimpleRefCount< ns3::AttributeAccessor, ns3::empty, ns3::DefaultDeleter<ns3::AttributeAccessor> >'])
module.add_class('AttributeChecker', allow_subclassing=False, automatic_type_narrowing=True, import_from_module='ns.core', parent=root_module['ns3::SimpleRefCount< ns3::AttributeChecker, ns3::empty, ns3::DefaultDeleter<ns3::AttributeChecker> >'])
module.add_class('AttributeValue', allow_subclassing=False, automatic_type_narrowing=True, import_from_module='ns.core', parent=root_module['ns3::SimpleRefCount< ns3::AttributeValue, ns3::empty, ns3::DefaultDeleter<ns3::AttributeValue> >'])
module.add_class('BlockAckManager', import_from_module='ns.wifi', parent=root_module['ns3::Object'])
typehandlers.add_type_alias(u'ns3::Callback< void, ns3::WifiMacHeader const &, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', u'ns3::BlockAckManager::TxOk')
typehandlers.add_type_alias(u'ns3::Callback< void, ns3::WifiMacHeader const &, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >*', u'ns3::BlockAckManager::TxOk*')
typehandlers.add_type_alias(u'ns3::Callback< void, ns3::WifiMacHeader const &, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >&', u'ns3::BlockAckManager::TxOk&')
typehandlers.add_type_alias(u'ns3::Callback< void, ns3::WifiMacHeader const &, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', u'ns3::BlockAckManager::TxFailed')
typehandlers.add_type_alias(u'ns3::Callback< void, ns3::WifiMacHeader const &, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >*', u'ns3::BlockAckManager::TxFailed*')
typehandlers.add_type_alias(u'ns3::Callback< void, ns3::WifiMacHeader const &, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >&', u'ns3::BlockAckManager::TxFailed&')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Time, ns3::Mac48Address, uint8_t, ns3::OriginatorBlockAckAgreement::State )', u'ns3::BlockAckManager::AgreementStateTracedCallback')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Time, ns3::Mac48Address, uint8_t, ns3::OriginatorBlockAckAgreement::State )*', u'ns3::BlockAckManager::AgreementStateTracedCallback*')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Time, ns3::Mac48Address, uint8_t, ns3::OriginatorBlockAckAgreement::State )&', u'ns3::BlockAckManager::AgreementStateTracedCallback&')
module.add_class('BooleanChecker', import_from_module='ns.core', parent=root_module['ns3::AttributeChecker'])
module.add_class('BooleanValue', import_from_module='ns.core', parent=root_module['ns3::AttributeValue'])
module.add_class('BsmApplication', parent=root_module['ns3::Application'])
module.add_class('CallbackChecker', import_from_module='ns.core', parent=root_module['ns3::AttributeChecker'])
module.add_class('CallbackImplBase', import_from_module='ns.core', parent=root_module['ns3::SimpleRefCount< ns3::CallbackImplBase, ns3::empty, ns3::DefaultDeleter<ns3::CallbackImplBase> >'])
module.add_class('CallbackValue', import_from_module='ns.core', parent=root_module['ns3::AttributeValue'])
module.add_class('Channel', import_from_module='ns.network', parent=root_module['ns3::Object'])
module.add_class('ChannelAccessManager', import_from_module='ns.wifi', parent=root_module['ns3::Object'])
module.add_class('ChannelCoordinationListener', parent=root_module['ns3::SimpleRefCount< ns3::ChannelCoordinationListener, ns3::empty, ns3::DefaultDeleter<ns3::ChannelCoordinationListener> >'])
module.add_class('ChannelCoordinator', parent=root_module['ns3::Object'])
module.add_class('ChannelManager', parent=root_module['ns3::Object'])
module.add_class('ChannelScheduler', parent=root_module['ns3::Object'])
module.add_class('ConstantRandomVariable', import_from_module='ns.core', parent=root_module['ns3::RandomVariableStream'])
module.add_class('DataRateChecker', import_from_module='ns.network', parent=root_module['ns3::AttributeChecker'])
module.add_class('DataRateValue', import_from_module='ns.network', parent=root_module['ns3::AttributeValue'])
module.add_class('DefaultChannelScheduler', parent=root_module['ns3::ChannelScheduler'])
module.add_class('DeterministicRandomVariable', import_from_module='ns.core', parent=root_module['ns3::RandomVariableStream'])
module.add_class('DoubleValue', import_from_module='ns.core', parent=root_module['ns3::AttributeValue'])
module.add_class('EmpiricalRandomVariable', import_from_module='ns.core', parent=root_module['ns3::RandomVariableStream'])
module.add_class('EmptyAttributeAccessor', import_from_module='ns.core', parent=root_module['ns3::AttributeAccessor'])
module.add_class('EmptyAttributeChecker', import_from_module='ns.core', parent=root_module['ns3::AttributeChecker'])
module.add_class('EmptyAttributeValue', import_from_module='ns.core', parent=root_module['ns3::AttributeValue'])
module.add_class('EnumChecker', import_from_module='ns.core', parent=root_module['ns3::AttributeChecker'])
module.add_class('EnumValue', import_from_module='ns.core', parent=root_module['ns3::AttributeValue'])
module.add_class('ErlangRandomVariable', import_from_module='ns.core', parent=root_module['ns3::RandomVariableStream'])
module.add_class('ErrorModel', import_from_module='ns.network', parent=root_module['ns3::Object'])
module.add_class('Event', import_from_module='ns.wifi', parent=root_module['ns3::SimpleRefCount< ns3::Event, ns3::empty, ns3::DefaultDeleter<ns3::Event> >'])
module.add_class('EventImpl', import_from_module='ns.core', parent=root_module['ns3::SimpleRefCount< ns3::EventImpl, ns3::empty, ns3::DefaultDeleter<ns3::EventImpl> >'])
module.add_class('ExponentialRandomVariable', import_from_module='ns.core', parent=root_module['ns3::RandomVariableStream'])
module.add_class('GammaRandomVariable', import_from_module='ns.core', parent=root_module['ns3::RandomVariableStream'])
module.add_class('HeCapabilities', import_from_module='ns.wifi', parent=root_module['ns3::WifiInformationElement'])
module.add_class('HtCapabilities', import_from_module='ns.wifi', parent=root_module['ns3::WifiInformationElement'])
module.add_class('IntegerValue', import_from_module='ns.core', parent=root_module['ns3::AttributeValue'])
module.add_class('Ipv4', import_from_module='ns.internet', parent=root_module['ns3::Object'])
module.add_class('Ipv4AddressChecker', import_from_module='ns.network', parent=root_module['ns3::AttributeChecker'])
module.add_class('Ipv4AddressValue', import_from_module='ns.network', parent=root_module['ns3::AttributeValue'])
module.add_class('Ipv4L3Protocol', import_from_module='ns.internet', parent=root_module['ns3::Ipv4'])
module.add_enum('DropReason', ['DROP_TTL_EXPIRED', 'DROP_NO_ROUTE', 'DROP_BAD_CHECKSUM', 'DROP_INTERFACE_DOWN', 'DROP_ROUTE_ERROR', 'DROP_FRAGMENT_TIMEOUT'], outer_class=root_module['ns3::Ipv4L3Protocol'], import_from_module='ns.internet')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ipv4Header const &, ns3::Ptr< ns3::Packet const >, uint32_t )', u'ns3::Ipv4L3Protocol::SentTracedCallback')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ipv4Header const &, ns3::Ptr< ns3::Packet const >, uint32_t )*', u'ns3::Ipv4L3Protocol::SentTracedCallback*')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ipv4Header const &, ns3::Ptr< ns3::Packet const >, uint32_t )&', u'ns3::Ipv4L3Protocol::SentTracedCallback&')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ptr< ns3::Packet const >, ns3::Ptr< ns3::Ipv4 >, uint32_t )', u'ns3::Ipv4L3Protocol::TxRxTracedCallback')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ptr< ns3::Packet const >, ns3::Ptr< ns3::Ipv4 >, uint32_t )*', u'ns3::Ipv4L3Protocol::TxRxTracedCallback*')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ptr< ns3::Packet const >, ns3::Ptr< ns3::Ipv4 >, uint32_t )&', u'ns3::Ipv4L3Protocol::TxRxTracedCallback&')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ipv4Header const &, ns3::Ptr< ns3::Packet const >, ns3::Ipv4L3Protocol::DropReason, ns3::Ptr< ns3::Ipv4 >, uint32_t )', u'ns3::Ipv4L3Protocol::DropTracedCallback')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ipv4Header const &, ns3::Ptr< ns3::Packet const >, ns3::Ipv4L3Protocol::DropReason, ns3::Ptr< ns3::Ipv4 >, uint32_t )*', u'ns3::Ipv4L3Protocol::DropTracedCallback*')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ipv4Header const &, ns3::Ptr< ns3::Packet const >, ns3::Ipv4L3Protocol::DropReason, ns3::Ptr< ns3::Ipv4 >, uint32_t )&', u'ns3::Ipv4L3Protocol::DropTracedCallback&')
module.add_class('Ipv4MaskChecker', import_from_module='ns.network', parent=root_module['ns3::AttributeChecker'])
module.add_class('Ipv4MaskValue', import_from_module='ns.network', parent=root_module['ns3::AttributeValue'])
module.add_class('Ipv4MulticastRoute', import_from_module='ns.internet', parent=root_module['ns3::SimpleRefCount< ns3::Ipv4MulticastRoute, ns3::empty, ns3::DefaultDeleter<ns3::Ipv4MulticastRoute> >'])
module.add_class('Ipv4Route', import_from_module='ns.internet', parent=root_module['ns3::SimpleRefCount< ns3::Ipv4Route, ns3::empty, ns3::DefaultDeleter<ns3::Ipv4Route> >'])
module.add_class('Ipv4RoutingProtocol', import_from_module='ns.internet', parent=root_module['ns3::Object'])
typehandlers.add_type_alias(u'ns3::Callback< void, ns3::Ptr< ns3::Ipv4Route >, ns3::Ptr< ns3::Packet const >, ns3::Ipv4Header const &, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', u'ns3::Ipv4RoutingProtocol::UnicastForwardCallback')
typehandlers.add_type_alias(u'ns3::Callback< void, ns3::Ptr< ns3::Ipv4Route >, ns3::Ptr< ns3::Packet const >, ns3::Ipv4Header const &, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >*', u'ns3::Ipv4RoutingProtocol::UnicastForwardCallback*')
typehandlers.add_type_alias(u'ns3::Callback< void, ns3::Ptr< ns3::Ipv4Route >, ns3::Ptr< ns3::Packet const >, ns3::Ipv4Header const &, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >&', u'ns3::Ipv4RoutingProtocol::UnicastForwardCallback&')
typehandlers.add_type_alias(u'ns3::Callback< void, ns3::Ptr< ns3::Ipv4MulticastRoute >, ns3::Ptr< ns3::Packet const >, ns3::Ipv4Header const &, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', u'ns3::Ipv4RoutingProtocol::MulticastForwardCallback')
typehandlers.add_type_alias(u'ns3::Callback< void, ns3::Ptr< ns3::Ipv4MulticastRoute >, ns3::Ptr< ns3::Packet const >, ns3::Ipv4Header const &, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >*', u'ns3::Ipv4RoutingProtocol::MulticastForwardCallback*')
typehandlers.add_type_alias(u'ns3::Callback< void, ns3::Ptr< ns3::Ipv4MulticastRoute >, ns3::Ptr< ns3::Packet const >, ns3::Ipv4Header const &, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >&', u'ns3::Ipv4RoutingProtocol::MulticastForwardCallback&')
typehandlers.add_type_alias(u'ns3::Callback< void, ns3::Ptr< ns3::Packet const >, ns3::Ipv4Header const &, unsigned int, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', u'ns3::Ipv4RoutingProtocol::LocalDeliverCallback')
typehandlers.add_type_alias(u'ns3::Callback< void, ns3::Ptr< ns3::Packet const >, ns3::Ipv4Header const &, unsigned int, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >*', u'ns3::Ipv4RoutingProtocol::LocalDeliverCallback*')
typehandlers.add_type_alias(u'ns3::Callback< void, ns3::Ptr< ns3::Packet const >, ns3::Ipv4Header const &, unsigned int, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >&', u'ns3::Ipv4RoutingProtocol::LocalDeliverCallback&')
typehandlers.add_type_alias(u'ns3::Callback< void, ns3::Ptr< ns3::Packet const >, ns3::Ipv4Header const &, ns3::Socket::SocketErrno, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', u'ns3::Ipv4RoutingProtocol::ErrorCallback')
typehandlers.add_type_alias(u'ns3::Callback< void, ns3::Ptr< ns3::Packet const >, ns3::Ipv4Header const &, ns3::Socket::SocketErrno, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >*', u'ns3::Ipv4RoutingProtocol::ErrorCallback*')
typehandlers.add_type_alias(u'ns3::Callback< void, ns3::Ptr< ns3::Packet const >, ns3::Ipv4Header const &, ns3::Socket::SocketErrno, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >&', u'ns3::Ipv4RoutingProtocol::ErrorCallback&')
module.add_class('Ipv6', import_from_module='ns.internet', parent=root_module['ns3::Object'])
module.add_class('Ipv6AddressChecker', import_from_module='ns.network', parent=root_module['ns3::AttributeChecker'])
module.add_class('Ipv6AddressValue', import_from_module='ns.network', parent=root_module['ns3::AttributeValue'])
module.add_class('Ipv6L3Protocol', import_from_module='ns.internet', parent=root_module['ns3::Ipv6'])
module.add_enum('DropReason', ['DROP_TTL_EXPIRED', 'DROP_NO_ROUTE', 'DROP_INTERFACE_DOWN', 'DROP_ROUTE_ERROR', 'DROP_UNKNOWN_PROTOCOL', 'DROP_UNKNOWN_OPTION', 'DROP_MALFORMED_HEADER', 'DROP_FRAGMENT_TIMEOUT'], outer_class=root_module['ns3::Ipv6L3Protocol'], import_from_module='ns.internet')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ipv6Header const &, ns3::Ptr< ns3::Packet const >, uint32_t )', u'ns3::Ipv6L3Protocol::SentTracedCallback')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ipv6Header const &, ns3::Ptr< ns3::Packet const >, uint32_t )*', u'ns3::Ipv6L3Protocol::SentTracedCallback*')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ipv6Header const &, ns3::Ptr< ns3::Packet const >, uint32_t )&', u'ns3::Ipv6L3Protocol::SentTracedCallback&')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ptr< ns3::Packet const >, ns3::Ptr< ns3::Ipv6 >, uint32_t )', u'ns3::Ipv6L3Protocol::TxRxTracedCallback')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ptr< ns3::Packet const >, ns3::Ptr< ns3::Ipv6 >, uint32_t )*', u'ns3::Ipv6L3Protocol::TxRxTracedCallback*')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ptr< ns3::Packet const >, ns3::Ptr< ns3::Ipv6 >, uint32_t )&', u'ns3::Ipv6L3Protocol::TxRxTracedCallback&')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ipv6Header const &, ns3::Ptr< ns3::Packet const >, ns3::Ipv6L3Protocol::DropReason, ns3::Ptr< ns3::Ipv6 >, uint32_t )', u'ns3::Ipv6L3Protocol::DropTracedCallback')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ipv6Header const &, ns3::Ptr< ns3::Packet const >, ns3::Ipv6L3Protocol::DropReason, ns3::Ptr< ns3::Ipv6 >, uint32_t )*', u'ns3::Ipv6L3Protocol::DropTracedCallback*')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ipv6Header const &, ns3::Ptr< ns3::Packet const >, ns3::Ipv6L3Protocol::DropReason, ns3::Ptr< ns3::Ipv6 >, uint32_t )&', u'ns3::Ipv6L3Protocol::DropTracedCallback&')
module.add_class('Ipv6PmtuCache', import_from_module='ns.internet', parent=root_module['ns3::Object'])
module.add_class('Ipv6PrefixChecker', import_from_module='ns.network', parent=root_module['ns3::AttributeChecker'])
module.add_class('Ipv6PrefixValue', import_from_module='ns.network', parent=root_module['ns3::AttributeValue'])
module.add_class('ListErrorModel', import_from_module='ns.network', parent=root_module['ns3::ErrorModel'])
module.add_class('LogNormalRandomVariable', import_from_module='ns.core', parent=root_module['ns3::RandomVariableStream'])
module.add_class('Mac48AddressChecker', import_from_module='ns.network', parent=root_module['ns3::AttributeChecker'])
module.add_class('Mac48AddressValue', import_from_module='ns.network', parent=root_module['ns3::AttributeValue'])
module.add_class('MacLow', import_from_module='ns.wifi', parent=root_module['ns3::Object'])
typehandlers.add_type_alias(u'ns3::Callback< void, ns3::Ptr< ns3::Packet >, ns3::WifiMacHeader const *, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', u'ns3::MacLow::MacLowRxCallback')
typehandlers.add_type_alias(u'ns3::Callback< void, ns3::Ptr< ns3::Packet >, ns3::WifiMacHeader const *, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >*', u'ns3::MacLow::MacLowRxCallback*')
typehandlers.add_type_alias(u'ns3::Callback< void, ns3::Ptr< ns3::Packet >, ns3::WifiMacHeader const *, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >&', u'ns3::MacLow::MacLowRxCallback&')
module.add_class('MobilityModel', import_from_module='ns.mobility', parent=root_module['ns3::Object'])
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ptr< ns3::MobilityModel const > )', u'ns3::MobilityModel::TracedCallback')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ptr< ns3::MobilityModel const > )*', u'ns3::MobilityModel::TracedCallback*')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ptr< ns3::MobilityModel const > )&', u'ns3::MobilityModel::TracedCallback&')
module.add_class('MpduAggregator', import_from_module='ns.wifi', parent=root_module['ns3::Object'])
typehandlers.add_type_alias(u'std::list< std::pair< ns3::Ptr< ns3::Packet >, ns3::AmpduSubframeHeader > >', u'ns3::MpduAggregator::DeaggregatedMpdus')
typehandlers.add_type_alias(u'std::list< std::pair< ns3::Ptr< ns3::Packet >, ns3::AmpduSubframeHeader > >*', u'ns3::MpduAggregator::DeaggregatedMpdus*')
typehandlers.add_type_alias(u'std::list< std::pair< ns3::Ptr< ns3::Packet >, ns3::AmpduSubframeHeader > >&', u'ns3::MpduAggregator::DeaggregatedMpdus&')
typehandlers.add_type_alias(u'std::list< std::pair< ns3::Ptr< ns3::Packet >, ns3::AmpduSubframeHeader > > const_iterator', u'ns3::MpduAggregator::DeaggregatedMpdusCI')
typehandlers.add_type_alias(u'std::list< std::pair< ns3::Ptr< ns3::Packet >, ns3::AmpduSubframeHeader > > const_iterator*', u'ns3::MpduAggregator::DeaggregatedMpdusCI*')
typehandlers.add_type_alias(u'std::list< std::pair< ns3::Ptr< ns3::Packet >, ns3::AmpduSubframeHeader > > const_iterator&', u'ns3::MpduAggregator::DeaggregatedMpdusCI&')
typehandlers.add_type_alias(u'std::map< ns3::AcIndex, ns3::Ptr< ns3::QosTxop > >', u'ns3::MpduAggregator::EdcaQueues')
typehandlers.add_type_alias(u'std::map< ns3::AcIndex, ns3::Ptr< ns3::QosTxop > >*', u'ns3::MpduAggregator::EdcaQueues*')
typehandlers.add_type_alias(u'std::map< ns3::AcIndex, ns3::Ptr< ns3::QosTxop > >&', u'ns3::MpduAggregator::EdcaQueues&')
module.add_class('MsduAggregator', import_from_module='ns.wifi', parent=root_module['ns3::Object'])
typehandlers.add_type_alias(u'std::list< std::pair< ns3::Ptr< ns3::Packet >, ns3::AmsduSubframeHeader > >', u'ns3::MsduAggregator::DeaggregatedMsdus')
typehandlers.add_type_alias(u'std::list< std::pair< ns3::Ptr< ns3::Packet >, ns3::AmsduSubframeHeader > >*', u'ns3::MsduAggregator::DeaggregatedMsdus*')
typehandlers.add_type_alias(u'std::list< std::pair< ns3::Ptr< ns3::Packet >, ns3::AmsduSubframeHeader > >&', u'ns3::MsduAggregator::DeaggregatedMsdus&')
typehandlers.add_type_alias(u'std::list< std::pair< ns3::Ptr< ns3::Packet >, ns3::AmsduSubframeHeader > > const_iterator', u'ns3::MsduAggregator::DeaggregatedMsdusCI')
typehandlers.add_type_alias(u'std::list< std::pair< ns3::Ptr< ns3::Packet >, ns3::AmsduSubframeHeader > > const_iterator*', u'ns3::MsduAggregator::DeaggregatedMsdusCI*')
typehandlers.add_type_alias(u'std::list< std::pair< ns3::Ptr< ns3::Packet >, ns3::AmsduSubframeHeader > > const_iterator&', u'ns3::MsduAggregator::DeaggregatedMsdusCI&')
typehandlers.add_type_alias(u'std::map< ns3::AcIndex, ns3::Ptr< ns3::QosTxop > >', u'ns3::MsduAggregator::EdcaQueues')
typehandlers.add_type_alias(u'std::map< ns3::AcIndex, ns3::Ptr< ns3::QosTxop > >*', u'ns3::MsduAggregator::EdcaQueues*')
typehandlers.add_type_alias(u'std::map< ns3::AcIndex, ns3::Ptr< ns3::QosTxop > >&', u'ns3::MsduAggregator::EdcaQueues&')
module.add_class('NetDevice', import_from_module='ns.network', parent=root_module['ns3::Object'])
module.add_enum('PacketType', ['PACKET_HOST', 'NS3_PACKET_HOST', 'PACKET_BROADCAST', 'NS3_PACKET_BROADCAST', 'PACKET_MULTICAST', 'NS3_PACKET_MULTICAST', 'PACKET_OTHERHOST', 'NS3_PACKET_OTHERHOST'], outer_class=root_module['ns3::NetDevice'], import_from_module='ns.network')
typehandlers.add_type_alias(u'void ( * ) ( )', u'ns3::NetDevice::LinkChangeTracedCallback')
typehandlers.add_type_alias(u'void ( * ) ( )*', u'ns3::NetDevice::LinkChangeTracedCallback*')
typehandlers.add_type_alias(u'void ( * ) ( )&', u'ns3::NetDevice::LinkChangeTracedCallback&')
typehandlers.add_type_alias(u'ns3::Callback< bool, ns3::Ptr< ns3::NetDevice >, ns3::Ptr< ns3::Packet const >, unsigned short, ns3::Address const &, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', u'ns3::NetDevice::ReceiveCallback')
typehandlers.add_type_alias(u'ns3::Callback< bool, ns3::Ptr< ns3::NetDevice >, ns3::Ptr< ns3::Packet const >, unsigned short, ns3::Address const &, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >*', u'ns3::NetDevice::ReceiveCallback*')
typehandlers.add_type_alias(u'ns3::Callback< bool, ns3::Ptr< ns3::NetDevice >, ns3::Ptr< ns3::Packet const >, unsigned short, ns3::Address const &, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >&', u'ns3::NetDevice::ReceiveCallback&')
typehandlers.add_type_alias(u'ns3::Callback< bool, ns3::Ptr< ns3::NetDevice >, ns3::Ptr< ns3::Packet const >, unsigned short, ns3::Address const &, ns3::Address const &, ns3::NetDevice::PacketType, ns3::empty, ns3::empty, ns3::empty >', u'ns3::NetDevice::PromiscReceiveCallback')
typehandlers.add_type_alias(u'ns3::Callback< bool, ns3::Ptr< ns3::NetDevice >, ns3::Ptr< ns3::Packet const >, unsigned short, ns3::Address const &, ns3::Address const &, ns3::NetDevice::PacketType, ns3::empty, ns3::empty, ns3::empty >*', u'ns3::NetDevice::PromiscReceiveCallback*')
typehandlers.add_type_alias(u'ns3::Callback< bool, ns3::Ptr< ns3::NetDevice >, ns3::Ptr< ns3::Packet const >, unsigned short, ns3::Address const &, ns3::Address const &, ns3::NetDevice::PacketType, ns3::empty, ns3::empty, ns3::empty >&', u'ns3::NetDevice::PromiscReceiveCallback&')
module.add_class('NixVector', import_from_module='ns.network', parent=root_module['ns3::SimpleRefCount< ns3::NixVector, ns3::empty, ns3::DefaultDeleter<ns3::NixVector> >'])
module.add_class('Node', import_from_module='ns.network', parent=root_module['ns3::Object'])
typehandlers.add_type_alias(u'ns3::Callback< void, ns3::Ptr< ns3::NetDevice >, ns3::Ptr< ns3::Packet const >, unsigned short, ns3::Address const &, ns3::Address const &, ns3::NetDevice::PacketType, ns3::empty, ns3::empty, ns3::empty >', u'ns3::Node::ProtocolHandler')
typehandlers.add_type_alias(u'ns3::Callback< void, ns3::Ptr< ns3::NetDevice >, ns3::Ptr< ns3::Packet const >, unsigned short, ns3::Address const &, ns3::Address const &, ns3::NetDevice::PacketType, ns3::empty, ns3::empty, ns3::empty >*', u'ns3::Node::ProtocolHandler*')
typehandlers.add_type_alias(u'ns3::Callback< void, ns3::Ptr< ns3::NetDevice >, ns3::Ptr< ns3::Packet const >, unsigned short, ns3::Address const &, ns3::Address const &, ns3::NetDevice::PacketType, ns3::empty, ns3::empty, ns3::empty >&', u'ns3::Node::ProtocolHandler&')
typehandlers.add_type_alias(u'ns3::Callback< void, ns3::Ptr< ns3::NetDevice >, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', u'ns3::Node::DeviceAdditionListener')
typehandlers.add_type_alias(u'ns3::Callback< void, ns3::Ptr< ns3::NetDevice >, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >*', u'ns3::Node::DeviceAdditionListener*')
typehandlers.add_type_alias(u'ns3::Callback< void, ns3::Ptr< ns3::NetDevice >, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >&', u'ns3::Node::DeviceAdditionListener&')
module.add_class('NormalRandomVariable', import_from_module='ns.core', parent=root_module['ns3::RandomVariableStream'])
module.add_class('ObjectFactoryChecker', import_from_module='ns.core', parent=root_module['ns3::AttributeChecker'])
module.add_class('ObjectFactoryValue', import_from_module='ns.core', parent=root_module['ns3::AttributeValue'])
module.add_class('OrganizationIdentifierChecker', parent=root_module['ns3::AttributeChecker'])
module.add_class('OrganizationIdentifierValue', parent=root_module['ns3::AttributeValue'])
module.add_class('OutputStreamWrapper', import_from_module='ns.network', parent=root_module['ns3::SimpleRefCount< ns3::OutputStreamWrapper, ns3::empty, ns3::DefaultDeleter<ns3::OutputStreamWrapper> >'])
module.add_class('Packet', import_from_module='ns.network', parent=root_module['ns3::SimpleRefCount< ns3::Packet, ns3::empty, ns3::DefaultDeleter<ns3::Packet> >'])
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ptr< ns3::Packet const > )', u'ns3::Packet::TracedCallback')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ptr< ns3::Packet const > )*', u'ns3::Packet::TracedCallback*')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ptr< ns3::Packet const > )&', u'ns3::Packet::TracedCallback&')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ptr< ns3::Packet const >, ns3::Address const & )', u'ns3::Packet::AddressTracedCallback')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ptr< ns3::Packet const >, ns3::Address const & )*', u'ns3::Packet::AddressTracedCallback*')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ptr< ns3::Packet const >, ns3::Address const & )&', u'ns3::Packet::AddressTracedCallback&')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ptr< ns3::Packet const > const, ns3::Address const &, ns3::Address const & )', u'ns3::Packet::TwoAddressTracedCallback')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ptr< ns3::Packet const > const, ns3::Address const &, ns3::Address const & )*', u'ns3::Packet::TwoAddressTracedCallback*')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ptr< ns3::Packet const > const, ns3::Address const &, ns3::Address const & )&', u'ns3::Packet::TwoAddressTracedCallback&')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ptr< ns3::Packet const >, ns3::Mac48Address )', u'ns3::Packet::Mac48AddressTracedCallback')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ptr< ns3::Packet const >, ns3::Mac48Address )*', u'ns3::Packet::Mac48AddressTracedCallback*')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ptr< ns3::Packet const >, ns3::Mac48Address )&', u'ns3::Packet::Mac48AddressTracedCallback&')
typehandlers.add_type_alias(u'void ( * ) ( uint32_t, uint32_t )', u'ns3::Packet::SizeTracedCallback')
typehandlers.add_type_alias(u'void ( * ) ( uint32_t, uint32_t )*', u'ns3::Packet::SizeTracedCallback*')
typehandlers.add_type_alias(u'void ( * ) ( uint32_t, uint32_t )&', u'ns3::Packet::SizeTracedCallback&')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ptr< ns3::Packet const >, double )', u'ns3::Packet::SinrTracedCallback')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ptr< ns3::Packet const >, double )*', u'ns3::Packet::SinrTracedCallback*')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ptr< ns3::Packet const >, double )&', u'ns3::Packet::SinrTracedCallback&')
module.add_class('ParetoRandomVariable', import_from_module='ns.core', parent=root_module['ns3::RandomVariableStream'])
module.add_class('PointerChecker', import_from_module='ns.core', parent=root_module['ns3::AttributeChecker'])
module.add_class('PointerValue', import_from_module='ns.core', parent=root_module['ns3::AttributeValue'])
module.add_class('QosTxop', import_from_module='ns.wifi', parent=root_module['ns3::Txop'])
module.add_class('Queue', import_from_module='ns.network', template_parameters=['ns3::Packet'], parent=root_module['ns3::QueueBase'])
typehandlers.add_type_alias(u'ns3::Packet', u'ns3::Queue< ns3::Packet > ItemType')
typehandlers.add_type_alias(u'ns3::Packet*', u'ns3::Queue< ns3::Packet > ItemType*')
typehandlers.add_type_alias(u'ns3::Packet&', u'ns3::Queue< ns3::Packet > ItemType&')
module.add_typedef(root_module['ns3::Packet'], 'ItemType')
module.add_class('Queue', import_from_module='ns.network', template_parameters=['ns3::QueueDiscItem'], parent=root_module['ns3::QueueBase'])
typehandlers.add_type_alias(u'ns3::QueueDiscItem', u'ns3::Queue< ns3::QueueDiscItem > ItemType')
typehandlers.add_type_alias(u'ns3::QueueDiscItem*', u'ns3::Queue< ns3::QueueDiscItem > ItemType*')
typehandlers.add_type_alias(u'ns3::QueueDiscItem&', u'ns3::Queue< ns3::QueueDiscItem > ItemType&')
module.add_class('Queue', import_from_module='ns.wifi', template_parameters=['ns3::WifiMacQueueItem'], parent=root_module['ns3::QueueBase'])
typehandlers.add_type_alias(u'ns3::WifiMacQueueItem', u'ns3::Queue< ns3::WifiMacQueueItem > ItemType')
typehandlers.add_type_alias(u'ns3::WifiMacQueueItem*', u'ns3::Queue< ns3::WifiMacQueueItem > ItemType*')
typehandlers.add_type_alias(u'ns3::WifiMacQueueItem&', u'ns3::Queue< ns3::WifiMacQueueItem > ItemType&')
module.add_typedef(root_module['ns3::WifiMacQueueItem'], 'ItemType')
module.add_class('QueueItem', import_from_module='ns.network', parent=root_module['ns3::SimpleRefCount< ns3::QueueItem, ns3::empty, ns3::DefaultDeleter<ns3::QueueItem> >'])
module.add_enum('Uint8Values', ['IP_DSFIELD'], outer_class=root_module['ns3::QueueItem'], import_from_module='ns.network')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ptr< ns3::QueueItem const > )', u'ns3::QueueItem::TracedCallback')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ptr< ns3::QueueItem const > )*', u'ns3::QueueItem::TracedCallback*')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ptr< ns3::QueueItem const > )&', u'ns3::QueueItem::TracedCallback&')
module.add_class('QueueSizeChecker', import_from_module='ns.network', parent=root_module['ns3::AttributeChecker'])
module.add_class('QueueSizeValue', import_from_module='ns.network', parent=root_module['ns3::AttributeValue'])
module.add_class('RateErrorModel', import_from_module='ns.network', parent=root_module['ns3::ErrorModel'])
module.add_enum('ErrorUnit', ['ERROR_UNIT_BIT', 'ERROR_UNIT_BYTE', 'ERROR_UNIT_PACKET'], outer_class=root_module['ns3::RateErrorModel'], import_from_module='ns.network')
module.add_class('ReceiveListErrorModel', import_from_module='ns.network', parent=root_module['ns3::ErrorModel'])
module.add_class('RegularWifiMac', import_from_module='ns.wifi', parent=root_module['ns3::WifiMac'])
typehandlers.add_type_alias(u'ns3::Callback< void, ns3::Ptr< ns3::Packet >, ns3::Mac48Address, ns3::Mac48Address, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', u'ns3::RegularWifiMac::ForwardUpCallback')
typehandlers.add_type_alias(u'ns3::Callback< void, ns3::Ptr< ns3::Packet >, ns3::Mac48Address, ns3::Mac48Address, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >*', u'ns3::RegularWifiMac::ForwardUpCallback*')
typehandlers.add_type_alias(u'ns3::Callback< void, ns3::Ptr< ns3::Packet >, ns3::Mac48Address, ns3::Mac48Address, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >&', u'ns3::RegularWifiMac::ForwardUpCallback&')
module.add_class('Ssid', import_from_module='ns.wifi', parent=root_module['ns3::WifiInformationElement'])
module.add_class('SsidChecker', import_from_module='ns.wifi', parent=root_module['ns3::AttributeChecker'])
module.add_class('SsidValue', import_from_module='ns.wifi', parent=root_module['ns3::AttributeValue'])
module.add_class('TimeValue', import_from_module='ns.core', parent=root_module['ns3::AttributeValue'])
module.add_class('TypeIdChecker', import_from_module='ns.core', parent=root_module['ns3::AttributeChecker'])
module.add_class('TypeIdValue', import_from_module='ns.core', parent=root_module['ns3::AttributeValue'])
module.add_class('UintegerValue', import_from_module='ns.core', parent=root_module['ns3::AttributeValue'])
module.add_class('Vector2DChecker', import_from_module='ns.core', parent=root_module['ns3::AttributeChecker'])
module.add_class('Vector2DValue', import_from_module='ns.core', parent=root_module['ns3::AttributeValue'])
module.add_class('Vector3DChecker', import_from_module='ns.core', parent=root_module['ns3::AttributeChecker'])
module.add_class('Vector3DValue', import_from_module='ns.core', parent=root_module['ns3::AttributeValue'])
module.add_class('VhtCapabilities', import_from_module='ns.wifi', parent=root_module['ns3::WifiInformationElement'])
module.add_class('WaveMacLow', parent=root_module['ns3::MacLow'])
module.add_class('WifiMacQueue', import_from_module='ns.wifi', parent=root_module['ns3::Queue< ns3::WifiMacQueueItem >'])
module.add_enum('DropPolicy', ['DROP_NEWEST', 'DROP_OLDEST'], outer_class=root_module['ns3::WifiMacQueue'], import_from_module='ns.wifi')
module.add_class('WifiModeChecker', import_from_module='ns.wifi', parent=root_module['ns3::AttributeChecker'])
module.add_class('WifiModeValue', import_from_module='ns.wifi', parent=root_module['ns3::AttributeValue'])
module.add_class('WifiNetDevice', import_from_module='ns.wifi', parent=root_module['ns3::NetDevice'])
module.add_class('YansWifiChannel', import_from_module='ns.wifi', parent=root_module['ns3::Channel'])
module.add_class('AddressChecker', import_from_module='ns.network', parent=root_module['ns3::AttributeChecker'])
module.add_class('AddressValue', import_from_module='ns.network', parent=root_module['ns3::AttributeValue'])
module.add_class('BinaryErrorModel', import_from_module='ns.network', parent=root_module['ns3::ErrorModel'])
module.add_class('BurstErrorModel', import_from_module='ns.network', parent=root_module['ns3::ErrorModel'])
module.add_class('CallbackImpl', import_from_module='ns.core', template_parameters=['bool', 'ns3::Ptr<const ns3::Packet>', 'const ns3::Address &', 'unsigned int', 'unsigned int', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'], parent=root_module['ns3::CallbackImplBase'])
module.add_class('CallbackImpl', import_from_module='ns.core', template_parameters=['bool', 'ns3::Ptr<ns3::Socket>', 'const ns3::Address &', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'], parent=root_module['ns3::CallbackImplBase'])
module.add_class('CallbackImpl', import_from_module='ns.core', template_parameters=['bool', 'ns3::Ptr<ns3::WifiMac>', 'const ns3::OrganizationIdentifier &', 'ns3::Ptr<const ns3::Packet>', 'const ns3::Address &', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'], parent=root_module['ns3::CallbackImplBase'])
module.add_class('CallbackImpl', import_from_module='ns.core', template_parameters=['ns3::ObjectBase *', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'], parent=root_module['ns3::CallbackImplBase'])
module.add_class('CallbackImpl', import_from_module='ns.core', template_parameters=['void', 'const ns3::Ipv4Header &', 'ns3::Ptr<const ns3::Packet>', 'ns3::Ipv4L3Protocol::DropReason', 'ns3::Ptr<ns3::Ipv4>', 'unsigned int', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'], parent=root_module['ns3::CallbackImplBase'])
module.add_class('CallbackImpl', import_from_module='ns.core', template_parameters=['void', 'const ns3::Ipv4Header &', 'ns3::Ptr<const ns3::Packet>', 'unsigned int', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'], parent=root_module['ns3::CallbackImplBase'])
module.add_class('CallbackImpl', import_from_module='ns.core', template_parameters=['void', 'const ns3::Ipv6Header &', 'ns3::Ptr<const ns3::Packet>', 'ns3::Ipv6L3Protocol::DropReason', 'ns3::Ptr<ns3::Ipv6>', 'unsigned int', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'], parent=root_module['ns3::CallbackImplBase'])
module.add_class('CallbackImpl', import_from_module='ns.core', template_parameters=['void', 'const ns3::Ipv6Header &', 'ns3::Ptr<const ns3::Packet>', 'unsigned int', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'], parent=root_module['ns3::CallbackImplBase'])
module.add_class('CallbackImpl', import_from_module='ns.core', template_parameters=['void', 'const ns3::WifiMacHeader &', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'], parent=root_module['ns3::CallbackImplBase'])
module.add_class('CallbackImpl', import_from_module='ns.core', template_parameters=['void', 'ns3::Address', 'ns3::Address', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'], parent=root_module['ns3::CallbackImplBase'])
module.add_class('CallbackImpl', import_from_module='ns.core', template_parameters=['void', 'ns3::HePreambleParameters', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'], parent=root_module['ns3::CallbackImplBase'])
module.add_class('CallbackImpl', import_from_module='ns.core', template_parameters=['void', 'ns3::Mac48Address', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'], parent=root_module['ns3::CallbackImplBase'])
module.add_class('CallbackImpl', import_from_module='ns.core', template_parameters=['void', 'ns3::Ptr<const ns3::MobilityModel>', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'], parent=root_module['ns3::CallbackImplBase'])
module.add_class('CallbackImpl', import_from_module='ns.core', template_parameters=['void', 'ns3::Ptr<const ns3::Packet>', 'double', 'ns3::WifiMode', 'ns3::WifiPreamble', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'], parent=root_module['ns3::CallbackImplBase'])
module.add_class('CallbackImpl', import_from_module='ns.core', template_parameters=['void', 'ns3::Ptr<const ns3::Packet>', 'double', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'], parent=root_module['ns3::CallbackImplBase'])
module.add_class('CallbackImpl', import_from_module='ns.core', template_parameters=['void', 'ns3::Ptr<const ns3::Packet>', 'ns3::Mac48Address', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'], parent=root_module['ns3::CallbackImplBase'])
module.add_class('CallbackImpl', import_from_module='ns.core', template_parameters=['void', 'ns3::Ptr<const ns3::Packet>', 'ns3::Ptr<ns3::Ipv4>', 'unsigned int', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'], parent=root_module['ns3::CallbackImplBase'])
module.add_class('CallbackImpl', import_from_module='ns.core', template_parameters=['void', 'ns3::Ptr<const ns3::Packet>', 'ns3::Ptr<ns3::Ipv6>', 'unsigned int', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'], parent=root_module['ns3::CallbackImplBase'])
module.add_class('CallbackImpl', import_from_module='ns.core', template_parameters=['void', 'ns3::Ptr<const ns3::Packet>', 'ns3::WifiMode', 'ns3::WifiPreamble', 'unsigned char', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'], parent=root_module['ns3::CallbackImplBase'])
module.add_class('CallbackImpl', import_from_module='ns.core', template_parameters=['void', 'ns3::Ptr<const ns3::Packet>', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'], parent=root_module['ns3::CallbackImplBase'])
module.add_class('CallbackImpl', import_from_module='ns.core', template_parameters=['void', 'ns3::Ptr<const ns3::Packet>', 'unsigned short', 'ns3::WifiTxVector', 'ns3::MpduInfo', 'ns3::SignalNoiseDbm', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'], parent=root_module['ns3::CallbackImplBase'])
module.add_class('CallbackImpl', import_from_module='ns.core', template_parameters=['void', 'ns3::Ptr<const ns3::Packet>', 'unsigned short', 'ns3::WifiTxVector', 'ns3::MpduInfo', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'], parent=root_module['ns3::CallbackImplBase'])
module.add_class('CallbackImpl', import_from_module='ns.core', template_parameters=['void', 'ns3::Ptr<const ns3::QueueDiscItem>', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'], parent=root_module['ns3::CallbackImplBase'])
module.add_class('CallbackImpl', import_from_module='ns.core', template_parameters=['void', 'ns3::Ptr<const ns3::WifiMacQueueItem>', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'], parent=root_module['ns3::CallbackImplBase'])
module.add_class('CallbackImpl', import_from_module='ns.core', template_parameters=['void', 'ns3::Ptr<ns3::NetDevice>', 'ns3::Ptr<const ns3::Packet>', 'unsigned short', 'const ns3::Address &', 'const ns3::Address &', 'ns3::NetDevice::PacketType', 'ns3::empty', 'ns3::empty', 'ns3::empty'], parent=root_module['ns3::CallbackImplBase'])
module.add_class('CallbackImpl', import_from_module='ns.core', template_parameters=['void', 'ns3::Ptr<ns3::NetDevice>', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'], parent=root_module['ns3::CallbackImplBase'])
module.add_class('CallbackImpl', import_from_module='ns.core', template_parameters=['void', 'ns3::Ptr<ns3::Packet>', 'const ns3::WifiMacHeader *', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'], parent=root_module['ns3::CallbackImplBase'])
module.add_class('CallbackImpl', import_from_module='ns.core', template_parameters=['void', 'ns3::Ptr<ns3::Packet>', 'double', 'ns3::WifiTxVector', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'], parent=root_module['ns3::CallbackImplBase'])
module.add_class('CallbackImpl', import_from_module='ns.core', template_parameters=['void', 'ns3::Ptr<ns3::Packet>', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'], parent=root_module['ns3::CallbackImplBase'])
module.add_class('CallbackImpl', import_from_module='ns.core', template_parameters=['void', 'ns3::Ptr<ns3::Socket>', 'const ns3::Address &', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'], parent=root_module['ns3::CallbackImplBase'])
module.add_class('CallbackImpl', import_from_module='ns.core', template_parameters=['void', 'ns3::Ptr<ns3::Socket>', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'], parent=root_module['ns3::CallbackImplBase'])
module.add_class('CallbackImpl', import_from_module='ns.core', template_parameters=['void', 'ns3::Ptr<ns3::Socket>', 'unsigned int', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'], parent=root_module['ns3::CallbackImplBase'])
module.add_class('CallbackImpl', import_from_module='ns.core', template_parameters=['void', 'ns3::Time', 'ns3::Mac48Address', 'unsigned char', 'ns3::OriginatorBlockAckAgreement::State', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'], parent=root_module['ns3::CallbackImplBase'])
module.add_class('CallbackImpl', import_from_module='ns.core', template_parameters=['void', 'ns3::Time', 'ns3::Time', 'WifiPhyState', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'], parent=root_module['ns3::CallbackImplBase'])
module.add_class('CallbackImpl', import_from_module='ns.core', template_parameters=['void', 'ns3::Time', 'ns3::Time', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'], parent=root_module['ns3::CallbackImplBase'])
module.add_class('CallbackImpl', import_from_module='ns.core', template_parameters=['void', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'], parent=root_module['ns3::CallbackImplBase'])
module.add_class('CallbackImpl', import_from_module='ns.core', template_parameters=['void', 'unsigned int', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'], parent=root_module['ns3::CallbackImplBase'])
module.add_class('CallbackImpl', import_from_module='ns.core', template_parameters=['void', 'unsigned int', 'unsigned int', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'], parent=root_module['ns3::CallbackImplBase'])
module.add_class('OcbWifiMac', parent=root_module['ns3::RegularWifiMac'])
module.add_class('QueueDiscItem', import_from_module='ns.network', parent=root_module['ns3::QueueItem'])
module.add_class('WaveNetDevice', parent=root_module['ns3::WifiNetDevice'])
typehandlers.add_type_alias(u'ns3::Callback< bool, ns3::Ptr< ns3::Packet const >, ns3::Address const &, unsigned int, unsigned int, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', u'ns3::WaveNetDevice::WaveVsaCallback')
typehandlers.add_type_alias(u'ns3::Callback< bool, ns3::Ptr< ns3::Packet const >, ns3::Address const &, unsigned int, unsigned int, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >*', u'ns3::WaveNetDevice::WaveVsaCallback*')
typehandlers.add_type_alias(u'ns3::Callback< bool, ns3::Ptr< ns3::Packet const >, ns3::Address const &, unsigned int, unsigned int, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >&', u'ns3::WaveNetDevice::WaveVsaCallback&')
module.add_container('std::map< std::string, ns3::LogComponent * >', ('std::string', 'ns3::LogComponent *'), container_type=u'map')
module.add_container('ns3::EdcaParameters', ('ns3::AcIndex', 'ns3::EdcaParameter'), container_type=u'map')
module.add_container('std::vector< double >', 'double', container_type=u'vector')
module.add_container('std::vector< int >', 'int', container_type=u'vector')
module.add_container('std::vector< unsigned int >', 'unsigned int', container_type=u'vector')
module.add_container('ns3::WifiModeList', 'ns3::WifiMode', container_type=u'vector')
module.add_container('std::vector< ns3::Ipv6Address >', 'ns3::Ipv6Address', container_type=u'vector')
module.add_container('std::vector< unsigned short >', 'short unsigned int', container_type=u'vector')
module.add_container('std::vector< ns3::WifiRemoteStation * >', 'ns3::WifiRemoteStation *', container_type=u'vector')
module.add_container('std::vector< ns3::WifiRemoteStationState * >', 'ns3::WifiRemoteStationState *', container_type=u'vector')
module.add_container('std::map< unsigned int, unsigned int >', ('unsigned int', 'unsigned int'), container_type=u'map')
module.add_container('std::list< unsigned int >', 'unsigned int', container_type=u'list')
module.add_container('std::list< std::pair< ns3::Ptr< ns3::Packet >, ns3::AmpduSubframeHeader > >', 'std::pair< ns3::Ptr< ns3::Packet >, ns3::AmpduSubframeHeader >', container_type=u'list')
module.add_container('std::map< ns3::AcIndex, ns3::Ptr< ns3::QosTxop > >', ('ns3::AcIndex', 'ns3::Ptr< ns3::QosTxop >'), container_type=u'map')
module.add_container('std::vector< ns3::Ptr< ns3::WifiMacQueueItem > >', 'ns3::Ptr< ns3::WifiMacQueueItem >', container_type=u'vector')
module.add_container('ns3::MpduAggregator::DeaggregatedMpdus', 'std::pair< ns3::Ptr< ns3::Packet >, ns3::AmpduSubframeHeader >', container_type=u'list')
module.add_container('ns3::MpduAggregator::EdcaQueues', ('ns3::AcIndex', 'ns3::Ptr< ns3::QosTxop >'), container_type=u'map')
module.add_container('std::list< std::pair< ns3::Ptr< ns3::Packet >, ns3::AmsduSubframeHeader > >', 'std::pair< ns3::Ptr< ns3::Packet >, ns3::AmsduSubframeHeader >', container_type=u'list')
module.add_container('ns3::MsduAggregator::DeaggregatedMsdus', 'std::pair< ns3::Ptr< ns3::Packet >, ns3::AmsduSubframeHeader >', container_type=u'list')
module.add_container('ns3::MsduAggregator::EdcaQueues', ('ns3::AcIndex', 'ns3::Ptr< ns3::QosTxop >'), container_type=u'map')
module.add_container('std::map< ns3::Mac48Address, bool >', ('ns3::Mac48Address', 'bool'), container_type=u'map')
module.add_container('std::map< unsigned int, ns3::Ptr< ns3::OcbWifiMac > >', ('unsigned int', 'ns3::Ptr< ns3::OcbWifiMac >'), container_type=u'map')
module.add_container('std::vector< ns3::Ptr< ns3::WifiPhy > >', 'ns3::Ptr< ns3::WifiPhy >', container_type=u'vector')
typehandlers.add_type_alias(u'ns3::Callback< void, ns3::Ptr< ns3::Packet >, double, ns3::WifiTxVector, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', u'ns3::RxOkCallback')
typehandlers.add_type_alias(u'ns3::Callback< void, ns3::Ptr< ns3::Packet >, double, ns3::WifiTxVector, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >*', u'ns3::RxOkCallback*')
typehandlers.add_type_alias(u'ns3::Callback< void, ns3::Ptr< ns3::Packet >, double, ns3::WifiTxVector, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >&', u'ns3::RxOkCallback&')
typehandlers.add_type_alias(u'ns3::Callback< void, ns3::Ptr< ns3::Packet >, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', u'ns3::RxErrorCallback')
typehandlers.add_type_alias(u'ns3::Callback< void, ns3::Ptr< ns3::Packet >, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >*', u'ns3::RxErrorCallback*')
typehandlers.add_type_alias(u'ns3::Callback< void, ns3::Ptr< ns3::Packet >, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >&', u'ns3::RxErrorCallback&')
typehandlers.add_type_alias(u'ns3::Vector3D', u'ns3::Vector')
typehandlers.add_type_alias(u'ns3::Vector3D*', u'ns3::Vector*')
typehandlers.add_type_alias(u'ns3::Vector3D&', u'ns3::Vector&')
module.add_typedef(root_module['ns3::Vector3D'], 'Vector')
typehandlers.add_type_alias(u'ns3::Vector3DValue', u'ns3::VectorValue')
typehandlers.add_type_alias(u'ns3::Vector3DValue*', u'ns3::VectorValue*')
typehandlers.add_type_alias(u'ns3::Vector3DValue&', u'ns3::VectorValue&')
module.add_typedef(root_module['ns3::Vector3DValue'], 'VectorValue')
typehandlers.add_type_alias(u'ns3::Vector3DChecker', u'ns3::VectorChecker')
typehandlers.add_type_alias(u'ns3::Vector3DChecker*', u'ns3::VectorChecker*')
typehandlers.add_type_alias(u'ns3::Vector3DChecker&', u'ns3::VectorChecker&')
module.add_typedef(root_module['ns3::Vector3DChecker'], 'VectorChecker')
typehandlers.add_type_alias(u'std::map< ns3::AcIndex, ns3::EdcaParameter >', u'ns3::EdcaParameters')
typehandlers.add_type_alias(u'std::map< ns3::AcIndex, ns3::EdcaParameter >*', u'ns3::EdcaParameters*')
typehandlers.add_type_alias(u'std::map< ns3::AcIndex, ns3::EdcaParameter >&', u'ns3::EdcaParameters&')
typehandlers.add_type_alias(u'std::map< ns3::AcIndex, ns3::EdcaParameter > const_iterator', u'ns3::EdcaParametersI')
typehandlers.add_type_alias(u'std::map< ns3::AcIndex, ns3::EdcaParameter > const_iterator*', u'ns3::EdcaParametersI*')
typehandlers.add_type_alias(u'std::map< ns3::AcIndex, ns3::EdcaParameter > const_iterator&', u'ns3::EdcaParametersI&')
typehandlers.add_type_alias(u'ns3::Callback< bool, ns3::Ptr< ns3::WifiMac >, ns3::OrganizationIdentifier const &, ns3::Ptr< ns3::Packet const >, ns3::Address const &, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', u'ns3::VscCallback')
typehandlers.add_type_alias(u'ns3::Callback< bool, ns3::Ptr< ns3::WifiMac >, ns3::OrganizationIdentifier const &, ns3::Ptr< ns3::Packet const >, ns3::Address const &, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >*', u'ns3::VscCallback*')
typehandlers.add_type_alias(u'ns3::Callback< bool, ns3::Ptr< ns3::WifiMac >, ns3::OrganizationIdentifier const &, ns3::Ptr< ns3::Packet const >, ns3::Address const &, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >&', u'ns3::VscCallback&')
typehandlers.add_type_alias(u'void ( * ) ( std::ostream & )', u'ns3::TimePrinter')
typehandlers.add_type_alias(u'void ( * ) ( std::ostream & )*', u'ns3::TimePrinter*')
typehandlers.add_type_alias(u'void ( * ) ( std::ostream & )&', u'ns3::TimePrinter&')
typehandlers.add_type_alias(u'void ( * ) ( std::ostream & )', u'ns3::NodePrinter')
typehandlers.add_type_alias(u'void ( * ) ( std::ostream & )*', u'ns3::NodePrinter*')
typehandlers.add_type_alias(u'void ( * ) ( std::ostream & )&', u'ns3::NodePrinter&')
typehandlers.add_type_alias(u'uint8_t', u'ns3::WifiInformationElementId')
typehandlers.add_type_alias(u'uint8_t*', u'ns3::WifiInformationElementId*')
typehandlers.add_type_alias(u'uint8_t&', u'ns3::WifiInformationElementId&')
typehandlers.add_type_alias(u'std::vector< ns3::WifiMode >', u'ns3::WifiModeList')
typehandlers.add_type_alias(u'std::vector< ns3::WifiMode >*', u'ns3::WifiModeList*')
typehandlers.add_type_alias(u'std::vector< ns3::WifiMode >&', u'ns3::WifiModeList&')
typehandlers.add_type_alias(u'std::vector< ns3::WifiMode > const_iterator', u'ns3::WifiModeListIterator')
typehandlers.add_type_alias(u'std::vector< ns3::WifiMode > const_iterator*', u'ns3::WifiModeListIterator*')
typehandlers.add_type_alias(u'std::vector< ns3::WifiMode > const_iterator&', u'ns3::WifiModeListIterator&')
nested_module = module.add_cpp_namespace('FatalImpl')
register_types_ns3_FatalImpl(nested_module)
nested_module = module.add_cpp_namespace('Hash')
register_types_ns3_Hash(nested_module)
nested_module = module.add_cpp_namespace('TracedValueCallback')
register_types_ns3_TracedValueCallback(nested_module)
nested_module = module.add_cpp_namespace('internal')
register_types_ns3_internal(nested_module) |
class IndexedMonoidElement(MonoidElement):
    """Element of a monoid with an indexed set of generators.

    The element is stored as an opaque monomial object ``self._monomial``
    (its concrete shape is subclass-dependent); subclasses must implement
    :meth:`_sorted_items` to expose it as a sequence of
    ``(generator_index, exponent)`` pairs in printing order.
    """

    def __init__(self, F, x):
        """Initialize ``self`` as an element of the parent monoid ``F``
        with underlying monomial data ``x``.
        """
        MonoidElement.__init__(self, F)
        # Opaque monomial data; falsy iff this is the identity element.
        self._monomial = x

    def _sorted_items(self):
        """Return the monomial as a sequence of ``(index, exponent)``
        pairs, sorted in the order used for printing and comparison.

        Abstract: subclasses must override.  (The original source carried
        an ``@abstract_method``-style decorator that was lost in
        extraction; raising here restores the abstract contract.)
        """
        raise NotImplementedError("subclasses must implement _sorted_items()")

    def _repr_(self):
        """Return a plain-string representation, e.g. ``x^2*y``."""
        if (not self._monomial):
            # Empty monomial is the multiplicative identity.
            return '1'
        monomial = self._sorted_items()
        P = self.parent()
        scalar_mult = P._print_options['scalar_mult']
        # Exponent 1 is printed implicitly.
        exp = (lambda v: ('^{}'.format(v) if (v != 1) else ''))
        return scalar_mult.join(((P._repr_generator(g) + exp(v)) for (g, v) in monomial))

    def _ascii_art_(self):
        """Return an ASCII-art representation of ``self``."""
        from sage.typeset.ascii_art import AsciiArt, ascii_art, empty_ascii_art
        if (not self._monomial):
            return AsciiArt(['1'])
        monomial = self._sorted_items()
        P = self.parent()
        scalar_mult = P._print_options['scalar_mult']
        if all(((x[1] == 1) for x in monomial)):
            # All exponents are 1: render bare generators.
            ascii_art_gen = (lambda m: P._ascii_art_generator(m[0]))
        else:
            pref = AsciiArt([P.prefix()])
            def ascii_art_gen(m):
                # Typeset the exponent (if any) raised above the generator.
                if (m[1] != 1):
                    r = (AsciiArt([(' ' * len(pref))]) + ascii_art(m[1]))
                else:
                    r = empty_ascii_art
                r = (r * P._ascii_art_generator(m[0]))
                r._baseline = (r._h - 2)
                return r
        b = ascii_art_gen(monomial[0])
        for x in monomial[1:]:
            b = ((b + AsciiArt([scalar_mult])) + ascii_art_gen(x))
        return b

    def _latex_(self):
        """Return a LaTeX representation, e.g. ``x^{2} y``."""
        if (not self._monomial):
            return '1'
        monomial = self._sorted_items()
        P = self.parent()
        scalar_mult = P._print_options['latex_scalar_mult']
        if (scalar_mult is None):
            # Fall back to the plain scalar-mult symbol; a literal '*'
            # becomes a space in LaTeX output.
            scalar_mult = P._print_options['scalar_mult']
            if (scalar_mult == '*'):
                scalar_mult = ' '
        exp = (lambda v: ('^{{{}}}'.format(v) if (v != 1) else ''))
        return scalar_mult.join(((P._latex_generator(g) + exp(v)) for (g, v) in monomial))

    def __iter__(self):
        """Iterate over ``(generator, exponent)`` pairs of ``self``."""
        return ((self.parent().gen(index), exp) for (index, exp) in self._sorted_items())

    def _richcmp_(self, other, op):
        """Rich comparison by monomial equality, then word-list order."""
        if (self._monomial == other._monomial):
            return rich_to_bool(op, 0)
        # Monomials differ: equality is False, inequality is True.
        if ((op == op_EQ) or (op == op_NE)):
            return rich_to_bool(op, 1)
        return richcmp(self.to_word_list(), other.to_word_list(), op)

    def support(self):
        """Return the sorted list of generator indices with nonzero
        exponent.
        """
        supp = set((key for (key, exp) in self._sorted_items() if (exp != 0)))
        return sorted(supp)

    def leading_support(self):
        """Return the index of the leading generator of ``self``, or
        ``None`` for the identity element.
        """
        if (not self):
            return None
        return self._sorted_items()[0][0]

    def trailing_support(self):
        """Return the index of the trailing generator of ``self``, or
        ``None`` for the identity element.
        """
        if (not self):
            return None
        return self._sorted_items()[(- 1)][0]

    def to_word_list(self):
        """Return ``self`` as a word: each index repeated ``exponent``
        times, in sorted-items order.
        """
        return [k for (k, e) in self._sorted_items() for dummy in range(e)]
def get_monitors():
    """Return the list of connected monitor handles reported by GLFW."""
    # glfwGetMonitors writes the monitor count through an out-parameter.
    n_monitors = ctypes.c_int(0)
    monitor_array = _glfw.glfwGetMonitors(ctypes.pointer(n_monitors))
    return [monitor_array[index] for index in range(n_monitors.value)]
def main(args):
    """Simulate a scheduling policy over a job trace and report metrics."""
    jobs, arrival_times = utils.parse_trace(args.trace_file)
    policy = utils.get_policy(args.policy, solver=args.solver, seed=args.seed)
    sched = scheduler.Scheduler(policy,
                                throughputs_file=args.throughputs_file,
                                simulate=True,
                                seed=args.seed,
                                time_per_iteration=args.time_per_iteration)
    # Cluster specs are colon-separated counts in v100:p100:k80 order.
    spec_counts = args.cluster_spec.split(':')
    cluster_spec = {'v100': int(spec_counts[0]),
                    'p100': int(spec_counts[1]),
                    'k80': int(spec_counts[2])}
    per_server_counts = args.num_gpus_per_server.split(':')
    num_gpus_per_server = {'v100': int(per_server_counts[0]),
                           'p100': int(per_server_counts[1]),
                           'k80': int(per_server_counts[2])}
    # Restrict completion metrics to the requested job-id window, if any.
    if (args.window_start is None) or (args.window_end is None):
        jobs_to_complete = None
    else:
        jobs_to_complete = {JobIdPair(job_id, None)
                            for job_id in range(args.window_start, args.window_end)}
    sched.simulate(cluster_spec, arrival_times, jobs,
                   debug=args.debug,
                   checkpoint_threshold=args.checkpoint_threshold,
                   checkpoint_file=args.checkpoint_file,
                   num_gpus_per_server=num_gpus_per_server,
                   jobs_to_complete=jobs_to_complete)
    sched.get_average_jct(jobs_to_complete)
    sched.get_cluster_utilization()
    sched.get_num_lease_extensions()
    sched.shutdown()
def sp2torch(sparse_mx):
    """Convert a SciPy sparse matrix to a torch sparse COO float tensor.

    Parameters
    ----------
    sparse_mx : scipy.sparse matrix
        Any SciPy sparse format; converted to COO internally.

    Returns
    -------
    torch.Tensor
        A sparse COO tensor with float32 values and the same shape.
    """
    coo = sparse_mx.tocoo().astype(np.float32)
    # Stack row/col indices into the (2, nnz) int64 layout torch expects.
    indices = torch.from_numpy(np.vstack((coo.row, coo.col)).astype(np.int64))
    values = torch.from_numpy(coo.data)
    shape = torch.Size(coo.shape)
    # torch.sparse_coo_tensor replaces the deprecated legacy constructor
    # torch.sparse.FloatTensor used previously.
    return torch.sparse_coo_tensor(indices, values, shape)
def run_algo(**kwargs):
    """Launch a Ray Tune grid search for the selected offline-RL algorithm."""
    # Inject a fresh random seed before selecting the algorithm config.
    kwargs['seed'] = random.randint(0, 1000000)
    config = {'kwargs': kwargs}
    _, _, algo_config = algo_select(kwargs)
    load_data_from_neorl(algo_config['task'],
                         algo_config['task_data_type'],
                         algo_config['task_train_num'])
    # Expose every tunable hyper-parameter as a grid-search axis.
    for param_name, candidates in algo_config['grid_tune'].items():
        config[param_name] = tune.grid_search(candidates)
    analysis = tune.run(training_function,
                        config=config,
                        resources_per_trial={'gpu': 1},
                        queue_trials=True)
class ALSModelJavaMLReadable(MLReadable):
    """Mixin making :class:`ALSModel` readable through a Java ML reader.

    ``read`` takes the class itself (the parameter is named ``cls``) but
    the ``@classmethod`` decorator was missing, so ``SomeClass.read()``
    would fail with a missing-argument error; restore the decorator.
    """

    @classmethod
    def read(cls):
        """Return a Java-backed ML reader bound to this class."""
        return ALSModelJavaMLReader(cls)
def sr_create_model(large_size, small_size, num_channels, num_res_blocks, learn_sigma, class_cond, use_checkpoint, attention_resolutions, num_heads, num_head_channels, num_heads_upsample, use_scale_shift_norm, dropout, resblock_updown, use_fp16):
    """Build a SuperResModel configured for the given target resolution."""
    _ = small_size  # the input (small) size does not affect the architecture
    # Channel multipliers per supported output resolution.
    channel_mult_by_size = {
        512: (1, 1, 2, 2, 4, 4),
        256: (1, 1, 2, 2, 4, 4),
        64: (1, 2, 3, 4),
    }
    if large_size not in channel_mult_by_size:
        raise ValueError(f'unsupported large size: {large_size}')
    channel_mult = channel_mult_by_size[large_size]
    # Convert comma-separated resolutions into downsampling factors.
    attention_ds = tuple((large_size // int(res))
                         for res in attention_resolutions.split(','))
    return SuperResModel(image_size=large_size,
                         in_channels=3,
                         model_channels=num_channels,
                         out_channels=(6 if learn_sigma else 3),
                         num_res_blocks=num_res_blocks,
                         attention_resolutions=attention_ds,
                         dropout=dropout,
                         channel_mult=channel_mult,
                         num_classes=(NUM_CLASSES if class_cond else None),
                         use_checkpoint=use_checkpoint,
                         num_heads=num_heads,
                         num_head_channels=num_head_channels,
                         num_heads_upsample=num_heads_upsample,
                         use_scale_shift_norm=use_scale_shift_norm,
                         resblock_updown=resblock_updown,
                         use_fp16=use_fp16)
def write_to_hdf(file_list, transcription_list, charlist, n_labels, out_file_name, dataset_prefix, pad_y=15, pad_x=15, compress=True):
    """Write an OCR dataset (images plus transcriptions) into one HDF5 file.

    Each image is inverted, zero-padded by ``pad_y``/``pad_x`` pixels on
    each side, flattened to a single column, and concatenated into one
    long 'inputs' stream; per-sequence lengths, tags and target label
    indices are stored alongside.

    Parameters
    ----------
    file_list : list of image file paths (tags assume '.png' names).
    transcription_list : per-image label-index sequences aligned with
        ``file_list``.
    charlist : the label alphabet, written as HDF5 strings.
    n_labels : NOTE(review): unused in this function — confirm whether it
        can be dropped at the call sites.
    out_file_name : output HDF5 path (overwritten).
    dataset_prefix : prefix prepended to every sequence tag.
    pad_y, pad_x : padding in pixels added on each side of each image.
    compress : gzip-compress the 'inputs' dataset when True.
    """
    with h5py.File(out_file_name, 'w') as f:
        f.attrs['inputPattSize'] = 1
        f.attrs['numDims'] = 1
        f.attrs['numSeqs'] = len(file_list)
        classes = charlist
        inputs = []
        sizes = []
        seq_lengths = []
        targets = []
        for (i, (img_name, transcription)) in enumerate(zip(file_list, transcription_list)):
            targets += transcription
            img = imread(img_name)
            # Invert so ink is high-valued, then zero-pad the borders.
            img = (255 - img)
            img = numpy.pad(img, ((pad_y, pad_y), (pad_x, pad_x)), 'constant')
            sizes.append(img.shape)
            # Flatten to a (pixels, 1) column; all images share one stream.
            img = img.reshape(img.size, 1)
            inputs.append(img)
            seq_lengths.append([[img.size, len(transcription), 2]])
            if ((i % 100) == 0):
                # Progress indicator every 100 images.
                print(i, '/', len(file_list))
        inputs = numpy.concatenate(inputs, axis=0)
        sizes = numpy.concatenate(numpy.array(sizes, dtype='int32'), axis=0)
        seq_lengths = numpy.concatenate(numpy.array(seq_lengths, dtype='int32'), axis=0)
        targets = numpy.array(targets, dtype='int32')
        f.attrs['numTimesteps'] = inputs.shape[0]
        # Pixel values are normalized to [0, 1] on write.
        if compress:
            f.create_dataset('inputs', compression='gzip', data=(inputs.astype('float32') / 255.0))
        else:
            f['inputs'] = (inputs.astype('float32') / 255.0)
        hdf5_strings(f, 'labels', classes)
        f['seqLengths'] = seq_lengths
        # Tag each sequence as '<prefix>/<basename-without-.png>'.
        seq_tags = [((dataset_prefix + '/') + tag.split('/')[(- 1)].split('.png')[0]) for tag in file_list]
        hdf5_strings(f, 'seqTags', seq_tags)
        f['targets/data/classes'] = targets
        f['targets/data/sizes'] = sizes
        hdf5_strings(f, 'targets/labels/classes', classes)
        hdf5_strings(f, 'targets/labels/sizes', ['foo'])
        g = f.create_group('targets/size')
        g.attrs['classes'] = len(classes)
        g.attrs['sizes'] = 2
def _k_radius_of_gyration_individual(traj, k=2):
    """Compute the k-radius of gyration of one individual's trajectory.

    The k-radius of gyration is the visit-weighted root-mean-square
    distance of the ``k`` most visited locations from their visit-weighted
    center of mass.

    NOTE(review): this adds a 'visits' column to ``traj`` in place, so the
    caller's DataFrame is mutated — confirm that is intended.

    Parameters
    ----------
    traj : pandas.DataFrame
        One individual's points with latitude/longitude/datetime columns
        (column names taken from the ``constants`` module).
    k : int
        Number of top locations to keep (default 2).

    Returns
    -------
    float
        The k-radius of gyration, in whatever distance units
        ``getDistanceByHaversine`` returns (presumably km — confirm).
    """
    # Visit count per distinct (lat, lng) location, broadcast to each row.
    traj['visits'] = traj.groupby([constants.LATITUDE, constants.LONGITUDE]).transform('count')[constants.DATETIME]
    # Keep the k most visited distinct locations (sorted by count desc,
    # then datetime asc).
    top_k_locations = traj.drop_duplicates(subset=[constants.LATITUDE, constants.LONGITUDE]).sort_values(by=['visits', constants.DATETIME], ascending=[False, True])[:k]
    visits = top_k_locations['visits'].values
    total_visits = sum(visits)
    lats_lngs = top_k_locations[[constants.LATITUDE, constants.LONGITUDE]].values
    # Visit-weighted centroid of the top-k locations.
    center_of_mass = (visits.dot(lats_lngs) / total_visits)
    # Weighted RMS haversine distance of each location from the centroid.
    krg = np.sqrt((sum([(visits[i] * (getDistanceByHaversine((lat, lng), center_of_mass) ** 2.0)) for (i, (lat, lng)) in enumerate(lats_lngs)]) / total_visits))
    return krg
def symbolic_fg(x, grad, eps=0.3, clipping=True):
    """Build a symbolic fast-gradient (L2-normalized) adversarial example.

    Parameters
    ----------
    x : tf.Tensor
        Input batch; axis 0 is the batch dimension.
    grad : tf.Tensor
        Gradient of the loss with respect to ``x``.
    eps : float
        Step size applied along the per-example L2-normalized gradient.
    clipping : bool
        If True, clip the perturbed input back into [0, 1].

    Returns
    -------
    tf.Tensor
        The adversarial input, detached from the gradient graph.
    """
    # FIX: `xrange` is Python 2 only; use `range` so this runs on Python 3.
    # Normalize the gradient per example, over all non-batch axes.
    reduc_ind = list(range(1, len(x.get_shape())))
    normed_grad = (grad / tf.sqrt(tf.reduce_sum(tf.square(grad), reduction_indices=reduc_ind, keep_dims=True)))
    scaled_grad = (eps * normed_grad)
    # stop_gradient keeps the attack step itself out of backprop.
    adv_x = K.stop_gradient((x + scaled_grad))
    if clipping:
        adv_x = K.clip(adv_x, 0, 1)
    return adv_x
def step(mouse_data):
    """Advance the stable-fluids simulation by one frame.

    Pipeline: semi-Lagrangian advection of velocity and dye, impulse from
    the mouse, optional vorticity confinement, Jacobi pressure solve, and
    subtraction of the pressure gradient to re-impose (approximate)
    incompressibility.  All buffers (``velocities_pair``, ``dyes_pair``,
    ``pressures_pair``, ``velocity_curls``, ``velocity_divs``) and tuning
    constants (``curl_strength``, ``p_jacobi_iters``, ``debug``) are
    module-level globals defined in the enclosing script — presumably
    Taichi fields; confirm there.
    """
    # Self-advect velocity, and advect dye through the velocity field,
    # writing into the 'next' halves of the double buffers.
    advect(velocities_pair.cur, velocities_pair.cur, velocities_pair.nxt)
    advect(velocities_pair.cur, dyes_pair.cur, dyes_pair.nxt)
    velocities_pair.swap()
    dyes_pair.swap()
    # Inject momentum and dye at the mouse position.
    apply_impulse(velocities_pair.cur, dyes_pair.cur, mouse_data)
    divergence(velocities_pair.cur)
    if curl_strength:
        # Vorticity confinement: re-amplify small-scale swirls lost to
        # numerical diffusion.
        vorticity(velocities_pair.cur)
        enhance_vorticity(velocities_pair.cur, velocity_curls)
    # Iteratively solve the pressure Poisson equation.
    for _ in range(p_jacobi_iters):
        pressure_jacobi(pressures_pair.cur, pressures_pair.nxt)
        pressures_pair.swap()
    # Project the velocity field to (approximately) divergence-free.
    subtract_gradient(velocities_pair.cur, pressures_pair.cur)
    if debug:
        # Report the residual divergence after projection.
        divergence(velocities_pair.cur)
        div_s = np.sum(velocity_divs.to_numpy())
        print(f'divergence={div_s}')
def wait_for_tag(wtag, num=1):
    """Block until ``num`` MPI messages tagged ``wtag`` have been received."""
    remaining = num
    start_time = MPI.Wtime()
    while remaining > 0:
        # recv filters on the wanted tag; status records the actual sender.
        mpi_comm.recv(source=MPI.ANY_SOURCE, tag=wtag, status=mpi_status)
        recv_tag = mpi_status.Get_tag()
        sender = mpi_status.Get_source()
        logger.debug('received %s from %d (%.03fs)'
                     % (tags.name[recv_tag], sender, (MPI.Wtime() - start_time)))
        if recv_tag == wtag:
            remaining -= 1
class AdditiveAttention(nn.Module):
    """Bahdanau-style additive attention with a residual query connection."""

    def __init__(self, d_model: int) -> None:
        super(AdditiveAttention, self).__init__()
        # Separate projections for queries and keys; the score head maps
        # the combined energy down to a scalar per key position.
        self.query_proj = Linear(d_model, d_model, bias=False)
        self.key_proj = Linear(d_model, d_model, bias=False)
        self.bias = nn.Parameter(torch.rand(d_model).uniform_((- 0.1), 0.1))
        self.score_proj = Linear(d_model, 1)

    def forward(self, query: Tensor, key: Tensor, value: Tensor) -> Tuple[(Tensor, Tensor)]:
        """Return ``(context, attn)`` for the given query/key/value batch."""
        # score_ij = w^T tanh(W_k k_j + W_q q_i + b)
        energy = torch.tanh(self.key_proj(key) + self.query_proj(query) + self.bias)
        score = self.score_proj(energy).squeeze((- 1))
        attn = F.softmax(score, dim=(- 1))
        # Attention-weighted sum of values, plus a residual query term.
        context = torch.bmm(attn.unsqueeze(1), value)
        context += query
        return (context, attn)
class AutoModelForCausalLM():
    """Dummy placeholder raising a "requires PyTorch" error.

    Stands in for the real ``AutoModelForCausalLM`` when PyTorch is not
    installed.  ``from_pretrained`` is invoked on the class itself by
    callers (``AutoModelForCausalLM.from_pretrained(...)``), so it must be
    a classmethod; the decorator was missing in the original.
    """

    def __init__(self, *args, **kwargs):
        requires_pytorch(self)

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_pytorch(cls)
def get_model(point_cloud, is_training, num_class, bn_decay=None, gripper_feat=None, env_feat=None):
    """PointNet++-style encoder/decoder over a point cloud with extra features.

    Encodes ``point_cloud`` through six set-abstraction (SA) levels,
    injects extra per-sample features (gripper, and optionally
    environment) at three decoder scales, and decodes back to per-point
    64-d features via feature-propagation (FP) layers.

    Parameters
    ----------
    point_cloud : tf tensor of xyz coordinates; the decoder tiling assumes
        2048 points per cloud (see the final tile) — confirm at call sites.
    is_training : training-phase tensor for batch norm.
    num_class : NOTE(review): unused in this function — confirm.
    bn_decay : optional batch-norm decay schedule.
    gripper_feat : per-sample feature vector; used alone when ``env_feat``
        is None.
    env_feat : optional per-sample features concatenated to gripper_feat.

    Returns
    -------
    dict
        ``end_points`` with 'l0_xyz' and the final per-point 'feats'.
    """
    # NOTE(review): batch_size/num_point are computed but never used.
    batch_size = point_cloud.get_shape()[0].value
    num_point = point_cloud.get_shape()[1].value
    end_points = {}
    l0_xyz = point_cloud
    l0_points = None
    end_points['l0_xyz'] = l0_xyz
    # Encoder: six SA levels with growing radius and channel width
    # (512 -> 256 -> 128 -> 64 -> 48 -> 4 sampled points).
    (l1_xyz, l1_points, l1_indices) = pointnet_sa_module(l0_xyz, l0_points, npoint=512, radius=0.01, nsample=32, mlp=[32, 32, 64], mlp2=None, group_all=False, is_training=is_training, bn_decay=bn_decay, scope='layer1')
    (l2_xyz, l2_points, l2_indices) = pointnet_sa_module(l1_xyz, l1_points, npoint=256, radius=0.02, nsample=32, mlp=[64, 64, 128], mlp2=None, group_all=False, is_training=is_training, bn_decay=bn_decay, scope='layer2')
    (l3_xyz, l3_points, l3_indices) = pointnet_sa_module(l2_xyz, l2_points, npoint=128, radius=0.04, nsample=32, mlp=[128, 128, 256], mlp2=None, group_all=False, is_training=is_training, bn_decay=bn_decay, scope='layer3')
    (l4_xyz, l4_points, l4_indices) = pointnet_sa_module(l3_xyz, l3_points, npoint=64, radius=0.08, nsample=32, mlp=[256, 256, 512], mlp2=None, group_all=False, is_training=is_training, bn_decay=bn_decay, scope='layer4')
    (l5_xyz, l5_points, l5_indices) = pointnet_sa_module(l4_xyz, l4_points, npoint=48, radius=0.16, nsample=32, mlp=[512, 512, 1024], mlp2=None, group_all=False, is_training=is_training, bn_decay=bn_decay, scope='layer5')
    (l6_xyz, l6_points, l6_indices) = pointnet_sa_module(l5_xyz, l5_points, npoint=4, radius=0.2, nsample=32, mlp=[1024, 1024, 2048], mlp2=None, group_all=False, is_training=is_training, bn_decay=bn_decay, scope='layer6')
    # Fuse gripper/environment features into a (batch, 1, feat) tensor.
    if (env_feat is None):
        extra_feat = gripper_feat
    else:
        extra_feat = tf.concat([gripper_feat, env_feat], axis=(- 1))
    extra_feat = tf.expand_dims(extra_feat, axis=1)
    extra_feat0 = extra_feat
    # Project the extra features down through 1x1 convs; snapshots are
    # taken at three widths (raw, 256-d, 64-d) for multi-scale injection.
    extra_feat = tflearn.layers.conv.conv_1d(extra_feat, 512, filter_size=1, strides=1, activation=tf.nn.leaky_relu)
    extra_feat = tflearn.layers.conv.conv_1d(extra_feat, 256, filter_size=1, strides=1, activation=tf.nn.leaky_relu)
    extra_feat = tflearn.layers.conv.conv_1d(extra_feat, 256, filter_size=1, strides=1, activation=tf.nn.leaky_relu)
    extra_feat2 = extra_feat
    extra_feat = tflearn.layers.conv.conv_1d(extra_feat, 128, filter_size=1, strides=1, activation=tf.nn.leaky_relu)
    extra_feat = tflearn.layers.conv.conv_1d(extra_feat, 128, filter_size=1, strides=1, activation=tf.nn.leaky_relu)
    extra_feat = tflearn.layers.conv.conv_1d(extra_feat, 64, filter_size=1, strides=1, activation=tf.nn.leaky_relu)
    extra_feat5 = extra_feat
    # Inject the raw extra features at the deepest level (4 points).
    extra_feat0 = tf.tile(extra_feat0, [1, 4, 1])
    l6_points = tf.concat([l6_points, extra_feat0], axis=(- 1))
    # Decoder: FP layers propagate features back up the SA hierarchy.
    l5_points = pointnet_fp_module(l5_xyz, l6_xyz, l5_points, l6_points, [2048, 2048, 1024], is_training, bn_decay, scope='fa_layer5', bn=True)
    l4_points = pointnet_fp_module(l4_xyz, l5_xyz, l4_points, l5_points, [1024, 1024, 512], is_training, bn_decay, scope='fa_layer0', bn=True)
    l3_points = pointnet_fp_module(l3_xyz, l4_xyz, l3_points, l4_points, [512, 512, 384], is_training, bn_decay, scope='fa_layer1', bn=True)
    # Inject the 256-d extra features at the 128-point level.
    extra_feat2 = tf.tile(extra_feat2, [1, 128, 1])
    l3_points = tf.concat([l3_points, extra_feat2], axis=(- 1))
    l2_points = pointnet_fp_module(l2_xyz, l3_xyz, l2_points, l3_points, [384, 384, 256], is_training, bn_decay, scope='fa_layer2', bn=True)
    l1_points = pointnet_fp_module(l1_xyz, l2_xyz, l1_points, l2_points, [256, 256, 128], is_training, bn_decay, scope='fa_layer3', bn=True)
    l0_points = pointnet_fp_module(l0_xyz, l1_xyz, l0_points, l1_points, [128, 128, 64], is_training, bn_decay, scope='fa_layer4', bn=True)
    # Inject the 64-d extra features at the full point set (2048 points).
    extra_feat5 = tf.tile(extra_feat5, [1, 2048, 1])
    l0_points = tf.concat([l0_points, extra_feat5], axis=(- 1))
    # Final per-point feature head.
    l0_points = tf_util.conv1d(l0_points, 128, 1, padding='VALID', bn=False, is_training=is_training, scope='fc1_3', bn_decay=bn_decay)
    l0_points = tf_util.conv1d(l0_points, 64, 1, padding='VALID', bn=False, is_training=is_training, scope='fc1_4', bn_decay=bn_decay)
    net = l0_points
    end_points['feats'] = net
    return end_points
class ECAPA_TDNN(torch.nn.Module):
    """ECAPA-TDNN embedding model.

    A stack of an initial TDNN block, SE-Res2Net blocks, multi-layer
    feature aggregation (MFA), attentive statistics pooling and a final
    1x1 convolution producing ``lin_neurons``-dimensional embeddings.

    FIX: the list-typed defaults were replaced with tuples to avoid
    Python's shared-mutable-default pitfall; they are copied to lists on
    entry so downstream behavior (indexing, ``self.channels``) is
    unchanged for all callers.
    """

    def __init__(self, input_size, device='cpu', lin_neurons=192, activation=torch.nn.ReLU, channels=(512, 512, 512, 512, 1536), kernel_sizes=(5, 3, 3, 3, 1), dilations=(1, 2, 3, 4, 1), attention_channels=128, res2net_scale=8, se_channels=128, global_context=True, groups=(1, 1, 1, 1, 1)):
        super().__init__()
        # Copy sequence arguments so callers' inputs are never shared/mutated.
        channels = list(channels)
        kernel_sizes = list(kernel_sizes)
        dilations = list(dilations)
        groups = list(groups)
        assert (len(channels) == len(kernel_sizes))
        assert (len(channels) == len(dilations))
        self.channels = channels
        self.blocks = nn.ModuleList()
        # Initial TDNN block, then SE-Res2Net blocks for the middle layers.
        self.blocks.append(TDNNBlock(input_size, channels[0], kernel_sizes[0], dilations[0], activation, groups[0]))
        for i in range(1, (len(channels) - 1)):
            self.blocks.append(SERes2NetBlock(channels[(i - 1)], channels[i], res2net_scale=res2net_scale, se_channels=se_channels, kernel_size=kernel_sizes[i], dilation=dilations[i], activation=activation, groups=groups[i]))
        # Multi-layer feature aggregation over concatenated block outputs
        # (the first block's output is excluded in forward()).
        self.mfa = TDNNBlock((channels[(- 2)] * (len(channels) - 2)), channels[(- 1)], kernel_sizes[(- 1)], dilations[(- 1)], activation, groups=groups[(- 1)])
        self.asp = AttentiveStatisticsPooling(channels[(- 1)], attention_channels=attention_channels, global_context=global_context)
        self.asp_bn = BatchNorm1d(input_size=(channels[(- 1)] * 2))
        # Final projection to the embedding dimension.
        self.fc = Conv1d(in_channels=(channels[(- 1)] * 2), out_channels=lin_neurons, kernel_size=1)

    def forward(self, x, lengths=None):
        """Compute embeddings for a batch ``x`` shaped (batch, time, feats)."""
        x = x.transpose(1, 2)
        xl = []
        for layer in self.blocks:
            # Some blocks accept `lengths`; fall back for those that don't.
            try:
                x = layer(x, lengths=lengths)
            except TypeError:
                x = layer(x)
            xl.append(x)
        # MFA: aggregate every block output except the first.
        x = torch.cat(xl[1:], dim=1)
        x = self.mfa(x)
        x = self.asp(x, lengths=lengths)
        x = self.asp_bn(x)
        x = self.fc(x)
        x = x.transpose(1, 2)
        return x
def main():
    """Self-driving game loop: grab the screen, predict, send key presses.

    Relies on module-level globals defined elsewhere in the script:
    ``model`` (the trained classifier), ``grab_screen``, ``key_check``,
    the key constants ``Q``/``Z``/``D``, ``ReleaseKey`` and the motion
    helpers ``straight``/``left``/``right``.
    """
    last_time = time.time()
    # Countdown (4..1) so the user can focus the game window.
    for i in list(range(4))[::(- 1)]:
        print((i + 1))
        time.sleep(1)
    paused = False
    while True:
        if (not paused):
            # Region is presumably (x1, y1, x2, y2); the 40px y-offset
            # looks like it skips the window title bar — confirm against
            # grab_screen's contract.
            screen = grab_screen(region=(0, 40, 960, 560))
            print('loop took {} seconds'.format((time.time() - last_time)))
            last_time = time.time()
            # Grayscale + downscale to the model's 160x120 input.
            screen = cv2.cvtColor(screen, cv2.COLOR_BGR2GRAY)
            screen = cv2.resize(screen, (160, 120))
            prediction = model.predict([screen.reshape(160, 120, 1)])[0]
            print(prediction)
            # Confidence thresholds before committing to a turn.
            turn_thresh = 0.75
            fwd_thresh = 0.7
            # Indices 0/5/6 presumably map to forward/left/right classes
            # of the model's output — confirm against training labels.
            if (prediction[0] > fwd_thresh):
                straight()
            elif (prediction[5] > turn_thresh):
                left()
            elif (prediction[6] > turn_thresh):
                right()
            else:
                # No confident class: default to driving straight.
                straight()
        keys = key_check()
        print(keys)
        # 'T' toggles pause; keys are released so no input stays stuck.
        if ('T' in keys):
            if paused:
                paused = False
                time.sleep(1)
            else:
                paused = True
                ReleaseKey(Q)
                ReleaseKey(Z)
                ReleaseKey(D)
                time.sleep(1)
def construct_simple_trajec(traject_dict, **kwargs):
    """Build a trajectory from ``traject_dict`` with both the agent log and
    the simulator log disabled; all other options pass through unchanged
    to ``construct_trajec``.
    """
    return construct_trajec(traject_dict, include_agent_log=False, include_simulator_log=False, **kwargs)
class CaptureStd():
def __init__(self, out=True, err=True, replay=True):
self.replay = replay
if out:
self.out_buf = StringIO()
self.out = 'error: CaptureStd context is unfinished yet, called too early'
else:
self.out_buf = None
self.out = 'not capturing stdout'
if err:
self.err_buf = StringIO()
self.err = 'error: CaptureStd context is unfinished yet, called too early'
else:
self.err_buf = None
self.err = 'not capturing stderr'
def __enter__(self):
if self.out_buf:
self.out_old = sys.stdout
sys.stdout = self.out_buf
if self.err_buf:
self.err_old = sys.stderr
sys.stderr = self.err_buf
return self
def __exit__(self, *exc):
if self.out_buf:
sys.stdout = self.out_old
captured = self.out_buf.getvalue()
if self.replay:
sys.stdout.write(captured)
self.out = apply_print_resets(captured)
if self.err_buf:
sys.stderr = self.err_old
captured = self.err_buf.getvalue()
if self.replay:
sys.stderr.write(captured)
self.err = captured
def __repr__(self):
msg = ''
if self.out_buf:
msg += f'''stdout: {self.out}
'''
if self.err_buf:
msg += f'''stderr: {self.err}
'''
return msg |
# FIX: the decorator's `@pytest.mark` prefix was lost, leaving a bare
# `.parametrize(...)` line (a SyntaxError); restored here.
@pytest.mark.parametrize('alpha', [np.linspace(0.05, 0.07), [0.05, 0.07, 0.9], (0.05, 0.07, 0.9), np.array([0.05, 0.07, 0.9])])
def test_invalid_calculation_of_quantile(alpha: Any) -> None:
    """Too few samples for the requested alphas must raise ValueError."""
    n = 10
    with pytest.raises(ValueError, match='.*Number of samples of the score is too low.*'):
        check_alpha_and_n_samples(alpha, n)
class TestEnvironmentReset(unittest.TestCase):
    """CUDA tests for ``NumbaEnvironmentReset``.

    Verifies that arrays registered with ``save_copy_and_apply_at_reset``
    are restored to their initial values for environments whose
    ``_done_`` flag is set, left untouched for running environments, and
    that ``mode='force_reset'`` restores every environment.  Requires a
    CUDA device (all mutations go through ``.cuda()`` tensors).
    """

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # 2 envs x 5 agents; data and function managers share dimensions.
        self.dm = NumbaDataManager(num_agents=5, num_envs=2, episode_length=2)
        self.fm = NumbaFunctionManager(num_agents=int(self.dm.meta_info('n_agents')), num_envs=int(self.dm.meta_info('n_envs')))
        self.fm.import_numba_from_source_code(f'{_NUMBA_FILEPATH}.test_build')
        self.resetter = NumbaEnvironmentReset(function_manager=self.fm)

    def test_reset_for_different_dim(self):
        """Exercise reset semantics across dtypes, ranks and backings."""
        # Mark env 0 done, env 1 still running.
        self.dm.data_on_device_via_torch('_done_')[:] = torch.from_numpy(np.array([1, 0])).cuda()
        done = self.dm.pull_data_from_device('_done_')
        self.assertSequenceEqual(list(done), [1, 0])
        # Float (a-c) and int (d-f) arrays of rank 1-3, pushed without
        # torch access.  These are never mutated below, so a reset must
        # leave them equal to their originals.
        data_feed = DataFeed()
        data_feed.add_data(name='a', data=np.random.randn(2, 10, 3), save_copy_and_apply_at_reset=True)
        data_feed.add_data(name='b', data=np.random.randn(2, 10), save_copy_and_apply_at_reset=True)
        data_feed.add_data(name='c', data=np.random.randn(2), save_copy_and_apply_at_reset=True)
        data_feed.add_data(name='d', data=np.random.randint(10, size=(2, 10, 3), dtype=np.int32), save_copy_and_apply_at_reset=True)
        data_feed.add_data(name='e', data=np.random.randint(10, size=(2, 10), dtype=np.int32), save_copy_and_apply_at_reset=True)
        data_feed.add_data(name='f', data=np.random.randint(10, size=2, dtype=np.int32), save_copy_and_apply_at_reset=True)
        self.dm.push_data_to_device(data_feed)
        # Same shapes/dtypes, but torch-accessible so they can be mutated
        # in place before the reset.
        torch_data_feed = DataFeed()
        torch_data_feed.add_data(name='at', data=np.random.randn(2, 10, 3), save_copy_and_apply_at_reset=True)
        torch_data_feed.add_data(name='bt', data=np.random.randn(2, 10), save_copy_and_apply_at_reset=True)
        torch_data_feed.add_data(name='ct', data=np.random.randn(2), save_copy_and_apply_at_reset=True)
        torch_data_feed.add_data(name='dt', data=np.random.randint(10, size=(2, 10, 3), dtype=np.int32), save_copy_and_apply_at_reset=True)
        torch_data_feed.add_data(name='et', data=np.random.randint(10, size=(2, 10), dtype=np.int32), save_copy_and_apply_at_reset=True)
        torch_data_feed.add_data(name='ft', data=np.random.randint(10, size=2, dtype=np.int32), save_copy_and_apply_at_reset=True)
        self.dm.push_data_to_device(torch_data_feed, torch_accessible=True)
        # Snapshot the original values before any mutation.
        a = self.dm.pull_data_from_device('a')
        b = self.dm.pull_data_from_device('b')
        c = self.dm.pull_data_from_device('c')
        d = self.dm.pull_data_from_device('d')
        e = self.dm.pull_data_from_device('e')
        f = self.dm.pull_data_from_device('f')
        at = self.dm.pull_data_from_device('at')
        bt = self.dm.pull_data_from_device('bt')
        ct = self.dm.pull_data_from_device('ct')
        dt = self.dm.pull_data_from_device('dt')
        et = self.dm.pull_data_from_device('et')
        ft = self.dm.pull_data_from_device('ft')
        # Overwrite the torch-accessible arrays with new random values.
        self.dm.data_on_device_via_torch('at')[:] = torch.rand(2, 10, 3).cuda()
        self.dm.data_on_device_via_torch('bt')[:] = torch.rand(2, 10).cuda()
        self.dm.data_on_device_via_torch('ct')[:] = torch.rand(2).cuda()
        self.dm.data_on_device_via_torch('dt')[:] = torch.randint(10, size=(2, 10, 3)).cuda()
        self.dm.data_on_device_via_torch('et')[:] = torch.randint(10, size=(2, 10)).cuda()
        self.dm.data_on_device_via_torch('ft')[:] = torch.randint(10, size=(2,)).cuda()
        # Soft reset: only env 0 (done) should be restored.
        self.resetter.reset_when_done(self.dm)
        a_after_reset = self.dm.pull_data_from_device('a')
        b_after_reset = self.dm.pull_data_from_device('b')
        c_after_reset = self.dm.pull_data_from_device('c')
        d_after_reset = self.dm.pull_data_from_device('d')
        e_after_reset = self.dm.pull_data_from_device('e')
        f_after_reset = self.dm.pull_data_from_device('f')
        at_after_reset = self.dm.pull_data_from_device('at')
        bt_after_reset = self.dm.pull_data_from_device('bt')
        ct_after_reset = self.dm.pull_data_from_device('ct')
        dt_after_reset = self.dm.pull_data_from_device('dt')
        et_after_reset = self.dm.pull_data_from_device('et')
        ft_after_reset = self.dm.pull_data_from_device('ft')
        # Unmutated arrays: unchanged everywhere.
        self.assertTrue((np.absolute((a - a_after_reset).mean()) < 1e-05))
        self.assertTrue((np.absolute((b - b_after_reset).mean()) < 1e-05))
        self.assertTrue((np.absolute((c - c_after_reset).mean()) < 1e-05))
        self.assertTrue((np.count_nonzero((d - d_after_reset)) == 0))
        self.assertTrue((np.count_nonzero((e - e_after_reset)) == 0))
        self.assertTrue((np.count_nonzero((f - f_after_reset)) == 0))
        # Mutated arrays: env 0 restored to the original, env 1 kept mutated.
        self.assertTrue((np.absolute((at - at_after_reset)[0].mean()) < 1e-05))
        self.assertTrue((np.absolute((bt - bt_after_reset)[0].mean()) < 1e-05))
        self.assertTrue((np.absolute((ct - ct_after_reset)[0].mean()) < 1e-05))
        self.assertTrue((np.absolute((at - at_after_reset)[1].mean()) > 1e-05))
        self.assertTrue((np.absolute((bt - bt_after_reset)[1].mean()) > 1e-05))
        self.assertTrue((np.absolute((ct - ct_after_reset)[1].mean()) > 1e-05))
        self.assertTrue((np.count_nonzero((dt - dt_after_reset)[0]) == 0))
        self.assertTrue((np.count_nonzero((et - et_after_reset)[0]) == 0))
        self.assertTrue((np.count_nonzero((ft - ft_after_reset)[0]) == 0))
        self.assertTrue((np.count_nonzero((dt - dt_after_reset)[1]) > 0))
        self.assertTrue((np.count_nonzero((et - et_after_reset)[1]) > 0))
        # NOTE(review): '>= 0' is trivially always true; likely intended
        # '> 0' to match the et/dt assertions above — confirm.
        self.assertTrue((np.count_nonzero((ft - ft_after_reset)[1]) >= 0))
        # The reset must clear the done flags.
        done = self.dm.pull_data_from_device('_done_')
        self.assertSequenceEqual(list(done), [0, 0])
        # Second phase: with no env done, a soft reset must NOT restore.
        torch_data_feed2 = DataFeed()
        torch_data_feed2.add_data(name='af', data=np.random.randn(2, 10, 3), save_copy_and_apply_at_reset=True)
        torch_data_feed2.add_data(name='bf', data=np.random.randn(2, 10), save_copy_and_apply_at_reset=True)
        torch_data_feed2.add_data(name='cf', data=np.random.randn(2), save_copy_and_apply_at_reset=True)
        torch_data_feed2.add_data(name='df', data=np.random.randint(10, size=(2, 10, 3), dtype=np.int32), save_copy_and_apply_at_reset=True)
        torch_data_feed2.add_data(name='ef', data=np.random.randint(10, size=(2, 10), dtype=np.int32), save_copy_and_apply_at_reset=True)
        torch_data_feed2.add_data(name='ff', data=np.random.randint(10, size=2, dtype=np.int32), save_copy_and_apply_at_reset=True)
        self.dm.push_data_to_device(torch_data_feed2, torch_accessible=True)
        af = self.dm.pull_data_from_device('af')
        bf = self.dm.pull_data_from_device('bf')
        cf = self.dm.pull_data_from_device('cf')
        df = self.dm.pull_data_from_device('df')
        ef = self.dm.pull_data_from_device('ef')
        ff = self.dm.pull_data_from_device('ff')
        self.dm.data_on_device_via_torch('af')[:] = torch.rand(2, 10, 3).cuda()
        self.dm.data_on_device_via_torch('bf')[:] = torch.rand(2, 10).cuda()
        self.dm.data_on_device_via_torch('cf')[:] = torch.rand(2).cuda()
        self.dm.data_on_device_via_torch('df')[:] = torch.randint(10, size=(2, 10, 3)).cuda()
        self.dm.data_on_device_via_torch('ef')[:] = torch.randint(10, size=(2, 10)).cuda()
        self.dm.data_on_device_via_torch('ff')[:] = torch.randint(10, size=(2,)).cuda()
        self.resetter.reset_when_done(self.dm)
        af_after_soft_reset = self.dm.pull_data_from_device('af')
        bf_after_soft_reset = self.dm.pull_data_from_device('bf')
        cf_after_soft_reset = self.dm.pull_data_from_device('cf')
        df_after_soft_reset = self.dm.pull_data_from_device('df')
        ef_after_soft_reset = self.dm.pull_data_from_device('ef')
        ff_after_soft_reset = self.dm.pull_data_from_device('ff')
        # Nothing was done, so the mutated values must still be in place.
        self.assertTrue((np.absolute((af - af_after_soft_reset).mean()) > 1e-05))
        self.assertTrue((np.absolute((bf - bf_after_soft_reset).mean()) > 1e-05))
        self.assertTrue((np.absolute((cf - cf_after_soft_reset).mean()) > 1e-05))
        self.assertTrue((np.count_nonzero((df - df_after_soft_reset)) > 0))
        self.assertTrue((np.count_nonzero((ef - ef_after_soft_reset)) > 0))
        self.assertTrue((np.count_nonzero((ff - ff_after_soft_reset)) > 0))
        # Force reset: restores every environment regardless of done flags.
        self.resetter.reset_when_done(self.dm, mode='force_reset')
        af_after_hard_reset = self.dm.pull_data_from_device('af')
        bf_after_hard_reset = self.dm.pull_data_from_device('bf')
        cf_after_hard_reset = self.dm.pull_data_from_device('cf')
        df_after_hard_reset = self.dm.pull_data_from_device('df')
        ef_after_hard_reset = self.dm.pull_data_from_device('ef')
        ff_after_hard_reset = self.dm.pull_data_from_device('ff')
        self.assertTrue((np.absolute((af - af_after_hard_reset).mean()) < 1e-05))
        self.assertTrue((np.absolute((bf - bf_after_hard_reset).mean()) < 1e-05))
        self.assertTrue((np.absolute((cf - cf_after_hard_reset).mean()) < 1e-05))
        self.assertTrue((np.count_nonzero((df - df_after_hard_reset)) == 0))
        self.assertTrue((np.count_nonzero((ef - ef_after_hard_reset)) == 0))
        self.assertTrue((np.count_nonzero((ff - ff_after_hard_reset)) == 0))
def extract_imdb_wiki_arcface(dataset: str='imdb', docker_port: int=10002, cuda: bool=False, resize: int=640):
    """Extract arcface embeddings for every cropped image of ``dataset``.

    Starts the face-detection-recognition docker container, POSTs each
    image to it, and pickles the returned embeddings next to the image
    (``<image>.pkl``).

    Parameters
    ----------
    dataset : dataset directory to process (e.g. 'imdb' or 'wiki').
    docker_port : host port the container's service is published on.
    cuda : run the CUDA image with all GPUs if True.
    resize : if truthy, pad/resize each image to this square size first.
    """
    if cuda:
        image_name = 'tae898/face-detection-recognition-cuda'
        gpus = 'all'
    else:
        image_name = 'tae898/face-detection-recognition'
        gpus = None
    # Container listens on 10002 internally; publish to the chosen host port.
    container = docker.run(image=image_name, gpus=gpus, detach=True, publish=[(docker_port, 10002)])
    logging.info(f'starting a docker container ...')
    logging.debug(f'warming up the container ...')
    time.sleep(10)
    logging.info(f'extracting Adience arcface embedding vectors ...')
    image_paths = glob(f'./data/{dataset}_crop/*/*.jpg')
    COUNT_FAIL = 0
    for image_path in tqdm(image_paths):
        try:
            if resize:
                # Pad to a black square of side `resize` before sending.
                image = Image.open(image_path)
                image = resize_square_image(image, width=resize, background_color=(0, 0, 0))
                assert (image is not None)
                image.save((image_path + '.RESIZED.jpg'))
                with open((image_path + '.RESIZED.jpg'), 'rb') as stream:
                    frame_bytestring = stream.read()
            else:
                with open(image_path, 'rb') as stream:
                    frame_bytestring = stream.read()
            data = {'image': frame_bytestring}
            data = jsonpickle.encode(data)
            # FIX: the request URL was garbled in the original
            # (`requests.post(f' json=data)`, a SyntaxError).  Reconstructed
            # as the container's published local endpoint — confirm against
            # the upstream repository.
            response = requests.post(f'http://127.0.0.1:{docker_port}/', json=data)
            response = jsonpickle.decode(response.text)
            face_detection_recognition = response['face_detection_recognition']
            assert (face_detection_recognition is not None)
            with open((image_path + '.pkl'), 'wb') as stream:
                pickle.dump(face_detection_recognition, stream)
            # Free per-image buffers before the next iteration.
            del frame_bytestring, data, response, face_detection_recognition
        except Exception as e:
            # Best-effort: log and count failures, keep processing.
            logging.error(f'failed to process {image_path}: {e}')
            COUNT_FAIL += 1
    logging.error(f'in total {COUNT_FAIL} number of images failed to extract face embeddings!')
    logging.debug(f'killing the container ...')
    container.kill()
    logging.info(f'container killed.')
    logging.info(f'DONE!')
def draw_interactive(G, c, x, hover_text=None, node_size=10.0, pos=None, cmap=None):
    """Render graph ``G`` as an interactive Plotly scatter plot.

    Parameters
    ----------
    G : networkx graph.
    c : mapping node -> group id (``None`` marks a 'residual' node).
    x : mapping node -> coreness value (``None`` marks 'residual').
    hover_text : optional mapping node -> extra hover text; defaults to
        'Node <name>'.
    node_size : global marker size scale (used as 1/sizeref).
    pos : optional precomputed layout; a spring layout is computed
        otherwise.
    cmap : colormap forwarded to ``set_node_colors``.

    Returns
    -------
    plotly Figure (800x800, white template) with one marker per node.
    """
    (node_colors, node_edge_colors) = set_node_colors(G, c, x, cmap)
    if (pos is None):
        pos = nx.spring_layout(G)
    nodelist = [d for d in G.nodes()]
    # Nodes without a group / coreness are labeled 'residual' and drawn
    # smaller (size 1/2 instead of coreness + 1).
    group_ids = [(c[d] if (c[d] is not None) else 'residual') for d in nodelist]
    coreness = [(x[d] if (x[d] is not None) else 'residual') for d in nodelist]
    node_size_list = [((x[d] + 1) if (x[d] is not None) else (1 / 2)) for d in nodelist]
    pos_x = [pos[d][0] for d in nodelist]
    pos_y = [pos[d][1] for d in nodelist]
    df = pd.DataFrame({'x': pos_x, 'y': pos_y, 'name': nodelist, 'group_id': group_ids, 'coreness': coreness, 'node_size': node_size_list})
    # Residual nodes use square markers; grouped nodes use circles.
    df['marker'] = df['group_id'].apply((lambda s: ('circle' if (s != 'residual') else 'square')))
    df['hovertext'] = df.apply((lambda s: '{ht}<br>Group: {group}<br>Coreness: {coreness}'.format(ht=(('Node %s' % s['name']) if (hover_text is None) else hover_text.get(s['name'], '')), group=s['group_id'], coreness=s['coreness'])), axis=1)
    fig = go.Figure(data=go.Scatter(x=df['x'], y=df['y'], marker_size=df['node_size'], marker_symbol=df['marker'], hovertext=df['hovertext'], hoverlabel=dict(namelength=0), hovertemplate='%{hovertext}', marker={'color': node_colors, 'sizeref': (1.0 / node_size), 'line': {'color': node_edge_colors, 'width': 1}}, mode='markers'))
    fig.update_layout(autosize=False, width=800, height=800, template='plotly_white')
    return fig
def test_imagecollection_input():
    """ImageCollection should load all three fetched sample images."""
    sample_names = ('data/coffee.png', 'data/chessboard_GRAY.png', 'data/rocket.jpg')
    pics = [fetch(name) for name in sample_names]
    pattern = [os.path.join(data_dir, pic) for pic in pics]
    collection = ImageCollection(pattern)
    assert (len(collection) == 3)
def Chicken(A=0, a=0, B=1, b=-1, C=-1, c=1, D=-10, d=-10):
    """Create the game of Chicken as a renamed anti-coordination game.

    The payoffs must satisfy B > A > C > D for the row player and
    c > a > b > d for the column player; otherwise a TypeError is raised
    (matching the behavior of the other game constructors).
    """
    row_ordered = B > A > C > D
    col_ordered = c > a > b > d
    if not (row_ordered and col_ordered):
        raise TypeError('the input values for a game of chicken must be of the form B > A > C > D and c > a > b > d')
    game = AntiCoordinationGame(A=A, a=a, B=B, b=b, C=C, c=c, D=D, d=d)
    game.rename('Chicken - ' + repr(game))
    return game
class CiscoUmbrellaUpdatePolicy(VirtualFunctionTool):
    """Virtual-tool spec: update an existing Cisco Umbrella security policy.

    Declarative metadata only — the surrounding framework presumably reads
    these class attributes to expose and validate the tool; there is no
    executable behavior here.
    """
    # Tool identity and human-readable summary.
    name = 'CiscoUmbrellaUpdatePolicy'
    summary = 'Update an existing security policy.'
    # Input schema: only policy_id is required; all other fields optional.
    parameters: List[ArgParameter] = [{'name': 'policy_id', 'type': 'string', 'description': 'The unique identifier of the policy to be updated.', 'required': True}, {'name': 'policy_name', 'type': 'string', 'description': 'The new name of the policy.', 'required': False}, {'name': 'policy_description', 'type': 'string', 'description': 'The new description of the policy.', 'required': False}, {'name': 'policy_type', 'type': 'string', 'description': "The new type of the policy. One of ['whitelist', 'blacklist'].", 'required': False}, {'name': 'domains', 'type': 'array', 'description': 'An array of strings, each string is a domain to be added to the policy.', 'required': False}]
    # Output schema.
    returns: List[ArgReturn] = [{'name': 'success', 'type': 'boolean', 'description': 'Whether the policy was successfully updated.'}]
    # Declared failure modes.
    exceptions: List[ArgException] = [{'name': 'InvalidRequestException', 'description': "The 'policy_id' argument is not a valid policy identifier, the 'policy_name' argument is empty, the 'policy_description' argument is empty, the 'policy_type' argument is not one of ['whitelist', 'blacklist'], or the 'domains' argument is empty."}, {'name': 'NotFoundException', 'description': "The 'policy_id' does not exist."}]
class SimpleLSTMModel(Model):
    """Stub LSTM model that emits a single trainable constant everywhere.

    A lightweight stand-in for a real recurrent model: every output
    position is filled with one scalar variable (initialized to 0.5), and
    the step hidden/cell states are passed through unchanged.
    """

    def __init__(self, output_dim, hidden_dim, name='SimpleLSTMModel', *args, **kwargs):
        super().__init__(name)
        self.output_dim = output_dim
        self.hidden_dim = hidden_dim

    def network_input_spec(self):
        """Names of the network inputs, in order."""
        return ['full_input', 'step_input', 'step_hidden_input', 'step_cell_input']

    def network_output_spec(self):
        """Names of the network outputs, in order."""
        return ['all_output', 'step_output', 'step_hidden', 'step_cell', 'init_hidden', 'init_cell']

    def _build(self, obs_input, step_obs_input, step_hidden, step_cell, name=None):
        """Create constant-filled outputs and zero-valued initial states."""
        fill_value = tf.compat.v1.get_variable('return_var', (), initializer=tf.constant_initializer(0.5))
        # Full-sequence output: (batch, time, output_dim).
        full_outputs = tf.fill((tf.shape(obs_input)[0], tf.shape(obs_input)[1], self.output_dim), fill_value)
        # Single-step output: (batch, output_dim).
        step_output = tf.fill((tf.shape(step_obs_input)[0], self.output_dim), fill_value)
        init_hidden = tf.compat.v1.get_variable(name='initial_hidden', shape=(self.hidden_dim,), initializer=tf.zeros_initializer(), trainable=False, dtype=tf.float32)
        init_cell = tf.compat.v1.get_variable(name='initial_cell', shape=(self.hidden_dim,), initializer=tf.zeros_initializer(), trainable=False, dtype=tf.float32)
        return (full_outputs, step_output, step_hidden, step_cell, init_hidden, init_cell)
def max_pool(input_tensor, last_dim, sequence_length=None):
    """Max-pool ``input_tensor`` over its second-to-last axis.

    ``sequence_length`` is accepted for signature compatibility but is
    not used here.
    """
    with tf.name_scope('max_pool'):
        seq_dim = tf.shape(input_tensor)[1]
        # Handle padding before pooling, then restore the 3-D layout.
        padded = handle_pad_max_pooling(input_tensor, last_dim)
        reshaped = tf.reshape(padded, [(- 1), seq_dim, last_dim])
        return tf.reduce_max(reshaped, axis=(- 2))
def register_Ns3LteRrcSapMeasIdToAddMod_methods(root_module, cls):
    """Register constructors and attributes for ns3::LteRrcSap::MeasIdToAddMod."""
    # Default and copy constructors.
    cls.add_constructor([])
    cls.add_constructor([param('ns3::LteRrcSap::MeasIdToAddMod const &', 'arg0')])
    # All three fields are mutable uint8_t identifiers.
    for attribute in ('measId', 'measObjectId', 'reportConfigId'):
        cls.add_instance_attribute(attribute, 'uint8_t', is_const=False)
    return
def _load_pretrained_model(model_name_or_path, *args, **kwargs):
    """Resolve a pretrained model reference into its pieces.

    ``model_name_or_path`` may be a local path (used as-is) or a model zoo
    key (downloaded first).  Returns a dict with the model's config, the
    checkpoint state dict, and the full loaded config.
    """
    if PathManager.exists(model_name_or_path):
        # Local checkpoint folder/file: nothing to download.
        download_path = model_name_or_path
    else:
        download_path = download_pretrained_model(model_name_or_path, *args, **kwargs)
    model_name = model_name_or_path
    _hack_imports()
    ckpt_path = get_ckpt_path_from_folder(download_path)
    ckpt = get_ckpt_from_path(ckpt_path)
    config = get_config_from_folder_or_ckpt(download_path, ckpt)
    # Older checkpoints may store the model config / weights at the top level.
    model_config = config.get('model_config', config)
    ckpt = ckpt.get('model', ckpt)
    if PathManager.exists(model_name):
        # Local checkpoints are expected to hold exactly one model's config.
        assert len(model_config.keys()) == 1, 'Checkpoint contains more than one model?'
        (only_key,) = list(model_config.keys())
        model_config = model_config[only_key]
    else:
        # Zoo key: derive the model name from the last path component.
        base_name = model_name.split(os.path.sep)[-1].split('.')[0]
        model_config = model_config.get(base_name)
    return {'config': model_config, 'checkpoint': ckpt, 'full_config': config}
class TrainOptions(BaseOptions):
    """Training-time options, extending :class:`BaseOptions`.

    Adds visdom/HTML visualization flags, checkpointing frequencies, and
    optimizer / learning-rate-schedule hyperparameters.
    """

    def initialize(self, parser):
        parser = BaseOptions.initialize(self, parser)
        # --- visualization (visdom / HTML) ---
        parser.add_argument('--display_freq', type=int, default=400, help='frequency of showing training results on screen')
        parser.add_argument('--display_ncols', type=int, default=4, help='if positive, display all images in a single visdom web panel with certain number of images per row.')
        parser.add_argument('--display_id', type=int, default=None, help='window id of the web display. Default is random window id')
        # BUGFIX: the original line had a truncated string literal
        # (default=' help='...'), which is a syntax error.  Restored the
        # conventional visdom server default — confirm against deployment.
        parser.add_argument('--display_server', type=str, default='http://localhost', help='visdom server of the web display')
        parser.add_argument('--display_env', type=str, default='main', help='visdom display environment name (default is "main")')
        parser.add_argument('--display_port', type=int, default=8097, help='visdom port of the web display')
        parser.add_argument('--update_html_freq', type=int, default=1000, help='frequency of saving training results to html')
        parser.add_argument('--print_freq', type=int, default=100, help='frequency of showing training results on console')
        parser.add_argument('--no_html', action='store_true', help='do not save intermediate training results to [opt.checkpoints_dir]/[opt.name]/web/')
        # --- checkpointing ---
        parser.add_argument('--save_latest_freq', type=int, default=5000, help='frequency of saving the latest results')
        parser.add_argument('--save_epoch_freq', type=int, default=5, help='frequency of saving checkpoints at the end of epochs')
        parser.add_argument('--evaluation_freq', type=int, default=5000, help='evaluation freq')
        parser.add_argument('--save_by_iter', action='store_true', help='whether saves model by iteration')
        parser.add_argument('--continue_train', action='store_true', help='continue training: load the latest model')
        parser.add_argument('--epoch_count', type=int, default=1, help='the starting epoch count, we save the model by <epoch_count>, <epoch_count>+<save_latest_freq>, ...')
        parser.add_argument('--phase', type=str, default='train', help='train, val, test, etc')
        parser.add_argument('--pretrained_name', type=str, default=None, help='resume training from another checkpoint')
        # --- optimizer / schedule ---
        parser.add_argument('--n_epochs', type=int, default=200, help='number of epochs with the initial learning rate')
        parser.add_argument('--n_epochs_decay', type=int, default=200, help='number of epochs to linearly decay learning rate to zero')
        parser.add_argument('--beta1', type=float, default=0.5, help='momentum term of adam')
        parser.add_argument('--beta2', type=float, default=0.999, help='momentum term of adam')
        parser.add_argument('--lr', type=float, default=0.0002, help='initial learning rate for adam')
        parser.add_argument('--gan_mode', type=str, default='lsgan', help='the type of GAN objective. [vanilla| lsgan | wgangp]. vanilla GAN loss is the cross-entropy objective used in the original GAN paper.')
        parser.add_argument('--pool_size', type=int, default=50, help='the size of image buffer that stores previously generated images')
        parser.add_argument('--lr_policy', type=str, default='linear', help='learning rate policy. [linear | step | plateau | cosine]')
        parser.add_argument('--lr_decay_iters', type=int, default=50, help='multiply by a gamma every lr_decay_iters iterations')
        self.isTrain = True
        return parser
def convert(src, dst, depth):
    """Convert a Caffe ResNet checkpoint at ``src`` into a PyTorch
    ``state_dict`` checkpoint written to ``dst``.

    ``depth`` selects the architecture via the module-level
    ``arch_settings`` table; unsupported depths raise ``ValueError``.
    Unconverted blobs are reported on stdout.
    """
    if depth not in arch_settings:
        raise ValueError('Only support ResNet-50 and ResNet-101 currently')
    block_nums = arch_settings[depth]
    caffe_model = mmcv.load(src, encoding='latin1')
    # Some checkpoints nest the weights under a 'blobs' key.
    blobs = caffe_model['blobs'] if 'blobs' in caffe_model else caffe_model
    state_dict = OrderedDict()
    converted_names = set()
    # Stem: first conv + its batch norm.
    convert_conv_fc(blobs, state_dict, 'conv1', 'conv1', converted_names)
    convert_bn(blobs, state_dict, 'res_conv1_bn', 'bn1', converted_names)
    for stage, num_blocks in enumerate(block_nums, start=1):
        for block in range(num_blocks):
            caffe_prefix = 'res{}_{}'.format(stage + 1, block)
            torch_prefix = 'layer{}.{}'.format(stage, block)
            if block == 0:
                # First block of each stage carries the downsample shortcut.
                convert_conv_fc(blobs, state_dict, caffe_prefix + '_branch1',
                                torch_prefix + '.downsample.0', converted_names)
                convert_bn(blobs, state_dict, caffe_prefix + '_branch1_bn',
                           torch_prefix + '.downsample.1', converted_names)
            # Bottleneck branch: three conv/bn pairs (a, b, c).
            for conv_idx, letter in enumerate('abc', start=1):
                convert_conv_fc(blobs, state_dict,
                                '{}_branch2{}'.format(caffe_prefix, letter),
                                '{}.conv{}'.format(torch_prefix, conv_idx),
                                converted_names)
                convert_bn(blobs, state_dict,
                           '{}_branch2{}_bn'.format(caffe_prefix, letter),
                           '{}.bn{}'.format(torch_prefix, conv_idx),
                           converted_names)
    for key in blobs:
        if key not in converted_names:
            print('Not Convert: {}'.format(key))
    torch.save({'state_dict': state_dict}, dst)
class VariableSet(object):
    """Holds named string variables and expands ``${name}`` references.

    Each variable gets a precompiled ``${name}`` pattern; ``interpolate``
    substitutes repeatedly until no known reference changes the value.
    """

    def __init__(self, d):
        # Copy the mapping so later mutation of *d* does not affect us.
        self._raw_data = dict(d.items())
        self._re = {}       # name -> compiled ${name} pattern
        self._re_sub = {}   # name -> replacement text
        self._init_parse()

    def _init_parse(self):
        for name, value in self._raw_data.items():
            self._init_parse_var(name, value)

    def _init_parse_var(self, name, value):
        self._re[name] = re.compile(r'\$\{%s\}' % name)
        self._re_sub[name] = value

    def interpolate(self, value):
        """Expand ``${var}`` references in *value* until a fixed point."""
        def substitute_once(text):
            for name in self._re.keys():
                text = self._re[name].sub(self._re_sub[name], text)
            return text

        while _VAR.search(value):
            expanded = substitute_once(value)
            if expanded == value:
                # Remaining references are unknown; stop to avoid looping.
                break
            value = expanded
        return value

    def variables(self):
        """Return the list of known variable names."""
        return list(self._raw_data.keys())

    def __getitem__(self, name):
        return self._raw_data[name]

    def __setitem__(self, name, value):
        self._raw_data[name] = value
        self._init_parse_var(name, value)
class TStdNotify(TNotify):
    """SWIG proxy for the native TStdNotify notifier class."""

    # SWIG ownership flag: True when Python owns the underlying C++ object.
    thisown = _swig_property(
        lambda self: self.this.own(),
        lambda self, value: self.this.own(value),
        doc='The membership flag')
    __repr__ = _swig_repr

    def __init__(self):
        _snap.TStdNotify_swiginit(self, _snap.new_TStdNotify())

    @staticmethod
    def New():
        """Create a new native TStdNotify instance."""
        return _snap.TStdNotify_New()

    __swig_destroy__ = _snap.delete_TStdNotify
def cross_entropy(pred, label, weight=None, class_weight=None, reduction='mean',
                  avg_factor=None, ignore_index=(-100), avg_non_ignore=False):
    """Cross-entropy loss with optional per-element weighting.

    Computes element-wise CE (class weights via ``class_weight``), then
    delegates weighting/reduction to ``weight_reduce_loss``.  When
    ``avg_non_ignore`` is set and reduction is 'mean', the average is taken
    over non-ignored labels only.
    """
    # Element-wise loss; reduction happens in weight_reduce_loss below.
    raw_loss = F.cross_entropy(pred, label, weight=class_weight,
                               reduction='none', ignore_index=ignore_index)
    if avg_factor is None and avg_non_ignore and reduction == 'mean':
        # Average over labels that are not ignore_index.
        avg_factor = label.numel() - (label == ignore_index).sum().item()
    if weight is not None:
        weight = weight.float()
    return weight_reduce_loss(raw_loss, weight=weight,
                              reduction=reduction, avg_factor=avg_factor)
def test_unflatten_dict_raises_error_column_index():
    """unflatten_dict must reject numeric column-style keys like 'foo__1__0'."""
    flat_input = {'foo__1__0': 'some value'}
    expected_msg = 'There was an error unflattening the extension.'
    with pytest.raises(ValueError, match=expected_msg):
        unflatten_dict(flat_input)
# BUGFIX: the registry call was a bare statement (`_HEADS.register(...)`),
# which builds the decorator but never applies it, so the head was never
# registered.  Restored as a class decorator — confirm against the
# project's registry conventions.
@_HEADS.register('parsingiou_head')
class ParsingIoUHead(nn.Module):
    """Head predicting parsing IoU (quality) from the last feature map.

    Applies a 1x1 conv, then ``NUM_CONVS`` 1x1 conv blocks on the globally
    average-pooled feature, producing a (conv_dim, 1, 1) output.
    """

    def __init__(self, cfg, dim_in, spatial_in):
        super(ParsingIoUHead, self).__init__()
        self.dim_in = dim_in[-1]          # channels of the last input feature map
        self.spatial_in = spatial_in[-1]
        num_convs = cfg.PARSING.PARSINGIOU.NUM_CONVS
        conv_dim = cfg.PARSING.PARSINGIOU.CONV_DIM
        norm = cfg.PARSING.PARSINGIOU.NORM
        self.conv1x1 = make_conv(self.dim_in, self.dim_in, kernel_size=1, stride=1,
                                 norm=make_norm(self.dim_in, norm=norm), act=make_act())
        conv_layers = []
        for i in range(num_convs):
            conv_layers.append(make_conv(self.dim_in, conv_dim, kernel_size=1, stride=1,
                                         norm=make_norm(conv_dim, norm=norm), act=make_act()))
            self.dim_in = conv_dim
        self.add_module('conv_layers', nn.Sequential(*conv_layers))
        self.dim_out = [conv_dim]
        self.spatial_out = [(1, 1)]
        self._init_weights()

    def _init_weights(self):
        """Kaiming init for convs/linears, constant init for norm layers."""
        for m in self.modules():
            if isinstance(m, nn.Conv2d):
                nn.init.kaiming_normal_(m.weight, mode='fan_out', nonlinearity='relu')
                if m.bias is not None:
                    nn.init.constant_(m.bias, 0)
            elif isinstance(m, nn.Linear):
                nn.init.kaiming_uniform_(m.weight, a=1)
                nn.init.constant_(m.bias, 0)
            elif isinstance(m, (nn.BatchNorm2d, nn.GroupNorm)):
                nn.init.constant_(m.weight, 1)
                nn.init.constant_(m.bias, 0)

    def forward(self, x):
        # Only the last feature map is used.
        x = x[-1]
        x = self.conv1x1(x)
        # Global average pool to 1x1, then the 1x1 conv stack.
        x = self.conv_layers(F.adaptive_avg_pool2d(x, (1, 1)))
        return [x]
def test_log_operation_with_checksum(agent: Agent):
    """Logging with a checksum appends 'name: path #CHECKSUM' to the file log."""
    file_ops.log_operation('log_test', 'path/to/test', agent=agent, checksum='ABCDEF')
    with open(agent.config.file_logger_path, 'r', encoding='utf-8') as log_file:
        logged = log_file.read()
    assert 'log_test: path/to/test #ABCDEF\n' in logged
# BUGFIX: the bare `.parametrize(...)` lines were syntax errors — the
# `@pytest.mark` prefix was lost; restored as standard pytest decorators.
@pytest.mark.parametrize('ctx, func_name', ctxs)
@pytest.mark.parametrize('seed', [314])
def test_assign_recomputation(seed, ctx, func_name):
    """Recomputation test for F.assign with a (dst, src) variable pair."""
    rng = np.random.RandomState(seed)
    dst = nn.Variable((2, 3, 4))
    src = nn.Variable((2, 3, 4))
    recomputation_test(rng=rng, func=F.assign, vinputs=[dst, src],
                       func_args=[], func_kwargs={}, ctx=ctx)
def test_sum_single():
    """Sum over a single variable equals that variable; its gradient is 1."""
    with goos.OptimizationPlan() as plan:
        var = goos.Variable(2.0)
        total = goos.Sum([var])
        assert total.get() == 2
        assert total.get_grad([var]) == [1]
def label_smoothed_nll_loss(lprobs, target, epsilon, ignore_index=None, reduce=True):
if (target.dim() == (lprobs.dim() - 1)):
target = target.unsqueeze((- 1))
nll_loss = (- lprobs.gather(dim=(- 1), index=target))
smooth_loss = (- lprobs.sum(dim=(- 1), keepdim=True))
if (ignore_index is not None):
non_pad_mask = target.ne(ignore_index)
nll_loss = nll_loss[non_pad_mask]
smooth_loss = smooth_loss[non_pad_mask]
else:
nll_loss = nll_loss.squeeze((- 1))
smooth_loss = smooth_loss.squeeze((- 1))
if reduce:
nll_loss = nll_loss.sum()
smooth_loss = smooth_loss.sum()
eps_i = (epsilon / lprobs.size((- 1)))
loss = (((1.0 - epsilon) * nll_loss) + (eps_i * smooth_loss))
return (loss, nll_loss) |
def transform(column_names, data):
    """Square the columns named in ``column_names`` in place and return ``data``."""
    selected = data[column_names]
    data[column_names] = selected ** 2
    return data
def get_normals_field(vertices):
    """Return the cached (normals, normal_weights) field pair for ``vertices``,
    allocating them on first use (one pair per vertex buffer)."""
    if vertices not in normals_field_cache:
        vertex_count = vertices.shape[0]
        normals = Vector.field(3, f32, shape=(vertex_count,))
        normal_weights = field(f32, shape=(vertex_count,))
        normals_field_cache[vertices] = (normals, normal_weights)
    return normals_field_cache[vertices]
class UPChannelBAN(BAN):
    """Up-channel box-adaptive head.

    Template features are lifted to ``feature_in * out_channels`` so that a
    fast cross-correlation against the search features yields per-channel
    classification and localization maps.
    """

    def __init__(self, feature_in=256, cls_out_channels=2):
        super(UPChannelBAN, self).__init__()
        num_cls = cls_out_channels
        num_loc = 4  # box regression has 4 outputs
        # Template branch produces correlation kernels.
        self.template_cls_conv = nn.Conv2d(feature_in, feature_in * num_cls, kernel_size=3)
        self.template_loc_conv = nn.Conv2d(feature_in, feature_in * num_loc, kernel_size=3)
        # Search branch keeps the channel count unchanged.
        self.search_cls_conv = nn.Conv2d(feature_in, feature_in, kernel_size=3)
        self.search_loc_conv = nn.Conv2d(feature_in, feature_in, kernel_size=3)
        self.loc_adjust = nn.Conv2d(num_loc, num_loc, kernel_size=1)

    def forward(self, z_f, x_f):
        # Kernels from the template (z), features from the search region (x).
        cls_kernel = self.template_cls_conv(z_f)
        loc_kernel = self.template_loc_conv(z_f)
        cls_feat = self.search_cls_conv(x_f)
        loc_feat = self.search_loc_conv(x_f)
        cls = xcorr_fast(cls_feat, cls_kernel)
        loc = self.loc_adjust(xcorr_fast(loc_feat, loc_kernel))
        return (cls, loc)
class TestDiscreteCNNQFunction(TfGraphTestCase):
    """Tests for DiscreteCNNQFunction (CNN Q-function over discrete actions).

    BUGFIX: the bare ``.parametrize(...)`` lines throughout this class were
    syntax errors — the ``@pytest.mark`` prefix was lost; restored as
    standard pytest decorators.
    """

    def setup_method(self):
        super().setup_method()
        self.env = GarageEnv(DummyDiscretePixelEnv())
        self.obs = self.env.reset()

    @pytest.mark.parametrize('filters, strides', [(((5, (3, 3)),), (1,)), (((5, (3, 3)),), (2,)), (((5, (3, 3)), (5, (3, 3))), (1, 1))])
    def test_get_action(self, filters, strides):
        with mock.patch('garage.tf.q_functions.discrete_cnn_q_function.CNNModel', new=SimpleCNNModel):
            with mock.patch('garage.tf.q_functions.discrete_cnn_q_function.MLPModel', new=SimpleMLPModel):
                qf = DiscreteCNNQFunction(env_spec=self.env.spec, filters=filters, strides=strides, dueling=False)
        action_dim = self.env.action_space.n
        # SimpleMLPModel outputs a constant 0.5 for every action.
        expected_output = np.full(action_dim, 0.5)
        outputs = self.sess.run(qf.q_vals, feed_dict={qf.input: [self.obs]})
        assert np.array_equal(outputs[0], expected_output)
        outputs = self.sess.run(qf.q_vals, feed_dict={qf.input: [self.obs, self.obs, self.obs]})
        for output in outputs:
            assert np.array_equal(output, expected_output)

    @pytest.mark.parametrize('obs_dim', [[1], [2], [1, 1, 1, 1], [2, 2, 2, 2]])
    def test_invalid_obs_shape(self, obs_dim):
        boxEnv = GarageEnv(DummyDiscreteEnv(obs_dim=obs_dim))
        with pytest.raises(ValueError):
            DiscreteCNNQFunction(env_spec=boxEnv.spec, filters=((5, (3, 3)),), strides=(2,), dueling=False)

    def test_obs_is_image(self):
        image_env = GarageEnv(DummyDiscretePixelEnv(), is_image=True)
        with mock.patch('garage.tf.models.categorical_cnn_model.CNNModel._build', autospec=True, side_effect=CNNModel._build) as build:
            qf = DiscreteCNNQFunction(env_spec=image_env.spec, filters=((5, (3, 3)),), strides=(2,), dueling=False)
            # Image observations must be normalized to [0, 1] before the CNN.
            normalized_obs = build.call_args_list[0][0][1]
            input_ph = qf.input
            assert (input_ph != normalized_obs)
            fake_obs = [np.full(image_env.spec.observation_space.shape, 255)]
            assert (self.sess.run(normalized_obs, feed_dict={input_ph: fake_obs}) == 1.0).all()
            obs_dim = image_env.spec.observation_space.shape
            state_input = tf.compat.v1.placeholder(tf.uint8, shape=((None,) + obs_dim))
            qf.get_qval_sym(state_input, name='another')
            normalized_obs = build.call_args_list[1][0][1]
            fake_obs = [np.full(image_env.spec.observation_space.shape, 255)]
            assert (self.sess.run(normalized_obs, feed_dict={state_input: fake_obs}) == 1.0).all()

    def test_obs_not_image(self):
        env = self.env
        with mock.patch('garage.tf.models.categorical_cnn_model.CNNModel._build', autospec=True, side_effect=CNNModel._build) as build:
            qf = DiscreteCNNQFunction(env_spec=env.spec, filters=((5, (3, 3)),), strides=(2,), dueling=False)
            # Non-image observations must be passed through unnormalized.
            normalized_obs = build.call_args_list[0][0][1]
            input_ph = qf.input
            assert (input_ph == normalized_obs)
            fake_obs = [np.full(env.spec.observation_space.shape, 255)]
            assert (self.sess.run(normalized_obs, feed_dict={input_ph: fake_obs}) == 255.0).all()
            obs_dim = env.spec.observation_space.shape
            state_input = tf.compat.v1.placeholder(tf.float32, shape=((None,) + obs_dim))
            qf.get_qval_sym(state_input, name='another')
            normalized_obs = build.call_args_list[1][0][1]
            fake_obs = [np.full(env.spec.observation_space.shape, 255)]
            assert (self.sess.run(normalized_obs, feed_dict={state_input: fake_obs}) == 255).all()

    @pytest.mark.parametrize('filters, strides', [(((5, (3, 3)),), (1,)), (((5, (3, 3)),), (2,)), (((5, (3, 3)), (5, (3, 3))), (1, 1))])
    def test_get_action_dueling(self, filters, strides):
        with mock.patch('garage.tf.q_functions.discrete_cnn_q_function.CNNModel', new=SimpleCNNModel):
            with mock.patch('garage.tf.q_functions.discrete_cnn_q_function.MLPDuelingModel', new=SimpleMLPModel):
                qf = DiscreteCNNQFunction(env_spec=self.env.spec, filters=filters, strides=strides, dueling=True)
        action_dim = self.env.action_space.n
        expected_output = np.full(action_dim, 0.5)
        outputs = self.sess.run(qf.q_vals, feed_dict={qf.input: [self.obs]})
        assert np.array_equal(outputs[0], expected_output)
        outputs = self.sess.run(qf.q_vals, feed_dict={qf.input: [self.obs, self.obs, self.obs]})
        for output in outputs:
            assert np.array_equal(output, expected_output)

    @pytest.mark.parametrize('filters, strides, pool_strides, pool_shapes', [(((5, (3, 3)),), (1,), (1, 1), (1, 1)), (((5, (3, 3)),), (2,), (2, 2), (2, 2)), (((5, (3, 3)), (5, (3, 3))), (1, 1), (1, 1), (1, 1)), (((5, (3, 3)), (5, (3, 3))), (1, 1), (2, 2), (2, 2))])
    def test_get_action_max_pooling(self, filters, strides, pool_strides, pool_shapes):
        with mock.patch('garage.tf.q_functions.discrete_cnn_q_function.CNNModelWithMaxPooling', new=SimpleCNNModelWithMaxPooling):
            with mock.patch('garage.tf.q_functions.discrete_cnn_q_function.MLPModel', new=SimpleMLPModel):
                qf = DiscreteCNNQFunction(env_spec=self.env.spec, filters=filters, strides=strides, max_pooling=True, pool_strides=pool_strides, pool_shapes=pool_shapes, dueling=False)
        action_dim = self.env.action_space.n
        expected_output = np.full(action_dim, 0.5)
        outputs = self.sess.run(qf.q_vals, feed_dict={qf.input: [self.obs]})
        assert np.array_equal(outputs[0], expected_output)
        outputs = self.sess.run(qf.q_vals, feed_dict={qf.input: [self.obs, self.obs, self.obs]})
        for output in outputs:
            assert np.array_equal(output, expected_output)

    @pytest.mark.parametrize('filters, strides', [(((5, (3, 3)),), (1,)), (((5, (3, 3)),), (2,)), (((5, (3, 3)), (5, (3, 3))), (1, 1))])
    def test_get_qval_sym(self, filters, strides):
        with mock.patch('garage.tf.q_functions.discrete_cnn_q_function.CNNModel', new=SimpleCNNModel):
            with mock.patch('garage.tf.q_functions.discrete_cnn_q_function.MLPModel', new=SimpleMLPModel):
                qf = DiscreteCNNQFunction(env_spec=self.env.spec, filters=filters, strides=strides, dueling=False)
        output1 = self.sess.run(qf.q_vals, feed_dict={qf.input: [self.obs]})
        obs_dim = self.env.observation_space.shape
        action_dim = self.env.action_space.n
        input_var = tf.compat.v1.placeholder(tf.float32, shape=((None,) + obs_dim))
        q_vals = qf.get_qval_sym(input_var, 'another')
        output2 = self.sess.run(q_vals, feed_dict={input_var: [self.obs]})
        expected_output = np.full(action_dim, 0.5)
        assert np.array_equal(output1, output2)
        assert np.array_equal(output2[0], expected_output)

    @pytest.mark.parametrize('filters, strides', [(((5, (3, 3)),), (1,)), (((5, (3, 3)),), (2,)), (((5, (3, 3)), (5, (3, 3))), (1, 1))])
    def test_is_pickleable(self, filters, strides):
        with mock.patch('garage.tf.q_functions.discrete_cnn_q_function.CNNModel', new=SimpleCNNModel):
            with mock.patch('garage.tf.q_functions.discrete_cnn_q_function.MLPModel', new=SimpleMLPModel):
                qf = DiscreteCNNQFunction(env_spec=self.env.spec, filters=filters, strides=strides, dueling=False)
        with tf.compat.v1.variable_scope('DiscreteCNNQFunction/Sequential/SimpleMLPModel', reuse=True):
            return_var = tf.compat.v1.get_variable('return_var')
        # Change the variable so the unpickled copy must carry the new value.
        return_var.load(tf.ones_like(return_var).eval())
        output1 = self.sess.run(qf.q_vals, feed_dict={qf.input: [self.obs]})
        h_data = pickle.dumps(qf)
        with tf.compat.v1.Session(graph=tf.Graph()) as sess:
            qf_pickled = pickle.loads(h_data)
            output2 = sess.run(qf_pickled.q_vals, feed_dict={qf_pickled.input: [self.obs]})
        assert np.array_equal(output1, output2)
class PredictionList():
    """Id-indexed collection of Prediction objects with dict-like access."""

    def __init__(self, predictions: List[Prediction]):
        self.id_to_prediction = {p.id: p for p in predictions}
        # Guard against silently collapsing predictions with duplicate ids.
        assert (len(predictions) == len(self.id_to_prediction))

    def __contains__(self, item: str) -> bool:
        return (item in self.id_to_prediction)

    def __getitem__(self, item: str) -> Prediction:
        return self.id_to_prediction[item]

    def __iter__(self):
        return iter(self.id_to_prediction.values())

    # BUGFIX: this alternate constructor takes ``cls`` but was missing its
    # @classmethod decorator, so calling it on the class would misbind the
    # first argument.
    @classmethod
    def from_file(cls, prediction_file_path: str, answer_string: Callable = None):
        """Build a PredictionList from a JSON file mapping id -> answer.

        ``answer_string`` wraps each raw answer; defaults to AnswerString.
        """
        if answer_string is None:
            answer_string = AnswerString
        with open(prediction_file_path, 'r') as prediction_file:
            raw_predictions = json.load(prediction_file)
        predictions = [Prediction(id_, answer_string(answer))
                       for (id_, answer) in raw_predictions.items()]
        return cls(predictions)
# BUGFIX: the bare `.parametrize(...)` line was a syntax error — the
# `@pytest.mark` prefix was lost; restored as a standard pytest decorator.
@pytest.mark.parametrize('module', MODULES)
def test_networkpass_set_variable(module):
    """set_variable on legacy and new NnpNetworkPass must yield equivalent graphs."""
    (_, inputs) = module
    verbose = 1
    callback = nnp_graph.NnpNetworkPass(verbose)
    ref_callback = legacy_nnp_graph.NnpNetworkPass(verbose)
    for (inp_name, inp_shape) in inputs:
        # Force batch dimension to 1 for both implementations.
        inp_shape = (1, *inp_shape[1:])
        callback.set_variable(inp_name, nn.Variable(inp_shape))
        ref_callback.set_variable(inp_name, nn.Variable(inp_shape))
    with get_saved_test_model(module) as nnp_file:
        ref_nnp = legacy_nnp_graph.NnpLoader(nnp_file)
        nnp = nnp_graph.NnpLoader(nnp_file)
        for (ref_v, v) in zip(nnp_check(ref_nnp, 'left', ref_callback),
                              nnp_check(nnp, 'right', callback)):
            verify_equivalence(ref_v, v)
# NOTE(review): the three lines below are dataset-viewer boilerplate
# accidentally captured during extraction; kept as comments so they are
# not parsed as code.
# Subsets and Splits
# No community queries yet
# The top public SQL queries from the community will appear here once available.