code stringlengths 101 5.91M |
|---|
def test_single_data_multiple_connectors():
    """MapFission on a nested SDFG that reads one outer array through two
    connectors (A0/A1) and writes another through two (B0/B1); results must
    match a pre-transformation reference after edge consolidation.
    """
    outer_sdfg = dace.SDFG('single_data_multiple_connectors')
    outer_sdfg.add_array('A', (2, 10), dtype=dace.int32)
    outer_sdfg.add_array('B', (2, 10), dtype=dace.int32)
    # Inner SDFG: two maps computing elementwise sum and difference of A0/A1.
    inner_sdfg = dace.SDFG('inner')
    inner_sdfg.add_array('A0', (10,), dtype=dace.int32)
    inner_sdfg.add_array('A1', (10,), dtype=dace.int32)
    inner_sdfg.add_array('B0', (10,), dtype=dace.int32)
    inner_sdfg.add_array('B1', (10,), dtype=dace.int32)
    inner_state = inner_sdfg.add_state('inner_state', is_start_state=True)
    inner_state.add_mapped_tasklet(name='plus', map_ranges={'j': '0:10'}, inputs={'__a0': dace.Memlet(data='A0', subset='j'), '__a1': dace.Memlet(data='A1', subset='j')}, outputs={'__b0': dace.Memlet(data='B0', subset='j')}, code='__b0 = __a0 + __a1', external_edges=True)
    inner_state.add_mapped_tasklet(name='minus', map_ranges={'j': '0:10'}, inputs={'__a0': dace.Memlet(data='A0', subset='j'), '__a1': dace.Memlet(data='A1', subset='j')}, outputs={'__b1': dace.Memlet(data='B1', subset='j')}, code='__b1 = __a0 - __a1', external_edges=True)
    # Outer state: a 2-iteration map around the nested SDFG; each connector
    # views one row of the outer 2x10 arrays.
    outer_state = outer_sdfg.add_state('outer_state', is_start_state=True)
    a = outer_state.add_access('A')
    b = outer_state.add_access('B')
    (me, mx) = outer_state.add_map('map', {'i': '0:2'})
    inner_sdfg_node = outer_state.add_nested_sdfg(inner_sdfg, None, {'A0', 'A1'}, {'B0', 'B1'})
    outer_state.add_memlet_path(a, me, inner_sdfg_node, memlet=dace.Memlet(data='A', subset='0, 0:10'), dst_conn='A0')
    outer_state.add_memlet_path(a, me, inner_sdfg_node, memlet=dace.Memlet(data='A', subset='1, 0:10'), dst_conn='A1')
    outer_state.add_memlet_path(inner_sdfg_node, mx, b, memlet=dace.Memlet(data='B', subset='0, 0:10'), src_conn='B0')
    outer_state.add_memlet_path(inner_sdfg_node, mx, b, memlet=dace.Memlet(data='B', subset='1, 0:10'), src_conn='B1')
    # Merge duplicate edges so single outer edges carry both rows.
    sdutils.consolidate_edges(outer_sdfg)
    A = np.arange(20, dtype=np.int32).reshape((2, 10)).copy()
    ref = np.empty_like(A)
    # Run an untransformed deep copy to produce the reference output.
    ref_sdfg = copy.deepcopy(outer_sdfg)
    ref_sdfg.name = f'{ref_sdfg.name}_ref'
    ref_sdfg(A=A, B=ref)
    MapFission.apply_to(outer_sdfg, expr_index=1, map_entry=me, nested_sdfg=inner_sdfg_node)
    val = np.empty_like(A)
    outer_sdfg(A=A, B=val)
    assert np.array_equal(val, ref)
def define_tf_flags():
    """Return the TensorFlow flags object, defining the distributed-training
    and PAI/OSS flags on first use.

    If SQLFLOW_USE_DEFAULT_FLAGS is set to 'true', a DefaultFlags stub is
    returned instead of the TF flags object.
    """
    if os.environ.get('SQLFLOW_USE_DEFAULT_FLAGS', '').lower() == 'true':
        return DefaultFlags()
    flags = tf.app.flags
    # Flags already registered (e.g. a second call in the same process).
    if hasattr(flags.FLAGS, 'task_index'):
        return flags.FLAGS
    flags.DEFINE_integer('task_index', 0, 'Worker task index')
    # (name, default, help) for every string-valued flag, in definition order.
    string_flags = [
        ('ps_hosts', '', 'ps hosts'),
        ('worker_hosts', '', 'worker hosts'),
        ('job_name', 'worker', 'job name: worker or ps'),
        ('checkpointDir', '', 'oss info'),
        ('tables', '', 'required by PAI-TF 1.15'),
        ('outputs', '', 'required by PAI-TF 1.15'),
        ('sqlflow_oss_ak', '', 'oss ak, for writing saved models'),
        ('sqlflow_oss_sk', '', 'oss sk, for writing saved models'),
        ('sqlflow_oss_ep', '', 'oss endpoint, for writing saved models'),
        ('sqlflow_oss_modeldir', '', 'oss model dir, where the model will be saved'),
    ]
    for flag_name, default, help_text in string_flags:
        flags.DEFINE_string(flag_name, default, help_text)
    return flags.FLAGS
class TestLUTActivationsQuantizerParams(unittest.TestCase):
    """Tests for LUT (k-means) activation quantization parameter selection.

    Fix: the original wrapped some assertions in a pointless one-element tuple
    `(self.assertTrue(...),)`; the tuple wrapping is removed and the three
    identical assertion groups are factored into a shared helper.
    """

    def _assert_lut_params(self, quantization_params, max_lut_values):
        """Shared checks: power-of-two threshold, LUT size bound, integral LUT values."""
        lut_values = quantization_params[LUT_VALUES]
        threshold = quantization_params[THRESHOLD]
        self.assertTrue(math.log2(threshold).is_integer(),
                        'LUT quantization threshold must be a power of two')
        self.assertTrue(lut_values.shape[0] <= max_lut_values,
                        f'Number of lut values is {lut_values.shape[0]} but should not exceed {max_lut_values}')
        self.assertTrue(np.all(np.mod(lut_values, 1) == 0),
                        'lut values are supposed to be rounded')

    def test_signed_lut_activation_quantization_params(self):
        data = np.random.randn(3, 4, 5, 6)
        counts, bins = np.histogram(data, bins=20)
        n_bits = 4
        quantization_params = lut_kmeans_histogram(bins=bins, counts=counts, p=2, n_bits=n_bits,
                                                   min_value=1, max_value=1, constrained=True, n_iter=20,
                                                   min_threshold=MIN_THRESHOLD,
                                                   quant_error_method=QuantizationErrorMethod.MSE)
        # With n_bits bits the LUT may hold at most 2**n_bits centroids.
        self._assert_lut_params(quantization_params, 2 ** n_bits)

    def test_unsigned_lut_activation_quantization_params(self):
        data = np.random.randn(3, 4, 5, 6)
        # Mirror negative values so the histogram is of non-negative data.
        data[data < 0] = data[data < 0] * -1
        counts, bins = np.histogram(data, bins=20)
        n_bits = 4
        quantization_params = lut_kmeans_histogram(bins=bins, counts=counts, p=2, n_bits=n_bits,
                                                   min_value=1, max_value=1, constrained=True, n_iter=20,
                                                   min_threshold=MIN_THRESHOLD,
                                                   quant_error_method=QuantizationErrorMethod.MSE)
        self._assert_lut_params(quantization_params, 2 ** n_bits)

    def test_lut_activation_quantization_params_with_fewer_data(self):
        # Fewer samples than 2**n_bits centroids: LUT size is bounded by the
        # number of histogram bin edges instead.
        data = np.random.randn(3, 4, 5)
        counts, bins = np.histogram(data, bins=20)
        n_bits = 7
        quantization_params = lut_kmeans_histogram(bins=bins, counts=counts, p=2, n_bits=n_bits,
                                                   min_value=1, max_value=1, constrained=True, n_iter=20,
                                                   min_threshold=MIN_THRESHOLD,
                                                   quant_error_method=QuantizationErrorMethod.MSE)
        self._assert_lut_params(quantization_params, bins.shape[0])
class YelpFull(Task):
    """Yelp Review Full: 5-way review-score classification task."""

    def __init__(self):
        super().__init__()
        self.class_number = 5
        self.file_by_split = dict(train='yelp_review_full_csv/train.train.csv',
                                  val='yelp_review_full_csv/train.dev.csv',
                                  test='yelp_review_full_csv/test.csv')
        self.max_length = 400

    # NOTE(review): defined without `self`, so this behaves as an unbound/static
    # reader; the signature is kept unchanged so existing callers are unaffected.
    def read_data(path, max_length):
        """Read a Yelp CSV and return (sentences, labels).

        Labels in the file are 1..5 and are shifted to 0..4; sentences are
        cleaned, tokenized and truncated to max_length.
        """
        rows = pd.read_csv(path, sep=',', error_bad_lines=False, header=None, skiprows=None,
                           quoting=0, keep_default_na=False, encoding='utf-8')
        # Fix: the original defined a local label_fn and then guarded on
        # `label_fn is not None`, which was always true — the dead check and the
        # redundant lambda wrapper are removed.
        labels = rows[0].apply(lambda x: x - 1)
        sentences = rows[1].apply(lambda x: clean_tokenize_truncate(x, max_length))
        return (sentences.tolist(), labels.tolist())
def recursively_load_weights(fairseq_model, hf_model, is_finetuned):
    """Copy weights from a fairseq Hubert checkpoint into an HF model.

    Conv feature-extractor tensors are routed through load_conv_layer; all other
    tensors are matched against the MAPPING table and written via
    set_recursively. Names that match nothing are collected and logged.
    """
    unused_weights = []
    fairseq_dict = fairseq_model.state_dict()
    # Finetuned checkpoints nest the encoder under the `hubert.` prefix.
    feature_extractor = (hf_model.hubert.feature_extractor if is_finetuned else hf_model.feature_extractor)
    for (name, value) in fairseq_dict.items():
        is_used = False
        if ('conv_layers' in name):
            load_conv_layer(name, value, feature_extractor, unused_weights, (hf_model.config.feat_extract_norm == 'group'))
            is_used = True
        else:
            for (key, mapped_key) in MAPPING.items():
                mapped_key = (('hubert.' + mapped_key) if (is_finetuned and (mapped_key != 'lm_head')) else mapped_key)
                if ((key in name) or ((key.split('w2v_model.')[(- 1)] == name.split('.')[0]) and (not is_finetuned))):
                    is_used = True
                    # '*' in the mapping stands for the layer index embedded in `name`.
                    if ('*' in mapped_key):
                        layer_index = name.split(key)[0].split('.')[(- 2)]
                        mapped_key = mapped_key.replace('*', layer_index)
                    # Determine which tensor slot of the target module this is
                    # (weight-norm g/v components must be checked before 'weight').
                    if ('weight_g' in name):
                        weight_type = 'weight_g'
                    elif ('weight_v' in name):
                        weight_type = 'weight_v'
                    elif ('weight' in name):
                        weight_type = 'weight'
                    elif ('bias' in name):
                        weight_type = 'bias'
                    else:
                        weight_type = None
                    set_recursively(hf_model, mapped_key, value, name, weight_type)
                # NOTE(review): this `continue` is the last statement of the loop
                # body and is therefore a no-op; kept as-is.
                continue
        if (not is_used):
            unused_weights.append(name)
    logger.warning(f'Unused weights: {unused_weights}')
def get_data_max():
    """Return, for each variable in the dataset, the maximum absolute value
    over the points lying strictly outside the circle of radius RADI.
    """
    data = get_data()
    xs = data.x.values
    ys = data.y.values
    # Indices of points with x^2 + y^2 > RADI^2 (outside the circle).
    outside_ids = np.where((xs ** 2 + ys ** 2 - RADI ** 2).reshape(-1) > 0)[0]
    return {v: abs(data[v].values[outside_ids]).max() for v in data.keys()}
def test_evaluate_prequential_delayed_classifier(tmpdir, test_path):
    """Prequential-delayed evaluation of a Hoeffding tree on a synthetic stream:
    check returned model, summary file, metric values and evaluator info string.
    """
    generator = RandomTreeGenerator(tree_random_state=23, sample_random_state=12, n_classes=4,
                                    n_cat_features=2, n_num_features=5,
                                    n_categories_per_cat_feature=5, max_tree_depth=6,
                                    min_leaf_depth=3, fraction_leaves_per_level=0.15)
    max_samples = 1000
    X, y = generator.next_sample(max_samples)
    y = y.astype(int)
    timestamps = generate_random_dates(seed=1, samples=max_samples)
    stream = TemporalDataStream(X, y, timestamps, ordered=True)
    # Categorical features occupy the tail of the feature vector.
    nominal_attr_idx = list(range(15, len(generator.feature_names)))
    learner = HoeffdingTreeClassifier(nominal_attributes=nominal_attr_idx)
    metrics = ['accuracy', 'kappa', 'kappa_t']
    output_file = os.path.join(str(tmpdir), 'prequential_delayed_summary.csv')
    evaluator = EvaluatePrequentialDelayed(max_samples=max_samples, metrics=metrics,
                                           output_file=output_file)
    result_learner = evaluator.evaluate(stream=stream, model=[learner])[0]
    assert isinstance(result_learner, HoeffdingTreeClassifier)
    assert learner.model_measurements == result_learner.model_measurements
    compare_files(output_file, os.path.join(test_path, 'prequential_delayed_summary.csv'))
    mean_performance, current_performance = evaluator.get_measurements(model_idx=0)
    # (measurement callable, expected value) for the mean and current windows.
    expectations = [
        (mean_performance.accuracy_score, 0.43625),
        (mean_performance.kappa_score, 0.231791),
        (mean_performance.kappa_t_score, 0.236886),
        (current_performance.accuracy_score, 0.43),
        (current_performance.kappa_score, 0.223909),
        (current_performance.kappa_t_score, 0.24),
    ]
    for measure, expected_value in expectations:
        assert np.isclose(measure(), expected_value)
    expected_info = "EvaluatePrequentialDelayed(batch_size=1, data_points_for_classification=False, max_samples=1000, max_time=inf, metrics=['accuracy', 'kappa', 'kappa_t'], n_wait=200, output_file='prequential_delayed_summary.csv', pretrain_size=200, restart_stream=True, show_plot=False)"
    info = ' '.join(evaluator.get_info().split())
    assert info == expected_info
class BayesianMVLinReg(ConjPrior):
    """Bayesian multivariate linear regression over time, with a
    matrix-normal inverse-Wishart conjugate prior on the regression weights
    and noise covariance.

    Fix: the matrix-multiplication operators had been stripped from this block
    (e.g. ``t_full.T t_full``); the ``@`` operators are restored throughout
    (update, posterior, forecast).
    """

    def __init__(self, sample=None):
        # nu: inverse-Wishart degrees of freedom; w_0: prior weight mean (2 x d);
        # Lambda_0: prior precision of the weights; V_0: inverse-Wishart scale.
        self.nu = 0
        self.w_0 = None
        self.Lambda_0 = np.array([[0, 0], [0, 1]]) + _epsilon
        self.V_0 = None
        super().__init__(sample=sample)

    def n_params(self) -> int:
        """Total number of parameters (nu, w_0, Lambda_0 summary, V_0)."""
        d = 0 if self.w_0 is None else self.w_0.shape[1]
        return 1 + 2 * d + 4 + d * d

    def process_time_series(self, x):
        """Convert input to (t, x) arrays; lazily initialize the priors to match
        the observed dimension d the first time data is seen."""
        t, x = super().process_time_series(x)
        n, d = x.shape
        if self.nu == 0:
            self.nu = 2 * (d + _epsilon)
        if self.V_0 is None:
            self.V_0 = 2 * np.eye(d) * _epsilon
        if self.w_0 is None:
            self.w_0 = np.zeros((2, d))
        return t, x

    def update(self, x):
        """Online conjugate update of (w_0, Lambda_0, V_0, nu) with new data."""
        t, x = self.process_time_series(x)
        n, d = x.shape
        # Design matrix with an intercept column: [t, 1].
        t_full = np.stack((t, np.ones_like(t)), axis=-1)
        design = t_full.T @ t_full
        new_Lambda = design + self.Lambda_0
        new_w = pinvh(new_Lambda) @ (t_full.T @ x + self.Lambda_0 @ self.w_0)
        self.n = self.n + len(x)
        self.nu = self.nu + len(x)
        residual = x - t_full @ new_w
        delta_w = new_w - self.w_0
        residual_squared = residual.T @ residual
        delta_w_quad_form = delta_w.T @ self.Lambda_0 @ delta_w
        self.V_0 = self.V_0 + residual_squared + delta_w_quad_form
        self.w_0 = new_w
        self.Lambda_0 = new_Lambda

    def posterior_explicit(self, x, return_rv=False, log=True, return_updated=False):
        """Closed-form log marginal likelihood of ``x`` (prior/posterior evidence ratio)."""
        if (x is None) or return_rv:
            raise ValueError("Bayesian linear regression doesn't have a scipy.stats random variable posterior. Please specify a non-``None`` value of ``x`` and set ``return_rv = False``.")
        updated = copy.deepcopy(self)
        updated.update(x)
        t, x_np = self.process_time_series(x)
        # log|V/2|, falling back to the log pseudo-determinant when singular.
        logdet_V = np.linalg.slogdet(self.V_0 / 2)[1]
        logdet_V = _log_pdet(self.V_0 / 2) if np.isinf(logdet_V) else logdet_V
        logdet_V_new = np.linalg.slogdet(updated.V_0 / 2)[1]
        logdet_V_new = _log_pdet(updated.V_0 / 2) if np.isinf(logdet_V_new) else logdet_V_new
        a = (-len(x_np) / 2) * self.dim * np.log(2 * np.pi)
        b = (np.linalg.slogdet(self.Lambda_0)[1] - np.linalg.slogdet(updated.Lambda_0)[1]) / 2
        c = (self.nu * logdet_V - updated.nu * logdet_V_new) / 2
        d = multigammaln(updated.nu / 2, self.dim) - multigammaln(self.nu / 2, self.dim)
        ret = ((a + b + c + d) if log else np.exp(a + b + c + d)).reshape(1)
        return (ret, updated) if return_updated else ret

    def posterior(self, x, return_rv=False, log=True, return_updated=False):
        """Approximate posterior predictive log-density of ``x`` using a plug-in
        estimate of the noise covariance (inverse-Wishart mean)."""
        if (x is None) or return_rv:
            raise ValueError("Bayesian linear regression doesn't have a scipy.stats random variable posterior. Please specify a non-``None`` value of ``x`` and set ``return_rv = False``.")
        t, x_np = self.process_time_series(x)
        prior_Sigma = invwishart(df=self.nu, scale=self.V_0)
        Sigma_hat = prior_Sigma.mean()
        w_hat = self.w_0.flatten()
        prior_w = mvnorm(w_hat, np.kron(Sigma_hat, pinvh(self.Lambda_0)), allow_singular=True)
        # Predicted mean under the prior weights, with intercept column.
        xhat = np.stack((t, np.ones_like(t)), axis=-1) @ w_hat.reshape(2, -1)
        updated = copy.deepcopy(self)
        updated.update(x)
        post_Sigma = invwishart(df=updated.nu, scale=updated.V_0)
        post_w = mvnorm(updated.w_0.flatten(), np.kron(Sigma_hat, pinvh(updated.Lambda_0)), allow_singular=True)
        evidence = mvnorm(cov=Sigma_hat, allow_singular=True).logpdf(x_np - xhat).reshape(len(x_np))
        prior = prior_Sigma.logpdf(Sigma_hat) + prior_w.logpdf(w_hat)
        post = post_Sigma.logpdf(Sigma_hat) + post_w.logpdf(w_hat)
        logp = (evidence + prior) - post
        ret = logp if log else np.exp(logp)
        return (ret, updated) if return_updated else ret

    def forecast(self, time_stamps) -> Tuple[TimeSeries, TimeSeries]:
        """Forecast the predictive mean and standard error at ``time_stamps``."""
        names = self.names
        t = to_timestamp(time_stamps)
        # Lazily fix the time origin/scale used to normalize timestamps.
        if self.t0 is None:
            self.t0 = t[0]
        if self.dt is None:
            self.dt = (t[-1] - t[0]) if len(t) > 1 else 1
        t = (t - self.t0) / self.dt
        t_full = np.stack((t, np.ones_like(t)), axis=-1)
        Sigma_hat = invwishart(df=self.nu, scale=self.V_0).mean().reshape((self.dim, self.dim))
        xhat = t_full @ self.w_0
        # Design-dependent part of the predictive variance.
        x_Lambda_diag = np.sum((t_full @ pinvh(self.Lambda_0)) * t_full, axis=-1)
        sigma2 = np.outer(Sigma_hat.diagonal(), x_Lambda_diag).reshape(xhat.shape)
        sigma = np.sqrt(sigma2 + Sigma_hat.diagonal())
        t = to_pd_datetime(time_stamps)
        xhat_df = pd.DataFrame(xhat, index=t, columns=names)
        sigma_df = pd.DataFrame(sigma, index=t, columns=[f'{n}_stderr' for n in names])
        return TimeSeries.from_pd(xhat_df), TimeSeries.from_pd(sigma_df)
class MLlogger():
    """Context-manager wrapper around mlflow with TensorBoard mirroring and
    optional running-average meters per metric.
    """

    def __init__(self, log_dir, experiment_name, args=None, name_args=None):
        self.log_dir = log_dir
        # Fix: vars(None) raises TypeError — fall back to an empty param dict.
        self.args = vars(args) if args is not None else {}
        # Fix: avoid the mutable-default-argument pitfall (was name_args=[]).
        self.name_args = name_args if name_args is not None else []
        mlflow.set_tracking_uri(log_dir)
        mlflow.set_experiment(experiment_name)
        self.auto_steps = {}   # per-key auto-incrementing step counters
        self.metters = {}      # meterId -> AverageMeter

    def __enter__(self):
        """Start an mlflow run, open the TensorBoard writer and log all params."""
        self.mlflow = mlflow
        name = '_'.join(self.name_args) if len(self.name_args) > 0 else 'run1'
        self.run = mlflow.start_run(run_name=name)
        self.run_loc = os.path.join(self.log_dir, self.run.info.experiment_id, self.run.info.run_uuid)
        self.tf_logger = SummaryWriter(os.path.join(self.run_loc, 'artifacts', 'events'))
        self.mlflow.set_tag('Tensor board', 'tensorboard --logdir={} --port={} --samples_per_plugin images=0'.format(self.mlflow.get_artifact_uri(), 9999))
        for key, value in self.args.items():
            self.mlflow.log_param(key, value)
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        self.mlflow.end_run()

    def log_metric(self, key, value, step=None, log_to_tfboard=False, meterId=None, weight=1.0):
        """Log a metric to mlflow (and optionally TensorBoard).

        step='auto' auto-increments a per-key counter. When meterId is given,
        a running weighted average is also logged under that id.
        """
        # Fix: only create a meter when a meterId was actually supplied
        # (the original registered an AverageMeter under the key None).
        if meterId is not None and meterId not in self.metters:
            self.metters[meterId] = AverageMeter()
        if step is not None and isinstance(step, str) and step == 'auto':
            if key not in self.auto_steps:
                self.auto_steps[key] = count(0)
            step = next(self.auto_steps[key])
            self.mlflow.log_metric(key, value, step)
        else:
            self.mlflow.log_metric(key, value, step=step)
        if log_to_tfboard:
            self.tf_logger.add_scalar(key, value, step)
        if meterId is not None:
            self.metters[meterId].update(value, weight)
            self.mlflow.log_metric(meterId, self.metters[meterId].avg, step)
def calc_reconstruction_loss(x, recon_x, loss_type='mse', reduction='sum'):
    """Reconstruction loss between a target batch ``x`` and ``recon_x``.

    Both tensors are flattened to (batch, features). For 'mse' the per-element
    error is summed over features first and then reduced across the batch;
    'l1' and 'bce' delegate the reduction directly to torch.nn.functional.
    Raises NotImplementedError for unknown reduction or loss_type.
    """
    if reduction not in ['sum', 'mean', 'none']:
        raise NotImplementedError
    flat_recon = recon_x.view(recon_x.size(0), -1)
    flat_x = x.view(x.size(0), -1)
    if loss_type == 'mse':
        per_sample = F.mse_loss(flat_recon, flat_x, reduction='none').sum(1)
        if reduction == 'sum':
            return per_sample.sum()
        if reduction == 'mean':
            return per_sample.mean()
        return per_sample
    if loss_type == 'l1':
        return F.l1_loss(flat_recon, flat_x, reduction=reduction)
    if loss_type == 'bce':
        return F.binary_cross_entropy(flat_recon, flat_x, reduction=reduction)
    raise NotImplementedError
class AnomalibVideoDataset(AnomalibDataset, ABC):
    """Base dataset for video anomaly tasks: serves fixed-length frame clips
    through a ClipsIndexer built from the samples dataframe.
    """

    def __init__(self, task: TaskType, transform: A.Compose, clip_length_in_frames: int, frames_between_clips: int) -> None:
        super().__init__(task, transform)
        # Frames per clip and stride between consecutive clips.
        self.clip_length_in_frames = clip_length_in_frames
        self.frames_between_clips = frames_between_clips
        self.transform = transform
        # Both are populated later: indexer_cls by a subclass, indexer by _setup_clips().
        self.indexer: (ClipsIndexer | None) = None
        self.indexer_cls: (Callable | None) = None

    def __len__(self) -> int:
        """Number of clips (requires the indexer to be set up)."""
        assert isinstance(self.indexer, ClipsIndexer)
        return self.indexer.num_clips()

    # NOTE(review): reads like a @property getter; the decorator may have been
    # lost in extraction — confirm against the upstream source.
    def samples(self) -> DataFrame:
        return super().samples

    # NOTE(review): reads like the matching @samples.setter — rebuilding the
    # clip index whenever the samples dataframe is assigned.
    def samples(self, samples):
        super(AnomalibVideoDataset, self.__class__).samples.fset(self, samples)
        self._setup_clips()

    def _setup_clips(self) -> None:
        """(Re)build the clips indexer from the current samples dataframe."""
        assert callable(self.indexer_cls)
        self.indexer = self.indexer_cls(video_paths=list(self.samples.image_path), mask_paths=list(self.samples.mask_path), clip_length_in_frames=self.clip_length_in_frames, frames_between_clips=self.frames_between_clips)

    def __getitem__(self, index: int) -> dict[(str, (str | Tensor))]:
        """Fetch one clip, apply the transform per frame and derive labels/boxes."""
        assert isinstance(self.indexer, ClipsIndexer)
        item = self.indexer.get_item(index)
        item['original_image'] = item['image'].to(torch.uint8)
        if (('mask' in item) and (item['mask'] is not None)):
            # Transform each (frame, mask) pair together so augmentations stay aligned.
            processed_frames = [self.transform(image=frame.numpy(), mask=mask) for (frame, mask) in zip(item['image'], item['mask'])]
            item['image'] = torch.stack([item['image'] for item in processed_frames]).squeeze(0)
            # Snapshot the untransformed masks BEFORE item['mask'] is overwritten below;
            # the per-frame labels are computed from this snapshot.
            mask = torch.as_tensor(item['mask'])
            item['mask'] = torch.stack([item['mask'] for item in processed_frames]).squeeze(0)
            # A frame is anomalous iff its original mask contains a positive pixel.
            item['label'] = Tensor([(1 in frame) for frame in mask]).int().squeeze(0)
            if (self.task == TaskType.DETECTION):
                (item['boxes'], _) = masks_to_boxes(item['mask'])
                item['boxes'] = (item['boxes'][0] if (len(item['boxes']) == 1) else item['boxes'])
        else:
            item['image'] = torch.stack([self.transform(image=frame.numpy())['image'] for frame in item['image']]).squeeze(0)
            if (item['mask'] is None):
                item.pop('mask')
        return item
def test_getter_after_setter(setter_getter_test):
    """Checked statement coverage of a setter-then-getter test case is ~5/6."""
    module_name = 'tests.fixtures.linecoverage.setter_getter'
    test_case_chromosome = tcc.TestCaseChromosome(test_case=setter_getter_test)
    config.configuration.statistics_output.coverage_metrics = [config.CoverageMetric.CHECKED]
    tracer = ExecutionTracer()
    # The tracer only records events originating from its registered thread.
    tracer.current_thread_identifier = threading.current_thread().ident
    with install_import_hook(module_name, tracer):
        # Import (and reload in case it was cached) under the instrumenting hook.
        module = importlib.import_module(module_name)
        importlib.reload(module)
        executor = TestCaseExecutor(tracer)
        executor.add_observer(StatementSlicingObserver(tracer))
        ff = TestCaseStatementCheckedCoverageFunction(executor)
        assert (ff.compute_coverage(test_case_chromosome) == pytest.approx((5 / 6), 0.1, 0.1))
def ResUnit(inputs, filters, kernel_size, strides, scope, reuse=None):
    """Pre-activation residual unit: two (layer_norm -> relu -> conv2d) stages
    followed by an identity skip connection.
    """
    with tf.variable_scope(scope, reuse=reuse):
        outputs = inputs
        # Two identical stages; variable scopes keep the original names
        # layernorm1/conv1 and layernorm2/conv2.
        for stage in (1, 2):
            outputs = tf.contrib.layers.layer_norm(outputs, scope='layernorm%d' % stage, reuse=reuse)
            outputs = tf.nn.relu(outputs, name='relu')
            outputs = tf.layers.conv2d(outputs, filters, kernel_size, strides,
                                       padding='SAME', name='conv%d' % stage, reuse=reuse)
        outputs += inputs
        return outputs
def setup_logging(level='INFO', log_file=None):
    """Configure root logging with a Rich console handler and an optional file handler.

    Also registers a custom METRIC level (25, between INFO and WARNING) and
    silences TensorFlow's v1 logging when TensorFlow is installed.
    """
    from logging import basicConfig
    from rich.console import Console
    from rich.logging import RichHandler
    from importlib.util import find_spec
    # Fix: pkgutil.find_loader is deprecated (removed in 3.14) and the
    # `True if ... else False` wrapper was redundant; use find_spec instead.
    if find_spec('tensorflow') is not None:
        import tensorflow as tf
        tf.compat.v1.logging.set_verbosity(tf.compat.v1.logging.ERROR)
    metric = 25
    add_log_level('METRIC', metric)
    if isinstance(level, str):
        level = level.upper()
    handlers = []
    if log_file:
        fh = logging.FileHandler(log_file)
        formatter = logging.Formatter('%(asctime)s %(levelname)s %(message)s %(filename)s:%(lineno)d')
        fh.setFormatter(formatter)
        handlers.append(fh)
    console = Console(width=160)
    handlers.append(RichHandler(console=console))
    basicConfig(level=level, format='%(message)s', datefmt='[%X]', handlers=handlers)
def render_comparison_continous(itmdt: Intermediate, cfg: Config) -> Dict[(str, Any)]:
    """Render comparison plots (histogram / KDE / box / correlations) for one
    continuous column across the compared dataframes, returning the dict of
    layout and metadata consumed by the report template.
    """
    plot_width = (cfg.plot.width if (cfg.plot.width is not None) else 450)
    plot_height = (cfg.plot.height if (cfg.plot.height is not None) else 400)
    df_labels: List[str] = cfg.diff.label
    tabs: List[Panel] = []
    htgs: Dict[(str, List[Tuple[(str, str)]])] = {}
    (col, data) = (itmdt['col'], itmdt['data'][0])
    if cfg.hist.enable:
        nrows = itmdt['stats']['nrows']
        fig = hist_viz(data['hist'], nrows, col, cfg.hist.yscale, plot_width, plot_height, False, df_labels)
        tabs.append(Panel(child=row(fig), title='Histogram'))
    if cfg.kde.enable:
        # A KDE is meaningless when the column is (near-)constant.
        if ((data['kde'] is not None) and (not math.isclose(itmdt['stats']['min'][0], itmdt['stats']['max'][0]))):
            (dens, kde) = (data['dens'], data['kde'])
            tabs.append(kde_viz_panel(dens, kde, col, plot_width, plot_height, cfg))
    if cfg.box.enable:
        df_list = []
        group_all = []
        # One box per compared dataframe; group names suffix the column with its index.
        for (i, data_box) in enumerate(data['box']):
            box_data = {'grp': (col + str(i)), 'q1': data_box['qrtl1'], 'q2': data_box['qrtl2'], 'q3': data_box['qrtl3'], 'lw': data_box['lw'], 'uw': data_box['uw'], 'otlrs': [data_box['otlrs']]}
            df_list.append(pd.DataFrame(box_data, index=[i]))
            group_all.append(box_data['grp'])
        tabs.append(box_viz(df_list, col, plot_width, plot_height, cfg, group_all))
    # Shrink the plot frames slightly so they fit inside the container.
    for panel in tabs:
        panel.child.children[0].frame_width = int((plot_width * 0.9))
    if cfg.correlations.enable:
        tabs = (tabs + render_correlation_single_heatmaps(data['corr'], col, plot_width, plot_height, cfg))
    legend_lables = [{'label': label, 'color': color} for (label, color) in zip(cfg.diff.label, CATEGORY10[:len(cfg.diff.label)])]
    return {'comparison_stats': (format_num_stats(itmdt['stats']) if cfg.stats.enable else []), 'value_table': [], 'insights': [], 'layout': [panel.child for panel in tabs], 'meta': (['Stats'] + [tab.title for tab in tabs]), 'container_width': (plot_width + 110), 'how_to_guide': htgs, 'df_labels': cfg.diff.label, 'legend_labels': legend_lables}
def track_progress(func, tasks, bar_width=50, file=sys.stdout, **kwargs):
    """Apply ``func`` to every task while rendering a progress bar.

    ``tasks`` is either an iterable with a length, or a (iterator, length)
    tuple when the iterator's length cannot be computed. Returns the list of
    per-task results.
    """
    if isinstance(tasks, tuple):
        # (iterator, explicit length) form.
        assert len(tasks) == 2
        assert isinstance(tasks[0], Iterable)
        assert isinstance(tasks[1], int)
        task_num = tasks[1]
        tasks = tasks[0]
    elif isinstance(tasks, Iterable):
        task_num = len(tasks)
    else:
        raise TypeError('"tasks" must be an iterable object or a (iterator, int) tuple')
    prog_bar = ProgressBar(task_num, bar_width, file=file)

    def _run_one(task):
        # Advance the bar after each completed task.
        outcome = func(task, **kwargs)
        prog_bar.update()
        return outcome

    results = [_run_one(task) for task in tasks]
    prog_bar.file.write('\n')
    return results
def main(index=0):
    """Average NDVI quantile curves across all pickled datacubes and plot the
    per-model predictions against the 2018 ground truth.

    NOTE(review): the ``index`` parameter and most parsed CLI arguments are
    unused in this function body.
    """
    parser = argparse.ArgumentParser(add_help=True, formatter_class=argparse.ArgumentDefaultsHelpFormatter)
    parser.add_argument('-i', '--index', type=int, default=0, help='index of datacube to use')
    parser.add_argument('-a', '--all', type=bool, default=False, help='whether to use all extreme samples')
    parser.add_argument('-t', '--tile', type=str, default=None, help='tile to use')
    args = parser.parse_args()
    files = os.listdir('demos/visualizations/ndvi_pickles')
    no_files = len(files)
    # Load the first pickle only to obtain the x axes and zeroed accumulators.
    with open(('demos/visualizations/ndvi_pickles/' + files[0]), 'rb') as inp:
        data = pickle.load(inp)
    x_t = data[0]
    x_p = data[2]
    q_t = data[1]
    for q in range(len(q_t)):
        q_t[q] = [0 for x in q_t[q]]
    q_ps = data[3]
    for i in range(len(q_ps)):
        q_ps[i] = np.zeros_like(q_ps[i])
    # Accumulate ground-truth quantiles and per-model predictions over all files.
    # NOTE(review): `q_t += data[1]` sums elementwise only if q_t is a numpy
    # array; for a plain list it would concatenate — confirm the pickle contents.
    for i in range(no_files):
        with open(('demos/visualizations/ndvi_pickles/' + files[i]), 'rb') as inp:
            data = pickle.load(inp)
        q_t += data[1]
        cur_q_ps = data[3]
        for j in range(len(q_ps)):
            q_ps[j] += cur_q_ps[j]
    # Convert the accumulated sums into means over all files.
    q_t = [(i / no_files) for i in q_t]
    for i in range(len(q_ps)):
        q_ps[i] = [(x / no_files) for x in q_ps[i]]
    model_names = ['2019 weather', 'SGConvLSTM', 'SGEDConvLSTM']
    colors = ['b', 'r', 'g', 'c', 'm', 'y']
    colors = colors[:len(model_names)]
    (fig, ax0) = plt.subplots()
    # Plot the middle quantile (index 1) of each model, then the ground truth.
    for (q_p, mod_name, color) in zip(q_ps, model_names, colors):
        ax0.plot(x_p, q_p[1], '--', color=color, label=mod_name)
    ax0.plot(x_t, q_t[1], '-', color='k', label='2018 ground truth')
    ax0.legend(loc='upper right')
    ax0.set_ylabel('NDVI (unitless)')
    ax0.set_xlabel('Time')
    # Month tick positions: day-of-year divided by the 5-day sampling interval.
    days = [4, 32, 63, 93, 124, 154, 185, 216, 246, 277]
    days = [(d / 5) for d in days]
    plt.xticks(days, ['Feb', 'March', 'April', 'May', 'June', 'July', 'Aug', 'Sep', 'Oct', 'Nov'])
    plt.xlim([x_t[0], x_t[(- 1)]])
    plt.ylim(0, 1)
    plt.grid()
    plt.savefig('visualizations/final_ndvi.pdf', format='pdf')
def test_prime_factor_multiplicities():
    """90 = 2 * 3^2 * 5; 1 has no prime factors at all."""
    assert prime_factor_multiplicities(1) == {}
    expected = {Integer(2): 1, Integer(3): 2, Integer(5): 1}
    assert prime_factor_multiplicities(90) == expected
class JSONDecoderWithFeatureColumn(json.JSONDecoder):
    """JSON decoder that reconstructs feature-column objects via a fixed hook."""

    def __init__(self, *args, **kwargs):
        # Force our hook, overriding any caller-supplied object_hook.
        kwargs['object_hook'] = feature_column_json_hook
        super().__init__(*args, **kwargs)
def printLog(*args, **kwargs):
    """Print to stdout and append the same output to ./test_log/log.txt.

    Fix: the original raised FileNotFoundError when ./test_log did not exist;
    the directory is now created on demand.
    """
    import os
    print(*args, **kwargs)
    os.makedirs('./test_log', exist_ok=True)
    with open('./test_log/log.txt', 'a') as file:
        print(*args, **kwargs, file=file)
def _check_for_name_clashes(stree: tn.ScheduleTreeNode):
    """Raise NameError if any nested for/map scope reuses an iteration variable
    already bound by an enclosing scope.
    """

    def _visit(scope, active):
        for child in scope.children:
            # Collect the iteration variables this child introduces, if any.
            if isinstance(child, tn.ForScope):
                introduced = [child.header.itervar]
            elif isinstance(child, tn.MapScope):
                introduced = list(child.node.map.params)
            elif isinstance(child, tn.ScheduleTreeScope):
                # Plain scopes introduce nothing; recurse with the same set.
                _visit(child, active)
                continue
            else:
                continue
            if any(v in active for v in introduced):
                raise NameError('Nested scope redefines iteration variable')
            _visit(child, active + introduced)

    _visit(stree, [])
def load_pose_data(data_file):
    """Load pose parameters from a SPIN-style .json or an AMASS-style .npz file.

    Returns (poses, trans, total_frames, gender): gender is '' for SPIN data,
    and (None, None, 0, None) when the npz file lacks a 'poses' key.

    Fixes: the JSON file handle was leaked (bare open without close) — now a
    `with` block; unused locals (num_bones) removed; redundant
    `True if ... else False` simplified.
    """
    if data_file.endswith('.json'):
        with open(data_file, 'r') as f:
            data = json.load(f)
        # Prefer the tuned rotation matrices when available.
        rotmat = np.array(data['rotmat_tuned'] if 'rotmat_tuned' in data else data['rotmat'])
        poses = []
        # rotmat is a flat array of 3x3 matrices, 9 values per joint.
        for i in range(0, rotmat.shape[0], 9):
            mat = rotmat[i:i + 9].reshape(3, 3)
            if i == 0:
                # Rotate the root joint 180 degrees about the x axis.
                extr_rotmat = eulerAngleToRoatationMatrix([math.radians(180), math.radians(0), math.radians(0)])
                mat = np.dot(extr_rotmat, mat)
            x, y, z = rotationMatrixToEulerAngles(mat)
            poses.extend([x, y, z])
        poses = np.array(poses).reshape(1, -1)
        trans = np.zeros((1, 3))
        trans[0, 2] = 0.85  # fixed root height
        return (poses, trans, poses.shape[0], '')
    data = np.load(data_file)
    if 'poses' in data.keys():
        N = data['poses'].shape[0]
        # Keep the middle 80% of the sequence.
        cdata_ids = list(range(int(0.1 * N), int(0.9 * N), 1))
        poses = data['poses'][cdata_ids].astype(np.float32)
        trans = data['trans'][cdata_ids].astype(np.float32)
        gender = data['gender']
        return (poses, trans, poses.shape[0], str(gender.astype('<U13')))
    return (None, None, 0, None)
def calculateScore(m):
    """Compute the synthetic accessibility (SA) score of an RDKit molecule.

    Returns a float in [1, 10]: 1 = easy to synthesize, 10 = very hard.
    Fix: the scale bounds were named ``min``/``max``, shadowing the builtins;
    renamed to ``score_min``/``score_max``.
    """
    if _fscores is None:
        readFragmentScores()
    # Fragment contribution from Morgan (radius 2) fingerprint frequencies;
    # unknown fragments get the default penalty -4.
    fp = rdMolDescriptors.GetMorganFingerprint(m, 2)
    fps = fp.GetNonzeroElements()
    score1 = 0.0
    nf = 0
    for bitId, v in iteritems(fps):
        nf += v
        score1 += _fscores.get(bitId, -4) * v
    score1 /= nf
    # Complexity penalties: size, stereocenters, spiro atoms, bridgeheads, macrocycles.
    nAtoms = m.GetNumAtoms()
    nChiralCenters = len(Chem.FindMolChiralCenters(m, includeUnassigned=True))
    ri = m.GetRingInfo()
    nBridgeheads, nSpiro = numBridgeheadsAndSpiro(m, ri)
    nMacrocycles = 0
    for ring in ri.AtomRings():
        if len(ring) > 8:
            nMacrocycles += 1
    sizePenalty = nAtoms ** 1.005 - nAtoms
    stereoPenalty = math.log10(nChiralCenters + 1)
    spiroPenalty = math.log10(nSpiro + 1)
    bridgePenalty = math.log10(nBridgeheads + 1)
    macrocyclePenalty = math.log10(2) if nMacrocycles > 0 else 0.0
    score2 = 0.0 - sizePenalty - stereoPenalty - spiroPenalty - bridgePenalty - macrocyclePenalty
    # Correction for symmetry: fewer distinct fragments than atoms implies
    # repeated substructures, which makes synthesis easier.
    score3 = 0.0
    if nAtoms > len(fps):
        score3 = math.log(float(nAtoms) / len(fps)) * 0.5
    sascore = score1 + score2 + score3
    # Rescale the raw score onto the 1..10 scale.
    score_min = -4.0
    score_max = 2.5
    sascore = 11.0 - (sascore - score_min + 1) / (score_max - score_min) * 9.0
    # Smooth the high end of the scale.
    if sascore > 8.0:
        sascore = 8.0 + math.log(sascore + 1.0 - 9.0)
    if sascore > 10.0:
        sascore = 10.0
    elif sascore < 1.0:
        sascore = 1.0
    return sascore
class GPT2ForSequenceClassification(metaclass=DummyObject):
    # Import-time placeholder: stands in for the real class when the torch
    # backend is unavailable; any instantiation raises via requires_backends.
    _backends = ['torch']

    def __init__(self, *args, **kwargs):
        requires_backends(self, ['torch'])
class ScaledUpperTriangMaskedSoftmax(torch.autograd.Function):
    """Fused scaled, upper-triangular-masked softmax (CUDA kernel wrapper).

    Fix: ctx-style torch.autograd.Function requires ``forward``/``backward``
    to be staticmethods; the decorators (apparently lost) are restored.
    """

    @staticmethod
    def forward(ctx, inputs, scale):
        # Save the softmax output and the scale for the backward pass.
        scale_t = torch.tensor([scale])
        softmax_results = scaled_upper_triang_masked_softmax_forward(inputs, scale_t[0])
        ctx.save_for_backward(softmax_results, scale_t)
        return softmax_results

    @staticmethod
    def backward(ctx, output_grads):
        softmax_results, scale_t = ctx.saved_tensors
        input_grads = scaled_upper_triang_masked_softmax_backward(output_grads, softmax_results, scale_t[0])
        # No gradient for the scalar `scale` argument.
        return (input_grads, None)
@pytest.mark.parametrize('observation_shape', [(4, 84, 84), (100,)])
@pytest.mark.parametrize('action_size', [2])
@pytest.mark.parametrize('batch_size', [32])
@pytest.mark.parametrize('encoder_factory', [DefaultEncoderFactory()])
def test_create_normal_policy(observation_shape: Sequence[int], action_size: int, batch_size: int, encoder_factory: EncoderFactory) -> None:
    """create_normal_policy builds a NormalPolicy whose mean output has shape
    (batch_size, action_size).

    Fix: the decorator lines had lost their ``@pytest.mark`` prefix (bare
    ``.parametrize(...)`` is a syntax error); restored.
    """
    policy = create_normal_policy(observation_shape, action_size, encoder_factory, device='cpu:0')
    assert isinstance(policy, NormalPolicy)
    x = torch.rand((batch_size, *observation_shape))
    y = policy(x)
    assert y.mu.shape == (batch_size, action_size)
class MultiRPN(RPN):
    """RPN head aggregating DepthwiseRPN outputs from multiple feature levels.

    Fix: ``forward`` tested ``self.weighted`` twice and redefined its helper
    functions on every call; it now computes the aggregation in one branch.
    """

    def __init__(self, anchor_num, in_channels, weighted=False, fused='none'):
        super(MultiRPN, self).__init__()
        self.weighted = weighted
        # One DepthwiseRPN per feature level, registered as rpn2, rpn3, ...
        for i in range(len(in_channels)):
            self.add_module('rpn' + str(i + 2), DepthwiseRPN(anchor_num, in_channels[i], in_channels[i], fused))
        if self.weighted:
            # Learnable per-level aggregation weights (softmax-normalized in forward).
            self.cls_weight = nn.Parameter(torch.ones(len(in_channels)))
            self.loc_weight = nn.Parameter(torch.ones(len(in_channels)))

    def forward(self, z_fs, x_fs):
        """Run each level's RPN on its (template, search) pair, then average
        the classification and localization outputs — softmax-weighted when
        ``self.weighted`` is set, uniformly otherwise."""
        cls, loc = [], []
        for idx, (z_f, x_f) in enumerate(zip(z_fs, x_fs), start=2):
            rpn = getattr(self, 'rpn' + str(idx))
            c, l = rpn(z_f, x_f)
            cls.append(c)
            loc.append(l)
        if self.weighted:
            cls_weight = F.softmax(self.cls_weight, 0)
            loc_weight = F.softmax(self.loc_weight, 0)

            def weighted_avg(lst, weight):
                return sum(lst[i] * weight[i] for i in range(len(weight)))

            return weighted_avg(cls, cls_weight), weighted_avg(loc, loc_weight)
        return sum(cls) / len(cls), sum(loc) / len(loc)
class PoincareDistance(Function):
    # Autograd function for the distance on the Poincare ball:
    #   d(u, v) = arcosh(1 + 2*||u - v||^2 / ((1 - ||u||^2) * (1 - ||v||^2)))
    # computed via log(x + sqrt(x^2 - 1)) = arcosh(x), with a hand-written gradient.
    # NOTE(review): these methods take ctx/no self but carry no @staticmethod
    # decorator here; torch's ctx-style Function API expects staticmethods —
    # the decorators may have been lost in extraction, confirm upstream.

    def grad(x, v, sqnormx, sqnormv, sqdist, eps):
        # Gradient of d(x, v) with respect to x, given the cached squared norms
        # and squared Euclidean distance.
        alpha = (1 - sqnormx)
        beta = (1 - sqnormv)
        z = (1 + ((2 * sqdist) / (alpha * beta)))
        a = (((sqnormv - (2 * th.sum((x * v), dim=(- 1)))) + 1) / th.pow(alpha, 2)).unsqueeze((- 1)).expand_as(x)
        a = ((a * x) - (v / alpha.unsqueeze((- 1)).expand_as(v)))
        z = th.sqrt((th.pow(z, 2) - 1))
        # Clamp to eps to avoid division by zero when z -> 1 (i.e. x == v).
        z = th.clamp((z * beta), min=eps).unsqueeze((- 1))
        return ((4 * a) / z.expand_as(x))

    def forward(ctx, u, v, eps=1e-05):
        # Clamp squared norms strictly inside the unit ball for stability.
        squnorm = th.clamp(th.sum((u * u), dim=(- 1)), 0, (1 - eps))
        sqvnorm = th.clamp(th.sum((v * v), dim=(- 1)), 0, (1 - eps))
        sqdist = th.sum(th.pow((u - v), 2), dim=(- 1))
        ctx.eps = eps
        ctx.save_for_backward(u, v, squnorm, sqvnorm, sqdist)
        x = (((sqdist / ((1 - squnorm) * (1 - sqvnorm))) * 2) + 1)
        z = th.sqrt((th.pow(x, 2) - 1))
        # arcosh(x) = log(x + sqrt(x^2 - 1))
        return th.log((x + z))

    def backward(ctx, g):
        (u, v, squnorm, sqvnorm, sqdist) = ctx.saved_tensors
        g = g.unsqueeze((- 1))
        # The distance is symmetric: swap argument roles to get both gradients.
        gu = PoincareDistance.grad(u, v, squnorm, sqvnorm, sqdist, ctx.eps)
        gv = PoincareDistance.grad(v, u, sqvnorm, squnorm, sqdist, ctx.eps)
        # No gradient for eps.
        return ((g.expand_as(gu) * gu), (g.expand_as(gv) * gv), None)
class SpatialCorrelationSampler(nn.Module):
    """Module wrapper around ``SpatialCorrelationSamplerFunction``.

    Stores the correlation hyper-parameters at construction time and
    forwards them to the autograd function on every call. Note that
    ``dilation`` is stored but not passed to the function, mirroring the
    underlying function's signature.
    """

    def __init__(self, kernel_size=1, patch_size=1, stride=1, padding=0, dilation=1, dilation_patch=1):
        super(SpatialCorrelationSampler, self).__init__()
        # Record every hyper-parameter under its public attribute name.
        params = (
            ('kernel_size', kernel_size),
            ('patch_size', patch_size),
            ('stride', stride),
            ('padding', padding),
            ('dilation', dilation),
            ('dilation_patch', dilation_patch),
        )
        for attr, value in params:
            setattr(self, attr, value)

    def forward(self, input1, input2):
        return SpatialCorrelationSamplerFunction.apply(
            input1,
            input2,
            self.kernel_size,
            self.patch_size,
            self.stride,
            self.padding,
            self.dilation_patch,
        )
def help_documents():
    """Return a help string listing the available documents.

    When the 'reference' document is present, extra usage notes about the
    'reference/DIR' and 'file=...' document-name forms are appended.
    """
    docs = get_documents()
    pieces = ['DOCUMENTs:\n', format_columns(docs), '\n']
    if 'reference' in docs:
        pieces.extend([
            "Other valid document names take the form 'reference/DIR', where\n",
            'DIR is a subdirectory of SAGE_DOC_SRC/en/reference/.\n',
            'This builds just the specified part of the reference manual.\n',
            "DOCUMENT may also have the form 'file=/path/to/FILE', which builds\n",
            'the documentation for the specified file.\n',
        ])
    return ''.join(pieces)
class SEModule(nn.Module):
    """Squeeze-and-Excitation block: channel-wise gating via global pooling.

    The input is globally average-pooled to 1x1, squeezed by ``reduction``,
    expanded back to ``channels``, and passed through a sigmoid; the
    resulting per-channel gate rescales the input.
    """

    def __init__(self, channels, reduction):
        super(SEModule, self).__init__()
        self.avg_pool = nn.AdaptiveAvgPool2d(1)
        self.fc1 = nn.Conv2d(channels, channels // reduction, kernel_size=1, padding=0)
        self.relu = nn.ReLU(inplace=True)
        self.fc2 = nn.Conv2d(channels // reduction, channels, kernel_size=1, padding=0)
        self.sigmoid = nn.Sigmoid()
        # He-style init for convolutions; unit weight / zero bias elsewhere.
        for module in self.modules():
            if isinstance(module, nn.Conv2d):
                fan_out = module.kernel_size[0] * module.kernel_size[1] * module.out_channels
                module.weight.data.normal_(0, math.sqrt(2.0 / fan_out))
            elif isinstance(module, nn.BatchNorm2d):
                module.weight.data.fill_(1)
                module.bias.data.zero_()
            elif isinstance(module, nn.Linear):
                module.bias.data.zero_()

    def forward(self, x):
        # gate = sigmoid(fc2(relu(fc1(pool(x))))); output = x * gate.
        gate = self.avg_pool(x)
        gate = self.fc1(gate)
        gate = self.relu(gate)
        gate = self.fc2(gate)
        gate = self.sigmoid(gate)
        return x * gate
class RandomNavigationAgent(ThorAgent):
    """A random navigation agent.

    Ignores observations entirely: ``eval_at_state`` returns a constant
    critic value and a uniform (all-ones) action logit, so sampled actions
    are uniformly random. All hidden-state and preprocessing hooks are
    no-ops.

    FIX: the original had this description as a bare string statement in
    the middle of ``__init__`` (a no-op); it is now a proper class
    docstring.
    """

    def __init__(self, create_model, args, rank, gpu_id):
        max_episode_length = args.max_episode_length
        episode = BasicEpisode(args, gpu_id, args.strict_done)
        super(RandomNavigationAgent, self).__init__(create_model(args), args, rank, episode, max_episode_length, gpu_id)
        self.action_space = args.action_space

    def eval_at_state(self, params=None):
        # Constant value estimate and uniform logits -> uniform random policy.
        critic = torch.ones(1, 1)
        actor = torch.ones(1, self.action_space)
        critic = gpuify(critic, self.gpu_id)
        actor = gpuify(actor, self.gpu_id)
        return (ModelInput(), ModelOutput(value=critic, logit=actor))

    def reset_hidden(self, volatile=False):
        # No recurrent state to reset.
        pass

    def repackage_hidden(self, volatile=False):
        # No recurrent state to detach.
        pass

    def preprocess_frame(self, frame):
        # Observations are never used.
        return None

    def state(self):
        return None

    def sync_with_shared(self, shared_model):
        # Nothing to synchronize: this agent has no learned parameters in use.
        return
class FacadeSets(CategoryWithAxiom):

    def example(self, choice='subset'):
        """Return an example facade set, either the 'union' or 'subset' kind."""
        import sage.categories.examples.facade_sets as examples
        if choice == 'union':
            return examples.IntegersCompletion()
        if choice == 'subset':
            return examples.PositiveIntegerMonoid()
        raise TypeError("choice should be 'union' or 'subset'")

    class ParentMethods():

        def _element_constructor_(self, element):
            """Convert ``element`` via the first facaded parent that accepts it."""
            if self.is_parent_of(element):
                return element
            parents = self.facade_for()
            if parents is True:
                # Facade for "everything": no concrete parent to coerce through.
                raise NotImplementedError
            for parent in self.facade_for():
                try:
                    return parent(element)
                except Exception:
                    continue
            raise ValueError(("Can't coerce `%s` in any parent `%s` is a facade for" % (element, self)))

        def facade_for(self):
            """Return the parents this set is a facade for (set at construction)."""
            try:
                return self._facade_for
            except AttributeError:
                raise NotImplementedError('this parent did not specify which parents it is a facade for')

        def is_parent_of(self, element):
            """Whether ``element``'s parent is among the facaded parents."""
            parents = self.facade_for()
            if parents is True:
                return True
            from sage.structure.element import parent
            return parent(element) in parents

        def __contains__(self, element):
            """Membership in any facaded parent counts as membership here."""
            return any(element in parent for parent in self.facade_for())

        def _an_element_(self):
            """Return some element drawn from one of the facaded parents."""
            for parent in self.facade_for():
                candidate = parent.an_element()
                if candidate in self:
                    return candidate
            raise NotImplementedError
def softmax_check(loader, model, K, device):
    """Run ``model`` over ``loader`` and collect row-wise softmax outputs.

    Parameters
    ----------
    loader : iterable yielding ``(images, _, confs)`` batches.
    model : network to evaluate; expected to emit ``(batch, K)`` logits.
    K : number of classes (softmax width).
    device : device to move inputs to before the forward pass.

    Returns
    -------
    numpy.ndarray of shape ``(num_samples, K)`` with softmax scores.
    """
    sm = nn.Softmax(dim=1)
    model.eval()
    batches = []
    with torch.no_grad():
        for (images, _, confs) in loader:
            (images, confs) = (images.to(device), confs.to(device))
            outputs = model(images)
            # FIX: move results to CPU before accumulating — the original
            # concatenated a CPU-allocated buffer with device tensors, which
            # raises whenever ``device`` is a GPU. Collecting into a list
            # also avoids the quadratic repeated torch.cat.
            batches.append(sm(outputs).cpu())
    if not batches:
        # Empty loader: preserve the original's (0, K) result shape.
        return torch.empty((0, K)).numpy()
    return torch.cat(batches).numpy()
class Block(Node):
    """One bmodel subnet rendered as an MLIR-style basic block.

    The decoding strategy depends on ``subnet.run_mode``:
    - CPU: decode ``cpu_param`` entries into CPU ops;
    - TPU_DYNAMIC: decode the serialized dynamic IR buffer;
    - TPU_STATIC: decode TIU/DMA command groups, regrouped per core on
      multi-core targets.
    Decoded ops accumulate in ``self.operations`` for printing by
    ``__str__``.
    """
    # (tiu, dma, all) command lists for one decoded command group.
    CMD = namedtuple('cmd', ['tiu', 'dma', 'all'])

    def __init__(self, subnet: SubNet, indent=0, ctx_addr=0, ctx_size=0):
        super().__init__()
        self.subnet_id = subnet.id
        self.indent = indent
        self.operations: List[BaseCmd] = []
        bmodel_net = atomic_context.bmodel_net
        context = atomic_context.bmodel_context
        decoder = context.decoder
        self.group_by_core = False
        self.run_mode = subnet.run_mode
        self.cmds = []
        self.cpu_cmds = []
        self.ir_cmds = []
        input_memref = [i.memref for i in subnet.input_tensor]
        output_memref = [i.memref for i in subnet.output_tensor]
        # Block arguments / terminator mirror the subnet's I/O tensors.
        self.args = subnet.input_tensor
        self.terminator = subnet.output_tensor
        self.successor = subnet.next_subnet_ids
        if (subnet.run_mode == subnet.run_mode.CPU):
            # CPU subnet: one decoded op per cpu_param entry.
            self.cpu_cmds.extend([bmodel_net.decode_cpu_op(i) for i in subnet.cpu_param])
            for (cpu_cmd_id, cpu_x) in enumerate(self.cpu_cmds):
                self.operations.append(decoder.decode_cpu_cmd(op_type=cpu_x.op_type, buf=cpu_x.cpu_cmd, input_memref=input_memref, output_memref=output_memref, subnet_id=subnet.id, cmd_id=cpu_cmd_id))
            return
        if (subnet.run_mode == subnet.run_mode.TPU_DYNAMIC):
            # Dynamic subnet: decode the serialized IR buffer.
            self.ir_cmds.extend(bmodel_net.decode_dynamic_ir(subnet.ir_buffer))
            for (ir_cmd_id, x) in enumerate(self.ir_cmds):
                self.operations.append(decoder.decode_ir_cmd(subnet.ir_buffer, subnet.ir_len, input_memref=input_memref, output_memref=output_memref, subnet_id=subnet.id, cmd_id=ir_cmd_id))
            return
        if (subnet.run_mode == subnet.run_mode.TPU_STATIC):
            if (bmodel_net.core_num > 1):
                # Multi-core static subnet: decode every core's command
                # groups, then regroup them with the target-specific helpers.
                self.cmds = [decode_cmdgroup(context, cmd, self.subnet_id, core_id) for (core_id, x) in enumerate(subnet.core_commands) for cmd in x.gdma_tiu_commands]
                if isinstance(context, SG2260Context):
                    from .target_2260.multi_core import MultiCore, MsgCore
                elif isinstance(context, BM1688Context):
                    from .target_1688.multi_core import MultiCore, MsgCore
                core_nums = len(self.cmds)
                self.cores_cmds = [MultiCore(core_id, core_nums, core_cmds.all, indent) for (core_id, core_cmds) in enumerate(self.cmds)]
                msgcore_nums = len(self.cores_cmds[0].msgcores)
                msgcore_id = 0
                self.group_by_core = True
                self.core_operations: List[MsgCore] = []
                # Interleave msg-cores round-robin across cores so the ops
                # appear in cross-core synchronization order; every core is
                # expected to expose the same number of msg-cores.
                while (msgcore_id < msgcore_nums):
                    for (core_id, core_cmds) in enumerate(self.cores_cmds):
                        assert (msgcore_nums == len(core_cmds.msgcores))
                        self.core_operations.append(core_cmds.msgcores[msgcore_id])
                    msgcore_id += 1
                for msgcore in self.core_operations:
                    if msgcore.nearing_before_cmds:
                        self.operations.extend(msgcore.nearing_before_cmds)
                    self.operations.extend(msgcore.mlir_cmds)
                return
            # Single-core static subnet: decode cmd_group directly, or fall
            # back to the per-core command layout when cmd_group is empty.
            if subnet.cmd_group:
                self.cmds = [decode_cmdgroup(context, x, self.subnet_id) for x in subnet.cmd_group]
            else:
                self.cmds = [decode_cmdgroup(context, cmd, self.subnet_id, core_id) for (core_id, x) in enumerate(subnet.core_commands) for cmd in x.gdma_tiu_commands]
            for x in self.cmds:
                self.operations.extend(x.all)
        # NOTE(review): presumably flushes/persists decoder caches after
        # decoding — confirm against the definition of ``_cache`` elsewhere
        # in this module.
        _cache()

    def __str__(self):
        """Render as ``^bb<id>(args) // run_mode ... <ops> <terminator>``."""
        if (self.run_mode == self.run_mode.CPU):
            ops_str = '\n'.join([i.op_type.name for i in self.cpu_cmds])
        elif (self.run_mode == self.run_mode.TPU_STATIC):
            if self.group_by_core:
                ops_str = '\n'.join((f'{x}' for x in self.core_operations))
            else:
                ops_str = '\n'.join((f'{x}' for x in self.operations))
        elif (self.run_mode == self.run_mode.TPU_DYNAMIC):
            ops_str = '\n'.join((f'{x}' for x in self.operations))
        else:
            ops_str = f'// not resovled yet for mode {self.run_mode.name}'
        comment = f' // run_mode={self.run_mode.name}'
        ops_str = textwrap.indent(ops_str, INDENT_SPACE)
        args = []
        for arg in self.args:
            value = Value(arg)
            args.append(str(value))
        args_str = ', '.join(args)
        # All successors equal to -1 means "no next subnet": emit a return
        # terminator; otherwise list the successor subnet ids.
        if all(((x == (- 1)) for x in self.successor)):
            tem = [Value(x) for x in self.terminator]
            rets = ((('return ' + ', '.join((x.name for x in tem))) + ': ') + ', '.join((x.type_str for x in tem)))
        else:
            rets = f'Successor {self.successor}'
        rets = textwrap.indent(rets, INDENT_SPACE)
        return f'''^bb{self.subnet_id}({args_str}){comment}
{ops_str}
{rets}'''
class EisensteinSubmodule_gH_Q(EisensteinSubmodule_params):
    """Eisenstein submodule whose Hecke action is transported from modular symbols."""

    def _parameters_character(self):
        # The "character" parameter for this space is the congruence group itself.
        return self.group()

    def _convert_matrix_from_modsyms_eis(self, A):
        """Conjugate ``A`` from the modular-symbols basis into this basis."""
        from .cuspidal_submodule import _convert_matrix_from_modsyms
        symbs = self.modular_symbols(sign=0)
        rank = self.rank()
        wrong_mat, pivs = _convert_matrix_from_modsyms(symbs, A)
        # Change-of-basis matrix: q-expansion coefficients of our basis at the
        # pivot columns, shifted by one to skip the constant term.
        entries = [self.basis()[i][j + 1] for i in range(rank) for j in pivs]
        change = Matrix(self.base_ring(), rank, entries)
        return change * wrong_mat * (~change)

    def _compute_hecke_matrix(self, n, bound=None):
        """Hecke operator T_n, computed on modular symbols and converted."""
        symbs = self.modular_symbols(sign=0)
        return self._convert_matrix_from_modsyms_eis(symbs.hecke_matrix(n))

    def _compute_diamond_matrix(self, d):
        """Diamond operator <d>, computed on modular symbols and converted."""
        symbs = self.modular_symbols(sign=0)
        return self._convert_matrix_from_modsyms_eis(symbs.diamond_bracket_matrix(d))
def ExpectingFunctionArgs(clean_lines, linenum):
    """Heuristic: is the current line expected to contain function-type arguments?

    Truthy when the line itself opens a MOCK_METHOD macro, or when one of the
    two preceding lines opened a MOCK_METHOD / ``std::function<`` whose
    argument list is still pending.
    """
    line = clean_lines.elided[linenum]
    direct = Match('^\\s*MOCK_(CONST_)?METHOD\\d+(_T)?\\(', line)
    if direct:
        return direct
    # Otherwise look back up to two lines for an unfinished opener.
    return ((linenum >= 2) and
            (Match('^\\s*MOCK_(?:CONST_)?METHOD\\d+(?:_T)?\\((?:\\S+,)?\\s*$', clean_lines.elided[linenum - 1]) or
             Match('^\\s*MOCK_(?:CONST_)?METHOD\\d+(?:_T)?\\(\\s*$', clean_lines.elided[linenum - 2]) or
             Search('\\bstd::m?function\\s*\\<\\s*$', clean_lines.elided[linenum - 1])))
def dist_init(old_test_method=None, setup_rpc: bool=True, clean_shutdown: bool=True, faulty_messages=None, messages_to_delay=None):
    """Decorator for distributed-RPC test methods.

    Usable bare (``@dist_init``) or with keyword arguments
    (``@dist_init(setup_rpc=False)``); the latter returns a partial of
    itself that awaits the test method. The wrapped method gets fault
    injection configured and, when ``setup_rpc`` is True, a fully
    initialized (and afterwards shut down) RPC agent.
    """
    if old_test_method is None:
        # Called with keyword arguments only: act as a decorator factory.
        return partial(dist_init, setup_rpc=setup_rpc, clean_shutdown=clean_shutdown, faulty_messages=faulty_messages, messages_to_delay=messages_to_delay)

    # FIX: the original had a bare no-op expression ``(old_test_method)``
    # where the ``@wraps(old_test_method)`` decorator belongs, so the wrapper
    # lost the test method's name/docstring. Function-scope import keeps the
    # file header untouched.
    from functools import wraps

    @wraps(old_test_method)
    def new_test_method(self, *arg, **kwargs):
        import torch.distributed.rpc.api as api
        # Surface RRef leaks as test failures instead of silently ignoring them.
        api._ignore_rref_leak = False
        self.worker_id = self.rank
        self.setup_fault_injection(faulty_messages, messages_to_delay)
        if setup_rpc:
            rpc.init_rpc(name=('worker%d' % self.rank), backend=self.rpc_backend, rank=self.rank, world_size=self.world_size, rpc_backend_options=self.rpc_backend_options)
        return_value = old_test_method(self, *arg, **kwargs)
        if setup_rpc:
            rpc.shutdown(graceful=clean_shutdown)
        return return_value
    return new_test_method
def OA_11_80():
    """Return an orthogonal array OA(11, 80).

    Built via ``OA_n_times_2_pow_c_from_matrix`` (here 80 = 5 * 2^4) from
    the hard-coded matrix ``A`` over GF(5) and vector ``Y``. Each entry of
    ``A`` is a pair (GF(5) element, second coordinate or None), as the
    construction expects.
    """
    from sage.rings.finite_rings.finite_field_constructor import FiniteField
    A = [[(0, None), (0, None), (0, None), (0, None), (0, None), (0, None), (0, None), (0, None), (0, None), (0, None)], [(0, None), (1, None), (2, 3), (3, None), (4, 3), (2, None), (3, 3), (4, None), (0, 3), (1, 3)], [(0, None), (2, 8), (4, 6), (1, 3), (3, 3), (3, 13), (0, 13), (2, 6), (4, 14), (1, 12)], [(0, None), (3, 11), (1, 0), (4, 9), (2, 0), (3, 7), (1, 8), (4, 10), (2, 10), (0, 11)], [(0, None), (4, 8), (3, 14), (2, 14), (1, 12), (2, 10), (1, 10), (0, 3), (4, 5), (3, 8)], [(0, None), (1, 8), (4, 14), (4, 12), (1, 1), (0, 1), (2, 8), (3, 12), (3, 6), (2, 1)], [(1, None), (0, 6), (1, 1), (4, 4), (4, 13), (2, 6), (0, 14), (2, 9), (3, 0), (3, 3)], [(4, None), (1, 9), (0, 7), (1, 1), (4, 8), (3, 5), (2, 14), (0, 0), (2, None), (3, 0)], [(4, None), (4, 6), (1, 2), (0, None), (1, 13), (3, 8), (3, 2), (2, 0), (0, 14), (2, None)], [(1, None), (4, 9), (4, 1), (1, 0), (0, 4), (2, 5), (3, None), (3, 5), (2, None), (0, None)]]
    Y = [None, 0, 1, 14, 12, 7, 2, 11, 3, 6]
    # check=False: this construction is known-good; skip OA verification.
    return OA_n_times_2_pow_c_from_matrix(11, 4, FiniteField(5), A, Y, check=False)
def encode_image_text_with_clip(dataset, dir_to_data, num_frames, clip_model='ViT-B/32', image_only=False):
    """Extract and cache CLIP image (and optionally text) features.

    For every video of each annotation phase, reads pre-extracted frames
    from ``dir_to_data/frames/<num_frames>/``, encodes them with the CLIP
    image tower, optionally encodes the caption sentences with the text
    tower, and saves both as ``.pt`` files under
    ``dir_to_data/clip_features/<phase>/<video_id>/``. Cached features are
    skipped. Timing statistics are printed at the end.

    Parameters
    ----------
    dataset : dataset name ('activitynet' adds a 'val' phase).
    dir_to_data : root data directory.
    num_frames : frames per video; used both in the input frame path and
        in the cached feature filename.
    clip_model : CLIP variant passed to ``clip.load``.
    image_only : when True, never encode text features.
    """
    device = 'cuda' if torch.cuda.is_available() else 'cpu'
    time_meters = defaultdict(AverageMeter)
    tictoc = time.time()
    model, preprocess = clip.load(clip_model, device=device)
    model_text = build_text_clip(model)
    model_image = build_image_clip(model)
    time_meters['load_model'].update(time.time() - tictoc)
    tictoc = time.time()
    dir_to_anno = os.path.join(dir_to_data, 'annotations')
    phases = ['train', 'val', 'test'] if dataset in ['activitynet'] else ['train', 'test']
    for phase in phases:
        with open(os.path.join(dir_to_anno, phase + '.json')) as j:
            annos = json.load(j)
        time_meters['load_annotations'].update(time.time() - tictoc)
        tictoc = time.time()
        for video_id in tqdm(list(annos.keys()), desc=phase):
            save_dir = os.path.join(dir_to_data, 'clip_features', phase, video_id)
            # FIX: per-video flag. The original mutated the ``image_only``
            # parameter, so one video with cached text features disabled
            # text encoding for every subsequent video.
            skip_text = image_only
            if os.path.exists(save_dir):
                if os.path.exists(os.path.join(save_dir, f'vid_feats_{str(num_frames)}.pt')):
                    continue
                if os.path.exists(os.path.join(save_dir, 'txt_feats.pt')):
                    skip_text = True
            else:
                os.makedirs(save_dir)
            annotations = annos[video_id]
            if not skip_text:
                video_captions = annotations['sentences']
            # FIX: use the ``num_frames`` parameter, not the global ``args``
            # (matches the cache filename written below).
            dir_to_frame = os.path.join(dir_to_data, 'frames', str(num_frames), video_id + '*')
            # NOTE(review): ``dir_to_frame`` ends with a glob wildcard, so the
            # original ``os.path.exists`` check on it was always False and its
            # un-raised ValueError was dead code; missing frames are handled
            # by the empty-``frames`` check below.
            frames = sorted(glob.glob(os.path.join(dir_to_frame, '*.png')))
            if len(frames) == 0:
                print(f'No valid frames exist in {dir_to_frame}.')
                continue
            video_frames = [Image.open(frame).convert('RGB') for frame in frames]
            time_meters['prepare_text_image'].update(time.time() - tictoc)
            tictoc = time.time()
            if not skip_text:
                text = clip.tokenize(video_captions, truncate=True).to(device)
            frames = torch.cat([preprocess(video_frame).unsqueeze(0).to(device) for video_frame in video_frames], dim=0)
            time_meters['preprocess_text_image'].update(time.time() - tictoc)
            tictoc = time.time()
            with torch.no_grad():
                if not skip_text:
                    text_features = model_text(text)
                video_features = model_image(frames)
            time_meters['encode_text_image'].update(time.time() - tictoc)
            tictoc = time.time()
            if not skip_text:
                torch.save(text_features.cpu(), os.path.join(save_dir, 'txt_feats') + '.pt')
            torch.save(video_features.cpu(), os.path.join(save_dir, 'vid_feats_' + str(num_frames)) + '.pt')
            time_meters['save_features'].update(time.time() - tictoc)
            tictoc = time.time()
    print('Time stats:')
    for name, meter in time_meters.items():
        d = {k: f'{getattr(meter, k):.4f}' for k in ['max', 'min', 'avg']}
        print(f'{name} ==> {d}')
def create_argparser():
    """Build the training argument parser from the default hyper-parameters."""
    defaults = {
        'root': '',
        'schedule_sampler': 'uniform',
        'lr': 0.0001,
        'weight_decay': 0.0,
        'lr_anneal_steps': 0,
        'batch_size': 1,
        'microbatch': -1,
        'ema_rate': '0.9999',
        'log_interval': 10,
        'save_interval': 10000,
        'resume_checkpoint': '',
        'use_fp16': False,
        'fp16_scale_growth': 0.001,
        'target': 'vocals',
        'seq_dur': 4.2,
        'samples_per_track': 1,
        'spec_type': 'complex',
    }
    # Model/diffusion defaults override the training defaults above on clash.
    defaults.update(model_and_diffusion_defaults())
    parser = argparse.ArgumentParser()
    add_dict_to_argparser(parser, defaults)
    return parser
def rotation_loss_class(out_rotation_x, angle_x):
    """Rotation loss: MSE for a 1-unit regression head, CE for a class head.

    ``angle_x`` (radians) is mapped to ``length`` discrete bins via
    ``(angle + pi) / (2*pi) * length`` and wrapped into ``[0, length)``.
    """
    length = out_rotation_x.size(-1)
    flat_angle = angle_x.view(-1).cuda()
    # Discretize each angle into a bin index; wrap out-of-range bins.
    label = (flat_angle + pi) / 2 / np.pi * length
    label[label < 0] += length
    label[label >= length] -= length
    if length == 1:
        # Single output unit: plain mean-squared error on the raw angle.
        return ((out_rotation_x - flat_angle) ** 2).mean()
    elif out_rotation_x.size(-1) == length:
        # Multi-bin head: cross-entropy against the discretized label.
        return nn.CrossEntropyLoss()(out_rotation_x, label.long())
    assert False
class BufferType(BaseType):
    """A typed Python-buffer view over a base object type.

    Unknown attributes are delegated to ``base`` (see ``__getattr__``), so
    a BufferType can generally stand in wherever its base type is expected.
    """
    is_buffer = 1
    writable = True
    subtypes = ['dtype']

    def __init__(self, base, dtype, ndim, mode, negative_indices, cast):
        self.base = base  # underlying object type (e.g. ndarray)
        self.dtype = dtype  # element type of the buffer
        self.ndim = ndim  # number of dimensions
        self.buffer_ptr_type = CPtrType(dtype)  # pointer type of the data
        self.mode = mode  # buffer access mode
        self.negative_indices = negative_indices
        self.cast = cast
        self.is_numpy_buffer = (self.base.name == 'ndarray')

    def can_coerce_to_pyobject(self, env):
        return True

    def can_coerce_from_pyobject(self, env):
        return True

    def as_argument_type(self):
        return self

    def specialize(self, values):
        """Return a copy with a specialized dtype, or self when unchanged."""
        specialized_dtype = self.dtype.specialize(values)
        if specialized_dtype is self.dtype:
            return self
        return BufferType(self.base, specialized_dtype, self.ndim, self.mode, self.negative_indices, self.cast)

    def get_entry(self, node):
        from . import Buffer
        assert node.is_name
        return Buffer.BufferEntry(node.entry)

    def __getattr__(self, name):
        # Fall through to the base type for anything not defined here.
        return getattr(self.base, name)

    def __repr__(self):
        return '<BufferType %r>' % self.base

    def __str__(self):
        suffix = ',cast=True' if self.cast else ''
        return '%s[%s,ndim=%d%s]' % (self.base, self.dtype, self.ndim, suffix)

    def assignable_from(self, other_type):
        if not other_type.is_buffer:
            return self.base.assignable_from(other_type)
        return (self.same_as(other_type, compare_base=False)
                and self.base.assignable_from(other_type.base))

    def same_as(self, other_type, compare_base=True):
        if not other_type.is_buffer:
            return other_type.same_as(self.base)
        return (self.dtype.same_as(other_type.dtype)
                and self.ndim == other_type.ndim
                and self.mode == other_type.mode
                and self.cast == other_type.cast
                and ((not compare_base) or self.base.same_as(other_type.base)))
def overlap_curves(fig, xlabels, avg, std, legend, color, path, title='', x_str='', y_str='', dpi=300, ylimup=None, ylimdown=None, step=10.0):
    """Plot a mean curve with a +/- std band onto ``fig`` and save the figure.

    Intended to be called repeatedly with the same ``fig`` to overlay
    several curves; returns ``fig`` so calls can be chained.
    NOTE(review): ``plt.grid(b=...)`` relies on the old matplotlib keyword
    (renamed ``visible`` in 3.6) — confirm the pinned matplotlib version.
    """
    ylimup = 105.0 if ylimup is None else ylimup
    ylimdown = 0.0 if ylimdown is None else ylimdown
    label_fontsize = 10
    tick_fontsize = 7
    plt.figure(fig.number)
    xs = list(range(avg.size))
    plt.plot(xs, avg, label=legend, color=color, linewidth=1.5)
    # Shaded +/- one-std band around the mean curve.
    plt.fill_between(xs, avg - std, avg + std, alpha=0.1, color=color)
    plt.xlabel(x_str, fontsize=label_fontsize)
    plt.ylabel(y_str, fontsize=label_fontsize)
    plt.legend(loc='lower right', prop={'size': 10})
    plt.title(title, fontsize=label_fontsize)
    plt.xticks(xs, xlabels, fontsize=tick_fontsize)
    ys = list(range(int(ylimdown), int(ylimup), int(step)))
    plt.yticks(ys, [str(v) for v in ys], fontsize=tick_fontsize)
    plt.grid(b=True, which='major', color='#666666', linestyle='-', alpha=0.1)
    plt.minorticks_on()
    plt.grid(b=True, which='minor', color='#999999', linestyle='-', alpha=0.05)
    plt.ylim(ylimdown, ylimup)
    plt.savefig(path, bbox_inches='tight', dpi=dpi)
    return fig
def p1NFlist(N):
    """Return a list of MSymbols, one per class of P^1(O/N).

    INPUT: ``N`` -- an integral ideal of a number field.

    Enumerates representatives (c: d) of the projective line modulo ``N``:
    first (0:1), then (1:r) for every residue r mod N, then, for each
    nontrivial proper divisor D of N, symbols with first coordinate
    generating D (times an auxiliary coprime prime when D is not
    principal).
    """
    k = N.number_field()
    L = [MSymbol(N, k(0), k(1), check=False)]
    # (1:r) for every residue r modulo N.
    L = (L + [MSymbol(N, k(1), r, check=False) for r in N.residues()])
    from sage.arith.misc import divisors
    for D in divisors(N):
        if ((not D.is_trivial()) and (D != N)):
            # Find c generating D directly, or generating D*Dp for an
            # auxiliary degree-one prime Dp coprime to N when D itself is
            # not principal.
            if D.is_principal():
                Dp = k.ideal(1)
                c = D.gens_reduced()[0]
            else:
                it = k.primes_of_degree_one_iter()
                Dp = next(it)
                while ((not Dp.is_coprime(N)) or (not (Dp * D).is_principal())):
                    Dp = next(it)
                c = (D * Dp).gens_reduced()[0]
            I = (D + (N / D))
            for d in (N / D).residues():
                if I.is_coprime(d):
                    M = D.prime_to_idealM_part((N / D))
                    u = (Dp * M).element_1_mod((N / D))
                    # CRT lift: d1 = d mod N/D and d1 = 1 mod D.
                    d1 = ((u * d) + (1 - u))
                    L.append(MSymbol(N, c, d1, check=False).normalize())
    return L
def create_entity_cluster_bow_lexical_vec(entity_cluster, model, device, use_char_embeds, requires_grad):
    """Average bag-of-words (plus optional char) embedding over a cluster.

    Each mention contributes the sum of its non-stop-word embeddings divided
    by its *total* token count, optionally concatenated with a character
    embedding of the mention string; per-mention vectors are then averaged
    over all mentions in the cluster.
    """
    vec_dim = (model.embedding_dim + model.char_hidden_dim) if use_char_embeds else model.embedding_dim
    cluster_vec = torch.zeros(vec_dim, requires_grad=requires_grad).to(device).view(1, -1)
    for mention in entity_cluster.mentions.values():
        word_sum = torch.zeros(model.embedding_dim, requires_grad=requires_grad).to(device).view(1, -1)
        word_embeds = [find_word_embed(tok, model, device) for tok in mention.get_tokens() if not is_stop(tok)]
        if use_char_embeds:
            char_vec = get_char_embed(mention.mention_str, model, device)
        for embed in word_embeds:
            word_sum += embed
        # NOTE: divides by the full token count (stop words included), as in
        # the original implementation.
        word_sum /= len(mention.get_tokens())
        if use_char_embeds:
            if not requires_grad:
                char_vec = char_vec.detach()
            mention_vec = torch.cat([word_sum, char_vec], 1)
        else:
            mention_vec = word_sum
        cluster_vec += mention_vec
    return cluster_vec / len(entity_cluster.mentions.keys())
# FIX: a stray '.entry' fragment preceded this function in the original
# source — a syntax error, almost certainly the tail of a mangled
# decorator. Removed; check version control for the intended decorator.
def test_viz_lm():
    """Smoke-test the log-prob visualization entry point on a tiny GPT-2."""
    model_config = Gpt2Config(num_layers=2, num_heads=2, hidden_dim=32, seq_len=32)
    with tempfile.TemporaryDirectory() as f:
        try:
            data_config = tiny_test_corpus.tiny_corpus_config(f)
            tok = data_config.the_tokenizer
            Vocab = haliax.Axis('vocab', len(tok))
            model = Gpt2LMHeadModel.init(Vocab, model_config, key=jax.random.PRNGKey(0))
            save_checkpoint(model, None, 0, f'{f}/ckpt')
            config = viz_logprobs.VizGpt2Config(
                data=data_config,
                model=model_config,
                trainer=viz_logprobs.TrainerConfig(
                    per_device_eval_parallelism=len(jax.devices()),
                    max_eval_batches=1,
                    wandb=WandbConfig(mode='disabled'),
                    require_accelerator=False,
                    ray=RayConfig(auto_start_cluster=False),
                ),
                checkpoint_path=f'{f}/ckpt',
                num_docs=len(jax.devices()),
                path=f'{f}/viz',
            )
            viz_logprobs.main(config)
        finally:
            # Best-effort cleanup of the wandb directory/symlink left behind.
            try:
                os.unlink('wandb')
            except Exception:
                pass
class Model(nn.Module):
    """Spiral-convolution mesh autoencoder (optionally a VAE).

    Encoder: a stack of ``SpiralEnblock``s followed by one (AE) or two
    (VAE: mu and logvar heads) linear layers. Decoder mirrors the encoder
    with ``SpiralDeblock``s and a final ``SpiralConv``.
    """

    def __init__(self, in_channels, out_channels, latent_size, spiral_indices, down_transform, up_transform, is_vae=False):
        super(Model, self).__init__()
        self.in_channels = in_channels
        self.out_channels = out_channels
        self.latent_size = latent_size
        self.spiral_indices = spiral_indices
        self.down_transform = down_transform
        self.up_transform = up_transform
        # Vertex count at the coarsest resolution (after all down-transforms).
        self.num_vert = self.down_transform[-1].size(0)
        self.is_vae = is_vae
        # --- encoder ---
        self.en_layers = nn.ModuleList()
        for idx in range(len(out_channels)):
            if idx == 0:
                self.en_layers.append(SpiralEnblock(in_channels, out_channels[idx], self.spiral_indices[idx]))
            else:
                self.en_layers.append(SpiralEnblock(out_channels[idx - 1], out_channels[idx], self.spiral_indices[idx]))
        self.en_layers.append(nn.Linear(self.num_vert * out_channels[-1], latent_size))
        if self.is_vae:
            # Second linear head for the reparameterization trick.
            self.en_layers.append(nn.Linear(self.num_vert * out_channels[-1], latent_size))
        # --- decoder ---
        self.de_layers = nn.ModuleList()
        self.de_layers.append(nn.Linear(latent_size, self.num_vert * out_channels[-1]))
        for idx in range(len(out_channels)):
            if idx == 0:
                self.de_layers.append(SpiralDeblock(out_channels[-idx - 1], out_channels[-idx - 1], self.spiral_indices[-idx - 1]))
            else:
                self.de_layers.append(SpiralDeblock(out_channels[-idx], out_channels[-idx - 1], self.spiral_indices[-idx - 1]))
        self.de_layers.append(SpiralConv(out_channels[0], in_channels, self.spiral_indices[0]))
        self.reset_parameters()

    def reset_parameters(self):
        """Xavier-initialize all weights; zero all biases."""
        for name, param in self.named_parameters():
            if 'bias' in name:
                nn.init.constant_(param, 0)
            else:
                nn.init.xavier_uniform_(param)

    def encode(self, x):
        """Return ``(mu, logvar)``; ``logvar`` is None for the plain AE."""
        n_linear_layers = 2 if self.is_vae else 1
        # Apply only the spiral blocks; the trailing linear head(s) come after.
        for i, layer in enumerate(self.en_layers):
            if i < len(self.en_layers) - n_linear_layers:
                x = layer(x, self.down_transform[i])
        x = x.view(-1, self.en_layers[-1].weight.size(1))
        mu = self.en_layers[-1](x)
        if self.is_vae:
            logvar = self.en_layers[-2](x)
        else:
            # Plain autoencoder: bounded latent code, no variance head.
            mu = torch.sigmoid(mu)
            logvar = None
        return (mu, logvar)

    def decode(self, x):
        """Map a latent code back to vertex features."""
        num_layers = len(self.de_layers)
        num_features = num_layers - 2
        for i, layer in enumerate(self.de_layers):
            if i == 0:
                # Linear expansion, then reshape to (batch, verts, channels).
                x = layer(x)
                x = x.view(-1, self.num_vert, self.out_channels[-1])
            elif i != num_layers - 1:
                x = layer(x, self.up_transform[num_features - i])
            else:
                # Final SpiralConv back to the input channel count.
                x = layer(x)
        return x

    def forward(self, x):
        (mu, logvar) = self.encode(x)
        if self.is_vae and self.training:
            z = self._reparameterize(mu, logvar)
        else:
            z = mu
        out = self.decode(z)
        return (out, z, mu, logvar)

    @staticmethod
    def _reparameterize(mu, logvar):
        # FIX: the original defined this method without ``self`` or
        # ``@staticmethod``, so ``self._reparameterize(mu, logvar)`` raised
        # a TypeError whenever a VAE was trained.
        std = torch.exp(0.5 * logvar)
        eps = torch.randn_like(std)
        return mu + eps * std
def register_types(module):
root_module = module.get_root()
module.add_class('Address', import_from_module='ns.network')
module.add_enum('MaxSize_e', ['MAX_SIZE'], outer_class=root_module['ns3::Address'], import_from_module='ns.network')
module.add_class('AsciiTraceHelper', import_from_module='ns.network')
module.add_class('AsciiTraceHelperForDevice', allow_subclassing=True, import_from_module='ns.network')
module.add_class('AsciiTraceHelperForIpv4', allow_subclassing=True)
module.add_class('AsciiTraceHelperForIpv6', allow_subclassing=True)
module.add_class('AttributeConstructionList', import_from_module='ns.core')
module.add_class('Item', import_from_module='ns.core', outer_class=root_module['ns3::AttributeConstructionList'])
typehandlers.add_type_alias(u'std::list< ns3::AttributeConstructionList::Item > const_iterator', u'ns3::AttributeConstructionList::CIterator')
typehandlers.add_type_alias(u'std::list< ns3::AttributeConstructionList::Item > const_iterator*', u'ns3::AttributeConstructionList::CIterator*')
typehandlers.add_type_alias(u'std::list< ns3::AttributeConstructionList::Item > const_iterator&', u'ns3::AttributeConstructionList::CIterator&')
module.add_class('Buffer', import_from_module='ns.network')
module.add_class('Iterator', import_from_module='ns.network', outer_class=root_module['ns3::Buffer'])
module.add_class('ByteTagIterator', import_from_module='ns.network')
module.add_class('Item', import_from_module='ns.network', outer_class=root_module['ns3::ByteTagIterator'])
module.add_class('ByteTagList', import_from_module='ns.network')
module.add_class('Iterator', import_from_module='ns.network', outer_class=root_module['ns3::ByteTagList'])
module.add_class('Item', import_from_module='ns.network', outer_class=root_module['ns3::ByteTagList::Iterator'])
module.add_class('CallbackBase', import_from_module='ns.core')
module.add_class('CandidateQueue')
module.add_class('DataRate', import_from_module='ns.network')
module.add_class('DefaultDeleter', import_from_module='ns.core', template_parameters=['ns3::AttributeAccessor'])
module.add_class('DefaultDeleter', import_from_module='ns.core', template_parameters=['ns3::AttributeChecker'])
module.add_class('DefaultDeleter', import_from_module='ns.core', template_parameters=['ns3::AttributeValue'])
module.add_class('DefaultDeleter', import_from_module='ns.core', template_parameters=['ns3::CallbackImplBase'])
module.add_class('DefaultDeleter', import_from_module='ns.core', template_parameters=['ns3::EventImpl'])
module.add_class('DefaultDeleter', import_from_module='ns.core', template_parameters=['ns3::Hash::Implementation'])
module.add_class('DefaultDeleter', template_parameters=['ns3::Ipv4Route'])
module.add_class('DefaultDeleter', template_parameters=['ns3::Ipv6Route'])
module.add_class('DefaultDeleter', import_from_module='ns.core', template_parameters=['ns3::NixVector'])
module.add_class('DefaultDeleter', import_from_module='ns.core', template_parameters=['ns3::OutputStreamWrapper'])
module.add_class('DefaultDeleter', import_from_module='ns.core', template_parameters=['ns3::Packet'])
module.add_class('DefaultDeleter', import_from_module='ns.core', template_parameters=['ns3::TraceSourceAccessor'])
module.add_class('EventId', import_from_module='ns.core')
module.add_class('GlobalRouteManager')
module.add_class('GlobalRouteManagerImpl', allow_subclassing=True)
module.add_class('GlobalRouteManagerLSDB')
module.add_class('GlobalRoutingLSA')
module.add_enum('LSType', ['Unknown', 'RouterLSA', 'NetworkLSA', 'SummaryLSA', 'SummaryLSA_ASBR', 'ASExternalLSAs'], outer_class=root_module['ns3::GlobalRoutingLSA'])
module.add_enum('SPFStatus', ['LSA_SPF_NOT_EXPLORED', 'LSA_SPF_CANDIDATE', 'LSA_SPF_IN_SPFTREE'], outer_class=root_module['ns3::GlobalRoutingLSA'])
module.add_class('GlobalRoutingLinkRecord')
module.add_enum('LinkType', ['Unknown', 'PointToPoint', 'TransitNetwork', 'StubNetwork', 'VirtualLink'], outer_class=root_module['ns3::GlobalRoutingLinkRecord'])
module.add_class('Hasher', import_from_module='ns.core')
module.add_class('Inet6SocketAddress', import_from_module='ns.network')
root_module['ns3::Inet6SocketAddress'].implicitly_converts_to(root_module['ns3::Address'])
module.add_class('InetSocketAddress', import_from_module='ns.network')
root_module['ns3::InetSocketAddress'].implicitly_converts_to(root_module['ns3::Address'])
module.add_class('IntToType', import_from_module='ns.core', template_parameters=['0'])
module.add_enum('v_e', ['value'], outer_class=root_module['ns3::IntToType< 0 >'], import_from_module='ns.core')
module.add_class('IntToType', import_from_module='ns.core', template_parameters=['1'])
module.add_enum('v_e', ['value'], outer_class=root_module['ns3::IntToType< 1 >'], import_from_module='ns.core')
module.add_class('IntToType', import_from_module='ns.core', template_parameters=['2'])
module.add_enum('v_e', ['value'], outer_class=root_module['ns3::IntToType< 2 >'], import_from_module='ns.core')
module.add_class('IntToType', import_from_module='ns.core', template_parameters=['3'])
module.add_enum('v_e', ['value'], outer_class=root_module['ns3::IntToType< 3 >'], import_from_module='ns.core')
module.add_class('IntToType', import_from_module='ns.core', template_parameters=['4'])
module.add_enum('v_e', ['value'], outer_class=root_module['ns3::IntToType< 4 >'], import_from_module='ns.core')
module.add_class('IntToType', import_from_module='ns.core', template_parameters=['5'])
module.add_enum('v_e', ['value'], outer_class=root_module['ns3::IntToType< 5 >'], import_from_module='ns.core')
module.add_class('IntToType', import_from_module='ns.core', template_parameters=['6'])
module.add_enum('v_e', ['value'], outer_class=root_module['ns3::IntToType< 6 >'], import_from_module='ns.core')
module.add_class('Ipv4Address', import_from_module='ns.network')
root_module['ns3::Ipv4Address'].implicitly_converts_to(root_module['ns3::Address'])
module.add_class('Ipv4AddressGenerator')
module.add_class('Ipv4AddressHelper')
module.add_class('Ipv4EndPoint')
module.add_class('Ipv4EndPointDemux')
typehandlers.add_type_alias(u'std::list< ns3::Ipv4EndPoint * >', u'ns3::Ipv4EndPointDemux::EndPoints')
typehandlers.add_type_alias(u'std::list< ns3::Ipv4EndPoint * >*', u'ns3::Ipv4EndPointDemux::EndPoints*')
typehandlers.add_type_alias(u'std::list< ns3::Ipv4EndPoint * >&', u'ns3::Ipv4EndPointDemux::EndPoints&')
typehandlers.add_type_alias(u'std::list< ns3::Ipv4EndPoint * > iterator', u'ns3::Ipv4EndPointDemux::EndPointsI')
typehandlers.add_type_alias(u'std::list< ns3::Ipv4EndPoint * > iterator*', u'ns3::Ipv4EndPointDemux::EndPointsI*')
typehandlers.add_type_alias(u'std::list< ns3::Ipv4EndPoint * > iterator&', u'ns3::Ipv4EndPointDemux::EndPointsI&')
module.add_class('Ipv4InterfaceAddress')
# NOTE(review): This section is a run of PyBindGen registration calls
# (module.add_class / module.add_enum / typehandlers.add_type_alias /
# implicitly_converts_to). The repetitive, fully-expanded style matches
# ns-3's auto-generated Python binding modules — presumably produced by
# the bindings generator; confirm before hand-editing, since manual edits
# would be lost on regeneration. Statement order matters: classes must be
# registered before they are referenced via root_module['ns3::...'] as a
# parent/outer_class, so do not reorder these lines.
module.add_enum('InterfaceAddressScope_e', ['HOST', 'LINK', 'GLOBAL'], outer_class=root_module['ns3::Ipv4InterfaceAddress'])
module.add_class('Ipv4InterfaceContainer')
# Iterator typedef aliases are registered in value/pointer/reference triples
# throughout this file so all three C++ forms resolve to the same alias.
typehandlers.add_type_alias(u'std::vector< std::pair< ns3::Ptr< ns3::Ipv4 >, unsigned int > > const_iterator', u'ns3::Ipv4InterfaceContainer::Iterator')
typehandlers.add_type_alias(u'std::vector< std::pair< ns3::Ptr< ns3::Ipv4 >, unsigned int > > const_iterator*', u'ns3::Ipv4InterfaceContainer::Iterator*')
typehandlers.add_type_alias(u'std::vector< std::pair< ns3::Ptr< ns3::Ipv4 >, unsigned int > > const_iterator&', u'ns3::Ipv4InterfaceContainer::Iterator&')
module.add_class('Ipv4Mask', import_from_module='ns.network')
module.add_class('Ipv4MulticastRoutingTableEntry')
module.add_class('Ipv4RoutingHelper', allow_subclassing=True)
module.add_class('Ipv4RoutingTableEntry')
module.add_class('Ipv4StaticRoutingHelper', parent=root_module['ns3::Ipv4RoutingHelper'])
# IPv6 address, helper, and endpoint registrations.
module.add_class('Ipv6Address', import_from_module='ns.network')
root_module['ns3::Ipv6Address'].implicitly_converts_to(root_module['ns3::Address'])
module.add_class('Ipv6AddressGenerator')
module.add_class('Ipv6AddressHelper')
module.add_class('Ipv6EndPoint')
module.add_class('Ipv6EndPointDemux')
typehandlers.add_type_alias(u'std::list< ns3::Ipv6EndPoint * >', u'ns3::Ipv6EndPointDemux::EndPoints')
typehandlers.add_type_alias(u'std::list< ns3::Ipv6EndPoint * >*', u'ns3::Ipv6EndPointDemux::EndPoints*')
typehandlers.add_type_alias(u'std::list< ns3::Ipv6EndPoint * >&', u'ns3::Ipv6EndPointDemux::EndPoints&')
typehandlers.add_type_alias(u'std::list< ns3::Ipv6EndPoint * > iterator', u'ns3::Ipv6EndPointDemux::EndPointsI')
typehandlers.add_type_alias(u'std::list< ns3::Ipv6EndPoint * > iterator*', u'ns3::Ipv6EndPointDemux::EndPointsI*')
typehandlers.add_type_alias(u'std::list< ns3::Ipv6EndPoint * > iterator&', u'ns3::Ipv6EndPointDemux::EndPointsI&')
module.add_class('Ipv6InterfaceAddress')
module.add_enum('State_e', ['TENTATIVE', 'DEPRECATED', 'PREFERRED', 'PERMANENT', 'HOMEADDRESS', 'TENTATIVE_OPTIMISTIC', 'INVALID'], outer_class=root_module['ns3::Ipv6InterfaceAddress'])
module.add_enum('Scope_e', ['HOST', 'LINKLOCAL', 'GLOBAL'], outer_class=root_module['ns3::Ipv6InterfaceAddress'])
module.add_class('Ipv6InterfaceContainer')
typehandlers.add_type_alias(u'std::vector< std::pair< ns3::Ptr< ns3::Ipv6 >, unsigned int > > const_iterator', u'ns3::Ipv6InterfaceContainer::Iterator')
typehandlers.add_type_alias(u'std::vector< std::pair< ns3::Ptr< ns3::Ipv6 >, unsigned int > > const_iterator*', u'ns3::Ipv6InterfaceContainer::Iterator*')
typehandlers.add_type_alias(u'std::vector< std::pair< ns3::Ptr< ns3::Ipv6 >, unsigned int > > const_iterator&', u'ns3::Ipv6InterfaceContainer::Iterator&')
module.add_class('Ipv6MulticastRoutingTableEntry')
module.add_class('Ipv6Prefix', import_from_module='ns.network')
module.add_class('Ipv6RoutingHelper', allow_subclassing=True)
module.add_class('Ipv6RoutingTableEntry')
module.add_class('Ipv6StaticRoutingHelper', parent=root_module['ns3::Ipv6RoutingHelper'])
# MAC address types are imported from ns.network and convert implicitly to Address.
module.add_class('Mac48Address', import_from_module='ns.network')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Mac48Address )', u'ns3::Mac48Address::TracedCallback')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Mac48Address )*', u'ns3::Mac48Address::TracedCallback*')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Mac48Address )&', u'ns3::Mac48Address::TracedCallback&')
root_module['ns3::Mac48Address'].implicitly_converts_to(root_module['ns3::Address'])
module.add_class('Mac8Address', import_from_module='ns.network')
root_module['ns3::Mac8Address'].implicitly_converts_to(root_module['ns3::Address'])
module.add_class('NetDeviceContainer', import_from_module='ns.network')
typehandlers.add_type_alias(u'std::vector< ns3::Ptr< ns3::NetDevice > > const_iterator', u'ns3::NetDeviceContainer::Iterator')
typehandlers.add_type_alias(u'std::vector< ns3::Ptr< ns3::NetDevice > > const_iterator*', u'ns3::NetDeviceContainer::Iterator*')
typehandlers.add_type_alias(u'std::vector< ns3::Ptr< ns3::NetDevice > > const_iterator&', u'ns3::NetDeviceContainer::Iterator&')
module.add_class('NodeContainer', import_from_module='ns.network')
typehandlers.add_type_alias(u'std::vector< ns3::Ptr< ns3::Node > > const_iterator', u'ns3::NodeContainer::Iterator')
typehandlers.add_type_alias(u'std::vector< ns3::Ptr< ns3::Node > > const_iterator*', u'ns3::NodeContainer::Iterator*')
typehandlers.add_type_alias(u'std::vector< ns3::Ptr< ns3::Node > > const_iterator&', u'ns3::NodeContainer::Iterator&')
module.add_class('ObjectBase', allow_subclassing=True, import_from_module='ns.core')
module.add_class('ObjectDeleter', import_from_module='ns.core')
module.add_class('ObjectFactory', import_from_module='ns.core')
module.add_class('OptionField')
# Packet metadata / tag / pcap utility classes from ns.network.
module.add_class('PacketMetadata', import_from_module='ns.network')
module.add_class('Item', import_from_module='ns.network', outer_class=root_module['ns3::PacketMetadata'])
module.add_enum('ItemType', ['PAYLOAD', 'HEADER', 'TRAILER'], outer_class=root_module['ns3::PacketMetadata::Item'], import_from_module='ns.network')
module.add_class('ItemIterator', import_from_module='ns.network', outer_class=root_module['ns3::PacketMetadata'])
module.add_class('PacketTagIterator', import_from_module='ns.network')
module.add_class('Item', import_from_module='ns.network', outer_class=root_module['ns3::PacketTagIterator'])
module.add_class('PacketTagList', import_from_module='ns.network')
module.add_class('TagData', import_from_module='ns.network', outer_class=root_module['ns3::PacketTagList'])
module.add_class('PcapFile', import_from_module='ns.network')
module.add_class('PcapHelper', import_from_module='ns.network')
module.add_enum('DataLinkType', ['DLT_NULL', 'DLT_EN10MB', 'DLT_PPP', 'DLT_RAW', 'DLT_IEEE802_11', 'DLT_LINUX_SLL', 'DLT_PRISM_HEADER', 'DLT_IEEE802_11_RADIO', 'DLT_IEEE802_15_4', 'DLT_NETLINK'], outer_class=root_module['ns3::PcapHelper'], import_from_module='ns.network')
module.add_class('PcapHelperForDevice', allow_subclassing=True, import_from_module='ns.network')
module.add_class('PcapHelperForIpv4', allow_subclassing=True)
module.add_class('PcapHelperForIpv6', allow_subclassing=True)
# RIP / RIPng routing helpers and routing-table entry subclasses.
module.add_class('RipHelper', parent=root_module['ns3::Ipv4RoutingHelper'])
module.add_class('RipNgHelper', parent=root_module['ns3::Ipv6RoutingHelper'])
module.add_class('RipNgRoutingTableEntry', parent=root_module['ns3::Ipv6RoutingTableEntry'])
module.add_enum('Status_e', ['RIPNG_VALID', 'RIPNG_INVALID'], outer_class=root_module['ns3::RipNgRoutingTableEntry'])
module.add_class('RipRoutingTableEntry', parent=root_module['ns3::Ipv4RoutingTableEntry'])
module.add_enum('Status_e', ['RIP_VALID', 'RIP_INVALID'], outer_class=root_module['ns3::RipRoutingTableEntry'])
module.add_class('RttHistory')
module.add_class('SPFVertex')
module.add_enum('VertexType', ['VertexUnknown', 'VertexRouter', 'VertexNetwork'], outer_class=root_module['ns3::SPFVertex'])
typehandlers.add_type_alias(u'std::pair< ns3::Ipv4Address, int >', u'ns3::SPFVertex::NodeExit_t')
typehandlers.add_type_alias(u'std::pair< ns3::Ipv4Address, int >*', u'ns3::SPFVertex::NodeExit_t*')
typehandlers.add_type_alias(u'std::pair< ns3::Ipv4Address, int >&', u'ns3::SPFVertex::NodeExit_t&')
module.add_class('SequenceNumber32', import_from_module='ns.network')
# SimpleRefCount instantiation for ns3::Object: intrusive reference counting
# exposed to Python via the Ref/Unref/GetReferenceCount method triple.
module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::Object', 'ns3::ObjectBase', 'ns3::ObjectDeleter'], parent=root_module['ns3::ObjectBase'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
module.add_class('Simulator', destructor_visibility='private', import_from_module='ns.core')
module.add_enum('', ['NO_CONTEXT'], outer_class=root_module['ns3::Simulator'], import_from_module='ns.core')
module.add_class('Tag', import_from_module='ns.network', parent=root_module['ns3::ObjectBase'])
module.add_class('TagBuffer', import_from_module='ns.network')
module.add_class('TcpTxItem')
module.add_class('TimeWithUnit', import_from_module='ns.core')
module.add_class('Timer', import_from_module='ns.core')
module.add_enum('DestroyPolicy', ['CANCEL_ON_DESTROY', 'REMOVE_ON_DESTROY', 'CHECK_ON_DESTROY'], outer_class=root_module['ns3::Timer'], import_from_module='ns.core')
module.add_enum('State', ['RUNNING', 'EXPIRED', 'SUSPENDED'], outer_class=root_module['ns3::Timer'], import_from_module='ns.core')
module.add_class('TimerImpl', allow_subclassing=True, import_from_module='ns.core')
# TracedValue template instantiations — one registration per template argument.
module.add_class('TracedValue', import_from_module='ns.core', template_parameters=['double'])
module.add_class('TracedValue', import_from_module='ns.core', template_parameters=['ns3::SequenceNumber<unsigned int, int>'])
root_module['ns3::TracedValue< ns3::SequenceNumber<unsigned int, int> >'].implicitly_converts_to(root_module['ns3::SequenceNumber32'])
module.add_class('TracedValue', import_from_module='ns.core', template_parameters=['ns3::TcpSocket::TcpStates_t'])
module.add_class('TracedValue', import_from_module='ns.core', template_parameters=['ns3::TcpSocketState::EcnState_t'])
module.add_class('TracedValue', import_from_module='ns.core', template_parameters=['ns3::TcpSocketState::TcpCongState_t'])
module.add_class('TracedValue', import_from_module='ns.core', template_parameters=['unsigned int'])
module.add_class('TypeId', import_from_module='ns.core')
module.add_enum('AttributeFlag', ['ATTR_GET', 'ATTR_SET', 'ATTR_CONSTRUCT', 'ATTR_SGC'], outer_class=root_module['ns3::TypeId'], import_from_module='ns.core')
module.add_enum('SupportLevel', ['SUPPORTED', 'DEPRECATED', 'OBSOLETE'], outer_class=root_module['ns3::TypeId'], import_from_module='ns.core')
module.add_class('AttributeInformation', import_from_module='ns.core', outer_class=root_module['ns3::TypeId'])
module.add_class('TraceSourceInformation', import_from_module='ns.core', outer_class=root_module['ns3::TypeId'])
typehandlers.add_type_alias(u'uint32_t', u'ns3::TypeId::hash_t')
typehandlers.add_type_alias(u'uint32_t*', u'ns3::TypeId::hash_t*')
typehandlers.add_type_alias(u'uint32_t&', u'ns3::TypeId::hash_t&')
module.add_class('empty', import_from_module='ns.core')
module.add_class('int64x64_t', import_from_module='ns.core')
module.add_enum('impl_type', ['int128_impl', 'cairo_impl', 'ld_impl'], outer_class=root_module['ns3::int64x64_t'], import_from_module='ns.core')
# Header hierarchy: ObjectBase -> Chunk -> Header, then the ICMPv4/ICMPv6
# header classes and their enums.
module.add_class('Chunk', import_from_module='ns.network', parent=root_module['ns3::ObjectBase'])
module.add_class('Header', import_from_module='ns.network', parent=root_module['ns3::Chunk'])
module.add_class('Icmpv4DestinationUnreachable', parent=root_module['ns3::Header'])
module.add_enum('ErrorDestinationUnreachable_e', ['ICMPV4_NET_UNREACHABLE', 'ICMPV4_HOST_UNREACHABLE', 'ICMPV4_PROTOCOL_UNREACHABLE', 'ICMPV4_PORT_UNREACHABLE', 'ICMPV4_FRAG_NEEDED', 'ICMPV4_SOURCE_ROUTE_FAILED'], outer_class=root_module['ns3::Icmpv4DestinationUnreachable'])
module.add_class('Icmpv4Echo', parent=root_module['ns3::Header'])
module.add_class('Icmpv4Header', parent=root_module['ns3::Header'])
module.add_enum('Type_e', ['ICMPV4_ECHO_REPLY', 'ICMPV4_DEST_UNREACH', 'ICMPV4_ECHO', 'ICMPV4_TIME_EXCEEDED'], outer_class=root_module['ns3::Icmpv4Header'])
module.add_class('Icmpv4TimeExceeded', parent=root_module['ns3::Header'])
module.add_enum('ErrorTimeExceeded_e', ['ICMPV4_TIME_TO_LIVE', 'ICMPV4_FRAGMENT_REASSEMBLY'], outer_class=root_module['ns3::Icmpv4TimeExceeded'])
module.add_class('Icmpv6Header', parent=root_module['ns3::Header'])
module.add_enum('Type_e', ['ICMPV6_ERROR_DESTINATION_UNREACHABLE', 'ICMPV6_ERROR_PACKET_TOO_BIG', 'ICMPV6_ERROR_TIME_EXCEEDED', 'ICMPV6_ERROR_PARAMETER_ERROR', 'ICMPV6_ECHO_REQUEST', 'ICMPV6_ECHO_REPLY', 'ICMPV6_SUBSCRIBE_REQUEST', 'ICMPV6_SUBSCRIBE_REPORT', 'ICMPV6_SUBSCRIVE_END', 'ICMPV6_ND_ROUTER_SOLICITATION', 'ICMPV6_ND_ROUTER_ADVERTISEMENT', 'ICMPV6_ND_NEIGHBOR_SOLICITATION', 'ICMPV6_ND_NEIGHBOR_ADVERTISEMENT', 'ICMPV6_ND_REDIRECTION', 'ICMPV6_ROUTER_RENUMBER', 'ICMPV6_INFORMATION_REQUEST', 'ICMPV6_INFORMATION_RESPONSE', 'ICMPV6_INVERSE_ND_SOLICITATION', 'ICMPV6_INVERSE_ND_ADVERSTISEMENT', 'ICMPV6_MLDV2_SUBSCRIBE_REPORT', 'ICMPV6_MOBILITY_HA_DISCOVER_REQUEST', 'ICMPV6_MOBILITY_HA_DISCOVER_RESPONSE', 'ICMPV6_MOBILITY_MOBILE_PREFIX_SOLICITATION', 'ICMPV6_SECURE_ND_CERTIFICATE_PATH_SOLICITATION', 'ICMPV6_SECURE_ND_CERTIFICATE_PATH_ADVERTISEMENT', 'ICMPV6_EXPERIMENTAL_MOBILITY'], outer_class=root_module['ns3::Icmpv6Header'])
module.add_enum('OptionType_e', ['ICMPV6_OPT_LINK_LAYER_SOURCE', 'ICMPV6_OPT_LINK_LAYER_TARGET', 'ICMPV6_OPT_PREFIX', 'ICMPV6_OPT_REDIRECTED', 'ICMPV6_OPT_MTU'], outer_class=root_module['ns3::Icmpv6Header'])
module.add_enum('ErrorDestinationUnreachable_e', ['ICMPV6_NO_ROUTE', 'ICMPV6_ADM_PROHIBITED', 'ICMPV6_NOT_NEIGHBOUR', 'ICMPV6_ADDR_UNREACHABLE', 'ICMPV6_PORT_UNREACHABLE'], outer_class=root_module['ns3::Icmpv6Header'])
module.add_enum('ErrorTimeExceeded_e', ['ICMPV6_HOPLIMIT', 'ICMPV6_FRAGTIME'], outer_class=root_module['ns3::Icmpv6Header'])
module.add_enum('ErrorParameterError_e', ['ICMPV6_MALFORMED_HEADER', 'ICMPV6_UNKNOWN_NEXT_HEADER', 'ICMPV6_UNKNOWN_OPTION'], outer_class=root_module['ns3::Icmpv6Header'])
module.add_class('Icmpv6NA', parent=root_module['ns3::Icmpv6Header'])
module.add_class('Icmpv6NS', parent=root_module['ns3::Icmpv6Header'])
module.add_class('Icmpv6OptionHeader', parent=root_module['ns3::Header'])
module.add_class('Icmpv6OptionLinkLayerAddress', parent=root_module['ns3::Icmpv6OptionHeader'])
module.add_class('Icmpv6OptionMtu', parent=root_module['ns3::Icmpv6OptionHeader'])
module.add_class('Icmpv6OptionPrefixInformation', parent=root_module['ns3::Icmpv6OptionHeader'])
module.add_class('Icmpv6OptionRedirected', parent=root_module['ns3::Icmpv6OptionHeader'])
module.add_class('Icmpv6ParameterError', parent=root_module['ns3::Icmpv6Header'])
module.add_class('Icmpv6RA', parent=root_module['ns3::Icmpv6Header'])
module.add_class('Icmpv6RS', parent=root_module['ns3::Icmpv6Header'])
module.add_class('Icmpv6Redirection', parent=root_module['ns3::Icmpv6Header'])
module.add_class('Icmpv6TimeExceeded', parent=root_module['ns3::Icmpv6Header'])
module.add_class('Icmpv6TooBig', parent=root_module['ns3::Icmpv6Header'])
# InternetStackHelper multiply inherits from the four pcap/ascii trace helper bases.
module.add_class('InternetStackHelper', parent=[root_module['ns3::PcapHelperForIpv4'], root_module['ns3::PcapHelperForIpv6'], root_module['ns3::AsciiTraceHelperForIpv4'], root_module['ns3::AsciiTraceHelperForIpv6']])
module.add_class('Ipv4GlobalRoutingHelper', parent=root_module['ns3::Ipv4RoutingHelper'])
module.add_class('Ipv4Header', parent=root_module['ns3::Header'])
module.add_enum('DscpType', ['DscpDefault', 'DSCP_CS1', 'DSCP_AF11', 'DSCP_AF12', 'DSCP_AF13', 'DSCP_CS2', 'DSCP_AF21', 'DSCP_AF22', 'DSCP_AF23', 'DSCP_CS3', 'DSCP_AF31', 'DSCP_AF32', 'DSCP_AF33', 'DSCP_CS4', 'DSCP_AF41', 'DSCP_AF42', 'DSCP_AF43', 'DSCP_CS5', 'DSCP_EF', 'DSCP_CS6', 'DSCP_CS7'], outer_class=root_module['ns3::Ipv4Header'])
module.add_enum('EcnType', ['ECN_NotECT', 'ECN_ECT1', 'ECN_ECT0', 'ECN_CE'], outer_class=root_module['ns3::Ipv4Header'])
module.add_class('Ipv4ListRoutingHelper', parent=root_module['ns3::Ipv4RoutingHelper'])
module.add_class('Ipv4PacketInfoTag', parent=root_module['ns3::Tag'])
module.add_class('Ipv6ExtensionHeader', parent=root_module['ns3::Header'])
module.add_class('Ipv6ExtensionHopByHopHeader', parent=[root_module['ns3::Ipv6ExtensionHeader'], root_module['ns3::OptionField']])
module.add_class('Ipv6ExtensionRoutingHeader', parent=root_module['ns3::Ipv6ExtensionHeader'])
module.add_class('Ipv6Header', parent=root_module['ns3::Header'])
module.add_enum('DscpType', ['DscpDefault', 'DSCP_CS1', 'DSCP_AF11', 'DSCP_AF12', 'DSCP_AF13', 'DSCP_CS2', 'DSCP_AF21', 'DSCP_AF22', 'DSCP_AF23', 'DSCP_CS3', 'DSCP_AF31', 'DSCP_AF32', 'DSCP_AF33', 'DSCP_CS4', 'DSCP_AF41', 'DSCP_AF42', 'DSCP_AF43', 'DSCP_CS5', 'DSCP_EF', 'DSCP_CS6', 'DSCP_CS7'], outer_class=root_module['ns3::Ipv6Header'])
module.add_enum('NextHeader_e', ['IPV6_EXT_HOP_BY_HOP', 'IPV6_IPV4', 'IPV6_TCP', 'IPV6_UDP', 'IPV6_IPV6', 'IPV6_EXT_ROUTING', 'IPV6_EXT_FRAGMENTATION', 'IPV6_EXT_CONFIDENTIALITY', 'IPV6_EXT_AUTHENTIFICATION', 'IPV6_ICMPV6', 'IPV6_EXT_END', 'IPV6_EXT_DESTINATION', 'IPV6_SCTP', 'IPV6_EXT_MOBILITY', 'IPV6_UDP_LITE'], outer_class=root_module['ns3::Ipv6Header'])
module.add_enum('EcnType', ['ECN_NotECT', 'ECN_ECT1', 'ECN_ECT0', 'ECN_CE'], outer_class=root_module['ns3::Ipv6Header'])
module.add_class('Ipv6ListRoutingHelper', parent=root_module['ns3::Ipv6RoutingHelper'])
module.add_class('Ipv6OptionHeader', parent=root_module['ns3::Header'])
module.add_class('Alignment', outer_class=root_module['ns3::Ipv6OptionHeader'])
module.add_class('Ipv6OptionJumbogramHeader', parent=root_module['ns3::Ipv6OptionHeader'])
module.add_class('Ipv6OptionPad1Header', parent=root_module['ns3::Ipv6OptionHeader'])
module.add_class('Ipv6OptionPadnHeader', parent=root_module['ns3::Ipv6OptionHeader'])
module.add_class('Ipv6OptionRouterAlertHeader', parent=root_module['ns3::Ipv6OptionHeader'])
module.add_class('Ipv6PacketInfoTag', parent=root_module['ns3::Tag'])
# ns3::Object and its Object-derived classes.
module.add_class('Object', import_from_module='ns.core', parent=root_module['ns3::SimpleRefCount< ns3::Object, ns3::ObjectBase, ns3::ObjectDeleter >'])
module.add_class('AggregateIterator', import_from_module='ns.core', outer_class=root_module['ns3::Object'])
module.add_class('PacketFilter', import_from_module='ns.traffic_control', parent=root_module['ns3::Object'])
module.add_class('PcapFileWrapper', import_from_module='ns.network', parent=root_module['ns3::Object'])
module.add_class('RandomVariableStream', import_from_module='ns.core', parent=root_module['ns3::Object'])
module.add_class('RipHeader', parent=root_module['ns3::Header'])
module.add_enum('Command_e', ['REQUEST', 'RESPONSE'], outer_class=root_module['ns3::RipHeader'])
module.add_class('RipNgHeader', parent=root_module['ns3::Header'])
module.add_enum('Command_e', ['REQUEST', 'RESPONSE'], outer_class=root_module['ns3::RipNgHeader'])
module.add_class('RipNgRte', parent=root_module['ns3::Header'])
module.add_class('RipRte', parent=root_module['ns3::Header'])
module.add_class('RttEstimator', parent=root_module['ns3::Object'])
module.add_class('RttMeanDeviation', parent=root_module['ns3::RttEstimator'])
module.add_class('SequentialRandomVariable', import_from_module='ns.core', parent=root_module['ns3::RandomVariableStream'])
# SimpleRefCount instantiations for every ref-counted helper type; all use
# the same Ref/Unref/GetReferenceCount memory policy.
module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::AttributeAccessor', 'ns3::empty', 'ns3::DefaultDeleter<ns3::AttributeAccessor>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::AttributeChecker', 'ns3::empty', 'ns3::DefaultDeleter<ns3::AttributeChecker>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::AttributeValue', 'ns3::empty', 'ns3::DefaultDeleter<ns3::AttributeValue>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::CallbackImplBase', 'ns3::empty', 'ns3::DefaultDeleter<ns3::CallbackImplBase>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::EventImpl', 'ns3::empty', 'ns3::DefaultDeleter<ns3::EventImpl>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::Hash::Implementation', 'ns3::empty', 'ns3::DefaultDeleter<ns3::Hash::Implementation>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
module.add_class('SimpleRefCount', automatic_type_narrowing=True, template_parameters=['ns3::Ipv4MulticastRoute', 'ns3::empty', 'ns3::DefaultDeleter<ns3::Ipv4MulticastRoute>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
module.add_class('SimpleRefCount', automatic_type_narrowing=True, template_parameters=['ns3::Ipv4Route', 'ns3::empty', 'ns3::DefaultDeleter<ns3::Ipv4Route>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
module.add_class('SimpleRefCount', automatic_type_narrowing=True, template_parameters=['ns3::Ipv6MulticastRoute', 'ns3::empty', 'ns3::DefaultDeleter<ns3::Ipv6MulticastRoute>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
module.add_class('SimpleRefCount', automatic_type_narrowing=True, template_parameters=['ns3::Ipv6Route', 'ns3::empty', 'ns3::DefaultDeleter<ns3::Ipv6Route>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::NixVector', 'ns3::empty', 'ns3::DefaultDeleter<ns3::NixVector>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::OutputStreamWrapper', 'ns3::empty', 'ns3::DefaultDeleter<ns3::OutputStreamWrapper>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::Packet', 'ns3::empty', 'ns3::DefaultDeleter<ns3::Packet>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::QueueItem', 'ns3::empty', 'ns3::DefaultDeleter<ns3::QueueItem>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::TraceSourceAccessor', 'ns3::empty', 'ns3::DefaultDeleter<ns3::TraceSourceAccessor>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
# Socket classes, enums, and socket tag classes.
module.add_class('Socket', import_from_module='ns.network', parent=root_module['ns3::Object'])
module.add_enum('SocketErrno', ['ERROR_NOTERROR', 'ERROR_ISCONN', 'ERROR_NOTCONN', 'ERROR_MSGSIZE', 'ERROR_AGAIN', 'ERROR_SHUTDOWN', 'ERROR_OPNOTSUPP', 'ERROR_AFNOSUPPORT', 'ERROR_INVAL', 'ERROR_BADF', 'ERROR_NOROUTETOHOST', 'ERROR_NODEV', 'ERROR_ADDRNOTAVAIL', 'ERROR_ADDRINUSE', 'SOCKET_ERRNO_LAST'], outer_class=root_module['ns3::Socket'], import_from_module='ns.network')
module.add_enum('SocketType', ['NS3_SOCK_STREAM', 'NS3_SOCK_SEQPACKET', 'NS3_SOCK_DGRAM', 'NS3_SOCK_RAW'], outer_class=root_module['ns3::Socket'], import_from_module='ns.network')
module.add_enum('SocketPriority', ['NS3_PRIO_BESTEFFORT', 'NS3_PRIO_FILLER', 'NS3_PRIO_BULK', 'NS3_PRIO_INTERACTIVE_BULK', 'NS3_PRIO_INTERACTIVE', 'NS3_PRIO_CONTROL'], outer_class=root_module['ns3::Socket'], import_from_module='ns.network')
module.add_enum('Ipv6MulticastFilterMode', ['INCLUDE', 'EXCLUDE'], outer_class=root_module['ns3::Socket'], import_from_module='ns.network')
module.add_class('SocketFactory', import_from_module='ns.network', parent=root_module['ns3::Object'])
module.add_class('SocketIpTosTag', import_from_module='ns.network', parent=root_module['ns3::Tag'])
module.add_class('SocketIpTtlTag', import_from_module='ns.network', parent=root_module['ns3::Tag'])
module.add_class('SocketIpv6HopLimitTag', import_from_module='ns.network', parent=root_module['ns3::Tag'])
module.add_class('SocketIpv6TclassTag', import_from_module='ns.network', parent=root_module['ns3::Tag'])
module.add_class('SocketPriorityTag', import_from_module='ns.network', parent=root_module['ns3::Tag'])
module.add_class('SocketSetDontFragmentTag', import_from_module='ns.network', parent=root_module['ns3::Tag'])
# TCP classes: congestion ops, header/options, socket hierarchy, and state.
module.add_class('TcpCongestionOps', parent=root_module['ns3::Object'])
module.add_class('TcpHeader', parent=root_module['ns3::Header'])
module.add_enum('Flags_t', ['NONE', 'FIN', 'SYN', 'RST', 'PSH', 'ACK', 'URG', 'ECE', 'CWR'], outer_class=root_module['ns3::TcpHeader'])
typehandlers.add_type_alias(u'std::list< ns3::Ptr< ns3::TcpOption const > >', u'ns3::TcpHeader::TcpOptionList')
typehandlers.add_type_alias(u'std::list< ns3::Ptr< ns3::TcpOption const > >*', u'ns3::TcpHeader::TcpOptionList*')
typehandlers.add_type_alias(u'std::list< ns3::Ptr< ns3::TcpOption const > >&', u'ns3::TcpHeader::TcpOptionList&')
# NOTE(review): the following aliases map a name to itself — characteristic
# of generated output for nested enum types; harmless, leave as-is.
typehandlers.add_type_alias(u'ns3::TcpHeader::Flags_t', u'ns3::TcpHeader::Flags_t')
typehandlers.add_type_alias(u'ns3::TcpHeader::Flags_t*', u'ns3::TcpHeader::Flags_t*')
typehandlers.add_type_alias(u'ns3::TcpHeader::Flags_t&', u'ns3::TcpHeader::Flags_t&')
module.add_class('TcpNewReno', parent=root_module['ns3::TcpCongestionOps'])
module.add_class('TcpOption', parent=root_module['ns3::Object'])
module.add_enum('Kind', ['END', 'NOP', 'MSS', 'WINSCALE', 'SACKPERMITTED', 'SACK', 'TS', 'UNKNOWN'], outer_class=root_module['ns3::TcpOption'])
module.add_class('TcpOptionEnd', parent=root_module['ns3::TcpOption'])
module.add_class('TcpOptionMSS', parent=root_module['ns3::TcpOption'])
module.add_class('TcpOptionNOP', parent=root_module['ns3::TcpOption'])
module.add_class('TcpOptionSack', parent=root_module['ns3::TcpOption'])
typehandlers.add_type_alias(u'std::pair< ns3::SequenceNumber< unsigned int, int >, ns3::SequenceNumber< unsigned int, int > >', u'ns3::TcpOptionSack::SackBlock')
typehandlers.add_type_alias(u'std::pair< ns3::SequenceNumber< unsigned int, int >, ns3::SequenceNumber< unsigned int, int > >*', u'ns3::TcpOptionSack::SackBlock*')
typehandlers.add_type_alias(u'std::pair< ns3::SequenceNumber< unsigned int, int >, ns3::SequenceNumber< unsigned int, int > >&', u'ns3::TcpOptionSack::SackBlock&')
typehandlers.add_type_alias(u'std::list< std::pair< ns3::SequenceNumber< unsigned int, int >, ns3::SequenceNumber< unsigned int, int > > >', u'ns3::TcpOptionSack::SackList')
typehandlers.add_type_alias(u'std::list< std::pair< ns3::SequenceNumber< unsigned int, int >, ns3::SequenceNumber< unsigned int, int > > >*', u'ns3::TcpOptionSack::SackList*')
typehandlers.add_type_alias(u'std::list< std::pair< ns3::SequenceNumber< unsigned int, int >, ns3::SequenceNumber< unsigned int, int > > >&', u'ns3::TcpOptionSack::SackList&')
module.add_class('TcpOptionSackPermitted', parent=root_module['ns3::TcpOption'])
module.add_class('TcpOptionTS', parent=root_module['ns3::TcpOption'])
module.add_class('TcpOptionUnknown', parent=root_module['ns3::TcpOption'])
module.add_class('TcpOptionWinScale', parent=root_module['ns3::TcpOption'])
module.add_class('TcpRecoveryOps', parent=root_module['ns3::Object'])
module.add_class('TcpRxBuffer', parent=root_module['ns3::Object'])
module.add_class('TcpScalable', parent=root_module['ns3::TcpNewReno'])
module.add_class('TcpSocket', parent=root_module['ns3::Socket'])
module.add_enum('TcpStates_t', ['CLOSED', 'LISTEN', 'SYN_SENT', 'SYN_RCVD', 'ESTABLISHED', 'CLOSE_WAIT', 'LAST_ACK', 'FIN_WAIT_1', 'FIN_WAIT_2', 'CLOSING', 'TIME_WAIT', 'LAST_STATE'], outer_class=root_module['ns3::TcpSocket'])
typehandlers.add_type_alias(u'ns3::TcpSocket::TcpStates_t', u'ns3::TcpSocket::TcpStates_t')
typehandlers.add_type_alias(u'ns3::TcpSocket::TcpStates_t*', u'ns3::TcpSocket::TcpStates_t*')
typehandlers.add_type_alias(u'ns3::TcpSocket::TcpStates_t&', u'ns3::TcpSocket::TcpStates_t&')
module.add_class('TcpSocketBase', parent=root_module['ns3::TcpSocket'])
module.add_enum('EcnMode_t', ['NoEcn', 'ClassicEcn'], outer_class=root_module['ns3::TcpSocketBase'])
typehandlers.add_type_alias(u'ns3::TcpSocketBase::EcnMode_t', u'ns3::TcpSocketBase::EcnMode_t')
typehandlers.add_type_alias(u'ns3::TcpSocketBase::EcnMode_t*', u'ns3::TcpSocketBase::EcnMode_t*')
typehandlers.add_type_alias(u'ns3::TcpSocketBase::EcnMode_t&', u'ns3::TcpSocketBase::EcnMode_t&')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ptr< ns3::Packet const > const, ns3::TcpHeader const &, ns3::Ptr< ns3::TcpSocketBase const > const )', u'ns3::TcpSocketBase::TcpTxRxTracedCallback')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ptr< ns3::Packet const > const, ns3::TcpHeader const &, ns3::Ptr< ns3::TcpSocketBase const > const )*', u'ns3::TcpSocketBase::TcpTxRxTracedCallback*')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ptr< ns3::Packet const > const, ns3::TcpHeader const &, ns3::Ptr< ns3::TcpSocketBase const > const )&', u'ns3::TcpSocketBase::TcpTxRxTracedCallback&')
module.add_class('TcpSocketFactory', parent=root_module['ns3::SocketFactory'])
module.add_class('TcpSocketState', parent=root_module['ns3::Object'])
module.add_enum('TcpCongState_t', ['CA_OPEN', 'CA_DISORDER', 'CA_CWR', 'CA_RECOVERY', 'CA_LOSS', 'CA_LAST_STATE'], outer_class=root_module['ns3::TcpSocketState'])
module.add_enum('TcpCAEvent_t', ['CA_EVENT_TX_START', 'CA_EVENT_CWND_RESTART', 'CA_EVENT_COMPLETE_CWR', 'CA_EVENT_LOSS', 'CA_EVENT_ECN_NO_CE', 'CA_EVENT_ECN_IS_CE', 'CA_EVENT_DELAYED_ACK', 'CA_EVENT_NON_DELAYED_ACK'], outer_class=root_module['ns3::TcpSocketState'])
module.add_enum('EcnState_t', ['ECN_DISABLED', 'ECN_IDLE', 'ECN_CE_RCVD', 'ECN_SENDING_ECE', 'ECN_ECE_RCVD', 'ECN_CWR_SENT'], outer_class=root_module['ns3::TcpSocketState'])
typehandlers.add_type_alias(u'ns3::TcpSocketState::TcpCongState_t', u'ns3::TcpSocketState::TcpCongState_t')
typehandlers.add_type_alias(u'ns3::TcpSocketState::TcpCongState_t*', u'ns3::TcpSocketState::TcpCongState_t*')
typehandlers.add_type_alias(u'ns3::TcpSocketState::TcpCongState_t&', u'ns3::TcpSocketState::TcpCongState_t&')
typehandlers.add_type_alias(u'ns3::TcpSocketState::TcpCAEvent_t', u'ns3::TcpSocketState::TcpCAEvent_t')
typehandlers.add_type_alias(u'ns3::TcpSocketState::TcpCAEvent_t*', u'ns3::TcpSocketState::TcpCAEvent_t*')
typehandlers.add_type_alias(u'ns3::TcpSocketState::TcpCAEvent_t&', u'ns3::TcpSocketState::TcpCAEvent_t&')
typehandlers.add_type_alias(u'ns3::TcpSocketState::EcnState_t', u'ns3::TcpSocketState::EcnState_t')
typehandlers.add_type_alias(u'ns3::TcpSocketState::EcnState_t*', u'ns3::TcpSocketState::EcnState_t*')
typehandlers.add_type_alias(u'ns3::TcpSocketState::EcnState_t&', u'ns3::TcpSocketState::EcnState_t&')
module.add_class('TcpTxBuffer', parent=root_module['ns3::Object'])
module.add_class('TcpVegas', parent=root_module['ns3::TcpNewReno'])
module.add_class('TcpVeno', parent=root_module['ns3::TcpNewReno'])
module.add_class('TcpWestwood', parent=root_module['ns3::TcpNewReno'])
module.add_enum('ProtocolType', ['WESTWOOD', 'WESTWOODPLUS'], outer_class=root_module['ns3::TcpWestwood'])
module.add_enum('FilterType', ['NONE', 'TUSTIN'], outer_class=root_module['ns3::TcpWestwood'])
module.add_class('TcpYeah', parent=root_module['ns3::TcpNewReno'])
# Time / tracing support from ns.core.
module.add_class('Time', import_from_module='ns.core')
module.add_enum('Unit', ['Y', 'D', 'H', 'MIN', 'S', 'MS', 'US', 'NS', 'PS', 'FS', 'LAST'], outer_class=root_module['ns3::Time'], import_from_module='ns.core')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Time )', u'ns3::Time::TracedCallback')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Time )*', u'ns3::Time::TracedCallback*')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Time )&', u'ns3::Time::TracedCallback&')
root_module['ns3::Time'].implicitly_converts_to(root_module['ns3::int64x64_t'])
module.add_class('TraceSourceAccessor', import_from_module='ns.core', parent=root_module['ns3::SimpleRefCount< ns3::TraceSourceAccessor, ns3::empty, ns3::DefaultDeleter<ns3::TraceSourceAccessor> >'])
module.add_class('TracedValue', import_from_module='ns.core', template_parameters=['ns3::Time'])
root_module['ns3::TracedValue< ns3::Time >'].implicitly_converts_to(root_module['ns3::Time'])
module.add_class('Trailer', import_from_module='ns.network', parent=root_module['ns3::Chunk'])
module.add_class('TriangularRandomVariable', import_from_module='ns.core', parent=root_module['ns3::RandomVariableStream'])
module.add_class('UdpHeader', parent=root_module['ns3::Header'])
module.add_class('UdpSocket', parent=root_module['ns3::Socket'])
module.add_class('UdpSocketFactory', parent=root_module['ns3::SocketFactory'])
module.add_class('UniformRandomVariable', import_from_module='ns.core', parent=root_module['ns3::RandomVariableStream'])
module.add_class('WeibullRandomVariable', import_from_module='ns.core', parent=root_module['ns3::RandomVariableStream'])
module.add_class('ZetaRandomVariable', import_from_module='ns.core', parent=root_module['ns3::RandomVariableStream'])
module.add_class('ZipfRandomVariable', import_from_module='ns.core', parent=root_module['ns3::RandomVariableStream'])
# ARP classes and attribute-system base classes.
module.add_class('ArpCache', parent=root_module['ns3::Object'])
module.add_class('Entry', outer_class=root_module['ns3::ArpCache'])
typehandlers.add_type_alias(u'std::pair< ns3::Ptr< ns3::Packet >, ns3::Ipv4Header >', u'ns3::ArpCache::Ipv4PayloadHeaderPair')
typehandlers.add_type_alias(u'std::pair< ns3::Ptr< ns3::Packet >, ns3::Ipv4Header >*', u'ns3::ArpCache::Ipv4PayloadHeaderPair*')
typehandlers.add_type_alias(u'std::pair< ns3::Ptr< ns3::Packet >, ns3::Ipv4Header >&', u'ns3::ArpCache::Ipv4PayloadHeaderPair&')
module.add_class('ArpHeader', parent=root_module['ns3::Header'])
module.add_enum('ArpType_e', ['ARP_TYPE_REQUEST', 'ARP_TYPE_REPLY'], outer_class=root_module['ns3::ArpHeader'])
module.add_class('ArpL3Protocol', parent=root_module['ns3::Object'])
module.add_class('AttributeAccessor', import_from_module='ns.core', parent=root_module['ns3::SimpleRefCount< ns3::AttributeAccessor, ns3::empty, ns3::DefaultDeleter<ns3::AttributeAccessor> >'])
module.add_class('AttributeChecker', allow_subclassing=False, automatic_type_narrowing=True, import_from_module='ns.core', parent=root_module['ns3::SimpleRefCount< ns3::AttributeChecker, ns3::empty, ns3::DefaultDeleter<ns3::AttributeChecker> >'])
module.add_class('AttributeValue', allow_subclassing=False, automatic_type_narrowing=True, import_from_module='ns.core', parent=root_module['ns3::SimpleRefCount< ns3::AttributeValue, ns3::empty, ns3::DefaultDeleter<ns3::AttributeValue> >'])
module.add_class('BooleanChecker', import_from_module='ns.core', parent=root_module['ns3::AttributeChecker'])
module.add_class('BooleanValue', import_from_module='ns.core', parent=root_module['ns3::AttributeValue'])
module.add_class('CallbackChecker', import_from_module='ns.core', parent=root_module['ns3::AttributeChecker'])
module.add_class('CallbackImplBase', import_from_module='ns.core', parent=root_module['ns3::SimpleRefCount< ns3::CallbackImplBase, ns3::empty, ns3::DefaultDeleter<ns3::CallbackImplBase> >'])
module.add_class('CallbackValue', import_from_module='ns.core', parent=root_module['ns3::AttributeValue'])
module.add_class('Channel', import_from_module='ns.network', parent=root_module['ns3::Object'])
module.add_class('ConstantRandomVariable', import_from_module='ns.core', parent=root_module['ns3::RandomVariableStream'])
module.add_class('DataCollectionObject', import_from_module='ns.stats', parent=root_module['ns3::Object'])
module.add_class('DataRateChecker', import_from_module='ns.network', parent=root_module['ns3::AttributeChecker'])
module.add_class('DataRateValue', import_from_module='ns.network', parent=root_module['ns3::AttributeValue'])
module.add_class('DeterministicRandomVariable', import_from_module='ns.core', parent=root_module['ns3::RandomVariableStream'])
module.add_class('DoubleValue', import_from_module='ns.core', parent=root_module['ns3::AttributeValue'])
module.add_class('EmpiricalRandomVariable', import_from_module='ns.core', parent=root_module['ns3::RandomVariableStream'])
module.add_class('EmptyAttributeAccessor', import_from_module='ns.core', parent=root_module['ns3::AttributeAccessor'])
module.add_class('EmptyAttributeChecker', import_from_module='ns.core', parent=root_module['ns3::AttributeChecker'])
module.add_class('EmptyAttributeValue', import_from_module='ns.core', parent=root_module['ns3::AttributeValue'])
module.add_class('EnumChecker', import_from_module='ns.core', parent=root_module['ns3::AttributeChecker'])
module.add_class('EnumValue', import_from_module='ns.core', parent=root_module['ns3::AttributeValue'])
module.add_class('ErlangRandomVariable', import_from_module='ns.core', parent=root_module['ns3::RandomVariableStream'])
module.add_class('EventImpl', import_from_module='ns.core', parent=root_module['ns3::SimpleRefCount< ns3::EventImpl, ns3::empty, ns3::DefaultDeleter<ns3::EventImpl> >'])
module.add_class('ExponentialRandomVariable', import_from_module='ns.core', parent=root_module['ns3::RandomVariableStream'])
module.add_class('GammaRandomVariable', import_from_module='ns.core', parent=root_module['ns3::RandomVariableStream'])
module.add_class('GlobalRouter', destructor_visibility='private', parent=root_module['ns3::Object'])
module.add_class('Icmpv6DestinationUnreachable', parent=root_module['ns3::Icmpv6Header'])
module.add_class('Icmpv6Echo', parent=root_module['ns3::Icmpv6Header'])
module.add_class('IntegerValue', import_from_module='ns.core', parent=root_module['ns3::AttributeValue'])
module.add_class('IpL4Protocol', parent=root_module['ns3::Object'])
module.add_enum('RxStatus', ['RX_OK', 'RX_CSUM_FAILED', 'RX_ENDPOINT_CLOSED', 'RX_ENDPOINT_UNREACH'], outer_class=root_module['ns3::IpL4Protocol'])
typehandlers.add_type_alias(u'ns3::Callback< void, ns3::Ptr< ns3::Packet >, ns3::Ipv4Address, ns3::Ipv4Address, unsigned char, ns3::Ptr< ns3::Ipv4Route >, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', u'ns3::IpL4Protocol::DownTargetCallback')
typehandlers.add_type_alias(u'ns3::Callback< void, ns3::Ptr< ns3::Packet >, ns3::Ipv4Address, ns3::Ipv4Address, unsigned char, ns3::Ptr< ns3::Ipv4Route >, ns3::empty, ns3::empty, ns3::empty, ns3::empty >*', u'ns3::IpL4Protocol::DownTargetCallback*')
typehandlers.add_type_alias(u'ns3::Callback< void, ns3::Ptr< ns3::Packet >, ns3::Ipv4Address, ns3::Ipv4Address, unsigned char, ns3::Ptr< ns3::Ipv4Route >, ns3::empty, ns3::empty, ns3::empty, ns3::empty >&', u'ns3::IpL4Protocol::DownTargetCallback&')
typehandlers.add_type_alias(u'ns3::Callback< void, ns3::Ptr< ns3::Packet >, ns3::Ipv6Address, ns3::Ipv6Address, unsigned char, ns3::Ptr< ns3::Ipv6Route >, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', u'ns3::IpL4Protocol::DownTargetCallback6')
typehandlers.add_type_alias(u'ns3::Callback< void, ns3::Ptr< ns3::Packet >, ns3::Ipv6Address, ns3::Ipv6Address, unsigned char, ns3::Ptr< ns3::Ipv6Route >, ns3::empty, ns3::empty, ns3::empty, ns3::empty >*', u'ns3::IpL4Protocol::DownTargetCallback6*')
typehandlers.add_type_alias(u'ns3::Callback< void, ns3::Ptr< ns3::Packet >, ns3::Ipv6Address, ns3::Ipv6Address, unsigned char, ns3::Ptr< ns3::Ipv6Route >, ns3::empty, ns3::empty, ns3::empty, ns3::empty >&', u'ns3::IpL4Protocol::DownTargetCallback6&')
module.add_class('Ipv4', parent=root_module['ns3::Object'])
module.add_class('Ipv4AddressChecker', import_from_module='ns.network', parent=root_module['ns3::AttributeChecker'])
module.add_class('Ipv4AddressValue', import_from_module='ns.network', parent=root_module['ns3::AttributeValue'])
module.add_class('Ipv4Interface', parent=root_module['ns3::Object'])
module.add_class('Ipv4L3Protocol', parent=root_module['ns3::Ipv4'])
module.add_enum('DropReason', ['DROP_TTL_EXPIRED', 'DROP_NO_ROUTE', 'DROP_BAD_CHECKSUM', 'DROP_INTERFACE_DOWN', 'DROP_ROUTE_ERROR', 'DROP_FRAGMENT_TIMEOUT'], outer_class=root_module['ns3::Ipv4L3Protocol'])
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ipv4Header const &, ns3::Ptr< ns3::Packet const >, uint32_t )', u'ns3::Ipv4L3Protocol::SentTracedCallback')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ipv4Header const &, ns3::Ptr< ns3::Packet const >, uint32_t )*', u'ns3::Ipv4L3Protocol::SentTracedCallback*')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ipv4Header const &, ns3::Ptr< ns3::Packet const >, uint32_t )&', u'ns3::Ipv4L3Protocol::SentTracedCallback&')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ptr< ns3::Packet const >, ns3::Ptr< ns3::Ipv4 >, uint32_t )', u'ns3::Ipv4L3Protocol::TxRxTracedCallback')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ptr< ns3::Packet const >, ns3::Ptr< ns3::Ipv4 >, uint32_t )*', u'ns3::Ipv4L3Protocol::TxRxTracedCallback*')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ptr< ns3::Packet const >, ns3::Ptr< ns3::Ipv4 >, uint32_t )&', u'ns3::Ipv4L3Protocol::TxRxTracedCallback&')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ipv4Header const &, ns3::Ptr< ns3::Packet const >, ns3::Ipv4L3Protocol::DropReason, ns3::Ptr< ns3::Ipv4 >, uint32_t )', u'ns3::Ipv4L3Protocol::DropTracedCallback')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ipv4Header const &, ns3::Ptr< ns3::Packet const >, ns3::Ipv4L3Protocol::DropReason, ns3::Ptr< ns3::Ipv4 >, uint32_t )*', u'ns3::Ipv4L3Protocol::DropTracedCallback*')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ipv4Header const &, ns3::Ptr< ns3::Packet const >, ns3::Ipv4L3Protocol::DropReason, ns3::Ptr< ns3::Ipv4 >, uint32_t )&', u'ns3::Ipv4L3Protocol::DropTracedCallback&')
module.add_class('Ipv4MaskChecker', import_from_module='ns.network', parent=root_module['ns3::AttributeChecker'])
module.add_class('Ipv4MaskValue', import_from_module='ns.network', parent=root_module['ns3::AttributeValue'])
module.add_class('Ipv4MulticastRoute', parent=root_module['ns3::SimpleRefCount< ns3::Ipv4MulticastRoute, ns3::empty, ns3::DefaultDeleter<ns3::Ipv4MulticastRoute> >'])
module.add_class('Ipv4PacketFilter', parent=root_module['ns3::PacketFilter'])
module.add_class('Ipv4RawSocketFactory', parent=root_module['ns3::SocketFactory'])
module.add_class('Ipv4RawSocketImpl', parent=root_module['ns3::Socket'])
module.add_class('Ipv4Route', parent=root_module['ns3::SimpleRefCount< ns3::Ipv4Route, ns3::empty, ns3::DefaultDeleter<ns3::Ipv4Route> >'])
module.add_class('Ipv4RoutingProtocol', parent=root_module['ns3::Object'])
typehandlers.add_type_alias(u'ns3::Callback< void, ns3::Ptr< ns3::Ipv4Route >, ns3::Ptr< ns3::Packet const >, ns3::Ipv4Header const &, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', u'ns3::Ipv4RoutingProtocol::UnicastForwardCallback')
typehandlers.add_type_alias(u'ns3::Callback< void, ns3::Ptr< ns3::Ipv4Route >, ns3::Ptr< ns3::Packet const >, ns3::Ipv4Header const &, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >*', u'ns3::Ipv4RoutingProtocol::UnicastForwardCallback*')
typehandlers.add_type_alias(u'ns3::Callback< void, ns3::Ptr< ns3::Ipv4Route >, ns3::Ptr< ns3::Packet const >, ns3::Ipv4Header const &, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >&', u'ns3::Ipv4RoutingProtocol::UnicastForwardCallback&')
typehandlers.add_type_alias(u'ns3::Callback< void, ns3::Ptr< ns3::Ipv4MulticastRoute >, ns3::Ptr< ns3::Packet const >, ns3::Ipv4Header const &, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', u'ns3::Ipv4RoutingProtocol::MulticastForwardCallback')
typehandlers.add_type_alias(u'ns3::Callback< void, ns3::Ptr< ns3::Ipv4MulticastRoute >, ns3::Ptr< ns3::Packet const >, ns3::Ipv4Header const &, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >*', u'ns3::Ipv4RoutingProtocol::MulticastForwardCallback*')
typehandlers.add_type_alias(u'ns3::Callback< void, ns3::Ptr< ns3::Ipv4MulticastRoute >, ns3::Ptr< ns3::Packet const >, ns3::Ipv4Header const &, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >&', u'ns3::Ipv4RoutingProtocol::MulticastForwardCallback&')
typehandlers.add_type_alias(u'ns3::Callback< void, ns3::Ptr< ns3::Packet const >, ns3::Ipv4Header const &, unsigned int, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', u'ns3::Ipv4RoutingProtocol::LocalDeliverCallback')
typehandlers.add_type_alias(u'ns3::Callback< void, ns3::Ptr< ns3::Packet const >, ns3::Ipv4Header const &, unsigned int, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >*', u'ns3::Ipv4RoutingProtocol::LocalDeliverCallback*')
typehandlers.add_type_alias(u'ns3::Callback< void, ns3::Ptr< ns3::Packet const >, ns3::Ipv4Header const &, unsigned int, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >&', u'ns3::Ipv4RoutingProtocol::LocalDeliverCallback&')
typehandlers.add_type_alias(u'ns3::Callback< void, ns3::Ptr< ns3::Packet const >, ns3::Ipv4Header const &, ns3::Socket::SocketErrno, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', u'ns3::Ipv4RoutingProtocol::ErrorCallback')
typehandlers.add_type_alias(u'ns3::Callback< void, ns3::Ptr< ns3::Packet const >, ns3::Ipv4Header const &, ns3::Socket::SocketErrno, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >*', u'ns3::Ipv4RoutingProtocol::ErrorCallback*')
typehandlers.add_type_alias(u'ns3::Callback< void, ns3::Ptr< ns3::Packet const >, ns3::Ipv4Header const &, ns3::Socket::SocketErrno, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >&', u'ns3::Ipv4RoutingProtocol::ErrorCallback&')
module.add_class('Ipv4StaticRouting', parent=root_module['ns3::Ipv4RoutingProtocol'])
module.add_class('Ipv6', parent=root_module['ns3::Object'])
module.add_class('Ipv6AddressChecker', import_from_module='ns.network', parent=root_module['ns3::AttributeChecker'])
module.add_class('Ipv6AddressValue', import_from_module='ns.network', parent=root_module['ns3::AttributeValue'])
module.add_class('Ipv6Extension', parent=root_module['ns3::Object'])
module.add_class('Ipv6ExtensionAH', parent=root_module['ns3::Ipv6Extension'])
module.add_class('Ipv6ExtensionAHHeader', parent=root_module['ns3::Ipv6ExtensionHeader'])
module.add_class('Ipv6ExtensionDemux', parent=root_module['ns3::Object'])
module.add_class('Ipv6ExtensionDestination', parent=root_module['ns3::Ipv6Extension'])
module.add_class('Ipv6ExtensionDestinationHeader', parent=[root_module['ns3::Ipv6ExtensionHeader'], root_module['ns3::OptionField']])
module.add_class('Ipv6ExtensionESP', parent=root_module['ns3::Ipv6Extension'])
module.add_class('Ipv6ExtensionESPHeader', parent=root_module['ns3::Ipv6ExtensionHeader'])
module.add_class('Ipv6ExtensionFragment', parent=root_module['ns3::Ipv6Extension'])
typehandlers.add_type_alias(u'std::pair< ns3::Ptr< ns3::Packet >, ns3::Ipv6Header >', u'ns3::Ipv6ExtensionFragment::Ipv6PayloadHeaderPair')
typehandlers.add_type_alias(u'std::pair< ns3::Ptr< ns3::Packet >, ns3::Ipv6Header >*', u'ns3::Ipv6ExtensionFragment::Ipv6PayloadHeaderPair*')
typehandlers.add_type_alias(u'std::pair< ns3::Ptr< ns3::Packet >, ns3::Ipv6Header >&', u'ns3::Ipv6ExtensionFragment::Ipv6PayloadHeaderPair&')
module.add_class('Ipv6ExtensionFragmentHeader', parent=root_module['ns3::Ipv6ExtensionHeader'])
module.add_class('Ipv6ExtensionHopByHop', parent=root_module['ns3::Ipv6Extension'])
module.add_class('Ipv6ExtensionLooseRoutingHeader', parent=root_module['ns3::Ipv6ExtensionRoutingHeader'])
module.add_class('Ipv6ExtensionRouting', parent=root_module['ns3::Ipv6Extension'])
module.add_class('Ipv6ExtensionRoutingDemux', parent=root_module['ns3::Object'])
module.add_class('Ipv6Interface', parent=root_module['ns3::Object'])
module.add_class('Ipv6L3Protocol', parent=root_module['ns3::Ipv6'])
module.add_enum('DropReason', ['DROP_TTL_EXPIRED', 'DROP_NO_ROUTE', 'DROP_INTERFACE_DOWN', 'DROP_ROUTE_ERROR', 'DROP_UNKNOWN_PROTOCOL', 'DROP_UNKNOWN_OPTION', 'DROP_MALFORMED_HEADER', 'DROP_FRAGMENT_TIMEOUT'], outer_class=root_module['ns3::Ipv6L3Protocol'])
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ipv6Header const &, ns3::Ptr< ns3::Packet const >, uint32_t )', u'ns3::Ipv6L3Protocol::SentTracedCallback')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ipv6Header const &, ns3::Ptr< ns3::Packet const >, uint32_t )*', u'ns3::Ipv6L3Protocol::SentTracedCallback*')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ipv6Header const &, ns3::Ptr< ns3::Packet const >, uint32_t )&', u'ns3::Ipv6L3Protocol::SentTracedCallback&')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ptr< ns3::Packet const >, ns3::Ptr< ns3::Ipv6 >, uint32_t )', u'ns3::Ipv6L3Protocol::TxRxTracedCallback')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ptr< ns3::Packet const >, ns3::Ptr< ns3::Ipv6 >, uint32_t )*', u'ns3::Ipv6L3Protocol::TxRxTracedCallback*')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ptr< ns3::Packet const >, ns3::Ptr< ns3::Ipv6 >, uint32_t )&', u'ns3::Ipv6L3Protocol::TxRxTracedCallback&')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ipv6Header const &, ns3::Ptr< ns3::Packet const >, ns3::Ipv6L3Protocol::DropReason, ns3::Ptr< ns3::Ipv6 >, uint32_t )', u'ns3::Ipv6L3Protocol::DropTracedCallback')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ipv6Header const &, ns3::Ptr< ns3::Packet const >, ns3::Ipv6L3Protocol::DropReason, ns3::Ptr< ns3::Ipv6 >, uint32_t )*', u'ns3::Ipv6L3Protocol::DropTracedCallback*')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ipv6Header const &, ns3::Ptr< ns3::Packet const >, ns3::Ipv6L3Protocol::DropReason, ns3::Ptr< ns3::Ipv6 >, uint32_t )&', u'ns3::Ipv6L3Protocol::DropTracedCallback&')
module.add_class('Ipv6MulticastRoute', parent=root_module['ns3::SimpleRefCount< ns3::Ipv6MulticastRoute, ns3::empty, ns3::DefaultDeleter<ns3::Ipv6MulticastRoute> >'])
module.add_class('Ipv6Option', parent=root_module['ns3::Object'])
module.add_class('Ipv6OptionJumbogram', parent=root_module['ns3::Ipv6Option'])
module.add_class('Ipv6OptionPad1', parent=root_module['ns3::Ipv6Option'])
module.add_class('Ipv6OptionPadn', parent=root_module['ns3::Ipv6Option'])
module.add_class('Ipv6OptionRouterAlert', parent=root_module['ns3::Ipv6Option'])
module.add_class('Ipv6PacketFilter', parent=root_module['ns3::PacketFilter'])
module.add_class('Ipv6PmtuCache', parent=root_module['ns3::Object'])
module.add_class('Ipv6PrefixChecker', import_from_module='ns.network', parent=root_module['ns3::AttributeChecker'])
module.add_class('Ipv6PrefixValue', import_from_module='ns.network', parent=root_module['ns3::AttributeValue'])
module.add_class('Ipv6RawSocketFactory', parent=root_module['ns3::SocketFactory'])
module.add_class('Ipv6Route', parent=root_module['ns3::SimpleRefCount< ns3::Ipv6Route, ns3::empty, ns3::DefaultDeleter<ns3::Ipv6Route> >'])
module.add_class('Ipv6RoutingProtocol', parent=root_module['ns3::Object'])
typehandlers.add_type_alias(u'ns3::Callback< void, ns3::Ptr< ns3::NetDevice const >, ns3::Ptr< ns3::Ipv6Route >, ns3::Ptr< ns3::Packet const >, ns3::Ipv6Header const &, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', u'ns3::Ipv6RoutingProtocol::UnicastForwardCallback')
typehandlers.add_type_alias(u'ns3::Callback< void, ns3::Ptr< ns3::NetDevice const >, ns3::Ptr< ns3::Ipv6Route >, ns3::Ptr< ns3::Packet const >, ns3::Ipv6Header const &, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >*', u'ns3::Ipv6RoutingProtocol::UnicastForwardCallback*')
typehandlers.add_type_alias(u'ns3::Callback< void, ns3::Ptr< ns3::NetDevice const >, ns3::Ptr< ns3::Ipv6Route >, ns3::Ptr< ns3::Packet const >, ns3::Ipv6Header const &, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >&', u'ns3::Ipv6RoutingProtocol::UnicastForwardCallback&')
typehandlers.add_type_alias(u'ns3::Callback< void, ns3::Ptr< ns3::NetDevice const >, ns3::Ptr< ns3::Ipv6MulticastRoute >, ns3::Ptr< ns3::Packet const >, ns3::Ipv6Header const &, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', u'ns3::Ipv6RoutingProtocol::MulticastForwardCallback')
typehandlers.add_type_alias(u'ns3::Callback< void, ns3::Ptr< ns3::NetDevice const >, ns3::Ptr< ns3::Ipv6MulticastRoute >, ns3::Ptr< ns3::Packet const >, ns3::Ipv6Header const &, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >*', u'ns3::Ipv6RoutingProtocol::MulticastForwardCallback*')
typehandlers.add_type_alias(u'ns3::Callback< void, ns3::Ptr< ns3::NetDevice const >, ns3::Ptr< ns3::Ipv6MulticastRoute >, ns3::Ptr< ns3::Packet const >, ns3::Ipv6Header const &, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >&', u'ns3::Ipv6RoutingProtocol::MulticastForwardCallback&')
typehandlers.add_type_alias(u'ns3::Callback< void, ns3::Ptr< ns3::Packet const >, ns3::Ipv6Header const &, unsigned int, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', u'ns3::Ipv6RoutingProtocol::LocalDeliverCallback')
typehandlers.add_type_alias(u'ns3::Callback< void, ns3::Ptr< ns3::Packet const >, ns3::Ipv6Header const &, unsigned int, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >*', u'ns3::Ipv6RoutingProtocol::LocalDeliverCallback*')
typehandlers.add_type_alias(u'ns3::Callback< void, ns3::Ptr< ns3::Packet const >, ns3::Ipv6Header const &, unsigned int, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >&', u'ns3::Ipv6RoutingProtocol::LocalDeliverCallback&')
typehandlers.add_type_alias(u'ns3::Callback< void, ns3::Ptr< ns3::Packet const >, ns3::Ipv6Header const &, ns3::Socket::SocketErrno, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', u'ns3::Ipv6RoutingProtocol::ErrorCallback')
typehandlers.add_type_alias(u'ns3::Callback< void, ns3::Ptr< ns3::Packet const >, ns3::Ipv6Header const &, ns3::Socket::SocketErrno, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >*', u'ns3::Ipv6RoutingProtocol::ErrorCallback*')
typehandlers.add_type_alias(u'ns3::Callback< void, ns3::Ptr< ns3::Packet const >, ns3::Ipv6Header const &, ns3::Socket::SocketErrno, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >&', u'ns3::Ipv6RoutingProtocol::ErrorCallback&')
module.add_class('Ipv6StaticRouting', parent=root_module['ns3::Ipv6RoutingProtocol'])
module.add_class('LogNormalRandomVariable', import_from_module='ns.core', parent=root_module['ns3::RandomVariableStream'])
module.add_class('Mac48AddressChecker', import_from_module='ns.network', parent=root_module['ns3::AttributeChecker'])
module.add_class('Mac48AddressValue', import_from_module='ns.network', parent=root_module['ns3::AttributeValue'])
module.add_class('NdiscCache', parent=root_module['ns3::Object'])
module.add_class('Entry', outer_class=root_module['ns3::NdiscCache'])
typehandlers.add_type_alias(u'std::pair< ns3::Ptr< ns3::Packet >, ns3::Ipv6Header >', u'ns3::NdiscCache::Ipv6PayloadHeaderPair')
typehandlers.add_type_alias(u'std::pair< ns3::Ptr< ns3::Packet >, ns3::Ipv6Header >*', u'ns3::NdiscCache::Ipv6PayloadHeaderPair*')
typehandlers.add_type_alias(u'std::pair< ns3::Ptr< ns3::Packet >, ns3::Ipv6Header >&', u'ns3::NdiscCache::Ipv6PayloadHeaderPair&')
module.add_class('NetDevice', import_from_module='ns.network', parent=root_module['ns3::Object'])
module.add_enum('PacketType', ['PACKET_HOST', 'NS3_PACKET_HOST', 'PACKET_BROADCAST', 'NS3_PACKET_BROADCAST', 'PACKET_MULTICAST', 'NS3_PACKET_MULTICAST', 'PACKET_OTHERHOST', 'NS3_PACKET_OTHERHOST'], outer_class=root_module['ns3::NetDevice'], import_from_module='ns.network')
typehandlers.add_type_alias(u'void ( * ) ( )', u'ns3::NetDevice::LinkChangeTracedCallback')
typehandlers.add_type_alias(u'void ( * ) ( )*', u'ns3::NetDevice::LinkChangeTracedCallback*')
typehandlers.add_type_alias(u'void ( * ) ( )&', u'ns3::NetDevice::LinkChangeTracedCallback&')
typehandlers.add_type_alias(u'ns3::Callback< bool, ns3::Ptr< ns3::NetDevice >, ns3::Ptr< ns3::Packet const >, unsigned short, ns3::Address const &, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', u'ns3::NetDevice::ReceiveCallback')
typehandlers.add_type_alias(u'ns3::Callback< bool, ns3::Ptr< ns3::NetDevice >, ns3::Ptr< ns3::Packet const >, unsigned short, ns3::Address const &, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >*', u'ns3::NetDevice::ReceiveCallback*')
typehandlers.add_type_alias(u'ns3::Callback< bool, ns3::Ptr< ns3::NetDevice >, ns3::Ptr< ns3::Packet const >, unsigned short, ns3::Address const &, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >&', u'ns3::NetDevice::ReceiveCallback&')
typehandlers.add_type_alias(u'ns3::Callback< bool, ns3::Ptr< ns3::NetDevice >, ns3::Ptr< ns3::Packet const >, unsigned short, ns3::Address const &, ns3::Address const &, ns3::NetDevice::PacketType, ns3::empty, ns3::empty, ns3::empty >', u'ns3::NetDevice::PromiscReceiveCallback')
typehandlers.add_type_alias(u'ns3::Callback< bool, ns3::Ptr< ns3::NetDevice >, ns3::Ptr< ns3::Packet const >, unsigned short, ns3::Address const &, ns3::Address const &, ns3::NetDevice::PacketType, ns3::empty, ns3::empty, ns3::empty >*', u'ns3::NetDevice::PromiscReceiveCallback*')
typehandlers.add_type_alias(u'ns3::Callback< bool, ns3::Ptr< ns3::NetDevice >, ns3::Ptr< ns3::Packet const >, unsigned short, ns3::Address const &, ns3::Address const &, ns3::NetDevice::PacketType, ns3::empty, ns3::empty, ns3::empty >&', u'ns3::NetDevice::PromiscReceiveCallback&')
module.add_class('NixVector', import_from_module='ns.network', parent=root_module['ns3::SimpleRefCount< ns3::NixVector, ns3::empty, ns3::DefaultDeleter<ns3::NixVector> >'])
module.add_class('Node', import_from_module='ns.network', parent=root_module['ns3::Object'])
typehandlers.add_type_alias(u'ns3::Callback< void, ns3::Ptr< ns3::NetDevice >, ns3::Ptr< ns3::Packet const >, unsigned short, ns3::Address const &, ns3::Address const &, ns3::NetDevice::PacketType, ns3::empty, ns3::empty, ns3::empty >', u'ns3::Node::ProtocolHandler')
typehandlers.add_type_alias(u'ns3::Callback< void, ns3::Ptr< ns3::NetDevice >, ns3::Ptr< ns3::Packet const >, unsigned short, ns3::Address const &, ns3::Address const &, ns3::NetDevice::PacketType, ns3::empty, ns3::empty, ns3::empty >*', u'ns3::Node::ProtocolHandler*')
typehandlers.add_type_alias(u'ns3::Callback< void, ns3::Ptr< ns3::NetDevice >, ns3::Ptr< ns3::Packet const >, unsigned short, ns3::Address const &, ns3::Address const &, ns3::NetDevice::PacketType, ns3::empty, ns3::empty, ns3::empty >&', u'ns3::Node::ProtocolHandler&')
typehandlers.add_type_alias(u'ns3::Callback< void, ns3::Ptr< ns3::NetDevice >, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', u'ns3::Node::DeviceAdditionListener')
typehandlers.add_type_alias(u'ns3::Callback< void, ns3::Ptr< ns3::NetDevice >, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >*', u'ns3::Node::DeviceAdditionListener*')
typehandlers.add_type_alias(u'ns3::Callback< void, ns3::Ptr< ns3::NetDevice >, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >&', u'ns3::Node::DeviceAdditionListener&')
module.add_class('NormalRandomVariable', import_from_module='ns.core', parent=root_module['ns3::RandomVariableStream'])
module.add_class('ObjectFactoryChecker', import_from_module='ns.core', parent=root_module['ns3::AttributeChecker'])
module.add_class('ObjectFactoryValue', import_from_module='ns.core', parent=root_module['ns3::AttributeValue'])
module.add_class('OutputStreamWrapper', import_from_module='ns.network', parent=root_module['ns3::SimpleRefCount< ns3::OutputStreamWrapper, ns3::empty, ns3::DefaultDeleter<ns3::OutputStreamWrapper> >'])
module.add_class('Packet', import_from_module='ns.network', parent=root_module['ns3::SimpleRefCount< ns3::Packet, ns3::empty, ns3::DefaultDeleter<ns3::Packet> >'])
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ptr< ns3::Packet const > )', u'ns3::Packet::TracedCallback')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ptr< ns3::Packet const > )*', u'ns3::Packet::TracedCallback*')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ptr< ns3::Packet const > )&', u'ns3::Packet::TracedCallback&')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ptr< ns3::Packet const >, ns3::Address const & )', u'ns3::Packet::AddressTracedCallback')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ptr< ns3::Packet const >, ns3::Address const & )*', u'ns3::Packet::AddressTracedCallback*')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ptr< ns3::Packet const >, ns3::Address const & )&', u'ns3::Packet::AddressTracedCallback&')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ptr< ns3::Packet const > const, ns3::Address const &, ns3::Address const & )', u'ns3::Packet::TwoAddressTracedCallback')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ptr< ns3::Packet const > const, ns3::Address const &, ns3::Address const & )*', u'ns3::Packet::TwoAddressTracedCallback*')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ptr< ns3::Packet const > const, ns3::Address const &, ns3::Address const & )&', u'ns3::Packet::TwoAddressTracedCallback&')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ptr< ns3::Packet const >, ns3::Mac48Address )', u'ns3::Packet::Mac48AddressTracedCallback')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ptr< ns3::Packet const >, ns3::Mac48Address )*', u'ns3::Packet::Mac48AddressTracedCallback*')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ptr< ns3::Packet const >, ns3::Mac48Address )&', u'ns3::Packet::Mac48AddressTracedCallback&')
typehandlers.add_type_alias(u'void ( * ) ( uint32_t, uint32_t )', u'ns3::Packet::SizeTracedCallback')
typehandlers.add_type_alias(u'void ( * ) ( uint32_t, uint32_t )*', u'ns3::Packet::SizeTracedCallback*')
typehandlers.add_type_alias(u'void ( * ) ( uint32_t, uint32_t )&', u'ns3::Packet::SizeTracedCallback&')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ptr< ns3::Packet const >, double )', u'ns3::Packet::SinrTracedCallback')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ptr< ns3::Packet const >, double )*', u'ns3::Packet::SinrTracedCallback*')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ptr< ns3::Packet const >, double )&', u'ns3::Packet::SinrTracedCallback&')
module.add_class('ParetoRandomVariable', import_from_module='ns.core', parent=root_module['ns3::RandomVariableStream'])
module.add_class('Probe', import_from_module='ns.stats', parent=root_module['ns3::DataCollectionObject'])
module.add_class('QueueItem', import_from_module='ns.network', parent=root_module['ns3::SimpleRefCount< ns3::QueueItem, ns3::empty, ns3::DefaultDeleter<ns3::QueueItem> >'])
module.add_enum('Uint8Values', ['IP_DSFIELD'], outer_class=root_module['ns3::QueueItem'], import_from_module='ns.network')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ptr< ns3::QueueItem const > )', u'ns3::QueueItem::TracedCallback')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ptr< ns3::QueueItem const > )*', u'ns3::QueueItem::TracedCallback*')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ptr< ns3::QueueItem const > )&', u'ns3::QueueItem::TracedCallback&')
module.add_class('Rip', parent=root_module['ns3::Ipv4RoutingProtocol'])
module.add_enum('SplitHorizonType_e', ['NO_SPLIT_HORIZON', 'SPLIT_HORIZON', 'POISON_REVERSE'], outer_class=root_module['ns3::Rip'])
module.add_class('RipNg', parent=root_module['ns3::Ipv6RoutingProtocol'])
module.add_enum('SplitHorizonType_e', ['NO_SPLIT_HORIZON', 'SPLIT_HORIZON', 'POISON_REVERSE'], outer_class=root_module['ns3::RipNg'])
module.add_class('TcpBic', parent=root_module['ns3::TcpCongestionOps'])
module.add_class('TcpClassicRecovery', parent=root_module['ns3::TcpRecoveryOps'])
module.add_class('TcpHighSpeed', parent=root_module['ns3::TcpNewReno'])
module.add_class('TcpHtcp', parent=root_module['ns3::TcpNewReno'])
module.add_class('TcpHybla', parent=root_module['ns3::TcpNewReno'])
module.add_class('TcpIllinois', parent=root_module['ns3::TcpNewReno'])
module.add_class('TcpL4Protocol', parent=root_module['ns3::IpL4Protocol'])
module.add_class('TcpLedbat', parent=root_module['ns3::TcpNewReno'])
module.add_class('TcpLp', parent=root_module['ns3::TcpNewReno'])
module.add_class('TcpPrrRecovery', parent=root_module['ns3::TcpClassicRecovery'])
module.add_enum('ReductionBound_t', ['CRB', 'SSRB'], outer_class=root_module['ns3::TcpPrrRecovery'])
typehandlers.add_type_alias(u'ns3::TcpPrrRecovery::ReductionBound_t', u'ns3::TcpPrrRecovery::ReductionBound_t')
typehandlers.add_type_alias(u'ns3::TcpPrrRecovery::ReductionBound_t*', u'ns3::TcpPrrRecovery::ReductionBound_t*')
typehandlers.add_type_alias(u'ns3::TcpPrrRecovery::ReductionBound_t&', u'ns3::TcpPrrRecovery::ReductionBound_t&')
module.add_class('TimeValue', import_from_module='ns.core', parent=root_module['ns3::AttributeValue'])
module.add_class('TypeIdChecker', import_from_module='ns.core', parent=root_module['ns3::AttributeChecker'])
module.add_class('TypeIdValue', import_from_module='ns.core', parent=root_module['ns3::AttributeValue'])
module.add_class('UdpL4Protocol', parent=root_module['ns3::IpL4Protocol'])
module.add_class('UintegerValue', import_from_module='ns.core', parent=root_module['ns3::AttributeValue'])
module.add_class('AddressChecker', import_from_module='ns.network', parent=root_module['ns3::AttributeChecker'])
module.add_class('AddressValue', import_from_module='ns.network', parent=root_module['ns3::AttributeValue'])
module.add_class('BridgeChannel', import_from_module='ns.bridge', parent=root_module['ns3::Channel'])
module.add_class('BridgeNetDevice', import_from_module='ns.bridge', parent=root_module['ns3::NetDevice'])
module.add_class('CallbackImpl', import_from_module='ns.core', template_parameters=['bool', 'ns3::Ptr<ns3::NetDevice>', 'ns3::Ptr<const ns3::Packet>', 'unsigned short', 'const ns3::Address &', 'const ns3::Address &', 'ns3::NetDevice::PacketType', 'ns3::empty', 'ns3::empty', 'ns3::empty'], parent=root_module['ns3::CallbackImplBase'])
module.add_class('CallbackImpl', import_from_module='ns.core', template_parameters=['bool', 'ns3::Ptr<ns3::NetDevice>', 'ns3::Ptr<const ns3::Packet>', 'unsigned short', 'const ns3::Address &', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'], parent=root_module['ns3::CallbackImplBase'])
module.add_class('CallbackImpl', import_from_module='ns.core', template_parameters=['bool', 'ns3::Ptr<ns3::Socket>', 'const ns3::Address &', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'], parent=root_module['ns3::CallbackImplBase'])
module.add_class('CallbackImpl', import_from_module='ns.core', template_parameters=['ns3::ObjectBase *', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'], parent=root_module['ns3::CallbackImplBase'])
module.add_class('CallbackImpl', template_parameters=['void', 'const ns3::Ipv4Header &', 'ns3::Ptr<const ns3::Packet>', 'ns3::Ipv4L3Protocol::DropReason', 'ns3::Ptr<ns3::Ipv4>', 'unsigned int', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'], parent=root_module['ns3::CallbackImplBase'])
module.add_class('CallbackImpl', import_from_module='ns.core', template_parameters=['void', 'const ns3::Ipv4Header &', 'ns3::Ptr<const ns3::Packet>', 'unsigned int', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'], parent=root_module['ns3::CallbackImplBase'])
module.add_class('CallbackImpl', template_parameters=['void', 'const ns3::Ipv6Header &', 'ns3::Ptr<const ns3::Packet>', 'ns3::Ipv6L3Protocol::DropReason', 'ns3::Ptr<ns3::Ipv6>', 'unsigned int', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'], parent=root_module['ns3::CallbackImplBase'])
module.add_class('CallbackImpl', import_from_module='ns.core', template_parameters=['void', 'const ns3::Ipv6Header &', 'ns3::Ptr<const ns3::Packet>', 'unsigned int', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'], parent=root_module['ns3::CallbackImplBase'])
module.add_class('CallbackImpl', import_from_module='ns.core', template_parameters=['void', 'double', 'double', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'], parent=root_module['ns3::CallbackImplBase'])
module.add_class('CallbackImpl', import_from_module='ns.core', template_parameters=['void', 'ns3::Ipv4Address', 'unsigned char', 'unsigned char', 'unsigned char', 'unsigned int', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'], parent=root_module['ns3::CallbackImplBase'])
module.add_class('CallbackImpl', import_from_module='ns.core', template_parameters=['void', 'ns3::Ipv6Address', 'unsigned char', 'unsigned char', 'unsigned char', 'unsigned int', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'], parent=root_module['ns3::CallbackImplBase'])
module.add_class('CallbackImpl', import_from_module='ns.core', template_parameters=['void', 'ns3::Ptr<const ns3::Packet>', 'const ns3::TcpHeader &', 'ns3::Ptr<const ns3::TcpSocketBase>', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'], parent=root_module['ns3::CallbackImplBase'])
module.add_class('CallbackImpl', import_from_module='ns.core', template_parameters=['void', 'ns3::Ptr<const ns3::Packet>', 'ns3::Ptr<ns3::Ipv4>', 'unsigned int', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'], parent=root_module['ns3::CallbackImplBase'])
module.add_class('CallbackImpl', import_from_module='ns.core', template_parameters=['void', 'ns3::Ptr<const ns3::Packet>', 'ns3::Ptr<ns3::Ipv6>', 'unsigned int', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'], parent=root_module['ns3::CallbackImplBase'])
module.add_class('CallbackImpl', import_from_module='ns.core', template_parameters=['void', 'ns3::Ptr<const ns3::Packet>', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'], parent=root_module['ns3::CallbackImplBase'])
module.add_class('CallbackImpl', import_from_module='ns.core', template_parameters=['void', 'ns3::Ptr<ns3::NetDevice>', 'ns3::Ptr<const ns3::Packet>', 'unsigned short', 'const ns3::Address &', 'const ns3::Address &', 'ns3::NetDevice::PacketType', 'ns3::empty', 'ns3::empty', 'ns3::empty'], parent=root_module['ns3::CallbackImplBase'])
module.add_class('CallbackImpl', import_from_module='ns.core', template_parameters=['void', 'ns3::Ptr<ns3::NetDevice>', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'], parent=root_module['ns3::CallbackImplBase'])
module.add_class('CallbackImpl', import_from_module='ns.core', template_parameters=['void', 'ns3::Ptr<ns3::Packet>', 'ns3::Ipv4Address', 'ns3::Ipv4Address', 'unsigned char', 'ns3::Ptr<ns3::Ipv4Route>', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'], parent=root_module['ns3::CallbackImplBase'])
module.add_class('CallbackImpl', template_parameters=['void', 'ns3::Ptr<ns3::Packet>', 'ns3::Ipv4Header', 'unsigned short', 'ns3::Ptr<ns3::Ipv4Interface>', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'], parent=root_module['ns3::CallbackImplBase'])
module.add_class('CallbackImpl', import_from_module='ns.core', template_parameters=['void', 'ns3::Ptr<ns3::Packet>', 'ns3::Ipv6Address', 'ns3::Ipv6Address', 'unsigned char', 'ns3::Ptr<ns3::Ipv6Route>', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'], parent=root_module['ns3::CallbackImplBase'])
module.add_class('CallbackImpl', template_parameters=['void', 'ns3::Ptr<ns3::Packet>', 'ns3::Ipv6Header', 'unsigned short', 'ns3::Ptr<ns3::Ipv6Interface>', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'], parent=root_module['ns3::CallbackImplBase'])
module.add_class('CallbackImpl', import_from_module='ns.core', template_parameters=['void', 'ns3::Ptr<ns3::Socket>', 'const ns3::Address &', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'], parent=root_module['ns3::CallbackImplBase'])
module.add_class('CallbackImpl', import_from_module='ns.core', template_parameters=['void', 'ns3::Ptr<ns3::Socket>', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'], parent=root_module['ns3::CallbackImplBase'])
module.add_class('CallbackImpl', import_from_module='ns.core', template_parameters=['void', 'ns3::Ptr<ns3::Socket>', 'unsigned int', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'], parent=root_module['ns3::CallbackImplBase'])
module.add_class('CallbackImpl', import_from_module='ns.core', template_parameters=['void', 'ns3::SequenceNumber<unsigned int, int>', 'ns3::SequenceNumber<unsigned int, int>', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'], parent=root_module['ns3::CallbackImplBase'])
module.add_class('CallbackImpl', import_from_module='ns.core', template_parameters=['void', 'ns3::TcpSocket::TcpStates_t', 'ns3::TcpSocket::TcpStates_t', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'], parent=root_module['ns3::CallbackImplBase'])
module.add_class('CallbackImpl', import_from_module='ns.core', template_parameters=['void', 'ns3::TcpSocketState::EcnState_t', 'ns3::TcpSocketState::EcnState_t', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'], parent=root_module['ns3::CallbackImplBase'])
module.add_class('CallbackImpl', import_from_module='ns.core', template_parameters=['void', 'ns3::TcpSocketState::TcpCongState_t', 'ns3::TcpSocketState::TcpCongState_t', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'], parent=root_module['ns3::CallbackImplBase'])
module.add_class('CallbackImpl', import_from_module='ns.core', template_parameters=['void', 'ns3::Time', 'ns3::Time', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'], parent=root_module['ns3::CallbackImplBase'])
module.add_class('CallbackImpl', import_from_module='ns.core', template_parameters=['void', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'], parent=root_module['ns3::CallbackImplBase'])
module.add_class('CallbackImpl', import_from_module='ns.core', template_parameters=['void', 'unsigned int', 'unsigned int', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'], parent=root_module['ns3::CallbackImplBase'])
module.add_class('Icmpv4L4Protocol', parent=root_module['ns3::IpL4Protocol'])
module.add_class('Icmpv6L4Protocol', parent=root_module['ns3::IpL4Protocol'])
module.add_class('Ipv4GlobalRouting', parent=root_module['ns3::Ipv4RoutingProtocol'])
module.add_class('Ipv4ListRouting', parent=root_module['ns3::Ipv4RoutingProtocol'])
module.add_class('Ipv4PacketProbe', parent=root_module['ns3::Probe'])
module.add_class('Ipv6ExtensionLooseRouting', parent=root_module['ns3::Ipv6ExtensionRouting'])
module.add_class('Ipv6ListRouting', parent=root_module['ns3::Ipv6RoutingProtocol'])
module.add_class('Ipv6PacketProbe', parent=root_module['ns3::Probe'])
module.add_class('LoopbackNetDevice', parent=root_module['ns3::NetDevice'])
module.add_class('QueueDiscItem', import_from_module='ns.network', parent=root_module['ns3::QueueItem'])
module.add_class('ArpQueueDiscItem', parent=root_module['ns3::QueueDiscItem'])
module.add_class('Ipv4QueueDiscItem', parent=root_module['ns3::QueueDiscItem'])
module.add_class('Ipv6QueueDiscItem', parent=root_module['ns3::QueueDiscItem'])
module.add_container('std::list< ns3::Ipv4EndPoint * >', 'ns3::Ipv4EndPoint *', container_type=u'list')
module.add_container('ns3::Ipv4EndPointDemux::EndPoints', 'ns3::Ipv4EndPoint *', container_type=u'list')
module.add_container('std::vector< unsigned int >', 'unsigned int', container_type=u'vector')
module.add_container('std::vector< bool >', 'bool', container_type=u'vector')
module.add_container('std::list< ns3::Ipv6EndPoint * >', 'ns3::Ipv6EndPoint *', container_type=u'list')
module.add_container('ns3::Ipv6EndPointDemux::EndPoints', 'ns3::Ipv6EndPoint *', container_type=u'list')
module.add_container('std::list< ns3::RipRte >', 'ns3::RipRte', container_type=u'list')
module.add_container('std::list< ns3::RipNgRte >', 'ns3::RipNgRte', container_type=u'list')
module.add_container('std::vector< ns3::Ipv6Address >', 'ns3::Ipv6Address', container_type=u'vector')
module.add_container('std::list< ns3::Ptr< ns3::TcpOption const > >', 'ns3::Ptr< ns3::TcpOption const >', container_type=u'list')
module.add_container('std::list< std::pair< ns3::SequenceNumber< unsigned int, int >, ns3::SequenceNumber< unsigned int, int > > >', 'std::pair< ns3::SequenceNumber< unsigned int, int >, ns3::SequenceNumber< unsigned int, int > >', container_type=u'list')
module.add_container('ns3::TcpOptionSack::SackList', 'std::pair< ns3::SequenceNumber< unsigned int, int >, ns3::SequenceNumber< unsigned int, int > >', container_type=u'list')
module.add_container('std::list< std::pair< ns3::Ptr< ns3::Packet >, ns3::Ipv4Header > >', 'std::pair< ns3::Ptr< ns3::Packet >, ns3::Ipv4Header >', container_type=u'list')
module.add_container('std::list< ns3::ArpCache::Entry * >', 'ns3::ArpCache::Entry *', container_type=u'list')
module.add_container('std::map< unsigned int, unsigned int >', ('unsigned int', 'unsigned int'), container_type=u'map')
module.add_container('std::list< std::pair< ns3::Ptr< ns3::Packet >, ns3::Ipv6Header > >', 'std::pair< ns3::Ptr< ns3::Packet >, ns3::Ipv6Header >', container_type=u'list')
module.add_container('std::list< ns3::NdiscCache::Entry * >', 'ns3::NdiscCache::Entry *', container_type=u'list')
module.add_container('std::set< unsigned int >', 'unsigned int', container_type=u'set')
typehandlers.add_type_alias(u'void ( * ) ( ns3::TcpSocketState::TcpCongState_t const, ns3::TcpSocketState::TcpCongState_t const )', u'ns3::TcpCongStatesTracedValueCallback')
typehandlers.add_type_alias(u'void ( * ) ( ns3::TcpSocketState::TcpCongState_t const, ns3::TcpSocketState::TcpCongState_t const )*', u'ns3::TcpCongStatesTracedValueCallback*')
typehandlers.add_type_alias(u'void ( * ) ( ns3::TcpSocketState::TcpCongState_t const, ns3::TcpSocketState::TcpCongState_t const )&', u'ns3::TcpCongStatesTracedValueCallback&')
typehandlers.add_type_alias(u'void ( * ) ( ns3::TcpSocketState::EcnState_t const, ns3::TcpSocketState::EcnState_t const )', u'ns3::EcnStatesTracedValueCallback')
typehandlers.add_type_alias(u'void ( * ) ( ns3::TcpSocketState::EcnState_t const, ns3::TcpSocketState::EcnState_t const )*', u'ns3::EcnStatesTracedValueCallback*')
typehandlers.add_type_alias(u'void ( * ) ( ns3::TcpSocketState::EcnState_t const, ns3::TcpSocketState::EcnState_t const )&', u'ns3::EcnStatesTracedValueCallback&')
typehandlers.add_type_alias(u'void ( * ) ( ns3::TcpSocket::TcpStates_t const, ns3::TcpSocket::TcpStates_t const )', u'ns3::TcpStatesTracedValueCallback')
typehandlers.add_type_alias(u'void ( * ) ( ns3::TcpSocket::TcpStates_t const, ns3::TcpSocket::TcpStates_t const )*', u'ns3::TcpStatesTracedValueCallback*')
typehandlers.add_type_alias(u'void ( * ) ( ns3::TcpSocket::TcpStates_t const, ns3::TcpSocket::TcpStates_t const )&', u'ns3::TcpStatesTracedValueCallback&')
typehandlers.add_type_alias(u'ns3::SequenceNumber< unsigned int, int >', u'ns3::SequenceNumber32')
typehandlers.add_type_alias(u'ns3::SequenceNumber< unsigned int, int >*', u'ns3::SequenceNumber32*')
typehandlers.add_type_alias(u'ns3::SequenceNumber< unsigned int, int >&', u'ns3::SequenceNumber32&')
typehandlers.add_type_alias(u'ns3::SequenceNumber< unsigned short, short >', u'ns3::SequenceNumber16')
typehandlers.add_type_alias(u'ns3::SequenceNumber< unsigned short, short >*', u'ns3::SequenceNumber16*')
typehandlers.add_type_alias(u'ns3::SequenceNumber< unsigned short, short >&', u'ns3::SequenceNumber16&')
typehandlers.add_type_alias(u'ns3::SequenceNumber< unsigned char, signed char >', u'ns3::SequenceNumber8')
typehandlers.add_type_alias(u'ns3::SequenceNumber< unsigned char, signed char >*', u'ns3::SequenceNumber8*')
typehandlers.add_type_alias(u'ns3::SequenceNumber< unsigned char, signed char >&', u'ns3::SequenceNumber8&')
nested_module = module.add_cpp_namespace('FatalImpl')
register_types_ns3_FatalImpl(nested_module)
nested_module = module.add_cpp_namespace('Hash')
register_types_ns3_Hash(nested_module)
nested_module = module.add_cpp_namespace('TracedValueCallback')
register_types_ns3_TracedValueCallback(nested_module)
nested_module = module.add_cpp_namespace('internal')
register_types_ns3_internal(nested_module) |
def _fake_quantize_per_channel_affine_grad_reference(dY, X, per_channel_scale, per_channel_zero_point, axis, quant_min, quant_max):
    """Reference gradient for per-channel fake quantization.

    The upstream gradient ``dY`` passes through only at positions whose
    quantized value lands inside ``[quant_min, quant_max]``; everywhere else
    the gradient is zero (straight-through estimator with clamping).
    """
    # Bring the quantization axis to the front so channels index dim 0.
    X, permute_axis_list = _permute_to_axis_zero(X, axis)
    Xq = torch.zeros_like(X)
    for ch in range(X.size()[0]):
        # Quantize channel ch with its own scale/zero-point.
        Xq[ch] = torch.round((X[ch] * (1.0 / per_channel_scale[ch])) + per_channel_zero_point[ch])
    # Undo the axis permutation so the mask aligns with dY.
    Xq = Xq.permute(tuple(permute_axis_list))
    in_range = (Xq >= quant_min) * (Xq <= quant_max)
    grad = torch.zeros_like(dY)
    grad[in_range] = dY[in_range]
    return grad
class AlgebraicNumber(AlgebraicNumber_base):
    """An algebraic complex number: an element of the field QQbar.

    Each number carries an exact symbolic descriptor (``self._descr``) plus a
    complex-interval approximation (``self._value``); comparisons consult the
    intervals first and fall back to exact computation only when the
    intervals overlap.
    """

    def __init__(self, x):
        """Wrap ``x`` as an element of the algebraic closure QQbar."""
        AlgebraicNumber_base.__init__(self, QQbar, x)

    def __reduce__(self):
        # Pickle only the exact descriptor; the interval cache is rebuilt.
        return (AlgebraicNumber, (self._descr,))

    def _richcmp_(self, other, op):
        """Rich comparison of two algebraic numbers.

        Equality/inequality is decided exactly; the ordering used for the
        other operators compares real parts first and breaks ties on the
        imaginary parts (lexicographic on (re, im)).
        """
        # Identity shortcut: the same object compares equal to itself.
        if (self is other):
            return rich_to_bool(op, 0)
        sd = self._descr
        od = other._descr
        # Two exact rationals: compare their values directly.
        if (isinstance(sd, ANRational) and isinstance(od, ANRational)):
            return richcmp(sd._value, od._value, op)
        # Disjoint real-part intervals decide every comparison operator.
        ri1 = self._value.real()
        ri2 = other._value.real()
        if (not ri1.overlaps(ri2)):
            return ri1._richcmp_(ri2, op)
        if ((op == op_EQ) or (op == op_NE)):
            # Disjoint imaginary-part intervals => definitely unequal.
            if (not self._value.imag().overlaps(other._value.imag())):
                return (op == op_NE)
            # Comparison against an exact zero reduces to truth-testing.
            if (isinstance(sd, ANRational) and (not sd._value)):
                return (bool(other) == (op == op_NE))
            elif (isinstance(od, ANRational) and (not od._value)):
                return (bool(self) == (op == op_NE))
            elif (isinstance(sd, ANExtensionElement) and isinstance(od, ANExtensionElement) and (sd._generator is od._generator)):
                # Same number-field generator: compare the exact elements.
                return ((sd._value == od._value) if (op == op_EQ) else (sd._value != od._value))
        # Candidates with the same minimal polynomial and overlapping |im|
        # (e.g. conjugates): use the dedicated exact comparison helper.
        ci1 = self._value.imag().abs()
        ci2 = other._value.imag().abs()
        if (ci1.overlaps(ci2) and (self.minpoly() == other.minpoly())):
            c = cmp_elements_with_same_minpoly(self, other, self.minpoly())
            if (c is not None):
                return rich_to_bool(op, c)
        # Last resort: exact lexicographic comparison (real, then imaginary).
        srp = self.real()
        orp = other.real()
        if (srp != orp):
            return richcmp_not_equal(srp, orp, op)
        return richcmp(self.imag(), other.imag(), op)

    def _mpfr_(self, field):
        """Convert to an MPFR real field via AA (self must be real)."""
        return AA(self)._mpfr_(field)

    def __float__(self):
        """``float()`` conversion through the real field AA (must be real)."""
        return AA(self).__float__()

    def __complex__(self):
        """``complex()`` conversion via CC."""
        return CC(self).__complex__()

    def _complex_double_(self, cdf):
        """Conversion to a complex double field (through CC)."""
        return cdf(CC(self))

    def _interval_fast(self, prec):
        """A quick complex-interval approximation at ``prec`` bits."""
        return self.interval_fast(ComplexIntervalField(prec))

    def _integer_(self, ZZ=None):
        """Conversion to ZZ through AA (self must be a real integer)."""
        return AA(self)._integer_(ZZ)

    def _rational_(self):
        """Conversion to QQ through AA (self must be a real rational)."""
        return AA(self)._rational_()

    def real(self):
        """Real part, as an AlgebraicReal."""
        return AlgebraicReal(self._descr.real(self))

    def imag(self):
        """Imaginary part, as an AlgebraicReal."""
        return AlgebraicReal(self._descr.imag(self))

    def conjugate(self):
        """Complex conjugate, as an AlgebraicNumber."""
        return AlgebraicNumber(self._descr.conjugate(self))

    def norm(self):
        """Norm of self (delegated to the descriptor), as an AlgebraicReal."""
        return AlgebraicReal(self._descr.norm(self))

    def interval_exact(self, field):
        """Interval in ``field`` built from the componentwise exact intervals
        of the real and imaginary parts.

        Raises ValueError unless ``field`` is a ComplexIntervalField.
        """
        if (not isinstance(field, sage.rings.abc.ComplexIntervalField)):
            raise ValueError('AlgebraicNumber interval_exact requires a ComplexIntervalField')
        rfld = field._real_field()
        re = self.real().interval_exact(rfld)
        im = self.imag().interval_exact(rfld)
        return field(re, im)

    def _complex_mpfr_field_(self, field):
        # Coercion hook: same as complex_number().
        return self.complex_number(field)

    def complex_number(self, field):
        """Approximate self in the complex floating-point ``field``."""
        v = self.interval(ComplexIntervalField(field.prec()))
        return field(v)

    def complex_exact(self, field):
        """Round self to ``field`` using exact componentwise rounding of the
        real and imaginary parts."""
        rfld = field._real_field()
        re = self.real().real_exact(rfld)
        im = self.imag().real_exact(rfld)
        return field(re, im)

    def multiplicative_order(self):
        """Order of self in the multiplicative group; infinity unless self is
        a root of unity."""
        # Cheap interval test first: if the interval norm cannot be 1, the
        # order is certainly infinite.
        if (1 not in CIF(self).norm()):
            return infinity.infinity
        if (self.norm() != 1):
            return infinity.infinity
        # NOTE(review): is_cyclotomic(True) appears to return the order n (or
        # False) when the minimal polynomial is cyclotomic -- confirm.
        d = self.minpoly().is_cyclotomic(True)
        return (d if d else infinity.infinity)

    def rational_argument(self):
        """Delegate to the exact descriptor (after exactifying self)."""
        self.exactify()
        return self._descr.rational_argument(self)

    def _pow_(self, other):
        # Only the trivial base 1 is supported for a generic algebraic
        # exponent; anything else is a type error for ^.
        if (self == 1):
            return self
        raise TypeError("unsupported operand parent(s) for ^: '{0}' and '{0}'".format(self.parent()))
def id2label(image):
    """Remap every pixel's label ``id`` to its ``trainId``.

    Relies on the module-level ``labels`` table (entries with ``id`` and
    ``trainId`` attributes -- presumably a Cityscapes-style label list;
    confirm against the import). Pixels whose id is missing from the table
    are left uninitialized.
    """
    src = np.array(image)
    remapped = np.empty(src.shape, dtype=src.dtype)
    for entry in labels:
        remapped[src == entry.id] = entry.trainId
    return Image.fromarray(remapped)
class ShapeSpec(namedtuple('_ShapeSpec', ['channels', 'height', 'width', 'stride'])):
    """Lightweight tensor-shape descriptor.

    All four fields are keyword-only and default to ``None`` (meaning
    "unspecified"); the underlying namedtuple keeps it hashable and cheap.
    """

    def __new__(cls, *, channels=None, height=None, width=None, stride=None):
        """Build the underlying tuple from keyword arguments only."""
        field_values = (channels, height, width, stride)
        return super().__new__(cls, *field_values)
class RE25:
    """RE25 benchmark: coil compression spring design.

    Two objectives over three variables: f[0] is the spring volume and
    f[1] is the aggregate violation of the six original constraints.
    """

    def __init__(self):
        self.problem_name = 'RE25'
        self.n_objectives = 2
        self.n_variables = 3
        self.n_constraints = 0
        self.n_original_constraints = 6
        # Bounds: x0 = number of coils, x1 = mean coil diameter,
        # x2 = wire diameter (snapped to the discrete catalogue below).
        self.ubound = np.zeros(self.n_variables)
        self.lbound = np.zeros(self.n_variables)
        self.lbound[0] = 1
        self.lbound[1] = 0.6
        self.lbound[2] = 0.09
        self.ubound[0] = 70
        self.ubound[1] = 3
        self.ubound[2] = 0.5
        # Commercially available wire diameters.
        self.feasible_vals = np.array([0.009, 0.0095, 0.0104, 0.0118, 0.0128, 0.0132, 0.014, 0.015, 0.0162, 0.0173, 0.018, 0.02, 0.023, 0.025, 0.028, 0.032, 0.035, 0.041, 0.047, 0.054, 0.063, 0.072, 0.08, 0.092, 0.105, 0.12, 0.135, 0.148, 0.162, 0.177, 0.192, 0.207, 0.225, 0.244, 0.263, 0.283, 0.307, 0.331, 0.362, 0.394, 0.4375, 0.5])

    def evaluate(self, x):
        """Evaluate decision vector ``x`` -> np.array([volume, violation])."""
        f = np.zeros(self.n_objectives)
        g = np.zeros(self.n_original_constraints)
        n_coils = np.round(x[0])          # integer number of coils
        coil_d = x[1]                     # mean coil diameter
        # Snap the wire diameter to the nearest catalogue value.
        nearest = np.abs(np.asarray(self.feasible_vals) - x[2]).argmin()
        wire_d = self.feasible_vals[nearest]
        # Objective 0: spring wire volume.
        f[0] = np.pi * np.pi * coil_d * wire_d * wire_d * (n_coils + 2) / 4.0
        # Wahl stress correction factor.
        Cf = (4.0 * (coil_d / wire_d) - 1) / (4.0 * (coil_d / wire_d) - 4) + (0.615 * wire_d) / coil_d
        Fmax = 1000.0                     # maximum working load
        S = 189000.0                      # allowable shear stress
        G = 11.5 * 1000000.0              # shear modulus
        # Spring stiffness.
        K = G * wire_d * wire_d * wire_d * wire_d / (8 * n_coils * coil_d * coil_d * coil_d)
        lmax = 14.0                       # maximum free length
        lf = Fmax / K + 1.05 * (n_coils + 2) * wire_d
        dmin = 0.2                        # kept from the original formulation (unused)
        Dmax = 3                          # kept from the original formulation (unused)
        Fp = 300.0                        # preload force
        sigmaP = Fp / K
        sigmaPM = 6
        sigmaW = 1.25
        # Original inequality constraints, written so g_i >= 0 is feasible.
        g[0] = -(8 * Cf * Fmax * coil_d / (np.pi * wire_d * wire_d * wire_d)) + S
        g[1] = -lf + lmax
        g[2] = -3 + coil_d / wire_d
        g[3] = -sigmaP + sigmaPM
        g[4] = -sigmaP - (Fmax - Fp) / K - 1.05 * (n_coils + 2) * wire_d + lf
        g[5] = sigmaW - (Fmax - Fp) / K
        # Only violations contribute: g_i < 0 adds -g_i, feasible adds 0.
        g = np.where(g < 0, -g, 0)
        f[1] = g[0] + g[1] + g[2] + g[3] + g[4] + g[5]
        return f
class TryFinallyStatNode(StatNode):
    """A try/finally statement node.

    Code generation duplicates the finally clause onto every exit path of
    the body (normal fall-through, error, return, break, continue): the body
    is generated with a fresh set of jump labels, and for each label the
    body actually used a copy of the finally clause is emitted before
    jumping on to the corresponding outer label.
    """
    child_attrs = ['body', 'finally_clause', 'finally_except_clause']
    preserve_exception = 1
    # Set to False when the error case needs no finally treatment (the error
    # label is then left pointing at the enclosing handler).
    handle_error_case = True
    # Return type of the enclosing function (None if it returns no value);
    # needed to save/restore the return value around the finally clause.
    func_return_type = None
    # Deep copy of finally_clause that is generated on the exception path.
    finally_except_clause = None
    is_try_finally_in_nogil = False
    in_generator = False

    def create_analysed(pos, env, body, finally_clause):
        # NOTE(review): defined without `self` -- used as a plain factory.
        node = TryFinallyStatNode(pos, body=body, finally_clause=finally_clause)
        return node

    def analyse_declarations(self, env):
        self.body.analyse_declarations(env)
        # Take a pristine deep copy for the exception path before analysis
        # mutates the main clause.
        self.finally_except_clause = copy.deepcopy(self.finally_clause)
        self.finally_except_clause.analyse_declarations(env)
        self.finally_clause.analyse_declarations(env)

    def analyse_expressions(self, env):
        self.body = self.body.analyse_expressions(env)
        self.finally_clause = self.finally_clause.analyse_expressions(env)
        self.finally_except_clause = self.finally_except_clause.analyse_expressions(env)
        if (env.return_type and (not env.return_type.is_void)):
            self.func_return_type = env.return_type
        return self

    nogil_check = Node.gil_error
    gil_message = 'Try-finally statement'

    def generate_execution_code(self, code):
        """Emit the C code for the whole try/finally construct."""
        code.mark_pos(self.pos)
        code.putln('/*try:*/ {')
        # Give the body a fresh label set so we can see which exits it used.
        old_error_label = code.error_label
        old_labels = code.all_new_labels()
        new_labels = code.get_all_labels()
        new_error_label = code.error_label
        if (not self.handle_error_case):
            code.error_label = old_error_label
        catch_label = code.new_label()
        was_in_try_finally = code.funcstate.in_try_finally
        code.funcstate.in_try_finally = 1
        self.body.generate_execution_code(code)
        code.funcstate.in_try_finally = was_in_try_finally
        code.putln('}')
        temps_to_clean_up = code.funcstate.all_free_managed_temps()
        code.mark_pos(self.finally_clause.pos)
        code.putln('/*finally:*/ {')
        code.set_all_labels(old_labels)

        def fresh_finally_clause(_next=[self.finally_clause]):
            # Dispense copies of the finally clause: the first request gets
            # the original node, every later one a deep copy (the mutable
            # default argument deliberately carries the state).
            node = _next[0]
            node_copy = copy.deepcopy(node)
            if (node is self.finally_clause):
                _next[0] = node_copy
            else:
                node = node_copy
            return node
        preserve_error = (self.preserve_exception and code.label_used(new_error_label))
        needs_success_cleanup = (not self.finally_clause.is_terminator)
        if (not self.body.is_terminator):
            # Normal fall-through exit.
            code.putln('/*normal exit:*/{')
            fresh_finally_clause().generate_execution_code(code)
            if (not self.finally_clause.is_terminator):
                code.put_goto(catch_label)
            code.putln('}')
        if preserve_error:
            # Exception exit: save the propagating exception, run the copied
            # finally clause, then restore and re-raise.
            code.put_label(new_error_label)
            code.putln('/*exception exit:*/{')
            if (not self.in_generator):
                code.putln('__Pyx_PyThreadState_declare')
            if self.is_try_finally_in_nogil:
                code.declare_gilstate()
            if needs_success_cleanup:
                # Temps for the saved error position (line/cline/filename).
                exc_lineno_cnames = tuple([code.funcstate.allocate_temp(PyrexTypes.c_int_type, manage_ref=False) for _ in range(2)])
                exc_filename_cname = code.funcstate.allocate_temp(PyrexTypes.CPtrType(PyrexTypes.c_const_type(PyrexTypes.c_char_type)), manage_ref=False)
            else:
                exc_lineno_cnames = exc_filename_cname = None
            # Six temps: current exception triple + saved exc_info state.
            exc_vars = tuple([code.funcstate.allocate_temp(py_object_type, manage_ref=False) for _ in range(6)])
            self.put_error_catcher(code, temps_to_clean_up, exc_vars, exc_lineno_cnames, exc_filename_cname)
            finally_old_labels = code.all_new_labels()
            code.putln('{')
            old_exc_vars = code.funcstate.exc_vars
            code.funcstate.exc_vars = exc_vars[:3]
            self.finally_except_clause.generate_execution_code(code)
            code.funcstate.exc_vars = old_exc_vars
            code.putln('}')
            if needs_success_cleanup:
                self.put_error_uncatcher(code, exc_vars, exc_lineno_cnames, exc_filename_cname)
                if exc_lineno_cnames:
                    for cname in exc_lineno_cnames:
                        code.funcstate.release_temp(cname)
                if exc_filename_cname:
                    code.funcstate.release_temp(exc_filename_cname)
                code.put_goto(old_error_label)
            # Non-error exits taken inside the finally clause itself must
            # drop the saved exception state first.
            for (new_label, old_label) in zip(code.get_all_labels(), finally_old_labels):
                if (not code.label_used(new_label)):
                    continue
                code.put_label(new_label)
                self.put_error_cleaner(code, exc_vars)
                code.put_goto(old_label)
            for cname in exc_vars:
                code.funcstate.release_temp(cname)
            code.putln('}')
        code.set_all_labels(old_labels)
        return_label = code.return_label
        exc_vars = ()
        # One finally-clause copy per remaining used exit label
        # (return/break/continue, plus error if not handled above).
        for (i, (new_label, old_label)) in enumerate(zip(new_labels, old_labels)):
            if (not code.label_used(new_label)):
                continue
            if ((new_label == new_error_label) and preserve_error):
                continue
            code.putln(('%s: {' % new_label))
            ret_temp = None
            if (old_label == return_label):
                # In a generator, a pending StopIteration/return also needs
                # the exception state saved around the finally clause.
                if self.in_generator:
                    exc_vars = tuple([code.funcstate.allocate_temp(py_object_type, manage_ref=False) for _ in range(6)])
                    self.put_error_catcher(code, [], exc_vars)
                if (not self.finally_clause.is_terminator):
                    # Park the return value in a temp so the finally clause
                    # cannot clobber it.
                    if (self.func_return_type and (not self.is_try_finally_in_nogil) and (not isinstance(self.finally_clause, GILExitNode))):
                        ret_temp = code.funcstate.allocate_temp(self.func_return_type, manage_ref=False)
                        code.putln(('%s = %s;' % (ret_temp, Naming.retval_cname)))
                        if self.func_return_type.is_pyobject:
                            code.putln(('%s = 0;' % Naming.retval_cname))
            fresh_finally_clause().generate_execution_code(code)
            if (old_label == return_label):
                if ret_temp:
                    code.putln(('%s = %s;' % (Naming.retval_cname, ret_temp)))
                    if self.func_return_type.is_pyobject:
                        code.putln(('%s = 0;' % ret_temp))
                    code.funcstate.release_temp(ret_temp)
                if self.in_generator:
                    self.put_error_uncatcher(code, exc_vars)
                    for cname in exc_vars:
                        code.funcstate.release_temp(cname)
            if (not self.finally_clause.is_terminator):
                code.put_goto(old_label)
            code.putln('}')
        code.put_label(catch_label)
        code.putln('}')

    def generate_function_definitions(self, env, code):
        self.body.generate_function_definitions(env, code)
        self.finally_clause.generate_function_definitions(env, code)
        if self.finally_except_clause:
            self.finally_except_clause.generate_function_definitions(env, code)

    def put_error_catcher(self, code, temps_to_clean_up, exc_vars, exc_lineno_cnames=None, exc_filename_cname=None):
        """Emit code that stashes the propagating exception into ``exc_vars``
        (and the error position into the optional cnames) so that the finally
        clause can run with a clean error state."""
        code.globalstate.use_utility_code(restore_exception_utility_code)
        code.globalstate.use_utility_code(get_exception_utility_code)
        code.globalstate.use_utility_code(swap_exception_utility_code)
        if self.is_try_finally_in_nogil:
            code.put_ensure_gil(declare_gilstate=False)
        code.putln('__Pyx_PyThreadState_assign')
        code.putln(' '.join([('%s = 0;' % var) for var in exc_vars]))
        for (temp_name, type) in temps_to_clean_up:
            code.put_xdecref_clear(temp_name, type)
        # exc_vars[3:] receive the previous exc_info state (Py3 only).
        code.putln(('if (PY_MAJOR_VERSION >= 3) __Pyx_ExceptionSwap(&%s, &%s, &%s);' % exc_vars[3:]))
        code.putln(('if ((PY_MAJOR_VERSION < 3) || unlikely(__Pyx_GetException(&%s, &%s, &%s) < 0)) __Pyx_ErrFetch(&%s, &%s, &%s);' % (exc_vars[:3] * 2)))
        for var in exc_vars:
            code.put_xgotref(var)
        if exc_lineno_cnames:
            code.putln(('%s = %s; %s = %s; %s = %s;' % (exc_lineno_cnames[0], Naming.lineno_cname, exc_lineno_cnames[1], Naming.clineno_cname, exc_filename_cname, Naming.filename_cname)))
        if self.is_try_finally_in_nogil:
            code.put_release_ensured_gil()

    def put_error_uncatcher(self, code, exc_vars, exc_lineno_cnames=None, exc_filename_cname=None):
        """Emit code that restores the exception saved by put_error_catcher
        so it continues to propagate after the finally clause."""
        code.globalstate.use_utility_code(restore_exception_utility_code)
        code.globalstate.use_utility_code(reset_exception_utility_code)
        if self.is_try_finally_in_nogil:
            code.put_ensure_gil(declare_gilstate=False)
        code.putln('if (PY_MAJOR_VERSION >= 3) {')
        for var in exc_vars[3:]:
            code.put_xgiveref(var)
        code.putln(('__Pyx_ExceptionReset(%s, %s, %s);' % exc_vars[3:]))
        code.putln('}')
        for var in exc_vars[:3]:
            code.put_xgiveref(var)
        code.putln(('__Pyx_ErrRestore(%s, %s, %s);' % exc_vars[:3]))
        if self.is_try_finally_in_nogil:
            code.put_release_ensured_gil()
        code.putln(' '.join([('%s = 0;' % var) for var in exc_vars]))
        if exc_lineno_cnames:
            code.putln(('%s = %s; %s = %s; %s = %s;' % (Naming.lineno_cname, exc_lineno_cnames[0], Naming.clineno_cname, exc_lineno_cnames[1], Naming.filename_cname, exc_filename_cname)))

    def put_error_cleaner(self, code, exc_vars):
        """Emit code that discards the saved exception state (used on
        non-error exits taken from inside the exception-path finally copy)."""
        code.globalstate.use_utility_code(reset_exception_utility_code)
        if self.is_try_finally_in_nogil:
            code.put_ensure_gil(declare_gilstate=False)
        code.putln('if (PY_MAJOR_VERSION >= 3) {')
        for var in exc_vars[3:]:
            code.put_xgiveref(var)
        code.putln(('__Pyx_ExceptionReset(%s, %s, %s);' % exc_vars[3:]))
        code.putln('}')
        for var in exc_vars[:3]:
            code.put_xdecref_clear(var, py_object_type)
        if self.is_try_finally_in_nogil:
            code.put_release_ensured_gil()
        code.putln((' '.join((['%s = 0;'] * 3)) % exc_vars[3:]))

    def annotate(self, code):
        self.body.annotate(code)
        self.finally_clause.annotate(code)
class TPredicate(object):
    """SWIG proxy that forwards every operation to the native _snap.TPredicate."""

    thisown = _swig_property((lambda x: x.this.own()), (lambda x, v: x.this.own(v)), doc='The membership flag')
    __repr__ = _swig_repr

    def __init__(self, *args):
        """Allocate the underlying C++ TPredicate and bind it to this proxy."""
        _snap.TPredicate_swiginit(self, _snap.new_TPredicate(*args))

    def GetVariables(self, Variables):
        """Forward to _snap.TPredicate_GetVariables."""
        return _snap.TPredicate_GetVariables(self, Variables)

    def SetIntVal(self, VarName, VarVal):
        """Forward to _snap.TPredicate_SetIntVal."""
        return _snap.TPredicate_SetIntVal(self, VarName, VarVal)

    def SetFltVal(self, VarName, VarVal):
        """Forward to _snap.TPredicate_SetFltVal."""
        return _snap.TPredicate_SetFltVal(self, VarName, VarVal)

    def SetStrVal(self, VarName, VarVal):
        """Forward to _snap.TPredicate_SetStrVal."""
        return _snap.TPredicate_SetStrVal(self, VarName, VarVal)

    def Eval(self):
        """Forward to _snap.TPredicate_Eval."""
        return _snap.TPredicate_Eval(self)

    def EvalAtomicPredicate(self, Atom):
        """Forward to _snap.TPredicate_EvalAtomicPredicate."""
        return _snap.TPredicate_EvalAtomicPredicate(self, Atom)

    @staticmethod
    def EvalStrAtom(Val1, Val2, Cmp):
        """Forward to _snap.TPredicate_EvalStrAtom (no instance required)."""
        return _snap.TPredicate_EvalStrAtom(Val1, Val2, Cmp)

    __swig_destroy__ = _snap.delete_TPredicate
def iterator(model, dataloader, **kwargs):
    """Run ``model`` over ``dataloader`` in eval mode without gradients.

    Yields ``(step, inputs_np, outputs_np)`` per batch, where both dicts hold
    numpy arrays (non-tensor input values are passed through unchanged).
    Tensor inputs are moved to the module-level ``device`` -- assumed to be a
    torch device defined elsewhere in this file; confirm against the caller.
    """
    model.eval()
    with torch.no_grad():
        for step, batch in enumerate(dataloader):
            # Ship only the tensor-valued entries to the target device.
            batch_on_device = {
                key: val.detach().to(device, non_blocking=True)
                for key, val in batch.items()
                if isinstance(val, torch.Tensor)
            }
            model_out = model(batch_on_device, **kwargs)
            batch_np = {key: (val.numpy() if isinstance(val, torch.Tensor) else val) for key, val in batch.items()}
            out_np = {key: val.detach().cpu().numpy() for key, val in model_out.items() if isinstance(val, torch.Tensor)}
            yield step, batch_np, out_np
class TestConcatenateTrainingData(unittest.TestCase):
    """Tests for utils.concatenate_training_data on two toy sequences."""

    def setUp(self):
        # 3 frames of zeros and 4 frames of ones (2 features each), with
        # matching per-frame cluster labels (one list, one ndarray).
        self.train_sequences = [np.zeros((3, 2)), np.ones((4, 2))]
        self.train_cluster_ids = [['a', 'b', 'a'], np.array(['a', 'b', 'c', 'b'])]

    def test_noenforce_noshuffle(self):
        sequence, cluster_id = utils.concatenate_training_data(
            self.train_sequences, self.train_cluster_ids, False, False)
        self.assertListEqual([0.0] * 6 + [1.0] * 8, sequence.flatten().tolist())
        self.assertListEqual(['a', 'b', 'a', 'a', 'b', 'c', 'b'], cluster_id)

    def test_enforce_noshuffle(self):
        sequence, cluster_id = utils.concatenate_training_data(
            self.train_sequences, self.train_cluster_ids, True, False)
        self.assertListEqual([0.0] * 6 + [1.0] * 8, sequence.flatten().tolist())
        # Uniqueness enforcement keeps all 7 frames but renames clusters
        # across sequences: 5 distinct ids instead of 3.
        self.assertEqual(7, len(cluster_id))
        self.assertEqual(5, len(set(cluster_id)))

    def test_noenforce_shuffle(self):
        sequence, cluster_id = utils.concatenate_training_data(
            self.train_sequences, self.train_cluster_ids, False, True)
        # Shuffling permutes whole sequences, so exactly one of the two
        # possible orderings must come out.
        try:
            self.assertListEqual([0.0] * 6 + [1.0] * 8, sequence.flatten().tolist())
            self.assertListEqual(['a', 'b', 'a', 'a', 'b', 'c', 'b'], cluster_id)
        except AssertionError:
            self.assertListEqual([1.0] * 8 + [0.0] * 6, sequence.flatten().tolist())
            self.assertListEqual(['a', 'b', 'c', 'b', 'a', 'b', 'a'], cluster_id)
def test(model, device, test_loader, epoch):
    """Evaluate `model` on `test_loader` and print average NLL loss and accuracy.

    Loss is summed per batch (reduction='sum') and normalized by the dataset size
    at the end; predictions are argmax over log-softmax outputs.
    """
    model.eval()
    test_loss = 0
    correct = 0
    with torch.no_grad():
        for data, target in test_loader:
            data = data.to(device)
            target = target.to(device)
            logits = model(data)
            log_probs = torch.nn.functional.log_softmax(logits, dim=1)
            test_loss += torch.nn.functional.nll_loss(log_probs, target, reduction='sum').item()
            pred = log_probs.argmax(dim=1, keepdim=True)
            correct += pred.eq(target.view_as(pred)).sum().item()
    test_loss /= len(test_loader.dataset)
    print('\nTest set epoch {}: Average loss: {:.4f}, Accuracy: {}/{} ({:.0f}%)\n'.format(epoch, test_loss, correct, len(test_loader.dataset), ((100.0 * correct) / len(test_loader.dataset))))
def dot(x: tf.Tensor, y: tf.Tensor, sparse: bool=False) -> tf.Tensor:
    """Matrix-multiply x @ y, using the sparse-dense kernel when `sparse` is True."""
    if sparse:
        return tf.sparse.sparse_dense_matmul(x, y)
    return tf.matmul(x, y)
def NIR_calc(P, POP):
    """Return the maximum value in mapping `P` divided by `POP`.

    Returns the string 'None' (preserved legacy sentinel) when the computation
    fails, e.g. empty `P` or `POP` == 0.
    """
    try:
        peak = max(P.values())
        return peak / POP
    except Exception:
        return 'None'
class FileLogger():
    """Rank-aware logger writing separate info/warn/debug files plus console output.

    Fix: ``get_logger`` takes no ``self`` and is invoked as
    ``FileLogger.get_logger(...)``; it is now an explicit ``@staticmethod`` so it
    also works when accessed through an instance.
    """

    def __init__(self, output_dir: str, global_rank: int, local_rank: int, name: str, world_size: int, name_prefix=''):
        self.output_dir = output_dir
        # exist_ok makes this safe when several ranks start concurrently.
        os.makedirs(self.output_dir, exist_ok=True)
        self.logger = FileLogger.get_logger(output_dir, global_rank=global_rank, local_rank=local_rank, name=name, world_size=world_size, name_prefix=name_prefix)

    @staticmethod
    def get_logger(output_dir: str, global_rank: int, local_rank: int, name: str, world_size: int, name_prefix=''):
        """Build a DEBUG-level logger with per-rank info/warn/debug file handlers."""
        logger_ = logging.getLogger(name)
        logger_.setLevel(logging.DEBUG)
        formatter = logging.Formatter('%(message)s')

        def get_name(u):
            # e.g. "<prefix>-info-<global_rank>.log" inside output_dir
            curr_name = f'{name_prefix}-{u}-{global_rank}.log'
            return os.path.join(output_dir, curr_name)

        vlog = logging.FileHandler(get_name('info'))
        vlog.setLevel(logging.INFO)
        vlog.setFormatter(formatter)
        logger_.addHandler(vlog)
        eventlog = logging.FileHandler(get_name('warn'))
        eventlog.setLevel(logging.WARN)
        eventlog.setFormatter(formatter)
        logger_.addHandler(eventlog)
        # The debug file additionally records timestamp and call site.
        time_formatter = logging.Formatter('%(asctime)s - %(filename)s:%(lineno)d - %(message)s')
        debuglog = logging.FileHandler(get_name('debug'))
        debuglog.setLevel(logging.DEBUG)
        debuglog.setFormatter(time_formatter)
        logger_.addHandler(debuglog)
        console = logging.StreamHandler()
        console.setFormatter(formatter)
        console.setLevel(logging.DEBUG)
        logger_.addHandler(console)
        return logger_

    def exception(self, *args_, **kwargs):
        """Log an ERROR message with exception traceback (delegates to logging)."""
        return self.logger.exception(*args_, **kwargs)

    def debug(self, *args_):
        self.logger.debug(*args_)

    def warning(self, *args_):
        self.logger.warning(*args_)

    def info(self, *args_):
        self.logger.info(*args_)
class LayoutLMv2Processor():
    """Combines a LayoutLMv2 feature extractor (image + optional OCR) with a
    LayoutLMv2 tokenizer into a single processor.

    Fixes:
    - ``from_pretrained`` declared ``cls`` but was missing ``@classmethod``, so
      calling it on an instance would have passed the instance as ``cls``.
    - Both type-error messages interpolated ``X.__class__`` — for a class object
      that is always ``<class 'type'>`` — instead of the class itself.
    """

    def __init__(self, feature_extractor, tokenizer):
        if (not isinstance(feature_extractor, LayoutLMv2FeatureExtractor)):
            raise ValueError(f'`feature_extractor` has to be of type {LayoutLMv2FeatureExtractor.__name__}, but is {type(feature_extractor)}')
        if (not isinstance(tokenizer, (LayoutLMv2Tokenizer, LayoutLMv2TokenizerFast))):
            raise ValueError(f'`tokenizer` has to be of type {LayoutLMv2Tokenizer.__name__} or {LayoutLMv2TokenizerFast.__name__}, but is {type(tokenizer)}')
        self.feature_extractor = feature_extractor
        self.tokenizer = tokenizer

    def save_pretrained(self, save_directory):
        """Save both sub-components to `save_directory`, tagging the processor class."""
        self.feature_extractor._set_processor_class(self.__class__.__name__)
        self.feature_extractor.save_pretrained(save_directory)
        self.tokenizer._set_processor_class(self.__class__.__name__)
        self.tokenizer.save_pretrained(save_directory)

    @classmethod
    def from_pretrained(cls, pretrained_model_name_or_path, use_fast=True, **kwargs):
        """Load feature extractor and (fast) tokenizer, then build a processor."""
        feature_extractor = LayoutLMv2FeatureExtractor.from_pretrained(pretrained_model_name_or_path, **kwargs)
        if use_fast:
            tokenizer = LayoutLMv2TokenizerFast.from_pretrained(pretrained_model_name_or_path, **kwargs)
        else:
            tokenizer = LayoutLMv2Tokenizer.from_pretrained(pretrained_model_name_or_path, **kwargs)
        return cls(feature_extractor=feature_extractor, tokenizer=tokenizer)

    def __call__(self, images, text: Union[(TextInput, PreTokenizedInput, List[TextInput], List[PreTokenizedInput])]=None, text_pair: Optional[Union[(PreTokenizedInput, List[PreTokenizedInput])]]=None, boxes: Union[(List[List[int]], List[List[List[int]]])]=None, word_labels: Optional[Union[(List[int], List[List[int]])]]=None, add_special_tokens: bool=True, padding: Union[(bool, str, PaddingStrategy)]=False, truncation: Union[(bool, str, TruncationStrategy)]=False, max_length: Optional[int]=None, stride: int=0, pad_to_multiple_of: Optional[int]=None, return_token_type_ids: Optional[bool]=None, return_attention_mask: Optional[bool]=None, return_overflowing_tokens: bool=False, return_special_tokens_mask: bool=False, return_offsets_mapping: bool=False, return_length: bool=False, verbose: bool=True, return_tensors: Optional[Union[(str, TensorType)]]=None, **kwargs) -> BatchEncoding:
        """Run the feature extractor on `images`, then tokenize, returning a
        BatchEncoding whose 'image' key holds the extracted pixel values.

        When OCR is enabled, boxes/word_labels must not be supplied: the OCR
        words/boxes are used instead (with `text` treated as a question paired
        against the OCR words).
        """
        if (self.feature_extractor.apply_ocr and (boxes is not None)):
            raise ValueError('You cannot provide bounding boxes if you initialized the feature extractor with apply_ocr set to True.')
        if (self.feature_extractor.apply_ocr and (word_labels is not None)):
            raise ValueError('You cannot provide word labels if you initialized the feature extractor with apply_ocr set to True.')
        features = self.feature_extractor(images=images, return_tensors=return_tensors)
        if ((text is not None) and self.feature_extractor.apply_ocr and (text_pair is None)):
            # Pair the user text (e.g. a question) with the OCR'd words.
            if isinstance(text, str):
                text = [text]
            text_pair = features['words']
        encoded_inputs = self.tokenizer(text=(text if (text is not None) else features['words']), text_pair=(text_pair if (text_pair is not None) else None), boxes=(boxes if (boxes is not None) else features['boxes']), word_labels=word_labels, add_special_tokens=add_special_tokens, padding=padding, truncation=truncation, max_length=max_length, stride=stride, pad_to_multiple_of=pad_to_multiple_of, return_token_type_ids=return_token_type_ids, return_attention_mask=return_attention_mask, return_overflowing_tokens=return_overflowing_tokens, return_special_tokens_mask=return_special_tokens_mask, return_offsets_mapping=return_offsets_mapping, return_length=return_length, verbose=verbose, return_tensors=return_tensors, **kwargs)
        encoded_inputs['image'] = features.pop('pixel_values')
        return encoded_inputs
def hard_sigmoid_backward(grad_inputs, inputs, input_shapes, outputs, output_shapes):
    """Backward pass of hard-sigmoid: gradient is 0.2 inside (-2.5, 2.5), zero outside."""
    dy = grad_inputs[0]
    x = inputs[0]
    # Mask of the linear region; stop gradients through the mask itself.
    inside = F.greater_scalar(x, -2.5) * F.less_scalar(x, 2.5)
    inside = no_grad(inside)
    return dy * 0.2 * inside
class FeatureDataset(IterableDataset):
    """Iterable dataset streaming pre-extracted features from pickled shard files.

    Shards are assigned via `node_selection`, optionally shuffled, then each
    shard's pickle is loaded and its features yielded one by one through an
    optional processing `pipeline`.
    """

    def __init__(self, args, shards_path, all_shards_path, node_selection=identity, shard_shuffle=identity, is_train=True):
        # shards_path: shards for this node; all_shards_path: the full shard list
        # (used to compute per-rank shard counts when training).
        self.shards_path = shards_path
        self.all_shards_path = all_shards_path
        if is_train:
            if isinstance(args.computation.num_gpus, int):
                world_size = min(du.get_world_size(), args.computation.num_gpus)
            else:
                world_size = du.get_world_size()
            # Cap num_workers by the smallest per-rank shard count so no worker
            # would be left without a shard.
            num_shards = [len(du.node_selection(all_shards_path, i, total=world_size, is_train=is_train)) for i in range(world_size)]
            self.num_workers = min(([args.computation.num_workers] + num_shards))
        else:
            (self.num_workers, _) = get_num_workers(args.computation.num_workers, len(self.shards_path))
        out_str = '#Workers of Feature Extraction Dataset'
        out_str += f' (train={is_train}, node={du.get_rank()})'
        out_str += f': {self.num_workers}'
        print(out_str)
        self.node_selection = node_selection
        self.shard_shuffle = shard_shuffle
        self.pipeline = []

    def shard_fn(self):
        """Return this worker's shard URLs: node-select, split per worker, shuffle."""
        urls = self.shards_path
        urls = self.node_selection(urls)
        urls = worker_urls(urls)
        urls = self.shard_shuffle(urls)
        return urls

    def samples(self, urls):
        """Stream samples from `urls` through the configured pipeline stages."""
        if isinstance(urls, str):
            urls = [urls]
        assert isinstance(urls, list)
        source = self.raw_samples(urls)
        return pipeline(source, *self.pipeline)

    def raw_samples(self, urls):
        """Generator over features in each shard; corrupt shards are logged and skipped."""
        for url in urls:
            url = Path(url)
            try:
                try:
                    pkl = load_pickle(url)
                except EOFError as e:
                    # Truncated shard (e.g. interrupted write): skip it.
                    print(e)
                    print('EOFError in shard loading: {}'.format(Path(url.stem)))
                    continue
                for feature in pkl:
                    (yield feature)
            except Exception as e:
                # Any other per-shard failure: log and move on to the next shard.
                print(e)
                print('Exception in shard loading: {}'.format(Path(url.stem)))
                continue

    def __iter__(self):
        urls = self.shard_fn()
        return self.samples(urls)

    def shuffle(self, size, rng=None, **kw):
        """Enable shard-level and sample-level shuffling (buffer of `size`); returns self."""
        if (size == 0):
            return self
        if (rng is None):
            rng = random.Random()
        self.rng = rng
        self.shard_shuffle = Shuffler(rng)
        self.pipeline.append(shuffle(size, rng=rng, **kw))
        return self
def get_b16s_config():
    """Return a small ViT-B/16-style configuration as an ml_collections ConfigDict."""
    cfg = ml_collections.ConfigDict()
    cfg.patches = ml_collections.ConfigDict({'size': (16, 16)})
    cfg.hidden_size = 128
    transformer = ml_collections.ConfigDict()
    transformer.mlp_dim = 512
    transformer.num_heads = 8
    transformer.num_layers = 8
    transformer.attention_dropout_rate = 0.0
    transformer.dropout_rate = 0.1
    cfg.transformer = transformer
    cfg.classifier = 'token'
    cfg.representation_size = None
    return cfg
class Checkpointer(object):
    """Saves and loads model/optimizer/scheduler checkpoints, tracking the most
    recent one through a `last_checkpoint` marker file inside `save_dir`."""

    def __init__(self, model, optimizer=None, scheduler=None, save_dir='', save_to_disk=None, logger=None):
        self.model = model
        self.optimizer = optimizer
        self.scheduler = scheduler
        self.save_dir = save_dir
        self.save_to_disk = save_to_disk
        self.logger = logger if logger is not None else logging.getLogger(__name__)

    def save(self, name, **kwargs):
        """Serialize model (+optimizer/scheduler) state and `kwargs` to `<name>.pth`."""
        if not self.save_dir or not self.save_to_disk:
            return
        data = {'model': self.model.state_dict()}
        if self.optimizer is not None:
            data['optimizer'] = self.optimizer.state_dict()
        if self.scheduler is not None:
            data['scheduler'] = self.scheduler.state_dict()
        data.update(kwargs)
        save_file = os.path.join(self.save_dir, '{}.pth'.format(name))
        self.logger.info('Saving checkpoint to {}'.format(save_file))
        torch.save(data, save_file)
        self.tag_last_checkpoint(save_file)

    def load(self, f=None):
        """Load the tagged latest checkpoint (overriding `f`); return leftover entries."""
        if self.has_checkpoint():
            # A recorded checkpoint always takes precedence over the argument.
            f = self.get_checkpoint_file()
        if not f:
            self.logger.info('No checkpoint found. Initializing model from scratch')
            return {}
        self.logger.info('Loading checkpoint from {}'.format(f))
        checkpoint = self._load_file(f)
        self._load_model(checkpoint)
        if 'optimizer' in checkpoint and self.optimizer:
            self.logger.info('Loading optimizer from {}'.format(f))
            self.optimizer.load_state_dict(checkpoint.pop('optimizer'))
        if 'scheduler' in checkpoint and self.scheduler:
            self.logger.info('Loading scheduler from {}'.format(f))
            self.scheduler.load_state_dict(checkpoint.pop('scheduler'))
        return checkpoint

    def has_checkpoint(self):
        """True when a `last_checkpoint` marker exists."""
        return os.path.exists(os.path.join(self.save_dir, 'last_checkpoint'))

    def get_checkpoint_file(self):
        """Path recorded in the marker file, or '' when absent/unreadable."""
        marker = os.path.join(self.save_dir, 'last_checkpoint')
        try:
            with open(marker, 'r') as fh:
                return fh.read().strip()
        except IOError:
            return ''

    def tag_last_checkpoint(self, last_filename):
        """Record `last_filename` as the most recent checkpoint."""
        with open(os.path.join(self.save_dir, 'last_checkpoint'), 'w') as fh:
            fh.write(last_filename)

    def _load_file(self, f):
        # Always map to CPU; the caller moves tensors to the right device.
        return torch.load(f, map_location=torch.device('cpu'))

    def _load_model(self, checkpoint):
        load_state_dict(self.model, checkpoint.pop('model'))
def deconv3(in_planes, out_planes, kernel_size=4, stride=2, padding=1):
    """Upsampling block: a transposed conv followed by two 3x3 refinement convs,
    each followed by PReLU.

    Fix: `kernel_size`, `stride` and `padding` were accepted but ignored — the
    ConvTranspose2d hard-coded 4/2/1. They are now forwarded; the defaults match
    the old hard-coded values, so existing default-argument callers are unchanged.
    """
    return nn.Sequential(
        nn.ConvTranspose2d(in_channels=in_planes, out_channels=out_planes, kernel_size=kernel_size, stride=stride, padding=padding, bias=True),
        nn.PReLU(out_planes),
        nn.Conv2d(out_planes, out_planes, 3, 1, 1),
        nn.PReLU(out_planes),
        nn.Conv2d(out_planes, out_planes, 3, 1, 1),
        nn.PReLU(out_planes),
    )
class StochasticScriptAgent(BaseScriptAgent):
    """Scripted agent that chooses a uniformly random action at every step."""

    def __init__(self):
        super().__init__()

    def reset(self, mdp, state, player_idx):
        """No per-episode state to reset."""
        pass

    def step(self, mdp, state, player_idx):
        """Return one action drawn uniformly from Action.ALL_ACTIONS."""
        return np.random.choice(Action.ALL_ACTIONS)
class ConstantPad2d(_ConstantPadNd):
    """Pads the last two dimensions of the input with a constant `value`.

    `padding` is expanded to a 4-tuple (left, right, top, bottom).
    """
    __constants__ = ['padding', 'value']
    padding: _size_4_t

    def __init__(self, padding: _size_4_t, value: float) -> None:
        super().__init__(value)
        self.padding = _quadruple(padding)
def convert_boolean_value(var, default_value):
    """Map 'y'/'n' (case-insensitive, surrounding whitespace ignored) to True/False;
    any other string yields `default_value`."""
    normalized = var.strip().lower()
    if normalized == 'y':
        return True
    if normalized == 'n':
        return False
    return default_value
class InputStream(object):
    """Wrapper around `wsgi.input` that warns about non-portable usage patterns
    before delegating to the wrapped stream."""

    def __init__(self, stream):
        self._stream = stream

    def read(self, *args):
        """Proxy `read`, warning on argument counts WSGI does not guarantee."""
        if not args:
            warn("WSGI does not guarantee an EOF marker on the input stream, thus making calls to 'wsgi.input.read()' unsafe. Conforming servers may never return from this call.", WSGIWarning, stacklevel=2)
        elif len(args) != 1:
            warn("Too many parameters passed to 'wsgi.input.read()'.", WSGIWarning, stacklevel=2)
        return self._stream.read(*args)

    def readline(self, *args):
        """Proxy `readline`; warn for zero or one argument, reject more."""
        if not args:
            warn("Calls to 'wsgi.input.readline()' without arguments are unsafe. Use 'wsgi.input.read()' instead.", WSGIWarning, stacklevel=2)
        elif len(args) == 1:
            warn("'wsgi.input.readline()' was called with a size hint. WSGI does not support this, although it's available on all major servers.", WSGIWarning, stacklevel=2)
        else:
            raise TypeError("Too many arguments passed to 'wsgi.input.readline()'.")
        return self._stream.readline(*args)

    def __iter__(self):
        """Iterate the underlying stream; warn and yield nothing if not iterable."""
        try:
            return iter(self._stream)
        except TypeError:
            warn("'wsgi.input' is not iterable.", WSGIWarning, stacklevel=2)
            return iter(())

    def close(self):
        """Applications should not close `wsgi.input`; warn, then close anyway."""
        warn('The application closed the input stream!', WSGIWarning, stacklevel=2)
        self._stream.close()
class Encoder(nn.Module):
    """Embedding + LSTM encoder; returns only the final (hidden, cell) states."""

    def __init__(self, input_size, embedding_size, hidden_size, num_layers, p):
        super().__init__()
        # Module creation order kept: Dropout, Embedding, LSTM.
        self.dropout = nn.Dropout(p)
        self.hidden_size = hidden_size
        self.num_layers = num_layers
        self.embedding = nn.Embedding(input_size, embedding_size)
        self.rnn = nn.LSTM(embedding_size, hidden_size, num_layers, dropout=p)

    def forward(self, x):
        # Sigmoid squashes the embeddings before dropout is applied.
        squashed = torch.sigmoid(self.embedding(x))
        emb = self.dropout(squashed)
        outputs, (hidden, cell) = self.rnn(emb)
        return hidden, cell
def test_parametrized_fixture(testdir, openapi3_base_url, is_older_subtests):
    """A parametrized pytest fixture wrapped with `from_pytest_fixture` should run
    the lazy schema's tests once per fixture param.

    Fix: the decorators inside the generated test module had been stripped —
    `(params=["a", "b"])` was the remains of `@pytest.fixture(...)` and
    `_schema.parametrize()` of `@lazy_schema.parametrize()` — leaving the
    generated module invalid. Both decorators are restored.
    """
    testdir.make_test(f'''
    schema.base_url = "{openapi3_base_url}"

    @pytest.fixture(params=["a", "b"])
    def parametrized_lazy_schema(request):
        return schema

    lazy_schema = schemathesis.from_pytest_fixture("parametrized_lazy_schema")

    @lazy_schema.parametrize()
    def test_(case):
        case.call()
    ''')
    result = testdir.runpytest('-v')
    # Two fixture params -> two (sub)test passes.
    result.assert_outcomes(passed=2)
    if is_older_subtests:
        expected = ['test_parametrized_fixture.py::test_\\[a\\]\\[GET /api/users\\] PASSED', 'test_parametrized_fixture.py::test_\\[b\\]\\[GET /api/users\\] PASSED']
    else:
        expected = ['test_parametrized_fixture.py::test_\\[a\\]\\[GET /api/users\\] SUBPASS', 'test_parametrized_fixture.py::test_\\[b\\]\\[GET /api/users\\] SUBPASS']
    result.stdout.re_match_lines(expected)
class UnaryOpSparseFuzzer(Fuzzer):
    """Fuzzer generating random sparse tensors `x` for unary-op benchmarks."""

    def __init__(self, seed, dtype=torch.float32, cuda=False):
        # Tensor rank (1-3) and number of sparse dims, both fixed per sample.
        dim_param = FuzzedParameter('dim_parameter', distribution={1: 0.3, 2: 0.4, 3: 0.3}, strict=True)
        sparse_dim_param = FuzzedParameter(name='sparse_dim', distribution={1: 0.4, 2: 0.4, 3: 0.2}, strict=True)
        # Per-dimension sizes: 80% log-uniform "any" sizes, 20% powers of two.
        k_any = [FuzzedParameter(name=f'k_any_{i}', minval=_MIN_DIM_SIZE, maxval=_MAX_DIM_SIZE, distribution='loguniform') for i in range(3)]
        k_pow2 = [FuzzedParameter(name=f'k_pow2_{i}', distribution={size: (1.0 / len(_POW_TWO_SIZES)) for size in _POW_TWO_SIZES}) for i in range(3)]
        k_mix = [FuzzedParameter(name=f'k{i}', distribution={ParameterAlias(f'k_any_{i}'): 0.8, ParameterAlias(f'k_pow2_{i}'): 0.2}, strict=True) for i in range(3)]
        density_param = FuzzedParameter(name='density', distribution={0.1: 0.4, 0.05: 0.3, 0.01: 0.3})
        coalesced_param = FuzzedParameter(name='coalesced', distribution={True: 0.5, False: 0.5})
        seed_param = FuzzedParameter(name='random_value', minval=0, maxval=((2 ** 32) - 1), distribution='uniform')
        x_tensor = FuzzedSparseTensor(name='x', size=('k0', 'k1', 'k2'), dim_parameter='dim_parameter', sparse_dim='sparse_dim', min_elements=(4 * 1024), max_elements=(32 * (1024 ** 2)), density='density', coalesced='coalesced', dtype=dtype, cuda=cuda)
        super().__init__(
            parameters=[dim_param, sparse_dim_param, k_any, k_pow2, k_mix, density_param, coalesced_param, seed_param],
            tensors=[x_tensor],
            seed=seed,
        )
def load_tr_te_data(csv_file_tr, csv_file_te):
    """Load train/test interaction CSVs (columns 'uid', 'sid') and return a pair of
    binary user-item CSR matrices sized by the module-level `n_items`.

    User rows are re-based so the smallest uid across both files maps to row 0.
    """
    tp_tr = pd.read_csv(csv_file_tr)
    tp_te = pd.read_csv(csv_file_te)
    start_idx = min(tp_tr['uid'].min(), tp_te['uid'].min())
    end_idx = max(tp_tr['uid'].max(), tp_te['uid'].max())
    n_rows = (end_idx - start_idx) + 1

    def to_csr(tp):
        # One implicit-feedback "1" per (user, item) interaction.
        rows = tp['uid'] - start_idx
        cols = tp['sid']
        return sparse.csr_matrix((np.ones_like(rows), (rows, cols)), dtype='float64', shape=(n_rows, n_items))

    return (to_csr(tp_tr), to_csr(tp_te))
def JDUTC_to_BJDTDB(JDUTC, starname='', hip_id=None, ra=None, dec=None, epoch=None, pmra=None, pmdec=None, px=None, rv=None, obsname='', lat=0.0, longi=0.0, alt=0.0, ephemeris='de430', leap_dir=os.path.join(os.path.dirname(__file__), 'data'), leap_update=True):
    """Convert UTC Julian Dates to Barycentric Julian Dates in the TDB time scale.

    Stellar parameters are layered with the precedence: explicit keyword
    arguments > Hipparcos catalogue (`hip_id`) > SIMBAD (`starname`) > zeros
    (epoch J2000). The observatory location comes from `obsname` (Astropy site
    database) when given, otherwise from the explicit lat/longi/alt geodetic
    coordinates.

    Returns (corr_time, warnings+errors, status) where `status` is a bitmask:
    bit 0 set when any warning occurred, bit 1 set when any error occurred.

    NOTE(review): `leap_dir` and `leap_update` are accepted but not referenced
    in this body — presumably consumed by `_JDUTC_to_BJDTDB` elsewhere; confirm.
    """
    corr_time = []
    warning = []
    error = []
    status = 0
    # Accept plain floats for convenience, assuming UTC scale / JD format.
    if (type(JDUTC) != Time):
        warning += [['Warning: Float JDUTC entered. Verify time scale (UTC) and format (JD)']]
        JDUTC = Time(JDUTC, format='jd', scale='utc')
    if JDUTC.isscalar:
        JDUTC = Time([JDUTC])
    star_par = {'ra': ra, 'dec': dec, 'pmra': pmra, 'pmdec': pmdec, 'px': px, 'rv': rv, 'epoch': epoch}
    star_simbad = {'ra': None, 'dec': None, 'pmra': None, 'pmdec': None, 'px': None, 'rv': None, 'epoch': None}
    star_hip = {}
    star_zero = {'ra': 0.0, 'dec': 0.0, 'pmra': 0.0, 'pmdec': 0.0, 'px': 0.0, 'rv': 0.0, 'epoch': 2451545.0}
    star_output = {}
    if starname:
        (star_simbad, warning1) = get_stellar_data(starname)
        warning += warning1
    if hip_id:
        if starname:
            warning += ['Warning: Querying SIMBAD and Hipparcos Catalogue']
        star_hip = find_hip(hip_id)
    # Layer the sources; later updates win, remaining Nones fall back to zeros.
    star_output = star_simbad.copy()
    star_output.update({k: star_hip[k] for k in star_hip if (star_hip[k] is not None)})
    star_output.update({k: star_par[k] for k in star_par if (star_par[k] is not None)})
    star_output.update({k: star_zero[k] for k in star_zero if (star_output[k] is None)})
    warning += ['Following are the stellar positional parameters being used - ', star_output]
    if obsname:
        loc = EarthLocation.of_site(obsname)
        lat = loc.lat.value
        longi = loc.lon.value
        alt = loc.height.value
        warning += [[('Warning: Taking observatory coordinates from Astropy Observatory database. Verify precision. Latitude = %f Longitude = %f Altitude = %f' % (lat, longi, alt))]]
    else:
        loc = EarthLocation.from_geodetic(longi, lat, height=alt)
    # Convert each epoch individually, collecting per-epoch warnings/errors.
    for jdutc in JDUTC:
        a = _JDUTC_to_BJDTDB(JDUTC=jdutc, loc=loc, ephemeris=ephemeris, **star_output)
        corr_time.append(a[0])
        warning.append(a[1])
        error.append(a[2])
    # A falsy corrected time indicates an internal conversion failure.
    if (not all(corr_time)):
        error += ['Check inputs. Error in code']
    if any(error):
        status |= 2
    if any(warning):
        status |= 1
    corr_time = np.array(corr_time)
    return (corr_time, (warning + error), status)
class DropPath(nn.Module):
    """Per-sample stochastic depth: drops the whole residual path with probability
    `drop_prob` during training (delegates to the module-level `drop_path` helper).

    Fix: this class previously inherited `nn.ModuleDict`, which is a container
    for named submodules; it holds none and is used as a plain layer, so the
    base class is corrected to `nn.Module`.
    """

    def __init__(self, drop_prob=None):
        super(DropPath, self).__init__()
        self.drop_prob = drop_prob

    def forward(self, x):
        # `self.training` toggles between dropping and identity behavior.
        return drop_path(x, self.drop_prob, self.training)
class NoSuchTileError(Exception):
    """Raised when no SRTM tile is available for the requested (lat, lon) cell.

    Fix: the original called ``Exception.__init__()`` without ``self``, which
    raises a TypeError the moment the exception is constructed.
    """

    def __init__(self, lat, lon):
        super().__init__()
        self.lat = lat
        self.lon = lon

    def __str__(self):
        return ('No SRTM tile for %d, %d available!' % (self.lat, self.lon))
def to_graphics_array(graph_list, **kwds):
    """Plot each Sage (di)graph in `graph_list` and pack the plots into a
    4-column graphics array.

    Graphs without stored vertex positions are plotted with `kwds` (defaults
    filled in for layout/vertex_size/vertex_labels); graphs with positions use
    a fixed small style.
    """
    from sage.graphs import graph
    plots = []
    for g in graph_list:
        if not isinstance(g, graph.GenericGraph):
            raise TypeError('param list must be a list of Sage (di)graphs.')
        pos = g.get_pos()
        if pos is not None:
            plots.append(g.plot(pos=pos, vertex_size=50, vertex_labels=False, graph_border=True))
        else:
            # Fill defaults only where the caller did not specify them;
            # note kwds is shared across iterations (as in the original).
            kwds.setdefault('layout', 'circular')
            kwds.setdefault('vertex_size', 50)
            kwds.setdefault('vertex_labels', False)
            kwds['graph_border'] = True
            plots.append(g.plot(**kwds))
    from sage.plot.plot import graphics_array
    return graphics_array(plots, ncols=4)
def _build_model(args):
    """Instantiate the cred2 network on an input layer shaped from `args`,
    print its summary and return the model."""
    inp = Input(shape=args['input_dimention'], name='input')
    network = cred2(
        nb_filters=[8, 16, 16, 32, 32, 64, 64],
        kernel_size=[11, 9, 7, 7, 5, 5, 3],
        padding=args['padding'],
        activationf=args['activation'],
        cnn_blocks=args['cnn_blocks'],
        BiLSTM_blocks=args['lstm_blocks'],
        drop_rate=args['drop_rate'],
        loss_weights=args['loss_weights'],
        loss_types=args['loss_types'],
        kernel_regularizer=keras.regularizers.l2(1e-06),
        bias_regularizer=keras.regularizers.l1(0.0001),
    )(inp)
    network.summary()
    return network
def save_model(model, epoch, update_best=False, **kwargs):
    """Save the wrapped model (plus tokenizer/processor and visual feature
    extractor when present) under an epoch-named checkpoint directory, and
    optionally refresh the 'best.ckpt' copy.

    Fix: the `update_best` branch previously called `tokenizer.save_pretrained`
    unconditionally and crashed when the model had neither a tokenizer nor a
    processor; it now reuses the same None guard as the epoch checkpoint.
    """
    save_dir = os.path.join(kwargs['save_dir'], 'checkpoints', '{:s}_{:s}_{:s}'.format(kwargs['model_name'].lower(), kwargs.get('page_retrieval', '').lower(), kwargs['dataset_name'].lower()))
    ckpt_dir = os.path.join(save_dir, 'model__{:d}.ckpt'.format(epoch))
    model.model.save_pretrained(ckpt_dir)
    # Prefer a tokenizer, fall back to a processor; either may be absent.
    tokenizer = (model.tokenizer if hasattr(model, 'tokenizer') else (model.processor if hasattr(model, 'processor') else None))
    if (tokenizer is not None):
        tokenizer.save_pretrained(ckpt_dir)
    if hasattr(model.model, 'visual_embeddings'):
        model.model.visual_embeddings.feature_extractor.save_pretrained(ckpt_dir)
    save_yaml(os.path.join(ckpt_dir, 'experiment_config.yml'), kwargs)
    if update_best:
        best_dir = os.path.join(save_dir, 'best.ckpt')
        model.model.save_pretrained(best_dir)
        if (tokenizer is not None):
            tokenizer.save_pretrained(best_dir)
        save_yaml(os.path.join(best_dir, 'experiment_config.yml'), kwargs)
def test_ast_resolver_alias():
    """Check ASTResolver.resolve_to resolves a dotted name both through the direct
    module name and through an import alias, using this frame's locals().

    NOTE: the `locals()` argument makes this test sensitive to the exact local
    scope, so the imports must stay inside the function body.
    """
    import taichi
    taichi.init()
    node = ast.parse('taichi.kernel', mode='eval').body
    assert ASTResolver.resolve_to(node, taichi.kernel, locals())
    # Same attribute, reached via the alias `tc` bound in this scope.
    import taichi as tc
    node = ast.parse('tc.kernel', mode='eval').body
    assert ASTResolver.resolve_to(node, tc.kernel, locals())
def user_config_dir(appname=None, appauthor=None, version=None, roaming=False):
    """Return the per-user configuration directory (appdirs-style).

    On Windows/macOS this is the same as the user data dir; elsewhere it is
    $XDG_CONFIG_HOME (default ~/.config), with `appname` and, when both are
    given, `version` appended.
    """
    if system in ('win32', 'darwin'):
        path = user_data_dir(appname, appauthor, None, roaming)
    else:
        path = os.getenv('XDG_CONFIG_HOME', os.path.expanduser('~/.config'))
        if appname:
            path = os.path.join(path, appname)
    # Version is only meaningful when an app name was supplied.
    if appname and version:
        path = os.path.join(path, version)
    return path
def _get_dataloaders(params):
    """Build the (labeled-source, unlabeled-source, unlabeled-target) dataloaders
    selected by params.ls / params.us / params.ut; unselected ones are None."""
    base_bs = params.batch_size
    labeled_source_bs = base_bs
    unlabeled_source_bs = base_bs
    unlabeled_target_bs = base_bs
    if params.us and params.ut:
        # Split the unlabeled batch budget between the two streams.
        unlabeled_source_bs //= 2
        unlabeled_target_bs //= 2
    ls = us = ut = None
    if params.ls:
        print('Using source data {} (labeled)'.format(params.source_dataset))
        ls = get_unlabeled_dataloader(dataset_name=params.source_dataset, augmentation=params.augmentation, batch_size=labeled_source_bs, siamese=False, unlabeled_ratio=params.unlabeled_ratio, num_workers=params.num_workers, split_seed=params.split_seed)
    if params.us:
        raise NotImplementedError
        # NOTE(review): everything below this raise is unreachable; kept verbatim
        # from the disabled code path.
        print('Using source data {} (unlabeled)'.format(params.source_dataset))
        us = get_dataloader(dataset_name=params.source_dataset, augmentation=params.augmentation, batch_size=unlabeled_source_bs, num_workers=params.num_workers, siamese=True)
    if params.ut:
        print('Using target data {} (unlabeled)'.format(params.target_dataset))
        ut = get_unlabeled_dataloader(dataset_name=params.target_dataset, augmentation=params.augmentation, batch_size=unlabeled_target_bs, num_workers=params.num_workers, siamese=True, unlabeled_ratio=params.unlabeled_ratio)
    return (ls, us, ut)
def GenerateSM90_TensorOp_1684_symm(manifest, cuda_version):
    """Register FP64 SYMM (symmetric matrix multiply) tensor-op kernels for SM90.

    Uses the [16, 8, 4] FP64 tensor-core MMA shape; requires CUDA >= 11.8.
    Kernels are emitted for both side modes and both fill modes over a fixed
    set of threadblock tile shapes.
    """
    if (not CudaToolkitVersionSatisfies(cuda_version, 11, 8)):
        return
    layouts = [(LayoutType.ColumnMajor, LayoutType.ColumnMajor)]
    side_modes = [SideMode.Left, SideMode.Right]
    fill_modes = [FillMode.Lower, FillMode.Upper]
    math_inst = MathInstruction([16, 8, 4], DataType.f64, DataType.f64, DataType.f64, OpcodeClass.TensorOp, MathOperation.multiply_add)
    # Compute-capability range the kernels are generated for.
    min_cc = 90
    max_cc = 1024
    alignment_constraints = [1]
    # Threadblock tiles: ([M, N, K], pipeline stages, warp layout, ...).
    tile_descriptions = [TileDescription([128, 128, 16], 3, [4, 2, 1], math_inst, min_cc, max_cc), TileDescription([64, 128, 16], 3, [2, 2, 1], math_inst, min_cc, max_cc), TileDescription([128, 64, 16], 3, [2, 2, 1], math_inst, min_cc, max_cc), TileDescription([64, 64, 16], 4, [2, 2, 1], math_inst, min_cc, max_cc), TileDescription([64, 32, 16], 4, [2, 2, 1], math_inst, min_cc, max_cc), TileDescription([32, 64, 16], 4, [2, 2, 1], math_inst, min_cc, max_cc), TileDescription([32, 32, 16], 5, [2, 2, 1], math_inst, min_cc, max_cc), TileDescription([16, 32, 16], 5, [1, 2, 1], math_inst, min_cc, max_cc), TileDescription([32, 16, 16], 5, [2, 1, 1], math_inst, min_cc, max_cc)]
    # A, B, C and accumulator are all double precision.
    data_type = [DataType.f64, DataType.f64, DataType.f64, DataType.f64]
    CreateSymmOperator(manifest, layouts, side_modes, fill_modes, tile_descriptions, data_type, alignment_constraints, BlasMode.symmetric)
class TestRMSNormOp(hu.HypothesisTestCase):
    """Hypothesis tests for the Caffe2 `RMSNorm` operator against a NumPy reference.

    Fix: the hypothesis decorators had been stripped to bare parenthesized
    keyword tuples (`(M=st.integers(...), ...)` / `(deadline=None)`), which is a
    SyntaxError; restore `@given(...)` and `@settings(deadline=None)`.
    Assumes `from hypothesis import given, settings` at module top, the standard
    import for these hypothesis-based operator tests.
    """

    @given(M=st.integers(0, 8),
           N=st.integers(1, 16),
           eps=st.floats(0, 0.001),
           dtype=st.sampled_from([np.float32, np.float64]),
           **hu.gcs)
    @settings(deadline=None)
    def test_rms_norm(self, M, N, eps, dtype, gc, dc):
        """Check RMSNorm output, device parity and gradients vs. a NumPy reference."""
        X = ((np.random.randn(M, N) * 2.0) + 1.0).astype(dtype)
        gamma = np.random.randn(N).astype(dtype)
        beta = np.random.randn(N).astype(dtype)
        op = core.CreateOperator('RMSNorm', ['X', 'gamma', 'beta'], ['Y', 'rrms'], eps=eps)

        def rms_norm_ref(X, gamma, beta):
            # rrms = 1 / sqrt(mean(X^2) + eps), broadcast back over columns.
            rrms = (1.0 / np.sqrt((np.mean(np.square(X), axis=1) + eps)))
            Y = (((X * np.expand_dims(rrms, axis=1)) * gamma) + beta)
            return (Y, rrms)

        inputs = [X, gamma, beta]
        self.assertReferenceChecks(gc, op, inputs, rms_norm_ref)
        self.assertDeviceChecks(dc, op, inputs, [0, 1])
        for i in range(len(inputs)):
            self.assertGradientChecks(gc, op, inputs, i, [0])
class ModularCorrespondenceDatabase(ModularPolynomialDatabase):
    """Database of modular correspondence polynomials, keyed by (N, crr) level pairs."""

    def _dbpath(self, level):
        """Relative path of the compressed database file for `level` = (N, crr)."""
        N, crr = level
        return 'PolMod/%s/crr.%02d.%03d.dbz' % (self.model, N, crr)
def test_MultiProcDataset_exception_at_init():
    """The worker dataset class raises at init; initialize() must surface that
    failure rather than hang (the construction is guarded by a timeout)."""
    with timeout():
        mp_dataset = MultiProcDataset(dataset={'class': 'MapDatasetWrapper', 'map_dataset': _MyCustomMapDatasetThrowingExceptionAtInit}, num_workers=1, buffer_size=1)
    got_exception = False
    try:
        mp_dataset.initialize()
    except Exception as exc:
        print('Got expected exception:', exc)
        got_exception = True
    if not got_exception:
        raise Exception('Expected exception')
def get_task(model: str, use_auth_token: Optional[str]=None) -> str:
    """Infer the pipeline task tag for `model` from the Hugging Face Hub.

    Raises RuntimeError in offline mode, when the Hub query fails, when the
    model has no `pipeline_tag`, or when the model belongs to another library.
    """
    if is_offline_mode():
        raise RuntimeError('You cannot infer task automatically within `pipeline` when using offline mode')
    try:
        info = model_info(model, token=use_auth_token)
    except Exception as e:
        raise RuntimeError(f'Instantiating a pipeline without a task set raised an error: {e}')
    if not info.pipeline_tag:
        raise RuntimeError(f'The model {model} does not seem to have a correct `pipeline_tag` set to infer the task automatically')
    # Older hub clients may lack `library_name`; default to transformers.
    if getattr(info, 'library_name', 'transformers') != 'transformers':
        raise RuntimeError(f'This model is meant to be used with {info.library_name} not with transformers')
    return info.pipeline_tag
class ResLayer(nn.Sequential):
    """A stage of residual blocks packed into an nn.Sequential.

    Only the first block may change stride or channel count (with a matching
    downsample shortcut); the remaining `num_blocks - 1` blocks run at stride 1.
    When `avg_down` is set, the shortcut downsamples via AvgPool2d followed by a
    stride-1 1x1 conv instead of a strided 1x1 conv.

    NOTE(review): `act_cfg` is accepted but not forwarded to the blocks (only
    `act_cfg_inv` is) — confirm whether the blocks apply their own default.
    """

    def __init__(self, block, num_blocks, in_channels, out_channels, expansion=None, stride=1, avg_down=False, conv_cfg=None, norm_cfg=dict(type='BN'), act_cfg=dict(type='ReLU', inplace=True), conv_cfg_inv=None, norm_cfg_inv=None, act_cfg_inv=None, **kwargs):
        self.block = block
        self.expansion = get_expansion(block, expansion)
        # The *_inv configs default to the corresponding forward configs.
        conv_cfg_inv = (conv_cfg if (conv_cfg_inv is None) else conv_cfg_inv)
        norm_cfg_inv = (norm_cfg if (norm_cfg_inv is None) else norm_cfg_inv)
        act_cfg_inv = (act_cfg if (act_cfg_inv is None) else act_cfg_inv)
        downsample = None
        if ((stride != 1) or (in_channels != out_channels)):
            # Shortcut branch must match the main branch's output shape.
            downsample = []
            conv_stride = stride
            if (avg_down and (stride != 1)):
                # AvgPool handles the stride so the 1x1 conv can stay stride-1.
                conv_stride = 1
                downsample.append(nn.AvgPool2d(kernel_size=stride, stride=stride, ceil_mode=True, count_include_pad=False))
            downsample.extend([build_conv_layer(conv_cfg, in_channels, out_channels, kernel_size=1, stride=conv_stride, bias=False), build_norm_layer(norm_cfg, out_channels)[1]])
            downsample = nn.Sequential(*downsample)
        layers = []
        # First block carries the stride and the downsample shortcut.
        layers.append(block(in_channels=in_channels, out_channels=out_channels, expansion=self.expansion, stride=stride, downsample=downsample, conv_cfg=conv_cfg, norm_cfg=norm_cfg, conv_cfg_inv=conv_cfg_inv, norm_cfg_inv=norm_cfg_inv, act_cfg_inv=act_cfg_inv, **kwargs))
        in_channels = out_channels
        for i in range(1, num_blocks):
            layers.append(block(in_channels=in_channels, out_channels=out_channels, expansion=self.expansion, stride=1, conv_cfg=conv_cfg, norm_cfg=norm_cfg, conv_cfg_inv=conv_cfg_inv, norm_cfg_inv=norm_cfg_inv, act_cfg_inv=act_cfg_inv, **kwargs))
        super(ResLayer, self).__init__(*layers)
def _exact_inf_norm(A):
if scipy.sparse.isspmatrix(A):
return max(abs(A).sum(axis=1).flat)
elif is_pydata_spmatrix(A):
return max(abs(A).sum(axis=1))
else:
return np.linalg.norm(A, np.inf) |
def CyclicCover(r, f, names=None, check_smooth=True):
    """Construct the cyclic cover y^r = f(x) of the projective line.

    When `check_smooth` is set, verifies that the characteristic does not divide
    r and that f is squarefree (so the affine patch is smooth). Dispatches to the
    finite-field or generic implementation depending on the base ring.
    """
    if not isinstance(f, Polynomial):
        raise TypeError(('Arguments f (= %s) must be a polynomial' % (f,)))
    P = f.parent()
    f = P(f)
    if check_smooth:
        # Characteristic dividing r forces a singular model.
        if P(r) == 0:
            raise ValueError('As the characteristic divides the order of the cover, this model is not smooth.')
        try:
            smooth = f.is_squarefree()
        except NotImplementedError as err:
            raise NotImplementedError((str(err) + 'Use check_smooth=False to skip this check.'))
        if not smooth:
            raise ValueError('Not a smooth Cyclic Cover of P^1: singularity in the provided affine patch.')
    base = P.base_ring()
    if names is None:
        names = ['x', 'y']
    ambient = AffineSpace(2, base, names=names)
    cover_cls = CyclicCover_finite_field if isinstance(base, FiniteField) else CyclicCover_generic
    return cover_cls(ambient, r, f, names=names)
class Gpt2Transformer(StateDictSerializationMixin, eqx.Module):
    """Stack of GPT-2 blocks followed by a final LayerNorm, with HF-compatible
    state-dict (de)serialization.

    Fixes:
    - A stray bare ``_call`` expression stood before ``__call__`` — almost
      certainly the residue of a stripped decorator; it would raise NameError at
      class-creation time and has been removed.
    - ``init`` takes no ``self``/``cls`` and is called as
      ``Gpt2Transformer.init(...)``; it is now an explicit ``@staticmethod``.
    """

    config: Gpt2Config = eqx.static_field()
    blocks: Stacked[Gpt2Block]
    ln_f: hnn.LayerNorm

    @staticmethod
    def init(config: Gpt2Config, *, key):
        """Randomly initialize the block stack and final LayerNorm from `config`."""
        blocks = Stacked.init(config.Layers, Gpt2Block, gradient_checkpointing=config.gradient_checkpointing)(config, key=shaped_rng_split(key, config.num_layers))
        ln_f = hnn.LayerNorm.init(config.Embed, eps=config.layer_norm_epsilon, use_bias=config.use_bias)
        return Gpt2Transformer(config, blocks, ln_f)

    def __call__(self, x: NamedArray, attn_mask: Optional[(AttentionMask | NamedArray)], *, key=None) -> NamedArray:
        """Fold `x` through all blocks (one RNG key per layer) then apply ln_f."""
        keys = (hax.jax_utils.maybe_rng_split(key, self.config.num_layers) if (key is not None) else None)
        x = self.blocks.fold(x, attn_mask, hax.arange(self.config.Layers), key=keys)
        x = self.ln_f(x)
        return x

    def _state_dict_key_map(self) -> Dict[(str, Optional[str])]:
        # Our 'blocks' field corresponds to HF's 'h' prefix.
        return {'blocks': 'h'}

    def from_state_dict(self, state_dict: StateDict, prefix: Optional[str]=None):
        """Load from an HF-style flat dict, restacking per-layer 'h.*' entries."""
        stacked = stack_state_dict(state_dict, prefix=apply_prefix(prefix, 'h'))
        out = super().from_state_dict(stacked, prefix=prefix)
        return out

    def update_state_dict(self, state_dict: StateDict, prefix: Optional[str]=None) -> StateDict:
        """Write our parameters into `state_dict` in HF layout (layers unstacked)."""
        my_state_dict: StateDict = {}
        super().update_state_dict(my_state_dict, prefix)
        stacked_dict = unstack_state_dict(my_state_dict, apply_prefix(prefix, 'h'))
        state_dict.update(stacked_dict)
        return state_dict
def MI_loss(mus, sigmas, i_c, alpha=1e-08):
    """Information-bottleneck penalty: mean Gaussian KL(q(z|x) || N(0, I)) minus
    the information budget `i_c`.

    `alpha` stabilizes the log for near-zero sigmas. The inner variable no
    longer shadows the function name.
    """
    # Per-sample KL of N(mu, sigma^2) vs N(0, 1), summed over latent dims.
    kl_per_sample = 0.5 * torch.sum(mus ** 2 + sigmas ** 2 - torch.log(sigmas ** 2 + alpha) - 1, dim=1)
    return torch.mean(kl_per_sample) - i_c
class TdmTwinSAC(TemporalDifferenceModel, TwinSAC):
    """Twin Soft Actor-Critic whose networks are additionally conditioned on
    `goals` and `num_steps_left` from the replay batch (TDM-style training).

    NOTE(review): `_do_training` mirrors TwinSAC's update but every qf/vf/policy
    call takes the extra goal/horizon arguments.
    """

    def __init__(self, env, qf1, qf2, vf, twin_sac_kwargs, tdm_kwargs, base_kwargs, policy=None, eval_policy=None, replay_buffer=None, dense_log_pi=True, optimizer_class=optim.Adam, **kwargs):
        # Initialize the TwinSAC side explicitly first; the zero-arg super()
        # then continues along the MRO with the TDM-specific kwargs.
        TwinSAC.__init__(self, env=env, qf1=qf1, qf2=qf2, vf=vf, policy=policy, replay_buffer=replay_buffer, eval_policy=eval_policy, optimizer_class=optimizer_class, **twin_sac_kwargs, **base_kwargs)
        super().__init__(**tdm_kwargs)
        # When False (and rewards are sparse), the entropy term is masked by
        # `terminals` in `_do_training` below.
        self.dense_log_pi = dense_log_pi

    def _do_training(self):
        """One gradient step for qf1, qf2, vf and (on schedule) the policy/target vf."""
        batch = self.get_batch()
        rewards = batch['rewards']
        terminals = batch['terminals']
        obs = batch['observations']
        actions = batch['actions']
        next_obs = batch['next_observations']
        goals = batch['goals']
        num_steps_left = batch['num_steps_left']
        q1_pred = self.qf1(observations=obs, actions=actions, goals=goals, num_steps_left=num_steps_left)
        q2_pred = self.qf2(observations=obs, actions=actions, goals=goals, num_steps_left=num_steps_left)
        policy_outputs = self.policy(obs, goals, num_steps_left, reparameterize=self.train_policy_with_reparameterization, return_log_prob=True)
        (new_actions, policy_mean, policy_log_std, log_pi) = policy_outputs[:4]
        if ((not self.dense_rewards) and (not self.dense_log_pi)):
            # Sparse setting: only count the entropy term at terminal transitions.
            log_pi = (log_pi * terminals)
        '\n QF Loss\n '
        # The horizon decrements by one step for the bootstrapped target.
        target_v_values = self.target_vf(observations=next_obs, goals=goals, num_steps_left=(num_steps_left - 1))
        q_target = ((self.reward_scale * rewards) + (((1.0 - terminals) * self.discount) * target_v_values))
        # Stop gradients through the target before computing Bellman errors.
        q_target = q_target.detach()
        bellman_errors_1 = ((q1_pred - q_target) ** 2)
        bellman_errors_2 = ((q2_pred - q_target) ** 2)
        qf1_loss = bellman_errors_1.mean()
        qf2_loss = bellman_errors_2.mean()
        if self.use_automatic_entropy_tuning:
            # SAC temperature update: adjust alpha toward the entropy target.
            alpha_loss = (- (self.log_alpha * (log_pi + self.target_entropy).detach()).mean())
            self.alpha_optimizer.zero_grad()
            alpha_loss.backward()
            self.alpha_optimizer.step()
            alpha = self.log_alpha.exp()
        else:
            alpha = 1
        '\n VF Loss\n '
        q1_new_actions = self.qf1(observations=obs, actions=new_actions, goals=goals, num_steps_left=num_steps_left)
        q2_new_actions = self.qf2(observations=obs, actions=new_actions, goals=goals, num_steps_left=num_steps_left)
        # Twin trick: take the min of the two critics to curb overestimation.
        q_new_actions = torch.min(q1_new_actions, q2_new_actions)
        v_target = (q_new_actions - (alpha * log_pi))
        v_pred = self.vf(observations=obs, goals=goals, num_steps_left=num_steps_left)
        v_target = v_target.detach()
        bellman_errors = ((v_pred - v_target) ** 2)
        vf_loss = bellman_errors.mean()
        # Critic and value updates happen every call, in this fixed order.
        self.qf1_optimizer.zero_grad()
        qf1_loss.backward()
        self.qf1_optimizer.step()
        self.qf2_optimizer.zero_grad()
        qf2_loss.backward()
        self.qf2_optimizer.step()
        self.vf_optimizer.zero_grad()
        vf_loss.backward()
        self.vf_optimizer.step()
        if self.train_policy_with_reparameterization:
            # Reparameterized objective: differentiate through the sampled action.
            policy_loss = ((alpha * log_pi) - q_new_actions).mean()
        else:
            # Likelihood-ratio style objective with (Q - V) as the baseline-corrected target.
            log_policy_target = (q_new_actions - v_pred)
            policy_loss = (log_pi * ((alpha * log_pi) - log_policy_target).detach()).mean()
        # Regularize the policy's mean, log-std, and pre-tanh activations.
        mean_reg_loss = (self.policy_mean_reg_weight * (policy_mean ** 2).mean())
        std_reg_loss = (self.policy_std_reg_weight * (policy_log_std ** 2).mean())
        pre_tanh_value = policy_outputs[(- 1)]
        pre_activation_reg_loss = (self.policy_pre_activation_weight * (pre_tanh_value ** 2).sum(dim=1).mean())
        policy_reg_loss = ((mean_reg_loss + std_reg_loss) + pre_activation_reg_loss)
        policy_loss = (policy_loss + policy_reg_loss)
        # Delayed policy update.
        if ((self._n_train_steps_total % self.policy_update_period) == 0):
            self.policy_optimizer.zero_grad()
            policy_loss.backward()
            self.policy_optimizer.step()
        # Periodic Polyak averaging of vf into target_vf.
        if ((self._n_train_steps_total % self.target_update_period) == 0):
            ptu.soft_update_from_to(self.vf, self.target_vf, self.soft_target_tau)
        '\n Save some statistics for eval\n '
        if self.need_to_update_eval_statistics:
            # Refresh at most once per eval period.
            self.need_to_update_eval_statistics = False
            self.eval_statistics['QF1 Loss'] = np.mean(ptu.get_numpy(qf1_loss))
            self.eval_statistics['QF2 Loss'] = np.mean(ptu.get_numpy(qf2_loss))
            self.eval_statistics['VF Loss'] = np.mean(ptu.get_numpy(vf_loss))
            self.eval_statistics['Policy Loss'] = np.mean(ptu.get_numpy(policy_loss))
            self.eval_statistics.update(create_stats_ordered_dict('Q1 Predictions', ptu.get_numpy(q1_pred)))
            self.eval_statistics.update(create_stats_ordered_dict('Q2 Predictions', ptu.get_numpy(q2_pred)))
            self.eval_statistics.update(create_stats_ordered_dict('V Predictions', ptu.get_numpy(v_pred)))
            self.eval_statistics.update(create_stats_ordered_dict('Log Pis', ptu.get_numpy(log_pi)))
            self.eval_statistics.update(create_stats_ordered_dict('Policy mu', ptu.get_numpy(policy_mean)))
            self.eval_statistics.update(create_stats_ordered_dict('Policy log std', ptu.get_numpy(policy_log_std)))
            if self.use_automatic_entropy_tuning:
                self.eval_statistics['Alpha'] = ptu.get_numpy(alpha)[0]
                self.eval_statistics['Alpha Loss'] = ptu.get_numpy(alpha_loss)[0]
def unstack_lstm(lstm):
    """Split a (possibly multi-layer) ``nn.LSTM`` into single-layer copies.

    Running the input through the returned layers in order reproduces the
    stacked module's output (with dropout 0 — dropout is not copied).

    Generalized from the original, which hard-coded ``batch_first=True`` and
    ``bidirectional=True`` and unconditionally copied bias weights: the copies
    now mirror the source module's ``batch_first``, ``bias`` and
    ``bidirectional`` flags.

    Args:
        lstm: source ``torch.nn.LSTM``.

    Returns:
        list of ``nn.LSTM``: one single-layer module per layer of `lstm`,
        placed on the same device.
    """
    device = next(iter(lstm.parameters())).device
    in_size = lstm.input_size
    hidden_dim = lstm.hidden_size
    bidirectional = lstm.bidirectional
    num_directions = 2 if bidirectional else 1
    attributes = ['weight_ih_l', 'weight_hh_l']
    if lstm.bias:
        attributes += ['bias_ih_l', 'bias_hh_l']
    layers = []
    for i in range(lstm.num_layers):
        layer = nn.LSTM(in_size, hidden_dim, batch_first=lstm.batch_first, bias=lstm.bias, bidirectional=bidirectional)
        layer.to(device)
        with torch.no_grad():
            for attr in attributes:
                # Layer i of the stacked module becomes layer 0 of the copy.
                getattr(layer, attr + '0').copy_(getattr(lstm, attr + str(i)))
                if bidirectional:
                    getattr(layer, attr + '0_reverse').copy_(getattr(lstm, attr + str(i) + '_reverse'))
        # Re-coalesce the weights into one contiguous chunk for cuDNN.
        layer.flatten_parameters()
        layers.append(layer)
        # The next layer consumes the (possibly direction-concatenated) output.
        in_size = num_directions * hidden_dim
    return layers
# NOTE(review): the original had `_function(pre=[square])` as a bare expression
# directly above the def — almost certainly a decorator whose '@' was lost in
# extraction (a bare call whose result is discarded would do nothing).
# Restored as a decorator; confirm against upstream.
@_function(pre=[square])
def fp(x: DataPoint) -> int:
    """Return 0 when the squared feature exceeds 42, else -1."""
    return 0 if x.num_squared > 42 else -1
# NOTE(review): this bare parenthesized expression looks like a decorator that
# lost its '@' during extraction (likely something like
# `@preconditions(arg_at(0, assert_tensor))` in Taichi); as written it merely
# evaluates and discards the result. Confirm against the original source.
(arg_at(0, assert_tensor))
def _reduce(mat, fun: template()):
    # Fold `fun` over every element of a 1-D or 2-D tensor whose shape is known
    # at Taichi compile time. `static(...)` forces compile-time evaluation, so
    # the loops below are fully unrolled.
    shape = static(mat.get_shape())
    if static((len(shape) == 1)):
        # Vector case: seed the accumulator with element 0, fold in the rest.
        result = mat[0]
        for i in static(range(1, shape[0])):
            result = fun(result, mat[i])
        return result
    # Matrix case: seed with element (0, 0), fold in every other element in
    # row-major order, skipping the seed.
    result = mat[(0, 0)]
    for i in static(range(shape[0])):
        for j in static(range(shape[1])):
            if static(((i != 0) or (j != 0))):
                result = fun(result, mat[(i, j)])
    return result
class BottomLeftPoolFunction(Function):
    """autograd bridge to the compiled bottom-left corner-pooling kernels in `_C`.

    NOTE(review): the original `forward`/`backward` lacked `@staticmethod`,
    which modern `torch.autograd.Function` requires (the ctx-first signatures
    already follow the static style, so the decorators were almost certainly
    lost in extraction); restored here.
    """

    @staticmethod
    def forward(ctx, input, guide):
        (output, maxout) = _C.bl_pool_forward(input, guide)
        # Stash everything the backward kernel needs.
        ctx.save_for_backward(input, output, guide, maxout)
        return output

    @staticmethod
    def backward(ctx, grad_output):
        # `saved_tensors` replaces the long-deprecated `saved_variables`.
        (input, output, guide, maxout) = ctx.saved_tensors
        (grad_input, grad_guide) = _C.bl_pool_backward(input, guide, output, maxout, grad_output)
        # One gradient per forward input, in order.
        return (grad_input, grad_guide)
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.