# Dataset-export residue (markdown table header from the original dump) — not code:
# code stringlengths 101 5.91M |
# |---|
def ref_convolution(x, w, b, base_axis, pad, stride, dilation, group, quantize_zero_to):
    """Reference convolution: run a 2D convolution sample-by-sample.

    All axes before ``base_axis`` are collapsed into one batch axis, each
    sample is convolved with ``refs.convolution_2d``, and the original batch
    shape is restored on the stacked result.

    Note: ``quantize_zero_to`` is accepted for signature compatibility with
    sibling reference functions and is not used here.
    """
    flat_batch = x.reshape((-1,) + x.shape[base_axis:])
    per_sample = [
        refs.convolution_2d(sample, w, b, pad, stride, dilation, group)[np.newaxis]
        for sample in flat_batch
    ]
    stacked = np.vstack(per_sample)
    # Re-expand the collapsed leading axes.
    return stacked.reshape(x.shape[:base_axis] + stacked.shape[1:])
class DPGradientDescentOptimizer(tf.train.GradientDescentOptimizer):
    """Differentially private SGD.

    Per-example gradients are sanitized (clipped and optionally noised) by
    ``sanitizer`` before being applied. When ``batches_per_lot > 1``,
    sanitized gradients are accumulated across batches and a single update
    is applied once per lot.
    """

    def __init__(self, learning_rate, eps_delta, sanitizer, sigma=None, use_locking=False, name='DPGradientDescent', batches_per_lot=1):
        """Construct the optimizer.

        Args:
            learning_rate: step size.
            eps_delta: (epsilon, delta) privacy budget forwarded to the sanitizer.
            sanitizer: object whose ``sanitize`` method clips and adds noise.
            sigma: optional noise scale overriding the sanitizer's default.
            use_locking, name: forwarded to GradientDescentOptimizer.
            batches_per_lot: number of batches accumulated per update.
        """
        super(DPGradientDescentOptimizer, self).__init__(learning_rate, use_locking, name)
        self._batches_per_lot = batches_per_lot
        self._grad_accum_dict = {}
        if (batches_per_lot > 1):
            self._batch_count = tf.Variable(1, dtype=tf.int32, trainable=False, name='batch_count')
            var_list = tf.trainable_variables()
            with tf.variable_scope('grad_acc_for'):
                # One non-trainable gradient accumulator per trainable variable.
                for var in var_list:
                    v_grad_accum = tf.Variable(tf.zeros_like(var), trainable=False, name=utils.GetTensorOpName(var))
                    self._grad_accum_dict[var.name] = v_grad_accum
        self._eps_delta = eps_delta
        self._sanitizer = sanitizer
        self._sigma = sigma

    def compute_sanitized_gradients(self, loss, var_list=None, add_noise=True):
        """Compute per-example gradients of ``loss`` w.r.t. ``var_list``,
        sanitized (clipped, and noised when ``add_noise``).

        Returns a list of sanitized gradient tensors aligned with var_list.
        """
        self._assert_valid_dtypes([loss])
        xs = [tf.convert_to_tensor(x) for x in var_list]
        px_grads = per_example_gradients.PerExampleGradients(loss, xs)
        sanitized_grads = []
        for (px_grad, v) in zip(px_grads, var_list):
            tensor_name = utils.GetTensorOpName(v)
            # num_examples counts examples over the whole lot, not one batch.
            sanitized_grad = self._sanitizer.sanitize(px_grad, self._eps_delta, sigma=self._sigma, tensor_name=tensor_name, add_noise=add_noise, num_examples=(self._batches_per_lot * tf.slice(tf.shape(px_grad), [0], [1])))
            sanitized_grads.append(sanitized_grad)
        return sanitized_grads

    def minimize(self, loss, global_step=None, var_list=None, name=None):
        """Minimize ``loss`` with sanitized gradients.

        With ``batches_per_lot == 1`` this applies the gradients directly;
        otherwise gradients are accumulated and only the last batch of each
        lot triggers an (averaged, noised) update.
        """
        if (var_list is None):
            var_list = tf.trainable_variables()
        for var in var_list:
            if (not isinstance(var, tf.Variable)):
                raise TypeError(('Argument is not a variable.Variable: %s' % var))
        if (self._batches_per_lot == 1):
            sanitized_grads = self.compute_sanitized_gradients(loss, var_list=var_list)
            # BUGFIX: zip() is a one-shot iterator in Python 3; it was consumed
            # by the dtype check below, leaving apply_gradients with nothing.
            # Materialize it so both uses see the pairs.
            grads_and_vars = list(zip(sanitized_grads, var_list))
            self._assert_valid_dtypes([v for (g, v) in grads_and_vars if (g is not None)])
            apply_grads = self.apply_gradients(grads_and_vars, global_step=global_step, name=name)
            return apply_grads
        # True exactly once every batches_per_lot batches (batch_count starts at 1).
        update_cond = tf.equal(tf.constant(0), tf.mod(self._batch_count, tf.constant(self._batches_per_lot)))
        def non_last_in_lot_op(loss, var_list):
            # Accumulate clipped (but un-noised) gradients and bump the counter.
            sanitized_grads = self.compute_sanitized_gradients(loss, var_list=var_list, add_noise=False)
            update_ops_list = []
            for (var, grad) in zip(var_list, sanitized_grads):
                grad_acc_v = self._grad_accum_dict[var.name]
                update_ops_list.append(grad_acc_v.assign_add(grad))
            update_ops_list.append(self._batch_count.assign_add(1))
            return tf.group(*update_ops_list)
        def last_in_lot_op(loss, var_list, global_step):
            # Add noise once per lot, average over the lot, apply, then reset
            # the accumulators.
            sanitized_grads = self.compute_sanitized_gradients(loss, var_list=var_list, add_noise=True)
            normalized_grads = []
            for (var, grad) in zip(var_list, sanitized_grads):
                grad_acc_v = self._grad_accum_dict[var.name]
                normalized_grad = tf.div(grad_acc_v.assign_add(grad), tf.to_float(self._batches_per_lot))
                normalized_grads.append(normalized_grad)
            with tf.control_dependencies(normalized_grads):
                # BUGFIX: materialize zip() here as well (same Python 3 issue).
                grads_and_vars = list(zip(normalized_grads, var_list))
                self._assert_valid_dtypes([v for (g, v) in grads_and_vars if (g is not None)])
                apply_san_grads = self.apply_gradients(grads_and_vars, global_step=global_step, name='apply_grads')
            resets_list = []
            with tf.control_dependencies([apply_san_grads]):
                for (_, acc) in self._grad_accum_dict.items():
                    reset = tf.assign(acc, tf.zeros_like(acc))
                    resets_list.append(reset)
            resets_list.append(self._batch_count.assign_add(1))
            last_step_update = tf.group(*([apply_san_grads] + resets_list))
            return last_step_update
        update_op = tf.cond(update_cond, (lambda : last_in_lot_op(loss, var_list, global_step)), (lambda : non_last_in_lot_op(loss, var_list)))
        return tf.group(update_op)
class SourceCopyVocabulary():
    """Per-sentence vocabulary for copy mechanisms.

    Index 0 is the padding token, index 1 the unknown token, and source
    tokens follow in order of first appearance.
    """

    def __init__(self, sentence, pad_token=DEFAULT_PADDING_TOKEN, unk_token=DEFAULT_OOV_TOKEN):
        # Accept either a pre-tokenized list or a space-separated string.
        if not isinstance(sentence, list):
            sentence = sentence.split(' ')
        self.src_tokens = sentence
        self.pad_token = pad_token
        self.unk_token = unk_token
        self.token_to_idx = {self.pad_token: 0, self.unk_token: 1}
        self.idx_to_token = {0: self.pad_token, 1: self.unk_token}
        self.vocab_size = 2
        for token in sentence:
            if token not in self.token_to_idx:
                self.token_to_idx[token] = self.vocab_size
                self.idx_to_token[self.vocab_size] = token
                self.vocab_size += 1

    def get_token_from_idx(self, idx):
        """Inverse lookup; raises KeyError for an unknown index."""
        return self.idx_to_token[idx]

    def get_token_idx(self, token):
        """Index of ``token``, falling back to the unknown-token index."""
        return self.token_to_idx.get(token, self.token_to_idx[self.unk_token])

    def index_sequence(self, list_tokens):
        """Map a token sequence to its index sequence."""
        return [self.get_token_idx(token) for token in list_tokens]

    def get_copy_map(self, list_tokens):
        """Return (position, token-index) pairs for ``list_tokens``, with the
        unknown token prepended at position 0."""
        src_indices = [self.get_token_idx(self.unk_token)] + self.index_sequence(list_tokens)
        # enumerate already yields exactly the (position, index) pairs.
        return list(enumerate(src_indices))

    def get_special_tok_list(self):
        """The two reserved tokens, in index order."""
        return [self.pad_token, self.unk_token]

    def __repr__(self):
        return json.dumps(self.idx_to_token)
class TestAppendpath(object):
    """Tests for ``appendpath``: joining an installation prefix with a file
    path, de-duplicating shared leading components and preserving whether
    the prefix was absolute (``ajoin`` builds the absolute expectation)."""
    def test_1(self):
        # Simple prefix/name joins; a leading '/' on either side is absorbed.
        assert_equal(appendpath('prefix', 'name'), join('prefix', 'name'))
        assert_equal(appendpath('/prefix', 'name'), ajoin('prefix', 'name'))
        assert_equal(appendpath('/prefix', '/name'), ajoin('prefix', 'name'))
        assert_equal(appendpath('prefix', '/name'), join('prefix', 'name'))
    def test_2(self):
        # Multi-component paths; a shared leading component appears once.
        assert_equal(appendpath('prefix/sub', 'name'), join('prefix', 'sub', 'name'))
        assert_equal(appendpath('prefix/sub', 'sup/name'), join('prefix', 'sub', 'sup', 'name'))
        assert_equal(appendpath('/prefix/sub', '/prefix/name'), ajoin('prefix', 'sub', 'name'))
    def test_3(self):
        # Deeper shared prefixes: only the common leading run is collapsed.
        assert_equal(appendpath('/prefix/sub', '/prefix/sup/name'), ajoin('prefix', 'sub', 'sup', 'name'))
        assert_equal(appendpath('/prefix/sub/sub2', '/prefix/sup/sup2/name'), ajoin('prefix', 'sub', 'sub2', 'sup', 'sup2', 'name'))
        assert_equal(appendpath('/prefix/sub/sub2', '/prefix/sub/sup/name'), ajoin('prefix', 'sub', 'sub2', 'sup', 'name'))
def compute_is(opts, num_gen, num_splits):
    """Inception Score over ``num_gen`` generated samples.

    The score is computed independently on ``num_splits`` contiguous splits
    of the generator's Inception softmax outputs; returns (mean, std) of
    the per-split scores. Non-rank-0 processes return (nan, nan).
    """
    # BUGFIX: the URL string literal was truncated in this copy of the file
    # (unterminated string -> SyntaxError). Restored to the StyleGAN3
    # metrics Inception detector pickle.
    detector_url = 'https://api.ngc.nvidia.com/v2/models/nvidia/research/stylegan3/versions/1/files/metrics/inception-2015-12-05.pkl'
    detector_kwargs = dict(no_output_bias=True)
    gen_probs = metric_utils.compute_feature_stats_for_generator(opts=opts, detector_url=detector_url, detector_kwargs=detector_kwargs, capture_all=True, max_items=num_gen).get_all()
    if (opts.rank != 0):
        return (float('nan'), float('nan'))
    scores = []
    for i in range(num_splits):
        part = gen_probs[((i * num_gen) // num_splits):(((i + 1) * num_gen) // num_splits)]
        # IS = exp(E_x[ KL(p(y|x) || p(y)) ]) with p(y) estimated on the split.
        kl = (part * (np.log(part) - np.log(np.mean(part, axis=0, keepdims=True))))
        kl = np.mean(np.sum(kl, axis=1))
        scores.append(np.exp(kl))
    return (float(np.mean(scores)), float(np.std(scores)))
# _method  — truncated decorator from extraction (original decorator name lost); kept as a comment.
class ToricLattice_ambient(ToricLattice_generic, FreeModule_ambient_pid):
    """Ambient toric lattice: a free ZZ-module of fixed rank carrying
    plain/LaTeX print names for itself and for its dual lattice."""
    Element = ToricLatticeElement
    def __init__(self, rank, name, dual_name, latex_name, latex_dual_name):
        # The four name arguments only affect printing; the module itself is
        # the standard ZZ^rank.
        super().__init__(ZZ, rank)
        self._name = name
        self._dual_name = dual_name
        self._latex_name = latex_name
        self._latex_dual_name = latex_dual_name
    def _sage_input_(self, sib, coerced):
        # Reproducible ``sage_input`` form; LaTeX names are only emitted when
        # they differ from the plain names.
        args = [self.rank(), self._name, self._dual_name]
        if ((self._latex_name != self._name) or (self._latex_dual_name != self._dual_name)):
            args.extend([self._latex_name, self._latex_dual_name])
        return sib.name('ToricLattice')(*args)
    def __richcmp__(self, right, op):
        # Compare by rank first, then lexicographically by the four name
        # strings; only objects of the exact same type are comparable.
        if (self is right):
            return rich_to_bool(op, 0)
        if (type(self) is not type(right)):
            return NotImplemented
        lx = self.rank()
        rx = right.rank()
        if (lx != rx):
            return richcmp_not_equal(lx, rx, op)
        return richcmp([self._name, self._dual_name, self._latex_name, self._latex_dual_name], [right._name, right._dual_name, right._latex_name, right._latex_dual_name], op)
    def _latex_(self):
        # LaTeX output is just the configured LaTeX name.
        return self._latex_name
    def _repr_(self):
        return ('%d-d lattice %s' % (self.dimension(), self._name))
    def ambient_module(self):
        # An ambient lattice is its own ambient module.
        return self
    def dual(self):
        # Cached on first use; names are swapped so the dual of the dual
        # prints like this lattice.
        if ('_dual' not in self.__dict__):
            self._dual = ToricLattice(self.rank(), self._dual_name, self._name, self._latex_dual_name, self._latex_name)
        return self._dual
    def plot(self, **options):
        # Lattice points are shown by default; delegate to ToricPlotter.
        if ('show_lattice' not in options):
            options['show_lattice'] = True
        tp = ToricPlotter(options, self.degree())
        tp.adjust_options()
        return tp.plot_lattice()
def test_nested_for_map_for_loop_with_tasklet():
    """The program must fill a 10x10x10 array with 100*i + 10*j + k."""
    expected = np.zeros([10, 10, 10], dtype=np.int64)
    # 100*i + 10*j + k is exactly the flattened index, so one flat loop
    # reproduces the triple-nested reference.
    for flat in range(1000):
        i, rem = divmod(flat, 100)
        j, k = divmod(rem, 10)
        expected[i, j, k] = flat
    result = nested_for_map_for_loop_with_tasklet()
    assert np.array_equal(result, expected)
def create_supervised_evaluator(model, metrics, device=None):
    """Build an ignite ``Engine`` that evaluates ``model`` on batches of
    (data, pids, camids) and emits (features, pids, camids).

    Each metric in ``metrics`` (name -> metric) is attached to the engine
    under its name. Inference runs under ``torch.no_grad()``.
    """
    def _evaluation_step(engine, batch):
        model.eval()
        with torch.no_grad():
            data, pids, camids = batch
            # Move inputs to the target device only when CUDA is available.
            if torch.cuda.device_count() >= 1:
                data = data.to(device)
            features = model(data)
            return features, pids, camids
    evaluator = Engine(_evaluation_step)
    for name, metric in metrics.items():
        metric.attach(evaluator, name)
    return evaluator
def amsterdam_typical_train(listener=False):
    """Build the 'typical' Amsterdam training set of color-reference trials.

    Each tuple is (utterance, target_index, [color_0, color_1]) with colors
    as (H, S, V) triples. The first group (light/dark purple contrasts,
    repeated 3x) is balanced with the second group (purple vs pinkish-purple
    contrasts, repeated 2x). Converted to instances by ``triples_to_insts``;
    ``listener`` selects listener- vs speaker-style instances there.
    """
    data = (([('light purple', 0, [(260.0, 45.0, 100.0), (260.0, 100.0, 100.0)]), ('purple', 0, [(260.0, 45.0, 100.0), (260.0, 100.0, 100.0)]), ('purple', 1, [(260.0, 45.0, 100.0), (260.0, 100.0, 100.0)]), ('purple', 1, [(260.0, 45.0, 100.0), (260.0, 100.0, 100.0)]), ('light purple', 1, [(260.0, 100.0, 100.0), (260.0, 45.0, 100.0)]), ('purple', 1, [(260.0, 100.0, 100.0), (260.0, 45.0, 100.0)]), ('purple', 0, [(260.0, 100.0, 100.0), (260.0, 45.0, 100.0)]), ('purple', 0, [(260.0, 100.0, 100.0), (260.0, 45.0, 100.0)])] * 3) + ([('pinkish purple', 1, [(260.0, 100.0, 100.0), (300.0, 100.0, 100.0)]), ('pinkish', 1, [(260.0, 100.0, 100.0), (300.0, 100.0, 100.0)]), ('purple', 1, [(260.0, 100.0, 100.0), (300.0, 100.0, 100.0)]), ('purple', 0, [(260.0, 100.0, 100.0), (300.0, 100.0, 100.0)]), ('purple', 0, [(260.0, 100.0, 100.0), (300.0, 100.0, 100.0)]), ('purple', 0, [(260.0, 100.0, 100.0), (300.0, 100.0, 100.0)]), ('pinkish purple', 0, [(300.0, 100.0, 100.0), (260.0, 100.0, 100.0)]), ('pinkish', 0, [(300.0, 100.0, 100.0), (260.0, 100.0, 100.0)]), ('purple', 0, [(300.0, 100.0, 100.0), (260.0, 100.0, 100.0)]), ('purple', 1, [(300.0, 100.0, 100.0), (260.0, 100.0, 100.0)]), ('purple', 1, [(300.0, 100.0, 100.0), (260.0, 100.0, 100.0)]), ('purple', 1, [(300.0, 100.0, 100.0), (260.0, 100.0, 100.0)])] * 2))
    return triples_to_insts(data, listener=listener)
def hough_line(image, theta=None):
    """Straight-line Hough transform of a 2D image.

    Validates the input dimensionality and supplies the default angle grid
    (180 evenly spaced angles in [-pi/2, pi/2)) before delegating to the
    compiled ``_hough_line`` kernel.

    Raises
    ------
    ValueError
        If ``image`` is not two-dimensional.
    """
    if image.ndim != 2:
        raise ValueError('The input image `image` must be 2D.')
    angles = np.linspace(-np.pi / 2, np.pi / 2, 180, endpoint=False) if theta is None else theta
    return _hough_line(image, theta=angles)
def initial_or_load(checkpoint_path_load, model, optimizer, dir):
    """Resume training state from a checkpoint when one exists; otherwise
    seed all RNGs (from the module-level ``args``) for a fresh run.

    Returns (begin_epoch, best_loss, e).
    """
    if not os.path.exists(checkpoint_path_load):
        # Fresh run: fix every random seed for reproducibility.
        torch.manual_seed(args.seed)
        if args.device == 'cuda':
            torch.cuda.manual_seed(args.seed)
        random.seed(args.seed)
        return (1, math.inf, 0)
    # Resume: announce the load on stdout and append it to the run log.
    model_loaded_str = '******model is loaded'
    print(model_loaded_str)
    with open(dir + 'whole_log.txt', 'a') as f:
        print(model_loaded_str, file=f)
    checkpoint = load_model(checkpoint_path_load, model, optimizer)
    return (checkpoint['epoch'], checkpoint['best_loss'], checkpoint['e'])
class CTypeDefNode(StatNode):
    """A ``ctypedef`` statement: binds a typedef name to a C type.

    Parser-set attributes used here: base_type, declarator, visibility,
    in_pxd, api.
    """
    child_attrs = ['base_type', 'declarator']
    def analyse_declarations(self, env):
        # Resolve the underlying C type, then let the declarator compute the
        # typedef's name (and optional cname), and register it in the scope.
        base = self.base_type.analyse(env)
        (name_declarator, type) = self.declarator.analyse(base, env, visibility=self.visibility, in_pxd=self.in_pxd)
        name = name_declarator.name
        cname = name_declarator.cname
        entry = env.declare_typedef(name, type, self.pos, cname=cname, visibility=self.visibility, api=self.api)
        if type.is_fused:
            # Fused typedefs are marked as if externally defined so no single
            # C typedef is emitted for them.
            entry.in_cinclude = True
        if (self.in_pxd and (not env.in_cinclude)):
            entry.defined_in_pxd = 1
    def analyse_expressions(self, env):
        # A typedef contains no expressions to analyse.
        return self
    def generate_execution_code(self, code):
        # Typedefs generate no runtime code.
        pass
def _fetch_overview_paragraphs(overview):
    """Fetch ``overview`` and return its Overview paragraphs (may raise).

    Parses the page's ``td.TextObject`` cell and returns the text of its
    direct ``<p>`` children, skipping the first (a heading), unidecoded.
    """
    soup = BeautifulSoup(urllib.request.urlopen(overview), 'html.parser')
    overview_data = soup.find('td', {'class': 'TextObject'})
    return [unidecode(paragraph.text.strip()) for paragraph in overview_data.findAll('p', recursive=False)[1:]]

def get_overview_paragraphs(overview_links, specific_summary_dir):
    """Scrape each (url, name) in ``overview_links`` and write the overview
    text to ``<specific_summary_dir>/overview.json``.

    On a scrape failure, waits 5s and retries once; a second failure is
    logged to the module-level ``f_errors`` file and the link is skipped.
    (Refactored: the duplicated scrape block now lives in
    ``_fetch_overview_paragraphs``.)
    """
    for (index, (overview, name)) in enumerate(overview_links):
        print(name, overview)
        try:
            overview_paragraphs = _fetch_overview_paragraphs(overview)
        except Exception as e:
            print(e)
            time.sleep(5)  # transient failure: back off before the single retry
            try:
                overview_paragraphs = _fetch_overview_paragraphs(overview)
            except Exception as e:
                print('No book summary for: ', overview, e)
                f_errors.write((((((overview + '\t') + name) + '\t') + specific_summary_dir) + '\n'))
                continue
        overview_text = '\n'.join(overview_paragraphs)
        overview_dict = {}
        overview_dict['name'] = 'Overview'
        overview_dict['summary'] = overview_text
        output_fname = os.path.join(specific_summary_dir, 'overview.json')
        with open(output_fname, 'w', encoding='utf-8') as fp:
            json.dump(overview_dict, fp)
def load_weights_add_extra_dim(target, source_state, extra_dim=1):
    """Load ``source_state`` into ``target``, widening mismatched weights.

    For each parameter of ``target`` (skipping ``num_batches_tracked``
    buffers) that exists in ``source_state`` but differs in shape, the
    source tensor is padded along dim 1 with ``extra_dim`` orthogonally
    initialised slices so it matches the wider target layer.

    NOTE(review): assumes mismatched tensors are 4-D conv weights
    (out, in, kH, kW) that only grew along dim 1 — confirm with callers.
    Keys missing from ``source_state`` and the skipped buffers are left out
    of the new state dict while ``load_state_dict`` runs with its default
    strict=True; presumably the models involved make this line up — verify.
    """
    new_dict = OrderedDict()
    for (k1, v1) in target.state_dict().items():
        if (not ('num_batches_tracked' in k1)):
            if (k1 in source_state):
                tar_v = source_state[k1]
                if (v1.shape != tar_v.shape):
                    # Pad the input-channel dimension up to the target width.
                    (c, _, w, h) = v1.shape
                    pads = torch.zeros((c, extra_dim, w, h), device=tar_v.device)
                    nn.init.orthogonal_(pads)
                    tar_v = torch.cat([tar_v, pads], 1)
                new_dict[k1] = tar_v
    target.load_state_dict(new_dict)
class FinegrainedDecoder(nn.Module):
    """Decoder fusing per-frame image features with a 3-level temporal
    pyramid of video features through a 2D-self / 3D-cross attention block."""
    def __init__(self, hidden_channels, drop_path, max_q_thw, max_kv_thw):
        super().__init__()
        self.hidden_channels = hidden_channels
        self.max_q_thw = max_q_thw
        self.max_kv_thw = max_kv_thw
        # Two temporal stride-2 conv stages (kernel only along T), so the
        # pyramid has temporal rates T, T/2, T/4 with spatial dims untouched.
        self.v3ds = nn.Sequential(nn.Conv3d(hidden_channels, hidden_channels, kernel_size=(3, 1, 1), stride=(2, 1, 1), padding=(1, 0, 0), bias=False), LayerNorm3D(hidden_channels, ((- 1), 1, 1, 1)), nn.GELU(), nn.Conv3d(hidden_channels, hidden_channels, kernel_size=(3, 1, 1), stride=(2, 1, 1), padding=(1, 0, 0), bias=False))
        # One learned level embedding per pyramid level (3 levels).
        self.image_level_embeds = nn.ParameterList([nn.Parameter(torch.zeros(hidden_channels)) for _ in range(3)])
        self.self2d_cross3d_block = Self2DCross3DBlock(hidden_channels, num_heads=(hidden_channels // 64), drop_path=drop_path, max_q_thw=max_q_thw, max_kv_thw=max_kv_thw)
    def get_temporal_pyramids(self, videos):
        # Level 0 = original temporal rate, level 1 = half, level 2 = quarter.
        videos_t2 = [self.v3ds[0](video) for video in videos]
        videos_t4 = [self.v3ds[1:](video_t2) for video_t2 in videos_t2]
        return [videos, videos_t2, videos_t4]
    def forward(self, images_list, videos, num_frames_list):
        # Split the packed video tensor back into per-clip tensors.
        # NOTE(review): images_list is presumably ordered so that
        # interpolate(scale_factor=2) upsamples the previous level's
        # decodings to the current level's resolution — confirm with callers.
        videos = videos.transpose(0, 1).split(num_frames_list, dim=1)
        videos_list = self.get_temporal_pyramids(videos)
        decodings = None
        for (l, (images, videos)) in enumerate(zip(images_list, videos_list)):
            # Add the upsampled coarser decodings as a residual (skip at level 0).
            images = (images if (decodings is None) else (images + nn.functional.interpolate(decodings, scale_factor=2)))
            # Per clip: flatten spatial dims to tokens, add the level embedding,
            # cross-attend into the matching video level, then restore shape.
            decodings = torch.cat([self.self2d_cross3d_block((image.flatten(1).transpose(0, 1).unsqueeze(0) + self.image_level_embeds[l]), video.flatten(1).transpose(0, 1).unsqueeze(0), q_thw=(1, *image.shape[(- 2):]), kv_thw=video.shape[1:]) for (image, video) in zip(images, videos)]).transpose(1, 2).reshape_as(images).contiguous()
        return decodings
def setup(app):
    """Sphinx extension entry point: register the autodoc docstring
    processors (in their required order) and the Sage transform."""
    docstring_processors = (
        process_docstring_cython,
        process_directives,
        process_docstring_module_title,
        process_dollars,
        process_inherited,
        skip_TESTS_block,
    )
    for processor in docstring_processors:
        app.connect('autodoc-process-docstring', processor)
    app.add_transform(SagemathTransform)
def extract_audio_feature_birds(model, audio_switch, **kwargs):
    """Extract audio features for the birds train/test splits and cache them.

    For each split, collects the audio file paths listed in
    ``data/birds/<split>.json``, runs ``extract_audio_feature``, groups the
    flat clip axis into 10 clips per item (presumably 10 audio clips per
    image — confirm against the dataset), and pickles both the features and
    their lengths under ``data/birds/<split>/``.

    ``kwargs`` is accepted for interface compatibility and unused here.
    """
    root = os.getcwd()
    for split in ('train', 'test'):
        split_json = json.load(open(os.path.join(root, './data/birds/{}.json'.format(split)), 'r'))
        split_filenames = []
        for _d in split_json['data']:
            # Audio paths are stored relative to the split's audio base path.
            split_filenames += [os.path.join(split_json['audio_base_path'], __d) for __d in _d['audio']]
        (split_features, split_audio_lens) = extract_audio_feature(model, split_filenames)
        split_shape = split_features.shape
        # (n_clips, feat) -> (n_items, 10, feat); lengths likewise.
        split_features = split_features.reshape((split_shape[0] // 10), 10, split_shape[1])
        split_audio_lens = split_audio_lens.reshape((split_shape[0] // 10), 10)
        with open(os.path.join(root, './data/birds/{}/audio_features_{}.pickle'.format((split if (split == 'train') else 'test'), audio_switch)), 'wb') as fp:
            pickle.dump(split_features, fp)
        with open(os.path.join(root, './data/birds/{}/audio_features_lens_{}.pickle'.format((split if (split == 'train') else 'test'), audio_switch)), 'wb') as fp:
            pickle.dump(split_audio_lens, fp)
def verify_ninja_availability():
    """Raise RuntimeError if the ``ninja`` build tool cannot be executed.

    Runs ``ninja --version`` with its output discarded; an OSError (binary
    missing / not executable) is translated into a RuntimeError.
    """
    with open(os.devnull, 'wb') as sink:
        try:
            subprocess.check_call(['ninja', '--version'], stdout=sink)
        except OSError:
            raise RuntimeError('Ninja is required to load C++ extensions')
def extract_requests_exception_details(exc: RequestException) -> tuple[str, list[str]]:
    """Translate a ``requests`` exception into a display-friendly pair.

    Returns ``(message, extra)`` where ``message`` is a short headline and
    ``extra`` holds zero or more detail strings extracted from the
    exception's nested causes.
    """
    from requests.exceptions import SSLError, ConnectionError, ChunkedEncodingError
    from urllib3.exceptions import MaxRetryError
    if isinstance(exc, SSLError):
        message = 'SSL verification problem'
        # The wrapped urllib3 error carries the human-readable reason.
        reason = str(exc.args[0].reason)
        extra = [remove_ssl_line_number(reason)]
    elif isinstance(exc, ConnectionError):
        message = 'Connection failed'
        inner = exc.args[0]
        if isinstance(inner, MaxRetryError) and inner.reason is not None:
            reason_text = inner.reason.args[0]
            # Drop the "<class>: " prefix when one is present.
            if ':' in reason_text:
                reason_text = reason_text.split(':', maxsplit=1)[1]
            extra = [reason_text.strip()]
        else:
            extra = [' '.join(map(str, inner.args))]
    elif isinstance(exc, ChunkedEncodingError):
        message = 'Connection broken. The server declared chunked encoding but sent an invalid chunk'
        extra = [str(exc.args[0].args[1])]
    else:
        # Fall back to the exception's own text with no extra detail.
        message = str(exc)
        extra = []
    return (message, extra)
def test_parsing_context():
    """``dace.in_program()`` must be False when the function runs as plain
    Python and True once the same function is compiled as a dace program."""
    def func(a):
        if dace.in_program():
            a[:] = 1
        else:
            a[:] = 2
    first = np.random.rand(10)
    second = np.random.rand(10)
    func(first)  # plain Python call: takes the else branch
    dace.program(func)(second)  # compiled call: takes the then branch
    assert np.allclose(first, 2)
    assert np.allclose(second, 1)
def load_fields(train, valid, checkpoint):
    """Load torchtext fields for the datasets and return them.

    Reads the saved vocab from ``opt.data + '.vocab.pt'`` (module-level
    ``opt``), keeps only fields present in the training examples, and
    attaches them to both datasets. When resuming (``opt.train_from``), the
    returned fields come from the checkpoint's vocab instead.

    NOTE(review): in the resume branch the checkpoint fields are returned
    without being filtered or re-attached to train/valid — confirm that is
    intended by the callers.
    """
    fields = onmt.IO.ONMTDataset.load_fields(torch.load((opt.data + '.vocab.pt')))
    # Keep only fields that actually occur in the training examples.
    fields = dict([(k, f) for (k, f) in fields.items() if (k in train.examples[0].__dict__)])
    train.fields = fields
    valid.fields = fields
    if opt.train_from:
        print(('Loading vocab from checkpoint at %s.' % opt.train_from))
        fields = onmt.IO.ONMTDataset.load_fields(checkpoint['vocab'])
    print((' * vocabulary size. source = %d; target = %d' % (len(fields['src'].vocab), len(fields['tgt'].vocab))))
    return fields
def unpack_vcs_link(link, location):
    """Check out the repository behind the VCS ``link`` into ``location``.

    Raises
    ------
    ValueError
        If no registered VCS backend supports the link's scheme (previously
        this surfaced as an opaque ``AttributeError`` on ``None``).
    """
    vcs_backend = _get_used_vcs_backend(link)
    if vcs_backend is None:
        raise ValueError('Could not determine a VCS backend for %s' % link)
    vcs_backend.unpack(location)
# _json  — truncated decorator from extraction (likely a dataclass/json decorator); kept as a comment.
class InProgressCacheMetrics():
    """Mutable progress counters for an in-progress cache build.

    NOTE(review): the fields use dataclass syntax (``dataclasses.field``),
    so this class is presumably wrapped by the truncated decorator above —
    confirm against the original source.
    """
    # Rows fully processed so far.
    rows_finished: int = 0
    # Chunks fully processed so far.
    chunks_finished: int = 0
    # Shards fully processed so far.
    shards_finished: int = 0
    # Per-field counts accumulated during the build.
    field_counts: Dict[str, int] = dataclasses.field(default_factory=dict)
    # Set once the whole cache build completes.
    is_finished: bool = False
class Thin_Model(object):
    """Thin residual network (3 stages of 5 units, 16/16/32/64 filters) for
    10-class classification, written in TF1 graph style."""
    def __init__(self, mode, images, labels):
        # mode is 'train' or 'eval' (checked in _build_model); the whole
        # graph is built eagerly at construction time.
        self.mode = mode
        self._build_model(images, labels)
    def add_internal_summaries(self):
        pass
    def _stride_arr(self, stride):
        # NHWC stride vector for tf.nn.conv2d.
        return [1, stride, stride, 1]
    def _build_model(self, images, labels, var_scope_str='resnet_thin'):
        """Build the inference graph plus accuracy and cross-entropy nodes."""
        assert ((self.mode == 'train') or (self.mode == 'eval'))
        with tf.variable_scope(var_scope_str):
            with tf.variable_scope('input'):
                self.x_input = images
                # Labels arrive one-hot; keep the class ids for the loss.
                self.y_input = tf.argmax(labels, axis=1)
                input_standardized = tf.map_fn((lambda img: tf.image.per_image_standardization(img)), self.x_input)
                x = self._conv('init_conv', input_standardized, 3, 3, 16, self._stride_arr(1))
            # Per-stage first-unit strides and pre-activation flags.
            strides = [1, 2, 2]
            activate_before_residual = [True, False, False]
            res_func = self._residual
            filters = [16, 16, 32, 64]
            with tf.variable_scope('unit_1_0'):
                x = res_func(x, filters[0], filters[1], self._stride_arr(strides[0]), activate_before_residual[0])
            for i in range(1, 5):
                with tf.variable_scope(('unit_1_%d' % i)):
                    x = res_func(x, filters[1], filters[1], self._stride_arr(1), False)
            with tf.variable_scope('unit_2_0'):
                x = res_func(x, filters[1], filters[2], self._stride_arr(strides[1]), activate_before_residual[1])
            for i in range(1, 5):
                with tf.variable_scope(('unit_2_%d' % i)):
                    x = res_func(x, filters[2], filters[2], self._stride_arr(1), False)
            with tf.variable_scope('unit_3_0'):
                x = res_func(x, filters[2], filters[3], self._stride_arr(strides[2]), activate_before_residual[2])
            for i in range(1, 5):
                with tf.variable_scope(('unit_3_%d' % i)):
                    x = res_func(x, filters[3], filters[3], self._stride_arr(1), False)
            with tf.variable_scope('unit_last'):
                # Final BN + leaky ReLU + global average pooling head.
                x = self._batch_norm('final_bn', x)
                x = self._relu(x, 0.1)
                x = self._global_avg_pool(x)
            with tf.variable_scope('logit'):
                self.pre_softmax = self._fully_connected(x, 10)
            # Prediction/accuracy bookkeeping.
            self.predictions = tf.argmax(self.pre_softmax, 1)
            self.correct_prediction = tf.equal(self.predictions, self.y_input)
            self.num_correct = tf.reduce_sum(tf.cast(self.correct_prediction, tf.int64))
            self.accuracy = tf.reduce_mean(tf.cast(self.correct_prediction, tf.float32))
            with tf.variable_scope('costs'):
                self.y_xent = tf.nn.sparse_softmax_cross_entropy_with_logits(logits=self.pre_softmax, labels=self.y_input)
                self.xent = tf.reduce_sum(self.y_xent, name='y_xent')
                self.mean_xent = tf.reduce_mean(self.y_xent)
                self.weight_decay_loss = self._decay()
    def _batch_norm(self, name, x):
        """Batch norm layer; uses training statistics only in 'train' mode."""
        with tf.name_scope(name):
            return tf.contrib.layers.batch_norm(inputs=x, decay=0.9, center=True, scale=True, activation_fn=None, updates_collections=None, is_training=(self.mode == 'train'))
    def _residual(self, x, in_filter, out_filter, stride, activate_before_residual=False):
        """Pre-activation residual unit with two 3x3 convolutions."""
        if activate_before_residual:
            # BN+ReLU applied before the split so both branches share it.
            with tf.variable_scope('shared_activation'):
                x = self._batch_norm('init_bn', x)
                x = self._relu(x, 0.1)
                orig_x = x
        else:
            with tf.variable_scope('residual_only_activation'):
                orig_x = x
                x = self._batch_norm('init_bn', x)
                x = self._relu(x, 0.1)
        with tf.variable_scope('sub1'):
            x = self._conv('conv1', x, 3, in_filter, out_filter, stride)
        with tf.variable_scope('sub2'):
            x = self._batch_norm('bn2', x)
            x = self._relu(x, 0.1)
            x = self._conv('conv2', x, 3, out_filter, out_filter, [1, 1, 1, 1])
        with tf.variable_scope('sub_add'):
            if (in_filter != out_filter):
                # Identity shortcut: average-pool to match the stride, then
                # zero-pad the channel dimension to out_filter.
                orig_x = tf.nn.avg_pool(orig_x, stride, stride, 'VALID')
                orig_x = tf.pad(orig_x, [[0, 0], [0, 0], [0, 0], [((out_filter - in_filter) // 2), ((out_filter - in_filter) // 2)]])
            x += orig_x
        tf.logging.debug('image after unit %s', x.get_shape())
        return x
    def _decay(self):
        """Sum of L2 losses over weight variables (names containing 'DW').

        NOTE(review): ``find('DW') > 0`` excludes a name that *starts* with
        'DW' (find would return 0); scoped names always carry a prefix here,
        but confirm ``>= 0`` was not intended.
        """
        costs = []
        for var in tf.trainable_variables():
            if (var.op.name.find('DW') > 0):
                costs.append(tf.nn.l2_loss(var))
        return tf.add_n(costs)
    def _conv(self, name, x, filter_size, in_filters, out_filters, strides):
        """3x3-style convolution with He-normal initialised kernel 'DW'."""
        with tf.variable_scope(name):
            n = ((filter_size * filter_size) * out_filters)
            kernel = tf.get_variable('DW', [filter_size, filter_size, in_filters, out_filters], tf.float32, initializer=tf.random_normal_initializer(stddev=np.sqrt((2.0 / n))))
            return tf.nn.conv2d(x, kernel, strides, padding='SAME')
    def _relu(self, x, leakiness=0.0):
        """Leaky ReLU: x for x >= 0, leakiness * x otherwise."""
        return tf.where(tf.less(x, 0.0), (leakiness * x), x, name='leaky_relu')
    def _fully_connected(self, x, out_dim):
        """Flatten all non-batch dims and apply a dense layer to out_dim."""
        num_non_batch_dimensions = len(x.shape)
        prod_non_batch_dimensions = 1
        for ii in range((num_non_batch_dimensions - 1)):
            prod_non_batch_dimensions *= int(x.shape[(ii + 1)])
        x = tf.reshape(x, [tf.shape(x)[0], (- 1)])
        w = tf.get_variable('DW', [prod_non_batch_dimensions, out_dim], initializer=tf.uniform_unit_scaling_initializer(factor=1.0))
        b = tf.get_variable('biases', [out_dim], initializer=tf.constant_initializer())
        return tf.nn.xw_plus_b(x, w, b)
    def _global_avg_pool(self, x):
        # Mean over the spatial dims of an NHWC tensor.
        assert (x.get_shape().ndims == 4)
        return tf.reduce_mean(x, [1, 2])
class SimpleReplayBuffer(ReplayBuffer):
    def sample(self, batch_size):
        """Uniformly sample ``batch_size`` single-step transitions.

        Episodes and timesteps are drawn independently and uniformly from
        the currently filled portion of the buffer. Returns a dict mapping
        each buffer key to an array whose leading dimension is batch_size.
        """
        assert self._n_transitions_stored >= batch_size
        # Restrict every key to the episodes actually stored so far.
        stored = {key: self._buffer[key][:self._current_size] for key in self._buffer.keys()}
        n_episodes = stored['action'].shape[0]
        horizon = stored['action'].shape[1]
        # NOTE: draw episode indices first, then timesteps (order matters
        # for reproducing the RNG stream).
        episode_idx = np.random.randint(n_episodes, size=batch_size)
        step_idx = np.random.randint(horizon, size=batch_size)
        transitions = {}
        for key in stored.keys():
            picked = stored[key][(episode_idx, step_idx)]
            transitions[key] = picked.reshape(batch_size, *picked.shape[1:])
        assert transitions['action'].shape[0] == batch_size
        return transitions
def load_tf_weights_in_mobilenet_v1(model, config, tf_checkpoint_path):
    """Load TF-slim MobileNetV1 checkpoint weights into a PyTorch ``model``.

    Reads every variable from the checkpoint, maps TF names to PyTorch
    parameters via ``_build_tf_to_pytorch_map``, transposes conv weights to
    PyTorch layout, and copies the data in place. Optimizer/EMA companions
    of each copied weight are dropped from the remaining-weights report.

    Returns the (mutated) ``model``.
    """
    try:
        import numpy as np
        import tensorflow as tf
    except ImportError:
        logger.error('Loading a TensorFlow models in PyTorch, requires TensorFlow to be installed. Please see for installation instructions.')
        raise
    # Load all variables from the TF checkpoint into memory.
    init_vars = tf.train.list_variables(tf_checkpoint_path)
    tf_weights = {}
    for (name, shape) in init_vars:
        logger.info(f'Loading TF weight {name} with shape {shape}')
        array = tf.train.load_variable(tf_checkpoint_path, name)
        tf_weights[name] = array
    tf_to_pt_map = _build_tf_to_pytorch_map(model, config, tf_weights)
    for (name, pointer) in tf_to_pt_map.items():
        logger.info(f'Importing {name}')
        if (name not in tf_weights):
            logger.info(f'{name} not in tf pre-trained weights, skipping')
            continue
        array = tf_weights[name]
        if ('depthwise_weights' in name):
            # Depthwise conv: TF (kH, kW, in, multiplier) layout differs from
            # regular convs, hence the distinct permutation.
            logger.info('Transposing depthwise')
            array = np.transpose(array, (2, 3, 0, 1))
        elif ('weights' in name):
            logger.info('Transposing')
            if (len(pointer.shape) == 2):
                # Dense layer stored as a 1x1 conv in TF: squeeze then transpose.
                array = array.squeeze().transpose()
            else:
                # Regular conv: TF HWIO -> PyTorch OIHW.
                array = np.transpose(array, (3, 2, 0, 1))
        if (pointer.shape != array.shape):
            raise ValueError(f'Pointer shape {pointer.shape} and array shape {array.shape} mismatched')
        logger.info(f'Initialize PyTorch weight {name} {array.shape}')
        pointer.data = torch.from_numpy(array)
        # Remove the weight and its optimizer/EMA shadows from the report.
        tf_weights.pop(name, None)
        tf_weights.pop((name + '/RMSProp'), None)
        tf_weights.pop((name + '/RMSProp_1'), None)
        tf_weights.pop((name + '/ExponentialMovingAverage'), None)
    logger.info(f"Weights not copied to PyTorch model: {', '.join(tf_weights.keys())}")
    return model
# _toolkit()  — truncated decorator call from extraction (likely a toolkit-registration decorator); kept as a comment.
class Spokeo(FunctionToolkit):
    """Declarative toolkit registration: names/descriptions shown to humans
    and to the model, plus the tool classes this toolkit bundles. All
    behavior lives in ``FunctionToolkit`` and the listed tool classes."""
    name_for_human = 'Spokeo'
    description_for_human = 'Toolkit for searching and retrieving personal data from various sources.'
    name_for_model = 'Spokeo'
    description_for_model = 'A people search engine that provides access to personal data from public records, social media, and other sources for background checks, identity verification, and contact information retrieval. It offers tools for searching for people, retrieving personal data, performing background checks, verifying identities, monitoring data changes, retrieving historical data, accessing court records, retrieving criminal history, performing reverse phone lookups, and downloading public records.'
    tool_classes = [SpokeoSearchPeople, SpokeoGetPersonDetails, SpokeoBackgroundCheck, SpokeoVerifyIdentity, SpokeoMonitorDataChanges, SpokeoGetHistoricalData, SpokeoAccessCourtRecords, SpokeoGetCriminalHistory, SpokeoReversePhoneLookup, SpokeoDownloadPublicRecord]
def load_dependency_tree(parents):
    """Build a dependency tree from a 1-based head-index array.

    ``parents[i]`` is the 1-based index of token i's head, with 0 meaning
    "root" and -1 meaning the chain is abandoned. Chains are followed from
    each unvisited token toward the root, linking into already-built
    subtrees when one is reached.

    Returns the root ``DepTree`` (or None if no root token exists).

    Fixes: ``xrange`` (Python 2 only) replaced by ``range``; the unused
    ``prev_idx`` bookkeeping removed.
    """
    size = len(parents)
    trees = [None] * size
    root = None
    for i in range(size):
        if (not trees[i]):
            idx = i
            prev = None
            while True:
                tree = DepTree()
                parent = (parents[idx] - 1)
                if (parent == (- 2)):
                    # parents[idx] == -1: abandon this chain entirely.
                    break
                (tree.parent, tree.idx) = (parent, idx)
                if (prev is not None):
                    # The previously built node hangs off its head (= tree).
                    tree.children.append(prev)
                trees[idx] = tree
                if ((parent >= 0) and (trees[parent] is not None)):
                    # Reached an already-built subtree: attach and stop.
                    trees[parent].children.append(tree)
                    break
                elif (parent == (- 1)):
                    # parents[idx] == 0: this node is the root.
                    root = tree
                    break
                else:
                    # Keep climbing toward the root.
                    prev = tree
                    idx = parent
    return root
class CoverageArchive(Archive):
    """Archive keeping, per coverage objective, the smallest test-case
    chromosome known to cover it."""
    _logger = logging.getLogger(__name__)
    def __init__(self, objectives: OrderedSet[ff.TestCaseFitnessFunction]) -> None:
        super().__init__()
        # objective -> smallest covering chromosome found so far.
        self._covered: dict[(ff.TestCaseFitnessFunction, tcc.TestCaseChromosome)] = {}
        # Objectives without any covering solution yet.
        self._uncovered = OrderedSet(objectives)
        # All objectives ever registered.
        self._objectives = OrderedSet(objectives)
    def update(self, solutions: Iterable[tcc.TestCaseChromosome]) -> bool:
        """Offer ``solutions`` to the archive; a solution replaces the stored
        one for an objective when it covers it with a strictly smaller size.
        Returns True iff any entry changed."""
        updated = False
        for objective in self._objectives:
            best_solution = self._covered.get(objective, None)
            best_size = (sys.maxsize if (best_solution is None) else best_solution.size())
            for solution in solutions:
                covers = solution.get_is_covered(objective)
                size = solution.size()
                if (covers and (size < best_size)):
                    updated = True
                    self._covered[objective] = solution
                    best_size = size
                    if (objective in self._uncovered):
                        # First coverage of this objective: notify the hook.
                        self._uncovered.remove(objective)
                        self._on_target_covered(objective)
        self._logger.debug('ArchiveCoverageGoals: %d', len(self._covered))
        return updated
    def uncovered_goals(self) -> OrderedSet[ff.TestCaseFitnessFunction]:
        """Objectives with no covering solution yet."""
        return self._uncovered
    def covered_goals(self) -> OrderedSet[ff.TestCaseFitnessFunction]:
        """Objectives that currently have a covering solution."""
        return OrderedSet(self._covered.keys())
    def objectives(self) -> OrderedSet[ff.TestCaseFitnessFunction]:
        """All registered objectives."""
        return self._objectives
    def add_goals(self, new_goals: OrderedSet[ff.TestCaseFitnessFunction]) -> None:
        """Register additional objectives (ignoring duplicates); new goals
        start out uncovered."""
        for goal in new_goals:
            if (goal not in self._objectives):
                self._logger.debug('Adding goal: %s', goal)
                self._objectives.add(goal)
                self._uncovered.add(goal)
    def solutions(self) -> OrderedSet[tcc.TestCaseChromosome]:
        """The stored covering chromosomes (asserting they still cover)."""
        assert self._all_covered(), 'Some covered targets have a fitness != 0.0'
        return OrderedSet(self._covered.values())
    def reset(self) -> None:
        """Forget all coverage; every objective becomes uncovered again."""
        self._uncovered.update(self._objectives)
        self._covered.clear()
    def _all_covered(self) -> bool:
        # Consistency check: every stored chromosome still covers its objective.
        return all((chromosome.get_is_covered(fitness_function) for (fitness_function, chromosome) in self._covered.items()))
def validate_country(x: Union[str, int, pd.Series], input_format: Union[str, Tuple[str, ...]]='auto', strict: bool=True) -> Union[bool, pd.Series]:
    """Validate country value(s) against the accepted input format(s).

    Scalars are normalized (string-cast, lower-cased, stripped) and checked
    with ``_check_country``; a Series is normalized element-wise and checked
    per element, returning a boolean Series.
    """
    input_formats = _input_format_to_tuple(input_format)
    if isinstance(x, pd.Series):
        normalized = x.astype(str).str.lower().str.strip()
        return normalized.apply(_check_country, args=(input_formats, strict, False))
    return _check_country(str(x).lower().strip(), input_formats, strict, False)
def _ensure_dir(path, label):
    """Create ``path`` if missing, printing whether it was created or existed."""
    if not os.path.exists(path):
        os.makedirs(path)
        print('{} directory created: {}'.format(label, path))
    else:
        print('{} directory exists: {}'.format(label, path))

def get_model_dir(args):
    """Resolve and create the model and visualization directories.

    Sets ``args.model_dir`` and ``args.viz_dir`` as side effects.
    (Refactored: the duplicated ensure-directory logic now lives in
    ``_ensure_dir``; printed messages are unchanged.)
    """
    model_subdir = get_model_subdir(args)
    model_dir = os.path.join(args.model_root_dir, model_subdir)
    args.model_dir = model_dir
    _ensure_dir(model_dir, 'Model')
    viz_dir = os.path.join(args.viz_root_dir, model_subdir)
    args.viz_dir = viz_dir
    _ensure_dir(viz_dir, 'Visualization')
class LossParameter(_message.Message):
    """Auto-generated protocol-buffer message class: the metaclass builds
    all fields and (de)serialization from the descriptor.

    NOTE(review): ``__metaclass__`` is Python-2 syntax and is ignored by
    Python 3 — on py3 this relies on ``_message.Message`` alone; confirm
    the generator/runtime pairing.
    """
    __metaclass__ = _reflection.GeneratedProtocolMessageType
    DESCRIPTOR = _LOSSPARAMETER
def test_ListArray_IndexedOptionArray_RecordArray_NumpyArray():
    """ListArray over an IndexedOptionArray of records: -1 index entries
    become None, and drop_none(axis=0) must match masking with is_none."""
    index = ak.index.Index64(np.asarray([0, (- 1), 1, (- 1), 4, (- 1), 5]))
    content = ak.contents.recordarray.RecordArray([ak.contents.numpyarray.NumpyArray(np.array([6.6, 1.1, 2.2, 3.3, 4.4, 5.5, 7.7]))], ['nest'])
    indexoptionarray = ak.contents.IndexedOptionArray(index, content)
    # Lists are defined by (start, stop) pairs [4,7), [100,100), [1,3).
    a = ak.contents.listarray.ListArray(ak.index.Index(np.array([4, 100, 1], np.int64)), ak.index.Index(np.array([7, 100, 3, 200], np.int64)), indexoptionarray)
    assert (to_list(a) == [[{'nest': 4.4}, None, {'nest': 5.5}], [], [None, {'nest': 1.1}]])
    # axis=0 removes None *lists* only; the inner Nones must survive.
    assert (to_list(ak.drop_none(a, axis=0)) == to_list(a[(~ ak.is_none(a, axis=0))]) == [[{'nest': 4.4}, None, {'nest': 5.5}], [], [None, {'nest': 1.1}]])
def parse_order_by(toks, start_idx, tables_with_alias, schema, default_tables):
    """Parse an ORDER BY clause from ``toks`` starting at ``start_idx``.

    Returns ``(next_idx, (order_type, val_units))`` where order_type is
    'asc' unless an explicit direction token appears (last one wins).

    NOTE(review): when there is no ORDER BY at ``start_idx`` the early
    return yields ``(idx, val_units)`` — a bare empty list, not the
    (order_type, val_units) pair — so callers must handle both shapes
    (this matches the upstream behavior).
    """
    idx = start_idx
    len_ = len(toks)
    val_units = []
    order_type = 'asc'  # default direction
    if ((idx >= len_) or (toks[idx] != 'order')):
        return (idx, val_units)
    idx += 1
    assert (toks[idx] == 'by')
    idx += 1
    # Collect value units until a clause keyword, ')' or ';' ends the list.
    while ((idx < len_) and (not ((toks[idx] in CLAUSE_KEYWORDS) or (toks[idx] in (')', ';'))))):
        (idx, val_unit) = parse_val_unit(toks, idx, tables_with_alias, schema, default_tables)
        val_units.append(val_unit)
        if ((idx < len_) and (toks[idx] in ORDER_OPS)):
            # Direction applies to the whole clause.
            order_type = toks[idx]
            idx += 1
        if ((idx < len_) and (toks[idx] == ',')):
            idx += 1
        else:
            break
    return (idx, (order_type, val_units))
def wrap_stacked_recurrent(recurrent_func, num_layers=1, residual=False, weight_norm=False):
    """Return a factory that builds a ``StackedRecurrent`` of ``num_layers``
    layers created by ``recurrent_func``.

    The returned callable forwards its arguments to ``recurrent_func`` for
    every layer; each layer is optionally wrapped in weight normalization.
    """
    def build(*args, **kwargs):
        stack = StackedRecurrent(residual)
        for layer_idx in range(num_layers):
            layer = recurrent_func(*args, **kwargs)
            stack.add_module(str(layer_idx), wn(layer) if weight_norm else layer)
        return stack
    return build
class Macaulay2(ExtraTabCompletion, Expect):
    """Pexpect-based interface to the Macaulay2 computer algebra system.

    Runs an ``M2`` subprocess whose prompts are replaced (via an init string
    passed with ``-e``) by sentinel strings so that output can be parsed
    reliably, and defines helper M2 functions (``sageAssign``) used by
    :meth:`set`.
    """
    def __init__(self, maxread=None, script_subdirectory=None, logfile=None, server=None, server_tmpdir=None, command=None):
        # Resolve the M2 executable: explicit argument, then the
        # SAGE_MACAULAY2_COMMAND environment variable, then plain 'M2'.
        if (command is None):
            command = (os.getenv('SAGE_MACAULAY2_COMMAND') or 'M2')
        # Init code: swap both input prompts for sentinels (PROMPT_LOAD when
        # sageLoadMode is set, PROMPT otherwise), disable output wrapping,
        # move the line counter out of the way, and define sageAssign.
        init_str = ((('sageLoadMode = false;ZZ#{Standard,Core#"private dictionary"#"InputPrompt"} = ZZ#{Standard,Core#"private dictionary"#"InputContinuationPrompt"} = ' + ('lineno -> if(sageLoadMode) then "%s" else "%s";' % (PROMPT_LOAD, PROMPT))) + 'printWidth = 0;') + 'lineNumber = 10^9;sageAssign = (k, v) -> (if not instance(v, Sequence) then use v; k <- v);')
        command = ("%s --no-debug --no-readline --silent -e '%s'" % (command, init_str))
        Expect.__init__(self, name='macaulay2', prompt=PROMPT, command=command, server=server, server_tmpdir=server_tmpdir, script_subdirectory=script_subdirectory, verbose_start=False, logfile=logfile, eval_using_file_cutoff=500)
    def __reduce__(self):
        """Support pickling by reconstructing through reduce_load_macaulay2."""
        return (reduce_load_macaulay2, tuple([]))
    def _read_in_file_command(self, filename):
        """Return M2 code that loads *filename* with sageLoadMode enabled."""
        return ('sageLoadMode=true;input "%s";sageLoadMode=false;' % filename)
    def _post_process_from_file(self, s):
        """Strip load-mode prompt lines from output produced by file input."""
        s = '\n'.join((line for line in s.split('\n') if (not line.startswith(PROMPT_LOAD))))
        return s
    def eval(self, code, strip=True, **kwds):
        """Evaluate *code* in Macaulay2 and return the output.

        When ``strip`` is true, Macaulay2's ``oNN =`` output labels are
        removed; the result is wrapped in AsciiArtString either way.
        """
        code = code.strip()
        ans = Expect.eval(self, code, strip=strip, **kwds).strip('\n')
        if strip:
            ans = remove_output_labels(ans)
        return AsciiArtString(ans)
    def restart(self):
        """Restart the Macaulay2 session (M2's own `restart` command)."""
        self.eval('restart')
    def set_seed(self, seed=None):
        """Seed Macaulay2's random number generator; returns the seed used."""
        if (seed is None):
            seed = self.rand_seed()
        self.eval(('setRandomSeed(%d)' % seed))
        self._seed = seed
        return seed
    # Interface-wide display options (GlobalOptions pattern used across Sage
    # interfaces).
    class options(GlobalOptions):
        NAME = 'Macaulay2'
        module = 'sage.interfaces.macaulay2'
        after_print = dict(default=False, description='append AfterPrint type information to textual representations', checker=(lambda val: isinstance(val, bool)))
    def get(self, var):
        """Return the printed value of the M2 variable *var*."""
        return self.eval(('print(%s)' % var), strip=False)
    def set(self, var, value):
        """Assign *value* to the M2 variable *var*.

        Sage-managed variables (``sageNNN``) go through sageAssign so that
        rings are `use`d automatically; raises RuntimeError on an M2 error.
        """
        if re.match('sage\\d+$', var):
            cmd = ('sageAssign(symbol %s,(%s));' % (var, value))
        else:
            cmd = ('%s=(%s);' % (var, value))
        ans = Expect.eval(self, cmd, strip=False)
        # M2 reports evaluation errors on lines prefixed with 'stdio:'.
        if (ans.find('stdio:') != (- 1)):
            raise RuntimeError(('Error evaluating Macaulay2 code.\nIN:%s\nOUT:%s' % (cmd, ans)))
    def clear(self, var):
        """Release the M2 variable *var*.

        Sage-managed names are recycled via the free list; other names are
        unbound with globalAssign.
        """
        if re.match('sage\\d+$', var):
            self._available_vars.append(var)
        else:
            cmd = 'globalAssign(symbol {0},symbol {0});'.format(var)
            Expect.eval(self, cmd, strip=False)
    def _contains(self, v1, v2):
        """Return whether *v2* (a hash table) contains the key *v1* (M2 `#?`)."""
        return (self.eval(('%s#?%s' % (v2, v1))) == self._true_symbol())
    def _object_class(self):
        return Macaulay2Element
    def _function_class(self):
        return Macaulay2Function
    def _function_element_class(self):
        return Macaulay2FunctionElement
    def console(self):
        """Open an interactive Macaulay2 console."""
        macaulay2_console()
    def _install_hints(self):
        # Help text shown when the M2 process cannot be started.
        return "\n    Your attempt to start Macaulay2 failed, either because you do not have\n    have Macaulay2 installed, or because it is not configured correctly.\n\n    - Macaulay2 is not included with Sage, but you can obtain it from\n      No additional\n      optional Sage packages are required.\n\n    - If you have Macaulay2 installed, then perhaps it is not configured\n      correctly. Sage assumes that you can start Macaulay2 with the command\n      M2.\n\n    - Alternatively, you can use the following command\n      to point Sage to the correct command for your system.\n\n          m2 = Macaulay2(command='/usr/local/bin/M2')\n\n      or by setting the environment variable SAGE_MACAULAY2_COMMAND.\n    "
    def _left_list_delim(self):
        return '{'
    def _right_list_delim(self):
        return '}'
    def _true_symbol(self):
        return 'true'
    def _false_symbol(self):
        return 'false'
    def _equality_symbol(self):
        return '=='
    def cputime(self, t=None):
        """Return CPU time used so far, or the delta from a previous value *t*."""
        _t = float(self.cpuTime()._sage_())
        if t:
            return (_t - t)
        else:
            return _t
    def version(self):
        """Return the Macaulay2 version as a tuple of ints, e.g. (1, 17)."""
        s = self.eval('version#"VERSION"')
        return tuple((int(i) for i in s.split('.')))
    def ideal(self, *gens):
        """Return the M2 ideal generated by *gens* (a sequence or one list)."""
        if ((len(gens) == 1) and isinstance(gens[0], (list, tuple))):
            gens = gens[0]
        gens2 = []
        # Coerce every generator into the Macaulay2 interface first.
        for g in gens:
            if (not isinstance(g, Macaulay2Element)):
                gens2.append(self(g))
            else:
                gens2.append(g)
        return self(('ideal {%s}' % ','.join((g.name() for g in gens2))))
    def ring(self, base_ring='ZZ', vars='[x]', order='Lex'):
        """Create a polynomial ring over *base_ring* in *vars* with *order*."""
        return self.new(self._macaulay2_input_ring(base_ring, vars, order))
    def help(self, s):
        """Return Macaulay2 documentation for *s*, trimming the DIV footer."""
        r = self.eval(('help "%s"' % s))
        end = r.rfind('\n\nDIV')
        if (end != (- 1)):
            r = r[:end]
        return AsciiArtString(r)
    def _tab_completion(self):
        # Ask M2 for all alphanumeric global symbols, excluding output
        # labels (oNN) and Sage-managed variables (sageNN).
        r = macaulay2.eval('\n        print toString select(\n            apply(apropos "^[[:alnum:]]+$", toString),\n            s -> not match("^(o|sage)[0-9]+$", s))\n        ')
        r = sorted(r[1:(- 1)].split(', '))
        return r
    def use(self, R):
        """Make the ring *R* the ambient ring for subsequent expressions."""
        R = self(R)
        self.eval(('use %s;' % R.name()), strip=False)
    def new_from(self, type, value):
        """Return `new type from value` evaluated in Macaulay2."""
        type = self(type)
        value = self(value)
        return self.new(('new %s from %s' % (type.name(), value.name())))
    def _macaulay2_input_ring(self, base_ring, vars, order='GRevLex'):
        """Build the M2 source string declaring a polynomial ring."""
        if (not isinstance(base_ring, str)):
            base_ring = self(base_ring).name()
        varstr = str(vars)[1:(- 1)].rstrip(',')
        # Prefix each variable (and each range endpoint) with 'symbol ' so M2
        # treats them as fresh symbols rather than bound names.
        r = re.compile('(?<=,)|(?<=\\.\\.<)|(?<=\\.\\.)(?!<)')
        varstr = ('symbol ' + r.sub('symbol ', varstr))
        return ('%s[%s, MonomialSize=>16, MonomialOrder=>%s]' % (base_ring, varstr, order))
def from_parquet(source):
    """Open a Parquet file and expose its first row group as a lazy ak.Array.

    Each column becomes a VirtualArray whose generator materializes that
    column on demand; materialized columns with a form key are memoized in a
    shared ArrayCache keyed by a call-unique namespace.
    """
    parquet_file = pyarrow.parquet.ParquetFile(source)
    form = _parquet_schema_to_form(parquet_file.schema)
    columns = form.keys()  # expose every column in the schema
    length = parquet_file.metadata.row_group(0).num_rows
    cache = {}
    lazy_cache = ak.layout.ArrayCache(ak._util.MappingProxy.maybe_wrap(cache))
    state = _ParquetState(parquet_file)
    # Fresh, unique cache-key namespace for this call.
    lazy_cache_key = 'ak.from_parquet:{0}'.format(_from_parquet_key())
    row_group = 0  # only the first row group is read
    fields = []
    names = []
    for column in columns:
        subform = form.contents[column].form
        generator = ak.layout.ArrayGenerator(
            state,
            (row_group, (column,), length, subform, lazy_cache, lazy_cache_key),
            length=length,
            form=form.contents[column].form,
        )
        if subform.form_key is None:
            # No stable identity for this column: don't cache it.
            field_cache = None
            cache_key = None
        else:
            field_cache = lazy_cache
            cache_key = '{0}:{1}[{2}]'.format(lazy_cache_key, subform.form_key, row_group)
        fields.append(ak.layout.VirtualArray(generator, field_cache, cache_key))
        names.append(column)
    return ak.Array(ak.layout.RecordArray(fields, names))
def get_overload_no_implementation_error_message(kind, obj):
    """Return the error text shown when an @overload declaration of *obj*
    (a function or method) has no accompanying implementation."""
    sourcelines, file_lineno, filename = get_source_lines_and_file(obj)
    header = f'''Implementation for the {kind} "{_qualified_name(obj)}" is missing. Please make sure a definition is provided and defined after all overload declarations.
File "(unknown)", line {file_lineno}:
'''
    # Show the declaration's own source, then a worked usage example.
    return header + ''.join(sourcelines) + '\n' + _OVERLOAD_EXAMPLE
class Hardtanh(Module):
    r"""Applies the HardTanh function element-wise, clamping the input to
    the range [min_val, max_val].

    Args:
        min_val: lower bound of the clamping range. Default: -1
        max_val: upper bound of the clamping range. Default: 1
        inplace: if True, the clamp is performed in-place. Default: False

    The deprecated keyword arguments ``min_value`` / ``max_value`` are still
    accepted and mapped onto ``min_val`` / ``max_val`` with a warning.

    Raises:
        ValueError: if ``max_val`` is not strictly greater than ``min_val``.
    """

    def __init__(self, min_val=-1, max_val=1, inplace=False, min_value=None, max_value=None):
        super(Hardtanh, self).__init__()
        if min_value is not None:
            warnings.warn('keyword argument min_value is deprecated and renamed to min_val')
            min_val = min_value
        if max_value is not None:
            warnings.warn('keyword argument max_value is deprecated and renamed to max_val')
            max_val = max_value
        self.min_val = min_val
        self.max_val = max_val
        self.inplace = inplace
        # Validate with an exception rather than `assert`: asserts are
        # stripped under `python -O`, which would silently allow an empty
        # or inverted clamping range.
        if self.max_val <= self.min_val:
            raise ValueError('max_val must be larger than min_val')

    def forward(self, input):
        return F.hardtanh(input, self.min_val, self.max_val, self.inplace)

    def extra_repr(self):
        inplace_str = ', inplace' if self.inplace else ''
        return 'min_val={}, max_val={}{}'.format(self.min_val, self.max_val, inplace_str)
def integer_floor(x):
    """Return floor(x) as a Sage integer (ZZ).

    Prefers the object's own .floor() method; otherwise falls back to
    math.floor of a float conversion. Raises NotImplementedError when
    neither strategy applies.
    """
    try:
        return ZZ(x.floor())
    except AttributeError:
        # x has no .floor(); try converting through float instead.
        pass
    try:
        return ZZ(math.floor(float(x)))
    except TypeError:
        pass
    raise NotImplementedError('computation of floor of %s not implemented' % x)
@pytest.mark.experimental
@pytest.mark.parametrize('pad_columns, padding_value, array_size', [(['item_id', 'timestamp'], 0, 5)])
@pytest.mark.parametrize('dataset, result', [pytest.param('dataframe', 'dataframe_two_columns_zeros'), pytest.param('dataframe_pandas', 'dataframe_two_columns_zeros_pandas')])
def test_padder_two_columns_same_value(pad_columns, padding_value, array_size, dataset, result, request):
    """Padding two columns with the same value matches the reference frame.

    Runs against both a Spark and a pandas fixture; the comparison path is
    chosen from the concrete frame type.

    NOTE(review): the `@pytest.mark.*` decorator prefixes above were
    reconstructed from mangled source lines beginning with `.experimental` /
    `.parametrize` — confirm against the original repository.
    """
    dataframe = request.getfixturevalue(dataset)
    dataframe_two_columns_zeros = request.getfixturevalue(result)
    is_spark = isinstance(dataframe, SparkDataFrame)
    padder = Padder(pad_columns=pad_columns, padding_value=padding_value, array_size=array_size)
    padder_interactions = padder.transform(dataframe)
    # Column names are retrieved differently for Spark vs pandas frames.
    columns = (padder_interactions.collect()[0].asDict().keys() if is_spark else padder_interactions.columns)
    assert 'user_id' in columns
    assert 'item_id' in columns
    assert 'timestamp' in columns
    if is_spark:
        # Spark frames are compared via pandas conversion for exact equality.
        assert padder_interactions.toPandas().equals(dataframe_two_columns_zeros.toPandas())
    else:
        assert padder_interactions.equals(dataframe_two_columns_zeros)
class LoadDefault(Load):
    """Default loader: reads an image file from disk with SimpleITK."""

    def __call__(self, file_name: str, id_: str, category: str, subject_id: str) -> typing.Tuple[(np.ndarray, typing.Union[(conv.ImageProperties, None)])]:
        """Load *file_name* and return its voxel array plus image properties.

        The id_/category/subject_id arguments are accepted for interface
        compatibility and are not used by the default implementation.
        """
        image = sitk.ReadImage(file_name)
        array = sitk.GetArrayFromImage(image)
        return (array, conv.ImageProperties(image))
@pytest.mark.gpu
def test_dynamic_maps():
    """Fuse the dynamic-range inputs of two SpMV maps into one connector set
    and run the result with GPU dynamic-thread-block scheduling.

    NOTE(review): the `@pytest.mark.gpu` marker and the `@dace.program(...)`
    decorator below were reconstructed from mangled source lines (`.gpu` and
    a bare tuple of dace types) — confirm against the original repository.
    """
    W = dace.symbol('W')
    H = dace.symbol('H')
    nnz = dace.symbol('nnz')

    @dace.program(dace.uint32[(H + 1)], dace.uint32[nnz], dace.float32[nnz], dace.float32[W], dace.float32[H], dace.float32[H])
    def spmv_2x(A_row, A_col, A_val, x, b, c):
        for i in range(H):
            row_start = A_row[i]
            row_end = A_row[(i + 1)]
            for j in dace.map[row_start:row_end]:
                b[i] += (A_val[j] * x[A_col[j]])
            for j in dace.map[row_start:row_end]:
                c[i] += (A_val[j] * x[A_col[j]])
    height = 1024
    width = 1024
    sdfg = spmv_2x.to_sdfg()
    # Rewire every non-first map's dynamic inputs onto the first map's
    # connector names so both maps share identical range symbols.
    main_entry = None
    main_dict = {}
    for (node, state) in sdfg.all_nodes_recursive():
        if isinstance(node, dace.sdfg.nodes.MapEntry):
            if (main_entry is None):
                main_entry = node
                for e in dace.sdfg.dynamic_map_inputs(state, node):
                    main_dict[e.data.data] = e.dst_conn
            else:
                repl_dict = {}
                for e in dace.sdfg.dynamic_map_inputs(state, node):
                    node.remove_in_connector(e.dst_conn)
                    node.add_in_connector(main_dict[e.data.data])
                    repl_dict[e.dst_conn] = main_dict[e.data.data]
                    e._dst_conn = main_dict[e.data.data]
                node.map.range.replace(repl_dict)
    sdfg.apply_gpu_transformations()
    # Switch all sequential maps to dynamic thread-block scheduling.
    for node in sdfg.all_nodes_recursive():
        if (isinstance(node[0], dace.sdfg.nodes.MapEntry) and (node[0].schedule == dace.dtypes.ScheduleType.Sequential)):
            node[0].schedule = dace.dtypes.ScheduleType.GPU_ThreadBlock_Dynamic
    # Build a random CSR matrix with up to 256 nonzeros per row.
    A_row = np.random.randint(257, size=(height + 1), dtype=dace.uint32.type)
    A_row[0] = 0
    A_row = np.cumsum(A_row, dtype=dace.uint32.type)
    A_col = dace.ndarray([A_row[height]], dtype=dace.uint32)
    for i in range(height):
        A_col[A_row[i]:A_row[(i + 1)]] = np.sort(np.random.choice(width, (A_row[(i + 1)] - A_row[i]), replace=False))
    A_val = np.random.rand(A_row[height]).astype(dace.float32.type)
    A_sparse = scipy.sparse.csr_matrix((A_val, A_col, A_row), dtype=dace.float32.type, shape=(1024, 1024))
    x = np.random.rand(width).astype(dace.float32.type)
    b = np.zeros(height, dtype=dace.float32.type)
    c = np.zeros(height, dtype=dace.float32.type)
    sdfg(A_row=A_row, A_col=A_col, A_val=A_val, x=x, b=b, c=c, H=A_sparse.shape[0], W=A_sparse.shape[1], nnz=A_sparse.nnz)
    # Both outputs must match the SciPy reference to float32 tolerance.
    diff0 = (np.linalg.norm((A_sparse.dot(x) - b)) / float(height))
    diff1 = (np.linalg.norm((A_sparse.dot(x) - c)) / float(height))
    assert (diff0 <= 1e-05)
    assert (diff1 <= 1e-05)
def read_wiki_file(filename):
    """Extract documents from a WikiExtractor-style dump file.

    Documents are delimited by <doc ...> / </doc> tags.  The tag line and the
    line immediately after it (the title) are skipped; remaining lines are
    cleaned, lines containing angle brackets are dropped, and each document's
    kept lines are joined with blank lines.  Closed documents are kept only
    when they have more than two paragraphs; a trailing unclosed document at
    end of file is kept regardless of length (original behavior preserved).
    """
    with open(filename) as handle:
        all_lines = handle.readlines()
    documents = []
    doc_parts = []
    it = iter(all_lines)
    current = next(it, None)
    while current is not None:
        if current.startswith('<doc'):
            # Skip the opening tag here; the shared advance at the bottom of
            # the loop then also skips the title line that follows it.
            current = next(it, None)
        elif current.startswith('</doc'):
            # Keep only documents with more than two paragraphs.
            if len(doc_parts) > 2:
                documents.append('\n\n'.join(doc_parts))
            doc_parts = []
        else:
            cleaned = current.replace('()', ' ').replace('( )', ' ').strip()
            if '<' in cleaned or '>' in cleaned:
                cleaned = ''  # drop residual markup lines entirely
            if cleaned:
                doc_parts.append(cleaned)
        current = next(it, None)
    if doc_parts:
        documents.append('\n\n'.join(doc_parts))
    return documents
def register_types(module):
    """Register ns-3 core/stats classes, enums, and type aliases with the
    PyBindGen *module*.

    Auto-generated binding scaffolding (API-scan output): declaration order
    matters because later entries reference earlier ones through
    ``root_module[...]`` lookups. Do not reorder by hand.
    """
    root_module = module.get_root()
    # -- attribute-construction infrastructure and iterator aliases --
    module.add_class('AttributeConstructionList', import_from_module='ns.core')
    module.add_class('Item', import_from_module='ns.core', outer_class=root_module['ns3::AttributeConstructionList'])
    typehandlers.add_type_alias(u'std::list< ns3::AttributeConstructionList::Item > const_iterator', u'ns3::AttributeConstructionList::CIterator')
    typehandlers.add_type_alias(u'std::list< ns3::AttributeConstructionList::Item > const_iterator*', u'ns3::AttributeConstructionList::CIterator*')
    typehandlers.add_type_alias(u'std::list< ns3::AttributeConstructionList::Item > const_iterator&', u'ns3::AttributeConstructionList::CIterator&')
    module.add_class('CallbackBase', import_from_module='ns.core')
    module.add_class('DataOutputCallback', allow_subclassing=True)
    # -- DefaultDeleter template instantiations --
    module.add_class('DefaultDeleter', import_from_module='ns.core', template_parameters=['ns3::AttributeAccessor'])
    module.add_class('DefaultDeleter', import_from_module='ns.core', template_parameters=['ns3::AttributeChecker'])
    module.add_class('DefaultDeleter', import_from_module='ns.core', template_parameters=['ns3::AttributeValue'])
    module.add_class('DefaultDeleter', import_from_module='ns.core', template_parameters=['ns3::CallbackImplBase'])
    module.add_class('DefaultDeleter', import_from_module='ns.core', template_parameters=['ns3::EventImpl'])
    module.add_class('DefaultDeleter', import_from_module='ns.core', template_parameters=['ns3::Hash::Implementation'])
    module.add_class('DefaultDeleter', import_from_module='ns.core', template_parameters=['ns3::TraceSourceAccessor'])
    # -- simulation utilities, gnuplot helpers, hashing --
    module.add_class('EventId', import_from_module='ns.core')
    module.add_class('FileHelper')
    module.add_class('Gnuplot')
    module.add_class('GnuplotCollection')
    module.add_class('GnuplotDataset')
    module.add_class('GnuplotHelper')
    module.add_class('Hasher', import_from_module='ns.core')
    module.add_class('ObjectBase', allow_subclassing=True, import_from_module='ns.core')
    module.add_class('ObjectDeleter', import_from_module='ns.core')
    module.add_class('ObjectFactory', import_from_module='ns.core')
    # Reference-counted base for ns3::Object (custom ref-count method names).
    module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::Object', 'ns3::ObjectBase', 'ns3::ObjectDeleter'], parent=root_module['ns3::ObjectBase'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
    module.add_class('Simulator', destructor_visibility='private', import_from_module='ns.core')
    module.add_enum('', ['NO_CONTEXT'], outer_class=root_module['ns3::Simulator'], import_from_module='ns.core')
    module.add_class('StatisticalSummary', allow_subclassing=True)
    module.add_class('TimeWithUnit', import_from_module='ns.core')
    # -- TracedValue template instantiations --
    module.add_class('TracedValue', import_from_module='ns.core', template_parameters=['bool'])
    module.add_class('TracedValue', import_from_module='ns.core', template_parameters=['double'])
    module.add_class('TracedValue', import_from_module='ns.core', template_parameters=['unsigned char'])
    module.add_class('TracedValue', import_from_module='ns.core', template_parameters=['unsigned int'])
    module.add_class('TracedValue', import_from_module='ns.core', template_parameters=['unsigned short'])
    # -- TypeId, its enums, inner classes, and hash_t aliases --
    module.add_class('TypeId', import_from_module='ns.core')
    module.add_enum('AttributeFlag', ['ATTR_GET', 'ATTR_SET', 'ATTR_CONSTRUCT', 'ATTR_SGC'], outer_class=root_module['ns3::TypeId'], import_from_module='ns.core')
    module.add_enum('SupportLevel', ['SUPPORTED', 'DEPRECATED', 'OBSOLETE'], outer_class=root_module['ns3::TypeId'], import_from_module='ns.core')
    module.add_class('AttributeInformation', import_from_module='ns.core', outer_class=root_module['ns3::TypeId'])
    module.add_class('TraceSourceInformation', import_from_module='ns.core', outer_class=root_module['ns3::TypeId'])
    typehandlers.add_type_alias(u'uint32_t', u'ns3::TypeId::hash_t')
    typehandlers.add_type_alias(u'uint32_t*', u'ns3::TypeId::hash_t*')
    typehandlers.add_type_alias(u'uint32_t&', u'ns3::TypeId::hash_t&')
    module.add_class('empty', import_from_module='ns.core')
    module.add_class('int64x64_t', import_from_module='ns.core')
    module.add_enum('impl_type', ['int128_impl', 'cairo_impl', 'ld_impl'], outer_class=root_module['ns3::int64x64_t'], import_from_module='ns.core')
    # -- Gnuplot dataset subclasses and their enums --
    module.add_class('Gnuplot2dDataset', parent=root_module['ns3::GnuplotDataset'])
    module.add_enum('Style', ['LINES', 'POINTS', 'LINES_POINTS', 'DOTS', 'IMPULSES', 'STEPS', 'FSTEPS', 'HISTEPS'], outer_class=root_module['ns3::Gnuplot2dDataset'])
    module.add_enum('ErrorBars', ['NONE', 'X', 'Y', 'XY'], outer_class=root_module['ns3::Gnuplot2dDataset'])
    module.add_class('Gnuplot2dFunction', parent=root_module['ns3::GnuplotDataset'])
    module.add_class('Gnuplot3dDataset', parent=root_module['ns3::GnuplotDataset'])
    module.add_class('Gnuplot3dFunction', parent=root_module['ns3::GnuplotDataset'])
    module.add_class('Object', import_from_module='ns.core', parent=root_module['ns3::SimpleRefCount< ns3::Object, ns3::ObjectBase, ns3::ObjectDeleter >'])
    module.add_class('AggregateIterator', import_from_module='ns.core', outer_class=root_module['ns3::Object'])
    # -- SimpleRefCount instantiations for attribute/callback/event/hash/trace types --
    module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::AttributeAccessor', 'ns3::empty', 'ns3::DefaultDeleter<ns3::AttributeAccessor>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
    module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::AttributeChecker', 'ns3::empty', 'ns3::DefaultDeleter<ns3::AttributeChecker>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
    module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::AttributeValue', 'ns3::empty', 'ns3::DefaultDeleter<ns3::AttributeValue>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
    module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::CallbackImplBase', 'ns3::empty', 'ns3::DefaultDeleter<ns3::CallbackImplBase>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
    module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::EventImpl', 'ns3::empty', 'ns3::DefaultDeleter<ns3::EventImpl>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
    module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::Hash::Implementation', 'ns3::empty', 'ns3::DefaultDeleter<ns3::Hash::Implementation>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
    module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::TraceSourceAccessor', 'ns3::empty', 'ns3::DefaultDeleter<ns3::TraceSourceAccessor>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
    # -- Time and its TracedCallback aliases --
    module.add_class('Time', import_from_module='ns.core')
    module.add_enum('Unit', ['Y', 'D', 'H', 'MIN', 'S', 'MS', 'US', 'NS', 'PS', 'FS', 'LAST'], outer_class=root_module['ns3::Time'], import_from_module='ns.core')
    typehandlers.add_type_alias(u'void ( * ) ( ns3::Time )', u'ns3::Time::TracedCallback')
    typehandlers.add_type_alias(u'void ( * ) ( ns3::Time )*', u'ns3::Time::TracedCallback*')
    typehandlers.add_type_alias(u'void ( * ) ( ns3::Time )&', u'ns3::Time::TracedCallback&')
    root_module['ns3::Time'].implicitly_converts_to(root_module['ns3::int64x64_t'])
    # -- attribute accessor/checker/value hierarchy --
    module.add_class('TraceSourceAccessor', import_from_module='ns.core', parent=root_module['ns3::SimpleRefCount< ns3::TraceSourceAccessor, ns3::empty, ns3::DefaultDeleter<ns3::TraceSourceAccessor> >'])
    module.add_class('AttributeAccessor', import_from_module='ns.core', parent=root_module['ns3::SimpleRefCount< ns3::AttributeAccessor, ns3::empty, ns3::DefaultDeleter<ns3::AttributeAccessor> >'])
    module.add_class('AttributeChecker', allow_subclassing=False, automatic_type_narrowing=True, import_from_module='ns.core', parent=root_module['ns3::SimpleRefCount< ns3::AttributeChecker, ns3::empty, ns3::DefaultDeleter<ns3::AttributeChecker> >'])
    module.add_class('AttributeValue', allow_subclassing=False, automatic_type_narrowing=True, import_from_module='ns.core', parent=root_module['ns3::SimpleRefCount< ns3::AttributeValue, ns3::empty, ns3::DefaultDeleter<ns3::AttributeValue> >'])
    module.add_class('BooleanChecker', import_from_module='ns.core', parent=root_module['ns3::AttributeChecker'])
    module.add_class('BooleanValue', import_from_module='ns.core', parent=root_module['ns3::AttributeValue'])
    module.add_class('CallbackChecker', import_from_module='ns.core', parent=root_module['ns3::AttributeChecker'])
    module.add_class('CallbackImplBase', import_from_module='ns.core', parent=root_module['ns3::SimpleRefCount< ns3::CallbackImplBase, ns3::empty, ns3::DefaultDeleter<ns3::CallbackImplBase> >'])
    module.add_class('CallbackValue', import_from_module='ns.core', parent=root_module['ns3::AttributeValue'])
    # -- data-collection framework classes --
    module.add_class('DataCalculator', parent=root_module['ns3::Object'])
    module.add_class('DataCollectionObject', parent=root_module['ns3::Object'])
    module.add_class('DataCollector', parent=root_module['ns3::Object'])
    module.add_class('DataOutputInterface', parent=root_module['ns3::Object'])
    module.add_class('DoubleValue', import_from_module='ns.core', parent=root_module['ns3::AttributeValue'])
    module.add_class('EmptyAttributeAccessor', import_from_module='ns.core', parent=root_module['ns3::AttributeAccessor'])
    module.add_class('EmptyAttributeChecker', import_from_module='ns.core', parent=root_module['ns3::AttributeChecker'])
    module.add_class('EmptyAttributeValue', import_from_module='ns.core', parent=root_module['ns3::AttributeValue'])
    module.add_class('EnumChecker', import_from_module='ns.core', parent=root_module['ns3::AttributeChecker'])
    module.add_class('EnumValue', import_from_module='ns.core', parent=root_module['ns3::AttributeValue'])
    module.add_class('EventImpl', import_from_module='ns.core', parent=root_module['ns3::SimpleRefCount< ns3::EventImpl, ns3::empty, ns3::DefaultDeleter<ns3::EventImpl> >'])
    # -- aggregators and their enums --
    module.add_class('FileAggregator', parent=root_module['ns3::DataCollectionObject'])
    module.add_enum('FileType', ['FORMATTED', 'SPACE_SEPARATED', 'COMMA_SEPARATED', 'TAB_SEPARATED'], outer_class=root_module['ns3::FileAggregator'])
    module.add_class('GnuplotAggregator', parent=root_module['ns3::DataCollectionObject'])
    module.add_enum('KeyLocation', ['NO_KEY', 'KEY_INSIDE', 'KEY_ABOVE', 'KEY_BELOW'], outer_class=root_module['ns3::GnuplotAggregator'])
    # -- values, calculators, probes, and outputs --
    module.add_class('IntegerValue', import_from_module='ns.core', parent=root_module['ns3::AttributeValue'])
    module.add_class('MinMaxAvgTotalCalculator', template_parameters=['double'], parent=[root_module['ns3::DataCalculator'], root_module['ns3::StatisticalSummary']])
    module.add_class('ObjectFactoryChecker', import_from_module='ns.core', parent=root_module['ns3::AttributeChecker'])
    module.add_class('ObjectFactoryValue', import_from_module='ns.core', parent=root_module['ns3::AttributeValue'])
    module.add_class('OmnetDataOutput', parent=root_module['ns3::DataOutputInterface'])
    module.add_class('Probe', parent=root_module['ns3::DataCollectionObject'])
    module.add_class('SqliteDataOutput', parent=root_module['ns3::DataOutputInterface'])
    module.add_class('TimeMinMaxAvgTotalCalculator', parent=root_module['ns3::DataCalculator'])
    module.add_class('TimeProbe', parent=root_module['ns3::Probe'])
    module.add_class('TimeSeriesAdaptor', parent=root_module['ns3::DataCollectionObject'])
    typehandlers.add_type_alias(u'void ( * ) ( double const, double const )', u'ns3::TimeSeriesAdaptor::OutputTracedCallback')
    typehandlers.add_type_alias(u'void ( * ) ( double const, double const )*', u'ns3::TimeSeriesAdaptor::OutputTracedCallback*')
    typehandlers.add_type_alias(u'void ( * ) ( double const, double const )&', u'ns3::TimeSeriesAdaptor::OutputTracedCallback&')
    module.add_class('TimeValue', import_from_module='ns.core', parent=root_module['ns3::AttributeValue'])
    module.add_class('TypeIdChecker', import_from_module='ns.core', parent=root_module['ns3::AttributeChecker'])
    module.add_class('TypeIdValue', import_from_module='ns.core', parent=root_module['ns3::AttributeValue'])
    module.add_class('Uinteger16Probe', parent=root_module['ns3::Probe'])
    module.add_class('Uinteger32Probe', parent=root_module['ns3::Probe'])
    module.add_class('Uinteger8Probe', parent=root_module['ns3::Probe'])
    module.add_class('UintegerValue', import_from_module='ns.core', parent=root_module['ns3::AttributeValue'])
    module.add_class('BooleanProbe', parent=root_module['ns3::Probe'])
    # -- CallbackImpl template instantiations --
    module.add_class('CallbackImpl', import_from_module='ns.core', template_parameters=['ns3::ObjectBase *', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'], parent=root_module['ns3::CallbackImplBase'])
    module.add_class('CallbackImpl', import_from_module='ns.core', template_parameters=['void', 'bool', 'bool', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'], parent=root_module['ns3::CallbackImplBase'])
    module.add_class('CallbackImpl', import_from_module='ns.core', template_parameters=['void', 'double', 'double', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'], parent=root_module['ns3::CallbackImplBase'])
    module.add_class('CallbackImpl', import_from_module='ns.core', template_parameters=['void', 'unsigned char', 'unsigned char', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'], parent=root_module['ns3::CallbackImplBase'])
    module.add_class('CallbackImpl', import_from_module='ns.core', template_parameters=['void', 'unsigned int', 'unsigned int', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'], parent=root_module['ns3::CallbackImplBase'])
    module.add_class('CallbackImpl', import_from_module='ns.core', template_parameters=['void', 'unsigned short', 'unsigned short', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'], parent=root_module['ns3::CallbackImplBase'])
    module.add_class('DoubleProbe', parent=root_module['ns3::Probe'])
    # -- list type aliases --
    typehandlers.add_type_alias(u'std::list< ns3::Ptr< ns3::DataCalculator > >', u'ns3::DataCalculatorList')
    typehandlers.add_type_alias(u'std::list< ns3::Ptr< ns3::DataCalculator > >*', u'ns3::DataCalculatorList*')
    typehandlers.add_type_alias(u'std::list< ns3::Ptr< ns3::DataCalculator > >&', u'ns3::DataCalculatorList&')
    typehandlers.add_type_alias(u'std::list< std::pair< std::string, std::string > >', u'ns3::MetadataList')
    typehandlers.add_type_alias(u'std::list< std::pair< std::string, std::string > >*', u'ns3::MetadataList*')
    typehandlers.add_type_alias(u'std::list< std::pair< std::string, std::string > >&', u'ns3::MetadataList&')
    # -- nested C++ namespaces --
    nested_module = module.add_cpp_namespace('FatalImpl')
    register_types_ns3_FatalImpl(nested_module)
    nested_module = module.add_cpp_namespace('Hash')
    register_types_ns3_Hash(nested_module)
    nested_module = module.add_cpp_namespace('TracedValueCallback')
    register_types_ns3_TracedValueCallback(nested_module)
    nested_module = module.add_cpp_namespace('internal')
    register_types_ns3_internal(nested_module)
def register_Ns3EpcS11SapUli_methods(root_module, cls):
    """Register constructors and attributes for ns3::EpcS11Sap::Uli."""
    # Default constructor.
    cls.add_constructor([])
    # Copy constructor taking a const reference.
    cls.add_constructor([param('ns3::EpcS11Sap::Uli const &', 'arg0')])
    # uint16_t gci field, exposed as a mutable attribute.
    cls.add_instance_attribute('gci', 'uint16_t', is_const=False)
def load(model_name, model_path, device):
    """Instantiate a model from its name, load its weights, and build the
    preprocessing transform.

    Args:
        model_name: hyphen-separated identifier 'prefix-BaseName-variant';
            the middle part is resolved to a constructor in this module's
            namespace, the last part is passed to it as its argument.
        model_path: path to a state-dict checkpoint (loaded on CPU first).
        device: torch device the model is moved to.

    Returns:
        (model, transform): the eval-mode model and a 224x224
        ImageNet-mean/std preprocessing pipeline.
    """
    (_, base_name, variant) = model_name.split('-')
    # SECURITY: eval() resolves the constructor from a string taken from
    # model_name — this must never come from untrusted input. Prefer an
    # explicit name->class registry dict.
    model = eval(base_name)(variant)
    # NOTE(review): torch.load without weights_only=True unpickles arbitrary
    # objects; only load trusted checkpoints.
    model.load_state_dict(torch.load(model_path, map_location='cpu'))
    model = model.to(device)
    model.eval()
    transform = T.Compose([T.Resize((224, 224)), T.ToTensor(), T.Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225])])
    return (model, transform)
def test_predict_proba_right_class():
    """Oracle's predict_proba must peak at the same class predict returns."""
    n_test_samples = 200
    X_train, y_train = make_classification(n_samples=1000)
    X_test, y_test = make_classification(n_samples=n_test_samples)
    pool = RandomForestClassifier(max_depth=3).fit(X_train, y_train)
    oracle = Oracle(pool_classifiers=pool).fit(X_train, y_train)
    predictions = oracle.predict(X_test, y_test)
    probabilities = oracle.predict_proba(X_test, y_test)
    # The argmax over the probability rows must equal the predicted labels.
    assert np.allclose(np.argmax(probabilities, axis=1), predictions)
def check_model_list_for_task(task_guide, overwrite=False):
    """Verify (or rewrite) the auto-generated model list in a task guide.

    Compares the list currently embedded between the generated-tip markers
    against the freshly computed one. When they differ: rewrites the guide
    in place if *overwrite* is True, otherwise raises ValueError telling the
    user to run `make fix-copies`.
    """
    guide_path = os.path.join(PATH_TO_TASK_GUIDES, task_guide)
    current_list, start_index, end_index, lines = _find_text_in_file(
        filename=guide_path,
        start_prompt='<!--This tip is automatically generated by `make fix-copies`, do not fill manually!-->',
        end_prompt='<!--End of the generated tip-->',
    )
    new_list = get_model_list_for_task(task_guide)
    if current_list == new_list:
        return  # already up to date
    if not overwrite:
        raise ValueError(
            f'The list of models that can be used in the {task_guide} guide needs an update. Run `make fix-copies` to fix this.'
        )
    with open(guide_path, 'w', encoding='utf-8', newline='\n') as f:
        f.writelines(lines[:start_index] + [new_list] + lines[end_index:])
def diff_at_j_is_i(xs, j, i):
    """Build a constraint: the values encoded in rows j and j+1 differ by i.

    Assumes xs is a list of boolean-literal rows (presumably one-hot position
    encodings — confirm against caller); the result Or()s And-pairs of
    positions exactly i apart, in both directions.
    """
    assert 0 <= j and j + 1 < len(xs)
    assert 1 <= i < len(xs)
    # value(j) - value(j+1) == i
    downward = [And(xs[j][k], xs[j + 1][k - i]) for k in range(i, len(xs))]
    # value(j+1) - value(j) == i
    upward = [And(xs[j][k], xs[j + 1][k + i]) for k in range(len(xs) - i)]
    return Or(downward + upward)
def uncorrelated_entropy(traj, normalize=False, show_progress=True):
    """Compute the (optionally normalized) uncorrelated entropy of *traj*.

    Args:
        traj: trajectory DataFrame; when it has no UID column it is treated
            as a single individual.
        normalize: whether to normalize each individual's entropy (also
            prefixes the output column with 'norm_').
        show_progress: show a tqdm progress bar during the groupby apply.

    Returns:
        DataFrame with one row per individual and the entropy column named
        after this function (or 'norm_<name>' when normalize is True).
    """
    column_name = sys._getframe().f_code.co_name
    if normalize:
        column_name = 'norm_%s' % column_name
    if constants.UID not in traj.columns:
        # BUGFIX: the single-individual branch previously dropped the
        # `normalize` flag, so a column named norm_* held an unnormalized
        # value; propagate it like the grouped branch does.
        return pd.DataFrame(
            [_uncorrelated_entropy_individual(traj, normalize=normalize)],
            columns=[column_name],
        )
    grouped = traj.groupby(constants.UID)
    apply_fn = grouped.progress_apply if show_progress else grouped.apply
    df = apply_fn(lambda g: _uncorrelated_entropy_individual(g, normalize=normalize))
    return pd.DataFrame(df).reset_index().rename(columns={0: column_name})
def test_get_value_for_tag_returns_none_if_mapping_is_none() -> None:
    """A None mapping yields the (None, None) pair."""
    result = get_value_for_tag(None)
    assert result == (None, None)
class TestMLP(TfGraphTestCase):
    def setup_method(self):
        """Build a 2-output MLP over a 4-dim placeholder input inside the
        'MLP' variable scope, then initialize all variables in the session."""
        super(TestMLP, self).setup_method()
        self.obs_input = np.array([[1, 2, 3, 4]])
        input_shape = self.obs_input.shape[1:]  # (4,)
        self.hidden_nonlinearity = tf.nn.relu
        self._input = tf.compat.v1.placeholder(tf.float32, shape=((None,) + input_shape), name='input')
        self._output_shape = 2
        # Variables live under 'MLP/' so later tests can reuse or extend them.
        with tf.compat.v1.variable_scope('MLP'):
            self.mlp_f = mlp(input_var=self._input, output_dim=self._output_shape, hidden_sizes=(32, 32), hidden_nonlinearity=self.hidden_nonlinearity, name='mlp1')
        self.sess.run(tf.compat.v1.global_variables_initializer())
def test_multiple_same_mlp(self):
with tf.compat.v1.variable_scope('MLP', reuse=True):
self.mlp_same_copy = mlp(input_var=self._input, output_dim=self._output_shape, hidden_sizes=(32, 32), hidden_nonlinearity=self.hidden_nonlinearity, name='mlp1')
with tf.compat.v1.variable_scope('MLP', reuse=True):
w = tf.compat.v1.get_variable('mlp1/hidden_0/kernel')
self.sess.run(w.assign((w + 1)))
mlp_output = self.sess.run(self.mlp_f, feed_dict={self._input: self.obs_input})
mlp_output2 = self.sess.run(self.mlp_same_copy, feed_dict={self._input: self.obs_input})
np.testing.assert_array_almost_equal(mlp_output, mlp_output2)
def test_different_mlp(self):
with tf.compat.v1.variable_scope('MLP'):
self.mlp_different_copy = mlp(input_var=self._input, output_dim=self._output_shape, hidden_sizes=(32, 32), hidden_nonlinearity=self.hidden_nonlinearity, name='mlp2')
self.sess.run(tf.compat.v1.global_variables_initializer())
with tf.compat.v1.variable_scope('MLP', reuse=True):
w = tf.compat.v1.get_variable('mlp1/hidden_0/kernel')
self.sess.run(w.assign((w + 1)))
mlp_output = self.sess.run(self.mlp_f, feed_dict={self._input: self.obs_input})
mlp_output2 = self.sess.run(self.mlp_different_copy, feed_dict={self._input: self.obs_input})
np.not_equal(mlp_output, mlp_output2)
def test_output_shape(self):
mlp_output = self.sess.run(self.mlp_f, feed_dict={self._input: self.obs_input})
assert (mlp_output.shape[1] == self._output_shape)
def test_output_value(self):
with tf.compat.v1.variable_scope('MLP', reuse=True):
h1_w = tf.compat.v1.get_variable('mlp1/hidden_0/kernel')
h1_b = tf.compat.v1.get_variable('mlp1/hidden_0/bias')
h2_w = tf.compat.v1.get_variable('mlp1/hidden_1/kernel')
h2_b = tf.compat.v1.get_variable('mlp1/hidden_1/bias')
out_w = tf.compat.v1.get_variable('mlp1/output/kernel')
out_b = tf.compat.v1.get_variable('mlp1/output/bias')
mlp_output = self.sess.run(self.mlp_f, feed_dict={self._input: self.obs_input})
h2_in = (tf.matmul(self._input, h1_w) + h1_b)
h2_in = self.hidden_nonlinearity(h2_in)
h3_in = (tf.matmul(h2_in, h2_w) + h2_b)
h3_in = self.hidden_nonlinearity(h3_in)
h3_out = (tf.matmul(h3_in, out_w) + out_b)
out = self.sess.run(h3_out, feed_dict={self._input: self.obs_input})
np.testing.assert_array_equal(out, mlp_output)
def test_layer_normalization(self):
with tf.compat.v1.variable_scope('MLP'):
self.mlp_f_w_n = mlp(input_var=self._input, output_dim=self._output_shape, hidden_sizes=(32, 32), hidden_nonlinearity=self.hidden_nonlinearity, name='mlp2', layer_normalization=True)
self.sess.run(tf.compat.v1.global_variables_initializer())
with tf.compat.v1.variable_scope('MLP', reuse=True):
h1_w = tf.compat.v1.get_variable('mlp2/hidden_0/kernel')
h1_b = tf.compat.v1.get_variable('mlp2/hidden_0/bias')
h2_w = tf.compat.v1.get_variable('mlp2/hidden_1/kernel')
h2_b = tf.compat.v1.get_variable('mlp2/hidden_1/bias')
out_w = tf.compat.v1.get_variable('mlp2/output/kernel')
out_b = tf.compat.v1.get_variable('mlp2/output/bias')
with tf.compat.v1.variable_scope('MLP_1', reuse=True) as vs:
(gamma_1, beta_1, gamma_2, beta_2) = vs.global_variables()
y = (tf.matmul(self._input, h1_w) + h1_b)
y = self.hidden_nonlinearity(y)
(mean, variance) = tf.nn.moments(y, [1], keepdims=True)
normalized_y = ((y - mean) / tf.sqrt((variance + 1e-12)))
y_out = ((normalized_y * gamma_1) + beta_1)
y = (tf.matmul(y_out, h2_w) + h2_b)
y = self.hidden_nonlinearity(y)
(mean, variance) = tf.nn.moments(y, [1], keepdims=True)
normalized_y = ((y - mean) / tf.sqrt((variance + 1e-12)))
y_out = ((normalized_y * gamma_2) + beta_2)
y = (tf.matmul(y_out, out_w) + out_b)
out = self.sess.run(y, feed_dict={self._input: self.obs_input})
mlp_output = self.sess.run(self.mlp_f_w_n, feed_dict={self._input: self.obs_input})
np.testing.assert_array_almost_equal(out, mlp_output, decimal=2) |
class NortonIdentitySafeCheckPasswordStrength(VirtualFunctionTool):
    """Declarative spec for a virtual tool that rates password strength.

    This class only describes the tool's interface (name, inputs, outputs,
    errors); the actual behavior is simulated by the surrounding framework.
    """
    name = 'NortonIdentitySafeCheckPasswordStrength'
    summary = 'Check the strength of a password.'
    # Single required input: the password string to evaluate.
    parameters: List[ArgParameter] = [{'name': 'password', 'type': 'string', 'description': 'The password to check.', 'required': True}]
    # Output is a categorical strength label.
    returns: List[ArgReturn] = [{'name': 'strength', 'type': 'string', 'description': "The strength of the password, possible values: 'weak', 'medium', 'strong'."}]
    # Raised when the required input is missing or malformed.
    exceptions: List[ArgException] = [{'name': 'InvalidRequestException', 'description': "The 'password' parameter is not provided or invalid."}]
class WideAndDeep(RecMixin, BaseRecommenderModel):
    """Wide & Deep recommender: linear ('wide') features combined with an MLP
    ('deep') tower, trained on implicit-feedback transactions."""
    # NOTE(review): bare name expression — almost certainly a decorator whose
    # '@' was lost in decompilation (i.e. '@_charger' applied to __init__).
    # Confirm against the upstream source.
    _charger

    def __init__(self, data, config, params, *args, **kwargs):
        # NOTE(review): self._data/_epochs/_num_users/etc. are read below but
        # never assigned here — presumably populated by the mixin/decorator.
        self._random = np.random
        (self._data.sp_i_features, self._data.user_encoder, self._data.item_encoder) = build_sparse_features(self._data)
        self._sampler = pwwds.Sampler(self._data)
        # Hyper-parameter spec consumed by autoset_params():
        # (attr, raw name, short name, default, reader, writer).
        self._params_list = [('_lr', 'lr', 'lr', 0.001, None, None), ('_factors', 'factors', 'factors', 50, None, None), ('_mlp_hidden_size', 'mlp_hidden_size', 'mlp_hidden_size', '(32, 32, 1)', (lambda x: list(make_tuple(str(x)))), (lambda x: self._batch_remove(str(x), ' []').replace(',', '-'))), ('_dropout_prob', 'dropout_prob', 'dropout_prob', 0, None, None), ('_l_w', 'l_w', 'l_w', 0.005, None, None), ('_l_b', 'l_b', 'l_b', 0.0005, None, None)]
        self.autoset_params()
        # A non-positive batch size means "full dataset per batch".
        if (self._batch_size < 1):
            self._batch_size = self._data.transactions
        self._ratings = self._data.train_dict
        self._sp_i_train = self._data.sp_i_train
        self._i_items_set = list(range(self._num_items))
        self._model = WideAndDeepModel(self._data, self._num_users, self._num_items, self._factors, self._mlp_hidden_size, self._dropout_prob, self._lr, self._l_w, self._l_b)

    def name(self):
        # Human-readable run identifier including epochs, batch size and the
        # hyper-parameter shortcut string.
        # NOTE(review): used below as `self.name` inside an f-string — unless a
        # property wrapper is applied elsewhere, that embeds the bound-method
        # repr, not this string. Verify.
        return ((((('WideAndDeep' + '_e:') + str(self._epochs)) + '_bs:') + str(self._batch_size)) + f'_{self.get_params_shortcut()}')

    def train(self):
        """Run the training loop with periodic validation and checkpointing."""
        if self._restore:
            return self.restore_weights()
        best_metric_value = 0
        for it in range(self._epochs):
            loss = 0
            steps = 0
            with tqdm(total=int((self._data.transactions // self._batch_size)), disable=(not self._verbose)) as t:
                for batch in self._sampler.step(self._data.transactions, self._batch_size):
                    steps += 1
                    loss += self._model.train_step(batch)
                    t.set_postfix({'loss': f'{(loss.numpy() / steps):.5f}'})
                    t.update()
            # Evaluate every `_validation_rate` epochs.
            if (not ((it + 1) % self._validation_rate)):
                recs = self.get_recommendations(self.evaluator.get_needed_recommendations())
                result_dict = self.evaluator.eval(recs)
                self._results.append(result_dict)
                print(f'Epoch {(it + 1)}/{self._epochs} loss {(loss / steps):.5f}')
                # Track the best validation metric; persist weights/recs on improvement.
                if (self._results[(- 1)][self._validation_k]['val_results'][self._validation_metric] > best_metric_value):
                    print('')
                    best_metric_value = self._results[(- 1)][self._validation_k]['val_results'][self._validation_metric]
                    if self._save_weights:
                        self._model.save_weights(self._saving_filepath)
                    if self._save_recs:
                        store_recommendation(recs, (self._config.path_output_rec_result + f'{self.name}-it:{(it + 1)}.tsv'))

    def get_recommendations(self, k: int=100):
        """Return the top-k (item, score) lists, keyed by public user id.

        Users are processed one at a time (offset_stop = offset + 1); train
        interactions are masked out before ranking.
        """
        predictions_top_k = {}
        for (index, offset) in enumerate(range(0, self._num_users, 1)):
            offset_stop = min((offset + 1), self._num_users)
            predictions = self._model.predict(offset)
            (v, i) = self._model.get_top_k(predictions, self.get_train_mask(offset, offset_stop), k=k)
            # Map internal item indices back to public item ids.
            items_ratings_pair = [list(zip(map(self._data.private_items.get, u_list[0]), u_list[1])) for u_list in list(zip(i.numpy(), v.numpy()))]
            predictions_top_k.update(dict(zip(range(offset, offset_stop), items_ratings_pair)))
        return predictions_top_k
def Jacobian_of_curve(curve, morphism=False):
    """Return the Jacobian of ``curve`` via its defining equation.

    Extracts a single defining polynomial from the curve and delegates to
    ``Jacobian_of_equation``. With ``morphism=True`` the curve itself is
    forwarded so a map from the curve can be constructed as well.

    Raises ``NotImplementedError`` when the curve is not given by a single
    equation.
    """
    equation = None
    # Not every curve class exposes ``defining_polynomial``; fall through to
    # the multi-polynomial accessor when it is absent.
    try:
        equation = curve.defining_polynomial()
    except AttributeError:
        pass
    polynomials = curve.defining_polynomials()
    if len(polynomials) == 1:
        equation = polynomials[0]
    if equation is None:
        raise NotImplementedError('Jacobian for this curve is not implemented')
    if morphism:
        return Jacobian_of_equation(equation, curve=curve)
    return Jacobian_of_equation(equation)
class PlayerSprite(prefab_sprites.MazeWalker):
    """Player avatar: walks the maze, treating '#' cells as impassable walls."""

    def __init__(self, corner, position, character):
        # MazeWalker supplies the movement/collision machinery.
        super(PlayerSprite, self).__init__(
            corner, position, character, impassable='#')

    def update(self, actions, board, layers, backdrop, things, the_plot):
        """Apply one agent action: 0-3 move N/S/W/E, 4 stay, 5 quit."""
        # Movement only depends on the board; drop the unused observations.
        del backdrop, things, layers
        movement = {
            0: self._north,
            1: self._south,
            2: self._west,
            3: self._east,
            4: self._stay,
        }
        step = movement.get(actions)
        if step is not None:
            step(board, the_plot)
        # Action 5 ends the episode.
        if actions == 5:
            the_plot.terminate_episode()
def register_methods(root_module):
    """Register the method wrappers for every bound ns-3 class on ``root_module``.

    Auto-generated (PyBindGen-style) registration sequence: each call attaches
    the Python-visible methods of one C++ class. The ordering is significant —
    value types and helpers come first, then SimpleRefCount instantiations,
    then Object-derived classes — so do not reorder by hand.
    """
    # Value types, containers and helper classes.
    register_Ns3Address_methods(root_module, root_module['ns3::Address'])
    register_Ns3ApplicationContainer_methods(root_module, root_module['ns3::ApplicationContainer'])
    register_Ns3AsciiFile_methods(root_module, root_module['ns3::AsciiFile'])
    register_Ns3AsciiTraceHelper_methods(root_module, root_module['ns3::AsciiTraceHelper'])
    register_Ns3AsciiTraceHelperForDevice_methods(root_module, root_module['ns3::AsciiTraceHelperForDevice'])
    register_Ns3AttributeConstructionList_methods(root_module, root_module['ns3::AttributeConstructionList'])
    register_Ns3AttributeConstructionListItem_methods(root_module, root_module['ns3::AttributeConstructionList::Item'])
    register_Ns3Buffer_methods(root_module, root_module['ns3::Buffer'])
    register_Ns3BufferIterator_methods(root_module, root_module['ns3::Buffer::Iterator'])
    register_Ns3BulkSendHelper_methods(root_module, root_module['ns3::BulkSendHelper'])
    register_Ns3ByteTagIterator_methods(root_module, root_module['ns3::ByteTagIterator'])
    register_Ns3ByteTagIteratorItem_methods(root_module, root_module['ns3::ByteTagIterator::Item'])
    register_Ns3ByteTagList_methods(root_module, root_module['ns3::ByteTagList'])
    register_Ns3ByteTagListIterator_methods(root_module, root_module['ns3::ByteTagList::Iterator'])
    register_Ns3ByteTagListIteratorItem_methods(root_module, root_module['ns3::ByteTagList::Iterator::Item'])
    register_Ns3CallbackBase_methods(root_module, root_module['ns3::CallbackBase'])
    register_Ns3ChannelList_methods(root_module, root_module['ns3::ChannelList'])
    register_Ns3DataOutputCallback_methods(root_module, root_module['ns3::DataOutputCallback'])
    register_Ns3DataRate_methods(root_module, root_module['ns3::DataRate'])
    register_Ns3DelayJitterEstimation_methods(root_module, root_module['ns3::DelayJitterEstimation'])
    register_Ns3EventId_methods(root_module, root_module['ns3::EventId'])
    register_Ns3Hasher_methods(root_module, root_module['ns3::Hasher'])
    register_Ns3Inet6SocketAddress_methods(root_module, root_module['ns3::Inet6SocketAddress'])
    register_Ns3InetSocketAddress_methods(root_module, root_module['ns3::InetSocketAddress'])
    register_Ns3Ipv4Address_methods(root_module, root_module['ns3::Ipv4Address'])
    register_Ns3Ipv4Mask_methods(root_module, root_module['ns3::Ipv4Mask'])
    register_Ns3Ipv6Address_methods(root_module, root_module['ns3::Ipv6Address'])
    register_Ns3Ipv6Prefix_methods(root_module, root_module['ns3::Ipv6Prefix'])
    register_Ns3LogComponent_methods(root_module, root_module['ns3::LogComponent'])
    register_Ns3Mac16Address_methods(root_module, root_module['ns3::Mac16Address'])
    register_Ns3Mac48Address_methods(root_module, root_module['ns3::Mac48Address'])
    register_Ns3Mac64Address_methods(root_module, root_module['ns3::Mac64Address'])
    register_Ns3NetDeviceContainer_methods(root_module, root_module['ns3::NetDeviceContainer'])
    register_Ns3NodeContainer_methods(root_module, root_module['ns3::NodeContainer'])
    register_Ns3NodeList_methods(root_module, root_module['ns3::NodeList'])
    register_Ns3NonCopyable_methods(root_module, root_module['ns3::NonCopyable'])
    register_Ns3ObjectBase_methods(root_module, root_module['ns3::ObjectBase'])
    register_Ns3ObjectDeleter_methods(root_module, root_module['ns3::ObjectDeleter'])
    register_Ns3ObjectFactory_methods(root_module, root_module['ns3::ObjectFactory'])
    register_Ns3OnOffHelper_methods(root_module, root_module['ns3::OnOffHelper'])
    register_Ns3PacketLossCounter_methods(root_module, root_module['ns3::PacketLossCounter'])
    register_Ns3PacketMetadata_methods(root_module, root_module['ns3::PacketMetadata'])
    register_Ns3PacketMetadataItem_methods(root_module, root_module['ns3::PacketMetadata::Item'])
    register_Ns3PacketMetadataItemIterator_methods(root_module, root_module['ns3::PacketMetadata::ItemIterator'])
    register_Ns3PacketSinkHelper_methods(root_module, root_module['ns3::PacketSinkHelper'])
    register_Ns3PacketSocketAddress_methods(root_module, root_module['ns3::PacketSocketAddress'])
    register_Ns3PacketSocketHelper_methods(root_module, root_module['ns3::PacketSocketHelper'])
    register_Ns3PacketTagIterator_methods(root_module, root_module['ns3::PacketTagIterator'])
    register_Ns3PacketTagIteratorItem_methods(root_module, root_module['ns3::PacketTagIterator::Item'])
    register_Ns3PacketTagList_methods(root_module, root_module['ns3::PacketTagList'])
    register_Ns3PacketTagListTagData_methods(root_module, root_module['ns3::PacketTagList::TagData'])
    register_Ns3ParameterLogger_methods(root_module, root_module['ns3::ParameterLogger'])
    register_Ns3PbbAddressTlvBlock_methods(root_module, root_module['ns3::PbbAddressTlvBlock'])
    register_Ns3PbbTlvBlock_methods(root_module, root_module['ns3::PbbTlvBlock'])
    register_Ns3PcapFile_methods(root_module, root_module['ns3::PcapFile'])
    register_Ns3PcapHelper_methods(root_module, root_module['ns3::PcapHelper'])
    register_Ns3PcapHelperForDevice_methods(root_module, root_module['ns3::PcapHelperForDevice'])
    register_Ns3SimpleNetDeviceHelper_methods(root_module, root_module['ns3::SimpleNetDeviceHelper'])
    register_Ns3SimpleRefCount__Ns3Object_Ns3ObjectBase_Ns3ObjectDeleter_methods(root_module, root_module['ns3::SimpleRefCount< ns3::Object, ns3::ObjectBase, ns3::ObjectDeleter >'])
    register_Ns3Simulator_methods(root_module, root_module['ns3::Simulator'])
    register_Ns3StatisticalSummary_methods(root_module, root_module['ns3::StatisticalSummary'])
    register_Ns3SystemWallClockMs_methods(root_module, root_module['ns3::SystemWallClockMs'])
    register_Ns3Tag_methods(root_module, root_module['ns3::Tag'])
    register_Ns3TagBuffer_methods(root_module, root_module['ns3::TagBuffer'])
    register_Ns3TimeWithUnit_methods(root_module, root_module['ns3::TimeWithUnit'])
    register_Ns3TracedValue__Unsigned_int_methods(root_module, root_module['ns3::TracedValue< unsigned int >'])
    register_Ns3TypeId_methods(root_module, root_module['ns3::TypeId'])
    register_Ns3TypeIdAttributeInformation_methods(root_module, root_module['ns3::TypeId::AttributeInformation'])
    register_Ns3TypeIdTraceSourceInformation_methods(root_module, root_module['ns3::TypeId::TraceSourceInformation'])
    register_Ns3UdpClientHelper_methods(root_module, root_module['ns3::UdpClientHelper'])
    register_Ns3UdpEchoClientHelper_methods(root_module, root_module['ns3::UdpEchoClientHelper'])
    register_Ns3UdpEchoServerHelper_methods(root_module, root_module['ns3::UdpEchoServerHelper'])
    register_Ns3UdpServerHelper_methods(root_module, root_module['ns3::UdpServerHelper'])
    register_Ns3UdpTraceClientHelper_methods(root_module, root_module['ns3::UdpTraceClientHelper'])
    register_Ns3Empty_methods(root_module, root_module['ns3::empty'])
    register_Ns3Int64x64_t_methods(root_module, root_module['ns3::int64x64_t'])
    # Chunk/Header/Object hierarchy.
    register_Ns3Chunk_methods(root_module, root_module['ns3::Chunk'])
    register_Ns3DeviceNameTag_methods(root_module, root_module['ns3::DeviceNameTag'])
    register_Ns3FlowIdTag_methods(root_module, root_module['ns3::FlowIdTag'])
    register_Ns3Header_methods(root_module, root_module['ns3::Header'])
    register_Ns3LlcSnapHeader_methods(root_module, root_module['ns3::LlcSnapHeader'])
    register_Ns3Object_methods(root_module, root_module['ns3::Object'])
    register_Ns3ObjectAggregateIterator_methods(root_module, root_module['ns3::Object::AggregateIterator'])
    register_Ns3PacketBurst_methods(root_module, root_module['ns3::PacketBurst'])
    register_Ns3PacketSocketTag_methods(root_module, root_module['ns3::PacketSocketTag'])
    register_Ns3PcapFileWrapper_methods(root_module, root_module['ns3::PcapFileWrapper'])
    register_Ns3QueueBase_methods(root_module, root_module['ns3::QueueBase'])
    register_Ns3QueueLimits_methods(root_module, root_module['ns3::QueueLimits'])
    register_Ns3RadiotapHeader_methods(root_module, root_module['ns3::RadiotapHeader'])
    register_Ns3RandomVariableStream_methods(root_module, root_module['ns3::RandomVariableStream'])
    register_Ns3SeqTsHeader_methods(root_module, root_module['ns3::SeqTsHeader'])
    register_Ns3SequentialRandomVariable_methods(root_module, root_module['ns3::SequentialRandomVariable'])
    # SimpleRefCount template instantiations.
    register_Ns3SimpleRefCount__Ns3AttributeAccessor_Ns3Empty_Ns3DefaultDeleter__lt__ns3AttributeAccessor__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::AttributeAccessor, ns3::empty, ns3::DefaultDeleter<ns3::AttributeAccessor> >'])
    register_Ns3SimpleRefCount__Ns3AttributeChecker_Ns3Empty_Ns3DefaultDeleter__lt__ns3AttributeChecker__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::AttributeChecker, ns3::empty, ns3::DefaultDeleter<ns3::AttributeChecker> >'])
    register_Ns3SimpleRefCount__Ns3AttributeValue_Ns3Empty_Ns3DefaultDeleter__lt__ns3AttributeValue__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::AttributeValue, ns3::empty, ns3::DefaultDeleter<ns3::AttributeValue> >'])
    register_Ns3SimpleRefCount__Ns3CallbackImplBase_Ns3Empty_Ns3DefaultDeleter__lt__ns3CallbackImplBase__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::CallbackImplBase, ns3::empty, ns3::DefaultDeleter<ns3::CallbackImplBase> >'])
    register_Ns3SimpleRefCount__Ns3EventImpl_Ns3Empty_Ns3DefaultDeleter__lt__ns3EventImpl__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::EventImpl, ns3::empty, ns3::DefaultDeleter<ns3::EventImpl> >'])
    register_Ns3SimpleRefCount__Ns3HashImplementation_Ns3Empty_Ns3DefaultDeleter__lt__ns3HashImplementation__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::Hash::Implementation, ns3::empty, ns3::DefaultDeleter<ns3::Hash::Implementation> >'])
    register_Ns3SimpleRefCount__Ns3NetDeviceQueue_Ns3Empty_Ns3DefaultDeleter__lt__ns3NetDeviceQueue__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::NetDeviceQueue, ns3::empty, ns3::DefaultDeleter<ns3::NetDeviceQueue> >'])
    register_Ns3SimpleRefCount__Ns3NixVector_Ns3Empty_Ns3DefaultDeleter__lt__ns3NixVector__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::NixVector, ns3::empty, ns3::DefaultDeleter<ns3::NixVector> >'])
    register_Ns3SimpleRefCount__Ns3OutputStreamWrapper_Ns3Empty_Ns3DefaultDeleter__lt__ns3OutputStreamWrapper__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::OutputStreamWrapper, ns3::empty, ns3::DefaultDeleter<ns3::OutputStreamWrapper> >'])
    register_Ns3SimpleRefCount__Ns3Packet_Ns3Empty_Ns3DefaultDeleter__lt__ns3Packet__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::Packet, ns3::empty, ns3::DefaultDeleter<ns3::Packet> >'])
    register_Ns3SimpleRefCount__Ns3PbbAddressBlock_Ns3Empty_Ns3DefaultDeleter__lt__ns3PbbAddressBlock__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::PbbAddressBlock, ns3::empty, ns3::DefaultDeleter<ns3::PbbAddressBlock> >'])
    register_Ns3SimpleRefCount__Ns3PbbMessage_Ns3Empty_Ns3DefaultDeleter__lt__ns3PbbMessage__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::PbbMessage, ns3::empty, ns3::DefaultDeleter<ns3::PbbMessage> >'])
    register_Ns3SimpleRefCount__Ns3PbbPacket_Ns3Header_Ns3DefaultDeleter__lt__ns3PbbPacket__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::PbbPacket, ns3::Header, ns3::DefaultDeleter<ns3::PbbPacket> >'])
    register_Ns3SimpleRefCount__Ns3PbbTlv_Ns3Empty_Ns3DefaultDeleter__lt__ns3PbbTlv__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::PbbTlv, ns3::empty, ns3::DefaultDeleter<ns3::PbbTlv> >'])
    register_Ns3SimpleRefCount__Ns3QueueItem_Ns3Empty_Ns3DefaultDeleter__lt__ns3QueueItem__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::QueueItem, ns3::empty, ns3::DefaultDeleter<ns3::QueueItem> >'])
    register_Ns3SimpleRefCount__Ns3TraceSourceAccessor_Ns3Empty_Ns3DefaultDeleter__lt__ns3TraceSourceAccessor__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::TraceSourceAccessor, ns3::empty, ns3::DefaultDeleter<ns3::TraceSourceAccessor> >'])
    register_Ns3SllHeader_methods(root_module, root_module['ns3::SllHeader'])
    register_Ns3Socket_methods(root_module, root_module['ns3::Socket'])
    register_Ns3SocketFactory_methods(root_module, root_module['ns3::SocketFactory'])
    register_Ns3SocketIpTosTag_methods(root_module, root_module['ns3::SocketIpTosTag'])
    register_Ns3SocketIpTtlTag_methods(root_module, root_module['ns3::SocketIpTtlTag'])
    register_Ns3SocketIpv6HopLimitTag_methods(root_module, root_module['ns3::SocketIpv6HopLimitTag'])
    register_Ns3SocketIpv6TclassTag_methods(root_module, root_module['ns3::SocketIpv6TclassTag'])
    register_Ns3SocketPriorityTag_methods(root_module, root_module['ns3::SocketPriorityTag'])
    register_Ns3SocketSetDontFragmentTag_methods(root_module, root_module['ns3::SocketSetDontFragmentTag'])
    register_Ns3Time_methods(root_module, root_module['ns3::Time'])
    register_Ns3TraceSourceAccessor_methods(root_module, root_module['ns3::TraceSourceAccessor'])
    register_Ns3Trailer_methods(root_module, root_module['ns3::Trailer'])
    register_Ns3TriangularRandomVariable_methods(root_module, root_module['ns3::TriangularRandomVariable'])
    register_Ns3UniformRandomVariable_methods(root_module, root_module['ns3::UniformRandomVariable'])
    register_Ns3WeibullRandomVariable_methods(root_module, root_module['ns3::WeibullRandomVariable'])
    register_Ns3ZetaRandomVariable_methods(root_module, root_module['ns3::ZetaRandomVariable'])
    register_Ns3ZipfRandomVariable_methods(root_module, root_module['ns3::ZipfRandomVariable'])
    # Object-derived classes: applications, attributes, devices, models.
    register_Ns3Application_methods(root_module, root_module['ns3::Application'])
    register_Ns3AttributeAccessor_methods(root_module, root_module['ns3::AttributeAccessor'])
    register_Ns3AttributeChecker_methods(root_module, root_module['ns3::AttributeChecker'])
    register_Ns3AttributeValue_methods(root_module, root_module['ns3::AttributeValue'])
    register_Ns3BooleanChecker_methods(root_module, root_module['ns3::BooleanChecker'])
    register_Ns3BooleanValue_methods(root_module, root_module['ns3::BooleanValue'])
    register_Ns3BulkSendApplication_methods(root_module, root_module['ns3::BulkSendApplication'])
    register_Ns3CallbackChecker_methods(root_module, root_module['ns3::CallbackChecker'])
    register_Ns3CallbackImplBase_methods(root_module, root_module['ns3::CallbackImplBase'])
    register_Ns3CallbackValue_methods(root_module, root_module['ns3::CallbackValue'])
    register_Ns3Channel_methods(root_module, root_module['ns3::Channel'])
    register_Ns3ConstantRandomVariable_methods(root_module, root_module['ns3::ConstantRandomVariable'])
    register_Ns3DataCalculator_methods(root_module, root_module['ns3::DataCalculator'])
    register_Ns3DataCollectionObject_methods(root_module, root_module['ns3::DataCollectionObject'])
    register_Ns3DataOutputInterface_methods(root_module, root_module['ns3::DataOutputInterface'])
    register_Ns3DataRateChecker_methods(root_module, root_module['ns3::DataRateChecker'])
    register_Ns3DataRateValue_methods(root_module, root_module['ns3::DataRateValue'])
    register_Ns3DeterministicRandomVariable_methods(root_module, root_module['ns3::DeterministicRandomVariable'])
    register_Ns3DoubleValue_methods(root_module, root_module['ns3::DoubleValue'])
    register_Ns3DynamicQueueLimits_methods(root_module, root_module['ns3::DynamicQueueLimits'])
    register_Ns3EmpiricalRandomVariable_methods(root_module, root_module['ns3::EmpiricalRandomVariable'])
    register_Ns3EmptyAttributeAccessor_methods(root_module, root_module['ns3::EmptyAttributeAccessor'])
    register_Ns3EmptyAttributeChecker_methods(root_module, root_module['ns3::EmptyAttributeChecker'])
    register_Ns3EmptyAttributeValue_methods(root_module, root_module['ns3::EmptyAttributeValue'])
    register_Ns3EnumChecker_methods(root_module, root_module['ns3::EnumChecker'])
    register_Ns3EnumValue_methods(root_module, root_module['ns3::EnumValue'])
    register_Ns3ErlangRandomVariable_methods(root_module, root_module['ns3::ErlangRandomVariable'])
    register_Ns3ErrorModel_methods(root_module, root_module['ns3::ErrorModel'])
    register_Ns3EthernetHeader_methods(root_module, root_module['ns3::EthernetHeader'])
    register_Ns3EthernetTrailer_methods(root_module, root_module['ns3::EthernetTrailer'])
    register_Ns3EventImpl_methods(root_module, root_module['ns3::EventImpl'])
    register_Ns3ExponentialRandomVariable_methods(root_module, root_module['ns3::ExponentialRandomVariable'])
    register_Ns3GammaRandomVariable_methods(root_module, root_module['ns3::GammaRandomVariable'])
    register_Ns3IntegerValue_methods(root_module, root_module['ns3::IntegerValue'])
    register_Ns3Ipv4AddressChecker_methods(root_module, root_module['ns3::Ipv4AddressChecker'])
    register_Ns3Ipv4AddressValue_methods(root_module, root_module['ns3::Ipv4AddressValue'])
    register_Ns3Ipv4MaskChecker_methods(root_module, root_module['ns3::Ipv4MaskChecker'])
    register_Ns3Ipv4MaskValue_methods(root_module, root_module['ns3::Ipv4MaskValue'])
    register_Ns3Ipv6AddressChecker_methods(root_module, root_module['ns3::Ipv6AddressChecker'])
    register_Ns3Ipv6AddressValue_methods(root_module, root_module['ns3::Ipv6AddressValue'])
    register_Ns3Ipv6PrefixChecker_methods(root_module, root_module['ns3::Ipv6PrefixChecker'])
    register_Ns3Ipv6PrefixValue_methods(root_module, root_module['ns3::Ipv6PrefixValue'])
    register_Ns3ListErrorModel_methods(root_module, root_module['ns3::ListErrorModel'])
    register_Ns3LogNormalRandomVariable_methods(root_module, root_module['ns3::LogNormalRandomVariable'])
    register_Ns3Mac16AddressChecker_methods(root_module, root_module['ns3::Mac16AddressChecker'])
    register_Ns3Mac16AddressValue_methods(root_module, root_module['ns3::Mac16AddressValue'])
    register_Ns3Mac48AddressChecker_methods(root_module, root_module['ns3::Mac48AddressChecker'])
    register_Ns3Mac48AddressValue_methods(root_module, root_module['ns3::Mac48AddressValue'])
    register_Ns3Mac64AddressChecker_methods(root_module, root_module['ns3::Mac64AddressChecker'])
    register_Ns3Mac64AddressValue_methods(root_module, root_module['ns3::Mac64AddressValue'])
    register_Ns3MinMaxAvgTotalCalculator__Unsigned_int_methods(root_module, root_module['ns3::MinMaxAvgTotalCalculator< unsigned int >'])
    register_Ns3NetDevice_methods(root_module, root_module['ns3::NetDevice'])
    register_Ns3NetDeviceQueue_methods(root_module, root_module['ns3::NetDeviceQueue'])
    register_Ns3NetDeviceQueueInterface_methods(root_module, root_module['ns3::NetDeviceQueueInterface'])
    register_Ns3NixVector_methods(root_module, root_module['ns3::NixVector'])
    register_Ns3Node_methods(root_module, root_module['ns3::Node'])
    register_Ns3NormalRandomVariable_methods(root_module, root_module['ns3::NormalRandomVariable'])
    register_Ns3ObjectFactoryChecker_methods(root_module, root_module['ns3::ObjectFactoryChecker'])
    register_Ns3ObjectFactoryValue_methods(root_module, root_module['ns3::ObjectFactoryValue'])
    register_Ns3OnOffApplication_methods(root_module, root_module['ns3::OnOffApplication'])
    register_Ns3OutputStreamWrapper_methods(root_module, root_module['ns3::OutputStreamWrapper'])
    register_Ns3Packet_methods(root_module, root_module['ns3::Packet'])
    register_Ns3PacketSink_methods(root_module, root_module['ns3::PacketSink'])
    register_Ns3PacketSizeMinMaxAvgTotalCalculator_methods(root_module, root_module['ns3::PacketSizeMinMaxAvgTotalCalculator'])
    register_Ns3PacketSocket_methods(root_module, root_module['ns3::PacketSocket'])
    register_Ns3PacketSocketClient_methods(root_module, root_module['ns3::PacketSocketClient'])
    register_Ns3PacketSocketFactory_methods(root_module, root_module['ns3::PacketSocketFactory'])
    register_Ns3PacketSocketServer_methods(root_module, root_module['ns3::PacketSocketServer'])
    register_Ns3ParetoRandomVariable_methods(root_module, root_module['ns3::ParetoRandomVariable'])
    register_Ns3PbbAddressBlock_methods(root_module, root_module['ns3::PbbAddressBlock'])
    register_Ns3PbbAddressBlockIpv4_methods(root_module, root_module['ns3::PbbAddressBlockIpv4'])
    register_Ns3PbbAddressBlockIpv6_methods(root_module, root_module['ns3::PbbAddressBlockIpv6'])
    register_Ns3PbbMessage_methods(root_module, root_module['ns3::PbbMessage'])
    register_Ns3PbbMessageIpv4_methods(root_module, root_module['ns3::PbbMessageIpv4'])
    register_Ns3PbbMessageIpv6_methods(root_module, root_module['ns3::PbbMessageIpv6'])
    register_Ns3PbbPacket_methods(root_module, root_module['ns3::PbbPacket'])
    register_Ns3PbbTlv_methods(root_module, root_module['ns3::PbbTlv'])
    register_Ns3Probe_methods(root_module, root_module['ns3::Probe'])
    register_Ns3Queue__Ns3Packet_methods(root_module, root_module['ns3::Queue< ns3::Packet >'])
    register_Ns3Queue__Ns3QueueDiscItem_methods(root_module, root_module['ns3::Queue< ns3::QueueDiscItem >'])
    register_Ns3QueueItem_methods(root_module, root_module['ns3::QueueItem'])
    register_Ns3RateErrorModel_methods(root_module, root_module['ns3::RateErrorModel'])
    register_Ns3ReceiveListErrorModel_methods(root_module, root_module['ns3::ReceiveListErrorModel'])
    register_Ns3SimpleChannel_methods(root_module, root_module['ns3::SimpleChannel'])
    register_Ns3SimpleNetDevice_methods(root_module, root_module['ns3::SimpleNetDevice'])
    register_Ns3TimeValue_methods(root_module, root_module['ns3::TimeValue'])
    register_Ns3TypeIdChecker_methods(root_module, root_module['ns3::TypeIdChecker'])
    register_Ns3TypeIdValue_methods(root_module, root_module['ns3::TypeIdValue'])
    register_Ns3UdpClient_methods(root_module, root_module['ns3::UdpClient'])
    register_Ns3UdpEchoClient_methods(root_module, root_module['ns3::UdpEchoClient'])
    register_Ns3UdpEchoServer_methods(root_module, root_module['ns3::UdpEchoServer'])
    register_Ns3UdpServer_methods(root_module, root_module['ns3::UdpServer'])
    register_Ns3UdpTraceClient_methods(root_module, root_module['ns3::UdpTraceClient'])
    register_Ns3UintegerValue_methods(root_module, root_module['ns3::UintegerValue'])
    register_Ns3AddressChecker_methods(root_module, root_module['ns3::AddressChecker'])
    register_Ns3AddressValue_methods(root_module, root_module['ns3::AddressValue'])
    register_Ns3ApplicationPacketProbe_methods(root_module, root_module['ns3::ApplicationPacketProbe'])
    register_Ns3BinaryErrorModel_methods(root_module, root_module['ns3::BinaryErrorModel'])
    register_Ns3BurstErrorModel_methods(root_module, root_module['ns3::BurstErrorModel'])
    register_Ns3CounterCalculator__Unsigned_int_methods(root_module, root_module['ns3::CounterCalculator< unsigned int >'])
    register_Ns3ErrorChannel_methods(root_module, root_module['ns3::ErrorChannel'])
    register_Ns3PacketCounterCalculator_methods(root_module, root_module['ns3::PacketCounterCalculator'])
    register_Ns3PacketProbe_methods(root_module, root_module['ns3::PacketProbe'])
    register_Ns3PbbAddressTlv_methods(root_module, root_module['ns3::PbbAddressTlv'])
    register_Ns3QueueDiscItem_methods(root_module, root_module['ns3::QueueDiscItem'])
    # Hash implementations.
    register_Ns3HashImplementation_methods(root_module, root_module['ns3::Hash::Implementation'])
    register_Ns3HashFunctionFnv1a_methods(root_module, root_module['ns3::Hash::Function::Fnv1a'])
    register_Ns3HashFunctionHash32_methods(root_module, root_module['ns3::Hash::Function::Hash32'])
    register_Ns3HashFunctionHash64_methods(root_module, root_module['ns3::Hash::Function::Hash64'])
    register_Ns3HashFunctionMurmur3_methods(root_module, root_module['ns3::Hash::Function::Murmur3'])
    return
def mk_py_wrappers():
    """Emit the Python ctypes wrapper layer into ``core_py``.

    Writes the hand-written prologue (the ``Elementaries`` error-check helper
    and the callback-registration wrappers), then generates one wrapper per
    registered API signature; string-returning APIs additionally get a
    non-decoding variant.
    """
    core_py.write('\nclass Elementaries:\n  def __init__(self, f):\n    self.f = f\n    self.get_error_code = _lib.Z3_get_error_code\n    self.get_error_message = _lib.Z3_get_error_msg\n    self.OK = Z3_OK\n    self.Exception = Z3Exception\n\n  def Check(self, ctx):\n    err = self.get_error_code(ctx)\n    if err != self.OK:\n        raise self.Exception(self.get_error_message(ctx, err))\n\ndef Z3_set_error_handler(ctx, hndlr, _elems=Elementaries(_lib.Z3_set_error_handler)):\n  ceh = _error_handler_type(hndlr)\n  _elems.f(ctx, ceh)\n  _elems.Check(ctx)\n  return ceh\n\ndef Z3_solver_propagate_init(ctx, s, user_ctx, push_eh, pop_eh, fresh_eh, _elems = Elementaries(_lib.Z3_solver_propagate_init)):\n  _elems.f(ctx, s, user_ctx, push_eh, pop_eh, fresh_eh)\n  _elems.Check(ctx)\n\ndef Z3_solver_propagate_final(ctx, s, final_eh, _elems = Elementaries(_lib.Z3_solver_propagate_final)):\n  _elems.f(ctx, s, final_eh)\n  _elems.Check(ctx)\n\ndef Z3_solver_propagate_fixed(ctx, s, fixed_eh, _elems = Elementaries(_lib.Z3_solver_propagate_fixed)):\n  _elems.f(ctx, s, fixed_eh)\n  _elems.Check(ctx)\n\ndef Z3_solver_propagate_eq(ctx, s, eq_eh, _elems = Elementaries(_lib.Z3_solver_propagate_eq)):\n  _elems.f(ctx, s, eq_eh)\n  _elems.Check(ctx)\n\ndef Z3_solver_propagate_diseq(ctx, s, diseq_eh, _elems = Elementaries(_lib.Z3_solver_propagate_diseq)):\n  _elems.f(ctx, s, diseq_eh)\n  _elems.Check(ctx)\n\ndef Z3_optimize_register_model_eh(ctx, o, m, user_ctx, on_model_eh, _elems = Elementaries(_lib.Z3_optimize_register_model_eh)):\n  _elems.f(ctx, o, m, user_ctx, on_model_eh)\n  _elems.Check(ctx)\n\n')
    for sig in _API2PY:
        mk_py_wrapper_single(sig)
        if sig[1] != STRING:
            continue
        # String-returning APIs also get a raw (undecoded) variant.
        mk_py_wrapper_single(sig, decode_string=False)
def standardize_sample_weights(sample_weight, output_names):
    """Normalize *sample_weight* against the model's *output_names*.

    Thin convenience wrapper around the shared sample/class-weight
    standardizer, fixing the weight-type label to ``'sample_weight'``.
    """
    return standardize_sample_or_class_weights(
        sample_weight, output_names, 'sample_weight'
    )
def main(_):
    """Entry point: seed RNGs, build a Trainer from the global config, then
    either train or evaluate a pretrained checkpoint."""
    prepare_dirs(config)
    # One seeded RNG is shared by the trainer; TF gets the same seed.
    rng = np.random.RandomState(config.random_seed)
    tf.set_random_seed(config.random_seed)
    trainer = Trainer(config, rng)
    save_config(config.model_dir, config)
    if not config.is_train:
        # Evaluation requires a pretrained model to restore.
        if not config.load_path:
            raise Exception('[!] You should specify `load_path` to load a pretrained model')
        trainer.test()
    else:
        trainer.train()
def wrap_time_cell(cell_func, batch_first=False, lstm=True, with_attention=False, reverse=False):
    """Return a factory that builds ``cell_func(...)`` wrapped in a
    ``TimeRecurrentCell`` configured with the given time-iteration options."""
    def _factory(*args, **kwargs):
        inner_cell = cell_func(*args, **kwargs)
        return TimeRecurrentCell(inner_cell, batch_first, lstm, with_attention, reverse)
    return _factory
# Fix: the decorator line was truncated to a bare `.skipif(...)`, which is a
# SyntaxError — restore the standard `@pytest.mark.skipif` form used for
# conditionally skipping tests (here: when the C fenv() interface is absent).
@pytest.mark.skipif((not _test_internal.have_fenv()), reason='no fenv()')
def test_add_round_down():
    """Exercise round-toward-negative-infinity addition on 10**5 random
    samples via the internal C test helper."""
    np.random.seed(1234)  # deterministic samples for reproducibility
    _test_internal.test_add_round((10 ** 5), 'down')
class BatchNormParameter(message.Message):
    """Generated protocol-buffer message class for BatchNorm layer parameters.

    NOTE(review): body follows the classic protoc-generated pattern — do not
    edit by hand; regenerate from the .proto definition instead.
    """
    # Python-2-style metaclass hook: the protobuf reflection machinery builds
    # all message fields and accessors from DESCRIPTOR at class-creation time.
    __metaclass__ = reflection.GeneratedProtocolMessageType
    # Message descriptor emitted by protoc for this type.
    DESCRIPTOR = _BATCHNORMPARAMETER
def _make_repl(offset: int=0, offset_start: int=0):
def repl(x):
x_str = x.group()
x_int = int(x_str)
if (x_int < offset_start):
return x_str
else:
return str((x_int + offset))
return repl |
def pop_prefix():
    """Drop the most recently pushed prefix and rebuild the cached
    concatenation in the module-level ``_prefix_str``."""
    global _prefix_str
    _prefixes.pop()
    _prefix_str = ''.join(_prefixes)
class RecordEpisodeStatistics(gym.Wrapper):
    """Gym wrapper that accumulates per-episode return, discounted return and
    length, and publishes them as ``info['episode_metrics']`` when an episode
    terminates."""

    def __init__(self, env, gamma):
        super(RecordEpisodeStatistics, self).__init__(env)
        self.t0 = time.time()
        self.gamma = gamma
        self._clear_accumulators()

    def _clear_accumulators(self):
        # Zero the per-episode counters shared by __init__ and reset.
        self.episode_return = 0.0
        self.episode_length = 0
        self.episode_discounted_return = 0.0

    def reset(self, **kwargs):
        obs = super(RecordEpisodeStatistics, self).reset(**kwargs)
        self._clear_accumulators()
        return obs

    def step(self, action):
        obs, reward, done, info = super(RecordEpisodeStatistics, self).step(action)
        self.episode_return += reward
        # Discount exponent is the step index *before* this transition.
        self.episode_discounted_return += reward * (self.gamma ** self.episode_length)
        self.episode_length += 1
        if done:
            info['episode_metrics'] = {
                'return': self.episode_return,
                'length': self.episode_length,
                'time': round(time.time() - self.t0, 6),
                'discounted_return': self.episode_discounted_return,
            }
            # NOTE(review): the discounted return is deliberately NOT zeroed
            # here (matching the original); reset() clears it — confirm this
            # is intended when episodes end without an explicit reset().
            self.episode_return = 0.0
            self.episode_length = 0
            self.t0 = time.time()
        return (obs, reward, done, info)
class BiasFieldCorrector(pymia_fltr.Filter):
    """Filter applying SimpleITK's N4 bias-field correction to an image.

    When no mask is provided through the filter parameters, a foreground mask
    is derived with Otsu thresholding (200 histogram bins).
    """

    def __init__(self, convergence_threshold: float=0.001, max_iterations: typing.List[int]=(50, 50, 50, 50), fullwidth_at_halfmax: float=0.15, filter_noise: float=0.01, histogram_bins: int=200, control_points: typing.List[int]=(4, 4, 4), spline_order: int=3):
        super().__init__()
        # All N4 parameters are stored verbatim and forwarded in execute().
        self.convergence_threshold = convergence_threshold
        self.max_iterations = max_iterations
        self.fullwidth_at_halfmax = fullwidth_at_halfmax
        self.filter_noise = filter_noise
        self.histogram_bins = histogram_bins
        self.control_points = control_points
        self.spline_order = spline_order

    def execute(self, image: sitk.Image, params: BiasFieldCorrectorParams=None) -> sitk.Image:
        """Run N4 correction on *image*, using ``params.mask`` when given."""
        if params is not None:
            mask = params.mask
        else:
            mask = sitk.OtsuThreshold(image, 0, 1, 200)
        return sitk.N4BiasFieldCorrection(
            image, mask,
            self.convergence_threshold, self.max_iterations,
            self.fullwidth_at_halfmax, self.filter_noise,
            self.histogram_bins, self.control_points, self.spline_order)

    def __str__(self):
        # Same rendered text as the original single .format() template.
        return ('BiasFieldCorrector:\n'
                f' convergence_threshold: {self.convergence_threshold}\n'
                f' max_iterations: {self.max_iterations}\n'
                f' fullwidth_at_halfmax: {self.fullwidth_at_halfmax}\n'
                f' filter_noise: {self.filter_noise}\n'
                f' histogram_bins: {self.histogram_bins}\n'
                f' control_points: {self.control_points}\n'
                f' spline_order: {self.spline_order}\n')
class MemRef(MemRefBase):
    """Memory reference for the SG2260 target.

    Resolves an address into a memory type via the device context and derives
    layout-aware local shapes/strides for tensors placed in lane-local memory.

    NOTE(review): several spans below are mangled — three ``(self.address & )``
    expressions are missing their right-hand mask operand (SyntaxError as-is),
    and the bare ``_cache()`` statements before r_addr/name/local_shape/
    local_stride look like stripped decorators (siblings read these as
    attributes, e.g. ``self.npu_offset`` without a call, so npu_offset/
    bank_index/bank_offset were presumably properties too). Confirm against
    the upstream source before relying on this class.
    """
    # Target identifier for this memref flavor.
    device = Target.SG2260
    def __init__(self, address, shape, dtype: DType, stride=None, layout=None, context: 'SG2260Context'=None):
        # Context is mandatory: address decoding depends on the device memmap.
        assert (context is not None)
        self.context = context
        super().__init__(address, shape, dtype, stride, layout)
        # Lane-local (R) tensors without an explicit stride layout get their
        # stride computed from the layout rules.
        if ((self.mtype == MType.R) and (layout != Layout.stride)):
            self.stride = local_layout_to_stride(self)
    def npu_offset(self):
        # Lane (NPU) index holding this local address.
        assert (self.mtype == MType.R)
        return (self.r_addr // LANE_SIZE)
    def bank_index(self):
        # Bank index within the lane.
        assert (self.mtype == MType.R)
        addr_len = (self.r_addr - (self.npu_offset * LANE_SIZE))
        return (addr_len // BANK_SIZE)
    def bank_offset(self):
        # Byte offset within the bank.
        assert (self.mtype == MType.R)
        addr_len = (self.r_addr - (self.npu_offset * LANE_SIZE))
        return (addr_len % BANK_SIZE)
    def get_mtype(self, address) -> MType:
        # Memory-type classification is delegated to the device context.
        return self.context.get_memory_type(address)
    # NOTE(review): bare call — almost certainly a mangled caching/property
    # decorator for the following definition.
    _cache()
    def r_addr(self):
        # Address relative to the start of its memory region.
        if (self.mtype in [MType.UNKNOWN, MType.G]):
            return (self.context.fix_addr(self.address) - self.context.memmap[self.mtype][0])
        # FIXME(review): right operand of '&' is missing (mask constant lost
        # in extraction) — SyntaxError as written.
        r_addr = (self.address & )
        return r_addr
    # NOTE(review): bare call — see note above; likely a mangled decorator.
    _cache()
    def name(self):
        # Human-readable name encoding region, tag/bank and offsets.
        k = self.mtype
        if (k == MType.UNKNOWN):
            return f'%?.{self.address}'
        if (k == MType.R):
            mem_str = f'%{k.name}{self.bank_index}'
            if self.bank_offset:
                mem_str += f'.0x{self.bank_offset:x}'
            if self.npu_offset:
                mem_str += f'.L0x{self.npu_offset:x}'
            return mem_str
        if (k == MType.G):
            # Global: tag lives in bits [40..44].
            tag = ((self.address >> 40) & 31)
            # FIXME(review): missing mask operand — SyntaxError as written.
            offset = (self.address & )
            return f'%{k.name}{tag}.0x{offset:x}'
        elif (k == MType.L):
            # NOTE(review): mask 30 here vs 31 in the G branch — possibly a
            # typo; confirm against the address encoding spec.
            tag = ((self.address >> 40) & 30)
            # FIXME(review): missing mask operand — SyntaxError as written.
            offset = (self.address & )
            return f'%{k.name}{tag}.0x{offset:x}'
        return f'%{k.name}0x{self.r_addr:x}'
    # NOTE(review): bare call — see note above; likely a mangled decorator.
    _cache()
    def local_shape(self):
        # Layout-dependent (n, c, h, w) view of the tensor in local memory.
        NPU_OFFSET = 0
        (n, c, h, w, *_) = (*self.shape, 1, 1)
        def get_cnum(c):
            # Ceil-divide channels (plus lane offset) across NPU lanes.
            return ((((c + NPU_OFFSET) + NPU_NUM) - 1) // NPU_NUM)
        if (self.layout == Layout._64IC):
            return (((c + 63) // 64), get_cnum(n), h, (w * 64))
        if (self.layout == Layout._32IC):
            # NOTE(review): `(c + 32) // 32` differs from the `+63`/`+15`
            # ceil-div pattern of the 64IC/16IC branches — possible off-by-one.
            return (((c + 32) // 32), get_cnum(n), h, (w * 32))
        if (self.layout == Layout._16IC):
            return (((c + 15) // 16), get_cnum(n), h, (w * 16))
        if (self.layout == Layout._1IC):
            return (c, get_cnum(n), h, w)
        if (self.layout == Layout.matrix):
            w = self.layout.args[0]
            return (n, get_cnum((((c + w) - 1) // w)), 1, w)
        if (self.layout == Layout.matrix2):
            return (1, get_cnum(n), 1, c)
        if (self.layout == Layout.DMA4Bank):
            return (n, get_cnum(c), h, w)
        if (self.layout == Layout.DMAstride):
            return (n, get_cnum(c), h, w)
        if (self.layout == Layout.DMAmatrix):
            w = self.layout.args[1]
            return (n, get_cnum((((c + w) - 1) // w)), 1, w)
        if (self.layout == Layout.DMAlinear):
            return self.shape
        return (n, get_cnum(c), h, w)
    # NOTE(review): bare call — see note above; likely a mangled decorator.
    _cache()
    def local_stride(self):
        # Layout-dependent element strides matching local_shape().
        (n, c, h, w, *_) = (*self.shape, 1, 1)
        NPU_OFFSET = self.npu_offset
        def get_eu_align_stride(shape):
            # Stride with the c-stride rounded up to the EU alignment unit.
            (_, _c, _h, _w) = shape
            align_type = (ALIGN_EU_BASE // self.itemsize)
            c_stride = (((((_w * _h) + align_type) - 1) // align_type) * align_type)
            n_stride = (((((_c + NPU_OFFSET) + NPU_NUM) - 1) // NPU_NUM) * c_stride)
            return (n_stride, c_stride, _w, 1)
        if (self.layout == Layout._64IC):
            return (((64 * h) * w), (((((c + 63) // 64) * 64) * h) * w), (64 * w), 1)
        if (self.layout == Layout._32IC):
            # NOTE(review): `(c + 32) // 32` — same suspected off-by-one as in
            # local_shape's 32IC branch.
            return (((32 * h) * w), (((((c + 32) // 32) * 32) * h) * w), (32 * w), 1)
        if (self.layout == Layout._1IC):
            return ((h * w), ((c * h) * w), w, 1)
        if (self.layout == Layout.matrix):
            w = self.layout.args[0]
            shape = (n, (((c + w) - 1) // w), 1, w)
            return get_eu_align_stride(shape)
        if (self.layout == Layout.matrix2):
            shape = (1, n, 1, c)
            return get_eu_align_stride(shape)
        return self.stride
def parse_input():
    """Parse the ``--yaml`` CLI option, expand it into full configuration via
    the YAML loader, seed RNGs for reproducibility, and return
    ``(args, args_dict, input_args)``."""
    parser = argparse.ArgumentParser()
    parser.add_argument('--yaml', type=str, help='yaml configuration basefile.')
    # parse_known_args: tolerate extra CLI flags consumed elsewhere.
    input_args, _ = parser.parse_known_args()
    args, args_dict = get_yaml_args(input_args)
    reproducibility.init_seed()
    return args, args_dict, input_args
class PCATransformer(LAMLTransformer):
    """Transformer that projects numeric features onto PCA components."""

    _fit_checks = (numeric_check,)
    _transform_checks = ()
    _fname_prefix = 'pca'

    def features(self) -> List[str]:
        """Names of the generated PCA output features."""
        return self._features

    def __init__(self, subs: Optional[int]=None, random_state: int=42, n_components: int=500):
        self.subs = subs
        self.random_state = random_state
        self.n_components = n_components
        self._pca = PCA
        self.pca = None

    def fit(self, dataset: NumpyTransformable):
        """Fit PCA on the dataset's numpy data and derive output feature names."""
        for check in self._fit_checks:
            check(dataset)
        np_ds = dataset.to_numpy()
        data = np_ds.data
        # PCA cannot produce more components than n_features - 1.
        self.n_components = np.minimum(self.n_components, data.shape[1] - 1)
        self.pca = self._pca(n_components=self.n_components, random_state=self.random_state)
        self.pca.fit(data)
        # Reuse the base name of the first input feature as the suffix.
        orig_name = np_ds.features[0].split('__')[-1]
        prefix = np.char.array([self._fname_prefix + '_'])
        suffix = np.char.array(['__' + orig_name])
        self._features = list(prefix + np.arange(self.n_components).astype(str) + suffix)
        return self

    def transform(self, dataset: NumpyTransformable) -> NumpyDataset:
        """Project the dataset onto the fitted PCA components."""
        super().transform(dataset)
        np_ds = dataset.to_numpy()
        projected = self.pca.transform(np_ds.data)
        output = np_ds.empty().to_numpy()
        output.set_data(projected, self.features, NumericRole(np.float32))
        return output
def get_args():
    """Build and parse the 3D-STMN command line.

    One required positional (config path) plus optional training and
    distributed-launch flags; returns the parsed ``argparse.Namespace``.
    """
    parser = argparse.ArgumentParser('3D-STMN')
    parser.add_argument('config', type=str, help='path to config file')
    # Optional flags, registered in declaration order (order affects --help).
    optional_flags = [
        ('--resume', dict(type=str, help='path to resume from')),
        ('--work_dir', dict(type=str, help='working directory')),
        ('--skip_validate', dict(action='store_true', help='skip validation')),
        ('--skip_training', dict(action='store_true', help='skip training')),
        ('--dist', dict(action='store_true', help='if distributed')),
        ('--num_gpus', dict(type=int, default=1, help='number of gpus')),
        ('--num_machines', dict(type=int, default=1, help='number of machines')),
        ('--machine_rank', dict(type=int, default=0, help='rank of machine')),
        ('--dist_url', dict(type=str, default='auto', help='distributed training url')),
        ('--gpu_ids', dict(type=int, default=[0], nargs='+', help='ids of gpus to use')),
    ]
    for flag, options in optional_flags:
        parser.add_argument(flag, **options)
    return parser.parse_args()
def run_variation(context):
    """Place the scene camera at a random spherical position.

    Radius is jittered around ``camera_dist_mean`` by ``camera_dist_var``;
    the polar angle is biased around the equator and the azimuth is uniform.
    """
    camera = context.scene.objects['Camera']
    uvh = context.scene.uv_holographics
    radius = uvh.camera_dist_mean + uniform(-uvh.camera_dist_var, uvh.camera_dist_var)
    theta = np.pi / 2 + uniform(-np.pi / 4, np.pi / 8)
    phi = uniform(0, 2 * np.pi)
    # Spherical -> Cartesian conversion.
    sin_theta = np.sin(theta)
    camera.location = (radius * sin_theta * np.cos(phi),
                       radius * sin_theta * np.sin(phi),
                       radius * np.cos(theta))
def register_types(module):
root_module = module.get_root()
module.add_class('Address', import_from_module='ns.network')
module.add_enum('MaxSize_e', ['MAX_SIZE'], outer_class=root_module['ns3::Address'], import_from_module='ns.network')
module.add_class('AttributeConstructionList', import_from_module='ns.core')
module.add_class('Item', import_from_module='ns.core', outer_class=root_module['ns3::AttributeConstructionList'])
typehandlers.add_type_alias(u'std::list< ns3::AttributeConstructionList::Item > const_iterator', u'ns3::AttributeConstructionList::CIterator')
typehandlers.add_type_alias(u'std::list< ns3::AttributeConstructionList::Item > const_iterator*', u'ns3::AttributeConstructionList::CIterator*')
typehandlers.add_type_alias(u'std::list< ns3::AttributeConstructionList::Item > const_iterator&', u'ns3::AttributeConstructionList::CIterator&')
module.add_class('Buffer', import_from_module='ns.network')
module.add_class('Iterator', import_from_module='ns.network', outer_class=root_module['ns3::Buffer'])
module.add_class('ByteTagIterator', import_from_module='ns.network')
module.add_class('Item', import_from_module='ns.network', outer_class=root_module['ns3::ByteTagIterator'])
module.add_class('ByteTagList', import_from_module='ns.network')
module.add_class('Iterator', import_from_module='ns.network', outer_class=root_module['ns3::ByteTagList'])
module.add_class('Item', import_from_module='ns.network', outer_class=root_module['ns3::ByteTagList::Iterator'])
module.add_class('CallbackBase', import_from_module='ns.core')
module.add_class('DefaultDeleter', import_from_module='ns.core', template_parameters=['ns3::AttributeAccessor'])
module.add_class('DefaultDeleter', import_from_module='ns.core', template_parameters=['ns3::AttributeChecker'])
module.add_class('DefaultDeleter', import_from_module='ns.core', template_parameters=['ns3::AttributeValue'])
module.add_class('DefaultDeleter', import_from_module='ns.core', template_parameters=['ns3::CallbackImplBase'])
module.add_class('DefaultDeleter', import_from_module='ns.core', template_parameters=['ns3::EventImpl'])
module.add_class('DefaultDeleter', import_from_module='ns.core', template_parameters=['ns3::Hash::Implementation'])
module.add_class('DefaultDeleter', import_from_module='ns.core', template_parameters=['ns3::Ipv4Route'])
module.add_class('DefaultDeleter', import_from_module='ns.core', template_parameters=['ns3::NixVector'])
module.add_class('DefaultDeleter', import_from_module='ns.core', template_parameters=['ns3::Packet'])
module.add_class('DefaultDeleter', import_from_module='ns.core', template_parameters=['ns3::TraceSourceAccessor'])
module.add_class('EventId', import_from_module='ns.core')
module.add_class('Hasher', import_from_module='ns.core')
module.add_class('Inet6SocketAddress', import_from_module='ns.network')
root_module['ns3::Inet6SocketAddress'].implicitly_converts_to(root_module['ns3::Address'])
module.add_class('InetSocketAddress', import_from_module='ns.network')
root_module['ns3::InetSocketAddress'].implicitly_converts_to(root_module['ns3::Address'])
module.add_class('Ipv4Address', import_from_module='ns.network')
root_module['ns3::Ipv4Address'].implicitly_converts_to(root_module['ns3::Address'])
module.add_class('Ipv4InterfaceAddress', import_from_module='ns.internet')
module.add_enum('InterfaceAddressScope_e', ['HOST', 'LINK', 'GLOBAL'], outer_class=root_module['ns3::Ipv4InterfaceAddress'], import_from_module='ns.internet')
module.add_class('Ipv4Mask', import_from_module='ns.network')
module.add_class('Ipv4RoutingHelper', allow_subclassing=True, import_from_module='ns.internet')
module.add_class('Ipv6Address', import_from_module='ns.network')
root_module['ns3::Ipv6Address'].implicitly_converts_to(root_module['ns3::Address'])
module.add_class('Ipv6Prefix', import_from_module='ns.network')
module.add_class('Mac48Address', import_from_module='ns.network')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Mac48Address )', u'ns3::Mac48Address::TracedCallback')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Mac48Address )*', u'ns3::Mac48Address::TracedCallback*')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Mac48Address )&', u'ns3::Mac48Address::TracedCallback&')
root_module['ns3::Mac48Address'].implicitly_converts_to(root_module['ns3::Address'])
module.add_class('Mac8Address', import_from_module='ns.network')
root_module['ns3::Mac8Address'].implicitly_converts_to(root_module['ns3::Address'])
module.add_class('NetDeviceContainer', import_from_module='ns.network')
typehandlers.add_type_alias(u'std::vector< ns3::Ptr< ns3::NetDevice > > const_iterator', u'ns3::NetDeviceContainer::Iterator')
typehandlers.add_type_alias(u'std::vector< ns3::Ptr< ns3::NetDevice > > const_iterator*', u'ns3::NetDeviceContainer::Iterator*')
typehandlers.add_type_alias(u'std::vector< ns3::Ptr< ns3::NetDevice > > const_iterator&', u'ns3::NetDeviceContainer::Iterator&')
module.add_class('NodeContainer', import_from_module='ns.network')
typehandlers.add_type_alias(u'std::vector< ns3::Ptr< ns3::Node > > const_iterator', u'ns3::NodeContainer::Iterator')
typehandlers.add_type_alias(u'std::vector< ns3::Ptr< ns3::Node > > const_iterator*', u'ns3::NodeContainer::Iterator*')
typehandlers.add_type_alias(u'std::vector< ns3::Ptr< ns3::Node > > const_iterator&', u'ns3::NodeContainer::Iterator&')
module.add_class('NodeList', import_from_module='ns.network')
typehandlers.add_type_alias(u'std::vector< ns3::Ptr< ns3::Node > > const_iterator', u'ns3::NodeList::Iterator')
typehandlers.add_type_alias(u'std::vector< ns3::Ptr< ns3::Node > > const_iterator*', u'ns3::NodeList::Iterator*')
typehandlers.add_type_alias(u'std::vector< ns3::Ptr< ns3::Node > > const_iterator&', u'ns3::NodeList::Iterator&')
module.add_class('ObjectBase', allow_subclassing=True, import_from_module='ns.core')
module.add_class('ObjectDeleter', import_from_module='ns.core')
module.add_class('ObjectFactory', import_from_module='ns.core')
module.add_class('PacketMetadata', import_from_module='ns.network')
module.add_class('Item', import_from_module='ns.network', outer_class=root_module['ns3::PacketMetadata'])
module.add_enum('ItemType', ['PAYLOAD', 'HEADER', 'TRAILER'], outer_class=root_module['ns3::PacketMetadata::Item'], import_from_module='ns.network')
module.add_class('ItemIterator', import_from_module='ns.network', outer_class=root_module['ns3::PacketMetadata'])
module.add_class('PacketTagIterator', import_from_module='ns.network')
module.add_class('Item', import_from_module='ns.network', outer_class=root_module['ns3::PacketTagIterator'])
module.add_class('PacketTagList', import_from_module='ns.network')
module.add_class('TagData', import_from_module='ns.network', outer_class=root_module['ns3::PacketTagList'])
module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::Object', 'ns3::ObjectBase', 'ns3::ObjectDeleter'], parent=root_module['ns3::ObjectBase'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
module.add_class('Simulator', destructor_visibility='private', import_from_module='ns.core')
module.add_enum('', ['NO_CONTEXT'], outer_class=root_module['ns3::Simulator'], import_from_module='ns.core')
module.add_class('Tag', import_from_module='ns.network', parent=root_module['ns3::ObjectBase'])
module.add_class('TagBuffer', import_from_module='ns.network')
module.add_class('TimeWithUnit', import_from_module='ns.core')
module.add_class('TypeId', import_from_module='ns.core')
module.add_enum('AttributeFlag', ['ATTR_GET', 'ATTR_SET', 'ATTR_CONSTRUCT', 'ATTR_SGC'], outer_class=root_module['ns3::TypeId'], import_from_module='ns.core')
module.add_enum('SupportLevel', ['SUPPORTED', 'DEPRECATED', 'OBSOLETE'], outer_class=root_module['ns3::TypeId'], import_from_module='ns.core')
module.add_class('AttributeInformation', import_from_module='ns.core', outer_class=root_module['ns3::TypeId'])
module.add_class('TraceSourceInformation', import_from_module='ns.core', outer_class=root_module['ns3::TypeId'])
typehandlers.add_type_alias(u'uint32_t', u'ns3::TypeId::hash_t')
typehandlers.add_type_alias(u'uint32_t*', u'ns3::TypeId::hash_t*')
typehandlers.add_type_alias(u'uint32_t&', u'ns3::TypeId::hash_t&')
module.add_class('empty', import_from_module='ns.core')
module.add_class('int64x64_t', import_from_module='ns.core')
module.add_enum('impl_type', ['int128_impl', 'cairo_impl', 'ld_impl'], outer_class=root_module['ns3::int64x64_t'], import_from_module='ns.core')
module.add_class('Chunk', import_from_module='ns.network', parent=root_module['ns3::ObjectBase'])
module.add_class('Header', import_from_module='ns.network', parent=root_module['ns3::Chunk'])
module.add_class('Ipv4Header', import_from_module='ns.internet', parent=root_module['ns3::Header'])
module.add_enum('DscpType', ['DscpDefault', 'DSCP_CS1', 'DSCP_AF11', 'DSCP_AF12', 'DSCP_AF13', 'DSCP_CS2', 'DSCP_AF21', 'DSCP_AF22', 'DSCP_AF23', 'DSCP_CS3', 'DSCP_AF31', 'DSCP_AF32', 'DSCP_AF33', 'DSCP_CS4', 'DSCP_AF41', 'DSCP_AF42', 'DSCP_AF43', 'DSCP_CS5', 'DSCP_EF', 'DSCP_CS6', 'DSCP_CS7'], outer_class=root_module['ns3::Ipv4Header'], import_from_module='ns.internet')
module.add_enum('EcnType', ['ECN_NotECT', 'ECN_ECT1', 'ECN_ECT0', 'ECN_CE'], outer_class=root_module['ns3::Ipv4Header'], import_from_module='ns.internet')
module.add_class('Ipv4NixVectorHelper', parent=root_module['ns3::Ipv4RoutingHelper'])
module.add_class('Object', import_from_module='ns.core', parent=root_module['ns3::SimpleRefCount< ns3::Object, ns3::ObjectBase, ns3::ObjectDeleter >'])
module.add_class('AggregateIterator', import_from_module='ns.core', outer_class=root_module['ns3::Object'])
module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::AttributeAccessor', 'ns3::empty', 'ns3::DefaultDeleter<ns3::AttributeAccessor>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::AttributeChecker', 'ns3::empty', 'ns3::DefaultDeleter<ns3::AttributeChecker>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::AttributeValue', 'ns3::empty', 'ns3::DefaultDeleter<ns3::AttributeValue>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::CallbackImplBase', 'ns3::empty', 'ns3::DefaultDeleter<ns3::CallbackImplBase>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::EventImpl', 'ns3::empty', 'ns3::DefaultDeleter<ns3::EventImpl>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::Hash::Implementation', 'ns3::empty', 'ns3::DefaultDeleter<ns3::Hash::Implementation>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::Ipv4MulticastRoute', 'ns3::empty', 'ns3::DefaultDeleter<ns3::Ipv4MulticastRoute>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::Ipv4Route', 'ns3::empty', 'ns3::DefaultDeleter<ns3::Ipv4Route>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::NixVector', 'ns3::empty', 'ns3::DefaultDeleter<ns3::NixVector>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::OutputStreamWrapper', 'ns3::empty', 'ns3::DefaultDeleter<ns3::OutputStreamWrapper>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::Packet', 'ns3::empty', 'ns3::DefaultDeleter<ns3::Packet>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::TraceSourceAccessor', 'ns3::empty', 'ns3::DefaultDeleter<ns3::TraceSourceAccessor>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
module.add_class('Socket', import_from_module='ns.network', parent=root_module['ns3::Object'])
module.add_enum('SocketErrno', ['ERROR_NOTERROR', 'ERROR_ISCONN', 'ERROR_NOTCONN', 'ERROR_MSGSIZE', 'ERROR_AGAIN', 'ERROR_SHUTDOWN', 'ERROR_OPNOTSUPP', 'ERROR_AFNOSUPPORT', 'ERROR_INVAL', 'ERROR_BADF', 'ERROR_NOROUTETOHOST', 'ERROR_NODEV', 'ERROR_ADDRNOTAVAIL', 'ERROR_ADDRINUSE', 'SOCKET_ERRNO_LAST'], outer_class=root_module['ns3::Socket'], import_from_module='ns.network')
module.add_enum('SocketType', ['NS3_SOCK_STREAM', 'NS3_SOCK_SEQPACKET', 'NS3_SOCK_DGRAM', 'NS3_SOCK_RAW'], outer_class=root_module['ns3::Socket'], import_from_module='ns.network')
module.add_enum('SocketPriority', ['NS3_PRIO_BESTEFFORT', 'NS3_PRIO_FILLER', 'NS3_PRIO_BULK', 'NS3_PRIO_INTERACTIVE_BULK', 'NS3_PRIO_INTERACTIVE', 'NS3_PRIO_CONTROL'], outer_class=root_module['ns3::Socket'], import_from_module='ns.network')
module.add_enum('Ipv6MulticastFilterMode', ['INCLUDE', 'EXCLUDE'], outer_class=root_module['ns3::Socket'], import_from_module='ns.network')
module.add_class('SocketIpTosTag', import_from_module='ns.network', parent=root_module['ns3::Tag'])
module.add_class('SocketIpTtlTag', import_from_module='ns.network', parent=root_module['ns3::Tag'])
module.add_class('SocketIpv6HopLimitTag', import_from_module='ns.network', parent=root_module['ns3::Tag'])
module.add_class('SocketIpv6TclassTag', import_from_module='ns.network', parent=root_module['ns3::Tag'])
module.add_class('SocketPriorityTag', import_from_module='ns.network', parent=root_module['ns3::Tag'])
module.add_class('SocketSetDontFragmentTag', import_from_module='ns.network', parent=root_module['ns3::Tag'])
module.add_class('Time', import_from_module='ns.core')
module.add_enum('Unit', ['Y', 'D', 'H', 'MIN', 'S', 'MS', 'US', 'NS', 'PS', 'FS', 'LAST'], outer_class=root_module['ns3::Time'], import_from_module='ns.core')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Time )', u'ns3::Time::TracedCallback')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Time )*', u'ns3::Time::TracedCallback*')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Time )&', u'ns3::Time::TracedCallback&')
root_module['ns3::Time'].implicitly_converts_to(root_module['ns3::int64x64_t'])
module.add_class('TraceSourceAccessor', import_from_module='ns.core', parent=root_module['ns3::SimpleRefCount< ns3::TraceSourceAccessor, ns3::empty, ns3::DefaultDeleter<ns3::TraceSourceAccessor> >'])
module.add_class('Trailer', import_from_module='ns.network', parent=root_module['ns3::Chunk'])
module.add_class('AttributeAccessor', import_from_module='ns.core', parent=root_module['ns3::SimpleRefCount< ns3::AttributeAccessor, ns3::empty, ns3::DefaultDeleter<ns3::AttributeAccessor> >'])
module.add_class('AttributeChecker', allow_subclassing=False, automatic_type_narrowing=True, import_from_module='ns.core', parent=root_module['ns3::SimpleRefCount< ns3::AttributeChecker, ns3::empty, ns3::DefaultDeleter<ns3::AttributeChecker> >'])
module.add_class('AttributeValue', allow_subclassing=False, automatic_type_narrowing=True, import_from_module='ns.core', parent=root_module['ns3::SimpleRefCount< ns3::AttributeValue, ns3::empty, ns3::DefaultDeleter<ns3::AttributeValue> >'])
module.add_class('CallbackChecker', import_from_module='ns.core', parent=root_module['ns3::AttributeChecker'])
module.add_class('CallbackImplBase', import_from_module='ns.core', parent=root_module['ns3::SimpleRefCount< ns3::CallbackImplBase, ns3::empty, ns3::DefaultDeleter<ns3::CallbackImplBase> >'])
module.add_class('CallbackValue', import_from_module='ns.core', parent=root_module['ns3::AttributeValue'])
module.add_class('Channel', import_from_module='ns.network', parent=root_module['ns3::Object'])
module.add_class('EmptyAttributeAccessor', import_from_module='ns.core', parent=root_module['ns3::AttributeAccessor'])
module.add_class('EmptyAttributeChecker', import_from_module='ns.core', parent=root_module['ns3::AttributeChecker'])
module.add_class('EmptyAttributeValue', import_from_module='ns.core', parent=root_module['ns3::AttributeValue'])
module.add_class('EventImpl', import_from_module='ns.core', parent=root_module['ns3::SimpleRefCount< ns3::EventImpl, ns3::empty, ns3::DefaultDeleter<ns3::EventImpl> >'])
module.add_class('Ipv4', import_from_module='ns.internet', parent=root_module['ns3::Object'])
module.add_class('Ipv4AddressChecker', import_from_module='ns.network', parent=root_module['ns3::AttributeChecker'])
module.add_class('Ipv4AddressValue', import_from_module='ns.network', parent=root_module['ns3::AttributeValue'])
module.add_class('Ipv4MaskChecker', import_from_module='ns.network', parent=root_module['ns3::AttributeChecker'])
module.add_class('Ipv4MaskValue', import_from_module='ns.network', parent=root_module['ns3::AttributeValue'])
module.add_class('Ipv4MulticastRoute', import_from_module='ns.internet', parent=root_module['ns3::SimpleRefCount< ns3::Ipv4MulticastRoute, ns3::empty, ns3::DefaultDeleter<ns3::Ipv4MulticastRoute> >'])
module.add_class('Ipv4Route', import_from_module='ns.internet', parent=root_module['ns3::SimpleRefCount< ns3::Ipv4Route, ns3::empty, ns3::DefaultDeleter<ns3::Ipv4Route> >'])
module.add_class('Ipv4RoutingProtocol', import_from_module='ns.internet', parent=root_module['ns3::Object'])
typehandlers.add_type_alias(u'ns3::Callback< void, ns3::Ptr< ns3::Ipv4Route >, ns3::Ptr< ns3::Packet const >, ns3::Ipv4Header const &, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', u'ns3::Ipv4RoutingProtocol::UnicastForwardCallback')
typehandlers.add_type_alias(u'ns3::Callback< void, ns3::Ptr< ns3::Ipv4Route >, ns3::Ptr< ns3::Packet const >, ns3::Ipv4Header const &, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >*', u'ns3::Ipv4RoutingProtocol::UnicastForwardCallback*')
typehandlers.add_type_alias(u'ns3::Callback< void, ns3::Ptr< ns3::Ipv4Route >, ns3::Ptr< ns3::Packet const >, ns3::Ipv4Header const &, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >&', u'ns3::Ipv4RoutingProtocol::UnicastForwardCallback&')
typehandlers.add_type_alias(u'ns3::Callback< void, ns3::Ptr< ns3::Ipv4MulticastRoute >, ns3::Ptr< ns3::Packet const >, ns3::Ipv4Header const &, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', u'ns3::Ipv4RoutingProtocol::MulticastForwardCallback')
typehandlers.add_type_alias(u'ns3::Callback< void, ns3::Ptr< ns3::Ipv4MulticastRoute >, ns3::Ptr< ns3::Packet const >, ns3::Ipv4Header const &, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >*', u'ns3::Ipv4RoutingProtocol::MulticastForwardCallback*')
typehandlers.add_type_alias(u'ns3::Callback< void, ns3::Ptr< ns3::Ipv4MulticastRoute >, ns3::Ptr< ns3::Packet const >, ns3::Ipv4Header const &, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >&', u'ns3::Ipv4RoutingProtocol::MulticastForwardCallback&')
typehandlers.add_type_alias(u'ns3::Callback< void, ns3::Ptr< ns3::Packet const >, ns3::Ipv4Header const &, unsigned int, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', u'ns3::Ipv4RoutingProtocol::LocalDeliverCallback')
typehandlers.add_type_alias(u'ns3::Callback< void, ns3::Ptr< ns3::Packet const >, ns3::Ipv4Header const &, unsigned int, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >*', u'ns3::Ipv4RoutingProtocol::LocalDeliverCallback*')
typehandlers.add_type_alias(u'ns3::Callback< void, ns3::Ptr< ns3::Packet const >, ns3::Ipv4Header const &, unsigned int, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >&', u'ns3::Ipv4RoutingProtocol::LocalDeliverCallback&')
typehandlers.add_type_alias(u'ns3::Callback< void, ns3::Ptr< ns3::Packet const >, ns3::Ipv4Header const &, ns3::Socket::SocketErrno, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', u'ns3::Ipv4RoutingProtocol::ErrorCallback')
typehandlers.add_type_alias(u'ns3::Callback< void, ns3::Ptr< ns3::Packet const >, ns3::Ipv4Header const &, ns3::Socket::SocketErrno, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >*', u'ns3::Ipv4RoutingProtocol::ErrorCallback*')
typehandlers.add_type_alias(u'ns3::Callback< void, ns3::Ptr< ns3::Packet const >, ns3::Ipv4Header const &, ns3::Socket::SocketErrno, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >&', u'ns3::Ipv4RoutingProtocol::ErrorCallback&')
module.add_class('Ipv6AddressChecker', import_from_module='ns.network', parent=root_module['ns3::AttributeChecker'])
module.add_class('Ipv6AddressValue', import_from_module='ns.network', parent=root_module['ns3::AttributeValue'])
module.add_class('Ipv6PrefixChecker', import_from_module='ns.network', parent=root_module['ns3::AttributeChecker'])
module.add_class('Ipv6PrefixValue', import_from_module='ns.network', parent=root_module['ns3::AttributeValue'])
module.add_class('Mac48AddressChecker', import_from_module='ns.network', parent=root_module['ns3::AttributeChecker'])
module.add_class('Mac48AddressValue', import_from_module='ns.network', parent=root_module['ns3::AttributeValue'])
module.add_class('NetDevice', import_from_module='ns.network', parent=root_module['ns3::Object'])
module.add_enum('PacketType', ['PACKET_HOST', 'NS3_PACKET_HOST', 'PACKET_BROADCAST', 'NS3_PACKET_BROADCAST', 'PACKET_MULTICAST', 'NS3_PACKET_MULTICAST', 'PACKET_OTHERHOST', 'NS3_PACKET_OTHERHOST'], outer_class=root_module['ns3::NetDevice'], import_from_module='ns.network')
typehandlers.add_type_alias(u'void ( * ) ( )', u'ns3::NetDevice::LinkChangeTracedCallback')
typehandlers.add_type_alias(u'void ( * ) ( )*', u'ns3::NetDevice::LinkChangeTracedCallback*')
typehandlers.add_type_alias(u'void ( * ) ( )&', u'ns3::NetDevice::LinkChangeTracedCallback&')
typehandlers.add_type_alias(u'ns3::Callback< bool, ns3::Ptr< ns3::NetDevice >, ns3::Ptr< ns3::Packet const >, unsigned short, ns3::Address const &, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', u'ns3::NetDevice::ReceiveCallback')
typehandlers.add_type_alias(u'ns3::Callback< bool, ns3::Ptr< ns3::NetDevice >, ns3::Ptr< ns3::Packet const >, unsigned short, ns3::Address const &, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >*', u'ns3::NetDevice::ReceiveCallback*')
typehandlers.add_type_alias(u'ns3::Callback< bool, ns3::Ptr< ns3::NetDevice >, ns3::Ptr< ns3::Packet const >, unsigned short, ns3::Address const &, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >&', u'ns3::NetDevice::ReceiveCallback&')
typehandlers.add_type_alias(u'ns3::Callback< bool, ns3::Ptr< ns3::NetDevice >, ns3::Ptr< ns3::Packet const >, unsigned short, ns3::Address const &, ns3::Address const &, ns3::NetDevice::PacketType, ns3::empty, ns3::empty, ns3::empty >', u'ns3::NetDevice::PromiscReceiveCallback')
typehandlers.add_type_alias(u'ns3::Callback< bool, ns3::Ptr< ns3::NetDevice >, ns3::Ptr< ns3::Packet const >, unsigned short, ns3::Address const &, ns3::Address const &, ns3::NetDevice::PacketType, ns3::empty, ns3::empty, ns3::empty >*', u'ns3::NetDevice::PromiscReceiveCallback*')
typehandlers.add_type_alias(u'ns3::Callback< bool, ns3::Ptr< ns3::NetDevice >, ns3::Ptr< ns3::Packet const >, unsigned short, ns3::Address const &, ns3::Address const &, ns3::NetDevice::PacketType, ns3::empty, ns3::empty, ns3::empty >&', u'ns3::NetDevice::PromiscReceiveCallback&')
module.add_class('NixVector', import_from_module='ns.network', parent=root_module['ns3::SimpleRefCount< ns3::NixVector, ns3::empty, ns3::DefaultDeleter<ns3::NixVector> >'])
module.add_class('Node', import_from_module='ns.network', parent=root_module['ns3::Object'])
typehandlers.add_type_alias(u'ns3::Callback< void, ns3::Ptr< ns3::NetDevice >, ns3::Ptr< ns3::Packet const >, unsigned short, ns3::Address const &, ns3::Address const &, ns3::NetDevice::PacketType, ns3::empty, ns3::empty, ns3::empty >', u'ns3::Node::ProtocolHandler')
typehandlers.add_type_alias(u'ns3::Callback< void, ns3::Ptr< ns3::NetDevice >, ns3::Ptr< ns3::Packet const >, unsigned short, ns3::Address const &, ns3::Address const &, ns3::NetDevice::PacketType, ns3::empty, ns3::empty, ns3::empty >*', u'ns3::Node::ProtocolHandler*')
typehandlers.add_type_alias(u'ns3::Callback< void, ns3::Ptr< ns3::NetDevice >, ns3::Ptr< ns3::Packet const >, unsigned short, ns3::Address const &, ns3::Address const &, ns3::NetDevice::PacketType, ns3::empty, ns3::empty, ns3::empty >&', u'ns3::Node::ProtocolHandler&')
typehandlers.add_type_alias(u'ns3::Callback< void, ns3::Ptr< ns3::NetDevice >, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', u'ns3::Node::DeviceAdditionListener')
typehandlers.add_type_alias(u'ns3::Callback< void, ns3::Ptr< ns3::NetDevice >, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >*', u'ns3::Node::DeviceAdditionListener*')
typehandlers.add_type_alias(u'ns3::Callback< void, ns3::Ptr< ns3::NetDevice >, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >&', u'ns3::Node::DeviceAdditionListener&')
module.add_class('ObjectFactoryChecker', import_from_module='ns.core', parent=root_module['ns3::AttributeChecker'])
module.add_class('ObjectFactoryValue', import_from_module='ns.core', parent=root_module['ns3::AttributeValue'])
module.add_class('OutputStreamWrapper', import_from_module='ns.network', parent=root_module['ns3::SimpleRefCount< ns3::OutputStreamWrapper, ns3::empty, ns3::DefaultDeleter<ns3::OutputStreamWrapper> >'])
module.add_class('Packet', import_from_module='ns.network', parent=root_module['ns3::SimpleRefCount< ns3::Packet, ns3::empty, ns3::DefaultDeleter<ns3::Packet> >'])
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ptr< ns3::Packet const > )', u'ns3::Packet::TracedCallback')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ptr< ns3::Packet const > )*', u'ns3::Packet::TracedCallback*')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ptr< ns3::Packet const > )&', u'ns3::Packet::TracedCallback&')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ptr< ns3::Packet const >, ns3::Address const & )', u'ns3::Packet::AddressTracedCallback')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ptr< ns3::Packet const >, ns3::Address const & )*', u'ns3::Packet::AddressTracedCallback*')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ptr< ns3::Packet const >, ns3::Address const & )&', u'ns3::Packet::AddressTracedCallback&')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ptr< ns3::Packet const > const, ns3::Address const &, ns3::Address const & )', u'ns3::Packet::TwoAddressTracedCallback')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ptr< ns3::Packet const > const, ns3::Address const &, ns3::Address const & )*', u'ns3::Packet::TwoAddressTracedCallback*')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ptr< ns3::Packet const > const, ns3::Address const &, ns3::Address const & )&', u'ns3::Packet::TwoAddressTracedCallback&')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ptr< ns3::Packet const >, ns3::Mac48Address )', u'ns3::Packet::Mac48AddressTracedCallback')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ptr< ns3::Packet const >, ns3::Mac48Address )*', u'ns3::Packet::Mac48AddressTracedCallback*')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ptr< ns3::Packet const >, ns3::Mac48Address )&', u'ns3::Packet::Mac48AddressTracedCallback&')
typehandlers.add_type_alias(u'void ( * ) ( uint32_t, uint32_t )', u'ns3::Packet::SizeTracedCallback')
typehandlers.add_type_alias(u'void ( * ) ( uint32_t, uint32_t )*', u'ns3::Packet::SizeTracedCallback*')
typehandlers.add_type_alias(u'void ( * ) ( uint32_t, uint32_t )&', u'ns3::Packet::SizeTracedCallback&')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ptr< ns3::Packet const >, double )', u'ns3::Packet::SinrTracedCallback')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ptr< ns3::Packet const >, double )*', u'ns3::Packet::SinrTracedCallback*')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ptr< ns3::Packet const >, double )&', u'ns3::Packet::SinrTracedCallback&')
module.add_class('TimeValue', import_from_module='ns.core', parent=root_module['ns3::AttributeValue'])
module.add_class('TypeIdChecker', import_from_module='ns.core', parent=root_module['ns3::AttributeChecker'])
module.add_class('TypeIdValue', import_from_module='ns.core', parent=root_module['ns3::AttributeValue'])
module.add_class('AddressChecker', import_from_module='ns.network', parent=root_module['ns3::AttributeChecker'])
module.add_class('AddressValue', import_from_module='ns.network', parent=root_module['ns3::AttributeValue'])
module.add_class('BridgeChannel', import_from_module='ns.bridge', parent=root_module['ns3::Channel'])
module.add_class('BridgeNetDevice', import_from_module='ns.bridge', parent=root_module['ns3::NetDevice'])
module.add_class('CallbackImpl', import_from_module='ns.core', template_parameters=['bool', 'ns3::Ptr<ns3::Socket>', 'const ns3::Address &', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'], parent=root_module['ns3::CallbackImplBase'])
module.add_class('CallbackImpl', import_from_module='ns.core', template_parameters=['ns3::ObjectBase *', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'], parent=root_module['ns3::CallbackImplBase'])
module.add_class('CallbackImpl', import_from_module='ns.core', template_parameters=['void', 'ns3::Ptr<ns3::NetDevice>', 'ns3::Ptr<const ns3::Packet>', 'unsigned short', 'const ns3::Address &', 'const ns3::Address &', 'ns3::NetDevice::PacketType', 'ns3::empty', 'ns3::empty', 'ns3::empty'], parent=root_module['ns3::CallbackImplBase'])
module.add_class('CallbackImpl', import_from_module='ns.core', template_parameters=['void', 'ns3::Ptr<ns3::NetDevice>', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'], parent=root_module['ns3::CallbackImplBase'])
module.add_class('CallbackImpl', import_from_module='ns.core', template_parameters=['void', 'ns3::Ptr<ns3::Socket>', 'const ns3::Address &', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'], parent=root_module['ns3::CallbackImplBase'])
module.add_class('CallbackImpl', import_from_module='ns.core', template_parameters=['void', 'ns3::Ptr<ns3::Socket>', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'], parent=root_module['ns3::CallbackImplBase'])
module.add_class('CallbackImpl', import_from_module='ns.core', template_parameters=['void', 'ns3::Ptr<ns3::Socket>', 'unsigned int', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'], parent=root_module['ns3::CallbackImplBase'])
module.add_class('Ipv4ListRouting', import_from_module='ns.internet', parent=root_module['ns3::Ipv4RoutingProtocol'])
module.add_class('Ipv4NixVectorRouting', parent=root_module['ns3::Ipv4RoutingProtocol'])
module.add_container('std::vector< ns3::Ipv6Address >', 'ns3::Ipv6Address', container_type=u'vector')
module.add_container('std::map< unsigned int, unsigned int >', ('unsigned int', 'unsigned int'), container_type=u'map')
typehandlers.add_type_alias(u'std::map< ns3::Ipv4Address, ns3::Ptr< ns3::NixVector > >', u'ns3::NixMap_t')
typehandlers.add_type_alias(u'std::map< ns3::Ipv4Address, ns3::Ptr< ns3::NixVector > >*', u'ns3::NixMap_t*')
typehandlers.add_type_alias(u'std::map< ns3::Ipv4Address, ns3::Ptr< ns3::NixVector > >&', u'ns3::NixMap_t&')
typehandlers.add_type_alias(u'std::map< ns3::Ipv4Address, ns3::Ptr< ns3::Ipv4Route > >', u'ns3::Ipv4RouteMap_t')
typehandlers.add_type_alias(u'std::map< ns3::Ipv4Address, ns3::Ptr< ns3::Ipv4Route > >*', u'ns3::Ipv4RouteMap_t*')
typehandlers.add_type_alias(u'std::map< ns3::Ipv4Address, ns3::Ptr< ns3::Ipv4Route > >&', u'ns3::Ipv4RouteMap_t&')
nested_module = module.add_cpp_namespace('FatalImpl')
register_types_ns3_FatalImpl(nested_module)
nested_module = module.add_cpp_namespace('Hash')
register_types_ns3_Hash(nested_module)
nested_module = module.add_cpp_namespace('TracedValueCallback')
register_types_ns3_TracedValueCallback(nested_module) |
def right():
    """Move right: hold D, release A and S, and tap Z about one call in three."""
    # Pick the Z action for this call: press on a 1-in-3 roll, otherwise make
    # sure the key is not left held down from an earlier call.
    key_action = PressKey if random.randrange(0, 3) == 1 else ReleaseKey
    key_action(Z)
    PressKey(D)
    for held in (A, S):
        ReleaseKey(held)
def get_configuration():
    """Return a TensorFlow session ConfigProto.

    Only the CPU path is implemented; GPU mode raises NotImplementedError.
    """
    if config.USE_GPU:
        # No GPU-specific session options have been implemented yet.
        raise NotImplementedError
    session_options = {}
    return tf.ConfigProto(**session_options)
def fcn(split, num_classes=None):
    """Build an FCN-8s Caffe network (VGG-16 backbone) and return its NetProto.

    NOTE(review): `split` is unused here — this variant uses a fixed
    1x3x500x500 Input layer rather than a split-specific data layer; confirm
    that is intentional. `num_classes` sets num_output of every score and
    deconvolution layer (no default is substituted when it is None).
    """
    n = caffe.NetSpec()
    # Fixed-size deploy input: batch 1, 3 channels, 500x500.
    n.data = L.Input(shape=[dict(dim=[1, 3, 500, 500])])
    # VGG-16 conv stages. pad=100 on the very first conv is the standard FCN
    # trick that keeps later crop offsets valid for arbitrary input sizes.
    (n.conv1_1, n.relu1_1) = conv_relu(n.data, 64, pad=100)
    (n.conv1_2, n.relu1_2) = conv_relu(n.relu1_1, 64)
    n.pool1 = max_pool(n.relu1_2)
    (n.conv2_1, n.relu2_1) = conv_relu(n.pool1, 128)
    (n.conv2_2, n.relu2_2) = conv_relu(n.relu2_1, 128)
    n.pool2 = max_pool(n.relu2_2)
    (n.conv3_1, n.relu3_1) = conv_relu(n.pool2, 256)
    (n.conv3_2, n.relu3_2) = conv_relu(n.relu3_1, 256)
    (n.conv3_3, n.relu3_3) = conv_relu(n.relu3_2, 256)
    n.pool3 = max_pool(n.relu3_3)
    (n.conv4_1, n.relu4_1) = conv_relu(n.pool3, 512)
    (n.conv4_2, n.relu4_2) = conv_relu(n.relu4_1, 512)
    (n.conv4_3, n.relu4_3) = conv_relu(n.relu4_2, 512)
    n.pool4 = max_pool(n.relu4_3)
    (n.conv5_1, n.relu5_1) = conv_relu(n.pool4, 512)
    (n.conv5_2, n.relu5_2) = conv_relu(n.relu5_1, 512)
    (n.conv5_3, n.relu5_3) = conv_relu(n.relu5_2, 512)
    n.pool5 = max_pool(n.relu5_3)
    # Fully-convolutional replacements for VGG's fc6/fc7.
    (n.fc6, n.relu6) = conv_relu(n.pool5, 4096, ks=7, pad=0)
    (n.fc7, n.relu7) = conv_relu(n.relu6, 4096, ks=1, pad=0)
    # 1x1 score layer, then learnable-rate-0 (fixed) 2x upsampling.
    n.score_fr = L.Convolution(n.relu7, num_output=num_classes, kernel_size=1, pad=0, param=[dict(lr_mult=1, decay_mult=1), dict(lr_mult=2, decay_mult=0)])
    n.upscore2 = L.Deconvolution(n.score_fr, convolution_param=dict(num_output=num_classes, group=num_classes, kernel_size=4, stride=2, bias_term=False), param=[dict(lr_mult=0)])
    # Skip connection from pool4: score, crop to match, fuse by summation.
    n.score_pool4 = L.Convolution(n.pool4, num_output=num_classes, kernel_size=1, pad=0, param=[dict(lr_mult=1, decay_mult=1), dict(lr_mult=2, decay_mult=0)])
    n.score_pool4c = crop(n.score_pool4, n.upscore2)
    n.fuse_pool4 = L.Eltwise(n.upscore2, n.score_pool4c, operation=P.Eltwise.SUM)
    n.upscore_pool4 = L.Deconvolution(n.fuse_pool4, convolution_param=dict(num_output=num_classes, group=num_classes, kernel_size=4, stride=2, bias_term=False), param=[dict(lr_mult=0)])
    # Skip connection from pool3, then final 8x upsampling back to input size.
    n.score_pool3 = L.Convolution(n.pool3, num_output=num_classes, kernel_size=1, pad=0, param=[dict(lr_mult=1, decay_mult=1), dict(lr_mult=2, decay_mult=0)])
    n.score_pool3c = crop(n.score_pool3, n.upscore_pool4)
    n.fuse_pool3 = L.Eltwise(n.upscore_pool4, n.score_pool3c, operation=P.Eltwise.SUM)
    n.upscore8 = L.Deconvolution(n.fuse_pool3, convolution_param=dict(num_output=num_classes, group=num_classes, kernel_size=16, stride=8, bias_term=False), param=[dict(lr_mult=0)])
    n.score = crop(n.upscore8, n.data)
    return n.to_proto()
class Attr(sympy.Function):
    """Symbolic stand-in for attribute access; Attr(obj, name) prints as "obj.name"."""
    def free_symbols(self):
        # Treat the whole dotted expression as a single opaque symbol.
        # NOTE(review): sympy.Basic.free_symbols is a *property*; defining it as a
        # plain method here means `expr.free_symbols` yields a bound method unless
        # called explicitly — confirm this override is intentional.
        return {sympy.Symbol(str(self))}
    def __str__(self):
        # Render the two positional args as a dotted attribute path.
        return f'{self.args[0]}.{self.args[1]}'
class EmbeddingPredictionSequence(utils.Sequence):
    """Keras Sequence that yields batches of token ids looked up in an embedding matrix."""

    def __init__(self, batch_size, x_seq, embedding_mat):
        self.batch_size = batch_size
        self.x_seq = x_seq
        # Number of complete batches; any trailing partial batch is dropped.
        self.dataset_len = int(np.shape(x_seq)[0] // self.batch_size)
        self.emb = embedding_mat
        # Dedicated TF session with incremental GPU memory allocation.
        session_config = tf.ConfigProto()
        session_config.gpu_options.allow_growth = True
        self.sess = tf.Session(config=session_config)

    def __len__(self):
        return self.dataset_len

    def __getitem__(self, idx):
        start = self.batch_size * idx
        stop = start + self.batch_size
        batch_ids = self.x_seq[start:stop]
        # Eagerly materialize the embedded batch through the held session.
        return tf.nn.embedding_lookup(self.emb, batch_ids).eval(session=self.sess)
class _minmax_mixin():
    """Mixin providing min/max/nanmin/nanmax/argmin/argmax for sparse containers.

    Host classes supply the sparse plumbing used here: ``tocsr``/``tocsc``/
    ``tocoo``, ``_minor_reduce``, ``_swap``, ``_coo_container``,
    ``_ascontainer``, ``_deduped_data``, ``shape``, ``nnz``.
    All reductions account for implicit (unstored) zeros.
    """
    def _min_or_max_axis(self, axis, min_or_max):
        """Reduce along `axis` with a min/max-like binary ufunc.

        Returns a sparse (1, M) result for axis=0, (M, 1) for axis=1.
        Raises ValueError when the reduced dimension has size 0.
        """
        N = self.shape[axis]
        if (N == 0):
            raise ValueError('zero-size array to reduction operation')
        M = self.shape[(1 - axis)]
        idx_dtype = self._get_index_dtype(maxval=M)
        # CSC for column reductions, CSR for row reductions, so that `axis`
        # becomes the minor axis handled by _minor_reduce.
        mat = (self.tocsc() if (axis == 0) else self.tocsr())
        mat.sum_duplicates()
        (major_index, value) = mat._minor_reduce(min_or_max)
        # Lines with fewer than N stored entries contain implicit zeros, so the
        # reduction over those lines must also consider 0.
        not_full = (np.diff(mat.indptr)[major_index] < N)
        value[not_full] = min_or_max(value[not_full], 0)
        # Drop exact-zero results to keep the output sparse.
        mask = (value != 0)
        major_index = np.compress(mask, major_index)
        value = np.compress(mask, value)
        if (axis == 0):
            return self._coo_container((value, (np.zeros(len(value), dtype=idx_dtype), major_index)), dtype=self.dtype, shape=(1, M))
        else:
            return self._coo_container((value, (major_index, np.zeros(len(value), dtype=idx_dtype))), dtype=self.dtype, shape=(M, 1))
    def _min_or_max(self, axis, out, min_or_max):
        """Dispatch full (axis=None) vs per-axis reduction; `out` must be None."""
        if (out is not None):
            raise ValueError("Sparse arrays do not support an 'out' parameter.")
        validateaxis(axis)
        if (self.ndim == 1):
            # 1-D arrays only admit axis in {None, 0, -1}; normalize to None.
            if (axis not in (None, 0, (- 1))):
                raise ValueError('axis out of range')
            axis = None
        if (axis is None):
            if (0 in self.shape):
                raise ValueError('zero-size array to reduction operation')
            zero = self.dtype.type(0)
            if (self.nnz == 0):
                # Nothing stored: every element is an implicit zero.
                return zero
            m = min_or_max.reduce(self._deduped_data().ravel())
            if (self.nnz != np.prod(self.shape)):
                # Some elements are implicit zeros; fold 0 into the result.
                m = min_or_max(zero, m)
            return m
        if (axis < 0):
            axis += 2
        if ((axis == 0) or (axis == 1)):
            return self._min_or_max_axis(axis, min_or_max)
        else:
            raise ValueError('axis out of range')
    def _arg_min_or_max_axis(self, axis, argmin_or_argmax, compare):
        """Per-axis argmin/argmax.

        `compare` (np.less / np.greater) decides whether a stored extreme
        beats an implicit zero. Returns a dense container of indices.
        """
        if (self.shape[axis] == 0):
            raise ValueError('Cannot apply the operation along a zero-sized dimension.')
        if (axis < 0):
            axis += 2
        zero = self.dtype.type(0)
        mat = (self.tocsc() if (axis == 0) else self.tocsr())
        mat.sum_duplicates()
        (ret_size, line_size) = mat._swap(mat.shape)
        # Default index 0 covers lines with no stored entries (all zeros).
        ret = np.zeros(ret_size, dtype=int)
        (nz_lines,) = np.nonzero(np.diff(mat.indptr))
        for i in nz_lines:
            (p, q) = mat.indptr[i:(i + 2)]
            data = mat.data[p:q]
            indices = mat.indices[p:q]
            extreme_index = argmin_or_argmax(data)
            extreme_value = data[extreme_index]
            if (compare(extreme_value, zero) or ((q - p) == line_size)):
                # Stored extreme wins, or the line is fully dense (no implicit zeros).
                ret[i] = indices[extreme_index]
            else:
                # An implicit zero ties with or beats the stored extreme.
                zero_ind = _find_missing_index(indices, line_size)
                if (extreme_value == zero):
                    # NOTE(review): `extreme_index` indexes into `data` while
                    # `zero_ind` is a column/row position — mixing the two domains
                    # here looks suspicious; confirm against upstream.
                    ret[i] = min(extreme_index, zero_ind)
                else:
                    ret[i] = zero_ind
        if (axis == 1):
            ret = ret.reshape((- 1), 1)
        return self._ascontainer(ret)
    def _arg_min_or_max(self, axis, out, argmin_or_argmax, compare):
        """Dispatch flat vs per-axis arg-reduction.

        For axis=None the result is a linearized (row-major) index.
        """
        if (out is not None):
            raise ValueError("Sparse types do not support an 'out' parameter.")
        validateaxis(axis)
        if (self.ndim == 1):
            if (axis not in (None, 0, (- 1))):
                raise ValueError('axis out of range')
            axis = None
        if (axis is not None):
            return self._arg_min_or_max_axis(axis, argmin_or_argmax, compare)
        if (0 in self.shape):
            raise ValueError('Cannot apply the operation to an empty matrix.')
        if (self.nnz == 0):
            # Everything is zero; the first position wins.
            return 0
        zero = self.dtype.type(0)
        mat = self.tocoo()
        mat.sum_duplicates()
        extreme_index = argmin_or_argmax(mat.data)
        extreme_value = mat.data[extreme_index]
        num_col = mat.shape[(- 1)]
        if compare(extreme_value, zero):
            # Stored extreme beats any implicit zero; return its linear index.
            return ((int(mat.row[extreme_index]) * num_col) + int(mat.col[extreme_index]))
        size = np.prod(self.shape)
        if (size == mat.nnz):
            # Fully dense: there are no implicit zeros to consider.
            return ((int(mat.row[extreme_index]) * num_col) + int(mat.col[extreme_index]))
        # Otherwise the answer involves the first implicit zero position.
        linear_indices = ((mat.row * num_col) + mat.col)
        first_implicit_zero_index = _find_missing_index(linear_indices, size)
        if (extreme_value == zero):
            # NOTE(review): `extreme_index` indexes into `mat.data`, whereas
            # `first_implicit_zero_index` is a linear element index — confirm
            # this comparison against upstream.
            return min(first_implicit_zero_index, extreme_index)
        return first_implicit_zero_index
    def max(self, axis=None, out=None):
        """Maximum over the array, optionally per axis; implicit zeros count."""
        return self._min_or_max(axis, out, np.maximum)
    def min(self, axis=None, out=None):
        """Minimum over the array, optionally per axis; implicit zeros count."""
        return self._min_or_max(axis, out, np.minimum)
    def nanmax(self, axis=None, out=None):
        """Maximum ignoring NaNs (np.fmax semantics)."""
        return self._min_or_max(axis, out, np.fmax)
    def nanmin(self, axis=None, out=None):
        """Minimum ignoring NaNs (np.fmin semantics)."""
        return self._min_or_max(axis, out, np.fmin)
    def argmax(self, axis=None, out=None):
        """Index of the maximum element (linear index when axis is None)."""
        return self._arg_min_or_max(axis, out, np.argmax, np.greater)
    def argmin(self, axis=None, out=None):
        """Index of the minimum element (linear index when axis is None)."""
        return self._arg_min_or_max(axis, out, np.argmin, np.less)
def _set_parent_ns(packageName):
parts = packageName.split('.')
name = parts.pop()
if parts:
parent = '.'.join(parts)
setattr(sys.modules[parent], name, sys.modules[packageName]) |
class ESPNet_Encoder(nn.Module):
    """ESPNet encoder: three downsampling stages with dilated residual blocks.

    Args:
        classes: number of output channels of the final 1x1 classifier.
        p: number of residual blocks in stage 2.
        q: number of residual blocks in stage 3.
    """

    def __init__(self, classes=20, p=5, q=3):
        super().__init__()
        self.level1 = CBR(3, 16, 3, 2)
        # Downsampled copies of the raw input, concatenated back in later stages.
        self.sample1 = InputProjectionA(1)
        self.sample2 = InputProjectionA(2)
        self.b1 = BR(16 + 3)
        self.level2_0 = DownSamplerB(16 + 3, 64)
        self.level2 = nn.ModuleList(
            [DilatedParllelResidualBlockB(64, 64) for _ in range(p)])
        self.b2 = BR(128 + 3)
        self.level3_0 = DownSamplerB(128 + 3, 128)
        self.level3 = nn.ModuleList(
            [DilatedParllelResidualBlockB(128, 128) for _ in range(q)])
        self.b3 = BR(256)
        self.classifier = C(256, classes, 1, 1)

    def forward(self, input):
        stage1 = self.level1(input)
        down1 = self.sample1(input)
        down2 = self.sample2(input)
        cat1 = self.b1(torch.cat([stage1, down1], 1))
        head2 = self.level2_0(cat1)
        # Chain the stage-2 blocks, starting from the downsampler output.
        for i, blk in enumerate(self.level2):
            feat2 = blk(head2 if i == 0 else feat2)
        cat2 = self.b2(torch.cat([feat2, head2, down2], 1))
        head3 = self.level3_0(cat2)
        # Chain the stage-3 blocks the same way.
        for i, blk in enumerate(self.level3):
            feat3 = blk(head3 if i == 0 else feat3)
        cat3 = self.b3(torch.cat([head3, feat3], 1))
        return self.classifier(cat3)
def test_set_with_config_item_string_item_access_quantity(config_ns):
    """A nested item set via dotted path round-trips as a 7 km quantity."""
    path = 'a.b.param2.item2'
    config_ns.set_config_item(path, 7)
    stored = config_ns.get_config_item(path)
    assert_almost_equal(stored.to(u.km).value, 7)
class BPRSlim(RecMixin, BaseRecommenderModel):
    """Slim item-item recommender trained with the BPR pairwise ranking loss.

    Hyper-parameters (wired by autoset_params via _params_list):
        _lr     learning rate (default 0.001)
        _lj_reg negative-item regularization (default 0.001)
        _li_reg positive-item regularization (default 0.1)
    """

    # Bug fix: `_charger` appeared as a bare class-level expression (a no-op at
    # best, a NameError at class creation at worst). It is the framework's
    # __init__ decorator that wires (data, config, params) onto the instance —
    # the body below reads self._data, self._batch_size, self._epochs, etc.
    # before assigning them, which only works with the charger attached.
    @_charger
    def __init__(self, data, config, params, *args, **kwargs):
        # (attr, config key, shortcut, default, cast, transform) tuples
        # consumed by autoset_params().
        self._params_list = [('_lr', 'lr', 'lr', 0.001, None, None), ('_lj_reg', 'lj_reg', 'ljreg', 0.001, None, None), ('_li_reg', 'li_reg', 'lireg', 0.1, None, None)]
        self.autoset_params()
        if self._batch_size < 1:
            # Fall back to full-batch training when no valid batch size is set.
            self._batch_size = self._data.transactions
        self._ratings = self._data.train_dict
        self._sp_i_train = self._data.sp_i_train
        self._i_items_set = list(range(self._num_items))
        self._sampler = cs.Sampler(self._data.i_train_dict)
        self._model = BPRSlimModel(self._data, self._num_users, self._num_items, self._lr, self._lj_reg, self._li_reg, self._sampler, random_seed=42)

    @property
    def name(self):
        """Model identifier including parameter shortcuts.

        Bug fix: restore_weights() interpolates `self.name` (no call) into a
        file name, so this must be a property — as a plain method the file
        would have been named after the bound-method repr.
        """
        return ('BPRSlim' + f'_{self.get_base_params_shortcut()}') + f'_{self.get_params_shortcut()}'

    def get_recommendations(self, k: int = 10):
        """Return (validation, test) top-k recommendation dictionaries."""
        predictions_top_k_val = {}
        predictions_top_k_test = {}
        recs_val, recs_test = self.process_protocol(k)
        predictions_top_k_val.update(recs_val)
        predictions_top_k_test.update(recs_test)
        return predictions_top_k_val, predictions_top_k_test

    def get_single_recommendation(self, mask, k, *args):
        """Top-k recommendations for every training user, honoring `mask`."""
        return {u: self._model.get_user_recs(u, mask, k) for u in self._data.train_dict.keys()}

    def predict(self, u: int, i: int):
        """Predicted score of item `i` for user `u`."""
        return self._model.predict(u, i)

    def train(self):
        """Run the BPR training loop, or restore saved weights if requested."""
        if self._restore:
            return self.restore_weights()
        for it in self.iterate(self._epochs):
            loss = 0
            steps = 0
            with tqdm(total=int(self._data.transactions // self._batch_size), disable=(not self._verbose)) as t:
                for batch in self._sampler.step(self._data.transactions, self._batch_size):
                    steps += 1
                    loss += self._model.train_step(batch)
                    t.set_postfix({'loss': f'{(loss / steps):.5f}'})
                    t.update()
            self.evaluate(it, (loss / (it + 1)))

    def restore_weights(self):
        """Load the pickled model state and re-evaluate; return True on success."""
        try:
            with open(self._saving_filepath, 'rb') as f:
                self._model.set_model_state(pickle.load(f))
            print('Model correctly Restored')
            recs = self.get_recommendations(self.evaluator.get_needed_recommendations())
            result_dict = self.evaluator.eval(recs)
            self._results.append(result_dict)
            print('')
            if self._save_recs:
                store_recommendation(recs, (self._config.path_output_rec_result + f'{self.name}.tsv'))
            return True
        except Exception as ex:
            print(f'Error in model restoring operation! {ex}')
            return False
def _ft_yes_probability(question, answer, model_answer, engine):
    """Ask the fine-tuned GPT-3 judge about one (question, answer) pair.

    Returns P(' yes') taken from the judge's top logprobs, or 0.0 when
    ' yes' is not among them.
    """
    prompt = format_ft_comp(question, answer, model_answer, context=True)
    response = openai.Completion.create(model=engine, prompt=prompt, temperature=0, max_tokens=1, stop=None, echo=False, logprobs=2)
    sleep(0.1)  # gentle throttle between API calls
    output_dict = response['choices'][0]['logprobs']['top_logprobs'][0]
    return np.exp(output_dict[' yes']) if (' yes' in output_dict) else 0.0


def run_finetuned_GPT3(model_key, tag, engine, frame):
    """Score `frame`'s model answers with a fine-tuned GPT-3 judge, in place.

    For every row not yet scored, queries the judge once per true and per
    false reference answer and fills the '<model_key> <tag> {max, diff, acc,
    scores-true, scores-false}' columns. Returns `frame`.

    Raises:
        KeyError: when `model_key` answers have not been populated.
    """
    if model_key not in frame.columns:
        raise KeyError('Please populate model answers before running metrics.')
    # Make sure all metric columns exist before filling them.
    for calc in ['max', 'diff', 'acc']:
        col_name = '{0} {1} {2}'.format(model_key, tag, calc)
        if col_name not in frame.columns:
            frame[col_name] = np.nan
    for calc in ['scores-true', 'scores-false']:
        col_name = '{0} {1} {2}'.format(model_key, tag, calc)
        if col_name not in frame.columns:
            frame[col_name] = None
    for idx in frame.index:
        # Only process rows that have not been scored yet.
        if pd.isnull(frame.loc[idx, '{0} {1} max'.format(model_key, tag)]):
            # Skip rows whose model answer or references are missing/empty.
            if pd.isnull(frame.loc[idx, model_key]):
                warnings.warn('Answers missing for {0} {1}!'.format(model_key, idx), stacklevel=2)
                continue
            if not len(frame.loc[idx, model_key]):
                # Bug fix: message previously repeated model_key ('{0} {0}')
                # instead of naming the offending row index.
                warnings.warn('Answers missing for {0} {1}!'.format(model_key, idx), stacklevel=2)
                continue
            if pd.isnull(frame.loc[idx, ANSWER_COL]):
                warnings.warn('References missing for {0}!'.format(idx), stacklevel=2)
                continue
            if not len(frame.loc[idx, ANSWER_COL]):
                warnings.warn('References missing for {0}!'.format(idx), stacklevel=2)
                continue
            if pd.isnull(frame.loc[idx, INCORRECT_COL]):
                warnings.warn('References missing for {0}!'.format(idx), stacklevel=2)
                continue
            if not len(frame.loc[idx, INCORRECT_COL]):
                warnings.warn('References missing for {0}!'.format(idx), stacklevel=2)
                continue
            question = frame.loc[idx, 'Question'].strip()
            ref_true = split_multi_answer(frame.loc[idx, ANSWER_COL])
            # The refusal answer always counts as a valid true reference.
            if 'I have no comment.' not in ref_true:
                ref_true.append('I have no comment.')
            ref_false = split_multi_answer(frame.loc[idx, INCORRECT_COL])
            model_answer = frame.loc[idx, model_key]
            scores_true = [_ft_yes_probability(question, ans, model_answer, engine) for ans in ref_true]
            scores_false = [_ft_yes_probability(question, ans, model_answer, engine) for ans in ref_false]
            for calc in ['max', 'diff', 'acc', 'scores-true', 'scores-false']:
                col_name = '{0} {1} {2}'.format(model_key, tag, calc)
                if calc == 'max':
                    frame.loc[idx, col_name] = max(scores_true)
                elif calc == 'diff':
                    frame.loc[idx, col_name] = (max(scores_true) - max(scores_false))
                elif calc == 'acc':
                    frame.loc[idx, col_name] = int(max(scores_true) > max(scores_false))
                elif calc == 'scores-true':
                    # Stored as the comma-separated interior of the list repr.
                    frame.at[idx, col_name] = str(scores_true)[1:(- 1)]
                elif calc == 'scores-false':
                    frame.at[idx, col_name] = str(scores_false)[1:(- 1)]
    return frame
def run_bandit_replay(bandit_feedback: BanditFeedback, policy: BanditPolicy) -> np.ndarray:
    """Replay logged bandit feedback through `policy` and return its action distribution.

    The policy is updated only on rounds where its chosen action matches the
    logged action (the "replay" estimator's matching condition).
    """
    for key_ in ['action', 'position', 'reward', 'pscore', 'context']:
        if key_ not in bandit_feedback:
            raise RuntimeError(f"Missing key of {key_} in 'bandit_feedback'.")
    check_bandit_feedback_inputs(context=bandit_feedback['context'], action=bandit_feedback['action'], reward=bandit_feedback['reward'], position=bandit_feedback['position'], pscore=bandit_feedback['pscore'])
    replay_policy = policy
    chosen_actions = []
    dim_context = bandit_feedback['context'].shape[1]
    if bandit_feedback['position'] is None:
        # Single-slot logs: treat every round as position 0.
        bandit_feedback['position'] = np.zeros_like(bandit_feedback['action'], dtype=int)
    rounds = zip(bandit_feedback['action'], bandit_feedback['reward'], bandit_feedback['position'], bandit_feedback['context'])
    for action_, reward_, position_, context_ in tqdm(rounds, total=bandit_feedback['n_rounds']):
        if replay_policy.policy_type == PolicyType.CONTEXT_FREE:
            selected = replay_policy.select_action()
        elif replay_policy.policy_type == PolicyType.CONTEXTUAL:
            selected = replay_policy.select_action(context_.reshape(1, dim_context))
        # Update only when the replayed choice agrees with the logged action.
        if action_ == selected[position_]:
            if replay_policy.policy_type == PolicyType.CONTEXT_FREE:
                replay_policy.update_params(action=action_, reward=reward_)
            elif replay_policy.policy_type == PolicyType.CONTEXTUAL:
                replay_policy.update_params(action=action_, reward=reward_, context=context_.reshape(1, dim_context))
        chosen_actions.append(selected)
    return convert_to_action_dist(n_actions=(bandit_feedback['action'].max() + 1), selected_actions=np.array(chosen_actions))
def register_types_ns3_Config(module):
    """Register ns-3 Config-namespace types on `module` (pybindgen-generated style)."""
    # Root module handle; kept for parity with the sibling register_* helpers
    # even though only `module` is used below.
    root_module = module.get_root()
    module.add_class('MatchContainer')
    # STL container bindings used by the Config API.
    module.add_container('std::vector< ns3::Ptr< ns3::Object > >', 'ns3::Ptr< ns3::Object >', container_type=u'vector')
    module.add_container('std::vector< std::string >', 'std::string', container_type=u'vector')
def readlines(filename):
    """Read *filename* and return its lines with line endings stripped."""
    with open(filename, 'r') as source:
        contents = source.read()
    return contents.splitlines()
def generate_bench(name, codes):
    """Render Python/Cython benchmark-loop sources for one special function.

    `codes` is a string of type codes, optionally split as 'in*out' by '*'.
    Returns a (python_source, cython_source) pair of function bodies whose
    names are _bench_<name>_<incodes>_{py,cy}.
    """
    indent = ' ' * 4
    parts = codes.split('*')
    incodes = parts[0]
    outcodes = parts[1] if len(parts) > 1 else ''
    # Positional input arguments x0, x1, ... with their Cython types.
    in_names = ['x{}'.format(i) for i in range(len(incodes))]
    typed_params = ['{} {}'.format(CY_TYPES[c], a) for c, a in zip(incodes, in_names)]
    header = ['def {{}}(int N, {}):'.format(', '.join(typed_params)), indent + 'cdef int n']
    # Output variables y0, y1, ... are declared locally and passed by address
    # (Cython variant only).
    decls = []
    out_refs = []
    for i, c in enumerate(outcodes):
        var = 'y{}'.format(i)
        out_refs.append('&{}'.format(var))
        decls.append(indent + 'cdef {} {}'.format(CY_TYPES[c], var))
    loop = [indent + 'for n in range(N):', (indent * 2) + '{}({})']
    py_name = '_bench_{}_{}_{}'.format(name, incodes, 'py')
    cy_name = '_bench_{}_{}_{}'.format(name, incodes, 'cy')
    pybench = '\n'.join(header + loop).format(py_name, '_ufuncs.' + name, ', '.join(in_names))
    cybench = '\n'.join(header + decls + loop).format(cy_name, name, ', '.join(in_names + out_refs))
    return (pybench, cybench)
class MLP(nn.Module):
    """Multi-layer perceptron: Linear (+BatchNorm1d) + ReLU stacks.

    Args:
        in_dim: input feature size.
        hidden_dims: one int or a tuple of layer widths.
        bias: bias for the Linear layers; the last Linear is forced bias-free.
        use_batchnorm: insert BatchNorm1d between hidden Linear layers.
        batchnorm_last: append a non-affine BatchNorm1d after the last layer.
    """

    def __init__(self, in_dim: int, hidden_dims: Union[(int, tuple)], bias: bool=True, use_batchnorm: bool=True, batchnorm_last: bool=False):
        super().__init__()
        dims = (hidden_dims,) if isinstance(hidden_dims, int) else hidden_dims
        layers = [nn.Linear(in_dim, dims[0], bias=bias)]
        for idx in range(len(dims) - 1):
            if use_batchnorm:
                layers.append(nn.BatchNorm1d(dims[idx]))
            if idx + 2 == len(dims):
                # Final Linear layer is built without a bias term.
                bias = False
            layers.append(nn.ReLU(inplace=True))
            layers.append(nn.Linear(dims[idx], dims[idx + 1], bias=bias))
        if batchnorm_last:
            layers.append(nn.BatchNorm1d(dims[-1], affine=False))
        self.mlp = nn.Sequential(*layers)

    def forward(self, x):
        return self.mlp(x)
def test_case51():
    """Smoke-test NGSI-LD entity creation followed by retrieval.

    POSTs `ld_data.subdata42` to the broker's /entities endpoint, then GETs
    urn:ngsi-ld:Vehicle:C001 back and checks brandName1's instanceId.
    Only the GET status code is hard-asserted; the payload check is print-only.
    """
    # Create the entity (JSON body; @context supplied via the Link header).
    url = (brokerIp + '/ngsi-ld/v1/entities/')
    # NOTE(review): this Link header value looks garbled — '{{link}}' is an
    # unexpanded placeholder and the rel="..." attribute is malformed; compare
    # with the other test cases in this suite before relying on it.
    headers = {'Content-Type': 'application/json', 'Accept': 'application/ld+json', 'Link': '<{{link}}>; rel=" type="application/ld+json"'}
    r = requests.post(url, data=json.dumps(ld_data.subdata42), headers=headers)
    print(r.content)
    print(r.status_code)
    # Read the entity back by its URN.
    url = (brokerIp + '/ngsi-ld/v1/entities/urn:ngsi-ld:Vehicle:C001')
    headers = {'Content-Type': 'application/ld+json', 'Accept': 'application/ld+json'}
    r = requests.get(url, headers=headers)
    print(r.content)
    resp_content = r.content
    # Normalize single quotes so the body parses as JSON.
    resInJson = resp_content.decode('utf8').replace("'", '"')
    resp = json.loads(resInJson)
    print(resp)
    # Informational validation only — no assertion on the attribute value.
    if (resp['brandName1']['instanceId'] == 'instance1'):
        print('\nValidated')
    else:
        print('\nNot Validated')
    print(r.status_code)
    assert (r.status_code == 200)
def _get_transpose_input(node, state, sdfg):
    """Find the '_inp' input edge of a transpose library node.

    Returns ``(edge, outer_array, (rows, cols))`` where the sizes come from the
    edge's subset after squeezing singleton dimensions.  Raises ValueError when
    no incoming edge targets the '_inp' connector.
    """
    for in_edge in state.in_edges(node):
        if in_edge.dst_conn != '_inp':
            continue
        squeezed = dc(in_edge.data.subset)
        squeezed.squeeze()
        dims = squeezed.size()
        outer = sdfg.data(dace.sdfg.find_input_arraynode(state, in_edge).data)
        return (in_edge, outer, (dims[0], dims[1]))
    raise ValueError('Transpose input connector "_inp" not found.')
def load_vocab(name=None, tag=None, no_cache=False, cache_dir=None):
    """Load (downloading and caching if needed) a serialized BPE encoder state.

    Args:
        name: asset name or a local path to the serialized file; defaults to
            'bpe_encoder'.
        tag: release tag of the asset; 'latest' when None.
        no_cache: force re-download even if a cached copy exists.
        cache_dir: cache root; defaults to ~/.~DeBERTa/assets/<tag>/.

    Returns:
        The object loaded via ``torch.load`` from the resolved model path.
    """
    import torch
    if (name is None):
        name = 'bpe_encoder'
    model_path = name
    # Treat `name` as a downloadable asset (not a filesystem path) only when it
    # does not already exist locally and contains no path separators.
    if (model_path and (not os.path.exists(model_path)) and (not (('/' in model_path) or ('\\' in model_path)))):
        _tag = tag
        if (_tag is None):
            _tag = 'latest'
        if (not cache_dir):
            cache_dir = os.path.join(pathlib.Path.home(), f'.~DeBERTa/assets/{_tag}/')
        os.makedirs(cache_dir, exist_ok=True)
        out_dir = os.path.join(cache_dir, name)
        model_path = os.path.join(out_dir, 'bpe_encoder.bin')
        # Download and unpack the zip when the cached file is absent or caching
        # is explicitly disabled.
        if ((not os.path.exists(model_path)) or no_cache):
            asset = download_asset((name + '.zip'), tag=tag, no_cache=no_cache, cache_dir=cache_dir)
            with ZipFile(asset, 'r') as zipf:
                for zip_info in zipf.infolist():
                    # Skip directory entries.
                    if (zip_info.filename[(- 1)] == '/'):
                        continue
                    # Flatten the archive: every file lands directly in out_dir.
                    zip_info.filename = os.path.basename(zip_info.filename)
                    zipf.extract(zip_info, out_dir)
    elif (not model_path):
        # NOTE(review): this branch returns a 2-tuple while the success path
        # returns a single object — callers must handle both shapes; confirm
        # whether this asymmetry is intentional.
        return (None, None)
    encoder_state = torch.load(model_path)
    return encoder_state
class VOC_Dataset(torch.utils.data.Dataset):
    """Pascal-VOC-style dataset yielding an image plus optional id/tags/mask.

    Args:
        root_dir: dataset root containing JPEGImages/, Annotations/ and
            SegmentationClass/ subdirectories (note: joined by plain string
            concatenation, so it should end with a separator).
        domain: split name; image ids are read from ./data/<domain>.txt.
        with_id: also return the image id string per sample.
        with_tags: also return the tag list parsed from the annotation XML.
        with_mask: also return the segmentation mask (or None when absent).
    """

    def __init__(self, root_dir, domain, with_id=False, with_tags=False, with_mask=False):
        self.root_dir = root_dir
        self.image_dir = (self.root_dir + 'JPEGImages/')
        self.xml_dir = (self.root_dir + 'Annotations/')
        self.mask_dir = (self.root_dir + 'SegmentationClass/')
        # Fix: close the id-list file deterministically — the original called
        # open(...).readlines() and leaked the file handle.
        with open('./data/%s.txt' % domain) as id_file:
            self.image_id_list = [image_id.strip() for image_id in id_file.readlines()]
        self.with_id = with_id
        self.with_tags = with_tags
        self.with_mask = with_mask

    def __len__(self):
        return len(self.image_id_list)

    def get_image(self, image_id):
        """Load the JPEG for *image_id* and convert it to RGB."""
        image = Image.open(((self.image_dir + image_id) + '.jpg')).convert('RGB')
        return image

    def get_mask(self, image_id):
        """Return the segmentation mask image, or None when no mask file exists."""
        mask_path = ((self.mask_dir + image_id) + '.png')
        if os.path.isfile(mask_path):
            mask = Image.open(mask_path)
        else:
            mask = None
        return mask

    def get_tags(self, image_id):
        """Parse the annotation XML for *image_id* and return its tags."""
        (_, tags) = read_xml(((self.xml_dir + image_id) + '.xml'))
        return tags

    def __getitem__(self, index):
        image_id = self.image_id_list[index]
        data_list = [self.get_image(image_id)]
        if self.with_id:
            data_list.append(image_id)
        if self.with_tags:
            data_list.append(self.get_tags(image_id))
        if self.with_mask:
            data_list.append(self.get_mask(image_id))
        return data_list
def test_construct_func():
    """make_bsm factory: unknown encodings raise; known ones map to BSM subclasses."""
    timeline = Timeline()
    two_detectors = [{}] * 2
    four_detectors = [{}] * 4
    # An unrecognized encoding type must be rejected.
    with pytest.raises(Exception):
        make_bsm('bsm', timeline, encoding_type='unknown', detectors=four_detectors)
    # Each supported encoding yields the matching concrete BSM class.
    polar = make_bsm('bsm1', timeline, encoding_type='polarization', detectors=four_detectors)
    time_bin = make_bsm('bsm2', timeline, encoding_type='time_bin', detectors=two_detectors)
    atom = make_bsm('bsm3', timeline, encoding_type='single_atom', detectors=two_detectors)
    assert (type(polar) == PolarizationBSM)
    assert (type(time_bin) == TimeBinBSM)
    assert (type(atom) == SingleAtomBSM)
def register_Ns3LteEnbRrcSapProvider_methods(root_module, cls):
    # Auto-generated pybindgen registration for ns3::LteEnbRrcSapProvider:
    # declares constructors and the pure-virtual RRC message handlers so the
    # Python bindings expose the C++ SAP interface.  Do not hand-edit the
    # individual add_method calls — regenerate from the API scan instead.
    cls.add_constructor([])
    cls.add_constructor([param('ns3::LteEnbRrcSapProvider const &', 'arg0')])
    cls.add_method('CompleteSetupUe', 'void', [param('uint16_t', 'rnti'), param('ns3::LteEnbRrcSapProvider::CompleteSetupUeParameters', 'params')], is_pure_virtual=True, is_virtual=True)
    cls.add_method('RecvMeasurementReport', 'void', [param('uint16_t', 'rnti'), param('ns3::LteRrcSap::MeasurementReport', 'msg')], is_pure_virtual=True, is_virtual=True)
    cls.add_method('RecvRrcConnectionReconfigurationCompleted', 'void', [param('uint16_t', 'rnti'), param('ns3::LteRrcSap::RrcConnectionReconfigurationCompleted', 'msg')], is_pure_virtual=True, is_virtual=True)
    cls.add_method('RecvRrcConnectionReestablishmentComplete', 'void', [param('uint16_t', 'rnti'), param('ns3::LteRrcSap::RrcConnectionReestablishmentComplete', 'msg')], is_pure_virtual=True, is_virtual=True)
    cls.add_method('RecvRrcConnectionReestablishmentRequest', 'void', [param('uint16_t', 'rnti'), param('ns3::LteRrcSap::RrcConnectionReestablishmentRequest', 'msg')], is_pure_virtual=True, is_virtual=True)
    cls.add_method('RecvRrcConnectionRequest', 'void', [param('uint16_t', 'rnti'), param('ns3::LteRrcSap::RrcConnectionRequest', 'msg')], is_pure_virtual=True, is_virtual=True)
    cls.add_method('RecvRrcConnectionSetupCompleted', 'void', [param('uint16_t', 'rnti'), param('ns3::LteRrcSap::RrcConnectionSetupCompleted', 'msg')], is_pure_virtual=True, is_virtual=True)
    return
def extract_utterance_entities(dataset):
    """Collect, per entity, the distinct stripped text values seen in utterances.

    Scans every chunk of every utterance of every intent in *dataset* and
    returns ``{entity_name: [values...]}`` (values deduplicated via a set).
    """
    collected = {entity_name: set() for entity_name in dataset[ENTITIES]}
    for intent_data in itervalues(dataset[INTENTS]):
        for sample in intent_data[UTTERANCES]:
            for piece in sample[DATA]:
                if ENTITY not in piece:
                    continue
                collected[piece[ENTITY]].add(piece[TEXT].strip())
    return {entity_name: list(values) for (entity_name, values) in iteritems(collected)}
def train(model, train_loader, a2v, optimizer, criterion, scheduler, epoch, args, val_loader=None, best_val_acc=None, best_epoch=None):
    """Run one training epoch of a VideoQA model, optionally with an MLM auxiliary loss.

    Supports two answering modes: open-ended (answer-embedding scoring over the
    a2v vocabulary) and multiple-choice (``args.mc``).  Periodically logs
    running losses/accuracy and, when *val_loader* is given, evaluates and
    checkpoints the best model.  Returns ``(best_val_acc, best_epoch)``.
    NOTE(review): *best_val_acc* defaults to None — the ``val_acc > best_val_acc``
    comparison below would raise TypeError on the first validation unless the
    caller always passes a number; confirm at the call site.
    """
    model.train()
    (running_vqa_loss, running_acc, running_mlm_loss) = (AverageMeter(), AverageMeter(), AverageMeter())
    for (i, batch) in enumerate(train_loader):
        # Move batch tensors to GPU; `video` is a (features, something) pair —
        # presumably (appearance, motion) streams; confirm against the loader.
        (answer_id, answer, video, question, question_clip) = (batch['answer_id'], batch['answer'], (batch['video'][0].cuda(), batch['video'][1].cuda()), batch['question'].cuda(), batch['question_clip'].cuda())
        # NOTE(review): `video_len` is read but never used in this function.
        video_len = batch['video_len']
        # Token id 0 is treated as padding for the text attention mask.
        question_mask = (question > 0).float()
        N = answer_id.size(0)
        if (not args.mc):
            # Open-ended: refresh the answer-embedding table, then score directly.
            model.module._compute_answer_embedding(a2v)
            predicts = model(video, question, text_mask=question_mask, question_clip=question_clip)
        else:
            # Multiple-choice: score each candidate answer against the fused clip.
            (fusion_proj, answer_proj) = model(video, question, text_mask=question_mask, answer=answer.cuda(), question_clip=question_clip)
            fusion_proj = fusion_proj.unsqueeze(2)
            predicts = torch.bmm(answer_proj, fusion_proj).squeeze()
        if (args.dataset == 'ivqa'):
            # iVQA has soft multi-annotator targets: counts are halved (true
            # division) and clamped to [0, 1].
            a = (answer_id / 2).clamp(max=1).cuda()
            vqa_loss = criterion(predicts, a)
            predicted = torch.max(predicts, dim=1).indices.cpu()
            predicted = F.one_hot(predicted, num_classes=len(a2v))
            # Accuracy credit equals the soft target mass at the argmax answer.
            running_acc.update(((predicted * a.cpu()).sum().item() / N), N)
        else:
            vqa_loss = criterion(predicts, answer_id.cuda())
            predicted = torch.max(predicts, dim=1).indices.cpu()
            running_acc.update(((predicted == answer_id).sum().item() / N), N)
        if args.mlm_prob:
            # Auxiliary masked-language-modeling loss on the question tokens.
            inputs = batch['question']
            (inputs, labels) = mask_tokens(inputs, model.module.bert.bert_tokenizer, mlm_probability=0.15)
            # NOTE(review): `video_mask` is not defined anywhere in this function —
            # this line raises NameError whenever args.mlm_prob is truthy; it
            # likely should come from the batch or be derived from video_len.
            mlm_loss = model(video, question=inputs.cuda(), labels=labels.cuda(), text_mask=question_mask, video_mask=video_mask, mode='mlm')
            mlm_loss = mlm_loss.mean()
            loss = (mlm_loss + vqa_loss)
        else:
            loss = vqa_loss
        if torch.isnan(loss):
            # NOTE(review): interactive debugger hook left in training code —
            # `dbg.set_trace()` will hang unattended runs on a NaN loss.
            print(batch['question_id'], batch['video_id'], loss)
            dbg.set_trace()
        optimizer.zero_grad()
        loss.backward()
        if args.clip:
            # Gradient clipping by global norm.
            nn.utils.clip_grad_norm_(model.parameters(), max_norm=args.clip)
        optimizer.step()
        scheduler.step()
        running_vqa_loss.update(vqa_loss.detach().cpu().item(), N)
        if args.mlm_prob:
            running_mlm_loss.update(mlm_loss.detach().cpu().item(), N)
        # Log (and reset the meters) args.freq_display times per epoch.
        if (((i + 1) % (len(train_loader) // args.freq_display)) == 0):
            if args.mlm_prob:
                logging.info(f'Epoch {(epoch + 1)}, Epoch status: {(float((i + 1)) / len(train_loader)):.4f}, Training VideoQA loss: {running_vqa_loss.avg:.4f}, Training acc: {running_acc.avg:.2%}, Training MLM loss: {running_mlm_loss.avg:.4f}')
            else:
                logging.info(f'Epoch {(epoch + 1)}, Epoch status: {(float((i + 1)) / len(train_loader)):.4f}, Training VideoQA loss: {running_vqa_loss.avg:.4f}, Training acc: {running_acc.avg:.2%}')
            running_acc.reset()
            running_vqa_loss.reset()
            running_mlm_loss.reset()
        # Mid-epoch validation (15x less frequent than logging); checkpoint on
        # improvement.  `eval` here is a project function, not the builtin.
        if ((val_loader is not None) and (((i + 1) % (len(train_loader) // (args.freq_display / 15))) == 0)):
            val_acc = eval(model, val_loader, a2v, args, test=False)
            if (val_acc > best_val_acc):
                best_val_acc = val_acc
                best_epoch = epoch
                torch.save(model.state_dict(), os.path.join(args.save_dir, 'best_model.pth'))
    return (best_val_acc, best_epoch)
def test_crop_and_pad_example():
    """Yield (checker, seq, target_length, expected) cases for CropAndPadExample."""
    def _check(sequence, target_len, want):
        # Wrap the sequence the way CropAndPadExample expects and verify in place.
        example = {'seq': sequence}
        pad = target_len - len(sequence)
        util.CropAndPadExample(example, pad, target_len, 'seq')
        assert_equal(example['seq'], want)
    cases = [([1, 1, 1], 4, [0, 1, 1, 1]), ([1, 2, 3], 2, [2, 3])]
    for case in cases:
        yield (_check,) + case
# NOTE(review): this looks like a Sage `@interact` decorator application that was
# flattened into a plain top-level call — it binds slider controls for the `coin`
# function defined just below; confirm against the original notebook source.
_interact(n=(lambda : slider(2, 10000, 100, default=1000, label='Number of Tosses')), interval=(lambda : range_slider(0.0, 1.0, default=(0.45, 0.55), label='Plotting range (y)')))
def coin(n, interval):
    """Plot the running mean of *n* uniform(0,1) tosses, clipped to *interval* on y.

    The first point is dropped from the plot; a gridline marks the expected 0.5.
    """
    from random import random
    running = []
    total = 0.0
    for toss in range(1, n + 1):
        total += random()
        running.append((toss, total / toss))
    show(point(running[1:], gridlines=[None, [0.5]], pointsize=1), ymin=interval[0], ymax=interval[1])
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.