code
stringlengths
101
5.91M
class EquivariantDipoleMoment(EquivariantScalar):
    """Output head that predicts a molecular dipole moment.

    Extends ``EquivariantScalar``: atom positions are shifted to the
    mass-weighted center of each molecule before scalar and vector
    features are combined; the final reduction returns a vector norm.
    """

    def __init__(self, hidden_channels, activation='silu'):
        super(EquivariantDipoleMoment, self).__init__(hidden_channels, activation, allow_prior_model=False)
        # Per-element masses kept as a buffer: moved across devices with
        # the module, but never trained.
        atomic_mass = torch.from_numpy(ase.data.atomic_masses).float()
        self.register_buffer('atomic_mass', atomic_mass)

    def pre_reduce(self, x, v, z, pos, batch):
        for layer in self.output_network:
            x, v = layer(x, v)
        # Mass-weighted center of each molecule in the batch.
        mass = self.atomic_mass[z].view(-1, 1)
        c = scatter(mass * pos, batch, dim=0) / scatter(mass, batch, dim=0)
        x = x * (pos - c[batch])
        return x + v.squeeze()

    def post_reduce(self, x):
        # Magnitude of the aggregated per-molecule vector.
        return torch.norm(x, dim=-1, keepdim=True)
def get_runner(experiment, options=None):
    """Look up a runner configuration from ``runners.json``.

    Args:
        experiment: Top-level key in the JSON mapping.
        options: Optional nested key; when given, the sub-entry is returned.

    Returns:
        ``runners[experiment]`` or ``runners[experiment][options]``.

    Raises:
        KeyError: If the experiment (or options) key is absent.
    """
    # Context manager closes the handle promptly; the original
    # `json.load(open(...))` left closing to the garbage collector.
    with open('runners.json', 'r') as fp:
        runners = json.load(fp)
    return runners[experiment][options] if options is not None else runners[experiment]
def main():
    """Generate attention-intervention figures for WinoBias and WinoGender."""
    sns.set_context('paper')
    sns.set_style('white')
    gpt2_versions = ['distilgpt2', 'gpt2', 'gpt2-medium', 'gpt2-large', 'gpt2-xl']
    filter_modes = ['filtered', 'unfiltered']
    # WinoBias: one result file per (model, filter, split); missing files
    # are reported and skipped rather than treated as errors.
    for version in gpt2_versions:
        for filter_mode in filter_modes:
            for split in ('dev', 'test'):
                fname = f'winobias_data/attention_intervention_{version}_{filter_mode}_{split}.json'
                if not os.path.exists(fname):
                    print('File does not exist:', fname)
                    continue
                with open(fname) as f:
                    data = json.load(f)
                save_figures(data, 'winobias', version, filter_mode, split)
    # WinoGender: one result file per (stat, model, filter).
    for version in gpt2_versions:
        for filter_mode in filter_modes:
            for stat in ('bergsma', 'bls'):
                fname = f'winogender_data/attention_intervention_{stat}_{version}_{filter_mode}.json'
                if not os.path.exists(fname):
                    print('File does not exist:', fname)
                    continue
                with open(fname) as f:
                    data = json.load(f)
                save_figures(data, 'winogender', version, filter_mode, stat)
def test_jieba_no_ssplit():
    """With sentence splitting disabled, the jieba tokenizer must return the
    gold token strings and round-trip every token's character offsets."""
    pipeline = stanza.Pipeline(lang='zh', dir=TEST_MODELS_DIR, processors={'tokenize': 'jieba'}, tokenize_no_ssplit=True, package=None)
    doc = pipeline(ZH_DOC)
    # The jieba variant must actually have been selected for tokenization.
    assert 'JiebaTokenizer' == pipeline.processors['tokenize']._variant.__class__.__name__
    rendered = '\n\n'.join([sent.tokens_string() for sent in doc.sentences])
    assert ZH_DOC_GOLD_NOSSPLIT_TOKENS == rendered
    # Each token's (start, end) span must slice back to its own text.
    assert all([doc.text[tok._start_char:tok._end_char] == tok.text
                for sent in doc.sentences
                for tok in sent.tokens])
# NOTE(review): the collapsed source had a stray `_model` token before the
# def — presumably the tail of a lost decorator (e.g. `@register_model`);
# confirm against the original file before relying on registry side effects.
def densenet169(pretrained=False, **kwargs):
    """Construct a DenseNet-169 model.

    Args:
        pretrained: When True, ask ``_densenet`` for pretrained weights.
        **kwargs: Extra options forwarded to ``_densenet``.

    Returns:
        The model built by ``_densenet``.
    """
    return _densenet('densenet169', growth_rate=32, block_config=(6, 12, 32, 32),
                     pretrained=pretrained, **kwargs)
def score_sequences(y_true: List[List[int]], y_pred: List[List[int]], metrics: Set[str] = None) -> Dict[str, float]:
    """Score tagged sequences with seqeval metrics.

    Args:
        y_true: Gold label sequences.
        y_pred: Predicted label sequences.
        metrics: Names of metrics to compute; when None, all of
            accuracy / precision / recall / f1 are computed.

    Returns:
        Mapping from metric name to score; every requested metric maps to
        0.0 when seqeval cannot score the inputs.
    """
    scorers = {'accuracy': seqeval.metrics.accuracy_score,
               'precision': seqeval.metrics.precision_score,
               'recall': seqeval.metrics.recall_score,
               'f1': seqeval.metrics.f1_score}
    if metrics is None:
        metrics = scorers
    try:
        return {name: scorers[name](y_true, y_pred) for name in metrics}
    except Exception:
        # Narrowed from a bare `except:` so KeyboardInterrupt/SystemExit
        # still propagate; any scoring failure degrades to zeros as before.
        return {name: 0.0 for name in metrics}
def visualize_rgb(tif_path, cut_off_value=2000, show=False, save='tmp.png', force_process_all=False):
    """Render bands 4/3/2 of a GeoTIFF on GCS as a clipped RGB figure.

    Returns (figure, array_shape), or (None, None) when the raster is
    skipped for being too large or too small (unless force_process_all).
    """
    fig = plt.figure()
    src = rasterio.open('gs://' + tif_path)
    if not force_process_all:
        # Size guards: refuse extreme rasters to keep processing cheap.
        n_pixels = src.width * src.height
        if n_pixels > 3451 * 4243:
            print('skipping too large~ ', src.width, src.height, src)
            return (None, None)
        if n_pixels < 500 * 500:
            print('skipping too small~ ', src.width, src.height, src)
            return (None, None)
    print('opening ~ ', src.width, src.height, src)
    red, green, blue = src.read([4, 3, 2])
    # Clip saturated pixels at the cutoff, then linearly rescale each band
    # to 8-bit range.
    scaled = []
    for band in (red, green, blue):
        band[band > cut_off_value] = cut_off_value
        scaled.append((rasterio.plot.adjust_band(band, kind='linear') * 255).astype(np.uint8))
    array = np.stack(scaled, axis=0)
    resolution = array.shape
    rasterio.plot.show(array)
    fig.tight_layout()
    plt.axis('off')
    if show:
        fig.show()
    if save:
        fig.savefig(save)
    return (fig, resolution)
def extracted_glob(extracted_folder, file_patterns, src, tgt, lang):
    """Yield files under ``extracted_folder`` matching resolved patterns.

    Patterns may use {src}/{tgt}/{lang} placeholders plus the conditional
    forms {src:...}/{tgt:...}, whose content is kept only when ``lang``
    equals that side. A (pattern, lang_pairs) tuple restricts the pattern
    to the listed "src-tgt" pairs.
    """

    def resolve(pattern):
        # Substitute only the placeholders the pattern actually contains.
        params = {key: val
                  for key, val in (('src', src), ('tgt', tgt), ('lang', lang))
                  if '{' + key + '}' in pattern}
        pattern = re.sub('{src:(.*?)}', '\\1' if lang == src else '', pattern)
        pattern = re.sub('{tgt:(.*?)}', '\\1' if lang == tgt else '', pattern)
        return pattern.format(**params)

    for entry in file_patterns:
        if isinstance(entry, tuple):
            entry, lang_pairs = entry
            # Tuple form limits the pattern to specific language pairs.
            if f'{src}-{tgt}' not in lang_pairs:
                continue
        resolved = resolve(entry)
        if resolved is None:
            continue
        yield from glob.glob(f'{extracted_folder}/{resolved}')
def autodoc_skip_member(app, what, name, obj, skip, options):
    """Sphinx ``autodoc-skip-member`` hook: also hide noisy PyYAML attrs."""
    hidden = ('yaml_constructors', 'yaml_implicit_resolvers')
    return skip or name in hidden
class BraTSDatasetLSTM(Dataset):
    """BraTS slice dataset yielding (prev, current, next, mask) images.

    Scans ``dataset_folder``/Train (or /Test) for PNG slices whose
    underscore-separated filename fragments contain all ``keywords``; for
    each interior slice ``i`` it stores slice ``i`` together with its
    neighbours ``i-1`` and ``i+1`` plus the mask, so a recurrent model can
    use inter-slice context.
    """

    # Kept for backward compatibility with the original class attributes.
    im_ht = 0
    im_wd = 0
    dataset_size = 0

    def __init__(self, dataset_folder, train=True, keywords=None, im_size=(128, 128), transform=None):
        # Avoid a shared mutable default argument for `keywords`.
        if keywords is None:
            keywords = ['P1', '1', 'flair']
        # All per-instance lists created here; the original left __im1 and
        # __im3 as class-level lists shared between instances (bug).
        self.__im = []
        self.__im1 = []
        self.__im3 = []
        self.__mask = []
        self.im_ht = im_size[0]
        self.im_wd = im_size[1]
        self.transform = transform
        folder = dataset_folder + ('Train/' if train else 'Test/')
        # First pass: find the smallest and largest slice index so border
        # slices (which lack a neighbour) can be excluded.
        max_file = 0
        # Fixed: the original read `min_file =` with no value (syntax error).
        min_file = sys.maxsize
        for file in os.listdir(folder):
            if file.endswith('.png'):
                m = re.search('(P[0-9]*[_])([0-9]*)', file)
                pic_num = int(m.group(2))
                max_file = max(max_file, pic_num)
                min_file = min(min_file, pic_num)
        for file in os.listdir(folder):
            if not file.endswith('.png'):
                continue
            filename = os.path.splitext(file)[0]
            fragments = filename.split('_')
            # Keep only files whose fragments contain every keyword.
            if len(set(fragments) & set(keywords)) != len(keywords):
                continue
            # Skip the first/last slice: no previous/next neighbour exists.
            if fragments[2] in (str(min_file), str(max_file)):
                continue
            self.__im.append(folder + file)
            prev_name = '_'.join([fragments[0], fragments[1], str(int(fragments[2]) - 1), fragments[3]]) + '.png'
            self.__im1.append(folder + prev_name)
            next_name = '_'.join([fragments[0], fragments[1], str(int(fragments[2]) + 1), fragments[3]]) + '.png'
            self.__im3.append(folder + next_name)
            self.__mask.append(folder + getMaskFileName(file))
        # Fixed: the original measured len(self.__file), which was never
        # appended to and therefore always 0.
        self.dataset_size = len(self.__im)

    def __getitem__(self, index):
        """Return the transformed (prev, current, next, mask) tuple.

        NOTE(review): as in the original, returns None when no transform
        is configured — confirm callers always pass a transform.
        """
        img1 = getImg(self.__im1[index])
        img = getImg(self.__im[index])
        img3 = getImg(self.__im3[index])
        mask = getImg(self.__mask[index])
        if self.transform is not None:
            return (self.transform(img1), self.transform(img), self.transform(img3), self.transform(mask))

    def __len__(self):
        return len(self.__im)
def banner(msg: str) -> Callable:
    """Decorator factory printing BEGIN/END/EXCEPTION banners around a call.

    ``msg`` may reference the decorated function's arguments by name
    (e.g. ``"step {name}"``); they are bound through the function's
    signature and substituted via ``str.format``.
    """
    import functools  # local import keeps this block self-contained

    echo = lambda s: print(s, file=sys.stderr, flush=True)

    def decorate(f: Callable) -> Callable:
        sig = inspect.signature(f)
        C = escape_codes['bold_cyan']
        R = escape_codes['bold_red']
        N = escape_codes['reset']

        @functools.wraps(f)  # fixed: preserve f's name/docstring on the wrapper
        def wrapper(*args, **kwargs):
            bound = sig.bind(*args, **kwargs)
            echo(f'{C}:: -----BEGIN {msg}-----{N}'.format(**bound.arguments))
            try:
                ret = f(*args, **kwargs)
                echo(f'{C}:: -----END {msg}-----{N}'.format(**bound.arguments))
                return ret
            except BaseException:
                echo(f'{R}!! -----EXCEPTION {msg}-----{N}'.format(**bound.arguments))
                raise

        return wrapper

    return decorate
def _update_zipimporter_cache(normalized_path, cache, updater=None):
    """Refresh ``cache`` entries that live under ``normalized_path``.

    Every matching entry is removed first; if ``updater`` is given and
    returns a non-None replacement for (key, old_entry), the replacement
    is stored back under the same key.
    """
    for key in _collect_zipimporter_cache_entries(normalized_path, cache):
        stale = cache.pop(key)
        # `updater and ...` short-circuits to None when no updater is given.
        replacement = updater and updater(key, stale)
        if replacement is not None:
            cache[key] = replacement
class GMMTrainer():
    """Training/validation driver for a geometric-matching (GMM) model.

    Optimizes an L1 objective between the composed warped person and the
    ground-truth composition, with an auxiliary L1 term on the warped cloth.
    """

    def __init__(self, model, dataloader_train, dataloader_val, gpu_id, log_freq, save_dir):
        # Prefer the requested GPU when CUDA is available.
        if torch.cuda.is_available():
            self.device = torch.device('cuda:' + str(gpu_id))
        else:
            self.device = torch.device('cpu')
        self.model = model.to(self.device)
        self.dataloader_train = dataloader_train
        self.dataloader_val = dataloader_val
        self.optim = torch.optim.Adam(self.model.parameters(), lr=0.0001, betas=(0.5, 0.999))
        self.criterionL1 = nn.L1Loss()
        self.log_freq = log_freq
        self.save_dir = save_dir
        print('Total Parameters:', sum([p.nelement() for p in self.model.parameters()]))

    def train(self, epoch):
        """Run one training epoch; returns the mean loss."""
        return self.iteration(epoch, self.dataloader_train)

    def val(self, epoch):
        """Run one validation epoch (no gradient updates, no logging)."""
        return self.iteration(epoch, self.dataloader_val, train=False)

    def iteration(self, epoch, data_loader, train=True):
        """Iterate ``data_loader`` once; optimize when ``train`` is True."""
        progress = tqdm(enumerate(data_loader), desc='epoch: %d' % epoch,
                        total=len(data_loader), bar_format='{l_bar}{r_bar}')
        total_loss = 0.0
        for step, raw in progress:
            # Move tensor entries onto the device; keys containing 'name'
            # are dropped (non-tensor bookkeeping).
            data = {key: value.to(self.device) for key, value in raw.items() if 'name' not in key}
            cloth = data['cloth']
            person = data['person']
            body_mask = data['body_mask']
            grid, _ = self.model(data['feature'], cloth)
            warped_cloth = F.grid_sample(cloth, grid, padding_mode='border')
            warped_grid = F.grid_sample(data['grid'], grid, padding_mode='zeros')
            # Compose warped cloth (resp. parsed cloth) onto the body.
            warped_person = body_mask * person + (1 - body_mask) * warped_cloth
            gt = body_mask * person + (1 - body_mask) * data['cloth_parse']
            visuals = [[data['head'], data['shape'], data['pose']],
                       [cloth, warped_cloth, warped_grid],
                       [warped_person, gt, person]]
            loss = self.criterionL1(warped_person, gt) + 0.5 * self.criterionL1(warped_cloth, data['cloth_parse'])
            if train:
                self.optim.zero_grad()
                loss.backward()
                self.optim.step()
            total_loss += loss.item()
            stats = {'epoch': epoch, 'iter': step,
                     'avg_loss': total_loss / (step + 1), 'loss': loss.item()}
            if train and step % self.log_freq == 0:
                progress.write(str(stats))
                board_add_images(visuals, epoch, step, self.save_dir)
        return total_loss / len(progress)
def heatmap_viz(df: pd.DataFrame, x: str, y: str, grp_cnt_stats: Dict[str, int], plot_width: int, plot_height: int) -> Panel:
    """Build a Bokeh heat-map panel of counts for two categorical columns."""
    title = _make_title(grp_cnt_stats, x, y)
    source = ColumnDataSource(data=df)
    # Use only the upper half of the diverging palette for counts.
    palette = RDBU[(len(RDBU) // 2) - 1:]
    mapper = LinearColorMapper(palette=palette, low=df['cnt'].min() - 0.01, high=df['cnt'].max())
    # Widen/heighten the figure when many categories are shown.
    if grp_cnt_stats[f'{x}_shw'] > 60:
        plot_width = 16 * grp_cnt_stats[f'{x}_shw']
    if grp_cnt_stats[f'{y}_shw'] > 10:
        plot_height = 70 + 18 * grp_cnt_stats[f'{y}_shw']
    fig = figure(x_range=sorted(list(set(df[x]))), y_range=sorted(list(set(df[y]))),
                 toolbar_location=None, tools=[], x_axis_location='below',
                 title=title, plot_width=plot_width, plot_height=plot_height)
    renderer = fig.rect(x=x, y=y, width=1, height=1, source=source,
                        line_color=None, fill_color=transform('cnt', mapper))
    color_bar = ColorBar(color_mapper=mapper, location=(0, 0),
                         ticker=BasicTicker(desired_num_ticks=7),
                         formatter=PrintfTickFormatter(format='%d'))
    # NOTE(review): the 'Count' tooltip maps to an empty string in the
    # original — possibly a lost '@cnt' field; preserved as-is.
    fig.add_tools(HoverTool(tooltips=[(x, f'{{{x}}}'), (y, f'{{{y}}}'), ('Count', '')],
                            mode='mouse', renderers=[renderer]))
    fig.add_layout(color_bar, 'right')
    tweak_figure(fig, 'heatmap')
    # Truncate long category labels on the y axis.
    fig.yaxis.formatter = FuncTickFormatter(code="\n if (tick.length > 15) return tick.substring(0, 14) + '...';\n else return tick;\n ")
    return Panel(child=fig, title='Heat Map')
# NOTE(review): '@' restored — the collapsed source had a bare
# `_utils.in_tempdir` token immediately before the def; confirm upstream.
@_utils.in_tempdir
def test_dory_query_workflow_remove_pendants(location):
    """End-to-end check: sort BCALM unitigs, build the cDBG gxt file, and
    verify both the unitig count and the gxt file's md5 digest."""
    from spacegraphcats.cdbg import bcalm_to_gxt, sort_bcalm_unitigs
    copy_dory_head()
    copy_dory_subset()
    # Output directories may already exist from a previous run.
    try:
        os.mkdir('dory_k21')
        os.mkdir('dory_k21_r1')
    except FileExistsError:
        pass
    sort_args = ['-k', '21', relative_file('data/bcalm.dory.k21.unitigs.fa'),
                 'dory_k21/bcalm.unitigs.db', 'dory_k21/bcalm.unitigs.pickle']
    assert sort_bcalm_unitigs.main(sort_args) == 0
    db = sqlite3.connect('dory_k21/bcalm.unitigs.db')
    all_seqs = list(search_utils.contigs_iter_sqlite(db))
    assert len(all_seqs) == 736, len(all_seqs)
    gxt_args = ['dory_k21/bcalm.unitigs.db', 'dory_k21/bcalm.unitigs.pickle',
                'dory_k21/cdbg.gxt', 'dory_k21/contigs']
    assert bcalm_to_gxt.main(gxt_args) == 0
    # The conversion must not change the stored unitig count.
    db = sqlite3.connect('dory_k21/bcalm.unitigs.db')
    all_seqs = list(search_utils.contigs_iter_sqlite(db))
    assert len(all_seqs) == 736, len(all_seqs)
    with open('dory_k21/cdbg.gxt', 'rb') as fp:
        payload = fp.read()
    digest = hashlib.md5()
    digest.update(payload)
    assert digest.hexdigest() == '7e4d9acc9e968f7425c94f6ec78ecdd5', digest.hexdigest()
class RandomResizedCrop(object):
    """Jointly apply a random resized crop to an image and its mask.

    The same crop window is used for both; the image is resampled with
    ``self.interpolation`` while the mask always uses NEAREST so label
    values are never interpolated.
    """

    def __init__(self, size, scale=(0.08, 1.0), ratio=(3.0 / 4.0, 4.0 / 3.0), interpolation=Image.BILINEAR):
        if isinstance(size, (tuple, list)):
            self.size = size
        else:
            self.size = (size, size)
        if scale[0] > scale[1] or ratio[0] > ratio[1]:
            warnings.warn('range should be of kind (min, max)')
        self.interpolation = interpolation
        self.scale = scale
        self.ratio = ratio

    @staticmethod
    def get_params(img, scale, ratio):
        """Sample (i, j, h, w) parameters for a random sized crop of img.

        Fixed: declared ``@staticmethod`` — the original defined this
        without ``self`` but called it as ``self.get_params(img, ...)``,
        which would have bound the instance as ``img`` and shifted every
        argument by one.
        """
        width, height = _get_image_size(img)
        area = height * width
        # Try up to 10 random (area, aspect) draws that fit the image.
        for _ in range(10):
            target_area = random.uniform(*scale) * area
            log_ratio = (math.log(ratio[0]), math.log(ratio[1]))
            aspect_ratio = math.exp(random.uniform(*log_ratio))
            w = int(round(math.sqrt(target_area * aspect_ratio)))
            h = int(round(math.sqrt(target_area / aspect_ratio)))
            if 0 < w <= width and 0 < h <= height:
                i = random.randint(0, height - h)
                j = random.randint(0, width - w)
                return (i, j, h, w)
        # Fallback: center crop at the closest admissible aspect ratio.
        in_ratio = float(width) / float(height)
        if in_ratio < min(ratio):
            w = width
            h = int(round(w / min(ratio)))
        elif in_ratio > max(ratio):
            h = height
            w = int(round(h * max(ratio)))
        else:
            w = width
            h = height
        i = (height - h) // 2
        j = (width - w) // 2
        return (i, j, h, w)

    def __call__(self, img, mask):
        """Crop-and-resize ``img`` and ``mask`` with one shared window."""
        i, j, h, w = self.get_params(img, self.scale, self.ratio)
        img = TF.resized_crop(img, i, j, h, w, self.size, self.interpolation)
        mask = TF.resized_crop(mask, i, j, h, w, self.size, Image.NEAREST)
        return (img, mask)

    def __repr__(self):
        interpolate_str = _pil_interpolation_to_str[self.interpolation]
        format_string = self.__class__.__name__ + '(size={0}'.format(self.size)
        format_string += ', scale={0}'.format(tuple(round(s, 4) for s in self.scale))
        format_string += ', ratio={0}'.format(tuple(round(r, 4) for r in self.ratio))
        format_string += ', interpolation={0})'.format(interpolate_str)
        return format_string
class Logger(object):
    """Tee ``sys.stdout``/``sys.stderr`` through this object, optionally
    duplicating everything into a file.

    Construction installs the logger as both ``sys.stdout`` and
    ``sys.stderr``; ``close()`` (or leaving the context manager) restores
    the originals and closes the file.
    """

    def __init__(self, file_name: str = None, file_mode: str = 'w', should_flush: bool = True):
        self.file = None
        if file_name is not None:
            self.file = open(file_name, file_mode)
        self.should_flush = should_flush
        # Keep the real streams so writes can be forwarded and restored.
        self.stdout = sys.stdout
        self.stderr = sys.stderr
        sys.stdout = self
        sys.stderr = self

    def __enter__(self) -> 'Logger':
        return self

    def __exit__(self, exc_type: Any, exc_value: Any, traceback: Any) -> None:
        self.close()

    def write(self, text: Union[str, bytes]) -> None:
        """Forward ``text`` to the original stdout (and the file, if open)."""
        if isinstance(text, bytes):
            text = text.decode()
        if not text:  # nothing to do for empty writes
            return
        if self.file is not None:
            self.file.write(text)
        self.stdout.write(text)
        if self.should_flush:
            self.flush()

    def flush(self) -> None:
        """Flush the file (when open) and the original stdout."""
        if self.file is not None:
            self.file.flush()
        self.stdout.flush()

    def close(self) -> None:
        """Flush, restore the original streams, and close the file."""
        self.flush()
        # Only undo the redirection if this logger still owns the streams.
        if sys.stdout is self:
            sys.stdout = self.stdout
        if sys.stderr is self:
            sys.stderr = self.stderr
        if self.file is not None:
            self.file.close()
            self.file = None
def clean_time(utter):
    """Normalize informal clock times in an utterance toward HH:MM form.

    Examples: '3 pm' -> '15:00', '9am' -> '09:00', '9:30pm' -> '21:30',
    '5:30' -> '05:30'; multi-digit 'am' hours just lose the suffix.
    """
    # Glue a separated am/pm onto its hour: '3 pm' -> '3pm'.
    utter = re.sub('(\\d+) ([ap]\\.?m)', lambda m: m.group(1) + m.group(2), utter)
    # Zero-pad single-digit hours that carry minutes, dropping a trailing 'am'.
    utter = re.sub('((?<!\\d)\\d:\\d+)(am)?', '0\\1', utter)
    # Bare single-digit am hour -> zero-padded H:00.
    utter = re.sub('((?<!\\d)\\d)am', '0\\1:00', utter)
    # Bare single-digit pm hour -> 24-hour H:00.
    utter = re.sub('((?<!\\d)\\d)pm', lambda m: str(int(m.group(1)) + 12) + ':00', utter)
    # pm hour with minutes -> 24-hour clock.
    utter = re.sub('(\\d+)(:\\d+)pm', lambda m: str(int(m.group(1)) + 12) + m.group(2), utter)
    # Any remaining 'Nam'/'Na.m' forms: strip the suffix.
    utter = re.sub('(\\d+)a\\.?m', '\\1', utter)
    return utter
# NOTE(review): the decorator name looks truncated in the collapsed source
# (likely `array_function_dispatch`); preserved as seen — confirm upstream.
@_function_dispatch(_fft_dispatcher)
def ifft(a, n=None, axis=-1, norm=None):
    """Compute the one-dimensional inverse discrete Fourier Transform.

    Args:
        a: Input array (converted via ``asarray``).
        n: Transform length; defaults to the size of ``a`` along ``axis``.
        axis: Axis over which to compute the inverse FFT.
        norm: Normalization mode; a unitary norm scales by sqrt(n).

    Returns:
        The transformed array produced by ``_raw_fft``.
    """
    a = asarray(a)
    if n is None:
        n = a.shape[axis]
    # Unitary normalization divides by sqrt(n); default divides by n.
    if norm is not None and _unitary(norm):
        inv_norm = sqrt(max(n, 1))
    else:
        inv_norm = n
    return _raw_fft(a, n, axis, False, False, inv_norm)
def runNonMotifCASC(inputName, outputDir, clusters, beta, oldAssignmentsName):
    """Run the non-motif CASC variant, writing into an 'old/' subdirectory.

    When ``outputDir`` is given, output is redirected to
    ``<outputDir>/old/`` (created if needed); delegates to ``runTest``.
    """
    if outputDir is not None:
        outputDir = '%s/old/' % outputDir
        makeDir(outputDir)
    return runTest(0, inputName, outputDir, clusters, beta, 1, 1, oldAssignmentsName, 15)
class RandomWeakPushCartPole(ModifiableCartPoleEnv):
    """CartPole variant whose push force is re-sampled at construction and,
    optionally, on every reset."""

    def _sample_force(self):
        # Draw a force magnitude via uniform_exclude_inner with the extreme
        # band passed before the random band (argument order preserved).
        return uniform_exclude_inner(self.np_random.uniform,
                                     self.EXTREME_LOWER_FORCE_MAG,
                                     self.EXTREME_UPPER_FORCE_MAG,
                                     self.RANDOM_LOWER_FORCE_MAG,
                                     self.RANDOM_UPPER_FORCE_MAG)

    def __init__(self):
        super(RandomWeakPushCartPole, self).__init__()
        self.force_mag = self._sample_force()

    def reset(self, new=True):
        self.state = self.np_random.uniform(low=-0.05, high=0.05, size=(4,))
        self.steps_beyond_done = None
        if new:
            # Re-randomize the push force for the new episode.
            self.force_mag = self._sample_force()
        return np.array(self.state)

    def parameters(self):
        # NOTE(review): the parent's `parameters` is accessed without
        # calling — presumably a property; confirm against the base class.
        params = super(RandomWeakPushCartPole, self).parameters
        params.update({'force': self.force_mag})
        return params
def register_types(module): root_module = module.get_root() module.add_enum('EnvironmentType', ['UrbanEnvironment', 'SubUrbanEnvironment', 'OpenAreasEnvironment'], import_from_module='ns.propagation') module.add_enum('CitySize', ['SmallCity', 'MediumCity', 'LargeCity'], import_from_module='ns.propagation') module.add_enum('QueueSizeUnit', ['PACKETS', 'BYTES'], import_from_module='ns.network') module.add_enum('LogLevel', ['LOG_NONE', 'LOG_ERROR', 'LOG_LEVEL_ERROR', 'LOG_WARN', 'LOG_LEVEL_WARN', 'LOG_DEBUG', 'LOG_LEVEL_DEBUG', 'LOG_INFO', 'LOG_LEVEL_INFO', 'LOG_FUNCTION', 'LOG_LEVEL_FUNCTION', 'LOG_LOGIC', 'LOG_LEVEL_LOGIC', 'LOG_ALL', 'LOG_LEVEL_ALL', 'LOG_PREFIX_FUNC', 'LOG_PREFIX_TIME', 'LOG_PREFIX_NODE', 'LOG_PREFIX_LEVEL', 'LOG_PREFIX_ALL'], import_from_module='ns.core') module.add_enum('MpduType', ['NORMAL_MPDU', 'MPDU_IN_AGGREGATE', 'LAST_MPDU_IN_AGGREGATE']) module.add_enum('HtProtectionType', ['NO_PROTECTION', 'NON_MEMBER_PROTECTION', 'TWENTY_MHZ_PROTECTION', 'MIXED_MODE_PROTECTION']) module.add_enum('TypeOfStation', ['STA', 'AP', 'ADHOC_STA', 'MESH', 'HT_STA', 'HT_AP', 'HT_ADHOC_STA', 'OCB']) module.add_enum('WifiMacType', ['WIFI_MAC_CTL_CTLWRAPPER', 'WIFI_MAC_CTL_RTS', 'WIFI_MAC_CTL_CTS', 'WIFI_MAC_CTL_ACK', 'WIFI_MAC_CTL_BACKREQ', 'WIFI_MAC_CTL_BACKRESP', 'WIFI_MAC_CTL_END', 'WIFI_MAC_CTL_END_ACK', 'WIFI_MAC_MGT_BEACON', 'WIFI_MAC_MGT_ASSOCIATION_REQUEST', 'WIFI_MAC_MGT_ASSOCIATION_RESPONSE', 'WIFI_MAC_MGT_DISASSOCIATION', 'WIFI_MAC_MGT_REASSOCIATION_REQUEST', 'WIFI_MAC_MGT_REASSOCIATION_RESPONSE', 'WIFI_MAC_MGT_PROBE_REQUEST', 'WIFI_MAC_MGT_PROBE_RESPONSE', 'WIFI_MAC_MGT_AUTHENTICATION', 'WIFI_MAC_MGT_DEAUTHENTICATION', 'WIFI_MAC_MGT_ACTION', 'WIFI_MAC_MGT_ACTION_NO_ACK', 'WIFI_MAC_MGT_MULTIHOP_ACTION', 'WIFI_MAC_DATA', 'WIFI_MAC_DATA_CFACK', 'WIFI_MAC_DATA_CFPOLL', 'WIFI_MAC_DATA_CFACK_CFPOLL', 'WIFI_MAC_DATA_NULL', 'WIFI_MAC_DATA_NULL_CFACK', 'WIFI_MAC_DATA_NULL_CFPOLL', 'WIFI_MAC_DATA_NULL_CFACK_CFPOLL', 'WIFI_MAC_QOSDATA', 'WIFI_MAC_QOSDATA_CFACK', 
'WIFI_MAC_QOSDATA_CFPOLL', 'WIFI_MAC_QOSDATA_CFACK_CFPOLL', 'WIFI_MAC_QOSDATA_NULL', 'WIFI_MAC_QOSDATA_NULL_CFPOLL', 'WIFI_MAC_QOSDATA_NULL_CFACK_CFPOLL']) module.add_enum('AcIndex', ['AC_BE', 'AC_BK', 'AC_VI', 'AC_VO', 'AC_BE_NQOS', 'AC_UNDEF']) module.add_enum('WifiPhyStandard', ['WIFI_PHY_STANDARD_80211a', 'WIFI_PHY_STANDARD_80211b', 'WIFI_PHY_STANDARD_80211g', 'WIFI_PHY_STANDARD_80211_10MHZ', 'WIFI_PHY_STANDARD_80211_5MHZ', 'WIFI_PHY_STANDARD_holland', 'WIFI_PHY_STANDARD_80211n_2_4GHZ', 'WIFI_PHY_STANDARD_80211n_5GHZ', 'WIFI_PHY_STANDARD_80211ac', 'WIFI_PHY_STANDARD_80211ax_2_4GHZ', 'WIFI_PHY_STANDARD_80211ax_5GHZ', 'WIFI_PHY_STANDARD_UNSPECIFIED']) module.add_enum('WifiPreamble', ['WIFI_PREAMBLE_LONG', 'WIFI_PREAMBLE_SHORT', 'WIFI_PREAMBLE_HT_MF', 'WIFI_PREAMBLE_HT_GF', 'WIFI_PREAMBLE_VHT', 'WIFI_PREAMBLE_HE_SU', 'WIFI_PREAMBLE_HE_ER_SU', 'WIFI_PREAMBLE_HE_MU', 'WIFI_PREAMBLE_HE_TB', 'WIFI_PREAMBLE_NONE']) module.add_enum('BlockAckType', ['BASIC_BLOCK_ACK', 'COMPRESSED_BLOCK_ACK', 'EXTENDED_COMPRESSED_BLOCK_ACK', 'MULTI_TID_BLOCK_ACK']) module.add_enum('WifiModulationClass', ['WIFI_MOD_CLASS_UNKNOWN', 'WIFI_MOD_CLASS_IR', 'WIFI_MOD_CLASS_FHSS', 'WIFI_MOD_CLASS_DSSS', 'WIFI_MOD_CLASS_HR_DSSS', 'WIFI_MOD_CLASS_ERP_PBCC', 'WIFI_MOD_CLASS_DSSS_OFDM', 'WIFI_MOD_CLASS_ERP_OFDM', 'WIFI_MOD_CLASS_OFDM', 'WIFI_MOD_CLASS_HT', 'WIFI_MOD_CLASS_VHT', 'WIFI_MOD_CLASS_HE']) module.add_enum('WifiCodeRate', ['WIFI_CODE_RATE_UNDEFINED', 'WIFI_CODE_RATE_3_4', 'WIFI_CODE_RATE_2_3', 'WIFI_CODE_RATE_1_2', 'WIFI_CODE_RATE_5_6']) module.add_class('Address', import_from_module='ns.network') module.add_enum('MaxSize_e', ['MAX_SIZE'], outer_class=root_module['ns3::Address'], import_from_module='ns.network') module.add_class('Angles', import_from_module='ns.antenna') module.add_class('ApInfo') module.add_class('AsciiTraceHelper', import_from_module='ns.network') module.add_class('AsciiTraceHelperForDevice', allow_subclassing=True, import_from_module='ns.network') 
module.add_class('AthstatsHelper') module.add_class('AttributeConstructionList', import_from_module='ns.core') module.add_class('Item', import_from_module='ns.core', outer_class=root_module['ns3::AttributeConstructionList']) typehandlers.add_type_alias(u'std::list< ns3::AttributeConstructionList::Item > const_iterator', u'ns3::AttributeConstructionList::CIterator') typehandlers.add_type_alias(u'std::list< ns3::AttributeConstructionList::Item > const_iterator*', u'ns3::AttributeConstructionList::CIterator*') typehandlers.add_type_alias(u'std::list< ns3::AttributeConstructionList::Item > const_iterator&', u'ns3::AttributeConstructionList::CIterator&') module.add_class('BandInfo', import_from_module='ns.spectrum') module.add_class('Bar') module.add_class('BlockAckAgreement') module.add_class('BlockAckCache') module.add_class('Buffer', import_from_module='ns.network') module.add_class('Iterator', import_from_module='ns.network', outer_class=root_module['ns3::Buffer']) module.add_class('ByteTagIterator', import_from_module='ns.network') module.add_class('Item', import_from_module='ns.network', outer_class=root_module['ns3::ByteTagIterator']) module.add_class('ByteTagList', import_from_module='ns.network') module.add_class('Iterator', import_from_module='ns.network', outer_class=root_module['ns3::ByteTagList']) module.add_class('Item', import_from_module='ns.network', outer_class=root_module['ns3::ByteTagList::Iterator']) module.add_class('CallbackBase', import_from_module='ns.core') module.add_class('CapabilityInformation') module.add_class('DataRate', import_from_module='ns.network') module.add_class('DefaultDeleter', import_from_module='ns.core', template_parameters=['ns3::AttributeAccessor']) module.add_class('DefaultDeleter', import_from_module='ns.core', template_parameters=['ns3::AttributeChecker']) module.add_class('DefaultDeleter', import_from_module='ns.core', template_parameters=['ns3::AttributeValue']) module.add_class('DefaultDeleter', 
import_from_module='ns.core', template_parameters=['ns3::CallbackImplBase']) module.add_class('DefaultDeleter', template_parameters=['ns3::Event']) module.add_class('DefaultDeleter', import_from_module='ns.core', template_parameters=['ns3::EventImpl']) module.add_class('DefaultDeleter', import_from_module='ns.core', template_parameters=['ns3::Hash::Implementation']) module.add_class('DefaultDeleter', template_parameters=['ns3::MacRxMiddle']) module.add_class('DefaultDeleter', template_parameters=['ns3::MacTxMiddle']) module.add_class('DefaultDeleter', import_from_module='ns.core', template_parameters=['ns3::NixVector']) module.add_class('DefaultDeleter', import_from_module='ns.core', template_parameters=['ns3::OutputStreamWrapper']) module.add_class('DefaultDeleter', import_from_module='ns.core', template_parameters=['ns3::Packet']) module.add_class('DefaultDeleter', template_parameters=['ns3::QosBlockedDestinations']) module.add_class('DefaultDeleter', import_from_module='ns.core', template_parameters=['ns3::QueueItem']) module.add_class('DefaultDeleter', import_from_module='ns.core', template_parameters=['ns3::SpectrumModel']) module.add_class('DefaultDeleter', import_from_module='ns.core', template_parameters=['ns3::SpectrumSignalParameters']) module.add_class('DefaultDeleter', import_from_module='ns.core', template_parameters=['ns3::SpectrumValue']) module.add_class('DefaultDeleter', import_from_module='ns.core', template_parameters=['ns3::TraceSourceAccessor']) module.add_class('DefaultDeleter', template_parameters=['ns3::WifiInformationElement']) module.add_class('DefaultDeleter', template_parameters=['ns3::WifiMacQueueItem']) module.add_class('DeviceEnergyModelContainer', import_from_module='ns.energy') typehandlers.add_type_alias(u'std::vector< ns3::Ptr< ns3::DeviceEnergyModel > > const_iterator', u'ns3::DeviceEnergyModelContainer::Iterator') typehandlers.add_type_alias(u'std::vector< ns3::Ptr< ns3::DeviceEnergyModel > > const_iterator*', 
u'ns3::DeviceEnergyModelContainer::Iterator*') typehandlers.add_type_alias(u'std::vector< ns3::Ptr< ns3::DeviceEnergyModel > > const_iterator&', u'ns3::DeviceEnergyModelContainer::Iterator&') module.add_class('DeviceEnergyModelHelper', allow_subclassing=True, import_from_module='ns.energy') module.add_class('DsssErrorRateModel') module.add_class('EnergySourceHelper', allow_subclassing=True, import_from_module='ns.energy') module.add_class('EventId', import_from_module='ns.core') module.add_class('GroupInfo') module.add_class('Hasher', import_from_module='ns.core') module.add_class('HePreambleParameters') module.add_class('HtRateInfo') module.add_class('InterferenceHelper') module.add_class('SnrPer', outer_class=root_module['ns3::InterferenceHelper']) module.add_class('Ipv4Address', import_from_module='ns.network') root_module['ns3::Ipv4Address'].implicitly_converts_to(root_module['ns3::Address']) module.add_class('Ipv4Mask', import_from_module='ns.network') module.add_class('Ipv6Address', import_from_module='ns.network') root_module['ns3::Ipv6Address'].implicitly_converts_to(root_module['ns3::Address']) module.add_class('Ipv6Prefix', import_from_module='ns.network') module.add_class('LogComponent', import_from_module='ns.core') typehandlers.add_type_alias(u'std::map< std::string, ns3::LogComponent * >', u'ns3::LogComponent::ComponentList') typehandlers.add_type_alias(u'std::map< std::string, ns3::LogComponent * >*', u'ns3::LogComponent::ComponentList*') typehandlers.add_type_alias(u'std::map< std::string, ns3::LogComponent * >&', u'ns3::LogComponent::ComponentList&') module.add_class('Mac48Address', import_from_module='ns.network') typehandlers.add_type_alias(u'void ( * ) ( ns3::Mac48Address )', u'ns3::Mac48Address::TracedCallback') typehandlers.add_type_alias(u'void ( * ) ( ns3::Mac48Address )*', u'ns3::Mac48Address::TracedCallback*') typehandlers.add_type_alias(u'void ( * ) ( ns3::Mac48Address )&', u'ns3::Mac48Address::TracedCallback&') 
root_module['ns3::Mac48Address'].implicitly_converts_to(root_module['ns3::Address']) module.add_class('Mac8Address', import_from_module='ns.network') root_module['ns3::Mac8Address'].implicitly_converts_to(root_module['ns3::Address']) module.add_class('MacLowTransmissionParameters') module.add_class('McsGroup') module.add_class('MpduInfo') module.add_class('NetDeviceContainer', import_from_module='ns.network') typehandlers.add_type_alias(u'std::vector< ns3::Ptr< ns3::NetDevice > > const_iterator', u'ns3::NetDeviceContainer::Iterator') typehandlers.add_type_alias(u'std::vector< ns3::Ptr< ns3::NetDevice > > const_iterator*', u'ns3::NetDeviceContainer::Iterator*') typehandlers.add_type_alias(u'std::vector< ns3::Ptr< ns3::NetDevice > > const_iterator&', u'ns3::NetDeviceContainer::Iterator&') module.add_class('NodeContainer', import_from_module='ns.network') typehandlers.add_type_alias(u'std::vector< ns3::Ptr< ns3::Node > > const_iterator', u'ns3::NodeContainer::Iterator') typehandlers.add_type_alias(u'std::vector< ns3::Ptr< ns3::Node > > const_iterator*', u'ns3::NodeContainer::Iterator*') typehandlers.add_type_alias(u'std::vector< ns3::Ptr< ns3::Node > > const_iterator&', u'ns3::NodeContainer::Iterator&') module.add_class('ObjectBase', allow_subclassing=True, import_from_module='ns.core') module.add_class('ObjectDeleter', import_from_module='ns.core') module.add_class('ObjectFactory', import_from_module='ns.core') module.add_class('OriginatorBlockAckAgreement', parent=root_module['ns3::BlockAckAgreement']) module.add_enum('State', ['PENDING', 'ESTABLISHED', 'INACTIVE', 'NO_REPLY', 'RESET', 'REJECTED'], outer_class=root_module['ns3::OriginatorBlockAckAgreement']) module.add_class('PacketMetadata', import_from_module='ns.network') module.add_class('Item', import_from_module='ns.network', outer_class=root_module['ns3::PacketMetadata']) module.add_enum('ItemType', ['PAYLOAD', 'HEADER', 'TRAILER'], outer_class=root_module['ns3::PacketMetadata::Item'], 
import_from_module='ns.network') module.add_class('ItemIterator', import_from_module='ns.network', outer_class=root_module['ns3::PacketMetadata']) module.add_class('PacketTagIterator', import_from_module='ns.network') module.add_class('Item', import_from_module='ns.network', outer_class=root_module['ns3::PacketTagIterator']) module.add_class('PacketTagList', import_from_module='ns.network') module.add_class('TagData', import_from_module='ns.network', outer_class=root_module['ns3::PacketTagList']) module.add_class('ParameterLogger', import_from_module='ns.core') module.add_class('PcapFile', import_from_module='ns.network') module.add_class('PcapHelper', import_from_module='ns.network') module.add_enum('DataLinkType', ['DLT_NULL', 'DLT_EN10MB', 'DLT_PPP', 'DLT_RAW', 'DLT_IEEE802_11', 'DLT_LINUX_SLL', 'DLT_PRISM_HEADER', 'DLT_IEEE802_11_RADIO', 'DLT_IEEE802_15_4', 'DLT_NETLINK'], outer_class=root_module['ns3::PcapHelper'], import_from_module='ns.network') module.add_class('PcapHelperForDevice', allow_subclassing=True, import_from_module='ns.network') module.add_class('PropagationCache', import_from_module='ns.propagation', template_parameters=['ns3::JakesProcess']) module.add_class('QueueSize', import_from_module='ns.network') module.add_class('RateInfo') module.add_class('SignalNoiseDbm') module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::Object', 'ns3::ObjectBase', 'ns3::ObjectDeleter'], parent=root_module['ns3::ObjectBase'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount')) module.add_class('Simulator', destructor_visibility='private', import_from_module='ns.core') module.add_enum('', ['NO_CONTEXT'], outer_class=root_module['ns3::Simulator'], import_from_module='ns.core') module.add_class('StatusCode') module.add_class('Tag', import_from_module='ns.network', parent=root_module['ns3::ObjectBase']) 
module.add_class('TagBuffer', import_from_module='ns.network') module.add_class('TimeWithUnit', import_from_module='ns.core') module.add_class('TracedValue', import_from_module='ns.core', template_parameters=['double']) module.add_class('TracedValue', import_from_module='ns.core', template_parameters=['unsigned int']) module.add_class('TracedValue', import_from_module='ns.core', template_parameters=['unsigned long']) module.add_class('TypeId', import_from_module='ns.core') module.add_enum('AttributeFlag', ['ATTR_GET', 'ATTR_SET', 'ATTR_CONSTRUCT', 'ATTR_SGC'], outer_class=root_module['ns3::TypeId'], import_from_module='ns.core') module.add_enum('SupportLevel', ['SUPPORTED', 'DEPRECATED', 'OBSOLETE'], outer_class=root_module['ns3::TypeId'], import_from_module='ns.core') module.add_class('AttributeInformation', import_from_module='ns.core', outer_class=root_module['ns3::TypeId']) module.add_class('TraceSourceInformation', import_from_module='ns.core', outer_class=root_module['ns3::TypeId']) typehandlers.add_type_alias(u'uint32_t', u'ns3::TypeId::hash_t') typehandlers.add_type_alias(u'uint32_t*', u'ns3::TypeId::hash_t*') typehandlers.add_type_alias(u'uint32_t&', u'ns3::TypeId::hash_t&') module.add_class('Vector2D', import_from_module='ns.core') module.add_class('Vector3D', import_from_module='ns.core') module.add_class('WifiHelper', allow_subclassing=True) typehandlers.add_type_alias(u'std::function< unsigned long ( ns3::Ptr< ns3::QueueItem > ) >', u'ns3::WifiHelper::SelectQueueCallback') typehandlers.add_type_alias(u'std::function< unsigned long ( ns3::Ptr< ns3::QueueItem > ) >*', u'ns3::WifiHelper::SelectQueueCallback*') typehandlers.add_type_alias(u'std::function< unsigned long ( ns3::Ptr< ns3::QueueItem > ) >&', u'ns3::WifiHelper::SelectQueueCallback&') module.add_class('WifiMacHelper', allow_subclassing=True) module.add_class('WifiMode') module.add_class('WifiModeFactory') module.add_class('WifiPhyHelper', parent=[root_module['ns3::PcapHelperForDevice'], 
root_module['ns3::AsciiTraceHelperForDevice']]) module.add_enum('SupportedPcapDataLinkTypes', ['DLT_IEEE802_11', 'DLT_PRISM_HEADER', 'DLT_IEEE802_11_RADIO'], outer_class=root_module['ns3::WifiPhyHelper']) module.add_class('WifiPhyListener', allow_subclassing=True) module.add_class('WifiPhyTag', parent=root_module['ns3::Tag']) module.add_class('WifiRadioEnergyModelHelper', parent=root_module['ns3::DeviceEnergyModelHelper']) module.add_class('WifiRadioEnergyModelPhyListener', parent=root_module['ns3::WifiPhyListener']) typehandlers.add_type_alias(u'ns3::Callback< void, double, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', u'ns3::WifiRadioEnergyModelPhyListener::UpdateTxCurrentCallback') typehandlers.add_type_alias(u'ns3::Callback< void, double, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >*', u'ns3::WifiRadioEnergyModelPhyListener::UpdateTxCurrentCallback*') typehandlers.add_type_alias(u'ns3::Callback< void, double, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >&', u'ns3::WifiRadioEnergyModelPhyListener::UpdateTxCurrentCallback&') module.add_class('WifiRemoteStation') module.add_class('WifiRemoteStationInfo') module.add_class('WifiRemoteStationState') module.add_enum('', ['BRAND_NEW', 'DISASSOC', 'WAIT_ASSOC_TX_OK', 'GOT_ASSOC_TX_OK'], outer_class=root_module['ns3::WifiRemoteStationState']) module.add_class('WifiRraaThresholds') module.add_class('WifiRrpaaThresholds') module.add_class('WifiTxVector') module.add_class('YansWifiChannelHelper') module.add_class('YansWifiPhyHelper', parent=root_module['ns3::WifiPhyHelper']) module.add_class('empty', import_from_module='ns.core') module.add_class('int64x64_t', import_from_module='ns.core') module.add_enum('impl_type', ['int128_impl', 'cairo_impl', 'ld_impl'], outer_class=root_module['ns3::int64x64_t'], import_from_module='ns.core') module.add_class('AmpduTag', 
parent=root_module['ns3::Tag']) module.add_class('Chunk', import_from_module='ns.network', parent=root_module['ns3::ObjectBase']) module.add_class('Header', import_from_module='ns.network', parent=root_module['ns3::Chunk']) module.add_class('HighLatencyCtsToSelfTxVectorTag', parent=root_module['ns3::Tag']) module.add_class('HighLatencyDataTxVectorTag', parent=root_module['ns3::Tag']) module.add_class('HighLatencyRtsTxVectorTag', parent=root_module['ns3::Tag']) module.add_class('MgtAddBaRequestHeader', parent=root_module['ns3::Header']) module.add_class('MgtAddBaResponseHeader', parent=root_module['ns3::Header']) module.add_class('MgtAssocRequestHeader', parent=root_module['ns3::Header']) module.add_class('MgtAssocResponseHeader', parent=root_module['ns3::Header']) module.add_class('MgtDelBaHeader', parent=root_module['ns3::Header']) module.add_class('MgtProbeRequestHeader', parent=root_module['ns3::Header']) module.add_class('MgtProbeResponseHeader', parent=root_module['ns3::Header']) module.add_class('MgtReassocRequestHeader', parent=root_module['ns3::Header']) module.add_class('MinstrelWifiRemoteStation', parent=root_module['ns3::WifiRemoteStation']) module.add_class('Object', import_from_module='ns.core', parent=root_module['ns3::SimpleRefCount< ns3::Object, ns3::ObjectBase, ns3::ObjectDeleter >']) module.add_class('AggregateIterator', import_from_module='ns.core', outer_class=root_module['ns3::Object']) module.add_class('PcapFileWrapper', import_from_module='ns.network', parent=root_module['ns3::Object']) module.add_class('PreambleDetectionModel', parent=root_module['ns3::Object']) module.add_class('PropagationDelayModel', import_from_module='ns.propagation', parent=root_module['ns3::Object']) module.add_class('PropagationLossModel', import_from_module='ns.propagation', parent=root_module['ns3::Object']) module.add_class('QueueBase', import_from_module='ns.network', parent=root_module['ns3::Object']) module.add_class('RandomPropagationDelayModel', 
import_from_module='ns.propagation', parent=root_module['ns3::PropagationDelayModel']) module.add_class('RandomPropagationLossModel', import_from_module='ns.propagation', parent=root_module['ns3::PropagationLossModel']) module.add_class('RandomVariableStream', import_from_module='ns.core', parent=root_module['ns3::Object']) module.add_class('RangePropagationLossModel', import_from_module='ns.propagation', parent=root_module['ns3::PropagationLossModel']) module.add_class('SequentialRandomVariable', import_from_module='ns.core', parent=root_module['ns3::RandomVariableStream']) module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::AttributeAccessor', 'ns3::empty', 'ns3::DefaultDeleter<ns3::AttributeAccessor>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount')) module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::AttributeChecker', 'ns3::empty', 'ns3::DefaultDeleter<ns3::AttributeChecker>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount')) module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::AttributeValue', 'ns3::empty', 'ns3::DefaultDeleter<ns3::AttributeValue>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount')) module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::CallbackImplBase', 'ns3::empty', 'ns3::DefaultDeleter<ns3::CallbackImplBase>'], parent=root_module['ns3::empty'], 
memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount')) module.add_class('SimpleRefCount', automatic_type_narrowing=True, template_parameters=['ns3::Event', 'ns3::empty', 'ns3::DefaultDeleter<ns3::Event>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount')) module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::EventImpl', 'ns3::empty', 'ns3::DefaultDeleter<ns3::EventImpl>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount')) module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::Hash::Implementation', 'ns3::empty', 'ns3::DefaultDeleter<ns3::Hash::Implementation>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount')) module.add_class('SimpleRefCount', automatic_type_narrowing=True, template_parameters=['ns3::MacRxMiddle', 'ns3::empty', 'ns3::DefaultDeleter<ns3::MacRxMiddle>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount')) module.add_class('SimpleRefCount', automatic_type_narrowing=True, template_parameters=['ns3::MacTxMiddle', 'ns3::empty', 'ns3::DefaultDeleter<ns3::MacTxMiddle>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount')) module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::NixVector', 
'ns3::empty', 'ns3::DefaultDeleter<ns3::NixVector>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount')) module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::OutputStreamWrapper', 'ns3::empty', 'ns3::DefaultDeleter<ns3::OutputStreamWrapper>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount')) module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::Packet', 'ns3::empty', 'ns3::DefaultDeleter<ns3::Packet>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount')) module.add_class('SimpleRefCount', automatic_type_narrowing=True, template_parameters=['ns3::QosBlockedDestinations', 'ns3::empty', 'ns3::DefaultDeleter<ns3::QosBlockedDestinations>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount')) module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::QueueItem', 'ns3::empty', 'ns3::DefaultDeleter<ns3::QueueItem>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount')) module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::SpectrumModel', 'ns3::empty', 'ns3::DefaultDeleter<ns3::SpectrumModel>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', 
peekref_method='GetReferenceCount')) module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::SpectrumSignalParameters', 'ns3::empty', 'ns3::DefaultDeleter<ns3::SpectrumSignalParameters>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount')) module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::SpectrumValue', 'ns3::empty', 'ns3::DefaultDeleter<ns3::SpectrumValue>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount')) module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::TraceSourceAccessor', 'ns3::empty', 'ns3::DefaultDeleter<ns3::TraceSourceAccessor>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount')) module.add_class('SimpleRefCount', automatic_type_narrowing=True, template_parameters=['ns3::WifiInformationElement', 'ns3::empty', 'ns3::DefaultDeleter<ns3::WifiInformationElement>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount')) module.add_class('SimpleRefCount', automatic_type_narrowing=True, template_parameters=['ns3::WifiMacQueueItem', 'ns3::empty', 'ns3::DefaultDeleter<ns3::WifiMacQueueItem>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount')) module.add_class('SnrTag', parent=root_module['ns3::Tag']) module.add_class('SpectrumModel', import_from_module='ns.spectrum', 
parent=root_module['ns3::SimpleRefCount< ns3::SpectrumModel, ns3::empty, ns3::DefaultDeleter<ns3::SpectrumModel> >']) module.add_class('SpectrumPhy', import_from_module='ns.spectrum', parent=root_module['ns3::Object']) module.add_class('SpectrumPropagationLossModel', import_from_module='ns.spectrum', parent=root_module['ns3::Object']) module.add_class('SpectrumSignalParameters', import_from_module='ns.spectrum', parent=root_module['ns3::SimpleRefCount< ns3::SpectrumSignalParameters, ns3::empty, ns3::DefaultDeleter<ns3::SpectrumSignalParameters> >']) module.add_class('SpectrumValue', import_from_module='ns.spectrum', parent=root_module['ns3::SimpleRefCount< ns3::SpectrumValue, ns3::empty, ns3::DefaultDeleter<ns3::SpectrumValue> >']) typehandlers.add_type_alias(u'void ( * ) ( ns3::Ptr< ns3::SpectrumValue > )', u'ns3::SpectrumValue::TracedCallback') typehandlers.add_type_alias(u'void ( * ) ( ns3::Ptr< ns3::SpectrumValue > )*', u'ns3::SpectrumValue::TracedCallback*') typehandlers.add_type_alias(u'void ( * ) ( ns3::Ptr< ns3::SpectrumValue > )&', u'ns3::SpectrumValue::TracedCallback&') module.add_class('SpectrumWifiPhyHelper', parent=root_module['ns3::WifiPhyHelper']) module.add_class('ThreeLogDistancePropagationLossModel', import_from_module='ns.propagation', parent=root_module['ns3::PropagationLossModel']) module.add_class('ThresholdPreambleDetectionModel', parent=root_module['ns3::PreambleDetectionModel']) module.add_class('Time', import_from_module='ns.core') module.add_enum('Unit', ['Y', 'D', 'H', 'MIN', 'S', 'MS', 'US', 'NS', 'PS', 'FS', 'LAST'], outer_class=root_module['ns3::Time'], import_from_module='ns.core') typehandlers.add_type_alias(u'void ( * ) ( ns3::Time )', u'ns3::Time::TracedCallback') typehandlers.add_type_alias(u'void ( * ) ( ns3::Time )*', u'ns3::Time::TracedCallback*') typehandlers.add_type_alias(u'void ( * ) ( ns3::Time )&', u'ns3::Time::TracedCallback&') root_module['ns3::Time'].implicitly_converts_to(root_module['ns3::int64x64_t']) 
module.add_class('TraceSourceAccessor', import_from_module='ns.core', parent=root_module['ns3::SimpleRefCount< ns3::TraceSourceAccessor, ns3::empty, ns3::DefaultDeleter<ns3::TraceSourceAccessor> >']) module.add_class('Trailer', import_from_module='ns.network', parent=root_module['ns3::Chunk']) module.add_class('TriangularRandomVariable', import_from_module='ns.core', parent=root_module['ns3::RandomVariableStream']) module.add_class('TwoRayGroundPropagationLossModel', import_from_module='ns.propagation', parent=root_module['ns3::PropagationLossModel']) module.add_class('Txop', parent=root_module['ns3::Object']) typehandlers.add_type_alias(u'ns3::Callback< void, ns3::WifiMacHeader const &, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', u'ns3::Txop::TxOk') typehandlers.add_type_alias(u'ns3::Callback< void, ns3::WifiMacHeader const &, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >*', u'ns3::Txop::TxOk*') typehandlers.add_type_alias(u'ns3::Callback< void, ns3::WifiMacHeader const &, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >&', u'ns3::Txop::TxOk&') typehandlers.add_type_alias(u'ns3::Callback< void, ns3::WifiMacHeader const &, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', u'ns3::Txop::TxFailed') typehandlers.add_type_alias(u'ns3::Callback< void, ns3::WifiMacHeader const &, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >*', u'ns3::Txop::TxFailed*') typehandlers.add_type_alias(u'ns3::Callback< void, ns3::WifiMacHeader const &, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >&', u'ns3::Txop::TxFailed&') typehandlers.add_type_alias(u'ns3::Callback< void, ns3::Ptr< ns3::Packet const >, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, 
ns3::empty >', u'ns3::Txop::TxDropped') typehandlers.add_type_alias(u'ns3::Callback< void, ns3::Ptr< ns3::Packet const >, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >*', u'ns3::Txop::TxDropped*') typehandlers.add_type_alias(u'ns3::Callback< void, ns3::Ptr< ns3::Packet const >, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >&', u'ns3::Txop::TxDropped&') module.add_class('UniformRandomVariable', import_from_module='ns.core', parent=root_module['ns3::RandomVariableStream']) module.add_class('VhtConfiguration', parent=root_module['ns3::Object']) module.add_class('WeibullRandomVariable', import_from_module='ns.core', parent=root_module['ns3::RandomVariableStream']) module.add_class('WifiActionHeader', parent=root_module['ns3::Header']) module.add_enum('CategoryValue', ['BLOCK_ACK', 'MESH', 'MULTIHOP', 'SELF_PROTECTED', 'VENDOR_SPECIFIC_ACTION'], outer_class=root_module['ns3::WifiActionHeader']) module.add_enum('SelfProtectedActionValue', ['PEER_LINK_OPEN', 'PEER_LINK_CONFIRM', 'PEER_LINK_CLOSE', 'GROUP_KEY_INFORM', 'GROUP_KEY_ACK'], outer_class=root_module['ns3::WifiActionHeader']) module.add_enum('MultihopActionValue', ['PROXY_UPDATE', 'PROXY_UPDATE_CONFIRMATION'], outer_class=root_module['ns3::WifiActionHeader']) module.add_enum('MeshActionValue', ['LINK_METRIC_REPORT', 'PATH_SELECTION', 'PORTAL_ANNOUNCEMENT', 'CONGESTION_CONTROL_NOTIFICATION', 'MDA_SETUP_REQUEST', 'MDA_SETUP_REPLY', 'MDAOP_ADVERTISMENT_REQUEST', 'MDAOP_ADVERTISMENTS', 'MDAOP_SET_TEARDOWN', 'TBTT_ADJUSTMENT_REQUEST', 'TBTT_ADJUSTMENT_RESPONSE'], outer_class=root_module['ns3::WifiActionHeader']) module.add_enum('BlockAckActionValue', ['BLOCK_ACK_ADDBA_REQUEST', 'BLOCK_ACK_ADDBA_RESPONSE', 'BLOCK_ACK_DELBA'], outer_class=root_module['ns3::WifiActionHeader']) module.add_class('ActionValue', outer_class=root_module['ns3::WifiActionHeader']) typehandlers.add_type_alias(u'ns3::WifiActionHeader::ActionValue', 
u'ns3::WifiActionHeader::ActionValue') typehandlers.add_type_alias(u'ns3::WifiActionHeader::ActionValue*', u'ns3::WifiActionHeader::ActionValue*') typehandlers.add_type_alias(u'ns3::WifiActionHeader::ActionValue&', u'ns3::WifiActionHeader::ActionValue&') module.add_typedef(root_module['ns3::WifiActionHeader::ActionValue'], 'ActionValue') module.add_class('WifiInformationElement', parent=root_module['ns3::SimpleRefCount< ns3::WifiInformationElement, ns3::empty, ns3::DefaultDeleter<ns3::WifiInformationElement> >']) module.add_class('WifiInformationElementVector', parent=root_module['ns3::Header']) typehandlers.add_type_alias(u'std::vector< ns3::Ptr< ns3::WifiInformationElement > > iterator', u'ns3::WifiInformationElementVector::Iterator') typehandlers.add_type_alias(u'std::vector< ns3::Ptr< ns3::WifiInformationElement > > iterator*', u'ns3::WifiInformationElementVector::Iterator*') typehandlers.add_type_alias(u'std::vector< ns3::Ptr< ns3::WifiInformationElement > > iterator&', u'ns3::WifiInformationElementVector::Iterator&') module.add_class('WifiMac', parent=root_module['ns3::Object']) module.add_class('WifiMacHeader', parent=root_module['ns3::Header']) module.add_enum('QosAckPolicy', ['NORMAL_ACK', 'NO_ACK', 'NO_EXPLICIT_ACK', 'BLOCK_ACK'], outer_class=root_module['ns3::WifiMacHeader']) module.add_enum('AddressType', ['ADDR1', 'ADDR2', 'ADDR3', 'ADDR4'], outer_class=root_module['ns3::WifiMacHeader']) typehandlers.add_type_alias(u'void ( * ) ( ns3::WifiMacHeader const & )', u'ns3::WifiMacHeader::TracedCallback') typehandlers.add_type_alias(u'void ( * ) ( ns3::WifiMacHeader const & )*', u'ns3::WifiMacHeader::TracedCallback*') typehandlers.add_type_alias(u'void ( * ) ( ns3::WifiMacHeader const & )&', u'ns3::WifiMacHeader::TracedCallback&') module.add_class('WifiMacQueueItem', parent=root_module['ns3::SimpleRefCount< ns3::WifiMacQueueItem, ns3::empty, ns3::DefaultDeleter<ns3::WifiMacQueueItem> >']) module.add_class('WifiMacTrailer', parent=root_module['ns3::Trailer']) 
module.add_class('WifiPhy', parent=root_module['ns3::Object']) typehandlers.add_type_alias(u'std::pair< unsigned char, ns3::WifiPhyStandard >', u'ns3::WifiPhy::ChannelNumberStandardPair') typehandlers.add_type_alias(u'std::pair< unsigned char, ns3::WifiPhyStandard >*', u'ns3::WifiPhy::ChannelNumberStandardPair*') typehandlers.add_type_alias(u'std::pair< unsigned char, ns3::WifiPhyStandard >&', u'ns3::WifiPhy::ChannelNumberStandardPair&') typehandlers.add_type_alias(u'std::pair< unsigned short, unsigned short >', u'ns3::WifiPhy::FrequencyWidthPair') typehandlers.add_type_alias(u'std::pair< unsigned short, unsigned short >*', u'ns3::WifiPhy::FrequencyWidthPair*') typehandlers.add_type_alias(u'std::pair< unsigned short, unsigned short >&', u'ns3::WifiPhy::FrequencyWidthPair&') typehandlers.add_type_alias(u'void ( * ) ( ns3::Ptr< ns3::Packet const >, uint16_t, ns3::WifiTxVector, ns3::MpduInfo, ns3::SignalNoiseDbm )', u'ns3::WifiPhy::MonitorSnifferRxCallback') typehandlers.add_type_alias(u'void ( * ) ( ns3::Ptr< ns3::Packet const >, uint16_t, ns3::WifiTxVector, ns3::MpduInfo, ns3::SignalNoiseDbm )*', u'ns3::WifiPhy::MonitorSnifferRxCallback*') typehandlers.add_type_alias(u'void ( * ) ( ns3::Ptr< ns3::Packet const >, uint16_t, ns3::WifiTxVector, ns3::MpduInfo, ns3::SignalNoiseDbm )&', u'ns3::WifiPhy::MonitorSnifferRxCallback&') typehandlers.add_type_alias(u'void ( * ) ( ns3::Ptr< ns3::Packet const > const, uint16_t, ns3::WifiTxVector, ns3::MpduInfo )', u'ns3::WifiPhy::MonitorSnifferTxCallback') typehandlers.add_type_alias(u'void ( * ) ( ns3::Ptr< ns3::Packet const > const, uint16_t, ns3::WifiTxVector, ns3::MpduInfo )*', u'ns3::WifiPhy::MonitorSnifferTxCallback*') typehandlers.add_type_alias(u'void ( * ) ( ns3::Ptr< ns3::Packet const > const, uint16_t, ns3::WifiTxVector, ns3::MpduInfo )&', u'ns3::WifiPhy::MonitorSnifferTxCallback&') typehandlers.add_type_alias(u'void ( * ) ( ns3::HePreambleParameters )', u'ns3::WifiPhy::EndOfHePreambleCallback') 
typehandlers.add_type_alias(u'void ( * ) ( ns3::HePreambleParameters )*', u'ns3::WifiPhy::EndOfHePreambleCallback*') typehandlers.add_type_alias(u'void ( * ) ( ns3::HePreambleParameters )&', u'ns3::WifiPhy::EndOfHePreambleCallback&') module.add_class('WifiPhyStateHelper', parent=root_module['ns3::Object']) typehandlers.add_type_alias(u'void ( * ) ( ns3::Time, ns3::Time, WifiPhyState )', u'ns3::WifiPhyStateHelper::StateTracedCallback') typehandlers.add_type_alias(u'void ( * ) ( ns3::Time, ns3::Time, WifiPhyState )*', u'ns3::WifiPhyStateHelper::StateTracedCallback*') typehandlers.add_type_alias(u'void ( * ) ( ns3::Time, ns3::Time, WifiPhyState )&', u'ns3::WifiPhyStateHelper::StateTracedCallback&') typehandlers.add_type_alias(u'void ( * ) ( ns3::Ptr< ns3::Packet const >, double, ns3::WifiMode, ns3::WifiPreamble )', u'ns3::WifiPhyStateHelper::RxOkTracedCallback') typehandlers.add_type_alias(u'void ( * ) ( ns3::Ptr< ns3::Packet const >, double, ns3::WifiMode, ns3::WifiPreamble )*', u'ns3::WifiPhyStateHelper::RxOkTracedCallback*') typehandlers.add_type_alias(u'void ( * ) ( ns3::Ptr< ns3::Packet const >, double, ns3::WifiMode, ns3::WifiPreamble )&', u'ns3::WifiPhyStateHelper::RxOkTracedCallback&') typehandlers.add_type_alias(u'void ( * ) ( ns3::Ptr< ns3::Packet const >, double )', u'ns3::WifiPhyStateHelper::RxEndErrorTracedCallback') typehandlers.add_type_alias(u'void ( * ) ( ns3::Ptr< ns3::Packet const >, double )*', u'ns3::WifiPhyStateHelper::RxEndErrorTracedCallback*') typehandlers.add_type_alias(u'void ( * ) ( ns3::Ptr< ns3::Packet const >, double )&', u'ns3::WifiPhyStateHelper::RxEndErrorTracedCallback&') typehandlers.add_type_alias(u'void ( * ) ( ns3::Ptr< ns3::Packet const >, ns3::WifiMode, ns3::WifiPreamble, uint8_t )', u'ns3::WifiPhyStateHelper::TxTracedCallback') typehandlers.add_type_alias(u'void ( * ) ( ns3::Ptr< ns3::Packet const >, ns3::WifiMode, ns3::WifiPreamble, uint8_t )*', u'ns3::WifiPhyStateHelper::TxTracedCallback*') typehandlers.add_type_alias(u'void 
( * ) ( ns3::Ptr< ns3::Packet const >, ns3::WifiMode, ns3::WifiPreamble, uint8_t )&', u'ns3::WifiPhyStateHelper::TxTracedCallback&') module.add_class('WifiRemoteStationManager', parent=root_module['ns3::Object']) module.add_enum('ProtectionMode', ['RTS_CTS', 'CTS_TO_SELF'], outer_class=root_module['ns3::WifiRemoteStationManager']) typehandlers.add_type_alias(u'std::vector< ns3::WifiRemoteStation * >', u'ns3::WifiRemoteStationManager::Stations') typehandlers.add_type_alias(u'std::vector< ns3::WifiRemoteStation * >*', u'ns3::WifiRemoteStationManager::Stations*') typehandlers.add_type_alias(u'std::vector< ns3::WifiRemoteStation * >&', u'ns3::WifiRemoteStationManager::Stations&') typehandlers.add_type_alias(u'std::vector< ns3::WifiRemoteStationState * >', u'ns3::WifiRemoteStationManager::StationStates') typehandlers.add_type_alias(u'std::vector< ns3::WifiRemoteStationState * >*', u'ns3::WifiRemoteStationManager::StationStates*') typehandlers.add_type_alias(u'std::vector< ns3::WifiRemoteStationState * >&', u'ns3::WifiRemoteStationManager::StationStates&') typehandlers.add_type_alias(u'void ( * ) ( double, double, ns3::Mac48Address )', u'ns3::WifiRemoteStationManager::PowerChangeTracedCallback') typehandlers.add_type_alias(u'void ( * ) ( double, double, ns3::Mac48Address )*', u'ns3::WifiRemoteStationManager::PowerChangeTracedCallback*') typehandlers.add_type_alias(u'void ( * ) ( double, double, ns3::Mac48Address )&', u'ns3::WifiRemoteStationManager::PowerChangeTracedCallback&') typehandlers.add_type_alias(u'void ( * ) ( ns3::DataRate, ns3::DataRate, ns3::Mac48Address )', u'ns3::WifiRemoteStationManager::RateChangeTracedCallback') typehandlers.add_type_alias(u'void ( * ) ( ns3::DataRate, ns3::DataRate, ns3::Mac48Address )*', u'ns3::WifiRemoteStationManager::RateChangeTracedCallback*') typehandlers.add_type_alias(u'void ( * ) ( ns3::DataRate, ns3::DataRate, ns3::Mac48Address )&', u'ns3::WifiRemoteStationManager::RateChangeTracedCallback&') 
module.add_class('WifiSpectrumPhyInterface', parent=root_module['ns3::SpectrumPhy']) module.add_class('WifiSpectrumSignalParameters', parent=root_module['ns3::SpectrumSignalParameters']) module.add_class('WifiTxCurrentModel', parent=root_module['ns3::Object']) module.add_class('YansWifiPhy', parent=root_module['ns3::WifiPhy']) module.add_class('ZetaRandomVariable', import_from_module='ns.core', parent=root_module['ns3::RandomVariableStream']) module.add_class('ZipfRandomVariable', import_from_module='ns.core', parent=root_module['ns3::RandomVariableStream']) module.add_class('AarfWifiManager', parent=root_module['ns3::WifiRemoteStationManager']) module.add_class('AarfcdWifiManager', parent=root_module['ns3::WifiRemoteStationManager']) module.add_class('AmpduSubframeHeader', parent=root_module['ns3::Header']) module.add_class('AmrrWifiManager', parent=root_module['ns3::WifiRemoteStationManager']) module.add_class('AmsduSubframeHeader', parent=root_module['ns3::Header']) module.add_class('AntennaModel', import_from_module='ns.antenna', parent=root_module['ns3::Object']) module.add_class('AparfWifiManager', parent=root_module['ns3::WifiRemoteStationManager']) module.add_enum('State', ['High', 'Low', 'Spread'], outer_class=root_module['ns3::AparfWifiManager']) module.add_class('ArfWifiManager', parent=root_module['ns3::WifiRemoteStationManager']) module.add_class('AthstatsWifiTraceSink', parent=root_module['ns3::Object']) module.add_class('AttributeAccessor', import_from_module='ns.core', parent=root_module['ns3::SimpleRefCount< ns3::AttributeAccessor, ns3::empty, ns3::DefaultDeleter<ns3::AttributeAccessor> >']) module.add_class('AttributeChecker', allow_subclassing=False, automatic_type_narrowing=True, import_from_module='ns.core', parent=root_module['ns3::SimpleRefCount< ns3::AttributeChecker, ns3::empty, ns3::DefaultDeleter<ns3::AttributeChecker> >']) module.add_class('AttributeValue', allow_subclassing=False, automatic_type_narrowing=True, 
import_from_module='ns.core', parent=root_module['ns3::SimpleRefCount< ns3::AttributeValue, ns3::empty, ns3::DefaultDeleter<ns3::AttributeValue> >']) module.add_class('BlockAckManager', parent=root_module['ns3::Object']) typehandlers.add_type_alias(u'ns3::Callback< void, ns3::WifiMacHeader const &, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', u'ns3::BlockAckManager::TxOk') typehandlers.add_type_alias(u'ns3::Callback< void, ns3::WifiMacHeader const &, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >*', u'ns3::BlockAckManager::TxOk*') typehandlers.add_type_alias(u'ns3::Callback< void, ns3::WifiMacHeader const &, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >&', u'ns3::BlockAckManager::TxOk&') typehandlers.add_type_alias(u'ns3::Callback< void, ns3::WifiMacHeader const &, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', u'ns3::BlockAckManager::TxFailed') typehandlers.add_type_alias(u'ns3::Callback< void, ns3::WifiMacHeader const &, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >*', u'ns3::BlockAckManager::TxFailed*') typehandlers.add_type_alias(u'ns3::Callback< void, ns3::WifiMacHeader const &, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >&', u'ns3::BlockAckManager::TxFailed&') typehandlers.add_type_alias(u'void ( * ) ( ns3::Time, ns3::Mac48Address, uint8_t, ns3::OriginatorBlockAckAgreement::State )', u'ns3::BlockAckManager::AgreementStateTracedCallback') typehandlers.add_type_alias(u'void ( * ) ( ns3::Time, ns3::Mac48Address, uint8_t, ns3::OriginatorBlockAckAgreement::State )*', u'ns3::BlockAckManager::AgreementStateTracedCallback*') typehandlers.add_type_alias(u'void ( * ) ( ns3::Time, ns3::Mac48Address, uint8_t, ns3::OriginatorBlockAckAgreement::State )&', 
u'ns3::BlockAckManager::AgreementStateTracedCallback&') module.add_class('BooleanChecker', import_from_module='ns.core', parent=root_module['ns3::AttributeChecker']) module.add_class('BooleanValue', import_from_module='ns.core', parent=root_module['ns3::AttributeValue']) module.add_class('CallbackChecker', import_from_module='ns.core', parent=root_module['ns3::AttributeChecker']) module.add_class('CallbackImplBase', import_from_module='ns.core', parent=root_module['ns3::SimpleRefCount< ns3::CallbackImplBase, ns3::empty, ns3::DefaultDeleter<ns3::CallbackImplBase> >']) module.add_class('CallbackValue', import_from_module='ns.core', parent=root_module['ns3::AttributeValue']) module.add_class('CaraWifiManager', parent=root_module['ns3::WifiRemoteStationManager']) module.add_class('CfParameterSet', parent=root_module['ns3::WifiInformationElement']) module.add_class('Channel', import_from_module='ns.network', parent=root_module['ns3::Object']) module.add_class('ChannelAccessManager', parent=root_module['ns3::Object']) module.add_class('ConstantRandomVariable', import_from_module='ns.core', parent=root_module['ns3::RandomVariableStream']) module.add_class('ConstantRateWifiManager', parent=root_module['ns3::WifiRemoteStationManager']) module.add_class('ConstantSpeedPropagationDelayModel', import_from_module='ns.propagation', parent=root_module['ns3::PropagationDelayModel']) module.add_class('Cost231PropagationLossModel', import_from_module='ns.propagation', parent=root_module['ns3::PropagationLossModel']) module.add_class('CtrlBAckRequestHeader', parent=root_module['ns3::Header']) module.add_class('CtrlBAckResponseHeader', parent=root_module['ns3::Header']) module.add_class('DataRateChecker', import_from_module='ns.network', parent=root_module['ns3::AttributeChecker']) module.add_class('DataRateValue', import_from_module='ns.network', parent=root_module['ns3::AttributeValue']) module.add_class('DeterministicRandomVariable', import_from_module='ns.core', 
parent=root_module['ns3::RandomVariableStream']) module.add_class('DeviceEnergyModel', import_from_module='ns.energy', parent=root_module['ns3::Object']) typehandlers.add_type_alias(u'ns3::Callback< void, int, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', u'ns3::DeviceEnergyModel::ChangeStateCallback') typehandlers.add_type_alias(u'ns3::Callback< void, int, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >*', u'ns3::DeviceEnergyModel::ChangeStateCallback*') typehandlers.add_type_alias(u'ns3::Callback< void, int, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >&', u'ns3::DeviceEnergyModel::ChangeStateCallback&') module.add_class('DoubleValue', import_from_module='ns.core', parent=root_module['ns3::AttributeValue']) module.add_class('DsssParameterSet', parent=root_module['ns3::WifiInformationElement']) module.add_class('EdcaParameterSet', parent=root_module['ns3::WifiInformationElement']) module.add_class('EmpiricalRandomVariable', import_from_module='ns.core', parent=root_module['ns3::RandomVariableStream']) module.add_class('EmptyAttributeAccessor', import_from_module='ns.core', parent=root_module['ns3::AttributeAccessor']) module.add_class('EmptyAttributeChecker', import_from_module='ns.core', parent=root_module['ns3::AttributeChecker']) module.add_class('EmptyAttributeValue', import_from_module='ns.core', parent=root_module['ns3::AttributeValue']) module.add_class('EnergyHarvester', import_from_module='ns.energy', parent=root_module['ns3::Object']) module.add_class('EnergySource', import_from_module='ns.energy', parent=root_module['ns3::Object']) module.add_class('EnergySourceContainer', import_from_module='ns.energy', parent=root_module['ns3::Object']) typehandlers.add_type_alias(u'std::vector< ns3::Ptr< ns3::EnergySource > > const_iterator', u'ns3::EnergySourceContainer::Iterator') 
typehandlers.add_type_alias(u'std::vector< ns3::Ptr< ns3::EnergySource > > const_iterator*', u'ns3::EnergySourceContainer::Iterator*') typehandlers.add_type_alias(u'std::vector< ns3::Ptr< ns3::EnergySource > > const_iterator&', u'ns3::EnergySourceContainer::Iterator&') module.add_class('EnumChecker', import_from_module='ns.core', parent=root_module['ns3::AttributeChecker']) module.add_class('EnumValue', import_from_module='ns.core', parent=root_module['ns3::AttributeValue']) module.add_class('ErlangRandomVariable', import_from_module='ns.core', parent=root_module['ns3::RandomVariableStream']) module.add_class('ErpInformation', parent=root_module['ns3::WifiInformationElement']) module.add_class('ErrorModel', import_from_module='ns.network', parent=root_module['ns3::Object']) module.add_class('ErrorRateModel', parent=root_module['ns3::Object']) module.add_class('Event', parent=root_module['ns3::SimpleRefCount< ns3::Event, ns3::empty, ns3::DefaultDeleter<ns3::Event> >']) module.add_class('EventImpl', import_from_module='ns.core', parent=root_module['ns3::SimpleRefCount< ns3::EventImpl, ns3::empty, ns3::DefaultDeleter<ns3::EventImpl> >']) module.add_class('ExponentialRandomVariable', import_from_module='ns.core', parent=root_module['ns3::RandomVariableStream']) module.add_class('ExtendedCapabilities', parent=root_module['ns3::WifiInformationElement']) module.add_class('ExtendedSupportedRatesIE', parent=root_module['ns3::WifiInformationElement']) module.add_class('FixedRssLossModel', import_from_module='ns.propagation', parent=root_module['ns3::PropagationLossModel']) module.add_class('FrameCaptureModel', parent=root_module['ns3::Object']) module.add_class('FriisPropagationLossModel', import_from_module='ns.propagation', parent=root_module['ns3::PropagationLossModel']) module.add_class('GammaRandomVariable', import_from_module='ns.core', parent=root_module['ns3::RandomVariableStream']) module.add_class('HeCapabilities', parent=root_module['ns3::WifiInformationElement']) 
# Generated pybindgen registrations (do not hand-edit; regenerate instead):
# HE/HT configuration, address attribute checkers/values, propagation models,
# and low-level MAC helper classes.
module.add_class('HeConfiguration', parent=root_module['ns3::Object'])
module.add_class('HeOperation', parent=root_module['ns3::WifiInformationElement'])
module.add_class('HtCapabilities', parent=root_module['ns3::WifiInformationElement'])
module.add_class('HtConfiguration', parent=root_module['ns3::Object'])
module.add_class('HtOperation', parent=root_module['ns3::WifiInformationElement'])
module.add_class('IdealWifiManager', parent=root_module['ns3::WifiRemoteStationManager'])
module.add_class('IntegerValue', import_from_module='ns.core', parent=root_module['ns3::AttributeValue'])
module.add_class('Ipv4AddressChecker', import_from_module='ns.network', parent=root_module['ns3::AttributeChecker'])
module.add_class('Ipv4AddressValue', import_from_module='ns.network', parent=root_module['ns3::AttributeValue'])
module.add_class('Ipv4MaskChecker', import_from_module='ns.network', parent=root_module['ns3::AttributeChecker'])
module.add_class('Ipv4MaskValue', import_from_module='ns.network', parent=root_module['ns3::AttributeValue'])
module.add_class('Ipv6AddressChecker', import_from_module='ns.network', parent=root_module['ns3::AttributeChecker'])
module.add_class('Ipv6AddressValue', import_from_module='ns.network', parent=root_module['ns3::AttributeValue'])
module.add_class('Ipv6PrefixChecker', import_from_module='ns.network', parent=root_module['ns3::AttributeChecker'])
module.add_class('Ipv6PrefixValue', import_from_module='ns.network', parent=root_module['ns3::AttributeValue'])
module.add_class('ItuR1411LosPropagationLossModel', import_from_module='ns.propagation', parent=root_module['ns3::PropagationLossModel'])
module.add_class('ItuR1411NlosOverRooftopPropagationLossModel', import_from_module='ns.propagation', parent=root_module['ns3::PropagationLossModel'])
module.add_class('JakesProcess', import_from_module='ns.propagation', parent=root_module['ns3::Object'])
module.add_class('JakesPropagationLossModel', import_from_module='ns.propagation', parent=root_module['ns3::PropagationLossModel'])
module.add_class('Kun2600MhzPropagationLossModel', import_from_module='ns.propagation', parent=root_module['ns3::PropagationLossModel'])
module.add_class('LinearWifiTxCurrentModel', parent=root_module['ns3::WifiTxCurrentModel'])
module.add_class('ListErrorModel', import_from_module='ns.network', parent=root_module['ns3::ErrorModel'])
module.add_class('LogDistancePropagationLossModel', import_from_module='ns.propagation', parent=root_module['ns3::PropagationLossModel'])
module.add_class('LogNormalRandomVariable', import_from_module='ns.core', parent=root_module['ns3::RandomVariableStream'])
module.add_class('Mac48AddressChecker', import_from_module='ns.network', parent=root_module['ns3::AttributeChecker'])
module.add_class('Mac48AddressValue', import_from_module='ns.network', parent=root_module['ns3::AttributeValue'])
module.add_class('MacLow', parent=root_module['ns3::Object'])
typehandlers.add_type_alias(u'ns3::Callback< void, ns3::Ptr< ns3::Packet >, ns3::WifiMacHeader const *, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', u'ns3::MacLow::MacLowRxCallback')
typehandlers.add_type_alias(u'ns3::Callback< void, ns3::Ptr< ns3::Packet >, ns3::WifiMacHeader const *, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >*', u'ns3::MacLow::MacLowRxCallback*')
typehandlers.add_type_alias(u'ns3::Callback< void, ns3::Ptr< ns3::Packet >, ns3::WifiMacHeader const *, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >&', u'ns3::MacLow::MacLowRxCallback&')
module.add_class('MacRxMiddle', parent=root_module['ns3::SimpleRefCount< ns3::MacRxMiddle, ns3::empty, ns3::DefaultDeleter<ns3::MacRxMiddle> >'])
typehandlers.add_type_alias(u'ns3::Callback< void, ns3::Ptr< ns3::Packet >, ns3::WifiMacHeader const *, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', u'ns3::MacRxMiddle::ForwardUpCallback')
# Generated pybindgen registrations (do not hand-edit; regenerate instead):
# MAC middle layers, MPDU/MSDU aggregators, NetDevice/Node/Packet traced
# callbacks, Queue template instantiations, and spectrum/Yans PHY classes.
typehandlers.add_type_alias(u'ns3::Callback< void, ns3::Ptr< ns3::Packet >, ns3::WifiMacHeader const *, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >*', u'ns3::MacRxMiddle::ForwardUpCallback*')
typehandlers.add_type_alias(u'ns3::Callback< void, ns3::Ptr< ns3::Packet >, ns3::WifiMacHeader const *, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >&', u'ns3::MacRxMiddle::ForwardUpCallback&')
module.add_class('MacTxMiddle', parent=root_module['ns3::SimpleRefCount< ns3::MacTxMiddle, ns3::empty, ns3::DefaultDeleter<ns3::MacTxMiddle> >'])
module.add_class('MatrixPropagationLossModel', import_from_module='ns.propagation', parent=root_module['ns3::PropagationLossModel'])
module.add_class('MgtBeaconHeader', parent=root_module['ns3::MgtProbeResponseHeader'])
module.add_class('MinstrelHtWifiManager', parent=root_module['ns3::WifiRemoteStationManager'])
typehandlers.add_type_alias(u'void ( * ) ( uint64_t const, ns3::Mac48Address const )', u'ns3::MinstrelHtWifiManager::RateChangeTracedCallback')
typehandlers.add_type_alias(u'void ( * ) ( uint64_t const, ns3::Mac48Address const )*', u'ns3::MinstrelHtWifiManager::RateChangeTracedCallback*')
typehandlers.add_type_alias(u'void ( * ) ( uint64_t const, ns3::Mac48Address const )&', u'ns3::MinstrelHtWifiManager::RateChangeTracedCallback&')
module.add_class('MinstrelWifiManager', parent=root_module['ns3::WifiRemoteStationManager'])
module.add_class('MobilityModel', import_from_module='ns.mobility', parent=root_module['ns3::Object'])
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ptr< ns3::MobilityModel const > )', u'ns3::MobilityModel::TracedCallback')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ptr< ns3::MobilityModel const > )*', u'ns3::MobilityModel::TracedCallback*')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ptr< ns3::MobilityModel const > )&', u'ns3::MobilityModel::TracedCallback&')
# A-MPDU aggregation helper and its container type aliases
module.add_class('MpduAggregator', parent=root_module['ns3::Object'])
typehandlers.add_type_alias(u'std::list< std::pair< ns3::Ptr< ns3::Packet >, ns3::AmpduSubframeHeader > >', u'ns3::MpduAggregator::DeaggregatedMpdus')
typehandlers.add_type_alias(u'std::list< std::pair< ns3::Ptr< ns3::Packet >, ns3::AmpduSubframeHeader > >*', u'ns3::MpduAggregator::DeaggregatedMpdus*')
typehandlers.add_type_alias(u'std::list< std::pair< ns3::Ptr< ns3::Packet >, ns3::AmpduSubframeHeader > >&', u'ns3::MpduAggregator::DeaggregatedMpdus&')
typehandlers.add_type_alias(u'std::list< std::pair< ns3::Ptr< ns3::Packet >, ns3::AmpduSubframeHeader > > const_iterator', u'ns3::MpduAggregator::DeaggregatedMpdusCI')
typehandlers.add_type_alias(u'std::list< std::pair< ns3::Ptr< ns3::Packet >, ns3::AmpduSubframeHeader > > const_iterator*', u'ns3::MpduAggregator::DeaggregatedMpdusCI*')
typehandlers.add_type_alias(u'std::list< std::pair< ns3::Ptr< ns3::Packet >, ns3::AmpduSubframeHeader > > const_iterator&', u'ns3::MpduAggregator::DeaggregatedMpdusCI&')
typehandlers.add_type_alias(u'std::map< ns3::AcIndex, ns3::Ptr< ns3::QosTxop > >', u'ns3::MpduAggregator::EdcaQueues')
typehandlers.add_type_alias(u'std::map< ns3::AcIndex, ns3::Ptr< ns3::QosTxop > >*', u'ns3::MpduAggregator::EdcaQueues*')
typehandlers.add_type_alias(u'std::map< ns3::AcIndex, ns3::Ptr< ns3::QosTxop > >&', u'ns3::MpduAggregator::EdcaQueues&')
# A-MSDU aggregation helper and its container type aliases
module.add_class('MsduAggregator', parent=root_module['ns3::Object'])
typehandlers.add_type_alias(u'std::list< std::pair< ns3::Ptr< ns3::Packet >, ns3::AmsduSubframeHeader > >', u'ns3::MsduAggregator::DeaggregatedMsdus')
typehandlers.add_type_alias(u'std::list< std::pair< ns3::Ptr< ns3::Packet >, ns3::AmsduSubframeHeader > >*', u'ns3::MsduAggregator::DeaggregatedMsdus*')
typehandlers.add_type_alias(u'std::list< std::pair< ns3::Ptr< ns3::Packet >, ns3::AmsduSubframeHeader > >&', u'ns3::MsduAggregator::DeaggregatedMsdus&')
typehandlers.add_type_alias(u'std::list< std::pair< ns3::Ptr< ns3::Packet >, ns3::AmsduSubframeHeader > > const_iterator', u'ns3::MsduAggregator::DeaggregatedMsdusCI')
typehandlers.add_type_alias(u'std::list< std::pair< ns3::Ptr< ns3::Packet >, ns3::AmsduSubframeHeader > > const_iterator*', u'ns3::MsduAggregator::DeaggregatedMsdusCI*')
typehandlers.add_type_alias(u'std::list< std::pair< ns3::Ptr< ns3::Packet >, ns3::AmsduSubframeHeader > > const_iterator&', u'ns3::MsduAggregator::DeaggregatedMsdusCI&')
typehandlers.add_type_alias(u'std::map< ns3::AcIndex, ns3::Ptr< ns3::QosTxop > >', u'ns3::MsduAggregator::EdcaQueues')
typehandlers.add_type_alias(u'std::map< ns3::AcIndex, ns3::Ptr< ns3::QosTxop > >*', u'ns3::MsduAggregator::EdcaQueues*')
typehandlers.add_type_alias(u'std::map< ns3::AcIndex, ns3::Ptr< ns3::QosTxop > >&', u'ns3::MsduAggregator::EdcaQueues&')
module.add_class('NakagamiPropagationLossModel', import_from_module='ns.propagation', parent=root_module['ns3::PropagationLossModel'])
# NetDevice with its packet-type enum and receive callback aliases
module.add_class('NetDevice', import_from_module='ns.network', parent=root_module['ns3::Object'])
module.add_enum('PacketType', ['PACKET_HOST', 'NS3_PACKET_HOST', 'PACKET_BROADCAST', 'NS3_PACKET_BROADCAST', 'PACKET_MULTICAST', 'NS3_PACKET_MULTICAST', 'PACKET_OTHERHOST', 'NS3_PACKET_OTHERHOST'], outer_class=root_module['ns3::NetDevice'], import_from_module='ns.network')
typehandlers.add_type_alias(u'void ( * ) ( )', u'ns3::NetDevice::LinkChangeTracedCallback')
typehandlers.add_type_alias(u'void ( * ) ( )*', u'ns3::NetDevice::LinkChangeTracedCallback*')
typehandlers.add_type_alias(u'void ( * ) ( )&', u'ns3::NetDevice::LinkChangeTracedCallback&')
typehandlers.add_type_alias(u'ns3::Callback< bool, ns3::Ptr< ns3::NetDevice >, ns3::Ptr< ns3::Packet const >, unsigned short, ns3::Address const &, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', u'ns3::NetDevice::ReceiveCallback')
typehandlers.add_type_alias(u'ns3::Callback< bool, ns3::Ptr< ns3::NetDevice >, ns3::Ptr< ns3::Packet const >, unsigned short, ns3::Address const &, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >*', u'ns3::NetDevice::ReceiveCallback*')
typehandlers.add_type_alias(u'ns3::Callback< bool, ns3::Ptr< ns3::NetDevice >, ns3::Ptr< ns3::Packet const >, unsigned short, ns3::Address const &, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >&', u'ns3::NetDevice::ReceiveCallback&')
typehandlers.add_type_alias(u'ns3::Callback< bool, ns3::Ptr< ns3::NetDevice >, ns3::Ptr< ns3::Packet const >, unsigned short, ns3::Address const &, ns3::Address const &, ns3::NetDevice::PacketType, ns3::empty, ns3::empty, ns3::empty >', u'ns3::NetDevice::PromiscReceiveCallback')
typehandlers.add_type_alias(u'ns3::Callback< bool, ns3::Ptr< ns3::NetDevice >, ns3::Ptr< ns3::Packet const >, unsigned short, ns3::Address const &, ns3::Address const &, ns3::NetDevice::PacketType, ns3::empty, ns3::empty, ns3::empty >*', u'ns3::NetDevice::PromiscReceiveCallback*')
typehandlers.add_type_alias(u'ns3::Callback< bool, ns3::Ptr< ns3::NetDevice >, ns3::Ptr< ns3::Packet const >, unsigned short, ns3::Address const &, ns3::Address const &, ns3::NetDevice::PacketType, ns3::empty, ns3::empty, ns3::empty >&', u'ns3::NetDevice::PromiscReceiveCallback&')
module.add_class('NistErrorRateModel', parent=root_module['ns3::ErrorRateModel'])
module.add_class('NixVector', import_from_module='ns.network', parent=root_module['ns3::SimpleRefCount< ns3::NixVector, ns3::empty, ns3::DefaultDeleter<ns3::NixVector> >'])
# Node with protocol-handler and device-addition callback aliases
module.add_class('Node', import_from_module='ns.network', parent=root_module['ns3::Object'])
typehandlers.add_type_alias(u'ns3::Callback< void, ns3::Ptr< ns3::NetDevice >, ns3::Ptr< ns3::Packet const >, unsigned short, ns3::Address const &, ns3::Address const &, ns3::NetDevice::PacketType, ns3::empty, ns3::empty, ns3::empty >', u'ns3::Node::ProtocolHandler')
typehandlers.add_type_alias(u'ns3::Callback< void, ns3::Ptr< ns3::NetDevice >, ns3::Ptr< ns3::Packet const >, unsigned short, ns3::Address const &, ns3::Address const &, ns3::NetDevice::PacketType, ns3::empty, ns3::empty, ns3::empty >*', u'ns3::Node::ProtocolHandler*')
typehandlers.add_type_alias(u'ns3::Callback< void, ns3::Ptr< ns3::NetDevice >, ns3::Ptr< ns3::Packet const >, unsigned short, ns3::Address const &, ns3::Address const &, ns3::NetDevice::PacketType, ns3::empty, ns3::empty, ns3::empty >&', u'ns3::Node::ProtocolHandler&')
typehandlers.add_type_alias(u'ns3::Callback< void, ns3::Ptr< ns3::NetDevice >, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', u'ns3::Node::DeviceAdditionListener')
typehandlers.add_type_alias(u'ns3::Callback< void, ns3::Ptr< ns3::NetDevice >, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >*', u'ns3::Node::DeviceAdditionListener*')
typehandlers.add_type_alias(u'ns3::Callback< void, ns3::Ptr< ns3::NetDevice >, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >&', u'ns3::Node::DeviceAdditionListener&')
module.add_class('NormalRandomVariable', import_from_module='ns.core', parent=root_module['ns3::RandomVariableStream'])
module.add_class('ObjectFactoryChecker', import_from_module='ns.core', parent=root_module['ns3::AttributeChecker'])
module.add_class('ObjectFactoryValue', import_from_module='ns.core', parent=root_module['ns3::AttributeValue'])
module.add_class('OkumuraHataPropagationLossModel', import_from_module='ns.propagation', parent=root_module['ns3::PropagationLossModel'])
module.add_class('OnoeWifiManager', parent=root_module['ns3::WifiRemoteStationManager'])
module.add_class('OutputStreamWrapper', import_from_module='ns.network', parent=root_module['ns3::SimpleRefCount< ns3::OutputStreamWrapper, ns3::empty, ns3::DefaultDeleter<ns3::OutputStreamWrapper> >'])
# Packet with its family of traced-callback function-pointer aliases
module.add_class('Packet', import_from_module='ns.network', parent=root_module['ns3::SimpleRefCount< ns3::Packet, ns3::empty, ns3::DefaultDeleter<ns3::Packet> >'])
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ptr< ns3::Packet const > )', u'ns3::Packet::TracedCallback')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ptr< ns3::Packet const > )*', u'ns3::Packet::TracedCallback*')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ptr< ns3::Packet const > )&', u'ns3::Packet::TracedCallback&')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ptr< ns3::Packet const >, ns3::Address const & )', u'ns3::Packet::AddressTracedCallback')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ptr< ns3::Packet const >, ns3::Address const & )*', u'ns3::Packet::AddressTracedCallback*')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ptr< ns3::Packet const >, ns3::Address const & )&', u'ns3::Packet::AddressTracedCallback&')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ptr< ns3::Packet const > const, ns3::Address const &, ns3::Address const & )', u'ns3::Packet::TwoAddressTracedCallback')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ptr< ns3::Packet const > const, ns3::Address const &, ns3::Address const & )*', u'ns3::Packet::TwoAddressTracedCallback*')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ptr< ns3::Packet const > const, ns3::Address const &, ns3::Address const & )&', u'ns3::Packet::TwoAddressTracedCallback&')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ptr< ns3::Packet const >, ns3::Mac48Address )', u'ns3::Packet::Mac48AddressTracedCallback')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ptr< ns3::Packet const >, ns3::Mac48Address )*', u'ns3::Packet::Mac48AddressTracedCallback*')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ptr< ns3::Packet const >, ns3::Mac48Address )&', u'ns3::Packet::Mac48AddressTracedCallback&')
typehandlers.add_type_alias(u'void ( * ) ( uint32_t, uint32_t )', u'ns3::Packet::SizeTracedCallback')
typehandlers.add_type_alias(u'void ( * ) ( uint32_t, uint32_t )*', u'ns3::Packet::SizeTracedCallback*')
typehandlers.add_type_alias(u'void ( * ) ( uint32_t, uint32_t )&', u'ns3::Packet::SizeTracedCallback&')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ptr< ns3::Packet const >, double )', u'ns3::Packet::SinrTracedCallback')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ptr< ns3::Packet const >, double )*', u'ns3::Packet::SinrTracedCallback*')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ptr< ns3::Packet const >, double )&', u'ns3::Packet::SinrTracedCallback&')
module.add_class('ParetoRandomVariable', import_from_module='ns.core', parent=root_module['ns3::RandomVariableStream'])
module.add_class('ParfWifiManager', parent=root_module['ns3::WifiRemoteStationManager'])
module.add_class('QosBlockedDestinations', parent=root_module['ns3::SimpleRefCount< ns3::QosBlockedDestinations, ns3::empty, ns3::DefaultDeleter<ns3::QosBlockedDestinations> >'])
module.add_class('QosTxop', parent=root_module['ns3::Txop'])
# Queue<T> template instantiations with their ItemType aliases/typedefs
module.add_class('Queue', import_from_module='ns.network', template_parameters=['ns3::Packet'], parent=root_module['ns3::QueueBase'])
typehandlers.add_type_alias(u'ns3::Packet', u'ns3::Queue< ns3::Packet > ItemType')
typehandlers.add_type_alias(u'ns3::Packet*', u'ns3::Queue< ns3::Packet > ItemType*')
typehandlers.add_type_alias(u'ns3::Packet&', u'ns3::Queue< ns3::Packet > ItemType&')
module.add_typedef(root_module['ns3::Packet'], 'ItemType')
module.add_class('Queue', import_from_module='ns.network', template_parameters=['ns3::QueueDiscItem'], parent=root_module['ns3::QueueBase'])
typehandlers.add_type_alias(u'ns3::QueueDiscItem', u'ns3::Queue< ns3::QueueDiscItem > ItemType')
typehandlers.add_type_alias(u'ns3::QueueDiscItem*', u'ns3::Queue< ns3::QueueDiscItem > ItemType*')
typehandlers.add_type_alias(u'ns3::QueueDiscItem&', u'ns3::Queue< ns3::QueueDiscItem > ItemType&')
module.add_class('Queue', template_parameters=['ns3::WifiMacQueueItem'], parent=root_module['ns3::QueueBase'])
typehandlers.add_type_alias(u'ns3::WifiMacQueueItem', u'ns3::Queue< ns3::WifiMacQueueItem > ItemType')
typehandlers.add_type_alias(u'ns3::WifiMacQueueItem*', u'ns3::Queue< ns3::WifiMacQueueItem > ItemType*')
typehandlers.add_type_alias(u'ns3::WifiMacQueueItem&', u'ns3::Queue< ns3::WifiMacQueueItem > ItemType&')
module.add_typedef(root_module['ns3::WifiMacQueueItem'], 'ItemType')
module.add_class('QueueItem', import_from_module='ns.network', parent=root_module['ns3::SimpleRefCount< ns3::QueueItem, ns3::empty, ns3::DefaultDeleter<ns3::QueueItem> >'])
module.add_enum('Uint8Values', ['IP_DSFIELD'], outer_class=root_module['ns3::QueueItem'], import_from_module='ns.network')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ptr< ns3::QueueItem const > )', u'ns3::QueueItem::TracedCallback')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ptr< ns3::QueueItem const > )*', u'ns3::QueueItem::TracedCallback*')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ptr< ns3::QueueItem const > )&', u'ns3::QueueItem::TracedCallback&')
module.add_class('QueueSizeChecker', import_from_module='ns.network', parent=root_module['ns3::AttributeChecker'])
module.add_class('QueueSizeValue', import_from_module='ns.network', parent=root_module['ns3::AttributeValue'])
module.add_class('RateErrorModel', import_from_module='ns.network', parent=root_module['ns3::ErrorModel'])
module.add_enum('ErrorUnit', ['ERROR_UNIT_BIT', 'ERROR_UNIT_BYTE', 'ERROR_UNIT_PACKET'], outer_class=root_module['ns3::RateErrorModel'], import_from_module='ns.network')
module.add_class('ReceiveListErrorModel', import_from_module='ns.network', parent=root_module['ns3::ErrorModel'])
module.add_class('RegularWifiMac', parent=root_module['ns3::WifiMac'])
typehandlers.add_type_alias(u'ns3::Callback< void, ns3::Ptr< ns3::Packet >, ns3::Mac48Address, ns3::Mac48Address, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', u'ns3::RegularWifiMac::ForwardUpCallback')
typehandlers.add_type_alias(u'ns3::Callback< void, ns3::Ptr< ns3::Packet >, ns3::Mac48Address, ns3::Mac48Address, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >*', u'ns3::RegularWifiMac::ForwardUpCallback*')
typehandlers.add_type_alias(u'ns3::Callback< void, ns3::Ptr< ns3::Packet >, ns3::Mac48Address, ns3::Mac48Address, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >&', u'ns3::RegularWifiMac::ForwardUpCallback&')
module.add_class('RraaWifiManager', parent=root_module['ns3::WifiRemoteStationManager'])
module.add_class('RrpaaWifiManager', parent=root_module['ns3::WifiRemoteStationManager'])
module.add_class('SimpleFrameCaptureModel', parent=root_module['ns3::FrameCaptureModel'])
# Spectrum channel and its loss/gain/signal traced-callback aliases
module.add_class('SpectrumChannel', import_from_module='ns.spectrum', parent=root_module['ns3::Channel'])
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ptr< ns3::SpectrumPhy const >, ns3::Ptr< ns3::SpectrumPhy const >, double )', u'ns3::SpectrumChannel::LossTracedCallback')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ptr< ns3::SpectrumPhy const >, ns3::Ptr< ns3::SpectrumPhy const >, double )*', u'ns3::SpectrumChannel::LossTracedCallback*')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ptr< ns3::SpectrumPhy const >, ns3::Ptr< ns3::SpectrumPhy const >, double )&', u'ns3::SpectrumChannel::LossTracedCallback&')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ptr< ns3::MobilityModel const >, ns3::Ptr< ns3::MobilityModel const >, double, double, double, double )', u'ns3::SpectrumChannel::GainTracedCallback')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ptr< ns3::MobilityModel const >, ns3::Ptr< ns3::MobilityModel const >, double, double, double, double )*', u'ns3::SpectrumChannel::GainTracedCallback*')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ptr< ns3::MobilityModel const >, ns3::Ptr< ns3::MobilityModel const >, double, double, double, double )&', u'ns3::SpectrumChannel::GainTracedCallback&')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ptr< ns3::SpectrumSignalParameters > )', u'ns3::SpectrumChannel::SignalParametersTracedCallback')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ptr< ns3::SpectrumSignalParameters > )*', u'ns3::SpectrumChannel::SignalParametersTracedCallback*')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ptr< ns3::SpectrumSignalParameters > )&', u'ns3::SpectrumChannel::SignalParametersTracedCallback&')
module.add_class('SpectrumWifiPhy', parent=root_module['ns3::WifiPhy'])
typehandlers.add_type_alias(u'void ( * ) ( bool, uint32_t, double, ns3::Time )', u'ns3::SpectrumWifiPhy::SignalArrivalCallback')
typehandlers.add_type_alias(u'void ( * ) ( bool, uint32_t, double, ns3::Time )*', u'ns3::SpectrumWifiPhy::SignalArrivalCallback*')
typehandlers.add_type_alias(u'void ( * ) ( bool, uint32_t, double, ns3::Time )&', u'ns3::SpectrumWifiPhy::SignalArrivalCallback&')
module.add_class('Ssid', parent=root_module['ns3::WifiInformationElement'])
module.add_class('SsidChecker', parent=root_module['ns3::AttributeChecker'])
module.add_class('SsidValue', parent=root_module['ns3::AttributeValue'])
module.add_class('SupportedRates', parent=root_module['ns3::WifiInformationElement'])
module.add_class('TimeValue', import_from_module='ns.core', parent=root_module['ns3::AttributeValue'])
module.add_class('TypeIdChecker', import_from_module='ns.core', parent=root_module['ns3::AttributeChecker'])
module.add_class('TypeIdValue', import_from_module='ns.core', parent=root_module['ns3::AttributeValue'])
module.add_class('UintegerValue', import_from_module='ns.core', parent=root_module['ns3::AttributeValue'])
module.add_class('Vector2DChecker', import_from_module='ns.core', parent=root_module['ns3::AttributeChecker'])
module.add_class('Vector2DValue', import_from_module='ns.core', parent=root_module['ns3::AttributeValue'])
module.add_class('Vector3DChecker', import_from_module='ns.core', parent=root_module['ns3::AttributeChecker'])
module.add_class('Vector3DValue', import_from_module='ns.core', parent=root_module['ns3::AttributeValue'])
module.add_class('VhtCapabilities', parent=root_module['ns3::WifiInformationElement'])
module.add_class('VhtOperation', parent=root_module['ns3::WifiInformationElement'])
module.add_class('WifiMacQueue', parent=root_module['ns3::Queue< ns3::WifiMacQueueItem >'])
# Generated pybindgen registrations (do not hand-edit; regenerate instead):
# WifiMacQueue drop policy, wifi-device/energy classes, and the first run of
# concrete CallbackImpl template instantiations.
module.add_enum('DropPolicy', ['DROP_NEWEST', 'DROP_OLDEST'], outer_class=root_module['ns3::WifiMacQueue'])
module.add_class('WifiModeChecker', parent=root_module['ns3::AttributeChecker'])
module.add_class('WifiModeValue', parent=root_module['ns3::AttributeValue'])
module.add_class('WifiNetDevice', parent=root_module['ns3::NetDevice'])
module.add_class('WifiRadioEnergyModel', parent=root_module['ns3::DeviceEnergyModel'])
typehandlers.add_type_alias(u'ns3::Callback< void, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', u'ns3::WifiRadioEnergyModel::WifiRadioEnergyDepletionCallback')
typehandlers.add_type_alias(u'ns3::Callback< void, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >*', u'ns3::WifiRadioEnergyModel::WifiRadioEnergyDepletionCallback*')
typehandlers.add_type_alias(u'ns3::Callback< void, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >&', u'ns3::WifiRadioEnergyModel::WifiRadioEnergyDepletionCallback&')
typehandlers.add_type_alias(u'ns3::Callback< void, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', u'ns3::WifiRadioEnergyModel::WifiRadioEnergyRechargedCallback')
typehandlers.add_type_alias(u'ns3::Callback< void, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >*', u'ns3::WifiRadioEnergyModel::WifiRadioEnergyRechargedCallback*')
typehandlers.add_type_alias(u'ns3::Callback< void, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >&', u'ns3::WifiRadioEnergyModel::WifiRadioEnergyRechargedCallback&')
module.add_class('YansErrorRateModel', parent=root_module['ns3::ErrorRateModel'])
module.add_class('YansWifiChannel', parent=root_module['ns3::Channel'])
module.add_class('AddressChecker', import_from_module='ns.network', parent=root_module['ns3::AttributeChecker'])
module.add_class('AddressValue', import_from_module='ns.network', parent=root_module['ns3::AttributeValue'])
module.add_class('AdhocWifiMac', parent=root_module['ns3::RegularWifiMac'])
module.add_class('BinaryErrorModel', import_from_module='ns.network', parent=root_module['ns3::ErrorModel'])
module.add_class('BurstErrorModel', import_from_module='ns.network', parent=root_module['ns3::ErrorModel'])
# CallbackImpl instantiations: one add_class per distinct callback signature
# (template_parameters lists return type, up to nine arguments, padded with
# 'ns3::empty')
module.add_class('CallbackImpl', import_from_module='ns.core', template_parameters=['ns3::ObjectBase *', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'], parent=root_module['ns3::CallbackImplBase'])
module.add_class('CallbackImpl', import_from_module='ns.core', template_parameters=['void', 'bool', 'unsigned int', 'double', 'ns3::Time', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'], parent=root_module['ns3::CallbackImplBase'])
module.add_class('CallbackImpl', import_from_module='ns.core', template_parameters=['void', 'const ns3::WifiMacHeader &', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'], parent=root_module['ns3::CallbackImplBase'])
module.add_class('CallbackImpl', import_from_module='ns.core', template_parameters=['void', 'double', 'double', 'ns3::Mac48Address', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'], parent=root_module['ns3::CallbackImplBase'])
module.add_class('CallbackImpl', import_from_module='ns.core', template_parameters=['void', 'double', 'double', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'], parent=root_module['ns3::CallbackImplBase'])
module.add_class('CallbackImpl', import_from_module='ns.core', template_parameters=['void', 'double', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'], parent=root_module['ns3::CallbackImplBase'])
module.add_class('CallbackImpl', import_from_module='ns.core', template_parameters=['void', 'int', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'], parent=root_module['ns3::CallbackImplBase'])
module.add_class('CallbackImpl', import_from_module='ns.core', template_parameters=['void', 'ns3::DataRate', 'ns3::DataRate', 'ns3::Mac48Address', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'], parent=root_module['ns3::CallbackImplBase'])
module.add_class('CallbackImpl', template_parameters=['void', 'ns3::HePreambleParameters', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'], parent=root_module['ns3::CallbackImplBase'])
module.add_class('CallbackImpl', import_from_module='ns.core', template_parameters=['void', 'ns3::Mac48Address', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'], parent=root_module['ns3::CallbackImplBase'])
module.add_class('CallbackImpl', import_from_module='ns.core', template_parameters=['void', 'ns3::Ptr<const ns3::MobilityModel>', 'ns3::Ptr<const ns3::MobilityModel>', 'double', 'double', 'double', 'double', 'ns3::empty', 'ns3::empty', 'ns3::empty'], parent=root_module['ns3::CallbackImplBase'])
module.add_class('CallbackImpl', import_from_module='ns.core', template_parameters=['void', 'ns3::Ptr<const ns3::MobilityModel>', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'], parent=root_module['ns3::CallbackImplBase'])
module.add_class('CallbackImpl', template_parameters=['void', 'ns3::Ptr<const ns3::Packet>', 'double', 'ns3::WifiMode', 'ns3::WifiPreamble', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'], parent=root_module['ns3::CallbackImplBase'])
# Generated pybindgen registrations (do not hand-edit; regenerate instead):
# remaining CallbackImpl template instantiations for packet/device/time
# callback signatures.
module.add_class('CallbackImpl', import_from_module='ns.core', template_parameters=['void', 'ns3::Ptr<const ns3::Packet>', 'double', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'], parent=root_module['ns3::CallbackImplBase'])
module.add_class('CallbackImpl', import_from_module='ns.core', template_parameters=['void', 'ns3::Ptr<const ns3::Packet>', 'ns3::Mac48Address', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'], parent=root_module['ns3::CallbackImplBase'])
module.add_class('CallbackImpl', template_parameters=['void', 'ns3::Ptr<const ns3::Packet>', 'ns3::WifiMode', 'ns3::WifiPreamble', 'unsigned char', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'], parent=root_module['ns3::CallbackImplBase'])
module.add_class('CallbackImpl', import_from_module='ns.core', template_parameters=['void', 'ns3::Ptr<const ns3::Packet>', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'], parent=root_module['ns3::CallbackImplBase'])
module.add_class('CallbackImpl', template_parameters=['void', 'ns3::Ptr<const ns3::Packet>', 'unsigned short', 'ns3::WifiTxVector', 'ns3::MpduInfo', 'ns3::SignalNoiseDbm', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'], parent=root_module['ns3::CallbackImplBase'])
module.add_class('CallbackImpl', template_parameters=['void', 'ns3::Ptr<const ns3::Packet>', 'unsigned short', 'ns3::WifiTxVector', 'ns3::MpduInfo', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'], parent=root_module['ns3::CallbackImplBase'])
module.add_class('CallbackImpl', import_from_module='ns.core', template_parameters=['void', 'ns3::Ptr<const ns3::QueueDiscItem>', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'], parent=root_module['ns3::CallbackImplBase'])
module.add_class('CallbackImpl', import_from_module='ns.core', template_parameters=['void', 'ns3::Ptr<const ns3::SpectrumPhy>', 'ns3::Ptr<const ns3::SpectrumPhy>', 'double', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'], parent=root_module['ns3::CallbackImplBase'])
module.add_class('CallbackImpl', import_from_module='ns.core', template_parameters=['void', 'ns3::Ptr<const ns3::WifiMacQueueItem>', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'], parent=root_module['ns3::CallbackImplBase'])
module.add_class('CallbackImpl', import_from_module='ns.core', template_parameters=['void', 'ns3::Ptr<ns3::NetDevice>', 'ns3::Ptr<const ns3::Packet>', 'unsigned short', 'const ns3::Address &', 'const ns3::Address &', 'ns3::NetDevice::PacketType', 'ns3::empty', 'ns3::empty', 'ns3::empty'], parent=root_module['ns3::CallbackImplBase'])
module.add_class('CallbackImpl', import_from_module='ns.core', template_parameters=['void', 'ns3::Ptr<ns3::NetDevice>', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'], parent=root_module['ns3::CallbackImplBase'])
module.add_class('CallbackImpl', import_from_module='ns.core', template_parameters=['void', 'ns3::Ptr<ns3::Packet>', 'const ns3::WifiMacHeader *', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'], parent=root_module['ns3::CallbackImplBase'])
module.add_class('CallbackImpl', template_parameters=['void', 'ns3::Ptr<ns3::Packet>', 'double', 'ns3::WifiTxVector', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'], parent=root_module['ns3::CallbackImplBase'])
module.add_class('CallbackImpl', import_from_module='ns.core', template_parameters=['void', 'ns3::Ptr<ns3::Packet>', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'], parent=root_module['ns3::CallbackImplBase'])
module.add_class('CallbackImpl', import_from_module='ns.core', template_parameters=['void', 'ns3::Ptr<ns3::SpectrumSignalParameters>', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'], parent=root_module['ns3::CallbackImplBase'])
module.add_class('CallbackImpl', template_parameters=['void', 'ns3::Time', 'ns3::Mac48Address', 'unsigned char', 'ns3::OriginatorBlockAckAgreement::State', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'], parent=root_module['ns3::CallbackImplBase'])
module.add_class('CallbackImpl', template_parameters=['void', 'ns3::Time', 'ns3::Time', 'WifiPhyState', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'], parent=root_module['ns3::CallbackImplBase'])
module.add_class('CallbackImpl', import_from_module='ns.core', template_parameters=['void', 'ns3::Time', 'ns3::Time', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'], parent=root_module['ns3::CallbackImplBase'])
module.add_class('CallbackImpl', import_from_module='ns.core', template_parameters=['void', 'ns3::Time', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'], parent=root_module['ns3::CallbackImplBase'])
module.add_class('CallbackImpl', import_from_module='ns.core', template_parameters=['void', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'], parent=root_module['ns3::CallbackImplBase'])
module.add_class('CallbackImpl', import_from_module='ns.core', template_parameters=['void', 'unsigned int', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'], parent=root_module['ns3::CallbackImplBase'])
module.add_class('CallbackImpl', import_from_module='ns.core', template_parameters=['void', 'unsigned int', 'unsigned int', 'ns3::empty', 'ns3::empty', 'ns3::empty',
'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'], parent=root_module['ns3::CallbackImplBase']) module.add_class('CallbackImpl', import_from_module='ns.core', template_parameters=['void', 'unsigned long', 'unsigned long', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'], parent=root_module['ns3::CallbackImplBase']) module.add_class('InfrastructureWifiMac', parent=root_module['ns3::RegularWifiMac']) module.add_class('QueueDiscItem', import_from_module='ns.network', parent=root_module['ns3::QueueItem']) module.add_class('StaWifiMac', parent=root_module['ns3::InfrastructureWifiMac']) module.add_class('ApWifiMac', parent=root_module['ns3::InfrastructureWifiMac']) module.add_container('ns3::HtMinstrelRate', 'ns3::HtRateInfo', container_type=u'vector') module.add_container('std::map< std::string, ns3::LogComponent * >', ('std::string', 'ns3::LogComponent *'), container_type=u'map') module.add_container('ns3::TxTime', ('ns3::WifiMode', 'ns3::Time'), container_type=u'map') module.add_container('ns3::WifiModeList', 'ns3::WifiMode', container_type=u'vector') module.add_container('ns3::MinstrelRate', 'ns3::RateInfo', container_type=u'vector') module.add_container('ns3::SampleRate', 'std::vector< unsigned char >', container_type=u'vector') module.add_container('std::vector< double >', 'double', container_type=u'vector') module.add_container('ns3::Bands', 'ns3::BandInfo', container_type=u'vector') module.add_container('std::vector< unsigned short >', 'short unsigned int', container_type=u'vector') module.add_container('std::vector< ns3::WifiRemoteStation * >', 'ns3::WifiRemoteStation *', container_type=u'vector') module.add_container('std::vector< ns3::WifiRemoteStationState * >', 'ns3::WifiRemoteStationState *', container_type=u'vector') module.add_container('std::list< unsigned int >', 'unsigned int', container_type=u'list') module.add_container('std::list< std::pair< ns3::Ptr< ns3::Packet >, ns3::AmpduSubframeHeader > 
>', 'std::pair< ns3::Ptr< ns3::Packet >, ns3::AmpduSubframeHeader >', container_type=u'list') module.add_container('std::map< ns3::AcIndex, ns3::Ptr< ns3::QosTxop > >', ('ns3::AcIndex', 'ns3::Ptr< ns3::QosTxop >'), container_type=u'map') module.add_container('std::vector< ns3::Ptr< ns3::WifiMacQueueItem > >', 'ns3::Ptr< ns3::WifiMacQueueItem >', container_type=u'vector') module.add_container('ns3::MpduAggregator::DeaggregatedMpdus', 'std::pair< ns3::Ptr< ns3::Packet >, ns3::AmpduSubframeHeader >', container_type=u'list') module.add_container('ns3::MpduAggregator::EdcaQueues', ('ns3::AcIndex', 'ns3::Ptr< ns3::QosTxop >'), container_type=u'map') module.add_container('std::list< std::pair< ns3::Ptr< ns3::Packet >, ns3::AmsduSubframeHeader > >', 'std::pair< ns3::Ptr< ns3::Packet >, ns3::AmsduSubframeHeader >', container_type=u'list') module.add_container('ns3::MsduAggregator::DeaggregatedMsdus', 'std::pair< ns3::Ptr< ns3::Packet >, ns3::AmsduSubframeHeader >', container_type=u'list') module.add_container('ns3::MsduAggregator::EdcaQueues', ('ns3::AcIndex', 'ns3::Ptr< ns3::QosTxop >'), container_type=u'map') module.add_container('std::map< ns3::Mac48Address, bool >', ('ns3::Mac48Address', 'bool'), container_type=u'map') typehandlers.add_type_alias(u'void ( * ) ( std::ostream & )', u'ns3::TimePrinter') typehandlers.add_type_alias(u'void ( * ) ( std::ostream & )*', u'ns3::TimePrinter*') typehandlers.add_type_alias(u'void ( * ) ( std::ostream & )&', u'ns3::TimePrinter&') typehandlers.add_type_alias(u'void ( * ) ( std::ostream & )', u'ns3::NodePrinter') typehandlers.add_type_alias(u'void ( * ) ( std::ostream & )*', u'ns3::NodePrinter*') typehandlers.add_type_alias(u'void ( * ) ( std::ostream & )&', u'ns3::NodePrinter&') typehandlers.add_type_alias(u'std::vector< double >', u'ns3::Values') typehandlers.add_type_alias(u'std::vector< double >*', u'ns3::Values*') typehandlers.add_type_alias(u'std::vector< double >&', u'ns3::Values&') typehandlers.add_type_alias(u'std::vector< 
ns3::BandInfo >', u'ns3::Bands') typehandlers.add_type_alias(u'std::vector< ns3::BandInfo >*', u'ns3::Bands*') typehandlers.add_type_alias(u'std::vector< ns3::BandInfo >&', u'ns3::Bands&') typehandlers.add_type_alias(u'uint32_t', u'ns3::SpectrumModelUid_t') typehandlers.add_type_alias(u'uint32_t*', u'ns3::SpectrumModelUid_t*') typehandlers.add_type_alias(u'uint32_t&', u'ns3::SpectrumModelUid_t&') typehandlers.add_type_alias(u'ns3::Vector3D', u'ns3::Vector') typehandlers.add_type_alias(u'ns3::Vector3D*', u'ns3::Vector*') typehandlers.add_type_alias(u'ns3::Vector3D&', u'ns3::Vector&') module.add_typedef(root_module['ns3::Vector3D'], 'Vector') typehandlers.add_type_alias(u'ns3::Vector3DValue', u'ns3::VectorValue') typehandlers.add_type_alias(u'ns3::Vector3DValue*', u'ns3::VectorValue*') typehandlers.add_type_alias(u'ns3::Vector3DValue&', u'ns3::VectorValue&') module.add_typedef(root_module['ns3::Vector3DValue'], 'VectorValue') typehandlers.add_type_alias(u'ns3::Vector3DChecker', u'ns3::VectorChecker') typehandlers.add_type_alias(u'ns3::Vector3DChecker*', u'ns3::VectorChecker*') typehandlers.add_type_alias(u'ns3::Vector3DChecker&', u'ns3::VectorChecker&') module.add_typedef(root_module['ns3::Vector3DChecker'], 'VectorChecker') typehandlers.add_type_alias(u'ns3::Callback< void, ns3::Ptr< ns3::Packet >, double, ns3::WifiTxVector, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', u'ns3::RxOkCallback') typehandlers.add_type_alias(u'ns3::Callback< void, ns3::Ptr< ns3::Packet >, double, ns3::WifiTxVector, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >*', u'ns3::RxOkCallback*') typehandlers.add_type_alias(u'ns3::Callback< void, ns3::Ptr< ns3::Packet >, double, ns3::WifiTxVector, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >&', u'ns3::RxOkCallback&') typehandlers.add_type_alias(u'ns3::Callback< void, ns3::Ptr< ns3::Packet >, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, 
ns3::empty, ns3::empty >', u'ns3::RxErrorCallback') typehandlers.add_type_alias(u'ns3::Callback< void, ns3::Ptr< ns3::Packet >, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >*', u'ns3::RxErrorCallback*') typehandlers.add_type_alias(u'ns3::Callback< void, ns3::Ptr< ns3::Packet >, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >&', u'ns3::RxErrorCallback&') typehandlers.add_type_alias(u'std::vector< std::pair< ns3::WifiRrpaaThresholds, ns3::WifiMode > >', u'ns3::RrpaaThresholdsTable') typehandlers.add_type_alias(u'std::vector< std::pair< ns3::WifiRrpaaThresholds, ns3::WifiMode > >*', u'ns3::RrpaaThresholdsTable*') typehandlers.add_type_alias(u'std::vector< std::pair< ns3::WifiRrpaaThresholds, ns3::WifiMode > >&', u'ns3::RrpaaThresholdsTable&') typehandlers.add_type_alias(u'std::vector< std::vector< double > >', u'ns3::RrpaaProbabilitiesTable') typehandlers.add_type_alias(u'std::vector< std::vector< double > >*', u'ns3::RrpaaProbabilitiesTable*') typehandlers.add_type_alias(u'std::vector< std::vector< double > >&', u'ns3::RrpaaProbabilitiesTable&') typehandlers.add_type_alias(u'std::vector< std::pair< ns3::WifiRraaThresholds, ns3::WifiMode > >', u'ns3::RraaThresholdsTable') typehandlers.add_type_alias(u'std::vector< std::pair< ns3::WifiRraaThresholds, ns3::WifiMode > >*', u'ns3::RraaThresholdsTable*') typehandlers.add_type_alias(u'std::vector< std::pair< ns3::WifiRraaThresholds, ns3::WifiMode > >&', u'ns3::RraaThresholdsTable&') typehandlers.add_type_alias(u'std::map< ns3::WifiMode, ns3::Time >', u'ns3::TxTime') typehandlers.add_type_alias(u'std::map< ns3::WifiMode, ns3::Time >*', u'ns3::TxTime*') typehandlers.add_type_alias(u'std::map< ns3::WifiMode, ns3::Time >&', u'ns3::TxTime&') typehandlers.add_type_alias(u'std::vector< ns3::McsGroup >', u'ns3::MinstrelMcsGroups') typehandlers.add_type_alias(u'std::vector< ns3::McsGroup >*', u'ns3::MinstrelMcsGroups*') 
typehandlers.add_type_alias(u'std::vector< ns3::McsGroup >&', u'ns3::MinstrelMcsGroups&') typehandlers.add_type_alias(u'std::vector< ns3::HtRateInfo >', u'ns3::HtMinstrelRate') typehandlers.add_type_alias(u'std::vector< ns3::HtRateInfo >*', u'ns3::HtMinstrelRate*') typehandlers.add_type_alias(u'std::vector< ns3::HtRateInfo >&', u'ns3::HtMinstrelRate&') typehandlers.add_type_alias(u'std::vector< ns3::GroupInfo >', u'ns3::McsGroupData') typehandlers.add_type_alias(u'std::vector< ns3::GroupInfo >*', u'ns3::McsGroupData*') typehandlers.add_type_alias(u'std::vector< ns3::GroupInfo >&', u'ns3::McsGroupData&') typehandlers.add_type_alias(u'std::vector< ns3::RateInfo >', u'ns3::MinstrelRate') typehandlers.add_type_alias(u'std::vector< ns3::RateInfo >*', u'ns3::MinstrelRate*') typehandlers.add_type_alias(u'std::vector< ns3::RateInfo >&', u'ns3::MinstrelRate&') typehandlers.add_type_alias(u'std::vector< std::vector< unsigned char > >', u'ns3::SampleRate') typehandlers.add_type_alias(u'std::vector< std::vector< unsigned char > >*', u'ns3::SampleRate*') typehandlers.add_type_alias(u'std::vector< std::vector< unsigned char > >&', u'ns3::SampleRate&') typehandlers.add_type_alias(u'uint8_t', u'ns3::WifiInformationElementId') typehandlers.add_type_alias(u'uint8_t*', u'ns3::WifiInformationElementId*') typehandlers.add_type_alias(u'uint8_t&', u'ns3::WifiInformationElementId&') typehandlers.add_type_alias(u'std::vector< ns3::WifiMode >', u'ns3::WifiModeList') typehandlers.add_type_alias(u'std::vector< ns3::WifiMode >*', u'ns3::WifiModeList*') typehandlers.add_type_alias(u'std::vector< ns3::WifiMode >&', u'ns3::WifiModeList&') typehandlers.add_type_alias(u'std::vector< ns3::WifiMode > const_iterator', u'ns3::WifiModeListIterator') typehandlers.add_type_alias(u'std::vector< ns3::WifiMode > const_iterator*', u'ns3::WifiModeListIterator*') typehandlers.add_type_alias(u'std::vector< ns3::WifiMode > const_iterator&', u'ns3::WifiModeListIterator&') nested_module = 
module.add_cpp_namespace('FatalImpl') register_types_ns3_FatalImpl(nested_module) nested_module = module.add_cpp_namespace('Hash') register_types_ns3_Hash(nested_module) nested_module = module.add_cpp_namespace('TracedValueCallback') register_types_ns3_TracedValueCallback(nested_module) nested_module = module.add_cpp_namespace('internal') register_types_ns3_internal(nested_module)
# NOTE(review): the decorator prefixes were lost in extraction — the original
# lines started with bare `.parametrize(...)`, which is a syntax error.
# Restored as `@pytest.mark.parametrize` (the standard nnabla test style);
# confirm against the repository history.
@pytest.mark.parametrize('ctx, func_name', ctxs)
@pytest.mark.parametrize('seed', [313])
@pytest.mark.parametrize('including_pad', [True, False])
@pytest.mark.parametrize('ignore_border', [True, False])
@pytest.mark.parametrize('channel_last', [False, True])
@pytest.mark.parametrize('inshape, kernel, stride, pad', [
    ((3, 4, 6), (2, 2, 2), (2, 1, 1), (1, 0, 1)),
    ((2, 3, 4, 6), (2, 2, 2), (1, 1, 2), (0, 1, 0)),
    ((2, 2, 3, 4, 6), (2, 2, 2), (2, 1, 1), (1, 0, 1)),
    ((2, 2, 2, 3, 4, 6), (2, 2, 2), (1, 1, 2), (0, 1, 0)),
])
def test_average_pooling_3d(seed, inshape, kernel, stride, pad, ignore_border,
                            channel_last, including_pad, ctx, func_name):
    """Check F.average_pooling against the NumPy reference for 3-D kernels
    over several input ranks, paddings and layout options."""
    from nbla_test_utils import function_tester
    if (channel_last and (not func_name.endswith('Cudnn'))):
        pytest.skip('Channel last is only supported in Cudnn so far')
    if channel_last:
        # Convert the logical channel-last shape into the memory layout the
        # transpose helper expects before generating the input.
        t = refs.ChannelLastToFirstTranspose(len(inshape), len(kernel))
        inshape = tuple((inshape[i] for i in t.inv_axes))
    rng = np.random.RandomState(seed)
    inputs = [rng.randn(*inshape).astype(np.float32)]
    func_args = [kernel, stride, ignore_border, pad, channel_last, including_pad]
    function_tester(rng, F.average_pooling, ref_average_pooling, inputs=inputs,
                    func_args=func_args, func_name=func_name, ctx=ctx,
                    atol_f=1e-06, atol_b=0.01)
def register_Ns3LteRrcSapSoundingRsUlConfigCommon_methods(root_module, cls):
    """Register constructors and instance attributes for the generated
    ns3::LteRrcSap::SoundingRsUlConfigCommon binding."""
    # Default constructor plus the compiler-style copy constructor.
    cls.add_constructor([])
    cls.add_constructor([param('ns3::LteRrcSap::SoundingRsUlConfigCommon const &', 'arg0')])
    # Plain mutable data members of the SoundingRS UL common configuration.
    attributes = (
        ('srsBandwidthConfig', 'uint8_t'),
        ('srsSubframeConfig', 'uint8_t'),
        ('type', 'ns3::LteRrcSap::SoundingRsUlConfigCommon::action'),
    )
    for attr_name, attr_type in attributes:
        cls.add_instance_attribute(attr_name, attr_type, is_const=False)
    return
class MelspecInversion(nn.Module):
    """Base module for mel-spectrogram inversion models.

    Stores the mel/STFT configuration and lazily builds the
    ``MelSpectrogram`` transform on first use so it is created on the same
    device as the incoming audio.
    """

    def __init__(self, n_mels: int = 128, sample_rate: int = 24000,
                 win_length: int = 1024, hop_length: int = 256):
        super().__init__()
        self.n_mels = n_mels
        self.sample_rate = sample_rate
        self.win_length = win_length
        self.hop_length = hop_length
        # Built lazily in prepare_melspectrogram (device-dependent).
        self.melspec_layer = None

    # FIX: the original defined from_pretrained(cls, ...) without
    # @classmethod, so calling MelspecInversion.from_pretrained(path) bound
    # `path` to `cls` and crashed on `cls(**config)`.
    @classmethod
    def from_pretrained(cls, pretrained_model_path, **config):
        """Construct a model from ``config`` and load weights from
        ``pretrained_model_path`` (loaded onto CPU)."""
        model = cls(**config)
        model.load_state_dict(torch.load(pretrained_model_path, map_location='cpu'))
        return model

    def prepare_melspectrogram(self, audio):
        """Compute a log-mel spectrogram of ``audio``, scaled to roughly [0, 1].

        The transform is instantiated on first call and moved to the audio's
        device.
        """
        if self.melspec_layer is None:
            self.melspec_layer = MelSpectrogram(
                n_mels=self.n_mels, sample_rate=self.sample_rate,
                n_fft=get_least_power2_above(self.win_length),
                win_length=self.win_length, hop_length=self.hop_length,
                f_min=0.0, f_max=(self.sample_rate / 2.0), center=True,
                power=2.0, mel_scale='slaney', norm='slaney', normalized=True,
                pad_mode='constant')
            self.melspec_layer = self.melspec_layer.to(audio.device)
        melspec = self.melspec_layer(audio)
        # Convert power to dB; the epsilon guards log10 against zeros.
        melspec = 10 * torch.log10(melspec + 1e-10)
        # Shift/scale the usual [-100, 0] dB range into [0, 1], clipping below.
        melspec = torch.clamp((melspec + 100) / 100, min=0.0)
        return melspec
def _transfer(func): def wrapper(manager, *arg): returns = [] for callback in manager.callbacks: if callback.disabled: continue returns.append(getattr(callback, func.__name__)(*arg)) return returns return wrapper
def process_punctuation(inText):
    # Normalize punctuation for VQA-style answer matching: each mark in the
    # module-level `punct` list is either deleted or turned into a space.
    outText = inText
    for p in punct:
        # Delete the mark when it abuts a space in the ORIGINAL text, or when
        # the text matches the digit-comma-digit pattern; otherwise replace it
        # with a space.
        # NOTE(review): membership is tested against `inText`, not the
        # partially processed `outText` — presumably intentional, as it
        # mirrors the reference VQA evaluation code; verify before changing.
        if ((((p + ' ') in inText) or ((' ' + p) in inText)) or (re.search(comma_strip, inText) != None)):
            outText = outText.replace(p, '')
        else:
            outText = outText.replace(p, ' ')
    # NOTE(review): the third positional argument of Pattern.sub is `count`,
    # not `flags`, so re.UNICODE (== 32) caps the number of substitutions at
    # 32 here. This quirk exists in the reference implementation and is kept
    # as-is to stay numerically comparable with published results.
    outText = period_strip.sub('', outText, re.UNICODE)
    return outText
class TestExpandOp(serial.SerializedTestCase):
    """Hypothesis-driven tests for the Caffe2 `Expand` operator.

    NOTE(review): the decorator prefixes on the test methods were lost in
    extraction — the original lines began with bare `(X=..., **hu.gcs)` and
    `(deadline=1000)` expressions, which are syntax errors. Restored as
    hypothesis `@given` / `@settings`; confirm against repository history
    (the first may have been `@serial.given`).
    """

    def _rand_shape(self, X_shape, max_length):
        # Build a random broadcast-compatible target shape: trailing dims
        # either match X or are 1; extra leading dims are random in [1, 3].
        length = np.random.randint(max_length)
        shape = np.ones(length, dtype=np.int64)
        i = (len(X_shape) - 1)
        for j in reversed(range(length)):
            if (i >= 0):
                k = np.random.choice([1, X_shape[i]])
                i -= 1
            else:
                k = (np.random.randint(3) + 1)
            shape[j] = k
        return shape

    def _run_expand_op_test(self, X, shape, gc, dc):
        # Reference: broadcasting against ones of |shape| (abs handles -1,
        # which Expand treats as "keep the input's dimension").
        shape = np.array(shape)
        op = core.CreateOperator('Expand', ['X', 'shape'], ['Y'])

        def ref(X, shape):
            return ((X * np.ones(abs(shape))),)

        self.assertReferenceChecks(gc, op, [X, shape], ref)
        self.assertDeviceChecks(dc, op, [X, shape], [0])
        self.assertGradientChecks(gc, op, [X, shape], 0, [0])

    @given(X=hu.tensor(max_dim=5, dtype=np.float32), **hu.gcs)
    def test_expand_rand_shape(self, X, gc, dc):
        shape = self._rand_shape(X.shape, 5)
        self._run_expand_op_test(X, shape, gc, dc)

    @given(X=st.sampled_from([np.ones([1, 3, 1]),
                              np.ones([3, 1, 3]),
                              np.ones([1, 3])]), **hu.gcs)
    def test_expand_nonrand_shape1(self, X, gc, dc):
        self._run_expand_op_test(X, [3, 1, 3], gc, dc)
        self._run_expand_op_test(X, [3, (- 1), 3], gc, dc)

    @given(X=st.sampled_from([np.ones([4, 4, 2, 1]),
                              np.ones([1, 4, 1, 2]),
                              np.ones([4, 1, 2])]), **hu.gcs)
    @settings(deadline=1000)
    def test_expand_nonrand_shape2(self, X, gc, dc):
        self._run_expand_op_test(X, [4, 1, 2, 2], gc, dc)
        self._run_expand_op_test(X, [4, (- 1), 2, 2], gc, dc)
def main():
    """Run the MLS-MPM-99 demo: initialize particles, then simulate and draw
    frames until the window is closed or ESC is pressed."""
    initialize()
    gui = ti.GUI('Taichi MLS-MPM-99', res=512, background_color=1126209)  # 0x112F41
    while (not gui.get_event(ti.GUI.ESCAPE, ti.GUI.EXIT)):
        # Advance the simulation by 2 ms of simulated time per rendered frame.
        for s in range(int((0.002 // dt))):
            substep()
        # FIX: the original palette literal was `[427399, , ]` — a syntax
        # error with two entries lost in extraction. Restored from the
        # upstream taichi mpm99 example (427399 == 0x068587 matches the
        # surviving first entry); confirm against the original file.
        gui.circles(x.to_numpy(), radius=1.5,
                    palette=[0x068587, 0xED553B, 0xEEEEF0],
                    palette_indices=material)
        gui.show()
def render_model(verts, faces, w, h, cam, near=0.5, far=25, img=None):
    """Render a mesh with the given camera.

    When ``img`` is provided it is used as the backdrop (normalized to
    [0, 1] if it looks like 0-255 data); otherwise the rendering gets an
    alpha channel attached.
    """
    renderer = _create_renderer(w=w, h=h, near=near, far=far,
                                rt=cam.rt, t=cam.t, f=cam.f, c=cam.c)
    if img is None:
        # No backdrop: render over the default background and add alpha.
        return get_alpha(simple_renderer(renderer, verts, faces))
    # Scale a 0-255 image down to [0, 1] before compositing over it.
    renderer.background_image = img / 255.0 if img.max() > 1 else img
    return simple_renderer(renderer, verts, faces)
class ConstantPool():
    """Pool of seeded constants, bucketed by their concrete type."""

    def __init__(self):
        # One ordered, deduplicated bucket per supported constant type.
        self._constants: dict[(type[ConstantTypes], OrderedSet[ConstantTypes])] = {
            bucket_type: OrderedSet()
            for bucket_type in typing.get_args(ConstantTypes)
        }

    def add_constant(self, constant: ConstantTypes) -> None:
        """Insert ``constant`` into the bucket matching its runtime type."""
        self._constants[type(constant)].add(constant)

    def remove_constant(self, value: ConstantTypes) -> None:
        """Drop ``value`` from its bucket; the bucket must exist."""
        bucket = self._constants.get(type(value))
        assert bucket is not None
        bucket.discard(value)

    def has_constant_for(self, tp_: type[T]) -> bool:
        """True when at least one constant of type ``tp_`` is pooled."""
        return bool(self._constants[tp_])

    def get_constant_for(self, tp_: type[T]) -> T:
        """Return a randomly chosen pooled constant of type ``tp_``."""
        return typing.cast(T, randomness.choice(tuple(self._constants[tp_])))

    def get_all_constants_for(self, tp_: type[T]) -> OrderedSet[T]:
        """Return the whole bucket of constants of type ``tp_``."""
        return typing.cast(OrderedSet[T], self._constants[tp_])

    def __len__(self):
        """Total number of pooled constants across every bucket."""
        return sum(len(bucket) for bucket in self._constants.values())
def get_model_inference(parameters: Params, weights_path: str=None):
    """Build the CRNN inference graph as a Keras Model.

    Inputs are the image batch, a per-sample sequence length and the source
    filename; the latter two are passed through unchanged so they stay paired
    with the network output. Weights are loaded when ``weights_path`` is set.
    """
    h, w = parameters.input_shape
    channels = parameters.input_channels
    input_images = Input(shape=(h, w, channels), name='input_images')
    input_seq_len = Input(shape=[1], dtype=tf.int32, name='input_seq_length')
    filename_images = Input(shape=[1], dtype=tf.string, name='filename_images')
    net_output = get_crnn_output(input_images, parameters)
    # Identity ops forward lengths/filenames straight to the model outputs.
    output_seq_len = tf.identity(input_seq_len)
    filenames = tf.identity(filename_images)
    model = Model(inputs=[input_images, input_seq_len, filename_images],
                  outputs=[net_output, output_seq_len, filenames])
    if weights_path:
        model.load_weights(weights_path)
    return model
class BleuScorer(object):
    """Incremental BLEU scorer (pycocoevalcap style).

    Accumulates cooked (test, refs) pairs via `cook_append` / `__iadd__` and
    computes corpus BLEU-1..n in `compute_score`.  `cook_refs` / `cook_test`
    are module-level helpers defined elsewhere in this file.
    """
    # Fixed attribute set; note _ratio is declared here but never assigned
    # anywhere in this class (see NOTE on ratio()).
    __slots__ = ('n', 'crefs', 'ctest', '_score', '_ratio', '_testlen', '_reflen', 'special_reflen')

    def copy(self):
        # Shallow copy of the cooked data with the cached score invalidated.
        # NOTE(review): special_reflen is NOT carried over to the copy —
        # looks like an upstream quirk; confirm before relying on copy().
        new = BleuScorer(n=self.n)
        new.ctest = copy.copy(self.ctest)
        new.crefs = copy.copy(self.crefs)
        new._score = None
        return new

    def __init__(self, test=None, refs=None, n=4, special_reflen=None):
        # n: maximum n-gram order; special_reflen overrides the per-sentence
        # reference-length choice in compute_score when set.
        self.n = n
        self.crefs = []
        self.ctest = []
        self.cook_append(test, refs)
        self.special_reflen = special_reflen

    def cook_append(self, test, refs):
        # Append one cooked (test, refs) pair; a None test is recorded as a
        # placeholder so crefs/ctest stay aligned. Invalidates the cache.
        if (refs is not None):
            self.crefs.append(cook_refs(refs))
            if (test is not None):
                cooked_test = cook_test(test, self.crefs[(- 1)][0], self.crefs[(- 1)][1])
                self.ctest.append(cooked_test)
            else:
                self.ctest.append(None)
        self._score = None

    def ratio(self, option=None):
        # NOTE(review): compute_score never assigns self._ratio, so this
        # raises AttributeError — an upstream bug kept as-is here.
        self.compute_score(option=option)
        return self._ratio

    def score_ratio(self, option=None):
        # NOTE(review): self.fscore is not defined on this class — dead /
        # broken upstream API kept for compatibility.
        return (self.fscore(option=option), self.ratio(option=option))

    def score_ratio_str(self, option=None):
        return ('%.4f (%.2f)' % self.score_ratio(option))

    def reflen(self, option=None):
        self.compute_score(option=option)
        return self._reflen

    def testlen(self, option=None):
        self.compute_score(option=option)
        return self._testlen

    def retest(self, new_test):
        # Replace the test sentences while keeping the cooked references.
        if (type(new_test) is str):
            new_test = [new_test]
        assert (len(new_test) == len(self.crefs)), new_test
        self.ctest = []
        for (t, rs) in zip(new_test, self.crefs):
            self.ctest.append(cook_test(t, rs[0], rs[1]))
        self._score = None
        return self

    def rescore(self, new_test):
        """Replace test(s) with new_test(s), and returns the new score."""
        return self.retest(new_test).compute_score()

    def size(self):
        assert (len(self.crefs) == len(self.ctest)), ('refs/test mismatch! %d<>%d' % (len(self.crefs), len(self.ctest)))
        return len(self.crefs)

    def __iadd__(self, other):
        # Accepts either a raw (test, refs) tuple or another BleuScorer of
        # the same n-gram order; merging invalidates the cached score.
        if (type(other) is tuple):
            self.cook_append(other[0], other[1])
        else:
            assert self.compatible(other), 'incompatible BLEUs.'
            self.ctest.extend(other.ctest)
            self.crefs.extend(other.crefs)
            self._score = None
        return self

    def compatible(self, other):
        return (isinstance(other, BleuScorer) and (self.n == other.n))

    def single_reflen(self, option='average'):
        return self._single_reflen(self.crefs[0][0], option)

    def _single_reflen(self, reflens, option=None, testlen=None):
        # Pick one reference length per the chosen policy; 'closest' needs
        # testlen to break ties toward the nearest reference.
        if (option == 'shortest'):
            reflen = min(reflens)
        elif (option == 'average'):
            reflen = (float(sum(reflens)) / len(reflens))
        elif (option == 'closest'):
            reflen = min(((abs((l - testlen)), l) for l in reflens))[1]
        else:
            assert False, ('unsupported reflen option %s' % option)
        return reflen

    def recompute_score(self, option=None, verbose=0):
        # Force a fresh computation by dropping the cache first.
        self._score = None
        return self.compute_score(option, verbose)

    def compute_score(self, option=None, verbose=0):
        # Corpus-level BLEU with per-sentence scores collected in bleu_list.
        # NOTE(review): the cached path below returns only self._score (a
        # list), while the full path returns (self._score, bleu_list) — an
        # upstream inconsistency preserved here; callers must handle both.
        n = self.n
        small = 1e-09
        tiny = 1e-15
        bleu_list = [[] for _ in range(n)]
        if (self._score is not None):
            return self._score
        if (option is None):
            option = ('average' if (len(self.crefs) == 1) else 'closest')
        self._testlen = 0
        self._reflen = 0
        totalcomps = {'testlen': 0, 'reflen': 0, 'guess': ([0] * n), 'correct': ([0] * n)}
        for comps in self.ctest:
            testlen = comps['testlen']
            self._testlen += testlen
            if (self.special_reflen is None):
                reflen = self._single_reflen(comps['reflen'], option, testlen)
            else:
                reflen = self.special_reflen
            self._reflen += reflen
            # Accumulate corpus-level n-gram counts.
            for key in ['guess', 'correct']:
                for k in range(n):
                    totalcomps[key][k] += comps[key][k]
            # Per-sentence geometric-mean BLEU-k with smoothing constants.
            bleu = 1.0
            for k in range(n):
                bleu *= ((float(comps['correct'][k]) + tiny) / (float(comps['guess'][k]) + small))
                bleu_list[k].append((bleu ** (1.0 / (k + 1))))
            # Per-sentence brevity penalty.
            ratio = ((testlen + tiny) / (reflen + small))
            if (ratio < 1):
                for k in range(n):
                    bleu_list[k][(- 1)] *= math.exp((1 - (1 / ratio)))
            if (verbose > 1):
                print(comps, reflen)
        totalcomps['reflen'] = self._reflen
        totalcomps['testlen'] = self._testlen
        # Corpus-level BLEU-1..n from the accumulated counts.
        bleus = []
        bleu = 1.0
        for k in range(n):
            bleu *= (float((totalcomps['correct'][k] + tiny)) / (totalcomps['guess'][k] + small))
            bleus.append((bleu ** (1.0 / (k + 1))))
        # Corpus-level brevity penalty.
        ratio = ((self._testlen + tiny) / (self._reflen + small))
        if (ratio < 1):
            for k in range(n):
                bleus[k] *= math.exp((1 - (1 / ratio)))
        if (verbose > 0):
            print(totalcomps)
            print('ratio:', ratio)
        self._score = bleus
        return (self._score, bleu_list)
def main(_):
    """Average the variables of several TF checkpoints into one checkpoint.

    Checkpoints come either from the explicit comma-separated
    FLAGS.checkpoints list (each optionally prefixed with FLAGS.prefix) or
    from the last FLAGS.num_last_checkpoints entries of the checkpoint state
    next to FLAGS.prefix.  Float variables are arithmetically averaged;
    integer variables (e.g. global_step) are taken verbatim from the last
    checkpoint read.  The averaged model is saved to FLAGS.output_path.
    """
    _logger = logging.getLogger('tensorflow')
    _logger.setLevel('INFO')
    tf_compat.v1.logging.info(('%s startup. TF version: %s' % (__file__, tf.__version__)))
    if FLAGS.checkpoints:
        # Explicit list: split, strip, drop empties, optionally add prefix.
        checkpoints = [c.strip() for c in FLAGS.checkpoints.split(',')]
        checkpoints = [c for c in checkpoints if c]
        if (not checkpoints):
            raise ValueError('No checkpoints provided for averaging.')
        if FLAGS.prefix:
            checkpoints = [(FLAGS.prefix + c) for c in checkpoints]
    else:
        # Implicit list: take the newest N from the checkpoint state file.
        assert (FLAGS.num_last_checkpoints >= 1), 'Must average at least one model'
        assert FLAGS.prefix, 'Prefix must be provided when averaging last N checkpoints'
        checkpoint_state = tf.train.get_checkpoint_state(os.path.dirname(FLAGS.prefix))
        checkpoints = checkpoint_state.all_model_checkpoint_paths[(- FLAGS.num_last_checkpoints):]
    checkpoints = [c for c in checkpoints if checkpoint_exists(c)]
    if (not checkpoints):
        if FLAGS.checkpoints:
            raise ValueError(('None of the provided checkpoints exist. %s' % FLAGS.checkpoints))
        else:
            raise ValueError(('Could not find checkpoints at %s' % os.path.dirname(FLAGS.prefix)))
    tf_compat.v1.logging.info('Reading variables and averaging checkpoints:')
    for c in checkpoints:
        tf_compat.v1.logging.info('%s ', c)
    # Variable names/shapes are taken from the first checkpoint; all
    # checkpoints are assumed to contain the same variable set.
    var_list = tf.train.list_variables(checkpoints[0])
    (var_values, var_dtypes) = ({}, {})
    for (name, shape) in var_list:
        var_values[name] = numpy.zeros(shape)
    for checkpoint in checkpoints:
        reader = tf.train.load_checkpoint(checkpoint)
        for name in var_values:
            tensor = reader.get_tensor(name)
            if (not isinstance(tensor, numpy.ndarray)):
                tensor = numpy.array(tensor)
            assert isinstance(tensor, numpy.ndarray)
            var_dtypes[name] = tensor.dtype
            # FIX: tensor.dtype is a numpy.dtype instance, so the original
            # `isinstance(tensor.dtype, numpy.integer)` was always False and
            # integer variables (e.g. global_step) were summed and later
            # divided. issubdtype performs the intended dtype-kind check.
            if numpy.issubdtype(tensor.dtype, numpy.integer):
                var_values[name] = tensor
            else:
                var_values[name] += tensor
        tf_compat.v1.logging.info('Read from checkpoint %s', checkpoint)
    # Divide the accumulated sums to obtain the mean (floats only).
    for name in var_values:
        if (not numpy.issubdtype(var_dtypes[name], numpy.integer)):
            var_values[name] /= len(checkpoints)
    # Rebuild the variables in a fresh graph, assign the averaged values via
    # placeholders (avoids baking large constants into the graph), and save.
    with tf_compat.v1.variable_scope(tf_compat.v1.get_variable_scope(), reuse=tf_compat.v1.AUTO_REUSE):
        tf_vars = [tf_compat.v1.get_variable(v, shape=var_values[v].shape, dtype=var_dtypes[v]) for v in var_values]
    placeholders = [tf_compat.v1.placeholder(v.dtype, shape=v.shape) for v in tf_vars]
    assign_ops = [tf_compat.v1.assign(v, p) for (v, p) in zip(tf_vars, placeholders)]
    saver = tf_compat.v1.train.Saver(tf_compat.v1.all_variables())
    with tf_compat.v1.Session() as sess:
        sess.run(tf_compat.v1.global_variables_initializer())
        for (p, assign_op, (name, value)) in zip(placeholders, assign_ops, var_values.items()):
            sess.run(assign_op, {p: value})
        saver.save(sess, FLAGS.output_path)
    tf_compat.v1.logging.info('Averaged checkpoints saved in %s', FLAGS.output_path)
def test_leverage_bagging_me():
    """Prequential regression test for LeveragingBaggingClassifier with the
    'leveraging_bag_me' scheme on a drifting stream (all seeds fixed)."""
    stream = ConceptDriftStream(position=500, width=100, random_state=112)
    nb = NaiveBayes()
    learner = LeveragingBaggingClassifier(base_estimator=nb, n_estimators=5,
                                          random_state=112,
                                          leverage_algorithm='leveraging_bag_me')
    # FIX: np.int was deprecated in NumPy 1.20 and removed in 1.24; the
    # builtin `int` is the documented replacement and is exactly what the
    # alias resolved to, so expected values are unchanged.
    y_expected = np.asarray([0, 0, 0, 1, 0, 1, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0,
                             1, 1, 0, 0, 1, 0, 0, 0, 1, 1, 0, 1, 0, 1, 0, 0,
                             0, 1, 1, 0, 1, 1, 1, 0, 1, 0, 1, 0, 0, 1, 1, 0,
                             1, 0], dtype=int)
    run_prequential_supervised(stream, learner, max_samples=2000, n_wait=40,
                               y_expected=y_expected)
def pesq_eval(predict, target):
    """Wide-band PESQ at 16 kHz between tensors, rescaled from the raw
    [-0.5, 4.5] PESQ range into [0, 1]."""
    raw_score = pesq(fs=16000, ref=target.numpy(), deg=predict.numpy(), mode='wb')
    return (raw_score + 0.5) / 5
class AttentionWeightComputation(Function):
    """Autograd Function computing query-key attention weights through the
    `attention_cuda` extension for variable-length batched inputs.

    NOTE(review): forward/backward are not decorated with @staticmethod,
    which modern torch.autograd.Function requires when invoked via .apply —
    presumably targets an older PyTorch; confirm before changing.
    """

    def forward(ctx, query_batch_cnt: torch.Tensor, key_batch_cnt: torch.Tensor, index_pair_batch: torch.Tensor, index_pair: torch.Tensor, query_features: torch.Tensor, key_features: torch.Tensor):
        # The CUDA kernel indexes raw storage, so every input must be
        # contiguous.
        assert query_batch_cnt.is_contiguous()
        assert key_batch_cnt.is_contiguous()
        assert index_pair_batch.is_contiguous()
        assert index_pair.is_contiguous()
        assert query_features.is_contiguous()
        assert key_features.is_contiguous()
        # b: number of batch segments; index_pair maps each query to its
        # local neighborhood of keys; key_features is (total_keys, heads, dim).
        b = query_batch_cnt.shape[0]
        (total_query_num, local_size) = index_pair.size()
        (total_key_num, nhead, hdim) = key_features.size()
        assert (total_query_num == query_features.shape[0])
        # Output: one attention weight per (query, local key, head).
        output = torch.cuda.FloatTensor(total_query_num, local_size, nhead).zero_()
        attention_cuda.attention_weight_computation_wrapper(b, total_query_num, local_size, total_key_num, nhead, hdim, query_batch_cnt, key_batch_cnt, index_pair_batch, index_pair, query_features, key_features, output)
        # Stash everything backward() needs; tensors are saved directly on
        # ctx rather than via save_for_backward.
        ctx.for_backwards = (b, total_query_num, local_size, total_key_num, nhead, hdim, query_batch_cnt, key_batch_cnt, index_pair_batch, index_pair, query_features, key_features)
        return output

    def backward(ctx, grad_out: torch.Tensor):
        (b, total_query_num, local_size, total_key_num, nhead, hdim, query_batch_cnt, key_batch_cnt, index_pair_batch, index_pair, query_features, key_features) = ctx.for_backwards
        # Gradients only flow to the feature tensors; the index/count inputs
        # are integral and receive None below.
        grad_query_features = Variable(torch.cuda.FloatTensor(total_query_num, nhead, hdim).zero_())
        grad_key_features = Variable(torch.cuda.FloatTensor(total_key_num, nhead, hdim).zero_())
        grad_out_data = grad_out.data.contiguous()
        attention_cuda.attention_weight_computation_grad_wrapper(b, total_query_num, local_size, total_key_num, nhead, hdim, query_batch_cnt, key_batch_cnt, index_pair_batch, index_pair, query_features, key_features, grad_out_data, grad_query_features.data, grad_key_features.data)
        return (None, None, None, None, grad_query_features, grad_key_features)
class DiscreteBCQImpl(DoubleDQNImpl):
    """Discrete Batch-Constrained Q-learning on top of Double DQN.

    Adds an imitation (behavior-cloning) head whose log-probabilities gate
    which actions the greedy policy may pick, constraining action selection
    to those the behavior policy plausibly took.
    """
    # Modules bundle including the imitator network.
    _modules: DiscreteBCQModules
    # Threshold tau: actions with prob ratio above tau (vs the argmax action)
    # remain eligible for greedy selection.
    _action_flexibility: float
    # Weight of the regularization term inside the imitation loss.
    _beta: float

    def __init__(self, observation_shape: Shape, action_size: int, modules: DiscreteBCQModules, q_func_forwarder: DiscreteEnsembleQFunctionForwarder, targ_q_func_forwarder: DiscreteEnsembleQFunctionForwarder, target_update_interval: int, gamma: float, action_flexibility: float, beta: float, device: str):
        super().__init__(observation_shape=observation_shape, action_size=action_size, modules=modules, q_func_forwarder=q_func_forwarder, targ_q_func_forwarder=targ_q_func_forwarder, target_update_interval=target_update_interval, gamma=gamma, device=device)
        self._action_flexibility = action_flexibility
        self._beta = beta

    def compute_loss(self, batch: TorchMiniBatch, q_tpn: torch.Tensor) -> DiscreteBCQLoss:
        """Total loss = Double-DQN TD loss + discrete imitation loss."""
        td_loss = super().compute_loss(batch, q_tpn).loss
        imitator_loss = compute_discrete_imitation_loss(policy=self._modules.imitator, x=batch.observations, action=batch.actions.long(), beta=self._beta)
        loss = (td_loss + imitator_loss)
        return DiscreteBCQLoss(loss=loss, td_loss=td_loss, imitator_loss=imitator_loss)

    def inner_predict_best_action(self, x: TorchObservation) -> torch.Tensor:
        """Greedy action restricted to behavior-plausible actions.

        Masks out actions whose imitator probability ratio (relative to the
        most likely action) falls below `_action_flexibility`, then takes the
        argmax of the (shifted-positive) Q-values among the survivors.
        """
        dist = self._modules.imitator(x)
        log_probs = F.log_softmax(dist.logits, dim=1)
        # log(p_a / p_max) > log(tau)  <=>  p_a / p_max > tau
        ratio = (log_probs - log_probs.max(dim=1, keepdim=True).values)
        mask = (ratio > math.log(self._action_flexibility)).float()
        value = self._q_func_forwarder.compute_expected_q(x)
        # Shift values to be strictly positive so multiplying by the 0/1 mask
        # cannot make a masked action the argmax.
        normalized_value = ((value - value.min(dim=1, keepdim=True).values) + 1e-05)
        action = (normalized_value * cast(torch.Tensor, mask)).argmax(dim=1)
        return action
class RemoteFolderDataset(FolderDataset, RemoteDataset):
    """A folder-backed dataset that can also be downloaded from a remote
    source.

    Combines RemoteDataset (download/extract/cleanup) with FolderDataset
    (on-disk parsing/conversion).  The two bases are initialized explicitly,
    remote side first, so the files exist before the folder scan runs.
    """

    def __init__(self, root: Union[(str, Path)], download_and_extract: bool=False, overwrite: bool=False, cleanup: bool=False, convert: bool=False, kind: str='json', n_jobs: int=1, ignore_exceptions: bool=True, use_converted: bool=None, verbose: bool=True):
        # Download/extract first (when requested) so FolderDataset sees the
        # extracted files; explicit base-class calls bypass cooperative MRO
        # dispatch on purpose.
        RemoteDataset.__init__(self, root, download_and_extract=download_and_extract, overwrite=overwrite, cleanup=cleanup, verbose=verbose)
        FolderDataset.__init__(self, root, convert=convert, kind=kind, n_jobs=n_jobs, ignore_exceptions=ignore_exceptions, use_converted=use_converted)

    def read(self, filename: str) -> Music:
        """Parse one file into a Music object; subclasses must override."""
        raise NotImplementedError
def main():
    """Tune stage-2 hyperparameters for the selected regressor and save them as JSON."""
    args = get_arg()

    # Make every stochastic component reproducible.
    random.seed(RAND_SEED)
    np.random.seed(RAND_SEED)
    torch.manual_seed(RAND_SEED)

    data = load_stage2_train_all_data(datatrack=args.datatrack, feat_type=args.feat_type)

    # Dispatch table over supported regressors; unimplemented ones raise explicitly.
    factories = {
        'ridge': (lambda: Ridge()),
        'linear_svr': (lambda: LinearSVR(stage='stage2')),
        'kernel_svr': (lambda: KernelSVR(stage='stage2')),
        'lightgbm': (lambda: LightGBM()),
    }
    if args.method in ('rf', 'svgp'):
        raise NotImplementedError()
    if args.method not in factories:
        raise RuntimeError('Not supported method: "{}"'.format(args.method))
    model = factories[args.method]()

    best_params = model.optimize_hp(data['X'], data['y'])
    logger.info(best_params)

    out_dir = Path('../out/ensemble-multidomain/opt_hp_stage2') / args.datatrack / f'{args.method}-{args.feat_type}'
    os.makedirs(out_dir, exist_ok=True)
    with open(out_dir / 'params.json', encoding='utf-8', mode='w') as f:
        json.dump(best_params, f, ensure_ascii=False, indent=2)
class NanDetector():
    """Context manager that registers forward/backward hooks on every submodule
    of `model` and logs a warning the first time a NaN or Inf appears in a
    module's output (forward) or gradient (backward)."""

    def __init__(self, model, forward=True, backward=True):
        self.bhooks = []
        self.fhooks = []
        self.forward = forward
        self.backward = backward
        self.reset()
        for (name, mod) in model.named_modules():
            # Stored under the name-mangled attribute _NanDetector__module_name;
            # read back via the same mangled access in _apply below.
            mod.__module_name = name
            self.add_hooks(mod)

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_value, exc_traceback):
        self.close()

    def add_hooks(self, module):
        # NOTE(review): register_backward_hook is deprecated in recent PyTorch
        # in favor of register_full_backward_hook -- confirm the target version.
        if self.forward:
            self.fhooks.append(module.register_forward_hook(self.fhook_fn))
        if self.backward:
            self.bhooks.append(module.register_backward_hook(self.bhook_fn))

    def reset(self):
        # Emit at most one warning per direction until reset.
        self.has_printed_f = False
        self.has_printed_b = False

    def _detect(self, tensor, name, backward):
        """Return a description string if `tensor` contains NaN/Inf, else None."""
        err = None
        # Tensors with fewer than two elements (e.g. scalar losses) are skipped.
        if (tensor.numel() >= 2):
            with torch.no_grad():
                if torch.isnan(tensor).any():
                    err = 'NaN'
                elif torch.isinf(tensor).any():
                    err = 'Inf'
        if (err is not None):
            err = f"{err} detected in output of {name}, shape: {tensor.shape}, {('backward' if backward else 'forward')}"
        return err

    def _apply(self, module, inp, x, backward):
        """Recursively scan `x` (tensor, dict, list or tuple) for NaN/Inf and log."""
        if torch.is_tensor(x):
            if (isinstance(inp, tuple) and (len(inp) > 0)):
                inp = inp[0]
            err = self._detect(x, module.__module_name, backward)
            if (err is not None):
                if (torch.is_tensor(inp) and (not backward)):
                    # Include the input range to help trace where the NaN originated.
                    err += f' input max: {inp.max().item()}, input min: {inp.min().item()}'
                has_printed_attr = ('has_printed_b' if backward else 'has_printed_f')
                logger.warning(err)
                setattr(self, has_printed_attr, True)
        elif isinstance(x, dict):
            for v in x.values():
                self._apply(module, inp, v, backward)
        elif (isinstance(x, list) or isinstance(x, tuple)):
            for v in x:
                self._apply(module, inp, v, backward)

    def fhook_fn(self, module, inp, output):
        if (not self.has_printed_f):
            self._apply(module, inp, output, backward=False)

    def bhook_fn(self, module, inp, output):
        if (not self.has_printed_b):
            self._apply(module, inp, output, backward=True)

    def close(self):
        """Remove all registered hooks."""
        for hook in (self.fhooks + self.bhooks):
            hook.remove()
def msvc_runtime_library():
    """Return the MSVC runtime library name for the detected compiler, or None."""
    ver = msvc_runtime_major()
    if not ver:
        return None
    # MSVC 14.0 (VS2015) renamed the runtime from msvcrNNN to vcruntimeNNN.
    prefix = 'msvcr' if ver < 140 else 'vcruntime'
    return '%s%i' % (prefix, ver)
def _randomly_negate_tensor(tensor):
    """With probability 0.5 return `tensor` unchanged; otherwise return its negation."""
    coin = tf.random.uniform([])
    # floor(u + 0.5) is 1 for u >= 0.5 and 0 otherwise -> a fair boolean coin.
    keep_sign = tf.cast(tf.floor(coin + 0.5), tf.bool)
    return tf.cond(keep_sign, lambda: tensor, lambda: (- tensor))
def readArk(filename, limit=numpy.inf):
    """Read a Kaldi-style ark file.

    Returns (features, uttids), stopping once `limit` utterances were read.
    End of archive is signaled by readString raising ValueError.
    """
    features = []
    uttids = []
    with open(filename, 'rb') as f:
        while True:
            try:
                key = readString(f)
            except ValueError:
                break  # end of archive
            mat = readMatrix(f)
            features.append(mat)
            uttids.append(key)
            # Check after appending, mirroring an inclusive utterance budget.
            if len(features) == limit:
                break
    return (features, uttids)
def max_memory_reserved(device: Union[(Device, int)]=None) -> int:
    """Return the peak reserved memory in bytes for `device` from the stats dict."""
    stats = memory_stats(device=device)
    return stats['reserved_bytes.all.peak']
def parse_serverdesc(args):
    """Parse one server-descriptor file and return a summary dict, or None.

    `args` is a (path, min_time, max_time) tuple. Descriptors published
    outside [min_time, max_time] or lacking an observed bandwidth are skipped.
    """
    (path, min_time, max_time) = args
    desc = next(parse_file(path, document_handler='DOCUMENT', descriptor_type='server-descriptor 1.0', validate=False))
    if desc is None:
        return None
    published = desc.published.replace(tzinfo=timezone.utc).timestamp()
    if not (min_time <= published <= max_time):
        return None
    if desc.observed_bandwidth is None:
        return None
    rate = desc.average_bandwidth
    burst = desc.burst_bandwidth
    # Advertised bandwidth = min(observed, rate, burst), ignoring unset values.
    advertised = desc.observed_bandwidth
    for cap in (rate, burst):
        if (cap is not None) and (cap < advertised):
            advertised = cap
    return {'type': 'serverdesc', 'pub_dt': desc.published, 'fprint': desc.fingerprint, 'address': desc.address, 'bw_obs': desc.observed_bandwidth, 'bw_rate': (rate if rate is not None else 0), 'bw_burst': (burst if burst is not None else 0), 'bw_adv': advertised}
class _Sigma0Embedding(Morphism):
    """Embedding of a Sigma0 monoid into its ambient matrix space."""

    def __init__(self, domain):
        # A morphism from the monoid into its matrix space, in the category of monoids.
        hom = domain.Hom(domain._matrix_space, category=Monoids())
        Morphism.__init__(self, hom)

    def _call_(self, x):
        # An element embeds as its underlying matrix.
        return x.matrix()

    def _richcmp_(self, other, op):
        # Embeddings compare exactly as their domains do.
        return richcmp(self.domain(), other.domain(), op)
_task('masked_lm', dataclass=MaskedLMConfig)
class MaskedLMTask(FairseqTask):
    """Fairseq task for masked language modeling (BERT-style pretraining).

    NOTE(review): the `_task(...)` line above looks like a `@register_task`
    decorator whose '@' was lost -- confirm against the original source.
    """

    cfg: MaskedLMConfig

    def __init__(self, cfg: MaskedLMConfig, dictionary):
        super().__init__(cfg)
        self.dictionary = dictionary
        # Dedicated <mask> symbol appended to the vocabulary.
        self.mask_idx = dictionary.add_symbol('<mask>')

    def setup_task(cls, cfg: MaskedLMConfig, **kwargs):
        """Build the task from config; loads dict.txt from the first data path.

        NOTE(review): takes `cls` and returns `cls(...)` -- presumably a
        @classmethod whose decorator was stripped; confirm.
        """
        paths = utils.split_paths(cfg.data)
        assert (len(paths) > 0)
        dictionary = Dictionary.load(os.path.join(paths[0], 'dict.txt'))
        logger.info('dictionary: {} types'.format(len(dictionary)))
        return cls(cfg, dictionary)

    def load_dataset(self, split, epoch=1, combine=False, **kwargs):
        """Load a split and apply BERT-style masking.

        Pipeline: indexed dataset -> optional shortening -> token blocks of
        (tokens_per_sample - 1) -> prepend <bos> -> mask tokens -> shuffled,
        size-sorted nested dataset stored in self.datasets[split].
        """
        paths = utils.split_paths(self.cfg.data)
        assert (len(paths) > 0)
        # Round-robin over data paths by epoch (for sharded corpora).
        data_path = paths[((epoch - 1) % len(paths))]
        split_path = os.path.join(data_path, split)
        dataset = data_utils.load_indexed_dataset(split_path, self.source_dictionary, combine=combine)
        if (dataset is None):
            raise FileNotFoundError('Dataset not found: {} ({})'.format(split, split_path))
        dataset = maybe_shorten_dataset(dataset, split, self.cfg.shorten_data_split_list, self.cfg.shorten_method, self.cfg.tokens_per_sample, self.cfg.seed)
        # One position is reserved for the <bos> token prepended below.
        dataset = TokenBlockDataset(dataset, dataset.sizes, (self.cfg.tokens_per_sample - 1), pad=self.source_dictionary.pad(), eos=self.source_dictionary.eos(), break_mode=self.cfg.sample_break_mode)
        logger.info('loaded {} blocks from: {}'.format(len(dataset), split_path))
        dataset = PrependTokenDataset(dataset, self.source_dictionary.bos())
        # NOTE(review): uses self.args here while everything else reads
        # self.cfg -- likely a leftover; confirm self.args exists on the base.
        mask_whole_words = (get_whole_word_mask(self.args, self.source_dictionary) if self.cfg.mask_whole_words else None)
        (src_dataset, tgt_dataset) = MaskTokensDataset.apply_mask(dataset, self.source_dictionary, pad_idx=self.source_dictionary.pad(), mask_idx=self.mask_idx, seed=self.cfg.seed, mask_prob=self.cfg.mask_prob, leave_unmasked_prob=self.cfg.leave_unmasked_prob, random_token_prob=self.cfg.random_token_prob, freq_weighted_replacement=self.cfg.freq_weighted_replacement, mask_whole_words=mask_whole_words, mask_multiple_length=self.cfg.mask_multiple_length, mask_stdev=self.cfg.mask_stdev)
        with data_utils.numpy_seed(self.cfg.seed):
            shuffle = np.random.permutation(len(src_dataset))
        # Shuffled order first, then size, for efficient batching.
        self.datasets[split] = SortDataset(NestedDictionaryDataset({'id': IdDataset(), 'net_input': {'src_tokens': RightPadDataset(src_dataset, pad_idx=self.source_dictionary.pad()), 'src_lengths': NumelDataset(src_dataset, reduce=False)}, 'target': RightPadDataset(tgt_dataset, pad_idx=self.source_dictionary.pad()), 'nsentences': NumSamplesDataset(), 'ntokens': NumelDataset(src_dataset, reduce=True)}, sizes=[src_dataset.sizes]), sort_order=[shuffle, src_dataset.sizes])

    def build_dataset_for_inference(self, src_tokens, src_lengths, sort=True):
        """Wrap raw token tensors for inference (no masking applied here)."""
        src_dataset = RightPadDataset(TokenBlockDataset(src_tokens, src_lengths, (self.cfg.tokens_per_sample - 1), pad=self.source_dictionary.pad(), eos=self.source_dictionary.eos(), break_mode='eos'), pad_idx=self.source_dictionary.pad())
        src_dataset = PrependTokenDataset(src_dataset, self.source_dictionary.bos())
        src_dataset = NestedDictionaryDataset({'id': IdDataset(), 'net_input': {'src_tokens': src_dataset, 'src_lengths': NumelDataset(src_dataset, reduce=False)}}, sizes=src_lengths)
        if sort:
            src_dataset = SortDataset(src_dataset, sort_order=[src_lengths])
        return src_dataset

    def source_dictionary(self):
        # NOTE(review): accessed as an attribute above (self.source_dictionary),
        # so this is presumably a @property whose decorator was stripped.
        return self.dictionary

    def target_dictionary(self):
        # NOTE(review): presumably a @property as well; confirm.
        return self.dictionary
class FlaxGPTJForCausalLM(metaclass=DummyObject):
    """Import-time placeholder used when the Flax backend is unavailable;
    any instantiation raises an informative error via `requires_backends`."""

    # Backends required for the real implementation to be importable.
    _backends = ['flax']

    def __init__(self, *args, **kwargs):
        requires_backends(self, ['flax'])
def _cleanse_included_implicit_return_none(subject_properties, statement_checked_lines, statement_slice):
    """Remove the line id of a compiler-generated implicit `return None` from
    `statement_checked_lines`.

    CPython appends LOAD_CONST None / RETURN_VALUE to any function that falls
    off the end; those instructions carry a line number even though no explicit
    `return` statement exists there.
    """
    # Tail pattern: ..., LOAD_CONST None, RETURN_VALUE, <one trailing instruction>.
    if ((len(statement_slice) >= 3) and (statement_slice[(- 3)].opcode == op.LOAD_CONST) and (statement_slice[(- 3)].arg is None) and (statement_slice[(- 2)].opcode == op.RETURN_VALUE)):
        # Only remove when the LOAD_CONST does not share its source line with
        # the preceding real instruction, i.e. the line is purely synthetic.
        if ((len(statement_slice) != 3) and (statement_slice[(- 4)].lineno != statement_slice[(- 3)].lineno)):
            statement_checked_lines.remove(DynamicSlicer.get_line_id_by_instruction(statement_slice[(- 3)], subject_properties))
def summarize_report(current_iteration, num_updates, max_updates, meter, should_print=True, extra=None, tb_writer=None, wandb_logger=None):
    """Push the meter's metrics to TensorBoard/W&B and optionally print a progress line."""
    extra = {} if extra is None else extra

    # Only the main process (or an XLA worker) reports anything.
    if not (is_main() or is_xla()):
        return

    if wandb_logger and ('lr' in extra):
        wandb_logger.log_metrics({'train/learning_rate': float(extra['lr'])}, commit=False)

    if tb_writer:
        tb_writer.add_scalars(meter.get_scalar_dict(), current_iteration)

    if wandb_logger:
        metrics = meter.get_scalar_dict()
        wandb_logger.log_metrics({**metrics, 'trainer/global_step': current_iteration})

    if not should_print:
        return

    log_dict = {}
    if (num_updates is not None) and (max_updates is not None):
        log_dict['progress'] = f'{num_updates}/{max_updates}'
    log_dict.update(meter.get_log_dict())
    log_dict.update(extra)
    log_progress(log_dict)
def test_to():
    """MTSAC.to() must move both critics, the policy and log-alpha to the
    globally selected device.

    Fix: the original asserted over `mtsac._qf2.parameters()` twice; the
    duplicated loop has been removed (behavior of the test is unchanged).
    """
    env_names = ['CartPole-v0', 'CartPole-v1']
    task_envs = [GarageEnv(env_name=name) for name in env_names]
    env = MultiEnvWrapper(task_envs, sample_strategy=round_robin_strategy)
    deterministic.set_seed(0)
    policy = TanhGaussianMLPPolicy(env_spec=env.spec, hidden_sizes=[1, 1], hidden_nonlinearity=torch.nn.ReLU, output_nonlinearity=None, min_std=np.exp((- 20.0)), max_std=np.exp(2.0))
    qf1 = ContinuousMLPQFunction(env_spec=env.spec, hidden_sizes=[1, 1], hidden_nonlinearity=F.relu)
    qf2 = ContinuousMLPQFunction(env_spec=env.spec, hidden_sizes=[1, 1], hidden_nonlinearity=F.relu)
    replay_buffer = PathBuffer(capacity_in_transitions=int(1000000.0))
    num_tasks = 2
    buffer_batch_size = 2
    mtsac = MTSAC(policy=policy, qf1=qf1, qf2=qf2, gradient_steps_per_itr=150, max_path_length=150, eval_env=env, env_spec=env.spec, num_tasks=num_tasks, steps_per_epoch=5, replay_buffer=replay_buffer, min_buffer_size=1000.0, target_update_tau=0.005, discount=0.99, buffer_batch_size=buffer_batch_size)
    # Use GPU when available so the device move is actually exercised.
    set_gpu_mode(torch.cuda.is_available())
    mtsac.to()
    device = global_device()
    for param in mtsac._qf1.parameters():
        assert (param.device == device)
    for param in mtsac._qf2.parameters():
        assert (param.device == device)
    for param in mtsac.policy.parameters():
        assert (param.device == device)
    assert (mtsac._log_alpha.device == device)
def count_lus(lus_str):
    """Sum the frequencies in a comma-separated "name:freq,name:freq,..." string.

    Entries whose name contains a space are skipped silently; malformed entries
    (missing/extra ':' or a non-integer frequency) are printed and ignored.

    Args:
        lus_str: string of lexical-unit entries, e.g. "run:3,jump:2".

    Returns:
        int: total frequency over all well-formed, single-word entries.

    Fixes: the bare `except:` now catches only ValueError (the only error the
    split/int parsing can raise here), and the `lus_bow` dict that was built
    but never used has been removed.
    """
    total_freq = 0
    for lu in lus_str.split(','):
        try:
            (lu_name, lu_freq) = lu.split(':')
            lu_name = lu_name.strip()
            # Multi-word lexical units are intentionally ignored.
            if (' ' in lu_name):
                continue
            total_freq += int(lu_freq)
        except ValueError:
            # Keep the original best-effort behavior: report and move on.
            print(lu)
    return total_freq
def load_mat_training_data(real_fts_dir: str, gan_fts_dir: str, num_examples: int, split: float):
    """Load real/GAN feature vectors from .mat files and split into train/test sets.

    Samples `num_examples` paired file indices at random (with replacement),
    puts the first ceil(n * split) of each class into the training set and the
    remainder into per-class test sets. Real examples are labeled 1, GAN
    examples 0.

    Returns:
        (X_train, Y_train, X_test_real, Y_test_real, X_test_gan, Y_test_gan,
         tr_real_data_break_point) where the break point is the number of real
        examples at the front of the training lists.
    """
    real_fts_files = sorted(os.path.join(real_fts_dir, i) for i in os.listdir(real_fts_dir) if i.endswith('.mat'))
    gan_fts_files = sorted(os.path.join(gan_fts_dir, i) for i in os.listdir(gan_fts_dir) if i.endswith('.mat'))
    # BUG FIX: previously sampled indices over len(real_fts_dir) -- the length
    # of the *path string* -- which could raise IndexError or bias the sample.
    # Sample over the number of real feature files instead.
    # NOTE(review): the same indices are applied to the GAN list, which assumes
    # both directories hold the same number of .mat files -- confirm upstream.
    indexes = np.random.randint(len(real_fts_files), size=num_examples)
    real_fts_files = [real_fts_files[i] for i in indexes]
    gan_fts_files = [gan_fts_files[i] for i in indexes]

    def _load(path):
        # Each .mat file stores its feature vector under the key 'c'.
        return list(loadmat(path)['c'][0])

    real_split_index = int(math.ceil(len(real_fts_files) * split))
    gan_split_index = int(math.ceil(len(gan_fts_files) * split))
    (X_train, Y_train) = ([], [])
    (X_test_real, Y_test_real) = ([], [])
    (X_test_gan, Y_test_gan) = ([], [])
    for path in real_fts_files[:real_split_index]:
        X_train.append(_load(path))
        Y_train.append(1)
    for path in real_fts_files[real_split_index:]:
        X_test_real.append(_load(path))
        Y_test_real.append(1)
    # Real examples occupy the front of X_train; remember where they end.
    tr_real_data_break_point = len(X_train)
    for path in gan_fts_files[:gan_split_index]:
        X_train.append(_load(path))
        Y_train.append(0)
    for path in gan_fts_files[gan_split_index:]:
        X_test_gan.append(_load(path))
        Y_test_gan.append(0)
    return (X_train, Y_train, X_test_real, Y_test_real, X_test_gan, Y_test_gan, tr_real_data_break_point)
def main():
    """Train a text or image classifier baseline and report dev/test accuracy."""
    (examples, label_list) = get_data(task=args.task, train_num_per_class=args.train_num_per_class, dev_num_per_class=args.dev_num_per_class, imbalance_rate=args.imbalance_rate, data_seed=args.data_seed)

    # SST tasks use the text classifier; everything else is an image task.
    if args.task in ['sst-2', 'sst-5']:
        classifier = Classifier(label_list=label_list, device=device)
        classifier.get_optimizer(learning_rate=args.learning_rate)
    else:
        classifier = ImageClassifier(pretrained=args.resnet_pretrained, baseline=True)
        classifier.get_optimizer(learning_rate=args.image_lr, momentum=args.image_momentum, weight_decay=args.image_weight_decay)

    # Shuffle train/dev batches; keep the test order fixed.
    for split in ['train', 'dev', 'test']:
        classifier.load_data(set_type=split, examples=examples[split], batch_size=args.batch_size, shuffle=(split != 'test'))

    banner = '=' * 60
    print(banner, '\n', 'Training', '\n', banner, sep='')

    best_dev_acc = (- 1.0)
    final_test_acc = (- 1.0)
    for epoch in range(args.epochs):
        classifier.train_epoch()
        dev_acc = classifier.evaluate('dev')

        # The dev-best (and hence test evaluation) only starts counting after
        # the warm-up period of min_epochs.
        if epoch >= args.min_epochs:
            do_test = dev_acc > best_dev_acc
            best_dev_acc = max(best_dev_acc, dev_acc)
        else:
            do_test = False

        print('Epoch {}, Dev Acc: {:.4f}, Best Ever: {:.4f}'.format(epoch, (100.0 * dev_acc), (100.0 * best_dev_acc)))
        if do_test:
            final_test_acc = classifier.evaluate('test')
            print('Test Acc: {:.4f}'.format((100.0 * final_test_acc)))

    print('Final Dev Acc: {:.4f}, Final Test Acc: {:.4f}'.format((100.0 * best_dev_acc), (100.0 * final_test_acc)))
_utils.test()
def test_stacked_mixed_ib_and_non_ib_inner_loops_local_variable():
    """Forward-mode autodiff through stacked inner loops, mixing loops that do
    and do not accumulate into an intermediate local variable.

    NOTE(review): `_utils.test()` above looks like a decorator whose '@' was
    stripped, and the inner function is presumably a @ti.kernel -- confirm
    against the original source.
    """
    x = ti.field(dtype=float, shape=(), needs_dual=True)
    arr = ti.field(dtype=float, shape=2, needs_dual=True)
    loss = ti.field(dtype=float, shape=(), needs_dual=True)

    def stacked_mixed_ib_and_non_ib_inner_loops_local_variable():
        for i in arr:
            loss[None] += ti.sin(x[None])
            for j in range(3):
                for k in range(3):
                    loss[None] += (ti.sin(x[None]) + 1.0)
            for j in range(3):
                s = 0.0
                for k in range(3):
                    s += (ti.sin(x[None]) + 1.0)
                loss[None] += s
            for j in range(3):
                for k in range(3):
                    loss[None] += (ti.sin(x[None]) + 1.0)

    x[None] = 0.0
    with ti.ad.FwdMode(loss=loss, param=x):
        stacked_mixed_ib_and_non_ib_inner_loops_local_variable()
    # With x = 0: sin(x) = 0, so each outer iteration adds 0 + 9 + 9 + 9 = 27;
    # two outer iterations give 54.
    assert (loss[None] == 54.0)
    # d/dx per outer iteration: cos(0) * (1 + 9 + 9 + 9) = 28 -> 56 total.
    assert (loss.dual[None] == 56.0)
class Settings():
    """Thread-aware stack of configuration overrides.

    Each thread keeps its own stack of config dicts in thread-local storage;
    `context(...)` pushes an updated copy, and `_parent_configs` lets a child
    thread inherit the config that was active in its parent when it spawned.
    """

    def __init__(self):
        self._lock = threading.Lock()   # guards _parent_configs
        self._parent_configs = {}       # thread id -> last config published by that thread
        self._local = threading.local() # per-thread config_stack

    def _get_current_config(self):
        # Top of this thread's stack, or {} if nothing was ever pushed.
        return (self._local.config_stack[(- 1)] if (hasattr(self._local, 'config_stack') and self._local.config_stack) else {})

    def initialize_for_thread(self, parent_tid):
        """Seed this thread's stack with a deep copy of the parent thread's config."""
        with self._lock:
            parent_config = self._parent_configs.get(parent_tid)
            if parent_config:
                self._local.config_stack = [copy.deepcopy(parent_config)]
            else:
                self._local.config_stack = [{}]

    def context(self, **kwargs):
        """Push a config derived from the current one, updated with `kwargs`.

        NOTE(review): this is a generator using a bare `yield` -- presumably
        wrapped with @contextlib.contextmanager in the original source; confirm.
        """
        current_config = copy.deepcopy(self._get_current_config())
        current_config.update(kwargs)
        if (not hasattr(self._local, 'config_stack')):
            self._local.config_stack = []
        self._local.config_stack.append(current_config)
        # Publish so threads spawned inside this context can inherit it.
        with self._lock:
            self._parent_configs[threading.get_ident()] = copy.deepcopy(current_config)
        try:
            (yield)
        finally:
            # Pop the override and retract the published copy on exit.
            self._local.config_stack.pop()
            with self._lock:
                self._parent_configs.pop(threading.get_ident(), None)
def evaluate(args, model, tokenizer, output_prediction=False):
    """Run disambiguation evaluation over the cached eval split.

    Returns a dict with the number of problems, accuracy and coverage; when
    `output_prediction` is set, also dumps per-example predictions to
    predictions.json in the output directory.
    """
    (dataset, examples) = load_and_cache_examples(args, tokenizer, evaluate=True, output_examples=True)
    if ((not os.path.exists(args.output_dir)) and (args.local_rank in [(- 1), 0])):
        os.makedirs(args.output_dir)
    args.eval_batch_size = (args.per_gpu_eval_batch_size * max(1, args.n_gpu))
    eval_sampler = SequentialSampler(dataset)
    eval_dataloader = DataLoader(dataset, sampler=eval_sampler, batch_size=args.eval_batch_size, collate_fn=partial(disamb_collate_fn, tokenizer=tokenizer))
    # Wrap in DataParallel for multi-GPU evaluation unless already wrapped.
    if ((args.n_gpu > 1) and (not isinstance(model, torch.nn.DataParallel))):
        model = torch.nn.DataParallel(model)
    logger.info('***** Running evaluation *****')
    logger.info(' Num examples = %d', len(dataset))
    logger.info(' Batch size = %d', args.eval_batch_size)
    start_time = timeit.default_timer()
    all_pred_indexes = []
    all_labels = []
    for batch in tqdm(eval_dataloader, desc='Evaluating'):
        model.eval()
        batch = tuple((t.to(args.device) for t in batch))
        with torch.no_grad():
            inputs = {'input_ids': batch[0], 'token_type_ids': batch[1], 'attention_mask': batch[2], 'sample_mask': batch[3], 'labels': batch[4]}
            # These model families take no token_type_ids.
            if (args.model_type in ['xlm', 'roberta', 'distilbert', 'camembert', 'bart']):
                del inputs['token_type_ids']
            logits = model(**inputs)[1]
            pred_indexes = torch.argmax(logits, 1).detach().cpu()
            all_pred_indexes.append(pred_indexes)
            all_labels.append(batch[4].cpu())
    all_pred_indexes = torch.cat(all_pred_indexes).numpy()
    all_labels = torch.cat(all_labels).numpy()
    acc = (np.sum((all_pred_indexes == all_labels)) / len(all_pred_indexes))
    evalTime = (timeit.default_timer() - start_time)
    logger.info(' Evaluation done in total %f secs (%f sec per example)', evalTime, (evalTime / len(dataset)))
    coverage = coverage_evaluation(examples, dataset, all_pred_indexes)
    results = {'num problem': len(all_pred_indexes), 'acc': acc, 'cov': coverage}
    # NOTE(review): `saving` is never used -- the output_prediction branch
    # below rebuilds the identical mapping; one of the two is redundant.
    saving = OrderedDict([(feat.pid, pred) for (feat, pred) in zip(dataset, all_pred_indexes.tolist())])
    if output_prediction:
        dump_json(OrderedDict([(feat.pid, pred) for (feat, pred) in zip(dataset, all_pred_indexes.tolist())]), join(args.output_dir, 'predictions.json'))
    return results
def create_tar_command(args):
    """Convert `args.source` into `args.destination` via an Uploader with progress reporting."""
    uploader = Uploader(log=log, progress=Progress())
    uploader.convert(args.source, args.destination)
class SawyerHandlePressEnv(SawyerXYZEnv):
    """Sawyer manipulation task: press a handle down to a goal height.

    NOTE(review): `model_name` / `_target_site_config` are accessed like
    attributes elsewhere, and `_assert_task_is_set` appears bare before
    `step` -- these look like @property / decorator lines whose '@' was
    stripped; confirm against the original source.
    """

    def __init__(self):
        # Workspace bounds for the hand and the randomized object/goal positions.
        hand_low = ((- 0.5), 0.4, 0.05)
        hand_high = (0.5, 1, 0.5)
        obj_low = ((- 0.1), 0.8, 0.05)
        obj_high = (0.1, 0.9, 0.05)
        goal_low = ((- 0.1), 0.65, 0.0399)
        goal_high = (0.1, 0.75, 0.0401)
        super().__init__(self.model_name, hand_low=hand_low, hand_high=hand_high)
        self.init_config = {'obj_init_pos': np.array([0, 0.9, 0.05]), 'hand_init_pos': np.array((0, 0.6, 0.2))}
        self.goal = np.array([0, 0.8, 0.14])
        self.obj_init_pos = self.init_config['obj_init_pos']
        self.hand_init_pos = self.init_config['hand_init_pos']
        self._random_reset_space = Box(np.array(obj_low), np.array(obj_high))
        self.goal_space = Box(np.array(goal_low), np.array(goal_high))

    def model_name(self):
        # Path to the MuJoCo XML describing this scene.
        return full_v1_path_for('sawyer_xyz/sawyer_handle_press.xml')

    _assert_task_is_set

    def step(self, action):
        """Advance one step; success when the handle is within 4 cm of the goal height."""
        ob = super().step(action)
        (reward, reachDist, pressDist) = self.compute_reward(action, ob)
        self.curr_path_length += 1
        info = {'reachDist': reachDist, 'goalDist': pressDist, 'epRew': reward, 'pickRew': None, 'success': float((pressDist <= 0.04))}
        return (ob, reward, False, info)

    def _target_site_config(self):
        return []

    def _get_pos_objects(self):
        # Position of the handle-tip site.
        return self.data.site_xpos[self.model.site_name2id('handleStart')]

    def _set_obj_xyz(self, pos):
        # The handle is a single degree of freedom at index 9 of qpos/qvel.
        qpos = self.data.qpos.flat.copy()
        qvel = self.data.qvel.flat.copy()
        qpos[9] = pos
        qvel[9] = 0
        self.set_state(qpos, qvel)

    def reset_model(self):
        self._reset_hand()
        self._target_pos = self.goal.copy()
        self.obj_init_pos = self.init_config['obj_init_pos']
        if self.random_init:
            goal_pos = self._get_state_rand_vec()
            self.obj_init_pos = goal_pos
            # Offset from the sampled box origin to the button/handle target.
            button_pos = goal_pos.copy()
            button_pos[1] -= 0.1
            button_pos[2] += 0.09
            self._target_pos = button_pos
        self.sim.model.body_pos[self.model.body_name2id('box')] = self.obj_init_pos
        self.sim.model.body_pos[self.model.body_name2id('handle')] = self._target_pos
        self._set_obj_xyz(0)
        # The actual press goal is the goalPress site after the bodies moved.
        self._target_pos = self._get_site_pos('goalPress')
        self.maxDist = np.abs((self.data.site_xpos[self.model.site_name2id('handleStart')][(- 1)] - self._target_pos[(- 1)]))
        self.target_reward = ((1000 * self.maxDist) + (1000 * 2))
        return self._get_obs()

    def _reset_hand(self):
        super()._reset_hand(10)
        (rightFinger, leftFinger) = (self._get_site_pos('rightEndEffector'), self._get_site_pos('leftEndEffector'))
        self.init_fingerCOM = ((rightFinger + leftFinger) / 2)
        self.pickCompleted = False

    def compute_reward(self, actions, obs):
        """Return [reward, reachDist, pressDist]: -reach distance plus a shaped
        press reward that only activates once the finger is within 5 cm."""
        del actions
        objPos = obs[3:6]
        leftFinger = self._get_site_pos('leftEndEffector')
        fingerCOM = leftFinger
        pressGoal = self._target_pos[(- 1)]
        # Press progress is measured along the z axis only.
        pressDist = np.abs((objPos[(- 1)] - pressGoal))
        reachDist = np.linalg.norm((objPos - fingerCOM))
        c1 = 1000
        c2 = 0.01
        c3 = 0.001
        if (reachDist < 0.05):
            # Exponential bonus terms sharpen the reward near zero press distance.
            pressRew = ((1000 * (self.maxDist - pressDist)) + (c1 * (np.exp(((- (pressDist ** 2)) / c2)) + np.exp(((- (pressDist ** 2)) / c3)))))
        else:
            pressRew = 0
        pressRew = max(pressRew, 0)
        reward = ((- reachDist) + pressRew)
        return [reward, reachDist, pressDist]
def _showxv(image, title=None, **options):
    """Display `image` using the default ImageShow viewer.

    Internal helper; extra keyword options are forwarded to the viewer.
    """
    from . import ImageShow
    ImageShow.show(image, title, **options)
def add_model_training_inputs(model):
    """Build the combined training roidb from the configured datasets/proposals
    and attach it to the model's training input ops."""
    logger = logging.getLogger(__name__)
    logger.info('Loading dataset: {}'.format(cfg.TRAIN.DATASETS))
    roidb = combined_roidb_for_training(cfg.TRAIN.DATASETS, cfg.TRAIN.PROPOSAL_FILES)
    logger.info('{:d} roidb entries'.format(len(roidb)))
    model_builder_wsl.add_training_inputs(model, roidb=roidb)
def ModAbVar_ambient_jacobian(group):
    """Return the ambient Jacobian modular abelian variety of `group`.

    Instances are memoized per group via weak references, so a live cached
    object is reused and a dead one is transparently rebuilt.
    """
    ref = _cache.get(group)
    if ref is not None:
        cached = ref()
        if cached is not None:
            return cached
    # Not cached, or the weak reference died: build and re-register.
    X = ModAbVar_ambient_jacobian_class(group)
    _cache[group] = weakref.ref(X)
    return X
def run_clang_tidy(options, line_filters, files):
    """Assemble and run the clang-tidy invocation over `files`; return its output.

    Honors the parallel and dry-run modes and raises RuntimeError on compiler
    diagnostics unless keep-going was requested.
    """
    command = [options.clang_tidy_exe, '-p', options.compile_commands_dir]

    # Fall back to a .clang-tidy file in the working directory.
    if (not options.config_file) and os.path.exists('.clang-tidy'):
        options.config_file = '.clang-tidy'
    if options.config_file:
        import yaml
        with open(options.config_file) as config:
            # clang-tidy accepts the configuration as inline JSON.
            command.extend(['-config', json.dumps(yaml.load(config, Loader=yaml.FullLoader))])

    command.extend(options.extra_args)
    if line_filters:
        command.extend(['-line-filter', json.dumps(line_filters)])

    if options.parallel:
        # One full command per file, fanned out in parallel.
        per_file_commands = [list(command) + [f] for f in files]
        output = run_shell_commands_in_parallel(per_file_commands)
    else:
        command.extend(files)
        if options.dry_run:
            # Quote bracketed JSON arguments so the printed command is shell-safe.
            quoted = [re.sub('^([{[].*[]}])$', "'\\1'", arg) for arg in command]
            return ' '.join(quoted)
        output = run_shell_command(command)

    if (not options.keep_going) and ('[clang-diagnostic-error]' in output):
        message = 'Found clang-diagnostic-errors in clang-tidy output: {}'
        raise RuntimeError(message.format(output))
    return output
def contract_mwt(infile, outfile, ignore_gapping=True):
    """Rewrite a CoNLL-U file so each multi-word token (MWT) range becomes a
    single token line tagged MWT=Yes, dropping its component word lines.

    Comment and blank lines pass through unchanged; with `ignore_gapping`,
    elided tokens (ids containing '.') are removed entirely.
    """
    with open(outfile, 'w') as fout:
        with open(infile, 'r') as fin:
            idx = 0
            # Active MWT range (inclusive); inactive whenever end < begin.
            mwt_begin = 0
            mwt_end = -1
            for raw in fin:
                raw = raw.rstrip()
                if raw.startswith('#'):
                    print(raw, file=fout)
                    continue
                if len(raw) <= 0:
                    # Sentence boundary: emit it and reset all counters.
                    print(raw, file=fout)
                    idx = 0
                    mwt_begin = 0
                    mwt_end = -1
                    continue
                fields = raw.split('\t')
                if ignore_gapping and ('.' in fields[0]):
                    continue
                idx += 1
                if '-' in fields[0]:
                    mwt_begin, mwt_end = (int(x) for x in fields[0].split('-'))
                    misc = 'MWT=Yes' if fields[-1] == '_' else (fields[-1] + '|MWT=Yes')
                    print('{}\t{}\t{}'.format(idx, '\t'.join(fields[1:-1]), misc), file=fout)
                    idx -= 1  # the component words below will re-advance the counter
                elif mwt_begin <= idx <= mwt_end:
                    continue  # component word of the current MWT: already emitted
                else:
                    print('{}\t{}'.format(idx, '\t'.join(fields[1:])), file=fout)
def _format(val: Any, output_format: str='standard', errors: str='coarse') -> Any:
    """Reformat one Canadian SIN value; always returns a one-element list.

    Null values map to [nan]. Invalid values raise, pass through unchanged, or
    map to nan depending on `errors` ('raise' / 'ignore' / anything else).
    """
    val = str(val)
    if val in NULL_VALUES:
        return [np.nan]
    if not validate_ca_sin(val):
        if errors == 'raise':
            raise ValueError(f'Unable to parse value {val}')
        return [val if errors == 'ignore' else np.nan]
    if output_format == 'compact':
        return [sin.compact(val)]
    if output_format == 'standard':
        return [sin.format(val)]
    # Unknown output formats yield an empty result, as before.
    return []
class attentionNet(nn.Module):
    """Attention-based image enhancement network.

    Pipeline: input conv -> 2x2 average pooling -> depth-attentive blocks ->
    pixel-shuffle upsampling (residual with the input features) -> two
    self-attention stages around a second depth-attentive stack -> output conv
    with a global residual connection to the input image.

    Fix: the constructor previously hard-coded scailingFactor=2 when building
    the pixel-shuffle upsampler, silently ignoring the `scailingFactor`
    argument; it is now forwarded (the default of 2 is unchanged).
    NOTE(review): values other than 2 also require a matching pooling factor
    for the residual shapes to line up -- confirm before generalizing callers.
    """

    def __init__(self, squeezeFilters=32, expandFilters=64, scailingFactor=2, numAttentionBlock=10):
        super(attentionNet, self).__init__()
        self.inputConv = nn.Conv2d(3, squeezeFilters, 3, 1, 1)
        # NOTE(review): despite the name this is a fixed 2x2 average pool,
        # not a global pooling layer.
        self.globalPooling = nn.AvgPool2d(2, 2)
        blocks = [depthAttentiveResBlock(squeezeFilters, expandFilters) for _ in range(numAttentionBlock)]
        self.spatialFeatExtBlock = nn.Sequential(*blocks)
        self.psUpsampling = pixelShuffleUpsampling(inputFilters=squeezeFilters, scailingFactor=scailingFactor)
        self.featureAttention1 = selfAttention(squeezeFilters, squeezeFilters, 3, 1, 1)
        # Second, half-depth stack for full-feature correlation.
        blocks = [depthAttentiveResBlock(squeezeFilters, expandFilters) for _ in range(numAttentionBlock // 2)]
        self.fullFeatCorelationBlock = nn.Sequential(*blocks)
        self.featureAttention2 = selfAttention(squeezeFilters, squeezeFilters, 3, 1, 1)
        self.convOut = nn.Conv2d(squeezeFilters, 3, 1)
        self._initialize_weights()

    def forward(self, img):
        """Map a 3-channel image to an enhanced 3-channel image in [-1, 1]."""
        xInp = F.relu(self.inputConv(img))
        xGAP = self.globalPooling(xInp)
        xSPE = self.spatialFeatExtBlock(xGAP)
        # Residual: upsampled pooled features plus the unpooled input features.
        xPUP = (F.relu(self.psUpsampling(xSPE)) + xInp)
        xFA1 = F.relu(self.featureAttention1(xPUP))
        XFFC = self.fullFeatCorelationBlock(xFA1)
        xFA2 = (F.relu(self.featureAttention2(XFFC)) + xFA1)
        # Global residual with the raw input image, squashed by tanh.
        return torch.tanh((self.convOut(xFA2) + img))

    def _initialize_weights(self):
        """Apply the module-level `init_weights` initializer to every stage."""
        self.inputConv.apply(init_weights)
        self.globalPooling.apply(init_weights)
        self.spatialFeatExtBlock.apply(init_weights)
        self.psUpsampling.apply(init_weights)
        self.featureAttention1.apply(init_weights)
        self.fullFeatCorelationBlock.apply(init_weights)
        self.featureAttention2.apply(init_weights)
        self.convOut.apply(init_weights)
def register_Ns3Ipv4GlobalRouting_methods(root_module, cls):
    """Register PyBindGen bindings for ns3::Ipv4GlobalRouting: constructors,
    routing-table mutators/accessors, and the Ipv4RoutingProtocol virtual
    interface (route lookup plus interface/address notifications)."""
    cls.add_constructor([param('ns3::Ipv4GlobalRouting const &', 'arg0')])
    cls.add_constructor([])
    # Routing-table construction helpers.
    cls.add_method('AddASExternalRouteTo', 'void', [param('ns3::Ipv4Address', 'network'), param('ns3::Ipv4Mask', 'networkMask'), param('ns3::Ipv4Address', 'nextHop'), param('uint32_t', 'interface')])
    cls.add_method('AddHostRouteTo', 'void', [param('ns3::Ipv4Address', 'dest'), param('ns3::Ipv4Address', 'nextHop'), param('uint32_t', 'interface')])
    cls.add_method('AddHostRouteTo', 'void', [param('ns3::Ipv4Address', 'dest'), param('uint32_t', 'interface')])
    cls.add_method('AddNetworkRouteTo', 'void', [param('ns3::Ipv4Address', 'network'), param('ns3::Ipv4Mask', 'networkMask'), param('ns3::Ipv4Address', 'nextHop'), param('uint32_t', 'interface')])
    cls.add_method('AddNetworkRouteTo', 'void', [param('ns3::Ipv4Address', 'network'), param('ns3::Ipv4Mask', 'networkMask'), param('uint32_t', 'interface')])
    cls.add_method('AssignStreams', 'int64_t', [param('int64_t', 'stream')])
    cls.add_method('GetNRoutes', 'uint32_t', [], is_const=True)
    cls.add_method('GetRoute', retval('ns3::Ipv4RoutingTableEntry *', caller_owns_return=False), [param('uint32_t', 'i')], is_const=True)
    cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True)
    # Ipv4RoutingProtocol notification callbacks.
    cls.add_method('NotifyAddAddress', 'void', [param('uint32_t', 'interface'), param('ns3::Ipv4InterfaceAddress', 'address')], is_virtual=True)
    cls.add_method('NotifyInterfaceDown', 'void', [param('uint32_t', 'interface')], is_virtual=True)
    cls.add_method('NotifyInterfaceUp', 'void', [param('uint32_t', 'interface')], is_virtual=True)
    cls.add_method('NotifyRemoveAddress', 'void', [param('uint32_t', 'interface'), param('ns3::Ipv4InterfaceAddress', 'address')], is_virtual=True)
    cls.add_method('PrintRoutingTable', 'void', [param('ns3::Ptr< ns3::OutputStreamWrapper >', 'stream'), param('ns3::Time::Unit', 'unit', default_value='::ns3::Time::Unit::S')], is_const=True, is_virtual=True)
    cls.add_method('RemoveRoute', 'void', [param('uint32_t', 'i')])
    # Datapath entry points: input demux callbacks and output route lookup.
    cls.add_method('RouteInput', 'bool', [param('ns3::Ptr< ns3::Packet const >', 'p'), param('ns3::Ipv4Header const &', 'header'), param('ns3::Ptr< ns3::NetDevice const >', 'idev'), param('ns3::Callback< void, ns3::Ptr< ns3::Ipv4Route >, ns3::Ptr< ns3::Packet const >, ns3::Ipv4Header const &, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'ucb'), param('ns3::Callback< void, ns3::Ptr< ns3::Ipv4MulticastRoute >, ns3::Ptr< ns3::Packet const >, ns3::Ipv4Header const &, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'mcb'), param('ns3::Callback< void, ns3::Ptr< ns3::Packet const >, ns3::Ipv4Header const &, unsigned int, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'lcb'), param('ns3::Callback< void, ns3::Ptr< ns3::Packet const >, ns3::Ipv4Header const &, ns3::Socket::SocketErrno, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'ecb')], is_virtual=True)
    cls.add_method('RouteOutput', 'ns3::Ptr< ns3::Ipv4Route >', [param('ns3::Ptr< ns3::Packet >', 'p'), param('ns3::Ipv4Header const &', 'header'), param('ns3::Ptr< ns3::NetDevice >', 'oif'), param('ns3::Socket::SocketErrno &', 'sockerr')], is_virtual=True)
    cls.add_method('SetIpv4', 'void', [param('ns3::Ptr< ns3::Ipv4 >', 'ipv4')], is_virtual=True)
    cls.add_method('DoDispose', 'void', [], visibility='protected', is_virtual=True)
    return
def load_pickle_model(model_path: str) -> CRF:
    """Deserialize a pickled CRF model from `model_path`.

    NOTE(review): `pickle.load` executes arbitrary code from the file; only
    load model files from trusted sources.
    """
    with open(model_path, 'rb') as fh:
        return pickle.load(fh)
def read_sentences(filename, encoding):
    """Read a two-column (word, tag) file into a list of sentences.

    Sentences are separated by blank lines. Any sentence containing a line
    that does not split into exactly two whitespace-separated fields is
    dropped, and the number of dropped sentences is printed at the end.
    """
    sentences = []
    current = []
    dropped = 0
    malformed = False
    with open(filename, encoding=encoding) as infile:
        for line in infile:
            line = line.rstrip()
            if not line:
                # Sentence boundary: keep or drop the accumulated tokens.
                if current:
                    if malformed:
                        dropped += 1
                    else:
                        sentences.append(current)
                    malformed = False
                current = []
                continue
            parts = line.split()
            if len(parts) != 2:
                # Taint the whole sentence; it will be dropped at the boundary.
                malformed = True
                continue
            current.append([parts[0], parts[1]])
    # Flush a trailing sentence that has no terminating blank line.
    if current:
        if malformed:
            dropped += 1
        else:
            sentences.append(current)
    print('Skipped {} examples due to formatting issues.'.format(dropped))
    return sentences
def download_weight(link, file_name, verbose=True):
    """Stream the file at `link` to `file_name`, showing an optional progress bar.

    Exits the process with an error message if the downloaded byte count does
    not match the server-reported Content-Length (e.g. a dropped connection).

    Fixes: corrected the user-facing typo 'defualt' -> 'default' in the
    progress description, and replaced `(False if verbose else True)` with the
    idiomatic `not verbose`.
    """
    response = requests.get(link, stream=True)
    # Content-Length of 0 means the server did not report a size.
    total_size_in_bytes = int(response.headers.get('content-length', 0))
    block_size = 1024  # stream in 1 KiB chunks
    progress_bar = tqdm(total=total_size_in_bytes, unit='iB', unit_scale=True, desc='downloading default weights', disable=not verbose)
    with open(file_name, 'wb') as file:
        for data in response.iter_content(block_size):
            progress_bar.update(len(data))
            file.write(data)
    progress_bar.close()
    if (total_size_in_bytes != 0) and (progress_bar.n != total_size_in_bytes):
        exit('ERROR, something went wrong (check your connection)')
def save_pngs(chunk):
    """Write `chunk` out as PNGs under a temporary directory, then delete that directory."""
    output_path = '/tmp/test/'
    operator = SavePNGsOperator(output_path)
    operator(chunk)
    print('remove the temporary directory.')
    shutil.rmtree(output_path)
def get_the_pile_document_iterator(file_path: str) -> Iterator[str]:
    """Lazily yield the 'text' field of each JSON line in a Pile-format file."""
    with open(file_path, 'r') as handle:
        for record in handle:
            yield json.loads(record)['text']
class CNNEvaluation(object):
    """Evaluate a population of CNN encodings, `gpu_num` networks at a time.

    Each batch spawns a non-daemon process pool so every network trains in its
    own process; returns one fitness value per network.
    """

    def __init__(self, gpu_num, dataset='cifar10', verbose=True, epoch_num=50, batchsize=16, imgSize=32):
        self.gpu_num = gpu_num  # number of networks evaluated concurrently
        self.epoch_num = epoch_num
        self.batchsize = batchsize
        self.dataset = dataset
        self.verbose = verbose
        self.imgSize = imgSize

    def __call__(self, net_lists):
        scores = np.zeros(len(net_lists))
        for start in np.arange(0, len(net_lists), self.gpu_num):
            # Last batch may be smaller than gpu_num.
            stop = np.min(((start + self.gpu_num), len(net_lists)))
            batch = stop - start
            pool = NoDaemonProcessPool(batch)
            jobs = [(cnn_eval, net_lists[(start + j)], j, self.epoch_num, self.batchsize, self.dataset, self.verbose, self.imgSize) for j in range(batch)]
            scores[start:(start + batch)] = pool.map(arg_wrapper_mp, jobs)
            pool.terminate()
        return scores
class Cusps_class(Singleton, Parent):
    """The set P^1(QQ) of all cusps, as a singleton parent with Cusp elements."""

    Element = Cusp

    def __init__(self):
        Parent.__init__(self, self)

    def _repr_(self):
        return 'Set P^1(QQ) of all cusps'

    def _latex_(self):
        return '\\mathbf{P}^1(\\QQ)'

    def __call__(self, x):
        # Calling the parent constructs a cusp from x.
        return Cusp(x)

    def _coerce_map_from_(self, R):
        # Coerce from anything the rationals coerce from, plus the infinity ring.
        if QQ.has_coerce_map_from(R):
            return True
        return R is InfinityRing

    def _element_constructor_(self, x):
        return Cusp(x)
def register_Ns3PdcpTag_methods(root_module, cls):
    """Register PyBindGen bindings for ns3::PdcpTag: constructors, the Tag
    serialization interface, and the sender-timestamp accessors."""
    cls.add_constructor([param('ns3::PdcpTag const &', 'arg0')])
    cls.add_constructor([])
    cls.add_constructor([param('ns3::Time', 'senderTimestamp')])
    # ns3::Tag virtual interface.
    cls.add_method('Deserialize', 'void', [param('ns3::TagBuffer', 'i')], is_virtual=True)
    cls.add_method('GetInstanceTypeId', 'ns3::TypeId', [], is_const=True, is_virtual=True)
    cls.add_method('GetSenderTimestamp', 'ns3::Time', [], is_const=True)
    cls.add_method('GetSerializedSize', 'uint32_t', [], is_const=True, is_virtual=True)
    cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True)
    cls.add_method('Print', 'void', [param('std::ostream &', 'os')], is_const=True, is_virtual=True)
    cls.add_method('Serialize', 'void', [param('ns3::TagBuffer', 'i')], is_const=True, is_virtual=True)
    cls.add_method('SetSenderTimestamp', 'void', [param('ns3::Time', 'senderTimestamp')])
    return
class FeaturesManager:
    """Registry mapping model architectures to the ONNX-export "features" they
    support, and the task -> AutoModel classes used to load them.

    Fixes applied during review:
    - restored the ``@staticmethod`` decorators (stripped in the mangled source;
      all call sites use ``FeaturesManager.<method>(...)``, so behavior is unchanged),
    - removed a duplicate ``'token-classification'`` entry from the ``roformer``
      feature list (duplicates collapse in the resulting dict anyway),
    - rejoined the ``KeyError`` message that had been split mid-string.
    """

    # Task name -> AutoModel class, per framework. Populated only when the
    # corresponding framework is importable.
    _TASKS_TO_AUTOMODELS = {}
    _TASKS_TO_TF_AUTOMODELS = {}
    if is_torch_available():
        _TASKS_TO_AUTOMODELS = {
            'default': AutoModel,
            'masked-lm': AutoModelForMaskedLM,
            'causal-lm': AutoModelForCausalLM,
            'seq2seq-lm': AutoModelForSeq2SeqLM,
            'sequence-classification': AutoModelForSequenceClassification,
            'token-classification': AutoModelForTokenClassification,
            'multiple-choice': AutoModelForMultipleChoice,
            'object-detection': AutoModelForObjectDetection,
            'question-answering': AutoModelForQuestionAnswering,
            'image-classification': AutoModelForImageClassification,
            'image-segmentation': AutoModelForImageSegmentation,
            'masked-im': AutoModelForMaskedImageModeling,
            'semantic-segmentation': AutoModelForSemanticSegmentation,
            'vision2seq-lm': AutoModelForVision2Seq,
            'speech2seq-lm': AutoModelForSpeechSeq2Seq,
        }
    if is_tf_available():
        _TASKS_TO_TF_AUTOMODELS = {
            'default': TFAutoModel,
            'masked-lm': TFAutoModelForMaskedLM,
            'causal-lm': TFAutoModelForCausalLM,
            'seq2seq-lm': TFAutoModelForSeq2SeqLM,
            'sequence-classification': TFAutoModelForSequenceClassification,
            'token-classification': TFAutoModelForTokenClassification,
            'multiple-choice': TFAutoModelForMultipleChoice,
            'question-answering': TFAutoModelForQuestionAnswering,
            'semantic-segmentation': TFAutoModelForSemanticSegmentation,
        }

    # Model type -> {feature name -> OnnxConfig constructor}.
    _SUPPORTED_MODEL_TYPE = {
        'albert': supported_features_mapping('default', 'masked-lm', 'sequence-classification', 'multiple-choice', 'token-classification', 'question-answering', onnx_config_cls='models.albert.AlbertOnnxConfig'),
        'bart': supported_features_mapping('default', 'default-with-past', 'causal-lm', 'causal-lm-with-past', 'seq2seq-lm', 'seq2seq-lm-with-past', 'sequence-classification', 'question-answering', onnx_config_cls='models.bart.BartOnnxConfig'),
        'beit': supported_features_mapping('default', 'image-classification', onnx_config_cls='models.beit.BeitOnnxConfig'),
        'bert': supported_features_mapping('default', 'masked-lm', 'causal-lm', 'sequence-classification', 'multiple-choice', 'token-classification', 'question-answering', onnx_config_cls='models.bert.BertOnnxConfig'),
        'big-bird': supported_features_mapping('default', 'masked-lm', 'causal-lm', 'sequence-classification', 'multiple-choice', 'token-classification', 'question-answering', onnx_config_cls='models.big_bird.BigBirdOnnxConfig'),
        'bigbird-pegasus': supported_features_mapping('default', 'default-with-past', 'causal-lm', 'causal-lm-with-past', 'seq2seq-lm', 'seq2seq-lm-with-past', 'sequence-classification', 'question-answering', onnx_config_cls='models.bigbird_pegasus.BigBirdPegasusOnnxConfig'),
        'blenderbot': supported_features_mapping('default', 'default-with-past', 'causal-lm', 'causal-lm-with-past', 'seq2seq-lm', 'seq2seq-lm-with-past', onnx_config_cls='models.blenderbot.BlenderbotOnnxConfig'),
        'blenderbot-small': supported_features_mapping('default', 'default-with-past', 'causal-lm', 'causal-lm-with-past', 'seq2seq-lm', 'seq2seq-lm-with-past', onnx_config_cls='models.blenderbot_small.BlenderbotSmallOnnxConfig'),
        'bloom': supported_features_mapping('default', 'default-with-past', 'causal-lm', 'causal-lm-with-past', 'sequence-classification', 'token-classification', onnx_config_cls='models.bloom.BloomOnnxConfig'),
        'camembert': supported_features_mapping('default', 'masked-lm', 'causal-lm', 'sequence-classification', 'multiple-choice', 'token-classification', 'question-answering', onnx_config_cls='models.camembert.CamembertOnnxConfig'),
        'clip': supported_features_mapping('default', onnx_config_cls='models.clip.CLIPOnnxConfig'),
        'codegen': supported_features_mapping('default', 'causal-lm', onnx_config_cls='models.codegen.CodeGenOnnxConfig'),
        'convbert': supported_features_mapping('default', 'masked-lm', 'sequence-classification', 'multiple-choice', 'token-classification', 'question-answering', onnx_config_cls='models.convbert.ConvBertOnnxConfig'),
        'convnext': supported_features_mapping('default', 'image-classification', onnx_config_cls='models.convnext.ConvNextOnnxConfig'),
        'data2vec-text': supported_features_mapping('default', 'masked-lm', 'sequence-classification', 'multiple-choice', 'token-classification', 'question-answering', onnx_config_cls='models.data2vec.Data2VecTextOnnxConfig'),
        'data2vec-vision': supported_features_mapping('default', 'image-classification', onnx_config_cls='models.data2vec.Data2VecVisionOnnxConfig'),
        'deberta': supported_features_mapping('default', 'masked-lm', 'sequence-classification', 'token-classification', 'question-answering', onnx_config_cls='models.deberta.DebertaOnnxConfig'),
        'deberta-v2': supported_features_mapping('default', 'masked-lm', 'sequence-classification', 'multiple-choice', 'token-classification', 'question-answering', onnx_config_cls='models.deberta_v2.DebertaV2OnnxConfig'),
        'deit': supported_features_mapping('default', 'image-classification', onnx_config_cls='models.deit.DeiTOnnxConfig'),
        'detr': supported_features_mapping('default', 'object-detection', 'image-segmentation', onnx_config_cls='models.detr.DetrOnnxConfig'),
        'distilbert': supported_features_mapping('default', 'masked-lm', 'sequence-classification', 'multiple-choice', 'token-classification', 'question-answering', onnx_config_cls='models.distilbert.DistilBertOnnxConfig'),
        'electra': supported_features_mapping('default', 'masked-lm', 'causal-lm', 'sequence-classification', 'multiple-choice', 'token-classification', 'question-answering', onnx_config_cls='models.electra.ElectraOnnxConfig'),
        'flaubert': supported_features_mapping('default', 'masked-lm', 'causal-lm', 'sequence-classification', 'multiple-choice', 'token-classification', 'question-answering', onnx_config_cls='models.flaubert.FlaubertOnnxConfig'),
        'gpt2': supported_features_mapping('default', 'default-with-past', 'causal-lm', 'causal-lm-with-past', 'sequence-classification', 'token-classification', onnx_config_cls='models.gpt2.GPT2OnnxConfig'),
        'gptj': supported_features_mapping('default', 'default-with-past', 'causal-lm', 'causal-lm-with-past', 'question-answering', 'sequence-classification', onnx_config_cls='models.gptj.GPTJOnnxConfig'),
        'gpt-neo': supported_features_mapping('default', 'default-with-past', 'causal-lm', 'causal-lm-with-past', 'sequence-classification', onnx_config_cls='models.gpt_neo.GPTNeoOnnxConfig'),
        'groupvit': supported_features_mapping('default', onnx_config_cls='models.groupvit.GroupViTOnnxConfig'),
        'ibert': supported_features_mapping('default', 'masked-lm', 'sequence-classification', 'multiple-choice', 'token-classification', 'question-answering', onnx_config_cls='models.ibert.IBertOnnxConfig'),
        'imagegpt': supported_features_mapping('default', 'image-classification', onnx_config_cls='models.imagegpt.ImageGPTOnnxConfig'),
        'layoutlm': supported_features_mapping('default', 'masked-lm', 'sequence-classification', 'token-classification', onnx_config_cls='models.layoutlm.LayoutLMOnnxConfig'),
        'layoutlmv3': supported_features_mapping('default', 'question-answering', 'sequence-classification', 'token-classification', onnx_config_cls='models.layoutlmv3.LayoutLMv3OnnxConfig'),
        'levit': supported_features_mapping('default', 'image-classification', onnx_config_cls='models.levit.LevitOnnxConfig'),
        'longt5': supported_features_mapping('default', 'default-with-past', 'seq2seq-lm', 'seq2seq-lm-with-past', onnx_config_cls='models.longt5.LongT5OnnxConfig'),
        'longformer': supported_features_mapping('default', 'masked-lm', 'multiple-choice', 'question-answering', 'sequence-classification', 'token-classification', onnx_config_cls='models.longformer.LongformerOnnxConfig'),
        'marian': supported_features_mapping('default', 'default-with-past', 'seq2seq-lm', 'seq2seq-lm-with-past', 'causal-lm', 'causal-lm-with-past', onnx_config_cls='models.marian.MarianOnnxConfig'),
        'mbart': supported_features_mapping('default', 'default-with-past', 'causal-lm', 'causal-lm-with-past', 'seq2seq-lm', 'seq2seq-lm-with-past', 'sequence-classification', 'question-answering', onnx_config_cls='models.mbart.MBartOnnxConfig'),
        'mobilebert': supported_features_mapping('default', 'masked-lm', 'sequence-classification', 'multiple-choice', 'token-classification', 'question-answering', onnx_config_cls='models.mobilebert.MobileBertOnnxConfig'),
        'mobilenet-v1': supported_features_mapping('default', 'image-classification', onnx_config_cls='models.mobilenet_v1.MobileNetV1OnnxConfig'),
        'mobilenet-v2': supported_features_mapping('default', 'image-classification', onnx_config_cls='models.mobilenet_v2.MobileNetV2OnnxConfig'),
        'mobilevit': supported_features_mapping('default', 'image-classification', onnx_config_cls='models.mobilevit.MobileViTOnnxConfig'),
        'mt5': supported_features_mapping('default', 'default-with-past', 'seq2seq-lm', 'seq2seq-lm-with-past', onnx_config_cls='models.mt5.MT5OnnxConfig'),
        'm2m-100': supported_features_mapping('default', 'default-with-past', 'seq2seq-lm', 'seq2seq-lm-with-past', onnx_config_cls='models.m2m_100.M2M100OnnxConfig'),
        'owlvit': supported_features_mapping('default', onnx_config_cls='models.owlvit.OwlViTOnnxConfig'),
        'perceiver': supported_features_mapping('image-classification', 'masked-lm', 'sequence-classification', onnx_config_cls='models.perceiver.PerceiverOnnxConfig'),
        'poolformer': supported_features_mapping('default', 'image-classification', onnx_config_cls='models.poolformer.PoolFormerOnnxConfig'),
        'rembert': supported_features_mapping('default', 'masked-lm', 'causal-lm', 'sequence-classification', 'multiple-choice', 'token-classification', 'question-answering', onnx_config_cls='models.rembert.RemBertOnnxConfig'),
        'resnet': supported_features_mapping('default', 'image-classification', onnx_config_cls='models.resnet.ResNetOnnxConfig'),
        'roberta': supported_features_mapping('default', 'masked-lm', 'causal-lm', 'sequence-classification', 'multiple-choice', 'token-classification', 'question-answering', onnx_config_cls='models.roberta.RobertaOnnxConfig'),
        # NOTE: the source listed 'token-classification' twice here; the
        # duplicate has been dropped (the mapping is a dict, so behavior is identical).
        'roformer': supported_features_mapping('default', 'masked-lm', 'causal-lm', 'sequence-classification', 'token-classification', 'multiple-choice', 'question-answering', onnx_config_cls='models.roformer.RoFormerOnnxConfig'),
        'segformer': supported_features_mapping('default', 'image-classification', 'semantic-segmentation', onnx_config_cls='models.segformer.SegformerOnnxConfig'),
        'squeezebert': supported_features_mapping('default', 'masked-lm', 'sequence-classification', 'multiple-choice', 'token-classification', 'question-answering', onnx_config_cls='models.squeezebert.SqueezeBertOnnxConfig'),
        'swin': supported_features_mapping('default', 'image-classification', onnx_config_cls='models.swin.SwinOnnxConfig'),
        't5': supported_features_mapping('default', 'default-with-past', 'seq2seq-lm', 'seq2seq-lm-with-past', onnx_config_cls='models.t5.T5OnnxConfig'),
        'vision-encoder-decoder': supported_features_mapping('vision2seq-lm', onnx_config_cls='models.vision_encoder_decoder.VisionEncoderDecoderOnnxConfig'),
        'vit': supported_features_mapping('default', 'image-classification', onnx_config_cls='models.vit.ViTOnnxConfig'),
        'whisper': supported_features_mapping('default', 'default-with-past', 'speech2seq-lm', 'speech2seq-lm-with-past', onnx_config_cls='models.whisper.WhisperOnnxConfig'),
        'xlm': supported_features_mapping('default', 'masked-lm', 'causal-lm', 'sequence-classification', 'multiple-choice', 'token-classification', 'question-answering', onnx_config_cls='models.xlm.XLMOnnxConfig'),
        'xlm-roberta': supported_features_mapping('default', 'masked-lm', 'causal-lm', 'sequence-classification', 'multiple-choice', 'token-classification', 'question-answering', onnx_config_cls='models.xlm_roberta.XLMRobertaOnnxConfig'),
        'yolos': supported_features_mapping('default', 'object-detection', onnx_config_cls='models.yolos.YolosOnnxConfig'),
    }

    # Union of every feature supported by at least one model type.
    AVAILABLE_FEATURES = sorted(reduce((lambda s1, s2: (s1 | s2)), (v.keys() for v in _SUPPORTED_MODEL_TYPE.values())))

    @staticmethod
    def get_supported_features_for_model_type(model_type: str, model_name: Optional[str] = None) -> Dict[str, Callable[[PretrainedConfig], OnnxConfig]]:
        """Return the feature -> OnnxConfig-constructor mapping for `model_type`.

        Raises:
            KeyError: if the model type is not supported for ONNX export.
        """
        model_type = model_type.lower()
        if model_type not in FeaturesManager._SUPPORTED_MODEL_TYPE:
            model_type_and_model_name = f'{model_type} ({model_name})' if model_name else model_type
            raise KeyError(
                f'{model_type_and_model_name} is not supported yet. '
                f'Only {list(FeaturesManager._SUPPORTED_MODEL_TYPE.keys())} are supported. '
                f'If you want to support {model_type} please propose a PR or open up an issue.'
            )
        return FeaturesManager._SUPPORTED_MODEL_TYPE[model_type]

    @staticmethod
    def feature_to_task(feature: str) -> str:
        # A feature with past key/values maps to the same auto-model task.
        return feature.replace('-with-past', '')

    @staticmethod
    def _validate_framework_choice(framework: str):
        """Validate that `framework` is 'pt' or 'tf' and actually installed."""
        if framework not in ['pt', 'tf']:
            raise ValueError(f'Only two frameworks are supported for ONNX export: pt or tf, but {framework} was provided.')
        elif framework == 'pt' and not is_torch_available():
            raise RuntimeError('Cannot export model to ONNX using PyTorch because no PyTorch package was found.')
        elif framework == 'tf' and not is_tf_available():
            raise RuntimeError('Cannot export model to ONNX using TensorFlow because no TensorFlow package was found.')

    @staticmethod
    def get_model_class_for_feature(feature: str, framework: str = 'pt') -> Type:
        """Return the AutoModel class able to load a checkpoint for `feature`."""
        task = FeaturesManager.feature_to_task(feature)
        FeaturesManager._validate_framework_choice(framework)
        if framework == 'pt':
            task_to_automodel = FeaturesManager._TASKS_TO_AUTOMODELS
        else:
            task_to_automodel = FeaturesManager._TASKS_TO_TF_AUTOMODELS
        if task not in task_to_automodel:
            raise KeyError(f'Unknown task: {feature}. Possible values are {list(FeaturesManager._TASKS_TO_AUTOMODELS.values())}')
        return task_to_automodel[task]

    @staticmethod
    def determine_framework(model: str, framework: str = None) -> str:
        """Determine the framework to export with: explicit request, local
        checkpoint contents, or whichever framework is installed."""
        if framework is not None:
            return framework
        framework_map = {'pt': 'PyTorch', 'tf': 'TensorFlow'}
        exporter_map = {'pt': 'torch', 'tf': 'tf2onnx'}
        if os.path.isdir(model):
            # A local checkpoint: infer the framework from the weight files present.
            if os.path.isfile(os.path.join(model, WEIGHTS_NAME)):
                framework = 'pt'
            elif os.path.isfile(os.path.join(model, TF2_WEIGHTS_NAME)):
                framework = 'tf'
            else:
                raise FileNotFoundError(f'Cannot determine framework from given checkpoint location. There should be a {WEIGHTS_NAME} for PyTorch or {TF2_WEIGHTS_NAME} for TensorFlow.')
            logger.info(f'Local {framework_map[framework]} model found.')
        elif is_torch_available():
            framework = 'pt'
        elif is_tf_available():
            framework = 'tf'
        else:
            raise EnvironmentError('Neither PyTorch nor TensorFlow found in environment. Cannot export to ONNX.')
        logger.info(f'Framework not requested. Using {exporter_map[framework]} to export to ONNX.')
        return framework

    @staticmethod
    def get_model_from_feature(feature: str, model: str, framework: str = None, cache_dir: str = None) -> Union['PreTrainedModel', 'TFPreTrainedModel']:
        """Instantiate the model named `model` with the auto-class matching `feature`,
        converting from the other framework's weights if necessary."""
        framework = FeaturesManager.determine_framework(model, framework)
        model_class = FeaturesManager.get_model_class_for_feature(feature, framework)
        try:
            model = model_class.from_pretrained(model, cache_dir=cache_dir)
        except OSError:
            # The checkpoint only exists in the other framework: cross-load it.
            if framework == 'pt':
                logger.info('Loading TensorFlow model in PyTorch before exporting to ONNX.')
                model = model_class.from_pretrained(model, from_tf=True, cache_dir=cache_dir)
            else:
                logger.info('Loading PyTorch model in TensorFlow before exporting to ONNX.')
                model = model_class.from_pretrained(model, from_pt=True, cache_dir=cache_dir)
        return model

    @staticmethod
    def check_supported_model_or_raise(model: Union['PreTrainedModel', 'TFPreTrainedModel'], feature: str = 'default') -> Tuple[str, Callable]:
        """Check that `model` supports `feature`; return (model_type, OnnxConfig ctor).

        Raises:
            ValueError: if the model's architecture does not support the feature.
        """
        model_type = model.config.model_type.replace('_', '-')
        model_name = getattr(model, 'name', '')
        model_features = FeaturesManager.get_supported_features_for_model_type(model_type, model_name=model_name)
        if feature not in model_features:
            raise ValueError(f"{model.config.model_type} doesn't support feature {feature}. Supported values are: {model_features}")
        return (model.config.model_type, FeaturesManager._SUPPORTED_MODEL_TYPE[model_type][feature])

    @staticmethod
    def get_config(model_type: str, feature: str) -> OnnxConfig:
        """Look up the OnnxConfig constructor for a (model_type, feature) pair."""
        return FeaturesManager._SUPPORTED_MODEL_TYPE[model_type][feature]
def test_test_dataloader():
    """The test dataloader yields 4 batches of 500 (input, target) rows with
    8936 features each, and can be iterated more than once."""
    handler = AEDataHandler(
        'MovieLensSmall', train_data_path, validation_input_data_path,
        validation_output_data_path, test_input_data_path, test_output_data_path)
    loader = handler.get_test_dataloader()

    # First pass: check batch size and feature dimensionality.
    n_batches = 0
    for batch in loader:
        assert len(batch[0]) == 500
        assert len(batch[1]) == 500
        assert len(batch[0][0]) == 8936
        assert len(batch[1][0]) == 8936
        n_batches += 1
    assert n_batches == 4

    # Second pass: the loader must be re-iterable with the same batch count.
    n_batches = 0
    for batch in loader:
        assert len(batch[0]) == 500
        assert len(batch[1]) == 500
        n_batches += 1
    assert n_batches == 4
def distance_transform_cdt(input, metric='chessboard', return_distances=True, return_indices=False, distances=None, indices=None):
    """Chamfer-type distance transform of a binary image.

    Parameters:
        input: array-like; nonzero elements are foreground.
        metric: 'chessboard', 'taxicab'/'cityblock'/'manhattan', or a custom
            3x3x...x3 structuring-element array.
        return_distances / return_indices: which results to compute.
        distances / indices: optional pre-allocated int32 output arrays
            (in-place mode); must match the required shapes.

    Returns:
        The distance array, the feature-transform indices, a tuple of both,
        or None depending on the flags and in-place arguments.

    Raises:
        RuntimeError: on invalid flag combinations, metrics, or output arrays.

    Fix applied during review: the 'indices must of int32 type' error message
    was missing the word 'be'.
    """
    if (not return_distances) and (not return_indices):
        msg = 'at least one of distances/indices must be specified'
        raise RuntimeError(msg)
    ft_inplace = isinstance(indices, numpy.ndarray)
    dt_inplace = isinstance(distances, numpy.ndarray)
    input = numpy.asarray(input)
    # Resolve named metrics to structuring elements.
    if metric in ['taxicab', 'cityblock', 'manhattan']:
        rank = input.ndim
        metric = generate_binary_structure(rank, 1)
    elif metric == 'chessboard':
        rank = input.ndim
        metric = generate_binary_structure(rank, rank)
    else:
        try:
            metric = numpy.asarray(metric)
        except Exception:
            raise RuntimeError('invalid metric provided')
        for s in metric.shape:
            if s != 3:
                raise RuntimeError('metric sizes must be equal to 3')
    if not metric.flags.contiguous:
        metric = metric.copy()
    # Initialize the distance array: -1 marks foreground, 0 background.
    if dt_inplace:
        if distances.dtype.type != numpy.int32:
            raise RuntimeError('distances must be of int32 type')
        if distances.shape != input.shape:
            raise RuntimeError('distances has wrong shape')
        dt = distances
        dt[...] = numpy.where(input, (- 1), 0).astype(numpy.int32)
    else:
        dt = numpy.where(input, (- 1), 0).astype(numpy.int32)
    rank = dt.ndim
    if return_indices:
        sz = numpy.prod(dt.shape, axis=0)
        ft = numpy.arange(sz, dtype=numpy.int32)
        ft.shape = dt.shape
    else:
        ft = None
    # Two chamfer passes: forward scan, then backward (via array reversal).
    _nd_image.distance_transform_op(metric, dt, ft)
    dt = dt[tuple([slice(None, None, (- 1))] * rank)]
    if return_indices:
        ft = ft[tuple([slice(None, None, (- 1))] * rank)]
    _nd_image.distance_transform_op(metric, dt, ft)
    dt = dt[tuple([slice(None, None, (- 1))] * rank)]
    if return_indices:
        ft = ft[tuple([slice(None, None, (- 1))] * rank)]
        ft = numpy.ravel(ft)
        # Convert flat feature-transform offsets into per-axis index arrays.
        if ft_inplace:
            if indices.dtype.type != numpy.int32:
                raise RuntimeError('indices must be of int32 type')
            if indices.shape != ((dt.ndim,) + dt.shape):
                raise RuntimeError('indices has wrong shape')
            tmp = indices
        else:
            tmp = numpy.indices(dt.shape, dtype=numpy.int32)
        for ii in range(tmp.shape[0]):
            rtmp = numpy.ravel(tmp[ii, ...])[ft]
            rtmp.shape = dt.shape
            tmp[ii, ...] = rtmp
        ft = tmp
    # Assemble the return value; in-place outputs are not returned.
    result = []
    if return_distances and (not dt_inplace):
        result.append(dt)
    if return_indices and (not ft_inplace):
        result.append(ft)
    if len(result) == 2:
        return tuple(result)
    elif len(result) == 1:
        return result[0]
    else:
        return None
class AMAZON2Processor(TextClassProcessor):
    """Processor for the Amazon-2 (binary polarity) text-classification dataset."""

    def __init__(self):
        # NOTE(review): the base-class __init__ is not called — presumably
        # TextClassProcessor needs no per-instance setup beyond this flag; confirm.
        # Examples in this dataset carry a title field.
        self.has_title = True

    def get_labels(self):
        # The two polarity labels, as the strings '1' and '2'.
        return [str(i) for i in range(1, 3)]

    def get_train_size(self):
        # Number of labeled training examples.
        return 3600000

    def get_dev_size(self):
        # Number of dev examples.
        return 400000

    def get_unsup_examples(self, raw_data_dir, unsup_set):
        """Build unsupervised examples: in-domain ('unsup_in') from the training
        csv, otherwise from an external unlabeled csv named after `unsup_set`."""
        if (unsup_set == 'unsup_in'):
            return self._create_examples(self._read_tsv(os.path.join(raw_data_dir, 'train.csv'), quotechar='"', delimiter=','), 'unsup_in', skip_unsup=False)
        else:
            # NOTE(review): `dir_cell` is computed but never used, and `unsup_dir`
            # is None, so os.path.join below raises TypeError — this looks like a
            # redacted/unset data path that must be filled in before any
            # unsup_set other than 'unsup_in' can be used.
            dir_cell = raw_data_dir[5:7]
            unsup_dir = None
            return self._create_examples(self._read_tsv(os.path.join(unsup_dir, '{:s}.csv'.format(unsup_set)), quotechar='"', delimiter=','), unsup_set, skip_unsup=False)
class DefaultJsonEncoder(json.JSONEncoder):
    """JSON encoder that also serializes numpy arrays/scalars, pandas frames
    and series, PIL images, and explanation objects."""

    def default(self, o):
        # Check order is preserved from the original implementation.
        if isinstance(o, np.ndarray):
            return o.tolist()
        if isinstance(o, np.generic):
            return o.item()
        if isinstance(o, (pd.DataFrame, pd.Series)):
            return o.to_dict()
        if isinstance(o, PilImage.Image):
            # Serialize an image as a nested list of pixel values.
            return np.array(o).tolist()
        if isinstance(o, ExplanationBase):
            # Record class identity plus a deep copy of the instance state so
            # the explanation can be reconstructed later.
            state = {key: deepcopy(value) for key, value in o.__dict__.items()}
            return {'module': o.__class__.__module__, 'class': o.__class__.__name__, 'data': state}
        return super().default(o)
def main():
    """Entry point: build (or, in test mode, load) the coarse net and fine
    LSTM, materialize the dataloaders in memory, and run training."""
    config = parser.parse_args()
    device = config.use_gpu
    fine_LSTM = MyModel.fine_LSTM(config).cuda(device)
    coarseNet = MyModel.coarseNet(config).cuda(device)
    if config.stage == 'test':
        # Restore pickled checkpoints, remapping storages onto the chosen GPU.
        prefix = 'output/' + '730' + config.testName
        remap = lambda storage, loc: storage.cuda(device)
        fine_LSTM = torch.load(prefix + 'fine_LSTM.pkl', map_location=remap)
        coarseNet = torch.load(prefix + 'coarse.pkl', map_location=remap)
    dataRoot = 'processed_data/'
    transform_origin = transforms.Compose([Rescale(config.origin_image_size), ToTensor()])
    train_dataset_origin = LandmarksDataset(csv_file=dataRoot + config.traincsv, root_dir=dataRoot + 'images',
                                            transform=transform_origin, landmarksNum=config.landmarkNum)
    val_dataset = LandmarksDataset(csv_file=dataRoot + config.testcsv, root_dir=dataRoot + 'images',
                                   transform=transform_origin, landmarksNum=config.landmarkNum)
    # Cache every batch in a plain list so epochs iterate without re-loading.
    train_dataloader_t = DataLoader(train_dataset_origin, batch_size=config.batchSize, shuffle=False, num_workers=0)
    train_dataloader = list(train_dataloader_t) if config.stage == 'train' else []
    val_dataloader_t = DataLoader(val_dataset, batch_size=config.batchSize, shuffle=False, num_workers=0)
    val_dataloader = list(val_dataloader_t)
    print(len(train_dataloader), len(val_dataloader))
    dataloaders = {'train': train_dataloader, 'val': val_dataloader}
    criterion_coarse = LossFunction.coarse_heatmap(config)
    criterion_fine = LossFunction.fine_heatmap(config)
    params = list(coarseNet.parameters()) + list(fine_LSTM.parameters())
    optimizer_ft = optim.Adam(params)
    TrainNet.train_model(coarseNet, fine_LSTM, dataloaders, criterion_coarse, criterion_fine, optimizer_ft, config)
class CrossEntropyLoss2d(nn.Module):
    """Weighted wrapper around nn.CrossEntropyLoss for dense (2d) prediction.

    The target is cast to long before the loss is computed, and the result is
    scaled by `loss_weight`.
    """

    def __init__(self, weight=None, ignore_index=255, reduction='mean',
                 label_smoothing=0.0, loss_weight=1.0, loss_name='ce_loss'):
        super().__init__()
        self.loss_weight = loss_weight
        self._loss_name = loss_name
        self.criterion = nn.CrossEntropyLoss(weight=weight, ignore_index=ignore_index,
                                             reduction=reduction, label_smoothing=label_smoothing)

    def forward(self, pred, target):
        raw = self.criterion(pred, target.long())
        return self.loss_weight * raw

    def loss_name(self):
        # Identifier used to tag this loss in logs/metrics.
        return self._loss_name
# NOTE(review): the original text contained the bare, syntactically invalid
# line `(**njit_dict_no_parallel)` — a numba decorator stripped of its name.
# Reconstructed as `@njit(**njit_dict_no_parallel)`; confirm against upstream.
@njit(**njit_dict_no_parallel)
def deposition_estimator_kasen(energy, ejecta_density, iron_group_fraction):
    """Kasen-style energy-deposition estimator.

    Combines the average Compton energy fraction times the Compton opacity
    with the photoabsorption opacity for the given photon energy, ejecta
    density, and iron-group mass fraction.
    """
    return (
        get_average_compton_fraction(energy)
        * compton_opacity_calculation(energy, ejecta_density)
    ) + photoabsorption_opacity_calculation(energy, ejecta_density, iron_group_fraction)
class LabelSmoothingCrossEntropy(nn.Module):
    """Cross-entropy with label smoothing (fastai-style).

    The loss is (1 - eps) * NLL + eps * (uniform cross-entropy / num_classes).

    NOTE(review): the smoothing parameter's name was lost in the mangled
    source (`def __init__(self, : float=0.1, ...)` — fastai originally used
    the non-ASCII name `ε`); restored here as `eps`. Keyword callers that used
    the original name must be updated — confirm against upstream.
    """

    def __init__(self, eps: float = 0.1, reduction: str = 'mean'):
        super().__init__()
        self.eps, self.reduction = eps, reduction

    def forward(self, output, target):
        # c: number of classes (last dimension of the logits).
        c = output.size()[-1]
        log_preds = F.log_softmax(output, dim=-1)
        # Smoothing term: cross-entropy against the uniform distribution.
        loss = reduce_loss(-log_preds.sum(dim=-1), self.reduction)
        nll = F.nll_loss(log_preds, target, reduction=self.reduction)
        return (1 - self.eps) * nll + self.eps * (loss / c)
def make_tree(cfg, logger=None):
    """Build the loss tree described by `cfg.loss`, apply hparam overrides and
    $(name) substitutions, optionally log the result, and move it to device."""
    if logger is not None:
        logger('\n[Preparing loss...]')
    loss_file = cfg.loss
    if not loss_file.lower().endswith('.txt'):
        loss_file += '.txt'
    with open(loss_file, 'r') as f:
        lookup = parse(f.read().splitlines())
    # Command-line hparams override entries already present in the file.
    for key, value in parse(cfg.hparams).items():
        if key in lookup:
            lookup[key] = value
    # Expand $(name) meta-references by substituting each entry's value into
    # every other entry that mentions it.
    for key, value in lookup.items():
        token = '$({})'.format(key)
        for other_key, other_value in lookup.items():
            if token in other_value:
                lookup[other_key] = other_value.replace(token, value)
    root = node.LossNode('total', cfg, lookup=lookup)
    if logger is not None:
        logger(root)
        # Persist the resolved loss definition next to the logs.
        # NOTE(review): the mangled source leaves it ambiguous whether this
        # dump was inside the logger guard; it must be (logger.get_path is
        # used), so it is placed here — confirm against upstream.
        with open(logger.get_path('loss.txt'), 'w') as f:
            for key, value in lookup.items():
                f.write('{}={}\n'.format(key, value))
    root = gpu_utils.obj2device(root)
    return root
def _fused_bias_act_cuda(x, b, axis, act, alpha, gain):
    """Fused bias + activation via a custom CUDA kernel, with hand-written
    first- and second-order gradients.

    NOTE(review): the `@tf.custom_gradient` decorators below appeared as bare
    `_gradient` tokens in the mangled source and have been reconstructed; this
    matches the standard StyleGAN2 fused_bias_act layout — confirm upstream.
    """
    x = tf.convert_to_tensor(x)
    empty_tensor = tf.constant([], dtype=x.dtype)
    b = tf.convert_to_tensor(b) if b is not None else empty_tensor
    act_spec = activation_funcs[act]
    # Bias must be 1-D and either empty or match the size of the bias axis.
    assert len(b.shape) == 1 and (b.shape[0] == 0 or b.shape[0] == x.shape[axis])
    assert b.shape[0] == 0 or 0 <= axis < len(x.shape)
    if alpha is None:
        alpha = act_spec.def_alpha
    if gain is None:
        gain = act_spec.def_gain

    # Fast path for the identity case.
    # NOTE(review): `b` was already replaced by `empty_tensor` above, so
    # `b is None` can never hold here; left unchanged to preserve behavior.
    if act == 'linear' and b is None and gain == 1.0:
        return x
    if act_spec.cuda_idx is None:
        # No CUDA kernel for this activation: fall back to the reference impl.
        return _fused_bias_act_ref(x=x, b=b, axis=axis, act=act, alpha=alpha, gain=gain)

    cuda_kernel = _get_plugin().fused_bias_act
    cuda_kwargs = dict(axis=axis, act=act_spec.cuda_idx, alpha=alpha, gain=gain)

    def func_y(x, b):
        # Forward pass.
        y = cuda_kernel(x=x, b=b, ref=empty_tensor, grad=0, **cuda_kwargs)
        y.set_shape(x.shape)
        return y

    def grad_dx(dy, x, y):
        # First-order gradient w.r.t. x; `ref` selects input or output reuse.
        ref = {'x': x, 'y': y}[act_spec.ref]
        dx = cuda_kernel(x=dy, b=empty_tensor, ref=ref, grad=1, **cuda_kwargs)
        dx.set_shape(x.shape)
        return dx

    def grad_db(dx):
        # First-order gradient w.r.t. b: reduce dx over all axes except `axis`.
        if b.shape[0] == 0:
            return empty_tensor
        db = dx
        if axis < len(x.shape) - 1:
            db = tf.reduce_sum(db, list(range(axis + 1, len(x.shape))))
        if axis > 0:
            db = tf.reduce_sum(db, list(range(axis)))
        db.set_shape(b.shape)
        return db

    def grad2_d_dy(d_dx, d_db, x, y):
        # Second-order gradient w.r.t. dy.
        ref = {'x': x, 'y': y}[act_spec.ref]
        d_dy = cuda_kernel(x=d_dx, b=d_db, ref=ref, grad=1, **cuda_kwargs)
        d_dy.set_shape(x.shape)
        return d_dy

    def grad2_d_x(d_dx, d_db, x, y):
        # Second-order gradient w.r.t. x (only needed for curved activations).
        ref = {'x': x, 'y': y}[act_spec.ref]
        d_x = cuda_kernel(x=d_dx, b=d_db, ref=ref, grad=2, **cuda_kwargs)
        d_x.set_shape(x.shape)
        return d_x

    # Fast version for piecewise-linear activations (zero second derivative).
    @tf.custom_gradient
    def func_zero_2nd_grad(x, b):
        y = func_y(x, b)

        @tf.custom_gradient
        def grad(dy):
            dx = grad_dx(dy, x, y)
            db = grad_db(dx)

            def grad2(d_dx, d_db):
                d_dy = grad2_d_dy(d_dx, d_db, x, y)
                return d_dy
            return (dx, db), grad2
        return y, grad

    # Slow version for activations with a non-zero second derivative.
    @tf.custom_gradient
    def func_nonzero_2nd_grad(x, b):
        y = func_y(x, b)

        def grad_wrap(dy):
            @tf.custom_gradient
            def grad_impl(dy, x):
                dx = grad_dx(dy, x, y)
                db = grad_db(dx)

                def grad2(d_dx, d_db):
                    d_dy = grad2_d_dy(d_dx, d_db, x, y)
                    d_x = grad2_d_x(d_dx, d_db, x, y)
                    return (d_dy, d_x)
                return (dx, db), grad2
            return grad_impl(dy, x)
        return y, grad_wrap

    if act_spec.zero_2nd_grad:
        return func_zero_2nd_grad(x, b)
    return func_nonzero_2nd_grad(x, b)
def zou_et_al_criterion_rescaling(criterion, n_samples, noise_variance):
    """Rescale an information criterion by removing the Gaussian
    log-likelihood normalization term (Zou et al. convention)."""
    gaussian_term = n_samples * np.log(2 * np.pi * noise_variance)
    return criterion - gaussian_term - n_samples
def rerank(model_file, ctx_file, rnk_file, score=False):
    """Write length-based rerank scores for candidate outputs.

    For each line pair from `ctx_file` (tab-separated suffix tokens) and
    `rnk_file` (tab-separated candidates), emit one row per candidate with
    its character length, word length, and the suffix length.

    The output is written to `rnk_file + '_LEN.f'` (score mode) or '.gen'.
    `model_file` is unused; kept for interface compatibility.

    Raises:
        Exception: if `score` is False (generation mode is unsupported).

    Fix applied during review: modernized from Python 2 (`itertools.izip`,
    `print >> f` chevrons — the latter had even been mangled into dead tuple
    expressions) to Python 3, with context-managed file handles.
    """
    out_path = rnk_file + '_LEN' + ('.f' if score else '.gen')
    with open(out_path, 'w') as out, open(ctx_file) as ctx_f, open(rnk_file) as rnk_f:
        header_written = False
        for ctx_line, rnk_line in zip(ctx_f, rnk_f):
            suffix = ctx_line.strip().split('\t')
            candidates = rnk_line.strip().split('\t')
            cscores = [len(c) for c in candidates]          # character lengths
            wscores = [len(c.split()) for c in candidates]  # word counts
            if not score:
                raise Exception('Not supported!')
            if not header_written:
                print('cLEN wLEN allLEN', file=out)
                header_written = True
            for wscore, cscore in zip(wscores, cscores):
                print(cscore, wscore, len(suffix), file=out)
class DecoderBlockPreNorm(DecoderBlock):
    """Pre-norm variant of DecoderBlock: layer normalization is applied
    *before* each sub-layer (masked self-attention, encoder-decoder attention,
    feed-forward) instead of after, with residual additions on the raw path."""

    def __init__(self, *kargs, **kwargs):
        super(DecoderBlockPreNorm, self).__init__(*kargs, **kwargs)

    def forward(self, inputs, context, state=None):
        # x: decoder input; state: cached past activations for incremental decoding.
        x = inputs
        res = x
        # Sub-layer 1: (optional) pre-norm, then masked self-attention or a
        # stateful replacement block.
        x = (self.lnorm1(x) if hasattr(self, 'lnorm1') else x)
        if self.stateful:
            (x, state) = self.state_block(x, state)
        else:
            # Append the current step to the cached keys/values, if any.
            if (state is None):
                x_past = x
            else:
                x_past = torch.cat((state, x), 1)
            (x, _) = self.masked_attention(x, x_past, x_past)
            state = x_past
        if hasattr(self, 'state_proj'):
            x = self.state_proj(x)
        # NOTE(review): this residual uses out-of-place .add while the two
        # below use in-place .add_ — presumably deliberate (the later adds are
        # safe to do in place on fresh dropout outputs); confirm before changing.
        x = self.dropout(x).add(res)
        res = x
        # Sub-layer 2: pre-norm + encoder-decoder attention over `context`.
        x = (self.lnorm2(x) if hasattr(self, 'lnorm2') else x)
        (x, attn_enc) = self.attention(x, context, context)
        x = self.dropout(x).add_(res)
        res = x
        # Sub-layer 3: pre-norm + position-wise feed-forward.
        x = (self.lnorm3(x) if hasattr(self, 'lnorm3') else x)
        x = self.fc(x)
        x = self.dropout(x).add_(res)
        # Returns the block output, encoder-attention weights, and new state.
        return (x, attn_enc, state)
def mask_tokens(inputs, mlm_probability, tokenizer, special_tokens_mask):
    """Prepare BERT-style masked-LM inputs/labels with numpy.

    Of the selected tokens, 80% become [MASK], 10% a random token, and the
    remaining 10% are left untouched; unselected positions get label -100.
    Note: `inputs` is modified in place and also returned.
    """
    labels = np.copy(inputs)
    # One uniform draw per position; special tokens can never be selected.
    scores = np.random.random_sample(labels.shape)
    special = special_tokens_mask.astype(np.bool_)
    scores[special] = 0.0
    masked_indices = scores > (1 - mlm_probability)
    labels[~masked_indices] = -100  # ignore index for unmasked positions
    # 80% of the selected tokens -> [MASK].
    replaced = (np.random.random_sample(labels.shape) < 0.8) & masked_indices
    inputs[replaced] = tokenizer.convert_tokens_to_ids(tokenizer.mask_token)
    # Half of the remaining 20% -> a uniformly random vocabulary token.
    randomized = (np.random.random_sample(labels.shape) < 0.5) & masked_indices & ~replaced
    random_words = np.random.randint(low=0, high=len(tokenizer),
                                     size=np.count_nonzero(randomized), dtype=np.int64)
    inputs[randomized] = random_words
    return inputs, labels
def load_dataset(args):
    """Return the label-stripped test dataset selected by `args.dataset`,
    with pixel values scaled back to the 0-255 range."""
    to_px = tr.Compose([tr.ToTensor(), lambda t: t * 255])
    if args.dataset == 'cifar100':
        wrapped = dataset_without_label(torchvision.datasets.CIFAR100)
        return wrapped(root=args.data_path, transform=to_px)
    if args.dataset in ('celeba', 'img32', 'tinyimg'):
        wrapped = dataset_without_label(torchvision.datasets.ImageFolder)
        # celeba uses its train split; the others use their val split.
        split = 'train' if args.dataset == 'celeba' else 'val'
        return wrapped(root=os.path.join(args.data_path, split), transform=to_px)
    assert False, 'dataset %s' % args.dataset
class FeatureSparseToDense(ModelLayer):
    """Caffe2 model layer that densifies sparse feature records.

    For each (field, feature_specs) pair in `input_specs`, declares a dense
    output schema keyed by the fixed list of feature ids and emits the
    SparseToDenseMask / LengthsToRanges / Alias operators that fill it.
    Supported feature types: FLOAT, ID_LIST, ID_SCORE_LIST, EMBEDDING,
    GENERIC_FEATURE.
    """

    def __init__(self, model, input_record, input_specs, name='feature_sparse_to_dense', default_dense_value=None, **kwargs):
        """Build the output schema.

        default_dense_value: fill value for absent FLOAT features; only 0.0
        or NaN are supported (they map to pre-registered global constants).
        """
        super(FeatureSparseToDense, self).__init__(model, name, input_record, **kwargs)
        if (default_dense_value is None):
            default_dense_value = 0.0
        default_dense_value = float(default_dense_value)
        assert (np.isnan(default_dense_value) or (default_dense_value == 0.0)), 'default_dense_value can only be 0.0 or NaN'
        self.input_specs = input_specs
        # Blob holding the dense fill value (NaN or zero global constant).
        self.default_float_value = (model.global_constants['NAN'] if np.isnan(default_dense_value) else model.global_constants['ZERO'])
        # Blob holding the (0, 0) range used for absent list features.
        self.zero_range = model.global_constants['ZERO_RANGE']
        outputs = []
        for (field, feature_specs) in self.input_specs:
            assert (len(feature_specs.feature_names) == len(feature_specs.feature_ids))
            if (feature_specs.feature_type == 'FLOAT'):
                # One float per feature id, in a single dense vector.
                outputs.append((field, schema.Scalar((np.float32, (len(feature_specs.feature_ids),)), self.get_next_blob_reference((field + '_output')))))
            elif (feature_specs.feature_type == 'ID_LIST'):
                # Per-feature (offset, length) ranges into a flat int64 values blob.
                outputs.append((field, schema.Struct(('ranges', schema.Scalar((np.int32, (len(feature_specs.feature_ids), 2)), self.get_next_blob_reference((field + '_ranges')))), ('values', schema.Scalar(np.int64, self.get_next_blob_reference((field + '_values')))))))
            elif (feature_specs.feature_type == 'ID_SCORE_LIST'):
                # Ranges into parallel id (int64) and score (float32) blobs.
                outputs.append((field, schema.Struct(('ranges', schema.Scalar((np.int32, (len(feature_specs.feature_ids), 2)), self.get_next_blob_reference((field + '_ranges')))), ('ids', schema.Scalar(np.int64, self.get_next_blob_reference((field + '_ids')))), ('scores', schema.Scalar(np.float32, self.get_next_blob_reference((field + '_scores')))))))
            elif (feature_specs.feature_type == 'EMBEDDING'):
                # Ranges into a flat float32 embedding-values blob.
                outputs.append((field, schema.Struct(('ranges', schema.Scalar((np.int32, (len(feature_specs.feature_ids), 2)), self.get_next_blob_reference((field + '_ranges')))), ('values', schema.Scalar(np.float32, self.get_next_blob_reference((field + '_values')))))))
            elif (feature_specs.feature_type == 'GENERIC_FEATURE'):
                # Same layout as EMBEDDING; values are parsed from a generic record.
                outputs.append((field, schema.Struct(('ranges', schema.Scalar((np.int32, (len(feature_specs.feature_ids), 2)), self.get_next_blob_reference((field + '_ranges')))), ('values', schema.Scalar(np.float32, self.get_next_blob_reference((field + '_values')))))))
            else:
                raise TypeError('Unsupported input type: {0}'.format(feature_specs.feature_type))
        self.output_schema = schema.Struct(*outputs)
        # Carry the feature specs forward as metadata on the output scalars.
        for (field, feature_specs) in input_specs:
            schema.attach_metadata_to_scalars(self.output_schema[field], schema.Metadata(feature_specs=feature_specs))

    def add_ops(self, net):
        """Emit the operators that densify each input field into its output blobs."""
        record = self.input_record
        for (field, feature_specs) in self.input_specs:
            if (feature_specs.feature_type == 'FLOAT'):
                # Scatter sparse (key, value) pairs into the dense vector,
                # filling absent ids with the default value.
                net.SparseToDenseMask([record[field].keys(), record[field].values(), self.default_float_value, record[field].lengths()], [self.output_schema[field]()], mask=feature_specs.feature_ids)
            elif (feature_specs.feature_type == 'ID_LIST'):
                # Convert per-key lengths into (offset, length) ranges, then
                # densify the ranges; values pass through via Alias.
                id_list_ranges = net.LengthsToRanges(record[field].values.lengths(), net.NextScopedBlob('id_list_ranges'))
                net.SparseToDenseMask([record[field].keys(), id_list_ranges, self.zero_range, record[field].lengths()], self.output_schema[field].ranges(), mask=feature_specs.feature_ids)
                net.Alias(record[field].values.items(), self.output_schema[field].values())
            elif (feature_specs.feature_type == 'ID_SCORE_LIST'):
                id_list_ranges = net.LengthsToRanges(record[field].values.lengths(), net.NextScopedBlob('id_score_list_ranges'))
                net.SparseToDenseMask([record[field].keys(), id_list_ranges, self.zero_range, record[field].lengths()], self.output_schema[field].ranges(), mask=feature_specs.feature_ids)
                # Ids and scores are parallel blobs; both pass through unchanged.
                net.Alias(record[field].values.keys(), self.output_schema[field].ids())
                net.Alias(record[field].values.values(), self.output_schema[field].scores())
            elif (feature_specs.feature_type == 'EMBEDDING'):
                ranges = net.LengthsToRanges(record[field].values.lengths(), net.NextScopedBlob('embeddings_ranges'))
                net.SparseToDenseMask([record[field].keys(), ranges, self.zero_range, record[field].lengths()], self.output_schema[field].ranges(), mask=feature_specs.feature_ids)
                net.Alias(record[field].values.items(), self.output_schema[field].values())
            elif (feature_specs.feature_type == 'GENERIC_FEATURE'):
                # Parse the generic record into lengths/ids/values first.
                (feature_lengths_blob, feature_ids_blob, value_lengths_blob, value_values_blob) = net.ParseGeneric([record[field]()], ['feature_lengths', 'feature_ids', 'value_lengths', 'value_values'], feature_type_enum=1)
                ranges = net.LengthsToRanges(value_lengths_blob, net.NextScopedBlob('generics_ranges'))
                net.SparseToDenseMask([feature_ids_blob, ranges, self.zero_range, feature_lengths_blob], self.output_schema[field].ranges(), mask=feature_specs.feature_ids)
                net.Alias(value_values_blob, self.output_schema[field].values())

    def get_metadata(self):
        """Return (spec dict, output blobs, output types) per input field."""
        metadata = []
        for (field, feature_specs) in self.input_specs:
            metadata.append(({'type': feature_specs.feature_type, 'names': feature_specs.feature_names, 'ids': feature_specs.feature_ids}, self.output_schema[field].field_blobs(), self.output_schema[field].field_types()))
            if (feature_specs.feature_type == 'FLOAT'):
                # Scalar floats carry a cardinality of 1.
                metadata[(- 1)][0]['cardinality'] = 1
        return metadata

    def get_accessed_features(self):
        """Return, per field, the feature type and the set of feature ids this layer reads."""
        accessed_features = defaultdict(list)
        for (field, feature_specs) in self.input_specs:
            accessed_features[field].append(AccessedFeatures(feature_specs.feature_type, set(feature_specs.feature_ids)))
        return accessed_features
class SegmentationSoftmax(Layer):
  """Softmax segmentation output layer.

  Takes per-pixel logits, produces softmax posteriors, a cross-entropy-based
  loss against the segmentation labels, and IoU-style measures. When not
  training with batch size 1 and original-size labels available, predictions
  are resized/padded back to the original resolution before measuring.

  NOTE(review): fixed a genuine bug — `_resize_predictions_to_original_size`
  was defined without `self` but called through `self.`, which would raise
  a TypeError (4 positional args passed to a 3-parameter function). It uses
  no instance state, so it is now a @staticmethod.
  """
  output_layer = True

  def __init__(self, name, inputs, dataset, network_input_dict, tower_setup,
               resize_targets=False, resize_logits=False, loss='ce', fraction=None):
    super().__init__()
    self.n_classes = dataset.num_classes()
    targets = network_input_dict[DataKeys.SEGMENTATION_LABELS]
    # Labels are expected as a rank-4 tensor (batch, h, w, 1).
    assert targets.get_shape().ndims == 4, targets.get_shape()
    # Resizing both targets and logits at once is contradictory.
    assert not (resize_targets and resize_logits)
    assert len(inputs) == 1, len(inputs)
    logits = inputs[0]
    assert logits.get_shape()[-1] == self.n_classes
    if resize_targets:
      print('warning, using resize_targets=True, so the resulting scores will not be computed at the initial resolution', file=log.v1)
      # Nearest neighbor keeps labels integral (no interpolated class ids).
      targets = tf.image.resize_nearest_neighbor(targets, tf.shape(logits)[1:3])
    if resize_logits:
      logits = tf.image.resize_images(logits, tf.shape(targets)[1:3])
    output = tf.nn.softmax(logits, -1, 'softmax')
    self.outputs = [output]
    if self.n_classes == 2:
      # For binary segmentation, expose the foreground-channel posterior.
      self.extractions[Extractions.SEGMENTATION_POSTERIORS] = output[..., 1]
    class_pred = tf.argmax(logits, axis=3)
    targets = tf.cast(targets, tf.int64)
    # Drop the trailing singleton channel: (batch, h, w, 1) -> (batch, h, w).
    targets = tf.squeeze(targets, axis=3)
    self.loss = self._create_loss(loss, fraction, logits, targets)
    self.losses.append(self.loss)
    batch_size = smart_shape(targets)[0]
    if (not tower_setup.is_training) and (batch_size == 1) and \
        (DataKeys.SEGMENTATION_LABELS_ORIGINAL_SIZE in network_input_dict):
      # Evaluation path: score against the labels at their original resolution.
      print(tower_setup.network_name, name, ': Using SEGMENTATION_LABELS_ORIGINAL_SIZE for calculating IoU', file=log.v1)
      targets_for_measures = network_input_dict[DataKeys.SEGMENTATION_LABELS_ORIGINAL_SIZE]
      targets_for_measures = tf.cast(targets_for_measures, tf.int64)
      targets_for_measures = tf.squeeze(targets_for_measures, axis=3)
      self.extractions[Extractions.SEGMENTATION_MASK_INPUT_SIZE] = class_pred
      class_pred_for_measures = self._resize_predictions_to_original_size(
        class_pred, network_input_dict, targets_for_measures)
      self.extractions[Extractions.SEGMENTATION_MASK_ORIGINAL_SIZE] = class_pred_for_measures
    else:
      print(tower_setup.network_name, name, ': Using SEGMENTATION_LABELS for calculating IoU', file=log.v1)
      targets_for_measures = targets
      class_pred_for_measures = class_pred
      self.extractions[Extractions.SEGMENTATION_MASK_INPUT_SIZE] = class_pred_for_measures
    self.measures = self._create_measures(class_pred_for_measures, targets_for_measures)
    self.add_image_summary(tf.cast(tf.expand_dims(class_pred, axis=3), tf.float32), 'predicted labels')
    self.add_scalar_summary(self.loss, 'loss')

  def _create_loss(self, loss_str, fraction, logits, targets):
    """Build the configured loss ('ce', 'bootstrapped_ce' or 'class_balanced_ce').

    Void-labeled pixels (VOID_LABEL) are masked out of the cross entropy and
    excluded from the per-image valid-pixel count used for normalization.
    """
    raw_ce = None
    n_valid_pixels_per_im = None
    if 'ce' in loss_str:
      no_void_label_mask = tf.not_equal(targets, VOID_LABEL)
      # Replace void labels by 0 so sparse CE does not index out of range;
      # their contribution is zeroed out via the mask right after.
      targets_no_void = tf.where(no_void_label_mask, targets, tf.zeros_like(targets))
      raw_ce = tf.nn.sparse_softmax_cross_entropy_with_logits(logits=logits, labels=targets_no_void, name='ce')
      raw_ce *= tf.cast(no_void_label_mask, tf.float32)
      n_valid_pixels_per_im = tf.reduce_sum(tf.cast(no_void_label_mask, tf.int32), axis=[1, 2])
    if loss_str == 'ce':
      ce_per_im = tf.reduce_sum(raw_ce, axis=[1, 2])
      # maximum(..., 1) guards against division by zero on all-void images.
      ce_per_im /= tf.cast(tf.maximum(n_valid_pixels_per_im, 1), tf.float32)
      ce_total = tf.reduce_mean(ce_per_im, axis=0)
      loss = ce_total
    elif loss_str == 'bootstrapped_ce':
      loss = bootstrapped_ce_loss(raw_ce, fraction, n_valid_pixels_per_im)
    elif loss_str == 'class_balanced_ce':
      loss = class_balanced_ce_loss(raw_ce, targets, self.n_classes)
    else:
      assert False, ('unknown loss', loss_str)
    return loss

  def _create_measures(self, pred, targets):
    """Return measures dict (loss summed over the batch, example count, and
    binary segmentation measures when n_classes == 2)."""
    n_examples = tf.shape(targets)[0]
    measures = {Measures.LOSS: (self.loss * tf.cast(n_examples, tf.float32)),
                Measures.N_EXAMPLES: n_examples}
    if self.n_classes == 2:
      binary_measures = compute_measures_for_binary_segmentation_tf(pred, targets)
      measures.update(binary_measures)
    return measures

  @staticmethod
  def _resize_predictions_to_original_size(class_pred, network_input_dict, targets_for_measures):
    """Resize (and, when a crop box is present, pad) predictions back to the
    resolution of `targets_for_measures`.

    BUGFIX: was missing @staticmethod (and has no `self` parameter), yet is
    called via `self.` above — that call would have raised a TypeError.
    """
    if DataKeys.CROP_BOXES_y0x0y1x1 in network_input_dict:
      # assumes a single crop box per example (batch size 1 eval path) — the
      # squeeze(axis=0) below relies on that.
      crop_box = tf.squeeze(network_input_dict[DataKeys.CROP_BOXES_y0x0y1x1], axis=0)
      y0, x0, y1, x1 = tf.unstack(crop_box)
      height_before_resize = y1 - y0
      width_before_resize = x1 - x0
    else:
      height_before_resize, width_before_resize = tf.shape(targets_for_measures)[1:3]
      y0, x0, y1, x1 = None, None, None, None
    class_pred_original_size = tf.squeeze(
      tf.image.resize_nearest_neighbor(class_pred[..., tf.newaxis],
                                       [height_before_resize, width_before_resize]), axis=-1)
    if DataKeys.CROP_BOXES_y0x0y1x1 in network_input_dict:
      # Pad the resized crop back into its position inside the full frame.
      pad_y_l = y0
      pad_y_r = tf.shape(targets_for_measures)[1] - y1
      pad_x_l = x0
      pad_x_r = tf.shape(targets_for_measures)[2] - x1
      class_pred_for_measures = tf.pad(class_pred_original_size,
                                       [[0, 0], [pad_y_l, pad_y_r], [pad_x_l, pad_x_r]])
    else:
      class_pred_for_measures = class_pred_original_size
    return class_pred_for_measures
class anglit_gen(rv_continuous):
    """Anglit continuous distribution.

    pdf(x) = cos(2*x) on the support [-pi/4, pi/4]; no shape parameters.
    """

    def _shape_info(self):
        # Distribution has no shape parameters.
        return []

    def _pdf(self, x):
        # anglit.pdf(x) = cos(2*x)
        return np.cos(2 * x)

    def _cdf(self, x):
        # CDF: sin(x + pi/4)^2
        shifted = x + np.pi / 4
        return np.sin(shifted) ** 2.0

    def _sf(self, x):
        # Survival function: cos(x + pi/4)^2 (complement of the CDF).
        shifted = x + np.pi / 4
        return np.cos(shifted) ** 2.0

    def _ppf(self, q):
        # Inverse of the CDF: arcsin(sqrt(q)) - pi/4.
        return np.arcsin(np.sqrt(q)) - np.pi / 4

    def _stats(self):
        # Closed-form mean, variance, skewness and excess kurtosis.
        mean = 0.0
        var = np.pi * np.pi / 16 - 0.5
        skew = 0.0
        kurt = -2 * (np.pi ** 4 - 96) / (np.pi * np.pi - 8) ** 2
        return mean, var, skew, kurt

    def _entropy(self):
        # Differential entropy: 1 - ln(2).
        return 1 - np.log(2)
def add_context(stat: Stat, context: MetricContext) -> Stat:
    """Return a copy of `stat` whose name carries the given metric context.

    The context's split, sub-split and perturbation are stamped onto the stat
    name; the values of the original stat are merged into the fresh copy.
    """
    contextualized_name = replace(
        stat.name,
        split=context.split,
        sub_split=context.sub_split,
        perturbation=context.perturbation,
    )
    return Stat(contextualized_name).merge(stat)
def get_keras_lstm(num_buckets, embed_dim=16, rnn_state_size=64):
    """Build and compile a small binary-classification LSTM.

    Architecture: Embedding(num_buckets -> embed_dim) -> LSTM(rnn_state_size,
    ReLU) -> Dense(1, sigmoid), compiled with Adagrad and binary cross entropy.
    """
    model = tf.keras.Sequential([
        tf.keras.layers.Embedding(num_buckets, embed_dim),
        tf.keras.layers.LSTM(rnn_state_size, activation=tf.nn.relu),
        tf.keras.layers.Dense(1, activation=tf.nn.sigmoid),
    ])
    model.compile('Adagrad', 'binary_crossentropy', metrics=['accuracy'])
    return model
# BUGFIX: the arch-filter call was a bare expression whose result was
# discarded — it is clearly meant to decorate the test function (otherwise
# the arch restriction never applies), so the missing '@' is restored.
@_utils.test(arch=[ti.cuda, ti.vulkan, ti.amdgpu])
def test_shared_array_atomics():
    """Check atomic adds into element 0 of a block-shared array.

    Each of the 32 threads in a block adds its thread id into sharr[0], so
    every block's element 0 ends up holding sum(0..31) = 496.
    """
    N = 256
    block_dim = 32

    # BUGFIX: the kernel was missing @ti.kernel — ti.loop_config and the
    # ti.simt.block.* intrinsics are only valid inside a Taichi kernel.
    @ti.kernel
    def atomic_test(out: ti.types.ndarray()):
        ti.loop_config(block_dim=block_dim)
        for i in range(N):
            tid = i % block_dim
            val = tid
            sharr = ti.simt.block.SharedArray((block_dim,), ti.i32)
            sharr[tid] = val
            ti.simt.block.sync()
            # Every thread in the block atomically accumulates into slot 0.
            sharr[0] += val
            ti.simt.block.sync()
            out[i] = sharr[tid]

    arr = ti.ndarray(ti.i32, N)
    atomic_test(arr)
    ti.sync()

    # Sum of thread ids 0..block_dim-1.
    sum = block_dim * (block_dim - 1) // 2
    # Element 0 of each block (blocks start at multiples of block_dim).
    assert arr[0] == sum
    assert arr[32] == sum
    assert arr[128] == sum
    assert arr[224] == sum
def realize_text_and_extract_scene(scene, template, filter_objs):
    """Realize a caption from a template and build the matching scene graph.

    Picks a random text variant of `template` and a random filtered-object
    sample, substitutes/removes attribute tags in the text, and returns a
    sample dict with the caption, template info, empty dialog and merged graph.

    NOTE(review): `scene` is accepted but never read in this body — presumably
    kept for interface symmetry with sibling realizers; verify against callers.
    """
    # Fresh empty scene-graph skeleton; relationships is a nested defaultdict.
    default_list = (lambda : collections.defaultdict(list))
    graph = {'relationships': collections.defaultdict(default_list), 'counts': {}, 'exists': {}, 'history': [], 'objects': {}}
    # Number of object arguments the template consumes (defaults to 1).
    n_inputs = template.get('inputs', 1)
    # Sample one text realization and remember which variant it was.
    text_sample = random.choice(template['text'])
    text_sample_index = template['text'].index(text_sample)
    # Collect placeholder tags like <S1>, grouped by their argument index.
    tags = re.findall('(<[\\d\\w]*>)', text_sample)
    tag_groups = collections.defaultdict(list)
    for tag in tags:
        group_id = get_tag_group(tag)
        tag_groups[group_id].append(tag)
    # Pick one candidate object assignment and its associated graph item.
    arg_sample = random.choice(filter_objs)
    graph_item = arg_sample['graph']
    for arg_ind in range(n_inputs):
        obj_sample = arg_sample['objects'][arg_ind]
        avail_attrs = (obj_sample['optional'] + obj_sample['required'])
        # Iterate a reversed copy so in-place removal from the group is safe;
        # tags whose attribute the object lacks are stripped from the text.
        for ii in tag_groups[arg_ind][::(- 1)]:
            if (mapping(ii) not in avail_attrs):
                tag_groups[arg_ind].remove(ii)
                text_sample = replace_attribute(text_sample, ii, arg_sample, True)
        # Every required attribute must still have a tag in the template.
        for attribute in obj_sample['required']:
            required_tag = inv_mapping(attribute, arg_ind)
            assert (required_tag in tag_groups[arg_ind]), 'A required attribute is missing in template!'
        tags_to_keep = [inv_mapping(ii, arg_ind) for ii in obj_sample['required']]
        # Optional tags are only usable if present in this template's group.
        optional_tags = [inv_mapping(ii, arg_ind) for ii in obj_sample['optional']]
        optional_tags = [ii for ii in optional_tags if (ii in tag_groups[arg_ind])]
        if (len(optional_tags) > 0):
            # Choose how many optional attributes to keep; if there are no
            # required tags, keep at least one so the mention is non-empty.
            if (len(tags_to_keep) > 0):
                n_tags_sample = [0, 1, 2]
            else:
                n_tags_sample = [1, 2, 3]
            # Weighted draw of the count, capped by what is available.
            n_sample = np.random.choice(n_tags_sample, 1, p=gvars.METAINFO['probabilities'], replace=False)
            n_sample = min(n_sample[0], len(optional_tags))
            if (n_sample > 0):
                tags_to_keep += random.sample(optional_tags, n_sample)
        # Realize kept tags; remove (remove=True) the rest from the text.
        for tag in tag_groups[arg_ind]:
            remove = (tag not in tags_to_keep)
            text_sample = replace_attribute(text_sample, tag, arg_sample, remove)
        # Drop attributes that were not mentioned from the graph item too,
        # so the graph only records what the caption actually states.
        if ('objects' in graph_item):
            for ii in gvars.METAINFO['attributes']:
                if (inv_mapping(ii, arg_ind) not in tags_to_keep):
                    if (ii in graph_item['objects'][arg_ind]):
                        del graph_item['objects'][arg_ind][ii]
    # Captions belong to round 0 of the dialog.
    graph_item['round'] = 0
    sample = {}
    # Store template metadata without the (bulky) text variants.
    sample['template_info'] = [copy.deepcopy(template)]
    del sample['template_info'][(- 1)]['text']
    sample['template_info'][(- 1)]['index'] = text_sample_index
    sample['caption'] = text_sample
    sample['dialog'] = []
    # Record the item in history and merge it into the running scene graph.
    graph['history'].append(graph_item)
    sample['graph'] = utils.merge_update_scene_graph(graph, graph_item)
    return sample
def train_model():
    """Build the model graph and run slim's training loop.

    All run parameters (checkpoint dir, step counts, summary/save intervals,
    distribution settings) come from FLAGS; initialization is delegated to
    get_checkpoint_init_fn().
    """
    graph, train_op = build_model()
    with graph.as_default():
        slim.learning.train(
            train_op,
            FLAGS.checkpoint_dir,
            is_chief=(FLAGS.task == 0),
            master=FLAGS.master,
            log_every_n_steps=FLAGS.log_every_n_steps,
            graph=graph,
            number_of_steps=FLAGS.number_of_steps,
            save_summaries_secs=FLAGS.save_summaries_secs,
            save_interval_secs=FLAGS.save_interval_secs,
            init_fn=get_checkpoint_init_fn(),
            global_step=tf.train.get_global_step())