def animate():
    rot = la.quat_from_euler((0.005, 0.01), order='xy')
    cube1.local.rotation = la.quat_mul(rot, cube1.local.rotation)
    cube2.local.rotation = la.quat_mul(rot, cube2.local.rotation)
    renderer1.render(scene1, camera1)
    renderer2.render(scene2, camera2)
    renderer2.request_draw()
@pytest.hookimpl(hookwrapper=True)
def pytest_runtest_makereport(item, call):
    outcome = yield
    result = outcome.get_result()
    if result.when == 'setup':
        setattr(item, '_test_failed_statuses', {})
    _test_failed_statuses = getattr(item, '_test_failed_statuses', {})
    _test_failed_statuses[result.when] = result.failed
    item._test_failed_statuses = _test_failed_statuses
class CommandGraphNode(metaclass=abc.ABCMeta):

    @property
    @abc.abstractmethod
    def selector(self) -> str | int | None: ...

    @property
    @abc.abstractmethod
    def selectors(self) -> list[SelectorType]: ...

    @property
    @abc.abstractmethod
    def parent(self) -> CommandGraphNode | None: ...

    @property
    @abc.abstractmethod
    def children(self) -> list[str]: ...

    def navigate(self, name: str, selector: str | int | None) -> CommandGraphNode:
        if name in self.children:
            return _COMMAND_GRAPH_MAP[name](selector, self)
        raise KeyError('Given node is not an object: {}'.format(name))

    def call(self, name: str) -> CommandGraphCall:
        return CommandGraphCall(name, self)
@pytest.mark.parametrize('username,password', users)
@pytest.mark.parametrize('project_id', projects)
def test_project_export_xml(db, client, files, username, password, project_id):
    client.login(username=username, password=password)
    url = reverse('project_export', args=[project_id, 'xml'])
    response = client.get(url)
    if project_id in export_project_permission_map.get(username, []):
        assert response.status_code == 200
    elif password:
        assert response.status_code == 403
    else:
        assert response.status_code == 302
class TASFSave(TestCase):
    original = os.path.join(DATA_DIR, 'silence-1.wma')

    def setUp(self):
        self.filename = get_temp_copy(self.original)
        self.audio = ASF(self.filename)

    def tearDown(self):
        os.unlink(self.filename)

    def test_save_filename(self):
        self.audio.save(self.audio.filename)

    def test_multiple_delete(self):
        self.audio['large_value1'] = '#' * 50000
        self.audio.save()
        audio = ASF(self.filename)
        for tag in audio.keys():
            del audio[tag]
        audio.save()

    def test_readd_objects(self):
        header = self.audio._header
        del header.objects[:]
        self.audio.save()
        self.assertTrue(header.get_child(ContentDescriptionObject.GUID))
        self.assertTrue(header.get_child(ExtendedContentDescriptionObject.GUID))
        self.assertTrue(header.get_child(HeaderExtensionObject.GUID))
        ext = header.get_child(HeaderExtensionObject.GUID)
        self.assertTrue(ext.get_child(MetadataObject.GUID))
        self.assertTrue(ext.get_child(MetadataLibraryObject.GUID))

    def test_keep_others(self):
        self.audio.save()
        new = ASF(self.filename)
        self.assertTrue(new._header.get_child(CodecListObject.GUID))

    def test_padding(self):
        old_tags = sorted(self.audio.items())

        def get_padding(fn):
            header = ASF(fn)._header
            return len(header.get_child(PaddingObject.GUID).data)

        for i in [0, 1, 2, 3, 42, 100, 5000, 30432, 1]:
            def padding_cb(info):
                self.assertEqual(info.size, 30432)
                return i
            self.audio.save(padding=padding_cb)
            self.assertEqual(get_padding(self.filename), i)
        last = ASF(self.filename)
        self.assertEqual(sorted(last.items()), old_tags)
@dataclass
class NodeScenarioSuccessOutput:
    nodes: typing.Dict[int, Node] = field(metadata={
        'name': 'Nodes started/stopped/terminated/rebooted',
        'description': 'Map between timestamps and the pods started/stopped/terminated/rebooted.\n The timestamp is provided in nanoseconds'})
    action: kube_helper.Actions = field(metadata={
        'name': 'The action performed on the node',
        'description': 'The action performed or attempted to be performed on the node. Possible values\n are : Start, Stop, Terminate, Reboot'})
class BaseDriver(ABC):

    def __init__(self, molecule: Optional[Molecule] = None, basis: str = 'sto3g',
                 hf_method: str = 'rhf', supports_molecule: bool = False) -> None:
        if molecule is not None and not supports_molecule:
            raise QiskitChemistryError("Driver doesn't support molecule.")
        self._molecule = molecule
        self._basis = basis
        self._hf_method = hf_method
        self._supports_molecule = supports_molecule

    @property
    def supports_molecule(self) -> bool:
        return self._supports_molecule

    @property
    def molecule(self) -> Optional[Molecule]:
        return self._molecule

    @molecule.setter
    def molecule(self, value: Molecule) -> None:
        if not self.supports_molecule:
            raise QiskitChemistryError("Driver doesn't support molecule.")
        self._molecule = value

    @property
    def basis(self) -> str:
        return self._basis

    @basis.setter
    def basis(self, value: str) -> None:
        self._basis = value

    @property
    def hf_method(self) -> str:
        return self._hf_method

    @hf_method.setter
    def hf_method(self, value: str) -> None:
        self._hf_method = value
def _show_files_win32(dirname, entries):
    if not is_windows():
        raise BrowseError('windows only')
    if not entries:
        try:
            if subprocess.call(['explorer', dirname]) != 0:
                raise OSError('explorer error return status')
        except OSError as e:
            raise BrowseError(e) from e
    else:
        from quodlibet.util.windows import open_folder_and_select_items
        try:
            open_folder_and_select_items(dirname, entries)
        except OSError as e:
            raise BrowseError(e) from e
class EPICSBooleanButton(gui.Container, EPICSWidget):
    icon = 'data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAC4AAAAuCAYAAABXuSs3AAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAAOxAAADsQBlSsOGwAAABl0RVh0U29mdHdhcmUAd3d3Lmlua3NjYXBlLm9yZ5vuPBoAAAKMSURBVGiB7ZqxaxNRGMB/9+7uXZK2aaMVEUWEKNSh4CQIrhWnujiIQqGig7gXF/+B7tLNbp2KQyfBwcVdsdhWJa5WHZKKvZqXe3cO57VNmraJyd0ZuB88LnkvfO/H43sfB18Mz/OCIAhoN4DdZ9IYhrH7bDesSPJlRfHsrc+nmpGK6HGcGQp4clVwsywBMJRSgVKKqWXNpitS1juaS6M+L26ZSCnDE1dKsenaAHy/MUNjtNJ10JG1WYofHvTbtYnPWwKlFLZth+Ke5wGheKP0EXVireugemizz5rt8TyPIAgQe+KDQZO41jptn47RWofiAL7vp+3TMZGrtb9mAxTfP0bnf3QdMPf1Wt/kjiLytVoXRtZnEhHolf+7cB9BJp40mXjSDKz4gXLYHQ6vHtmUD8z7LC+4zAGz00M8PXv4q3Jl4xdTr7vfuUfx9pvP3xnm9v086893WFzZZjFamMzz7rpg7c02d1d72zOWVJn75oNjcDmO4H+JRXz+tKCyEaZKXPQlVcoTw3yZaJ776dpAox/h2xJLjocXUrI02eg5lw8jllRZXPGoYHBqPI7oIQNbx2MRn522KNc1S/9Qnzslpsvps7yws1e/Y6BH8TpTC/XOf766w5U+XdYsx5MmE0+aTDxpMvGkycSTRkTNoEEh8hXRl0Ehch3cEzcMA9M0GbdV2k7Hcj7/G9M098Qty+LhxSrnHDdtt0M5adW5d2ELy7LCU1dKBa7rUqvVqFaruK5LvV5Ptbvc2lV2HIdCoUCpVGJsbIxCoYAVLRSLRaSUKKVQStHaYkmDSFxKiZSSXC6H4zjhvOd5ge/7aK2bRtQkSruXL4TANM2mIYTA0FoHrWmR9h8QIlpTZv/nP6KyI2uh/zMtAAAAAElFTkSuQmCC'

    @property
    @gui.editor_attribute_decorator('WidgetSpecific', 'Specifies if the button is toggle or must reset the value on release', bool, {})
    def toggle(self):
        return self.__toggle

    @toggle.setter
    def toggle(self, v):
        self.__toggle = v
        self.button.onmouseup.do(self.reset_bit if not self.__toggle else None)

    @property
    @gui.editor_attribute_decorator('WidgetSpecific', 'Text content', str, {})
    def text(self):
        return self.button.get_text()

    @text.setter
    def text(self, value):
        self.button.set_text(value)

    button = None
    led = None

    def __init__(self, button_label='epics button', epics_pv_name='', toggle=False, *args, **kwargs):
        self.color_inactive = 'darkgray'
        self.color_active = 'rgb(0,255,0)'
        self.button = gui.Button(button_label, width='100%', height='100%', style=style_inheritance_dict)
        self.led = gui.Widget(width=15, height=5, style={
            'position': 'absolute', 'left': '2px', 'top': '2px',
            'background-color': self.color_inactive})
        self.led_status = False
        default_style = {'position': 'absolute', 'left': '10px', 'top': '10px',
                         'background-color': 'rgb(4, 90, 188)', 'color': 'white'}
        default_style.update(kwargs.get('style', {}))
        kwargs['style'] = default_style
        kwargs['width'] = kwargs['style'].get('width', kwargs.get('width', '100px'))
        kwargs['height'] = kwargs['style'].get('height', kwargs.get('height', '100px'))
        super(EPICSBooleanButton, self).__init__(*args, **kwargs)
        _style = {'position': 'relative'}
        _style.update(style_inheritance_dict)
        self.append(gui.Container(children=[self.button, self.led], width='100%', height='100%', style=_style))
        self.toggle = toggle
        self.epics_pv_name = epics_pv_name
        self.button.onmousedown.do(self.set_bit)

    def set_bit(self, emitter, *args, **kwargs):
        self.pressed = True
        self.written = False
        value = 1
        if self.toggle:
            value = 0 if self.led_status else 1
        self.epics_pv.put(value, callback=self.put_done if not self.toggle else None)

    def put_done(self, *args, **kwargs):
        self.written = True
        if not self.pressed:
            self.epics_pv.put(0)

    def reset_bit(self, emitter, x, y, *args, **kwargs):
        self.pressed = False
        if self.written:
            self.epics_pv.put(0)

    def set_value(self, value):
        if not self.get_app_instance():
            return
        with self.get_app_instance().update_lock:
            self.led_status = float(value) > 0.0
            self.led.style.update({'background-color': self.color_active if self.led_status else self.color_inactive})
def filter_available_models(model_dict: Union[List[dict], Tuple[dict, ...]],
                            dataset_name_or_id: Union[str, int]):
    valid = []
    for trained_model in model_dict:
        plans_manager = PlansManager(join(nnUNet_preprocessed,
                                          maybe_convert_to_dataset_name(dataset_name_or_id),
                                          trained_model['plans'] + '.json'))
        if trained_model['configuration'] not in plans_manager.available_configurations:
            print(f"Configuration {trained_model['configuration']} not found in plans {trained_model['plans']}. "
                  f"Inferred plans file: {join(nnUNet_preprocessed, maybe_convert_to_dataset_name(dataset_name_or_id), trained_model['plans'] + '.json')}.")
            continue
        expected_output_folder = get_output_folder(dataset_name_or_id, trained_model['trainer'],
                                                   trained_model['plans'], trained_model['configuration'],
                                                   fold=None)
        if not isdir(expected_output_folder):
            raise RuntimeError(f"Trained model {trained_model} does not have an output folder. "
                               f"Expected: {expected_output_folder}. Please run the training for this model! "
                               f"(don't forget the --npz flag if you want to ensemble multiple configurations)")
        valid.append(trained_model)
    return valid
def download_subprocess(dii, save_dir):
    for image in tqdm(dii):
        key, value = image.popitem()
        try:
            img_data = requests.get(value).content
            img = Image.open(BytesIO(img_data)).convert('RGB')
            h = img.size[0]
            w = img.size[1]
            if min(h, w) > 512:
                img = img.resize((int(h / (w / 512)), 512) if h > w else (512, int(w / (h / 512))))
            img.save('{}/{}.jpg'.format(save_dir, key))
        except Exception:
            print(key, value)
class ChannelTest(unittest.TestCase):

    def setUp(self):
        zero_state = np.array([[1], [0]], dtype=complex)
        one_state = np.array([[0], [1]], dtype=complex)
        one_one_state = np.kron(one_state, one_state)
        zero_zero_state = np.kron(zero_state, zero_state)
        cat_state = (1.0 / np.sqrt(2)) * (zero_zero_state + one_one_state)
        self.density_matrix = np.dot(one_one_state, one_one_state.T)
        self.cat_matrix = np.dot(cat_state, cat_state.T)

    def test_amplitude_damping(self):
        test_density_matrix = amplitude_damping_channel(self.density_matrix, 0, 1)
        self.assertAlmostEqual(norm(self.density_matrix - test_density_matrix), 0.0)
        test_density_matrix = amplitude_damping_channel(self.density_matrix, 0, 1, transpose=True)
        self.assertAlmostEqual(norm(self.density_matrix - test_density_matrix), 0.0)
        correct_density_matrix = np.zeros((4, 4), dtype=complex)
        correct_density_matrix[2, 2] = 1
        test_density_matrix = amplitude_damping_channel(self.density_matrix, 1, 1)
        self.assertAlmostEqual(norm(correct_density_matrix - test_density_matrix), 0.0)

    def test_dephasing(self):
        test_density_matrix = dephasing_channel(self.density_matrix, 1, 1)
        self.assertAlmostEqual(norm(self.density_matrix - test_density_matrix), 0.0)
        test_density_matrix = dephasing_channel(self.density_matrix, 1, 1, transpose=True)
        correct_matrix = np.array([[0.0, 0.0, 0.0, 0.0],
                                   [0.0, 0.0, 0.0, 0.0],
                                   [0.0, 0.0, 0.5, -0.5],
                                   [0.0, 0.0, -0.5, 1.0]])
        self.assertAlmostEqual(norm(correct_matrix - test_density_matrix), 0.0)
        test_density_matrix = dephasing_channel(self.cat_matrix, 0, 1)
        self.assertAlmostEqual(norm(self.cat_matrix - test_density_matrix), 0.0)
        correct_matrix = np.array([[0.5, 0.25, 0.0, 0.0],
                                   [0.25, 0.25, 0.0, -0.25],
                                   [0.0, 0.0, 0.0, 0.0],
                                   [0.0, -0.25, 0.0, 0.5]])
        test_density_matrix = dephasing_channel(self.cat_matrix, 1, 1)
        self.assertAlmostEqual(norm(correct_matrix - test_density_matrix), 0.0)

    def test_depolarizing(self):
        test_density_matrix = depolarizing_channel(self.cat_matrix, 0, 1)
        self.assertAlmostEqual(norm(self.cat_matrix - test_density_matrix), 0.0)
        test_density_matrix = depolarizing_channel(self.cat_matrix, 0, 1, transpose=True)
        self.assertAlmostEqual(norm(self.cat_matrix - test_density_matrix), 0.0)
        correct_density_matrix = np.array([[0.0, 0.0, 0.0, 0.0],
                                           [0.0, 0.0, 0.0, 0.0],
                                           [0.0, 0.0, 0.0, 0.0],
                                           [0.0, 0.0, 0.0, 0.0]])
        test_density_matrix = depolarizing_channel(self.cat_matrix, 1, 0)
        test_density_matrix = depolarizing_channel(test_density_matrix, 1, 1)
        self.assertAlmostEqual(norm(correct_density_matrix - test_density_matrix), 0.0, places=6)
        test_density_matrix = depolarizing_channel(self.cat_matrix, 1, 0, transpose=True)
        test_density_matrix = depolarizing_channel(test_density_matrix, 1, 1, transpose=True)
        self.assertAlmostEqual(norm(correct_density_matrix - test_density_matrix), 0.0, places=6)
        correct_density_matrix = np.eye(4) / 4.0
        test_density_matrix = depolarizing_channel(self.cat_matrix, 1, 'All')
        self.assertAlmostEqual(norm(correct_density_matrix - test_density_matrix), 0.0, places=6)

    def test_verification(self):
        with self.assertRaises(ValueError):
            _ = amplitude_damping_channel(self.density_matrix, 2, 1)
        with self.assertRaises(ValueError):
            _ = amplitude_damping_channel(self.density_matrix, 0.5, 3)
        with self.assertRaises(ValueError):
            bad_density = np.zeros((3, 4))
            _ = amplitude_damping_channel(bad_density, 0.5, 3)
class AverageMeter:

    def __init__(self, num=100):
        self.num = num
        self.reset()

    def reset(self):
        self.val = {}
        self.sum = {}
        self.count = {}
        self.history = {}

    def update(self, batch=1, **kwargs):
        val = {}
        for k in kwargs:
            val[k] = kwargs[k] / float(batch)
        self.val.update(val)
        for k in kwargs:
            if k not in self.sum:
                self.sum[k] = 0
                self.count[k] = 0
                self.history[k] = []
            self.sum[k] += kwargs[k]
            self.count[k] += batch
            for _ in range(batch):
                self.history[k].append(val[k])
            if self.num <= 0:
                self.history[k] = []
                if self.num == 0:
                    self.sum[k] = self.val[k]
                    self.count[k] = 1
            elif len(self.history[k]) > self.num:
                pop_num = len(self.history[k]) - self.num
                for _ in range(pop_num):
                    self.sum[k] -= self.history[k][0]
                    del self.history[k][0]
                    self.count[k] -= 1

    def __repr__(self):
        s = ''
        for k in self.sum:
            s += self.format_str(k)
        return s

    def format_str(self, attr):
        return '{name}: {val:.6f} ({avg:.6f}) '.format(
            name=attr, val=float(self.val[attr]),
            avg=float(self.sum[attr]) / self.count[attr])

    def __getattr__(self, attr):
        if attr in self.__dict__:
            return super(AverageMeter, self).__getattr__(attr)
        if attr not in self.sum:
            print("invalid key '{}'".format(attr))
            return Meter(attr, 0, 0)
        return Meter(attr, self.val[attr], self.avg(attr))

    def avg(self, attr):
        return float(self.sum[attr]) / self.count[attr]
def test_specifying_process_covariance_Q_motion_model():
    with open(CONFIG_FILE, 'r') as config_file:
        config = json.load(config_file)['TrackerConfig']
    assert 'G' in config['MotionModel']
    assert 'Q' not in config['MotionModel']
    model = utils.read_motion_model(config)
    test_config = dict(config)
    sigma = test_config['MotionModel']['G']['sigma']
    del test_config['MotionModel']['G']
    test_config['MotionModel']['Q'] = {'sigma': sigma, 'matrix': (model.Q.ravel() / sigma).tolist()}
    assert 'G' not in test_config['MotionModel']
    assert 'Q' in test_config['MotionModel']
    test_model = utils.read_motion_model(test_config)
    np.testing.assert_equal(model.Q, test_model.Q)
@register_param_scheduler('composite')
class CompositeParamScheduler(param_scheduler.CompositeParamScheduler):
    __doc__ = param_scheduler.CompositeParamScheduler.__doc__

    def __init__(self, schedulers: Sequence[param_scheduler.ParamScheduler],
                 lengths: Sequence[float],
                 interval_scaling: Sequence[Union[IntervalScaling, str]],
                 update_interval: UpdateInterval = UpdateInterval.STEP):
        scaling_name = {IntervalScaling.RESCALED: 'rescaled', IntervalScaling.FIXED: 'fixed'}
        interval_scaling = [scaling_name[s] if isinstance(s, IntervalScaling) else s
                            for s in interval_scaling]
        super().__init__(schedulers, lengths, interval_scaling)
        self.update_interval = update_interval

    @classmethod
    def from_config(cls, config: Dict[str, Any]) -> 'CompositeParamScheduler':
        assert 'schedulers' in config and 'lengths' in config, \
            'Composite scheduler needs both a list of schedulers and lengths'
        interval_scaling = []
        if 'interval_scaling' in config:
            assert len(config['schedulers']) == len(config['interval_scaling']), \
                'Schedulers and interval scaling must be the same length'
            for interval_scale in config['interval_scaling']:
                assert interval_scale in {'fixed', 'rescaled'}, \
                    "Choices for interval scaling are 'fixed' or 'rescaled'"
                interval_scaling.append(IntervalScaling[interval_scale.upper()])
        else:
            interval_scaling = [IntervalScaling.RESCALED] * len(config['schedulers'])
        if 'num_epochs' in config:
            config['schedulers'] = [dict(schedule, **{'num_epochs': config['num_epochs']})
                                    for schedule in config['schedulers']]
        return cls(schedulers=[build_param_scheduler(scheduler) for scheduler in config['schedulers']],
                   lengths=config['lengths'],
                   update_interval=UpdateInterval.from_config(config, UpdateInterval.STEP),
                   interval_scaling=interval_scaling)
class TestDataset(Dataset):

    def __init__(self, args, raw_datasets, cache_root):
        self.args = args
        self.raw_datasets = raw_datasets
        cache_path = os.path.join(cache_root, 'e2e_nlg_cleaned_test.cache')
        if os.path.exists(cache_path) and args.dataset.use_cache:
            self.full_src_lst, self.full_tgt_lst = torch.load(cache_path)
        else:
            self.full_src_tgt_dic = {}
            for example in tqdm(self.raw_datasets):
                mr_attr_value_pairs = parse_meaning_representation(example['meaning_representation'])
                human_reference = example['human_reference']
                mr_str = ''
                for attr, value in mr_attr_value_pairs.items():
                    mr_str += '{} : {} | '.format(attr, value)
                if mr_str in self.full_src_tgt_dic.keys():
                    self.full_src_tgt_dic[mr_str].append(human_reference)
                else:
                    self.full_src_tgt_dic[mr_str] = [human_reference]
            self.full_src_lst = []
            self.full_tgt_lst = []
            for src, tgt in self.full_src_tgt_dic.items():
                self.full_src_lst.append(src)
                self.full_tgt_lst.append(tgt)
            if args.dataset.use_cache:
                torch.save((self.full_src_lst, self.full_tgt_lst), cache_path)

    def __getitem__(self, index) -> T_co:
        raw_data = self.raw_datasets[index]
        raw_data.update({'struct_in': self.full_src_lst[index],
                         'text_in': '',
                         'seq_out': self.full_tgt_lst[index][0],
                         'references': self.full_tgt_lst[index]})
        return raw_data

    def __len__(self):
        return len(self.full_src_lst)
@pytest.mark.parametrize('input_value, clean, alias', [
    ({'type': 'null', 'alias': None, 'doc': None, 'logical': None}, False, False),
    ('null', True, False),
    ('bool', True, False),
    ({'type': 'int', 'bits': 32, 'signed': True, 'alias': None, 'doc': None, 'logical': None}, False, False),
    ({'type': 'int', 'bits': 32}, True, False),
    ({'type': 'int', 'bits': 32, 'signed': False}, True, False),
    ({'type': 'float', 'bits': 32}, True, False),
    ('string', True, False),
    ({'type': 'string', 'bytes': 50}, True, False),
    ('bytes', True, False),
    ({'type': 'bytes', 'bytes': 50}, True, False),
    ({'type': 'enum', 'symbols': ['foo', 'bar']}, True, False),
    ({'type': 'list', 'values': {'type': 'int', 'bits': 32}}, True, False),
    ({'type': 'list', 'values': {'type': 'int', 'bits': 32}, 'length': 10}, True, False),
    ({'type': 'map', 'keys': {'type': 'int', 'bits': 32}, 'values': {'type': 'string', 'bytes': 50}}, True, False),
    ([{'type': 'int', 'bits': 32}, {'type': 'string', 'bytes': 50}], True, False),
    ({'type': 'struct', 'fields': [{'type': 'int', 'bits': 32}, {'type': 'string', 'bytes': 50}]}, True, False),
    ({'type': 'struct', 'fields': [{'type': 'union', 'types': [{'type': 'int', 'bits': 32}, {'type': 'string', 'bytes': 50}], 'optional': True}]}, True, False),
    ({'type': 'struct', 'fields': [{'type': 'list', 'values': {'type': 'map', 'keys': {'type': 'int', 'bits': 32}, 'values': {'type': 'string', 'bytes': 50}}}]}, True, False),
    ('test_proxy', True, True),
    ('test_proxy', True, False),
    ('int32', True, True),
    ('int32', True, False),
    ('decimal256', True, True),
    ('decimal256', True, False),
    ({'type': 'struct', 'fields': [{'type': 'string', 'bytes': 10, 'variable': False, 'default': 'bar', 'name': 'foo'}]}, True, True),
])
def test_from_to_dict(input_value: dict, clean: bool, alias: bool) -> None:
    assert to_dict(from_dict(input_value), clean, alias) == input_value
class InceptionResNetV2(nn.Module):

    def __init__(self, dropout_rate=0.0, in_channels=3, in_size=(299, 299), num_classes=1000):
        super(InceptionResNetV2, self).__init__()
        self.in_size = in_size
        self.num_classes = num_classes
        layers = [10, 21, 11]
        normal_units = [InceptionAUnit, InceptionBUnit, InceptionCUnit]
        reduction_units = [ReductionAUnit, ReductionBUnit]
        self.features = nn.Sequential()
        self.features.add_module('init_block', InceptInitBlock(in_channels=in_channels))
        for i, layers_per_stage in enumerate(layers):
            stage = nn.Sequential()
            for j in range(layers_per_stage):
                if j == 0 and i != 0:
                    unit = reduction_units[i - 1]
                else:
                    unit = normal_units[i]
                if i == len(layers) - 1 and j == layers_per_stage - 1:
                    stage.add_module('unit{}'.format(j + 1), unit(scale=1.0, activate=False))
                else:
                    stage.add_module('unit{}'.format(j + 1), unit())
            self.features.add_module('stage{}'.format(i + 1), stage)
        self.features.add_module('final_conv', incept_conv1x1(in_channels=2080, out_channels=1536))
        self.features.add_module('final_pool', nn.AvgPool2d(kernel_size=8, stride=1))
        self.output = nn.Sequential()
        if dropout_rate > 0.0:
            self.output.add_module('dropout', nn.Dropout(p=dropout_rate))
        self.output.add_module('fc', nn.Linear(in_features=1536, out_features=num_classes))
        self._init_params()

    def _init_params(self):
        for name, module in self.named_modules():
            if isinstance(module, nn.Conv2d):
                init.kaiming_uniform_(module.weight)
                if module.bias is not None:
                    init.constant_(module.bias, 0)

    def forward(self, x):
        x = self.features(x)
        x = x.view(x.size(0), -1)
        x = self.output(x)
        return x
class CmdOpenLid(Command):
    key = 'open lid'
    aliases = ['open button', 'open']
    locks = 'cmd:all()'

    def func(self):
        if self.obj.db.lid_locked:
            self.caller.msg('This lid seems locked in place for the moment.')
            return
        string = '\nA ticking sound is heard, like a winding mechanism. Seems '
        string += 'the lid will soon close again.'
        self.caller.msg(string)
        self.caller.location.msg_contents('%s opens the lid of the button.' % self.caller.name,
                                          exclude=self.caller)
        self.obj.cmdset.add(LidClosedCmdSet)
        self.obj.open_lid()
@unittest.skipIf(not DASK_INSTALLED, reason='Dask is not installed in a supported version.')
class DaskDataSourceTest(_DistributedDataSourceTest, unittest.TestCase):

    def _testAssignPartitions(self, part_nodes, actor_nodes, expected_actor_parts):
        partitions = list(range(len(part_nodes)))
        part_to_node = dict(zip(range(len(partitions)), [f'node{n}' for n in part_nodes]))
        node_to_part = [(n, p) for p, n in part_to_node.items()]
        actors_to_node = dict(enumerate(f'node{n}' for n in actor_nodes))
        actor_to_parts = self._getActorToParts(actors_to_node, node_to_part)
        for actor_rank, part_ids in expected_actor_parts.items():
            for i, part_id in enumerate(part_ids):
                self.assertEqual(
                    actor_to_parts[actor_rank][i], partitions[part_id],
                    msg=f'Assignment failed: Actor rank {actor_rank}, partition {i} '
                        f'is not partition with ID {part_id}.')

    def _getActorToParts(self, actors_to_node, node_to_part):
        def ip_to_parts(data, *args, **kwargs):
            from collections import defaultdict
            ip_to_parts_dict = defaultdict(list)
            for node, pid in data:
                ip_to_parts_dict[node].append(pid)
            return ip_to_parts_dict

        def actor_ranks(actors):
            return actors_to_node

        with patch('xgboost_ray.data_sources.dask.get_ip_to_parts') as mock_parts, \
                patch('xgboost_ray.data_sources.dask.get_actor_rank_ips') as mock_ranks:
            mock_parts.side_effect = ip_to_parts
            mock_ranks.side_effect = actor_ranks
            _, actor_to_parts = Dask.get_actor_shards(data=node_to_part, actors=[])
        return actor_to_parts

    def _testDataSourceAssignment(self, part_nodes, actor_nodes, expected_actor_parts):
        self.skipTest('Data-locality aware scheduling using Dask is currently broken.')
        import dask
        import dask.dataframe as dd
        from ray.util.dask import ray_dask_get
        dask.config.set(scheduler=ray_dask_get)
        node_ips = [node['NodeManagerAddress'] for node in ray.nodes() if node['Alive']]
        if len(node_ips) < max(max(actor_nodes), max(part_nodes)) + 1:
            print('Not running on cluster, skipping rest of this test.')
            return
        actor_node_ips = [node_ips[nid] for nid in actor_nodes]
        part_node_ips = [node_ips[nid] for nid in part_nodes]

        @ray.remote(num_cpus=0.1)
        def create_remote_df(arr):
            return dd.from_array(arr)

        partitions = np.array_split(self.x, len(part_nodes))
        node_dfs: List[dd.DataFrame] = ray.get([
            create_remote_df.options(resources={f'node:{pip}': 0.1}).remote(partitions[pid])
            for pid, pip in enumerate(part_node_ips)])
        node_dfs_concat = dd.concat(node_dfs).persist()
        partition_locations_df = node_dfs_concat.map_partitions(
            lambda df: pd.DataFrame([ray.util.get_node_ip_address()])).compute()
        partition_locations = [partition_locations_df[0].iloc[i]
                               for i in range(partition_locations_df.size)]
        dask_df = dd.concat(node_dfs, axis=0)
        try:
            self.assertSequenceEqual(
                [df[0][0] for df in partitions],
                [df[0][0] for df in dask_df.partitions.compute()],
                msg='Dask mixed up the partition order')
            self.assertSequenceEqual(part_node_ips, partition_locations,
                                     msg='Dask moved partitions to different IPs')
        except AssertionError as exc:
            print(f'Dask part of the test failed: {exc}')
            print('This is a stochastic test failure. Ignoring the rest of this test.')
            return
        actors = [
            _RemoteRayXGBoostActor.options(resources={f'node:{nip}': 0.1}).remote(
                rank=rank, num_actors=len(actor_nodes))
            for rank, nip in enumerate(actor_node_ips)]
        _, actor_to_parts = Dask.get_actor_shards(dask_df, actors)
        for actor_rank, part_ids in expected_actor_parts.items():
            for i, part_id in enumerate(part_ids):
                assigned_df = ray.get(actor_to_parts[actor_rank][i])
                part_df = pd.DataFrame(partitions[part_id])
                self.assertTrue(
                    assigned_df.equals(part_df),
                    msg=f'Assignment failed: Actor rank {actor_rank}, partition {i} '
                        f'is not partition with ID {part_id}.')
class RegRst(Component):

    def construct(s, Type, reset_value=0):
        s.out = OutPort(Type)
        s.in_ = InPort(Type)

        @update_ff
        def up_regrst():
            if s.reset:
                s.out <<= reset_value
            else:
                s.out <<= s.in_

    def line_trace(s):
        return f"[{'rst' if s.reset else ' '}|{s.in_} > {s.out}]"
class TMP4Chapters(TMP4):
    original = os.path.join(DATA_DIR, 'nero-chapters.m4b')

    def test_has_chapters(self):
        self.failUnless(hasattr(self.audio, 'chapters'))
        chapters = self.audio.chapters
        self.failUnlessEqual(len(chapters), 112)
        for i, c in enumerate(chapters):
            self.failUnlessEqual(c.title, str(i + 1).zfill(3))
def compute_average_flops_cost(self, bw_weight=4, bw_act=4, strategy=(None, None), print_layerwise=False):
    quant_idx = 0
    w_str, a_str = strategy[0], strategy[1]
    batches_count = self.__batch_counter__
    flops_sum = 0
    for name, module in self.named_modules():
        if is_supported_instance(module):
            if isinstance(module, (torch.nn.Conv2d, torch.nn.Linear)):
                flops_sum += module.__flops__[1]
                if isinstance(w_str, list):
                    mod_flops = module.__flops__[0] * max(w_str[quant_idx], a_str[quant_idx]) / 32.0
                    flops_sum += mod_flops
                    quant_idx += 1
                elif isinstance(w_str, dict):
                    if name in w_str:
                        # mirror the list branch: compare the per-layer weight and
                        # activation bitwidths (the original compared w_str to itself)
                        mod_flops = module.__flops__[0] * max(w_str[name], a_str[name]) / 32.0
                    else:
                        mod_flops = module.__flops__[0] * max(bw_weight, bw_act) / 32.0
                    flops_sum += mod_flops
                else:
                    mod_flops = module.__flops__[0] * max(bw_weight, bw_act) / 32.0
                    flops_sum += mod_flops
            else:
                mod_flops = module.__flops__
                flops_sum += mod_flops
        if print_layerwise:
            print(name, flops_sum)
    return flops_sum / batches_count
class RpmPackage(Package):

    @property
    def is_installed(self):
        return self.run_test('rpm -q %s', self.name).rc == 0

    @property
    def version(self):
        return self.check_output('rpm -q --queryformat="%%{VERSION}" %s', self.name)

    @property
    def release(self):
        return self.check_output('rpm -q --queryformat="%%{RELEASE}" %s', self.name)
def test_binary_ssvm_repellent_potentials():
    X, Y = generate_checker()
    crf = GridCRF(inference_method=inference_method)
    clf = NSlackSSVM(model=crf, max_iter=10, C=100, check_constraints=True)
    clf.fit(X, Y)
    Y_pred = clf.predict(X)
    assert_array_equal(Y, Y_pred)
    submodular_clf = NSlackSSVM(model=crf, max_iter=10, C=100, check_constraints=True,
                                negativity_constraint=[4, 5, 6])
    submodular_clf.fit(X, Y)
    Y_pred = submodular_clf.predict(X)
    for i, x in enumerate(X):
        y_pred_unaries = crf.inference(x, np.array([1, 0, 0, 1, 0, 0, 0]))
        assert_array_equal(y_pred_unaries, Y_pred[i])
class Effect4974(BaseEffect):
    type = 'passive'

    @staticmethod
    def handler(fit, ship, context, projectionRange, **kwargs):
        fit.modules.filteredItemBoost(
            lambda mod: mod.item.requiresSkill('Shield Operation'),
            'shieldBonus', ship.getModifiedItemAttr('eliteBonusViolators2'),
            skill='Marauders', **kwargs)
def _run_crefl_abi(refl, mus, muv, phi, solar_zenith, sensor_zenith, height, *coeffs):
    a_O3 = [268.45, 0.5, 115.42, -3.2922]
    a_H2O = [0.0311, 0.1, 92.471, -1.3814]
    a_O2 = [0.4567, 0.007, 96.4884, -1.697]
    G_O3 = _G_calc(solar_zenith, a_O3) + _G_calc(sensor_zenith, a_O3)
    G_H2O = _G_calc(solar_zenith, a_H2O) + _G_calc(sensor_zenith, a_H2O)
    G_O2 = _G_calc(solar_zenith, a_O2) + _G_calc(sensor_zenith, a_O2)
    atm_vars = _ABIAtmosphereVariables(G_O3, G_H2O, G_O2, mus, muv, phi, height, *coeffs)
    sphalb, rhoray, TtotraytH2O, tOG = atm_vars()
    return _correct_refl(refl, tOG, rhoray, TtotraytH2O, sphalb)
def unary_union(shapes):
    if shapely_version < '1.2.16':
        raise Exception('shapely 1.2.16 or higher needed for unary_union; '
                        'upgrade shapely or try cascade_union instead')
    o = []
    for shape in shapes:
        if not hasattr(shape, GEO_INTERFACE_ATTR):
            raise TypeError(SHAPE_TYPE_ERR % shape)
        o.append(geom.shape(shape))
    res = shops.unary_union(o)
    return asShape(res)
def test_optimizer():
    nan_detected = [False]

    def detect_nan(fgraph, i, node, fn):
        for output in fn.outputs:
            if np.isnan(output[0]).any():
                print('*** NaN detected ***')
                debugprint(node)
                print('Inputs : %s' % [input[0] for input in fn.inputs])
                print('Outputs: %s' % [output[0] for output in fn.outputs])
                nan_detected[0] = True
                break

    x = dscalar('x')
    mode = MonitorMode(post_func=detect_nan)
    mode = mode.excluding('fusion')
    f = function([x], [log(x) * x], mode=mode)
    assert len(f.maker.fgraph.apply_nodes) == 2
    try:
        old_stdout = sys.stdout
        sys.stdout = StringIO()
        f(0)
    finally:
        sys.stdout = old_stdout
    assert nan_detected[0]
def pad_if_smaller(img, size, fill=0):
    min_size = min(img.size)
    if min_size < size:
        original_width, original_height = img.size
        pad_height = size - original_height if original_height < size else 0
        pad_width = size - original_width if original_width < size else 0
        img = functional.pad(img, (0, 0, pad_width, pad_height), fill=fill)
    return img
class F28_TestCase(F20_TestCase):

    def runTest(self):
        F20_TestCase.runTest(self)
        self.assert_parse('firewall --use-system-defaults',
                          'firewall --use-system-defaults\n')
        self.assert_parse('firewall --enabled --service=ssh --use-system-defaults',
                          'firewall --use-system-defaults\n')
def partition_data(dataset, datadir, logdir, partition, n_parties, labeled_num, beta=0.4):
    if dataset == 'cifar10':
        X_train, y_train, X_test, y_test = load_cifar10_data(datadir)
    state = np.random.get_state()
    np.random.shuffle(X_train)
    np.random.set_state(state)
    np.random.shuffle(y_train)
    n_train = y_train.shape[0]
    if partition == 'homo' or partition == 'iid':
        idxs = np.random.permutation(n_train)
        batch_idxs = np.array_split(idxs, n_parties)
        net_dataidx_map = {i: batch_idxs[i] for i in range(n_parties)}
    elif partition == 'noniid-labeldir' or partition == 'noniid':
        min_size = 0
        min_require_size = 10
        K = 10
        sup_size = int(len(y_train) / 10)
        N = y_train.shape[0] - sup_size
        net_dataidx_map = {}
        for sup_i in range(labeled_num):
            net_dataidx_map[sup_i] = [i for i in range(sup_i * sup_size, (sup_i + 1) * sup_size)]
        while min_size < min_require_size:
            idx_batch = [[] for _ in range(n_parties - labeled_num)]
            for k in range(K):
                idx_k = np.where(y_train[int(labeled_num * len(y_train) / 10):] == k)[0] + sup_size
                np.random.shuffle(idx_k)
                proportions = np.random.dirichlet(np.repeat(beta, n_parties))
                proportions = np.array([p * (len(idx_j) < (N / (n_parties - labeled_num)))
                                        for p, idx_j in zip(proportions, idx_batch)])
                proportions = proportions / proportions.sum()
                proportions = (np.cumsum(proportions) * len(idx_k)).astype(int)[:-1]
                idx_batch = [idx_j + idx.tolist()
                             for idx_j, idx in zip(idx_batch, np.split(idx_k, proportions))]
                min_size = min([len(idx_j) for idx_j in idx_batch])
        for j in range(n_parties - labeled_num):
            np.random.shuffle(idx_batch[j])
            net_dataidx_map[j + labeled_num] = idx_batch[j]
    traindata_cls_counts = record_net_data_stats(y_train, net_dataidx_map, logdir)
    return X_train, y_train, X_test, y_test, net_dataidx_map, traindata_cls_counts
class GaussianProcessLayer(gpytorch.models.ApproximateGP):

    def __init__(self, num_dim, grid_bounds=(-10.0, 10.0), grid_size=64):
        variational_distribution = gpytorch.variational.CholeskyVariationalDistribution(
            num_inducing_points=grid_size, batch_shape=torch.Size([num_dim]))
        variational_strategy = gpytorch.variational.MultitaskVariationalStrategy(
            gpytorch.variational.GridInterpolationVariationalStrategy(
                self, grid_size=grid_size, grid_bounds=[grid_bounds],
                variational_distribution=variational_distribution),
            num_tasks=num_dim)
        super().__init__(variational_strategy)
        self.covar_module = gpytorch.kernels.ScaleKernel(
            gpytorch.kernels.RBFKernel(
                lengthscale_prior=gpytorch.priors.SmoothedBoxPrior(
                    math.exp(-1), math.exp(1), sigma=0.1, transform=torch.exp)))
        self.mean_module = gpytorch.means.ConstantMean()
        self.grid_bounds = grid_bounds

    def forward(self, x):
        mean = self.mean_module(x)
        covar = self.covar_module(x)
        return gpytorch.distributions.MultivariateNormal(mean, covar)
def _get_help_record(opt):
    def _write_opts(opts):
        rv, _ = click.formatting.join_options(opts)
        if not opt.is_flag and not opt.count:
            rv += ' <{}>'.format(opt.name)
        return rv

    rv = [_write_opts(opt.opts)]
    if opt.secondary_opts:
        rv.append(_write_opts(opt.secondary_opts))
    help = opt.help or ''
    extra = []
    if opt.default is not None and opt.show_default:
        extra.append('default: %s' % (
            ', '.join('%s' % d for d in opt.default)
            if isinstance(opt.default, (list, tuple)) else opt.default,))
    if opt.required:
        extra.append('required')
    if extra:
        help = '%s[%s]' % ((help and help + ' ') or '', '; '.join(extra))
    if isinstance(opt.type, click.Choice):
        help = '%s\n\n:options: %s' % ((help and help + ' ') or '', '|'.join(opt.type.choices))
    return ', '.join(rv), help
def change_coordinate_frame(boxlist, window, scope=None):
    with tf.name_scope(scope, 'ChangeCoordinateFrame'):
        win_height = window[2] - window[0]
        win_width = window[3] - window[1]
        boxlist_new = scale(
            box_list.BoxList(boxlist.get() - [window[0], window[1], window[0], window[1]]),
            1.0 / win_height, 1.0 / win_width)
        boxlist_new = _copy_extra_fields(boxlist_new, boxlist)
        return boxlist_new
class RequestTests(GeneratorTestCase):

    def setUp(self):
        super().setUp()
        self.reader = StreamReader()

    def parse(self):
        return Request.parse(self.reader.read_line)

    def test_parse(self):
        self.reader.feed_data(
            b'GET /chat HTTP/1.1\r\n'
            b'Host: server.example.com\r\n'
            b'Upgrade: websocket\r\n'
            b'Connection: Upgrade\r\n'
            b'Sec-WebSocket-Key: dGhlIHNhbXBsZSBub25jZQ==\r\n'
            b'Origin: chat, superchat\r\n'
            b'Sec-WebSocket-Version: 13\r\n'
            b'\r\n')
        request = self.assertGeneratorReturns(self.parse())
        self.assertEqual(request.path, '/chat')
        self.assertEqual(request.headers['Upgrade'], 'websocket')

    def test_parse_empty(self):
        self.reader.feed_eof()
        with self.assertRaises(EOFError) as raised:
            next(self.parse())
        self.assertEqual(str(raised.exception),
                         'connection closed while reading HTTP request line')

    def test_parse_invalid_request_line(self):
        self.reader.feed_data(b'GET /\r\n\r\n')
        with self.assertRaises(ValueError) as raised:
            next(self.parse())
        self.assertEqual(str(raised.exception), 'invalid HTTP request line: GET /')

    def test_parse_unsupported_method(self):
        self.reader.feed_data(b'OPTIONS * HTTP/1.1\r\n\r\n')
        with self.assertRaises(ValueError) as raised:
            next(self.parse())
        self.assertEqual(str(raised.exception), 'unsupported HTTP method: OPTIONS')

    def test_parse_unsupported_version(self):
        self.reader.feed_data(b'GET /chat HTTP/1.0\r\n\r\n')
        with self.assertRaises(ValueError) as raised:
            next(self.parse())
        self.assertEqual(str(raised.exception), 'unsupported HTTP version: HTTP/1.0')

    def test_parse_invalid_header(self):
        self.reader.feed_data(b'GET /chat HTTP/1.1\r\nOops\r\n')
        with self.assertRaises(ValueError) as raised:
            next(self.parse())
        self.assertEqual(str(raised.exception), 'invalid HTTP header line: Oops')

    def test_parse_body(self):
        self.reader.feed_data(b'GET / HTTP/1.1\r\nContent-Length: 3\r\n\r\nYo\n')
        with self.assertRaises(ValueError) as raised:
            next(self.parse())
        self.assertEqual(str(raised.exception), 'unsupported request body')

    def test_parse_body_with_transfer_encoding(self):
        self.reader.feed_data(b'GET / HTTP/1.1\r\nTransfer-Encoding: chunked\r\n\r\n')
        with self.assertRaises(NotImplementedError) as raised:
            next(self.parse())
        self.assertEqual(str(raised.exception), "transfer codings aren't supported")

    def test_serialize(self):
        request = Request('/chat', Headers([
            ('Host', 'server.example.com'),
            ('Upgrade', 'websocket'),
            ('Connection', 'Upgrade'),
            ('Sec-WebSocket-Key', 'dGhlIHNhbXBsZSBub25jZQ=='),
            ('Origin', 'chat, superchat'),
            ('Sec-WebSocket-Version', '13')]))
        self.assertEqual(
            request.serialize(),
            b'GET /chat HTTP/1.1\r\n'
            b'Host: server.example.com\r\n'
            b'Upgrade: websocket\r\n'
            b'Connection: Upgrade\r\n'
            b'Sec-WebSocket-Key: dGhlIHNhbXBsZSBub25jZQ==\r\n'
            b'Origin: chat, superchat\r\n'
            b'Sec-WebSocket-Version: 13\r\n'
            b'\r\n')
class RankRounder(CompRatioRounder):

    def __init__(self, multiplicity: int, cost_calculator):
        self._multiplicity = multiplicity
        self._cost_calculator = cost_calculator

    def round(self, layer: Layer, comp_ratio: Decimal, cost_metric: CostMetric) -> Decimal:
        if self._multiplicity == 1:
            return comp_ratio
        rank = self._cost_calculator.calculate_rank_given_comp_ratio(layer, comp_ratio, cost_metric)
        max_rank = self._cost_calculator.calculate_rank_given_comp_ratio(layer, 1.0, cost_metric)
        rounded_rank_candidate = utils.round_up_to_multiplicity(self._multiplicity, rank, max_rank)
        if rank == rounded_rank_candidate:
            updated_comp_ratio = comp_ratio
        else:
            updated_comp_ratio = self._cost_calculator.calculate_comp_ratio_given_rank(
                layer, rounded_rank_candidate, cost_metric)
            assert 0 <= updated_comp_ratio <= 1
            assert comp_ratio <= updated_comp_ratio
        return updated_comp_ratio
def main():
    datapipes_folder = os.path.join('torchdata', 'datapipes')
    init_file = '__init__.py'
    docs_source_folder = os.path.join('docs', 'source')
    exit_code = 0
    for target, ignore_set in zip(['iter', 'map', 'utils'],
                                  [{'IterDataPipe', 'Extractor'}, {'MapDataPipe'}, set()]):
        init_path = os.path.join(datapipes_folder, target, init_file)
        rst_path = os.path.join(docs_source_folder, 'torchdata.datapipes.' + target + '.rst')
        init_set = collect_init_dps(init_path)
        rst_set = collect_rst_dps(rst_path)
        dif_init = compare_sets(init_set, rst_set, ignore_set)
        dif_rst = compare_sets(rst_set, init_set)
        for elem in dif_init:
            print(f'Please add {elem} to {rst_path}')
            exit_code = 1
        for elem in dif_rst:
            print(f'{elem} is present in {rst_path} but not in {init_path}')
            exit_code = 1
    sys.exit(exit_code)
class Color(VersionBase):

    def __init__(self, color_type, color_definition):
        self.color_type = convert_enum(color_type, ColorType, False)
        if not isinstance(color_definition, _ColorDefinition):
            raise TypeError('input is not a color definition')
        self.color_definition = color_definition

    def __eq__(self, other):
        if isinstance(other, Color):
            if (self.get_attributes() == other.get_attributes()
                    and self.color_definition == other.color_definition):
                return True
        return False

    @staticmethod
    def parse(element):
        color_type = convert_enum(element.attrib['colorType'], ColorType)
        if element.find('ColorRgb') is not None:
            color_def = ColorRGB.parse(element.find('ColorRgb'))
        elif element.find('ColorCmyk') is not None:
            color_def = ColorCMYK.parse(element.find('ColorCmyk'))
        return Color(color_type, color_def)

    def get_attributes(self):
        retdict = {}
        retdict['colorType'] = self.color_type.get_name()
        return retdict

    def get_element(self):
        if not self.isVersion(minor=2):
            raise OpenSCENARIOVersionError('Color was introduced in OpenSCENARIO V1.2')
        element = ET.Element('Color', attrib=self.get_attributes())
        element.append(self.color_definition.get_element())
        return element
def test_get_bindings():
    def function1(a: int) -> None:
        pass

    assert get_bindings(function1) == {}

    @inject
    def function2(a: int) -> None:
        pass

    assert get_bindings(function2) == {'a': int}

    @inject
    @noninjectable('b')
    def function3(a: int, b: str) -> None:
        pass

    assert get_bindings(function3) == {'a': int}

    @noninjectable('b')
    @inject
    def function3b(a: int, b: str) -> None:
        pass

    assert get_bindings(function3b) == {'a': int}

    def function4(a: Inject[int], b: str) -> None:
        pass

    assert get_bindings(function4) == {'a': int}

    @inject
    def function5(a: Inject[int], b: str) -> None:
        pass

    assert get_bindings(function5) == {'a': int, 'b': str}

    @inject
    def function6(a: int, b: NoInject[str]) -> None:
        pass

    assert get_bindings(function6) == {'a': int}

    def function7(a: int, b: NoInject[str]) -> None:
        pass

    assert get_bindings(function7) == {}

    def function8(a: NoInject[int], b: NoInject[int]) -> None:
        pass

    assert get_bindings(function8) == {}

    @inject
    @noninjectable('b')
    def function9(self, a: int, b: Optional[str] = None):
        pass

    @inject
    def function10(self, a: int, b: NoInject[Optional[str]] = None):
        pass

    assert get_bindings(function9) == {'a': int} == get_bindings(function10)

    @inject
    def function11(a: int) -> 'InvalidForwardReference':
        pass

    assert get_bindings(function11) == {'a': int}
class TBEArmor(DefaultObject):

    def at_object_creation(self):
        self.db.damage_reduction = 4
        self.db.defense_modifier = -4

    def at_before_drop(self, dropper):
        if is_in_combat(dropper):
            dropper.msg("You can't doff armor in a fight!")
            return False
        return True

    def at_drop(self, dropper):
        if dropper.db.worn_armor == self:
            dropper.db.worn_armor = None
            dropper.location.msg_contents('%s removes %s.' % (dropper, self))

    def at_before_give(self, giver, getter):
        if is_in_combat(giver):
            giver.msg("You can't doff armor in a fight!")
            return False
        return True

    def at_give(self, giver, getter):
        if giver.db.worn_armor == self:
            giver.db.worn_armor = None
            giver.location.msg_contents('%s removes %s.' % (giver, self))
def test_primitive_types(client):
    client = BigQueryClient(client)
    recap_schema = client.schema('test_project', 'test_dataset', 'test_table')
    recap_fields = recap_schema.fields
    assert recap_fields[0] == UnionType(types=[NullType(), StringType()], default=None, name='test_string')
    assert recap_fields[1] == UnionType(types=[NullType(), BytesType()], default=None, name='test_bytes')
    assert recap_fields[2] == UnionType(types=[NullType(), IntType(bits=64)], default=None, name='test_int64')
    assert recap_fields[3] == UnionType(types=[NullType(), FloatType(bits=64)], default=None, name='test_float64')
    assert recap_fields[4] == UnionType(types=[NullType(), BoolType()], default=None, name='test_boolean')
    assert recap_fields[5] == UnionType(types=[NullType(), IntType(bits=64, logical='build.recap.Timestamp', unit='microsecond')], default=None, name='test_timestamp')
    assert recap_fields[6] == UnionType(types=[NullType(), IntType(bits=64, logical='build.recap.Timestamp', unit='microsecond')], default=None, name='test_datetime')
    assert recap_fields[7] == UnionType(types=[NullType(), IntType(bits=32, logical='build.recap.Date', unit='day')], default=None, name='test_date')
    assert recap_fields[8] == UnionType(types=[NullType(), IntType(bits=32, logical='build.recap.Time', unit='microsecond')], default=None, name='test_time')
    assert recap_fields[9] == UnionType(types=[NullType(), BytesType(bytes_=16, variable=False, logical='build.recap.Decimal', precision=38, scale=0)], default=None, name='test_numeric')
    assert recap_fields[10] == UnionType(types=[NullType(), BytesType(bytes_=32, variable=False, logical='build.recap.Decimal', precision=76, scale=0)], default=None, name='test_bigdecimal')
def sample_and_group_all(xyz, points, use_xyz=True):
    batch_size = xyz.get_shape()[0].value
    nsample = xyz.get_shape()[1].value
    new_xyz = tf.constant(np.tile(np.array([0, 0, 0]).reshape((1, 1, 3)), (batch_size, 1, 1)),
                          dtype=tf.float32)
    idx = tf.constant(np.tile(np.array(range(nsample)).reshape((1, 1, nsample)), (batch_size, 1, 1)))
    grouped_xyz = tf.reshape(xyz, (batch_size, 1, nsample, 3))
    if points is not None:
        if use_xyz:
            new_points = tf.concat([xyz, points], axis=2)
        else:
            new_points = points
        new_points = tf.expand_dims(new_points, 1)
    else:
        new_points = grouped_xyz
    return new_xyz, new_points, idx, grouped_xyz
@pytest.mark.skipif(sys.version_info < (3, 11), reason='Native ExceptionGroup not implemented')
@pytest.mark.parametrize('outer_chain', ['none', 'from', 'another'])
@pytest.mark.parametrize('inner_chain', ['none', 'from', 'another'])
def test_native_exceptiongroup(pytester: Pytester, outer_chain, inner_chain) -> None:
    _exceptiongroup_common(pytester, outer_chain, inner_chain, native=True)
def markup_bbcMicro_word(pronunc):
    global bbc_partsSoFar, bbc_charsSoFar
    thisPartCount = bbcMicro_partPhonemeCount(pronunc)
    if ((not bbc_partsSoFar or bbc_partsSoFar + thisPartCount > 115)
            or (not bbc_charsSoFar or bbc_charsSoFar + len(pronunc) > 238)):
        if bbc_charsSoFar:
            r = '\n'
        else:
            r = ''
        cmd = '*SPEAK'
        bbc_charsSoFar = len(cmd) + len(pronunc) + 1
        bbc_partsSoFar = thisPartCount + 1
        return as_utf8(r + cmd) + pronunc
    else:
        bbc_charsSoFar += len(pronunc) + 1
        bbc_partsSoFar += thisPartCount + 1
        return pronunc
def test_directionoftraveldistribution():
    dotd = OSC.DirectionOfTravelDistribution(1, 2)
    dotd2 = OSC.DirectionOfTravelDistribution(1, 2)
    prettyprint(dotd.get_element())
    dotd3 = OSC.DirectionOfTravelDistribution(1, 1)
    assert dotd == dotd2
    assert dotd != dotd3
    dotd4 = OSC.DirectionOfTravelDistribution.parse(dotd.get_element())
    prettyprint(dotd4.get_element())
    assert dotd4 == dotd
    assert version_validation('DirectionOfTravelDistribution', dotd, 0) == ValidationResponse.OSC_VERSION
    assert version_validation('DirectionOfTravelDistribution', dotd, 1) == ValidationResponse.OSC_VERSION
    assert version_validation('DirectionOfTravelDistribution', dotd, 2) == ValidationResponse.OK
class Formatter(metaclass=ClassRegistry):
    subclasses = []

    def __init__(self):
        pass

    @classmethod
    def formatters(cls, filename):
        for formatter in cls.subclasses:
            if formatter.supports(filename):
                yield formatter

    @classmethod
    def supports(cls, filename):
        for pattern in cls.patterns:
            if fnmatch.fnmatch(os.path.basename(filename), pattern):
                return True
        return False

    @classmethod
    def all_patterns(cls):
        patterns = set()
        for formatter in cls.subclasses:
            patterns.update(formatter.patterns)
        return patterns
class STM32F4xxRccV1(STM32F4xxRcc):

    class Type(ctypes.Structure):
        _fields_ = [
            ('CR', ctypes.c_uint32),
            ('PLLCFGR', ctypes.c_uint32),
            ('CFGR', ctypes.c_uint32),
            ('CIR', ctypes.c_uint32),
            ('AHB1RSTR', ctypes.c_uint32),
            ('AHB2RSTR', ctypes.c_uint32),
            ('AHB3RSTR', ctypes.c_uint32),
            ('RESERVED0', ctypes.c_uint32),
            ('APB1RSTR', ctypes.c_uint32),
            ('APB2RSTR', ctypes.c_uint32),
            ('RESERVED1', ctypes.c_uint32 * 2),
            ('AHB1ENR', ctypes.c_uint32),
            ('AHB2ENR', ctypes.c_uint32),
            ('AHB3ENR', ctypes.c_uint32),
            ('RESERVED2', ctypes.c_uint32),
            ('APB1ENR', ctypes.c_uint32),
            ('APB2ENR', ctypes.c_uint32),
            ('RESERVED3', ctypes.c_uint32 * 2),
            ('AHB1LPENR', ctypes.c_uint32),
            ('AHB2LPENR', ctypes.c_uint32),
            ('AHB3LPENR', ctypes.c_uint32),
            ('RESERVED4', ctypes.c_uint32),
            ('APB1LPENR', ctypes.c_uint32),
            ('APB2LPENR', ctypes.c_uint32),
            ('RESERVED5', ctypes.c_uint32 * 2),
            ('BDCR', ctypes.c_uint32),
            ('CSR', ctypes.c_uint32),
            ('RESERVED6', ctypes.c_uint32 * 2),
            ('SSCGR', ctypes.c_uint32),
            ('PLLI2SCFGR', ctypes.c_uint32),
            ('RESERVED7', ctypes.c_uint32),
            ('DCKCFGR', ctypes.c_uint32),
            ('CKGATENR', ctypes.c_uint32),
            ('DCKCFGR2', ctypes.c_uint32),
        ]
def test_get_ref_lat_1(ntg1, ntg2, ntg3, ntg_weird, ntg_latlon):
    rl1 = ntg1.get_ref_lat_1()
    assert isinstance(rl1, float)
    np.testing.assert_allclose(rl1, 0.0)
    np.testing.assert_allclose(ntg2.get_ref_lat_1(), 2.5)
    np.testing.assert_allclose(ntg3.get_ref_lat_1(), 75)
    with pytest.raises(ValueError,
                       match='Could not find reference latitude for area test-area-north-stereo'):
        ntg_weird.get_ref_lat_1()
    with pytest.raises(AttributeError):
        ntg_latlon.get_ref_lat_1()
def _modexp(data: bytes) -> int:
    base_length, exponent_length, modulus_length = _extract_lengths(data)
    if base_length == 0:
        return 0
    elif modulus_length == 0:
        return 0
    base_end_idx = 96 + base_length
    exponent_end_idx = base_end_idx + exponent_length
    modulus_end_dx = exponent_end_idx + modulus_length
    modulus_bytes = zpad_right(data[exponent_end_idx:modulus_end_dx], to_size=modulus_length)
    modulus = big_endian_to_int(modulus_bytes)
    if modulus == 0:
        return 0
    base_bytes = zpad_right(data[96:base_end_idx], to_size=base_length)
    base = big_endian_to_int(base_bytes)
    exponent_bytes = zpad_right(data[base_end_idx:exponent_end_idx], to_size=exponent_length)
    exponent = big_endian_to_int(exponent_bytes)
    result = pow(base, exponent, modulus)
    return result
def test_shifted_qr_np():
    np.set_printoptions(formatter={'float': '{:0.2f}'.format})
    np_dtype = np.float64
    N = 10
    diag = generate_spectrum(coeff=0.5, scale=1.0, size=N, dtype=np_dtype)
    A = generate_pd_from_diag(diag, dtype=diag.dtype, seed=21)
    A_new, _ = shifted_qr_np(A, shifts=np.zeros((50,)))
    approx = np.sort(np.diag(A_new))[::-1]
    rel_error = relative_error(approx, diag)
    assert rel_error < 1e-08
    A_new, Q = shifted_qr_np(A, shifts=diag[5:])
    approx = (Q @ A_new) @ Q.T
    rel_error = relative_error(approx, A)
    assert rel_error < 1e-12
    A_new, _ = shifted_qr_np(A_new, shifts=np.zeros((40,)))
    approx = np.sort(np.diag(A_new))[::-1][:5]
    rel_error = relative_error(approx, diag[:5])
    assert rel_error < 1e-08
    A_new, _ = shifted_qr_np(A, shifts=diag[2] * np.ones((6,)))
    approx = np.sort(np.diag(A_new))[::-1][2]
    rel_error = relative_error(approx, diag[2])
    print(f'Rel error: {rel_error:2.5e}')
    assert rel_error < 1e-08
def event_input_bert_mrc_fn(input_Xs, start_Ys, end_Ys, token_type_ids, query_lens,
                            is_training, is_testing, args):
    _shapes = (([None], (), (), [None]), ([None], [None]))
    _types = ((tf.int32, tf.int32, tf.int32, tf.int32), (tf.int32, tf.int32))
    _pads = ((0, 0, 0, 0), (0, 0))
    ds = tf.data.Dataset.from_generator(
        lambda: event_data_generator_bert_mrc(input_Xs, start_Ys, end_Ys, token_type_ids, query_lens),
        output_shapes=_shapes, output_types=_types)
    if is_training:
        ds = ds.shuffle(args.shuffle_buffer).repeat(args.epochs)
    if is_training:
        ds = ds.padded_batch(args.train_batch_size, _shapes, _pads)
    elif is_testing:
        ds = ds.padded_batch(args.test_batch_size, _shapes, _pads)
    else:
        ds = ds.padded_batch(args.valid_batch_size, _shapes, _pads)
    ds = ds.prefetch(args.pre_buffer_size)
    return ds
def add_content_to_text(text: str, content: str,
                        add_after: Optional[Union[str, Pattern]] = None,
                        add_before: Optional[Union[str, Pattern]] = None,
                        exact_match: bool = False) -> str:
    if add_after is None and add_before is None:
        raise ValueError('You need to pass either `add_after` or `add_before`')
    if add_after is not None and add_before is not None:
        raise ValueError("You can't pass both `add_after` or `add_before`")
    pattern = add_after if add_before is None else add_before

    def this_is_the_line(line):
        if isinstance(pattern, Pattern):
            return pattern.search(line) is not None
        elif exact_match:
            return pattern == line
        else:
            return pattern in line

    new_lines = []
    for line in text.split('\n'):
        if this_is_the_line(line):
            if add_before is not None:
                new_lines.append(content)
            new_lines.append(line)
            if add_after is not None:
                new_lines.append(content)
        else:
            new_lines.append(line)
    return '\n'.join(new_lines)
class CoSQL(datasets.GeneratorBasedBuilder):
    VERSION = datasets.Version('1.0.0')
    BUILDER_CONFIGS = [datasets.BuilderConfig(
        name='cosql', version=VERSION,
        description='A Conversational Text-to-SQL Challenge Towards Cross-Domain Natural Language Interfaces to Databases')]

    def __init__(self, *args, writer_batch_size=None, **kwargs):
        super().__init__(*args, writer_batch_size=writer_batch_size, **kwargs)
        self.schema_cache = dict()

    def _info(self):
        features = datasets.Features({
            'query': datasets.Value('string'),
            'utterances': datasets.features.Sequence(datasets.Value('string')),
            'turn_idx': datasets.Value('int32'),
            'db_id': datasets.Value('string'),
            'db_path': datasets.Value('string'),
            'db_table_names': datasets.features.Sequence(datasets.Value('string')),
            'db_column_names': datasets.features.Sequence({
                'table_id': datasets.Value('int32'),
                'column_name': datasets.Value('string')}),
            'db_column_types': datasets.features.Sequence(datasets.Value('string')),
            'db_primary_keys': datasets.features.Sequence({'column_id': datasets.Value('int32')}),
            'db_foreign_keys': datasets.features.Sequence({
                'column_id': datasets.Value('int32'),
                'other_column_id': datasets.Value('int32')})})
        return datasets.DatasetInfo(description=_DESCRIPTION, features=features,
                                    supervised_keys=None, homepage=_HOMEPAGE,
                                    license=_LICENSE, citation=_CITATION)

    def _split_generators(self, dl_manager):
        downloaded_filepath = dl_manager.download_and_extract(_URL)
        return [
            datasets.SplitGenerator(
                name=datasets.Split.TRAIN,
                gen_kwargs={
                    'data_filepath': downloaded_filepath + '/cosql_dataset/sql_state_tracking/cosql_train.json',
                    'db_path': downloaded_filepath + '/cosql_dataset/database'}),
            datasets.SplitGenerator(
                name=datasets.Split.VALIDATION,
                gen_kwargs={
                    'data_filepath': downloaded_filepath + '/cosql_dataset/sql_state_tracking/cosql_dev.json',
                    'db_path': downloaded_filepath + '/cosql_dataset/database'})]

    def _generate_examples(self, data_filepath, db_path):
        logger.info('generating examples from = %s', data_filepath)
        with open(data_filepath, encoding='utf-8') as f:
            cosql = json.load(f)
            idx = 0
            for sample in cosql:
                db_id = sample['database_id']
                if db_id not in self.schema_cache:
                    self.schema_cache[db_id] = dump_db_json_schema(
                        db_path + '/' + db_id + '/' + db_id + '.sqlite', db_id)
                schema = self.schema_cache[db_id]
                db_stuff = {
                    'db_id': db_id,
                    'db_path': db_path,
                    'db_table_names': schema['table_names_original'],
                    'db_column_names': [
                        {'table_id': table_id, 'column_name': column_name}
                        for table_id, column_name in schema['column_names_original']],
                    'db_column_types': schema['column_types'],
                    'db_primary_keys': [{'column_id': column_id}
                                        for column_id in schema['primary_keys']],
                    'db_foreign_keys': [
                        {'column_id': column_id, 'other_column_id': other_column_id}
                        for column_id, other_column_id in schema['foreign_keys']]}
                yield idx, {'utterances': [sample['final']['utterance']],
                            'query': sample['final']['query'],
                            'turn_idx': -1,
                            **db_stuff}
                idx += 1
                utterances = []
                for turn_idx, turn in enumerate(sample['interaction']):
                    utterances.extend(utterance.strip()
                                      for utterance in turn['utterance'].split(sep='|'))
                    yield idx, {'utterances': list(utterances),
                                'query': turn['query'],
                                'turn_idx': turn_idx,
                                **db_stuff}
                    idx += 1
class GPint(object):

    def __init__(self, x_init, model):
        self.model = model
        self.x = x_init
        self.y = model.predict(np.array(x_init, ndmin=2))

    def getState(self):
        return (self.x, self.model.predict(np.array(self.x, ndmin=2))[0])

    def setX(self, x_new):
        self.x = x_new
@all_images
def test_package(host, docker_image):
    assert not host.package('zsh').is_installed
    ssh = host.package('openssh-server')
    version = {'rockylinux9': '8.', 'debian_bookworm': '1:9.2'}[docker_image]
    assert ssh.is_installed
    assert ssh.version.startswith(version)
    release = {'rockylinux9': '.el9', 'debian_bookworm': None}[docker_image]
    if release is None:
        with pytest.raises(NotImplementedError):
            ssh.release
    else:
        assert release in ssh.release
def test_read_setup_py_empty(tmp_path):
    with open(tmp_path / 'setup.py', 'w') as f:
        f.write(dedent('''
            from setuptools import setup

            REQUIRES = "3.21"

            setuptools.setup(
                name = "hello",
                other = 23,
                example = ["item", "other"],
            )
            '''))
    assert setup_py_python_requires(tmp_path.joinpath('setup.py').read_text()) is None
    assert get_requires_python_str(tmp_path) is None
def skip_reverse_union_constraints(cs: list[Constraint]) -> list[Constraint]:
    reverse_union_cs = set()
    for c in cs:
        p_target = get_proper_type(c.target)
        if isinstance(p_target, UnionType):
            for item in p_target.items:
                if isinstance(item, TypeVarType):
                    reverse_union_cs.add(Constraint(item, neg_op(c.op), c.origin_type_var))
    return [c for c in cs if c not in reverse_union_cs]
class MetaConvModel(MetaModule):

    def __init__(self, in_channels, out_features, hidden_size=64, feature_size=64):
        super(MetaConvModel, self).__init__()
        self.in_channels = in_channels
        self.out_features = out_features
        self.hidden_size = hidden_size
        self.feature_size = feature_size
        self.features = MetaSequential(OrderedDict([
            ('layer1', conv_block(in_channels, hidden_size, kernel_size=3, stride=1, padding=1, bias=True)),
            ('layer2', conv_block(hidden_size, hidden_size, kernel_size=3, stride=1, padding=1, bias=True)),
            ('layer3', conv_block(hidden_size, hidden_size, kernel_size=3, stride=1, padding=1, bias=True)),
            ('layer4', conv_block(hidden_size, hidden_size, kernel_size=3, stride=1, padding=1, bias=True))]))
        self.classifier = MetaLinear(feature_size, out_features, bias=True)

    def forward(self, inputs, params=None):
        features = self.features(inputs, params=get_subdict(params, 'features'))
        features = features.view((features.size(0), -1))
        logits = self.classifier(features, params=get_subdict(params, 'classifier'))
        return features.detach(), logits
def test_sky_temple_key_distribution_logic_all_guardians_valid(echoes_game_description): results = sky_temple_keys.add_sky_temple_key_distribution_logic(echoes_game_description, LayoutSkyTempleKeyMode.ALL_GUARDIANS) assert (results == _create_pool_with(3, echoes_game_description.resource_database))
class Connect(): MAX_REDIRECTS_ALLOWED = 10 def __init__(self, uri: str, *, create_protocol: Optional[Callable[(..., WebSocketClientProtocol)]]=None, logger: Optional[LoggerLike]=None, compression: Optional[str]='deflate', origin: Optional[Origin]=None, extensions: Optional[Sequence[ClientExtensionFactory]]=None, subprotocols: Optional[Sequence[Subprotocol]]=None, extra_headers: Optional[HeadersLike]=None, user_agent_header: Optional[str]=USER_AGENT, open_timeout: Optional[float]=10, ping_interval: Optional[float]=20, ping_timeout: Optional[float]=20, close_timeout: Optional[float]=None, max_size: Optional[int]=(2 ** 20), max_queue: Optional[int]=(2 ** 5), read_limit: int=(2 ** 16), write_limit: int=(2 ** 16), **kwargs: Any) -> None: timeout: Optional[float] = kwargs.pop('timeout', None) if (timeout is None): timeout = 10 else: warnings.warn('rename timeout to close_timeout', DeprecationWarning) if (close_timeout is None): close_timeout = timeout klass: Optional[Type[WebSocketClientProtocol]] = kwargs.pop('klass', None) if (klass is None): klass = WebSocketClientProtocol else: warnings.warn('rename klass to create_protocol', DeprecationWarning) if (create_protocol is None): create_protocol = klass legacy_recv: bool = kwargs.pop('legacy_recv', False) _loop: Optional[asyncio.AbstractEventLoop] = kwargs.pop('loop', None) if (_loop is None): loop = asyncio.get_event_loop() else: loop = _loop warnings.warn('remove loop argument', DeprecationWarning) wsuri = parse_uri(uri) if wsuri.secure: kwargs.setdefault('ssl', True) elif (kwargs.get('ssl') is not None): raise ValueError('connect() received a ssl argument for a ws:// URI, use a wss:// URI to enable TLS') if (compression == 'deflate'): extensions = enable_client_permessage_deflate(extensions) elif (compression is not None): raise ValueError(f'unsupported compression: {compression}') if (subprotocols is not None): validate_subprotocols(subprotocols) factory = functools.partial(create_protocol, logger=logger, origin=origin, extensions=extensions, subprotocols=subprotocols, extra_headers=extra_headers, user_agent_header=user_agent_header, ping_interval=ping_interval, ping_timeout=ping_timeout, close_timeout=close_timeout, max_size=max_size, max_queue=max_queue, read_limit=read_limit, write_limit=write_limit, host=wsuri.host, port=wsuri.port, secure=wsuri.secure, legacy_recv=legacy_recv, loop=_loop) if kwargs.pop('unix', False): path: Optional[str] = kwargs.pop('path', None) create_connection = functools.partial(loop.create_unix_connection, factory, path, **kwargs) else: host: Optional[str] port: Optional[int] if (kwargs.get('sock') is None): (host, port) = (wsuri.host, wsuri.port) else: (host, port) = (None, None) if kwargs.get('ssl'): kwargs.setdefault('server_hostname', wsuri.host) host = kwargs.pop('host', host) port = kwargs.pop('port', port) create_connection = functools.partial(loop.create_connection, factory, host, port, **kwargs) self.open_timeout = open_timeout if (logger is None): logger = logging.getLogger('websockets.client') self.logger = logger self._create_connection = create_connection self._uri = uri self._wsuri = wsuri def handle_redirect(self, uri: str) -> None: old_uri = self._uri old_wsuri = self._wsuri new_uri = urllib.parse.urljoin(old_uri, uri) new_wsuri = parse_uri(new_uri) if (old_wsuri.secure and (not new_wsuri.secure)): raise SecurityError('redirect from WSS to WS') same_origin = ((old_wsuri.host == new_wsuri.host) and (old_wsuri.port == new_wsuri.port)) if (not same_origin): factory = self._create_connection.args[0] 
factory = functools.partial(factory.func, *factory.args, **dict(factory.keywords, host=new_wsuri.host, port=new_wsuri.port)) self._create_connection = functools.partial(self._create_connection.func, *(factory, new_wsuri.host, new_wsuri.port), **self._create_connection.keywords) self._uri = new_uri self._wsuri = new_wsuri BACKOFF_MIN = 1.92 BACKOFF_MAX = 60.0 BACKOFF_FACTOR = 1.618 BACKOFF_INITIAL = 5 async def __aiter__(self) -> AsyncIterator[WebSocketClientProtocol]: backoff_delay = self.BACKOFF_MIN while True: try: async with self as protocol: (yield protocol) except Exception: if (backoff_delay == self.BACKOFF_MIN): initial_delay = (random.random() * self.BACKOFF_INITIAL) self.logger.info('! connect failed; reconnecting in %.1f seconds', initial_delay, exc_info=True) (await asyncio.sleep(initial_delay)) else: self.logger.info('! connect failed again; retrying in %d seconds', int(backoff_delay), exc_info=True) (await asyncio.sleep(int(backoff_delay))) backoff_delay = (backoff_delay * self.BACKOFF_FACTOR) backoff_delay = min(backoff_delay, self.BACKOFF_MAX) continue else: backoff_delay = self.BACKOFF_MIN async def __aenter__(self) -> WebSocketClientProtocol: return (await self) async def __aexit__(self, exc_type: Optional[Type[BaseException]], exc_value: Optional[BaseException], traceback: Optional[TracebackType]) -> None: (await self.protocol.close()) def __await__(self) -> Generator[(Any, None, WebSocketClientProtocol)]: return self.__await_impl_timeout__().__await__() async def __await_impl_timeout__(self) -> WebSocketClientProtocol: async with asyncio_timeout(self.open_timeout): return (await self.__await_impl__()) async def __await_impl__(self) -> WebSocketClientProtocol: for redirects in range(self.MAX_REDIRECTS_ALLOWED): (_transport, _protocol) = (await self._create_connection()) protocol = cast(WebSocketClientProtocol, _protocol) try: (await protocol.handshake(self._wsuri, origin=protocol.origin, available_extensions=protocol.available_extensions, available_subprotocols=protocol.available_subprotocols, extra_headers=protocol.extra_headers)) except RedirectHandshake as exc: protocol.fail_connection() (await protocol.wait_closed()) self.handle_redirect(exc.uri) except (Exception, asyncio.CancelledError): protocol.fail_connection() (await protocol.wait_closed()) raise else: self.protocol = protocol return protocol else: raise SecurityError('too many redirects') __iter__ = __await__
def process_dir(query_path, gallery_path): query_img_paths = glob.glob(os.path.join(query_path, '*.jpg')) gallery_img_paths = glob.glob(os.path.join(gallery_path, '*.jpg')) query_paths = [] pattern = re.compile('([-\\d]+)_c(\\d)') for img_path in query_img_paths: (pid, camid) = map(int, pattern.search(img_path).groups()) query_paths.append([img_path, pid, camid]) gallery_paths = [] for img_path in gallery_img_paths: (pid, camid) = map(int, pattern.search(img_path).groups()) gallery_paths.append([img_path, pid, camid]) return (query_paths, gallery_paths)
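The regex above assumes Market-1501-style filenames, '<person_id>_c<camera_id>...'; a quick check of the parsing needs no files on disk:

import re

pattern = re.compile('([-\\d]+)_c(\\d)')
for name in ['0001_c1s1_001051_00.jpg', '-1_c6s3_077419_03.jpg']:
    pid, camid = map(int, pattern.search(name).groups())
    print(name, '-> pid:', pid, 'camid:', camid)
# pid -1 is the conventional marker for distractor/junk images.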
@register_specialize('shape_unsafe') @node_rewriter([Elemwise]) def local_elemwise_alloc(fgraph, node): if (len(node.inputs) == 1): return None def dimshuffled_alloc(i): return (isinstance(i.owner.op, DimShuffle) and i.owner.inputs[0].owner and isinstance(i.owner.inputs[0].owner.op, Alloc)) alloc_idxs = [idx for (idx, i) in enumerate(node.inputs) if (i.owner and (isinstance(i.owner.op, Alloc) or dimshuffled_alloc(i)))] if (len(alloc_idxs) == 0): return False new_inputs = list(node.inputs) for idx in alloc_idxs: i = node.inputs[idx] if isinstance(i.owner.op, Alloc): new_inp = i.owner.inputs[0] elif isinstance(i.owner.op, DimShuffle): old_alloc = i.owner.inputs[0] old_alloc_inp = old_alloc.owner.inputs[0] missing_ndims = (old_alloc.type.ndim - old_alloc_inp.type.ndim) if (missing_ndims > 0): old_alloc_inp = shape_padleft(old_alloc_inp, missing_ndims) new_inp = i.owner.op(old_alloc_inp) copy_stack_trace(i, new_inp) new_inputs[idx] = new_inp new_outs = node.op(*new_inputs, return_list=True) if (new_outs[0].type.broadcastable != node.outputs[0].type.broadcastable): new_outs = [alloc_like(new_out, node.outputs[0], fgraph) for new_out in new_outs] copy_stack_trace(node.outputs, new_outs) return new_outs
def create_temporary_tag_if_necessary(manifest, expiration_sec): tag_name = ('$temp-%s' % str(uuid.uuid4())) now_ms = get_epoch_timestamp_ms() end_ms = (now_ms + (expiration_sec * 1000)) with db_transaction(): try: Tag.select().where((Tag.manifest == manifest), ((Tag.lifetime_end_ms >> None) | (Tag.lifetime_end_ms >= end_ms))).get() return None except Tag.DoesNotExist: pass return Tag.create(name=tag_name, repository=manifest.repository_id, lifetime_start_ms=now_ms, lifetime_end_ms=end_ms, reversion=False, hidden=True, manifest=manifest, tag_kind=Tag.tag_kind.get_id('tag'))
_sentencepiece _tokenizers class DebertaV2TokenizationTest(TokenizerTesterMixin, unittest.TestCase): tokenizer_class = DebertaV2Tokenizer rust_tokenizer_class = None test_rust_tokenizer = False test_sentencepiece = True test_sentencepiece_ignore_case = True def setUp(self): super().setUp() tokenizer = DebertaV2Tokenizer(SAMPLE_VOCAB) tokenizer.save_pretrained(self.tmpdirname) def get_input_output_texts(self, tokenizer): input_text = 'this is a test' output_text = 'this is a test' return (input_text, output_text) def test_convert_token_and_id(self): token = '<pad>' token_id = 0 self.assertEqual(self.get_tokenizer()._convert_token_to_id(token), token_id) self.assertEqual(self.get_tokenizer()._convert_id_to_token(token_id), token) def test_get_vocab(self): vocab_keys = list(self.get_tokenizer().get_vocab().keys()) self.assertEqual(vocab_keys[0], '<pad>') self.assertEqual(vocab_keys[1], '<unk>') self.assertEqual(vocab_keys[(- 1)], '[PAD]') self.assertEqual(len(vocab_keys), 30001) def test_vocab_size(self): self.assertEqual(self.get_tokenizer().vocab_size, 30000) def test_rust_and_python_full_tokenizers(self): if (not self.test_rust_tokenizer): return tokenizer = self.get_tokenizer() rust_tokenizer = self.get_rust_tokenizer() sequence = 'I was born in 92000, and this is false.' tokens = tokenizer.tokenize(sequence) rust_tokens = rust_tokenizer.tokenize(sequence) self.assertListEqual(tokens, rust_tokens) ids = tokenizer.encode(sequence, add_special_tokens=False) rust_ids = rust_tokenizer.encode(sequence, add_special_tokens=False) self.assertListEqual(ids, rust_ids) rust_tokenizer = self.get_rust_tokenizer() ids = tokenizer.encode(sequence) rust_ids = rust_tokenizer.encode(sequence) self.assertListEqual(ids, rust_ids) def test_full_tokenizer(self): tokenizer = DebertaV2Tokenizer(SAMPLE_VOCAB, keep_accents=True) tokens = tokenizer.tokenize('This is a test') self.assertListEqual(tokens, ['', '[UNK]', 'his', 'is', 'a', 'test']) self.assertListEqual(tokenizer.convert_tokens_to_ids(tokens), [13, 1, 4398, 25, 21, 1289]) tokens = tokenizer.tokenize('I was born in 92000, and this is false.') self.assertListEqual(tokens, ['', '[UNK]', 'was', 'born', 'in', '9', '2000', ',', 'and', 'this', 'is', 'fal', 's', '[UNK]', '.']) ids = tokenizer.convert_tokens_to_ids(tokens) self.assertListEqual(ids, [13, 1, 23, 386, 19, 561, 3050, 15, 17, 48, 25, 8256, 18, 1, 9]) back_tokens = tokenizer.convert_ids_to_tokens(ids) self.assertListEqual(back_tokens, ['', '<unk>', 'was', 'born', 'in', '9', '2000', ',', 'and', 'this', 'is', 'fal', 's', '<unk>', '.']) def test_sequence_builders(self): tokenizer = DebertaV2Tokenizer(SAMPLE_VOCAB) text = tokenizer.encode('sequence builders') text_2 = tokenizer.encode('multi-sequence build') encoded_sentence = tokenizer.build_inputs_with_special_tokens(text) encoded_pair = tokenizer.build_inputs_with_special_tokens(text, text_2) self.assertEqual((([tokenizer.cls_token_id] + text) + [tokenizer.sep_token_id]), encoded_sentence) self.assertEqual((((([tokenizer.cls_token_id] + text) + [tokenizer.sep_token_id]) + text_2) + [tokenizer.sep_token_id]), encoded_pair) def test_tokenizer_integration(self): expected_encoding = {'input_ids': [[1, 39867, 36, 19390, 486, 27, 35052, 81436, 18, 60685, 1225, 7, 35052, 81436, 18, 9367, 16899, 18, 15937, 53, 594, 773, 18, 16287, 30465, 36, 15937, 6, 41139, 38, 36979, 60763, 191, 6, 34132, 99, 6, 50538, 390, 43230, 6, 34132, 2779, 20850, 14, 699, 1072, 1194, 36, 382, 10901, 53, 7, 699, 1072, 2084, 36, 20422, 630, 53, 19, 105, 3049, 1896, 1053, 16899, 1506, 11, 
37978, 4243, 7, 1237, 31869, 200, 16566, 654, 6, 35052, 81436, 7, 55630, 13593, 4, 2], [1, 26, 15011, 13, 667, 8, 1053, 18, 23611, 1237, 72356, 12820, 34, 104134, 1209, 35, 13313, 6627, 21, 202, 347, 7, 164, 2399, 11, 46, 4485, 4, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1, 5, 1232, 2864, 15785, 14951, 105, 5, 8581, 1250, 4, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]], 'token_type_ids': [[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]], 'attention_mask': [[1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1], [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]]} self.tokenizer_integration_test_util(expected_encoding=expected_encoding, model_name='microsoft/deberta-v2-xlarge', revision='ad6e42c1532ddf3a15c39246b63f5559d558b670')
def _numerator(z_slice_shape, precision, unroll=1): def fwd(qs, ks, vs): def body(p, qkv): (q, k, v) = qkv p += jnp.einsum('...m,...d->...md', k, v, precision=precision) X_slice = jnp.einsum('...m,...md->...d', q, p, precision=precision) return (p, X_slice) init_value = jnp.zeros(z_slice_shape) (p, W) = lax.scan(body, init_value, (qs, ks, vs), unroll=unroll) return (W, (p, qs, ks, vs)) def bwd(pqkv, W_ct): def body(carry, qkv_xct): (p, p_ct) = carry (q, k, v, x_ct) = qkv_xct q_ct = jnp.einsum('...d,...md->...m', x_ct, p, precision=precision) p_ct += jnp.einsum('...d,...m->...md', x_ct, q, precision=precision) k_ct = jnp.einsum('...md,...d->...m', p_ct, v, precision=precision) v_ct = jnp.einsum('...md,...m->...d', p_ct, k, precision=precision) p -= jnp.einsum('...m,...d->...md', k, v, precision=precision) return ((p, p_ct), (q_ct, k_ct, v_ct)) (p, qs, ks, vs) = pqkv (_, (qs_ct, ks_ct, vs_ct)) = lax.scan(body, (p, jnp.zeros_like(p)), (qs, ks, vs, W_ct), reverse=True, unroll=unroll) return (qs_ct, ks_ct, vs_ct) @jax.custom_vjp def _numerator_impl(qs, ks, vs): (W, _) = fwd(qs, ks, vs) return W _numerator_impl.defvjp(fwd, bwd) return _numerator_impl
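A shape-level sketch of the factory above (the causal numerator of Performer-style linear attention), assuming JAX is available; L steps, M random features, D value channels, and z_slice_shape matching the running prefix-sum state p:

import jax
import jax.numpy as jnp
from jax import lax

L, M, D = 8, 4, 3
kq, kk, kv = jax.random.split(jax.random.PRNGKey(0), 3)
qs = jax.random.uniform(kq, (L, M))
ks = jax.random.uniform(kk, (L, M))
vs = jax.random.uniform(kv, (L, D))

numerator = _numerator(z_slice_shape=(M, D), precision=lax.Precision.DEFAULT)
W = numerator(qs, ks, vs)
print(W.shape)  # (L, D): the attention numerator at every prefix

# The custom VJP replays the scan in reverse instead of materializing all
# prefix sums, so the backward pass keeps only O(M * D) extra state:
grads = jax.grad(lambda q, k, v: numerator(q, k, v).sum(), argnums=(0, 1, 2))(qs, ks, vs)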
class Transformer(nn.Module): def __init__(self, dim, proj_kernel, kv_proj_stride, depth, heads, dim_head=64, mlp_mult=4, dropout=0.0): super().__init__() self.layers = nn.ModuleList([]) for _ in range(depth): self.layers.append(nn.ModuleList([PreNorm(dim, Attention(dim, proj_kernel=proj_kernel, kv_proj_stride=kv_proj_stride, heads=heads, dim_head=dim_head, dropout=dropout)), PreNorm(dim, FeedForward(dim, mlp_mult, dropout=dropout))])) def forward(self, x): for (attn, ff) in self.layers: x = (attn(x) + x) x = (ff(x) + x) return x
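A shape sketch for the block above (a CvT-style transformer stage), assuming the PreNorm, Attention, and FeedForward helpers it references are in scope; the input stays in (B, C, H, W) layout because the attention projections are convolutional:

import torch

stage = Transformer(dim=64, proj_kernel=3, kv_proj_stride=2, depth=2, heads=4)
x = torch.randn(1, 64, 32, 32)
print(stage(x).shape)  # expected: torch.Size([1, 64, 32, 32])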
class PreferencesButton(Gtk.HBox): def __init__(self, browser): super().__init__() self._menu = menu = Gtk.Menu() pref_item = MenuItem(_('_Preferences'), Icons.PREFERENCES_SYSTEM) def preferences_cb(menu_item): window = Preferences(browser) window.show() pref_item.connect('activate', preferences_cb) menu.append(pref_item) menu.show_all() button = MenuButton(SymbolicIconImage(Icons.EMBLEM_SYSTEM, Gtk.IconSize.MENU), arrow=True) button.set_menu(menu) button.show() self.pack_start(button, True, True, 0)
def satisfy_filter(finds_address, is_order): def inner(x): order = [] for address in finds_address: if (address not in x): return False order.append(x.find(address)) if is_order: return (order == sorted(order)) return True return inner
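A quick demonstration of the closure: it keeps strings containing every needle, and with is_order=True additionally requires the needles to appear in the given order (by first occurrence):

f_any = satisfy_filter(['foo', 'bar'], is_order=False)
f_ord = satisfy_filter(['foo', 'bar'], is_order=True)

print(f_any('say bar then foo'))  # True: both present, order ignored
print(f_ord('say bar then foo'))  # False: bar occurs before foo
print(f_ord('foo ... bar'))       # True
print(f_any('only foo here'))     # False: bar missing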
class UserScriptsCollector(diamond.collector.Collector): def get_default_config_help(self): config_help = super(UserScriptsCollector, self).get_default_config_help() config_help.update({'scripts_path': 'Path to find the scripts to run'}) return config_help def get_default_config(self): config = super(UserScriptsCollector, self).get_default_config() config.update({'path': '.', 'scripts_path': '/etc/diamond/user_scripts/', 'floatprecision': 4}) return config def collect(self): scripts_path = self.config['scripts_path'] if (not os.access(scripts_path, os.R_OK)): return None for script in os.listdir(scripts_path): absolutescriptpath = os.path.join(scripts_path, script) executable = os.access(absolutescriptpath, os.X_OK) is_file = os.path.isfile(absolutescriptpath) if is_file: if (not executable): self.log.info(('%s is not executable' % absolutescriptpath)) continue else: continue out = None self.log.debug(('Executing %s' % absolutescriptpath)) try: proc = subprocess.Popen([absolutescriptpath], stdout=subprocess.PIPE, stderr=subprocess.PIPE) (out, err) = proc.communicate() except subprocess.CalledProcessError as e: self.log.error(('%s error launching: %s; skipping' % (absolutescriptpath, e))) continue if proc.returncode: self.log.error(('%s returned exit value %s; skipping' % (absolutescriptpath, proc.returncode))) continue if (not out): self.log.info(('%s returned no output' % absolutescriptpath)) continue if err: self.log.error(('%s returned error output (stderr): %s' % (absolutescriptpath, err))) for line in filter(None, out.split('\n')): try: (name, value) = line.split() float(value) except ValueError: self.log.error(('%s returned invalid/unparsable output: %s' % (absolutescriptpath, line))) continue floatprecision = 0 if ('.' in value): floatprecision = self.config['floatprecision'] self.publish(name, value, precision=floatprecision)
def _decode_headers(decoder, encoded_header_block): try: return decoder.decode(encoded_header_block, raw=True) except OversizedHeaderListError as e: raise DenialOfServiceError(('Oversized header block: %s' % e)) except (HPACKError, IndexError, TypeError, UnicodeDecodeError) as e: raise ProtocolError(('Error decoding header block: %s' % e))
class MpiAdam(object): def __init__(self, var_list, *, beta1=0.9, beta2=0.999, epsilon=1e-08, scale_grad_by_procs=True, comm=None): self.var_list = var_list self.beta1 = beta1 self.beta2 = beta2 self.epsilon = epsilon self.scale_grad_by_procs = scale_grad_by_procs size = sum((U.numel(v) for v in var_list)) self.m = np.zeros(size, 'float32') self.v = np.zeros(size, 'float32') self.t = 0 self.setfromflat = U.SetFromFlat(var_list) self.getflat = U.GetFlat(var_list) self.comm = (MPI.COMM_WORLD if ((comm is None) and (MPI is not None)) else comm) def update(self, localg, stepsize): if ((self.t % 100) == 0): self.check_synced() localg = localg.astype('float32') if (self.comm is not None): globalg = np.zeros_like(localg) self.comm.Allreduce(localg, globalg, op=MPI.SUM) if self.scale_grad_by_procs: globalg /= self.comm.Get_size() else: globalg = np.copy(localg) self.t += 1 a = ((stepsize * np.sqrt((1 - (self.beta2 ** self.t)))) / (1 - (self.beta1 ** self.t))) self.m = ((self.beta1 * self.m) + ((1 - self.beta1) * globalg)) self.v = ((self.beta2 * self.v) + ((1 - self.beta2) * (globalg * globalg))) step = (((- a) * self.m) / (np.sqrt(self.v) + self.epsilon)) self.setfromflat((self.getflat() + step)) def sync(self): if (self.comm is None): return theta = self.getflat() self.comm.Bcast(theta, root=0) self.setfromflat(theta) def check_synced(self): if (self.comm is None): return if (self.comm.Get_rank() == 0): theta = self.getflat() self.comm.Bcast(theta, root=0) else: thetalocal = self.getflat() thetaroot = np.empty_like(thetalocal) self.comm.Bcast(thetaroot, root=0) assert (thetaroot == thetalocal).all(), (thetaroot, thetalocal)
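The update() above is plain Adam applied to a flattened parameter vector, with an optional MPI all-reduce of the gradient. The same arithmetic, written dependency-free in NumPy for reference (this stand-alone version is illustrative, not part of the original):

import numpy as np

def adam_step(theta, g, m, v, t, stepsize, beta1=0.9, beta2=0.999, eps=1e-8):
    t += 1
    a = stepsize * np.sqrt(1 - beta2 ** t) / (1 - beta1 ** t)
    m = beta1 * m + (1 - beta1) * g
    v = beta2 * v + (1 - beta2) * (g * g)
    theta = theta + (-a) * m / (np.sqrt(v) + eps)
    return theta, m, v, t

target = np.array([1.0, -2.0, 0.5])
theta, m, v, t = np.zeros(3), np.zeros(3), np.zeros(3), 0
for _ in range(200):
    g = 2 * (theta - target)  # gradient of ||theta - target||^2
    theta, m, v, t = adam_step(theta, g, m, v, t, stepsize=0.1)
print(theta)  # approaches [1.0, -2.0, 0.5]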
class ResultCollection(): @classmethod def load(cls, directory): if (not os.path.isdir(directory)): raise ValueError(f"Given filepath '{directory}' is not a directory") order_fp = os.path.join(directory, '.order') if os.path.isfile(order_fp): collection = cls._load_ordered(directory, order_fp) else: warnings.warn(f"The directory '{directory}' does not contain a .order file. The files will be read into the collection in the order the filesystem provides them in.") collection = cls._load_unordered(directory) return collection @classmethod def _load_ordered(cls, directory, order_fp): collection = cls() with open(order_fp, 'r') as order_fh: for result_name in order_fh.read().splitlines(): result_fp = cls._get_result_fp(directory, result_name) collection[result_name] = Result.load(result_fp) return collection @classmethod def _load_unordered(cls, directory): collection = cls() for result in os.listdir(directory): result_fp = os.path.join(directory, result) result_name = result.removesuffix('.qza') result_name = result_name.removesuffix('.qzv') collection[result_name] = Result.load(result_fp) return collection @classmethod def _get_result_fp(cls, directory, result_name): result_fp = os.path.join(directory, result_name) if (not os.path.isfile(result_fp)): result_fp += '.qza' if (not os.path.isfile(result_fp)): result_fp = result_fp[:(- 4)] result_fp += '.qzv' if (not os.path.isfile(result_fp)): raise ValueError(f"The Result '{result_name}' is referenced in the order file but does not exist in the directory.") return result_fp def __init__(self, collection=None): if (collection is None): self.collection = {} elif isinstance(collection, dict): qiime2.sdk.util.validate_result_collection_keys(*collection.keys()) self.collection = collection else: self.collection = {str(k): v for (k, v) in enumerate(collection)} def __contains__(self, item): return (item in self.collection) def __eq__(self, other): if isinstance(other, dict): return (self.collection == other) elif isinstance(other, ResultCollection): return (self.collection == other.collection) else: raise TypeError(f"Equality between '{type(other)}' and ResultCollection is undefined.") def __len__(self): return len(self.collection) def __iter__(self): (yield from self.collection) def __setitem__(self, key, item): qiime2.sdk.util.validate_result_collection_keys(key) self.collection[key] = item def __getitem__(self, key): return self.collection[key] def __repr__(self): return f'<{self.__class__.__name__.lower()}: {self.type}>' @property def type(self): inner_type = qiime2.core.type.grammar.UnionExp((v.type for v in self.collection.values())).normalize() return qiime2.core.type.Collection[inner_type] @property def extension(self): if (str(self.type) == 'Collection[Visualization]'): return '.qzv' return '.qza' def save(self, directory): if os.path.exists(directory): raise ValueError(f"The given directory '{directory}' already exists. A new directory must be given to save the collection to.") os.makedirs(directory) with open(os.path.join(directory, '.order'), 'w') as fh: for (name, result) in self.collection.items(): result_fp = os.path.join(directory, name) result.save(result_fp) fh.write(f'''{name}
''') return directory def save_unordered(self, directory): if os.path.exists(directory): raise ValueError(f"The given directory '{directory}' already exists. A new directory must be given to save the collection to.") os.makedirs(directory) for (name, result) in self.collection.items(): result_fp = os.path.join(directory, name) result.save(result_fp) return directory def keys(self): return self.collection.keys() def values(self): return self.collection.values() def items(self): return self.collection.items() def validate(self, view, level=None): for result in self.values(): result.validate(view, level) def result(self): return self
def get_polarity_form_result(score_dict): extracted_meta = {} for (source, score) in score_dict.items(): source = source.lower().strip() if (source != ''): if (score['PosScore'] < score['NegScore']): extracted_meta[source] = 'negative' else: extracted_meta[source] = 'positive' return extracted_meta
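Example input/output for the helper above: per-source score pairs collapse to a label, empty source names are dropped, and ties land on 'positive' since only a strictly larger NegScore yields 'negative':

scores = {
    'Twitter ': {'PosScore': 0.7, 'NegScore': 0.2},
    'reviews': {'PosScore': 0.1, 'NegScore': 0.6},
    '': {'PosScore': 0.9, 'NegScore': 0.0},  # dropped: empty source name
}
print(get_polarity_form_result(scores))
# {'twitter': 'positive', 'reviews': 'negative'}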
class RandomHierarchicalPolicy(object): def __init__(self, base_policy, num_skills, steps_per_option): self._steps_per_option = steps_per_option self._base_policy = base_policy self._num_skills = num_skills self.reset() def reset(self): self._t = 0 self._z = None def get_action(self, obs): if ((self._t % self._steps_per_option) == 0): self._z = np.random.choice(self._num_skills) self._t += 1 aug_obs = concat_obs_z(obs, self._z, self._num_skills) return self._base_policy.get_action(aug_obs)
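A runnable sketch with stand-ins for the two external pieces the policy relies on: a concat_obs_z helper (which in DIAYN-style code appends a one-hot skill code to the observation) and a base policy exposing get_action. Both stubs below are hypothetical:

import numpy as np

def concat_obs_z(obs, z, num_skills):
    one_hot = np.zeros(num_skills)
    one_hot[z] = 1.0
    return np.concatenate([obs, one_hot])

class RandomBasePolicy:
    def get_action(self, aug_obs):
        return np.random.uniform(-1.0, 1.0, size=2)  # pretend 2-D action space

policy = RandomHierarchicalPolicy(RandomBasePolicy(), num_skills=4, steps_per_option=10)
policy.reset()
obs = np.zeros(3)
for t in range(25):
    action = policy.get_action(obs)  # a fresh skill z is drawn every 10 steps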
class RenderedObservation(ObservationWrapper): def __init__(self, env, observation_type, image_size, render_kwargs, crop=None): super(RenderedObservation, self).__init__(env) self._type = observation_type self._size = image_size if (observation_type == 'rgb_image'): last_dim = 3 elif (observation_type == 'binary_image'): last_dim = 1 else: raise RuntimeError('Invalid observation type') self.observation_space = Box(0.0, 1.0, (image_size + (last_dim,)), np.float32) self._render_kwargs = render_kwargs self._crop = crop def observation(self, _): image = self.env.render(**self._render_kwargs) image = Image.fromarray(image) if self._crop: (w, h) = image.size image = image.crop((self._crop[0], self._crop[1], (w - self._crop[2]), (h - self._crop[3]))) if (image.size != self._size): image = image.resize(self._size, Image.BILINEAR) if (self._type == 'binary_image'): image = image.convert('L') image = np.array(image, copy=False) image = np.clip(image, 0, 255).astype(np.float32) bias = dict(rgb_image=0.5, binary_image=0.0).get(self._type) return utils.preprocess(image, bias)
class SOTLAgent(Agent): def __init__(self, dic_agent_conf, dic_traffic_env_conf, dic_path, cnt_round): super(SOTLAgent, self).__init__(dic_agent_conf, dic_traffic_env_conf, dic_path) self.current_phase_time = 0 if (self.dic_traffic_env_conf['SIMULATOR_TYPE'] == 'anon'): self.DIC_PHASE_MAP = {1: 0, 2: 1, 3: 2, 4: 3, 5: 4, 6: 5, 7: 6, 8: 7, 0: 0} self.green_4_lane = {0: [0, 1], 1: [2, 3]} self.green_8_lane = {0: [1, 3], 1: [5, 7], 2: [0, 2], 3: [4, 6], 4: [0, 1], 5: [2, 3], 6: [6, 7], 7: [4, 5]} else: self.DIC_PHASE_MAP = {0: 0, 1: 1, 2: 2, 3: 3, (- 1): (- 1)} def choose_action(self, count, state): if (state['cur_phase'][0] == (- 1)): return self.action cur_phase = self.DIC_PHASE_MAP[state['cur_phase'][0]] print(state['time_this_phase'][0], self.dic_agent_conf['PHI'], cur_phase) if (len(self.dic_traffic_env_conf['PHASE']) == 2): green_lane = self.green_4_lane else: green_lane = self.green_8_lane if ((state['time_this_phase'][0] >= self.dic_agent_conf['PHI']) and (cur_phase != (- 1))): green_vec = sum([state['lane_num_vehicle_been_stopped_thres1'][i] for i in green_lane[cur_phase]]) red_vec = (sum(state['lane_num_vehicle_been_stopped_thres1']) - green_vec) print(('green: %d, red: %d' % (green_vec, red_vec))) if ((green_vec <= self.dic_agent_conf['MIN_GREEN_VEC']) and (red_vec > self.dic_agent_conf['MAX_RED_VEC'])): self.current_phase_time = 0 self.action = ((cur_phase + 1) % len(self.dic_traffic_env_conf['PHASE'])) return ((cur_phase + 1) % len(self.dic_traffic_env_conf['PHASE'])) else: self.action = cur_phase self.current_phase_time += 1 return cur_phase else: self.action = cur_phase self.current_phase_time += 1 return cur_phase
def get_material_parameters(mat: material): try: bulk_recombination_energy = (mat.bulk_recombination_energy / q) except ValueError: bulk_recombination_energy = 0 try: auger_electron = (mat.electron_auger_recombination * 1000000000000.0) except ValueError: auger_electron = 0 try: auger_hole = (mat.hole_auger_recombination * 1000000000000.0) except ValueError: auger_hole = 0 try: electron_minority_lifetime = mat.electron_minority_lifetime except ValueError: electron_minority_lifetime = carrier_constants('electron_minority_lifetime', mat) try: hole_minority_lifetime = mat.hole_minority_lifetime except ValueError: hole_minority_lifetime = carrier_constants('hole_minority_lifetime', mat) new_mat = {'Nc': (mat.Nc * 1e-06), 'Nv': (mat.Nv * 1e-06), 'Eg': (mat.band_gap / q), 'affinity': (mat.electron_affinity / q), 'epsilon': mat.relative_permittivity, 'mu_e': (mat.electron_mobility * 10000.0), 'mu_h': (mat.hole_mobility * 10000.0), 'tau_e': electron_minority_lifetime, 'tau_h': hole_minority_lifetime, 'Et': bulk_recombination_energy, 'B': (mat.radiative_recombination * 1000000.0), 'Cn': auger_electron, 'Cp': auger_hole} return new_mat
def train(epochs, decay=0, threshold=0.0): model.train() pbar = tqdm(range(epochs), total=epochs) curves = np.zeros((epochs, 14)) for epoch in pbar: for (batch_idx, (data, target)) in enumerate(train_loader): (data, target) = (data.to(device), target.to(device)) optimizer.zero_grad() output = model(data) loss = F.nll_loss(output, target) reg = 0.0 if decay: reg = 0.0 for param in model.parameters(): if (param.requires_grad and (torch.sum(torch.abs(param)) > 0)): if (args.reg_type == 1): reg += torch.sum(torch.abs(param)) elif (args.reg_type == 2): reg += (torch.sum(torch.abs(param)) / torch.sqrt(torch.sum((param ** 2)))) elif (args.reg_type == 3): reg += ((torch.sum(torch.abs(param)) ** 2) / torch.sum((param ** 2))) elif (args.reg_type == 4): reg += torch.sum(((2 * torch.abs(param)) / (1 + torch.abs(param)))) else: reg = 0.0 total_loss = (loss + (decay * reg)) total_loss.backward() optimizer.step() if ((batch_idx % args.log_interval) == 0): done = (batch_idx * len(data)) percentage = ((100.0 * batch_idx) / len(train_loader)) pbar.set_description(f'Train Epoch: {epoch} [{done:5}/{len(train_loader.dataset)} ({percentage:3.0f}%)] Loss: {loss.item():.3f} Reg: {reg:.3f}')
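The four penalties selected by args.reg_type above, written out stand-alone (assuming PyTorch); reg_type 2 is the L1/L2 ratio and reg_type 3 its square, often called the Hoyer and Hoyer-Square sparsity measures, while reg_type 4 is a transformed-L1 penalty:

import torch

def reg_l1(p):              # reg_type == 1
    return torch.sum(torch.abs(p))

def reg_l1_over_l2(p):      # reg_type == 2
    return torch.sum(torch.abs(p)) / torch.sqrt(torch.sum(p ** 2))

def reg_hoyer_square(p):    # reg_type == 3
    return torch.sum(torch.abs(p)) ** 2 / torch.sum(p ** 2)

def reg_transformed_l1(p):  # reg_type == 4
    return torch.sum(2 * torch.abs(p) / (1 + torch.abs(p)))

p = torch.tensor([0.0, 0.5, -2.0])
for fn in (reg_l1, reg_l1_over_l2, reg_hoyer_square, reg_transformed_l1):
    print(fn.__name__, float(fn(p)))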
@pytest.fixture(scope='module') def survey_project(project_urls, project_token, mocked_responses) -> Project: def request_callback_survey(req): (request_data, request_headers, request_type) = parse_request(req) request_handler = get_survey_project_request_handler(request_type) response = request_handler(data=request_data, headers=request_headers) return response survey_project_url = project_urls['survey_project'] mocked_responses.add_callback(responses.POST, survey_project_url, callback=request_callback_survey, content_type='application/json') return Project(survey_project_url, project_token, verify_ssl=False)
class Effect5673(BaseEffect): type = 'passive' @staticmethod def handler(fit, ship, context, projectionRange, **kwargs): fit.modules.filteredItemBoost((lambda mod: mod.item.requiresSkill('Small Projectile Turret')), 'damageMultiplier', ship.getModifiedItemAttr('eliteBonusInterceptor2'), skill='Interceptors', **kwargs)
class NestedDictionaryDataset(FairseqDataset): def __init__(self, defn, sizes=None): super().__init__() self.defn = _flatten(defn) self.sizes = ([sizes] if (not isinstance(sizes, (list, tuple))) else sizes) first = None for v in self.defn.values(): if (not isinstance(v, (FairseqDataset, torch.utils.data.Dataset))): raise ValueError('Expected Dataset but found: {}'.format(v.__class__)) first = (first or v) if (len(v) > 0): assert (len(v) == len(first)), 'dataset lengths must match' self._len = len(first) def __getitem__(self, index): return OrderedDict(((k, ds[index]) for (k, ds) in self.defn.items())) def __len__(self): return self._len def collater(self, samples): if (len(samples) == 0): return {} sample = OrderedDict() for (k, ds) in self.defn.items(): try: sample[k] = ds.collater([s[k] for s in samples]) except NotImplementedError: sample[k] = default_collate([s[k] for s in samples]) return _unflatten(sample) def num_tokens(self, index): return max((s[index] for s in self.sizes)) def size(self, index): if (len(self.sizes) == 1): return self.sizes[0][index] else: return (s[index] for s in self.sizes) def supports_prefetch(self): return any((ds.supports_prefetch for ds in self.defn.values())) def prefetch(self, indices): for ds in self.defn.values(): if getattr(ds, 'supports_prefetch', False): ds.prefetch(indices) def set_epoch(self, epoch): super().set_epoch(epoch) for ds in self.defn.values(): ds.set_epoch(epoch)
class ScoreEvaluator(): def __init__(self, scores_file='cos_eor/scripts/orm/mlm_scores.npy', splits_file='cos_eor/scripts/orm/amt_data/splits.yaml', matching='obj_room_recep', random=False, seed=0) -> None: self.splits_file = splits_file self.scores_file = scores_file self.get_scores(matching=matching) if random: np.random.seed(seed) self.scores = self.gen_random_ranks(self.scores) self.amt = AmtDataReader() def get_objects_list(self, split='trainval'): all_objects = yaml.full_load(open(self.splits_file)) objects_list = all_objects['objects'][split] if ('guitar' in objects_list): objects_list.remove('guitar') return objects_list def read_scores_file(self): scores_dict = np.load(self.scores_file, allow_pickle=True).item() return (scores_dict['scores'], scores_dict['objects'], scores_dict['rooms'], scores_dict['receptacles']) def get_scores(self, matching='obj_room_recep'): if (matching == 'obj_room_recep'): (self.scores, self.objects_in_file, self.rooms, self.receptacles) = self.read_scores_file() self.rooms = [room.replace(' ', '_') for room in self.rooms] self.receptacles = [rec.replace(' ', '_') for rec in self.receptacles] room_indices = np.argsort(self.rooms) self.rooms = np.sort(self.rooms).tolist() self.scores = np.take(self.scores, room_indices, axis=1) self.obj_to_idx = {obj.replace(' ', '_'): idx for (idx, obj) in enumerate(self.objects_in_file)} self.recep_to_idx = {recep.replace(' ', '_'): idx for (idx, recep) in enumerate(self.receptacles)} self.room_to_idx = {room: idx for (idx, room) in enumerate(self.rooms)} return self.scores else: scores_dict = np.load(self.scores_file, allow_pickle=True).item() if ('object_room' in scores_dict): scores_dict = scores_dict['object_room'] (self.scores, self.objects_in_file, self.rooms) = (scores_dict['scores'], scores_dict['objects'], scores_dict['rooms']) self.rooms = [room.lstrip().strip().replace(' ', '_') for room in self.rooms] self.objects_in_file = [obj.replace(' ', '_') for obj in self.objects_in_file] room_indices = np.argsort(self.rooms) self.rooms = np.sort(self.rooms).tolist() self.scores = np.take(self.scores, room_indices, axis=1) self.room_to_idx = {room: idx for (idx, room) in enumerate(self.rooms)} self.obj_to_idx = {obj.replace(' ', '_'): idx for (idx, obj) in enumerate(self.objects_in_file)} def get_scores_for_obj_room(self, object, room, recep_list): recep_indices = [self.recep_to_idx[recep] for recep in recep_list] return self.scores[(self.obj_to_idx[object], self.room_to_idx[room], recep_indices)] def get_scores_for_obj(self, object): return self.scores[self.obj_to_idx[object]] def mAP(self, gt, preds): if (np.sum(gt) == 0): return (- 1) return ap_score(gt, preds) def evaluate_metrics(self, object, room, drop_receptacles=[], return_all=False): recep_list = self.amt.receps_per_room[room] filtered_recep_list = [recep_list[i] for i in range(len(recep_list)) if (recep_list[i] not in drop_receptacles)] predictions = self.get_scores_for_obj_room(object, room, filtered_recep_list) gt = self.amt.get_recep_with_k_votes(object, room, k=6) gt = [gt[i] for i in range(len(gt)) if (recep_list[i] not in drop_receptacles)] mAP = self.mAP(gt, predictions) if return_all: return (predictions, gt, recep_list, mAP) return mAP def evaluate(self, split='trainval', drop_receptacles=[]): objects = self.get_objects_list(split) mAP = np.array([[self.evaluate_metrics(o, r, drop_receptacles=drop_receptacles) for o in objects] for r in self.rooms if (r in self.amt.receps_per_room)]) return np.mean(mAP[(mAP > (- 1))]) def 
evaluate_obj_room_scores_per_obj(self, obj, obj_room_mappings, return_all=False): room_list = self.rooms predictions = self.get_scores_for_obj(obj) gt = obj_room_mappings[obj] gt = [(1 if (r in gt) else 0) for r in room_list] mAP = self.mAP(gt, predictions) if return_all: return (predictions, gt, room_list, mAP) return mAP def evaluate_obj_room_scores(self, split='trainval'): mappings = self.amt.gen_gt_obj_room_mappings(rooms_criteria=3, min_votes=6, K=1) objects = self.get_objects_list(split) mAP = np.array([self.evaluate_obj_room_scores_per_obj(o, mappings) for o in objects]) return np.mean(mAP[(mAP > (- 1))]) def gen_random_ranks(self, sample_matrix): n_items = sample_matrix.shape[(- 1)] n_rankings = int((np.prod(sample_matrix.shape) / n_items)) return np.array([np.random.permutation(n_items) for _ in range(n_rankings)]).reshape(sample_matrix.shape)
def test_upper_lower_index_size_check(): n_tensor = numpy.zeros((2, 2, 2)) m_tensor = numpy.zeros((2, 2)) with pytest.raises(IndexError): wedge(n_tensor, m_tensor, (3, 1), (1, 1)) with pytest.raises(IndexError): wedge(n_tensor, m_tensor, (4, 0), (1, 2)) with pytest.raises(IndexError): wedge(n_tensor, m_tensor, (2, 1), (1, 2))
class LabelItem(GraphicsWidgetAnchor, GraphicsWidget): def __init__(self, text=' ', parent=None, angle=0, **args): GraphicsWidget.__init__(self, parent) GraphicsWidgetAnchor.__init__(self) self.item = QtWidgets.QGraphicsTextItem(self) self.opts = {'color': None, 'justify': 'center'} self.opts.update(args) self._sizeHint = {} self.setText(text) self.setAngle(angle) def setAttr(self, attr, value): self.opts[attr] = value def setText(self, text, **args): self.text = text opts = self.opts for k in args: opts[k] = args[k] optlist = [] color = self.opts['color'] if (color is None): color = getConfigOption('foreground') color = fn.mkColor(color) optlist.append(('color: ' + color.name(QtGui.QColor.NameFormat.HexArgb))) if ('size' in opts): optlist.append(('font-size: ' + opts['size'])) if (('bold' in opts) and (opts['bold'] in [True, False])): optlist.append(('font-weight: ' + {True: 'bold', False: 'normal'}[opts['bold']])) if (('italic' in opts) and (opts['italic'] in [True, False])): optlist.append(('font-style: ' + {True: 'italic', False: 'normal'}[opts['italic']])) full = ("<span style='%s'>%s</span>" % ('; '.join(optlist), text)) self.item.setHtml(full) self.updateMin() self.resizeEvent(None) self.updateGeometry() def resizeEvent(self, ev): self.item.setPos(0, 0) bounds = self.itemRect() left = (self.mapFromItem(self.item, QtCore.QPointF(0, 0)) - self.mapFromItem(self.item, QtCore.QPointF(1, 0))) rect = self.rect() if (self.opts['justify'] == 'left'): if (left.x() != 0): bounds.moveLeft(rect.left()) if (left.y() < 0): bounds.moveTop(rect.top()) elif (left.y() > 0): bounds.moveBottom(rect.bottom()) elif (self.opts['justify'] == 'center'): bounds.moveCenter(rect.center()) elif (self.opts['justify'] == 'right'): if (left.x() != 0): bounds.moveRight(rect.right()) if (left.y() < 0): bounds.moveBottom(rect.bottom()) elif (left.y() > 0): bounds.moveTop(rect.top()) self.item.setPos((bounds.topLeft() - self.itemRect().topLeft())) self.updateMin() def setAngle(self, angle): self.angle = angle self.item.resetTransform() self.item.setRotation(angle) self.updateMin() def updateMin(self): bounds = self.itemRect() self.setMinimumWidth(bounds.width()) self.setMinimumHeight(bounds.height()) self._sizeHint = {QtCore.Qt.SizeHint.MinimumSize: (bounds.width(), bounds.height()), QtCore.Qt.SizeHint.PreferredSize: (bounds.width(), bounds.height()), QtCore.Qt.SizeHint.MaximumSize: ((- 1), (- 1)), QtCore.Qt.SizeHint.MinimumDescent: (0, 0)} self.updateGeometry() def sizeHint(self, hint, constraint): if (hint not in self._sizeHint): return QtCore.QSizeF(0, 0) return QtCore.QSizeF(*self._sizeHint[hint]) def itemRect(self): return self.item.mapRectToParent(self.item.boundingRect())
def guess_terminal(preference: ((str | Sequence) | None)=None) -> (str | None): test_terminals = [] if isinstance(preference, str): test_terminals += [preference] elif isinstance(preference, Sequence): test_terminals += list(preference) if ('WAYLAND_DISPLAY' in os.environ): test_terminals += ['foot'] test_terminals += ['roxterm', 'sakura', 'hyper', 'alacritty', 'terminator', 'termite', 'gnome-terminal', 'konsole', 'xfce4-terminal', 'lxterminal', 'mate-terminal', 'kitty', 'yakuake', 'tilda', 'guake', 'eterm', 'st', 'urxvt', 'wezterm', 'xterm', 'x-terminal-emulator'] for terminal in test_terminals: logger.debug('Guessing terminal: %s', terminal) if (not which(terminal, os.X_OK)): continue logger.info('Terminal found: %s', terminal) return terminal logger.error('Default terminal has not been found.') return None
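Typical calls into the helper above: with no argument it walks the built-in candidate list, while a string or sequence preference is tried first; only executables found on PATH are returned.

terminal = guess_terminal()                       # first candidate found on PATH
terminal = guess_terminal('kitty')                # prefer kitty if it exists
terminal = guess_terminal(['foot', 'alacritty'])  # try these before the defaults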
class EmbeddingTowerCollectionSharder(BaseEmbeddingSharder[EmbeddingTowerCollection]): def __init__(self, fused_params: Optional[Dict[(str, Any)]]=None, qcomm_codecs_registry: Optional[Dict[(str, QuantizedCommCodecs)]]=None) -> None: super().__init__(fused_params=fused_params, qcomm_codecs_registry=qcomm_codecs_registry) self._tower_sharder = EmbeddingTowerSharder(self.fused_params, qcomm_codecs_registry=qcomm_codecs_registry) def shard(self, module: EmbeddingTowerCollection, params: Dict[(str, ParameterSharding)], env: ShardingEnv, device: Optional[torch.device]=None) -> ShardedEmbeddingTowerCollection: return ShardedEmbeddingTowerCollection(module=module, table_name_to_parameter_sharding=params, tower_sharder=self._tower_sharder, env=env, fused_params=self.fused_params, device=device, qcomm_codecs_registry=self.qcomm_codecs_registry) def sharding_types(self, compute_device_type: str) -> List[str]: return [ShardingType.TABLE_ROW_WISE.value, ShardingType.TABLE_COLUMN_WISE.value] def shardable_parameters(self, module: EmbeddingTowerCollection) -> Dict[(str, nn.Parameter)]: named_parameters: Dict[(str, nn.Parameter)] = {} for tower in module.towers: named_parameters.update(self._tower_sharder.shardable_parameters(tower)) return named_parameters def module_type(self) -> Type[EmbeddingTowerCollection]: return EmbeddingTowerCollection
def parse_condition(toks, start_idx, tables_with_alias, schema, default_tables=None): idx = start_idx len_ = len(toks) isBlock = False if (toks[idx] == '('): isBlock = True idx += 1 conds = [] while (idx < len_): (idx, val_unit) = parse_val_unit(toks, idx, tables_with_alias, schema, default_tables) not_op = False if (toks[idx] == 'not'): not_op = True idx += 1 assert ((idx < len_) and (toks[idx] in WHERE_OPS)), 'Error condition: idx: {}, tok: {}'.format(idx, toks[idx]) op_id = WHERE_OPS.index(toks[idx]) idx += 1 val1 = val2 = None if (op_id == WHERE_OPS.index('between')): (idx, val1) = parse_value(toks, idx, tables_with_alias, schema, default_tables) assert (toks[idx] == 'and') idx += 1 (idx, val2) = parse_value(toks, idx, tables_with_alias, schema, default_tables) else: (idx, val1) = parse_value(toks, idx, tables_with_alias, schema, default_tables) val2 = None conds.append((not_op, op_id, val_unit, val1, val2)) if ((idx < len_) and ((toks[idx] in CLAUSE_KEYWORDS) or (toks[idx] in (')', ';')) or (toks[idx] in JOIN_KEYWORDS))): break if ((idx < len_) and (toks[idx] in COND_OPS)): conds.append(toks[idx]) idx += 1 if isBlock: assert (toks[idx] == ')') idx += 1 return (idx, conds)
def generate(args): constants.DATA_SAVE_PATH = args.save_path print(('Force Unsave Data: %s' % str(args.force_unsave))) if isinstance(args.x_display, (list, tuple)): args.x_display = random.choice(args.x_display) succ_traj = pd.DataFrame(columns=['goal', 'pickup', 'movable', 'receptacle', 'scene']) for (scene_type, ids) in constants.SCENE_TYPE.items(): for id in ids: obj_json_file = os.path.join(constants.LAYOUTS_PATH, ('FloorPlan%d-objects.json' % id)) with open(obj_json_file, 'r') as of: scene_objs = json.load(of) id_str = str(id) scene_id_to_objs[id_str] = scene_objs for obj in scene_objs: if (obj not in obj_to_scene_ids): obj_to_scene_ids[obj] = set() obj_to_scene_ids[obj].add(id_str) for g in constants.GOALS: for st in constants.GOALS_VALID[g]: scenes_for_goal[g].extend([str(s) for s in constants.SCENE_TYPE[st]]) scenes_for_goal[g] = set(scenes_for_goal[g]) for st in constants.SCENE_TYPE: for s in constants.SCENE_TYPE[st]: scene_to_type[str(s)] = st (succ_traj, full_traj) = dataset_management_util.load_successes_from_disk(args.save_path, succ_traj, args.just_examine, args.repeats_per_cond) if args.just_examine: print_successes(succ_traj) return fail_traj = dataset_management_util.load_fails_from_disk(args.save_path) print(('Loaded %d known failed tuples' % len(fail_traj))) env = ThorEnv(x_display=args.x_display) game_state = TaskGameStateFullKnowledge(env) agent = DeterministicPlannerAgent(thread_id=0, game_state=game_state) errors = {} goal_candidates = constants.GOALS[:] pickup_candidates = list(set().union(*[constants.VAL_RECEPTACLE_OBJECTS[obj] for obj in constants.VAL_RECEPTACLE_OBJECTS])) pickup_candidates = [p for p in pickup_candidates if (constants.OBJ_PARENTS[p] in obj_to_scene_ids)] movable_candidates = list(set(constants.MOVABLE_RECEPTACLES).intersection(obj_to_scene_ids.keys())) receptacle_candidates = ([obj for obj in constants.VAL_RECEPTACLE_OBJECTS if ((obj not in constants.MOVABLE_RECEPTACLES) and (obj in obj_to_scene_ids))] + [obj for obj in constants.VAL_ACTION_OBJECTS['Toggleable'] if (obj in obj_to_scene_ids)]) receptacle_candidates.remove('Toaster') receptacle_candidates.sort() scene_candidates = list(scene_id_to_objs.keys()) n_until_load_successes = args.async_load_every_n_samples print_successes(succ_traj) task_sampler = sample_task_params(succ_traj, full_traj, fail_traj, goal_candidates, pickup_candidates, movable_candidates, receptacle_candidates, scene_candidates) while True: sampled_task = next(task_sampler) print(sampled_task) if (sampled_task is None): sys.exit(('No valid tuples left to sample (all are known to fail or already have %d trajectories' % args.repeats_per_cond)) (gtype, pickup_obj, movable_obj, receptacle_obj, sampled_scene) = sampled_task print(('sampled tuple: ' + str((gtype, pickup_obj, movable_obj, receptacle_obj, sampled_scene)))) tries_remaining = args.trials_before_fail target_remaining = (args.repeats_per_cond - len(succ_traj.loc[(((((succ_traj['goal'] == gtype) & (succ_traj['pickup'] == pickup_obj)) & (succ_traj['movable'] == movable_obj)) & (succ_traj['receptacle'] == receptacle_obj)) & (succ_traj['scene'] == str(sampled_scene)))])) num_place_fails = 0 while ((tries_remaining > 0) and (target_remaining > 0)): constants.pddl_goal_type = gtype print(('PDDLGoalType: ' + constants.pddl_goal_type)) task_id = create_dirs(gtype, pickup_obj, movable_obj, receptacle_obj, sampled_scene) setup_data_dict() constants.data_dict['task_id'] = task_id constants.data_dict['task_type'] = constants.pddl_goal_type 
constants.data_dict['dataset_params']['video_frame_rate'] = constants.VIDEO_FRAME_RATE try: constraint_objs = {'repeat': [(constants.OBJ_PARENTS[pickup_obj], np.random.randint((2 if (gtype == 'pick_two_obj_and_place') else 1), (constants.PICKUP_REPEAT_MAX + 1)))], 'sparse': [(receptacle_obj.replace('Basin', ''), (num_place_fails * constants.RECEPTACLE_SPARSE_POINTS))]} if (movable_obj != 'None'): constraint_objs['repeat'].append((movable_obj, np.random.randint(1, (constants.PICKUP_REPEAT_MAX + 1)))) for obj_type in scene_id_to_objs[str(sampled_scene)]: if ((obj_type in pickup_candidates) and (obj_type != constants.OBJ_PARENTS[pickup_obj]) and (obj_type != movable_obj)): constraint_objs['repeat'].append((obj_type, np.random.randint(1, (constants.MAX_NUM_OF_OBJ_INSTANCES + 1)))) if (gtype in goal_to_invalid_receptacle): constraint_objs['empty'] = [(r.replace('Basin', ''), (num_place_fails * constants.RECEPTACLE_EMPTY_POINTS)) for r in goal_to_invalid_receptacle[gtype]] constraint_objs['seton'] = [] if (gtype == 'look_at_obj_in_light'): constraint_objs['seton'].append((receptacle_obj, False)) if (num_place_fails > 0): print((('Failed %d placements in the past; increased free point constraints: ' % num_place_fails) + str(constraint_objs))) scene_info = {'scene_num': sampled_scene, 'random_seed': random.randint(0, (2 ** 32))} info = agent.reset(scene=scene_info, objs=constraint_objs) task_objs = {'pickup': pickup_obj} if (movable_obj != 'None'): task_objs['mrecep'] = movable_obj if (gtype == 'look_at_obj_in_light'): task_objs['toggle'] = receptacle_obj else: task_objs['receptacle'] = receptacle_obj agent.setup_problem({'info': info}, scene=scene_info, objs=task_objs) object_poses = [{'objectName': obj['name'].split('(Clone)')[0], 'position': obj['position'], 'rotation': obj['rotation']} for obj in env.last_event.metadata['objects'] if obj['pickupable']] dirty_and_empty = (gtype == 'pick_clean_then_place_in_recep') object_toggles = [{'objectType': o, 'isOn': v} for (o, v) in constraint_objs['seton']] constants.data_dict['scene']['object_poses'] = object_poses constants.data_dict['scene']['dirty_and_empty'] = dirty_and_empty constants.data_dict['scene']['object_toggles'] = object_toggles print('Performing reset via thor_env API') env.reset(sampled_scene) print('Performing restore via thor_env API') env.restore_scene(object_poses, object_toggles, dirty_and_empty) event = env.step(dict(constants.data_dict['scene']['init_action'])) terminal = False while ((not terminal) and (agent.current_frame_count <= constants.MAX_EPISODE_LENGTH)): action_dict = agent.get_action(None) agent.step(action_dict) (reward, terminal) = agent.get_reward() dump_data_dict() save_video() except Exception as e: import traceback traceback.print_exc() print(('Error: ' + repr(e))) print('Invalid Task: skipping...') if args.debug: print(traceback.format_exc()) deleted = delete_save((args.num_threads > 0)) if (not deleted): target_remaining = 0 elif (str(e) == 'API Action Failed: No valid positions to place object found'): num_place_fails += 1 tries_remaining -= 1 else: tries_remaining -= 1 estr = str(e) if (len(estr) > 120): estr = estr[:120] if (estr not in errors): errors[estr] = 0 errors[estr] += 1 print('') es = sum([errors[er] for er in errors]) print(('\terrors (%d):' % es)) for (er, v) in sorted(errors.items(), key=(lambda kv: kv[1]), reverse=True): if ((v / es) < 0.01): break print(('\t(%.2f) (%d)\t%s' % ((v / es), v, er))) print('') continue if args.force_unsave: delete_save((args.num_threads > 0)) succ_traj = 
succ_traj.append({'goal': gtype, 'movable': movable_obj, 'pickup': pickup_obj, 'receptacle': receptacle_obj, 'scene': str(sampled_scene)}, ignore_index=True) target_remaining -= 1 tries_remaining += args.trials_before_fail if ((tries_remaining == 0) and (target_remaining == args.repeats_per_cond)): new_fails = [(gtype, pickup_obj, movable_obj, receptacle_obj, str(sampled_scene))] fail_traj = dataset_management_util.load_fails_from_disk(args.save_path, to_write=new_fails) print('') print(('failures (%d)' % len(fail_traj))) print('') if (target_remaining == 0): full_traj.add((gtype, pickup_obj, movable_obj, receptacle_obj, sampled_scene)) if (args.num_threads > 0): if (n_until_load_successes > 0): n_until_load_successes -= 1 else: print('Reloading trajectories from disk because of parallel processes...') succ_traj = pd.DataFrame(columns=succ_traj.columns) (succ_traj, full_traj) = load_successes_from_disk(args.save_path, succ_traj, False, args.repeats_per_cond) print(('... Loaded %d trajectories' % len(succ_traj.index))) n_until_load_successes = args.async_load_every_n_samples print_successes(succ_traj) task_sampler = sample_task_params(succ_traj, full_traj, fail_traj, goal_candidates, pickup_candidates, movable_candidates, receptacle_candidates, scene_candidates) print('... Created fresh instance of sample_task_params generator')
def select_by_keyword(session: Session, dag: nx.DiGraph) -> (set[str] | None): keywordexpr = session.config['expression'] if (not keywordexpr): return None try: expression = Expression.compile_(keywordexpr) except ParseError as e: msg = f"Wrong expression passed to '-k': {keywordexpr}: {e}" raise ValueError(msg) from None remaining: set[str] = set() for task in session.tasks: if (keywordexpr and expression.evaluate(KeywordMatcher.from_task(task))): remaining.update(task_and_preceding_tasks(task.signature, dag)) return remaining
def get_data(): ((x_train, y_train), (x_test, y_test)) = test_utils.get_test_data(num_train=batch_size, num_test=batch_size, input_shape=(data_dim,), classification=True, num_classes=num_classes) y_train = np_utils.to_categorical(y_train, num_classes) y_test = np_utils.to_categorical(y_test, num_classes) return ((x_train, y_train), (x_test, y_test))
def encoder_rgcn(inputs, units, training, dropout_rate=0.0): (graph_convolution_units, auxiliary_units) = units with tf.variable_scope('graph_convolutions'): output = multi_graph_convolution_layers(inputs, graph_convolution_units, activation=tf.nn.tanh, dropout_rate=dropout_rate, training=training) with tf.variable_scope('graph_aggregation'): (_, hidden_tensor, node_tensor) = inputs annotations = tf.concat(((output, hidden_tensor, node_tensor) if (hidden_tensor is not None) else (output, node_tensor)), (- 1)) output = graph_aggregation_layer(annotations, auxiliary_units, activation=tf.nn.tanh, dropout_rate=dropout_rate, training=training) return output
class HashChecker(ContentChecker): pattern = re.compile('(?P<hash_name>sha1|sha224|sha384|sha256|sha512|md5)=(?P<expected>[a-f0-9]+)') def __init__(self, hash_name, expected): self.hash_name = hash_name self.hash = hashlib.new(hash_name) self.expected = expected @classmethod def from_url(cls, url): fragment = urllib.parse.urlparse(url)[(- 1)] if (not fragment): return ContentChecker() match = cls.pattern.search(fragment) if (not match): return ContentChecker() return cls(**match.groupdict()) def feed(self, block): self.hash.update(block) def is_valid(self): return (self.hash.hexdigest() == self.expected) def report(self, reporter, template): msg = (template % self.hash_name) return reporter(msg)
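Exercising the checker on a URL fragment; the digest below is the well-known md5 of b'hello', so feeding that payload validates:

checker = HashChecker.from_url(
    'https://example.com/pkg-1.0.tar.gz#md5=5d41402abc4b2a76b9719d911017c592'
)
checker.feed(b'hello')
print(checker.is_valid())  # True
# URLs without a recognised hash fragment fall back to a no-op ContentChecker.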
def get_test_attrs(): attrs = {'name': 'IR_108', 'start_time': datetime.datetime(2018, 1, 1, 0), 'end_time': datetime.datetime(2018, 1, 1, 0, 15), 'int': 1, 'float': 1.0, 'none': None, 'numpy_int': np.uint8(1), 'numpy_float': np.float32(1), 'numpy_bool': True, 'numpy_void': np.void(0), 'numpy_bytes': np.bytes_('test'), 'numpy_string': np.str_('test'), 'list': [1, 2, np.float64(3)], 'nested_list': ['1', ['2', [3]]], 'bool': True, 'array': np.array([1, 2, 3], dtype='uint8'), 'array_bool': np.array([True, False, True]), 'array_2d': np.array([[1, 2], [3, 4]]), 'array_3d': np.array([[[1, 2], [3, 4]], [[1, 2], [3, 4]]]), 'dict': {'a': 1, 'b': 2}, 'nested_dict': {'l1': {'l2': {'l3': np.array([1, 2, 3], dtype='uint8')}}}, 'raw_metadata': OrderedDict([('recarray', np.zeros(3, dtype=[('x', 'i4'), ('y', 'u1')])), ('flag', np.bool_(True)), ('dict', OrderedDict([('a', 1), ('b', np.array([1, 2, 3], dtype='uint8'))]))])} encoded = {'name': 'IR_108', 'start_time': '2018-01-01 00:00:00', 'end_time': '2018-01-01 00:15:00', 'int': 1, 'float': 1.0, 'numpy_int': np.uint8(1), 'numpy_float': np.float32(1), 'numpy_bool': 'true', 'numpy_void': '[]', 'numpy_bytes': 'test', 'numpy_string': 'test', 'list': [1, 2, np.float64(3)], 'nested_list': '["1", ["2", [3]]]', 'bool': 'true', 'array': np.array([1, 2, 3], dtype='uint8'), 'array_bool': ['true', 'false', 'true'], 'array_2d': '[[1, 2], [3, 4]]', 'array_3d': '[[[1, 2], [3, 4]], [[1, 2], [3, 4]]]', 'dict': '{"a": 1, "b": 2}', 'nested_dict': '{"l1": {"l2": {"l3": [1, 2, 3]}}}', 'raw_metadata': '{"recarray": [[0, 0], [0, 0], [0, 0]], "flag": "true", "dict": {"a": 1, "b": [1, 2, 3]}}'} encoded_flat = {'name': 'IR_108', 'start_time': '2018-01-01 00:00:00', 'end_time': '2018-01-01 00:15:00', 'int': 1, 'float': 1.0, 'numpy_int': np.uint8(1), 'numpy_float': np.float32(1), 'numpy_bool': 'true', 'numpy_void': '[]', 'numpy_bytes': 'test', 'numpy_string': 'test', 'list': [1, 2, np.float64(3)], 'nested_list': '["1", ["2", [3]]]', 'bool': 'true', 'array': np.array([1, 2, 3], dtype='uint8'), 'array_bool': ['true', 'false', 'true'], 'array_2d': '[[1, 2], [3, 4]]', 'array_3d': '[[[1, 2], [3, 4]], [[1, 2], [3, 4]]]', 'dict_a': 1, 'dict_b': 2, 'nested_dict_l1_l2_l3': np.array([1, 2, 3], dtype='uint8'), 'raw_metadata_recarray': '[[0, 0], [0, 0], [0, 0]]', 'raw_metadata_flag': 'true', 'raw_metadata_dict_a': 1, 'raw_metadata_dict_b': np.array([1, 2, 3], dtype='uint8')} return (attrs, encoded, encoded_flat)
def conv1x1(in_channels, out_channels, module_name, postfix, stride=1, groups=1, kernel_size=1, padding=0): return [(f'{module_name}_{postfix}/conv', nn.Conv2d(in_channels, out_channels, kernel_size=kernel_size, stride=stride, padding=padding, groups=groups, bias=False)), (f'{module_name}_{postfix}/norm', nn.BatchNorm2d(out_channels)), (f'{module_name}_{postfix}/relu', nn.ReLU(inplace=True))]
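Because the helper returns (name, module) pairs, a block is assembled with an OrderedDict, as in VoVNet-style models (assuming PyTorch):

from collections import OrderedDict
import torch
from torch import nn

block = nn.Sequential(OrderedDict(conv1x1(64, 128, 'stage1', postfix=1)))
x = torch.randn(2, 64, 32, 32)
print(block(x).shape)  # torch.Size([2, 128, 32, 32])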
@python_2_unicode_compatible
class ConferenceVenue(AuditModel):
    name = models.CharField(max_length=100)
    address = models.TextField()
    latitude = models.DecimalField(max_digits=17, decimal_places=15)
    longitude = models.DecimalField(max_digits=19, decimal_places=16)

    def __str__(self):
        return self.name
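# A hypothetical usage sketch (field values are illustrative only).
from decimal import Decimal

venue = ConferenceVenue.objects.create(
    name='Example Hall',
    address='1 Example Street',
    latitude=Decimal('12.971598765432101'),
    longitude=Decimal('77.5945627890123456'),
)
assert str(venue) == 'Example Hall'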
@pytest.mark.parametrize('rho_type, args', [
    ('oper', {}),
    ('ket', {}),
    ('oper', {'fock_numbers': [0, 1, 2, 3]}),
    ('oper', {'unit_y_range': False}),
])
def test_plot_fock_distribution(rho_type, args):
    if rho_type == 'oper':
        rho = qutip.rand_dm(4)
    else:
        rho = qutip.basis(2, 0)
    fig, ax = qutip.plot_fock_distribution(rho, **args)
    plt.close()
    assert isinstance(fig, mpl.figure.Figure)
    assert isinstance(ax, mpl.axes.Axes)
def getCharactersForUser(lookfor, eager=None):
    if not isinstance(lookfor, int):
        raise TypeError('Need integer as argument')
    eager = processEager(eager)
    with sd_lock:
        characters = (saveddata_session.query(Character)
                      .options(*eager)
                      .filter(Character.ownerID == lookfor)
                      .all())
    return characters
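# Usage sketch (assumes a populated saveddata session; the integer is an
# illustrative owner ID).
characters = getCharactersForUser(1)
# Non-integer lookups fail fast:
# getCharactersForUser('Alice')  -> TypeError('Need integer as argument')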
class RootComponentFinder:
    def run(self):
        paths = self.get_paths()
        templates = self.get_templates(paths)
        components = self.get_components(templates)
        self.register_components(components)

    def get_loaders(self):
        template_source_loaders = []
        for e in engines.all():
            if hasattr(e, 'engine'):
                template_source_loaders.extend(
                    e.engine.get_template_loaders(e.engine.loaders))
        loaders = []
        for loader in template_source_loaders:
            if hasattr(loader, 'loaders'):
                loaders.extend(loader.loaders)
            else:
                loaders.append(loader)
        return loaders

    def get_paths(self) -> set[str]:
        paths: set[str] = set()
        for loader in self.get_loaders():
            with contextlib.suppress(ImportError, AttributeError, TypeError):
                module = import_module(loader.__module__)
                get_template_sources = getattr(module, 'get_template_sources', None)
                if get_template_sources is None:
                    get_template_sources = loader.get_template_sources
                paths.update(smart_str(origin) for origin in get_template_sources(''))
        return paths

    def get_templates(self, paths: set[str]) -> set[str]:
        extensions = ['.html']
        templates: set[str] = set()
        for path in paths:
            for root, _, files in os.walk(path, followlinks=False):
                templates.update(
                    os.path.join(root, name)
                    for name in files
                    if not name.startswith('.')
                    and any(fnmatch(name, f'*{glob}') for glob in extensions))
        return templates

    def get_components(self, templates: set[str]) -> set[str]:
        components: set[str] = set()
        for template in templates:
            with contextlib.suppress(Exception):
                with open(template, 'r', encoding='utf-8') as template_file:
                    clean_template = COMMENT_REGEX.sub('', template_file.read())
                    regex_iterable = COMPONENT_REGEX.finditer(clean_template)
                    component_paths = [
                        match.group('path').replace('"', '').replace("'", '')
                        for match in regex_iterable]
                    components.update(component_paths)
        if not components:
            _logger.warning(
                '\x1b[93mReactPy did not find any components! You are either not '
                'using any ReactPy components, using the template tag incorrectly, '
                'or your HTML templates are not registered with Django.\x1b[0m')
        return components

    def register_components(self, components: set[str]) -> None:
        if components:
            _logger.debug('Auto-detected ReactPy root components:')
        for component in components:
            try:
                _logger.debug('\t+ %s', component)
                register_component(component)
            except Exception:
                _logger.exception(
                    "\x1b[91mReactPy failed to register component '%s'!\n"
                    'This component path may not be valid, or an exception may have '
                    'occurred while importing.\n'
                    'See the traceback below for more information.\x1b[0m',
                    component)
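# A minimal invocation sketch: scan every registered Django template
# directory and register any ReactPy root components referenced in them.
finder = RootComponentFinder()
finder.run()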
def test_set_progress_bar_enabled():
    TINY_MODEL = 'hf-internal-testing/tiny-random-distilbert'
    with patch('tqdm.auto.tqdm') as mock_tqdm:
        disable_progress_bar()
        _ = AutoConfig.from_pretrained(TINY_MODEL, force_download=True)
        mock_tqdm.assert_not_called()
        mock_tqdm.reset_mock()

        enable_progress_bar()
        _ = AutoConfig.from_pretrained(TINY_MODEL, force_download=True)
        mock_tqdm.assert_called()