code
stringlengths
281
23.7M
class OptionPlotoptionsDumbbellSonificationTracksMappingHighpassResonance(Options):
    """Option wrapper for `plotOptions.dumbbell.sonification.tracks.mapping.highpass.resonance`.

    NOTE(review): every accessor name below is defined twice — a getter form
    followed by a setter form. The `@property` / `@<name>.setter` decorators
    appear to have been stripped in this copy, so as written the later
    (setter) definition shadows the getter; confirm against the original
    generated source.
    """

    def mapFunction(self):
        # Getter: no library default recorded for this option.
        return self._config_get(None)

    def mapFunction(self, value: Any):
        # Setter: stored as a plain (non-JavaScript) config value.
        self._config(value, js_type=False)

    def mapTo(self):
        return self._config_get(None)

    def mapTo(self, text: str):
        self._config(text, js_type=False)

    def max(self):
        return self._config_get(None)

    def max(self, num: float):
        self._config(num, js_type=False)

    def min(self):
        return self._config_get(None)

    def min(self, num: float):
        self._config(num, js_type=False)

    def within(self):
        return self._config_get(None)

    def within(self, value: Any):
        self._config(value, js_type=False)
# NOTE(review): the leading `.usefixtures(...)` is a decorator whose
# `@pytest.mark` prefix appears truncated in this copy — confirm upstream.
.usefixtures('prepare_client_test')
class ClientTestBase():
    """Shared helpers for FLSim client tests: synthetic data and client factories."""

    def _fake_data(self, num_batches=None, batch_size=None):
        # Deterministic (seeded) dataset of `num_batches` random batches of
        # shape (batch_size, 2); defaults come from the fixture-set attributes.
        num_batches = (self.num_batches if (num_batches is None) else num_batches)
        batch_size = (batch_size or self.batch_size)
        torch.manual_seed(0)
        dataset = [torch.rand(batch_size, 2) for _ in range(num_batches)]
        dataset = utils.DatasetFromList(dataset)
        return utils.DummyUserData(dataset, utils.SampleNet(utils.TwoFC()))

    def _get_client(self, data=None, store_models_and_optimizers: bool=False, timeout_simulator=None):
        # Plain Client with a constant learning-rate schedule.
        data = (data or self._fake_data())
        config = ClientConfig(store_models_and_optimizers=store_models_and_optimizers, lr_scheduler=ConstantLRSchedulerConfig())
        return Client(**OmegaConf.structured(config), dataset=data, timeout_simulator=timeout_simulator)

    def _get_dp_client(self, data=None, noise_multiplier: int=1, clipping_value: int=1, store_models_and_optimizers: bool=False):
        # Differentially-private client with the given noise/clipping settings.
        privacy_setting = PrivacySetting(noise_multiplier=noise_multiplier, clipping=ClippingSetting(clipping_value=clipping_value))
        config = DPClientConfig(store_models_and_optimizers=store_models_and_optimizers, privacy_setting=privacy_setting)
        return DPClient(**OmegaConf.structured(config), dataset=(data or self._fake_data()))

    def _get_mime_client(self, data=None, store_models_and_optimizers: bool=False, timeout_simulator=None):
        # MIME-variant client; mirrors _get_client.
        data = (data or self._fake_data())
        config = MimeClientConfig(store_models_and_optimizers=store_models_and_optimizers, lr_scheduler=ConstantLRSchedulerConfig())
        return MimeClient(**OmegaConf.structured(config), dataset=data, timeout_simulator=timeout_simulator)

    def _get_mimelite_client(self, data=None, store_models_and_optimizers: bool=False, timeout_simulator=None):
        # MimeLite-variant client; mirrors _get_client.
        data = (data or self._fake_data())
        config = MimeLiteClientConfig(store_models_and_optimizers=store_models_and_optimizers, lr_scheduler=ConstantLRSchedulerConfig())
        return MimeLiteClient(**OmegaConf.structured(config), dataset=data, timeout_simulator=timeout_simulator)

    def _train(self, data: IFLUserData, model, optim) -> None:
        # One pass over the training data: standard zero_grad/forward/backward/step.
        model.fl_get_module().train()
        for batch in data.train_data():
            optim.zero_grad()
            _batch = model.fl_create_training_batch(batch)
            loss = model.fl_forward(_batch).loss
            loss.backward()
            optim.step()

    def _run_client_eval_test(self, client) -> None:
        # Verifies client.eval() puts the model into eval mode while metrics
        # are computed and restores training mode afterwards.
        class Net(utils.SampleNet):
            def get_eval_metrics(self, batch):
                assert (self.sample_nn.training is False), 'Client should call eval after setting model.eval()'
                return self.sample_nn(batch)
        input_dim = 2
        model = Net(nn.Linear(input_dim, 1))
        model.fl_get_module().train()
        client.eval(model=model)
        assert model.fl_get_module().training

    def _test_reload_server_state(self, client):
        # Round-trips an Adam optimizer state through client.server_opt_state
        # and checks _reload_server_state restores it; also checks that an
        # empty saved state yields an empty optimizer state.
        model = utils.SampleNet(utils.TwoFC())
        base_optim = torch.optim.Adam(model.fl_get_module().parameters())
        for batch in client.dataset.train_data():
            base_optim.zero_grad()
            model.fl_forward(batch).loss.backward()
            base_optim.step()
        client.server_opt_state = base_optim.state_dict()['state']
        optim = torch.optim.Adam(model.fl_get_module().parameters())
        client._reload_server_state(optim)
        error_msg = utils.verify_optimizer_state_dict_equal(optim.state_dict()['state'], base_optim.state_dict()['state'])
        assertEmpty(error_msg)
        client.server_opt_state = {}
        optim = torch.optim.Adam(model.fl_get_module().parameters())
        client._reload_server_state(optim)
        assertEqual(optim.state_dict()['state'], {})
def patch_module(*names: str, new_callable: Any=Mock) -> Iterator:
    """Temporarily replace entries in ``sys.modules`` with auto-mocking modules.

    Yields the list of installed stand-in modules (one per name, in order).
    Accessing any attribute on a stand-in lazily creates it via
    ``new_callable`` and caches it. On exit, every pre-existing module is
    restored and every entry that did not exist before is removed.
    """

    class _AutoMockModule(types.ModuleType):
        # First access creates the attribute via new_callable, then the
        # cached attribute is returned on every subsequent access.
        def __getattr__(self, attr: str) -> Any:
            setattr(self, attr, new_callable())
            return types.ModuleType.__getattribute__(self, attr)

    saved = {}
    installed = []
    for name in names:
        # Save whatever is currently registered (if anything) before
        # overwriting; mirrors the per-name save order of the original.
        if name in sys.modules:
            saved[name] = sys.modules[name]
        stub = _AutoMockModule(name)
        sys.modules[name] = stub
        installed.append(stub)
    try:
        yield installed
    finally:
        for name in names:
            if name in saved:
                sys.modules[name] = saved[name]
            else:
                sys.modules.pop(name, None)
def extractWalkTheJiangHu(item):
    """Parse a 'Walk The Jiang Hu' feed item into a release message.

    Returns None for previews or titles with no chapter/volume info, a
    release message for recognised 'TTNH Chapter' titles, and False when
    the title is parseable but not recognised.
    """
    title = item['title']
    (vol, chp, frag, postfix) = extractVolChapterFragmentPostfix(title)
    # Guard clause: nothing to release, or an explicit preview post.
    if not (chp or vol) or 'preview' in title.lower():
        return None
    if 'TTNH Chapter' in title:
        return buildReleaseMessageWithType(item, 'Transcending the Nine Heavens', vol, chp, frag=frag, postfix=postfix)
    return False
class JailsReaderTestCache(LogCaptureTestCase):
    """Verifies jail/filter/action config files are read only once (caching)."""

    def _readWholeConf(self, basedir, force_enable=False, share_config=None):
        # Full early+late configuration read against `basedir`; asserts the
        # final option pass succeeds.
        configurator = Configurator(force_enable=force_enable, share_config=share_config)
        configurator.setBaseDir(basedir)
        configurator.readEarly()
        configurator.getEarlyOptions()
        configurator.readAll()
        self.assertTrue(configurator.getOptions(None))

    def _getLoggedReadCount(self, filematch):
        # Counts "Reading file(s): .../<filematch>" lines in the captured log.
        # (`filematch` is a regex fragment; `rsplit` is equivalent to `split`
        # here since every line is visited.)
        cnt = 0
        for s in self.getLog().rsplit('\n'):
            if re.match(('^\\s*Reading files?: .*/' + filematch), s):
                cnt += 1
        return cnt

    # NOTE(review): the bare `_tmpdir` below looks like the remnant of a
    # stripped decorator (e.g. `@with_tmpdir`) that supplies the `basedir`
    # argument — confirm against the original source; as written this bare
    # name would raise NameError when the class body executes.
    _tmpdir
    def testTestJailConfCache(self, basedir):
        # Copies the shipped config into a temp dir and checks that each
        # config file is logged as read exactly once even across a second,
        # force-enabled read that shares the config cache.
        unittest.F2B.SkipIfFast()
        saved_ll = configparserinc.logLevel
        # DEBUG level is required so the "Reading file" lines are emitted.
        configparserinc.logLevel = logging.DEBUG
        try:
            shutil.rmtree(basedir)
            shutil.copytree(CONFIG_DIR, basedir)
            shutil.copy((CONFIG_DIR + '/jail.conf'), (basedir + '/jail.local'))
            shutil.copy((CONFIG_DIR + '/fail2ban.conf'), (basedir + '/fail2ban.local'))
            share_cfg = dict()
            self._readWholeConf(basedir, share_config=share_cfg)
            # NOTE(review): this first pattern uses an unescaped '.' unlike
            # the escaped 'jail\\.local' below — harmless ('.' matches '.').
            cnt = self._getLoggedReadCount('jail.local')
            self.assertTrue((cnt == 1), ('Unexpected count by reading of jail files, cnt = %s' % cnt))
            self._readWholeConf(basedir, force_enable=True, share_config=share_cfg)
            cnt = self._getLoggedReadCount('jail\\.local')
            self.assertTrue((cnt == 1), ('Unexpected count by second reading of jail files, cnt = %s' % cnt))
            cnt = self._getLoggedReadCount('filter\\.d/common\\.conf')
            self.assertTrue((cnt == 1), ('Unexpected count by reading of filter files, cnt = %s' % cnt))
            cnt = self._getLoggedReadCount('action\\.d/iptables\\.conf')
            self.assertTrue((cnt == 1), ('Unexpected count by reading of action files, cnt = %s' % cnt))
        finally:
            configparserinc.logLevel = saved_ll
class StreamBuffer():
    """Character stream over a string with index/line/column tracking.

    NOTE(review): `index`, `line` and `column` read like accessors whose
    `@property` decorators may have been stripped in this copy — confirm.
    """

    def __init__(self, stream: str):
        # _CHARS_END (module-level sentinel) terminates the buffer so that
        # peek() just past the logical end stays within range.
        self._buffer = (stream + _CHARS_END)
        self._index = 0
        self._line = 0
        self._column = 0

    def index(self) -> int:
        # Absolute character offset from the start of the stream.
        return self._index

    def line(self) -> int:
        # Zero-based line number of the current position.
        return self._line

    def column(self) -> int:
        # Zero-based column within the current line.
        return self._column

    def peek(self, index: int=0) -> str:
        # Character at current position + `index`, without consuming.
        return self._buffer[(self._index + index)]

    def prefix(self, length: int=1) -> str:
        # Next `length` characters, without consuming.
        return self._buffer[self._index:(self._index + length)]

    def forward(self, length: int=1) -> None:
        # Consume `length` characters while updating line/column.
        # A line break is \n, NEL, LS, PS, or a \r NOT followed by \n
        # (so a \r\n pair counts as a single break); a BOM (\ufeff)
        # advances the index but affects neither line nor column.
        while length:
            ch = self._buffer[self._index]
            self._index += 1
            if ((ch in '\n\x85\u2028\u2029') or ((ch == '\r') and (self._buffer[self._index] != '\n'))):
                self._line += 1
                self._column = 0
            elif (ch != '\ufeff'):
                self._column += 1
            length -= 1

    def get_position(self) -> Position:
        # Snapshot of the current (index, line, column).
        return Position(self._index, self._line, self._column)
class SingletonModuleFactory(ModuleFactory):
    """Module factory that reuses an existing module of the same class and
    name under the target instead of always adding a new one."""

    # Parent object the module is (or would be) attached to.
    _parent = Any

    def __init__(self, *args, **kwargs):
        HasTraits.__init__(self)
        self._scene = gcf()
        # Resolve the engine: either the current one, or the engine owning an
        # explicitly passed `figure` (which then becomes the current scene).
        if (not ('figure' in kwargs)):
            self._engine = get_engine()
        else:
            figure = kwargs['figure']
            self._engine = engine_manager.find_figure_engine(figure)
            self._engine.current_scene = figure
            kwargs.pop('figure')
        # Suspend rendering while the pipeline is being modified.
        if (self._scene.scene is not None):
            self._scene.scene.disable_render = True
        # Accept at most one positional argument: the parent object.
        if (len(args) == 1):
            (parent,) = args
        elif (len(args) == 0):
            parent = self._engine.current_object
        else:
            raise ValueError('Wrong number of arguments')
        if (parent is None):
            target = self._scene
        else:
            target = parent
        # Singleton behaviour: reuse an existing module of the same class and
        # name under the target; only the for/else fallback adds a new one.
        klass = self._target.__class__
        for obj in tools._traverse(target):
            if (isinstance(obj, klass) and (obj.name == self.name)):
                self._target = obj
                break
        else:
            self._parent = parent
            self._engine.add_module(self._target, obj=parent)
        self.trait_set(**kwargs)
        # Re-enable rendering now that the pipeline is consistent.
        if (self._scene.scene is not None):
            self._scene.scene.disable_render = False
def get_images_eval(html, url):
    # Extracts the page-image URL list by evaluating inline script data
    # scraped from the comic page.
    # NOTE(review): the next line is corrupted in this copy — the `base`
    # string literal is unterminated and has swallowed the start of the
    # following statement (probably `base = '<image-host URL>'` followed by
    # `html = html.replace('\n', '')`). Restore from the original source.
    base = ' html = html.replace('\n', '')
    # Capture the script fragment between `page = '';` and `var g_comic_name`.
    s = re.search("page = '';\\s*(.+?);\\s*var g_comic_name", html).group(1)
    # SECURITY NOTE(review): eval() of content fetched from a remote page —
    # only acceptable for a fully trusted source; consider ast.literal_eval.
    # NOTE(review): the builtin eval() cannot execute statements, so
    # `s + '; pages'` would raise SyntaxError if `s` contains assignments —
    # confirm whether `eval` here is the builtin or a project helper.
    pages = eval((s + '; pages'))
    pages = eval(pages)
    # Skip empty entries and stray Windows thumbnail files.
    return [(base + page) for page in pages if (page and (not page.lower().endswith('thumbs.db')))]
class TestAccuracyScore(SimpleClassificationTestTopK):
    """Checks the dataset accuracy score against the configured condition."""

    name = 'Accuracy Score'

    def get_value(self, result: DatasetClassificationQuality):
        # The metric under test is taken directly from the computed quality.
        return result.accuracy

    def get_description(self, value: Numeric) -> str:
        condition = self.get_condition()
        return f'The Accuracy Score is {value:.3g}. The test threshold is {condition}'
class OptionSeriesHistogramPointEvents(Options):
    """Option wrapper for `series.histogram.point.events` callbacks.

    NOTE(review): every event name below is defined twice — a getter form
    followed by a setter form. The `@property` / `@<name>.setter` decorators
    appear to have been stripped in this copy, so as written the later
    (setter) definition shadows the getter; confirm against the original
    generated source.
    """

    def click(self):
        # Getter: no library default recorded for this event handler.
        return self._config_get(None)

    def click(self, value: Any):
        # Setter: stored as a plain (non-JavaScript) config value.
        self._config(value, js_type=False)

    def drag(self):
        return self._config_get(None)

    def drag(self, value: Any):
        self._config(value, js_type=False)

    def dragStart(self):
        return self._config_get(None)

    def dragStart(self, value: Any):
        self._config(value, js_type=False)

    def drop(self):
        return self._config_get(None)

    def drop(self, value: Any):
        self._config(value, js_type=False)

    def mouseOut(self):
        return self._config_get(None)

    def mouseOut(self, value: Any):
        self._config(value, js_type=False)

    def mouseOver(self):
        return self._config_get(None)

    def mouseOver(self, value: Any):
        self._config(value, js_type=False)

    def remove(self):
        return self._config_get(None)

    def remove(self, value: Any):
        self._config(value, js_type=False)

    def select(self):
        return self._config_get(None)

    def select(self, value: Any):
        self._config(value, js_type=False)

    def unselect(self):
        return self._config_get(None)

    def unselect(self, value: Any):
        self._config(value, js_type=False)

    def update(self):
        return self._config_get(None)

    def update(self, value: Any):
        self._config(value, js_type=False)
def test_recover_from_signature_obj(key_api, private_key):
    """Public-key recovery from a signature must match the signer's key."""
    sig = key_api.ecdsa_sign(MSGHASH, private_key)
    recovered = sig.recover_public_key_from_msg_hash(MSGHASH)
    # Hash-based and message-based recovery must agree with each other
    # and with the actual signing key.
    assert recovered == sig.recover_public_key_from_msg(MSG)
    assert recovered == private_key.public_key
class TestFIPA(UseOef):
    """End-to-end FIPA protocol tests over two OEF connections: each test
    sends a performative through multiplexer1 and checks it round-trips
    unchanged through multiplexer2's serializer."""

    def setup_class(cls):
        # Two agents connected to the same local OEF node (pytest calls
        # setup_class with the class even without @classmethod).
        cls.connection1 = _make_oef_connection(FETCHAI_ADDRESS_ONE, DUMMY_PUBLIC_KEY, oef_addr='127.0.0.1', oef_port=10000)
        cls.connection2 = _make_oef_connection(FETCHAI_ADDRESS_TWO, DUMMY_PUBLIC_KEY, oef_addr='127.0.0.1', oef_port=10000)
        cls.multiplexer1 = Multiplexer([cls.connection1], protocols=[FipaMessage, DefaultMessage])
        cls.multiplexer2 = Multiplexer([cls.connection2], protocols=[FipaMessage, DefaultMessage])
        cls.multiplexer1.connect()
        cls.multiplexer2.connect()

    def test_cfp(self):
        # Call-for-proposal round-trips with a query, twice.
        cfp_message = FipaMessage(message_id=1, dialogue_reference=(str(0), ''), target=0, performative=FipaMessage.Performative.CFP, query=Query([Constraint('something', ConstraintType('>', 1))]))
        cfp_message.to = FETCHAI_ADDRESS_TWO
        cfp_message.sender = FETCHAI_ADDRESS_ONE
        self.multiplexer1.put(Envelope(to=cfp_message.to, sender=cfp_message.sender, message=cfp_message))
        envelope = self.multiplexer2.get(block=True, timeout=5.0)
        expected_cfp_message = FipaMessage.serializer.decode(envelope.message)
        # to/sender are envelope-level, not serialized — copy before comparing.
        expected_cfp_message.to = cfp_message.to
        expected_cfp_message.sender = cfp_message.sender
        assert (expected_cfp_message == cfp_message)
        cfp_none = FipaMessage(message_id=1, dialogue_reference=(str(0), ''), target=0, performative=FipaMessage.Performative.CFP, query=Query([Constraint('something', ConstraintType('>', 1))]))
        cfp_none.to = FETCHAI_ADDRESS_TWO
        cfp_none.sender = FETCHAI_ADDRESS_ONE
        self.multiplexer1.put(Envelope(to=cfp_none.to, sender=cfp_none.sender, message=cfp_none))
        envelope = self.multiplexer2.get(block=True, timeout=5.0)
        expected_cfp_none = FipaMessage.serializer.decode(envelope.message)
        expected_cfp_none.to = cfp_none.to
        expected_cfp_none.sender = cfp_none.sender
        assert (expected_cfp_none == cfp_none)

    def test_propose(self):
        # PROPOSE round-trips both without and with a data model.
        propose_empty = FipaMessage(message_id=1, dialogue_reference=(str(0), ''), target=0, performative=FipaMessage.Performative.PROPOSE, proposal=Description({'foo': 'bar'}))
        propose_empty.to = FETCHAI_ADDRESS_TWO
        propose_empty.sender = FETCHAI_ADDRESS_ONE
        self.multiplexer1.put(Envelope(to=propose_empty.to, sender=propose_empty.sender, message=propose_empty))
        envelope = self.multiplexer2.get(block=True, timeout=2.0)
        expected_propose_empty = FipaMessage.serializer.decode(envelope.message)
        expected_propose_empty.to = propose_empty.to
        expected_propose_empty.sender = propose_empty.sender
        assert (expected_propose_empty == propose_empty)
        propose_descriptions = FipaMessage(message_id=1, dialogue_reference=(str(0), ''), target=0, performative=FipaMessage.Performative.PROPOSE, proposal=Description({'foo': 'bar'}, DataModel('foobar', [Attribute('foo', str, True)])))
        propose_descriptions.to = FETCHAI_ADDRESS_TWO
        propose_descriptions.sender = FETCHAI_ADDRESS_ONE
        self.multiplexer1.put(Envelope(to=propose_descriptions.to, sender=propose_descriptions.sender, message=propose_descriptions))
        envelope = self.multiplexer2.get(block=True, timeout=2.0)
        expected_propose_descriptions = FipaMessage.serializer.decode(envelope.message)
        expected_propose_descriptions.to = propose_descriptions.to
        expected_propose_descriptions.sender = propose_descriptions.sender
        assert (expected_propose_descriptions == propose_descriptions)

    def test_accept(self):
        # ACCEPT (no payload) round-trips.
        accept = FipaMessage(message_id=1, dialogue_reference=(str(0), ''), target=0, performative=FipaMessage.Performative.ACCEPT)
        accept.to = FETCHAI_ADDRESS_TWO
        accept.sender = FETCHAI_ADDRESS_ONE
        self.multiplexer1.put(Envelope(to=accept.to, sender=accept.sender, message=accept))
        envelope = self.multiplexer2.get(block=True, timeout=2.0)
        expected_accept = FipaMessage.serializer.decode(envelope.message)
        expected_accept.to = accept.to
        expected_accept.sender = accept.sender
        assert (expected_accept == accept)

    def test_match_accept(self):
        # MATCH_ACCEPT with non-zero message_id/target round-trips.
        match_accept = FipaMessage(message_id=4, dialogue_reference=(str(0), ''), target=3, performative=FipaMessage.Performative.MATCH_ACCEPT)
        match_accept.to = FETCHAI_ADDRESS_TWO
        match_accept.sender = FETCHAI_ADDRESS_ONE
        self.multiplexer1.put(Envelope(to=match_accept.to, sender=match_accept.sender, message=match_accept))
        envelope = self.multiplexer2.get(block=True, timeout=2.0)
        expected_match_accept = FipaMessage.serializer.decode(envelope.message)
        expected_match_accept.to = match_accept.to
        expected_match_accept.sender = match_accept.sender
        assert (expected_match_accept == match_accept)

    def test_decline(self):
        # DECLINE round-trips.
        decline = FipaMessage(message_id=1, dialogue_reference=(str(0), ''), target=0, performative=FipaMessage.Performative.DECLINE)
        decline.to = FETCHAI_ADDRESS_TWO
        decline.sender = FETCHAI_ADDRESS_ONE
        self.multiplexer1.put(Envelope(to=decline.to, sender=decline.sender, message=decline))
        envelope = self.multiplexer2.get(block=True, timeout=2.0)
        expected_decline = FipaMessage.serializer.decode(envelope.message)
        expected_decline.to = decline.to
        expected_decline.sender = decline.sender
        assert (expected_decline == decline)

    def test_match_accept_w_inform(self):
        # MATCH_ACCEPT_W_INFORM carries an info payload and round-trips.
        match_accept_w_inform = FipaMessage(message_id=1, dialogue_reference=(str(0), ''), target=0, performative=FipaMessage.Performative.MATCH_ACCEPT_W_INFORM, info={'address': 'my_address'})
        match_accept_w_inform.to = FETCHAI_ADDRESS_TWO
        match_accept_w_inform.sender = FETCHAI_ADDRESS_ONE
        self.multiplexer1.put(Envelope(to=match_accept_w_inform.to, sender=match_accept_w_inform.sender, message=match_accept_w_inform))
        envelope = self.multiplexer2.get(block=True, timeout=2.0)
        returned_match_accept_w_inform = FipaMessage.serializer.decode(envelope.message)
        returned_match_accept_w_inform.to = match_accept_w_inform.to
        returned_match_accept_w_inform.sender = match_accept_w_inform.sender
        assert (returned_match_accept_w_inform == match_accept_w_inform)

    def test_accept_w_inform(self):
        # ACCEPT_W_INFORM carries an info payload and round-trips.
        accept_w_inform = FipaMessage(message_id=1, dialogue_reference=(str(0), ''), target=0, performative=FipaMessage.Performative.ACCEPT_W_INFORM, info={'address': 'my_address'})
        accept_w_inform.to = FETCHAI_ADDRESS_TWO
        accept_w_inform.sender = FETCHAI_ADDRESS_ONE
        self.multiplexer1.put(Envelope(to=accept_w_inform.to, sender=accept_w_inform.sender, message=accept_w_inform))
        envelope = self.multiplexer2.get(block=True, timeout=2.0)
        returned_accept_w_inform = FipaMessage.serializer.decode(envelope.message)
        returned_accept_w_inform.to = accept_w_inform.to
        returned_accept_w_inform.sender = accept_w_inform.sender
        assert (returned_accept_w_inform == accept_w_inform)

    def test_inform(self):
        # INFORM with an arbitrary dict payload round-trips.
        payload = {'foo': 'bar'}
        inform = FipaMessage(message_id=1, dialogue_reference=(str(0), ''), target=0, performative=FipaMessage.Performative.INFORM, info=payload)
        inform.to = FETCHAI_ADDRESS_TWO
        inform.sender = FETCHAI_ADDRESS_ONE
        self.multiplexer1.put(Envelope(to=inform.to, sender=inform.sender, message=inform))
        envelope = self.multiplexer2.get(block=True, timeout=2.0)
        returned_inform = FipaMessage.serializer.decode(envelope.message)
        returned_inform.to = inform.to
        returned_inform.sender = inform.sender
        assert (returned_inform == inform)

    def test_serialisation_fipa(self):
        # An unknown performative must raise ValueError on both encode and
        # decode of hand-crafted protobuf bytes.
        def _encode_fipa_cfp(msg: FipaMessage) -> bytes:
            # Manually serialize a CFP into the protobuf envelope, bypassing
            # the serializer's performative validation.
            message_pb = ProtobufMessage()
            dialogue_message_pb = DialogueMessage()
            fipa_msg = fipa_pb2.FipaMessage()
            dialogue_message_pb.message_id = msg.message_id
            dialogue_reference = msg.dialogue_reference
            dialogue_message_pb.dialogue_starter_reference = dialogue_reference[0]
            dialogue_message_pb.dialogue_responder_reference = dialogue_reference[1]
            dialogue_message_pb.target = msg.target
            performative = fipa_pb2.FipaMessage.Cfp_Performative()
            fipa_msg.cfp.CopyFrom(performative)
            dialogue_message_pb.content = fipa_msg.SerializeToString()
            message_pb.dialogue_message.CopyFrom(dialogue_message_pb)
            fipa_bytes = message_pb.SerializeToString()
            return fipa_bytes
        with pytest.raises(ValueError):
            msg = FipaMessage(performative=FipaMessage.Performative.CFP, message_id=1, dialogue_reference=(str(0), ''), target=0, query=Query([Constraint('something', ConstraintType('>', 1))]))
            with mock.patch.object(FipaMessage, 'Performative') as mock_performative_enum:
                mock_performative_enum.CFP.value = 'unknown'
                (FipaMessage.serializer.encode(msg), 'Raises Value Error')
        with pytest.raises(ValueError):
            cfp_msg = FipaMessage(message_id=1, dialogue_reference=(str(0), ''), target=0, performative=FipaMessage.Performative.CFP, query=Query([Constraint('something', ConstraintType('>', 1))]))
            with mock.patch.object(FipaMessage, 'Performative') as mock_performative_enum:
                mock_performative_enum.CFP.value = 'unknown'
                fipa_bytes = _encode_fipa_cfp(cfp_msg)
                FipaMessage.serializer.decode(fipa_bytes)

    def test_on_oef_error(self):
        # An OEF error for a registered search dialogue is turned into an
        # OEF_ERROR OefSearchMessage delivered back through the multiplexer.
        oef_connection = self.multiplexer1.connections[0]
        oef_channel = oef_connection.channel
        oef_channel.oef_msg_id += 1
        dialogue_reference = ('1', '')
        query = Query(constraints=[Constraint('foo', ConstraintType('==', 'bar'))], model=None)
        dialogues = oef_channel.oef_search_dialogues
        oef_search_msg = OefSearchMessage(performative=OefSearchMessage.Performative.SEARCH_SERVICES, dialogue_reference=dialogue_reference, query=query)
        oef_search_msg.to = str(oef_connection.connection_id)
        oef_search_msg.sender = SOME_SKILL_ID
        dialogue = dialogues.update(oef_search_msg)
        assert (dialogue is not None)
        oef_channel.oef_msg_id_to_dialogue[oef_channel.oef_msg_id] = dialogue
        oef_channel.on_oef_error(answer_id=oef_channel.oef_msg_id, operation=OEFErrorOperation.SEARCH_SERVICES)
        envelope = self.multiplexer1.get(block=True, timeout=5.0)
        dec_msg = envelope.message
        assert (dec_msg.dialogue_reference[0] == dialogue_reference[0])
        assert (dec_msg.performative is OefSearchMessage.Performative.OEF_ERROR), 'It should be an error message'

    def test_send(self):
        # A raw envelope addressed to the connection itself comes back.
        envelope = Envelope(to=str(self.connection1.connection_id), sender=SOME_SKILL_ID, protocol_specification_id=DefaultMessage.protocol_specification_id, message=b'Hello')
        self.multiplexer1.put(envelope)
        received_envelope = self.multiplexer1.get(block=True, timeout=5.0)
        assert (received_envelope is not None)

    def teardown_class(cls):
        # Tear down both multiplexers opened in setup_class.
        cls.multiplexer1.disconnect()
        cls.multiplexer2.disconnect()
class ScaleGeneratorTest(unittest.TestCase):
    """Tests for the project `Scale` class (scale-generator exercise).

    NOTE(review): the interval letters passed to `Scale.interval` appear to
    denote half ('m'), whole ('M') and augmented-second ('A') steps —
    confirm against the Scale implementation.
    """

    # Chromatic scales: sharp spelling for C, flat spelling for F.
    def test_chromatic_scale_with_sharps(self):
        expected = ['C', 'C#', 'D', 'D#', 'E', 'F', 'F#', 'G', 'G#', 'A', 'A#', 'B']
        self.assertEqual(Scale('C').chromatic(), expected)

    def test_chromatic_scale_with_flats(self):
        expected = ['F', 'Gb', 'G', 'Ab', 'A', 'Bb', 'B', 'C', 'Db', 'D', 'Eb', 'E']
        self.assertEqual(Scale('F').chromatic(), expected)

    # Major scales (interval pattern MMmMMMm).
    def test_simple_major_scale(self):
        expected = ['C', 'D', 'E', 'F', 'G', 'A', 'B', 'C']
        self.assertEqual(Scale('C').interval('MMmMMMm'), expected)

    def test_major_scale_with_sharps(self):
        expected = ['G', 'A', 'B', 'C', 'D', 'E', 'F#', 'G']
        self.assertEqual(Scale('G').interval('MMmMMMm'), expected)

    def test_major_scale_with_flats(self):
        expected = ['F', 'G', 'A', 'Bb', 'C', 'D', 'E', 'F']
        self.assertEqual(Scale('F').interval('MMmMMMm'), expected)

    # Minor scales (lower-case tonic selects minor spelling).
    def test_minor_scale_with_sharps(self):
        expected = ['F#', 'G#', 'A', 'B', 'C#', 'D', 'E', 'F#']
        self.assertEqual(Scale('f#').interval('MmMMmMM'), expected)

    def test_minor_scale_with_flats(self):
        expected = ['Bb', 'C', 'Db', 'Eb', 'F', 'Gb', 'Ab', 'Bb']
        self.assertEqual(Scale('bb').interval('MmMMmMM'), expected)

    # Modes.
    def test_dorian_mode(self):
        expected = ['D', 'E', 'F', 'G', 'A', 'B', 'C', 'D']
        self.assertEqual(Scale('d').interval('MmMMMmM'), expected)

    def test_mixolydian_mode(self):
        expected = ['Eb', 'F', 'G', 'Ab', 'Bb', 'C', 'Db', 'Eb']
        self.assertEqual(Scale('Eb').interval('MMmMMmM'), expected)

    def test_lydian_mode(self):
        expected = ['A', 'B', 'C#', 'D#', 'E', 'F#', 'G#', 'A']
        self.assertEqual(Scale('a').interval('MMMmMMm'), expected)

    def test_phrygian_mode(self):
        expected = ['E', 'F', 'G', 'A', 'B', 'C', 'D', 'E']
        self.assertEqual(Scale('e').interval('mMMMmMM'), expected)

    def test_locrian_mode(self):
        expected = ['G', 'Ab', 'Bb', 'C', 'Db', 'Eb', 'F', 'G']
        self.assertEqual(Scale('g').interval('mMMmMMM'), expected)

    # Other interval patterns, including the augmented-second ('A') step.
    def test_harmonic_minor(self):
        expected = ['D', 'E', 'F', 'G', 'A', 'Bb', 'Db', 'D']
        self.assertEqual(Scale('d').interval('MmMMmAm'), expected)

    def test_octatonic(self):
        expected = ['C', 'D', 'D#', 'F', 'F#', 'G#', 'A', 'B', 'C']
        self.assertEqual(Scale('C').interval('MmMmMmMm'), expected)

    def test_hexatonic(self):
        expected = ['Db', 'Eb', 'F', 'G', 'A', 'B', 'Db']
        self.assertEqual(Scale('Db').interval('MMMMMM'), expected)

    def test_pentatonic(self):
        expected = ['A', 'B', 'C#', 'E', 'F#', 'A']
        self.assertEqual(Scale('A').interval('MMAMA'), expected)

    def test_enigmatic(self):
        expected = ['G', 'G#', 'B', 'C#', 'D#', 'F', 'F#', 'G']
        self.assertEqual(Scale('G').interval('mAMMMmm'), expected)
def test_describe_dict():
    """describe_dict renders wrapped key/value lines and honours exclusions."""
    data = OrderedDict()
    data['desc'] = 'long description'
    data['list'] = list(range(10))
    data['empty'] = None
    # Full output: every key rendered, values wrapped at width=20.
    expected_full = ' desc : long\n description\n list : 0 1 2 3 4 5\n 6 7 8 9\n empty : None\n\n'
    out = StringIO()
    describe_dict(out, data, width=20, indent=' ', exclude=parseoptions(''))
    assert str(out.getvalue()) == expected_full
    # With the 'e' exclusion option, the None-valued 'empty' entry is omitted.
    expected_no_empty = ' desc : long\n description\n list : 0 1 2 3 4 5\n 6 7 8 9\n\n'
    out = StringIO()
    describe_dict(out, data, width=20, indent=' ', exclude=parseoptions('e'))
    assert str(out.getvalue()) == expected_no_empty
class TestCustomCorsMiddleware():
    """Tests for falcon.CORSMiddleware origin/credential/expose-header logic.

    NOTE(review): the bare `.parametrize(...)` decorators below appear to
    have lost their `@pytest.mark` prefix in this copy — confirm upstream.
    """

    def test_raises(self):
        # Wildcard must be passed as the string '*', not inside a list.
        with pytest.raises(ValueError, match='passed to allow_origins'):
            falcon.CORSMiddleware(allow_origins=['*'])
        with pytest.raises(ValueError, match='passed to allow_credentials'):
            falcon.CORSMiddleware(allow_credentials=['*'])

    .parametrize('allow, fail_origins, success_origins', (('*', [None], ['foo', 'bar']), ('test', ['other', 'Test', 'TEST'], ['test']), (['foo', 'bar'], ['foo, bar', 'foobar', 'foo,bar', 'Foo', 'BAR'], ['foo', 'bar'])))
    def test_allow_origin(self, make_cors_client, allow, fail_origins, success_origins):
        # Non-matching (or missing) origins get no CORS headers at all;
        # matching origins get Allow-Origin but no credentials/expose headers.
        client = make_cors_client(falcon.CORSMiddleware(allow_origins=allow))
        client.app.add_route('/', CORSHeaderResource())
        for origin in fail_origins:
            h = ({'Origin': origin} if (origin is not None) else {})
            res = client.simulate_get(headers=h)
            h = dict(res.headers.lower_items()).keys()
            assert ('Access-Control-Allow-Origin'.lower() not in h)
            assert ('Access-Control-Allow-Credentials'.lower() not in h)
            assert ('Access-Control-Expose-Headers'.lower() not in h)
        for origin in success_origins:
            res = client.simulate_get(headers={'Origin': origin})
            assert ((res.headers['Access-Control-Allow-Origin'] == '*') if (allow == '*') else origin)
            h = dict(res.headers.lower_items()).keys()
            assert ('Access-Control-Allow-Credentials'.lower() not in h)
            assert ('Access-Control-Expose-Headers'.lower() not in h)

    def test_allow_credential_wildcard(self, make_cors_client):
        # Credential wildcard echoes the request origin and sets the flag.
        client = make_cors_client(falcon.CORSMiddleware(allow_credentials='*'))
        client.app.add_route('/', CORSHeaderResource())
        res = client.simulate_get(headers={'Origin': 'localhost'})
        assert (res.headers['Access-Control-Allow-Origin'] == 'localhost')
        assert (res.headers['Access-Control-Allow-Credentials'] == 'true')

    .parametrize('allow, successOrigin', ((['foo', 'bar'], ['foo', 'bar']), ('foo', ['foo'])))
    def test_allow_credential_list_or_str(self, make_cors_client, allow, successOrigin):
        # Credentials are only granted for exact (case-sensitive) matches;
        # other origins fall back to the '*' allow-origin without credentials.
        client = make_cors_client(falcon.CORSMiddleware(allow_credentials=allow))
        client.app.add_route('/', CORSHeaderResource())
        for origin in ('foo, bar', 'foobar', 'foo,bar', 'Foo', 'BAR'):
            res = client.simulate_get(headers={'Origin': origin})
            assert (res.headers['Access-Control-Allow-Origin'] == '*')
            h = dict(res.headers.lower_items()).keys()
            assert ('Access-Control-Allow-Credentials'.lower() not in h)
            assert ('Access-Control-Expose-Headers'.lower() not in h)
        for origin in successOrigin:
            res = client.simulate_get(headers={'Origin': origin})
            assert (res.headers['Access-Control-Allow-Origin'] == origin)
            assert (res.headers['Access-Control-Allow-Credentials'] == 'true')
            h = dict(res.headers.lower_items()).keys()
            assert ('Access-Control-Expose-Headers'.lower() not in h)

    def test_allow_credential_existing_origin(self, make_cors_client):
        # A resource-set Allow-Origin header is left alone (no credentials).
        client = make_cors_client(falcon.CORSMiddleware(allow_credentials='*'))
        client.app.add_route('/', CORSHeaderResource())
        res = client.simulate_delete(headers={'Origin': 'something'})
        assert (res.headers['Access-Control-Allow-Origin'] == 'example.com')
        h = dict(res.headers.lower_items()).keys()
        assert ('Access-Control-Allow-Credentials'.lower() not in h)

    def test_allow_origin_allow_credential(self, make_cors_client):
        # With both allow_origins and allow_credentials configured, only the
        # exactly matching origin receives CORS headers plus credentials.
        client = make_cors_client(falcon.CORSMiddleware(allow_origins='test', allow_credentials='*'))
        client.app.add_route('/', CORSHeaderResource())
        for origin in ['foo', 'TEST']:
            res = client.simulate_get(headers={'Origin': origin})
            h = dict(res.headers.lower_items()).keys()
            assert ('Access-Control-Allow-Origin'.lower() not in h)
            assert ('Access-Control-Allow-Credentials'.lower() not in h)
            assert ('Access-Control-Expose-Headers'.lower() not in h)
        res = client.simulate_get(headers={'Origin': 'test'})
        assert (res.headers['Access-Control-Allow-Origin'] == 'test')
        assert (res.headers['Access-Control-Allow-Credentials'] == 'true')
        h = dict(res.headers.lower_items()).keys()
        assert ('Access-Control-Expose-Headers'.lower() not in h)

    .parametrize('attr, exp', (('foo', 'foo'), ('foo, bar', 'foo, bar'), (['foo', 'bar'], 'foo, bar')))
    def test_expose_headers(self, make_cors_client, attr, exp):
        # expose_headers accepts a string or a list; lists are comma-joined.
        client = make_cors_client(falcon.CORSMiddleware(expose_headers=attr, allow_credentials=None))
        client.app.add_route('/', CORSHeaderResource())
        res = client.simulate_get(headers={'Origin': 'something'})
        assert (res.headers['Access-Control-Allow-Origin'] == '*')
        assert (res.headers['Access-Control-Expose-Headers'] == exp)
        h = dict(res.headers.lower_items()).keys()
        assert ('Access-Control-Allow-Credentials'.lower() not in h)
class AgentContext():
    """Read-only view of agent internals shared with skills.

    NOTE(review): the parameterless accessor methods below (``storage``,
    ``data_dir``, ``shared_state``, …) read like ``@property`` accessors
    whose decorators were stripped in this copy — confirm upstream.
    """

    # __slots__ keeps per-instance memory small; contexts are widely shared.
    __slots__ = ('_shared_state', '_identity', '_connection_status', '_outbox', '_decision_maker_message_queue', '_decision_maker_handler_context', '_task_manager', '_search_service_address', '_decision_maker_address', '_default_ledger_id', '_currency_denominations', '_default_connection', '_default_routing', '_storage_callable', '_data_dir', '_namespace', '_send_to_skill')

    def __init__(self, identity: Identity, connection_status: MultiplexerStatus, outbox: OutBox, decision_maker_message_queue: Queue, decision_maker_handler_context: SimpleNamespace, task_manager: TaskManager, default_ledger_id: str, currency_denominations: Dict[(str, str)], default_connection: Optional[PublicId], default_routing: Dict[(PublicId, PublicId)], search_service_address: Address, decision_maker_address: Address, data_dir: str, storage_callable: Callable[([], Optional[Storage])]=(lambda : None), send_to_skill: Optional[Callable]=None, **kwargs: Any) -> None:
        # Plain attribute assignment of every collaborator; extra keyword
        # arguments become a namespace exposed via `namespace`.
        self._shared_state = {}
        self._identity = identity
        self._connection_status = connection_status
        self._outbox = outbox
        self._decision_maker_message_queue = decision_maker_message_queue
        self._decision_maker_handler_context = decision_maker_handler_context
        self._task_manager = task_manager
        self._search_service_address = search_service_address
        self._decision_maker_address = decision_maker_address
        self._default_ledger_id = default_ledger_id
        self._currency_denominations = currency_denominations
        self._default_connection = default_connection
        self._default_routing = default_routing
        self._storage_callable = storage_callable
        self._data_dir = data_dir
        self._namespace = SimpleNamespace(**kwargs)
        self._send_to_skill = send_to_skill

    def send_to_skill(self, message_or_envelope: Union[(Message, Envelope)], context: Optional[EnvelopeContext]=None) -> None:
        # Delegates to the injected callback; explicit error when the host
        # did not provide one.
        if (self._send_to_skill is None):
            raise ValueError('Send to skill feature is not supported')
        self._send_to_skill(message_or_envelope, context)

    def storage(self) -> Optional[Storage]:
        # Storage is resolved lazily through the injected callable.
        return self._storage_callable()

    def data_dir(self) -> str:
        return self._data_dir

    def shared_state(self) -> Dict[(str, Any)]:
        return self._shared_state

    def identity(self) -> Identity:
        return self._identity

    # The following accessors simply forward to the identity object.
    def agent_name(self) -> str:
        return self.identity.name

    def addresses(self) -> Dict[(str, Address)]:
        return self.identity.addresses

    def public_keys(self) -> Dict[(str, str)]:
        return self.identity.public_keys

    def address(self) -> Address:
        return self.identity.address

    def public_key(self) -> str:
        return self.identity.public_key

    def connection_status(self) -> MultiplexerStatus:
        return self._connection_status

    def outbox(self) -> OutBox:
        return self._outbox

    def decision_maker_message_queue(self) -> Queue:
        return self._decision_maker_message_queue

    def decision_maker_handler_context(self) -> SimpleNamespace:
        return self._decision_maker_handler_context

    def task_manager(self) -> TaskManager:
        return self._task_manager

    def search_service_address(self) -> Address:
        return self._search_service_address

    def decision_maker_address(self) -> Address:
        return self._decision_maker_address

    def default_ledger_id(self) -> str:
        return self._default_ledger_id

    def currency_denominations(self) -> Dict[(str, str)]:
        return self._currency_denominations

    def default_connection(self) -> Optional[PublicId]:
        return self._default_connection

    def default_routing(self) -> Dict[(PublicId, PublicId)]:
        return self._default_routing

    def namespace(self) -> SimpleNamespace:
        return self._namespace
class SparklineTreeNode(TreeNode):
    """Tree node that renders column 1 as a sparkline and other columns word-wrapped."""

    sparkline_renderer = SparklineRenderer()
    word_wrap_renderer = WordWrapRenderer()

    def get_renderer(self, object, column=0):
        # Column 1 holds the numeric series; everything else is plain text.
        return self.sparkline_renderer if column == 1 else self.word_wrap_renderer

    def get_icon(self, object, is_expanded):
        # The node's colour doubles as its icon, regardless of expansion state.
        return object.color
class ModeSolverMonitor(AbstractModeMonitor):
    """Monitor that records mode-solver fields propagating in one direction."""

    # Which way along the normal axis the recorded mode propagates.
    direction: Direction = pydantic.Field('+', title='Propagation Direction', description='Direction of waveguide mode propagation along the axis defined by its normal dimension.')

    # Whether fields are snapped to primal grid nodes.
    colocate: bool = pydantic.Field(True, title='Colocate Fields', description='Toggle whether fields should be colocated to grid cell boundaries (i.e. primal grid nodes).')

    def storage_size(self, num_cells: int, tmesh: int) -> int:
        """Estimated bytes stored: 6 complex field components per cell,
        per frequency, per mode (doubled for double precision)."""
        single_precision = (((6 * BYTES_COMPLEX) * num_cells) * len(self.freqs)) * self.mode_spec.num_modes
        if (self.mode_spec.precision == 'double'):
            return 2 * single_precision
        return single_precision
def _get_alias(contract_name: str, path_str: str) -> str:
    """Return a ``<org>/<repo>/<contract_name>`` alias when *path_str* lives under
    the data folder; otherwise return the contract name unchanged.
    """
    base_parts = _get_data_folder().parts
    parts = Path(path_str).parts
    if parts[:len(base_parts)] != base_parts:
        # Path is outside the data folder -- no alias prefix applies.
        return contract_name
    start = len(base_parts) + 1
    return f'{parts[start]}/{parts[(start + 1)]}/{contract_name}'
def compile_name(mh, gst, stab, n_name):
    """Compile a MATLAB name node into a GOTO symbol expression.

    Only plain identifiers are supported; the symbol table *stab* maps the
    pretty name to its mangled form. Any other name kind is reported via the
    message handler.
    """
    assert isinstance(mh, Message_Handler)
    assert isinstance(gst, goto_ast.GOTO_Symbol_Table)
    assert isinstance(stab, dict)
    assert isinstance(n_name, m_ast.Name)
    if isinstance(n_name, m_ast.Identifier):
        mangled = stab[str(n_name)]
        sym = goto_ast.Symbol_Expr(make_type(), mangled)
        # Carry the source location onto the generated expression.
        set_location(sym, n_name.loc())
        return sym
    else:
        mh.error(n_name.loc(), ('mh_bmc does not %s names yet' % n_name.__class__.__name__))
class ApiTestUtils(TestCase):
    """Unit tests for helpers in ``api.view_utils``."""

    def test_param_to_list(self):
        """param_to_list splits comma-separated strings and maps falsy inputs to []."""
        from api.view_utils import param_to_list
        cases = [
            ('foo', ['foo']),
            ('foo,bar', ['foo', 'bar']),
            (None, []),
            ([], []),
        ]
        for given, expected in cases:
            self.assertEqual(param_to_list(given), expected)
def test_context_registry_path_does_not_exist():
    """``Context.registry_path`` must fail fast for a missing explicit path and
    when no local ``packages`` registry exists in the working directory.
    """
    # Explicit registry_path pointing at a directory that does not exist.
    with pytest.raises(ValueError, match='Registry path directory provided .* can not be found.'):
        Context(cwd='.', verbosity='', registry_path='some_path_does_not_exist').registry_path
    # No registry_path given and no `packages` directory in an empty cwd.
    with TemporaryDirectory() as tmp_dir, cd(tmp_dir):
        with pytest.raises(ValueError, match='Registry path not provided and local registry `packages` not found'):
            Context(cwd='.', verbosity='', registry_path=None).registry_path
class OptionSeriesCylinderDragdropDraghandle(Options):
    """Config accessors for the Highcharts ``series.cylinder.dragDrop.dragHandle`` options.

    NOTE(review): each option appears twice -- a getter returning the default via
    ``_config_get`` and a setter writing via ``_config``. As written, the second
    ``def`` shadows the first at class-creation time; presumably the original
    source carried ``@property`` / ``@<name>.setter`` decorators that were lost
    during extraction -- TODO confirm against upstream.
    """

    def className(self):
        # CSS class name of the drag handles.
        return self._config_get('highcharts-drag-handle')

    def className(self, text: str):
        self._config(text, js_type=False)

    def color(self):
        # Fill color of the drag handles.
        return self._config_get('#fff')

    def color(self, text: str):
        self._config(text, js_type=False)

    def lineColor(self):
        # Line color of the drag handles.
        return self._config_get('rgba(0, 0, 0, 0.6)')

    def lineColor(self, text: str):
        self._config(text, js_type=False)

    def lineWidth(self):
        # Line width of the drag handles.
        return self._config_get(1)

    def lineWidth(self, num: float):
        self._config(num, js_type=False)

    def zIndex(self):
        # Z-index of the drag handles.
        return self._config_get(901)

    def zIndex(self, num: float):
        self._config(num, js_type=False)
def create_camera(camera_type: Union[(pygfx.PerspectiveCamera, str)]) -> pygfx.PerspectiveCamera:
    """Return a pygfx camera: pass through an existing camera instance, or build
    one from the string '2d' (fov=0, i.e. orthographic-style) or '3d'.

    Raises ValueError for any other value.
    """
    if isinstance(camera_type, pygfx.PerspectiveCamera):
        return camera_type
    if camera_type == '2d':
        # fov=0 yields a flat projection suitable for 2D scenes.
        return pygfx.PerspectiveCamera(fov=0)
    if camera_type == '3d':
        return pygfx.PerspectiveCamera()
    raise ValueError("camera must be one of: '2d', '3d' or an instance of pygfx.PerspectiveCamera")
# NOTE(review): this call looks like a stripped `@_set_msg_type(...)` class
# decorator registering the message type -- TODO confirm against upstream.
_set_msg_type(ofproto.OFPT_SET_CONFIG)
class OFPSetConfig(MsgBase):
    """OpenFlow OFPT_SET_CONFIG message: pushes switch config (flags and
    miss_send_len) to the datapath.
    """

    def __init__(self, datapath, flags=0, miss_send_len=0):
        super(OFPSetConfig, self).__init__(datapath)
        # Bitmap of OFPC_* configuration flags.
        self.flags = flags
        # Max bytes of a packet sent to the controller on a table miss.
        self.miss_send_len = miss_send_len

    def _serialize_body(self):
        # Both fields must be populated before the body can be packed.
        assert (self.flags is not None)
        assert (self.miss_send_len is not None)
        # Pack the switch-config struct directly after the OpenFlow header.
        msg_pack_into(ofproto.OFP_SWITCH_CONFIG_PACK_STR, self.buf, ofproto.OFP_HEADER_SIZE, self.flags, self.miss_send_len)
# NOTE(review): the three `_required` residues below look like stripped view
# decorators (e.g. login/permission checks plus a module-flag guard for
# 'VMS_VM_SNAPSHOT_ENABLED') -- TODO confirm against upstream.
_required
_required
_required('VMS_VM_SNAPSHOT_ENABLED')
def snapshot(request, hostname):
    """Django view: render the VM snapshot page for *hostname*.

    Builds the template context with the VM, the user's VM list, permission
    flags, and the snapshot/backup/image forms the page may show.
    """
    context = collect_view_data(request, 'vm_list')
    # Resolve the target VM (select_related on dc/owner/template/slavevm).
    context['vm'] = vm = get_vm(request, hostname, sr=('dc', 'owner', 'template', 'slavevm'))
    context['vms'] = vms = get_vms(request)
    context['vms_tags'] = get_vms_tags(vms)
    context['can_edit'] = request.user.is_admin(request)
    context['can_image'] = (request.user.is_staff or request.user.has_permission(request, ImageAdminPermission.name))
    # Snapshots are only allowed for admins or when the VM is installed.
    context['cannot_snapshot'] = (not (request.user.is_admin(request) or vm.is_installed()))
    context['snapform_create'] = CreateSnapshotForm(vm, prefix='snap_create', initial={'disk_id': 1})
    context['snapform_update'] = UpdateSnapshotForm(vm, prefix='snap_update')
    context['lastsnap'] = []
    context['snapdefs'] = get_vm_snapdefs(vm)
    context.update(get_vm_snapshots(request, vm))
    if context['can_edit']:
        # Snapshot-definition management forms are admin-only.
        context['snapdeform_update'] = UpdateSnapshotDefineForm(request, vm)
        context['snapdeform_create'] = CreateSnapshotDefineForm(request, vm, prefix='snapdef_create', initial={'disk_id': 1, 'active': True})
        context['bkpform_restore'] = RestoreBackupForm(vms)
    if context['can_image']:
        context['imgform'] = SnapshotImageForm(vm, request, None, prefix='img', initial={'owner': request.user.username, 'access': Image.PRIVATE, 'version': '1.0'})
    # Signal listeners may further mutate the context before rendering.
    view_vm_snapshot.send(sender='gui.vm.views.snapshot', request=request, context=context)
    return render(request, 'gui/vm/snapshot.html', context)
def test_verification_check(config_env: Dict):
    """Verify a DGC (digital green certificate) signature against its DSC key,
    honoring the dataset's expected-result flags.

    Skips when the dataset does not request/enable this check; fails outright
    on dataset config errors.
    """
    if (CONFIG_ERROR in config_env.keys()):
        fail(f'Config Error: {config_env[CONFIG_ERROR]}')
    if (EXPECTED_VERIFY not in config_env[EXPECTED_RESULTS].keys()):
        skip(f'Test not requested: {EXPECTED_VERIFY}')
    if (COSE not in config_env.keys()):
        skip(f'Test dataset does not contain {COSE}')
    if ((TEST_CONTEXT not in config_env.keys()) or (CERTIFICATE not in config_env[TEST_CONTEXT].keys())):
        skip(f'Test dataset does not contain {TEST_CONTEXT} and/or {CERTIFICATE}')
    try:
        dgc = _dgc(config_env)
        dsc = _dsc(config_env)
    except Exception:
        # Parsing may legitimately fail for negative test cases: only re-raise
        # when the dataset expected verification to succeed.
        if config_env[EXPECTED_RESULTS][EXPECTED_VERIFY]:
            raise
        else:
            return
    # Extract the key id (KID) from the protected header, falling back to the
    # unprotected header.
    given_kid = None
    if (COSE in config_env.keys()):
        if (KID in dgc.phdr.keys()):
            given_kid = dgc.phdr[KID]
        else:
            given_kid = dgc.uhdr[KID]
    if config_env[EXPECTED_RESULTS][EXPECTED_VERIFY]:
        # Positive case: KID must match the DSC's and the signature must verify.
        assert (given_kid == dsc[1]), f'Invalid COSE kid value {given_kid}'
        dgc.key = dsc[0]
        assert dgc.verify_signature(), 'Could not validate DGC Signature'
    elif (dgc and dsc and dsc[0]):
        # Negative case: at least one of (KID match, signature valid) must fail.
        dgc.key = dsc[0]
        assert (not all(((dsc[1] == given_kid), dgc.verify_signature())))
def test_get_dataset_rid(mocker, is_integration_test, client, iris_dataset):
    """``client.get_dataset_rid(name)`` resolves a dataset name to its rid.

    In unit mode the Compass decoration response is mocked; in integration mode
    the real service is queried. ``iris_dataset`` is (rid, name) -- presumably;
    verify against the fixture definition.
    """
    if (not is_integration_test):
        # Mock the HTTP layer with a canned Compass resource payload whose
        # 'rid' matches the fixture's expected rid.
        mock_get = mocker.patch('requests.Session.request')
        mock_get.return_value.status_code = 200
        mock_get.return_value.json.return_value = {'rid': iris_dataset[0], 'name': 'iris', 'created': {'time': '2020-01-30T11:18:00.130419Z', 'userId': '3c8fbda5-686e-4fcb-ad52-d95e4281d99f'}, 'modified': {'time': '2020-01-30T11:18:14.111774Z', 'userId': '3c8fbda5-686e-4fcb-ad52-d95e4281d99f'}, 'lastModified': .0, 'description': None, 'operations': ['compass:edit-project', 'compass:remove-imports', 'compass:tags:change-resource-tags', 'compass:view-project-group', 'compass:share-link', 'compass:linked-items:edit', 'compass:linked-items:view', 'compass:create-organization', 'compass:import-resource-to', 'compass:write-resource', 'compass:move-between-projects', 'compass:edit-alias', 'compass:move-project', 'compass:apply-markings', 'gatekeeper:view-resource', 'compass:read-branch', 'compass:share-resource', 'compass:delete', 'compass:read-resource', 'compass:open-resource-links', 'compass:create-project', 'compass:delete-project-group', 'compass:import-resource-from', 'compass:edit-organization', 'compass:import-resource', 'compass:change-resource-permission', 'compass:write-branch', 'compass:move-resource', 'compass:view-long-description', 'compass:edit-project-group', 'compass:view-project-imports', 'compass:tags:change-project-resource-tags', 'compass:tags:change-non-project-resource-tags', 'compass:move-within-project', 'compass:create-project-group', 'compass:view', 'compass:discover', 'compass:edit', 'compass:manage'], 'urlVariables': {'compass:isProject': 'false'}, 'favorite': None, 'branches': None, 'defaultBranch': None, 'defaultBranchWithMarkings': None, 'branchesCount': None, 'hasBranches': None, 'path': None, 'longDescription': None, 'directlyTrashed': False, 'inTrash': None, 'isAutosave': False, 'collections': None, 'namedCollections': None, 'tags': None, 'namedTags': None, 'alias': None, 'collaborators': None, 'namedAncestors': None, 'markings': None, 'linkedItems': None}
    rid = client.get_dataset_rid(iris_dataset[1])
    assert (rid == iris_dataset[0])
class Invert(Bijector):
    """Bijector that swaps the forward and inverse directions of a wrapped bijector.

    ``forward`` delegates to the base bijector's ``inverse`` and vice versa;
    shape queries are delegated correspondingly.
    """

    def __init__(self, bijector: flowtorch.Lazy, *, shape: torch.Size, context_shape: Optional[torch.Size]=None) -> None:
        # Materialize the lazily-specified base bijector for this shape first,
        # then initialize the wrapper with no params of its own.
        b = bijector(shape=shape)
        super().__init__(None, shape=shape, context_shape=context_shape)
        self.bijector = b

    def forward(self, x: torch.Tensor, context: Optional[torch.Tensor]=None) -> torch.Tensor:
        # Forward of the inverted bijector is the base bijector's inverse.
        y = self.bijector.inverse(x, context=context)
        return y

    def inverse(self, y: torch.Tensor, x: Optional[torch.Tensor]=None, context: Optional[torch.Tensor]=None) -> torch.Tensor:
        # A cached `x` hint is meaningless for the swapped direction, so it is
        # rejected rather than silently ignored.
        if (x is not None):
            raise RuntimeError('x must be None when calling InverseBijector.inverse')
        x = self.bijector.forward(y, context=context)
        return x

    def log_abs_det_jacobian(self, x: torch.Tensor, y: torch.Tensor, context: Optional[torch.Tensor]=None) -> torch.Tensor:
        # Delegates with (x, y) swapped to (y, x) to match the base bijector's
        # direction. NOTE(review): no sign flip is applied here -- presumably the
        # base class's convention makes that correct; confirm against the
        # flowtorch Bijector contract.
        return self.bijector.log_abs_det_jacobian(y, x, context)

    def param_shapes(self, shape: torch.Size) -> Sequence[torch.Size]:
        return self.bijector.param_shapes(shape)

    def __repr__(self) -> str:
        return self.bijector.__repr__()

    def forward_shape(self, shape: torch.Size) -> torch.Size:
        return self.bijector.forward_shape(shape)

    def inverse_shape(self, shape: torch.Size) -> torch.Size:
        return self.bijector.inverse_shape(shape)
def start_new_thread(target, args=(), kwargs=None, daemon=True, use_caller_name=False):
    """Create, start and return a named daemon (by default) thread running *target*.

    The thread name is taken from the caller's function name when
    *use_caller_name* is set, otherwise from ``target.__name__``. A debug line
    is logged against the caller module's logger.
    """
    thread_name = inspect.stack()[1][3] if use_caller_name else target.__name__
    caller_logger = logging.getLogger(inspect.currentframe().f_back.f_globals.get('__name__'))
    caller_logger.debug('new thread: {}'.format(thread_name))
    if PY2:
        # Python 2's Thread has no `daemon` constructor argument.
        worker = threading.Thread(target=target, args=args, kwargs=kwargs, name=thread_name)
        worker.setDaemon(daemon)
    else:
        worker = threading.Thread(target=target, args=args, kwargs=kwargs, name=thread_name, daemon=daemon)
    worker.start()
    return worker
class OptionPlotoptionsVectorSonificationDefaultspeechoptionsMappingRate(Options):
    """Config accessors for the Highcharts vector-series sonification speech-rate mapping.

    NOTE(review): each option appears twice -- a getter via ``_config_get`` and a
    setter via ``_config``. The second ``def`` shadows the first at class-creation
    time; presumably ``@property`` / setter decorators were lost during
    extraction -- TODO confirm against upstream.
    """

    def mapFunction(self):
        # Mapping function for the rate (e.g. 'linear' / 'logarithmic').
        return self._config_get(None)

    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    def mapTo(self):
        # Data property the rate is mapped to.
        return self._config_get(None)

    def mapTo(self, text: str):
        self._config(text, js_type=False)

    def max(self):
        # Upper bound of the mapped range.
        return self._config_get(None)

    def max(self, num: float):
        self._config(num, js_type=False)

    def min(self):
        # Lower bound of the mapped range.
        return self._config_get(None)

    def min(self, num: float):
        self._config(num, js_type=False)

    def within(self):
        # Scope the mapping is computed within (e.g. series vs chart).
        return self._config_get(None)

    def within(self, value: Any):
        self._config(value, js_type=False)
class PluginManager():
    """Loads plug-in classes named in the config, lets them extend the argument
    parser, and instantiates them on demand.

    Plug-ins are cached per config-key path in ``self._plugins``; parsed and
    still-unparsed CLI arguments are threaded through successive plug-ins.
    """

    def __init__(self, config, parser, parsed_args, unparsed_argv):
        self._config = config
        self._parser = parser
        self._plugins = {}
        # Namespace of already-parsed args; updated as each plug-in registers
        # its own options and re-parses.
        self.args = parsed_args
        self.remaining_argv = unparsed_argv

    def parse_args(self, *keys, default=None):
        """Load the plug-in class at ``config[*keys]['plugin']`` (or *default*),
        let it register parser options, re-parse the remaining argv, and cache
        the plug-in under ``keys``.

        Raises ValueError when the dotted path cannot be imported and TypeError
        when the loaded object is not a Plugin subclass.
        """
        path = (self._config.get(*keys, 'plugin') or default)
        LOG.info('loading plug-in: %s', path)
        try:
            plugin_class = load_dotted_object(path)
        except ImportError as e:
            raise ValueError(f"Error in config: {'->'.join(keys)}->plugin: {e}") from e
        if (not (isclass(plugin_class) and issubclass(plugin_class, Plugin))):
            raise TypeError(f"Error in config: {'->'.join(keys)}->plugin: '{path}' is not a {Plugin}")
        # Give the plug-in lazy access to its own options subtree.
        cfg = partial(self._config.get, *keys, 'options')
        plugin = plugin_class(self._parser, cfg)
        # Re-parse: the plug-in may have added new parser options.
        (self.args, self.remaining_argv) = self._parser.parse_known_args(self.remaining_argv, self.args)
        LOG.info('parsed args=%s remaining args=%s', self.args, self.remaining_argv)
        self._plugins[keys] = plugin

    def instantiate(self, *keys, default=None, must_be=None):
        """Instantiate (loading first if needed) the plug-in at ``keys``.

        Raises TypeError when *must_be* is given and the built instance is not
        of that type.
        """
        if (keys not in self._plugins):
            # BUG FIX: `default` is keyword-only in parse_args(); the original
            # passed it positionally, which appended it to `keys` instead of
            # supplying the fallback plugin path.
            self.parse_args(*keys, default=default)
        instance = self._plugins[keys].instantiate(self.args)
        if (must_be and (not isinstance(instance, must_be))):
            raise TypeError(f"Error in config: {'->'.join(keys)}->plugin: plugin did not build a {must_be}")
        return instance
def repl_absolute(m, base_path):
    """Regex substitution callback: rewrite a relative link in the match as an
    absolute URL rooted at *base_path*; absolute links, real URLs and anything
    that fails to parse are returned unchanged.
    """
    link = m.group(0)
    try:
        # m.group('path') is quoted -- strip the surrounding quote characters.
        (scheme, netloc, path, params, query, fragment, is_url, is_absolute) = util.parse_url(m.group('path')[1:(- 1)])
        if (not is_url) and (not is_absolute):
            resolved = util.url2path(path)
            resolved = os.path.normpath(os.path.join(base_path, resolved))
            resolved = util.path2url(resolved)
            # Guarantee exactly one leading slash on the rebuilt path.
            prefix = '' if resolved.startswith('/') else '/'
            link = ('%s"%s%s"' % (m.group('name'), prefix, urlunparse((scheme, netloc, resolved, params, query, fragment))))
    except Exception:
        # Unparseable links are left as-is by design (best effort).
        pass
    return link
class OptionPlotoptionsScatterSonificationTracks(Options):
    """Config accessors for the Highcharts scatter-series sonification track options.

    NOTE(review): the scalar options below each appear twice -- a getter via
    ``_config_get`` and a setter via ``_config``; the second ``def`` shadows the
    first, so ``@property`` / setter decorators were presumably lost during
    extraction -- TODO confirm against upstream.
    """

    def activeWhen(self) -> 'OptionPlotoptionsScatterSonificationTracksActivewhen':
        # Sub-options controlling when this track is active.
        return self._config_sub_data('activeWhen', OptionPlotoptionsScatterSonificationTracksActivewhen)

    def instrument(self):
        # Instrument used for the track; defaults to 'piano'.
        return self._config_get('piano')

    def instrument(self, text: str):
        self._config(text, js_type=False)

    def mapping(self) -> 'OptionPlotoptionsScatterSonificationTracksMapping':
        # Sub-options mapping data properties to sound parameters.
        return self._config_sub_data('mapping', OptionPlotoptionsScatterSonificationTracksMapping)

    def midiName(self):
        # MIDI export name for the track.
        return self._config_get(None)

    def midiName(self, text: str):
        self._config(text, js_type=False)

    def pointGrouping(self) -> 'OptionPlotoptionsScatterSonificationTracksPointgrouping':
        # Sub-options for grouping nearby points into a single sound.
        return self._config_sub_data('pointGrouping', OptionPlotoptionsScatterSonificationTracksPointgrouping)

    def roundToMusicalNotes(self):
        # Whether pitches snap to musical notes; defaults to True.
        return self._config_get(True)

    def roundToMusicalNotes(self, flag: bool):
        self._config(flag, js_type=False)

    def showPlayMarker(self):
        # Whether the play marker is shown; defaults to True.
        return self._config_get(True)

    def showPlayMarker(self, flag: bool):
        self._config(flag, js_type=False)

    def type(self):
        # Track type; defaults to 'instrument'.
        return self._config_get('instrument')

    def type(self, text: str):
        self._config(text, js_type=False)
class DataStore():
    """Abstract persistence interface for boards, users, lists and items.

    Every method raises NotImplementedError; concrete back-ends override the
    full set. Methods come in CRUD groups per entity type.
    """

    # --- Boards ---
    def add_board(self, model) -> None:
        raise NotImplementedError

    def get_board(self, id) -> 'Board':
        raise NotImplementedError

    def get_boards(self) -> list['Board']:
        raise NotImplementedError

    def update_board(self, model, update):
        raise NotImplementedError

    def remove_board(self, board) -> None:
        raise NotImplementedError

    # --- Users ---
    def add_user(self, model) -> None:
        raise NotImplementedError

    def get_users(self) -> list['User']:
        raise NotImplementedError

    def get_user(self, id) -> 'User':
        raise NotImplementedError

    def remove_user(self, id) -> None:
        raise NotImplementedError

    # --- Lists (scoped to a board) ---
    def add_list(self, board, model) -> None:
        raise NotImplementedError

    def get_lists(self) -> list['BoardList']:
        raise NotImplementedError

    def get_list(self, id) -> 'BoardList':
        raise NotImplementedError

    def get_lists_by_board(self, board) -> list['BoardList']:
        raise NotImplementedError

    def remove_list(self, board, id) -> None:
        raise NotImplementedError

    # --- Items (scoped to a board list) ---
    def add_item(self, board_list, model) -> None:
        raise NotImplementedError

    def get_items(self, board_list) -> list['Item']:
        raise NotImplementedError

    def get_item(self, id) -> 'Item':
        raise NotImplementedError

    def get_items_by_board(self, board) -> list['Item']:
        raise NotImplementedError

    def remove_item(self, board_list, id) -> None:
        raise NotImplementedError
def test_check_auth_admin(db):
    """check_auth_admin accepts admin users and rejects non-admins."""
    # Admin user: authentication must succeed.
    admin = create_user(email='', password='password')
    admin.is_admin = True
    result = AuthManager.check_auth_admin('', 'password')
    assert (result == True)
    # Same credentials but non-admin: authentication must fail.
    regular = create_user(email='', password='password')
    regular.is_admin = False
    result = AuthManager.check_auth_admin('', 'password')
    assert (result == False)
def smile():
    """Play a face video fullscreen in a borderless window, switching to the
    'look_happy' clip when 'q' is pressed or the stream ends.

    NOTE(review): pressing 'q' does not exit the loop -- it re-opens a different
    video and keeps playing forever; presumably intentional kiosk behavior --
    confirm. The loop has no termination path.
    """
    cap = cv.VideoCapture('/home/ubuntu/emo/face_video_3_2.mp4')
    out_win = 'l'
    cv.namedWindow(out_win, cv.WINDOW_NORMAL)
    cv.setWindowProperty(out_win, cv.WND_PROP_FULLSCREEN, cv.WINDOW_FULLSCREEN)
    while True:
        (ret, frame) = cap.read()
        if (frame is not None):
            cv.imshow(out_win, frame)
        # 255 mask extracts the low byte of the key code (cv.waitKey convention).
        if (((cv.waitKey(1) & 255) == ord('q')) or (ret == False)):
            cap = cv.VideoCapture('/home/ubuntu/emo/look_happy.mp4')
def _single_source_shape_constraint_funcs(src_type: Type[Operator], target_type: Type[Operator]) -> Tuple[(Callable[([Tensor], bool)], Callable[([Tensor], Tensor)])]:
    """Build a (match, replace) callback pair that swaps a tensor's single
    ``src_type`` source op for an equivalent ``target_type`` op when the
    target's shape constraint holds for the op's two inputs.
    """

    def matchFunc(tensor: Tensor) -> bool:
        """True iff *tensor* has exactly one source op of src_type whose two
        inputs satisfy target_type's shape validity check."""
        src_ops = tensor._attrs['src_ops']
        if (src_ops is None) or (len(src_ops) != 1):
            return False
        (src_op,) = tuple(src_ops)
        # Compare op kind by name, as recorded in the op's attrs.
        if src_op._attrs['op'] != src_type()._attrs['op']:
            return False
        lhs, rhs = src_op._attrs['inputs']
        if not target_type.is_valid_shape(lhs, rhs):
            return False
        return True

    def replaceFunc(old_tensor: Tensor) -> Tensor:
        """Rebuild the tensor with target_type, migrate attrs, and splice it
        into the graph in place of *old_tensor*."""
        (src_op,) = tuple(old_tensor._attrs['src_ops'])
        lhs, rhs = src_op._attrs['inputs']
        new_tensor = target_type()(lhs, rhs)
        copy_tensor_attributes(new_tensor, old_tensor)
        copy_src_op_attributes(new_tensor, old_tensor)
        # Detach the old op from its inputs before rewiring the graph.
        remove_dst_op_from_tensor([lhs, rhs], src_op)
        replace_tensor(old_tensor, new_tensor)
        return new_tensor

    return (matchFunc, replaceFunc)
class SchemaspaceCreate(SchemaspaceBase):
    """CLI subcommand that creates (or, with ``update_mode``, updates) a
    metadata instance for a schemaspace.

    Options are built dynamically from the selected schema; ``--file`` /
    ``--json`` allow supplying the whole instance in bulk, relaxing the
    per-property required checks.
    """

    name_option = CliOption('--name', name='name', description='The name of the metadata instance.')
    file_option = FileOption('--file', name='file', description='The filename containing the metadata instance. Can be used to bypass individual property arguments.')
    json_option = JSONOption('--json', name='json', description='The JSON string containing the metadata instance. Can be used to bypass individual property arguments.')
    # Base option set; schema-derived options are appended in __init__.
    options: List[Option] = [file_option, json_option]
    # Subclasses flip this to True to reuse the flow for updates.
    update_mode = False

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        # Names of properties whose schema uses JSON keywords the tooling
        # cannot fully interpret (reported in print_help).
        self.complex_properties: List[str] = []
        self.metadata_manager = MetadataManager(schemaspace=self.schemaspace)
        schema_list = list(self.schemas.keys())
        if (len(schema_list) == 1):
            # Single schema: make it the default so --schema_name is optional in practice.
            self.schema_name_option = CliOption('--schema_name', name='schema_name', default_value=schema_list[0], description=f"The schema_name of the metadata instance (defaults to '{schema_list[0]}')", required=True)
        else:
            enum = schema_list
            self.schema_name_option = CliOption('--schema_name', name='schema_name', enum=enum, description=f'The schema_name of the metadata instance Must be one of: {enum}', required=True)
        self.options.extend([self.schema_name_option, self.name_option])
        # Bulk input (--file/--json) relaxes required-property enforcement.
        bulk_metadata = self._process_json_based_options()
        relax_required = (bulk_metadata or self.update_mode)
        self.process_cli_option(self.schema_name_option, check_help=True)
        schema = self.schemas[self.schema_name_option.value]
        schema_options = self._schema_to_options(schema, relax_required)
        self.options.extend(schema_options)

    def start(self):
        """Collect option values into a metadata dict and persist the instance,
        printing the resource location or exiting with a diagnostic."""
        super().start()
        name = self.name_option.value
        schema_name = self.schema_name_option.value
        display_name = None
        metadata = {}
        for option in self.options:
            if isinstance(option, MetadataSchemaProperty):
                # Skip unset optional values (unless the property type is 'null').
                if ((not option.required) and (not option.value) and (option.type != 'null')):
                    continue
                metadata[option.name] = option.value
            elif isinstance(option, SchemaProperty):
                if (option.name == 'display_name'):
                    display_name = option.value
                    continue
            elif isinstance(option, JSONBasedOption):
                # Bulk --file/--json contents merge underneath explicit options.
                metadata.update(option.metadata)
        if ((display_name is None) and (self.update_mode is False)):
            self.log_and_exit(f"Could not determine display_name from schema '{schema_name}'")
        ex_msg = None
        new_instance = None
        try:
            if self.update_mode:
                # Update flow: fetch, overlay, and save the existing instance.
                updated_instance = self.metadata_manager.get(name)
                updated_instance.schema_name = schema_name
                if display_name:
                    updated_instance.display_name = display_name
                updated_instance.metadata.update(metadata)
                new_instance = self.metadata_manager.update(name, updated_instance)
            else:
                instance = Metadata(schema_name=schema_name, name=name, display_name=display_name, metadata=metadata)
                new_instance = self.metadata_manager.create(name, instance)
        except Exception as ex:
            # Captured and reported below so both failure shapes share one exit path.
            ex_msg = str(ex)
        if new_instance:
            print(f"Metadata instance '{new_instance.name}' for schema '{schema_name}' has been written to: {new_instance.resource}")
        elif ex_msg:
            self.log_and_exit(f"The following exception occurred saving metadata instance for schema '{schema_name}': {ex_msg}", display_help=False)
        else:
            self.log_and_exit(f"A failure occurred saving metadata instance '{name}' for schema '{schema_name}'.", display_help=False)

    def _process_json_based_options(self) -> bool:
        """Handle the mutually-exclusive --file/--json options; returns True when
        bulk metadata was supplied."""
        bulk_metadata = False
        self.process_cli_option(self.file_option, check_help=True)
        self.process_cli_option(self.json_option, check_help=True)
        if ((self.json_option.value is not None) and (self.file_option.value is not None)):
            self.log_and_exit("At most one of '--json' or '--file' can be set at a time.", display_help=True)
        elif (self.json_option.value is not None):
            bulk_metadata = True
            # Rewrite argv so downstream parsing sees the expanded names.
            self.json_option.transfer_names_to_argvs(self.argv, self.argv_mappings)
        elif (self.file_option.value is not None):
            bulk_metadata = True
            self.file_option.transfer_names_to_argvs(self.argv, self.argv_mappings)
        return bulk_metadata

    def _schema_to_options(self, schema: Dict, relax_required: bool=False) -> List[Option]:
        """Derive CLI options from the schema's top-level and metadata properties.

        With *relax_required*, metadata properties absent from argv are skipped
        and required-flag enforcement is disabled.
        """
        options = {}
        properties = schema['properties']
        for (name, value) in properties.items():
            if (name == 'schema_name'):
                # Already handled by the dedicated --schema_name option.
                continue
            if (name != 'metadata'):
                options[name] = SchemaProperty(name, value)
            else:
                metadata_properties = properties['metadata']['properties']
                for (md_name, md_value) in metadata_properties.items():
                    msp = MetadataSchemaProperty(md_name, md_value)
                    # In relaxed mode only surface properties the user actually passed.
                    if ((msp.cli_option not in self.argv_mappings) and relax_required):
                        continue
                    if msp.unsupported_meta_props:
                        self.complex_properties.append(md_name)
                    options[md_name] = msp
        if (not relax_required):
            # NOTE(review): .get('required') may return None here, which would
            # make the for-loop raise TypeError -- presumably every schema in
            # practice declares 'required'; confirm.
            required_props = properties['metadata'].get('required')
            for required in required_props:
                options.get(required).required = True
            if (self.update_mode is False):
                required_props = (set(schema.get('required')) - {'schema_name', 'metadata'})
                for required in required_props:
                    options.get(required).required = True
        return list(options.values())

    def print_help(self):
        """Extend the base help output with a warning about schema properties the
        tooling cannot fully interpret."""
        super().print_help()
        if self.complex_properties:
            print(f'Note: The following properties in this schema contain JSON keywords that are not supported by the tooling: {self.complex_properties}.')
            print("This can impact the tool's ability to derive context from the schema, including a property's type, description, or behaviors included in complex types like 'oneOf'.")
            print('It is recommended that options corresponding to these properties be set after understanding the schema or indirectly using `--file` or `--json` options.')
            print('If the property is of type "object" it can be set using a file containing only that property\'s JSON.')
            print(f'The following are considered unsupported keywords: {SchemaProperty.unsupported_keywords}')
def fortios_certificate(data, fos, check_mode):
    """Run the 'certificate' member operation for the 'remote' resource and
    return either the raw response (check mode) or an Ansible-style
    (failed, changed, response, diff) tuple.
    """
    fos.do_member_operation('certificate', 'remote')
    if data['certificate_remote']:
        resp = certificate_remote(data, fos, check_mode)
    else:
        fos._module.fail_json(msg=('missing task body: %s' % 'certificate_remote'))
    if check_mode:
        return resp
    success = is_successful_status(resp)
    # Assume a change occurred unless the API explicitly reports otherwise.
    changed = (resp['revision_changed'] if ('revision_changed' in resp) else True)
    return ((not success), (success and changed), resp, {})
class Solution(object):
    """LeetCode 821: Shortest Distance to a Character."""

    def shortestToChar(self, S, C):
        """Return a list where entry i is the distance from S[i] to the closest
        occurrence of character C in S.

        Replaces the original O(n * max_distance) outward ring-expansion (which
        also looped forever when C was absent) with the standard two-pass O(n)
        sweep: a left-to-right pass records distance to the nearest C on the
        left, then a right-to-left pass tightens it with the nearest C on the
        right.
        """
        n = len(S)
        INF = float('inf')
        result = [INF] * n
        # Left-to-right: distance from the most recent C seen so far.
        last_seen = -INF
        for i, ch in enumerate(S):
            if ch == C:
                last_seen = i
            result[i] = i - last_seen
        # Right-to-left: tighten with the nearest C on the right.
        last_seen = INF
        for i in range(n - 1, -1, -1):
            if S[i] == C:
                last_seen = i
            result[i] = min(result[i], last_seen - i)
        return result
# NOTE(review): this looks like a stripped `@default_renderer(wrap_type=...)`
# (or similar) class decorator registering the renderer -- TODO confirm.
_renderer(wrap_type=TestColumnAllUniqueValues)
class TestColumnAllUniqueValuesRenderer(TestRenderer):
    """HTML renderer for the all-unique-values column test: appends value-count
    tables (current vs reference) to the base test details."""

    def render_html(self, obj: TestColumnAllUniqueValues) -> TestHtmlInfo:
        info = super().render_html(obj)
        column_name = obj.column_name
        counts_data = obj.metric.get_result().plot_data.counts_of_values
        if (counts_data is not None):
            curr_df = counts_data['current']
            ref_df = None
            # Reference data is optional -- only present for comparison runs.
            if ('reference' in counts_data.keys()):
                ref_df = counts_data['reference']
            additional_plots = plot_value_counts_tables_ref_curr(column_name, curr_df, ref_df, 'AllUniqueValues')
            info.details = additional_plots
        return info
# NOTE(review): the orphaned `(scope='function')` below looks like a stripped
# `@pytest.fixture(scope='function')` decorator -- TODO confirm; as written this
# line is not valid Python on its own.
(scope='function')
def pymssql_connection(request):
    """Pytest fixture: yield a pymssql connection (host/user/password/db from
    environment, with local-dev defaults) holding an uncommitted `test` table
    of three rows; the transaction is rolled back on teardown."""
    conn = pymssql.connect(os.environ.get('MSSQL_HOST', 'localhost'), os.environ.get('MSSQL_USER', 'SA'), os.environ.get('MSSQL_PASSWORD', 'Very(!)Secure'), os.environ.get('MSSQL_DATABASE', 'tempdb'))
    cursor = conn.cursor()
    cursor.execute("CREATE TABLE test(id INT, name NVARCHAR(5) NOT NULL);INSERT INTO test VALUES (1, 'one'), (2, 'two'), (3, 'three');")
    (yield conn)
    # Undo the table creation/inserts so each test starts clean.
    conn.rollback()
# NOTE(review): the leading `.parametrize(...)` is a stripped
# `@pytest.mark.parametrize` decorator -- TODO confirm; as written this line is
# not valid Python on its own.
.parametrize('test_input,expected', [(Action('autoscaling', 'DescribeLaunchConfigurations'), [Action('autoscaling', 'CreateLaunchConfiguration'), Action('autoscaling', 'DeleteLaunchConfiguration')]), (Action('autoscaling', 'CreateLaunchConfiguration'), [Action('autoscaling', 'DeleteLaunchConfiguration'), Action('autoscaling', 'DescribeLaunchConfigurations')]), (Action('autoscaling', 'DeleteLaunchConfiguration'), [Action('autoscaling', 'CreateLaunchConfiguration'), Action('autoscaling', 'DescribeLaunchConfigurations')]), (Action('autoscaling', 'UpdateAutoScalingGroup'), [Action('autoscaling', 'CreateAutoScalingGroup'), Action('autoscaling', 'DeleteAutoScalingGroup'), Action('autoscaling', 'DescribeAutoScalingGroups')]), (Action('autoscaling', 'DeleteAutoScalingGroup'), [Action('autoscaling', 'CreateAutoScalingGroup'), Action('autoscaling', 'UpdateAutoScalingGroup'), Action('autoscaling', 'DescribeAutoScalingGroups')]), (Action('ec2', 'DetachVolume'), [Action('ec2', 'CreateVolume'), Action('ec2', 'DeleteVolume'), Action('ec2', 'AttachVolume'), Action('ec2', 'DescribeVolumes')]), (Action('ecr', 'ListImages'), [Action('ecr', 'PutImage'), Action('ecr', 'DescribeImages')]), (Action('s3', 'PutObject'), [Action('s3', 'DeleteObject'), Action('s3', 'GetObject'), Action('s3', 'ListObjects')])])
def test_find_matching_actions_without_filtering(test_input, expected):
    """With no prefix filtering, each Action's matching_actions() returns the
    expected sibling CRUD/describe actions for its service."""
    assert (test_input.matching_actions(allowed_prefixes=None) == expected)
def test_def_rename_only_variable_nested():
    """A definition request on a nested renamed variable resolves to its
    declaration at line 1, column 1 of the same file."""
    file_path = (test_dir / 'subdir') / 'test_rename.F90'
    request = write_rpc_request(1, 'initialize', {'rootPath': str(test_dir)})
    request += def_request(file_path, 15, 6)
    (errcode, results) = run_request(request)
    assert (errcode == 0)
    expected = [[1, 1, str((test_dir / 'subdir') / 'test_rename.F90')]]
    # results[0] is the initialize response; definitions start at index 1.
    assert (len(expected) == (len(results) - 1))
    for idx, exp in enumerate(expected):
        validate_def(results[(idx + 1)], exp)
def test_rmsd_grad():
    """rmsd_grad on a slightly perturbed 3-atom identity geometry returns a
    known RMSD and an (approximately) zero gradient.

    NOTE(review): `SeedSequence()` with no arguments draws fresh OS entropy, so
    `rs.random(...)` is NOT reproducible across runs -- yet the assertions below
    pin exact values. Presumably a fixed seed constant was lost here
    (e.g. `SeedSequence(<seed>)`) -- TODO confirm and restore it.
    """
    rs = RandomState(MT19937(SeedSequence()))
    n = 3
    coords3d = np.eye(3)
    # Reference geometry: identity coordinates plus a small random perturbation.
    ref_coords3d = (coords3d + (0.2 * rs.random((n, 3))))
    (rmsd, grad) = rmsd_grad(coords3d, ref_coords3d)
    assert (rmsd == approx(0.0508618))
    ref_grad = np.array([[0., (- 0.), (- 0.)], [(- 0.), (- 0.), 0.], [(- 0.), 0., (- 0.)]])
    np.testing.assert_allclose(grad, ref_grad, atol=1e-08)
class TicketTester(UnitTestDBBase):
    """Database-backed tests for stalker's Ticket workflow.

    setUp builds a minimal Project/Asset/Task/Version hierarchy, a default
    Ticket (self.test_ticket) and caches the five workflow Status rows
    (New/Accepted/Assigned/Reopened/Closed) used by the state-machine tests.
    """

    def setUp(self):
        """Create the fixture entities and commit them to the test DB."""
        super(TicketTester, self).setUp()
        from stalker import Status
        self.test_status1 = Status(name='N', code='N')
        self.test_status2 = Status(name='R', code='R')
        from stalker import Type
        # Ticket types are pre-seeded by the DB setup; grab the first two.
        ticket_types = Type.query.filter((Type.target_entity_type == 'Ticket')).all()
        self.ticket_type_1 = ticket_types[0]
        self.ticket_type_2 = ticket_types[1]
        from stalker import User
        self.test_user = User(name='Test User', login='testuser1', email='', password='secret')
        from stalker import Repository
        self.test_repo = Repository(name='Test Repo', code='TR')
        self.test_project_type = Type(name='Commercial Project', code='comm', target_entity_type='Project')
        self.test_project_status1 = Status(name='PrjStat1', code='PrjStat1')
        self.test_project_status2 = Status(name='PrjStat2', code='PrjStat2')
        from stalker import Project
        self.test_project = Project(name='Test Project 1', code='TEST_PROJECT_1', type=self.test_project_type, repository=self.test_repo)
        from stalker.db.session import DBSession
        DBSession.add(self.test_project)
        DBSession.commit()
        self.test_asset_type = Type(name='Character Asset', code='char', target_entity_type='Asset')
        from stalker import Asset
        self.test_asset = Asset(name='Test Asset', code='ta', project=self.test_project, type=self.test_asset_type)
        DBSession.add(self.test_asset)
        DBSession.commit()
        from stalker import Task
        self.test_task = Task(name='Modeling of Asset 1', resources=[self.test_user], parent=self.test_asset)
        DBSession.add(self.test_task)
        DBSession.commit()
        from stalker import Version
        self.test_version = Version(name='Test Version', task=self.test_task, version=1, full_path='some/path')
        # Default constructor arguments reused by most tests below.
        self.kwargs = {'project': self.test_project, 'links': [self.test_version], 'summary': 'This is a test ticket', 'description': 'This is the long description', 'priority': 'TRIVIAL', 'reported_by': self.test_user}
        from stalker import Ticket
        # This first ticket consumes ticket number 1.
        self.test_ticket = Ticket(**self.kwargs)
        DBSession.add(self.test_ticket)
        DBSession.commit()
        # Cache the workflow statuses (seeded by the DB setup).
        self.status_new = Status.query.filter_by(name='New').first()
        self.status_accepted = Status.query.filter_by(name='Accepted').first()
        self.status_assigned = Status.query.filter_by(name='Assigned').first()
        self.status_reopened = Status.query.filter_by(name='Reopened').first()
        self.status_closed = Status.query.filter_by(name='Closed').first()

    # --- naming --------------------------------------------------------

    def test___auto_name__class_attribute_is_set_to_True(self):
        from stalker import Ticket
        assert (Ticket.__auto_name__ is True)

    def test_name_argument_is_not_used(self):
        """Tickets auto-generate their names; a given name is ignored."""
        from stalker import Ticket
        test_value = 'Test Name'
        self.kwargs['name'] = test_value
        new_ticket = Ticket(**self.kwargs)
        assert (new_ticket.name != test_value)

    def test_name_argument_is_skipped_will_not_raise_error(self):
        from stalker import Ticket
        if ('name' in self.kwargs):
            self.kwargs.pop('name')
        Ticket(**self.kwargs)

    # --- ticket numbering ---------------------------------------------
    # Numbers are global (shared across projects), not per-project.
    # setUp's ticket already took number 1, so counting starts at 2.

    def test_number_attribute_is_not_created_per_project(self):
        from stalker import Project
        proj1 = Project(name='Test Project 1', code='TP1', repository=self.test_repo)
        proj2 = Project(name='Test Project 2', code='TP2', repository=self.test_repo)
        proj3 = Project(name='Test Project 3', code='TP3', repository=self.test_repo)
        from stalker import Ticket
        p1_t1 = Ticket(project=proj1)
        from stalker.db.session import DBSession
        DBSession.add(p1_t1)
        DBSession.commit()
        assert (p1_t1.number == 2)
        p1_t2 = Ticket(project=proj1)
        DBSession.add(p1_t2)
        DBSession.commit()
        assert (p1_t2.number == 3)
        p2_t1 = Ticket(project=proj2)
        DBSession.add(p2_t1)
        DBSession.commit()
        assert (p2_t1.number == 4)
        p1_t3 = Ticket(project=proj1)
        DBSession.add(p1_t3)
        DBSession.commit()
        assert (p1_t3.number == 5)
        p3_t1 = Ticket(project=proj3)
        DBSession.add(p3_t1)
        DBSession.commit()
        assert (p3_t1.number == 6)
        p2_t2 = Ticket(project=proj2)
        DBSession.add(p2_t2)
        DBSession.commit()
        assert (p2_t2.number == 7)
        p3_t2 = Ticket(project=proj3)
        DBSession.add(p3_t2)
        DBSession.commit()
        assert (p3_t2.number == 8)
        p2_t3 = Ticket(project=proj2)
        DBSession.add(p2_t3)
        DBSession.commit()
        assert (p2_t3.number == 9)

    def test_number_attribute_is_read_only(self):
        with pytest.raises(AttributeError) as cm:
            self.test_ticket.number = 234
        assert (str(cm.value) == "can't set attribute")

    def test_number_attribute_is_automatically_increased(self):
        from stalker import Ticket
        ticket1 = Ticket(**self.kwargs)
        from stalker.db.session import DBSession
        DBSession.add(ticket1)
        DBSession.commit()
        ticket2 = Ticket(**self.kwargs)
        DBSession.add(ticket2)
        DBSession.commit()
        assert ((ticket1.number + 1) == ticket2.number)
        assert (ticket1.number == 2)
        assert (ticket2.number == 3)

    # --- links ---------------------------------------------------------

    def test_links_argument_accepts_anything_derived_from_SimpleEntity(self):
        self.kwargs['links'] = [self.test_project, self.test_project_status1, self.test_project_status2, self.test_repo, self.test_version]
        from stalker import Ticket
        new_ticket = Ticket(**self.kwargs)
        assert (sorted(self.kwargs['links'], key=(lambda x: x.name)) == sorted(new_ticket.links, key=(lambda x: x.name)))

    def test_links_attribute_accepts_anything_derived_from_SimpleEntity(self):
        links = [self.test_project, self.test_project_status1, self.test_project_status2, self.test_repo, self.test_version]
        self.test_ticket.links = links
        assert (sorted(links, key=(lambda x: x.name)) == sorted(self.test_ticket.links, key=(lambda x: x.name)))

    # --- related tickets ----------------------------------------------

    def test_related_tickets_attribute_is_an_empty_list_on_init(self):
        assert (self.test_ticket.related_tickets == [])

    def test_related_tickets_attribute_is_set_to_something_other_then_a_list_of_Tickets(self):
        with pytest.raises(TypeError) as cm:
            self.test_ticket.related_tickets = ['a ticket']
        assert (str(cm.value) == 'Ticket.related_ticket attribute should be a list of other stalker.models.ticket.Ticket instances not str')

    def test_related_tickets_attribute_accepts_list_of_Ticket_instances(self):
        from stalker import Ticket
        new_ticket1 = Ticket(**self.kwargs)
        from stalker.db.session import DBSession
        DBSession.add(new_ticket1)
        DBSession.commit()
        new_ticket2 = Ticket(**self.kwargs)
        DBSession.add(new_ticket2)
        DBSession.commit()
        self.test_ticket.related_tickets = [new_ticket1, new_ticket2]

    def test_related_ticket_attribute_will_not_accept_self(self):
        with pytest.raises(ValueError) as cm:
            self.test_ticket.related_tickets = [self.test_ticket]
        assert (str(cm.value) == 'Ticket.related_ticket attribute can not have itself in the list')

    # --- misc attributes ----------------------------------------------

    def test_priority_argument_is_skipped_will_set_it_to_zero(self):
        # 'TRIVIAL' is presumably the zero/lowest priority the name refers to.
        from stalker import Ticket
        self.kwargs.pop('priority')
        new_ticket = Ticket(**self.kwargs)
        assert (new_ticket.priority == 'TRIVIAL')

    def test_comments_attribute_is_synonym_for_notes_attribute(self):
        """`comments` and `notes` must refer to the same collection."""
        from stalker import Note
        note1 = Note(name='Test Note 1', content='Test note 1')
        note2 = Note(name='Test Note 2', content='Test note 2')
        self.test_ticket.comments.append(note1)
        self.test_ticket.comments.append(note2)
        assert (note1 in self.test_ticket.notes)
        assert (note2 in self.test_ticket.notes)
        self.test_ticket.notes.remove(note1)
        assert (note1 not in self.test_ticket.comments)
        self.test_ticket.notes.remove(note2)
        assert (note2 not in self.test_ticket.comments)

    def test_reported_by_attribute_is_synonym_of_created_by(self):
        from stalker import User
        user1 = User(name='user1', login='user1', password='secret', email='')
        self.test_ticket.reported_by = user1
        assert (user1 == self.test_ticket.created_by)

    def test_status_for_newly_created_tickets_will_be_NEW_when_skipped(self):
        from stalker import Ticket
        new_ticket = Ticket(**self.kwargs)
        assert (new_ticket.status == self.status_new)

    # --- project argument ---------------------------------------------

    def test_project_argument_is_skipped(self):
        from stalker import Ticket
        self.kwargs.pop('project')
        with pytest.raises(TypeError) as cm:
            Ticket(**self.kwargs)
        assert (str(cm.value) == 'Ticket.project should be an instance of stalker.models.project.Project, not NoneType')

    def test_project_argument_is_None(self):
        from stalker import Ticket
        self.kwargs['project'] = None
        with pytest.raises(TypeError) as cm:
            Ticket(**self.kwargs)
        assert (str(cm.value) == 'Ticket.project should be an instance of stalker.models.project.Project, not NoneType')

    def test_project_argument_accepts_Project_instances_only(self):
        from stalker import Ticket
        self.kwargs['project'] = 'Not a Project instance'
        with pytest.raises(TypeError) as cm:
            Ticket(**self.kwargs)
        assert (str(cm.value) == 'Ticket.project should be an instance of stalker.models.project.Project, not str')

    def test_project_argument_is_working_properly(self):
        from stalker import Ticket
        self.kwargs['project'] = self.test_project
        new_ticket = Ticket(**self.kwargs)
        assert (new_ticket.project == self.test_project)

    def test_project_attribute_is_read_only(self):
        with pytest.raises(AttributeError) as cm:
            self.test_ticket.project = self.test_project
        assert (str(cm.value) == "can't set attribute")

    # --- resolve() transitions ----------------------------------------

    def test_resolve_method_will_change_the_status_from_New_to_Closed_and_creates_a_log(self):
        assert (self.test_ticket.status == self.status_new)
        ticket_log = self.test_ticket.resolve()
        assert (self.test_ticket.status == self.status_closed)
        assert (ticket_log.from_status == self.status_new)
        assert (ticket_log.to_status == self.status_closed)
        assert (ticket_log.action == 'resolve')

    def test_resolve_method_will_change_the_status_from_Accepted_to_Closed(self):
        self.test_ticket.status = self.status_accepted
        assert (self.test_ticket.status == self.status_accepted)
        ticket_log = self.test_ticket.resolve()
        assert (self.test_ticket.status == self.status_closed)
        assert (ticket_log.from_status == self.status_accepted)
        assert (ticket_log.to_status == self.status_closed)
        assert (ticket_log.action == 'resolve')

    def test_resolve_method_will_change_the_status_from_Assigned_to_Closed(self):
        self.test_ticket.status = self.status_assigned
        assert (self.test_ticket.status == self.status_assigned)
        ticket_log = self.test_ticket.resolve()
        assert (self.test_ticket.status == self.status_closed)
        assert (ticket_log.from_status == self.status_assigned)
        assert (ticket_log.to_status == self.status_closed)
        assert (ticket_log.action == 'resolve')

    def test_resolve_method_will_change_the_status_from_Reopened_to_Closed(self):
        self.test_ticket.status = self.status_reopened
        assert (self.test_ticket.status == self.status_reopened)
        ticket_log = self.test_ticket.resolve()
        assert (self.test_ticket.status == self.status_closed)
        assert (ticket_log.from_status == self.status_reopened)
        assert (ticket_log.to_status == self.status_closed)
        assert (ticket_log.action == 'resolve')

    def test_resolve_method_will_not_change_the_status_from_Closed_to_anything(self):
        # No-op transitions return None instead of a TicketLog.
        self.test_ticket.status = self.status_closed
        assert (self.test_ticket.status == self.status_closed)
        ticket_log = self.test_ticket.resolve()
        assert (ticket_log is None)
        assert (self.test_ticket.status == self.status_closed)

    # --- reopen() transitions -----------------------------------------

    def test_reopen_method_will_not_change_the_status_from_New_to_anything(self):
        self.test_ticket.status = self.status_new
        assert (self.test_ticket.status == self.status_new)
        ticket_log = self.test_ticket.reopen()
        assert (ticket_log is None)
        assert (self.test_ticket.status == self.status_new)

    def test_reopen_method_will_not_change_the_status_from_Accepted_to_anything(self):
        self.test_ticket.status = self.status_accepted
        assert (self.test_ticket.status == self.status_accepted)
        ticket_log = self.test_ticket.reopen()
        assert (ticket_log is None)
        assert (self.test_ticket.status == self.status_accepted)

    def test_reopen_method_will_not_change_the_status_from_Assigned_to_anything(self):
        self.test_ticket.status = self.status_assigned
        assert (self.test_ticket.status == self.status_assigned)
        ticket_log = self.test_ticket.reopen()
        assert (ticket_log is None)
        assert (self.test_ticket.status == self.status_assigned)

    def test_reopen_method_will_not_change_the_status_from_Reopened_to_anything(self):
        self.test_ticket.status = self.status_reopened
        assert (self.test_ticket.status == self.status_reopened)
        ticket_log = self.test_ticket.reopen()
        assert (ticket_log is None)
        assert (self.test_ticket.status == self.status_reopened)

    def test_reopen_method_will_change_the_status_from_Closed_to_Reopened(self):
        self.test_ticket.status = self.status_closed
        assert (self.test_ticket.status == self.status_closed)
        ticket_log = self.test_ticket.reopen()
        assert (self.test_ticket.status == self.status_reopened)
        assert (ticket_log.from_status == self.status_closed)
        assert (ticket_log.to_status == self.status_reopened)
        assert (ticket_log.action == 'reopen')

    # --- accept() transitions -----------------------------------------

    def test_accept_method_will_change_the_status_from_New_to_Accepted(self):
        self.test_ticket.status = self.status_new
        assert (self.test_ticket.status == self.status_new)
        ticket_log = self.test_ticket.accept()
        assert (self.test_ticket.status == self.status_accepted)
        assert (ticket_log.from_status == self.status_new)
        assert (ticket_log.to_status == self.status_accepted)
        assert (ticket_log.action == 'accept')

    def test_accept_method_will_change_the_status_from_Accepted_to_Accepted(self):
        self.test_ticket.status = self.status_accepted
        assert (self.test_ticket.status == self.status_accepted)
        ticket_log = self.test_ticket.accept()
        assert (self.test_ticket.status == self.status_accepted)
        assert (ticket_log.from_status == self.status_accepted)
        assert (ticket_log.to_status == self.status_accepted)
        assert (ticket_log.action == 'accept')

    def test_accept_method_will_change_the_status_from_Assigned_to_Accepted(self):
        self.test_ticket.status = self.status_assigned
        assert (self.test_ticket.status == self.status_assigned)
        ticket_log = self.test_ticket.accept()
        assert (self.test_ticket.status == self.status_accepted)
        assert (ticket_log.from_status == self.status_assigned)
        assert (ticket_log.to_status == self.status_accepted)
        assert (ticket_log.action == 'accept')

    def test_accept_method_will_change_the_status_from_Reopened_to_Accepted(self):
        self.test_ticket.status = self.status_reopened
        assert (self.test_ticket.status == self.status_reopened)
        ticket_log = self.test_ticket.accept()
        assert (self.test_ticket.status == self.status_accepted)
        assert (ticket_log.from_status == self.status_reopened)
        assert (ticket_log.to_status == self.status_accepted)
        assert (ticket_log.action == 'accept')

    def test_accept_method_will_not_change_the_status_of_Closed_to_Anything(self):
        self.test_ticket.status = self.status_closed
        assert (self.test_ticket.status == self.status_closed)
        ticket_log = self.test_ticket.accept()
        assert (ticket_log is None)
        assert (self.test_ticket.status == self.status_closed)

    # --- reassign() transitions ---------------------------------------

    def test_reassign_method_will_change_the_status_from_New_to_Assigned(self):
        self.test_ticket.status = self.status_new
        assert (self.test_ticket.status == self.status_new)
        ticket_log = self.test_ticket.reassign()
        assert (self.test_ticket.status == self.status_assigned)
        assert (ticket_log.from_status == self.status_new)
        assert (ticket_log.to_status == self.status_assigned)
        assert (ticket_log.action == 'reassign')

    def test_reassign_method_will_change_the_status_from_Accepted_to_Assigned(self):
        self.test_ticket.status = self.status_accepted
        assert (self.test_ticket.status == self.status_accepted)
        ticket_log = self.test_ticket.reassign()
        assert (self.test_ticket.status == self.status_assigned)
        assert (ticket_log.from_status == self.status_accepted)
        assert (ticket_log.to_status == self.status_assigned)
        assert (ticket_log.action == 'reassign')

    def test_reassign_method_will_change_the_status_from_Assigned_to_Assigned(self):
        self.test_ticket.status = self.status_assigned
        assert (self.test_ticket.status == self.status_assigned)
        ticket_log = self.test_ticket.reassign()
        assert (self.test_ticket.status == self.status_assigned)
        assert (ticket_log.from_status == self.status_assigned)
        assert (ticket_log.to_status == self.status_assigned)
        assert (ticket_log.action == 'reassign')

    def test_reassign_method_will_change_the_status_from_Reopened_to_Assigned(self):
        self.test_ticket.status = self.status_reopened
        assert (self.test_ticket.status == self.status_reopened)
        ticket_log = self.test_ticket.reassign()
        assert (self.test_ticket.status == self.status_assigned)
        assert (ticket_log.from_status == self.status_reopened)
        assert (ticket_log.to_status == self.status_assigned)
        assert (ticket_log.action == 'reassign')

    def test_reassign_method_will_not_change_the_status_of_Closed_to_Anything(self):
        self.test_ticket.status = self.status_closed
        assert (self.test_ticket.status == self.status_closed)
        ticket_log = self.test_ticket.reassign()
        assert (ticket_log is None)
        assert (self.test_ticket.status == self.status_closed)

    # --- side effects of transitions ----------------------------------

    def test_resolve_method_will_set_the_resolution(self):
        assert (self.test_ticket.status == self.status_new)
        ticket_log = self.test_ticket.resolve(resolution='fixed')
        assert (self.test_ticket.status == self.status_closed)
        assert (ticket_log.from_status == self.status_new)
        assert (ticket_log.to_status == self.status_closed)
        assert (ticket_log.action == 'resolve')
        assert (self.test_ticket.resolution == 'fixed')

    def test_reopen_will_clear_resolution(self):
        from stalker import TicketLog
        assert (self.test_ticket.status == self.status_new)
        self.test_ticket.resolve(resolution='fixed')
        assert (self.test_ticket.resolution == 'fixed')
        ticket_log = self.test_ticket.reopen()
        assert isinstance(ticket_log, TicketLog)
        assert (self.test_ticket.resolution == '')

    def test_reassign_will_set_the_owner(self):
        from stalker import TicketLog
        assert (self.test_ticket.status == self.status_new)
        assert (self.test_ticket.owner != self.test_user)
        ticket_log = self.test_ticket.reassign(assign_to=self.test_user)
        assert isinstance(ticket_log, TicketLog)
        assert (self.test_ticket.owner == self.test_user)

    def test_accept_will_set_the_owner(self):
        from stalker import TicketLog
        assert (self.test_ticket.status == self.status_new)
        assert (self.test_ticket.owner != self.test_user)
        ticket_log = self.test_ticket.accept(created_by=self.test_user)
        assert isinstance(ticket_log, TicketLog)
        assert (self.test_ticket.owner == self.test_user)

    # --- summary -------------------------------------------------------

    def test_summary_argument_skipped(self):
        from stalker import Ticket
        try:
            self.kwargs.pop('summary')
        except KeyError:
            pass
        new_ticket = Ticket(**self.kwargs)
        assert (new_ticket.summary == '')

    def test_summary_argument_can_be_None(self):
        from stalker import Ticket
        self.kwargs['summary'] = None
        new_ticket = Ticket(**self.kwargs)
        assert (new_ticket.summary == '')

    def test_summary_attribute_can_be_set_to_None(self):
        self.test_ticket.summary = None
        assert (self.test_ticket.summary == '')

    def test_summary_argument_is_not_a_string(self):
        from stalker import Ticket
        self.kwargs['summary'] = ['not a string instance']
        with pytest.raises(TypeError) as cm:
            # NOTE(review): kwargs is passed POSITIONALLY (no **), so the dict
            # itself becomes the first argument; the asserted error message
            # (project/dict) matches that, but confirm this is intentional.
            Ticket(self.kwargs)
        assert (str(cm.value) == 'Ticket.project should be an instance of stalker.models.project.Project, not dict')

    def test_summary_attribute_is_set_to_a_value_other_than_a_string(self):
        with pytest.raises(TypeError) as cm:
            self.test_ticket.summary = ['not a string']
        assert (str(cm.value) == 'Ticket.summary should be an instance of str, not list')

    def test_summary_argument_is_working_properly(self):
        from stalker import Ticket
        test_value = 'test summary'
        self.kwargs['summary'] = test_value
        new_ticket = Ticket(**self.kwargs)
        assert (new_ticket.summary == test_value)

    def test_summary_attribute_is_working_properly(self):
        test_value = 'test_summary'
        assert (self.test_ticket.summary != test_value)
        self.test_ticket.summary = test_value
        assert (self.test_ticket.summary == test_value)
def create_address(details, county_details, state_details, doctype, ref_docname):
    """Create and insert a Billing Address document linked to *ref_docname*.

    Field values come from the *details* dict (address lines, city, zip,
    email, phone); country/state names come from their own detail dicts.
    """
    doc = frappe.new_doc('Address')
    doc.address_type = 'Billing'
    doc.address_line1 = details['address1']
    doc.address_line2 = details['address2']
    doc.city = details['city']
    doc.country = county_details['name']
    # state_details may be missing/empty; fall back to an empty state name.
    doc.state = state_details['name'] if state_details else ''
    doc.pincode = details['zip_code']
    doc.email_id = details['email']
    doc.phone = details['phone']
    # Reset any default links, then attach the single back-reference.
    doc.set('links', [])
    doc.append('links', {'link_doctype': doctype, 'link_name': ref_docname})
    doc.insert()
def test(tmpdir, tb, interface, reset, hdl, simtool, defines=None, gui=False, pytest_run=True):
    """Generate register-map RTL for *interface*/*reset*/*hdl* and simulate testbench *tb*.

    Args:
        tmpdir: working directory for generated sources and simulation.
        tb: testbench name (module and `<tb>.sv` file under test_rmap).
        interface, reset, hdl: parameters forwarded to gen_rtl; *interface*
            and *reset* also select `INTERFACE_*` / `RESET_ACTIVE` defines.
        simtool: simulator backend name.
        defines: extra preprocessor defines (optional).
        gui: run the simulator with its GUI.
        pytest_run: when True, assert the simulation passed.
    """
    # Use None instead of a mutable default ([]); an omitted argument
    # behaves exactly as before, without sharing one list across calls.
    defines = [] if (defines is None) else defines
    tb_dir = path_join(TEST_DIR, 'test_rmap')
    beh_dir = path_join(TEST_DIR, 'beh')
    sim = Simulator(name=simtool, gui=gui, cwd=tmpdir)
    sim.incdirs += [tmpdir, tb_dir, beh_dir]
    sim.sources += [path_join(tb_dir, ('%s.sv' % tb))]
    sim.sources += beh_dir.glob('*.sv')
    sim.defines += defines
    sim.top = tb
    sim.setup()
    # Generated RTL must come first in the compile order.
    src = gen_rtl(tmpdir, interface, reset, hdl)
    sim.sources = (list(src) + sim.sources)
    # bool formats as 0/1: RESET_ACTIVE=1 for active-positive resets.
    sim.defines += [('INTERFACE_%s' % interface.upper()), ('RESET_ACTIVE=%d' % ('pos' in reset))]
    sim.run()
    if pytest_run:
        assert sim.is_passed
def usage():
    """Print the command-line help banner and exit the process."""
    banner = ('%s version %s\nusage: %s [-aeqhCx] [-s server] [-p port] [-c count] [-t type] [-w wait] hostname\n\n -h --help Show this help\n -q --quiet Quiet mode: No extra information, only traceroute output.\n -T --tcp Use TCP as transport protocol\n -x --expert Print expert hints if available\n -a --asn Turn on AS# lookups for each hop encountered\n -s --server DNS server to use (default: first system resolver)\n -p --port DNS server port number (default: 53)\n -S --srcip Query source IP address (default: default interface address)\n -c --count Maximum number of hops (default: 30)\n -w --wait Maximum wait time for a reply (default: 2)\n -t --type DNS request record type (default: A)\n -C --color Print colorful output\n -e --edns Enable EDNS0 (Default: Disabled)\n' % (__progname__, __version__, __progname__))
    print(banner)
    sys.exit()
def encrypt(body):
    """Obfuscate the JSON payload embedded in *body* with a fixed substitution cipher.

    The payload is the value captured between `"encode":"` and `","` when such
    a field is present, otherwise between `"data":"` and `","`. Each character
    of every captured payload is mapped through the substitution table;
    unmapped characters pass through unchanged. The rest of *body* is
    returned untouched.

    Fixes over the previous version:
    - the pattern choice tested `findall(body) is False`, which is never true
      (findall returns a list), so the "encode" branch was unreachable;
    - multi-character payloads were looked up whole in the single-character
      table (so nothing was ever translated) and `re.sub` with a fixed
      replacement string wrote the concatenation of ALL matches over each
      match; a per-match callable with str.translate does it correctly;
    - removed a leftover debug print.
    """
    enc = {'0': '7', '1': '1', '2': 'u', '3': 'N', '4': 'K', '5': 'J', '6': 'M', '7': '9', '8': "'", '9': 'm', '!': 'P', '%': '/', "'": 'n', '(': 'A', ')': 'E', '*': 's', '+': '+', '-': 'f', '.': 'q', 'A': 'O', 'B': 'V', 'C': 't', 'D': 'T', 'E': 'a', 'F': 'x', 'G': 'H', 'H': 'r', 'I': 'c', 'J': 'v', 'K': 'l', 'L': '8', 'M': 'F', 'N': '3', 'O': 'o', 'P': 'L', 'Q': 'Y', 'R': 'j', 'S': 'W', 'T': '*', 'U': 'z', 'V': 'Z', 'W': '!', 'X': 'B', 'Y': ')', 'Z': 'U', 'a': '(', 'b': '~', 'c': 'i', 'd': 'h', 'e': 'p', 'f': '_', 'g': '-', 'h': 'I', 'i': 'R', 'j': '.', 'k': 'G', 'l': 'S', 'm': 'd', 'n': '6', 'o': 'w', 'p': '5', 'q': '0', 'r': '4', 's': 'D', 't': 'k', 'u': 'Q', 'v': 'g', 'w': 'b', 'x': 'C', 'y': '2', 'z': 'X', '~': 'e', '_': 'y'}
    encode_pattern = re.compile('(?<="encode":")(.*?)(?=",")')
    data_pattern = re.compile('(?<="data":")(.*?)(?=",")')
    # Prefer the "encode" payload when present, else fall back to "data".
    pattern = encode_pattern if encode_pattern.search(body) else data_pattern
    table = str.maketrans(enc)
    # Translate each captured payload character-by-character, in place.
    return pattern.sub(lambda m: m.group(0).translate(table), body)
class TraitInstance(TraitHandler):
    """TraitHandler that validates a value is an instance of a given class.

    The class may be given directly or as a (possibly dotted) string name
    that is resolved lazily on first validation via sys.modules /
    importlib.  Optionally accepts None as a valid value.
    """

    def __init__(self, aClass, allow_none=True, module=''):
        # *module* is the default module used to resolve bare class names.
        self._allow_none = allow_none
        self.module = module
        if isinstance(aClass, str):
            # Deferred: resolved later by resolve_class(); no fast validator yet.
            self.aClass = aClass
        else:
            # Accept an instance in place of its class.
            if (not isinstance(aClass, type)):
                aClass = aClass.__class__
            self.aClass = aClass
            self.set_fast_validate()

    def allow_none(self):
        """Switch the handler to accept None (rebuilds the fast validator)."""
        self._allow_none = True
        if hasattr(self, 'fast_validate'):
            self.set_fast_validate()

    def set_fast_validate(self):
        """Build the C-level fast-validation tuple for the current settings."""
        fast_validate = [ValidateTrait.instance, self.aClass]
        if self._allow_none:
            fast_validate = [ValidateTrait.instance, None, self.aClass]
        if (self.aClass in TypeTypes):
            fast_validate[0] = ValidateTrait.type
        self.fast_validate = tuple(fast_validate)

    def validate(self, object, name, value):
        """Return *value* if it is a valid instance, else report an error.

        NOTE: *object* intentionally shadows the builtin (traits API name).
        """
        if (value is None):
            if self._allow_none:
                return value
            else:
                self.error(object, name, value)
        if isinstance(self.aClass, str):
            # First use with a string class name: resolve it now.
            self.resolve_class(object, name, value)
        if isinstance(value, self.aClass):
            return value
        self.error(object, name, value)

    def info(self):
        """Human-readable description of the accepted value type."""
        aClass = self.aClass
        if (type(aClass) is not str):
            aClass = aClass.__name__
        result = class_of(aClass)
        if self._allow_none:
            return (result + ' or None')
        return result

    def resolve_class(self, object, name, value):
        """Resolve a string class name to the real class and install the fast validator."""
        aClass = self.validate_class(self.find_class(self.aClass))
        if (aClass is None):
            self.error(object, name, value)
        self.aClass = aClass
        self.set_fast_validate()
        # Install the fast validator on the trait itself (or its item trait
        # when this handler is nested inside a container handler).
        trait = object.base_trait(name)
        handler = trait.handler
        if ((handler is not self) and hasattr(handler, 'item_trait')):
            trait = handler.item_trait
        trait.set_validate(self.fast_validate)

    def find_class(self, klass):
        """Look up class *klass* (optionally dotted) and return it, or None."""
        module = self.module
        col = klass.rfind('.')
        if (col >= 0):
            # Dotted name: split into module path and bare class name.
            module = klass[:col]
            klass = klass[(col + 1):]
        theClass = getattr(sys.modules.get(module), klass, None)
        if ((theClass is None) and (col >= 0)):
            # Module not imported yet: best-effort import, swallow failures.
            try:
                mod = import_module(module)
                theClass = getattr(mod, klass, None)
            except Exception:
                pass
        return theClass

    def validate_class(self, aClass):
        """Hook for subclasses to vet/replace a resolved class; identity here."""
        return aClass

    def create_default_value(self, *args, **kw):
        """Instantiate the (possibly string-named) class in args[0] with the remaining arguments."""
        aClass = args[0]
        if isinstance(aClass, str):
            aClass = self.validate_class(self.find_class(aClass))
            if (aClass is None):
                raise TraitError(('Unable to locate class: ' + args[0]))
        return aClass(*args[1:], **kw)

    def get_editor(self, trait):
        """Return (and lazily cache) an InstanceEditor for UI use.

        NOTE(review): assumes `self.editor` already exists (presumably set to
        None by a base class) -- confirm against TraitHandler.
        """
        if (self.editor is None):
            from traitsui.api import InstanceEditor
            self.editor = InstanceEditor(label=(trait.label or ''), view=(trait.view or ''), kind=(trait.kind or 'live'))
        return self.editor
class ExperimentLogger(ABC):
    """Abstract interface for experiment-tracking backends.

    NOTE(review): accessor-style methods (with_media_logging, save_dir, name,
    group_separator) look like they may have been @property in the original
    source -- confirm before calling them as plain methods.
    """

    def log_hyperparams(self, params: tp.Union[(tp.Dict[(str, tp.Any)], Namespace)], metrics: tp.Optional[dict]=None) -> None:
        """Record hyperparameters (and optionally associated metrics)."""
        ...

    def log_metrics(self, prefix: tp.Union[(str, tp.List[str])], metrics: dict, step: tp.Optional[int]=None) -> None:
        """Record scalar metrics under *prefix*, optionally at a given step."""
        ...

    def log_audio(self, prefix: tp.Union[(str, tp.List[str])], key: str, audio: tp.Any, sample_rate: int, step: tp.Optional[int]=None, **kwargs: tp.Any) -> None:
        """Record an audio sample."""
        ...

    def log_image(self, prefix: tp.Union[(str, tp.List[str])], key: str, image: tp.Any, step: tp.Optional[int]=None, **kwargs: tp.Any) -> None:
        """Record an image; optional capability -- raises by default."""
        raise NotImplementedError

    def log_text(self, prefix: tp.Union[(str, tp.List[str])], key: str, text: str, step: tp.Optional[int]=None, **kwargs: tp.Any) -> None:
        """Record a text artifact; optional capability -- raises by default."""
        raise NotImplementedError

    def with_media_logging(self) -> bool:
        """Whether media (audio/image/text) logging is enabled."""
        ...

    def save_dir(self) -> tp.Optional[str]:
        """Local directory the backend saves to, if any."""
        ...

    def name(self) -> str:
        """Backend/experiment name."""
        ...

    def group_separator(self) -> str:
        """Separator used when joining prefix parts into a metric name."""
        return '/'
class OptionSeriesTimelineSonificationDefaultspeechoptions(Options):
    """Generated wrapper for the `defaultSpeechOptions` sonification settings.

    NOTE(review): each scalar option appears as a same-named getter/setter
    pair; presumably @property/@setter decorators were present in the
    generated original and lost in extraction -- confirm before relying on
    the getter forms at runtime.
    """

    def activeWhen(self) -> 'OptionSeriesTimelineSonificationDefaultspeechoptionsActivewhen':
        """Sub-options controlling when this track is active."""
        return self._config_sub_data('activeWhen', OptionSeriesTimelineSonificationDefaultspeechoptionsActivewhen)

    def language(self):
        """Speech language; defaults to 'en-US'."""
        return self._config_get('en-US')

    def language(self, text: str):
        self._config(text, js_type=False)

    def mapping(self) -> 'OptionSeriesTimelineSonificationDefaultspeechoptionsMapping':
        """Sub-options mapping data values to speech parameters."""
        return self._config_sub_data('mapping', OptionSeriesTimelineSonificationDefaultspeechoptionsMapping)

    def pointGrouping(self) -> 'OptionSeriesTimelineSonificationDefaultspeechoptionsPointgrouping':
        """Sub-options for grouping nearby points."""
        return self._config_sub_data('pointGrouping', OptionSeriesTimelineSonificationDefaultspeechoptionsPointgrouping)

    def preferredVoice(self):
        """Preferred synthesis voice name; no default."""
        return self._config_get(None)

    def preferredVoice(self, text: str):
        self._config(text, js_type=False)

    def showPlayMarker(self):
        """Whether to show the play marker; defaults to True."""
        return self._config_get(True)

    def showPlayMarker(self, flag: bool):
        self._config(flag, js_type=False)

    def type(self):
        """Track type; defaults to 'speech'."""
        return self._config_get('speech')

    def type(self, text: str):
        self._config(text, js_type=False)
def get_int_column_roots(grid):
    """Yield (in sorted order) names of INT tiles whose +1-Y neighbour is not an INT tile.

    HCLK neighbours are skipped entirely; any other neighbour type must be
    one of the known column-terminating tile types.
    """
    for name in sorted(grid.tiles()):
        location = grid.loc_of_tilename(name)
        if grid.gridinfo_at_loc(location).tile_type not in INT_TILE_TYPES:
            continue
        # Tile type at the next grid row (grid_y + 1).
        neighbor_type = grid.gridinfo_at_loc((location.grid_x, (location.grid_y + 1))).tile_type
        if neighbor_type in INT_TILE_TYPES:
            continue
        if neighbor_type in HCLK_TILE_TYPES:
            continue
        assert (neighbor_type in ['B_TERM_INT', 'BRKH_INT', 'BRKH_B_TERM_INT']), neighbor_type
        yield name
class Text(MixHtmlState.HtmlStates, Html.Html):
    """A styled <div> text component with click/goto/typewriter helpers.

    NOTE(review): same-named getter/setter pairs (val) and accessor methods
    (dom, options) look like stripped @property definitions -- confirm
    against the original source.
    """

    name = 'Text'
    tag = 'div'
    _option_cls = OptText.OptionsText

    def __init__(self, page: primitives.PageModel, text: str, color: str, align: str, width, height, html_code: str, tooltip: str, options, helper: str, profile):
        super(Text, self).__init__(page, text, css_attrs={'color': color, 'width': width, 'height': height}, html_code=html_code, profile=profile, options=options)
        self.add_helper(helper)
        self.css({'text-align': align})
        if (tooltip is not None):
            self.tooltip(tooltip)

    def click(self, js_funcs: Union[(list, str)], profile: Optional[Union[(bool, dict)]]=None, source_event: Optional[str]=None, on_ready: bool=False):
        """Attach click handlers; with a 'data-group' attr, enforce single selection within the group.

        NOTE(review): the grouped branch concatenates to js_funcs with `+`,
        which assumes a list -- a bare string argument would concatenate
        per-character; confirm callers always pass a list here.
        """
        self.style.css.cursor = 'pointer'
        if ('data-group' in self.attr):
            # Deselect every member of the group, then select this one.
            return super(Text, self).click((js_funcs + [("document.querySelectorAll('[data-group=%s]').forEach(function(dom){dom.classList.remove('%s')})" % (self.attr['data-group'], self.dom.classList.style_select)), self.dom.classList.select()]), profile, source_event, on_ready)
        return super(Text, self).click(js_funcs, profile, source_event, on_ready)

    def goto(self, url: str, js_funcs: Union[(list, str)]=None, profile: Optional[Union[(bool, dict)]]=None, target: str='_blank', source_event: Optional[str]=None, on_ready: bool=False):
        """Open *url* (new tab by default) on click, after any extra handlers."""
        js_funcs = (js_funcs or [])
        if (not isinstance(js_funcs, list)):
            js_funcs = [js_funcs]
        js_funcs.append(self.js.location.open_new_tab(url, target))
        return self.click(js_funcs, profile, source_event, on_ready)

    def val(self):
        return self._vals

    def val(self, val):
        self._vals = val

    def dom(self) -> JsHtml.JsHtmlRich:
        """Lazily-created JS DOM wrapper for this component."""
        if (self._dom is None):
            self._dom = JsHtml.JsHtmlRich(self, page=self.page)
        return self._dom

    def options(self) -> OptText.OptionsText:
        return super().options

    def editable(self):
        """Make the text editable on double click, read-only again on blur."""
        self.style.add_classes.text.content_editable()
        self.set_attrs({'contenteditable': 'false', 'ondblclick': "this.contentEditable=true;this.className='inEdit'", 'onblur': "this.contentEditable=false;this.className=''"})
        return self

    def write(self, timer: int=50):
        """Typewriter effect: reveal the text one character every *timer* ms.

        Stores the full text in window.<code>_writer, then a setInterval
        appends one character per tick until the end, at which point the
        interval clears itself.
        """
        value = self.val
        self.val = ''
        self.page.body.onReady([self.page.js.objects.string(value, js_code=('%s_writer' % self.htmlCode), set_var=True), self.page.js.objects.number(0, js_code=('%s_pos' % self.htmlCode), set_var=True), self.build(''), self.page.js.window.setInterval([self.page.js.if_((self.page.js.objects.number.get(('window.%s_pos' % self.htmlCode)) < self.page.js.objects.string.get(('window.%s_writer' % self.htmlCode)).length), [self.page.js.objects.number((self.page.js.objects.number.get(('window.%s_pos' % self.htmlCode)) + 1), js_code=('window.%s_pos' % self.htmlCode), set_var=True), self.dom.append(self.page.js.objects.string.get(('window.%s_writer' % self.htmlCode)).charAt(self.page.js.objects.number.get(('window.%s_pos' % self.htmlCode))), new_line=False)]).else_(self.page.js.window.clearInterval(('%s_interval' % self.htmlCode)))], ('%s_interval' % self.htmlCode), timer)])
        return self

    def __str__(self):
        """Render the component HTML; empty body when markdown is built client-side."""
        self.onReady([self.dom.setAttribute('data-content', self.dom.content)])
        if self.options.markdown:
            # Content is produced by the JS builder, so the tag starts empty.
            self.page.properties.js.add_builders(self.refresh())
            return ('<%s %s></%s>%s' % (self.tag, self.get_attrs(css_class_names=self.style.get_classes()), self.tag, self.helper))
        return ('<%s %s>%s</%s>%s' % (self.tag, self.get_attrs(css_class_names=self.style.get_classes()), self.content, self.tag, self.helper))
class DB(Gbfs):
    """Deutsche Bahn GBFS feed authenticated via DB API-portal headers."""

    # Feed requires credentials (client id/secret) on every request.
    authed = True
    meta = {'company': ['Deutsche Bahn AG'], 'system': 'deutschebahn'}

    def __init__(self, tag, meta, key, provider, bbox=None):
        # *key* carries the portal credentials: 'client_id' / 'client_secret'.
        self.key = key
        super(DB, self).__init__(tag, meta, FEED_URL.format(provider=provider), bbox=bbox)

    def auth_headers(self):
        """Headers identifying this client to the DB API portal."""
        return {'DB-Client-Id': self.key['client_id'], 'DB-Api-Key': self.key['client_secret'], 'accept': 'application/json'}

    def update(self, scraper=None):
        scraper = (scraper or PyBikesScraper())
        # NOTE(review): `self.auth_headers` is passed UNCALLED, handing
        # dict.update a bound method. Presumably `auth_headers` was a
        # @property in the original (decorator lost in extraction) or the
        # call parentheses are missing -- confirm against upstream pybikes.
        scraper.headers.update(self.auth_headers)
        super(DB, self).update(scraper)
class OptionPlotoptionsLollipopSonificationDefaultinstrumentoptions(Options):
    """Generated wrapper for the `defaultInstrumentOptions` sonification settings.

    NOTE(review): each scalar option appears as a same-named getter/setter
    pair; presumably @property/@setter decorators were present in the
    generated original and lost in extraction -- confirm before relying on
    the getter forms at runtime.
    """

    def activeWhen(self) -> 'OptionPlotoptionsLollipopSonificationDefaultinstrumentoptionsActivewhen':
        """Sub-options controlling when this track is active."""
        return self._config_sub_data('activeWhen', OptionPlotoptionsLollipopSonificationDefaultinstrumentoptionsActivewhen)

    def instrument(self):
        """Instrument preset; defaults to 'piano'."""
        return self._config_get('piano')

    def instrument(self, text: str):
        self._config(text, js_type=False)

    def mapping(self) -> 'OptionPlotoptionsLollipopSonificationDefaultinstrumentoptionsMapping':
        """Sub-options mapping data values to instrument parameters."""
        return self._config_sub_data('mapping', OptionPlotoptionsLollipopSonificationDefaultinstrumentoptionsMapping)

    def midiName(self):
        """MIDI instrument name; no default."""
        return self._config_get(None)

    def midiName(self, text: str):
        self._config(text, js_type=False)

    def pointGrouping(self) -> 'OptionPlotoptionsLollipopSonificationDefaultinstrumentoptionsPointgrouping':
        """Sub-options for grouping nearby points."""
        return self._config_sub_data('pointGrouping', OptionPlotoptionsLollipopSonificationDefaultinstrumentoptionsPointgrouping)

    def roundToMusicalNotes(self):
        """Whether pitches snap to musical notes; defaults to True."""
        return self._config_get(True)

    def roundToMusicalNotes(self, flag: bool):
        self._config(flag, js_type=False)

    def showPlayMarker(self):
        """Whether to show the play marker; defaults to True."""
        return self._config_get(True)

    def showPlayMarker(self, flag: bool):
        self._config(flag, js_type=False)

    def type(self):
        """Track type; defaults to 'instrument'."""
        return self._config_get('instrument')

    def type(self, text: str):
        self._config(text, js_type=False)
def find_available_port(custom_host: Optional[str]=None, custom_range: Optional[range]=None, will_close_then_reopen_socket: bool=False) -> Tuple[(str, int, socket.socket)]:
    """Find a free port on the host, trying candidates in random order.

    Returns a ``(host, port, bound_socket)`` triple; raises
    ``NoPortsInRangeException`` when every candidate port is already in use.
    """
    host = _get_ip_address() if custom_host is None else custom_host
    port_range = range(51820, (51840 + 1)) if custom_range is None else custom_range
    # A degenerate range (start == stop) is treated as "exactly that port".
    if port_range.start == port_range.stop:
        candidates = list(range(port_range.start, port_range.stop + 1))
    else:
        candidates = list(range(port_range.start, port_range.stop))
    random.shuffle(candidates)
    for candidate in candidates:
        try:
            bound = _bind_socket(host=host, port=candidate, will_close_then_reopen_socket=will_close_then_reopen_socket)
        except PortAlreadyInUseException:
            continue
        return (host, candidate, bound)
    raise NoPortsInRangeException(f'No available ports in range {port_range}.')
class TestCanRaiseHTTPError():
    """Tests for the ``can_raise_http_error`` wrapper.

    NOTE(review): this block arrived garbled (identifiers truncated and
    decorator markers stripped); the reconstruction below follows the visible
    structure -- three wrapped callables and one test per behaviour -- and
    should be verified against the project's history.
    """

    @pytest.fixture(autouse=True)
    def patch_fixture(self):
        # NOTE(review): the original autouse fixture body was lost in transit;
        # it appeared to be a no-op placeholder.
        pass

    def setup_method(self):
        # The three callables the tests wrap: success, HTTP failure, other failure.
        def no_error():
            pass

        def raise_http_error():
            raise HTTPError('HTTP Error')

        def raise_generic_error():
            raise ValueError('Not HTTP Error')

        self.no_error = no_error
        self.raise_http_error = raise_http_error
        self.raise_generic_error = raise_generic_error

    def test_no_error(self):
        # A callable that does not raise passes through untouched.
        wrapped = can_raise_http_error(self.no_error)
        wrapped()

    def test_http_error(self):
        # HTTP errors must be translated into FintocError instances.
        wrapped = can_raise_http_error(self.raise_http_error)
        with pytest.raises(Exception) as execinfo:
            wrapped()
        assert isinstance(execinfo.value, FintocError)

    def test_generic_error(self):
        # Non-HTTP exceptions must propagate unchanged.
        wrapped = can_raise_http_error(self.raise_generic_error)
        with pytest.raises(Exception) as execinfo:
            wrapped()
        assert (not isinstance(execinfo.value, FintocError))
class MyBackgroundMainNode(Node):
    """Node that writes its configured string from both a background coroutine
    and the main-thread entry point, using a two-party barrier so both writes
    complete before the main side terminates.

    NOTE(review): in this framework such methods are usually decorated
    (background/main markers); decorators may have been stripped from this
    dump -- verify.
    """

    config: MyBackgroundMainConfig

    def setup(self) -> None:
        # Two parties: the background task and the main thread.
        self.barrier = threading.Barrier(2)

    async def my_background(self) -> None:
        self._write_payload(self.config.background_filename)
        self.barrier.wait()

    def my_main(self) -> None:
        self._write_payload(self.config.main_filename)
        self.barrier.wait()
        raise NormalTermination()

    def _write_payload(self, filename: str) -> None:
        # Dump the configured payload string into ``filename``.
        with open(filename, 'w') as f:
            f.write(self.config.string)
def create_production_storage(fuel_type: str, production_point: dict[(str, float)], negative_threshold: float) -> tuple[((ProductionMix | None), (StorageMix | None))]:
    """Turn one raw production data point into a production or a storage mix.

    A negative hydro value is interpreted as pumped storage; every other value
    is recorded as production, with the third ``add_value`` argument flagging
    values above ``negative_threshold``.
    """
    value = production_point['value']
    # Negative hydro production means the plant is pumping, i.e. storing energy.
    if fuel_type == 'hydro' and value < 0:
        storage = StorageMix()
        storage.add_value('hydro', abs(value))
        return (None, storage)
    production = ProductionMix()
    production.add_value(fuel_type, value, value > negative_threshold)
    return (production, None)
def save_config(name, config: Dict, overwrite=False):
    """Serialize ``config`` as an INI file under the package's ``config`` directory.

    Raises ``ValueError`` when the target already exists and ``overwrite`` is
    not set.
    """
    target = Path(__file__).parent.parent / 'config' / name
    if target.exists() and not overwrite:
        raise ValueError(f"pass overwrite=True to overwrite existing config: '{target}'")
    parser = configparser.ConfigParser()
    parser.read_dict(config)
    with open(target, 'w') as configfile:
        parser.write(configfile)
def upgrade():
    """Alembic migration: create the sandwich-detection tables and their indexes."""
    # One row per detected sandwich attack.
    op.create_table('sandwiches',
        sa.Column('id', sa.String(256), primary_key=True),
        sa.Column('created_at', sa.TIMESTAMP, server_default=sa.func.now()),
        sa.Column('block_number', sa.Numeric, nullable=False),
        sa.Column('sandwicher_address', sa.String(256), nullable=False),
        sa.Column('frontrun_swap_transaction_hash', sa.String(256), nullable=False),
        sa.Column('frontrun_swap_trace_address', sa.ARRAY(sa.Integer), nullable=False),
        sa.Column('backrun_swap_transaction_hash', sa.String(256), nullable=False),
        sa.Column('backrun_swap_trace_address', sa.ARRAY(sa.Integer), nullable=False))
    # Lookup indexes for finding a sandwich by its frontrun / backrun swap.
    op.create_index('ik_sandwiches_frontrun', 'sandwiches', ['block_number', 'frontrun_swap_transaction_hash', 'frontrun_swap_trace_address'])
    op.create_index('ik_sandwiches_backrun', 'sandwiches', ['block_number', 'backrun_swap_transaction_hash', 'backrun_swap_trace_address'])
    # Victim swaps; rows are deleted together with their parent sandwich.
    op.create_table('sandwiched_swaps',
        sa.Column('created_at', sa.TIMESTAMP, server_default=sa.func.now()),
        sa.Column('sandwich_id', sa.String(1024), primary_key=True),
        sa.Column('block_number', sa.Numeric, primary_key=True),
        sa.Column('transaction_hash', sa.String(66), primary_key=True),
        sa.Column('trace_address', sa.ARRAY(sa.Integer), primary_key=True),
        sa.ForeignKeyConstraint(['sandwich_id'], ['sandwiches.id'], ondelete='CASCADE'))
    op.create_index('ik_sandwiched_swaps_secondary', 'sandwiched_swaps', ['block_number', 'transaction_hash', 'trace_address'])
@dataclass
class Trial():
    """One experimental trial: a condition plus its stimuli and results.

    NOTE(review): ``@dataclass`` restored -- the class relies on
    ``field(default_factory=...)`` and ``__post_init__``, which only work
    under the dataclass decorator; confirm ``dataclass`` is imported at the
    top of the file.
    """
    condition: Condition
    stimuli: typing.List[Stimulus]
    # Results are excluded from equality comparisons.
    results: RESULTS_T = field(default_factory=dict, compare=False)
    # Defaults to ``id(self)`` when not supplied; excluded from comparisons.
    identifier: typing.Optional[int] = field(default=None, compare=False)

    def __post_init__(self) -> None:
        # Give every stimulus a back-reference to its owning trial.
        for stimulus in self.stimuli:
            stimulus.trial = self
        if (self.identifier is None):
            self.identifier = id(self)

    def end(self) -> None:
        """End every stimulus belonging to this trial."""
        for stimulus in self.stimuli:
            stimulus.end()
def checkTcpMssClamp(tcp_mss_clamp_value):
    """Validate a TCP MSS clamp setting.

    Accepted: any falsy value, the literal strings 'None' and 'pmtu', or a
    numeric string of at least 536 (the minimum IPv4 MSS).
    """
    if not tcp_mss_clamp_value:
        return True
    if tcp_mss_clamp_value.isdigit():
        # Numeric clamp values below 536 bytes are rejected.
        return int(tcp_mss_clamp_value) >= 536
    # Non-numeric values: only the two keywords are recognised.
    return tcp_mss_clamp_value in ('None', 'pmtu')
def extractAlexanderwalesCom(item):
    """Map a post from alexanderwales.com onto a release message.

    Returns ``None`` for non-chapter posts and previews, a release message for
    recognised series, and ``False`` when no tag matches.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if not (chp or vol) or 'preview' in item['title'].lower():
        return None
    # (tag to look for, series name, translation type)
    series_map = [
        ('PRC', 'PRC', 'translated'),
        ('worth the candle', 'worth the candle', 'oel'),
        ('the dark wizard of donkerk', 'the dark wizard of donkerk', 'oel'),
        ('Glimwarden', 'Glimwarden', 'oel'),
        ('Shadows of the Limelight', 'Shadows of the Limelight', 'oel'),
        ('The Last Christmas', 'The Last Christmas', 'oel'),
        ('a bluer shade of white', 'a bluer shade of white', 'oel'),
        ('Loiterous', 'Loiterous', 'oel'),
    ]
    for tag_name, series_name, tl_type in series_map:
        if tag_name in item['tags']:
            return buildReleaseMessageWithType(item, series_name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
def get_filesystem_read_write_file_details(io, metadata, event, details_io, extra_detail_io):
    """Parse the detail block of a ReadFile/WriteFile event into ``event.details``."""
    event.category = ('Read' if (event.operation == 'ReadFile') else 'Write')
    details_io.seek(4, 1)  # skip unknown field
    io_flags_and_priority = read_u32(details_io)
    # NOTE(review): the flag mask constant was lost from the mangled source
    # (`& )`). Priority occupies bits 17-19, so the flags are assumed to live
    # in the low 17 bits -- confirm the exact mask against the upstream parser.
    io_flags = (io_flags_and_priority & 0x1FFFF)
    priority = ((io_flags_and_priority >> 17) & 7)
    details_io.seek(4, 1)
    length = read_u32(details_io)
    # 64-bit captures carry extra padding around the pointer-sized fields.
    if (metadata.sizeof_pvoid == 8):
        details_io.seek(4, 1)
    details_io.seek(4, 1)
    if (metadata.sizeof_pvoid == 8):
        details_io.seek(4, 1)
    offset = read_s64(details_io)
    event.details['Offset'] = offset
    if extra_detail_io:
        # When present, the extra-detail block carries the real transfer length.
        length = read_u32(extra_detail_io)
    event.details['Length'] = length
    if (io_flags != 0):
        event.details['I/O Flags'] = get_filesysyem_io_flags(io_flags)
    if (priority != 0):
        event.details['Priority'] = FilesystemPriority.get(priority, '0x{:x}'.format(priority))
class OkLCh(base.OkLCh):
    """CSS-flavoured OkLCh colour space: serialises via ``oklch()`` and parses CSS syntax."""

    def to_string(self, parent: Color, *, alpha: (bool | None)=None, precision: (int | None)=None, fit: ((bool | str) | dict[(str, Any)])=True, none: bool=False, color: bool=False, percent: (bool | Sequence[bool])=False, **kwargs: Any) -> str:
        """Serialise this colour as a CSS ``oklch()`` function string."""
        options = dict(alpha=alpha, precision=precision, fit=fit, none=none, color=color, percent=percent)
        return serialize.serialize_css(parent, func='oklch', **options)

    def match(self, string: str, start: int=0, fullmatch: bool=True) -> (tuple[(tuple[(Vector, float)], int)] | None):
        """Parse a CSS oklch colour from ``string`` starting at ``start``."""
        return parse.parse_css(self, string, start, fullmatch)
class TestsTOML():
    """Mapping of test-case UUIDs to parsed ``TestCaseTOML`` records."""

    __init__ = _custom_dataclass_init
    # Parsed cases keyed by their UUID.
    cases: Dict[(str, TestCaseTOML)]

    # NOTE(review): ``load`` takes ``cls`` and instantiates it, so it was
    # clearly written as a classmethod; the stripped decorator is restored.
    @classmethod
    def load(cls, toml_path: Path):
        """Parse ``toml_path``, keeping every case not marked ``include = false``."""
        with toml_path.open('rb') as f:
            data = tomllib.load(f)
        return cls({uuid: TestCaseTOML(uuid, *opts) for (uuid, opts) in data.items() if (opts.get('include', None) is not False)})
class AttachedWindow(Gtk.Window):
    """Undecorated always-on-top window that positions itself next to a parent widget.

    The window follows the parent's toplevel as it moves or hides, and flips
    its placement horizontally/vertically when it would not fit inside the
    workarea.
    """

    # Override the default 'show' handler so the window can reposition on show.
    __gsignals__ = {'show': 'override'}

    def __init__(self, parent):
        Gtk.Window.__init__(self, Gtk.WindowType.TOPLEVEL)
        self.set_decorated(False)
        self.props.skip_taskbar_hint = True
        self.set_keep_above(True)
        # Realize now so the GdkWindow exists before restricting WM functions.
        self.realize()
        self.get_window().set_functions(Gdk.WMFunction.RESIZE)
        self.parent_widget = parent
        # Signal handler ids connected to the parent's current toplevel window.
        self.parent_window_connections = []
        parent.connect('hierarchy-changed', self._on_parent_hierarchy_changed)

    def update_location(self):
        """Move the window beside the parent widget, keeping it inside the workarea."""
        workarea = Gdk.Rectangle()
        workarea.x = workarea.y = 0
        (workarea.width, workarea.height) = get_workarea_size()
        parent_alloc = self.parent_widget.get_allocation()
        # Translate the widget allocation into absolute screen coordinates.
        toplevel_position = self.parent_widget.get_toplevel().get_window().get_position()
        parent_alloc.x += toplevel_position[0]
        parent_alloc.y += toplevel_position[1]
        alloc = self.get_allocation()
        # No room to the right: right-align with the parent's right edge.
        if ((workarea.width - parent_alloc.x) < alloc.width):
            x = ((parent_alloc.x + parent_alloc.width) - alloc.width)
        else:
            x = parent_alloc.x
        # No room below: pop up above the parent instead.
        if ((workarea.height - parent_alloc.y) < alloc.height):
            y = (parent_alloc.y - alloc.height)
        else:
            y = (parent_alloc.y + parent_alloc.height)
        self.move(x, y)

    def do_show(self):
        Gtk.Window.do_show(self)
        # Reposition every time the window becomes visible.
        self.update_location()

    def _on_parent_hierarchy_changed(self, parent_widget, previous_toplevel):
        # Disconnect handlers attached to the previous toplevel, if any.
        conns = self.parent_window_connections
        for conn in conns:
            previous_toplevel.disconnect(conn)
        conns[:] = ()
        toplevel = parent_widget.get_toplevel()
        if (not isinstance(toplevel, Gtk.Window)):
            return
        self.set_transient_for(toplevel)
        # Track movement and visibility of the new toplevel.
        conns.append(toplevel.connect('configure-event', self._on_parent_window_configure_event))
        conns.append(toplevel.connect('hide', self._on_parent_window_hide))

    def _on_parent_window_configure_event(self, _widget, _event):
        # Parent window moved or resized: follow it while visible.
        if self.props.visible:
            self.update_location()

    def _on_parent_window_hide(self, _window):
        # Hide together with the parent window.
        self.emit('hide')
class _MagiclinkReferencePattern(_MagiclinkShorthandPattern):
    """Build links for repo-provider shorthand references (issues, commits, compares).

    NOTE(review): the ``@`` characters inside the format strings had been
    stripped in transit (e.g. ``'%%s'`` where the ``user/repo@hash`` display
    format requires ``'%s@%s'``); they are restored below.
    """

    def process_issues(self, el, provider, user, repo, issue):
        """Fill ``el`` as a link to an issue ('#...') or pull request ('!...')."""
        issue_type = issue[:1]
        issue_value = issue[1:]
        if (issue_type == '#'):
            issue_link = PROVIDER_INFO[provider]['issue']
            issue_label = self.labels.get('issue', 'Issue')
            class_name = 'magiclink-issue'
        else:
            issue_link = PROVIDER_INFO[provider]['pull']
            issue_label = self.labels.get('pull', 'Pull Request')
            class_name = 'magiclink-pull'
        # Shorten the displayed text based on how much context is implicit.
        if self.my_repo:
            text = ('%s%s' % (issue_type, issue_value))
        elif self.my_user:
            text = ('%s%s%s' % (repo, issue_type, issue_value))
        else:
            text = ('%s/%s%s%s' % (user, repo, issue_type, issue_value))
        el.set('href', (issue_link % (user, repo, issue_value)))
        el.text = md_util.AtomicString(text)
        el.set('class', ('magiclink magiclink-%s %s' % (provider, class_name)))
        el.set('title', ('%s %s: %s/%s%s%s' % (PROVIDER_INFO[provider]['provider'], issue_label, user, repo, issue_type, issue_value)))

    def process_commit(self, el, provider, user, repo, commit):
        """Fill ``el`` as a link to a single commit, showing a shortened hash."""
        hash_ref = commit[0:PROVIDER_INFO[provider]['hash_size']]
        if self.my_repo:
            text = hash_ref
        elif self.my_user:
            text = ('%s@%s' % (repo, hash_ref))
        else:
            text = ('%s/%s@%s' % (user, repo, hash_ref))
        el.set('href', (PROVIDER_INFO[provider]['commit'] % (user, repo, commit)))
        el.text = md_util.AtomicString(text)
        el.set('class', ('magiclink magiclink-%s magiclink-commit' % provider))
        el.set('title', ('%s %s: %s/%s@%s' % (PROVIDER_INFO[provider]['provider'], self.labels.get('commit', 'Commit'), user, repo, hash_ref)))

    def process_compare(self, el, provider, user, repo, commit1, commit2):
        """Fill ``el`` as a link comparing two commits, showing shortened hashes."""
        hash_ref1 = commit1[0:PROVIDER_INFO[provider]['hash_size']]
        hash_ref2 = commit2[0:PROVIDER_INFO[provider]['hash_size']]
        if self.my_repo:
            text = ('%s...%s' % (hash_ref1, hash_ref2))
        elif self.my_user:
            text = ('%s@%s...%s' % (repo, hash_ref1, hash_ref2))
        else:
            text = ('%s/%s@%s...%s' % (user, repo, hash_ref1, hash_ref2))
        el.set('href', (PROVIDER_INFO[provider]['compare'] % (user, repo, commit1, commit2)))
        el.text = md_util.AtomicString(text)
        el.set('class', ('magiclink magiclink-%s magiclink-compare' % provider))
        el.set('title', ('%s %s: %s/%s@%s...%s' % (PROVIDER_INFO[provider]['provider'], self.labels.get('compare', 'Compare'), user, repo, hash_ref1, hash_ref2)))
def test_component_loading_module_not_found_error_non_framework_package(component_configuration):
    """A ModuleNotFoundError for a non-framework package must propagate unchanged."""
    missing_module = ModuleNotFoundError("No module named 'generic.package'")
    with mock.patch.object(Protocol, 'from_config', side_effect=missing_module):
        with pytest.raises(ModuleNotFoundError):
            load_component_from_config(component_configuration)
class ButtonMenuItem():
    """Python-side interface for a button menu-item component."""

    name = 'Button Menu Item'

    def __init__(self, page: primitives.PageModel, component_id: str, container: Html.Html):
        self.component_id = component_id
        self.page = page
        self.container = container
        # Lazily-created JS interface and the registered event snippets.
        self._js = None
        self._events = []

    def js(self) -> JsComponents.Menu:
        """Build (once) and return the JavaScript interface for this component."""
        if self._js is None:
            self._js = JsComponents.Menu(self.container, js_code=self.component_id, page=self.page)
        return self._js

    def on(self, event: str, js_funcs: types.JS_FUNCS_TYPES, profile: types.PROFILE_TYPE=None, source_event: Optional[str]=None, on_ready: bool=False):
        """Register a JavaScript listener for ``event`` and return the container.

        When ``on_ready`` is set, the event is also triggered once the page is
        ready.
        """
        if not isinstance(js_funcs, list):
            js_funcs = [js_funcs]
        target = source_event or self.component_id
        handler = JsUtils.jsConvertFncs(js_funcs, toStr=True, profile=profile)
        self._events.append("%s.addEventListener('%s', function (event) {%s})" % (target, event, handler))
        if on_ready:
            self.page.body.onReady([self.page.js.getElementById(self.component_id).dom.events.trigger(event)])
        return self.container

    def click(self, js_funcs: types.JS_FUNCS_TYPES, profile: types.PROFILE_TYPE=None, source_event: Optional[str]=None, on_ready: bool=False):
        """Shortcut for ``on('click', ...)``."""
        return self.on('click', js_funcs, profile, source_event, on_ready)
def entry_points(*, group: str) -> 'metadata.EntryPoints':
    """Return the installed entry points for ``group`` across Python versions.

    Python 3.10+ filters directly via the ``group`` keyword; older versions
    fall back to the ``select`` API (importlib_metadata backports) or the
    plain dict interface.
    """
    if sys.version_info >= (3, 10):
        return metadata.entry_points(group=group)
    all_groups = metadata.entry_points()
    if sys.version_info < (3, 8) and hasattr(all_groups, 'select'):
        return all_groups.select(group=group)
    return all_groups.get(group, [])
@ClassConfigurationSchema.provider(fields.Dictionary({}, description='Most client middleware has no constructor arguments, but subclasses can override this schema'))
class ClientMiddleware(object):
    """Base class for client middleware: pass-through no-ops by default.

    NOTE(review): the decorator head (``@ClassConfigurationSchema``) was
    missing from the mangled source and has been reconstructed -- verify
    against the project's configuration-schema imports.
    """

    def request(self, send_request):
        # Default: forward the request callable unchanged.
        return send_request

    def response(self, get_response):
        # Default: forward the response callable unchanged.
        return get_response
def custom_break_flow(self):
    """Return True when this instruction may break linear control flow."""
    if self.name in (conditional_branch + unconditional_branch):
        return True
    # Mnemonic prefix families that (conservatively) alter control flow.
    if self.name.startswith(('LOOP', 'RET', 'INT', 'SYS', 'CMOV', 'SBB')):
        return True
    # Remaining explicit control-transfer / trap mnemonics.
    return self.name in ['CALL', 'HLT', 'IRET', 'IRETD', 'IRETQ', 'ICEBP', 'UD2']
def _register_worker_signals(client) -> None:
    """Wire Celery worker lifecycle signals so the client shuts down cleanly."""

    def _on_worker_shutdown(*args, **kwargs) -> None:
        # Flush and close the client when a worker process exits.
        client.close()

    def _on_worker_init(*args, **kwargs) -> None:
        # Register the shutdown hook only once a worker process actually starts.
        signals.worker_process_shutdown.connect(_on_worker_shutdown, dispatch_uid='elasticapm-shutdown-worker', weak=False)

    signals.worker_init.connect(_on_worker_init, dispatch_uid='elasticapm-connect-start-threads', weak=False)
def kernel_name(op, func_attrs):
    """Render the unique kernel key for ``op`` from its cutlass tile/layout info."""
    from cutlass_lib import library
    tile_desc = op.tile_description
    rendered = KERNEL_KEY_TEMPLATE.render(
        threadblock=tile_desc.procedural_name(),
        extended_name=op.extended_name(),
        opcode_class_name=library.OpcodeClassNames[tile_desc.math_instruction.opcode_class],
        layout=op.layout_name(),
        align_ab=op.A.alignment,
        align_c=op.C.alignment,
    )
    # The template may emit newlines; the key must be a single line.
    return rendered.replace('\n', '')
@_dataloader('youtube-to-text')
class YoutubeToTextDataloader(SpeechToTextDataloader):
    """Dataloader pairing a downloaded YouTube video's audio with a transcript target.

    NOTE(review): the registration decorator's ``@`` and the ``@classmethod``
    decorators were stripped from the mangled source (both methods take
    ``cls`` and instantiate/delegate through it); they are restored here.
    """

    @classmethod
    def from_youtube(cls, source: Union[(Path, str)], target: Union[(Path, str)]) -> YoutubeToTextDataloader:
        """Build a dataloader from a single YouTube URL and its transcript target."""
        # Both optional dependencies must be installed for YouTube support.
        assert (AudioSegment is not None)
        assert (youtube_dl is not None)
        source_list = [download_youtube_video(source)]
        target_list = [target]
        return cls(source_list, target_list)

    @classmethod
    def from_args(cls, args: Namespace):
        """CLI entry point: normalise the argument types and delegate."""
        args.source_type = 'youtube'
        args.target_type = 'text'
        return cls.from_youtube(args.source, args.target)
@patch('llama_recipes.finetuning.train')
@patch('llama_recipes.finetuning.LlamaForCausalLM.from_pretrained')
@patch('llama_recipes.finetuning.LlamaTokenizer.from_pretrained')
@patch('llama_recipes.finetuning.get_preprocessed_dataset')
@patch('llama_recipes.finetuning.optim.AdamW')
@patch('llama_recipes.finetuning.StepLR')
def test_batching_strategy(step_lr, optimizer, get_dataset, tokenizer, get_model, train):
    """Each batching strategy selects the right batch sampler; 'none' raises.

    NOTE(review): the ``@patch`` decorator markers were stripped from the
    mangled source, leaving bare string statements; they are restored here.
    Decorators apply bottom-up, so ``StepLR`` maps to ``step_lr`` (first
    parameter) and ``train`` to the last -- matching the signature.
    """
    kwargs = {'batching_strategy': 'packing'}
    get_dataset.return_value = get_fake_dataset()
    main(**kwargs)
    assert (train.call_count == 1)
    (args, kwargs) = train.call_args
    (train_dataloader, eval_dataloader) = args[1:3]
    # 'packing' uses plain BatchSampler for both splits.
    assert isinstance(train_dataloader.batch_sampler, BatchSampler)
    assert isinstance(eval_dataloader.batch_sampler, BatchSampler)
    kwargs['batching_strategy'] = 'padding'
    train.reset_mock()
    main(**kwargs)
    assert (train.call_count == 1)
    (args, kwargs) = train.call_args
    (train_dataloader, eval_dataloader) = args[1:3]
    # 'padding' groups samples by length.
    assert isinstance(train_dataloader.batch_sampler, LengthBasedBatchSampler)
    assert isinstance(eval_dataloader.batch_sampler, LengthBasedBatchSampler)
    # Any unknown strategy is rejected.
    kwargs['batching_strategy'] = 'none'
    with pytest.raises(ValueError):
        main(**kwargs)
class TestSuperFencesCustomException(util.MdCase):
    """Ensure an exception raised by a custom-fence validator is not swallowed."""

    extension = ['pymdownx.superfences']
    extension_configs = {
        'pymdownx.superfences': {
            'custom_fences': [
                {
                    'name': 'test',
                    'class': 'test',
                    'format': custom_format,
                    'validator': custom_validator_except,
                },
            ],
        },
    }

    def test_custom_fail_exception(self):
        """The validator's SuperFencesException must propagate to the caller."""
        with self.assertRaises(SuperFencesException):
            self.check_markdown('\n ```test\n test\n ```\n ', '', True)
def main():
    """Correlate routed design pips with bitstream bits for CLK_BUFG tiles.

    Reads the fuzzer's candidate-pip lists and the routed design description,
    tags each candidate pip per tile as set/unset, and writes segment data.
    """
    segmk = Segmaker('design.bits')
    tiledata = {}
    pipdata = {}
    ignpip = set()
    # Load candidate pips for both the bottom and top CLK_BUFG tile types.
    with open(os.path.join(os.getenv('FUZDIR'), '..', 'piplist', 'build', 'clk_bufg', 'clk_bufg_bot_r.txt')) as f:
        for l in f:
            (tile_type, dst, src) = l.strip().split('.')
            if (tile_type not in pipdata):
                pipdata[tile_type] = []
            pipdata[tile_type].append((src, dst))
    with open(os.path.join(os.getenv('FUZDIR'), '..', 'piplist', 'build', 'clk_bufg', 'clk_bufg_top_r.txt')) as f:
        for l in f:
            (tile_type, dst, src) = l.strip().split('.')
            if (tile_type not in pipdata):
                pipdata[tile_type] = []
            pipdata[tile_type].append((src, dst))
    print('Loading tags from design.txt.')
    with open('design.txt', 'r') as f:
        for line in f:
            (tile, pip, src, dst, pnum, pdir) = line.split()
            # Only CLK_BUFG tiles are of interest; REBUF tiles are excluded.
            if (not tile.startswith('CLK_BUFG')):
                continue
            if tile.startswith('CLK_BUFG_REBUF'):
                continue
            (pip_prefix, _) = pip.split('.')
            (tile_from_pip, tile_type) = pip_prefix.split('/')
            assert (tile == tile_from_pip)
            (_, src) = src.split('/')
            (_, dst) = dst.split('/')
            pnum = int(pnum)
            pdir = int(pdir)
            if (tile not in tiledata):
                tiledata[tile] = {'type': tile_type, 'pips': set(), 'srcs': set(), 'dsts': set()}
            tiledata[tile]['pips'].add((src, dst))
            tiledata[tile]['srcs'].add(src)
            tiledata[tile]['dsts'].add(dst)
            # Bidirectional pips (pdir == 0) count as both source and destination.
            if (pdir == 0):
                tiledata[tile]['srcs'].add(dst)
                tiledata[tile]['dsts'].add(src)
            muxed_src = (re.match('^CLK_BUFG_(TOP|BOT)_R_CK_MUXED', src) is not None)
            # Ignore pips that cannot be isolated for correlation: multi-pip
            # paths, bidirectional pips, and muxed clock sources.
            if ((pnum == 1) or (pdir == 0) or muxed_src):
                ignpip.add((src, dst))
    for (tile, pips_srcs_dsts) in tiledata.items():
        tile_type = pips_srcs_dsts['type']
        pips = pips_srcs_dsts['pips']
        for (src, dst) in pipdata[tile_type]:
            if ((src, dst) in ignpip):
                pass
            elif ((src, dst) in pips):
                # Pip is used by the design: positive example.
                segmk.add_tile_tag(tile, ('%s.%s' % (dst, src)), 1)
            elif (dst not in tiledata[tile]['dsts']):
                # Destination entirely unused in this tile: safe negative example.
                segmk.add_tile_tag(tile, ('%s.%s' % (dst, src)), 0)
    segmk.compile(bitfilter=bitfilter)
    segmk.write()
def test_bad_whitelist(utils):
    """An analyser whose whitelist names a missing element must refuse to start."""
    config = {'elements_in': ['sel1/an1/el1']}
    analyser = EmptyAnalyser(config, 'whitelistErrorAnalyser', LocalStorage(folder=utils.TEMP_ELEMENT_DIR))
    with pytest.raises(InvalidAnalyserElements, match="'elements_in' you specified does not exist"):
        analyser.start_analysing()
def option_parser():
    """Build and parse command-line options for the fashion-update pipeline.

    Options cover: input feature files, the classifier, the texture/shape
    generators, and the iterative update/optimisation procedure.
    """
    parser = argparse.ArgumentParser()
    # --- input data / feature files ---
    parser.add_argument('--texture_feat_file', type=str, help='pickle file with texture features of pieces')
    parser.add_argument('--shape_feat_file', type=str, help='pickle file with shape features of pieces')
    parser.add_argument('--dataset_dir', type=str, help='directory path to read and write files')
    # --- classifier ---
    parser.add_argument('--load_pretrain_clf', type=str, default='', help='load the pretrained classification model from the specified location')
    parser.add_argument('--update_fname', type=str, help='the filename of the imagee we are updating')
    parser.add_argument('--clf_epoch', type=int, default=(- 1), help='load model at epoch; -1 for highest epoch')
    parser.add_argument('--save_dir', type=str, default='results/fashion/updates/', help='path to save generated image')
    parser.add_argument('--network_arch', type=str, default='mlp', help='architecture of the network [mlp|linear]')
    parser.add_argument('--in_dim', type=int, default=12, help='input dimension for first fc layer')
    parser.add_argument('--out_dim', type=int, default=2, help='output dimension for first fc layer')
    parser.add_argument('--param_m', type=int, default=1, help='number of hidden layers in MLP')
    parser.add_argument('--param_k', type=int, default=8, help='number of neurons at each hidden layer')
    parser.add_argument('--fc1_dim', type=int, default=8, help='dimension for fc layer')
    parser.add_argument('--fc2_dim', type=int, default=2, help='dimension for fc layer')
    parser.add_argument('--use_dropout', action='store_true', help='if specified, use dropout layer')
    # --- generators (texture and shape) ---
    parser.add_argument('--load_pretrain_texture_gen', type=str, default='', help='load the pretrained generator model from the specified location')
    parser.add_argument('--color_mode', type=str, default='RGB', help='color mode of our color image [Lab|RGB]')
    parser.add_argument('--model_type', type=str, default='pix2pixHD', help='currently only suppport pix2pixHD')
    parser.add_argument('--texture_feat_num', type=int, default=3, help='texture generator feature dimension')
    parser.add_argument('--load_pretrain_shape_gen', type=str, default='', help='load the pretrained generator model from the specified location')
    parser.add_argument('--shape_feat_num', type=int, default=8, help='shapee generator feature dimension')
    # --- optimisation / update procedure ---
    parser.add_argument('--stop_criterion', type=str, default='maxiter', help='stop scriterion for optimization process: maxiter | deltaloss | thresholdloss')
    parser.add_argument('--max_iter_hr', type=int, default=15, help='how many iterations to run')
    parser.add_argument('--min_deltaloss', type=float, default=0.0, help='the amount of change the loss should make before stop')
    parser.add_argument('--min_thresholdloss', type=float, default=0.0, help='the thresholded loss that the optimizer needs to reach before stops')
    parser.add_argument('--lr', type=float, default=0.05, help='optimizer learning rate; here is the step size for updating module')
    parser.add_argument('--lambda_smooth', type=float, default=10, help='weight of the smooth term')
    parser.add_argument('--netG', type=str, default='global', help='generator architecture [global|local]')
    parser.add_argument('--update_full', action='store_true', help='if specified, update the whole outfit instead of only the swapped')
    parser.add_argument('--update_type', type=str, default='shape_and_texture', help='when partially update: shape_only | texture_only | shape_and_texture')
    parser.add_argument('--display_freq', type=int, default=5, help='how often to compute accuracy')
    parser.add_argument('--autoswap', action='store_true', help='if specified, automatically decide which part to swap out; should not be used together with swapped_partID')
    parser.add_argument('--generate_or_save', type=str, default='generate', help='generate updated image or save the updated vector: generate | save')
    parser.add_argument('--iterative_generation', action='store_true', help='if specified, generate each iteration of an image update')
    # --- segmentation / part selection ---
    parser.add_argument('--classname', type=str, help='segmentation definition from dataset: humanparsing')
    parser.add_argument('--swapped_partID', type=int, default=0, help='predefine which part to swap to; has no effect when autoswap option is specified; for humanparsing classname, 0: top; 1: skirt; 2: pants; 3:dress')
    return parser.parse_args()
def make_keck_atmospheric_layers(input_grid):
    """Build the seven-layer Keck atmospheric turbulence model on ``input_grid``.

    Each layer gets its own height, wind velocity and Cn^2 contribution; the
    outer scale is 20 for every layer.
    """
    heights = np.array([0, 2100, 4100, 6500, 9000, 12000, 14800])
    velocities = np.array([6.7, 13.9, 20.8, 29.0, 29.0, 29.0, 29.0])
    outer_scales = np.array([20, 20, 20, 20, 20, 20, 20])
    Cn_squared = (np.array([0.369, 0.219, 0.127, 0.101, 0.046, 0.111, 0.027]) * 1e-12)
    return [
        InfiniteAtmosphericLayer(input_grid, cn, L0, v, h, 2)
        for (h, v, cn, L0) in zip(heights, velocities, Cn_squared, outer_scales)
    ]
class TASProgramActivityList(PaginationMixin, AgencyBase):
    """List program activities (with object-class children) for one treasury account symbol."""

    endpoint_doc = 'usaspending_api/api_contracts/contracts/v2/agency/treasury_account/tas/program_activity.md'

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.params_to_validate = ['fiscal_year', 'filter']
        # Lazy cache for the latest submission ids of the requested year.
        self._submission_ids = None

    # NOTE(review): ``tas_rendering_label`` and ``submission_ids`` are consumed
    # as plain attributes throughout this class (response dict, query filters),
    # so the ``@property`` decorators -- stripped from the mangled source --
    # are restored here.
    @property
    def tas_rendering_label(self):
        return self.kwargs['tas']

    @property
    def submission_ids(self):
        if (self._submission_ids is not None):
            return self._submission_ids
        self._submission_ids = get_latest_submission_ids_for_fiscal_year(self.fiscal_year)
        return self._submission_ids

    # NOTE(review): reconstructed from a stripped decorator (`_response()`);
    # verify the exact caching decorator used elsewhere in this API.
    @cache_response()
    def get(self, request: Request, *args: Any, **kwargs: Any) -> Response:
        """Return the paginated program-activity list plus object-class children."""
        self.sortable_columns = ['name', 'obligated_amount', 'gross_outlay_amount']
        self.default_sort_column = 'obligated_amount'
        results = list(self.get_program_activity_list())
        page_metadata = get_pagination_metadata(len(results), self.pagination.limit, self.pagination.page)
        results = results[self.pagination.lower_limit:self.pagination.upper_limit]
        pagination_limit_results = results[:self.pagination.limit]
        response_dict = {'treasury_account_symbol': self.tas_rendering_label, 'fiscal_year': self.fiscal_year, 'page_metadata': page_metadata, 'results': pagination_limit_results, 'messages': self.standard_response_messages}
        # Attach object-class breakdowns as children of each program activity.
        for (idx, program_activity_row) in enumerate(pagination_limit_results):
            object_class_results = self.get_object_class_by_program_activity_list(program_activity_row['name'])
            child_response_dict = self.format_object_class_children_response(object_class_results)
            response_dict['results'][idx]['children'] = child_response_dict
        return Response(response_dict)

    def get_program_activity_list(self) -> List[dict]:
        """Aggregate obligations/outlays per program activity for this TAS."""
        # Restrict to the latest submissions, the requested TAS, and rows with
        # a nonzero obligation or outlay in either direction.
        filters = [Q(financialaccountsbyprogramactivityobjectclass__submission_id__in=self.submission_ids), Q(financialaccountsbyprogramactivityobjectclass__treasury_account__tas_rendering_label=self.tas_rendering_label), Q((((Q(financialaccountsbyprogramactivityobjectclass__obligations_incurred_by_program_object_class_cpe__gt=0) | Q(financialaccountsbyprogramactivityobjectclass__obligations_incurred_by_program_object_class_cpe__lt=0)) | Q(financialaccountsbyprogramactivityobjectclass__gross_outlay_amount_by_program_object_class_cpe__gt=0)) | Q(financialaccountsbyprogramactivityobjectclass__gross_outlay_amount_by_program_object_class_cpe__lt=0)))]
        if self.filter:
            filters.append(Q(program_activity_name__icontains=self.filter))
        queryset_results = RefProgramActivity.objects.filter(*filters).values('program_activity_name').annotate(name=F('program_activity_name'), obligated_amount=Sum('financialaccountsbyprogramactivityobjectclass__obligations_incurred_by_program_object_class_cpe'), gross_outlay_amount=Sum('financialaccountsbyprogramactivityobjectclass__gross_outlay_amount_by_program_object_class_cpe')).order_by(f"{('-' if (self.pagination.sort_order == 'desc') else '')}{self.pagination.sort_key}").values('name', 'obligated_amount', 'gross_outlay_amount')
        return queryset_results

    def get_object_class_by_program_activity_list(self, program_activity_name) -> List[dict]:
        """Aggregate obligations/outlays per major object class within one program activity."""
        filters = [Q(financialaccountsbyprogramactivityobjectclass__submission_id__in=self.submission_ids), Q(financialaccountsbyprogramactivityobjectclass__program_activity__program_activity_name=program_activity_name), Q(financialaccountsbyprogramactivityobjectclass__treasury_account__tas_rendering_label=self.tas_rendering_label), Q((((Q(financialaccountsbyprogramactivityobjectclass__obligations_incurred_by_program_object_class_cpe__gt=0) | Q(financialaccountsbyprogramactivityobjectclass__obligations_incurred_by_program_object_class_cpe__lt=0)) | Q(financialaccountsbyprogramactivityobjectclass__gross_outlay_amount_by_program_object_class_cpe__gt=0)) | Q(financialaccountsbyprogramactivityobjectclass__gross_outlay_amount_by_program_object_class_cpe__lt=0)))]
        queryset_results = ObjectClass.objects.filter(*filters).values('major_object_class_name').annotate(name=F('major_object_class_name'), obligated_amount=Sum('financialaccountsbyprogramactivityobjectclass__obligations_incurred_by_program_object_class_cpe'), gross_outlay_amount=Sum('financialaccountsbyprogramactivityobjectclass__gross_outlay_amount_by_program_object_class_cpe')).order_by(f"{('-' if (self.pagination.sort_order == 'desc') else '')}{self.pagination.sort_key}").values('name', 'obligated_amount', 'gross_outlay_amount')
        return queryset_results

    def format_object_class_children_response(self, children):
        """Project the raw child rows onto the response shape."""
        response = []
        for child in children:
            response.append({'name': child.get('name'), 'obligated_amount': child.get('obligated_amount'), 'gross_outlay_amount': child.get('gross_outlay_amount')})
        return response
def main():
    """Correlate CMT_TOP upper-tile clock routing with bitstream bits.

    Loads the pip/wire/pseudo-pip lists for the CMT_TOP_[LR]_UPPER_T tile
    types, reads the routed design, then emits one tag per candidate pip and
    one ACTIVE tag per clock wire.
    """
    segmk = Segmaker('design.bits')
    designdata = {}
    tiledata = {}
    pipdata = {}
    ppipdata = {}
    ignpip = set()
    all_clks = {}
    piplists = ['cmt_top_l_upper_t.txt', 'cmt_top_r_upper_t.txt']
    wirelists = ['cmt_top_l_upper_t_wires.txt', 'cmt_top_r_upper_t_wires.txt']
    ppiplists = ['ppips_cmt_top_l_upper_t.db', 'ppips_cmt_top_r_upper_t.db']
    print('Loading PIP lists...')
    for piplist in piplists:
        with open(os.path.join(os.getenv('FUZDIR'), '..', 'piplist', 'build', 'cmt_top', piplist)) as f:
            for l in f:
                (tile_type, dst, src) = l.strip().split('.')
                if (tile_type not in pipdata):
                    pipdata[tile_type] = []
                    all_clks[tile_type] = set()
                pipdata[tile_type].append((src, dst))
                # Remember every source able to drive a CLK destination.
                if dst.split('_')[(- 1)].startswith('CLK'):
                    all_clks[tile_type].add(src)
    wiredata = {}
    for wirelist in wirelists:
        with open(os.path.join(os.getenv('FUZDIR'), '..', 'piplist', 'build', 'cmt_top', wirelist)) as f:
            for l in f:
                (tile_type, wire) = l.strip().split()
                if (tile_type not in wiredata):
                    wiredata[tile_type] = set()
                wiredata[tile_type].add(wire)
    print('Loading PPIP lists...')
    for ppiplist in ppiplists:
        fname = os.path.join(os.getenv('FUZDIR'), '..', '071-ppips', 'build', ppiplist)
        with open(fname, 'r') as f:
            for l in f:
                (pip_data, pip_type) = l.strip().split()
                # Only always-on pseudo-pips need to be excluded from tagging.
                if (pip_type != 'always'):
                    continue
                (tile_type, dst, src) = pip_data.split('.')
                if (tile_type not in ppipdata):
                    ppipdata[tile_type] = []
                ppipdata[tile_type].append((src, dst))
    print('Loading design data...')
    # design.txt: per-tile site usage flags, comma separated.
    with open('design.txt', 'r') as f:
        for line in f:
            fields = line.strip().split(',')
            designdata[fields[0]] = fields[1:]
    with open('design_pips.txt', 'r') as f:
        for line in f:
            (tile, pip, src, dst, pnum, pdir) = line.split()
            # Keep only the CMT_TOP upper-top tiles handled by this fuzzer.
            if (not tile.startswith('CMT_TOP')):
                continue
            if ('UPPER_B' in tile):
                continue
            if ('LOWER_T' in tile):
                continue
            (pip_prefix, _) = pip.split('.')
            (tile_from_pip, tile_type) = pip_prefix.split('/')
            assert (tile == tile_from_pip)
            (_, src) = src.split('/')
            (_, dst) = dst.split('/')
            pnum = int(pnum)
            pdir = int(pdir)
            if (tile not in tiledata):
                tiledata[tile] = {'type': tile_type, 'pips': set(), 'srcs': set(), 'dsts': set()}
            tiledata[tile]['pips'].add((src, dst))
            tiledata[tile]['srcs'].add(src)
            tiledata[tile]['dsts'].add(dst)
            # Bidirectional pips (pdir == 0) count both ways.
            if (pdir == 0):
                tiledata[tile]['srcs'].add(dst)
                tiledata[tile]['dsts'].add(src)
            # Pips into the CLK destinations are handled via wire tags instead.
            if (dst.startswith('CMT_TOP_R_UPPER_T_CLK') or dst.startswith('CMT_TOP_L_UPPER_T_CLK')):
                ignpip.add((src, dst))
    active_wires = {}
    with open('design_wires.txt', 'r') as f:
        for l in f:
            (tile, wire) = l.strip().split('/')
            if (tile not in active_wires):
                active_wires[tile] = set()
            active_wires[tile].add(wire)
    tags = {}
    for (tile, (site, in_use)) in designdata.items():
        if (tile not in tags):
            tags[tile] = {}
        tags[tile]['IN_USE'] = int(in_use)
    active_clks = {}
    for tile in tags.keys():
        tile_type = tile.rsplit('_', maxsplit=1)[0]
        in_use = tags[tile]['IN_USE']
        # Unused tiles contribute only negative (0) pip tags.
        if (not in_use):
            active_pips = []
        else:
            active_pips = tiledata[tile]['pips']
        for (src, dst) in pipdata[tile_type]:
            if ((src, dst) in ignpip):
                continue
            if ((src, dst) in ppipdata[tile_type]):
                continue
            tag = '{}.{}'.format(dst, src)
            val = (in_use if ((src, dst) in active_pips) else False)
            # Track sources considered "active" for the clock bookkeeping.
            if (not (in_use and (not val))):
                if (tile not in active_clks):
                    active_clks[tile] = set()
                active_clks[tile].add(src)
            tags[tile][tag] = int(val)
        # Emit ACTIVE tags for plain clock wires, skipping special families.
        for wire in wiredata[tile_type]:
            if ('CLK' not in wire):
                continue
            if ('CLKFBOUT2IN' in wire):
                continue
            if ('CLKPLL' in wire):
                continue
            if ('CLKOUT' in wire):
                continue
            if (tile not in active_wires):
                active_wires[tile] = set()
            segmk.add_tile_tag(tile, '{}_ACTIVE'.format(wire), (wire in active_wires[tile]))
    for (tile, tile_tags) in tags.items():
        for (t, v) in tile_tags.items():
            segmk.add_tile_tag(tile, t, v)
    segmk.compile(bitfilter=bitfilter)
    segmk.write()
def _filter_node_ids(unique_ids: List[str], fal_dbt: FalDbt, selectors: List[str], nodeGraph: NodeGraph) -> List[str]:
    """Return the unique ids matched by the selector expression.

    The selectors are parsed as a union of intersections; each intersection
    contributes the set-intersection of its component selector plans, and
    the final result is the union of those contributions.
    """
    matched = set()
    for intersection in parse_union(selectors).components:
        component_sets = []
        try:
            for selector in intersection.components:
                if not selector:
                    continue
                plan = SelectorPlan(selector, unique_ids, fal_dbt)
                component_sets.append(set(plan.execute(nodeGraph)))
        except nx.NetworkXError:
            # A graph error invalidates this whole intersection.
            component_sets = []
        if component_sets:
            matched |= set.intersection(*component_sets)
    return list(matched)
def parse(repo, tag):
    """Parse the emojione source database and regenerate artifacts.

    Reads ``tags/<repo>/<repo>/emoji.json``, then rewrites the emoji test
    fixtures under ``../tests/extensions/emoji/`` and the generated
    ``../pymdownx/emoji1_db.py`` module.

    Args:
        repo: Repository directory name under ``tags/`` holding emoji.json.
        tag: Version tag string embedded in the generated module.
    """
    with open(os.path.join(current_dir, 'tags', repo, repo, 'emoji.json'), 'r') as f:
        emojis = json.loads(f.read())
    emoji_db = {}
    shortnames = set()
    aliases = {}
    for v in emojis.values():
        shortnames.add(v['shortname'])
        emoji_db[v['shortname']] = {'name': v['name'], 'unicode': v['unicode'], 'category': v['category']}
        # Some entries carry an alternate code-point sequence.
        alt = get_unicode_alt(v)
        if alt:
            emoji_db[v['shortname']]['unicode_alt'] = alt
        for alias in v['aliases']:
            aliases[alias] = v['shortname']
    # One fixture per rendering mode; only the plain 'png' fixture lists every
    # emoji, the other modes are truncated after 10 entries.
    for test in ('png', 'png sprite', 'svg', 'svg sprite', 'entities', 'long title', 'no title'):
        with open('../tests/extensions/emoji/emoji1 (%s).txt' % test, 'w') as f:
            f.write('# Emojis\n')
            count = 0
            for emoji in sorted(shortnames):
                # Fixed: removed a redundant ''.join(...) wrapped around this
                # single formatted string — joining one string is a no-op.
                # emoji[1:-1] strips the surrounding colons of the shortname.
                f.write('%s %s<br>\n' % (emoji[1:(- 1)], emoji))
                count += 1
                if ((test != 'png') and (count == 10)):
                    break
    with open('../pymdownx/emoji1_db.py', 'w') as f:
        f.write('"""Emojione autogen.\n\nGenerated from emojione source. Do not edit by hand.\n%s"""\n' % LICENSE)
        f.write('version = "%s"\n' % tag)
        f.write('name = "emojione"\n')
        f.write('emoji = %s\n' % json.dumps(emoji_db, sort_keys=True, indent=4, separators=(',', ': ')))
        f.write('aliases = %s\n' % json.dumps(aliases, sort_keys=True, indent=4, separators=(',', ': ')))
class NodeModel():
    """Groups tile wires into routing nodes via the tile-to-tile connections.

    Nodes are built lazily on first access by unioning connected wires
    (a flood-fill/union style merge) and then electing one representative
    (tile, wire) per node from the configured node-wire rules.
    """

    def __init__(self, grid, connections, tile_wires, node_wires, progressbar=None):
        # grid: provides tiles() and gridinfo_at_tilename(); connections:
        # provides get_connections(). Both are project objects — their exact
        # interfaces are assumed from usage below.
        self.grid = grid
        self.connections = connections
        # tile_wires: tile type -> iterable of wire names in that tile type.
        self.tile_wires = tile_wires
        # "tile/wire" strings that are always node representatives.
        self.specific_node_wires = set(node_wires['specific_node_wires'])
        node_pattern_wires = node_wires['node_pattern_wires']
        # tile type -> set of wire names eligible as node representatives.
        self.node_pattern_wires = {}
        for tile_type in node_pattern_wires:
            assert (tile_type not in self.node_pattern_wires)
            self.node_pattern_wires[tile_type] = set(node_pattern_wires[tile_type])
        # Ensure every known tile type has an entry, even if empty.
        for tile_type in self.tile_wires:
            if (tile_type not in self.node_pattern_wires):
                self.node_pattern_wires[tile_type] = set()
        # Lazily built caches (see _build_nodes / _build_wire_to_node_map).
        self.nodes = None
        self.wire_to_node_map = None
        # progressbar wraps iterables for progress display; default is identity.
        if (progressbar is None):
            self.progressbar = (lambda x: x)
        else:
            self.progressbar = progressbar

    def _build_nodes(self):
        """Build self.nodes: representative (tile, wire) -> list of member wires."""
        # (tile, wire) -> dense integer key; flat_wires is the inverse mapping.
        tile_wire_map = {}
        wires = {}
        flat_wires = []
        for tile in self.progressbar(self.grid.tiles()):
            gridinfo = self.grid.gridinfo_at_tilename(tile)
            tile_type = gridinfo.tile_type
            for wire in self.tile_wires[tile_type]:
                wire_pkey = len(flat_wires)
                tile_wire_map[(tile, wire)] = wire_pkey
                flat_wires.append((tile, wire))
                # None marks "not yet merged into any node set".
                wires[wire_pkey] = None
        # Merge connected wires. Each node is a shared set object; identity
        # ("is") comparison below detects whether two wires already share one.
        for connection in self.progressbar(self.connections.get_connections()):
            a_pkey = tile_wire_map[(connection.wire_a.tile, connection.wire_a.wire)]
            b_pkey = tile_wire_map[(connection.wire_b.tile, connection.wire_b.wire)]
            a_node = wires[a_pkey]
            b_node = wires[b_pkey]
            if (a_node is None):
                a_node = set((a_pkey,))
            if (b_node is None):
                b_node = set((b_pkey,))
            if (a_node is not b_node):
                # In-place union, then repoint every member at the merged set.
                a_node |= b_node
                for wire in a_node:
                    wires[wire] = a_node
        # Deduplicate the shared sets by object identity; unconnected wires
        # become singleton nodes.
        nodes = {}
        for (wire_pkey, node) in self.progressbar(wires.items()):
            if (node is None):
                node = set((wire_pkey,))
            assert (wire_pkey in node)
            nodes[id(node)] = node

        def get_node_wire_for_wires(wire_pkeys):
            """Pick the representative (tile, wire) for a node, or None.

            Priority: singleton node -> its only wire; then an explicitly
            listed specific node wire; then a pattern-matched wire.
            """
            if (len(wire_pkeys) == 1):
                for wire_pkey in wire_pkeys:
                    return flat_wires[wire_pkey]
            for wire_pkey in wire_pkeys:
                (tile, wire) = flat_wires[wire_pkey]
                if ('{}/{}'.format(tile, wire) in self.specific_node_wires):
                    return (tile, wire)
            for wire_pkey in wire_pkeys:
                (tile, wire) = flat_wires[wire_pkey]
                gridinfo = self.grid.gridinfo_at_tilename(tile)
                if (wire in self.node_pattern_wires[gridinfo.tile_type]):
                    return (tile, wire)
            return None
        self.nodes = {}
        for node_wire_pkeys in self.progressbar(nodes.values()):
            node_wire = get_node_wire_for_wires(node_wire_pkeys)
            # Nodes with no eligible representative are dropped entirely.
            if (node_wire is None):
                continue
            self.nodes[node_wire] = [flat_wires[wire_pkey] for wire_pkey in node_wire_pkeys]

    def get_nodes(self):
        """Return the representative (tile, wire) keys of all nodes."""
        if (self.nodes is None):
            self._build_nodes()
        return self.nodes.keys()

    def get_wires_for_node(self, tile, wire):
        """Return the member (tile, wire) list of the node keyed by (tile, wire)."""
        if (self.nodes is None):
            self._build_nodes()
        return self.nodes[(tile, wire)]

    def _build_wire_to_node_map(self):
        """Build the inverse map: member (tile, wire) -> node representative."""
        self.wire_to_node_map = {}
        if (self.nodes is None):
            self._build_nodes()
        for (node, wires) in self.nodes.items():
            for tile_wire in wires:
                # Each wire belongs to exactly one node.
                assert (tile_wire not in self.wire_to_node_map)
                self.wire_to_node_map[tile_wire] = node

    def get_node_for_wire(self, tile, wire):
        """Return the node representative that (tile, wire) belongs to."""
        if (self.wire_to_node_map is None):
            self._build_wire_to_node_map()
        return self.wire_to_node_map[(tile, wire)]
def launch_job(args, j):
    """Build and execute a `docker run` invocation for job config *j*.

    Mounts the config and log directories, substitutes $PWD in job params,
    appends the command configured for args.cmd plus any extra CLI args,
    and aborts via fail() on a non-zero exit status.
    """
    image = j['image']
    # Images with a registry/namespace path are pulled before running.
    if '/' in image:
        pull_image(args.sudo, image)
    cmd = ['sudo'] if args.sudo else []
    cmd.extend(['docker', 'run'])
    # Detached vs. interactive terminal mode.
    cmd.append('-d' if args.background else '-it')
    work_dir = os.path.abspath(os.path.dirname(__file__))
    project_dir = os.path.abspath(os.path.join(work_dir, '..'))
    print('Work dir: ', work_dir)
    cmd.extend(['-v', work_dir + ':/config', '-v', project_dir + '/data/oef-logs:/logs'])
    for arg in j['params']:
        cmd.extend(x.replace('$PWD', project_dir) for x in arg)
    cmd.append(image)
    cmd_config = j['cmd'].get(args.cmd, None)
    if not cmd_config:
        fail('Selected command {} not configured in config file!'.format(args.cmd))
    cmd.extend(parse_command(cmd_config))
    # Everything after '--' on the CLI is forwarded verbatim.
    extra_args = [a for a in args.rest if a != '--']
    print('Extra arguments to search: ', extra_args)
    cmd.extend(extra_args)
    if run(cmd) != 0:
        fail("can't launch " + image)
class OptionPlotoptionsCylinderSonificationContexttracksMappingHighpassResonance(Options):
    """Highpass filter resonance mapping options for cylinder sonification
    context tracks.

    NOTE(review): every name below is defined twice — a getter-style form
    followed by a setter-style form. This matches the sibling Options classes
    in this file and looks like @property / @<name>.setter pairs whose
    decorators were stripped during extraction; as written, the later (setter)
    definition shadows the earlier one. Confirm against the code generator.
    """

    # Getter: mapping function for the resonance value.
    def mapFunction(self):
        return self._config_get(None)

    # Setter counterpart of mapFunction.
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    # Getter: data property the resonance is mapped to.
    def mapTo(self):
        return self._config_get(None)

    # Setter counterpart of mapTo.
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    # Getter: upper bound of the mapped range.
    def max(self):
        return self._config_get(None)

    # Setter counterpart of max.
    def max(self, num: float):
        self._config(num, js_type=False)

    # Getter: lower bound of the mapped range.
    def min(self):
        return self._config_get(None)

    # Setter counterpart of min.
    def min(self, num: float):
        self._config(num, js_type=False)

    # Getter: range the mapping operates within.
    def within(self):
        return self._config_get(None)

    # Setter counterpart of within.
    def within(self, value: Any):
        self._config(value, js_type=False)
def wrap_workflow_list_response(workflow_list: List[WorkflowProto]):
    """Wrap a list of workflow protos into a Response message.

    A None or empty list yields a RESOURCE_DOES_NOT_EXIST response with no
    data; otherwise the workflows are packed into a WorkflowListProto and
    serialized to JSON in the response data field.
    """
    # Truthiness covers both None and the empty list.
    if workflow_list:
        list_proto = WorkflowListProto(workflows=workflow_list)
        payload = MessageToJson(list_proto, preserving_proto_field_name=True)
        return Response(return_code=str(SUCCESS), error_msg=None, data=payload)
    return Response(
        return_code=str(RESOURCE_DOES_NOT_EXIST),
        error_msg=ReturnCode.Name(RESOURCE_DOES_NOT_EXIST).lower(),
        data=None)
class _HVMType(models.Model):
    """Abstract mixin declaring the hypervisor-type constants and choice tuples.

    Intended to be inherited by concrete VM models (abstract=True, so no
    table is created for this class itself).
    """
    # Integer identifiers stored in the database.
    Hypervisor_KVM = 1
    Hypervisor_BHYVE = 2
    Hypervisor_NONE = 3
    # Full choice set with descriptive labels.
    HVM_TYPE = ((Hypervisor_KVM, _('KVM hypervisor')), (Hypervisor_BHYVE, _('BHYVE hypervisor')), (Hypervisor_NONE, _('NO hypervisor')))
    # GUI choice set: only real hypervisors, with short labels.
    HVM_TYPE_GUI = ((Hypervisor_KVM, _('KVM')), (Hypervisor_BHYVE, _('BHYVE')))
    # GUI choice set for the no-hypervisor option alone.
    HVM_TYPE_GUI_NO_HYPERVISOR = ((Hypervisor_NONE, _('NO hypervisor')),)
    # Membership set of values that denote hardware virtualization.
    HVM = frozenset([Hypervisor_KVM, Hypervisor_BHYVE])

    class Meta():
        app_label = 'vms'
        abstract = True
class Group(db.Model, helpers.Serializer):
    """A user group, linked to its FAS (Fedora Account System) group name."""

    id = db.Column(db.Integer, primary_key=True)
    name = db.Column(db.String(127))
    fas_name = db.Column(db.String(127), unique=True)

    def at_name(self):
        """Return the display form of the group name."""
        return u'{}'.format(self.name)

    def __str__(self):
        # Delegate to the unicode representation (py2-era convention).
        return self.__unicode__()

    def __unicode__(self):
        return '{0} (fas: {1})'.format(self.name, self.fas_name)
class DatePicker(Control):
    """A Material-style date picker dialog control.

    Values are exchanged with the underlying control as ISO-8601 strings via
    _get_attr/_set_attr; datetime/date inputs are serialized with isoformat().

    NOTE(review): this class body is extraction-damaged. The bare
    ``_date.setter`` / ``_text.setter`` etc. expression statements below are
    remnants of ``@<name>.setter`` decorator lines, and the paired
    getter/setter defs have lost their ``@property`` decorators — as written,
    the later def of each name shadows the earlier one and the bare
    ``_*.setter`` lines would raise NameError at class creation. Restore the
    decorators from the upstream source before use.
    """

    def __init__(self, ref: Optional[Ref]=None, expand: Optional[Union[(bool, int)]]=None, col: Optional[ResponsiveNumber]=None, opacity: OptionalNumber=None, tooltip: Optional[str]=None, visible: Optional[bool]=None, disabled: Optional[bool]=None, data: Any=None, open: bool=False, value: Optional[datetime]=None, text_style: Optional[TextStyle]=None, first_date: Optional[datetime]=None, last_date: Optional[datetime]=None, current_date: Optional[datetime]=None, keyboard_type: Optional[KeyboardType]=None, date_picker_mode: Optional[DatePickerMode]=None, date_picker_entry_mode: Optional[DatePickerEntryMode]=None, help_text: Optional[str]=None, cancel_text: Optional[str]=None, confirm_text: Optional[str]=None, error_format_text: Optional[str]=None, error_invalid_text: Optional[str]=None, field_hint_text: Optional[str]=None, field_label_text: Optional[str]=None, switch_to_calendar_icon: Optional[str]=None, switch_to_input_icon: Optional[str]=None, on_change=None, on_dismiss=None):
        # Generic Control properties are forwarded to the base class.
        Control.__init__(self, ref=ref, expand=expand, col=col, opacity=opacity, tooltip=tooltip, visible=visible, disabled=disabled, data=data)
        # Remaining keyword args go through this control's setters.
        self.value = value
        self.first_date = first_date
        self.last_date = last_date
        self.current_date = current_date
        self.keyboard_type = keyboard_type
        self.help_text = help_text
        self.cancel_text = cancel_text
        self.confirm_text = confirm_text
        self.error_format_text = error_format_text
        self.error_invalid_text = error_invalid_text
        self.date_picker_mode = date_picker_mode
        self.date_picker_entry_mode = date_picker_entry_mode
        self.text_style = text_style
        self.field_hint_text = field_hint_text
        self.field_label_text = field_label_text
        self.switch_to_calendar_icon = switch_to_calendar_icon
        self.switch_to_input_icon = switch_to_input_icon
        self.on_change = on_change
        self.on_dismiss = on_dismiss
        self.open = open

    def _get_control_name(self):
        # Name used by the framework to identify this control type.
        return 'datepicker'

    def _before_build_command(self):
        super()._before_build_command()

    def pick_date(self):
        """Open the picker dialog and push the update to the UI."""
        self.open = True
        self.update()

    async def pick_date_async(self):
        """Async variant of pick_date()."""
        self.open = True
        (await self.update_async())

    # open getter: whether the dialog is currently shown.
    def open(self) -> Optional[bool]:
        return self._get_attr('open', data_type='bool', def_value=False)

    # open setter.
    def open(self, value: Optional[bool]):
        self._set_attr('open', value)

    # value getter: the selected date, parsed from its ISO string form.
    def value(self) -> Optional[datetime]:
        value_string = self._get_attr('value', def_value=None)
        return (datetime.fromisoformat(value_string) if value_string else None)

    # value setter: accepts datetime/date (serialized) or a raw string.
    def value(self, value: Optional[Union[(datetime, str)]]):
        if isinstance(value, (date, datetime)):
            value = value.isoformat()
        self._set_attr('value', value)

    # first_date getter: earliest selectable date.
    def first_date(self) -> Optional[datetime]:
        value_string = self._get_attr('firstDate', def_value=None)
        if (value_string is None):
            return None
        else:
            return datetime.fromisoformat(value_string)
    # NOTE(review): remnant of a stripped @first_date.setter decorator.
    _date.setter

    def first_date(self, value: Optional[Union[(datetime, str)]]):
        if isinstance(value, (date, datetime)):
            value = value.isoformat()
        self._set_attr('firstDate', value)

    # last_date getter: latest selectable date.
    def last_date(self) -> Optional[datetime]:
        value_string = self._get_attr('lastDate', def_value=None)
        if (value_string is None):
            return None
        else:
            return datetime.fromisoformat(value_string)
    # NOTE(review): remnant of a stripped @last_date.setter decorator.
    _date.setter

    def last_date(self, value: Optional[Union[(datetime, str)]]):
        if isinstance(value, (date, datetime)):
            value = value.isoformat()
        self._set_attr('lastDate', value)

    # current_date getter: the date highlighted as "today".
    def current_date(self) -> Optional[datetime]:
        value_string = self._get_attr('currentDate', def_value=None)
        if (value_string is None):
            return None
        else:
            return datetime.fromisoformat(value_string)
    # NOTE(review): remnant of a stripped @current_date.setter decorator.
    _date.setter

    def current_date(self, value: Optional[Union[(datetime, str)]]):
        if isinstance(value, (date, datetime)):
            value = value.isoformat()
        self._set_attr('currentDate', value)

    # field_hint_text getter: hint shown in input entry mode.
    def field_hint_text(self) -> Optional[str]:
        return self._get_attr('fieldHintText', def_value=None)
    # NOTE(review): remnant of a stripped @field_hint_text.setter decorator.
    _hint_text.setter

    def field_hint_text(self, value: Optional[str]):
        self._set_attr('fieldHintText', value)

    # field_label_text getter: label shown in input entry mode.
    def field_label_text(self) -> Optional[str]:
        return self._get_attr('fieldLabelText', def_value=None)
    # NOTE(review): remnant of a stripped @field_label_text.setter decorator.
    _label_text.setter

    def field_label_text(self, value: Optional[str]):
        self._set_attr('fieldLabelText', value)

    # help_text getter: text shown at the top of the dialog.
    def help_text(self) -> Optional[str]:
        return self._get_attr('helpText', def_value=None)
    # NOTE(review): remnant of a stripped @help_text.setter decorator.
    _text.setter

    def help_text(self, value: Optional[str]):
        self._set_attr('helpText', value)

    # cancel_text getter: label of the cancel button.
    def cancel_text(self) -> Optional[str]:
        return self._get_attr('cancelText', def_value=None)
    # NOTE(review): remnant of a stripped @cancel_text.setter decorator.
    _text.setter

    def cancel_text(self, value: Optional[str]):
        self._set_attr('cancelText', value)

    # confirm_text getter: label of the confirm button.
    def confirm_text(self) -> Optional[str]:
        return self._get_attr('confirmText', def_value=None)
    # NOTE(review): remnant of a stripped @confirm_text.setter decorator.
    _text.setter

    def confirm_text(self, value: Optional[str]):
        self._set_attr('confirmText', value)

    # error_format_text getter: message for unparsable input.
    def error_format_text(self) -> Optional[str]:
        return self._get_attr('errorFormatText', def_value=None)
    # NOTE(review): remnant of a stripped @error_format_text.setter decorator.
    _format_text.setter

    def error_format_text(self, value: Optional[str]):
        self._set_attr('errorFormatText', value)

    # error_invalid_text getter: message for out-of-range input.
    def error_invalid_text(self) -> Optional[str]:
        return self._get_attr('errorInvalidText', def_value=None)
    # NOTE(review): remnant of a stripped @error_invalid_text.setter decorator.
    _invalid_text.setter

    def error_invalid_text(self, value: Optional[str]):
        self._set_attr('errorInvalidText', value)

    # keyboard_type getter: enum cached on the instance, value sent as string.
    def keyboard_type(self) -> Optional[KeyboardType]:
        return self.__keyboard_type
    # NOTE(review): remnant of a stripped @keyboard_type.setter decorator.
    _type.setter

    def keyboard_type(self, value: Optional[KeyboardType]):
        self.__keyboard_type = value
        self._set_attr('keyboardType', (value.value if (value is not None) else None))

    # date_picker_mode getter: initial dialog mode (day/year).
    def date_picker_mode(self) -> Optional[DatePickerMode]:
        return self.__date_picker_mode
    # NOTE(review): remnant of a stripped @date_picker_mode.setter decorator.
    _picker_mode.setter

    def date_picker_mode(self, value: Optional[DatePickerMode]):
        self.__date_picker_mode = value
        self._set_attr('datePickerMode', (value.value if (value is not None) else None))

    # date_picker_entry_mode getter: calendar vs. text input entry.
    def date_picker_entry_mode(self) -> Optional[DatePickerEntryMode]:
        return self.__date_picker_entry_mode
    # NOTE(review): remnant of a stripped @date_picker_entry_mode.setter decorator.
    _picker_entry_mode.setter

    def date_picker_entry_mode(self, value: Optional[DatePickerEntryMode]):
        self.__date_picker_entry_mode = value
        self._set_attr('datePickerEntryMode', (value.value if (value is not None) else None))

    # switch_to_calendar_icon getter: icon toggling to calendar entry mode.
    def switch_to_calendar_icon(self):
        return self._get_attr('switchToCalendarEntryModeIcon')
    # NOTE(review): remnant of a stripped @switch_to_calendar_icon.setter decorator.
    _to_calendar_icon.setter

    def switch_to_calendar_icon(self, value):
        self._set_attr('switchToCalendarEntryModeIcon', value)

    # switch_to_input_icon getter: icon toggling to input entry mode.
    def switch_to_input_icon(self):
        return self._get_attr('switchToInputEntryModeIcon')
    # NOTE(review): remnant of a stripped @switch_to_input_icon.setter decorator.
    _to_input_icon.setter

    def switch_to_input_icon(self, value):
        self._set_attr('switchToInputEntryModeIcon', value)

    # on_change getter: handler fired when a date is selected.
    def on_change(self):
        return self._get_event_handler('change')
    # NOTE(review): remnant of a stripped @on_change.setter decorator.
    _change.setter

    def on_change(self, handler):
        self._add_event_handler('change', handler)

    # on_dismiss getter: handler fired when the dialog is dismissed.
    def on_dismiss(self):
        return self._get_event_handler('dismiss')
    # NOTE(review): remnant of a stripped @on_dismiss.setter decorator.
    _dismiss.setter

    def on_dismiss(self, handler):
        self._add_event_handler('dismiss', handler)
def test_butterworth_raises_exception_with_cutoff_having_its_biggest_value_bigger_than_frequency_divided_by_2(trace):
    """butterworth must reject cutoffs exceeding the Nyquist frequency (15/2)."""
    invalid_cases = (
        ([1, 10], scared.signal_processing.FilterType.BAND_PASS),
        (20, scared.signal_processing.FilterType.HIGH_PASS),
    )
    for cutoff, filter_type in invalid_cases:
        with pytest.raises(ValueError):
            scared.signal_processing.butterworth(trace, 15, cutoff, filter_type=filter_type)
def swapuvs(self, context):
    """Move the active UV layer up or down one slot in the UV layer stack.

    Direction is taken from self.is_down. Because Blender offers no direct
    reorder operation, layers are cycled to the bottom of the stack via
    add/remove round-trips until the desired order is reached.
    """
    uv_layers = bpy.context.object.data.uv_layers
    # Already at the boundary in the requested direction: nothing to do.
    if ((uv_layers.active_index == 0) and (not self.is_down)):
        return {'FINISHED'}
    elif ((uv_layers.active_index == (len(uv_layers) - 1)) and self.is_down):
        return {'FINISHED'}

    def get_index(name):
        # Index of the layer with the given name (names are assumed unique).
        return [i for i in range(len(uv_layers)) if (uv_layers[i].name == name)][0]

    def move_bottom(name):
        # Duplicate the named layer (uv_texture_add copies the active layer),
        # delete the original, then rename the copy — net effect: the layer
        # is moved to the bottom of the stack.
        uv_layers.active_index = get_index(name)
        bpy.ops.mesh.uv_texture_add()
        uv_layers.active_index = get_index(name)
        bpy.ops.mesh.uv_texture_remove()
        uv_layers.active_index = (len(uv_layers) - 1)
        uv_layers.active.name = name
    count = len(uv_layers)
    index_A = uv_layers.active_index
    # Target slot for the active layer.
    index_B = (index_A + (1 if self.is_down else (- 1)))
    if (not self.is_down):
        # Moving up: rotate everything from the target slot downward except
        # the active layer itself to the bottom, which shifts it up one.
        for n in [uv_layers[i].name for i in range(index_B, count) if (i != index_A)]:
            move_bottom(n)
        bpy.context.scene.texToolsSettings.uv_channel = str(index_B)
    elif self.is_down:
        # Moving down: rotate everything from the active slot downward except
        # the layer currently below it, which swaps the pair.
        for n in [uv_layers[i].name for i in range(index_A, count) if (i != index_B)]:
            move_bottom(n)
        bpy.context.scene.texToolsSettings.uv_channel = str(index_B)
    # NOTE(review): no return value on this path — Blender operators usually
    # return {'FINISHED'}; confirm the caller supplies it.
class CourseSerializers(serializers.ModelSerializer):
    """Serializer for Courses, exposing all model fields plus nested
    user and course-list data."""
    # Creation timestamp, formatted server-side; never written by clients.
    add_time = serializers.DateTimeField(format='%Y-%m-%d %H:%M:%S', required=False, read_only=True)
    # Optional course cover image upload.
    image = serializers.ImageField(required=False)
    # Owning user, rendered via the nested serializer (read-only).
    user = UserSerializer(read_only=True)
    # Nested tutorial entries from the reverse relation courselist_set.
    courselist_set = AddtutorialSerializers(many=True)

    class Meta():
        model = Courses
        fields = '__all__'