code
stringlengths
281
23.7M
class OptionSeriesDependencywheelStatesSelect(Options):
    """Highcharts ``series.dependencywheel.states.select`` option group.

    NOTE(review): the flattened source defined each getter/setter pair as two
    plain ``def``s with the same name, so the second silently shadowed the
    first. Restored the ``@property`` / ``@<name>.setter`` decorators that
    these generated option classes use.
    """

    @property
    def animation(self) -> 'OptionSeriesDependencywheelStatesSelectAnimation':
        """Animation sub-options for the select state."""
        return self._config_sub_data('animation', OptionSeriesDependencywheelStatesSelectAnimation)

    @property
    def borderColor(self):
        """Border color of a selected node (stored default '#000000')."""
        return self._config_get('#000000')

    @borderColor.setter
    def borderColor(self, text: str):
        self._config(text, js_type=False)

    @property
    def color(self):
        """Fill color of a selected node (stored default '#cccccc')."""
        return self._config_get('#cccccc')

    @color.setter
    def color(self, text: str):
        self._config(text, js_type=False)

    @property
    def enabled(self):
        """Whether the select state is enabled (stored default True)."""
        return self._config_get(True)

    @enabled.setter
    def enabled(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def halo(self) -> 'OptionSeriesDependencywheelStatesSelectHalo':
        """Halo sub-options for the select state."""
        return self._config_sub_data('halo', OptionSeriesDependencywheelStatesSelectHalo)

    @property
    def lineWidth(self):
        """Line width of a selected node (no stored default)."""
        return self._config_get(None)

    @lineWidth.setter
    def lineWidth(self, num: float):
        self._config(num, js_type=False)

    @property
    def lineWidthPlus(self):
        """Pixels added to the line width in the select state (stored default 1)."""
        return self._config_get(1)

    @lineWidthPlus.setter
    def lineWidthPlus(self, num: float):
        self._config(num, js_type=False)

    @property
    def marker(self) -> 'OptionSeriesDependencywheelStatesSelectMarker':
        """Marker sub-options for the select state."""
        return self._config_sub_data('marker', OptionSeriesDependencywheelStatesSelectMarker)
class uniform(TestCase):
    """Tests for ``PointsSequence.uniform``."""

    def test_empty(self):
        # A zero length must produce the empty-sequence sentinel for ndims=2.
        actual = nutils.pointsseq.PointsSequence.uniform(square, 0)
        self.assertEqual(actual, nutils.pointsseq._Empty(2))

    def test_uniform(self):
        # A positive length wraps the points in a _Uniform sequence.
        actual = nutils.pointsseq.PointsSequence.uniform(square, 1)
        self.assertEqual(actual, nutils.pointsseq._Uniform(square, 1))

    def test_invalid_length(self):
        # Negative lengths are rejected with a precise error message.
        pattern = '^expected nonnegative `length` but got -1$'
        with self.assertRaisesRegex(ValueError, pattern):
            nutils.pointsseq.PointsSequence.uniform(square, -1)
class MetaDataDetector:
    """Derives CPU architecture, bitness and endianness from a file-type string.

    The keyword tables map a canonical label to the substrings that indicate
    it in ``file``-style type output (e.g. 'ELF 32-bit LSB executable, ARM').
    """

    architectures = {'ARC': ['ARC Cores'], 'ARM': ['ARM'], 'AVR': ['Atmel AVR'], 'PPC': ['PowerPC', 'PPC'], 'MIPS': ['MIPS'], 'x86': ['x86', '80386', '80486'], 'SPARC': ['SPARC'], 'RISC-V': ['RISC-V'], 'RISC': ['RISC', 'RS6000', '80960', '80860'], 'S/390': ['IBM S/390'], 'SuperH': ['Renesas SH'], 'ESP': ['Tensilica Xtensa'], 'Alpha': ['Alpha'], 'M68K': ['m68k', '68020'], 'Tilera': ['TILE-Gx', 'TILE64', 'TILEPro']}
    bitness = {'8-bit': ['8-bit'], '16-bit': ['16-bit'], '32-bit': ['32-bit', 'PE32', 'MIPS32'], '64-bit': ['64-bit', 'aarch64', 'x86-64', 'MIPS64', '80860']}
    endianness = {'little endian': ['LSB', '80386', '80486', 'x86'], 'big endian': ['MSB']}

    def get_device_architecture(self, file_object):
        """Return ``file_object``'s cpu_architecture dict, extended with an ISA
        entry ('<arch>, <bits>, <endian> (M)') when the file-type string
        matches a known architecture; unchanged otherwise.
        """
        type_of_file = file_object.processed_analysis['file_type']['result']['full']
        arch_dict = file_object.processed_analysis.get('cpu_architecture', {})
        architecture = self._search_for_arch_keys(type_of_file, self.architectures, delimiter='')
        if not architecture:
            # No architecture keyword found -> nothing to add.
            return arch_dict
        bitness = self._search_for_arch_keys(type_of_file, self.bitness)
        endianness = self._search_for_arch_keys(type_of_file, self.endianness)
        full_isa_result = f'{architecture}{bitness}{endianness} (M)'
        arch_dict.update({full_isa_result: 'Detection based on meta data'})
        return arch_dict

    @staticmethod
    def _search_for_arch_keys(file_type_output, arch_dict, delimiter=', '):
        """Return ``delimiter + key`` for the first table entry whose keyword
        occurs in ``file_type_output``, or '' when nothing matches.

        BUGFIX: decorated as @staticmethod — the signature has no ``self`` but
        the method is invoked via ``self._search_for_arch_keys(...)``; without
        the decorator, ``self`` would be bound to ``file_type_output``.
        """
        for key in arch_dict:
            for bit in arch_dict[key]:
                if bit in file_type_output:
                    return delimiter + key
        return ''
def fixture_data(db):
    """Build a web of agency/account/submission fixtures for federal-account tests.

    NOTE(review): this looks like a pytest fixture (it takes ``db`` and is named
    ``fixture_data``) — the ``@pytest.fixture`` decorator is presumably applied
    where this is registered; confirm against the original module.
    """
    # --- Toptier agencies (note: ta3/ta4 deliberately share the 'DOD' abbreviation) ---
    ta0 = baker.make('references.ToptierAgency', toptier_code='001', abbreviation='ABCD', name='Dept. of Depts', _fill_optional=True)
    ta1 = baker.make('references.ToptierAgency', toptier_code='002', abbreviation='EFGH', name='The Bureau', _fill_optional=True)
    ta2 = baker.make('references.ToptierAgency', toptier_code='1601', abbreviation='DOL', name='Department of Labor', _fill_optional=True)
    ta3 = baker.make('references.ToptierAgency', toptier_code='097', abbreviation='DOD', name='Department of Defense', _fill_optional=True)
    ta4 = baker.make('references.ToptierAgency', toptier_code='021', abbreviation='DOD', name='Department of Navy', _fill_optional=True)
    # --- One federal account per agency (fa2's agency_identifier '1600' differs from ta2's code '1601') ---
    fa0 = baker.make(FederalAccount, agency_identifier='001', main_account_code='0005', account_title='Something', federal_account_code='001-0005', parent_toptier_agency=ta0)
    fa1 = baker.make(FederalAccount, agency_identifier='002', main_account_code='0005', account_title='Nothing1', federal_account_code='002-0005', parent_toptier_agency=ta1)
    fa2 = baker.make(FederalAccount, agency_identifier='1600', main_account_code='0005', account_title='Nothing2', federal_account_code='1600-0005', parent_toptier_agency=ta2)
    fa3 = baker.make(FederalAccount, agency_identifier='097', main_account_code='0005', account_title='CGAC_DOD', federal_account_code='097-0005', parent_toptier_agency=ta3)
    fa4 = baker.make(FederalAccount, agency_identifier='021', main_account_code='0005', account_title='CGAC_DOD(NAVY)', federal_account_code='021-0005', parent_toptier_agency=ta4)
    # --- Treasury accounts (ta0..ta4 are intentionally rebound from agencies to TAAs here) ---
    ta0 = baker.make('accounts.TreasuryAppropriationAccount', federal_account=fa0, tas_rendering_label='tas-label-0')
    ta1 = baker.make('accounts.TreasuryAppropriationAccount', federal_account=fa1, tas_rendering_label='tas-label-1')
    ta2 = baker.make('accounts.TreasuryAppropriationAccount', federal_account=fa2, tas_rendering_label='tas-label-2')
    ta3 = baker.make('accounts.TreasuryAppropriationAccount', federal_account=fa3, tas_rendering_label='tas-label-3')
    ta4 = baker.make('accounts.TreasuryAppropriationAccount', federal_account=fa4, tas_rendering_label='tas-label-4')
    # --- Account balances: mixes final/non-final FY rows and several reporting periods ---
    baker.make('accounts.AppropriationAccountBalances', final_of_fy=True, treasury_account_identifier=ta0, total_budgetary_resources_amount_cpe=1000, submission__reporting_period_start='2017-06-01')
    baker.make('accounts.AppropriationAccountBalances', final_of_fy=False, treasury_account_identifier=ta0, total_budgetary_resources_amount_cpe=100, submission__reporting_period_start='2017-03-01')
    baker.make('accounts.AppropriationAccountBalances', final_of_fy=True, treasury_account_identifier=ta0, total_budgetary_resources_amount_cpe=2000, submission__reporting_period_start='2017-06-01')
    baker.make('accounts.AppropriationAccountBalances', final_of_fy=True, treasury_account_identifier=ta1, total_budgetary_resources_amount_cpe=9000, submission__reporting_period_start='2017-06-01')
    baker.make('accounts.AppropriationAccountBalances', final_of_fy=True, treasury_account_identifier=ta1, total_budgetary_resources_amount_cpe=500, submission__reporting_period_start='2016-06-01')
    # Balance row whose TAA ('999') is created inline via the double-underscore lookup.
    baker.make('accounts.AppropriationAccountBalances', final_of_fy=True, treasury_account_identifier__treasury_account_identifier='999', total_budgetary_resources_amount_cpe=4000, submission__reporting_period_start='2017-06-01')
    baker.make('accounts.AppropriationAccountBalances', final_of_fy=True, treasury_account_identifier=ta2, total_budgetary_resources_amount_cpe=1000, submission__reporting_period_start='2015-06-01')
    baker.make('accounts.AppropriationAccountBalances', final_of_fy=True, treasury_account_identifier=ta3, total_budgetary_resources_amount_cpe=2000, submission__reporting_period_start='2018-03-01')
    baker.make('accounts.AppropriationAccountBalances', final_of_fy=True, treasury_account_identifier=ta4, total_budgetary_resources_amount_cpe=2000, submission__reporting_period_start='2018-03-02')
    # --- '99' family: agency with NO abbreviation, plus FY2022 submissions ---
    ta99 = baker.make('references.ToptierAgency', toptier_code='999', name='Dept. of Depts', abbreviation=None, _fill_optional=True)
    fa99 = baker.make(FederalAccount, id='9999', agency_identifier='999', main_account_code='0009', account_title='Custom 99', federal_account_code='999-0009', parent_toptier_agency=ta99)
    taa99 = baker.make('accounts.TreasuryAppropriationAccount', account_title='Cool Treasury Account', federal_account=fa99, tas_rendering_label='tas-label-99')
    baker.make(BureauTitleLookup, federal_account_code='999-0009', bureau_title='Test Bureau', bureau_slug='test-bureau')
    dabs99 = baker.make('submissions.DABSSubmissionWindowSchedule', submission_reveal_date='2022-09-01', submission_fiscal_year=2022)
    # sub99 carries the final balances for FY2022; sub100 is the non-final counterpart.
    sub99 = baker.make('submissions.SubmissionAttributes', submission_id='099', reporting_fiscal_year=2022, is_final_balances_for_fy=True, submission_window_id=dabs99.id)
    sub100 = baker.make('submissions.SubmissionAttributes', submission_id='100', reporting_fiscal_year=2022, is_final_balances_for_fy=False, submission_window_id=dabs99.id)
    baker.make(FinancialAccountsByProgramActivityObjectClass, treasury_account=taa99, submission=sub99, obligations_incurred_by_program_object_class_cpe=500, gross_outlay_amount_by_program_object_class_cpe=800)
    baker.make(FinancialAccountsByProgramActivityObjectClass, treasury_account=taa99, submission=sub100, obligations_incurred_by_program_object_class_cpe=501, gross_outlay_amount_by_program_object_class_cpe=801)
    baker.make('accounts.AppropriationAccountBalances', final_of_fy=True, submission=sub99, treasury_account_identifier=taa99, total_budgetary_resources_amount_cpe=1000)
    baker.make('accounts.AppropriationAccountBalances', final_of_fy=True, submission=sub100, treasury_account_identifier=taa99, total_budgetary_resources_amount_cpe=1001)
class ValveMirrorTestCase(ValveTestBases.ValveTestBig):
    """Runs the big valve test suite against a config where port p5 mirrors p4
    and an ACL mirrors OSPF traffic to p5.
    """

    # Full faucet YAML config; %s is filled with the shared DP1_CONFIG fragment.
    # (Kept as a single literal — its exact bytes are the test input.)
    CONFIG = ('\nacls:\n    mirror_ospf:\n        - rule:\n            nw_dst: \'224.0.0.5\'\n            dl_type: 0x800\n            actions:\n                mirror: p5\n                allow: 1\n        - rule:\n            dl_type: 0x800\n            actions:\n                allow: 0\n        - rule:\n            actions:\n                allow: 1\ndps:\n    s1:\n%s\n        interfaces:\n            p1:\n                number: 1\n                native_vlan: v100\n                lldp_beacon:\n                    enable: True\n                    system_name: "faucet"\n                    port_descr: "first_port"\n                acls_in: [mirror_ospf]\n            p2:\n                number: 2\n                native_vlan: v200\n                tagged_vlans: [v100]\n            p3:\n                number: 3\n                tagged_vlans: [v100, v200]\n            p4:\n                number: 4\n                tagged_vlans: [v200]\n            p5:\n                number: 5\n                output_only: True\n                mirror: 4\nvlans:\n    v100:\n        vid: 0x100\n        faucet_vips: [\'10.0.0.254/24\']\n        routes:\n            - route:\n                ip_dst: 10.99.99.0/24\n                ip_gw: 10.0.0.1\n            - route:\n                ip_dst: 10.99.98.0/24\n                ip_gw: 10.0.0.99\n    v200:\n        vid: 0x200\n        faucet_vips: [\'fc00::1:254/112\', \'fe80::1:254/64\']\n        routes:\n            - route:\n                ip_dst: \'fc00::10:0/112\'\n                ip_gw: \'fc00::1:1\'\n            - route:\n                ip_dst: \'fc00::20:0/112\'\n                ip_gw: \'fc00::1:99\'\nrouters:\n    router1:\n        bgp:\n            as: 1\n            connect_mode: \'passive\'\n            neighbor_as: 2\n            port: 9179\n            routerid: \'1.1.1.1\'\n            server_addresses: [\'127.0.0.1\']\n            neighbor_addresses: [\'127.0.0.1\']\n            vlan: v100\n' % DP1_CONFIG)

    def setUp(self):
        # Build the valves from the mirroring config above.
        self.setup_valves(self.CONFIG)

    def test_unmirror(self):
        """Removing p5's mirror setting should apply cleanly as a warm reload."""
        config = yaml_load(self.CONFIG)
        del config['dps']['s1']['interfaces']['p5']['mirror']
        self.update_config(yaml_dump(config), reload_type='warm')
class twitterFollowerSearch(Module):
    """Module that searches today's tweets addressed to a target user and
    prints the usernames of their senders.
    """

    config = Config({Option('USERNAME', 'Provide your target Username', True): str('laet4x')})

    def run(self):
        """Run a twint search (tweets to USERNAME since today) and print the
        list of sender usernames.
        """
        today = datetime.now().strftime('%Y-%m-%d')
        c = twint.Config()
        username = self.config.option('USERNAME').value
        c.To = username          # tweets addressed to the target
        c.Since = today          # restrict to today's tweets
        c.Followers = True
        c.Hide_output = True     # suppress twint's own console output
        c.Store_object = True    # collect results in twint.output.tweets_list
        twint.run.Search(c)
        tweets = twint.output.tweets_list
        # Idiom fix (PERF401): comprehension instead of a manual append loop.
        followers = ['{}'.format(tweet.username) for tweet in tweets]
        print(followers)
def test_keystore_volumes():
    """Each keystore secret should be mounted as a secret volume, and custom
    item paths must be honored."""
    config = '\nkeystore:\n  - secretName: test\n  - secretName: test-with-custom-path\n    items:\n    - key: slack_url\n      path: xpack.notification.slack.account.otheraccount.secure_url\n'
    rendered = helm_template(config)
    pod_spec = rendered['statefulset'][uname]['spec']['template']['spec']
    volumes = pod_spec['volumes']
    plain_volume = {'name': 'keystore-test', 'secret': {'secretName': 'test'}}
    custom_path_volume = {'name': 'keystore-test-with-custom-path', 'secret': {'secretName': 'test-with-custom-path', 'items': [{'key': 'slack_url', 'path': 'xpack.notification.slack.account.otheraccount.secure_url'}]}}
    assert plain_volume in volumes
    assert custom_path_volume in volumes
import pytest  # NOTE(review): restored — the flattened source begins with a bare
               # `.parametrize(`, so the `@pytest.mark` prefix (and its import)
               # was evidently lost; confirm against the original module.


@pytest.mark.parametrize('test_input,expected', [('12:devices:/docker/051e2ee0bcea13df4a9e943137f19f957f38ac02d6bad96f9b700f76', {'container': {'id': '051e2ee0bcea13df4a9e943137f19f957f38ac02d6bad96f9b700f76'}}), ('1:name=systemd:/system.slice/docker-cde7c2bab394630a42d73dc610b9c57415dcedd427f6d.scope', {'container': {'id': 'cde7c2bab394630a42d73dc610b9c57415dcedd427f6d'}}), ('1:name=systemd:/kubepods/besteffort/pode9b90526-f47d-11e8-b2a5-080027b9f4fb/15aa6e53-b09a-40c7-8558-c6c31e36c88a', {'container': {'id': '15aa6e53-b09a-40c7-8558-c6c31e36c88a'}, 'kubernetes': {'pod': {'uid': 'e9b90526-f47d-11e8-b2a5-080027b9f4fb'}}}), ('1:name=systemd:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod90d81341_92de_11e7_8cf2_507b9d4141fa.slice/crio-2227daf62df6694645fee5df53c1fa9560e8600a525690ae252b7f63.scope', {'container': {'id': '2227daf62df6694645fee5df53c1fa9560e8600a525690ae252b7f63'}, 'kubernetes': {'pod': {'uid': '90d81341-92de-11e7-8cf2-507b9d4141fa'}}}), ('1:name=systemd:/system.slice/garden.service/garden/70eb4ce5-a065-4401-6990-88ed', {'container': {'id': '70eb4ce5-a065-4401-6990-88ed'}}), ('12:pids:/kubepods/kubepods/besteffort/pod0e886e9a-3879-45f9-b44d-86ef9df03224/244a65edefdffe31685c42317c9054e71dc1193048cf9459e2a4dd35cbc1dba4', {'container': {'id': '244a65edefdffe31685c42317c9054e71dc1193048cf9459e2a4dd35cbc1dba4'}, 'kubernetes': {'pod': {'uid': '0e886e9a-3879-45f9-b44d-86ef9df03224'}}}), ('10:cpuset:/kubepods/pod5eadac96-ab58-11ea-b82b-0242ac110009/7fe41c8a2d1daf11dd91f6c3a44dfeb7d125dc594bddf', {'container': {'id': '7fe41c8a2d1daf11dd91f6c3a44dfeb7d125dc594bddf'}, 'kubernetes': {'pod': {'uid': '5eadac96-ab58-11ea-b82b-0242ac110009'}}}), ('9:freezer:/kubepods.slice/kubepods-pod22949dce_fd8b_11ea_8ede_98f2b32c645c.slice/docker-b15a5bdedd2e7645c3beb908314e4c77857bbfd32a041148c07f.scope', {'container': {'id': 'b15a5bdedd2e7645c3beb908314e4c77857bbfd32a041148c07f'}, 'kubernetes': {'pod': {'uid': '22949dce-fd8b-11ea-8ede-98f2b32c645c'}}})])
def test_cgroup_parsing(test_input, expected):
    """parse_cgroups should extract container ids (and k8s pod uids where
    present) from a /proc/<pid>/cgroup line."""
    f = io.StringIO(test_input)
    result = cgroup.parse_cgroups(f)
    assert result == expected
def delete_log(nodename):
    """Delete the screen log, pid file and ROS log for *nodename*, skipping
    any that do not exist as regular files."""
    candidates = (
        get_logfile(node=nodename),
        get_pidfile(node=nodename),
        get_ros_logfile(nodename),
    )
    for path in candidates:
        if os.path.isfile(path):
            os.remove(path)
def test_difference_with_two_frames_of_same_length_and_default_values(traces):
    """Difference over two equal-length frames must match the stored reference."""
    frame_one = slice(None, 50)
    frame_two = slice(None, 50)
    preprocess = scared.preprocesses.high_order.Difference(frame_1=frame_one, frame_2=frame_two)
    expected = _read('difference_result_two_frames_same_length.npz')
    assert np.array_equal(expected, preprocess(traces))
class FluentRecordFormatter(logging.Formatter, object):
    """A structuring formatter for Fluentd: ``format`` returns a dict rather
    than a string, built either from a format dict, a callable, or by
    excluding record attributes, and then merged with the record's message.
    """

    def __init__(self, fmt=None, datefmt=None, style='%', fill_missing_fmt_key=False, format_json=True, exclude_attrs=None):
        super(FluentRecordFormatter, self).__init__(None, datefmt)

        # Select the format-style implementation; '{' and '$' styles need 3.2+.
        if (sys.version_info[0:2] >= (3, 2)) and (style != '%'):
            (self.__style, basic_fmt_dict) = {'{': (logging.StrFormatStyle, {'sys_host': '{hostname}', 'sys_name': '{name}', 'sys_module': '{module}'}), '$': (logging.StringTemplateStyle, {'sys_host': '${hostname}', 'sys_name': '${name}', 'sys_module': '${module}'})}[style]
        else:
            self.__style = None
            basic_fmt_dict = {'sys_host': '%(hostname)s', 'sys_name': '%(name)s', 'sys_module': '%(module)s'}

        if exclude_attrs is not None:
            # Exclusion mode: emit every record attribute except the excluded ones.
            self._exc_attrs = set(exclude_attrs)
            self._fmt_dict = None
            self._formatter = self._format_by_exclusion
            self.usesTime = super(FluentRecordFormatter, self).usesTime
        else:
            self._exc_attrs = None
            if not fmt:
                self._fmt_dict = basic_fmt_dict
                self._formatter = self._format_by_dict
                self.usesTime = self._format_by_dict_uses_time
            elif hasattr(fmt, '__call__'):
                # fmt is a callable taking the record; it must expose usesTime().
                self._formatter = fmt
                self.usesTime = fmt.usesTime
            else:
                self._fmt_dict = fmt
                self._formatter = self._format_by_dict
                self.usesTime = self._format_by_dict_uses_time

        if format_json:
            self._format_msg = self._format_msg_json
        else:
            self._format_msg = self._format_msg_default

        self.hostname = socket.gethostname()
        self.fill_missing_fmt_key = fill_missing_fmt_key

    def format(self, record):
        """Return the record rendered as a dict (not a string)."""
        # Let the base class compute asctime/exc_text etc. first.
        super(FluentRecordFormatter, self).format(record)
        record.hostname = self.hostname
        data = self._formatter(record)
        self._structuring(data, record)
        return data

    def usesTime(self):
        """Replaced on the instance at construction time with the
        strategy-specific implementation (see __init__).

        BUGFIX: the flattened source left this ``def`` with no body at all
        (a syntax error); restored the docstring-only body.
        """

    def _structuring(self, data, record):
        """Merge the record's msg into *data*: dicts merge as-is, strings go
        through the message formatter, anything else is wrapped."""
        msg = record.msg
        if isinstance(msg, dict):
            self._add_dic(data, msg)
        elif isinstance(msg, str):
            self._add_dic(data, self._format_msg(record, msg))
        else:
            self._add_dic(data, {'message': msg})

    def _format_msg_json(self, record, msg):
        """Try to treat *msg* as a JSON object; fall back to plain formatting."""
        try:
            json_msg = json.loads(str(msg))
            if isinstance(json_msg, dict):
                return json_msg
            else:
                # Valid JSON but not an object (e.g. a number) -> plain message.
                return self._format_msg_default(record, msg)
        except ValueError:
            return self._format_msg_default(record, msg)

    def _format_msg_default(self, record, msg):
        """Wrap the base-class-formatted record text in a message dict."""
        return {'message': super(FluentRecordFormatter, self).format(record)}

    def _format_by_exclusion(self, record):
        """Copy every record attribute not listed in exclude_attrs."""
        data = {}
        for key, value in record.__dict__.items():
            if key not in self._exc_attrs:
                data[key] = value
        return data

    def _format_by_dict(self, record):
        """Render each entry of the format dict against the record."""
        data = {}
        for key, value in self._fmt_dict.items():
            try:
                if self.__style:
                    value = self.__style(value).format(record)
                else:
                    value = value % record.__dict__
            except KeyError as exc:
                value = None
                if not self.fill_missing_fmt_key:
                    raise exc
            data[key] = value
        return data

    def _format_by_dict_uses_time(self):
        """True when any format entry references asctime (so it gets computed)."""
        if self.__style:
            search = self.__style.asctime_search
        else:
            search = '%(asctime)'
        return any([value.find(search) >= 0 for value in self._fmt_dict.values()])

    @staticmethod
    def _add_dic(data, dic):
        """Copy string-keyed entries of *dic* into *data*.

        BUGFIX: decorated as @staticmethod — the signature has no ``self`` but
        every call site invokes ``self._add_dic(data, dic)`` with exactly two
        arguments; without the decorator, ``self`` would be bound to ``data``.
        """
        for key, value in dic.items():
            if isinstance(key, str):
                data[key] = value
class OptionSeriesBoxplotTooltipDatetimelabelformats(Options):
    """Date/time label formats for boxplot-series tooltips, one per time unit.

    NOTE(review): the flattened source defined each getter/setter pair as two
    plain ``def``s with the same name (the second silently shadowed the
    first); restored the ``@property`` / ``@<name>.setter`` decorators that
    these generated option classes use.
    """

    @property
    def day(self):
        """Format for day labels (stored default '%A, %e %b %Y')."""
        return self._config_get('%A, %e %b %Y')

    @day.setter
    def day(self, text: str):
        self._config(text, js_type=False)

    @property
    def hour(self):
        """Format for hour labels (stored default '%A, %e %b, %H:%M')."""
        return self._config_get('%A, %e %b, %H:%M')

    @hour.setter
    def hour(self, text: str):
        self._config(text, js_type=False)

    @property
    def millisecond(self):
        """Format for millisecond labels (stored default '%A, %e %b, %H:%M:%S.%L')."""
        return self._config_get('%A, %e %b, %H:%M:%S.%L')

    @millisecond.setter
    def millisecond(self, text: str):
        self._config(text, js_type=False)

    @property
    def minute(self):
        """Format for minute labels (stored default '%A, %e %b, %H:%M')."""
        return self._config_get('%A, %e %b, %H:%M')

    @minute.setter
    def minute(self, text: str):
        self._config(text, js_type=False)

    @property
    def month(self):
        """Format for month labels (stored default '%B %Y')."""
        return self._config_get('%B %Y')

    @month.setter
    def month(self, text: str):
        self._config(text, js_type=False)

    @property
    def second(self):
        """Format for second labels (stored default '%A, %e %b, %H:%M:%S')."""
        return self._config_get('%A, %e %b, %H:%M:%S')

    @second.setter
    def second(self, text: str):
        self._config(text, js_type=False)

    @property
    def week(self):
        """Format for week labels (stored default 'Week from %A, %e %b %Y')."""
        return self._config_get('Week from %A, %e %b %Y')

    @week.setter
    def week(self, text: str):
        self._config(text, js_type=False)

    @property
    def year(self):
        """Format for year labels (stored default '%Y')."""
        return self._config_get('%Y')

    @year.setter
    def year(self, text: str):
        self._config(text, js_type=False)
class OptionPlotoptionsScatter3dSonificationTracksMappingHighpass(Options):
    """Highpass-filter mapping options for scatter3d sonification tracks."""

    def frequency(self) -> 'OptionPlotoptionsScatter3dSonificationTracksMappingHighpassFrequency':
        """Sub-configuration for the highpass filter's frequency."""
        sub_section = self._config_sub_data('frequency', OptionPlotoptionsScatter3dSonificationTracksMappingHighpassFrequency)
        return sub_section

    def resonance(self) -> 'OptionPlotoptionsScatter3dSonificationTracksMappingHighpassResonance':
        """Sub-configuration for the highpass filter's resonance."""
        sub_section = self._config_sub_data('resonance', OptionPlotoptionsScatter3dSonificationTracksMappingHighpassResonance)
        return sub_section
class TransactionSearch(models.Model): transaction = models.OneToOneField('awards.TransactionNormalized', on_delete=models.DO_NOTHING, primary_key=True) award = models.ForeignKey('search.AwardSearch', on_delete=models.DO_NOTHING, null=True) transaction_unique_id = models.TextField(blank=False, null=False, default='NONE') usaspending_unique_transaction_id = models.TextField(null=True) modification_number = models.TextField(null=True) generated_unique_award_id = models.TextField(null=True) action_date = models.DateField(null=True) fiscal_action_date = models.DateField(null=True) last_modified_date = models.DateField(null=True) fiscal_year = models.IntegerField(null=True) award_certified_date = models.DateField(null=True) award_fiscal_year = models.IntegerField(null=True) create_date = models.DateTimeField(null=True) update_date = models.DateTimeField(null=True) award_update_date = models.DateTimeField(null=True) award_date_signed = models.DateField(null=True) etl_update_date = models.DateTimeField(null=True) period_of_performance_start_date = models.DateField(null=True) period_of_performance_current_end_date = models.DateField(null=True) awarding_agency_code = models.TextField(null=True) awarding_toptier_agency_name = models.TextField(null=True) awarding_toptier_agency_name_raw = models.TextField(null=True) funding_agency_code = models.TextField(null=True) funding_toptier_agency_name = models.TextField(null=True) funding_toptier_agency_name_raw = models.TextField(null=True) awarding_sub_tier_agency_c = models.TextField(null=True) awarding_subtier_agency_name = models.TextField(null=True) awarding_subtier_agency_name_raw = models.TextField(null=True) funding_sub_tier_agency_co = models.TextField(null=True) funding_subtier_agency_name = models.TextField(null=True) funding_subtier_agency_name_raw = models.TextField(null=True) awarding_toptier_agency_id = models.IntegerField(null=True) funding_toptier_agency_id = models.IntegerField(null=True) awarding_agency_id = 
models.IntegerField(null=True) funding_agency_id = models.IntegerField(null=True) awarding_toptier_agency_abbreviation = models.TextField(null=True) funding_toptier_agency_abbreviation = models.TextField(null=True) awarding_subtier_agency_abbreviation = models.TextField(null=True) funding_subtier_agency_abbreviation = models.TextField(null=True) awarding_office_code = models.TextField(null=True) awarding_office_name = models.TextField(null=True) funding_office_code = models.TextField(null=True) funding_office_name = models.TextField(null=True) is_fpds = models.BooleanField(blank=False, null=False) type = models.TextField(null=True) type_description = models.TextField(null=True) action_type = models.TextField(null=True) action_type_description = models.TextField(null=True) award_category = models.TextField(null=True) transaction_description = models.TextField(null=True) business_categories = ArrayField(models.TextField(), null=True) award_amount = models.DecimalField(max_digits=23, decimal_places=2, blank=True, null=True) generated_pragmatic_obligation = models.DecimalField(max_digits=23, decimal_places=2, blank=True, null=True) federal_action_obligation = models.DecimalField(max_digits=23, decimal_places=2, blank=True, null=True) original_loan_subsidy_cost = models.DecimalField(max_digits=23, decimal_places=2, blank=True, null=True) face_value_loan_guarantee = models.DecimalField(max_digits=23, decimal_places=2, blank=True, null=True) indirect_federal_sharing = NumericField(blank=True, null=True) funding_amount = models.DecimalField(max_digits=23, decimal_places=2, blank=True, null=True) total_funding_amount = models.DecimalField(max_digits=23, decimal_places=2, blank=True, null=True) non_federal_funding_amount = models.DecimalField(max_digits=23, decimal_places=2, blank=True, null=True) recipient_hash = models.UUIDField(null=True) recipient_levels = ArrayField(models.TextField(), null=True) recipient_uei = models.TextField(null=True) recipient_name_raw = 
models.TextField(null=True) recipient_name = models.TextField(null=True) recipient_unique_id = models.TextField(null=True) parent_recipient_hash = models.UUIDField(null=True) parent_uei = models.TextField(null=True) parent_recipient_name_raw = models.TextField(null=True) parent_recipient_name = models.TextField(null=True) parent_recipient_unique_id = models.TextField(null=True) recipient_location_country_code = models.TextField(null=True) recipient_location_country_name = models.TextField(null=True) recipient_location_state_code = models.TextField(null=True) recipient_location_state_name = models.TextField(null=True) recipient_location_state_fips = models.TextField(null=True) recipient_location_state_population = models.IntegerField(null=True) recipient_location_county_code = models.TextField(null=True) recipient_location_county_name = models.TextField(null=True) recipient_location_county_population = models.IntegerField(null=True) recipient_location_congressional_code = models.TextField(null=True) recipient_location_congressional_population = models.IntegerField(null=True) recipient_location_congressional_code_current = models.TextField(null=True) recipient_location_zip5 = models.TextField(null=True) legal_entity_zip4 = models.TextField(null=True) legal_entity_zip_last4 = models.TextField(null=True) legal_entity_city_code = models.TextField(null=True) recipient_location_city_name = models.TextField(null=True) legal_entity_address_line1 = models.TextField(null=True) legal_entity_address_line2 = models.TextField(null=True) legal_entity_address_line3 = models.TextField(null=True) legal_entity_foreign_city = models.TextField(null=True) legal_entity_foreign_descr = models.TextField(null=True) legal_entity_foreign_posta = models.TextField(null=True) legal_entity_foreign_provi = models.TextField(null=True) recipient_location_county_fips = models.TextField(null=True) place_of_performance_code = models.TextField(null=True) place_of_performance_scope = 
models.TextField(null=True) pop_country_code = models.TextField(null=True) pop_country_name = models.TextField(null=True) pop_state_code = models.TextField(null=True) pop_state_name = models.TextField(null=True) pop_state_fips = models.TextField(null=True) pop_state_population = models.IntegerField(null=True) pop_county_code = models.TextField(null=True) pop_county_name = models.TextField(null=True) pop_county_population = models.IntegerField(null=True) pop_congressional_code = models.TextField(null=True) pop_congressional_population = models.IntegerField(null=True) pop_congressional_code_current = models.TextField(null=True) pop_zip5 = models.TextField(null=True) place_of_performance_zip4a = models.TextField(null=True) place_of_perform_zip_last4 = models.TextField(null=True) pop_city_name = models.TextField(null=True) place_of_performance_forei = models.TextField(null=True) pop_county_fips = models.TextField(null=True) treasury_account_identifiers = ArrayField(models.IntegerField(), null=True) tas_paths = ArrayField(models.TextField(), null=True) tas_components = ArrayField(models.TextField(), null=True) federal_accounts = models.JSONField(null=True) disaster_emergency_fund_codes = ArrayField(models.TextField(), null=True) officer_1_name = models.TextField(null=True) officer_1_amount = models.DecimalField(max_digits=23, decimal_places=2, blank=True, null=True) officer_2_name = models.TextField(null=True) officer_2_amount = models.DecimalField(max_digits=23, decimal_places=2, blank=True, null=True) officer_3_name = models.TextField(null=True) officer_3_amount = models.DecimalField(max_digits=23, decimal_places=2, blank=True, null=True) officer_4_name = models.TextField(null=True) officer_4_amount = models.DecimalField(max_digits=23, decimal_places=2, blank=True, null=True) officer_5_name = models.TextField(null=True) officer_5_amount = models.DecimalField(max_digits=23, decimal_places=2, blank=True, null=True) published_fabs_id = models.IntegerField(blank=True, 
null=True) afa_generated_unique = models.TextField(null=True) business_funds_ind_desc = models.TextField(null=True) business_funds_indicator = models.TextField(null=True) business_types = models.TextField(null=True) business_types_desc = models.TextField(null=True) cfda_number = models.TextField(null=True) cfda_title = models.TextField(null=True) cfda_id = models.IntegerField(null=True) correction_delete_indicatr = models.TextField(null=True) correction_delete_ind_desc = models.TextField(null=True) fain = models.TextField(null=True) funding_opportunity_goals = models.TextField(blank=True, null=True) funding_opportunity_number = models.TextField(blank=True, null=True) record_type = models.IntegerField(null=True) record_type_description = models.TextField(null=True) sai_number = models.TextField(null=True) uri = models.TextField(null=True) detached_award_procurement_id = models.IntegerField(blank=True, null=True) detached_award_proc_unique = models.TextField(null=True) a_76_fair_act_action = models.TextField(null=True) a_76_fair_act_action_desc = models.TextField(null=True) agency_id = models.TextField(null=True) airport_authority = models.BooleanField(null=True) alaskan_native_owned_corpo = models.BooleanField(null=True) alaskan_native_servicing_i = models.BooleanField(null=True) american_indian_owned_busi = models.BooleanField(null=True) asian_pacific_american_own = models.BooleanField(null=True) base_and_all_options_value = models.TextField(null=True) base_exercised_options_val = models.TextField(null=True) black_american_owned_busin = models.BooleanField(null=True) c1862_land_grant_college = models.BooleanField(null=True) c1890_land_grant_college = models.BooleanField(null=True) c1994_land_grant_college = models.BooleanField(null=True) c8a_program_participant = models.BooleanField(null=True) cage_code = models.TextField(null=True) city_local_government = models.BooleanField(null=True) clinger_cohen_act_planning = models.TextField(null=True) 
clinger_cohen_act_pla_desc = models.TextField(null=True) commercial_item_acqui_desc = models.TextField(null=True) commercial_item_acquisitio = models.TextField(null=True) commercial_item_test_desc = models.TextField(null=True) commercial_item_test_progr = models.TextField(null=True) community_developed_corpor = models.BooleanField(null=True) community_development_corp = models.BooleanField(null=True) consolidated_contract = models.TextField(null=True) consolidated_contract_desc = models.TextField(null=True) construction_wage_rat_desc = models.TextField(null=True) construction_wage_rate_req = models.TextField(null=True) contingency_humanitar_desc = models.TextField(null=True) contingency_humanitarian_o = models.TextField(null=True) contract_award_type = models.TextField(null=True) contract_award_type_desc = models.TextField(null=True) contract_bundling = models.TextField(null=True) contract_bundling_descrip = models.TextField(null=True) contract_financing = models.TextField(null=True) contract_financing_descrip = models.TextField(null=True) contracting_officers_desc = models.TextField(null=True) contracting_officers_deter = models.TextField(null=True) contracts = models.BooleanField(null=True) corporate_entity_not_tax_e = models.BooleanField(null=True) corporate_entity_tax_exemp = models.BooleanField(null=True) cost_accounting_stand_desc = models.TextField(null=True) cost_accounting_standards = models.TextField(null=True) cost_or_pricing_data = models.TextField(null=True) cost_or_pricing_data_desc = models.TextField(null=True) council_of_governments = models.BooleanField(null=True) country_of_product_or_desc = models.TextField(null=True) country_of_product_or_serv = models.TextField(null=True) county_local_government = models.BooleanField(null=True) current_total_value_award = models.TextField(null=True) dod_claimant_prog_cod_desc = models.TextField(null=True) dod_claimant_program_code = models.TextField(null=True) domestic_or_foreign_e_desc = 
models.TextField(null=True) domestic_or_foreign_entity = models.TextField(null=True) domestic_shelter = models.BooleanField(null=True) dot_certified_disadvantage = models.BooleanField(null=True) economically_disadvantaged = models.BooleanField(null=True) educational_institution = models.BooleanField(null=True) emerging_small_business = models.BooleanField(null=True) epa_designated_produc_desc = models.TextField(null=True) epa_designated_product = models.TextField(null=True) evaluated_preference = models.TextField(null=True) evaluated_preference_desc = models.TextField(null=True) extent_competed = models.TextField(null=True) extent_compete_description = models.TextField(null=True) fair_opportunity_limi_desc = models.TextField(null=True) fair_opportunity_limited_s = models.TextField(null=True) fed_biz_opps = models.TextField(null=True) fed_biz_opps_description = models.TextField(null=True) federal_agency = models.BooleanField(null=True) federally_funded_research = models.BooleanField(null=True) for_profit_organization = models.BooleanField(null=True) foreign_funding = models.TextField(null=True) foreign_funding_desc = models.TextField(null=True) foreign_government = models.BooleanField(null=True) foreign_owned_and_located = models.BooleanField(null=True) foundation = models.BooleanField(null=True) government_furnished_desc = models.TextField(null=True) government_furnished_prope = models.TextField(null=True) grants = models.BooleanField(null=True) hispanic_american_owned_bu = models.BooleanField(null=True) hispanic_servicing_institu = models.BooleanField(null=True) historically_black_college = models.BooleanField(null=True) historically_underutilized = models.BooleanField(null=True) hospital_flag = models.BooleanField(null=True) housing_authorities_public = models.BooleanField(null=True) idv_type = models.TextField(null=True) idv_type_description = models.TextField(null=True) indian_tribe_federally_rec = models.BooleanField(null=True) information_technolog_desc = 
models.TextField(null=True) information_technology_com = models.TextField(null=True) inherently_government_desc = models.TextField(null=True) inherently_government_func = models.TextField(null=True) inter_municipal_local_gove = models.BooleanField(null=True) interagency_contract_desc = models.TextField(null=True) interagency_contracting_au = models.TextField(null=True) international_organization = models.BooleanField(null=True) interstate_entity = models.BooleanField(null=True) joint_venture_economically = models.BooleanField(null=True) joint_venture_women_owned = models.BooleanField(null=True) labor_standards = models.TextField(null=True) labor_standards_descrip = models.TextField(null=True) labor_surplus_area_firm = models.BooleanField(null=True) limited_liability_corporat = models.BooleanField(null=True) local_area_set_aside = models.TextField(null=True) local_area_set_aside_desc = models.TextField(null=True) local_government_owned = models.BooleanField(null=True) major_program = models.TextField(null=True) manufacturer_of_goods = models.BooleanField(null=True) materials_supplies_article = models.TextField(null=True) materials_supplies_descrip = models.TextField(null=True) minority_institution = models.BooleanField(null=True) minority_owned_business = models.BooleanField(null=True) multi_year_contract = models.TextField(null=True) multi_year_contract_desc = models.TextField(null=True) multiple_or_single_aw_desc = models.TextField(null=True) multiple_or_single_award_i = models.TextField(null=True) municipality_local_governm = models.BooleanField(null=True) naics_code = models.TextField(null=True) naics_description = models.TextField(null=True) national_interest_action = models.TextField(null=True) national_interest_desc = models.TextField(null=True) native_american_owned_busi = models.BooleanField(null=True) native_hawaiian_owned_busi = models.BooleanField(null=True) native_hawaiian_servicing = models.BooleanField(null=True) nonprofit_organization = 
models.BooleanField(null=True) number_of_actions = models.TextField(null=True) number_of_offers_received = models.TextField(null=True) ordering_period_end_date = models.TextField(null=True) organizational_type = models.TextField(null=True) other_minority_owned_busin = models.BooleanField(null=True) other_not_for_profit_organ = models.BooleanField(null=True) other_statutory_authority = models.TextField(null=True) other_than_full_and_o_desc = models.TextField(null=True) other_than_full_and_open_c = models.TextField(null=True) parent_award_id = models.TextField(null=True) partnership_or_limited_lia = models.BooleanField(null=True) performance_based_se_desc = models.TextField(null=True) performance_based_service = models.TextField(null=True) period_of_perf_potential_e = models.TextField(null=True) piid = models.TextField(null=True) place_of_manufacture = models.TextField(null=True) place_of_manufacture_desc = models.TextField(null=True) planning_commission = models.BooleanField(null=True) port_authority = models.BooleanField(null=True) potential_total_value_awar = models.TextField(null=True) price_evaluation_adjustmen = models.TextField(null=True) private_university_or_coll = models.BooleanField(null=True) product_or_service_code = models.TextField(null=True) product_or_service_description = models.TextField(null=True) program_acronym = models.TextField(null=True) program_system_or_equ_desc = models.TextField(null=True) program_system_or_equipmen = models.TextField(null=True) pulled_from = models.TextField(null=True) purchase_card_as_paym_desc = models.TextField(null=True) purchase_card_as_payment_m = models.TextField(null=True) receives_contracts_and_gra = models.BooleanField(null=True) recovered_materials_s_desc = models.TextField(null=True) recovered_materials_sustai = models.TextField(null=True) referenced_idv_agency_desc = models.TextField(null=True) referenced_idv_agency_iden = models.TextField(null=True) referenced_idv_modificatio = models.TextField(null=True) 
referenced_idv_type = models.TextField(null=True) referenced_idv_type_desc = models.TextField(null=True) referenced_mult_or_si_desc = models.TextField(null=True) referenced_mult_or_single = models.TextField(null=True) research = models.TextField(null=True) research_description = models.TextField(null=True) sam_exception = models.TextField(null=True) sam_exception_description = models.TextField(null=True) sba_certified_8_a_joint_ve = models.BooleanField(null=True) school_district_local_gove = models.BooleanField(null=True) school_of_forestry = models.BooleanField(null=True) sea_transportation = models.TextField(null=True) sea_transportation_desc = models.TextField(null=True) self_certified_small_disad = models.BooleanField(null=True) service_disabled_veteran_o = models.BooleanField(null=True) small_agricultural_coopera = models.BooleanField(null=True) small_business_competitive = models.BooleanField(null=True) small_disadvantaged_busine = models.BooleanField(null=True) sole_proprietorship = models.BooleanField(null=True) solicitation_date = models.DateField(null=True) solicitation_identifier = models.TextField(null=True) solicitation_procedur_desc = models.TextField(null=True) solicitation_procedures = models.TextField(null=True) state_controlled_instituti = models.BooleanField(null=True) subchapter_s_corporation = models.BooleanField(null=True) subcontinent_asian_asian_i = models.BooleanField(null=True) subcontracting_plan = models.TextField(null=True) subcontracting_plan_desc = models.TextField(null=True) the_ability_one_program = models.BooleanField(null=True) total_obligated_amount = models.TextField(null=True) township_local_government = models.BooleanField(null=True) transaction_number = models.TextField(null=True) transit_authority = models.BooleanField(null=True) tribal_college = models.BooleanField(null=True) tribally_owned_business = models.BooleanField(null=True) type_of_contract_pricing = models.TextField(null=True) type_of_contract_pric_desc = 
models.TextField(null=True) type_of_idc = models.TextField(null=True) type_of_idc_description = models.TextField(null=True) type_set_aside = models.TextField(null=True) type_set_aside_description = models.TextField(null=True) undefinitized_action = models.TextField(null=True) undefinitized_action_desc = models.TextField(null=True) us_federal_government = models.BooleanField(null=True) us_government_entity = models.BooleanField(null=True) us_local_government = models.BooleanField(null=True) us_state_government = models.BooleanField(null=True) us_tribal_government = models.BooleanField(null=True) vendor_doing_as_business_n = models.TextField(null=True) vendor_fax_number = models.TextField(null=True) vendor_phone_number = models.TextField(null=True) veteran_owned_business = models.BooleanField(null=True) veterinary_college = models.BooleanField(null=True) veterinary_hospital = models.BooleanField(null=True) woman_owned_business = models.BooleanField(null=True) women_owned_small_business = models.BooleanField(null=True) class Meta(): db_table = 'transaction_search' constraints = [models.UniqueConstraint(fields=['is_fpds', 'transaction'], name='ts_idx_is_fpds_transaction_id')] indexes = [models.Index(fields=['transaction'], name='ts_idx_transaction_id'), models.Index(fields=['generated_unique_award_id'], name='ts_idx_award_key'), models.Index(fields=['afa_generated_unique'], name='ts_idx_fabs_key_pre2008', condition=Q(action_date__lt='2007-10-01')), models.Index(fields=['detached_award_proc_unique'], name='ts_idx_fpds_key_pre2008', condition=Q(action_date__lt='2007-10-01')), models.Index(fields=['piid'], name='ts_idx_piid_pre2008', condition=Q(action_date__lt='2007-10-01')), models.Index(fields=['parent_award_id'], name='ts_idx_parent_award_id_pre2008', condition=Q(action_date__lt='2007-10-01')), models.Index(fields=['fain'], name='ts_idx_fain_pre2008', condition=Q(action_date__lt='2007-10-01')), models.Index(fields=['uri'], name='ts_idx_uri_pre2008', 
condition=Q(action_date__lt='2007-10-01')), models.Index(fields=['is_fpds'], name='ts_idx_is_fpds'), models.Index(fields=['-action_date'], name='ts_idx_action_date', condition=Q(action_date__gte='2007-10-01')), models.Index(fields=['-last_modified_date'], name='ts_idx_last_modified_date'), models.Index(fields=['-fiscal_year'], name='ts_idx_fiscal_year', condition=Q(action_date__gte='2007-10-01')), models.Index(fields=['type'], name='ts_idx_type', condition=(Q(type__isnull=False) & Q(action_date__gte='2007-10-01'))), models.Index(fields=['award'], name='ts_idx_award_id', condition=Q(action_date__gte='2007-10-01')), models.Index(fields=['pop_zip5'], name='ts_idx_pop_zip5', condition=(Q(pop_zip5__isnull=False) & Q(action_date__gte='2007-10-01'))), models.Index(fields=['recipient_unique_id'], name='ts_idx_recipient_unique_id', condition=(Q(recipient_unique_id__isnull=False) & Q(action_date__gte='2007-10-01'))), models.Index(fields=['parent_recipient_unique_id'], name='ts_idx_parent_recipient_unique', condition=(Q(parent_recipient_unique_id__isnull=False) & Q(action_date__gte='2007-10-01'))), models.Index(fields=['pop_state_code', 'action_date'], name='ts_idx_simple_pop_geolocation', condition=((Q(pop_country_code='USA') & Q(pop_state_code__isnull=False)) & Q(action_date__gte='2007-10-01'))), models.Index(fields=['recipient_hash'], name='ts_idx_recipient_hash', condition=Q(action_date__gte='2007-10-01')), models.Index(fields=['action_date'], name='ts_idx_action_date_pre2008', condition=Q(action_date__lt='2007-10-01')), models.Index(fields=['etl_update_date'], name='ts_idx_etl_update_date'), models.Index(fields=['type_of_contract_pricing'], name='ts_idx_tocp_pre2008', condition=Q(action_date__lt='2007-10-01')), models.Index(fields=['naics_code'], name='ts_idx_naics_pre2008', condition=Q(action_date__lt='2007-10-01')), models.Index(fields=['extent_competed'], name='ts_idx_ext_com_pre2008', condition=Q(action_date__lt='2007-10-01')), 
models.Index(fields=['product_or_service_code'], name='ts_idx_psc_pre2008', condition=Q(action_date__lt='2007-10-01')), models.Index(fields=['type_set_aside'], name='ts_idx_type_set_aside_pre2008', condition=Q(action_date__lt='2007-10-01')), models.Index(fields=['cfda_number'], name='ts_idx_cfda_aside_pre2008', condition=Q(action_date__lt='2007-10-01')), models.Index(fields=['awarding_agency_id'], name='ts_idx_awarding_agency_id'), models.Index(fields=['funding_agency_id'], name='ts_idx_funding_agency_id')]
.compilertest
# NOTE(review): the line above is the tail of a decorator (e.g. `@...compilertest`)
# whose start lies outside this extract — confirm against the original file.
def test_valid_grpc_stats_upstream():
    """Check that enabling grpc_stats.upstream_stats in the Ambassador Module
    yields exactly one 'grpc_stats' filter with upstream stats on, both with
    and without a compile cache."""
    # Ambassador Module config turning on per-upstream gRPC statistics.
    yaml = '\n---\napiVersion: getambassador.io/v3alpha1\nkind: Module\nmetadata:\n name: ambassador\n namespace: default\nspec:\n config:\n grpc_stats:\n upstream_stats: true\n'
    cache = Cache(logger)
    # Compile the same input twice: cold (r1) and with a cache (r2); both must be clean.
    r1 = Compile(logger, yaml, k8s=True)
    r2 = Compile(logger, yaml, k8s=True, cache=cache)
    require_no_errors(r1['ir'])
    require_no_errors(r2['ir'])
    ir = r1['ir'].as_dict()
    # Exactly one grpc_stats filter should be generated.
    stats_filters = [f for f in ir['filters'] if (f['name'] == 'grpc_stats')]
    assert (len(stats_filters) == 1)
    # upstream_stats maps to enable_upstream_stats; stats_for_all_methods defaults off.
    assert (stats_filters[0]['config'] == {'enable_upstream_stats': True, 'stats_for_all_methods': False})
class LoginForm(forms.Form):
    """Login form collecting an email address and a password.

    Rendered via the custom template at ``auth/forms/login_form.html``.
    """

    template_name = 'auth/forms/login_form.html'
    email = forms.EmailField(
        label='Email address',
        error_messages={'required': 'You need to enter your email.'},
        widget=forms.EmailInput(attrs={'placeholder': 'Enter your email address.'}),
    )
    # Bug fix: the password field previously used forms.TextInput, which renders
    # the password in cleartext in the browser. PasswordInput masks the input.
    password = forms.CharField(
        label='Password',
        error_messages={'required': 'You need to enter your password.'},
        widget=forms.PasswordInput(attrs={'placeholder': 'Enter your password.'}),
    )
def closest_lightness(l: float) -> Tuple[int, float]:
    """Locate lightness *l* inside the LCH_L table.

    Returns ``(segment_index, fraction)`` where ``fraction`` in [0, 1] is the
    position of *l* within segment ``LCH_L[index] .. LCH_L[index + 1]``.
    Values outside the table clamp to the first/last segment.
    """
    # Clamp below the table: start of the first segment.
    if l <= LCH_L[0]:
        return (0, 0.0)
    # Clamp above the table: end of the last segment.
    if l >= LCH_L[-1]:
        return (len(LCH_L) - 2, 1.0)
    # Binary-search the segment containing l, then interpolate linearly.
    index = bisect.bisect(LCH_L, l) - 1
    lo = LCH_L[index]
    hi = LCH_L[index + 1]
    return (index, 1 - ((hi - l) / (hi - lo)))
def _step_internal(step: Dict, last_step: Dict, start: Union[(str, int)], stop: Union[(str, int)], gas: Tuple[(int, int)], subcall: Dict=None) -> str:
    """Format one trace step as a colored one-line label for call-tree display.

    Builds "<fn> [OP] start:stop [used / total gas] [SELFDESTRUCT]" with ANSI
    color codes.

    NOTE(review): `color` appears to be a callable returning ANSI escape codes
    that also renders as a reset code when formatted directly (`{color}`), as in
    Brownie's console utilities — confirm against the defining module.
    """
    # Highlight in red when the frame ended in a revert/invalid opcode at this step.
    if ((last_step['op'] in {'REVERT', 'INVALID'}) and _step_compare(step, last_step)):
        contract_color = color('bright red')
    else:
        # jumpDepth == 0 means an external call boundary; internal jumps stay uncolored.
        contract_color = (color('bright cyan') if (not step['jumpDepth']) else color())
    key = f"{color('dark white')}{contract_color}{step['fn']} {color('dark white')}"
    left_bracket = f"{color('dark white')}["
    right_bracket = f"{color('dark white')}]"
    if subcall:
        # Annotate the opcode (CALL/DELEGATECALL/...) that created the subcall.
        # NOTE(review): uses a plain '[' (not left_bracket) and bare `{color}` here —
        # presumably intentional reset-to-default; verify against the original.
        key = f"{key}[{color}{subcall['op']}{right_bracket} "
    key = f'{key}{start}:{stop}{color}'
    if gas:
        # gas is (gas used, total gas); collapse to a single figure when equal.
        if (gas[0] == gas[1]):
            gas_str = f"{color('bright yellow')}{gas[0]} gas"
        else:
            gas_str = f"{color('bright yellow')}{gas[0]} / {gas[1]} gas"
        key = f'{key} {left_bracket}{gas_str}{right_bracket}{color}'
    if (last_step['op'] == 'SELFDESTRUCT'):
        key = f"{key} {left_bracket}{color('bright red')}SELFDESTRUCT{right_bracket}{color}"
    return key
def test_get_china_stock_list():
    """Exercise technical.get_security_list filtering: by exchange, code range,
    explicit code list, and 'es' mode (list-valued columns)."""
    print(settings.FOOLTRADER_STORE_PATH)
    # Both exchanges: a Shenzhen code must be present.
    df = technical.get_security_list('stock', exchanges=['sh', 'sz'])
    assert ('000001' in df.index)
    assert ('' == df.loc[('000001', 'sinaIndustry')])
    # Shanghai only.
    df = technical.get_security_list('stock', exchanges=['sh'])
    assert ('600000' in df.index)
    assert ('' == df.loc[('600000', 'sinaIndustry')])
    # Inclusive code range [start_code, end_code].
    df = technical.get_security_list('stock', exchanges=['sh', 'sz'], start_code='000338', end_code='600388')
    assert ('000338' in df.index)
    assert ('600388' in df.index)
    assert ('600389' not in df.index)
    # Explicit code list limits the result exactly.
    df = technical.get_security_list('stock', exchanges=['sh', 'sz'], codes=['300027', '000002'])
    assert (len(df.index) == 2)
    # 'es' mode returns list-typed fields (e.g. sinaArea).
    df = technical.get_security_list('stock', exchanges=['sh', 'sz'], mode='es')
    assert (type(df.loc[('600004', 'sinaArea')]) == list)
    assert ('' in df.loc[('600004', 'sinaArea')])
    assert ('' in df.loc[('600004', 'sinaArea')])
class HelpEntryManager(TypedObjectManager):
    """Manager offering lookup, search and creation helpers for help entries."""

    def find_topicmatch(self, topicstr, exact=False):
        """Find topics matching topicstr: by #dbref, exact key, alias, then
        (unless exact=True) prefix and substring matches."""
        dbref = utils.dbref(topicstr)
        if dbref:
            return self.filter(id=dbref)
        topics = self.filter(db_key__iexact=topicstr)
        if not topics:
            topics = self.get_by_alias(topicstr)
        if not topics and not exact:
            # Progressively relax the match: prefix first, then substring.
            topics = self.filter(db_key__istartswith=topicstr)
            if not topics:
                topics = self.filter(db_key__icontains=topicstr)
        return topics

    def find_apropos(self, topicstr):
        """Return all topics whose key contains topicstr (case-insensitive)."""
        return self.filter(db_key__icontains=topicstr)

    def find_topicsuggestions(self, topicstr):
        """Return near-matches for topicstr, excluding the exact match itself."""
        return self.filter(db_key__icontains=topicstr).exclude(db_key__iexact=topicstr)

    def find_topics_with_category(self, help_category):
        """Return all topics belonging to the given category (case-insensitive)."""
        return self.filter(db_help_category__iexact=help_category)

    def get_all_topics(self):
        """Return all help entries."""
        return self.all()

    def get_all_categories(self):
        """Return the unique help categories currently in use."""
        return list(set(topic.help_category for topic in self.all()))

    def all_to_category(self, default_category):
        """Reassign every topic to default_category and log the operation."""
        topics = self.all()
        for topic in topics:
            topic.help_category = default_category
            topic.save()
        string = 'Help database moved to category {default_category}'.format(default_category=default_category)
        logger.log_info(string)

    def search_help(self, ostring, help_category=None):
        """Return topics whose key matches ostring exactly, optionally
        restricted to help_category."""
        ostring = ostring.strip().lower()
        if help_category:
            return self.filter(db_key__iexact=ostring, db_help_category__iexact=help_category)
        return self.filter(db_key__iexact=ostring)

    def create_help(self, key, entrytext, category='General', locks=None, aliases=None, tags=None):
        """Create and return a new help entry, or None on failure.

        Args:
            key: topic name (must be unique).
            entrytext: body text of the entry.
            category: help category to file the entry under.
            locks: optional lockstring to add.
            aliases: optional alias or iterable of aliases.
            tags: optional tag tuples passed to tags.batch_add.
        """
        try:
            new_help = self.model()
            new_help.key = key
            new_help.entrytext = entrytext
            new_help.help_category = category
            if locks:
                new_help.locks.add(locks)
            if aliases:
                new_help.aliases.add(make_iter(aliases))
            if tags:
                new_help.tags.batch_add(*tags)
            new_help.save()
            # Bug fix: this signal used to sit after the method's return
            # statements and was unreachable, so it never fired. Send it
            # once the entry is successfully saved.
            signals.SIGNAL_HELPENTRY_POST_CREATE.send(sender=new_help)
            return new_help
        except IntegrityError:
            string = ("Could not add help entry: key '%s' already exists." % key)
            logger.log_err(string)
            return None
        except Exception:
            logger.log_trace()
            return None
class ModeloOO(nn.Module):
    """Simple 3-layer MLP classifier returning log-probabilities.

    Args:
        input_size: number of input features (default 28*28 for MNIST-style images).
        output_size: number of classes (default 10).
        layers: sizes of the two hidden layers.
    """

    # Bug fix: the default for `layers` was a mutable list ([128, 128]);
    # use an immutable tuple to avoid the shared-mutable-default pitfall.
    def __init__(self, input_size=(28 ** 2), output_size=10, layers=(128, 128)):
        super().__init__()
        self.l1 = nn.Linear(input_size, layers[0])
        self.l2 = nn.Linear(layers[0], layers[1])
        self.l3 = nn.Linear(layers[1], output_size)

    def forward(self, X):
        """Forward pass: two ReLU hidden layers, then log-softmax over classes."""
        X = nn.functional.relu(self.l1(X))
        X = nn.functional.relu(self.l2(X))
        X = self.l3(X)
        # log_softmax pairs with nn.NLLLoss during training.
        return nn.functional.log_softmax(X, dim=1)
def shave_marks(text):
    """Remove diacritical marks that follow ASCII base characters.

    The input is NFD-decomposed; any combining mark whose preceding kept
    character is an ASCII letter is dropped, so 'café' becomes 'cafe' while
    marks on non-Latin scripts are preserved. The result is recomposed to NFC.
    """
    text = str(text)
    normalized = unicodedata.normalize('NFD', text)
    # No decomposition happened -> nothing to shave.
    if normalized == text:
        return text
    kept = []
    previous = ' '
    for ch in normalized:
        drop = unicodedata.combining(ch) and previous in string.ascii_letters
        if not drop:
            kept.append(ch)
            # `previous` tracks the last *kept* character, so stacked marks on
            # the same ASCII base letter are all removed.
            previous = ch
    return unicodedata.normalize('NFC', ''.join(kept))
def gen_conv1d():
    """Build and return a 1-D convolution kernel definition.

    NOTE(review): `size`, `R[...]` and `seq` indicate an Exo-style scheduling
    DSL; a decorator (e.g. `@proc`) may have been stripped in extraction —
    confirm against the original module.
    """
    def conv1d(n: size, m: size, r: size, x: R[n], w: R[m], res: R[r]):
        # Zero the output buffer first.
        for i in seq(0, r):
            res[i] = 0.0
        # res[i] = sum over valid j of x[j] * w[i - j], with j limited so that
        # 0 <= i - j <= m - 1 (the filter support).
        for i in seq(0, r):
            for j in seq(0, n):
                if ((j < (i + 1)) and (j >= (i - (m - 1)))):
                    res[i] += (x[j] * w[(i - j)])
    return conv1d
class TestPrefixMap(unittest.TestCase):
    """Tests for the PrefixMap trait: unique-prefix completion of string keys,
    shadow `<name>_` mapped values, default handling, pickling and validation."""

    def test_assignment(self):
        """Assigned prefixes complete to the full key; mapped value lands in `married_`."""
        person = Person()
        person.married = 'yea'
        self.assertEqual('yeah', person.married)
        self.assertEqual(1, person.married_)
        person.married = 'yes'
        self.assertEqual('yes', person.married)
        self.assertEqual(1, person.married_)
        person.married = 'na'
        self.assertEqual('nah', person.married)
        self.assertEqual(0, person.married_)
        # Unknown value and ambiguous prefix ('ye' matches yes/yeah) both fail.
        with self.assertRaises(TraitError):
            person.married = 'unknown'
        with self.assertRaises(TraitError):
            person.married = 'ye'

    def test_bad_types(self):
        """Non-string assignments raise TraitError."""
        person = Person()
        wrong_type = [[], (1, 2, 3), 1j, 2.3, 23, b'not a string', None]
        for value in wrong_type:
            with self.subTest(value=value):
                with self.assertRaises(TraitError):
                    person.married = value

    def test_no_default(self):
        """Without an explicit default the first mapping key is used."""
        mapping = {'yes': 1, 'yeah': 1, 'no': 0, 'nah': 0}
        class Person(HasTraits):
            married = PrefixMap(mapping)
        p = Person()
        self.assertEqual(p.married, 'yes')
        self.assertEqual(p.married_, 1)

    def test_default(self):
        """Explicit default_value is honored."""
        class Person(HasTraits):
            married = PrefixMap({'yes': 1, 'yeah': 1, 'no': 0, 'nah': 0}, default_value='nah')
        p = Person()
        self.assertEqual(p.married, 'nah')
        self.assertEqual(p.married_, 0)

    def test_default_keyword_only(self):
        """default_value must be passed by keyword, not positionally."""
        with self.assertRaises(TypeError):
            PrefixMap({'yes': 1, 'no': 0}, 'yes')

    def test_default_method(self):
        """A _<name>_default method supplies the default, once per instance."""
        class Person(HasTraits):
            married = PrefixMap({'yes': 1, 'yeah': 1, 'no': 0, 'nah': 0})
            default_calls = Int(0)
            def _married_default(self):
                self.default_calls += 1
                return 'nah'
        p = Person()
        self.assertEqual(p.married, 'nah')
        self.assertEqual(p.married_, 0)
        self.assertEqual(p.default_calls, 1)
        # Accessing the shadow trait first works too.
        p2 = Person()
        self.assertEqual(p2.married_, 0)
        self.assertEqual(p2.married, 'nah')
        self.assertEqual(p2.default_calls, 1)

    def test_default_static_override_static(self):
        """A subclass class-attribute default overrides the base static default."""
        class BasePerson(HasTraits):
            married = PrefixMap({'yes': 1, 'yeah': 1, 'no': 0, 'nah': 0}, default_value='nah')
        class Person(BasePerson):
            married = 'yes'
        p = Person()
        self.assertEqual(p.married, 'yes')
        self.assertEqual(p.married_, 1)

    def test_default_static_override_method(self):
        """A subclass _married_default method overrides the base static default."""
        class BasePerson(HasTraits):
            married = PrefixMap({'yes': 1, 'yeah': 1, 'no': 0, 'nah': 0}, default_value='nah')
        class Person(BasePerson):
            default_calls = Int(0)
            def _married_default(self):
                self.default_calls += 1
                return 'yes'
        p = Person()
        self.assertEqual(p.married, 'yes')
        self.assertEqual(p.married_, 1)
        self.assertEqual(p.default_calls, 1)

    def test_default_method_override_static(self):
        """A subclass static default overrides the base dynamic default (no call)."""
        class BasePerson(HasTraits):
            married = PrefixMap({'yes': 1, 'yeah': 1, 'no': 0, 'nah': 0})
            default_calls = Int(0)
            def _married_default(self):
                self.default_calls += 1
                return 'nah'
        class Person(BasePerson):
            married = 'yes'
        p = Person()
        self.assertEqual(p.married, 'yes')
        self.assertEqual(p.married_, 1)
        self.assertEqual(p.default_calls, 0)

    def test_default_method_override_method(self):
        """A subclass dynamic default overrides the base dynamic default."""
        class BasePerson(HasTraits):
            married = PrefixMap({'yes': 1, 'yeah': 1, 'no': 0, 'nah': 0})
            default_calls = Int(0)
            def _married_default(self):
                self.default_calls += 1
                return 'nah'
        class Person(BasePerson):
            def _married_default(self):
                self.default_calls += 1
                return 'yes'
        p = Person()
        self.assertEqual(p.married, 'yes')
        self.assertEqual(p.married_, 1)
        self.assertEqual(p.default_calls, 1)

    def test_static_default_transformed(self):
        """A prefix given as static default is completed to its full key."""
        class Person(HasTraits):
            married = PrefixMap({'yes': 1, 'yeah': 1, 'no': 0}, default_value='yea')
        p = Person()
        self.assertEqual(p.married, 'yeah')
        self.assertEqual(p.married_, 1)
        # Same, accessing the shadow value first.
        p = Person()
        self.assertEqual(p.married_, 1)
        self.assertEqual(p.married, 'yeah')

    def test_static_default_validation_error(self):
        """An invalid static default fails at class definition time."""
        with self.assertRaises(ValueError):
            class Person(HasTraits):
                married = PrefixMap({'yes': 1, 'yeah': 1, 'no': 0}, default_value='meh')

    def test_no_nested_exception(self):
        """Validation errors carry no chained __context__/__cause__."""
        class A(HasTraits):
            washable = PrefixMap({'yes': 1, 'no': 0})
        a = A()
        try:
            a.washable = 'affirmatron'
        except TraitError as exc:
            self.assertIsNone(exc.__context__)
            self.assertIsNone(exc.__cause__)

    def test_pickle_roundtrip(self):
        """A pickled trait still validates (and rejects) values after loading."""
        class Person(HasTraits):
            married = PrefixMap({'yes': 1, 'yeah': 1, 'no': 0, 'nah': 0}, default_value='yea')
        p = Person()
        married_trait = p.traits()['married']
        reconstituted = pickle.loads(pickle.dumps(married_trait))
        self.assertEqual(married_trait.validate(p, 'married', 'yea'), 'yeah')
        self.assertEqual(reconstituted.validate(p, 'married', 'yea'), 'yeah')
        with self.assertRaises(TraitError):
            reconstituted.validate(p, 'married', 'uknown')
        with self.assertRaises(TraitError):
            reconstituted.validate(p, 'married', 'ye')

    def test_empty_map(self):
        """An empty mapping is rejected."""
        with self.assertRaises(ValueError):
            PrefixMap({})

    def test_pickle_shadow_trait(self):
        """The shadow `married_` trait survives pickling with a working default callable."""
        class Person(HasTraits):
            married = PrefixMap({'yes': 1, 'yeah': 1, 'no': 0, 'nah': 0}, default_value='yeah')
        p = Person()
        married_shadow_trait = p.trait('married_')
        reconstituted = pickle.loads(pickle.dumps(married_shadow_trait))
        default_value_callable = reconstituted.default_value()[1]
        self.assertEqual(default_value_callable(p), 1)

    def test_existence_of__map(self):
        """PrefixMap exposes its internal _map of full keys."""
        prefix_map = PrefixMap({'yes': 1, 'yeah': 1, 'no': 0, 'nah': 0})
        self.assertEqual(prefix_map._map['yes'], 'yes')
class Fixed(Decimal):
    """Decimal subclass whose comparisons and arithmetic coerce the other
    operand through the module-level `_to_fixed` converter.

    Equality/inequality against floats is explicitly rejected to avoid
    floating-point precision surprises.
    """

    def __new__(cls, value: Any) -> Any:
        # Normalize the input through _to_fixed before Decimal construction.
        return super().__new__(cls, _to_fixed(value))

    def __repr__(self) -> str:
        return f"Fixed('{str(self)}')"

    def __hash__(self) -> int:
        # Defining __eq__ suppresses inherited hashing; restore Decimal's hash.
        return super().__hash__()

    def __lt__(self, other: Any) -> bool:
        return super().__lt__(_to_fixed(other))

    def __le__(self, other: Any) -> bool:
        return super().__le__(_to_fixed(other))

    def __eq__(self, other: Any) -> bool:
        """Equality with coercion; floats are disallowed, unconvertible values compare unequal."""
        if isinstance(other, float):
            raise TypeError('Cannot compare to floating point - use a string instead')
        try:
            return super().__eq__(_to_fixed(other))
        except TypeError:
            # Not convertible to Fixed -> not equal.
            return False

    def __ne__(self, other: Any) -> bool:
        if isinstance(other, float):
            raise TypeError('Cannot compare to floating point - use a string instead')
        try:
            return super().__ne__(_to_fixed(other))
        except TypeError:
            # Not convertible to Fixed -> unequal.
            return True

    def __ge__(self, other: Any) -> bool:
        return super().__ge__(_to_fixed(other))

    def __gt__(self, other: Any) -> bool:
        return super().__gt__(_to_fixed(other))

    def __add__(self, other: Any) -> 'Fixed':
        # Arithmetic results are re-wrapped so the Fixed type is preserved.
        return Fixed(super().__add__(_to_fixed(other)))

    def __sub__(self, other: Any) -> 'Fixed':
        return Fixed(super().__sub__(_to_fixed(other)))
class Tag(BaseObject):
    """Tag entity holding an API handle, a usage count and a name, plus any
    extra attributes passed through kwargs."""

    def __init__(self, api=None, count=None, name=None, **kwargs):
        self.api = api
        self.count = count
        self.name = name
        # Accept arbitrary extra fields from the API payload.
        for (key, value) in kwargs.items():
            setattr(self, key, value)
        # Unset (None) attributes should not count as locally modified.
        # NOTE(review): `to_dict` and `_dirty_attributes` come from BaseObject,
        # which is outside this extract — confirm their semantics there.
        for key in self.to_dict():
            if (getattr(self, key) is None):
                try:
                    self._dirty_attributes.remove(key)
                except KeyError:
                    continue
def eth_etherbone_packet_user_description(dw):
    """Build the user-side stream EndpointDescription for Etherbone packets.

    Combines the Etherbone header parameters (minus fields handled by the
    packetizer) with the UDP user parameters, for a datapath of `dw` bits.
    """
    # Header parameters, dropping the fields the packetizer fills in itself.
    header_params = _remove_from_layout(
        etherbone_packet_header.get_layout(),
        'magic', 'portsize', 'addrsize', 'version')
    # Append the UDP-level user parameters.
    full_params = header_params + eth_udp_user_description(dw).param_layout
    byte_width = dw // 8
    payload = [('data', dw), ('last_be', byte_width), ('error', byte_width)]
    return EndpointDescription(payload, full_params)
def test_fal_model_task_when_dbt_fails(mocker):
    """A FalModelTask must propagate FAILURE when the underlying dbt run fails."""
    task = FalModelTask(['a', 'b'], script=FalLocalHookTask('something.py', bound_model=FakeModel('model')))
    task.set_run_index(DynamicIndexProvider())
    fal_dbt = FakeFalDbt('/test')
    # Force the mocked dbt invocation to report FAILURE.
    mock_dbt_run(mocker, FAILURE)
    assert (task.execute(None, fal_dbt) == FAILURE)
class BroadcastEvent(Event):
    """Event that is always broadcast; synthesizes a dummy system task ID when
    none is supplied."""

    def __init__(self, task_id=None, **kwargs):
        if (not task_id):
            # Build a dummy, DC-unbound task id owned by the system task user.
            dc_id = cq.conf.ERIGONES_DEFAULT_DC
            system_user_id = cq.conf.ERIGONES_TASK_USER
            task_id = task_id_from_string(system_user_id, dummy=True, dc_id=dc_id, tt=TT_DUMMY, tg=TG_DC_UNBOUND)
        # Force broadcast semantics regardless of caller-provided kwargs.
        kwargs['broadcast'] = True
        super(BroadcastEvent, self).__init__(task_id, **kwargs)
class NormalChatOutputParser(BaseOutputParser):
    """Pass-through output parser for plain chat responses."""

    def parse_prompt_response(self, model_out_text):
        """Delegate cleaning to the base parser and return the result."""
        clean_str = super().parse_prompt_response(model_out_text)
        # NOTE(review): debug print left in; consider replacing with logging.
        print('clean prompt response:', clean_str)
        return clean_str

    def parse_view_response(self, ai_text, data) -> str:
        """Return the AI text unchanged; `data` is ignored."""
        return ai_text

    def get_format_instructions(self) -> str:
        # NOTE(review): returns None despite the `-> str` annotation — confirm
        # whether callers rely on a falsy return here.
        pass
def add_or_update_by_ts(session: sqlalchemy.orm.Session, record: Base) -> None:
    """Upsert `record` keyed by its timestamp: update matching rows in place,
    otherwise stage an insert on the session.

    Note: the update copies every mapped column except the primary-key `id`.
    """
    query = session.query(type(record)).filter_by(timestamp=record.timestamp)
    # NOTE(review): count() issues an extra COUNT query; `query.first() is not None`
    # would be cheaper — left unchanged to preserve exact query behavior.
    if (query.count() > 0):
        query.update({column: getattr(record, column) for column in record.__table__.columns.keys() if (column != 'id')})
    else:
        session.add(record)
def read_bel_properties(properties_file, properties_map):
    """Parse a whitespace-separated BEL-properties dump into a merged dict.

    Each input line is:
        <tile> <site_count> { <site> <bel_count> { <bel> <prop_count>
            { <prop_name> <value_count> <values...> } } }
    Yields ((tile, site, bel, prop), values) entries (or shorter key tuples
    for empty tiles/bels) and merges them via `merged_dict`.
    `properties_map` renames properties per BEL, e.g. vendor name -> fuzzer name.
    """
    def inner():
        with open(properties_file, 'r') as f:
            for line in f:
                raw_props = line.split()
                tile = raw_props[0]
                sites_count = int(raw_props[1])
                # prop_loc is a cursor into the flat token list.
                prop_loc = 2
                if (sites_count == 0):
                    # Tile with no sites: emit an empty marker entry.
                    (yield ((tile,), {}))
                for site in range(0, sites_count):
                    site_name = raw_props[prop_loc]
                    bels_count = int(raw_props[(prop_loc + 1)])
                    prop_loc += 2
                    for bel in range(0, bels_count):
                        bel_name = raw_props[prop_loc]
                        bel_name = clean_bname(bel_name)
                        bel_name = bel_name.lower()
                        bel_properties_count = int(raw_props[(prop_loc + 1)])
                        # Count of properties actually emitted for this BEL.
                        props = 0
                        prop_loc += 2
                        for prop in range(0, bel_properties_count):
                            prop_name = raw_props[prop_loc]
                            # Strip the Vivado-style 'CONFIG.<name>.VALUES' wrapper.
                            if (prop_name.startswith('CONFIG.') and prop_name.endswith('.VALUES')):
                                prop_name = prop_name[7:(- 7)]
                            prop_values_count = int(raw_props[(prop_loc + 1)])
                            # Skip BRAM width/mode properties; emit everything else.
                            if (prop_name not in ['RAM_MODE', 'WRITE_WIDTH_A', 'WRITE_WIDTH_B', 'READ_WIDTH_A', 'READ_WIDTH_B']):
                                # Apply the per-BEL property rename map when present.
                                if (bel_name in properties_map):
                                    if (prop_name in properties_map[bel_name]):
                                        prop_name = properties_map[bel_name][prop_name]
                                (yield ((tile, site_name, bel_name, prop_name), raw_props[(prop_loc + 2):((prop_loc + 2) + prop_values_count)]))
                                props += 1
                            prop_loc += (2 + prop_values_count)
                        if (props == 0):
                            # BEL with no (kept) properties: emit an empty marker entry.
                            (yield ((tile, site_name, bel_name), {}))
    return merged_dict(inner())
class Announcement(base_tests.SimpleDataPlane):
    """Send a Features Request and log which OpenFlow 1.0 actions the switch
    reports as supported in its Features Reply action bitmap."""

    def runTest(self):
        logging.info('Running Announcement test')
        logging.info('Sending Features_Request')
        logging.info('Expecting Features Reply with supported actions')
        request = ofp.message.features_request()
        (reply, pkt) = self.controller.transact(request)
        self.assertTrue((reply is not None), 'Failed to get any reply')
        self.assertEqual(reply.type, ofp.OFPT_FEATURES_REPLY, 'Response is not Features_reply')
        # Bug fix: the original chain appended 'OFPAT_SET_NW_SRC' when the
        # OFPAT_SET_DL_DST bit was set, never reported OFPAT_SET_DL_DST and
        # never tested the OFPAT_SET_NW_SRC bit at all. A table keeps the
        # name/bit pairing honest.
        action_bits = [
            ('OFPAT_OUTPUT', ofp.OFPAT_OUTPUT),
            ('OFPAT_SET_VLAN_VID', ofp.OFPAT_SET_VLAN_VID),
            ('OFPAT_SET_VLAN_PCP', ofp.OFPAT_SET_VLAN_PCP),
            ('OFPAT_STRIP_VLAN', ofp.OFPAT_STRIP_VLAN),
            ('OFPAT_SET_DL_SRC', ofp.OFPAT_SET_DL_SRC),
            ('OFPAT_SET_DL_DST', ofp.OFPAT_SET_DL_DST),
            ('OFPAT_SET_NW_SRC', ofp.OFPAT_SET_NW_SRC),
            ('OFPAT_SET_NW_DST', ofp.OFPAT_SET_NW_DST),
            ('OFPAT_SET_NW_TOS', ofp.OFPAT_SET_NW_TOS),
            ('OFPAT_SET_TP_SRC', ofp.OFPAT_SET_TP_SRC),
            ('OFPAT_SET_TP_DST', ofp.OFPAT_SET_TP_DST),
            ('OFPAT_EXPERIMENTER', ofp.OFPAT_EXPERIMENTER),
            ('OFPAT_ENQUEUE', ofp.OFPAT_ENQUEUE),
        ]
        supported_actions = [name for (name, bit) in action_bits if (reply.actions & (1 << bit))]
        logging.info(supported_actions)
def _pmt_specification_type_to_python_type(specification_type: str) -> str:
    """Translate a compositional 'pt:dict[...]'-style specification type into
    a Python `Dict[K, V]` annotation string."""
    sub_types = _get_sub_types_of_compositional_types(specification_type)
    key_annotation = _specification_type_to_python_type(sub_types[0])
    value_annotation = _specification_type_to_python_type(sub_types[1])
    return f'Dict[{key_annotation}, {value_annotation}]'
class BaseEmailConnector(Generic[DB_CONNECTOR_TYPE], ABC):
    """Abstract base for connectors that deliver privacy-request notices by email.

    NOTE(review): several methods below had no body in this extract — their
    decorators (likely @abstractmethod) and bodies appear to have been stripped;
    confirm against the original module. Docstrings are added here so the
    definitions remain syntactically valid.
    """

    def required_identities(self) -> List[str]:
        """Return the identity field names this connector requires."""

    def identities_for_test_email(self) -> Dict[(str, Any)]:
        """Return sample identity values used when sending a test email."""

    def __init__(self, configuration: ConnectionConfig):
        """Store the connection configuration; the DB client starts unset."""
        self.configuration = configuration
        # Hide sensitive parameters from errors/logs outside dev mode.
        self.hide_parameters = (not CONFIG.dev_mode)
        self.db_client: Optional[DB_CONNECTOR_TYPE] = None

    def test_connection(self) -> Optional[ConnectionTestStatus]:
        """Validate connectivity for this email connector."""

    def batch_email_send(self, privacy_requests: Query) -> None:
        """Send emails covering the given batch of privacy requests."""

    def add_skipped_log(self, db: Session, privacy_request: PrivacyRequest) -> None:
        """Record that the given privacy request was skipped."""
def register():
    """Register the add-on's classes with Blender and attach the scene-level
    properties used by the shape-key transfer UI."""
    for cls in classes:
        bpy.utils.register_class(cls)
    scene = bpy.types.Scene
    # UI list selection index.
    scene.shapekeytransfer_list_index = IntProperty()
    # Add-on settings and the editable shape-key list.
    scene.shapekeytransfer = PointerProperty(type=SKT_PG_settings)
    scene.customshapekeylist = CollectionProperty(type=SKT_PG_shapeKeyListItem)
def extractYoloarchivesWordpressCom(item):
    """Parser for 'yoloarchives.wordpress.com' feed items.

    Returns a release message for recognized tags, None for previews or
    untitled chapters, and False when no tag matches.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    lacks_numbering = not (chp or vol)
    if lacks_numbering or 'preview' in item['title'].lower():
        return None
    tagmap = [
        ('PRC', 'PRC', 'translated'),
        ('Loiterous', 'Loiterous', 'oel'),
    ]
    for tagname, name, tl_type in tagmap:
        if tagname not in item['tags']:
            continue
        return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
class OptionSeriesAreaSonification(Options):
    """Highcharts `series.area.sonification` option group.

    NOTE(review): getter/setter pairs below share a name without visible
    decorators — @property/@<name>.setter decorators appear to have been
    stripped in extraction; confirm against the original module.
    """

    def contextTracks(self) -> 'OptionSeriesAreaSonificationContexttracks':
        """Context tracks to play alongside the data tracks."""
        return self._config_sub_data('contextTracks', OptionSeriesAreaSonificationContexttracks)

    def defaultInstrumentOptions(self) -> 'OptionSeriesAreaSonificationDefaultinstrumentoptions':
        """Default options for all this series' instrument tracks."""
        return self._config_sub_data('defaultInstrumentOptions', OptionSeriesAreaSonificationDefaultinstrumentoptions)

    def defaultSpeechOptions(self) -> 'OptionSeriesAreaSonificationDefaultspeechoptions':
        """Default options for all this series' speech tracks."""
        return self._config_sub_data('defaultSpeechOptions', OptionSeriesAreaSonificationDefaultspeechoptions)

    def enabled(self):
        """Whether sonification is enabled for this series (default True)."""
        return self._config_get(True)

    def enabled(self, flag: bool):
        """Set whether sonification is enabled for this series."""
        self._config(flag, js_type=False)

    def pointGrouping(self) -> 'OptionSeriesAreaSonificationPointgrouping':
        """How to group points for playback."""
        return self._config_sub_data('pointGrouping', OptionSeriesAreaSonificationPointgrouping)

    def tracks(self) -> 'OptionSeriesAreaSonificationTracks':
        """The sonification tracks for this series."""
        return self._config_sub_data('tracks', OptionSeriesAreaSonificationTracks)
def xyz_d65_to_jzazbz(xyzd65: Vector) -> Vector:
    """Convert XYZ (D65) to the JzAzBz color space.

    Follows the Safdar et al. JzAzBz pipeline: absolute XYZ, chromatic
    pre-adjustment (B/G constants), LMS via matrix, PQ (ST 2084) encoding,
    Izazbz projection, and the final Jz lightness compression.
    """
    # Scale to absolute luminance XYZ.
    (xa, ya, za) = util.xyz_to_absxyz(xyzd65)
    # Pre-adjust X and Y to reduce the blue-hue distortion (B, G constants).
    xm = ((B * xa) - ((B - 1) * za))
    ym = ((G * ya) - ((G - 1) * xa))
    lms = alg.matmul(xyz_to_lms_m, [xm, ym, za], dims=alg.D2_D1)
    # Nonlinear PQ encoding of the LMS response.
    pqlms = util.pq_st2084_oetf(lms, m2=M2)
    (iz, az, bz) = alg.matmul(lms_p_to_izazbz_m, pqlms, dims=alg.D2_D1)
    # Compress Iz into the final Jz lightness (D, D0 constants).
    jz = ((((1 + D) * iz) / (1 + (D * iz))) - D0)
    return [jz, az, bz]
class OptionSeriesWaterfallSonificationDefaultinstrumentoptionsMappingLowpass(Options):
    """Highcharts lowpass-filter mapping options for waterfall sonification
    default instrument tracks."""

    def frequency(self) -> 'OptionSeriesWaterfallSonificationDefaultinstrumentoptionsMappingLowpassFrequency':
        """Map to the lowpass filter's cutoff frequency."""
        return self._config_sub_data('frequency', OptionSeriesWaterfallSonificationDefaultinstrumentoptionsMappingLowpassFrequency)

    def resonance(self) -> 'OptionSeriesWaterfallSonificationDefaultinstrumentoptionsMappingLowpassResonance':
        """Map to the lowpass filter's resonance."""
        return self._config_sub_data('resonance', OptionSeriesWaterfallSonificationDefaultinstrumentoptionsMappingLowpassResonance)
def extractJustafrenchbreadWordpressCom(item):
    """Parser for 'justafrenchbread.wordpress.com' feed items.

    Returns a release message when a known tag is present, None for previews
    or items without chapter/volume numbering, and False otherwise.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if not (chp or vol) or 'preview' in item['title'].lower():
        return None
    for tagname, name, tl_type in (
            ('PRC', 'PRC', 'translated'),
            ('Loiterous', 'Loiterous', 'oel')):
        if tagname in item['tags']:
            return buildReleaseMessageWithType(
                item, name, vol, chp,
                frag=frag, postfix=postfix, tl_type=tl_type)
    return False
def xterm_palette_to_rgb(color):
    """Convert a 256-color xterm palette index to an (r, g, b) tuple.

    Index ranges: 0-7 basic colors, 8-15 bright variants, the 6x6x6 color
    cube, and the grayscale ramp (module constants define the offsets).
    """
    color = int(color)
    if (color <= 7):
        # Basic ANSI colors straight from the lookup table.
        return BASIC_RGB[color]
    if (8 <= color <= 15):
        # Bright variants: saturate each nonzero channel of the basic color.
        return tuple(((255 if (c > 0) else 0) for c in BASIC_RGB[(color - 8)]))
    if (XTERM_GRAY_LEVELS_OFFSET <= color <= 255):
        # Grayscale ramp: 8, 18, 28, ... per step.
        return (((8 + (10 * (color - XTERM_GRAY_LEVELS_OFFSET))),) * 3)
    # 6x6x6 color cube: decompose the index digit by digit via XTERM_COEFF
    # and map each digit to a channel intensity through XTERM_JUMPS.
    value = (color - XTERM_COLORS_OFFSET)
    mod = value
    rgb = []
    for c in XTERM_COEFF:
        (val, mod) = divmod(mod, c)
        rgb.append(sum(XTERM_JUMPS[:val]))
    return tuple(rgb)
class HTMLHelpWindow(QtGui.QDialog):
    """Dialog that renders HTML help content in an embedded web view."""

    def __init__(self, parent, html_content, scale_dx, scale_dy):
        """Build the dialog, load *html_content* and show it immediately.

        scale_dx/scale_dy are accepted for interface compatibility but unused here.
        """
        # imported lazily so the toolkit backend is only required when help is opened
        from pyface.qt import QtWebKit

        QtGui.QDialog.__init__(self, parent)
        main_layout = QtGui.QVBoxLayout(self)
        main_layout.setContentsMargins(0, 0, 0, 0)

        viewer = QtWebKit.QWebView()
        viewer.setSizePolicy(QtGui.QSizePolicy.Policy.Expanding, QtGui.QSizePolicy.Policy.Expanding)
        viewer.setHtml(html_content)
        main_layout.addWidget(viewer)

        buttons = QtGui.QDialogButtonBox(QtGui.QDialogButtonBox.StandardButton.Ok, QtCore.Qt.Orientation.Horizontal)
        buttons.accepted.connect(self.accept)
        main_layout.addWidget(buttons)

        position_window(self, parent=parent)
        self.show()
class OptionSeriesXrangeSonificationContexttracksMappingPitch(Options):
    """Pitch-mapping options for x-range sonification context tracks.

    Restored the stripped ``@property``/``@<name>.setter`` decorators: every
    getter here was silently shadowed by its same-named setter.
    """

    @property
    def mapFunction(self):
        """Mapping function. Defaults to None."""
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        """Property to map from. Defaults to 'y'."""
        return self._config_get('y')

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        """Upper bound of the mapping. Defaults to 'c6'."""
        return self._config_get('c6')

    @max.setter
    def max(self, text: str):
        self._config(text, js_type=False)

    @property
    def min(self):
        """Lower bound of the mapping. Defaults to 'c2'."""
        return self._config_get('c2')

    @min.setter
    def min(self, text: str):
        self._config(text, js_type=False)

    @property
    def scale(self):
        """Musical scale to constrain notes to. Defaults to None."""
        return self._config_get(None)

    @scale.setter
    def scale(self, value: Any):
        self._config(value, js_type=False)

    @property
    def within(self):
        """Data bounds context. Defaults to 'yAxis'."""
        return self._config_get('yAxis')

    @within.setter
    def within(self, text: str):
        self._config(text, js_type=False)
class GenericBranch(Instruction, ABC, Generic[E]):
    """Abstract base class for branch instructions guarded by a condition of type E."""

    def __init__(self, condition: E, tags: Optional[Tuple[Tag, ...]] = None):
        """Store the guarding condition; tags are forwarded to Instruction."""
        super().__init__(tags)
        self._condition = condition

    def __repr__(self) -> str:
        # BUG FIX: the body of __repr__ was missing entirely.
        # NOTE(review): representation format chosen here — confirm against the
        # project's other Instruction reprs.
        return f'if({self._condition!r})'

    def __iter__(self) -> Iterator[E]:
        """Yield the single sub-expression: the condition."""
        yield self.condition

    @property
    def complexity(self) -> int:
        # restored @property: `condition.complexity` is read as an attribute.
        return self.condition.complexity

    @property
    def requirements_iter(self) -> Iterator[Variable]:
        # restored @property, mirroring `complexity` above.
        return self.condition.requirements_iter

    def substitute(self, replacee: Expression, replacement: Expression) -> None:
        """Replace *replacee* with *replacement* inside the condition, then
        simplify `<cmp> ==/!= 0` wrappers into the (possibly negated) comparison."""
        if self._condition == replacee:
            self._condition = replacement
        else:
            self._condition.substitute(replacee, replacement)
        # Detect the pattern Condition(cmp(..) ==/!= 0) where cmp is negatable.
        if (
            isinstance(self.condition, Condition)
            and self.condition.operation in {OperationType.equal, OperationType.not_equal}
            and any(isinstance((new_cond := op), BinaryOperation) and new_cond.operation in Condition.NEGATIONS for op in self.condition.operands)
            and any(isinstance(op, Constant) and op.value == 0 for op in self.condition.operands)
        ):
            assert isinstance(new_cond, BinaryOperation)
            if self.condition.operation == OperationType.not_equal:
                # `cmp != 0` is just `cmp`
                self._condition = Condition(new_cond.operation, new_cond.operands, new_cond.type)
            else:
                # `cmp == 0` is `not cmp`
                self._condition = Condition(new_cond.operation, new_cond.operands, new_cond.type).negate()

    @property
    def condition(self) -> E:
        # restored @property: the rest of this class reads `self.condition`
        # without calling it.
        return self._condition

    def copy(self) -> GenericBranch[E]:
        """Return a new instance of the same concrete class with a copied condition."""
        return self.__class__(self._condition.copy(), self.tags)

    def accept(self, visitor: DataflowObjectVisitorInterface[T]) -> T:
        """Visitor-pattern dispatch."""
        return visitor.visit_generic_branch(self)
class OptionSeriesXrangeStatesHover(Options):
    """Hover-state options for x-range series.

    Restored the stripped ``@property``/``@<name>.setter`` decorators: the
    plain getter/setter pairs shadowed each other.
    """

    @property
    def animation(self) -> 'OptionSeriesXrangeStatesHoverAnimation':
        """Sub-configuration for hover animation."""
        return self._config_sub_data('animation', OptionSeriesXrangeStatesHoverAnimation)

    @property
    def borderColor(self):
        """Border color in hover state. Defaults to None."""
        return self._config_get(None)

    @borderColor.setter
    def borderColor(self, text: str):
        self._config(text, js_type=False)

    @property
    def brightness(self):
        """Brightness increase on hover. Defaults to 0.1."""
        return self._config_get(0.1)

    @brightness.setter
    def brightness(self, num: float):
        self._config(num, js_type=False)

    @property
    def color(self):
        """Fill color in hover state. Defaults to None."""
        return self._config_get(None)

    @color.setter
    def color(self, text: str):
        self._config(text, js_type=False)

    @property
    def enabled(self):
        """Whether the hover state is enabled. Defaults to True."""
        return self._config_get(True)

    @enabled.setter
    def enabled(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def halo(self) -> 'OptionSeriesXrangeStatesHoverHalo':
        """Sub-configuration for the hover halo."""
        return self._config_sub_data('halo', OptionSeriesXrangeStatesHoverHalo)

    @property
    def lineWidth(self):
        """Line width in hover state. Defaults to None."""
        return self._config_get(None)

    @lineWidth.setter
    def lineWidth(self, num: float):
        self._config(num, js_type=False)

    @property
    def lineWidthPlus(self):
        """Line width increase on hover. Defaults to 1."""
        return self._config_get(1)

    @lineWidthPlus.setter
    def lineWidthPlus(self, num: float):
        self._config(num, js_type=False)

    @property
    def marker(self) -> 'OptionSeriesXrangeStatesHoverMarker':
        """Sub-configuration for the hover marker."""
        return self._config_sub_data('marker', OptionSeriesXrangeStatesHoverMarker)
def test_delete_cascade(admin_db, common_db, backend_db):
    """Deleting a firmware must cascade to its child file objects."""
    fo, fw = create_fw_with_child_fo()
    # neither object exists before insertion
    for uid in (fo.uid, fw.uid):
        assert common_db.exists(uid) is False
    backend_db.insert_multiple_objects(fw, fo)
    # both objects exist after insertion
    for uid in (fo.uid, fw.uid):
        assert common_db.exists(uid) is True
    admin_db.delete_object(fw.uid)
    assert common_db.exists(fw.uid) is False
    assert common_db.exists(fo.uid) is False, 'deletion should be cascaded to child objects'
class Hdf5Data(object):
    """Bundle of values plus their keys/inputs/features labels, persistable to HDF5."""

    # (HDF5 dataset name, attribute name) pairs written by save()
    _DATASETS = (('Values', 'values'), ('Keys', 'keys'), ('Inputs', 'inputs'), ('Features', 'features'))

    def __init__(self, values, keys, inputs, features):
        """Coerce inputs to numpy arrays (float32 values, HDF5 strings for the rest)."""
        string_dtype = h5py.string_dtype()
        self.values = np.array(values, dtype=np.float32)
        self.keys = np.array(keys, dtype=string_dtype)
        self.inputs = np.array(inputs, dtype=string_dtype)
        self.features = np.array(features, dtype=string_dtype)

    def save(self, filename):
        """Write all four arrays into *filename* as separate HDF5 datasets."""
        with h5py.File(filename, 'w') as handle:
            for dataset_name, attribute in self._DATASETS:
                handle.create_dataset(dataset_name, data=getattr(self, attribute))
# NOTE(review): the decorator name was stripped in the source (`(frozen=True)`);
# restored as a frozen stdlib dataclass — confirm the project did not use attrs.
@dataclass(frozen=True)
class CueSpecs:
    """Physical specifications of a cue stick (SI units)."""

    brand: str = field(default='Predator')
    M: float = field(default=0.567)  # mass
    length: float = field(default=1.4732)
    tip_radius: float = field(default=0.007)
    butt_radius: float = field(default=0.02)

    @staticmethod
    def default() -> 'CueSpecs':
        """Return the default cue specification."""
        return CueSpecs()

    @staticmethod
    def snooker() -> 'CueSpecs':
        """Snooker-specific cue specs — not implemented yet."""
        raise NotImplementedError()
class AgentDialogue(LedgerApiDialogue):
    """Ledger API dialogue specialised for the agent's skill."""

    def __init__(self, dialogue_label: DialogueLabel, self_address: Address, role: BaseDialogue.Role, message_class: Type[LedgerApiMessage]) -> None:
        """Initialize the dialogue by delegating to the base class.

        :param dialogue_label: identifier of the dialogue
        :param self_address: address of the entity for whom this dialogue is maintained
        :param role: role of the agent in this dialogue
        :param message_class: message class used in this dialogue
        """
        LedgerApiDialogue.__init__(
            self,
            dialogue_label=dialogue_label,
            self_address=self_address,
            role=role,
            message_class=message_class,
        )
def extractVintaegeWordpressCom(item):
    """Parse release info for 'vintaege.wordpress.com' feed items."""
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if not (chp or vol) or 'preview' in item['title'].lower():
        return None
    # tag -> (series name, translation type)
    releases_by_tag = {
        'The Captivating Crown Prince': ('The Captivating Crown Prince', 'translated'),
        'Meow Meow Meow': ('Meow Meow Meow', 'translated'),
    }
    for tag, (name, tl_type) in releases_by_tag.items():
        if tag in item['tags']:
            return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
def get_employees_repos_commits_amount(df: DataFrame, repo_name_field: str, author_name_field: str, author_email_field: str, commits_id_field: str, result_field: str = 'Commits') -> DataFrame:
    """Count commits per (repository, author name, author email).

    Returns a DataFrame sorted by the commit count in descending order.
    """
    selected = df.select(repo_name_field, author_name_field, author_email_field, commits_id_field)
    grouped = selected.groupBy(repo_name_field, author_name_field, author_email_field)
    counted = grouped.agg(f.count(f.col(commits_id_field)).alias(result_field))
    return counted.sort(result_field, ascending=False)
def process_overlaps_all(hits):
    """Greedily resolve overlapping domain hits, keeping the lower-e-value hit.

    Each row of `hits` is (hit_id, e-value, score, hmm_from, hmm_to, seq_from,
    seq_to, dom_score); coordinates are inclusive.  Returns the retained rows.
    NOTE(review): when a new hit beats several previous ones, the loop keeps
    only non-overlapping previous rows and discards `tmp_overlapping` — confirm
    this all-or-nothing replacement is the intended semantics.
    """
    clean_doms = []
    # union of all sequence positions covered by currently accepted hits
    total_range = set()
    for (hid, heval, hscore, hmmfrom, hmmto, sqfrom, sqto, domscore) in hits:
        (hmmfrom, hmmto, sqfrom, sqto) = map(int, [hmmfrom, hmmto, sqfrom, sqto])
        # inclusive span of the candidate hit on the sequence
        new_span = set(range(sqfrom, (sqto + 1)))
        total_overlap = (new_span & total_range)
        if (len(total_overlap) > 0):
            # candidate overlaps something already accepted: compare against
            # every accepted hit it overlaps with
            best = True
            tmp_clean_doms = []
            tmp_overlapping = []
            for (phid, pheval, phscore, phmmfrom, phmmto, psqfrom, psqto, pdomscore) in clean_doms:
                prev_span = set(range(psqfrom, (psqto + 1)))
                overlap = (new_span & prev_span)
                if ((len(overlap) > 0) and (best == True)):
                    if (heval > pheval):
                        # previous hit has the lower (better) e-value: candidate loses
                        best = False
                        tmp_overlapping.append([phid, pheval, phscore, phmmfrom, phmmto, psqfrom, psqto, pdomscore])
                    else:
                        pass
                else:
                    # non-overlapping (or candidate already lost): keep previous hit
                    tmp_clean_doms.append([phid, pheval, phscore, phmmfrom, phmmto, psqfrom, psqto, pdomscore])
            if (best == True):
                # candidate wins: overlapping previous hits are dropped
                tmp_clean_doms.append([hid, heval, hscore, hmmfrom, hmmto, sqfrom, sqto, domscore])
            else:
                # candidate loses: restore the previous hit(s) it lost against
                tmp_clean_doms.extend(tmp_overlapping)
            clean_doms = tmp_clean_doms
            # rebuild the covered-position union from the surviving hits
            for (phid, pheval, phscore, phmmfrom, phmmto, psqfrom, psqto, pdomscore) in clean_doms:
                clean_span = set(range(psqfrom, (psqto + 1)))
                total_range.update(clean_span)
        else:
            # no overlap at all: accept unconditionally
            clean_doms.append([hid, heval, hscore, hmmfrom, hmmto, sqfrom, sqto, domscore])
            total_range.update(new_span)
    return clean_doms
def extractYyupdatesCom(item):
    """Parse release info for 'yyupdates.com' feed items."""
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if not (chp or vol) or 'preview' in item['title'].lower():
        return None
    # tag -> (series name, translation type)
    releases_by_tag = {'PRC': ('PRC', 'translated'), 'Loiterous': ('Loiterous', 'oel')}
    for tag, (name, tl_type) in releases_by_tag.items():
        if tag in item['tags']:
            return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
class AgencyClientDeclaration(AbstractObject):
    """SDK object describing an agency's declaration about the client it represents."""

    def __init__(self, api=None):
        super(AgencyClientDeclaration, self).__init__()
        self._isAgencyClientDeclaration = True
        self._api = api

    class Field(AbstractObject.Field):
        agency_representing_client = 'agency_representing_client'
        client_based_in_france = 'client_based_in_france'
        client_city = 'client_city'
        client_country_code = 'client_country_code'
        client_email_address = 'client_email_address'
        client_name = 'client_name'
        client_postal_code = 'client_postal_code'
        client_province = 'client_province'
        client_street = 'client_street'
        client_street2 = 'client_street2'
        has_written_mandate_from_advertiser = 'has_written_mandate_from_advertiser'
        is_client_paying_invoices = 'is_client_paying_invoices'

    _field_types = {
        'agency_representing_client': 'unsigned int',
        'client_based_in_france': 'unsigned int',
        'client_city': 'string',
        'client_country_code': 'string',
        'client_email_address': 'string',
        'client_name': 'string',
        'client_postal_code': 'string',
        'client_province': 'string',
        'client_street': 'string',
        'client_street2': 'string',
        'has_written_mandate_from_advertiser': 'unsigned int',
        'is_client_paying_invoices': 'unsigned int',
    }

    # BUG FIX: restored the stripped @classmethod decorator — the method takes
    # `cls`, not `self`.
    @classmethod
    def _get_field_enum_info(cls):
        field_enum_info = {}
        return field_enum_info
def test_dict_to_snake_case():
    """dict_to_snake_case handles None/empty input and normalises kebab- and camelCase keys."""
    assert dict_to_snake_case(None) is None
    expected = {'is_admin': True, 'full_name': 'Areeb Jamal', 'age': 432}
    cases = [
        ({}, {}),
        # already snake_case: unchanged
        ({'name': 'Areeb', 'age': 432}, {'name': 'Areeb', 'age': 432}),
        ({'is_admin': True, 'full_name': 'Areeb Jamal'}, {'is_admin': True, 'full_name': 'Areeb Jamal'}),
        # kebab-case and camelCase keys normalised
        ({'is-admin': True, 'full-name': 'Areeb Jamal', 'age': 432}, expected),
        ({'isAdmin': True, 'fullName': 'Areeb Jamal', 'age': 432}, expected),
        # mixed conventions in one dict
        ({'name': 'Areeb', 'isSuperAdmin': True, 'job-title': 'Software Engineer', 'field_level': 'Super'},
         {'name': 'Areeb', 'is_super_admin': True, 'job_title': 'Software Engineer', 'field_level': 'Super'}),
        # colliding keys: later entries win
        ({'is-superAdmin': True, 'isMega_level-event': True, 'isSuper_admin': False},
         {'is_super_admin': False, 'is_mega_level_event': True}),
    ]
    for raw, converted in cases:
        assert dict_to_snake_case(raw) == converted
class SettingsBinarySensor(EntityBase, BinarySensorEntity):
    """Binary sensor exposing a boolean appliance setting.

    Restored the stripped ``@property`` decorators — Home Assistant reads
    ``device_class``/``icon``/``is_on`` as attributes, not methods.
    """

    @property
    def device_class(self) -> str:
        return f'{DOMAIN}__settings'

    @property
    def name_ext(self) -> str:
        """Human-readable name suffix from the appliance setting, if known."""
        if self._key in self._appliance.settings:
            setting = self._appliance.settings[self._key]
            if setting:
                return setting.name
        return None

    @property
    def icon(self) -> str:
        return self.get_entity_setting('icon', 'mdi:tune')

    @property
    def is_on(self):
        """Current on/off state; compares against a configured 'on_state' when present."""
        if self._key in self._appliance.settings:
            if self.has_entity_setting('on_state'):
                return self._appliance.settings[self._key].value == self.get_entity_setting('on_state')
            return self._appliance.settings[self._key].value
        return None

    async def async_on_update(self, appliance: Appliance, key: str, value) -> None:
        """Push the new state to Home Assistant when the appliance reports a change."""
        self.async_write_ha_state()
# NOTE(review): decorator names and some parameter annotations were stripped in
# the source; restored as pytest marks and httpx.AsyncClient — confirm against
# the project's conftest.
@pytest.mark.asyncio
@pytest.mark.workspace_host
class TestDeleteUserPermission:
    """API tests for DELETE /users/{id}/permissions/{permission_id}."""

    async def test_unauthorized(self, unauthorized_api_assertions: HTTPXResponseAssertion, test_client_api: httpx.AsyncClient, test_data: TestData):
        user = test_data['users']['regular']
        permission = test_data['permissions']['castles:delete']
        response = await test_client_api.delete(f'/users/{user.id}/permissions/{permission.id}')
        unauthorized_api_assertions(response)

    @pytest.mark.authenticated_admin
    async def test_unknown_user(self, test_client_api: httpx.AsyncClient, not_existing_uuid: uuid.UUID, test_data: TestData):
        permission = test_data['permissions']['castles:delete']
        response = await test_client_api.delete(f'/users/{not_existing_uuid}/permissions/{permission.id}')
        assert response.status_code == status.HTTP_404_NOT_FOUND

    @pytest.mark.authenticated_admin
    async def test_not_added_permission(self, test_client_api: httpx.AsyncClient, test_data: TestData):
        user = test_data['users']['regular']
        permission = test_data['permissions']['castles:create']
        response = await test_client_api.delete(f'/users/{user.id}/permissions/{permission.id}')
        assert response.status_code == status.HTTP_404_NOT_FOUND

    @pytest.mark.authenticated_admin
    async def test_valid(self, test_client_api: httpx.AsyncClient, test_data: TestData, workspace_session: AsyncSession):
        permission = test_data['permissions']['castles:delete']
        user = test_data['users']['regular']
        response = await test_client_api.delete(f'/users/{user.id}/permissions/{permission.id}')
        assert response.status_code == status.HTTP_204_NO_CONTENT
        # the permission must be gone from the user's direct permissions
        user_permission_repository = UserPermissionRepository(workspace_session)
        user_permissions = await user_permission_repository.list(
            user_permission_repository.get_by_user_statement(user.id, direct_only=True)
        )
        assert len(user_permissions) == 0
# NOTE(review): the decorator name was stripped in the source; restored as
# @pytest.mark.skipif.
@pytest.mark.skipif(
    any(module in sys.modules for module in ('brotli', 'brotlicffi')),
    reason='urllib3 includes "br" to the "accept-encoding" headers.',
)
def test_websocket_headers(test_client_factory):
    """The websocket handshake must carry exactly the expected default headers."""

    async def app(scope: Scope, receive: Receive, send: Send) -> None:
        websocket = WebSocket(scope, receive=receive, send=send)
        # snapshot the request headers before accepting, then echo them back
        headers = dict(websocket.headers)
        await websocket.accept()
        await websocket.send_json({'headers': headers})
        await websocket.close()

    client = test_client_factory(app)
    with client.websocket_connect('/') as websocket:
        expected_headers = {
            'accept': '*/*',
            'accept-encoding': 'gzip, deflate',
            'connection': 'upgrade',
            'host': 'testserver',
            'user-agent': 'testclient',
            'sec-websocket-key': 'testserver==',
            'sec-websocket-version': '13',
        }
        data = websocket.receive_json()
        assert data == {'headers': expected_headers}
def sync_isolate(isolate_version: str, cwd: Path) -> dict[str, str]:
    """Clone the tagged isolate release and copy its ``.proto`` files into *cwd*.

    Returns a mapping of proto-file stem to its fully qualified module name.
    (The previous ``set[str]`` annotation contradicted the returned dict.)
    """
    target_repo_path = cwd / '_isolate_git'
    src_dir = target_repo_path / 'src'
    # NOTE(review): the clone argument list was garbled in the source; restored
    # as (URL, destination) — confirm the upstream repository URL.
    subprocess.check_call(
        ['git', 'clone', '--depth', '1', '--branch', f'v{isolate_version}',
         'https://github.com/fal-ai/isolate', str(target_repo_path)],
        cwd=cwd,
    )
    known_imports: dict[str, str] = {}
    for proto_file in target_repo_path.rglob('*.proto'):
        shutil.copy(proto_file, cwd)
        known_imports[proto_file.stem] = _to_qualified_name(proto_file.parent, src_dir)
    return known_imports
def test_commit_callbacks(db):
    """Exercise before/after commit callbacks for every transaction operation.

    COMMIT_CALLBACKS buckets ('all', 'save', 'destroy') are assumed to be
    module-level lists filled by the registered callbacks on db.commit().
    """
    # --- low-level insert via db.CommitWatcher.insert ---
    row = db.CommitWatcher.insert(foo='test1')
    # callbacks only fire on commit, not on the operation itself
    assert (not COMMIT_CALLBACKS['all'])
    db.commit()
    assert (len(COMMIT_CALLBACKS['all']) == 2)
    (before, after) = COMMIT_CALLBACKS['all']
    (order, op_type, ctx) = before
    assert (order == 'before')
    assert (op_type == TransactionOps.insert)
    assert (ctx.values.foo == 'test1')
    assert (ctx.return_value == row.id)
    (order, op_type, ctx) = after
    assert (order == 'after')
    assert (op_type == TransactionOps.insert)
    assert (ctx.values.foo == 'test1')
    assert (ctx.return_value == row.id)
    COMMIT_CALLBACKS['all'].clear()
    # --- low-level update via update_record ---
    row.update_record(foo='test1a')
    assert (not COMMIT_CALLBACKS['all'])
    db.commit()
    assert (len(COMMIT_CALLBACKS['all']) == 2)
    (before, after) = COMMIT_CALLBACKS['all']
    (order, op_type, ctx) = before
    assert (order == 'before')
    assert (op_type == TransactionOps.update)
    assert ctx.dbset
    assert (ctx.values.foo == 'test1a')
    assert (ctx.return_value == 1)
    (order, op_type, ctx) = after
    assert (order == 'after')
    assert (op_type == TransactionOps.update)
    assert ctx.dbset
    assert (ctx.values.foo == 'test1a')
    assert (ctx.return_value == 1)
    COMMIT_CALLBACKS['all'].clear()
    # --- low-level delete via delete_record ---
    row.delete_record()
    assert (not COMMIT_CALLBACKS['all'])
    db.commit()
    assert (len(COMMIT_CALLBACKS['all']) == 2)
    (before, after) = COMMIT_CALLBACKS['all']
    (order, op_type, ctx) = before
    assert (order == 'before')
    assert (op_type == TransactionOps.delete)
    assert ctx.dbset
    assert (ctx.return_value == 1)
    (order, op_type, ctx) = after
    assert (order == 'after')
    assert (op_type == TransactionOps.delete)
    assert ctx.dbset
    assert (ctx.return_value == 1)
    COMMIT_CALLBACKS['all'].clear()
    # --- high-level save (new row): emits insert + save callbacks ---
    row = CommitWatcher.new(foo='test2')
    row.save()
    assert (not COMMIT_CALLBACKS['all'])
    assert (not COMMIT_CALLBACKS['save'])
    db.commit()
    assert (len(COMMIT_CALLBACKS['all']) == 4)
    assert (len(COMMIT_CALLBACKS['save']) == 2)
    # 'before' callbacks for both ops precede both 'after' callbacks
    (before_ins, before_save, after_ins, after_save) = COMMIT_CALLBACKS['all']
    (order, op_type, ctx) = before_ins
    assert (order == 'before')
    assert (op_type == TransactionOps.insert)
    assert (ctx.values.foo == 'test2')
    assert (ctx.return_value == row.id)
    (order, op_type, ctx) = after_ins
    assert (order == 'after')
    assert (op_type == TransactionOps.insert)
    assert (ctx.values.foo == 'test2')
    assert (ctx.return_value == row.id)
    (order, op_type, ctx) = before_save
    assert (order == 'before')
    assert (op_type == TransactionOps.save)
    assert (ctx.values.foo == 'test2')
    assert (ctx.return_value == row.id)
    assert (ctx.row.id == row.id)
    assert ('id' in ctx.changes)
    (order, op_type, ctx) = after_save
    assert (order == 'after')
    assert (op_type == TransactionOps.save)
    assert (ctx.values.foo == 'test2')
    assert (ctx.return_value == row.id)
    assert (ctx.row.id == row.id)
    assert ('id' in ctx.changes)
    # op-specific 'save' bucket receives (order, ctx) pairs only
    (before_save, after_save) = COMMIT_CALLBACKS['save']
    (order, ctx) = before_save
    assert (order == 'before')
    assert (ctx.values.foo == 'test2')
    assert (ctx.return_value == row.id)
    assert (ctx.row.id == row.id)
    assert ('id' in ctx.changes)
    (order, ctx) = after_save
    assert (order == 'after')
    assert (ctx.values.foo == 'test2')
    assert (ctx.return_value == row.id)
    assert (ctx.row.id == row.id)
    assert ('id' in ctx.changes)
    COMMIT_CALLBACKS['all'].clear()
    COMMIT_CALLBACKS['save'].clear()
    # --- high-level save (existing row): emits update + save callbacks ---
    row.foo = 'test2a'
    row.save()
    assert (not COMMIT_CALLBACKS['all'])
    assert (not COMMIT_CALLBACKS['save'])
    db.commit()
    assert (len(COMMIT_CALLBACKS['all']) == 4)
    assert (len(COMMIT_CALLBACKS['save']) == 2)
    (before_upd, before_save, after_upd, after_save) = COMMIT_CALLBACKS['all']
    (order, op_type, ctx) = before_upd
    assert (order == 'before')
    assert (op_type == TransactionOps.update)
    assert ctx.dbset
    assert (ctx.values.foo == 'test2a')
    assert (ctx.return_value == 1)
    (order, op_type, ctx) = after_upd
    assert (order == 'after')
    assert (op_type == TransactionOps.update)
    assert ctx.dbset
    assert (ctx.values.foo == 'test2a')
    assert (ctx.return_value == 1)
    (order, op_type, ctx) = before_save
    assert (order == 'before')
    assert (op_type == TransactionOps.save)
    assert ctx.dbset
    assert (ctx.values.foo == 'test2a')
    assert (ctx.return_value == 1)
    assert (ctx.row.id == row.id)
    # 'updated_at' may or may not be tracked depending on model config
    assert set(ctx.changes.keys()).issubset({'foo', 'updated_at'})
    (order, op_type, ctx) = after_save
    assert (order == 'after')
    assert (op_type == TransactionOps.save)
    assert ctx.dbset
    assert (ctx.values.foo == 'test2a')
    assert (ctx.return_value == 1)
    assert (ctx.row.id == row.id)
    assert set(ctx.changes.keys()).issubset({'foo', 'updated_at'})
    (before_save, after_save) = COMMIT_CALLBACKS['save']
    (order, ctx) = before_save
    assert (order == 'before')
    assert ctx.dbset
    assert (ctx.values.foo == 'test2a')
    assert (ctx.return_value == 1)
    assert (ctx.row.id == row.id)
    assert set(ctx.changes.keys()).issubset({'foo', 'updated_at'})
    (order, ctx) = after_save
    assert (order == 'after')
    assert ctx.dbset
    assert (ctx.values.foo == 'test2a')
    assert (ctx.return_value == 1)
    assert (ctx.row.id == row.id)
    assert set(ctx.changes.keys()).issubset({'foo', 'updated_at'})
    COMMIT_CALLBACKS['all'].clear()
    COMMIT_CALLBACKS['save'].clear()
    # --- high-level destroy: emits delete + destroy callbacks ---
    row.destroy()
    assert (not COMMIT_CALLBACKS['all'])
    assert (not COMMIT_CALLBACKS['destroy'])
    db.commit()
    assert (len(COMMIT_CALLBACKS['all']) == 4)
    assert (len(COMMIT_CALLBACKS['destroy']) == 2)
    (before_del, before_destroy, after_del, after_destroy) = COMMIT_CALLBACKS['all']
    (order, op_type, ctx) = before_del
    assert (order == 'before')
    assert (op_type == TransactionOps.delete)
    assert ctx.dbset
    assert (ctx.return_value == 1)
    (order, op_type, ctx) = after_del
    assert (order == 'after')
    assert (op_type == TransactionOps.delete)
    assert ctx.dbset
    assert (ctx.return_value == 1)
    (order, op_type, ctx) = before_destroy
    assert (order == 'before')
    assert (op_type == TransactionOps.destroy)
    assert ctx.dbset
    assert (ctx.return_value == 1)
    assert (ctx.row.id == row.id)
    (order, op_type, ctx) = after_destroy
    assert (order == 'after')
    assert (op_type == TransactionOps.destroy)
    assert ctx.dbset
    assert (ctx.return_value == 1)
    assert (ctx.row.id == row.id)
    (before_destroy, after_destroy) = COMMIT_CALLBACKS['destroy']
    (order, ctx) = before_destroy
    assert (order == 'before')
    assert ctx.dbset
    assert (ctx.return_value == 1)
    assert (ctx.row.id == row.id)
    (order, ctx) = after_destroy
    assert (order == 'after')
    assert ctx.dbset
    assert (ctx.return_value == 1)
    assert (ctx.row.id == row.id)
    COMMIT_CALLBACKS['all'].clear()
    COMMIT_CALLBACKS['destroy'].clear()
def calc_checksum_for_ip_change(old_ip_packet, new_ip_packet, old_checksum, is_ipv6=False):
    """Incrementally update a checksum after an IP address change.

    Folds each 16-bit word of the old and new address bytes into the running
    checksum via fn_utils.calc_incre_csum. IPv6 addresses span 8 words (16
    bytes); IPv4 spans 2 words (4 bytes).
    """
    checksum = old_checksum
    word_count = 8 if is_ipv6 else 2
    for offset in range(0, word_count * 2, 2):
        old_word = (old_ip_packet[offset] << 8) | old_ip_packet[offset + 1]
        new_word = (new_ip_packet[offset] << 8) | new_ip_packet[offset + 1]
        checksum = fn_utils.calc_incre_csum(checksum, old_word, new_word)
    return checksum
def _gather_statistics(insert_time, cache_time, post_details): total = (insert_time + cache_time) file_time = '' if post_details.has_file: total += post_details.file_time file_time = 'file: {}ms, '.format(post_details.file_time) s = '{}db: {}ms, caches: {}ms, total: {}ms' return s.format(file_time, insert_time, cache_time, total)
class EnsureLeadingTrailingSlashTests():
    """Unit tests for ensure_leading_trailing_slash()."""

    def test_none(self):
        """None collapses to a bare slash."""
        result = ensure_leading_trailing_slash(None)
        assert result == '/'

    def test_empty(self):
        """Empty string collapses to a bare slash."""
        result = ensure_leading_trailing_slash('')
        assert result == '/'

    def test_slash(self):
        """A single slash is returned unchanged."""
        result = ensure_leading_trailing_slash('/')
        assert result == '/'

    def test_contents(self):
        """Already-wrapped paths are returned unchanged."""
        result = ensure_leading_trailing_slash('/foo/')
        assert result == '/foo/'

    def test_leading(self):
        """NOTE(review): only a leading slash is expected here — no trailing one is added."""
        result = ensure_leading_trailing_slash('/foo')
        assert result == '/foo'

    def test_trailing(self):
        """A missing leading slash is prepended."""
        result = ensure_leading_trailing_slash('foo/')
        assert result == '/foo'
def dock_window_theme(theme=None):
    """Get (and optionally set) the module-wide dock window theme.

    Lazily initialises the default theme on first use. Always returns the
    theme that was active before this call.
    """
    global _dock_window_theme
    if _dock_window_theme is None:
        # deferred import to avoid a module-load-time dependency cycle
        from .default_dock_window_theme import default_dock_window_theme
        _dock_window_theme = default_dock_window_theme
    previous_theme = _dock_window_theme
    if theme is not None:
        _dock_window_theme = theme
    return previous_theme
# NOTE(review): decorator names were stripped in the source; restored per
# flask-restx + flask-security conventions (`@api.route`, `@roles_accepted`,
# `@api.doc`, `@api.expect`) — confirm against the surrounding module.
@api.route('/<string:uid>', doc={'description': '', 'params': {'uid': 'Firmware UID'}})
class RestFirmwareGetWithUid(RestResourceBase):
    """REST endpoint to fetch a firmware by UID or trigger its re-analysis."""

    URL = '/rest/firmware'

    @roles_accepted(*PRIVILEGES['view_analysis'])
    @api.doc(responses={200: 'Success', 400: 'Unknown UID'}, params={'summary': {'description': 'include summary in result', 'in': 'query', 'type': 'boolean', 'default': 'false'}})
    def get(self, uid):
        """Return meta data and analysis results of the firmware with the given UID."""
        summary = get_boolean_from_request(request.args, 'summary')
        if summary:
            firmware = self.db.frontend.get_complete_object_including_all_summaries(uid)
        else:
            firmware = self.db.frontend.get_object(uid)
        if (not firmware) or (not isinstance(firmware, Firmware)):
            return error_message(f'No firmware with UID {uid} found', self.URL, {'uid': uid})
        fitted_firmware = self._fit_firmware(firmware)
        return success_message({'firmware': fitted_firmware}, self.URL, request_data={'uid': uid})

    @staticmethod
    def _fit_firmware(firmware):
        """Reduce a firmware object to the JSON-serialisable response structure."""
        meta = create_meta_dict(firmware)
        analysis = firmware.processed_analysis
        return {'meta_data': meta, 'analysis': analysis}

    @roles_accepted(*PRIVILEGES['submit_analysis'])
    @api.expect(firmware_model)
    def put(self, uid):
        """Schedule a re-analysis of the firmware with the given UID."""
        try:
            update = get_update(request.args)
        except ValueError as value_error:
            return error_message(str(value_error), self.URL, request_data={'uid': uid})
        return self._update_analysis(uid, update)

    def _update_analysis(self, uid, update):
        firmware = self.db.frontend.get_object(uid)
        if not firmware:
            return error_message(f'No firmware with UID {uid} found', self.URL, {'uid': uid})
        # 'unpacker' is not an analysis plugin: strip it and pass it as a flag
        unpack = 'unpacker' in update
        while 'unpacker' in update:
            update.remove('unpacker')
        firmware.scheduled_analysis = update
        supported_plugins = self.intercom.get_available_analysis_plugins().keys()
        for item in update:
            if item not in supported_plugins:
                return error_message(f"Unknown analysis system '{item}'", self.URL, {'uid': uid, 'update': update})
        self.intercom.add_re_analyze_task(firmware, unpack)
        if unpack:
            # re-add for the response payload so the caller sees the full request
            update.append('unpacker')
        return success_message({}, self.URL, {'uid': uid, 'update': update})
# NOTE(review): decorator names were stripped in the source; restored as pytest
# marks. The original docstring was also truncated ("'See ...").
@pytest.mark.external
@pytest.mark.skipif(has_openai_key is False, reason='OpenAI API key not available')
def test_labels_in_prompt(request: FixtureRequest):
    """Verify that entity labels are rendered inline into the generated prompt."""
    config = Config().from_str(request.getfixturevalue('zeroshot_cfg_string'))
    # run the llm component standalone: drop ner and initialization
    config['components'].pop('ner')
    config.pop('initialize')
    config['nlp']['pipeline'] = ['llm']
    config['components']['llm']['task']['labels'] = ['A', 'B', 'C']
    nlp = assemble_from_config(config)
    doc = Doc(get_lang_class('en')().vocab, words=['Well', 'hello', 'there'])
    doc.ents = [Span(doc, 0, 1, 'A'), Span(doc, 1, 2, 'B'), Span(doc, 2, 3, 'C')]
    assert 'Well[ENT0:A] hello[ENT1:B] there[ENT2:C]' in list(nlp.get_pipe('llm')._task.generate_prompts([doc]))[0]
# NOTE(review): decorator names were stripped in the source; restored as click
# command/options — confirm against the project's other CLI commands.
@click.command()
@click.option('--dry-run/--no-dry-run', default=False, help='Do not actually notify the people, but rather print information on stdout')
@click.option('--email', '-e', 'email_filter', help='Notify only ')
@click.option('--all/--not-all', default=False, help='Notify all (even the recently notified) relevant people')
def notify_outdated_chroots(dry_run, email_filter, all):
    """CLI entry point: notify people about their outdated chroots."""
    return notify_outdated_chroots_function(dry_run, email_filter, all)
class FaucetTaggedSwapVidMirrorTest(FaucetTaggedTest):
    # Verifies an ACL that simultaneously mirrors matched traffic to port 3
    # and rewrites (swaps) VLAN 100 to VLAN 101 on output.
    CONFIG_GLOBAL = '\nvlans:\n 100:\n description: "tagged"\n 101:\n description: "tagged"\nacls:\n 1:\n - rule:\n vlan_vid: 100\n actions:\n mirror: %(port_3)d\n force_port_vlan: 1\n output:\n swap_vid: 101\n allow: 1\n'
    CONFIG = '\n interfaces:\n %(port_1)d:\n tagged_vlans: [100]\n acl_in: 1\n %(port_2)d:\n tagged_vlans: [101]\n %(port_3)d:\n tagged_vlans: [100]\n %(port_4)d:\n tagged_vlans: [100]\n '

    def test_tagged(self):
        """Ping from host 1 and check the packet arrives with the expected VLAN
        both at the swapped-VID destination (vlan 101) and at the mirror port (vlan 100)."""
        (first_host, second_host, third_host) = self.hosts_name_ordered()[:3]

        def test_acl(tcpdump_host, tcpdump_filter):
            # Static ARP entry avoids an ARP exchange; FPINGS_ARGS_ONE sends the probe.
            tcpdump_txt = self.tcpdump_helper(tcpdump_host, tcpdump_filter, [(lambda : first_host.cmd(('arp -s %s %s' % (second_host.IP(), '01:02:03:04:05:06')))), (lambda : first_host.cmd(' '.join((self.FPINGS_ARGS_ONE, second_host.IP()))))], root_intf=True)
            # the echo request must be seen AND match the VLAN filter
            self.assertTrue(re.search(('%s: ICMP echo request' % second_host.IP()), tcpdump_txt))
            self.assertTrue(re.search(tcpdump_filter, tcpdump_txt))

        # swapped VID seen at the output port
        test_acl(second_host, 'vlan 101')
        # original VID seen at the mirror port
        test_acl(third_host, 'vlan 100')
def extractLazybirdtranslationsWordpressCom(item):
    """Parse release info for 'lazybirdtranslations.wordpress.com' feed items."""
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if not (chp or vol) or 'preview' in item['title'].lower():
        return None
    # tag -> (series name, translation type)
    releases_by_tag = {'PRC': ('PRC', 'translated'), 'Loiterous': ('Loiterous', 'oel')}
    for tag, (name, tl_type) in releases_by_tag.items():
        if tag in item['tags']:
            return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
def test_manifests():
    """Yield two sample manifest dictionaries for dataset/system resources."""
    manifest_1 = {
        'dataset': [{
            'name': 'Test Dataset 1',
            'organization_fides_key': 1,
            'datasetType': {},
            'datasetLocation': 'somedb:3306',
            'description': 'Test Dataset 1',
            'fides_key': 'some_dataset',
            'datasetTables': [],
        }],
        'system': [{
            'name': 'Test System 1',
            'organization_fides_key': 1,
            'systemType': 'mysql',
            'description': 'Test System 1',
            'fides_key': 'some_system',
        }],
    }
    manifest_2 = {
        'dataset': [{
            'name': 'Test Dataset 2',
            'description': 'Test Dataset 2',
            'organization_fides_key': 1,
            'datasetType': {},
            'datasetLocation': 'somedb:3306',
            'fides_key': 'another_dataset',
            'datasetTables': [],
        }],
        'system': [{
            'name': 'Test System 2',
            'organization_fides_key': 1,
            'systemType': 'mysql',
            'description': 'Test System 2',
            'fides_key': 'another_system',
        }],
    }
    yield {'manifest_1': manifest_1, 'manifest_2': manifest_2}
def get_uploaded_name(metadata, release=None, host=None, suite=None):
    """Compose the canonical upload filename for a benchmark results file.

    Falls back to 'main' for the release and to the metadata hostname for the
    host. Raises NotImplementedError when no commit id is available.
    """
    implname = 'cpython'
    release = release or 'main'
    commit = metadata.get('commit_id')
    if not commit:
        # without a commit id there is no way to name the upload
        raise NotImplementedError
    host = host or metadata['hostname']
    compat = get_compat_id(metadata)
    # the default suite name is omitted from the filename
    suite_part = f'-{suite}' if suite and suite != 'pyperformance' else ''
    return f'{implname}-{release}-{commit[:10]}-{host}-{compat}{suite_part}.json'
def test_timeout_is_properly_raised(connection, server):
    # NOTE: legacy generator-based coroutine test (pre-async/await style); the
    # surrounding test runner drives the outer generator.
    def slow_request():
        # sleep longer than the client-side timeout below
        (yield from asyncio.sleep(0.01))
        return {}
    server.register_response('/_search', slow_request())
    # client timeout (0.1 ms) is far below the handler's 10 ms sleep,
    # so the request must abort with ConnectionTimeout
    with raises(ConnectionTimeout):
        (yield from connection.perform_request('GET', '/_search', timeout=0.0001))
class TestServiceStub(object):
    """Client-side stub for the test.TestService gRPC service."""

    def __init__(self, channel):
        """Create one unary-unary callable per RPC on the given channel.

        All RPCs share the same request/response message types, so they are
        built uniformly from the method name.
        """
        make_unary = channel.unary_unary
        rpc_names = (
            'GetServerResponse',
            'GetServerResponseAbort',
            'GetServerResponseUnavailable',
            'GetServerResponseException',
        )
        for rpc_name in rpc_names:
            setattr(self, rpc_name, make_unary(
                '/test.TestService/' + rpc_name,
                request_serializer=test__pb2.Message.SerializeToString,
                response_deserializer=test__pb2.MessageResponse.FromString,
            ))
class Newton_controller(proteus.StepControl.Newton_controller):
    """Newton step controller that resets the solution history on initialization."""

    def __init__(self, model, nOptions):
        proteus.StepControl.Newton_controller.__init__(self, model, nOptions)

    def initializeTimeHistory(self):
        """Zero the solution, refresh residuals and push the state into the VOF model history."""
        proteus.StepControl.Newton_controller.initializeTimeHistory(self)
        for level_model, u_vec, r_vec in zip(self.model.levelModelList, self.model.uList, self.model.rList):
            u_vec.flat[:] = 0.0
            level_model.getResidual(u_vec, r_vec)
            level_model.coefficients.postStep(self.t_model)
            vof_model = level_model.coefficients.vofModel
            vof_model.updateTimeHistory(self.t_model, resetFromDOF=False)
            vof_model.timeIntegration.updateTimeHistory(resetFromDOF=False)
class RepentanceStick(GenericAction):
    # Card action: the source strips up to two cards from the target, picking
    # from hand (cards/showncards), equipment and fatetell zones.
    def apply_action(self) -> bool:
        (src, tgt) = (self.source, self.target)
        g = self.game
        # `cats` holds live references to the target's card lists, so they stay
        # current while cards are dropped inside the loop below
        catnames = ('cards', 'showncards', 'equips', 'fatetell')
        cats = [getattr(tgt, i) for i in catnames]
        l: List[PhysicalCard] = []
        for i in range(2):
            # stop early once the target has no cards left in any zone
            if (not (tgt.cards or tgt.showncards or tgt.equips or tgt.fatetell)):
                break
            # let the source choose; fall back to a random pick on decline/timeout
            card = g.user_input([src], ChoosePeerCardInputlet(self, tgt, catnames))
            if (not card):
                card = random_choose_card(g, cats)
            if card:
                l.append(card)
                # reveal to everyone except the target, then drop the card
                g.players.exclude(tgt).player.reveal(card)
                g.process_action(DropCards(src, tgt, [card]))
        # record what was stripped for later inspection
        self.cards = l
        return True
def extractFeijinchuanWordpressCom(item):
    """Parse release info for 'feijinchuan.wordpress.com' feed items."""
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if not (chp or vol) or 'preview' in item['title'].lower():
        return None
    # tag -> (series name, translation type)
    releases_by_tag = {
        'Martial Inverse': ('Martial Inverse', 'translated'),
        'PRC': ('PRC', 'translated'),
        'Loiterous': ('Loiterous', 'oel'),
    }
    for tag, (name, tl_type) in releases_by_tag.items():
        if tag in item['tags']:
            return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    # untagged posts: fall back to matching on the title text
    if item['tags'] == ['Uncategorized']:
        title_releases = [('Martial Inverse Chapter', 'Martial Inverse', 'translated')]
        for title_fragment, name, tl_type in title_releases:
            if title_fragment.lower() in item['title'].lower():
                return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
class PDBDatabase(dict):
    """In-memory view of a Pioneer rekordbox PDB database file.

    The instance is a dict keyed by entity category ('tracks', 'artists',
    'albums', 'playlists', 'playlist_map', 'artwork', 'colors', 'genres',
    'labels', 'key_names'); each value is the list of parsed row objects
    collected from the matching page type of the binary file.
    """

    def __init__(self):
        # Cleanup: the original also passed `self` (then still empty) as the
        # first positional arg to dict.__init__, which was a no-op.
        super().__init__(
            tracks=[], artists=[], albums=[], playlists=[], playlist_map=[],
            artwork=[], colors=[], genres=[], labels=[], key_names=[])
        # Raw parse tree produced by load_file(); None until a file is loaded.
        self.parsed = None

    def _get_by_id(self, category, entity_id, label):
        """Return the first entry of self[category] with .id == entity_id.

        Raises KeyError with a message naming *label* when absent.  Linear
        scan, matching the original lookup behavior.
        """
        for entry in self[category]:
            if entry.id == entity_id:
                return entry
        raise KeyError('PDBDatabase: {} {} not found'.format(label, entity_id))

    def get_track(self, track_id):
        return self._get_by_id('tracks', track_id, 'track')

    def get_artist(self, artist_id):
        return self._get_by_id('artists', artist_id, 'artist')

    def get_album(self, album_id):
        return self._get_by_id('albums', album_id, 'album')

    def get_key(self, key_id):
        return self._get_by_id('key_names', key_id, 'key')

    def get_genre(self, genre_id):
        return self._get_by_id('genres', genre_id, 'genre')

    def get_label(self, label_id):
        # BUG FIX: the original formatted this KeyError with the undefined
        # name `genre_id`, so a missing label raised NameError instead.
        return self._get_by_id('labels', label_id, 'label')

    def get_color(self, color_id):
        return self._get_by_id('colors', color_id, 'color')

    def get_artwork(self, artwork_id):
        return self._get_by_id('artwork', artwork_id, 'artwork')

    def get_playlists(self, folder_id):
        """Return the playlists directly inside *folder_id*, ordered by sort_order."""
        in_folder = (pl for pl in self['playlists'] if pl.folder_id == folder_id)
        return sorted(in_folder, key=lambda pl: pl.sort_order)

    def get_playlist(self, playlist_id):
        """Return the tracks referenced by *playlist_id*.

        NOTE: tracks come back in database order, not playlist entry order —
        the sort below only orders the mapping rows used for the membership
        test.  Kept as-is for backward compatibility with existing callers.
        """
        pms = [pm for pm in self['playlist_map'] if pm.playlist_id == playlist_id]
        sorted_pms = sorted(pms, key=lambda pm: pm.entry_index)
        return [t for t in self['tracks']
                if any(t.id == pm.track_id for pm in sorted_pms)]

    def collect_entries(self, page_type, target):
        """Append every enabled entry of all *page_type* pages to self[target]."""
        for page in filter(lambda x: x.page_type == page_type, self.parsed.pages):
            for entry_block in page.entry_list:
                # Entries are stored back-to-front; walk both lists reversed
                # so each entry stays aligned with its enabled flag.
                for entry, enabled in zip(reversed(entry_block.entries),
                                          reversed(entry_block.entry_enabled)):
                    if not enabled:
                        continue
                    self[target].append(entry)
        logging.debug('done collecting {}'.format(target))

    def load_file(self, filename):
        """Parse *filename* with the PDBFile construct and collect all entities.

        Raises RuntimeError when fewer bytes were parsed than the file holds.
        """
        logging.info('Loading database "%s"', filename)
        stat = os.stat(filename)
        fh = PDBFile
        with open(filename, 'rb') as f:
            self.parsed = fh.parse_stream(f)
        if stat.st_size != self.parsed['file_size']:
            raise RuntimeError('failed to parse the complete file ({}/{} bytes parsed)'.format(
                self.parsed['file_size'], stat.st_size))
        self.collect_entries('block_tracks', 'tracks')
        self.collect_entries('block_artists', 'artists')
        self.collect_entries('block_albums', 'albums')
        self.collect_entries('block_playlists', 'playlists')
        self.collect_entries('block_playlist_map', 'playlist_map')
        self.collect_entries('block_artwork', 'artwork')
        self.collect_entries('block_colors', 'colors')
        self.collect_entries('block_genres', 'genres')
        self.collect_entries('block_keys', 'key_names')
        self.collect_entries('block_labels', 'labels')
        logging.info('Loaded %d pages, %d tracks, %d playlists',
                     len(self.parsed.pages), len(self['tracks']), len(self['playlists']))
def filter_hardware_npu_np6_session_stats_data(json):
    """Strip *json* down to the option fields this endpoint supports.

    Runs remove_invalid_fields first, then keeps only the supported options
    (currently just 'dev_id') whose values are present and not None.
    """
    option_list = ['dev_id']
    json = remove_invalid_fields(json)
    return {
        attribute: json[attribute]
        for attribute in option_list
        if attribute in json and json[attribute] is not None
    }
def format_body(headers: Dict[(str, Any)], body: Optional[str]) -> Tuple[(Dict[(str, Any)], Optional[str])]:
    """Prepare a request body according to the Content-Type header.

    When no Content-Type header is present (case-insensitive match) it is
    defaulted to application/json and written back into *headers*.  JSON and
    plain-text bodies pass through unchanged; form-urlencoded bodies are
    re-encoded from their JSON representation.  Raises FidesopsException for
    any other content type.  A None body is returned untouched.
    """
    if body is None:
        return headers, None

    # Case-insensitive Content-Type lookup.
    content_type = None
    for header, value in headers.items():
        if header.lower() == 'content-type':
            content_type = value
            break

    if content_type is None:
        content_type = 'application/json'
        headers['Content-Type'] = content_type

    if content_type == 'application/json':
        return headers, body
    if content_type == 'application/x-www-form-urlencoded':
        return headers, multidimensional_urlencode(json.loads(body))
    if content_type == 'text/plain':
        return headers, body
    raise FidesopsException(f'Unsupported Content-Type: {content_type}')
class Command(BaseCommand):
    """Import any newly published Drug Tariff Part VIIIA monthly data.

    Scrapes the tariff index page for "Part VIIIA" CSV links, imports every
    month not already recorded in ImportLog, uploads the refreshed
    TariffPrice table via the Client helper, and posts a summary to Slack.
    """

    def handle(self, *args, **kwargs):
        # BUG FIX: the URL literal was truncated to an unterminated string in
        # this copy of the file (a syntax error).  Restored to the NHSBSA
        # Drug Tariff Part VIII index page, which matches the
        # 'Part%20VIIIA...csv' links scraped below —
        # TODO(review): verify against version control.
        url = 'https://www.nhsbsa.nhs.uk/pharmacies-gp-practices-and-appliance-contractors/drug-tariff/drug-tariff-part-viii/'
        rsp = requests.get(url)
        doc = bs4.BeautifulSoup(rsp.content, 'html.parser')
        # calendar.month_abbr is ['', 'Jan', ..., 'Dec']; lower-cased so that
        # .index() of a lower-cased 3-letter abbreviation yields the month number.
        month_abbrs = [x.lower() for x in calendar.month_abbr]
        imported_months = []
        for a in doc.findAll('a', href=re.compile(r'Part%20VIIIA.+\.csv$')):
            base_filename = unquote(os.path.splitext(os.path.basename(a.attrs['href']))[0])
            if base_filename == 'Part VIIIA Nov 20 updated':
                # One-off filename that doesn't follow the "<month> <year>" pattern.
                year, month = '2020', 11
            else:
                words = re.split('[ -]+', base_filename)
                month_name, year = words[-2:]
                # Keep only the leading digits of the year token; expand 2-digit years.
                year = re.match(r'\d+', year).group()
                if len(year) == 2:
                    year = '20' + year
                month = month_abbrs.index(month_name.lower()[:3])
            date = datetime.date(int(year), month, 1)
            # Skip months that were already imported.
            if ImportLog.objects.filter(category='tariff', current_at=date).exists():
                continue
            csv_url = urljoin(url, a.attrs['href'])
            csv_data = requests.get(csv_url).text
            rows = csv.reader(StringIO(csv_data))
            import_month(rows, date)
            imported_months.append((year, month))
        if imported_months:
            # Presumably pushes the refreshed table to the data warehouse —
            # confirm Client('dmd').upload_model semantics against the project.
            client = Client('dmd')
            client.upload_model(TariffPrice)
            for year, month in imported_months:
                notify_slack('Imported Drug Tariff for %s_%s' % (year, month))
        else:
            notify_slack('Found no new tariff data to import')
def test_counterexample_5():
    """Regression test for IdentityElimination on a diamond-plus-self-loop CFG.

    On one branch x0/y0 are identity copies of argument z0, on the other y1
    is the constant 10.  After the pass, the identity copies must be gone
    and the phi operands that read them must reference z0 directly, while
    the genuine constant assignment to y1 survives unchanged.
    """
    # Three SSA versions (ssa labels 0..2) of each of x, y and z.
    (x0, x1, x2) = [Variable('x', Integer.int32_t(), i) for i in range(3)]
    (y0, y1, y2) = [Variable('y', Integer.int32_t(), i) for i in range(3)]
    (z0, z1, z2) = [Variable('z', Integer.int32_t(), i) for i in range(3)]
    cfg = ControlFlowGraph()
    cfg.add_nodes_from([
        (head := BasicBlock(0, instructions=[])),
        # Branch 1: x and y are plain copies of the argument z.
        (start_1 := BasicBlock(1, instructions=[
            Assignment(x0.copy(), z0.copy()),
            Assignment(y0.copy(), z0.copy())])),
        # Branch 2: y gets a real (non-identity) constant definition.
        (start_2 := BasicBlock(2, instructions=[
            Assignment(y1.copy(), Constant(10, Integer.int32_t()))])),
        # Loop block: phis merge both entry paths plus the self-loop values.
        (loop_body := BasicBlock(3, instructions=[
            Phi(x2.copy(), [x0.copy(), y1.copy(), y2.copy()]),
            Phi(y2.copy(), [y0.copy(), y1.copy(), y1.copy()]),
            Branch(Condition(OperationType.greater, [x2.copy(), y2.copy()]))])),
        (end := BasicBlock(4, instructions=[]))])
    cfg.add_edges_from([
        TrueCase(head, start_1),
        FalseCase(head, start_2),
        UnconditionalEdge(start_1, loop_body),
        UnconditionalEdge(start_2, loop_body),
        # Self loop: the loop body feeds its own phis.
        FalseCase(loop_body, loop_body),
        TrueCase(loop_body, end)])
    IdentityElimination().run(DecompilerTask('test', cfg, function_parameters=[z0.copy()]))
    # Identity copies removed; phi operands now read z0 directly.
    assert (head.instructions == [])
    assert (start_1.instructions == [])
    assert (start_2.instructions == [Assignment(y1.copy(), Constant(10, Integer.int32_t()))])
    assert (loop_body.instructions == [
        Phi(x2.copy(), [z0.copy(), y1.copy(), y2.copy()]),
        Phi(y2.copy(), [z0.copy(), y1.copy(), y1.copy()]),
        Branch(Condition(OperationType.greater, [x2.copy(), y2.copy()]))])
    assert (end.instructions == [])
class TestErrors(unittest.TestCase):
    """Exercise the UPnP error-code description lookup table."""

    desc = upnp.errors.ERR_CODE_DESCRIPTIONS

    def test_existing_err(self):
        # Every explicitly registered code maps to its stored description.
        for code, description in self.desc._descriptions.items():
            self.assertEqual(self.desc[code], description)

    def test_non_integer(self):
        # Non-integer keys must raise KeyError with the documented message.
        try:
            self.desc['a string']
        except KeyError as exc:
            self.assertEqual(str(exc), '"\'key\' must be an integer"')
        else:
            raise Exception('Should have raised KeyError.')

    def _assert_range(self, low, high, message):
        # Helper: every code in [low, high] maps to the same catch-all text.
        for code in range(low, high + 1):
            self.assertEqual(self.desc[code], message)

    def test_reserved(self):
        self._assert_range(606, 612, 'These ErrorCodes are reserved for UPnP DeviceSecurity.')

    def test_common_action(self):
        self._assert_range(613, 699, 'Common action errors. Defined by UPnP Forum Technical Committee.')

    def test_action_specific_committee(self):
        self._assert_range(700, 799, 'Action-specific errors defined by UPnP Forum working committee.')

    def test_action_specific_vendor(self):
        self._assert_range(800, 899, 'Action-specific errors for non-standard actions. Defined by UPnP vendor.')
class RFOptimizer(HessianOptimizer):
    """Rational-function optimizer with optional GDIIS/GEDIIS and line search.

    On top of the plain step from HessianOptimizer, the proposed step can be
    refined by GDIIS or GEDIIS extrapolation or, failing that, by a
    polynomial line search; the unmodified reference step is used when no
    refinement applies.
    """

    def __init__(
        self,
        geometry: Geometry,
        line_search: bool = True,
        gediis: bool = False,
        gdiis: bool = True,
        gdiis_thresh: float = 0.0025,
        gediis_thresh: float = 0.01,
        gdiis_test_direction: bool = True,
        max_micro_cycles: int = 25,
        adapt_step_func: bool = False,
        **kwargs,
    ) -> None:
        """
        Parameters
        ----------
        geometry
            Geometry to be optimized.
        line_search
            Try a polynomial line search when no DIIS result is available.
        gdiis
            Enable GDIIS extrapolation (tried before GEDIIS).
        gediis
            Enable GEDIIS extrapolation.
        gdiis_thresh
            RMS(reference step) threshold below which GDIIS is attempted.
        gediis_thresh
            RMS(gradient) threshold below which GEDIIS is attempted.
        gdiis_test_direction
            Forwarded to gdiis(); whether to test the extrapolated direction.
        max_micro_cycles
            Forwarded to HessianOptimizer.
        adapt_step_func
            Stored flag; presumably consumed by get_step_func — confirm in
            the base class.
        """
        super().__init__(geometry, max_micro_cycles=max_micro_cycles, **kwargs)
        self.line_search = line_search
        self.gediis = gediis
        self.gdiis = gdiis
        self.gdiis_thresh = gdiis_thresh  # compared against rms(step)
        self.gediis_thresh = gediis_thresh  # compared against rms(gradient)
        self.gdiis_test_direction = gdiis_test_direction
        self.adapt_step_func = adapt_step_func
        # Diagnostic counters, reported in postprocess_opt().
        self.successful_gediis = 0
        self.successful_gdiis = 0
        self.successful_line_search = 0

    def optimize(self):
        """Return the next geometry step, refined by DIIS/line search if possible."""
        energy, gradient, H, big_eigvals, big_eigvecs, resetted = self.housekeeping()
        step_func, pred_func = self.get_step_func(big_eigvals, gradient)
        # Plain reference step/gradient; also used for the energy prediction.
        ref_gradient = gradient.copy()
        ref_step = step_func(big_eigvals, big_eigvecs, gradient)

        # BUG FIX: this log message previously contained a raw newline inside
        # a single-quoted literal, which broke the string (syntax error).
        if self.check_convergence(ref_step)[0]:
            self.log('Convergence achieved! Skipping inter/extrapolation.')
            return ref_step

        ip_gradient = None  # inter-/extrapolated gradient
        ip_step = None  # step towards the inter-/extrapolated geometry
        diis_result = None

        rms_forces = rms(gradient)
        rms_step = rms(ref_step)
        # Refinements are only attempted near convergence and never in the
        # cycle right after a Hessian reset.
        can_diis = (rms_step <= self.gdiis_thresh) and (not resetted)
        can_gediis = (rms_forces <= self.gediis_thresh) and (not resetted)
        if self.gdiis and can_diis:
            # Gradients act as the DIIS error vectors.
            err_vecs = -np.array(self.forces)
            diis_result = gdiis(err_vecs, self.coords, self.forces, ref_step,
                                test_direction=self.gdiis_test_direction)
            self.successful_gdiis += 1 if diis_result else 0
        # GEDIIS only when GDIIS was not attempted.
        elif self.gediis and can_gediis:
            diis_result = gediis(self.coords, self.energies, self.forces, hessian=H)
            self.successful_gediis += 1 if diis_result else 0

        try:
            ip_coords = diis_result.coords
            ip_step = ip_coords - self.geometry.coords
            ip_gradient = -diis_result.forces
        # AttributeError: diis_result is None (refinement skipped or failed).
        except AttributeError:
            self.log("GDIIS didn't succeed.")

        # Fall back to a polynomial line search when DIIS produced nothing.
        if self.line_search and (diis_result is None) and (not resetted):
            ip_energy, ip_gradient, ip_step = poly_line_search(
                energy, self.energies[-2], gradient, -self.forces[-2],
                self.steps[-1], cubic_max_x=-1, quartic_max_x=2,
                logger=self.logger)
            self.successful_line_search += 1 if ip_gradient is not None else 0

        # Use the interpolated gradient if any refinement succeeded; otherwise
        # keep the true gradient and a zero correction step.
        if (ip_gradient is not None) and (ip_step is not None):
            gradient = ip_gradient
        else:
            ip_step = np.zeros_like(gradient)

        # Step from the (possibly interpolated) gradient, shifted by the step
        # towards the interpolated geometry.
        step = step_func(big_eigvals, big_eigvecs, gradient)
        step = step + ip_step

        # Predicted energy change from the reference gradient and Hessian.
        prediction = pred_func(ref_gradient, H, step)
        self.predicted_energy_changes.append(prediction)

        return step

    def postprocess_opt(self):
        """Log how often each step-refinement strategy succeeded."""
        msg = (
            'Successful invocations:\n'
            f'        GEDIIS: {self.successful_gediis}\n'
            f'         GDIIS: {self.successful_gdiis}\n'
            f'   Line Search: {self.successful_line_search}\n'
        )
        self.log(msg)
def main(length: float=(2 * np.pi), thickness: float=0.5, rotation: float=90.0, increment: float=5.0, elemsize: float=0.5, poisson: float=0.4, restol: float=1e-08, trim: float=(np.pi / 2), stretch: float=1.0, degree: int=2):
    # NOTE(review): this function is damaged in this copy of the file.
    # Several identifiers — very likely non-ASCII Greek names (the angular
    # coordinate and the rotation-angle argument) — and the namespace
    # evaluation operators of the nutils expressions have been stripped
    # during extraction.  The lines flagged below are not valid Python as
    # written; restore from the original nutils example before use.
    zgrid = (length * np.linspace((- 0.5), 0.5, (round((length / elemsize)) + 1)))
    grid = np.linspace((- np.pi), np.pi, (round(((2 * np.pi) / elemsize)) + 1))
    (cylinder, (z, )) = mesh.rectilinear([zgrid, grid], periodic=(1,))  # second coordinate name missing
    = ( - ((((z / length) * np.pi) / 180) * function.Argument('', shape=())))  # assignment target and argument name missing
    if trim:
        cylinder = cylinder.trim(((( ** 2) + (z ** 2)) - (trim ** 2)), maxrefine=2)  # first operand missing
    (extrusion, r) = mesh.line([(1 - (thickness / 2)), (1 + (thickness / 2))], space='T')
    topo = (cylinder * extrusion)
    bezier = topo.boundary.sample('bezier', 5)
    ns = Namespace()
    ns.X = np.stack([z, (r * np.sin()), (r * np.cos())])  # sin/cos arguments missing
    ns.X = np.stack([(z * stretch), (r * np.sin()), (r * np.cos())])  # duplicate target; second name presumably differed
    ns.define_for('X', gradient='', jacobians=('dV',))  # gradient name missing
    ns.add_field('u', topo.basis('spline', degree=degree, removedofs=((0, (- 1)), None, None)), shape=(3,))
    ns.x_i = 'X_i + u_i'
    ns.F_ij = '_j(x_i)'  # gradient operator name missing
    ns.J = np.linalg.det(ns.F)
    ns.D = (poisson / (0.5 - poisson))
    ns.W = 'F_ij F_ij - 3 - 2 log(J) + D (J - 1)^2'
    energy = topo.integral(('W dV' ns), degree=(degree * 2))  # namespace-evaluation operator missing
    args = {}
    clim = ((0, 1) if (stretch == 1) else None)
    for args[''] in np.linspace(0, rotation, (round((rotation / increment)) + 1)):  # argument key name missing
        with log.context('{:.1f} deg', **args):
            args = solver.minimize('u,', energy, arguments=args).solve(restol)  # target list likely truncated
            (x, W) = bezier.eval((['x_i', 'W'] ns), **args)  # namespace-evaluation operator missing
            export.triplot('energy.jpg', x, W, tri=bezier.tri, hull=bezier.hull, clim=clim, cmap='inferno_r', vlabel='strain energy density')
            clim = None
    return args
class JudogMonitor(Monitor):
    """Telegram monitor for the 'Mulgoreemby' channel's linked chat.

    NOTE(review): several non-ASCII (likely Korean) string literals in this
    class were stripped to empty strings during extraction — the display
    name, the log messages and the keywords sent to / matched against the
    bot.  As written the `'' in ...` membership tests are always true, so
    restore the literals from the original source before deploying.  The
    if/else nesting below is also a best-effort reconstruction (indentation
    was lost); confirm against version control.
    """
    name = ''
    chat_name = 'Mulgoreemby'
    chat_keyword = ':\\d+'
    bot_username = 'mulgorebot'
    notify_create_name = True

    async def init(self):
        # Resolve the channel's linked discussion chat and monitor it instead.
        channel = (await self.client.get_chat('Mulgoreemby'))
        self.chat_name = channel.linked_chat.id
        self.log.info(f': {channel.linked_chat.title}')
        return True

    async def on_trigger(self, message: Message, key, reply):
        # Converse with the bot: /start, then two follow-up messages with
        # randomized human-like delays, checking each reply for a keyword.
        wr = async_partial(self.client.wait_reply, self.bot_username)
        msg: Message = (await wr('/start'))
        if ('' in (msg.caption or msg.text)):
            (await asyncio.sleep(random.uniform(2, 4)))
            msg = (await wr(''))
            if ('' in (msg.caption or msg.text)):
                (await asyncio.sleep(random.uniform(2, 4)))
                msg = (await wr(''))
                if ('' in (msg.caption or msg.text)):
                    return
                else:
                    self.log.bind(notify=True).info(f'{self.name}, .')
def downgrade():
    """Drop the billing/company columns added to `users` by the matching upgrade."""
    for column_name in (
        'company',
        'billing_zip_code',
        'billing_tax_info',
        'billing_phone',
        'billing_country',
        'billing_contact_name',
        'billing_city',
        'billing_address',
        'billing_additional_info',
    ):
        op.drop_column('users', column_name)
# BUG FIX: the decorator name was garbled to a bare '.host_test' in this copy
# of the file (a syntax error); '@pytest.mark.host_test' is the obvious
# reconstruction for this espefuse host-test base class — verify against
# version control.
@pytest.mark.host_test
class EfuseTestCase():
    """Base class for espefuse host-side tests.

    Depending on module-level configuration the tests run either against an
    eFuse emulation backed by a temporary file (reset_port is None) or
    against a real chip on arg_port, whose eFuses are wiped between tests by
    toggling DTR/RTS on reset_port.
    """

    def setup_method(self):
        """Build the base espefuse command line and start from clean eFuses."""
        if reset_port is None:
            # Virtual mode: emulate the eFuse block in a temp file.
            self.efuse_file = tempfile.NamedTemporaryFile(delete=False)
            self.base_cmd = f'{sys.executable} -m espefuse --chip {arg_chip} --virt --path-efuse-file {self.efuse_file.name} -d'
        else:
            self.base_cmd = f'{sys.executable} -m espefuse --chip {arg_chip} --port {arg_port} -d'
        self.reset_efuses()

    def teardown_method(self):
        if reset_port is None:
            self.efuse_file.close()
            os.unlink(self.efuse_file.name)

    def reset_efuses(self):
        """Pulse DTR/RTS on the auxiliary serial port to reset the chip's eFuses."""
        reset_port.dtr = False
        reset_port.rts = False
        time.sleep(0.05)
        reset_port.dtr = True
        reset_port.rts = True
        time.sleep(0.05)
        reset_port.dtr = False
        reset_port.rts = False

    def get_esptool(self):
        """Return a chip object: the detected real chip or the eFuse emulator."""
        if reset_port is not None:
            import esptool

            esp = esptool.cmds.detect_chip(port=arg_port)
            del esptool
        else:
            import espefuse

            efuse = espefuse.SUPPORTED_CHIPS[arg_chip].efuse_lib
            esp = efuse.EmulateEfuseController(self.efuse_file.name)
            del espefuse
            del efuse
        return esp

    def _set_34_coding_scheme(self):
        self.espefuse_py('burn_efuse CODING_SCHEME 1')

    def check_data_block_in_log(self, log, file_path, repeat=1, reverse_order=False, offset=0):
        """Assert the hex dump of *file_path* occurs exactly *repeat* times in *log*.

        *offset* prepends that many zero bytes before hashing out the bytes;
        *reverse_order* reverses the byte order before matching.
        """
        with open(file_path, 'rb') as f:
            data = (BitStream('0x00') * offset) + BitStream(f)
        blk = data.readlist(f'{data.len // 8}*uint:8')
        blk = blk[::-1] if reverse_order else blk
        hex_blk = ' '.join(f'{num:02x}' for num in blk)
        assert repeat == log.count(hex_blk)

    def espefuse_not_virt_py(self, cmd, check_msg=None, ret_code=0):
        """Run espefuse without the base (--virt/--port) arguments."""
        full_cmd = ' '.join((f'{sys.executable} -m espefuse', cmd))
        return self._run_command(full_cmd, check_msg, ret_code)

    def espefuse_py(self, cmd, do_not_confirm=True, check_msg=None, ret_code=0):
        """Run an espefuse command, then verify no eFuse errors were left behind."""
        full_cmd = ' '.join([self.base_cmd, '--do-not-confirm' if do_not_confirm else '', cmd])
        output = self._run_command(full_cmd, check_msg, ret_code)
        self._run_command(' '.join([self.base_cmd, 'check_error']), 'No errors detected', 0)
        print(output)
        return output

    def _run_command(self, cmd, check_msg, ret_code):
        """Run *cmd*; assert *check_msg* appears in its output and the exit code matches."""
        try:
            p = subprocess.Popen(cmd.split(), shell=False, stdin=subprocess.PIPE, stdout=subprocess.PIPE, universal_newlines=True)
            output, _ = p.communicate()
            returncode = p.returncode
            if check_msg:
                assert check_msg in output
            if returncode:
                print(output)
            print(cmd)
            assert ret_code == returncode
            return output
        except subprocess.CalledProcessError as error:
            print(error)
            raise
class Cube(QWidget, Ui_ScrollCube):
    """Scroll-area cube cell: embeds a Square color widget above RGB/HSV sliders."""

    def __init__(self, wget, args, idx):
        """Set up the UI, mount a Square in the color area and mute slider wheel events."""
        super().__init__(wget)
        self.setupUi(self)
        self.setAttribute(Qt.WA_AcceptTouchEvents)
        self._args = args
        self._idx = idx

        # Zero-margin grid so the Square fills the color area completely.
        color_layout = QGridLayout(self.cube_color)
        color_layout.setContentsMargins(0, 0, 0, 0)
        color_layout.setHorizontalSpacing(0)
        color_layout.setVerticalSpacing(0)
        self.square = Square(self.cube_color, self._args, self._idx)
        color_layout.addWidget(self.square)

        # Swallow wheel events on all six sliders so scrolling the parent
        # view doesn't accidentally change channel values.
        for channel in ('r', 'g', 'b'):
            getattr(self, 'hs_rgb_{}'.format(channel)).wheelEvent = (lambda event: event.ignore())
        for channel in ('h', 's', 'v'):
            getattr(self, 'hs_hsv_{}'.format(channel)).wheelEvent = (lambda event: event.ignore())

    def paintEvent(self, event):
        """Pin the color area's height to width * cubic_ratio * 0.618 on each repaint."""
        scaled_width = self.cube_color.width() * self._args.cubic_ratio
        fixed_height = scaled_width * 0.618
        self.cube_color.setMinimumHeight(fixed_height)
        self.cube_color.setMaximumHeight(fixed_height)
class TCPMappingTLSTerminationCrossNamespaceTest(TCPMappingTLSTerminationTest):
    """TCPMapping TLS termination where the TLSContext/Host and its Secret
    live in a different namespace ('other-namespace') than the TCPMapping.

    BUG FIX / NOTE(review): in this copy of the file the embedded YAML
    literals had lost their newlines/indentation and the scheme argument in
    queries() was truncated (`scheme='`); both are reconstructed below
    (scheme='https' for a TLS query) — verify against version control.
    """
    extra_ports = [6789]

    # Plaintext backend the mapping forwards decrypted traffic to.
    target: ServiceType

    def init(self, tls_src: Literal[('tlscontext', 'host')]) -> None:
        super().init(tls_src)
        self.target = HTTP()

    def manifests(self) -> str:
        # Secret + TLSContext/Host in other-namespace, TCPMapping in the
        # test's own namespace, then the parent manifests.
        return (
            namespace_manifest('other-namespace')
            + f"""
---
apiVersion: v1
kind: Secret
metadata:
  name: {self.path.k8s}-servercert
  namespace: other-namespace
type: kubernetes.io/tls
data:
  tls.crt: {TLSCerts['tls-context-host-2'].k8s_crt}
  tls.key: {TLSCerts['tls-context-host-2'].k8s_key}
"""
            + (
                f"""
---
apiVersion: getambassador.io/v2
kind: TLSContext
metadata:
  name: {self.path.k8s}-tlsserver
  namespace: other-namespace
spec:
  ambassador_id: [ {self.ambassador_id} ]
  secret: {self.path.k8s}-servercert
  hosts: [ "tls-context-host-2" ]
"""
                if (self.tls_src == 'tlscontext')
                else f"""
---
apiVersion: getambassador.io/v2
kind: Host
metadata:
  name: {self.path.k8s}-tlsserver
  namespace: other-namespace
spec:
  ambassador_id: [ {self.ambassador_id} ]
  hostname: "tls-context-host-2"
  tlsSecret:
    name: {self.path.k8s}-servercert
"""
            )
            + f"""
---
apiVersion: getambassador.io/v2
kind: TCPMapping
metadata:
  name: {self.path.k8s}
spec:
  ambassador_id: [ {self.ambassador_id} ]
  port: 6789
  host: tls-context-host-2
  service: {self.target.path.fqdn}:80
"""
            + super().manifests()
        )

    def queries(self):
        # TLS query with SNI against the mapping's dedicated port.
        yield Query(
            self.url('', scheme='https', port=6789),
            sni=True,
            headers={'Host': 'tls-context-host-2'},
            ca_cert=TLSCerts['tls-context-host-2'].pubcert,
        )

    def check(self):
        assert (self.results[0].json['backend'] == self.target.path.k8s)
        # TLS terminates at the proxy, so the backend sees plaintext.
        assert (self.results[0].json['request']['tls']['enabled'] == False)
def get_envoy_config(yaml):
    """Build an EnvoyConfig from *yaml* appended to the default listener manifests.

    Parses the combined manifests as k8s resources, assembles the IR with a
    null secret handler and a file checker that accepts every path, and
    generates the resulting Envoy configuration.
    """
    config = Config()
    resource_fetcher = ResourceFetcher(logger, config)
    resource_fetcher.parse_yaml(default_listener_manifests() + yaml, k8s=True)
    config.load_all(resource_fetcher.sorted())
    null_secrets = NullSecretHandler(logger, None, None, '0')
    intermediate = IR(config, file_checker=(lambda path: True), secret_handler=null_secrets)
    assert intermediate
    return EnvoyConfig.generate(intermediate)
def scat_sdm_common_selection():
    """Build the SDM item selection (group 7) enabling all common log items."""
    item_flags = (
        (sdm_common_data.COMMON_BASIC_INFO, True),
        (1, True),
        (sdm_common_data.COMMON_DATA_INFO, True),
        (sdm_common_data.COMMON_SIGNALING_INFO, True),
        (sdm_common_data.COMMON_SMS_INFO, True),
        (5, True),
        (sdm_common_data.COMMON_MULTI_SIGNALING_INFO, True),
    )
    return create_sdm_item_selection(7, *item_flags)
def test_adding_multiple_persistence_annotations():
    """Persistence annotations from values must land on the PVC template metadata."""
    config = '\n persistence:\n enabled: true\n annotations:\n hello: world\n world: hello\n'
    rendered = helm_template(config)
    metadata = rendered['statefulset'][name]['spec']['volumeClaimTemplates'][0]['metadata']
    expected = {'hello': 'world', 'world': 'hello'}
    for key, value in expected.items():
        assert metadata['annotations'][key] == value
# NOTE(review): a decorator name appears to have been lost before this route
# path during extraction — presumably the web framework's route decorator
# registering handle_timers for '/timers'. As written the line below is a
# bare no-op expression; restore the decorator from version control.
('/timers')
def handle_timers(self):
    """Render the /timers status page listing every rpieTime timer."""
    global TXBuffer, navMenuIndex
    TXBuffer = ''
    navMenuIndex = 7
    # Same access guards as the other page handlers: setup wizard, then login.
    if rpieGlobals.wifiSetup:
        return self.redirect('/setup')
    if (not isLoggedIn(self.get, self.cookie)):
        return self.redirect('/login')
    sendHeadandTail('TmplStd', _HEAD)
    TXBuffer += "<table class='normal'><TR><TH align='left'>Timer #<TH align='left'>State<TH align='left'>Looping/Loopcount/Maxloops<TH align='left'>Timeout<TH align='left'>Last start<TH align='left'>Last error"
    try:
        for t in range(len(rpieTime.Timers)):
            TXBuffer += (('<TR><TD>' + str((t + 1))) + '</TD><TD>')
            # Timer state encoding: 0=off, 1=running, 2=paused.
            if (rpieTime.Timers[t].state == 0):
                TXBuffer += 'off'
            elif (rpieTime.Timers[t].state == 1):
                TXBuffer += 'running'
            elif (rpieTime.Timers[t].state == 2):
                TXBuffer += 'paused'
            TXBuffer += '<TD>'
            if (rpieTime.Timers[t].looping == False):
                TXBuffer += 'no'
            else:
                TXBuffer += ((('yes/' + str(rpieTime.Timers[t].loopcount)) + '/') + str(rpieTime.Timers[t].maxloops))
            TXBuffer += ('<TD>' + str(rpieTime.Timers[t].timeout))
            if (rpieTime.Timers[t].laststart == 0):
                TXBuffer += '<TD>never'
            else:
                # Elapsed seconds since the timer last started, 2 decimals.
                TXBuffer += (('<TD>' + misc.formatnum((time.time() - rpieTime.Timers[t].laststart), 2)) + 's ago')
            TXBuffer += ('<TD>' + str(rpieTime.Timers[t].lasterr))
    except Exception as e:
        # Best-effort rendering: a malformed timer entry must not break the page.
        print(e)
    TXBuffer += '</table></form>'
    sendHeadandTail('TmplStd', _TAIL)
    return TXBuffer
class OptionPlotoptionsBellcurveSonificationDefaultspeechoptionsMappingTime(Options):
    """Option wrapper for plotOptions.bellcurve.sonification.defaultSpeechOptions.mapping.time.

    NOTE(review): each option below is declared twice — a getter and a
    setter that share one method name.  As written, the second definition
    overwrites the first at class-creation time.  The same pattern appears
    throughout this generated file (see the sibling Options classes), which
    suggests @property/@<name>.setter decorators were lost upstream; confirm
    against the generator before relying on the getter forms.
    """

    def mapFunction(self):
        # Getter; default None.
        return self._config_get(None)

    def mapFunction(self, value: Any):
        # Setter; stored without JS typing (js_type=False).
        self._config(value, js_type=False)

    def mapTo(self):
        # Getter; default None.
        return self._config_get(None)

    def mapTo(self, text: str):
        # Setter; stored without JS typing.
        self._config(text, js_type=False)

    def max(self):
        # Getter; default None.
        return self._config_get(None)

    def max(self, num: float):
        # Setter; stored without JS typing.
        self._config(num, js_type=False)

    def min(self):
        # Getter; default None.
        return self._config_get(None)

    def min(self, num: float):
        # Setter; stored without JS typing.
        self._config(num, js_type=False)

    def within(self):
        # Getter; default None.
        return self._config_get(None)

    def within(self, value: Any):
        # Setter; stored without JS typing.
        self._config(value, js_type=False)
class ArgumentDefinition():
    """Describes a single command argument and validates raw token input."""

    # Message added to a failed validation when a required token is missing.
    MISSING_ARGUMENT = 'Missing argument!'

    def __init__(self, optional: bool=False) -> None:
        super().__init__()
        self.__optional = optional

    def isOptional(self) -> bool:
        """Return True when this argument may be omitted."""
        return self.__optional

    def validate(self, token: str) -> ValidationStatus:
        """Validate *token*: a required argument must not be blank."""
        status = ValidationStatus()
        if not self.isOptional() and token.strip() == '':
            status.setFailed()
            status.addToMessage(ArgumentDefinition.MISSING_ARGUMENT)
        return status
def test_resource_entry():
    """ResourceEntry round-trips through its IDL form; equality and accessors work."""
    entry = task.Resources.ResourceEntry(task.Resources.ResourceName.CPU, 'blah')
    round_tripped = task.Resources.ResourceEntry.from_flyte_idl(entry.to_flyte_idl())
    assert round_tripped == entry
    # Inequality when either the resource name or the value differs.
    different_name = task.Resources.ResourceEntry(task.Resources.ResourceName.GPU, 'blah')
    different_value = task.Resources.ResourceEntry(task.Resources.ResourceName.CPU, 'bloop')
    assert entry != different_name
    assert entry != different_value
    assert entry.name == task.Resources.ResourceName.CPU
    assert entry.value == 'blah'
class ListValues():
    """Value object bundling several list-typed fields with structural equality.

    BUG FIX: ``def_value`` previously defaulted to a shared mutable ``[]``
    (one list object reused across every instance constructed without the
    argument, so mutating one instance's list leaked into the others).  It
    now defaults to None and a fresh empty list is created per instance;
    callers that pass a list explicitly are unaffected.
    """

    def __init__(self, lst: List[str], enum_lst: List[Color],
                 passthrough_list: List[LibraryClass], dataclass_val: List[User],
                 def_value: List[str]=None):
        # def_value: None means "fresh empty list"; effective type is
        # Optional[List[str]].
        self.lst = lst
        self.enum_lst = enum_lst
        self.passthrough_list = passthrough_list
        self.dataclass_val = dataclass_val
        self.def_value = [] if def_value is None else def_value

    def __eq__(self, other):
        """Field-by-field equality against another ListValues instance."""
        # NOTE: like the original, no __hash__ is defined alongside __eq__,
        # so instances are unhashable — kept for backward compatibility.
        return (isinstance(other, type(self))
                and (self.lst == other.lst)
                and (self.enum_lst == other.enum_lst)
                and (self.passthrough_list == other.passthrough_list)
                and (self.dataclass_val == other.dataclass_val)
                and (self.def_value == other.def_value))
def get_git_commit_hash() -> str:
    """Return the current git HEAD commit hash.

    On failure the error is returned as a string instead of raised: the
    plain exception text when git is not installed (FileNotFoundError), or
    'Error: ...' when git exits non-zero.
    """
    try:
        completed = subprocess.run(
            ['git', 'rev-parse', 'HEAD'],
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE,
            text=True,
            check=True,
        )
    except FileNotFoundError as exc:
        return str(exc)
    except subprocess.CalledProcessError as exc:
        return 'Error: ' + str(exc)
    return completed.stdout.strip()
def prepare_box_data(curr: pd.DataFrame, ref: Optional[pd.DataFrame], cat_feature_name: str, num_feature_name: str) -> Dict[(str, Dict[(str, list)])]:
    """Compute per-category five-number summaries for box plots.

    For the current frame (and the reference frame, when given) the numeric
    feature is grouped by the categorical feature; per dataset name the
    result holds the min / lower-quartile / median / upper-quartile / max
    lists, aligned with the 'values' array of category labels.
    """
    frames = {'current': curr}
    if ref is not None:
        frames['reference'] = ref

    result = {}
    quantile_levels = [0, 0.25, 0.5, 0.75, 1]
    for dataset_name, frame in frames.items():
        grouped = frame.groupby(cat_feature_name, observed=False)[num_feature_name]
        quantile_df = grouped.quantile(quantile_levels).reset_index()
        quantile_df.columns = [cat_feature_name, 'q', num_feature_name]
        categories = quantile_df[cat_feature_name].unique()

        def values_at(level):
            # Rows of one quantile level, ordered like `categories`.
            subset = quantile_df[quantile_df['q'] == level]
            return subset.set_index(cat_feature_name).loc[categories, num_feature_name].tolist()

        result[dataset_name] = {
            'mins': values_at(0),
            'lowers': values_at(0.25),
            'means': values_at(0.5),
            'uppers': values_at(0.75),
            'maxs': values_at(1),
            'values': categories,
        }
    return result