code
stringlengths
281
23.7M
# Unit test expecting audit_gdm_last_user_logged_in_disabled to fail with state 46.
# NOTE(review): extraction stripped the decorator prefixes — the three leading
# argument tuples were presumably `@patch.object(...)` decorators; `MagickMock`
# looks like a typo for a mock parameter, and `test.` is an undefined name here.
# Restore the decorators and fixture setup from the original file before use.
(cis_audit, 'open', mock_open()) (os.path, 'exists', return_value=True) (cis_audit.CISAudit, 'audit_package_is_installed', mock_audit_package_is_installed_true) def test_audit_gdm_last_user_logged_in_disabled_fail(MagickMock): state = test.audit_gdm_last_user_logged_in_disabled() assert (state == 46)
# Frappe DocType controller for a Healthcare Practitioner: autonames from the
# practitioner's full name, validates service items / mandatory charges and the
# linked User, manages user permissions on update, enforces Google Calendar
# configuration for video-conferencing schedules, and cleans up contact/address
# records on delete.
# NOTE(review): this class was flattened by extraction and a translated message
# string is split mid-literal across the two dump lines below; kept verbatim —
# do not reflow without the original file.
class HealthcarePractitioner(Document): def onload(self): load_address_and_contact(self) def autoname(self): self.name = self.practitioner_name if frappe.db.exists('Healthcare Practitioner', self.name): self.name = append_number_if_name_exists('Contact', self.name) def validate(self): self.set_full_name() validate_party_accounts(self) if self.inpatient_visit_charge_item: validate_service_item(self.inpatient_visit_charge_item, 'Configure a service Item for Inpatient Consulting Charge Item') if (not self.inpatient_visit_charge): frappe.throw(_('Inpatient Consulting Charge is mandatory if you are setting Inpatient Consulting Charge Item'), frappe.MandatoryError) if self.op_consulting_charge_item: validate_service_item(self.op_consulting_charge_item, 'Configure a service Item for Outpatient Consulting Charge Item') if (not self.op_consulting_charge): frappe.throw(_('Outpatient Consulting Charge is mandatory if you are setting Outpatient Consulting Charge Item'), frappe.MandatoryError) if self.user_id: self.validate_user_id() else: existing_user_id = frappe.db.get_value('Healthcare Practitioner', self.name, 'user_id') if existing_user_id: frappe.permissions.remove_user_permission('Healthcare Practitioner', self.name, existing_user_id) self.validate_practitioner_schedules() def on_update(self): if self.user_id: frappe.permissions.add_user_permission('Healthcare Practitioner', self.name, self.user_id) def set_full_name(self): if self.last_name: self.practitioner_name = ' '.join(filter(None, [self.first_name, self.last_name])) else: self.practitioner_name = self.first_name def validate_practitioner_schedules(self): for practitioner_schedule in self.practitioner_schedules: if frappe.db.get_value('Practitioner Schedule', practitioner_schedule.schedule, 'allow_video_conferencing'): if ((not self.google_calendar) and (not frappe.db.get_single_value('Healthcare Settings', 'default_google_calendar'))): frappe.throw(_('Video conferencing enabled for {}, 
\t\t\t\t\t\t\t\t\t\t\tplease link {} or configure Default Google Calendar in {}').format(get_link_to_form('Practitioner Schedule', practitioner_schedule.schedule), frappe.bold('Google Calendar'), get_link_to_form('Healthcare Settings', 'Healthcare Settings', 'Healthcare Settings')), title=_('Google Calendar Required')) break def validate_user_id(self): if (not frappe.db.exists('User', self.user_id)): frappe.throw(_('User {0} does not exist').format(self.user_id)) elif (not frappe.db.exists('User', self.user_id, 'enabled')): frappe.throw(_('User {0} is disabled').format(self.user_id)) practitioner = frappe.db.exists('Healthcare Practitioner', {'user_id': self.user_id, 'name': ('!=', self.name)}) if practitioner: frappe.throw(_('User {0} is already assigned to Healthcare Practitioner {1}').format(self.user_id, practitioner)) def on_trash(self): delete_contact_and_address('Healthcare Practitioner', self.name)
class EosHomePathNotFoundError(ErsiliaError):
    """Raised when the EOS home directory cannot be located.

    This usually indicates a broken Ersilia installation; the hint text
    asks the user to reinstall.
    """

    def __init__(self):
        # Build the fixed message/hints first, then delegate to the base error.
        self.message = self._get_message()
        self.hints = self._get_hints()
        ErsiliaError.__init__(self, self.message, self.hints)

    def _get_message(self):
        # Human-readable description of the failure.
        return 'EOS Home path not found. Looks like Ersilia is not installed correctly.'

    def _get_hints(self):
        # Actionable suggestion shown alongside the message.
        return 'Re-install Ersilia and try again. \n'
class BaseConcreteCommittee(BaseCommittee):
    """Committee model backed by the committee-detail materialized view."""
    __tablename__ = 'ofec_committee_detail_mv'
    # FEC committee ID — primary key, unique, indexed.
    committee_id = db.Column(db.String, primary_key=True, unique=True, index=True, doc=docs.COMMITTEE_ID)
    # Candidate IDs linked to this committee (Postgres text array).
    candidate_ids = db.Column(ARRAY(db.Text), doc=docs.CANDIDATE_ID)
    # Candidate IDs of the committee's sponsors (Postgres text array).
    sponsor_candidate_ids = db.Column(ARRAY(db.Text), doc=docs.SPONSOR_CANDIDATE_ID)
class CmdDisasm(Cmd):
    """Display a disassembly of a specified region in the memory."""

    keywords = ['disasm', 'disas', 'disassemble', 'd']
    description = 'Display a disassembly of a specified region in the memory.'
    parser = argparse.ArgumentParser(prog=keywords[0], description=description, epilog=('Aliases: ' + ', '.join(keywords)))
    parser.add_argument('--length', '-l', type=auto_int, default=128, help='Length of the disassembly (default: %(default)s).')
    parser.add_argument('address', type=auto_int, help='Start address of the disassembly.')

    def work(self):
        """Parse arguments, read target memory and print the disassembly.

        Returns True on success (or when argument parsing bailed out),
        False when the user aborts or the memory read fails.
        """
        args = self.getArgs()
        # Fixed E711: compare to None with `is`, not `==`.
        if args is None:
            return True
        # Warn — and allow the user to abort — when the requested range is
        # not inside a known section of the target.
        if not self.isAddressInSections(args.address, args.length):
            answer = yesno('Warning: Address 0x%08x (len=0x%x) is not inside a valid section. Continue?' % (args.address, args.length))
            if not answer:
                return False
        dump = self.readMem(args.address, args.length)
        if dump is None:
            return False
        print(disasm(dump, vma=args.address))
        return True
def _calculate_iteration_count(exponent_length: int, first_32_exponent_bytes: bytes) -> int:
    """Return the modexp gas-iteration count for the given exponent.

    The count is the index of the highest set bit of the first 32 exponent
    bytes, plus 8 bits for every exponent byte beyond the first 32; it is
    never less than 1.
    """
    leading_exponent = big_endian_to_int(first_32_exponent_bytes)
    bit_index = get_highest_bit_index(leading_exponent)
    # Exponents longer than 32 bytes contribute 8 bits per extra byte.
    if exponent_length > 32:
        bit_index += 8 * (exponent_length - 32)
    return max(bit_index, 1)
def add_task(args, daccess):
    """Create a task from CLI args via the data-access layer.

    Opens the editor for title/content when --edit is given, then stores
    the task and wires up any dependencies. Returns a (message-key, data)
    pair: ('dependencies_not_found', missing-ids) when some dependency IDs
    do not exist, otherwise ('add_task', new-task-id).
    """
    options = get_options(args, TASK_MUTATORS, {'deadline': {'None': None}})
    context = args.get('context')
    context = '' if context is None else context
    if args['edit']:
        title, content = core.editor_edit_task(args['title'], None, EDITOR)
    else:
        title, content = args['title'], None
    new_task_id = daccess.add_task(title, content, context, options)
    if args['depends_on']:
        missing_deps = daccess.set_task_dependencies(new_task_id, args['depends_on'])
        if missing_deps:
            return ('dependencies_not_found', missing_deps)
    return ('add_task', new_task_id)
def extractYiumreBlogspotCom(item):
    """Build a release message for yiumre.blogspot.com feed items.

    Returns None for previews or items without a chapter/volume number,
    a release message when a known tag matches, and False otherwise.
    """
    title = item['title']
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(title)
    if 'preview' in title.lower() or not (chp or vol):
        return None
    # Known series tags: (feed tag, release name, translation type).
    known_tags = [
        ('PRC', 'PRC', 'translated'),
        ('Loiterous', 'Loiterous', 'oel'),
    ]
    for feed_tag, series_name, tl_type in known_tags:
        if feed_tag in item['tags']:
            return buildReleaseMessageWithType(item, series_name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
def power_profiles(interval, activity_type='ride', power_unit='mmp', group='M'):
    # Build a plotly bar chart of the best power sample per period for the
    # given interval. `group` is a pandas period alias (default 'M' = month);
    # `power_unit` selects the column ('mmp', 'watts_per_kg', ...).
    # Returns an empty dict when no samples match.
    activity_type = (('%' + activity_type) + '%')  # SQL ILIKE pattern
    df_best_samples = pd.read_sql(sql=app.session.query(stravaBestSamples).filter(stravaBestSamples.type.ilike(activity_type), (stravaBestSamples.interval == interval)).statement, con=engine, index_col=['timestamp_local'])
    app.session.remove()
    if (len(df_best_samples) < 1):
        return {}
    # Bucket samples into the requested period, then keep only the row with
    # the maximum power value in each bucket.
    df_best_samples['power_profile_dategroup'] = df_best_samples.index.to_period(group).to_timestamp()
    df = df_best_samples[['activity_id', power_unit, 'power_profile_dategroup', 'interval']]
    df = df.loc[df.groupby('power_profile_dategroup')[power_unit].idxmax()]
    # One bar per period; customdata encodes activity/interval for click-through,
    # and the max bar is highlighted in orange.
    figure = {'data': [go.Bar(x=df['power_profile_dategroup'], y=df[power_unit], customdata=['{}_{}_{}'.format(df.loc[x]['activity_id'], df.loc[x]['interval'].astype('int'), interval) for x in df.index], text=[('{:.2f} W/kg'.format(x) if (power_unit == 'watts_per_kg') else '{:.0f} W'.format(x)) for x in df[power_unit]], hoverinfo='x+text', marker=dict(color=[(orange if (x == df[power_unit].max()) else light_blue) for x in df[power_unit]]))], 'layout': go.Layout(font=dict(size=10, color=white), height=400, xaxis=dict(showticklabels=True, tickformat="%b '%y"), yaxis=dict(showgrid=True, gridcolor='rgb(73, 73, 73)'), margin={'l': 25, 'b': 25, 't': 5, 'r': 20})}
    return figure
def upgrade():
    """Alembic upgrade: add size-limit columns to the settings table."""
    # (column name, server-side default) — all non-nullable integers.
    new_columns = (
        ('logo_size', '1000'),
        ('image_size', '10000'),
        ('slide_size', '20000'),
    )
    for column_name, default in new_columns:
        op.add_column('settings', sa.Column(column_name, sa.Integer(), server_default=default, nullable=False))
def string_to_bool(v: str):
    """Coerce an argparse flag string to a bool.

    Accepts the usual yes/no spellings (case-insensitive); bools pass
    through unchanged. As a side effect, records the outcome in the
    ``data_selection`` environment variable ('True'/'False').
    Raises argparse.ArgumentTypeError for anything unrecognized.
    """
    if isinstance(v, bool):
        return v
    normalized = v.lower()
    if normalized in {'yes', 'true', 't', 'y', '1'}:
        os.environ['data_selection'] = 'True'
        return True
    if normalized in {'no', 'false', 'f', 'n', '0'}:
        os.environ['data_selection'] = 'False'
        return False
    raise argparse.ArgumentTypeError('Boolean value expected.')
class RegisterForm(forms.Form):
    """Sign-up form: full name, email, password (min 8 chars) and T&C consent."""
    template_name = 'auth/forms/register_form.html'
    name = forms.CharField(max_length=settings.AUTH_USER_NAME_MAX_LENGTH, label='Full name', error_messages={'required': 'You need to enter your name.'}, widget=forms.TextInput(attrs={'placeholder': 'Enter your name.'}))
    email = forms.EmailField(label='Email address', error_messages={'required': 'You need to enter your email address.'}, widget=forms.EmailInput(attrs={'placeholder': 'Enter your email address.'}))
    # NOTE(review): the password field uses TextInput, not PasswordInput, so the
    # value is displayed while typing — confirm this is intentional.
    password = forms.CharField(min_length=8, label='Password', error_messages={'required': 'You need to enter a password.', 'min_length': 'Your password must have at least 8 characters.'}, widget=forms.TextInput(attrs={'placeholder': 'Create a password.'}))
    terms = forms.BooleanField(widget=forms.CheckboxInput(), error_messages={'required': 'You need to accept the Terms and Conditions.'})
def upgrade():
    """Alembic upgrade: create the cookies table and its indexes."""
    op.create_table(
        'cookies',
        sa.Column('id', sa.String(length=255), nullable=False),
        sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=True),
        sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=True),
        sa.Column('name', sa.String(), nullable=False),
        sa.Column('domain', sa.String(), nullable=True),
        sa.Column('path', sa.String(), nullable=True),
        sa.Column('system_id', sa.String(), nullable=True),
        sa.Column('privacy_declaration_id', sa.String(), nullable=True),
        # A cookie row is detached from its declaration on delete, but removed
        # with its system.
        sa.ForeignKeyConstraint(['privacy_declaration_id'], ['privacydeclaration.id'], ondelete='SET NULL'),
        sa.ForeignKeyConstraint(['system_id'], ['ctl_systems.id'], ondelete='CASCADE'),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('name', 'privacy_declaration_id', name='_cookie_name_privacy_declaration_uc'),
    )
    # Non-unique lookup indexes, one per frequently-queried column.
    for indexed_column in ('id', 'name', 'privacy_declaration_id', 'system_id'):
        op.create_index(op.f('ix_cookies_' + indexed_column), 'cookies', [indexed_column], unique=False)
# Generated Highcharts option wrapper for pyramid3d data labels.
# NOTE(review): each getter/setter pair below lost its `@property` /
# `@<name>.setter` decorator lines during extraction — as written, every setter
# def simply shadows its getter. Restore the decorators from the generator
# template before use; kept verbatim here.
class OptionSeriesPyramid3dLabel(Options): def boxesToAvoid(self): return self._config_get(None) def boxesToAvoid(self, value: Any): self._config(value, js_type=False) def connectorAllowed(self): return self._config_get(False) def connectorAllowed(self, flag: bool): self._config(flag, js_type=False) def connectorNeighbourDistance(self): return self._config_get(24) def connectorNeighbourDistance(self, num: float): self._config(num, js_type=False) def enabled(self): return self._config_get(True) def enabled(self, flag: bool): self._config(flag, js_type=False) def format(self): return self._config_get('undefined') def format(self, text: str): self._config(text, js_type=False) def formatter(self): return self._config_get('undefined') def formatter(self, value: Any): self._config(value, js_type=False) def maxFontSize(self): return self._config_get(None) def maxFontSize(self, num: float): self._config(num, js_type=False) def minFontSize(self): return self._config_get(None) def minFontSize(self, num: float): self._config(num, js_type=False) def onArea(self): return self._config_get(None) def onArea(self, flag: bool): self._config(flag, js_type=False) def style(self) -> 'OptionSeriesPyramid3dLabelStyle': return self._config_sub_data('style', OptionSeriesPyramid3dLabelStyle) def useHTML(self): return self._config_get(False) def useHTML(self, flag: bool): self._config(flag, js_type=False)
# Resolve (and cache) the StringArrayOffset for a direct offset: on a cache
# miss, collect the .rodata addresses for the offset, interning each string
# constant into binary.string_consts, then build and cache the array.
# NOTE(review): extraction flattened this function's indentation, so the exact
# nesting of the trailing bookkeeping (cache store / function.direct_offsets
# registration) is ambiguous here — verify against the original file.
# NOTE(review): StringConst is created with access=None even though an `access`
# argument is available; presumably access is tracked on the array rather than
# its members — confirm.
def make_string_array(offset, blk, pc, access=None): function = blk.function binary = function.binary if (offset in binary.direct_offsets): string_array = binary.direct_offsets[offset] else: addrs = binary.sections.get_rodata_addrs(offset) strings = [] for addr in addrs: if (addr in binary.string_consts): strings.append(binary.string_consts[addr]) else: value = binary.sections.get_rodata_string(addr) if (value is not None): string_const = StringConst(binary=binary, value=value, access=None, offset=addr) binary.string_consts[addr] = string_const strings.append(string_const) string_array = StringArrayOffset(binary=binary, offset=offset, access=access, strings=strings) binary.direct_offsets[offset] = string_array function.direct_offsets.add(offset) return string_array
# Generated Highcharts sonification mapping options (gap between notes).
# NOTE(review): getter/setter pairs lost their `@property` / `@<name>.setter`
# decorator lines during extraction — each setter def shadows its getter as
# written. Restore the decorators before use; kept verbatim here.
class OptionSeriesPyramidSonificationContexttracksMappingGapbetweennotes(Options): def mapFunction(self): return self._config_get(None) def mapFunction(self, value: Any): self._config(value, js_type=False) def mapTo(self): return self._config_get(None) def mapTo(self, text: str): self._config(text, js_type=False) def max(self): return self._config_get(None) def max(self, num: float): self._config(num, js_type=False) def min(self): return self._config_get(None) def min(self, num: float): self._config(num, js_type=False) def within(self): return self._config_get(None) def within(self, value: Any): self._config(value, js_type=False)
# Generated Highcharts sonification point-grouping options for variablepie.
# NOTE(review): getter/setter pairs lost their `@property` / `@<name>.setter`
# decorator lines during extraction — each setter def shadows its getter as
# written. Restore the decorators before use; kept verbatim here.
class OptionSeriesVariablepieSonificationPointgrouping(Options): def algorithm(self): return self._config_get('minmax') def algorithm(self, text: str): self._config(text, js_type=False) def enabled(self): return self._config_get(True) def enabled(self, flag: bool): self._config(flag, js_type=False) def groupTimespan(self): return self._config_get(15) def groupTimespan(self, num: float): self._config(num, js_type=False) def prop(self): return self._config_get('y') def prop(self, text: str): self._config(text, js_type=False)
# Parametrized Firedrake test fixture producing a 2D mesh: a (possibly
# quadrilateral) unit square for 'cg', a periodic unit square for 'dg', or a
# mesh loaded from a .msh file for 'file'.
# NOTE(review): the `@pytest.fixture(` prefix was stripped by extraction — the
# line below should begin `@pytest.fixture(params=[...])`. Kept verbatim.
(params=[('cg', False), ('cg', True), ('dg', False), ('dg', True), ('file', 't11_tria.msh'), ('file', 't11_quad.msh')]) def mesh2d(request): if (request.param[0] == 'cg'): return UnitSquareMesh(12, 12, quadrilateral=request.param[1]) elif (request.param[0] == 'dg'): return PeriodicUnitSquareMesh(12, 12, quadrilateral=request.param[1]) elif (request.param[0] == 'file'): meshfile = join(cwd, '..', 'meshes', request.param[1]) return Mesh(meshfile)
# Proportional go-to-goal controller: computes distance and heading error to
# `goal` from the global robot pose, then publishes a Twist on /cmd_vel.
# NOTE(review): `vx` is set from the proportional term (kl * |d|) and then
# immediately overwritten with the constant 1 — this looks like a leftover
# debugging override; confirm which assignment is intended.
# NOTE(review): a new /cmd_vel Publisher is created on every call; ROS
# publishers normally live for the node's lifetime, and freshly-created ones
# can drop the first messages — consider hoisting it out. Flattened
# indentation makes the exact nesting of the `vx = 1` line ambiguous; kept
# verbatim.
def go_to_goal(goal): global robot_rotation, robot_location d = Distance_compute(robot_location, goal) theta = robot_rotation[2] kl = 1 ka = 4 vx = 0 va = 0 heading = math.atan2((goal[1] - robot_location[1]), (goal[0] - robot_location[0])) err_theta = (heading - theta) if (d > 0.01): vx = (kl * abs(d)) vx = 1 if (abs(err_theta) > 0.01): va = (ka * err_theta) vel_1 = rospy.Publisher('/cmd_vel', geometry_msgs.msg.Twist, queue_size=10) cmd = geometry_msgs.msg.Twist() cmd.linear.x = vx cmd.angular.z = va vel_1.publish(cmd)
# Generated OpenAPI model for a purge-keys request body ({'surrogate_keys': [str]}).
# NOTE(review): extraction stripped decorator '@' prefixes — `_property` should
# be `@cached_property` (or `@property`) and `_js_args_to_python_args` should be
# `@convert_js_args_to_python_args`; `_from_openapi_data` is normally also a
# `@classmethod`. The class body is additionally split mid-statement across the
# two dump lines below; kept verbatim — restore decorators from the OpenAPI
# generator template before use.
class PurgeKeys(ModelNormal): allowed_values = {} validations = {} _property def additional_properties_type(): return (bool, date, datetime, dict, float, int, list, str, none_type) _nullable = False _property def openapi_types(): return {'surrogate_keys': ([str],)} _property def discriminator(): return None attribute_map = {'surrogate_keys': 'surrogate_keys'} read_only_vars = {} _composed_schemas = {} _js_args_to_python_args def _from_openapi_data(cls, *args, **kwargs): _check_type = kwargs.pop('_check_type', True) _spec_property_naming = kwargs.pop('_spec_property_naming', False) _path_to_item = kwargs.pop('_path_to_item', ()) _configuration = kwargs.pop('_configuration', None) _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) self = super(OpenApiModel, cls).__new__(cls) if args: raise ApiTypeError(('Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments.' % (args, self.__class__.__name__)), path_to_item=_path_to_item, valid_classes=(self.__class__,)) self._data_store = {} self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item self._configuration = _configuration self._visited_composed_classes = (_visited_composed_classes + (self.__class__,)) for (var_name, var_value) in kwargs.items(): if ((var_name not in self.attribute_map) and (self._configuration is not None) and self._configuration.discard_unknown_keys and (self.additional_properties_type is None)): continue setattr(self, var_name, var_value) return self required_properties = set(['_data_store', '_check_type', '_spec_property_naming', '_path_to_item', '_configuration', '_visited_composed_classes']) _js_args_to_python_args def __init__(self, *args, **kwargs): _check_type = kwargs.pop('_check_type', True) _spec_property_naming = kwargs.pop('_spec_property_naming', False) _path_to_item = kwargs.pop('_path_to_item', ()) _configuration = kwargs.pop('_configuration', None) _visited_composed_classes 
= kwargs.pop('_visited_composed_classes', ()) if args: raise ApiTypeError(('Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments.' % (args, self.__class__.__name__)), path_to_item=_path_to_item, valid_classes=(self.__class__,)) self._data_store = {} self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item self._configuration = _configuration self._visited_composed_classes = (_visited_composed_classes + (self.__class__,)) for (var_name, var_value) in kwargs.items(): if ((var_name not in self.attribute_map) and (self._configuration is not None) and self._configuration.discard_unknown_keys and (self.additional_properties_type is None)): continue setattr(self, var_name, var_value) if (var_name in self.read_only_vars): raise ApiAttributeError(f'`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate class with read only attributes.')
# Parametrized test: parse_rule should turn range(...) strings into RangeSweep
# objects, covering ints, floats, keyword args, defaults and negative steps.
# NOTE(review): the `@mark.parametrize(` / `@pytest.mark.parametrize(` prefix
# was stripped by extraction from the argument tuple below. Kept verbatim.
('value,expected', [param('range(10,11)', RangeSweep(start=10, stop=11, step=1), id='ints'), param('range (10,11)', RangeSweep(start=10, stop=11, step=1), id='ints'), param('range(1,10,2)', RangeSweep(start=1, stop=10, step=2), id='ints_with_step'), param('range(start=1,stop=10,step=2)', RangeSweep(start=1, stop=10, step=2), id='ints_with_step'), param('range(1.0, 3.14)', RangeSweep(start=1.0, stop=3.14, step=1), id='floats'), param('range(1.0, 3.14, 0.1)', RangeSweep(start=1.0, stop=3.14, step=0.1), id='floats_with_step'), param('range(10)', RangeSweep(start=0, stop=10, step=1), id='no_start'), param('range(-10)', RangeSweep(start=0, stop=(- 10), step=1), id='no_start_empty'), param('range(-10, step=-1)', RangeSweep(start=0, stop=(- 10), step=(- 1)), id='no_start_negative'), param('range(5.5)', RangeSweep(start=0, stop=5.5, step=1), id='no_start_float'), param('range(5.5, step=0.5)', RangeSweep(start=0, stop=5.5, step=0.5), id='no_start_step_float')]) def test_range_sweep(value: str, expected: Any) -> None: ret = parse_rule(value, 'function') assert (ret == expected)
# Tkinter-based factory/test tool for the MyCobot robot arm: serial
# connect/disconnect, servo data checks, per-joint calibration, Atom LED color,
# an aging test (generates a script + systemd service and starts it), servo
# release/power, basic/atom I/O tests, and a scrolling log pane.
# NOTE(review): this class was flattened by extraction; its indentation and
# several embedded multi-line literals (the generated aging-test script) are
# split mid-token across the dump lines below. Kept verbatim — do not reflow
# without the original file.
# NOTE(review): `_aging_test` builds shell commands by string concatenation of
# file contents passed to os.system — fragile and injection-prone; review.
class MycobotTest(object): def __init__(self): self.mycobot = None self.win = tkinter.Tk() self.win.title(' Mycobot ') self.win.geometry('918x600+10+10') self.port_label = tkinter.Label(self.win, text=':') self.port_label.grid(row=0) self.port_list = ttk.Combobox(self.win, width=15, postcommand=self.get_serial_port_list) self.get_serial_port_list() self.port_list.current(0) self.port_list.grid(row=0, column=1) self.baud_label = tkinter.Label(self.win, text=':') self.baud_label.grid(row=1) self.baud_list = ttk.Combobox(self.win, width=15) self.baud_list['value'] = ('1000000', '115200') self.baud_list.current(1) self.baud_list.grid(row=1, column=1) self.connect_label = tkinter.Label(self.win, text='mycobot:') self.connect_label.grid(row=2) self.connect = tkinter.Button(self.win, text='', command=self.connect_mycobot) self.disconnect = tkinter.Button(self.win, text='', command=self.disconnect_mycobot) self.connect.grid(row=3) self.disconnect.grid(row=3, column=1) self.check_label = tkinter.Label(self.win, text=':') self.check_label.grid(row=4) self.check_btn = tkinter.Button(self.win, text='', command=self.check_mycobot_servos) self.check_btn.grid(row=4, column=1) self.calibration_num = None self.calibration_label = tkinter.Label(self.win, text=':') self.calibration_label.grid(row=5) self.calibration_btn = tkinter.Button(self.win, text='', command=self.calibration_mycobot) self.calibration_btn.grid(row=5, column=1) self.set_color_label = tkinter.Label(self.win, text='Atom:') self.set_color_label.grid(row=6, columnspan=2) self.color_red = tkinter.Button(self.win, text='', command=(lambda : self.send_color('red'))) self.color_green = tkinter.Button(self.win, text='', command=(lambda : self.send_color('green'))) self.color_red.grid(row=7) self.color_green.grid(row=7, column=1) self.aging_stop = False self.movement_label = tkinter.Label(self.win, text=':') self.movement_label.grid(row=8) self.start_btn = tkinter.Button(self.win, text='', command=self.start_aging_test) 
self.start_btn.grid(row=9) self.stop_btn = tkinter.Button(self.win, text='', command=self.stop_aging_test) self.stop_btn.grid(row=9, column=1) self.release_btn = tkinter.Button(self.win, text='', command=self.release_mycobot) self.release_btn.grid(row=10) self.focus_btn = tkinter.Button(self.win, text='', command=self.focus_mycobot) self.focus_btn.grid(row=10, column=1) self.test_IO_label = tkinter.Label(self.win, text='I/O:') self.test_IO_label.grid(row=11) self.test_basic_btn = tkinter.Button(self.win, text='I/O', command=self.test_basic) self.test_atom_btn = tkinter.Button(self.win, text='I/O', command=self.test_atom) self.test_basic_btn.grid(row=12) self.test_atom_btn.grid(row=12, column=1) self.log_label = tkinter.Label(self.win, text=':') self.log_label.grid(row=0, column=12) _f = tkinter.Frame(self.win) _bar = tkinter.Scrollbar(_f, orient=tkinter.VERTICAL) self.log_data_Text = tkinter.Text(_f, width=100, height=35, yscrollcommand=_bar.set) _bar.pack(side=tkinter.RIGHT, fill=tkinter.Y) _bar.config(command=self.log_data_Text.yview) self.log_data_Text.pack() _f.grid(row=1, column=12, rowspan=15, columnspan=10) def run(self): self.win.mainloop() def connect_mycobot(self): self.prot = port = self.port_list.get() if (not port): self.write_log_to_Text('') return self.baud = baud = self.baud_list.get() if (not baud): self.write_log_to_Text('') return baud = int(baud) try: self.mycobot = MyCobot(port, baud) time.sleep(0.5) self.mycobot._write([255, 255, 3, 22, 1, 250]) time.sleep(0.5) self.write_log_to_Text(' !') except Exception as e: err_log = ' \r !!!\n \r\n {}\n \r\n '.format(e) self.write_log_to_Text(err_log) def disconnect_mycobot(self): if (not self.has_mycobot()): return try: del self.mycobot self.mycobot = None self.write_log_to_Text(' !') except AttributeError: self.write_log_to_Text('mycobot!!!') def release_mycobot(self): if (not self.has_mycobot()): return self.mycobot.release_all_servos() self.write_log_to_Text('Release over.') def focus_mycobot(self): 
if (not self.has_mycobot()): return self.mycobot.power_on() self.write_log_to_Text('Power on over.') def check_mycobot_servos(self): self.connect_mycobot() if (not self.has_mycobot()): return res = [] for i in range(1, 8): _data = self.mycobot.get_servo_data(i, 5) time.sleep(0.02) if (_data != i): res.append(i) if res: self.write_log_to_Text(' {} !!!'.format(res)) else: self.write_log_to_Text('') def calibration_mycobot(self): if (not self.has_mycobot()): return if (not self.calibration_num): self.calibration_num = 0 self.calibration_num += 1 self.mycobot.set_servo_calibration(self.calibration_num) time.sleep(0.1) self.mycobot.focus_servo(self.calibration_num) time.sleep(0.5) pos = self.mycobot.get_angles() self.write_log_to_Text((('' + str(self.calibration_num)) + '.')) if (self.calibration_num == 6): self.write_log_to_Text('.') self.calibration_num = None self._calibration_test() def send_color(self, color: str): if (not self.has_mycobot()): return color_dict = {'red': [255, 0, 0], 'green': [0, 255, 0], 'blue': [0, 0, 255]} self.mycobot.set_color(*color_dict[color]) self.write_log_to_Text(': {}.'.format(color)) def start_aging_test(self): if (not self.has_mycobot()): return self.aging_stop = False self.aging = threading.Thread(target=self._aging_test, daemon=True) self.aging.start() self.write_log_to_Text(' ...') def stop_aging_test(self): try: os.system('sudo systemctl stop aging_test.service') os.system('sudo rm /home/ubuntu/Desktop/aging_test.sh') os.system('sudo rm /home/ubuntu/Desktop/aging_test.py') os.system('sudo rm /etc/systemd/system/aging_test.service') os.system('sudo systemctl daemon-reload') self.write_log_to_Text('.') except: self.write_log_to_Text(' !!!') def rectify_mycobot(self): if (not self.has_mycobot()): return data_id = [21, 22, 23, 24, 26, 27] data = [10, 0, 1, 0, 3, 3] for i in range(1, 7): for j in range(len(data_id)): self.mycobot.set_servo_data(i, data_id[j], data[j]) time.sleep(0.2) _data = self.mycobot.get_servo_data(i, data_id[j]) 
time.sleep(0.2) if (_data == data[j]): self.write_log_to_Text((((((('Servo motor :' + str(i)) + ' data_id : ') + str(data_id[j])) + ' data: ') + str(_data)) + ' modify successfully ')) else: self.write_log_to_Text((((((('Servo motor :' + str(i)) + ' data_id : ') + str(data_id[j])) + ' data: ') + str(_data)) + ' modify error ')) def test_basic(self): pin_no = [1, 2, 3, 4, 5, 6] for p in pin_no: self.write_log_to_Text((' %s 0 ' % p)) self.mycobot.set_basic_output(p, 0) time.sleep(0.5) time.sleep(1) for p in pin_no: self.write_log_to_Text((' %s : %s' % (p, self.mycobot.get_basic_input(p)))) time.sleep(0.5) time.sleep(1) for p in pin_no: self.write_log_to_Text((' %s 1 ' % p)) self.mycobot.set_basic_output(p, 1) time.sleep(0.5) time.sleep(1) for p in pin_no: self.write_log_to_Text((' %s : %s' % (p, self.mycobot.get_basic_input(p)))) time.sleep(0.5) def test_atom(self): pin_in = [19, 22] pin_out = [23, 33] for p in pin_out: self.write_log_to_Text((' %s 0 ' % p)) self.mycobot.set_digital_output(p, 0) time.sleep(0.5) time.sleep(1) for p in pin_in: self.write_log_to_Text((' %s : %s' % (p, self.mycobot.get_digital_input(p)))) time.sleep(0.5) time.sleep(1) for p in pin_out: self.write_log_to_Text((' %s 1 ' % p)) self.mycobot.set_digital_output(p, 1) time.sleep(0.5) time.sleep(1) for p in pin_in: self.write_log_to_Text((' %s : %s' % (p, self.mycobot.get_digital_input(p)))) time.sleep(0.5) time.sleep(1) def has_mycobot(self): if (not self.mycobot): self.write_log_to_Text('mycobot!!!') return False return True def _aging_test(self): aging_test_content_py = textwrap.dedent((" #!/usr/bin/python3\n\n from pymycobot.mycobot import MyCobot\n from pymycobot import PI_PORT, PI_BAUD\n import time\n \n speed = [50, 100]\n joint = [1, 2, 3, 4, 5, 6]\n angle = [0, 168, 90, 130, 145, 165, 180]\n coord = ['y', 'z', 'x']\n\n mycobot = MyCobot('%s', %s)\n\n def aging_test():\n\n mycobot.set_color(0,0,255)\n \n mycobot.wait(1).send_angles([0, 0, 0, 0, 0, 0], speed[1])\n\n # \n for a in 
range(1):\n for j in joint:\n for sp in speed:\n if sp == 10:\n t = 10\n elif sp == 50:\n t = 5\n elif sp == 100:\n t = 3\n mycobot.wait(t).send_angle(j, angle[j], sp)\n mycobot.wait(t).send_angle(j, angle[j]*(-1), sp)\n mycobot.wait(t).send_angle(j, angle[0], sp)\n\n \n # \n for b in range(2):\n for sp in speed:\n if sp == 10:\n t = 10\n elif sp == 50:\n t = 5\n elif sp == 100:\n t = 3\n mycobot.wait(t).send_angles([90, -90, 90, 90, 90, 90], sp)\n mycobot.wait(t).send_angles([-90, 90, -90, -90, -90, -90], sp)\n mycobot.wait(t).send_angles([-171.38, 70.57, 41.66, -24.87, -82.88, 6.76], sp)\n mycobot.wait(t).send_angles([0.43, -92.72, 92.9, 87.71, 89.56, -0.17], sp)\n \n mycobot.wait(5).send_angles([0, 0, 0, 0, 0, 0], speed[1])\n\n # \n mycobot.wait(5).send_angles([0, -25, -115, 45, -80, 0], speed[1])\n time.sleep(2)\n \n for c in range(2):\n for sp in speed:\n data_list = [235.4, -117.3, 244.5, 9.14, -25.44, 85.62]\n mycobot.wait(5).send_coords(data_list, speed[1], 1)\n if sp == 10:\n t = 10\n elif sp == 50:\n t = 3\n elif sp == 100:\n t = 1\n\n for cd in coord:\n if cd == 'x':\n i = 0\n elif cd == 'y':\n i = 1\n elif cd == 'z':\n i = 2\n print(cd)\n \n data_list[i] = data_list[i] + 90\n mycobot.wait(t).send_coords(data_list, sp, 1)\n print(t,data_list,sp)\n \n data_list[i] = data_list[i] - 140\n mycobot.wait(t).send_coords(data_list, sp, 1)\n print(t,data_list,sp)\n \n mycobot.wait(5).send_angles([0, 0, 0, 0, 0, 0], speed[1])\n \n # ()\n for d in range(2):\n for sp in speed:\n if sp == 10:\n t = 10\n elif sp == 50:\n t = 3\n elif sp == 100:\n t = 2\n mycobot.wait(t).send_angles([79.1, -30.41, -96.85, 40, 88.85, 0], sp)\n mycobot.wait(t).send_angles([79.1, -60.41, -96.85, 80, 88.85, 0], sp)\n mycobot.wait(t).send_angles([79.1, -30.41, -96.85, 40, 88.85, 0], sp)\n mycobot.wait(t).send_angles([-27.59, -8.78, -127.26, 45.35, 88.85, 0], sp)\n mycobot.wait(t).send_angles([-27.59, -60, -90, 60.35, 88.85, 0],sp)\n mycobot.wait(t).send_angles([-27.59, -8.78, -127.26, 
45.35, 88.85, 0], sp)\n \n self.mycobot.wait(5).send_angles([0, 0, 0, 0, 0, 0], speed[1])\n\n\n if __name__ == '__main__':\n while True:\n aging_test()\n " % (self.prot, self.baud))) aging_test_content_sh = textwrap.dedent(' #!/bin/bash\n python3 /home/ubuntu/Desktop/aging_test.py\n ') aging_test_content_service = textwrap.dedent(' [Unit]\n Description=aging-test\n\n [Service]\n Type=forking\n User=ubuntu\n Restart=on-failure\n RestartSec=6\n ExecStart=/home/ubuntu/Desktop/aging_test.sh\n\n [Install]\n WantedBy=multi-user.target\n ') os.system((('echo "' + aging_test_content_py) + '" >> /home/ubuntu/Desktop/aging_test.py')) os.system((('echo "' + aging_test_content_sh) + '" >> /home/ubuntu/Desktop/aging_test.sh')) os.system('sudo chmod +x /home/ubuntu/Desktop/aging_test.sh') os.system((('echo "' + aging_test_content_service) + '" >> /home/ubuntu/Desktop/aging_test.service')) os.system('sudo mv /home/ubuntu/Desktop/aging_test.service /etc/systemd/system/aging_test.service') os.system('sudo systemctl enable aging_test.service') os.system('sudo systemctl start aging_test.service') def _calibration_test(self): self.write_log_to_Text('.') self.mycobot.set_fresh_mode(1) time.sleep(0.5) angles = [0, 0, 0, 0, 0, 0] test_angle = [(- 20), 20, 0] for i in range(6): for j in range(3): angles[i] = test_angle[j] self.mycobot.send_angles(angles, 0) time.sleep(2) self.write_log_to_Text('.') def get_serial_port_list(self): plist = [str(x).split(' - ')[0].strip() for x in serial.tools.list_ports.comports()] print(plist) self.port_list['value'] = plist return plist def get_current_time(self): current_time = time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(time.time())) return current_time def write_log_to_Text(self, logmsg: str): global LOG_NUM current_time = self.get_current_time() logmsg_in = (((str(current_time) + ' ') + str(logmsg)) + '\n') if (LOG_NUM <= 18): self.log_data_Text.insert(tkinter.END, logmsg_in) LOG_NUM += len(logmsg_in.split('\n')) else: 
self.log_data_Text.insert(tkinter.END, logmsg_in) self.log_data_Text.yview('end')
# Parametrized test: Config.is_recording is True only when both 'enabled' and
# 'recording' are set.
# NOTE(review): the `@pytest.mark` prefix was stripped by extraction — the line
# below should begin `@pytest.mark.parametrize(...)`. Kept verbatim.
.parametrize('enabled,recording,is_recording', [(True, True, True), (True, False, False), (False, True, False), (False, False, False)]) def test_is_recording(enabled, recording, is_recording): c = Config(inline_dict={'enabled': enabled, 'recording': recording, 'service_name': 'foo'}) assert (c.is_recording is is_recording)
# Verify PatchHygieneVisitor collects nested class members as dotted paths in
# declaration order.
# NOTE(review): the indentation inside the `src` literal was collapsed by
# extraction — as written, `ast.parse(dedent(src))` will raise a SyntaxError
# because the nested class bodies are no longer indented. Restore the embedded
# indentation from the original file; kept verbatim here.
def test_visitor_class_nested() -> None: src = '\n class Foo:\n class Bar:\n some_field = 4\n ' tree = ast.parse(dedent(src)) visitor = PatchHygieneVisitor() visitor.visit(tree) items = visitor.items assert (items == ['Foo', 'Foo.Bar', 'Foo.Bar.some_field'])
def _extract_pair(object_file: str, resource_type: int, project: str, domain: str, version: str, patches: Dict[(int, Callable[([_GeneratedProtocolMessageType], _GeneratedProtocolMessageType)])]) -> Tuple[(_identifier_pb2.Identifier, Union[(_core_tasks_pb2.TaskTemplate, _core_workflow_pb2.WorkflowTemplate, _launch_plan_pb2.LaunchPlanSpec)])]:
    # Load a serialized Flyte entity from `object_file`, hydrate its
    # registration identifier with project/domain/version, and apply an
    # optional per-resource-type patch function before returning the
    # (identifier, entity) pair.
    # Raises FlyteAssertion for resource types outside the known map
    # (1 = task, 2 = workflow, 3 = launch plan).
    if (resource_type not in _resource_map):
        raise _user_exceptions.FlyteAssertion(f'Resource type found in proto file name [{resource_type}] invalid, must be 1 (task), 2 (workflow) or 3 (launch plan)')
    entity = utils.load_proto_from_file(_resource_map[resource_type], object_file)
    (registerable_identifier, registerable_entity) = hydrate_registration_parameters(resource_type, project, domain, version, entity)
    # Patches are optional and keyed by resource type.
    patch_fn = patches.get(resource_type)
    if patch_fn:
        registerable_entity = patch_fn(registerable_entity)
    return (registerable_identifier, registerable_entity)
def edit_start_up_settings(_: Any) -> None:
    """Interactive editor for the START_UP config section.

    Lists the known options with their current values, lets the user pick
    several, and prompts per option according to its declared type.
    """
    locale_manager = locale_handler.LocalManager.from_config()
    # Option name -> value type; the type drives which prompt is shown below.
    options = {'CHECK_FOR_UPDATES': bool, 'UPDATE_TO_BETAS': bool, 'HIDE_START_TEXT': bool, 'DEFAULT_START_OPTION': int, 'CREATE_BACKUP': bool}
    option_values = [get_config_value_category('START_UP', option) for option in options]
    ids = user_input_handler.select_not_inc(list(options.keys()), locale_manager.search_key('select_l'), option_values)
    for option_id in ids:
        option_name = list(options.keys())[option_id]
        option_type = options[option_name]
        current_value = option_values[option_id]
        if (option_type == bool):
            # Show the current boolean as a localized enabled/disabled label.
            if current_value:
                current_value = locale_manager.search_key('enabled')
            else:
                current_value = locale_manager.search_key('disabled')
            # '1' means enable; anything else disables.
            enable = (user_input_handler.colored_input((locale_manager.search_key('flag_set_config') % (option_name, current_value))) == '1')
            set_config_setting_category('START_UP', option_name, enable)
        elif (option_type == int):
            option_value = user_input_handler.get_int((locale_manager.search_key('enter_new_val_config') % (option_name, current_value)))
            set_config_setting_category('START_UP', option_name, option_value)
        else:
            raise Exception(f'Unsupported option type {option_type}')
def fortios_log_syslogd2(data, fos):
    """Apply the log.syslogd2 filter configuration through the FortiOS handler.

    Returns a 4-tuple: (failed, changed, raw response, empty diff placeholder).
    Fails the Ansible module when the task body is missing.
    """
    fos.do_member_operation('log.syslogd2', 'filter')
    if data['log_syslogd2_filter']:
        resp = log_syslogd2_filter(data, fos)
    else:
        fos._module.fail_json(msg=('missing task body: %s' % 'log_syslogd2_filter'))
    succeeded = is_successful_status(resp)
    # A missing 'revision_changed' key is treated as "changed".
    changed = succeeded and resp.get('revision_changed', True)
    return (not succeeded, changed, resp, {})
def extractChinesenoveltranslationsSimplegamesdevCom(item):
    """Map a feed item to a release message.

    Returns None for previews or titles without a chapter/volume number, a
    release message for 'WATTT'-tagged items, and False otherwise.
    """
    title = item['title']
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(title)
    has_number = bool(chp or vol)
    if not has_number or 'preview' in title.lower():
        return None
    if 'WATTT' in item['tags']:
        return buildReleaseMessageWithType(item, 'WATTT', vol, chp, frag=frag, postfix=postfix)
    return False
# NOTE(review): the decorator head was stripped during extraction (residue
# '.parametrize(...)'); restored as @pytest.mark.parametrize.
@pytest.mark.parametrize('params', [(False, 1), (True, 10)])
def test_correct_param_assignment_at_init(params):
    """Constructor parameters are stored unchanged on the transformer."""
    param1, param2 = params
    t = GeometricWidthDiscretiser(return_object=param1, return_boundaries=param1, precision=param2, bins=param2)
    assert t.return_object is param1
    assert t.return_boundaries is param1
    assert t.precision == param2
    assert t.bins == param2
def find_parents_and_order(glyphsets, locations):
    """Choose a parent master and a visiting order for interpolatability checks.

    When `locations` are available and scipy is importable, builds a minimum
    spanning tree over the pairwise location distances and walks it BFS from
    the base master; otherwise falls back to a simple chain ordering.

    Returns:
        (parents, order): parents[i] is the index of master i's parent (None
        for the root); order is the BFS visit order.
    """
    parents = [None] + list(range(len(glyphsets) - 1))
    order = list(range(len(glyphsets)))
    if locations:
        # BUG FIX: the original did `bases = (i for ...)` then `if bases:` —
        # a generator object is always truthy, so the guard never fired and
        # `next(bases)` raised StopIteration when no base location existed.
        base = next(
            (i for i, l in enumerate(locations) if all(v == 0 for v in l.values())),
            None,
        )
        if base is not None:
            # Use the module logger consistently (the original mixed root
            # `logging.*` calls with the module-level `log`).
            log.info('Base master index %s, location %s', base, locations[base])
        else:
            base = 0
            log.warning('No base master location found')
        try:
            from scipy.sparse.csgraph import minimum_spanning_tree
            # Pairwise squared distances between master locations (upper triangle).
            graph = [[0] * len(locations) for _ in range(len(locations))]
            axes = sorted({k for l in locations for k in l})
            vectors = [tuple(l.get(k, 0) for k in axes) for l in locations]
            for i, j in itertools.combinations(range(len(locations)), 2):
                graph[i][j] = vdiff_hypot2(vectors[i], vectors[j])
            tree = minimum_spanning_tree(graph)
            rows, cols = tree.nonzero()
            adjacency = defaultdict(set)
            for row, col in zip(rows, cols):
                adjacency[row].add(col)
                adjacency[col].add(row)
            # BFS from the base master to derive parents and visit order.
            parents = [None] * len(locations)
            order = []
            visited = set()
            queue = deque([base])
            while queue:
                i = queue.popleft()
                visited.add(i)
                order.append(i)
                for j in sorted(adjacency[i]):
                    if j not in visited:
                        parents[j] = i
                        queue.append(j)
        except ImportError:
            # scipy unavailable: keep the fallback chain ordering.
            pass
    log.info('Parents: %s', parents)
    log.info('Order: %s', order)
    return (parents, order)
def set_managed_items(save_stats: dict[(str, Any)]) -> dict[(str, Any)]:
    """Push the save's managed items to the server after generating a token.

    Returns the (possibly refreshed) save_stats; on token failure, reports the
    error and returns without updating.
    """
    data = server_handler.check_gen_token(save_stats)
    save_stats = data['save_stats']
    token = data['token']
    if token is None:
        helper.colored_text('Error generating token')
        return save_stats
    server_handler.update_managed_items(save_stats['inquiry_code'], token, save_stats)
    return save_stats
def compile_to_strings(lib_name, proc_list):
    """Compile a list of Exo procs into (header, body) C source strings.

    Public procs are the ones originally passed in; subprocs discovered
    transitively are emitted as private (static) declarations.
    """
    # Identity of the originally requested procs; used to decide public vs private.
    orig_procs = [id(p) for p in proc_list]

    def from_lines(x):
        return '\n'.join(x)

    # Deterministic ordering by proc name.
    proc_list = list(sorted(find_all_subprocs(proc_list), key=(lambda x: x.name)))
    (ctxt_name, ctxt_def) = _compile_context_struct(find_all_configs(proc_list), lib_name)
    struct_defns = set()
    public_fwd_decls = []
    memory_code = _compile_memories(find_all_mems(proc_list))
    builtin_code = _compile_builtins(find_all_builtins(proc_list))
    private_fwd_decls = []
    proc_bodies = []
    needed_helpers = set()
    seen_procs = set()
    for p in proc_list:
        # Duplicate proc names would collide in the generated C.
        if (p.name in seen_procs):
            raise TypeError(f'multiple procs named {p.name}')
        seen_procs.add(p.name)
        if (p.instr is not None):
            # Instruction procs are not compiled; emit a comment documenting them.
            argstr = ','.join([str(a.name) for a in p.args])
            proc_bodies.extend(['', '/* relying on the following instruction..."', f'{p.name}({argstr})', p.instr, '*/'])
        else:
            is_public_decl = (id(p) in orig_procs)
            # Analysis/lowering pipeline run before C code generation.
            p = ParallelAnalysis().run(p)
            p = PrecisionAnalysis().run(p)
            p = WindowAnalysis().apply_proc(p)
            p = MemoryAnalysis().run(p)
            comp = Compiler(p, ctxt_name, is_public_decl=is_public_decl)
            (d, b) = comp.comp_top()
            struct_defns |= comp.struct_defns()
            needed_helpers |= comp.needed_helpers()
            if is_public_decl:
                public_fwd_decls.append(d)
            else:
                private_fwd_decls.append(d)
            proc_bodies.append(b)
    # Deterministic ordering of emitted struct definitions.
    struct_defns = [x.definition for x in sorted(struct_defns, key=(lambda x: x.name))]
    # NOTE(review): the layout of this generated C header was reconstructed from
    # whitespace-mangled source; confirm line breaks against the original.
    header_contents = f'''
#include <stdint.h>
#include <stdbool.h>

// Compiler feature macros adapted from Hedley (public domain)
//

#if defined(__has_builtin)
# define EXO_HAS_BUILTIN(builtin) __has_builtin(builtin)
#else
# define EXO_HAS_BUILTIN(builtin) (0)
#endif

#if EXO_HAS_BUILTIN(__builtin_assume)
# define EXO_ASSUME(expr) __builtin_assume(expr)
#elif EXO_HAS_BUILTIN(__builtin_unreachable)
# define EXO_ASSUME(expr) ((void)((expr) ? 1 : (__builtin_unreachable(), 1)))
#else
# define EXO_ASSUME(expr) ((void)(expr))
#endif

{from_lines(ctxt_def)}
{from_lines(struct_defns)}
{from_lines(public_fwd_decls)}
'''
    helper_code = [_static_helpers[v] for v in needed_helpers]
    body_contents = f'''
{from_lines(helper_code)}
{from_lines(memory_code)}
{from_lines(builtin_code)}
{from_lines(private_fwd_decls)}
{from_lines(proc_bodies)}
'''
    return (header_contents, body_contents)
class ObjectClassCountViewSet(DisasterBase):
    """Counts distinct object classes with qualifying disaster/COVID spending."""
    endpoint_doc = 'usaspending_api/api_contracts/contracts/v2/disaster/object_class/count.md'

    # NOTE(review): decorator residue — '_response()' is presumably the tail of
    # a caching decorator (e.g. '@cache_response()'); confirm against the original.
    _response()
    def post(self, request: Request) -> Response:
        """Return {'count': N} of distinct object classes matching the filters."""
        # Correlated EXISTS keeps this to a single pass over ObjectClass rows.
        filters = [Q(object_class_id=OuterRef('pk')), self.all_closed_defc_submissions, self.is_in_provided_def_codes, self.is_non_zero_total_spending]
        count = ObjectClass.objects.filter(Exists(FinancialAccountsByProgramActivityObjectClass.objects.filter(*filters))).values('object_class').distinct().count()
        return Response({'count': count})
class Ganache7MiddleWare(BrownieMiddlewareABC):
    """Normalizes ganache v7 JSON-RPC error payloads into the shape brownie expects."""

    # NOTE(review): takes `cls` — presumably decorated with @classmethod in the
    # original; the decorator appears to have been lost in extraction.
    def get_layer(cls, w3: Web3, network_type: str) -> Optional[int]:
        """Activate with highest priority (-100) only for ganache v7 clients."""
        if w3.clientVersion.lower().startswith('ganache/v7'):
            return (- 100)
        else:
            return None

    def process_request(self, make_request: Callable, method: str, params: List) -> Dict:
        """Forward the RPC call, then rewrite revert/VM-exception error data in place."""
        result = make_request(method, params)
        # Transaction errors: rename fields and rekey the payload by tx hash.
        if ((method in ('eth_sendTransaction', 'eth_sendRawTransaction')) and ('error' in result) and ('data' in result['error'])):
            data = result['error']['data']
            data['error'] = data.pop('message')
            data['program_counter'] = data.pop('programCounter')
            result['error']['data'] = {data.pop('hash'): data}
        # eth_call VM exceptions: synthesize a revert-style payload under '0x'.
        if ((method == 'eth_call') and ('error' in result) and result['error'].get('message', '').startswith('VM Exception')):
            msg = result['error']['message'].split(': ', maxsplit=1)[(- 1)]
            if msg.startswith('revert'):
                # Strip the 'revert ' prefix to isolate the reason string.
                data = {'error': 'revert', 'reason': msg[7:]}
            else:
                data = {'error': msg, 'reason': None}
            result['error']['data'] = {'0x': data}
        return result
# NOTE(review): the decorator heads were stripped during extraction (residues
# '.xfail(...)' and '.skipif(...)'); restored as pytest marks.
@pytest.mark.xfail(raises=ImageComparisonFailure, reason='Matplotlib plots for reasons a different image size.')
@pytest.mark.skipif(LOW_MEMORY > memory, reason='Travis has too less memory to run it.')
def test_hicPlotMatrix_perChr_pca1_bigwig_vertical():
    """Render the per-chromosome Pearson matrix with a vertical bigwig axis and
    compare the output against the reference image."""
    outfile = NamedTemporaryFile(suffix='.png', prefix='hicexplorer_test', delete=False)
    args = '--matrix {0}/hicTransform/pearson_perChromosome.h5 --perChr --disable_tight_layout --bigwigAdditionalVerticalAxis --outFileName {1} --bigwig {2} {2}'.format(ROOT, outfile.name, (ROOT + 'hicPCA/pca1.bw')).split()
    compute(hicexplorer.hicPlotMatrix.main, args, 5)
    res = compare_images(((ROOT + 'hicPlotMatrix') + '/small_matrix_50kb_pearson_pca1_plot_two_bigwig_vertical.png'), outfile.name, tol=tolerance)
    assert res is None, res
    if REMOVE_OUTPUT:
        os.remove(outfile.name)
def ExRem(array1, array2):
    """Remove excessive signals (AmiBroker-style ExRem).

    Keeps a True from `array1` only after a True from `array2` has occurred
    since the previously kept signal. Both inputs must share the same index.

    Returns:
        A boolean Series aligned with the inputs' index.
    """
    assert array1.index.equals(array2.index), 'Indices do not match'
    out = pandas.Series(False, dtype=bool, index=array1.index)
    i = 0
    n = len(array1)
    while i < n:
        # .iloc: positional access works for any index type, unlike the original
        # `array1[i]`, which is label-vs-position ambiguous (and fails outright
        # on non-integer indexes).
        if array1.iloc[i]:
            out.iloc[i] = True
            # Jump ahead to the next confirming signal in array2 (or the end).
            for j in range(i, n):
                if array2.iloc[j]:
                    break
            i = j
        i += 1
    # NOTE: the original ended with `.fillna(False)`, which is a no-op on a
    # bool-initialized series and has been dropped.
    return out
class BaseType():
    """Represents a ROS interface field type: a primitive, a (possibly bounded)
    string, or a '<pkg>/<Message>' reference."""
    __slots__ = ['pkg_name', 'type', 'string_upper_bound']

    # pkg_name: owning package for message types; None for primitives/strings.
    pkg_name: Optional[str]
    # type: primitive name, 'string'/'wstring', or the message name.
    type: str
    # string_upper_bound: max length for bounded (w)strings, else None.
    string_upper_bound: Optional[int]

    def __init__(self, type_string: str, context_package_name: Optional[str]=None):
        """Parse `type_string`; `context_package_name` resolves bare message names.

        Raises:
            TypeError: for an invalid (non-positive / non-integer) string bound.
            InvalidResourceName: for malformed package or message names.
        """
        if (type_string in PRIMITIVE_TYPES):
            self.pkg_name = None
            self.type = type_string
            self.string_upper_bound = None
        elif (type_string.startswith(('string%s' % STRING_UPPER_BOUND_TOKEN)) or type_string.startswith(('wstring%s' % STRING_UPPER_BOUND_TOKEN))):
            # Bounded string, e.g. 'string<=10': split off and validate the bound.
            self.pkg_name = None
            self.type = type_string.split(STRING_UPPER_BOUND_TOKEN, 1)[0]
            upper_bound_string = type_string[(len(self.type) + len(STRING_UPPER_BOUND_TOKEN)):]
            ex = TypeError((("the upper bound of the string type '%s' must " + 'be a valid integer value > 0') % type_string))
            try:
                self.string_upper_bound = int(upper_bound_string)
            except ValueError:
                raise ex
            if (self.string_upper_bound <= 0):
                raise ex
        else:
            # Message type: '<pkg>/<Msg>', or bare '<Msg>' resolved via context.
            parts = type_string.split(PACKAGE_NAME_MESSAGE_TYPE_SEPARATOR)
            if (not ((len(parts) == 2) or ((len(parts) == 1) and (context_package_name is not None)))):
                raise InvalidResourceName(type_string)
            if (len(parts) == 2):
                self.pkg_name = parts[0]
                self.type = parts[1]
            else:
                self.pkg_name = context_package_name
                self.type = type_string
            if (not is_valid_package_name(self.pkg_name)):
                raise InvalidResourceName("'{}' is an invalid package name. It should have the pattern '{}'".format(self.pkg_name, VALID_PACKAGE_NAME_PATTERN.pattern))
            if (not is_valid_message_name(self.type)):
                raise InvalidResourceName("'{}' is an invalid message name. It should have the pattern '{}'".format(self.type, VALID_MESSAGE_NAME_PATTERN.pattern))
            self.string_upper_bound = None

    def is_primitive_type(self):
        """True for primitives and (bounded) strings — types with no owning package."""
        return (self.pkg_name is None)

    def __eq__(self, other):
        if ((other is None) or (not isinstance(other, BaseType))):
            return False
        return ((self.pkg_name == other.pkg_name) and (self.type == other.type) and (self.string_upper_bound == other.string_upper_bound))

    def __hash__(self):
        # Hash the canonical string form so equal types hash equally.
        return hash(str(self))

    def __str__(self):
        """Canonical form: '<pkg>/<Msg>', or the primitive name with an optional bound."""
        if (self.pkg_name is not None):
            return ('%s/%s' % (self.pkg_name, self.type))
        s = self.type
        if self.string_upper_bound:
            s += ('%s%u' % (STRING_UPPER_BOUND_TOKEN, self.string_upper_bound))
        return s
class TestStrategy(bt.Strategy):
    """Backtrader demo strategy: deep limit-order entry with a cancel after
    `exitbars` bars, plus order/trade logging."""
    # exitbars: bars to wait before exiting/cancelling; printlog: verbose logging.
    params = [('exitbars', 5), ('printlog', True)]

    def __init__(self):
        super().__init__()
        self.dataclose = self.datas[0].close
        self.order = None          # currently pending order, if any
        self.order_cancel = None   # order we have asked the broker to cancel
        self.buyprice = None
        self.buycomm = None
        self.bar_executed = 0      # bar count when the last order was placed/filled

    def log(self, fmt, *args, doprint=False, **kwargs):
        """Log `fmt % args` prefixed with the current data timestamp."""
        if (self.params.printlog or doprint):
            try:
                dt = self.datas[0].datetime.datetime(0, naive=False)
            except IndexError:
                # No bar loaded yet (e.g. in start/stop callbacks): wall clock.
                dt = datetime.datetime.now()
            fmt = '{}, {}'.format(dt, fmt)
            logging.info(fmt, *args, **kwargs)

    def notify_order(self, order):
        """Track the order lifecycle; record fill details and free order slots."""
        if (order.status in [order.Submitted, order.Accepted]):
            # Broker acknowledged; nothing to do yet.
            return
        if (order.status in [order.Completed]):
            if order.isbuy():
                # NOTE(review): 'Exuecuted' typo preserved — it is a runtime log string.
                self.log('Buy Exuecuted, Size: %d, Price: %.2f, Cost: %.2f, Comm: %.2f', order.executed.size, order.executed.price, order.executed.value, order.executed.comm)
                self.buyprice = order.executed.price
                self.buycomm = order.executed.comm
            elif order.issell():
                self.log('Sell Executed, Size: %d, Price: %.2f, Cost: %.2f, Comm: %.2f', order.executed.size, order.executed.price, order.executed.value, order.executed.comm)
            self.order = None
            self.bar_executed = len(self)
        elif (order.status in [order.Canceled]):
            self.log('Order Canceled')
            if (self.order == self.order_cancel):
                self.order = None
                self.order_cancel = None
        elif (order.status in [order.Margin]):
            self.log('Order Margin')
            if (self.order == self.order_cancel):
                self.order = None
                self.order_cancel = None
        elif (order.status in [order.Rejected]):
            self.log('Order Rejected')
            if (self.order == self.order_cancel):
                self.order = None
                self.order_cancel = None

    def notify_trade(self, trade):
        """Report gross/net PnL when a round-trip trade closes."""
        if (not trade.isclosed):
            return
        self.log('Operation Profit, Gross: %.2f, Net: %.2f', trade.pnl, trade.pnlcomm)

    def next_market(self):
        """Market-order variant: buy when flat, sell `exitbars` bars after the fill."""
        self.log('Close, %.2f', self.dataclose[0])
        if self.order:
            # An order is already in flight; wait for it.
            return
        if (not self.position):
            self.log('Buy Create, %.2f', self.dataclose[0])
            self.order = self.buy()
        elif (len(self) >= (self.bar_executed + self.params.exitbars)):
            self.log('Sell Create, %.2f', self.dataclose[0])
            self.order = self.sell()

    def next_limit(self):
        """Limit-order variant: place a deep limit buy (close - 1000, so it
        should never fill), then cancel it after `exitbars` bars."""
        self.log('Close, %.2f', self.dataclose[0])
        if self.order:
            if (len(self) >= (self.bar_executed + self.params.exitbars)):
                if (not self.order_cancel):
                    self.log('Cancel Create')
                    self.cancel(self.order)
                    self.order_cancel = self.order
        elif (not self.position):
            self.log('Buy Create, %.2f', self.dataclose[0])
            self.order = self.buy(exectype=bt.Order.Limit, price=(self.dataclose[0] - 1000.0))
            self.bar_executed = len(self)
        elif (len(self) >= (self.bar_executed + self.params.exitbars)):
            self.log('No Sell Create, %.2f', self.dataclose[0])

    def start(self):
        self.log('Starting Portfolio Value: %.2f', self.broker.getvalue(), doprint=True)

    def next(self):
        # This demo exercises the limit-order path.
        return self.next_limit()

    def stop(self):
        self.log('Final Portfolio Value: %.2f', self.broker.getvalue(), doprint=True)
def main(args, fn_data):
    """Run `args.num_exp` PEHE experiments and aggregate the results.

    Collects train/test PEHE per run, drops NaN runs, computes mean
    confidence intervals, optionally dumps everything as JSON to `args.o`,
    and returns the results dict.
    """
    PEHE_train_ = []
    PEHE_test_ = []
    results_d = {}
    time_start = time.time()
    for _ in range(args.num_exp):
        (pehe_train_curr, pehe_test_curr) = run_experiment(fn_data, mode=args.mode, test_frac=args.test_frac)
        PEHE_train_.append(pehe_train_curr)
        PEHE_test_.append(pehe_test_curr)
        print(('Experiment: %d (train) \tPEHE: %.3f \t--- (test) \tPEHE: %.3f \t---' % (_, pehe_train_curr, pehe_test_curr)))
    results_d['train'] = PEHE_train_
    results_d['test'] = PEHE_test_
    # Drop NaN runs before computing confidence intervals.
    PEHE_train_np = np.array(PEHE_train_)[(~ np.isnan(np.array(PEHE_train_)))]
    PEHE_test_np = np.array(PEHE_test_)[(~ np.isnan(np.array(PEHE_test_)))]
    time_exe = (time.time() - time_start)
    results_d['PEHE_train'] = mean_confidence_interval(PEHE_train_np)
    results_d['PEHE_test'] = mean_confidence_interval(PEHE_test_np)
    results_d['time_exe'] = time_exe
    if (args.o is not None):
        with open(args.o, 'w') as fp:
            json.dump(results_d, fp)
    print('exe time {:0.0f}s'.format(time_exe))
    print(('Final results|| Train PEHE = %.3f +/- %.3f --- Test PEHE = %.3f +/- %.3f' % (mean_confidence_interval(PEHE_train_np)[0], mean_confidence_interval(PEHE_train_np)[1], mean_confidence_interval(PEHE_test_np)[0], mean_confidence_interval(PEHE_test_np)[1])))
    return results_d
# NOTE(review): the decorator head was stripped during extraction (residue
# '.usefixtures(...)'); restored as @pytest.mark.usefixtures.
@pytest.mark.usefixtures('scan_teardown')
def test_scroll_error(sync_client):
    """helpers.scan: raise_on_error=False swallows a mid-scan scroll failure,
    while raise_on_error=True surfaces it as ScanError."""
    bulk = []
    for x in range(4):
        bulk.append({'index': {'_index': 'test_index'}})
        bulk.append({'value': x})
    sync_client.bulk(operations=bulk, refresh=True)
    with patch.object(sync_client, 'options', return_value=sync_client), patch.object(sync_client, 'scroll') as scroll_mock:
        scroll_mock.side_effect = mock_scroll_responses
        data = list(helpers.scan(sync_client, index='test_index', size=2, raise_on_error=False, clear_scroll=False))
        assert len(data) == 3
        assert data[-1] == {'scroll_data': 42}
        # Reset the side effect and verify the strict mode raises instead.
        scroll_mock.side_effect = mock_scroll_responses
        with pytest.raises(ScanError):
            data = list(helpers.scan(sync_client, index='test_index', size=2, raise_on_error=True, clear_scroll=False))
        assert len(data) == 3
        assert data[-1] == {'scroll_data': 42}
class OptionSeriesAreasplineDragdropGuideboxDefault(Options):
    """Generated Highcharts options: default drag guide-box style for areaspline.

    NOTE(review): each option appears as a getter/setter pair with the same
    name — the '@property' / '@<name>.setter' decorators were presumably lost
    in extraction (as written, each second def shadows the first). Confirm
    against the generator's original output.
    """

    def className(self):
        """CSS class of the guide box (default 'highcharts-drag-box-default')."""
        return self._config_get('highcharts-drag-box-default')

    def className(self, text: str):
        self._config(text, js_type=False)

    def color(self):
        """Guide box fill color (default 'rgba(0, 0, 0, 0.1)')."""
        return self._config_get('rgba(0, 0, 0, 0.1)')

    def color(self, text: str):
        self._config(text, js_type=False)

    def cursor(self):
        """Mouse cursor shown while dragging (default 'move')."""
        return self._config_get('move')

    def cursor(self, text: str):
        self._config(text, js_type=False)

    def lineColor(self):
        """Guide box border color (default '#888')."""
        return self._config_get('#888')

    def lineColor(self, text: str):
        self._config(text, js_type=False)

    def lineWidth(self):
        """Guide box border width in pixels (default 1)."""
        return self._config_get(1)

    def lineWidth(self, num: float):
        self._config(num, js_type=False)

    def zIndex(self):
        """Guide box z-index (default 900)."""
        return self._config_get(900)

    def zIndex(self, num: float):
        self._config(num, js_type=False)
class DefaultCreateModelMixin(CreateModelMixin):
    """Create mixin that returns the created instance through ``get_response``."""

    def create(self: BaseGenericViewSet, request: Request, *args: Any, **kwargs: Any) -> Response:
        """Validate the payload, persist it, and wrap the new object in a 201 response."""
        ser = self.get_serializer(data=request.data)
        ser.is_valid(raise_exception=True)
        created = self.perform_create(ser)
        return self.get_response(created, status.HTTP_201_CREATED, self.get_success_headers(ser.data))

    def perform_create(self: BaseGenericViewSet, serializer: Any) -> Any:
        """Persist the validated serializer; subclass hook point."""
        return serializer.save()
# NOTE(review): this function yields inside a with/try and is clearly meant to
# be used as a context manager; the '@contextlib.contextmanager' decorator
# appears to have been lost in extraction and is reinstated here.
@contextlib.contextmanager
def mock_requeue(called_with: tp.Optional[int]=None, not_called: bool=False):
    """Patch SlurmJobEnvironment._requeue for the duration of the block and
    verify, on exit, either that it was never called or that it was last
    called with `called_with`."""
    # Exactly one verification mode must be selected.
    assert (not_called or (called_with is not None))
    requeue = patch('submitit.slurm.slurm.SlurmJobEnvironment._requeue', return_value=None)
    with requeue as _patch:
        try:
            yield
        finally:
            # Verify even if the body raised.
            if not_called:
                _patch.assert_not_called()
            else:
                _patch.assert_called_with(called_with)
class MsgStub(object):
    """gRPC client stub for the ibc.applications.transfer.v1 Msg service."""

    def __init__(self, channel):
        """Bind the Transfer unary-unary RPC onto the given grpc.Channel."""
        self.Transfer = channel.unary_unary('/ibc.applications.transfer.v1.Msg/Transfer', request_serializer=ibc_dot_applications_dot_transfer_dot_v1_dot_tx__pb2.MsgTransfer.SerializeToString, response_deserializer=ibc_dot_applications_dot_transfer_dot_v1_dot_tx__pb2.MsgTransferResponse.FromString)
# NOTE(review): two decorator residues were found here — "('pyscf')" (the
# argument of a stripped marker such as @using('pyscf')) and '.parametrize(...)'.
# The parametrize mark is restored below; restore the pyscf marker from the
# original source.
@pytest.mark.parametrize('embedding, ref_energy', [('', (- 151.)), ('electronic', (- 151.)), ('electronic_rc', (- 151.)), ('electronic_rcd', (- 151.))])
def test_oniom_ee_charge_distribution(embedding, ref_energy, pyscf_acetaldehyd_getter):
    """All ONIOM electronic-embedding variants should reach the reference energy."""
    geom = pyscf_acetaldehyd_getter(embedding=embedding)
    en = geom.energy
    assert en == pytest.approx(ref_energy)
def create_unary_op(op_type: FuncEnum, args: Tuple[(Argument, ...)], kwargs: Dict[(str, Argument)], name: str) -> AITTensor:
    """Build an AIT elementwise unary op, constant-folding pure numeric inputs."""
    if 'input' in kwargs:
        operand = kwargs['input']
    else:
        operand = args[0]
    if not isinstance(operand, (AITTensor, float, int)):
        raise RuntimeError(f'Unexpected left operand {type(operand)} on {name}: {operand}')
    # Constant inputs are folded eagerly with the matching Python operator.
    is_constant, constant_value = try_get_constant_num(operand)
    if is_constant:
        return get_python_op_from_ait_constant_elementwise_op(op_type)(constant_value)
    return elementwise(op_type)(operand)
class OptionSeriesCylinderSonificationContexttracksMappingTremolo(Options):
    """Generated Highcharts options: tremolo mapping for cylinder-series
    sonification context tracks."""

    def depth(self) -> 'OptionSeriesCylinderSonificationContexttracksMappingTremoloDepth':
        """Sub-configuration for the tremolo depth."""
        return self._config_sub_data('depth', OptionSeriesCylinderSonificationContexttracksMappingTremoloDepth)

    def speed(self) -> 'OptionSeriesCylinderSonificationContexttracksMappingTremoloSpeed':
        """Sub-configuration for the tremolo speed."""
        return self._config_sub_data('speed', OptionSeriesCylinderSonificationContexttracksMappingTremoloSpeed)
class OptionSeriesHistogramSonificationPointgrouping(Options):
    """Generated Highcharts options: point grouping for histogram sonification.

    NOTE(review): getter/setter pairs share a name — '@property' decorators were
    presumably lost in extraction. Confirm against the generator's output.
    """

    def algorithm(self):
        """Grouping algorithm (default 'minmax')."""
        return self._config_get('minmax')

    def algorithm(self, text: str):
        self._config(text, js_type=False)

    def enabled(self):
        """Whether point grouping is active (default True)."""
        return self._config_get(True)

    def enabled(self, flag: bool):
        self._config(flag, js_type=False)

    def groupTimespan(self):
        """Timespan covered by each group, in milliseconds (default 15)."""
        return self._config_get(15)

    def groupTimespan(self, num: float):
        self._config(num, js_type=False)

    def prop(self):
        """Point property the grouping operates on (default 'y')."""
        return self._config_get('y')

    def prop(self, text: str):
        self._config(text, js_type=False)
def publish_event(channel, event_type, data, pub_id, pub_prev_id, skip_user_ids=None, **publish_kwargs):
    """Publish a server-sent event to GRIP subscribers of `channel`.

    `pub_id`/`pub_prev_id` enable reliable, ordered delivery; `skip_user_ids`
    suppresses delivery to the listed users via publish metadata.
    """
    from django_grip import publish
    if (skip_user_ids is None):
        skip_user_ids = []
    content_filters = []
    if pub_id:
        # '%I' is the GRIP template placeholder substituted with the real event
        # id by the proxy; 'build-id' activates that substitution.
        event_id = '%I'
        content_filters.append('build-id')
    else:
        event_id = None
    content = sse_encode_event(event_type, data, event_id=event_id, escape=bool(pub_id))
    meta = {}
    if skip_user_ids:
        meta['skip_users'] = ','.join(skip_user_ids)
    publish(('events-%s' % quote(channel)), HttpStreamFormat(content, content_filters=content_filters), id=pub_id, prev_id=pub_prev_id, meta=meta, **publish_kwargs)
def _validate_cai_enabled(cai_configs): if (not cai_configs.get('enabled')): LOGGER.debug('CloudAsset Inventory disabled by configuration.') return False if (not cai_configs.get('gcs_path', '').startswith('gs://')): LOGGER.debug('CloudAsset Inventory not configured with a valid GCS bucket.') return False return True
# NOTE(review): decorator residue — '_function' is presumably the tail of a
# session/transaction decorator from this module; confirm against the original.
_function
def ip_route_add(session, destination, device=None, gateway='', source='', ifindex=0, route_type=zebra.ZEBRA_ROUTE_KERNEL, is_selected=True):
    """Insert a route row for `destination` ('addr/prefixlen') into the zebra DB.

    Returns the newly added Route, the pre-existing identical Route, or None
    when the device is unknown or the destination prefix is invalid.
    """
    if device:
        intf = interface.ip_link_show(session, ifname=device)
        if (not intf):
            LOG.debug('Interface "%s" does not exist', device)
            return None
        # Explicit ifindex argument wins over the interface's own index.
        ifindex = (ifindex or intf.ifindex)
    # Idempotency: reuse an identical existing route.
    route = ip_route_show(session, destination=destination, device=device)
    if route:
        LOG.debug('Route to "%s" already exists on "%s" device', destination, device)
        return route
    (dest_addr, dest_prefix_num) = destination.split('/')
    dest_prefix_num = int(dest_prefix_num)
    # Determine the address family (and prefix-length bounds) of the destination.
    if (ip.valid_ipv4(dest_addr) and (0 <= dest_prefix_num <= 32)):
        family = socket.AF_INET
    elif (ip.valid_ipv6(dest_addr) and (0 <= dest_prefix_num <= 128)):
        family = socket.AF_INET6
    else:
        LOG.debug('Invalid IP address for "prefix": %s', destination)
        return None
    safi = packet_safi.UNICAST
    if is_selected:
        # Only one route may be selected per destination: demote the current ones.
        old_routes = ip_route_show_all(session, destination=destination, is_selected=True)
        for old_route in old_routes:
            if old_route:
                LOG.debug('Set existing route to unselected: %s', old_route)
                old_route.is_selected = False
    new_route = Route(family=family, safi=safi, destination=destination, gateway=gateway, ifindex=ifindex, source=source, route_type=route_type, is_selected=is_selected)
    session.add(new_route)
    return new_route
def factorization_test(setup):
    """PLONK end-to-end demo: prove knowledge of 4-bit p and q with p * q == 91.

    Builds the constraint system, generates a verification key and a proof from
    a hard-coded witness (p = 13, q = 7), and asserts the proof verifies.
    """
    print('Beginning test: prove you know small integers that multiply to 91')
    # Constraint system: booleanity checks on every bit wire (x === x*x),
    # recombination of the bits into p and q, and the public output n === p*q.
    eqs = '\n    n public\n    pb0 === pb0 * pb0\n    pb1 === pb1 * pb1\n    pb2 === pb2 * pb2\n    pb3 === pb3 * pb3\n    qb0 === qb0 * qb0\n    qb1 === qb1 * qb1\n    qb2 === qb2 * qb2\n    qb3 === qb3 * qb3\n    pb01 <== pb0 + 2 * pb1\n    pb012 <== pb01 + 4 * pb2\n    p <== pb012 + 8 * pb3\n    qb01 <== qb0 + 2 * qb1\n    qb012 <== qb01 + 4 * qb2\n    q <== qb012 + 8 * qb3\n    n <== p * q\n    '
    public = [91]
    vk = c.make_verification_key(setup, 16, eqs)
    print('Generated verification key')
    # Witness bits encode p = 1101b = 13 and q = 0111b = 7; 13 * 7 = 91.
    assignments = c.fill_variable_assignments(eqs, {'pb3': 1, 'pb2': 1, 'pb1': 0, 'pb0': 1, 'qb3': 0, 'qb2': 1, 'qb1': 1, 'qb0': 1})
    proof = p.prove_from_witness(setup, 16, eqs, assignments)
    print('Generated proof')
    assert v.verify_proof(setup, 16, vk, proof, public, optimized=True)
    print('Factorization test success!')
class ModelParametersGetter(BaseAction):
    """Action that prepares the local parameters directory for a model."""

    def __init__(self, model_id, config_json):
        BaseAction.__init__(self, model_id=model_id, config_json=config_json, credentials_json=None)

    # NOTE(review): takes only `model_path` (no self) — presumably decorated
    # with @staticmethod in the original; as written,
    # `self._requires_parameters(model_path)` below would pass `self` as
    # `model_path`. Confirm against the original source.
    def _requires_parameters(model_path):
        pf = PackFile(model_path)
        return pf.needs_model()

    def _get_destination(self):
        """Path of this model's parameters directory."""
        model_path = self._model_path(self.model_id)
        return os.path.join(model_path, MODEL_DIR)

    # NOTE(review): decorator residue — presumably '@throw_ersilia_exception'.
    _ersilia_exception
    def get(self):
        """Create the parameters folder when the packed model requires one."""
        model_path = self._model_path(self.model_id)
        folder = self._get_destination()
        if (not os.path.exists(folder)):
            os.mkdir(folder)
        if (not self._requires_parameters(model_path)):
            return None
        # NOTE(review): unreachable in practice — the folder was created above.
        if (not os.path.exists(folder)):
            raise FolderNotFoundError(folder)
class OptionPlotoptionsVariwideSonificationContexttracksMappingHighpass(Options):
    """Generated Highcharts options: highpass-filter mapping for variwide
    sonification context tracks."""

    def frequency(self) -> 'OptionPlotoptionsVariwideSonificationContexttracksMappingHighpassFrequency':
        """Sub-configuration for the filter cutoff frequency."""
        return self._config_sub_data('frequency', OptionPlotoptionsVariwideSonificationContexttracksMappingHighpassFrequency)

    def resonance(self) -> 'OptionPlotoptionsVariwideSonificationContexttracksMappingHighpassResonance':
        """Sub-configuration for the filter resonance."""
        return self._config_sub_data('resonance', OptionPlotoptionsVariwideSonificationContexttracksMappingHighpassResonance)
class FaucetUntaggedMultiVlansOutputTest(FaucetUntaggedTest):
    """ACL output-action test: rewrite the destination MAC and push two VLAN
    tags (outer 456 over inner 123) before outputting to port 2."""
    CONFIG_GLOBAL = '\nvlans:\n    100:\n        description: "untagged"\n        unicast_flood: False\nacls:\n    1:\n        - rule:\n            dl_dst: "01:02:03:04:05:06"\n            actions:\n                output:\n                    set_fields:\n                        - eth_dst: "06:06:06:06:06:06"\n                    vlan_vids: [123, 456]\n                    port: %(port_2)d\n'
    CONFIG = '\n        interfaces:\n            %(port_1)d:\n                native_vlan: 100\n                acl_in: 1\n            %(port_2)d:\n                native_vlan: 100\n            %(port_3)d:\n                native_vlan: 100\n            %(port_4)d:\n                native_vlan: 100\n'

    def test_untagged(self):
        """Pinging the static ARP entry must arrive double-tagged at port 2."""
        (first_host, second_host) = self.hosts_name_ordered()[0:2]
        tcpdump_filter = 'vlan'
        # Install a static ARP entry matching the ACL's dl_dst, then ping.
        tcpdump_txt = self.tcpdump_helper(second_host, tcpdump_filter, [(lambda : first_host.cmd(('arp -s %s %s' % (second_host.IP(), '01:02:03:04:05:06')))), (lambda : first_host.cmd(' '.join((self.FPINGS_ARGS_ONE, second_host.IP()))))])
        self.assertTrue(re.search(('%s: ICMP echo request' % second_host.IP()), tcpdump_txt))
        # Outer tag 456 must appear before inner tag 123 in the capture.
        self.assertTrue(re.search('vlan 456.+vlan 123', tcpdump_txt))
class BarcodePrimerTrieTestCase(unittest.TestCase):
    """Tests for barcode/primer trie construction and prefix matching."""

    def setUp(self):
        # 9 barcodes; the last four carry an ambiguous primer (CAYGGCTA, Y = C/T).
        self.barcode_str = 'p1d1bc205,TACTAGCG,CATTGCCTATG\np1d1bc206,TACTCGTC,CATTGCCTATG\np1d1bc207,TACTGTGC,CATTGCCTATG\np1d1bc208,TACTGCAG,CATTGCCTATG\np1d1bc209,TACACAGC,CATTGCCTATG\np1d1bc210,TACAGTCG,CAYGGCTA\np1d1bc211,TACGTACG,CAYGGCTA\np1d1bc212,TACGTCTC,CAYGGCTA\np1d1bc213,TACGAGAC,CAYGGCTA'
        self.fp = StringIO(self.barcode_str)

    def test_primer_provided(self):
        """An explicit primer overrides the per-row primers from the file."""
        res = quality_filter.parse_barcode_file(self.fp, primer='CATTGCCTATG')
        # One trie key per barcode when the primer is fixed.
        self.assertEqual(9, len(list(res.keys())))
        self.assertEqual('p1d1bc210', res['TACAGTCGCATTGCCTATG'])
        # A truncated key must not match; a key with trailing sequence must.
        self.assertEqual(None, quality_filter.trie_match('TACAGTCGCATTGCCTAT', res))
        self.assertEqual('TACAGTCGCATTGCCTATG', quality_filter.trie_match('TACAGTCGCATTGCCTATGCTACCTA', res))

    def test_primer_in_file(self):
        """Ambiguous primer bases expand (Y -> C/T), adding extra trie keys."""
        res = quality_filter.parse_barcode_file(self.fp, primer=None)
        # 5 unambiguous + 4 ambiguous barcodes expanded twice = 13 keys.
        self.assertEqual(13, len(list(res.keys())))
        self.assertEqual('p1d1bc212', res['TACGTCTCCATGGCTA'])
        self.assertEqual('p1d1bc212', res['TACGTCTCCACGGCTA'])
        # Y expands only to C/T, never to A/G.
        self.assertIsNone(res.get('TACGTCTCCAAGGCTA'))
        self.assertIsNone(res.get('TACGTCTCCAGGGCTA'))
class DraggableTabBar(QtGui.QTabBar):
    """Tab bar whose tabs can be dragged around/out within the editor area."""

    def __init__(self, editor_area, parent):
        super().__init__(parent)
        self.editor_area = editor_area
        self.setContextMenuPolicy(QtCore.Qt.ContextMenuPolicy.DefaultContextMenu)
        self.drag_obj = None  # pending TabDragObject between press and drop

    def mousePressEvent(self, event):
        """Arm a potential drag when a real (non-placeholder) tab is pressed."""
        if (event.button() == QtCore.Qt.MouseButton.LeftButton):
            index = self.tabAt(event.pos())
            tabwidget = self.parent()
            # Ignore presses on the empty placeholder widget.
            if (tabwidget.widget(index) and (not (tabwidget.widget(index) == tabwidget.empty_widget))):
                self.drag_obj = TabDragObject(start_pos=event.pos(), tabBar=self)
        return super().mousePressEvent(event)

    def mouseMoveEvent(self, event):
        """Start the Qt drag once the pointer moves past the platform threshold."""
        if self.drag_obj:
            if (not (event.buttons() == QtCore.Qt.MouseButton.LeftButton)):
                pass
            elif ((event.pos() - self.drag_obj.start_pos).manhattanLength() < QtGui.QApplication.startDragDistance()):
                # Below the drag-start distance: not a drag yet.
                pass
            else:
                drag = QtGui.QDrag(self.drag_obj.widget)
                mimedata = PyMimeData(data=self.drag_obj, pickle=False)
                drag.setPixmap(self.drag_obj.get_pixmap())
                drag.setHotSpot(self.drag_obj.get_hotspot())
                drag.setMimeData(mimedata)
                # Qt6 renamed exec_ to exec; support both bindings.
                if hasattr(drag, 'exec'):
                    drag.exec()
                else:
                    drag.exec_()
                self.drag_obj = None
                return
        return super().mouseMoveEvent(event)

    def mouseReleaseEvent(self, event):
        """Disarm any pending drag."""
        self.drag_obj = None
        return super().mouseReleaseEvent(event)
def _Popen(*args, **kwargs): customArgs = {} allowlist = ['env', 'shell'] for arg in allowlist: if (arg in kwargs): customArgs[arg] = kwargs[arg] ps = subprocess.Popen(*args, bufsize=(- 1), stdout=subprocess.PIPE, stderr=subprocess.STDOUT, universal_newlines=True, preexec_fn=os.setsid, errors='replace', **customArgs) return ps
class Sx3():
    """Scraper for an SX3/CCMA programme page: resolves the programme id from
    the page, then pulls title/description/artwork from the public API.

    NOTE(review): the API endpoint URL string literals were stripped from this
    file during extraction (they were left as unterminated quotes, a syntax
    error). They are reinstated here as placeholder constants and MUST be
    restored from the original source before use.
    """
    # TODO(review): restore — programme-metadata endpoint (keyed by self.id).
    API_SERIE_URL = ''
    # TODO(review): restore — video-metadata endpoint (keyed by the episode id).
    API_VIDEO_URL = ''
    # Shared browser-like User-Agent (the site rejects default clients).
    _HEADERS = {'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/39.0.2171.95 Safari/537.36'}

    def __init__(self, channel=False):
        self.channel = channel
        # Second-to-last path segment identifies the programme folder.
        self.channel_folder = self.channel.split('/')[(- 2)]
        self.id = self.get_id()
        self.api_data = self.get_api_response_data()
        programme = self.api_data['resposta']['items']['item'][0]['programes_tv'][0]
        self.name = programme['titol']
        self.description = programme['desc']
        # Fetch images once instead of calling get_images() twice.
        images = self.get_images()
        self.poster = images['poster']
        self.landscape = images['landscape']

    def get_api_response_data(self):
        """Fetch and decode the programme-metadata API payload."""
        api_serie_response = requests.get(self.API_SERIE_URL, headers=self._HEADERS)
        return json.loads(api_serie_response.text)

    def get_id(self):
        """Scrape the first episode link from the page and resolve its parent
        programme id through the video API."""
        page = requests.get(self.channel, headers=self._HEADERS)
        soup = BeautifulSoup(page.content, 'html.parser')
        element = soup.find('div', {'class': 'titolMedia'}).find('a')
        first_episode_id = element['href'].strip('/').split('/')[(- 1)]
        api_video_response = requests.get(self.API_VIDEO_URL, headers=self._HEADERS)
        api_video_response_data = json.loads(api_video_response.text)
        return api_video_response_data['informacio']['programa_id']

    def get_images(self):
        """Pick poster (320x466) and landscape (1600x284) artwork URLs.

        Missing sizes yield None instead of the original's NameError.
        """
        poster = None
        landscape = None
        for image in self.api_data['resposta']['items']['item'][0]['programes_tv'][0]['imatges']:
            if (image['mida'] == '320x466'):
                poster = image['text']
            if (image['mida'] == '1600x284'):
                landscape = image['text']
        return {'poster': poster, 'landscape': landscape}
# NOTE(review): decorator residues — presumably '@<apiv3>_ns.route(...)' and
# '@api_login_required' (or similar); confirm against the original source.
_ns.route('/package/reset', methods=PUT)
_login_required
def package_reset():
    """API endpoint: reset a package's builds/state within the current copr."""
    copr = get_copr()
    form = forms.BasePackageForm()
    try:
        # PackagesLogic.get returns a query; [0] raises IndexError when empty.
        package = PackagesLogic.get(copr.id, form.package_name.data)[0]
    except IndexError:
        raise ObjectNotFound('No package with name {name} in copr {copr}'.format(name=form.package_name.data, copr=copr.name))
    PackagesLogic.reset_package(flask.g.user, package)
    db.session.commit()
    return flask.jsonify(to_dict(package))
class NumpyArrayWrapper(Wrapper):
    """Wraps a raw numpy array so it can be plotted/converted like other wrappers."""

    def __init__(self, data, *args, **kwargs):
        self.data = data

    def plot_map(self, backend):
        """Plot the array on a map using field metadata from the backend's wrapper."""
        wrapper = get_wrapper(backend.option('metadata'))
        # Delegate entirely when the metadata wrapper can plot numpy itself.
        if hasattr(wrapper, 'plot_numpy'):
            return wrapper.plot_numpy(backend, self.data)
        metadata = wrapper.field_metadata()
        backend.bounding_box(north=metadata['north'], south=metadata['south'], west=metadata['west'], east=metadata['east'])
        # Reshape to the metadata's grid shape when one is provided.
        backend.plot_numpy(self.data.reshape(metadata.get('shape', self.data.shape)), metadata=metadata)

    def to_datetime_list(self):
        """Interpret the array as epoch nanoseconds; return aware UTC datetimes."""
        return [datetime.datetime.fromtimestamp((x * 1e-09), tz=datetime.timezone.utc) for x in self.data.tolist()]
def test_attribute():
    """Attribute supports equality, str() formatting, and protobuf round-trip."""
    params = dict(name='test', type_=str, is_required=True)
    attribute = Attribute(**params)
    assert attribute is not None
    # Equality is structural: same params equal, different params not.
    assert attribute == Attribute(**params)
    assert attribute != Attribute(name='another', type_=int, is_required=True)
    assert str(attribute) == "Attribute(name=test,type=<class 'str'>,is_required=True)"
    # encode/decode must round-trip to an equal object.
    decoded = Attribute.decode(attribute.encode())
    assert decoded == attribute
class ContractDownloadValidator(DownloadValidatorBase):
    """Validates a contract-award download request and builds the download spec."""
    name = 'contract'

    def __init__(self, request_data: dict):
        super().__init__(request_data)
        # TinyShield rules: award_id may be an integer surrogate key or a
        # natural-key string; limit is bounded by the global download cap.
        self.tinyshield_models.extend([{'key': 'award_id', 'name': 'award_id', 'type': 'any', 'models': [{'type': 'integer'}, {'type': 'text', 'text_type': 'raw'}], 'optional': False, 'allow_nulls': False}, {'name': 'limit', 'key': 'limit', 'type': 'integer', 'min': 0, 'max': settings.MAX_DOWNLOAD_LIMIT, 'default': settings.MAX_DOWNLOAD_LIMIT}])
        self._json_request = request_data
        self._json_request = self.get_validated_request()
        # Resolve the request's award_id into the internal id and its PIID.
        (award_id, piid, _, _, _) = _validate_award_id(self._json_request.pop('award_id'))
        filters = {'award_id': award_id, 'award_type_codes': tuple(set(contract_type_mapping))}
        # Fixed download composition for contract awards: subawards,
        # transactions, and federal-account funding, plus README/data dictionary.
        self._json_request.update({'account_level': 'treasury_account', 'download_types': ['sub_contracts', 'contract_transactions', 'contract_federal_account_funding'], 'include_file_description': {'source': settings.CONTRACT_DOWNLOAD_README_FILE_PATH, 'destination': 'ContractAwardSummary_download_readme.txt'}, 'award_id': award_id, 'piid': piid, 'is_for_idv': False, 'is_for_contract': True, 'is_for_assistance': False, 'filters': filters, 'include_data_dictionary': True})
def check_no_lambdas(funcs: Sequence[Callable], entrypoint: str) -> None:
    """Reject lambda callables passed to *entrypoint*.

    Lambdas in a list comprehension close over the loop variable late, so
    every lambda would see the final value; this raises with a hint to use
    ``functools.partial`` instead.

    Raises:
        ValueError: if any element of *funcs* is a lambda.
    """
    # Fast path: nothing to report when no element is a lambda.
    if all(((not is_lambda(func)) for func in funcs)):
        return
    message = dedent(f'''
    lambda expression detected in {_truncate_seq_str(funcs, max_size=3)}
    HINT: Are you trying to do something like this...?
        arg = "example static arg"
        kwarg = "example static kwarg"
        await {entrypoint}(
            [lambda: async_fn(value, arg, kwarg=kwarg) for value in values]
        )
    This is almost always not what you want, as all lambdas would bind to the
    same `value` (the last one in the loop). Instead, use a `partial`:
        import functools
        await {entrypoint}(
            [
                functools.partial(async_fn, value, arg, kwarg=kwarg)
                for value in values
            ]
        )
    ''')
    raise ValueError(message)
def get_type_converter(items):
    """Build a converter that replaces falsy entries with the type-appropriate
    "empty" value (0 for ints, '' for strings, ...) inferred from *items*.

    For a flat iterable the result is a one-argument callable applying
    ``x or default``.  For an iterable of tuples/lists the result converts a
    whole tuple, one inferred default per column.  An empty *items* yields a
    converter that always returns None.
    """
    iterator = iter(items)

    def _empty_of(value):
        # None carries no type information; otherwise use the type's zero value.
        return None if value is None else type(value)()

    def _always_none(_):
        return None

    try:
        head = next(iterator)
    except StopIteration:
        # No sample values at all: nothing can be inferred.
        return _always_none

    if not isinstance(head, (tuple, list)):
        default = _empty_of(head)
        if default is None:
            # First sample was None; keep scanning until a typed value appears.
            for candidate in iterator:
                if candidate is not None:
                    default = _empty_of(candidate)
                    break
        return lambda x: (x or default)

    # Column-wise defaults for tuple rows; stop once every column is resolved.
    defaults = [_empty_of(value) for value in head]
    for row in iterator:
        for (idx, value) in enumerate(row):
            if (value is not None) and (defaults[idx] is None):
                defaults[idx] = type(value)()
        if all((d is not None) for d in defaults):
            break

    def convert_types(tup):
        return tuple((tup[idx] or default) for (idx, default) in enumerate(defaults))

    return convert_types
def _need_maintenance_ignore_users(request):
    """Decide whether maintenance mode should be bypassed for this request's user.

    Returns False when the user matches one of the configured ignore rules
    (anonymous / authenticated / staff / superuser); otherwise falls through.

    NOTE(review): the early exit (no ``user`` attribute) and the fall-through
    return None rather than an explicit boolean — callers presumably treat
    None as "no decision"; confirm before normalizing the returns.
    """
    if (not hasattr(request, 'user')):
        return
    user = request.user
    # Optionally force-logout authenticated users, then re-read the (now
    # anonymous) user from the request.
    if (settings.MAINTENANCE_MODE_LOGOUT_AUTHENTICATED_USER and user.is_authenticated):
        logout(request)
        user = request.user
    if (settings.MAINTENANCE_MODE_IGNORE_ANONYMOUS_USER and user.is_anonymous):
        return False
    if (settings.MAINTENANCE_MODE_IGNORE_AUTHENTICATED_USER and user.is_authenticated):
        return False
    if (settings.MAINTENANCE_MODE_IGNORE_STAFF and user.is_staff):
        return False
    if (settings.MAINTENANCE_MODE_IGNORE_SUPERUSER and user.is_superuser):
        return False
# NOTE(review): the decorator prefix (likely "@pytest.mark") appears to have
# been stripped by extraction; restore "@pytest.mark.parametrize" upstream.
.parametrize('keep_last, ref_cycle', [(0, 9), (2, 11), (4, 13), (6, 15), (8, 17)])
def test_anapot_growing_string(keep_last, ref_cycle):
    """Growing-string optimization on the AnaPot surface must converge in the
    expected number of cycles for each ``keep_last`` history length."""
    initial = AnaPot.get_geom(((- 1.05274), 1.02776, 0))
    final = AnaPot.get_geom((1.94101, 3.85427, 0))
    geoms = (initial, final)
    gs_kwargs = {'perp_thresh': 0.5, 'reparam_check': 'rms'}
    # Each image gets its own AnaPot calculator via the factory lambda.
    gs = GrowingString(geoms, (lambda : AnaPot()), **gs_kwargs)
    opt_kwargs = {'stop_in_when_full': keep_last, 'keep_last': keep_last}
    opt = StringOptimizer(gs, **opt_kwargs)
    opt.run()
    assert opt.is_converged
    assert (opt.cur_cycle == ref_cycle)
class OptionSeriesSunburstTooltipDatetimelabelformats(Options):
    """Accessors for the sunburst-series tooltip ``dateTimeLabelFormats``
    options (one getter/setter pair per time unit).

    NOTE(review): every name below is defined twice (getter, then setter);
    the ``@property`` / ``@<name>.setter`` decorators appear to have been
    stripped by extraction — confirm against the original file.
    """
    def day(self):
        return self._config_get('%A, %e %b %Y')
    def day(self, text: str):
        self._config(text, js_type=False)
    def hour(self):
        return self._config_get('%A, %e %b, %H:%M')
    def hour(self, text: str):
        self._config(text, js_type=False)
    def millisecond(self):
        return self._config_get('%A, %e %b, %H:%M:%S.%L')
    def millisecond(self, text: str):
        self._config(text, js_type=False)
    def minute(self):
        return self._config_get('%A, %e %b, %H:%M')
    def minute(self, text: str):
        self._config(text, js_type=False)
    def month(self):
        return self._config_get('%B %Y')
    def month(self, text: str):
        self._config(text, js_type=False)
    def second(self):
        return self._config_get('%A, %e %b, %H:%M:%S')
    def second(self, text: str):
        self._config(text, js_type=False)
    def week(self):
        return self._config_get('Week from %A, %e %b %Y')
    def week(self, text: str):
        self._config(text, js_type=False)
    def year(self):
        return self._config_get('%Y')
    def year(self, text: str):
        self._config(text, js_type=False)
# NOTE(review): the decorator prefix (likely "@pytest.mark") appears to have
# been stripped by extraction; restore "@pytest.mark.parametrize" upstream.
.parametrize('interfrag_hbonds', [True])
def test_interfragment_hydrogen_bonds(interfrag_hbonds):
    """Redundant-coordinate setup detects hydrogen bonds between fragments
    exactly when the interfrag_hbonds flag is enabled."""
    geom = geom_loader('lib:interfrag_hydrogen_bond.xyz')
    coord_info = setup_redundant(geom.atoms, geom.coords3d, interfrag_hbonds=interfrag_hbonds)
    # Flag on  -> at least one hydrogen bond found; flag off -> none.
    assert (bool(len(coord_info.hydrogen_bonds)) == interfrag_hbonds)
def verify_ping(fledge_url, skip_verify_north_interface, wait_time, retries):
    """Poll the Fledge ping endpoint until south data is read and (unless
    skipped) north data has been sent; returns the final ping payload.

    NOTE(review): nesting below is reconstructed from a whitespace-flattened
    source — confirm that the retry loop and its assert sit inside the
    ``skip_verify_north_interface`` guard.
    """
    get_url = '/fledge/ping'
    ping_result = utils.get_request(fledge_url, get_url)
    assert ('dataRead' in ping_result)
    assert ('dataSent' in ping_result)
    assert (0 < ping_result['dataRead']), 'South data NOT seen in ping header'
    retry_count = 1
    sent = 0
    if (not skip_verify_north_interface):
        # Re-poll until at least one reading has been sent north, up to
        # `retries` attempts with `wait_time` seconds between polls.
        while (retries > retry_count):
            sent = ping_result['dataSent']
            if (sent >= 1):
                break
            else:
                time.sleep(wait_time)
                retry_count += 1
                ping_result = utils.get_request(fledge_url, get_url)
        assert (1 <= sent), 'Failed to send data via PI Web API using Basic auth'
    return ping_result
def about(request):
    """Render the 'about' page backed by the article with nid=37, bumping
    its view counter; redirects home when the article is missing."""
    article_query = Articles.objects.filter(nid=37)
    # F() keeps the increment atomic at the database level.
    # NOTE(review): the counter is incremented before existence is checked,
    # and the template context is passed via locals() — both worth revisiting.
    article_query.update(look_count=(F('look_count') + 1))
    if (not article_query):
        return redirect('/')
    article_obj: Articles = article_query.first()
    comment_list = sub_comment_list(37)
    # locals() exposes article_obj and comment_list to the template.
    return render(request, 'about.html', locals())
def webapi_run_async_adjoint_bwd(simulations: Tuple[(Simulation, ...)], jax_infos: Tuple[(JaxInfo, ...)], folder_name: str, path_dir: str, callback_url: str, verbose: bool, parent_tasks: List[List[str]]) -> List[JaxSimulation]:
    """Run a batch of backward (VJP) adjoint simulations remotely and return
    the downloaded VJP results, one per input simulation, in input order.

    NOTE(review): ``path_dir`` is accepted but never used here — confirm
    whether it is needed (e.g. for download locations) or vestigial.
    """
    # Key every simulation by its index so batch bookkeeping stays ordered.
    task_names = [str(i) for i in range(len(simulations))]
    simulations = dict(zip(task_names, simulations))
    jax_infos = dict(zip(task_names, jax_infos))
    parent_tasks = [tuple(task_ids) for task_ids in parent_tasks]
    parent_tasks_dict = dict(zip(task_names, parent_tasks))
    batch = AdjointBatch(simulations=simulations, jax_infos=jax_infos, folder_name=folder_name, callback_url=callback_url, verbose=verbose, simulation_type='adjoint_bwd', parent_tasks=parent_tasks_dict)
    batch.start()
    # Block until every job in the batch completes.
    batch.monitor()
    sims_vjp = []
    for (_, job) in batch.jobs.items():
        task_id = job.task_id
        sim_vjp = download_sim_vjp(task_id=task_id, verbose=verbose)
        sims_vjp.append(sim_vjp)
    return sims_vjp
class BoundFunctionBase(ir.MemberLoad, CallableImpl, ABC):
    """Base for IR nodes representing a member function bound to an
    expression (e.g. ``expr.f``), carrying its CFG and call gas/value."""

    def dont_copy(self):
        # These attributes must not be deep-copied along with the node.
        # NOTE(review): likely a @property whose decorator was stripped —
        # it extends super().dont_copy, which reads like a list attribute.
        return (super().dont_copy + ['member_access', 'bound_expression', 'bound_cfg', 'member_load_args'])

    def __init__(self, member_access, bound_expression, bound_cfg, member_load_args):
        ir.MemberLoad.__init__(self, **member_load_args)
        CallableImpl.__init__(self)
        self.member_access = member_access
        self.bound_expression = bound_expression
        self.bound_cfg = bound_cfg
        # Explicit {value:}/{gas:} options attached to the call, if any.
        self.value = None
        self.gas = None

    def get_gas(self, call_info):
        """Return (gas expression, CFG to evaluate it); defaults to GasLeft."""
        if self.gas:
            return (self.gas, CfgSimple.empty())
        gas = ir.GasLeft(call_info.ast_node)
        return (gas, CfgSimple.statement(gas))

    def get_value(self, call_info):
        """Return (value expression, CFG to evaluate it); defaults to 0 wei."""
        if self.value:
            return (self.value, CfgSimple.empty())
        value = ir.Const(call_info.ast_node, 0, 'uint256')
        return (value, CfgSimple.statement(value))
def test_slack_loader_load_data(slack_loader, mocker):
    """load_data returns a dict with doc_id and data keys even for an empty
    search result (search_messages is mocked out)."""
    valid_json_query = 'in:random'
    mocker.patch.object(slack_loader.client, 'search_messages', return_value={'messages': {}})
    result = slack_loader.load_data(valid_json_query)
    assert ('doc_id' in result)
    assert ('data' in result)
def test_plotting_quadratic():
    """tricontour handles a degree-2 (CG2) function without error."""
    mesh = UnitSquareMesh(10, 10)
    V = FunctionSpace(mesh, 'CG', 2)
    f = Function(V)
    x = SpatialCoordinate(mesh)
    # Smooth radially-symmetric field: x^2 + y^2.
    f.interpolate(((x[0] ** 2) + (x[1] ** 2)))
    (fig, axes) = plt.subplots()
    contours = tricontour(f, axes=axes)
    assert (contours is not None)
class Application(Gtk.Application):
    """GTK application for Gnofract 4D: loads the gresource bundle, sets up
    preferences/compiler on startup, and shows the main window on activate."""

    def __init__(self, options, userConfig):
        super().__init__(application_id='io.github.fract4d')
        self.mainWindow = None
        self.options = options
        self.userConfig = userConfig
        # Register bundled UI resources before any window is built.
        resource = Gio.resource_load(fractconfig.T.find_resource('gnofract4d.gresource', ''))
        Gio.resources_register(resource)

    def do_startup(self):
        Gtk.Application.do_startup(self)
        self.userPrefs = preferences.Preferences(self.userConfig)
        self.compiler = fc.Compiler(self.userConfig)
        # Extra formula search paths supplied on the command line.
        for path in self.options.extra_paths:
            self.compiler.add_func_path(path)

    def do_activate(self):
        # Lazily create the window; repeated activations just re-present it.
        if (not self.mainWindow):
            self.mainWindow = MainWindow(self)
            self.mainWindow.apply_options(self.options)
            # Defer the first render until the main loop is idle.
            GLib.idle_add(self.mainWindow.first_draw)
        self.mainWindow.present()
def test_weird_structure():
    """Nested hparams classes parse correctly: inherited fields and nested
    default-factory fields all receive their defaults.

    NOTE(review): ConvBlock and the HParams classes use dataclasses
    ``field(...)`` defaults but show no ``@dataclass`` decorator — the
    decorators appear stripped by extraction; confirm upstream.
    """
    class ConvBlock():
        n_layers: int = 4
        n_filters: List[int] = field(default_factory=[16, 32, 64, 64].copy)
    from enum import Enum
    class Optimizers(Enum):
        ADAM = 'ADAM'
        RMSPROP = 'RMSPROP'
        SGD = 'SGD'
    class GeneratorHParams(ConvBlock):
        conv: ConvBlock = field(default_factory=ConvBlock)
        optimizer: Optimizers = field(default=Optimizers.ADAM)
    class DiscriminatorHParams(ConvBlock):
        conv: ConvBlock = field(default_factory=ConvBlock)
        optimizer: Optimizers = field(default=Optimizers.ADAM)
    class SomeWeirdClass(TestSetup):
        gen: GeneratorHParams = field(default_factory=GeneratorHParams)
        disc: DiscriminatorHParams = field(default_factory=DiscriminatorHParams)
    s = SomeWeirdClass.setup()
    # Defaults must propagate both to the nested conv blocks and the
    # inherited n_layers on the HParams classes themselves.
    assert (s.gen.conv.n_layers == 4)
    assert (s.gen.n_layers == 4)
    assert (s.disc.conv.n_layers == 4)
    assert (s.disc.n_layers == 4)
class BlockHeader(Serializable):
    """Ethereum-style block header.

    ``fields`` pairs each attribute with its RLP sedes; the order of this
    list defines the canonical RLP encoding layout, so do not reorder.
    """
    fields = [('prevhash', hash32), ('uncles_hash', hash32), ('coinbase', address), ('state_root', trie_root), ('tx_list_root', trie_root), ('receipts_root', trie_root), ('bloom', int256), ('difficulty', big_endian_int), ('number', big_endian_int), ('gas_limit', big_endian_int), ('gas_used', big_endian_int), ('timestamp', big_endian_int), ('extra_data', binary), ('mixhash', binary), ('nonce', binary)]
class PerceiverResampler(fl.Chain):
    """Perceiver-style resampler: projects input features to the latent
    width, attends a fixed set of learned latent tokens over them through a
    stack of attention layers, then projects to the output width."""

    def __init__(self, latents_dim: int=1024, num_attention_layers: int=8, num_attention_heads: int=16, head_dim: int=64, num_tokens: int=8, input_dim: int=768, output_dim: int=1024, device: ((Device | str) | None)=None, dtype: (DType | None)=None) -> None:
        # Attributes are assigned before super().__init__ so the chain's
        # submodules (built below) can reference them.
        self.latents_dim = latents_dim
        self.num_attention_layers = num_attention_layers
        self.head_dim = head_dim
        self.num_attention_heads = num_attention_heads
        self.num_tokens = num_tokens
        self.input_dim = input_dim
        self.output_dim = output_dim
        # Conventional 4x MLP expansion inside each transformer layer.
        self.feedforward_dim = (4 * self.latents_dim)
        super().__init__(
            fl.Linear(in_features=input_dim, out_features=latents_dim, device=device, dtype=dtype),
            # Stash the projected inputs so each attention layer can read them.
            fl.SetContext(context='perceiver_resampler', key='x'),
            LatentsToken(num_tokens, latents_dim, device=device, dtype=dtype),
            Transformer((TransformerLayer(
                # Latents attend over (inputs, latents) via the parallel branch.
                fl.Residual(fl.Parallel(fl.UseContext(context='perceiver_resampler', key='x'), fl.Identity()), PerceiverAttention(embedding_dim=latents_dim, head_dim=head_dim, num_heads=num_attention_heads, device=device, dtype=dtype)),
                fl.Residual(fl.LayerNorm(normalized_shape=latents_dim, device=device, dtype=dtype), FeedForward(embedding_dim=latents_dim, feedforward_dim=self.feedforward_dim, device=device, dtype=dtype))
            ) for _ in range(num_attention_layers))),
            fl.Linear(in_features=latents_dim, out_features=output_dim, device=device, dtype=dtype),
            fl.LayerNorm(normalized_shape=output_dim, device=device, dtype=dtype))

    def init_context(self) -> Contexts:
        # Context slot filled by SetContext at the head of the chain.
        return {'perceiver_resampler': {'x': None}}
class OptionSonificationDefaultinstrumentoptionsMappingLowpassResonance(Options):
    """Accessors for the sonification default-instrument lowpass-resonance
    mapping options (all defaults are None, i.e. unset).

    NOTE(review): every name below is defined twice (getter, then setter);
    the ``@property`` / ``@<name>.setter`` decorators appear to have been
    stripped by extraction — confirm against the original file.
    """
    def mapFunction(self):
        return self._config_get(None)
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)
    def mapTo(self):
        return self._config_get(None)
    def mapTo(self, text: str):
        self._config(text, js_type=False)
    def max(self):
        return self._config_get(None)
    def max(self, num: float):
        self._config(num, js_type=False)
    def min(self):
        return self._config_get(None)
    def min(self, num: float):
        self._config(num, js_type=False)
    def value(self):
        return self._config_get(None)
    def value(self, num: float):
        self._config(num, js_type=False)
    def within(self):
        return self._config_get(None)
    def within(self, value: Any):
        self._config(value, js_type=False)
# NOTE(review): "_in_both(MyObject)" reads like a decorator whose "@" prefix
# (and possibly a name prefix) was stripped by extraction — confirm upstream.
_in_both(MyObject)
def test_property_tristate():
    """Exercise a tri-state property through int, empty-string, and None
    values, pumping the event loop between mutations."""
    m = MyObject()
    print(m.tristateprop)
    m.set_tristateprop(42)
    loop.iter()
    print(m.tristateprop)
    m.set_tristateprop('')
    loop.iter()
    print(m.tristateprop)
    m.set_tristateprop(None)
    loop.iter()
    print(m.tristateprop)
def _patch_info_sequential(detailed, fsize, patch_size, compression, compression_info, dfpatch_size, data_format, dfpatch_info, to_size, diff_sizes, extra_sizes, adjustment_sizes, number_of_size_bytes):
    """Print a human-readable report for a sequential patch: sizes, ratios,
    compression, and per-diff/per-extra statistics.

    *fsize* formats a byte count for display; *detailed* additionally dumps
    the data-format details when a data-format patch is present.
    """
    # Not used for the sequential report.
    del adjustment_sizes
    number_of_diff_bytes = sum(diff_sizes)
    number_of_extra_bytes = sum(extra_sizes)
    number_of_data_bytes = (number_of_diff_bytes + number_of_extra_bytes)
    size_data_ratio = _format_ratio(number_of_size_bytes, number_of_data_bytes)
    patch_to_ratio = _format_ratio(patch_size, to_size)
    diff_extra_ratio = _format_ratio(number_of_diff_bytes, number_of_extra_bytes)
    compression = _format_compression(compression, compression_info)
    # Guard against empty lists: mean()/median() raise on no data.
    if diff_sizes:
        mean_diff_size = fsize(int(mean(diff_sizes)))
        median_diff_size = fsize(int(median(diff_sizes)))
    else:
        mean_diff_size = '-'
        median_diff_size = '-'
    if extra_sizes:
        mean_extra_size = fsize(int(mean(extra_sizes)))
        median_extra_size = fsize(int(median(extra_sizes)))
    else:
        mean_extra_size = '-'
        median_extra_size = '-'
    print('Type: sequential')
    print('Patch size: {}'.format(fsize(patch_size)))
    print('To size: {}'.format(fsize(to_size)))
    print('Patch/to ratio: {} % (lower is better)'.format(patch_to_ratio))
    print('Diff/extra ratio: {} % (higher is better)'.format(diff_extra_ratio))
    print('Size/data ratio: {} % (lower is better)'.format(size_data_ratio))
    print('Compression: {}'.format(compression))
    print('Data format size: {}'.format(fsize(dfpatch_size)))
    if (dfpatch_size > 0):
        print('Data format: {}'.format(data_format))
    print()
    print('Number of diffs: {}'.format(len(diff_sizes)))
    print('Total diff size: {}'.format(fsize(sum(diff_sizes))))
    print('Average diff size: {}'.format(mean_diff_size))
    print('Median diff size: {}'.format(median_diff_size))
    print()
    print('Number of extras: {}'.format(len(extra_sizes)))
    print('Total extra size: {}'.format(fsize(sum(extra_sizes))))
    print('Average extra size: {}'.format(mean_extra_size))
    print('Median extra size: {}'.format(median_extra_size))
    if (detailed and (dfpatch_size > 0)):
        print()
        print('Data format details:')
        print()
        print(dfpatch_info)
def main():
    """CLI entry point: package a directory of .class files into a jar.

    Optionally excludes class-file patterns and touches a stamp file on
    success.
    """
    parser = optparse.OptionParser()
    parser.add_option('--classes-dir', help='Directory containing .class files.')
    parser.add_option('--jar-path', help='Jar output path.')
    parser.add_option('--excluded-classes', help='List of .class file patterns to exclude from the jar.')
    parser.add_option('--stamp', help='Path to touch on success.')
    parser.add_option('--jar-bin', default='jar', help='The jar binary. If empty, the jar binary is resolved from PATH.')
    options, _ = parser.parse_args()

    # An unset/empty --excluded-classes means "exclude nothing".
    excluded_classes = (
        build_utils.ParseGypList(options.excluded_classes)
        if options.excluded_classes
        else []
    )

    JarDirectory(options.classes_dir, excluded_classes, options.jar_path, options.jar_bin)

    if options.stamp:
        build_utils.Touch(options.stamp)
def lazy_import():
    """Import circular-dependency-prone model classes at call time and
    publish them into this module's globals."""
    from fastly.model.timestamps import Timestamps
    from fastly.model.tls_configuration_response_attributes_all_of import TlsConfigurationResponseAttributesAllOf
    globals().update(
        Timestamps=Timestamps,
        TlsConfigurationResponseAttributesAllOf=TlsConfigurationResponseAttributesAllOf,
    )
def test_no_record_stack_via_config(logger):
    """With auto_log_stacks disabled, a log event carries no stacktrace or
    exception data but still records message and param_message."""
    logger.client.config.auto_log_stacks = False
    logger.info('This is a test of no stacks')
    assert (len(logger.client.events) == 1)
    # ERROR is a module-level constant from the surrounding file.
    event = logger.client.events[ERROR][0]
    assert (event.get('culprit') == None)
    assert (event['log']['message'] == 'This is a test of no stacks')
    assert ('stacktrace' not in event['log'])
    assert ('exception' not in event)
    assert ('param_message' in event['log'])
    assert (event['log']['param_message'] == 'This is a test of no stacks')
def test_cors_disallowed_preflight(test_client_factory):
    """A CORS preflight with a disallowed origin/method/headers must be
    rejected with 400 and no allow-origin header.

    NOTE(review): the URL literals below were stripped by extraction and have
    been reconstructed (allowed origin 'https://example.org', disallowed
    origin 'https://another.org') — confirm against the original file.
    """
    def homepage(request):
        pass

    app = Starlette(
        routes=[Route('/', endpoint=homepage)],
        middleware=[Middleware(CORSMiddleware, allow_origins=['https://example.org'], allow_headers=['X-Example'])],
    )
    client = test_client_factory(app)

    # Origin, method, and header are all disallowed here.
    headers = {
        'Origin': 'https://another.org',
        'Access-Control-Request-Method': 'POST',
        'Access-Control-Request-Headers': 'X-Nope',
    }
    response = client.options('/', headers=headers)
    assert (response.status_code == 400)
    assert (response.text == 'Disallowed CORS origin, method, headers')
    assert ('access-control-allow-origin' not in response.headers)

    # Allowed origin and method, but the requested headers are disallowed.
    headers = {
        'Origin': 'https://example.org',
        'Access-Control-Request-Method': 'GET',
        'Access-Control-Request-Headers': 'X-Nope-1, X-Nope-2',
    }
    response = client.options('/', headers=headers)
    assert (response.text == 'Disallowed CORS headers')
class SearchsploitResult(Base):
    """SQLAlchemy model for a single searchsploit hit, with an aligned
    multi-column pretty-printer."""
    __tablename__ = 'searchsploit_result'

    def __str__(self):
        return self.pretty()

    def pretty(self, fullpath=False):
        """Format as 'type | filename | wrapped title' (default) or
        'type | wrapped title' plus the full path when *fullpath* is set.

        Long titles are wrapped; continuation lines are padded so the '|'
        separators stay column-aligned.
        """
        pad = ' '
        type_padlen = 8
        filename_padlen = 9
        if (not fullpath):
            filename = Path(self.path).name
            msg = f'{pad}{self.type:<{type_padlen}} | {filename:<{filename_padlen}}'
            for (i, line) in enumerate(textwrap.wrap(self.title)):
                if (i > 0):
                    # Continuation lines: pad past both columns before '|'.
                    msg += f'''{(' ' * ((type_padlen + filename_padlen) + 5))}|{(pad * 2)}{line}
'''
                else:
                    msg += f'''|{pad}{line}
'''
            # Drop the trailing newline added by the last wrapped line.
            msg = msg[:(- 1)]
        else:
            msg = f'{pad}{self.type:<{type_padlen}}'
            for (i, line) in enumerate(textwrap.wrap(self.title)):
                if (i > 0):
                    msg += f'''{(' ' * (type_padlen + 2))}|{(pad * 2)}{line}
'''
                else:
                    msg += f'''|{pad}{line}
'''
            msg += f"{(' ' * (type_padlen + 2))}|{pad}{self.path}"
        return msg

    id = Column(Integer, primary_key=True)
    title = Column(String, unique=True)
    path = Column(String)
    type = Column(String)
    target_id = Column(Integer, ForeignKey('target.id'))
    target = relationship('Target', back_populates='searchsploit_results')
def rerun_game(version: str) -> None:
    """Force-stop and relaunch the Battle Cats package for *version*.

    No-op unless running as root. The package name is derived from the
    version string ('jp' stripped, other region suffixes kept).
    """
    if (not is_ran_as_root()):
        return
    package_name = ('jp.co.ponos.battlecats' + version.replace('jp', ''))
    # Use argument lists (shell=False) so package_name is passed verbatim and
    # cannot be interpreted by a shell; commands are otherwise unchanged.
    subprocess.run(['sudo', 'pkill', '-f', package_name], capture_output=True, check=False)
    subprocess.run(['sudo', 'monkey', '-p', package_name, '-c', 'android.intent.category.LAUNCHER', '1'], capture_output=True, check=False)
def main():
    """Emit a Verilog top module instantiating MMCM/PLL primitives in clock
    regions that contain GTP transceivers, then randomly route their clocks
    through BUFHCEs (for FPGA bitstream fuzzing)."""
    cmt_clock_sources = ClockSources()
    site_to_cmt = dict(read_site_to_cmt())
    clock_region_limit = dict()
    clock_region_serdes_location = dict()
    db = Database(util.get_db_root(), util.get_part())
    grid = db.grid()

    def gen_sites(desired_site_type):
        # Yield (tile, site) pairs for every site of the requested type.
        for tile_name in sorted(grid.tiles()):
            loc = grid.loc_of_tilename(tile_name)
            gridinfo = grid.gridinfo_at_loc(loc)
            for (site, site_type) in gridinfo.sites.items():
                if (site_type == desired_site_type):
                    (yield (tile_name, site))

    clock_region_sites = set()

    def get_clock_region_site(site_type, clk_reg):
        # First not-yet-used site of site_type inside the clock region.
        # NOTE(review): nesting reconstructed from flattened source — confirm
        # the return sits inside the "not already used" branch.
        for (site_name, reg) in site_to_cmt.items():
            if (site_name.startswith(site_type) and (reg in clk_reg)):
                if (site_name not in clock_region_sites):
                    clock_region_sites.add(site_name)
                    return site_name

    # Clock regions that contain at least one GTP common block.
    cmt_with_gtp = set()
    for (tile_name, site) in gen_sites('GTPE2_COMMON'):
        cmt_with_gtp.add(site_to_cmt[site])
    print('module top();\n\n(* KEEP, DONT_TOUCH *)\nLUT6 dummy();\n')
    for (_, site) in gen_sites('MMCME2_ADV'):
        if (site_to_cmt[site] not in cmt_with_gtp):
            continue
        mmcm_clocks = ['mmcm_clock_{site}_{idx}'.format(site=site, idx=idx) for idx in range(7)]
        for clk in mmcm_clocks:
            cmt_clock_sources.add_clock_source(clk, site_to_cmt[site])
        # NOTE(review): the template drives .CLKOUT6({c6}) but the wire
        # declaration list stops at {c5} — c6 appears undeclared (legal only
        # via Verilog implicit nets); confirm whether {c6} is missing below.
        print('\n wire cin1_{site}, cin2_{site}, {c0}, {c1}, {c2}, {c3}, {c4}, {c5};\n (* KEEP, DONT_TOUCH, LOC = "{site}" *)\n MMCME2_ADV pll_{site} (\n .CLKIN1(cin1_{site}),\n .CLKIN2(cin2_{site}),\n .CLKOUT0({c0}),\n .CLKOUT1({c1}),\n .CLKOUT2({c2}),\n .CLKOUT3({c3}),\n .CLKOUT4({c4}),\n .CLKOUT5({c5}),\n .CLKOUT6({c6})\n );'.format(site=site, c0=mmcm_clocks[0], c1=mmcm_clocks[1], c2=mmcm_clocks[2], c3=mmcm_clocks[3], c4=mmcm_clocks[4], c5=mmcm_clocks[5], c6=mmcm_clocks[6]))
    for (_, site) in gen_sites('PLLE2_ADV'):
        if (site_to_cmt[site] not in cmt_with_gtp):
            continue
        pll_clocks = ['pll_clock_{site}_{idx}'.format(site=site, idx=idx) for idx in range(7)]
        for clk in pll_clocks:
            cmt_clock_sources.add_clock_source(clk, site_to_cmt[site])
        print('\n wire cin1_{site}, cin2_{site}, clkfbin_{site}, {c0}, {c1}, {c2}, {c3}, {c4}, {c5}, {c6};\n (* KEEP, DONT_TOUCH, LOC = "{site}" *)\n PLLE2_ADV pll_{site} (\n .CLKIN1(cin1_{site}),\n .CLKIN2(cin2_{site}),\n .CLKFBIN(clkfbin_{site}),\n .CLKOUT0({c0}),\n .CLKOUT1({c1}),\n .CLKOUT2({c2}),\n .CLKOUT3({c3}),\n .CLKOUT4({c4}),\n .CLKOUT5({c5}),\n .CLKFBOUT({c6})\n );'.format(site=site, c0=pll_clocks[0], c1=pll_clocks[1], c2=pll_clocks[2], c3=pll_clocks[3], c4=pll_clocks[4], c5=pll_clocks[5], c6=pll_clocks[6]))
    # Randomly hook ~70% of BUFHCEs in GTP regions to one of that region's
    # clock sources.
    for cmt in cmt_with_gtp:
        for (_, bufhce) in gen_sites('BUFHCE'):
            if (site_to_cmt[bufhce] != cmt):
                continue
            if (random.random() < 0.7):
                clock_name = cmt_clock_sources.get_random_source(cmt)
            else:
                continue
            if (clock_name is None):
                continue
            print_bufhce('{}'.format(bufhce), clock_name)
    print('endmodule')
class ConnectionManager():
    """Tracks active WebSocket connections and broadcasts text messages."""

    def __init__(self):
        # Connections in accept order; broadcast iterates in this order.
        self.connections = []

    async def connect(self, websocket):
        """Accept the handshake and start tracking *websocket*."""
        await websocket.accept()
        self.connections.append(websocket)

    async def broadcast(self, message):
        """Send *message* to every tracked connection, sequentially."""
        for active in self.connections:
            await active.send_text(message)

    def disconnect(self, websocket):
        """Stop tracking *websocket* (ValueError if it is not tracked)."""
        self.connections.remove(websocket)
def test_ne_conversions(accounts, return_value):
    """__ne__ must treat equivalent list/tuple container shapes as equal."""
    # string_fixture is a module-level value from the surrounding file.
    data = [88, [False, False, False], accounts[2], [('0x1234', '0x6666')], string_fixture]
    assert (not (return_value != data))
    assert (not (return_value != tuple(data)))
    # Swapping inner lists for tuples must not change the comparison.
    data[1] = tuple(data[1])
    data[3] = tuple(data[3])
    assert (not (return_value != tuple(data)))
class ExecuteGitCommandThread(threading.Thread):
    """Runs a git command in *repo* on a worker thread, draining stdout and
    stderr with dedicated reader threads and posting an Output record to
    *output_queue*."""

    def __init__(self, repo, cmd, output_queue):
        threading.Thread.__init__(self)
        self.repo = repo
        self.cmd = cmd
        self.output_queue = output_queue

    class ReaderThread(threading.Thread):
        """Drains one subprocess stream, keeping both an incremental buffer
        (consumed by GetPartialOutput) and the full transcript."""

        def __init__(self, stream):
            threading.Thread.__init__(self)
            self._output = []
            self._full_output = []
            self._stream = stream

        def GetPartialOutput(self):
            # Hand back what accumulated since the last call, then reset.
            output = self._output
            self._output = []
            return ''.join(output)

        def GetFullOutput(self):
            return ''.join(self._full_output)

        def run(self):
            try:
                for line in self._stream.readlines():
                    line = AsStr(line)
                    self._output.append(line)
                    self._full_output.append(line)
            except:
                # Reader failures must not kill the process; just report.
                import traceback
                traceback.print_exc()

    def _CreateReaderThread(self, p, stream_name):
        """Start a daemon ReaderThread over p.<stream_name> and return it."""
        stream = getattr(p, stream_name)
        thread = self.ReaderThread(stream)
        thread.setDaemon(True)
        thread.start()
        return thread

    def run(self, serial=False):
        """Execute the command; *serial* inherits the parent's stdio instead
        of capturing (used when running repos one at a time)."""
        repo = self.repo
        cmd = self.cmd
        msg = ' '.join((([START_COLOR, '\n', repo, ':'] + cmd) + [RESET_COLOR]))
        shell = UseShellOnSubprocess()
        if serial:
            Print(msg)
            p = None
            try:
                p = subprocess.Popen(cmd, cwd=repo, shell=shell)
            except:
                PrintError(((('Error executing: ' + ' '.join(cmd)) + ' on: ') + repo))
            if (p is not None):
                p.wait()
        else:
            try:
                p = subprocess.Popen(cmd, cwd=repo, stderr=subprocess.PIPE, stdout=subprocess.PIPE, shell=shell)
            except:
                import os
                PrintError(((((('Error executing: ' + ' '.join(cmd)) + ' on: ') + repo) + ' cwd: ') + os.path.abspath('.')))
                self.output_queue.put(Output(repo, ('Error executing: %s on repo: %s' % (cmd, repo)), '', ''))
                return
            self.stdout_thread = self._CreateReaderThread(p, 'stdout')
            self.stderr_thread = self._CreateReaderThread(p, 'stderr')
            p.wait()
            # Bounded joins: don't hang forever on a stuck stream.
            self.stdout_thread.join(2)
            self.stderr_thread.join(2)
            stdout = AsStr(self.stdout_thread.GetFullOutput())
            stderr = AsStr(self.stderr_thread.GetFullOutput())
            self._HandleOutput(msg, stdout, stderr)

    def GetPartialStderrOutput(self):
        # Returns None when capture never started (serial mode / spawn failure).
        stderr_thread = getattr(self, 'stderr_thread', None)
        if (stderr_thread is not None):
            return stderr_thread.GetPartialOutput()

    def GetFullStderrOutput(self):
        stderr_thread = getattr(self, 'stderr_thread', None)
        if (stderr_thread is not None):
            return stderr_thread.GetFullOutput()

    def GetFullStdoutOutput(self):
        stdout_thread = getattr(self, 'stdout_thread', None)
        if (stdout_thread is not None):
            return stdout_thread.GetFullOutput()

    def __str__(self):
        return ('${START_COLOR}%s : git %s${RESET_COLOR}' % (self.repo, ' '.join(self.cmd[1:])))

    def _HandleOutput(self, msg, stdout, stderr):
        """Post the command result: stdout when present, else stderr, else
        an explicit 'empty' marker."""
        stdout = stdout.strip()
        if (not stdout):
            if stderr:
                self.output_queue.put(Output(self.repo, ((msg + '\n') + Indent(stderr)), stdout, stderr))
            else:
                self.output_queue.put(Output(self.repo, ((msg + ': ') + 'empty'), stdout, stderr))
        else:
            self.output_queue.put(Output(self.repo, ((msg + '\n') + Indent(stdout)), stdout, stderr))
class Iface(fb303_asyncio.fb303.FacebookService.Iface):
    """Thrift service interface stubs for remote command execution on network
    devices: one-shot runs, bulk runs, and (raw) session management.
    Implementations override these; every stub here is a no-op."""

    def run(self, command=None, device=None, timeout=300, open_timeout=30, client_ip='', client_port='', uuid=''):
        """Run a single command on one device."""
        pass

    def bulk_run(self, device_to_commands=None, timeout=300, open_timeout=30, client_ip='', client_port='', uuid=''):
        """Run command lists across many devices, possibly via remote workers."""
        pass

    def bulk_run_local(self, device_to_commands=None, timeout=300, open_timeout=30, client_ip='', client_port='', uuid=''):
        """Run command lists across many devices on this instance only."""
        pass

    def open_session(self, device=None, open_timeout=60, idle_timeout=300, client_ip='', client_port='', uuid=''):
        """Open a persistent session to a device."""
        pass

    def run_session(self, session=None, command=None, timeout=300, client_ip='', client_port='', uuid=''):
        """Run a command on an existing session."""
        pass

    def close_session(self, session=None, client_ip='', client_port='', uuid=''):
        """Close a session opened with open_session."""
        pass

    def open_raw_session(self, device=None, open_timeout=60, idle_timeout=300, client_ip='', client_port='', uuid=''):
        """Open a raw (prompt-driven) session to a device."""
        pass

    def run_raw_session(self, session=None, command=None, timeout=300, prompt_regex=None, client_ip='', client_port='', uuid=''):
        """Run a command on a raw session, matching output against prompt_regex."""
        pass

    def close_raw_session(self, session=None, client_ip='', client_port='', uuid=''):
        """Close a session opened with open_raw_session."""
        pass
def _concat_kernel_single_input_output_param_size(op: Operator):
    """Estimate the kernel-parameter byte footprint of a concat op: one
    input-meta record plus one output-meta record per rank dimension, plus a
    fixed overhead."""
    inputs = op._attrs['inputs']
    # Rank is taken from the first input; concat inputs share rank.
    rank = inputs[0]._rank()
    size_of_one_output_meta = (CONCAT_OUTPUT_META_SIZE * rank)
    # 24: fixed per-kernel overhead — presumably three 8-byte fields; confirm
    # against the kernel's parameter struct.
    total_params_size = ((CONCAT_INPUT_META_SIZE + size_of_one_output_meta) + 24)
    _LOGGER.debug(f"concat op {op._attrs['name']}: total_params_size={total_params_size!r}")
    return total_params_size
# NOTE(review): the decorator prefix (likely "@pytest.mark") appears to have
# been stripped by extraction; restore "@pytest.mark.parametrize" upstream.
.parametrize('base, unrestricted', (('00_pyrrole_rhf', False), ('01_pyrrole_uhf', True)))
def test_transition_multipole_moments(base, unrestricted, this_dir):
    """Ground-state->excited and excited->excited transition dipole moments
    from ORCA CIS data must match stored reference values (RHF and UHF)."""
    base_fn = ((this_dir / 'pyrrole') / base)
    json_fn = base_fn.with_suffix('.json')
    cis_fn = base_fn.with_suffix('.cis')
    wf = Wavefunction.from_orca_json(json_fn)
    (Xa, Ya, Xb, Yb) = parse_orca_cis(cis_fn)
    (Xa, Ya) = norm_ci_coeffs(Xa, Ya)
    gs_es_args = [Xa]
    if unrestricted:
        # NOTE(review): Yb is passed where Ya might be expected in the second
        # argument — confirm the intended argument order of norm_ci_coeffs.
        (Xa, Ya, Xb, Yb) = norm_ci_coeffs(Xa, Yb, Xb, Yb)
        gs_es_args = [Xa, Xb]
        tdms_ref = np.array(((0.0, (- 0.0), 0.0), (0.0, (- 0.0), 0.0), (0.0, (- 0.0), 1e-05)))
        es2es_tdms_ref = np.array((((- 0.0), 0.0, (- 0.09534)), (0.0, 0.0, 0.0), ((- 0.0), 0.0, 0.0)))
    else:
        tdms_ref = np.array(((0.0, (- 0.0), 1e-05), (0.0, (- 0.0), (- 0.05814)), ((- 0.00079), 0.0129, (- 0.0))))
        es2es_tdms_ref = np.array(((0.5405, 0.03649, (- 0.0)), (0.0, (- 0.0), 0.0), ((- 0.0), 0.0, 0.07424)))
    tdms = wf.get_transition_dipole_moment(*gs_es_args)
    np.testing.assert_allclose(tdms, tdms_ref, atol=1e-05)
    # Build state-to-state transition densities for every excited-state pair.
    nstates = Xa.shape[0]
    estdens_a = list()
    estdens_b = list()
    for i in range(nstates):
        for j in range((i + 1), nstates):
            Xai = Xa[i]
            Xaj = Xa[j]
            estdena = get_state_to_state_transition_density(Xai, Xaj)
            estdens_a.append(estdena)
            if unrestricted:
                Xbi = Xb[i]
                Xbj = Xb[j]
                estdenb = get_state_to_state_transition_density(Xbi, Xbj)
                estdens_b.append(estdenb)
    estdens_a = np.array(estdens_a)
    estdens_b = np.array(estdens_b)
    args = [estdens_a]
    if unrestricted:
        args.append(estdens_b)
    es_tdms = wf.get_transition_dipole_moment(*args, full=True)
    np.testing.assert_allclose(es_tdms, es2es_tdms_ref, atol=1e-05)
# NOTE(review): the bare "_defaults()" below and the "_schema..." lines inside
# the class read like decorators whose "@" prefixes (e.g. @use_defaults(),
# @validates_schema) were stripped by extraction — confirm upstream.
_defaults()
class TicketSchemaPublic(SoftDeletionSchema):
    """marshmallow-jsonapi schema for public Ticket resources, with
    cross-field validation of dates, quantities, prices, and discount codes."""

    class Meta():
        type_ = 'ticket'
        self_view = 'v1.ticket_detail'
        self_view_kwargs = {'id': '<id>'}
        inflect = dasherize

    _schema(pass_original=True)
    def validate_date(self, data, original_data):
        """Sales window must be ordered; on update, missing bounds are
        backfilled from the stored ticket before checking."""
        if ('id' in original_data['data']):
            ticket = Ticket.query.filter_by(id=original_data['data']['id']).one()
            if ('sales_starts_at' not in data):
                data['sales_starts_at'] = ticket.sales_starts_at
            if ('sales_ends_at' not in data):
                data['sales_ends_at'] = ticket.sales_ends_at
        if (data['sales_starts_at'] >= data['sales_ends_at']):
            raise UnprocessableEntityError({'pointer': '/data/attributes/sales-ends-at'}, 'sales-ends-at should be after sales-starts-at')

    _schema
    def validate_quantity(self, data):
        """Order bounds and quantity must be mutually consistent and positive."""
        if (('max_order' in data) and ('min_order' in data)):
            if (data['max_order'] < data['min_order']):
                raise UnprocessableEntityError({'pointer': '/data/attributes/max-order'}, 'max-order should be greater than or equal to min-order')
        if (('quantity' in data) and ('min_order' in data)):
            if (data['quantity'] < data['min_order']):
                raise UnprocessableEntityError({'pointer': '/data/attributes/quantity'}, 'quantity should be greater than or equal to min-order')
        if (('min_price' in data) and ('max_price' in data) and (data['type'] == 'donation')):
            if (data['min_price'] > data['max_price']):
                raise UnprocessableEntityError({'pointer': '/data/attributes/min-price'}, 'minimum price should be lesser than or equal to maximum price')
        if (('quantity' in data) and ('max_order' in data)):
            if (data['quantity'] < data['max_order']):
                raise UnprocessableEntityError({'pointer': '/data/attributes/quantity'}, 'quantity should be greater than or equal to max-order')
        if (('quantity' in data) and (data['quantity'] <= 0)):
            raise UnprocessableEntityError({'pointer': '/data/attributes/quantity'}, 'quantity should be greater than 0')

    _schema
    def validate_price(self, data):
        """Paid tickets must carry a positive price."""
        if ('type' not in data):
            return
        if ((data['type'] == 'paid') and (('price' not in data) or (data['price'] <= 0))):
            raise UnprocessableEntityError({'pointer': 'data/attributes/price'}, 'paid ticket price should be greater than 0')

    _schema(pass_original=True)
    def validate_discount_code(self, data, original_data):
        """Every referenced discount code must exist."""
        if (('relationships' in original_data) and ('discount-codes' in original_data['data']['relationships'])):
            discount_codes = original_data['data']['relationships']['discount-codes']
            for code in discount_codes['data']:
                try:
                    DiscountCode.query.filter_by(id=code['id']).one()
                except NoResultFound:
                    raise UnprocessableEntityError({'pointer': '/data/relationships/discount-codes'}, 'Discount code does not exist')

    # --- attributes ---
    id = fields.Str(dump_only=True)
    name = fields.Str(required=True)
    description = fields.Str(allow_none=True)
    type = fields.Str(required=True)
    price = fields.Float(validate=(lambda n: (n >= 0)), allow_none=True)
    min_price = fields.Float(validate=(lambda n: (n >= 0)))
    max_price = fields.Float(validate=(lambda n: (n >= 0)), allow_none=True)
    quantity = fields.Integer(validate=(lambda n: (n >= 0)), allow_none=True)
    is_description_visible = fields.Boolean(default=False)
    position = fields.Integer(allow_none=True)
    is_fee_absorbed = fields.Boolean()
    sales_starts_at = fields.DateTime(required=True)
    sales_ends_at = fields.DateTime(required=True)
    is_hidden = fields.Boolean(default=False)
    min_order = fields.Integer(validate=(lambda n: (n >= 0)), allow_none=True)
    max_order = fields.Integer(validate=(lambda n: (n >= 0)), allow_none=True)
    is_checkin_restricted = fields.Boolean(default=True)
    auto_checkin_enabled = fields.Boolean(default=False)
    form_id = fields.Str(allow_none=True)
    badge_id = fields.Str(allow_none=True)

    # --- relationships ---
    event = Relationship(self_view='v1.ticket_event', self_view_kwargs={'id': '<id>'}, related_view='v1.event_detail', related_view_kwargs={'ticket_id': '<id>'}, schema='EventSchemaPublic', type_='event')
    ticket_tags = Relationship(attribute='tags', self_view='v1.ticket_ticket_tag', self_view_kwargs={'id': '<id>'}, related_view='v1.ticket_tag_list', related_view_kwargs={'ticket_id': '<id>'}, schema='TicketTagSchema', many=True, type_='ticket-tag')
    discount_codes = Relationship(self_view='v1.ticket_discount_codes', self_view_kwargs={'id': '<id>'}, related_view='v1.discount_code_list', related_view_kwargs={'ticket_id': '<id>'}, schema='DiscountCodeSchemaTicket', many=True, type_='discount-code')
# NOTE(review): "(tryfirst=True)" reads like a plugin-hook decorator (e.g.
# "@impl(tryfirst=True)") whose name and "@" were stripped by extraction —
# confirm upstream.
(tryfirst=True)
def flaskbb_load_blueprints(app):
    """FlaskBB plugin hook: build the 'forum' blueprint, register every forum
    view (both plain and slugged route variants), attach the login guard, and
    mount the blueprint under the configured URL prefix."""
    forum = Blueprint('forum', __name__)
    # Categories and forums.
    register_view(forum, routes=['/category/<int:category_id>', '/category/<int:category_id>-<slug>'], view_func=ViewCategory.as_view('view_category'))
    register_view(forum, routes=['/forum/<int:forum_id>/edit', '/forum/<int:forum_id>-<slug>/edit'], view_func=ManageForum.as_view('manage_forum'))
    register_view(forum, routes=['/forum/<int:forum_id>', '/forum/<int:forum_id>-<slug>'], view_func=ViewForum.as_view('view_forum'))
    register_view(forum, routes=['/<int:forum_id>/markread', '/<int:forum_id>-<slug>/markread'], view_func=MarkRead.as_view('markread'))
    # Topics.
    register_view(forum, routes=['/<int:forum_id>/topic/new', '/<int:forum_id>-<slug>/topic/new'], view_func=NewTopic.as_view('new_topic'))
    register_view(forum, routes=['/topic/<int:topic_id>/edit', '/topic/<int:topic_id>-<slug>/edit'], view_func=EditTopic.as_view('edit_topic'))
    register_view(forum, routes=['/memberlist'], view_func=MemberList.as_view('memberlist'))
    # Posts.
    register_view(forum, routes=['/post/<int:post_id>/delete'], view_func=DeletePost.as_view('delete_post'))
    register_view(forum, routes=['/post/<int:post_id>/edit'], view_func=EditPost.as_view('edit_post'))
    register_view(forum, routes=['/post/<int:post_id>/raw'], view_func=RawPost.as_view('raw_post'))
    register_view(forum, routes=['/post/<int:post_id>/report'], view_func=ReportView.as_view('report_post'))
    register_view(forum, routes=['/post/<int:post_id>'], view_func=ViewPost.as_view('view_post'))
    register_view(forum, routes=['/search'], view_func=Search.as_view('search'))
    # Topic moderation.
    register_view(forum, routes=['/topic/<int:topic_id>/delete', '/topic/<int:topic_id>-<slug>/delete'], view_func=DeleteTopic.as_view('delete_topic'))
    register_view(forum, routes=['/topic/<int:topic_id>/highlight', '/topic/<int:topic_id>-<slug>/highlight'], view_func=HighlightTopic.as_view('highlight_topic'))
    register_view(forum, routes=['/topic/<int:topic_id>/lock', '/topic/<int:topic_id>-<slug>/lock'], view_func=LockTopic.as_view('lock_topic'))
    # Replies and topic viewing.
    register_view(forum, routes=['/topic/<int:topic_id>/post/<int:post_id>/reply', '/topic/<int:topic_id>-<slug>/post/<int:post_id>/reply'], view_func=NewPost.as_view('reply_post'))
    register_view(forum, routes=['/topic/<int:topic_id>/post/new', '/topic/<int:topic_id>-<slug>/post/new'], view_func=NewPost.as_view('new_post'))
    register_view(forum, routes=['/topic/<int:topic_id>', '/topic/<int:topic_id>-<slug>'], view_func=ViewTopic.as_view('view_topic'))
    register_view(forum, routes=['/topic/<int:topic_id>/trivialize', '/topic/<int:topic_id>-<slug>/trivialize'], view_func=TrivializeTopic.as_view('trivialize_topic'))
    register_view(forum, routes=['/topic/<int:topic_id>/unlock', '/topic/<int:topic_id>-<slug>/unlock'], view_func=UnlockTopic.as_view('unlock_topic'))
    # Topic tracker.
    register_view(forum, routes=['/topictracker/<int:topic_id>/add', '/topictracker/<int:topic_id>-<slug>/add'], view_func=TrackTopic.as_view('track_topic'))
    register_view(forum, routes=['/topictracker/<int:topic_id>/delete', '/topictracker/<int:topic_id>-<slug>/delete'], view_func=UntrackTopic.as_view('untrack_topic'))
    register_view(forum, routes=['/topictracker'], view_func=TopicTracker.as_view('topictracker'))
    register_view(forum, routes=['/'], view_func=ForumIndex.as_view('index'))
    register_view(forum, routes=['/who-is-online'], view_func=WhoIsOnline.as_view('who_is_online'))
    # Hide/unhide (soft moderation).
    register_view(forum, routes=['/topic/<int:topic_id>/hide', '/topic/<int:topic_id>-<slug>/hide'], view_func=HideTopic.as_view('hide_topic'))
    register_view(forum, routes=['/topic/<int:topic_id>/unhide', '/topic/<int:topic_id>-<slug>/unhide'], view_func=UnhideTopic.as_view('unhide_topic'))
    register_view(forum, routes=['/post/<int:post_id>/hide'], view_func=HidePost.as_view('hide_post'))
    register_view(forum, routes=['/post/<int:post_id>/unhide'], view_func=UnhidePost.as_view('unhide_post'))
    register_view(forum, routes=['/markdown', '/markdown/<path:mode>'], view_func=MarkdownPreview.as_view('markdown_preview'))
    # Enforce authentication where the board configuration requires it.
    forum.before_request(force_login_if_needed)
    app.register_blueprint(forum, url_prefix=app.config['FORUM_URL_PREFIX'])
def _clone_size_policy(size_policy):
    """Return a fresh QSizePolicy copying the horizontal/vertical policies,
    stretch factors, and the height-for-width / width-for-height flags of
    *size_policy*."""
    clone = QtGui.QSizePolicy()
    clone.setHorizontalPolicy(size_policy.horizontalPolicy())
    clone.setVerticalPolicy(size_policy.verticalPolicy())
    clone.setHorizontalStretch(size_policy.horizontalStretch())
    clone.setVerticalStretch(size_policy.verticalStretch())
    # has*() getters feed the corresponding set*() flags on the copy.
    clone.setHeightForWidth(size_policy.hasHeightForWidth())
    clone.setWidthForHeight(size_policy.hasWidthForHeight())
    return clone
class OptionXaxisCrosshair(Options):
    """Accessors for the x-axis crosshair options (class name, color, dash
    style, snapping, width, and z-index).

    NOTE(review): every name below is defined twice (getter, then setter);
    the ``@property`` / ``@<name>.setter`` decorators appear to have been
    stripped by extraction — confirm against the original file.
    """
    def className(self):
        return self._config_get(None)
    def className(self, text: str):
        self._config(text, js_type=False)
    def color(self):
        return self._config_get('#cccccc')
    def color(self, text: str):
        self._config(text, js_type=False)
    def dashStyle(self):
        return self._config_get('Solid')
    def dashStyle(self, text: str):
        self._config(text, js_type=False)
    def snap(self):
        return self._config_get(True)
    def snap(self, flag: bool):
        self._config(flag, js_type=False)
    def width(self):
        return self._config_get(1)
    def width(self, num: float):
        self._config(num, js_type=False)
    def zIndex(self):
        return self._config_get(2)
    def zIndex(self, num: float):
        self._config(num, js_type=False)
def update_messaging_config(db: Session, key: FidesKey, config: MessagingConfigRequest) -> MessagingConfigResponse:
    """Update the messaging config stored under *key* with *config*.

    Raises:
        MessagingConfigNotFoundException: when no config exists for *key*.
    """
    existing: Optional[MessagingConfig] = MessagingConfig.get_by(db=db, field='key', value=key)
    if not existing:
        raise MessagingConfigNotFoundException(f'No messaging config found with key {key}')
    # Delegate the actual persistence to the shared upsert helper.
    return create_or_update_messaging_config(db=db, config=config)