code
stringlengths
281
23.7M
class Command(BaseCommand):
    """Management command: build the report ending at a given YYYY-MM month."""

    def add_arguments(self, parser):
        parser.add_argument('end_date', help='YYYY-MM format')
        parser.add_argument(
            '--months',
            help='Number of months of data to include (default: {})'.format(DEFAULT_NUM_MONTHS),
            default=DEFAULT_NUM_MONTHS,
        )

    def handle(self, end_date, months=None, **kwargs):
        # All the real work is delegated to the module-level build() helper.
        return build(end_date, months)
def get_smoothed_dtseries_file(user_settings, temp_dir):
    """Return the path to a smoothed dtseries file.

    Reuses a pre-existing smoothed file from the subject's results directory
    when present; otherwise smooths the s0 dtseries into *temp_dir* with
    wb_command and returns that path.
    """
    existing = os.path.join(
        user_settings.work_dir, user_settings.subject, 'MNINonLinear', 'Results',
        user_settings.fmri_name,
        '{}_Atlas_s{}.dtseries.nii'.format(user_settings.fmri_name, user_settings.fwhm))
    if os.path.exists(existing):
        return existing

    smoothed = os.path.join(
        temp_dir,
        '{}_Atlas_s{}.dtseries.nii'.format(user_settings.fmri_name, user_settings.fwhm))
    sigma = ciftify.utils.FWHM2Sigma(user_settings.fwhm)
    surfs_dir = os.path.join(
        user_settings.work_dir, user_settings.subject, 'MNINonLinear', 'fsaverage_LR32k')
    run(['wb_command', '-cifti-smoothing',
         user_settings.dtseries_s0, str(sigma), str(sigma), 'COLUMN', smoothed,
         '-left-surface', os.path.join(
             surfs_dir, '{}.L.midthickness{}.surf.gii'.format(
                 user_settings.subject, user_settings.surf_mesh)),
         '-right-surface', os.path.join(
             surfs_dir, '{}.R.midthickness{}.surf.gii'.format(
                 user_settings.subject, user_settings.surf_mesh))])
    return smoothed
class SelectItemListener(sublime_plugin.EventListener):
    """Invalidates the select-item model when the buffer changes."""

    def on_modified_async(self, view: sublime.View):
        select_item.reset_model(view)

    def on_post_text_command(self, view, command_name, args):
        # Any command other than emmet_select_item stales the model.
        if command_name != 'emmet_select_item':
            select_item.reset_model(view)
def erase_device(conn, serialnumber, clear_attendance=False):
    """Erase the device's content after an interactive serial-number confirmation.

    Raises BasicException when the typed serial number does not match.
    """
    print('WARNING! the next step will erase the current device content.')
    print('Please input the serialnumber of this device [{}] to acknowledge the ERASING!'.format(serialnumber))
    typed = input('Serial Number : ')
    if typed != serialnumber:
        raise BasicException('Serial number mismatch')
    conn.disable_device()
    print('Erasing device...')
    conn.clear_data()
    if clear_attendance:
        print('Clearing attendance too!')
        conn.clear_attendance()
    conn.read_sizes()
    print(conn)
def klaviyo_dataset_config(db: Session, klaviyo_connection_config: ConnectionConfig, klaviyo_dataset: Dict[(str, Any)]) -> Generator:
    """Fixture: link a Klaviyo DatasetConfig to the connection config, yield it, then clean up."""
    fides_key = klaviyo_dataset['fides_key']
    klaviyo_connection_config.name = fides_key
    klaviyo_connection_config.key = fides_key
    klaviyo_connection_config.save(db=db)

    ctl_dataset = CtlDataset.create_from_dataset_dict(db, klaviyo_dataset)
    dataset = DatasetConfig.create(
        db=db,
        data={
            'connection_config_id': klaviyo_connection_config.id,
            'fides_key': fides_key,
            'ctl_dataset_id': ctl_dataset.id,
        },
    )
    yield dataset
    # Teardown: remove the records created above.
    dataset.delete(db=db)
    ctl_dataset.delete(db=db)
def get_data_model(atoms, dump_steps):
    """Return a name->shape mapping describing the arrays recorded per MD dump.

    Scalars are shaped (dump_steps,); per-atom cartesian data is
    (dump_steps, 3 * len(atoms)).
    """
    n_coords = 3 * len(atoms)
    scalar_shape = (dump_steps,)
    vector_shape = (dump_steps, n_coords)
    return {
        'cart_coords': vector_shape,
        'step': scalar_shape,
        'energy_tot': scalar_shape,
        'energy_pot': scalar_shape,
        'energy_kin': scalar_shape,
        'energy_conserved': scalar_shape,
        'T': scalar_shape,
        'T_avg': scalar_shape,
        'velocity': vector_shape,
    }
def _test_hcm(yaml, expectations=None):
    """Compile *yaml* and assert each HCM typed_config key matches *expectations*.

    An expected value of None asserts the key is absent.
    Bugfix: the default was a mutable dict (`expectations={}`), shared across calls.
    """
    if expectations is None:
        expectations = {}
    econf = econf_compile(yaml)

    def check(typed_config):
        for key, expected in expectations.items():
            if expected is None:
                assert key not in typed_config
            else:
                assert key in typed_config
                assert typed_config[key] == expected
        return True

    econf_foreach_hcm(econf, check)
def _constant_factories(bmg: BMGraphBuilder) -> Dict[(Type, Callable)]:
    """Map each constant node type to the builder method that constructs it."""
    factories = {
        bn.NegativeRealNode: bmg.add_neg_real,
        bn.NaturalNode: bmg.add_natural,
        bn.ConstantNode: bmg.add_constant,
        bn.RealNode: bmg.add_real,
        bn.PositiveRealNode: bmg.add_pos_real,
        bn.ProbabilityNode: bmg.add_probability,
        bn.ConstantBooleanMatrixNode: bmg.add_boolean_matrix,
        bn.ConstantNaturalMatrixNode: bmg.add_natural_matrix,
        bn.ConstantNegativeRealMatrixNode: bmg.add_neg_real_matrix,
        bn.ConstantProbabilityMatrixNode: bmg.add_probability_matrix,
        bn.ConstantSimplexMatrixNode: bmg.add_simplex,
        bn.ConstantPositiveRealMatrixNode: bmg.add_pos_real_matrix,
        bn.ConstantRealMatrixNode: bmg.add_real_matrix,
        bn.ConstantTensorNode: bmg.add_constant_tensor,
        bn.UntypedConstantNode: bmg.add_constant,
    }
    return factories
class TraitWXFont(TraitHandler):
    """Trait handler that validates font-descriptor strings into traits fonts."""

    def validate(self, object, name, value):
        """Return a traits font for *value*; None passes through unchanged.

        Raises TraitError when *value* cannot be parsed as a font descriptor.
        """
        if value is None:
            return None
        try:
            return create_traitsfont(value)
        # Bugfix: was a bare `except:`, which also swallowed SystemExit and
        # KeyboardInterrupt; narrow to Exception.
        except Exception:
            pass
        raise TraitError(object, name, 'a font descriptor string', repr(value))

    def info(self):
        return "a string describing a font (e.g. '12 pt bold italic swiss family Arial' or 'default 12')"
# Workflow-execution test: t1/t2/my_wf/my_wf2 were presumably decorated with
# flytekit @task / @workflow in the original source (the dump appears to have
# stripped decorators); as plain functions, tup.c / tup.t1_int_output would not
# resolve on a bare tuple — TODO confirm against the original file.
# Verifies both positional and named-tuple-attribute access to task outputs,
# and that exactly one FlyteContext remains on the stack afterwards.
def test_wf1_run(): def t1(a: int) -> typing.NamedTuple('OutputsBC', t1_int_output=int, c=str): return ((a + 2), 'world') def t2(a: str, b: str) -> str: return (b + a) def my_wf(a: int, b: str) -> (int, str): (x, y) = t1(a=a) d = t2(a=y, b=b) return (x, d) x = my_wf(a=5, b='hello ') assert (x == (7, 'hello world')) def my_wf2(a: int, b: str) -> (int, str): tup = t1(a=a) d = t2(a=tup.c, b=b) return (tup.t1_int_output, d) x = my_wf2(a=5, b='hello ') assert (x == (7, 'hello world')) assert (context_manager.FlyteContextManager.size() == 1)
# NOTE(review): the leading `_toolkit([ToolkitName.qt])` looks like a stripped
# decorator (e.g. `@requires_toolkit([ToolkitName.qt])`) from the dump —
# confirm against the original source before running.
# Tests that a custom-style FileEditor can be created/disposed, and that firing
# reload_event after dispose does not raise.
_toolkit([ToolkitName.qt]) class TestCustomFileEditor(BaseTestMixin, unittest.TestCase): def setUp(self): BaseTestMixin.setUp(self) def tearDown(self): BaseTestMixin.tearDown(self) def test_custom_editor_init_and_dispose(self): view = View(Item('filepath', editor=FileEditor(), style='custom')) obj = FileModel() with UITester().create_ui(obj, dict(view=view)): pass def test_custom_editor_reload_changed_after_dispose(self): view = View(Item('filepath', editor=FileEditor(reload_name='reload_event'), style='custom')) obj = FileModel() with UITester().create_ui(obj, dict(view=view)): pass obj.reload_event = True
class CommonSegBin(CommonSegment):
    """Segment type that dumps its raw ROM byte range to a .bin asset file."""

    # Bugfix: was `def is_data() -> bool` with no `self`, so any instance call
    # (seg.is_data()) raised TypeError.
    def is_data(self) -> bool:
        return True

    def out_path(self) -> Optional[Path]:
        """Destination path under the configured asset directory."""
        return options.opts.asset_path / self.dir / f'{self.name}.bin'

    def split(self, rom_bytes):
        """Write rom_bytes[rom_start:rom_end] to out_path(), creating parents."""
        path = self.out_path()
        assert path is not None
        path.parent.mkdir(parents=True, exist_ok=True)
        if self.rom_end is None:
            # log.error is expected to abort; without rom_end the slice below is unbounded.
            log.error(f'segment {self.name} needs to know where it ends; add a position marker [0xDEADBEEF] after it')
        with open(path, 'wb') as f:
            assert isinstance(self.rom_start, int)
            assert isinstance(self.rom_end, int)
            f.write(rom_bytes[self.rom_start:self.rom_end])
        self.log(f'Wrote {self.name} to {path}')
class OptionPlotoptionsColumnStates(Options):
    """Accessors for the column-series `states` sub-options (hover/inactive/normal/select)."""

    def hover(self) -> 'OptionPlotoptionsColumnStatesHover':
        return self._config_sub_data('hover', OptionPlotoptionsColumnStatesHover)

    def inactive(self) -> 'OptionPlotoptionsColumnStatesInactive':
        return self._config_sub_data('inactive', OptionPlotoptionsColumnStatesInactive)

    def normal(self) -> 'OptionPlotoptionsColumnStatesNormal':
        return self._config_sub_data('normal', OptionPlotoptionsColumnStatesNormal)

    def select(self) -> 'OptionPlotoptionsColumnStatesSelect':
        return self._config_sub_data('select', OptionPlotoptionsColumnStatesSelect)
def _test_correct_response_for_recipient_location_district_without_geo_filters(client):
    """Spending-by-geography (district scope) without geo filters returns the expected aggregates."""
    payload = {
        'scope': 'recipient_location',
        'geo_layer': 'district',
        'filters': {'time_period': [{'start_date': '2018-10-01', 'end_date': '2020-09-30'}]},
    }
    resp = client.post(
        '/api/v2/search/spending_by_geography',
        content_type='application/json',
        data=json.dumps(payload),
    )
    expected_response = {
        'scope': 'recipient_location',
        'geo_layer': 'district',
        'results': [
            {'aggregated_amount': 50.0, 'display_name': None, 'per_capita': None, 'population': None, 'shape_code': ''},
            {'aggregated_amount': 5000000.0, 'display_name': 'SC-11', 'per_capita': 500000.0, 'population': 10, 'shape_code': '4511'},
            {'aggregated_amount': 500500.0, 'display_name': 'SC-51', 'per_capita': 5005.0, 'population': 100, 'shape_code': '4551'},
            {'aggregated_amount': 55000.0, 'display_name': 'WA-51', 'per_capita': 27.5, 'population': 2000, 'shape_code': '5351'},
        ],
        'messages': [get_time_period_message()],
    }
    assert resp.status_code == status.HTTP_200_OK, 'Failed to return 200 Response'
    resp_json = resp.json()
    # Sort so the comparison is order-independent on shape_code.
    resp_json['results'].sort(key=_get_shape_code_for_sort)
    assert resp_json == expected_response
# Wrapper that unwraps single-entry Dict observation/action spaces to their sole
# sub-space, translating observations/actions (and their dict variants) in both
# directions.
# NOTE(review): the bare parenthesized names before several defs —
# `(StructuredEnvSpacesMixin)`, `(BaseEnv)`, `(Wrapper)`, `(SimulatedEnvMixin)` —
# appear to be stripped decorators (likely `@override(...)`), and
# observation_space / action_space / *_spaces_dict were presumably @property in
# the original source; confirm before editing. Line 19 ends mid-assignment and
# continues on the next line.
class NoDictSpacesWrapper(Wrapper[Union[(EnvType, StructuredEnvSpacesMixin)]]): def __init__(self, env): super().__init__(env) assert isinstance(env.observation_space, gym.spaces.Dict) assert (len(env.observation_space.spaces) == 1) self.observation_key = list(env.observation_space.spaces.keys())[0] assert isinstance(env.action_space, gym.spaces.Dict) assert (len(env.action_space.spaces) == 1) self.action_key = list(env.action_space.spaces.keys())[0] (StructuredEnvSpacesMixin) def observation_space(self) -> gym.spaces.Space: return self.env.observation_space.spaces[self.observation_key] (StructuredEnvSpacesMixin) def action_space(self) -> gym.spaces.Space: return self.env.action_space.spaces[self.action_key] (StructuredEnvSpacesMixin) def observation_spaces_dict(self) -> Dict[(Union[(int, str)], gym.spaces.Dict)]: return {k: v.spaces[self.observation_key] for (k, v) in self.env.observation_spaces_dict.items()} (StructuredEnvSpacesMixin) def action_spaces_dict(self) -> Dict[(Union[(int, str)], gym.spaces.Dict)]: return {k: v.spaces[self.action_key] for (k, v) in self.env.action_spaces_dict.items()} def observation(self, observation: Any) -> Any: return observation[self.observation_key] def action(self, action: np.ndarray) -> Dict[(str, np.ndarray)]: return {self.action_key: action} def reverse_action(self, action: Dict[(str, np.ndarray)]) -> np.ndarray: return action[self.action_key] (BaseEnv) def reset(self) -> Any: observation = self.env.reset() return self.observation(observation) def step(self, action) -> Tuple[(Any, Any, bool, Dict[(Any, Any)])]: (observation, reward, done, info) = self.env.step(self.action(action)) return (self.observation(observation), reward, done, info) (Wrapper) def get_observation_and_action_dicts(self, maze_state: Optional[MazeStateType], maze_action: Optional[MazeActionType], first_step_in_episode: bool) -> Tuple[(Optional[Dict[(Union[(int, str)], Any)]], Optional[Dict[(Union[(int, str)], Any)]])]: (obs_dict, act_dict) = 
self.env.get_observation_and_action_dicts(maze_state, maze_action, first_step_in_episode) if (act_dict is not None): act_dict = {policy_id: self.reverse_action(action) for (policy_id, action) in act_dict.items()} if (obs_dict is not None): obs_dict = {policy_id: self.observation(obs) for (policy_id, obs) in obs_dict.items()} return (obs_dict, act_dict) (SimulatedEnvMixin) def clone_from(self, env: 'NoDictSpacesWrapper') -> None: self.env.clone_from(env)
class Switch(JsPackage):
    """JavaScript-side helpers for a switch component: toggle and set its state."""

    def __init__(self, component: primitives.HtmlModel, js_code: str = None, set_var: bool = True,
                 is_py_data: bool = True, page: primitives.PageModel = None):
        self.htmlCode = js_code if js_code is not None else component.htmlCode
        self.varName = "document.getElementById('%s')" % self.htmlCode
        self.varData = ''
        self.__var_def = None
        self.component = component
        self.page = page
        self._js = []
        self._jquery = None

    def toggle(self):
        """Simulate a click on the underlying switch."""
        return JsObjects.JsObjects.get(self.component.switch.click())

    def val(self, data: Union[(bool, primitives.JsDataModel)]):
        """Set the checked state and the on/off label from *data*."""
        data = JsUtils.jsConvertData(data, None)
        return JsObjects.JsObjects.get(("%(varName)s.querySelector('input').checked = %(flag)s; \n if(%(flag)s) {%(varName)s.querySelector('p').innerHTML = %(htmlCode)s_data.on}\n else {%(varName)s.querySelector('p').innerHTML = %(htmlCode)s_data.off}" % {'varName': self.varName, 'flag': data, 'htmlCode': self.htmlCode}))

    def false(self):
        """Force the switch off and show the 'off' label."""
        return JsObjects.JsObjects.get(("%s.querySelector('input').checked = false; %s.querySelector('p').innerHTML = %s_data.off" % (self.varName, self.varName, self.htmlCode)))

    def true(self):
        """Force the switch on and show the 'on' label."""
        return JsObjects.JsObjects.get(("%s.querySelector('input').checked = true; %s.querySelector('p').innerHTML = %s_data.on" % (self.varName, self.varName, self.htmlCode)))
def enable_reprlib_cstruct():
    """Monkey-patch dissect.cstruct Instance.__repr__ to use reprlib for compact output."""
    from dissect.cstruct.types.instance import Instance

    def reprlib_repr(self) -> str:
        rendered = ', '.join(
            f'{k}={hex(v) if isinstance(v, int) else reprlib.repr(v)}'
            for k, v in self._values.items()
        )
        return f'<{self._type.name} {rendered}>'

    Instance.__repr__ = reprlib_repr
def todo_pips():
    """Scan ../../todo_all.txt for remaining HCLK_GTP_CK_IN pips.

    Returns (gtp_common_left, ibufds_left, cmt_left) flags keyed on the
    destination prefix of each matching line.
    """
    gtp_common_left = False
    ibufds_left = False
    cmt_left = False
    with open('../../todo_all.txt', 'r') as todo_file:
        for line in todo_file:
            # Lines are dot-separated; assumes field[1] is the wire and
            # field[2] the destination — TODO confirm file format.
            fields = line.split('.')
            if 'HCLK_GTP_CK_IN' not in fields[1]:
                continue
            gtp_common_left |= fields[2].startswith('GTPE2_COMMON')
            ibufds_left |= fields[2].startswith('IBUFDS')
            cmt_left |= fields[2].startswith('HCLK')
    return (gtp_common_left, ibufds_left, cmt_left)
def check_uid():
    """Load the persisted uid, creating one on first run.

    Returns (uid, error, created): created is True when a fresh uid was
    generated; uid is 'NO_UID' when persisting (or reading) failed.
    """
    uid_path = Path(check_dir_exist(CONF_DIR), 'uid.yaml')
    conf = read_conf_file(uid_path)
    if 'uid' not in conf.keys():
        fresh_uid = str(uuid.uuid4())
        err = write_conf_file(uid_path, {'uid': fresh_uid}, error=True)
        if err:
            return ('NO_UID', err, True)
        return (fresh_uid, None, True)
    return (conf.get('uid') or 'NO_UID', None, False)
def upgrade():
    """Alembic migration: create the emails table keyed by (address, owner_id)."""
    op.create_table(
        'emails',
        sa.Column('address', sa.Text(), nullable=False),
        sa.Column('owner_id', sa.Integer(), nullable=False),
        sa.Column('registered_on', sa.DateTime(), nullable=True),
        sa.ForeignKeyConstraint(['owner_id'], ['users.id']),
        sa.PrimaryKeyConstraint('address', 'owner_id'),
    )
def kv_to_map(kvs): def convert(v): if v.startswith("'"): return v[1:(- 1)] try: return int(v) except ValueError: pass try: return float(v) except ValueError: pass try: return to_bool(v) except ValueError: pass try: return to_none(v) except ValueError: pass return v result = {} for kv in kvs: (k, v) = kv.split(':') result[k.strip()] = convert(v.strip()) return result
class RayFunctionTask(PythonFunctionTask):
    """Python function task executed against a Ray cluster."""

    _RAY_TASK_TYPE = 'ray'

    def __init__(self, task_config: RayJobConfig, task_function: Callable, **kwargs):
        super().__init__(
            task_config=task_config,
            task_type=self._RAY_TASK_TYPE,
            task_function=task_function,
            **kwargs,
        )
        self._task_config = task_config

    def pre_execute(self, user_params: ExecutionParameters) -> ExecutionParameters:
        # Connect to the configured Ray cluster before the task body runs.
        ray.init(address=self._task_config.address)
        return user_params

    def post_execute(self, user_params: ExecutionParameters, rval: Any) -> Any:
        ray.shutdown()
        return rval

    def get_custom(self, settings: SerializationSettings) -> Optional[Dict[(str, Any)]]:
        """Serialize the Ray job spec (cluster layout + base64'd runtime env) to a dict."""
        cfg = self._task_config
        head_spec = HeadGroupSpec(cfg.head_node_config.ray_start_params) if cfg.head_node_config else None
        worker_specs = [
            WorkerGroupSpec(c.group_name, c.replicas, c.min_replicas, c.max_replicas, c.ray_start_params)
            for c in cfg.worker_node_config
        ]
        ray_job = RayJob(
            ray_cluster=RayCluster(head_group_spec=head_spec, worker_group_spec=worker_specs),
            runtime_env=base64.b64encode(json.dumps(cfg.runtime_env).encode()).decode(),
        )
        return MessageToDict(ray_job.to_flyte_idl())
# API tests for the current-privacy-preferences report endpoint: auth/scope
# checks, TCF fields, role-based access, ordering, and date filtering.
# NOTE(review): the dump appears to have stripped decorators — `(scope='function')`
# before `url` was presumably `@pytest.fixture(scope='function')`, and the bare
# `.parametrize(...)` before test_get_current_preferences_roles was presumably
# `@pytest.mark.parametrize(...)`; confirm against the original source. Lines 28
# and 29 each end mid-assignment and continue on the following line.
class TestCurrentPrivacyPreferences(): (scope='function') def url(self) -> str: return (V1_URL_PREFIX + CURRENT_PRIVACY_PREFERENCES_REPORT) def test_get_current_preferences_not_authenticated(self, api_client: TestClient, url) -> None: response = api_client.get(url, headers={}) assert (401 == response.status_code) def test_get_current_preferences_incorrect_scope(self, api_client: TestClient, url, generate_auth_header) -> None: auth_header = generate_auth_header(scopes=[CONSENT_READ]) response = api_client.get(url, headers=auth_header) assert (403 == response.status_code) def test_get_current_preferences_report_with_tcf(self, generate_auth_header, privacy_preference_history_for_tcf_purpose_consent, api_client, url): auth_header = generate_auth_header(scopes=[CURRENT_PRIVACY_PREFERENCE_READ]) response = api_client.get(url, headers=auth_header) assert (response.status_code == 200) assert (len(response.json()['items']) == 1) data = response.json()['items'][0] assert (data['preference'] == privacy_preference_history_for_tcf_purpose_consent.preference.value) assert (data['purpose_consent'] == 8) assert (data['id'] == privacy_preference_history_for_tcf_purpose_consent.current_privacy_preference.id) .parametrize('role,expected_status', [('owner', HTTP_200_OK), ('contributor', HTTP_200_OK), ('viewer_and_approver', HTTP_403_FORBIDDEN), ('viewer', HTTP_403_FORBIDDEN), ('approver', HTTP_403_FORBIDDEN)]) def test_get_current_preferences_roles(self, role, expected_status, api_client: TestClient, url, generate_role_header) -> None: auth_header = generate_role_header(roles=[role]) response = api_client.get(url, headers=auth_header) assert (response.status_code == expected_status) def test_get_current_preferences(self, api_client: TestClient, url, generate_auth_header, privacy_preference_history) -> None: current_preference = privacy_preference_history.current_privacy_preference auth_header = generate_auth_header(scopes=[CURRENT_PRIVACY_PREFERENCE_READ]) response = 
api_client.get(url, headers=auth_header) assert (response.status_code == 200) assert (len(response.json()['items']) == 1) assert (response.json()['total'] == 1) assert (response.json()['page'] == 1) assert (response.json()['pages'] == 1) assert (response.json()['size'] == 50) response_body = response.json()['items'][0] assert (response_body['id'] == current_preference.id) assert (response_body['preference'] == current_preference.preference.value) assert (response_body['privacy_notice_history_id'] == current_preference.privacy_notice_history.id) assert (response_body['provided_identity_id'] == privacy_preference_history.provided_identity.id) assert (response_body['created_at'] is not None) def test_get_current_preference_ordering(self, api_client: TestClient, url, generate_auth_header, privacy_preference_history, privacy_preference_history_us_ca_provide, privacy_preference_history_fr_provide_service_frontend_only) -> None: auth_header = generate_auth_header(scopes=[CURRENT_PRIVACY_PREFERENCE_READ]) response = api_client.get(url, headers=auth_header) assert (response.status_code == 200) assert (len(response.json()['items']) == 3) assert (response.json()['total'] == 3) assert (response.json()['page'] == 1) assert (response.json()['pages'] == 1) assert (response.json()['size'] == 50) response_body = response.json()['items'] assert (response_body[0]['id'] == privacy_preference_history_fr_provide_service_frontend_only.current_privacy_preference.id) assert (response_body[1]['id'] == privacy_preference_history_us_ca_provide.current_privacy_preference.id) assert (response_body[2]['id'] == privacy_preference_history.current_privacy_preference.id) def test_get_current_preferences_date_filtering(self, api_client: TestClient, url, generate_auth_header, privacy_preference_history, privacy_preference_history_us_ca_provide, privacy_preference_history_fr_provide_service_frontend_only) -> None: auth_header = generate_auth_header(scopes=[CURRENT_PRIVACY_PREFERENCE_READ]) response = 
api_client.get((url + f"?updated_lt={privacy_preference_history.current_privacy_preference.updated_at.strftime('%Y-%m-%dT%H:%M:%S.%f')}"), headers=auth_header) assert (response.status_code == 200) assert (response.json()['items'] == []) response = api_client.get((url + f"?updated_lt={(privacy_preference_history_fr_provide_service_frontend_only.current_privacy_preference.updated_at + timedelta(hours=1)).strftime('%Y-%m-%dT%H:%M:%S.%f')}"), headers=auth_header) assert (response.status_code == 200) assert (response.json()['total'] == 3) assert (response.json()['items'][0]['id'] == privacy_preference_history_fr_provide_service_frontend_only.current_privacy_preference.id) assert (response.json()['items'][1]['id'] == privacy_preference_history_us_ca_provide.current_privacy_preference.id) assert (response.json()['items'][2]['id'] == privacy_preference_history.current_privacy_preference.id) response = api_client.get((url + f"?updated_gt={privacy_preference_history.current_privacy_preference.updated_at.strftime('%Y-%m-%dT%H:%M:%S.%f')}"), headers=auth_header) assert (response.status_code == 200) assert (response.json()['total'] == 2) assert (response.json()['items'][0]['id'] == privacy_preference_history_fr_provide_service_frontend_only.current_privacy_preference.id) assert (response.json()['items'][1]['id'] == privacy_preference_history_us_ca_provide.current_privacy_preference.id) response = api_client.get((url + f"?updated_lt={privacy_preference_history.current_privacy_preference.updated_at.strftime('%Y-%m-%dT%H:%M:%S.%f')}&updated_gt={privacy_preference_history_fr_provide_service_frontend_only.current_privacy_preference.created_at.strftime('%Y-%m-%dT%H:%M:%S.%f')}"), headers=auth_header) assert (response.status_code == 400) assert ('Value specified for updated_lt' in response.json()['detail']) assert ('must be after updated_gt' in response.json()['detail'])
def get_weather_data(path, dates, highs, lows, date_index, high_index, low_index):
    """Parse the CSV at *path*, appending parsed rows in place.

    Appends datetime/high/low to the dates/highs/lows lists; rows whose
    high or low is not an integer are reported and skipped.
    """
    reader = csv.reader(path.read_text().splitlines())
    next(reader)  # discard the header row
    for row in reader:
        current_date = datetime.strptime(row[date_index], '%Y-%m-%d')
        try:
            high = int(row[high_index])
            low = int(row[low_index])
        except ValueError:
            print(f'Missing data for {current_date}')
        else:
            dates.append(current_date)
            highs.append(high)
            lows.append(low)
# Django model linking a User to a unique, validated phone number.
# NOTE(review): the `**_('ID')` / `**_('')` patterns look like a dump artifact —
# `_(...)` returns a translated string, not a mapping, so `**` would fail at
# class definition; the original presumably passed `verbose_name=_('...')`
# (or positional verbose_name). Confirm against the original source.
class PhoneLogin(models.Model): class Meta(): verbose_name = '' verbose_name_plural = '' id = models.AutoField(**_('ID'), primary_key=True) user = models.OneToOneField(User, models.CASCADE, **_(''), unique=True) phone = models.CharField(**_(''), unique=True, max_length=15, validators=[is_phone_number])
def test_rename_member_type_ptr():
    """LSP rename of a member accessed via a type pointer updates both source files."""
    request = write_rpc_request(1, 'initialize', {'rootPath': str(test_dir)})
    file_path = test_dir / 'test_prog.f08'
    request += rename_request('bp_rename', file_path, 18, 25)
    errcode, results = run_request(request)
    assert errcode == 0
    expected = {
        path_to_uri(str(file_path)): [create('bp_rename', 18, 16, 18, 26)],
        path_to_uri(str(test_dir / 'subdir' / 'test_free.f90')): [create('bp_rename', 15, 27, 15, 37)],
    }
    check_rename_response(results[1]['changes'], expected)
class CreateIndex(Runner):
    """Runner that creates every index listed in params['indices']."""

    async def __call__(self, es, params):
        indices = mandatory(params, 'indices', self)
        api_kwargs = self._default_kw_params(params)
        # index/body are supplied explicitly per index below.
        for reserved in ('index', 'body'):
            api_kwargs.pop(reserved, None)
        for index_name, index_body in indices:
            await es.indices.create(index=index_name, body=index_body, **api_kwargs)
        return {'weight': len(indices), 'unit': 'ops', 'success': True}

    def __repr__(self, *args, **kwargs):
        return 'create-index'
def extractReinepearlWordpressCom(item):
    """Map a feed item to a release message.

    Returns None for previews or items without a chapter/volume, a built
    message for known tags, and False when no tag matches.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if not (chp or vol) or 'preview' in item['title'].lower():
        return None
    tagmap = [
        ('The Haunted', 'The Haunted', 'translated'),
        ('PRC', 'PRC', 'translated'),
        ('Loiterous', 'Loiterous', 'oel'),
    ]
    for tagname, name, tl_type in tagmap:
        if tagname in item['tags']:
            return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
def get_equip_slots() -> list[list[int]]:
    """Read equip-slot data: one count value, then 10 values per slot."""
    count = next_int(1)
    raw = get_length_data(1, length=count * 10)
    # Chunk the flat data into per-slot groups of 10.
    return [raw[10 * i:10 * (i + 1)] for i in range(count)]
def get_ec2_reserved_instances_prices(filter_region=None, filter_instance_type=None, filter_instance_type_pattern=None, filter_os_type=None, use_cache=False, cache_class=SimpleResultsCache):
    """Fetch reserved-instance prices for every utilization level and OS.

    Thin wrapper over get_ec2_instances_prices with the full URL list for
    light/medium/heavy utilization across all supported operating systems.
    """
    urls = [
        INSTANCES_RESERVED_LIGHT_UTILIZATION_LINUX_URL,
        INSTANCES_RESERVED_LIGHT_UTILIZATION_RHEL_URL,
        INSTANCES_RESERVED_LIGHT_UTILIZATION_SLES_URL,
        INSTANCES_RESERVED_LIGHT_UTILIZATION_WINDOWS_URL,
        INSTANCES_RESERVED_LIGHT_UTILIZATION_WINSQL_URL,
        INSTANCES_RESERVED_LIGHT_UTILIZATION_WINSQLWEB_URL,
        INSTANCES_RESERVED_MEDIUM_UTILIZATION_LINUX_URL,
        INSTANCES_RESERVED_MEDIUM_UTILIZATION_RHEL_URL,
        INSTANCES_RESERVED_MEDIUM_UTILIZATION_SLES_URL,
        INSTANCES_RESERVED_MEDIUM_UTILIZATION_WINDOWS_URL,
        INSTANCES_RESERVED_MEDIUM_UTILIZATION_WINSQL_URL,
        INSTANCES_RESERVED_MEDIUM_UTILIZATION_WINSQLWEB_URL,
        INSTANCES_RESERVED_HEAVY_UTILIZATION_LINUX_URL,
        INSTANCES_RESERVED_HEAVY_UTILIZATION_RHEL_URL,
        INSTANCES_RESERVED_HEAVY_UTILIZATION_SLES_URL,
        INSTANCES_RESERVED_HEAVY_UTILIZATION_WINDOWS_URL,
        INSTANCES_RESERVED_HEAVY_UTILIZATION_WINSQL_URL,
        INSTANCES_RESERVED_HEAVY_UTILIZATION_WINSQLWEB_URL,
    ]
    return get_ec2_instances_prices(
        urls, 'reserved', filter_region, filter_instance_type,
        filter_instance_type_pattern, filter_os_type, use_cache, cache_class)
# Getter/setter pairs for the networkgraph `states.inactive` options
# (enabled, linkOpacity, opacity) plus the animation sub-options accessor.
# NOTE(review): each option name is defined twice (getter then setter) — the
# original source presumably used @property / @name.setter decorators that the
# dump stripped; as plain defs the second definition shadows the first.
# Confirm against the original before editing.
class OptionPlotoptionsNetworkgraphStatesInactive(Options): def animation(self) -> 'OptionPlotoptionsNetworkgraphStatesInactiveAnimation': return self._config_sub_data('animation', OptionPlotoptionsNetworkgraphStatesInactiveAnimation) def enabled(self): return self._config_get(True) def enabled(self, flag: bool): self._config(flag, js_type=False) def linkOpacity(self): return self._config_get(0.3) def linkOpacity(self, num: float): self._config(num, js_type=False) def opacity(self): return self._config_get(0.2) def opacity(self, num: float): self._config(num, js_type=False)
def proj(fov_y, aspect, near, far):
    """Build a 4x4 perspective projection matrix as a flat ctypes float array.

    fov_y is the vertical field of view in degrees; aspect is width/height;
    near/far are the clip-plane distances.
    """
    focal = 1.0 / math.tan(math.radians(fov_y) * 0.5)
    scale_x = focal / aspect
    depth = far - near
    a = far / depth
    b = near * a
    return (ctypes.c_float * 16)(
        scale_x, 0.0, 0.0, 0.0,
        0.0, focal, 0.0, 0.0,
        0.0, 0.0, a, 1.0,
        0.0, 0.0, -b, 0.0,
    )
def build_pass_fail_matrix():
    """Pair every known valcode type with True (pure_*) or False (impure_*).

    Raises ValueError for any valcode type lacking either prefix.
    """
    pure_prefix = 'pure_'
    impure_prefix = 'impure_'
    matrix = []
    for valcode_type in all_known_valcode_types():
        if valcode_type.startswith(pure_prefix):
            matrix.append((valcode_type, True))
        elif valcode_type.startswith(impure_prefix):
            matrix.append((valcode_type, False))
        else:
            raise ValueError(
                'Valcode keys should be prefixed with {} (pass) or {} (fail) to indicate as to if the test should pass or fail. Given: {}.'.format(
                    pure_prefix, impure_prefix, valcode_type))
    return matrix
# Immutable-style container for a signed message (ledger_id + body +
# deprecated-mode flag) with protobuf encode/decode helpers.
# NOTE(review): decorators appear stripped by the dump — ledger_id/body/
# is_deprecated_mode read like @property accessors, encode takes no self
# (presumably @staticmethod), and decode takes cls (presumably @classmethod).
# Confirm against the original source before editing.
class SignedMessage(): __slots__ = ('_ledger_id', '_body', '_is_deprecated_mode') def __init__(self, ledger_id: str, body: str, is_deprecated_mode: bool=False) -> None: self._ledger_id = ledger_id self._body = body self._is_deprecated_mode = is_deprecated_mode self._check_consistency() def _check_consistency(self) -> None: enforce(isinstance(self._ledger_id, str), 'ledger_id must be str') enforce(isinstance(self._body, str), 'body must be string') enforce(isinstance(self._is_deprecated_mode, bool), 'is_deprecated_mode must be bool') def ledger_id(self) -> str: return self._ledger_id def body(self) -> str: return self._body def is_deprecated_mode(self) -> bool: return self._is_deprecated_mode def encode(signed_message_protobuf_object: Any, signed_message_object: 'SignedMessage') -> None: signed_message_dict = {'ledger_id': signed_message_object.ledger_id, 'body': signed_message_object.body, 'is_deprecated_mode': signed_message_object.is_deprecated_mode} signed_message_protobuf_object.signed_message = DictProtobufStructSerializer.encode(signed_message_dict) def decode(cls, signed_message_protobuf_object: Any) -> 'SignedMessage': signed_message_dict = DictProtobufStructSerializer.decode(signed_message_protobuf_object.signed_message) return cls(signed_message_dict['ledger_id'], signed_message_dict['body'], signed_message_dict['is_deprecated_mode']) def __eq__(self, other: Any) -> bool: return (isinstance(other, SignedMessage) and (self.ledger_id == other.ledger_id) and (self.body == other.body) and (self.is_deprecated_mode == other.is_deprecated_mode)) def __str__(self) -> str: return 'SignedMessage: ledger_id={}, body={}, is_deprecated_mode={}'.format(self.ledger_id, self.body, self.is_deprecated_mode)
class WorkflowOptions():
    """Optional settings for starting a workflow execution; all default to None.

    NOTE(review): fields are class-level annotated attributes — a @dataclass
    decorator was presumably attached in the original source (the dump appears
    to strip decorators); verify before relying on instance construction.
    """
    workflow_id: str = None
    workflow_id_reuse_policy: WorkflowIdReusePolicy = None
    workflow_run_timeout: timedelta = None
    workflow_execution_timeout: timedelta = None
    workflow_task_timeout: timedelta = None
    task_queue: str = None
    retry_options: RetryOptions = None
    cron_schedule: str = None
    memo: Dict[(str, object)] = None
    search_attributes: Dict[(str, object)] = None
class ClientUpdate(BaseModel):
    """Partial-update payload for an OAuth client; every field is optional."""
    name: str | None = None
    first_party: bool | None = None
    client_type: ClientType | None = None
    redirect_uris: list[RedirectURI] | None = Field(None, min_length=1)
    authorization_code_lifetime_seconds: int | None = Field(None, ge=0)
    access_id_token_lifetime_seconds: int | None = Field(None, ge=0)
    refresh_token_lifetime_seconds: int | None = Field(None, ge=0)
def destroy_all():
    """Destroy every VPS node whose name marks it as a scrape worker.

    Re-lists nodes after each sweep until none remain.
    """
    herder = VpsHerder()

    def scrape_workers():
        # Fresh listing on every call, mirroring the repeated re-query.
        return [node for _host, node in herder.list_nodes() if 'scrape-worker' in node]

    while scrape_workers():
        for node in scrape_workers():
            print("Destroy call for node: '%s'" % node)
            herder.destroy_client(node)
# Score based on atmospheric opacity: maps the 'atmos_opacity' band through an
# exponential expression over range_in, falling back to self.empty when the
# collection lacks that band.
# NOTE(review): the leading `(factory) _all(__all__)` looks like stripped
# decorator residue from the dump, and `self.expr.map(...)` inside map()
# suggests `expr` was a @property in the original (as written, `self.expr` is
# a bound method and `.map` would not resolve). Confirm against the original.
(factory) _all(__all__) class AtmosOpacity(Score): def __init__(self, range_in=(100, 300), formula=Expression.Exponential, name='score-atm-op', **kwargs): super(AtmosOpacity, self).__init__(**kwargs) self.range_in = range_in self.formula = formula self.name = name def expr(self): expresion = self.formula(rango=self.range_in) return expresion def map(self, collection, **kwargs): col = kwargs.get('col') band = col.getBand('atmos_opacity', 'name') if band: f = self.expr.map(name=self.name, band=band.name, map=self.adjust(), **kwargs) else: f = self.empty return collection.map(f)
def do_compile(flist, output, c_flags, debug=False, is_shared=False):
    """Compile *flist* with cc into *output*, printing the command first.

    NOTE(review): the command line is built from caller-supplied strings and
    run through the shell via os.system — command-injection risk if any
    argument is untrusted; prefer subprocess.run with an argument list.
    """
    parts = ['cc']
    if is_shared:
        parts.append('-fPIC -shared')
    parts.append('%s -o %s' % (' '.join(flist), output))
    if debug:
        parts.append('-g -Wall -D DEBUG')
    if c_flags:
        parts.append('%s' % c_flags)
    cmd = ' '.join(parts)
    print(cmd)
    os.system(cmd)
class ThemeVisualDensityDeprecated(EnumMeta):
    """Enum metaclass that warns when the old ADAPTIVEPLATFORMDENSITY name is accessed."""

    def __getattribute__(self, item):
        if item == 'ADAPTIVEPLATFORMDENSITY':
            # stacklevel=2 points the warning at the caller's access site.
            warn(
                'ADAPTIVEPLATFORMDENSITY is deprecated, use ADAPTIVE_PLATFORM_DENSITY instead.',
                DeprecationWarning,
                stacklevel=2,
            )
        return EnumMeta.__getattribute__(self, item)
# Converts model field-type names to lists of applicable admin filters; the
# tuples group the filter classes available per value kind.
# NOTE(review): the bare tuples before conv_* defs — ('CharField', 'TextField')
# etc. — appear to be stripped decorators (likely `@filters.convert(...)`
# registering each converter for those field types); confirm against the
# original source before editing.
class FilterConverter(filters.BaseFilterConverter): strings = (FilterLike, FilterNotLike, FilterEqual, FilterNotEqual, FilterEmpty, FilterInList, FilterNotInList) int_filters = (IntEqualFilter, IntNotEqualFilter, IntGreaterFilter, IntSmallerFilter, FilterEmpty, IntInListFilter, IntNotInListFilter) float_filters = (FloatEqualFilter, FloatNotEqualFilter, FloatGreaterFilter, FloatSmallerFilter, FilterEmpty, FloatInListFilter, FloatNotInListFilter) bool_filters = (BooleanEqualFilter, BooleanNotEqualFilter) date_filters = (DateEqualFilter, DateNotEqualFilter, DateGreaterFilter, DateSmallerFilter, DateBetweenFilter, DateNotBetweenFilter, FilterEmpty) datetime_filters = (DateTimeEqualFilter, DateTimeNotEqualFilter, DateTimeGreaterFilter, DateTimeSmallerFilter, DateTimeBetweenFilter, DateTimeNotBetweenFilter, FilterEmpty) time_filters = (TimeEqualFilter, TimeNotEqualFilter, TimeGreaterFilter, TimeSmallerFilter, TimeBetweenFilter, TimeNotBetweenFilter, FilterEmpty) def convert(self, type_name, column, name): filter_name = type_name.lower() if (filter_name in self.converters): return self.converters[filter_name](column, name) return None ('CharField', 'TextField') def conv_string(self, column, name): return [f(column, name) for f in self.strings] ('BooleanField') def conv_bool(self, column, name): return [f(column, name) for f in self.bool_filters] ('IntegerField', 'BigIntegerField', 'PrimaryKeyField') def conv_int(self, column, name): return [f(column, name) for f in self.int_filters] ('DecimalField', 'FloatField', 'DoubleField') def conv_float(self, column, name): return [f(column, name) for f in self.float_filters] ('DateField') def conv_date(self, column, name): return [f(column, name) for f in self.date_filters] ('DateTimeField') def conv_datetime(self, column, name): return [f(column, name) for f in self.datetime_filters] ('TimeField') def conv_time(self, column, name): return [f(column, name) for f in self.time_filters]
class NCBITaxa():
    """Local interface to the NCBI taxonomy database (sqlite3 file).

    Provides taxid <-> name translation, lineage/rank lookups, subtree
    extraction from a cached pre/post-order traversal, and annotation of
    PhyloTree objects with taxonomy properties.
    """

    def __init__(self, dbfile=None, taxdump_file=None, memory=False, update=True):
        """Open (building/upgrading if needed) the taxonomy DB.

        :param dbfile: path to the sqlite DB; defaults to DEFAULT_TAXADB.
        :param taxdump_file: optional taxdump archive to (re)build the DB from.
        :param memory: if True, copy the whole DB into an in-memory sqlite DB.
        :param update: if True, upgrade the DB when its format is outdated.
        """
        self.dbfile = (dbfile or DEFAULT_TAXADB)
        if taxdump_file:
            self.update_taxonomy_database(taxdump_file)
        # A custom dbfile that does not exist yet gets built on first use.
        if ((dbfile != DEFAULT_TAXADB) and (not os.path.exists(self.dbfile))):
            print('NCBI database not present yet (first time used?)', file=sys.stderr)
            self.update_taxonomy_database(taxdump_file)
        if (not os.path.exists(self.dbfile)):
            raise ValueError(('Cannot open taxonomy database: %s' % self.dbfile))
        self.db = None
        self._connect()
        if ((not is_taxadb_up_to_date(self.dbfile)) and update):
            print('NCBI database format is outdated. Upgrading', file=sys.stderr)
            self.update_taxonomy_database(taxdump_file)
        if memory:
            # Snapshot the on-disk DB into RAM for faster repeated queries.
            filedb = self.db
            self.db = sqlite3.connect(':memory:')
            filedb.backup(self.db)

    def update_taxonomy_database(self, taxdump_file=None):
        """(Re)build the local DB from *taxdump_file* (downloads if None)."""
        update_db(self.dbfile, taxdump_file)

    def _connect(self):
        """Open the sqlite connection to self.dbfile."""
        self.db = sqlite3.connect(self.dbfile)

    def _translate_merged(self, all_taxids):
        """Replace merged (obsolete) taxids with their current ones.

        Returns ``(converted_taxid_set, {old_taxid: new_taxid})``.
        """
        conv_all_taxids = set(list(map(int, all_taxids)))
        cmd = ('SELECT taxid_old, taxid_new FROM merged WHERE taxid_old IN (%s)' % ','.join(map(str, all_taxids)))
        result = self.db.execute(cmd)
        conversion = {}
        for (old, new) in result.fetchall():
            conv_all_taxids.discard(int(old))
            conv_all_taxids.add(int(new))
            conversion[int(old)] = int(new)
        return (conv_all_taxids, conversion)

    def get_fuzzy_name_translation(self, name, sim=0.9):
        """Approximate-match *name* against species (then synonym) names.

        Requires the SQLite Levenshtein extension.  Returns
        ``(taxid, matched_name, normalized_score)``; taxid is None on no hit.

        NOTE(review): *name* is interpolated straight into the SQL (injection
        risk for untrusted input), and ``WHERE sim <= ...`` relies on the
        SELECT alias being visible in WHERE — confirm the target sqlite build
        accepts this.
        """
        import sqlite3.dbapi2 as dbapi2
        _db = dbapi2.connect(self.dbfile)
        _db.enable_load_extension(True)
        module_path = os.path.split(os.path.realpath(__file__))[0]
        _db.execute(("SELECT load_extension('%s/%s')" % (module_path, 'SQLite-Levenshtein/levenshtein.sqlext')))
        print(('Trying fuzzy search for %s' % name))
        # Max edit distance allowed for the requested similarity threshold.
        maxdiffs = math.ceil((len(name) * (1 - sim)))
        cmd = f'SELECT taxid, spname, LEVENSHTEIN(spname, "{name}") AS sim FROM species WHERE sim <= {maxdiffs} ORDER BY sim LIMIT 1;'
        (taxid, spname, score) = (None, None, len(name))
        result = _db.execute(cmd)
        try:
            (taxid, spname, score) = result.fetchone()
        except TypeError:
            # No species hit (fetchone() returned None); retry on synonyms.
            cmd = f'SELECT taxid, spname, LEVENSHTEIN(spname, "{name}") AS sim FROM synonym WHERE sim <= {maxdiffs} ORDER BY sim LIMIT 1;'
            result = _db.execute(cmd)
            try:
                (taxid, spname, score) = result.fetchone()
            except:
                # NOTE(review): bare except silently keeps the "no match" defaults.
                pass
            else:
                taxid = int(taxid)
        else:
            taxid = int(taxid)
        norm_score = (1 - (float(score) / len(name)))
        if taxid:
            print(f'FOUND! {spname} taxid:{taxid} score:{score} ({norm_score})')
        return (taxid, spname, norm_score)

    def get_rank(self, taxids):
        """Return ``{taxid: rank_string}`` for the given taxids."""
        all_ids = set(taxids)
        all_ids.discard(None)
        all_ids.discard('')
        query = ','.join((('"%s"' % v) for v in all_ids))
        cmd = ('SELECT taxid, rank FROM species WHERE taxid IN (%s);' % query)
        result = self.db.execute(cmd)
        id2rank = {}
        # NOTE(review): the second tuple element is the rank, despite the
        # (misleading) local name ``spname``.
        for (tax, spname) in result.fetchall():
            id2rank[tax] = spname
        return id2rank

    def get_lineage_translator(self, taxids):
        """Return ``{taxid: [root, ..., taxid]}`` lineages for many taxids."""
        all_ids = set(taxids)
        all_ids.discard(None)
        all_ids.discard('')
        query = ','.join((('"%s"' % v) for v in all_ids))
        cmd = ('SELECT taxid, track FROM species WHERE taxid IN (%s);' % query)
        result = self.db.execute(cmd)
        id2lineages = {}
        for (tax, track) in result.fetchall():
            # 'track' is stored leaf-to-root; reverse to root-to-leaf ints.
            id2lineages[tax] = list(map(int, reversed(track.split(','))))
        return id2lineages

    def get_lineage(self, taxid):
        """Return the root-to-*taxid* lineage list, following merged taxids.

        :raises ValueError: if *taxid* is unknown even after merge translation.
        """
        if (not taxid):
            return None
        taxid = int(taxid)
        result = self.db.execute(f'SELECT track FROM species WHERE taxid={taxid}')
        raw_track = result.fetchone()
        if (not raw_track):
            # Perhaps an obsolete taxid that was merged into another one.
            (_, merged_conversion) = self._translate_merged([taxid])
            if (taxid in merged_conversion):
                result = self.db.execute(('SELECT track FROM species WHERE taxid=%s' % merged_conversion[taxid]))
                raw_track = result.fetchone()
            if (not raw_track):
                raise ValueError(f'Could not find taxid: {taxid}')
            else:
                warnings.warn(('taxid %s was translated into %s' % (taxid, merged_conversion[taxid])))
        track = list(map(int, raw_track[0].split(',')))
        return list(reversed(track))

    def get_common_names(self, taxids):
        """Return ``{taxid: common_name}`` (taxids without one are omitted)."""
        query = ','.join((('"%s"' % v) for v in taxids))
        cmd = ('SELECT taxid, common FROM species WHERE taxid IN (%s);' % query)
        result = self.db.execute(cmd)
        id2name = {}
        for (tax, common_name) in result.fetchall():
            if common_name:
                id2name[tax] = common_name
        return id2name

    def get_taxid_translator(self, taxids, try_synonyms=True):
        """Return ``{taxid: scientific_name}``, retrying merged taxids."""
        all_ids = set(map(int, taxids))
        all_ids.discard(None)
        all_ids.discard('')
        query = ','.join((('"%s"' % v) for v in all_ids))
        cmd = ('SELECT taxid, spname FROM species WHERE taxid IN (%s);' % query)
        result = self.db.execute(cmd)
        id2name = {}
        for (tax, spname) in result.fetchall():
            id2name[tax] = spname
        # Any taxid not found directly may have been merged; look it up under
        # its new id but report it under the caller's original id.
        if ((len(all_ids) != len(id2name)) and try_synonyms):
            not_found_taxids = (all_ids - set(id2name.keys()))
            (taxids, old2new) = self._translate_merged(not_found_taxids)
            new2old = {v: k for (k, v) in old2new.items()}
            if old2new:
                query = ','.join((('"%s"' % v) for v in new2old))
                cmd = ('SELECT taxid, spname FROM species WHERE taxid IN (%s);' % query)
                result = self.db.execute(cmd)
                for (tax, spname) in result.fetchall():
                    id2name[new2old[tax]] = spname
        return id2name

    def get_name_translator(self, names):
        """Return ``{original_name: [taxid, ...]}``, case-insensitively,
        falling back to the synonym table for names not found in species."""
        name2id = {}
        name2origname = {}
        for n in names:
            name2origname[n.lower()] = n
        names = set(name2origname.keys())
        query = ','.join((('"%s"' % n) for n in name2origname.keys()))
        # NOTE(review): ``cmd`` is built but the execute() below rebuilds the
        # same string inline — harmless duplication.
        cmd = ('SELECT spname, taxid FROM species WHERE spname IN (%s)' % query)
        result = self.db.execute(('SELECT spname, taxid FROM species WHERE spname IN (%s)' % query))
        for (sp, taxid) in result.fetchall():
            oname = name2origname[sp.lower()]
            name2id.setdefault(oname, []).append(taxid)
        missing = (names - set([n.lower() for n in name2id.keys()]))
        if missing:
            query = ','.join((('"%s"' % n) for n in missing))
            result = self.db.execute(('SELECT spname, taxid FROM synonym WHERE spname IN (%s)' % query))
            for (sp, taxid) in result.fetchall():
                oname = name2origname[sp.lower()]
                name2id.setdefault(oname, []).append(taxid)
        return name2id

    def translate_to_names(self, taxids):
        """Return names for *taxids* in order; unknown ids pass through as-is."""
        id2name = self.get_taxid_translator(taxids)
        names = []
        for sp in taxids:
            names.append(id2name.get(sp, sp))
        return names

    def get_descendant_taxa(self, parent, intermediate_nodes=False, rank_limit=None, collapse_subspecies=False, return_tree=False):
        """Return descendant taxids of *parent* (taxid or name).

        Uses the cached pre/post-order traversal (``<dbfile>.traverse.pkl``):
        taxids appearing once between the two occurrences of *parent* are
        leaves; appearing twice, internal nodes.  With *return_tree* (or the
        rank/collapse options) a PhyloTree is built via get_topology().
        """
        try:
            taxid = int(parent)
        except ValueError:
            # *parent* is a name; resolve to its first matching taxid.
            try:
                taxid = self.get_name_translator([parent])[parent][0]
            except KeyError:
                raise ValueError(('%s not found!' % parent))
        (_, conversion) = self._translate_merged([taxid])
        if conversion:
            taxid = conversion[taxid]
        with open((self.dbfile + '.traverse.pkl'), 'rb') as CACHED_TRAVERSE:
            prepostorder = pickle.load(CACHED_TRAVERSE)
        descendants = {}
        found = 0
        for tid in prepostorder:
            if (tid == taxid):
                found += 1
            elif (found == 1):
                # Between the two occurrences of taxid: count visits per node.
                descendants[tid] = (descendants.get(tid, 0) + 1)
            elif (found == 2):
                break
        if (not found):
            raise ValueError(('taxid not found:%s' % taxid))
        elif (found == 1):
            # taxid appears once in the traversal => it is itself a leaf.
            return [taxid]
        if (rank_limit or collapse_subspecies or return_tree):
            tree = self.get_topology(list(descendants.keys()), intermediate_nodes=intermediate_nodes, collapse_subspecies=collapse_subspecies, rank_limit=rank_limit)
            if return_tree:
                return tree
            elif intermediate_nodes:
                return list(map(int, [n.name for n in tree.get_descendants()]))
            else:
                return list(map(int, [n.name for n in tree]))
        elif intermediate_nodes:
            return [tid for (tid, count) in descendants.items()]
        else:
            # count == 1 <=> visited once <=> leaf taxon.
            return [tid for (tid, count) in descendants.items() if (count == 1)]

    def get_topology(self, taxids, intermediate_nodes=False, rank_limit=None, collapse_subspecies=False, annotate=True):
        """Build the minimal PhyloTree connecting *taxids*.

        A single taxid expands to its full cached subtree; multiple taxids are
        connected through their lineages.  Options prune single-child
        intermediates, cut at *rank_limit*, and collapse below species level.
        """
        from .. import PhyloTree
        (taxids, merged_conversion) = self._translate_merged(taxids)
        if (len(taxids) == 1):
            # Single root: rebuild its whole subtree from the cached traversal.
            root_taxid = int(list(taxids)[0])
            with open((self.dbfile + '.traverse.pkl'), 'rb') as CACHED_TRAVERSE:
                prepostorder = pickle.load(CACHED_TRAVERSE)
            descendants = {}
            found = 0
            nodes = {}
            hit = 0
            visited = set()
            start = prepostorder.index(root_taxid)
            try:
                end = prepostorder.index(root_taxid, (start + 1))
                subtree = prepostorder[start:(end + 1)]
            except ValueError:
                # Only one occurrence: the root is a leaf taxon.
                subtree = [root_taxid]
            # Taxids seen exactly once inside the slice are leaves.
            leaves = set((v for (v, count) in Counter(subtree).items() if (count == 1)))
            nodes[root_taxid] = PhyloTree({'name': str(root_taxid)})
            current_parent = nodes[root_taxid]
            for tid in subtree:
                if (tid in visited):
                    # Second visit: closing this subtree, go back up.
                    current_parent = nodes[tid].up
                else:
                    visited.add(tid)
                    nodes[tid] = PhyloTree({'name': str(tid)})
                    current_parent.add_child(nodes[tid])
                    if (tid not in leaves):
                        current_parent = nodes[tid]
            root = nodes[root_taxid]
        else:
            # Multiple taxa: stitch their lineages together node by node.
            taxids = set(map(int, taxids))
            sp2track = {}
            elem2node = {}
            id2lineage = self.get_lineage_translator(taxids)
            all_taxids = set()
            for lineage in id2lineage.values():
                all_taxids.update(lineage)
            id2rank = self.get_rank(all_taxids)
            for sp in taxids:
                track = []
                lineage = id2lineage[sp]
                for elem in lineage:
                    if (elem not in elem2node):
                        node = elem2node.setdefault(elem, PhyloTree())
                        node.name = str(elem)
                        node.taxid = elem
                        node.add_prop('rank', str(id2rank.get(int(elem), 'no rank')))
                    else:
                        node = elem2node[elem]
                    track.append(node)
                sp2track[sp] = track
            # Link each lineage into parent->child edges (dedup via children).
            for (sp, track) in sp2track.items():
                parent = None
                for elem in track:
                    if (parent and (elem not in parent.children)):
                        parent.add_child(elem)
                    if (rank_limit and (elem.props.get('rank') == rank_limit)):
                        break
                    parent = elem
            # Taxid 1 is the NCBI root node.
            root = elem2node[1]
        if (not intermediate_nodes):
            # Drop pass-through nodes that were not explicitly requested.
            for n in root.descendants():
                if ((len(n.children) == 1) and (int(n.name) not in taxids)):
                    n.delete(prevent_nondicotomic=False)
        if (len(root.children) == 1):
            tree = root.children[0].detach()
        else:
            tree = root
        if collapse_subspecies:
            # Remove everything below species-rank nodes.
            to_detach = []
            for node in tree.traverse():
                if (node.props.get('rank') == 'species'):
                    to_detach.extend(node.children)
            for n in to_detach:
                n.detach()
        if annotate:
            self.annotate_tree(tree)
        return tree

    def annotate_tree(self, t, taxid_attr='name', tax2name=None, tax2track=None, tax2rank=None):
        """Attach taxonomy props (taxid, sci_name, common_name, lineage, rank,
        named_lineage) to every node of tree *t*.

        Node taxids are read from *taxid_attr*; internal nodes without a taxid
        get the deepest lineage shared by their leaves.  The optional
        ``tax2*`` caches are reused when they already cover all taxids.
        Returns ``(tax2name, tax2track, tax2rank)`` for reuse.
        """
        taxids = set()
        for n in t.traverse():
            try:
                tid = int(getattr(n, taxid_attr, n.props.get(taxid_attr)))
            except (ValueError, AttributeError, TypeError):
                pass
            else:
                taxids.add(tid)
        merged_conversion = {}
        (taxids, merged_conversion) = self._translate_merged(taxids)
        if ((not tax2name) or (taxids - set(map(int, list(tax2name.keys()))))):
            tax2name = self.get_taxid_translator(taxids)
        if ((not tax2track) or (taxids - set(map(int, list(tax2track.keys()))))):
            tax2track = self.get_lineage_translator(taxids)
        # Also translate every taxid appearing anywhere inside the lineages.
        all_taxid_codes = set([_tax for _lin in list(tax2track.values()) for _tax in _lin])
        extra_tax2name = self.get_taxid_translator(list((all_taxid_codes - set(tax2name.keys()))))
        tax2name.update(extra_tax2name)
        tax2common_name = self.get_common_names(tax2name.keys())
        if (not tax2rank):
            tax2rank = self.get_rank(list(tax2name.keys()))
        n2leaves = t.get_cached_content()
        for n in t.traverse('postorder'):
            try:
                node_taxid = int(getattr(n, taxid_attr, n.props.get(taxid_attr)))
            except (ValueError, AttributeError, TypeError):
                node_taxid = None
            n.add_prop('taxid', node_taxid)
            if node_taxid:
                if (node_taxid in merged_conversion):
                    node_taxid = merged_conversion[node_taxid]
                n.add_props(sci_name=tax2name.get(node_taxid, getattr(n, taxid_attr, n.props.get(taxid_attr, ''))), common_name=tax2common_name.get(node_taxid, ''), lineage=tax2track.get(node_taxid, []), rank=tax2rank.get(node_taxid, 'Unknown'), named_lineage=[tax2name.get(tax, str(tax)) for tax in tax2track.get(node_taxid, [])])
            elif n.is_leaf:
                # Leaf with no resolvable taxid: annotate as unknown.
                n.add_props(sci_name=getattr(n, taxid_attr, n.props.get(taxid_attr, 'NA')), common_name='', lineage=[], rank='Unknown', named_lineage=[])
            else:
                # Internal node: infer taxonomy from its leaves' shared lineage.
                lineage = self._common_lineage([lf.props.get('lineage') for lf in n2leaves[n]])
                ancestor = lineage[(- 1)]
                n.add_props(sci_name=tax2name.get(ancestor, str(ancestor)), common_name=tax2common_name.get(ancestor, ''), taxid=ancestor, lineage=lineage, rank=tax2rank.get(ancestor, 'Unknown'), named_lineage=[tax2name.get(tax, str(tax)) for tax in lineage])
        return (tax2name, tax2track, tax2rank)

    def _common_lineage(self, vectors):
        """Return the lineage prefix shared by all *vectors* (root-first),
        ordered by each taxid's earliest position; [''] if nothing is shared."""
        occurrence = defaultdict(int)
        pos = defaultdict(set)
        for v in vectors:
            for (i, taxid) in enumerate(v):
                occurrence[taxid] += 1
                pos[taxid].add(i)
        common = [taxid for (taxid, ocu) in occurrence.items() if (ocu == len(vectors))]
        if (not common):
            return ['']
        else:
            sorted_lineage = sorted(common, key=(lambda x: min(pos[x])))
            return sorted_lineage

    def get_broken_branches(self, t, taxa_lineages, n2content=None):
        """Find taxa whose leaves do not form monophyletic groups in *t*.

        Leaves annotated 'unknown' are ignored.  Returns
        ``(broken_branches, broken_clades, broken_clade_sizes)`` where
        broken_branches maps each offending ancestor node to the taxa broken
        under it.
        """
        if (not n2content):
            n2content = t.get_cached_content()
        tax2node = defaultdict(set)
        unknown = set()
        for leaf in t.iter_leaves():
            if (leaf.sci_name.lower() != 'unknown'):
                lineage = taxa_lineages[leaf.taxid]
                for (index, tax) in enumerate(lineage):
                    tax2node[tax].add(leaf)
            else:
                unknown.add(leaf)
        broken_branches = defaultdict(set)
        broken_clades = set()
        for (tax, leaves) in tax2node.items():
            if (len(leaves) > 1):
                common = t.get_common_ancestor(leaves)
            else:
                common = list(leaves)[0]
            # Monophyletic iff the ancestor's content equals the taxon's
            # leaves (modulo unknowns); any symmetric difference breaks it.
            if ((leaves ^ set(n2content[common])) - unknown):
                broken_branches[common].add(tax)
                broken_clades.add(tax)
        broken_clade_sizes = [len(tax2node[tax]) for tax in broken_clades]
        return (broken_branches, broken_clades, broken_clade_sizes)
class Import_Statement(Simple_Statement):
    """AST node for a MATLAB ``import`` statement.

    Holds the ``import`` keyword token and, once parsed, the dotted chain of
    identifier tokens (optionally ending in the ``.*`` wildcard operator).
    """

    def __init__(self, t_kw):
        super().__init__()
        assert isinstance(t_kw, MATLAB_Token)
        assert t_kw.kind == 'KEYWORD' and t_kw.value == 'import'
        self.t_kw = t_kw
        self.t_kw.set_ast(self)
        # Filled in later by set_chain().
        self.l_chain = None

    def loc(self):
        """Source location of the ``import`` keyword."""
        return self.t_kw.location

    def set_chain(self, l_chain):
        """Attach the dotted-name token chain, validating every token first
        (so no token is linked to this node unless the whole chain is valid)."""
        assert isinstance(l_chain, list)
        assert all(isinstance(tok, MATLAB_Token) for tok in l_chain)
        assert all(tok.kind == 'IDENTIFIER' or
                   (tok.kind == 'OPERATOR' and tok.value == '.*')
                   for tok in l_chain)
        self.l_chain = l_chain
        for tok in self.l_chain:
            tok.set_ast(self)

    def get_chain_strings(self):
        """Return the chain as strings, with the wildcard rendered as '*'."""
        parts = []
        for tok in self.l_chain:
            parts.append(tok.value if tok.kind == 'IDENTIFIER' else '*')
        return parts
class handler(object):
    """Base class for fd-backed event handlers driven by the global dispatcher.

    Subclasses override the ``evt_*``/``timeout``/``error``/``delete`` hooks;
    the remaining methods are thin delegations to the dispatcher instance.
    """

    # Bound file descriptor; -1 means "not bound yet".
    __fileno = (- 1)

    def set_fileno(self, fd):
        self.__fileno = fd

    def fileno(self):
        """Return the bound file descriptor (-1 if unbound)."""
        return self.__fileno

    def init_func(self, creator_fd, *args, **kwargs):
        """Hook: called once when the handler is created."""
        pass

    def evt_read(self):
        """Hook: fd is readable."""
        pass

    def evt_write(self):
        """Hook: fd is writable."""
        pass

    def timeout(self):
        """Hook: the timeout set via set_timeout() fired."""
        pass

    def error(self):
        """Hook: an error occurred on the fd."""
        pass

    def delete(self):
        """Hook: the handler is being removed."""
        pass

    def set_timeout(self, fd, seconds):
        self.dispatcher.set_timeout(fd, seconds)

    def create_handler(self, creator_fd, h, *args, **kwargs):
        return self.dispatcher.create_handler(creator_fd, h, *args, **kwargs)

    def replace_handler(self, creator_fd, fileno, h, *args, **kwargs):
        # NOTE(review): 'repleace_handler' looks misspelled, but it must match
        # the dispatcher's actual method name -- confirm before renaming.
        return self.dispatcher.repleace_handler(creator_fd, fileno, h, *args, **kwargs)

    def delete_handler(self, fd):
        self.dispatcher.delete_handler(fd)

    def send_message_to_handler(self, src_fd, dst_fd, data):
        return self.dispatcher.send_message_to_handler(src_fd, dst_fd, data)

    def message_from_handler(self, from_fd, data):
        """Hook: a message arrived from another handler."""
        pass

    def handler_exists(self, fd):
        return self.dispatcher.handler_exists(fd)

    def register(self, fd):
        self.dispatcher.register(fd)

    def add_evt_read(self, fd):
        self.dispatcher.add_evt_read(fd)

    def remove_evt_read(self, fd):
        self.dispatcher.remove_evt_read(fd)

    def add_evt_write(self, fd):
        self.dispatcher.add_evt_write(fd)

    def remove_evt_write(self, fd):
        self.dispatcher.remove_evt_write(fd)

    def unregister(self, fd):
        self.dispatcher.unregister(fd)

    @property
    def dispatcher(self):
        # BUGFIX(review): every delegation above uses ``self.dispatcher.xxx``
        # (attribute access, not a call), so this must be a property; the
        # ``@property`` decorator was missing in the received source.
        return global_vars[consts.SERVER_INSTANCE_NAME]

    def reset(self):
        """Hook: reset handler state."""
        pass

    def ctl_handler(self, src_fd, dst_fd, cmd, *args, **kwargs):
        return self.dispatcher.ctl_handler(src_fd, dst_fd, cmd, *args, **kwargs)

    def handler_ctl(self, from_fd, cmd, *args, **kwargs):
        """Hook: receive a control command from another handler."""
        pass

    def add_to_loop_task(self, fileno):
        self.dispatcher.add_to_loop_task(fileno)

    def del_loop_task(self, fileno):
        self.dispatcher.del_loop_task(fileno)

    def task_loop(self):
        """Hook: called on every loop iteration while registered as loop task."""
        pass

    def release_when_replace(self):
        """Hook: called when this handler is replaced by another."""
        pass

    def get_handler(self, fd):
        return self.dispatcher.get_handler(fd)
def test_plot_style_test_defaults():
    """PlotStyle defaults: named style with black solid 1px line, no marker,
    fully opaque and enabled; assigning None to line_style/marker coerces
    the value to the empty string."""
    style = PlotStyle('Test')
    assert (style.name == 'Test')
    assert (style.color == '#000000')
    assert (style.line_style == '-')
    assert (style.alpha == 1.0)
    assert (style.marker == '')
    assert (style.width == 1.0)
    assert (style.size == 7.5)
    assert style.isEnabled()
    # None is normalized to '' by the setters.
    style.line_style = None
    style.marker = None
    assert (style.line_style == '')
    assert (style.marker == '')
def _get_migration_config(alembic_ini_path: Optional[str]=None, script_location: Optional[str]=None):
    """Build the Alembic configuration for the application metadata database.

    :param alembic_ini_path: optional path to an alembic.ini to use.
    :param script_location: optional migration-scripts directory.
    :return: ``(alembic_config, db_manager)``.
    """
    from dbgpt.storage.metadata.db_manager import db as db_manager
    from dbgpt.util._db_migration_utils import create_alembic_config
    # NOTE(review): _MODELS appears to be imported for its model-registration
    # side effect only -- confirm before removing.
    from dbgpt.app.initialization.db_model_initialization import _MODELS
    from dbgpt.app.base import _initialize_db

    # Make sure the metadata database exists before configuring Alembic.
    meta_data_path = _initialize_db()
    config = create_alembic_config(meta_data_path, db_manager.engine, db_manager.Model, db_manager.session(), alembic_ini_path, script_location)
    return (config, db_manager)
def test_check_versions_negative_cannot_parse_version():
    """check_versions() must warn when tool version output cannot be parsed."""
    # Capture stdout and make every `subprocess.check_output` return nothing,
    # so no tool version string is parseable.
    with mock.patch('sys.stdout', new_callable=StringIO) as fake_stdout, \
            mock.patch('subprocess.check_output', return_value=b''):
        check_versions()
    captured = fake_stdout.getvalue()
    assert ("Warning: cannot parse 'go' version from command: ['go', 'version']." in captured)
    assert ("Warning: cannot parse 'gcc' version from command: ['gcc', '--version']." in captured)
class AcmePreferencesPage(PreferencesPage):
    """Example 'Acme' preferences page for the workbench preferences dialog."""

    # Category and help topic this page is filed under.
    category = 'General'
    help_id = ''
    # Display name and the preferences-store path backing the traits below.
    name = 'Acme'
    preferences_path = 'acme.workbench'

    # Preference traits persisted under ``preferences_path``.
    width = Int(100)
    height = Int(200)
    ratio = Float(0.1)
    bgcolor = Color('red')
    font = Font('helvetica')

    # Traits UI layout for the page.
    trait_view = View('width', 'height', 'ratio', 'font', 'bgcolor')
class VolumeButtonControl(Gtk.VolumeButton, BaseControl):
    """Toolbar volume button kept in sync with the 'player/volume' setting."""

    name = 'volume'
    title = _('Volume')
    description = _('Change the volume')
    __gsignals__ = {'value-changed': 'override'}

    def __init__(self):
        Gtk.VolumeButton.__init__(self)
        BaseControl.__init__(self)
        # Guard flag: True while we are pushing a settings value into the
        # widget, so do_value_changed does not write it back to settings.
        self.updating = False
        adjustment = Gtk.Adjustment(upper=1, step_incr=0.1, page_incr=0.2)
        self.set_adjustment(adjustment)
        # Replace the stock +/- button labels with icons only.
        plus_button = self.get_plus_button()
        plus_button.set_image(Gtk.Image.new_from_icon_name('list-add', Gtk.IconSize.BUTTON))
        plus_button.set_label('')
        minus_button = self.get_minus_button()
        minus_button.set_image(Gtk.Image.new_from_icon_name('list-remove', Gtk.IconSize.BUTTON))
        minus_button.set_label('')
        event.add_ui_callback(self.on_option_set, 'player_option_set')
        # Initialize from the currently stored volume.
        self.on_option_set('player_option_set', settings, 'player/volume')

    def destroy(self):
        event.remove_callback(self.on_option_set, 'player_option_set')
        # NOTE(review): ``ButtonControl`` is not a base of this class
        # (bases are Gtk.VolumeButton and BaseControl) -- confirm whether this
        # should be ``BaseControl.destroy(self)``.
        ButtonControl.destroy(self)
        Gtk.VolumeButton.destroy(self)

    def set_value(self, value):
        """Set the widget value without echoing it back into settings."""
        self.updating = True
        Gtk.VolumeButton.set_value(self, value)
        self.updating = False

    def do_value_changed(self, value):
        # User-initiated change: persist it.
        if (not self.updating):
            settings.set_option('player/volume', value)

    def on_option_set(self, event, settings, option):
        """Settings changed externally: mirror the new volume in the widget."""
        if (option == 'player/volume'):
            self.set_value(float(settings.get_option(option)))
def shift_memory(fmem, memory_size, shift_size, from_size):
    """Copy the first *from_size* bytes of *fmem* to offset *shift_size*,
    truncated so the write never extends past *memory_size*.

    :raises Error: if the file is smaller than *memory_size*.
    """
    actual_size = file_size(fmem)
    if actual_size < memory_size:
        raise Error('Expected memory size of at least {} bytes, but got {}.'.format(memory_size, actual_size))
    # Read the source region from the start of the file.
    fmem.seek(0, os.SEEK_SET)
    head = fmem.read(from_size)
    # Re-write it shifted forward, clipped to the memory window.
    fmem.seek(shift_size, os.SEEK_SET)
    fmem.write(head[:memory_size - shift_size])
class NotifyOfActionWhenAway(hass.Hass):
    """AppDaemon app: notify when a watched sensor changes while nobody is
    home, after waiting ``isHome_delay`` seconds for someone to arrive."""

    def initialize(self):
        # Handles kept so terminate() can clean them up.
        self.listen_state_handle_list = []
        self.timer_handle_list = []
        # Configuration from apps.yaml.
        self.app_switch = self.args['app_switch']
        self.notify_name = self.args['notify_name']
        self.isHome_delay = self.args['isHome_delay']
        self.isHome = self.args['isHome']
        self.message = self.args['message']
        self.notifier = self.get_app('Notifier')
        # 'sensor' is a comma-separated list of entity ids.
        for sensor in self.args['sensor'].split(','):
            self.listen_state_handle_list.append(self.listen_state(self.state_change, sensor))

    def state_change(self, entity, attribute, old, new, kwargs):
        if (self.get_state(self.app_switch) == 'on'):
            if ((new != '') and (new != old)):
                if (self.get_state(self.isHome) == 'off'):
                    # Ignore motion sensors switching back to 'off'.
                    if (entity.startswith('binary_sensor.motion_sensor') and (new == 'off')):
                        pass
                    else:
                        # Defer the notification: someone may still come home.
                        self.log('Waiting {} seconds for someone to come home'.format(self.isHome_delay))
                        self.timer_handle_list.append(self.run_in(self.notify_if_no_one_home, self.isHome_delay, sensor=entity, new=new))

    def notify_if_no_one_home(self, kwargs):
        """Timer callback: send the notification if still nobody is home."""
        if (self.get_state(self.isHome) == 'off'):
            self.log('{} changed to {}'.format(self.friendly_name(kwargs['sensor']), kwargs['new']))
            self.notifier.notify(self.notify_name, self.message.format(self.friendly_name(kwargs['sensor']), kwargs['new']), useAlexa=False)

    def terminate(self):
        # Cancel all registered listeners and pending timers.
        for listen_state_handle in self.listen_state_handle_list:
            self.cancel_listen_state(listen_state_handle)
        for timer_handle in self.timer_handle_list:
            self.cancel_timer(timer_handle)
@pytest.mark.skip(reason='result inconsistent')
def test_cut_middle_template_attribute(tmpdir, merge_files_oneLR):
    """Loading a file whose template is cut mid-attribute should fail with
    'unexpected end-of-record'.

    NOTE(review): the received source had the mark mangled to a leading
    ``.skip(...)``; restored as ``@pytest.mark.skip`` (pytest is already used
    via ``pytest.raises`` below).
    """
    path = os.path.join(str(tmpdir), 'cut-middle-template-attribut.dlis')
    content = ['data/chap3/start.dlis.part', 'data/chap3/template/cut_after_attribute.dlis.part']
    merge_files_oneLR(path, content)
    with pytest.raises(IndexError) as excinfo:
        dlis.load(path)
    assert ('unexpected end-of-record' in str(excinfo.value))
def place_hook_in_registry(path_to_hook, path_to_registry):
    """Register *path_to_hook* as an AppInit DLL under *path_to_registry*
    (HKLM), enabling AppInit loading and disabling the signing requirement."""
    key = winreg.OpenKey(winreg.HKEY_LOCAL_MACHINE, path_to_registry, 0, winreg.KEY_WRITE)
    # Point AppInit_DLLs at the hook and turn loading on / signing check off.
    winreg.SetValueEx(key, 'AppInit_DLLs', 0, winreg.REG_SZ, path_to_hook)
    winreg.SetValueEx(key, 'LoadAppInit_DLLs', 0, winreg.REG_DWORD, 1)
    winreg.SetValueEx(key, 'RequireSignedAppInit_DLLs', 0, winreg.REG_DWORD, 0)
    winreg.CloseKey(key)
def get_first_requests(session, endpoint_id, limit=None):
    """Return (version_requested, first_used) rows for *endpoint_id*, one per
    version, newest first-use first; optionally capped at *limit* rows."""
    first_used = func.min(Request.time_requested).label('first_used')
    stmt = session.query(Request.version_requested, first_used)
    stmt = stmt.filter(Request.endpoint_id == endpoint_id)
    stmt = stmt.group_by(Request.version_requested)
    stmt = stmt.order_by(desc('first_used'))
    if limit:
        stmt = stmt.limit(limit)
    return stmt.all()
class AttributeVisitor():
    """Walks a parse tree and applies a callback to every node attribute
    declared by the attribute grammar for that node's production."""

    def __init__(self, attribute_grammar):
        self.attribute_grammar: AttributeGrammar = attribute_grammar

    def visit(self, root, function):
        """Call ``function(node, attribute)`` for each attribute of each node
        reachable from *root*, in grammar-traversal order."""
        grammar = self.attribute_grammar

        def apply_to_attributes(node, _, __):
            # Look up which attributes this node's production declares.
            production = grammar.production_of(node)
            for attribute in grammar.attributes[production]:
                function(node, attribute)

        grammar.traverse(root, apply_to_attributes)
def instanciate_values(o, kwargs):
    """Recursively substitute ``'$name'`` string placeholders in *o* with
    ``kwargs['name']``, preserving dict/list/tuple structure; any other
    value is returned unchanged."""
    if isinstance(o, dict):
        return {key: instanciate_values(value, kwargs) for key, value in o.items()}
    if isinstance(o, list):
        return [instanciate_values(item, kwargs) for item in o]
    if isinstance(o, tuple):
        return tuple(instanciate_values(item, kwargs) for item in o)
    if isinstance(o, str) and o.startswith('$'):
        # '$name' -> the bound value (KeyError if the name is unknown).
        return kwargs[o[1:]]
    return o
class InstallationViewSet(EventUserModelViewSet):
    """CRUD API over Installation records, with event/software/hardware
    filtering and an aggregate-counts helper."""

    queryset = Installation.objects.all()
    serializer_class = InstallationSerializer
    # BUGFIX: DRF's SearchFilter iterates ``search_fields``; a bare string is
    # iterated character-by-character, producing bogus per-letter lookups.
    # Wrap the field name in a tuple.
    search_fields = ('notes',)
    filter_fields = ('attendee__event__event_slug', 'attendee__event_user__event__event_slug', 'software', 'hardware', 'attendee')
    ordering_fields = ('created_at', 'updated_at')

    def get_counts(self):
        """Return aggregate installation counts over the currently filtered
        queryset (delegates to the Installation manager)."""
        queryset = self.filter_queryset(self.get_queryset())
        return Installation.objects.get_counts(queryset)
class TransparencyPreference(widgets.ScalePreference, widgets.CheckConditional):
    """Slider preference for the minimode window transparency, enabled only
    while the 'use_alpha' checkbox preference is on."""

    # Default slider value (presumably 0.0..1.0 alpha range -- confirm).
    default = 0.3
    name = 'plugin/minimode/transparency'
    condition_preference_name = 'plugin/minimode/use_alpha'

    def __init__(self, preferences, widget):
        # Both bases are initialised explicitly; this hierarchy does not use
        # cooperative super().__init__.
        widgets.ScalePreference.__init__(self, preferences, widget)
        widgets.CheckConditional.__init__(self)
@pytest.mark.parametrize('factory_type', ['empty'])
def test_init_optional_factories(factory_aggregate, factory_a, factory_b):
    """Factories set after construction are registered and dispatchable.

    NOTE(review): the received source had the mark mangled to a bare tuple
    ``('factory_type', ['empty'])``; restored as ``@pytest.mark.parametrize``
    (``factory_type`` is presumably consumed indirectly by the
    ``factory_aggregate`` fixture -- confirm).
    """
    factory_aggregate.set_factories(example_a=factory_a, example_b=factory_b)
    assert (factory_aggregate.factories == {'example_a': factory_a, 'example_b': factory_b})
    assert isinstance(factory_aggregate('example_a'), ExampleA)
    assert isinstance(factory_aggregate('example_b'), ExampleB)
class InteractiveEntityBase(EntityBase):
    """Entity base for appliance features that depend on remote control.

    In addition to its own key, the entity subscribes to the appliance's
    'RemoteControlActive' status so it can refresh when remote control is
    toggled (skipped when the entity IS that status, to avoid a double
    registration).
    """

    async def async_added_to_hass(self):
        (await super().async_added_to_hass())
        if (self._key != 'BSH.Common.Status.RemoteControlActive'):
            self._appliance.register_callback(self.async_on_update, 'BSH.Common.Status.RemoteControlActive')

    async def async_will_remove_from_hass(self):
        (await super().async_will_remove_from_hass())
        # Mirror the conditional registration above.
        if (self._key != 'BSH.Common.Status.RemoteControlActive'):
            self._appliance.deregister_callback(self.async_on_update, 'BSH.Common.Status.RemoteControlActive')
class PredictionColumns():
    """Value container for the prediction-related columns of a dataset:
    an optional predicted-values column and optional probability columns."""

    predicted_values: Optional[ColumnDefinition] = None
    prediction_probas: Optional[List[ColumnDefinition]] = None

    def get_columns_list(self) -> List[ColumnDefinition]:
        """Return every defined prediction column (None entries dropped)."""
        candidates = [self.predicted_values, *(self.prediction_probas or [])]
        return [column for column in candidates if column is not None]
def test_get_id_range_for_partition_with_one_over():
    """Partitioning when the id range is exactly one larger than a multiple of
    the partition size: the final partition must contain only that one id, and
    all partitions together must cover the whole range."""
    min_id = 1
    max_id = 101
    partition_size = 20
    id_range_item_count = ((max_id - min_id) + 1)
    # Precondition for this scenario: one id spills into an extra partition.
    assert ((id_range_item_count % partition_size) == 1)
    etl_config = {'partition_size': partition_size}
    ctrl = PostgresElasticsearchIndexerController(etl_config)
    ctrl.min_id = min_id
    ctrl.max_id = max_id
    ctrl.record_count = id_range_item_count
    ctrl.config['partitions'] = ctrl.determine_partitions()
    assert (ctrl.config['partitions'] == ceil((id_range_item_count / partition_size)))
    partition_range = range(0, ctrl.config['partitions'])
    # First partition: starts at min_id, spans a full partition.
    (lower_bound, upper_bound) = ctrl.get_id_range_for_partition(partition_range[0])
    assert (lower_bound == min_id)
    assert (upper_bound == (lower_bound + (partition_size - 1)))
    # Second partition follows contiguously.
    (lower_bound, upper_bound) = ctrl.get_id_range_for_partition(partition_range[1])
    assert (lower_bound == (min_id + partition_size))
    assert (upper_bound == (lower_bound + (partition_size - 1)))
    # Last partition holds exactly the single spill-over id.
    (lower_bound, upper_bound) = ctrl.get_id_range_for_partition(partition_range[(- 1)])
    assert (lower_bound == (min_id + (partition_size * partition_range[(- 1)])) == 101)
    assert (upper_bound == max_id == 101)
    # Every id in the range is claimed by some partition.
    id_set = set(range(min_id, (max_id + 1)))
    assert (_remove_seen_ids(ctrl, id_set) == set({}))
@api_view(['GET', 'POST', 'PUT', 'PATCH'])
def basic_view(request):
    """Echo the HTTP method used (plus request data for write methods).

    NOTE(review): the received source had the decorator mangled to
    ``_view([...])``; restored as DRF's ``@api_view`` -- confirm the import
    at the top of the file.
    """
    if (request.method == 'GET'):
        return Response({'method': 'GET'})
    elif (request.method == 'POST'):
        return Response({'method': 'POST', 'data': request.data})
    elif (request.method == 'PUT'):
        return Response({'method': 'PUT', 'data': request.data})
    elif (request.method == 'PATCH'):
        return Response({'method': 'PATCH', 'data': request.data})
@pytest.mark.django_db
def test_two_same_special_case_recipients(client, monkeypatch, double_fpds_awards_with_same_special_case_recipients, helpers, elasticsearch_award_index):
    """Two awards sharing the same special-case recipient must be counted as
    a single result.

    NOTE(review): the received source had the mark mangled to a leading
    ``.django_db``; restored as ``@pytest.mark.django_db``.
    """
    setup_elasticsearch_test(monkeypatch, elasticsearch_award_index)
    helpers.patch_datetime_now(monkeypatch, 2022, 12, 31)
    resp = _default_post(client, helpers)
    assert (resp.data['count'] == 1)
def test_color_and_bc_color():
    """Foreground + background colors produce ANSI escapes only when the
    terminal supports them; otherwise the text passes through unchanged."""
    printer = Printer(no_print=True)
    sample = 'This is a text.'
    formatted = printer.text(sample, color='green', bg_color='yellow')
    print(formatted)
    expected = '\x1b[38;5;2;48;5;3mThis is a text.\x1b[0m' if SUPPORTS_ANSI else 'This is a text.'
    assert (formatted == expected)
# NOTE(review): the parenthesized expression below is the argument list of a
# stripped FastAPI route decorator (likely ``@router.get(CLIENT_SCOPE, ...)``);
# the decorator name was lost in extraction and must be restored from the
# original file before this code can run.
(CLIENT_SCOPE, dependencies=[Security(verify_oauth_client, scopes=[CLIENT_READ])], response_model=List[str])
def get_client_scopes(client_id: str, db: Session=Depends(get_db)) -> List[str]:
    """Return the scope strings granted to *client_id*; [] if no such client
    exists (or it has no scopes)."""
    client = ClientDetail.get(db, object_id=client_id, config=CONFIG)
    if (not client):
        return []
    logger.info('Getting client scopes')
    return (client.scopes or [])
class MiddlewareInfo(_Traversable):
    """Inspector payload describing an app's middleware: the method tree, the
    middleware classes, and whether they are independent of each other."""

    __visit_name__ = 'middleware'

    def __init__(self, middleware_tree: MiddlewareTreeInfo, middleware_classes: List[MiddlewareClassInfo], independent: bool):
        self.middleware_tree = middleware_tree
        self.middleware_classes = middleware_classes
        self.independent = independent
        # Human-readable rendering of the independence flag.
        self.independent_text = ('Middleware are independent' if independent
                                 else 'Middleware are dependent')
class EnvoyLogJSONTest(AmbassadorTest):
    """End-to-end test: configure Envoy's access log as JSON with a custom
    format, then assert every emitted log line matches that format."""

    target: ServiceType
    log_path: str

    def init(self):
        self.target = HTTP()
        self.log_path = '/tmp/ambassador/ambassador.log'

    def config(self) -> Generator[(Union[(str, Tuple[(Node, str)])], None, None)]:
        # Ambassador Module config: JSON access log with protocol + duration.
        (yield (self, self.format('\n---\napiVersion: getambassador.io/v3alpha1\nkind: Module\nname: ambassador\nambassador_id: [{self.ambassador_id}]\nconfig:\n  envoy_log_path: {self.log_path}\n  envoy_log_format:\n    protocol: "%PROTOCOL%"\n    duration: "%DURATION%"\n  envoy_log_type: json\n')))

    def check(self):
        # JSON key order is not guaranteed; accept either field first.
        access_log_entry_regex = re.compile('^({"duration":|{"protocol":)')
        cmd = ShellCommand('tools/bin/kubectl', 'exec', self.path.k8s, 'cat', self.log_path)
        if (not cmd.check('check envoy access log')):
            pytest.exit('envoy access log does not exist')
        for line in cmd.stdout.splitlines():
            assert access_log_entry_regex.match(line), f'{line} does not match {access_log_entry_regex}'
def _fill_message_args(msg, msg_args, keys, prefix=''):
    """Populate the fields of *msg* from *msg_args*.

    :param msg: Message/TVal instance to fill in place.
    :param msg_args: either a dict mapping field name -> value (None values
        are replaced with ''), or a list of positional values matching
        ``msg.__slots__`` exactly.
    :param keys: substitution keys forwarded to ``_fill_val``.
    :param prefix: field-name prefix used for error reporting in ``_fill_val``.
    :raises ValueError: if *msg* is not a Message/TVal or *msg_args* has an
        unsupported type.
    :raises MessageException: if a positional list has the wrong length.
    """
    if not isinstance(msg, (Message, TVal)):
        raise ValueError(('msg must be a Message instance: %s' % msg))
    # isinstance (rather than exact type()==) so dict/list subclasses such as
    # OrderedDict are accepted too -- backward compatible generalization.
    if isinstance(msg_args, dict):
        for (f, v) in msg_args.items():
            if v is None:
                v = ''
            _fill_val(msg, f, v, keys, prefix)
    elif isinstance(msg_args, list):
        if len(msg_args) > len(msg.__slots__):
            raise MessageException(('Too many arguments:\n * Given: %s\n * Expected: %s' % (msg_args, msg.__slots__)))
        if len(msg_args) < len(msg.__slots__):
            raise MessageException(('Not enough arguments:\n * Given: %s\n * Expected: %s' % (msg_args, msg.__slots__)))
        for (f, v) in zip(msg.__slots__, msg_args):
            _fill_val(msg, f, v, keys, prefix)
    else:
        raise ValueError(('invalid msg_args type: %s' % str(msg_args)))
class OptionPlotoptionsPyramid3dSonificationContexttracks(Options):
    """Generated option wrapper for Highcharts
    ``plotOptions.pyramid3d.sonification.contextTracks``.

    NOTE(review): in the received source each getter/setter pair appeared as
    two plain ``def``s with the same name (the second silently shadowing the
    first).  These are clearly stripped ``@property`` / ``@<name>.setter``
    decorators, restored below.
    """

    @property
    def activeWhen(self) -> 'OptionPlotoptionsPyramid3dSonificationContexttracksActivewhen':
        """Sub-options controlling when this track is active."""
        return self._config_sub_data('activeWhen', OptionPlotoptionsPyramid3dSonificationContexttracksActivewhen)

    @property
    def instrument(self):
        """Instrument used for the track (default: 'piano')."""
        return self._config_get('piano')

    @instrument.setter
    def instrument(self, text: str):
        self._config(text, js_type=False)

    @property
    def mapping(self) -> 'OptionPlotoptionsPyramid3dSonificationContexttracksMapping':
        """Sub-options mapping data values to sound parameters."""
        return self._config_sub_data('mapping', OptionPlotoptionsPyramid3dSonificationContexttracksMapping)

    @property
    def midiName(self):
        """Name to use for the track in MIDI export."""
        return self._config_get(None)

    @midiName.setter
    def midiName(self, text: str):
        self._config(text, js_type=False)

    @property
    def pointGrouping(self) -> 'OptionPlotoptionsPyramid3dSonificationContexttracksPointgrouping':
        """Sub-options for grouping of points when sonifying."""
        return self._config_sub_data('pointGrouping', OptionPlotoptionsPyramid3dSonificationContexttracksPointgrouping)

    @property
    def roundToMusicalNotes(self):
        """Whether to round pitches to musical notes (default: True)."""
        return self._config_get(True)

    @roundToMusicalNotes.setter
    def roundToMusicalNotes(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def showPlayMarker(self):
        """Whether to show the play marker for this track (default: True)."""
        return self._config_get(True)

    @showPlayMarker.setter
    def showPlayMarker(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def timeInterval(self):
        """Play the track at fixed time intervals (milliseconds)."""
        return self._config_get(None)

    @timeInterval.setter
    def timeInterval(self, num: float):
        self._config(num, js_type=False)

    @property
    def type(self):
        """Track type (default: 'instrument')."""
        return self._config_get('instrument')

    @type.setter
    def type(self, text: str):
        self._config(text, js_type=False)

    @property
    def valueInterval(self):
        """Play the track at fixed value intervals of ``valueProp``."""
        return self._config_get(None)

    @valueInterval.setter
    def valueInterval(self, num: float):
        self._config(num, js_type=False)

    @property
    def valueMapFunction(self):
        """Mapping function for value intervals (default: 'linear')."""
        return self._config_get('linear')

    @valueMapFunction.setter
    def valueMapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def valueProp(self):
        """Point property used for value intervals (default: '"x"')."""
        return self._config_get('"x"')

    @valueProp.setter
    def valueProp(self, text: str):
        self._config(text, js_type=False)
def text(state: StateInline, silent: bool) -> bool:
    """Inline rule: consume a run of plain text up to the next terminator
    character.  Returns False if no text was consumed; otherwise advances
    ``state.pos`` (and, unless *silent*, appends the run to ``state.pending``)."""
    start = state.pos
    limit = state.posMax
    cursor = start
    # Scan forward until a terminator character or end of input.
    while cursor < limit and state.src[cursor] not in _TerminatorChars:
        cursor += 1
    if cursor == start:
        return False
    if not silent:
        state.pending += state.src[start:cursor]
    state.pos = cursor
    return True
class TelegramPerson(TelegramIdentifier, Person):
    """Person backed by a Telegram identity (id, names, username).

    NOTE(review): in the received source the accessors were plain methods,
    yet ``fullname`` reads ``self.first_name`` without calling it and
    ``person = id`` / ``nick = username`` alias the accessors -- clear signs
    of stripped ``@property`` decorators, restored here.
    """

    def __init__(self, id, first_name=None, last_name=None, username=None):
        super().__init__(id)
        self._first_name = first_name
        self._last_name = last_name
        self._username = username

    @property
    def id(self) -> str:
        return self._id

    @property
    def first_name(self) -> str:
        return self._first_name

    @property
    def last_name(self) -> str:
        return self._last_name

    @property
    def fullname(self) -> str:
        """First name, plus the last name when one is set."""
        fullname = self.first_name
        if (self.last_name is not None):
            fullname += (' ' + self.last_name)
        return fullname

    @property
    def username(self) -> str:
        return self._username

    @property
    def client(self) -> None:
        # Telegram persons have no associated client object.
        return None

    # Interface aliases expected by callers.
    person = id
    nick = username
def test_trie_secure_hex() -> None:
    """Run the hex-encoded secure-trie fixtures: insert every key/value pair
    into a secured Trie and compare the resulting root hash against the
    fixture's expected root."""
    tests = load_tests('hex_encoded_securetrie_test.json')
    for (name, test) in tests.items():
        # Secured trie: keys are hashed before insertion.
        st: Trie[(Bytes, Bytes)] = Trie(secured=True, default=b'')
        for (k, v) in test.get('in').items():
            trie_set(st, to_bytes(k), to_bytes(v))
        result = root(st)
        expected = remove_hex_prefix(test.get('root'))
        assert (result.hex() == expected), f'test {name} failed'
def to_tokens(text: str, line_offset: int=0, column_offset: int=0) -> Iterable[tuple[(KeyToken, (ValueToken | None))]]:
    """Pair up key/value tokens from *text*.

    Yields ``(key, value)`` tuples; a key immediately followed by another key
    is yielded as ``(key, None)``.  A value with no preceding key raises
    :class:`TokenizeError`.  When offsets are given, tokenizer errors are
    re-raised with their positions shifted by ``(line_offset, column_offset)``.
    """
    pending_key: (KeyToken | None) = None
    try:
        for tok in tokenize(text):
            if isinstance(tok, KeyToken):
                # A dangling key means the previous key had no value.
                if pending_key is not None:
                    yield (pending_key, None)
                pending_key = tok
            elif isinstance(tok, ValueToken):
                if pending_key is None:
                    raise TokenizeError('expected key before value', tok.start)
                yield (pending_key, tok)
                pending_key = None
    except TokenizeError as exc:
        if line_offset or column_offset:
            # Re-raise at the caller-relative position, keeping the chain.
            raise exc.clone(line_offset, column_offset) from exc
        raise
# NOTE(review): the three bare expressions below look like stripped
# ``@pytest.mark.ledger`` / ``@patch('aea_ledger_fetchai._cosmos.requests.get')``
# / ``@patch('...post')`` decorators -- TODO confirm (patch order matters:
# the innermost patch binds the first mock argument).
.ledger
('aea_ledger_fetchai._cosmos.requests.get')
('aea_ledger_fetchai._cosmos.requests.post')
def test_successful_realistic_faucet_operation(mock_post, mock_get):
    """Faucet flow: create a claim, poll twice while pending, then complete."""
    address = 'a normal cosmos address would be here'
    # POST /claims returns the claim uuid used for subsequent polling.
    mock_post.return_value = MockRequestsResponse({'uuid': 'a-uuid-v4-would-be-here'})
    # Two PENDING responses before COMPLETED exercise the polling loop.
    mock_get.side_effect = [MockRequestsResponse({'status': 'ok', 'claim': {'createdAt': '2021-08-13T15:18:50.420Z', 'updatedAt': '2021-08-13T15:18:58.249Z', 'status': FetchAIFaucetApi.FAUCET_STATUS_PENDING}}), MockRequestsResponse({'status': 'ok', 'claim': {'createdAt': '2021-08-13T15:18:50.420Z', 'updatedAt': '2021-08-13T15:18:58.249Z', 'status': FetchAIFaucetApi.FAUCET_STATUS_PENDING}}), MockRequestsResponse({'status': 'ok', 'claim': {'createdAt': '2021-08-13T15:18:50.420Z', 'updatedAt': '2021-08-13T15:18:58.249Z', 'status': FetchAIFaucetApi.FAUCET_STATUS_COMPLETED, 'txStatus': {'hash': '0x transaction hash would be here', 'height': 123456}}})]
    # poll_interval=0 keeps the test fast (no sleeping between polls).
    faucet = FetchAIFaucetApi(poll_interval=0)
    faucet.get_wealth(address)
    # Exactly one claim creation, then three polls of the same claim uuid.
    mock_post.assert_has_calls([call(url=f'{FetchAIFaucetApi.testnet_faucet_url}/api/v3/claims', json={'address': address})])
    mock_get.assert_has_calls([call(f'{FetchAIFaucetApi.testnet_faucet_url}/api/v3/claims/a-uuid-v4-would-be-here'), call(f'{FetchAIFaucetApi.testnet_faucet_url}/api/v3/claims/a-uuid-v4-would-be-here'), call(f'{FetchAIFaucetApi.testnet_faucet_url}/api/v3/claims/a-uuid-v4-would-be-here')])
def setup(app: Sphinx):
    """Register the extension's directives, config values, nodes and sub-setups.

    Returns the standard Sphinx extension metadata dict.
    """
    app.add_directive('div', Div)

    # Config values (all env-scoped).  The misspelled 'boostrap' name is
    # registered alongside the corrected one -- presumably kept so configs
    # using the old name keep working; TODO confirm.
    for cfg_name, cfg_default in (
        ('panels_add_bootstrap_css', None),
        ('panels_add_boostrap_css', None),
        ('panels_css_variables', {}),
        ('panels_dev_mode', False),
    ):
        app.add_config_value(cfg_name, cfg_default, 'env')

    app.connect('builder-inited', update_css)
    app.connect('env-updated', update_css_links)
    # Override the default container node rendering.
    app.add_node(nodes.container, override=True, html=(visit_container, depart_container))

    # Component registrations, in the original order.
    for register in (setup_panels, setup_link_button, setup_dropdown, setup_tabs, setup_icons):
        register(app)

    return {'version': __version__, 'parallel_read_safe': True, 'parallel_write_safe': True}
def inference(sess, graph, builder, annotator, text, enable_tracing=False):
    """Annotate a whitespace-tokenized sentence with the loaded model.

    Builds a Sentence proto from *text*, feeds its serialized form to the
    annotator, and returns the parsed Sentence proto.  When *enable_tracing*
    is set, the traces tensor is also evaluated (its result is discarded).
    """
    proto = sentence_pb2.Sentence()
    # start/end are unknown for plain whitespace tokenization, hence -1.
    proto.token.extend(
        [sentence_pb2.Token(word=word, start=-1, end=-1) for word in text.split()]
    )
    feed = {annotator['input_batch']: [proto.SerializeToString()]}

    if enable_tracing:
        annotations, _traces = sess.run(
            [annotator['annotations'], annotator['traces']], feed_dict=feed
        )
    else:
        annotations = sess.run(annotator['annotations'], feed_dict=feed)

    return sentence_pb2.Sentence.FromString(annotations[0])
# NOTE(review): the bare ``('snakes.nets')`` below looks like the remnant of a
# stripped ``@snakes.plugins.plugin('snakes.nets')`` decorator -- TODO confirm.
('snakes.nets')
def extend(module):
    """SNAKES plugin body: add label support to Transition, Place and PetriNet.

    Each extended class carries a lazily-created ``_labels`` dict with a
    combined ``label`` getter/setter, ``has_label`` queries, label-preserving
    ``copy``, and PNML (de)serialization of labels.  The ``__pnmlload__``
    methods read like classmethods whose ``@classmethod`` decorators were also
    stripped.  ``set`` deliberately shadows the builtin here (kept as-is).
    """

    class Transition(module.Transition):
        def label(self, *get, **set):
            # Positional names are read; keyword args are written.  With one
            # name return the value, with several a tuple, and with neither
            # reads nor writes a snapshot of all labels.
            if (not hasattr(self, '_labels')):
                self._labels = {}
            result = tuple((self._labels[g] for g in get))
            self._labels.update(set)
            if (len(get) == 1):
                return result[0]
            elif (len(get) > 1):
                return result
            elif (len(set) == 0):
                return self._labels.copy()

        def has_label(self, name, *names):
            # One name -> bool; several -> tuple of bools.
            if (len(names) == 0):
                return (name in self._labels)
            else:
                return tuple(((n in self._labels) for n in ((name,) + names)))

        def copy(self, name=None, **options):
            # Copy the node, then carry the labels over.
            if (not hasattr(self, '_labels')):
                self._labels = {}
            result = module.Transition.copy(self, name, **options)
            result._labels = self._labels.copy()
            return result

        def __pnmldump__(self):
            # Serialize each label as a <label name=...> child of the PNML tree.
            t = module.Transition.__pnmldump__(self)
            if hasattr(self, '_labels'):
                for (key, val) in self._labels.items():
                    t.add_child(Tree('label', None, Tree.from_obj(val), name=key))
            return t

        def __pnmlload__(cls, tree):
            # Rebuild the node, then restore labels from <label> children.
            t = new_instance(cls, module.Transition.__pnmlload__(tree))
            t._labels = dict(((lbl['name'], lbl.child().to_obj()) for lbl in tree.get_children('label')))
            return t

    class Place(module.Place):
        def label(self, *get, **set):
            # Same combined getter/setter protocol as Transition.label.
            if (not hasattr(self, '_labels')):
                self._labels = {}
            result = tuple((self._labels[g] for g in get))
            self._labels.update(set)
            if (len(get) == 1):
                return result[0]
            elif (len(get) > 1):
                return result
            elif (len(set) == 0):
                return self._labels.copy()

        def has_label(self, name, *names):
            if (len(names) == 0):
                return (name in self._labels)
            else:
                return tuple(((n in self._labels) for n in ((name,) + names)))

        def copy(self, name=None, **options):
            if (not hasattr(self, '_labels')):
                self._labels = {}
            result = module.Place.copy(self, name, **options)
            result._labels = self._labels.copy()
            return result

        def __pnmldump__(self):
            t = module.Place.__pnmldump__(self)
            if hasattr(self, '_labels'):
                for (key, val) in self._labels.items():
                    t.add_child(Tree('label', None, Tree.from_obj(val), name=key))
            return t

        def __pnmlload__(cls, tree):
            p = new_instance(cls, module.Place.__pnmlload__(tree))
            p._labels = dict(((lbl['name'], lbl.child().to_obj()) for lbl in tree.get_children('label')))
            return p

    class PetriNet(module.PetriNet):
        def label(self, *get, **set):
            # Same combined getter/setter protocol, on the net itself.
            if (not hasattr(self, '_labels')):
                self._labels = {}
            result = tuple((self._labels[g] for g in get))
            self._labels.update(set)
            if (len(get) == 1):
                return result[0]
            elif (len(get) > 1):
                return result
            elif (len(set) == 0):
                return self._labels.copy()

        def has_label(self, name, *names):
            if (len(names) == 0):
                return (name in self._labels)
            else:
                return tuple(((n in self._labels) for n in ((name,) + names)))

        def copy(self, name=None, **options):
            if (not hasattr(self, '_labels')):
                self._labels = {}
            result = module.PetriNet.copy(self, name, **options)
            result._labels = self._labels.copy()
            return result

        def __pnmldump__(self):
            t = module.PetriNet.__pnmldump__(self)
            if hasattr(self, '_labels'):
                for (key, val) in self._labels.items():
                    t.add_child(Tree('label', None, Tree.from_obj(val), name=key))
            return t

        def __pnmlload__(cls, tree):
            n = new_instance(cls, module.PetriNet.__pnmlload__(tree))
            n._labels = dict(((lbl['name'], lbl.child().to_obj()) for lbl in tree.get_children('label')))
            return n

        def merge_places(self, target, sources, **options):
            # After the base merge, union all source place labels onto the target.
            module.PetriNet.merge_places(self, target, sources, **options)
            new = self.place(target)
            for place in sources:
                new.label(**dict(self.place(place).label()))

        def merge_transitions(self, target, sources, **options):
            # Same label-union behaviour for merged transitions.
            module.PetriNet.merge_transitions(self, target, sources, **options)
            new = self.transition(target)
            for trans in sources:
                new.label(**dict(self.transition(trans).label()))

    return (Transition, Place, PetriNet)
def test_async_mode_disabling_on_overriding():
    """A provider's async mode stays undefined until first call, then disables.

    Overriding alone must not resolve the mode; the first synchronous call
    should flip it from 'undefined' to 'disabled'.
    """
    instance = object()

    def _provide():
        return instance

    provider = providers.Provider()
    provider.override(providers.Callable(_provide))

    # Overriding alone leaves the mode unresolved.
    assert provider.is_async_mode_undefined() is True

    # First (synchronous) resolution pins the mode to disabled.
    provider()
    assert provider.is_async_mode_disabled() is True
def parse_binutils_size_c_output(stdout: str) -> Dict[str, int]:
    """Parse ``size -C`` (binutils, SysV-ish summary) output into a dict.

    Each ``<field>: <n> bytes`` line becomes a lowercase ``field -> n`` entry.

    :param stdout: captured standard output of the ``size`` invocation.
    :returns: mapping of lowercased field name to its size in bytes.
    :raises AssertionError: if the expected ``program``/``data`` totals are
        missing, i.e. the output format changed.
    """
    # Raw string instead of double-escaping; pattern itself is unchanged.
    number_field_regex = r'(.*):\s+(\d+)\sbytes'
    sizes: Dict[str, int] = {}
    for match in re.finditer(number_field_regex, stdout, re.MULTILINE):
        field, value = match.groups()
        sizes[field.lower()] = int(value)
    # `size -C` output always contains these two totals; fail loudly otherwise.
    assert 'program' in sizes, sizes.keys()
    assert 'data' in sizes, sizes.keys()
    return sizes
class OptionSeriesBulletDataDatalabelsTextpath(Options):
    """Options wrapper for ``series.bullet.data.dataLabels.textPath``.

    Getter/setter pairs share one name; the ``@property``/setter decorators
    appear to have been stripped from this chunk -- TODO confirm.
    """

    def attributes(self):
        # SVG attributes applied to the text path; no default.
        return self._config_get(None)

    def attributes(self, value: Any):
        self._config(value, js_type=False)

    def enabled(self):
        # Text paths are off by default.
        return self._config_get(False)

    def enabled(self, flag: bool):
        self._config(flag, js_type=False)
class OptionSeriesPolygonSonificationTracksMappingFrequency(Options):
    """Options wrapper for ``series.polygon.sonification.tracks.mapping.frequency``.

    Getter/setter pairs share one name; the ``@property``/setter decorators
    appear to have been stripped from this chunk -- TODO confirm.  All getters
    default to ``None``.
    """

    def mapFunction(self):
        return self._config_get(None)

    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    def mapTo(self):
        return self._config_get(None)

    def mapTo(self, text: str):
        self._config(text, js_type=False)

    def max(self):
        return self._config_get(None)

    def max(self, num: float):
        self._config(num, js_type=False)

    def min(self):
        return self._config_get(None)

    def min(self, num: float):
        self._config(num, js_type=False)

    def within(self):
        return self._config_get(None)

    def within(self, value: Any):
        self._config(value, js_type=False)
# NOTE(review): the bare tuple below looks like a stripped Flask route
# decorator (e.g. ``@bp.route('/messages/delete/<int:msg_no>', methods=['GET'])``)
# -- TODO confirm.
('/messages/delete/<int:msg_no>', methods=['GET'])
def messages_delete(msg_no):
    """Delete message *msg_no* and redirect back to the referring page.

    On success redirects with 301, on failure flashes an error and redirects
    with 303.  NOTE(review): 301 (permanent) to ``request.referrer`` is
    unusual for a mutating action -- verify the status codes are intentional.
    """
    print('Removing message')
    message = Message(get_db(), current_app.config.get('MASTER_CONFIG'))
    success = message.delete(msg_no)
    if success:
        return redirect(request.referrer, code=301)
    else:
        flash('Delete message failed. Check the log.')
        return redirect(request.referrer, code=303)
def separable_dwt(size, J, no_grad=False, dev='cuda'):
    """Benchmark helper: run a J-level separable DWT forward (and backward) 5x.

    A random tensor of shape *size* is transformed with a db5/zero-padding
    DWT on *dev*; unless *no_grad* is set, gradients are back-propagated
    through the first highpass band each iteration.  Returns the mean of the
    final lowpass output and the means of each highpass band.
    """
    needs_grad = not no_grad
    signal = torch.randn(*size, requires_grad=needs_grad).to(dev)
    transform = dwt.DWTForward(J, wave='db5', mode='zero').to(dev)
    for _ in range(5):
        lowpass, highpass = transform(signal)
        if needs_grad:
            highpass[0].backward(torch.ones_like(highpass[0]))
    return (lowpass.mean(), [band.mean() for band in highpass])
# NOTE(review): this chunk looks mangled -- the leading ``.asyncio`` /
# ``.workspace_host`` and the ``.authenticated_admin`` lines read like
# stripped pytest marks (``@pytest.mark.asyncio`` etc.), and the
# ``test_client_api:`` annotations appear truncated (the client type is
# missing, leaving ``param: param: Type`` which is invalid syntax).
# Code kept byte-for-byte; restore the decorators/annotations upstream.
.asyncio
.workspace_host
class TestDeleteUser():
    """API tests for ``DELETE /users/{id}``."""

    async def test_unauthorized(self, unauthorized_api_assertions: HTTPXResponseAssertion, test_client_api: test_data: TestData):
        # Deleting without credentials must be rejected.
        user = test_data['users']['regular']
        response = (await test_client_api.delete(f'/users/{user.id}'))
        unauthorized_api_assertions(response)

    .authenticated_admin
    async def test_not_existing(self, test_client_api: not_existing_uuid: uuid.UUID):
        # Unknown user id -> 404.
        response = (await test_client_api.delete(f'/users/{not_existing_uuid}'))
        assert (response.status_code == status.HTTP_404_NOT_FOUND)

    .authenticated_admin
    async def test_valid(self, test_client_api: test_data: TestData):
        # Admin deleting an existing user -> 204 No Content.
        user = test_data['users']['regular']
        response = (await test_client_api.delete(f'/users/{user.id}'))
        assert (response.status_code == status.HTTP_204_NO_CONTENT)
class _port_desc_prop_recirculate(loxi.OFObject):
    """OpenFlow port-desc property carrying recirculation port numbers.

    Generated loxi-style wire object (Python 2 era: packing works on ``str``
    buffers).  ``unpack`` reads like a ``@staticmethod`` whose decorator was
    stripped -- TODO confirm.
    """
    # Wire type code for this property.
    type = 4

    def __init__(self, port_nos=None):
        # ``port_nos`` is kept as a raw packed string, not a list.
        if (port_nos != None):
            self.port_nos = port_nos
        else:
            self.port_nos = ''
        return

    def pack(self):
        """Serialize: type, length placeholder, payload; then patch length."""
        packed = []
        packed.append(struct.pack('!H', self.type))
        packed.append(struct.pack('!H', 0))  # placeholder, patched below
        packed.append(self.port_nos)
        length = sum([len(x) for x in packed])
        packed[1] = struct.pack('!H', length)
        return ''.join(packed)

    def unpack(reader):
        """Deserialize from *reader*; remainder of the TLV is the payload."""
        obj = _port_desc_prop_recirculate()
        _type = reader.read('!H')[0]
        assert (_type == 4)
        _length = reader.read('!H')[0]
        orig_reader = reader
        # Constrain reads to this property's declared length (4-byte header).
        reader = orig_reader.slice(_length, 4)
        obj.port_nos = str(reader.read_all())
        return obj

    def __eq__(self, other):
        if (type(self) != type(other)):
            return False
        if (self.port_nos != other.port_nos):
            return False
        return True

    def pretty_print(self, q):
        # loxi-style pretty printer hook.
        q.text('_port_desc_prop_recirculate {')
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text('port_nos = ')
                q.pp(self.port_nos)
            q.breakable()
        q.text('}')
def filter_firewall_shaping_policy_data(json):
    """Reduce a module-argument dict to the known shaping-policy options.

    Invalid fields are stripped via ``remove_invalid_fields``; only options
    from ``option_list`` that are present and not ``None`` are kept.

    :param json: raw parameter dict (name kept for interface compatibility,
        even though it shadows the stdlib module name).
    :returns: filtered dict of option name -> value.
    """
    option_list = ['app_category', 'app_group', 'application', 'class_id', 'comment', 'cos', 'cos_mask', 'diffserv_forward', 'diffserv_reverse', 'diffservcode_forward', 'diffservcode_rev', 'dstaddr', 'dstaddr6', 'dstintf', 'groups', 'id', 'internet_service', 'internet_service_custom', 'internet_service_custom_group', 'internet_service_group', 'internet_service_id', 'internet_service_name', 'internet_service_src', 'internet_service_src_custom', 'internet_service_src_custom_group', 'internet_service_src_group', 'internet_service_src_id', 'internet_service_src_name', 'ip_version', 'name', 'per_ip_shaper', 'schedule', 'service', 'srcaddr', 'srcaddr6', 'srcintf', 'status', 'tos', 'tos_mask', 'tos_negate', 'traffic_shaper', 'traffic_shaper_reverse', 'traffic_type', 'url_category', 'users', 'uuid']
    json = remove_invalid_fields(json)
    # Dict comprehension instead of the manual append loop (same semantics).
    return {
        attribute: json[attribute]
        for attribute in option_list
        if attribute in json and json[attribute] is not None
    }
class Player(DjangoObjectType):
    """GraphQL type for ``models.Player``.

    Resolvers follow the graphene implicit-root convention (first argument is
    the Player instance, not ``self``).  Access control: a player may read
    their own data; reading someone else's requires the ``player.view_player``
    (or ``player.view_user``) permission.
    """

    class Meta():
        model = models.Player
        # Reverse relations are exposed via the guarded resolvers below instead.
        exclude = ['friended_by', 'blocked_by', 'reported_by']

    friend_requests = gh.List(gh.NonNull('player.schema.Player'), description='')

    def resolve_friends(root, info):
        # Own profile is always readable; others need view_player.
        ctx = info.context
        require_login(ctx)
        p = ctx.user.player
        if (p.id != root.id):
            require_perm(ctx, 'player.view_player')
        # Only mutual friendships count as friends.
        return root.friends.filter(friends__id=root.id)

    def resolve_friend_requests(root, info):
        ctx = info.context
        require_login(ctx)
        p = ctx.user.player
        if (p.id != root.id):
            require_perm(ctx, 'player.view_player')
        # Incoming requests: players who friended root but are not friended back.
        return root.friended_by.exclude(id__in=root.friends.only('id'))

    user = gh.Field(User, description='')

    def resolve_user(root, info):
        ctx = info.context
        require_login(ctx)
        u = ctx.user
        if ((u.id == root.user.id) or require_perm(ctx, 'player.view_user')):
            return root.user

    badges = gh.List(gh.NonNull('badge.schema.PlayerBadge'), description='')

    def resolve_badges(root, info):
        # Local import avoids a circular schema import.
        from badge.models import PlayerBadge
        return PlayerBadge.objects.filter(player__id=root.id)

    power = gh.Int(description='P')

    def resolve_power(root, info):
        # Hard-coded value -- presumably a placeholder until real power
        # calculation lands; TODO confirm.
        return 233
class ImageVmView(APIView):
    """Read-only API view listing the VMs that use a given server image."""
    dc_bound = False
    order_by_default = order_by_fields = ('hostname',)

    def __init__(self, request, name, data):
        super(ImageVmView, self).__init__(request)
        self.name = name
        self.data = data
        # Resolve the Image object up front; failures surface here.
        self.img = get_virt_object(request, Image, data=data, name=name)

    def get(self, many=True):
        """Return the VMs referencing this image (full objects or hostnames)."""
        assert many
        request = self.request
        img = self.img
        detailed = self.full or self.extended

        # Prefetch related rows only when a detailed serialization is needed.
        related = ('owner', 'node', 'dc') if detailed else ()
        candidates = get_vms(request, sr=related, order_by=self.order_by,
                             dc__in=img.dc.all(), slavevm__isnull=True)
        vms = [vm for vm in candidates if img.uuid in vm.get_image_uuids()]

        if detailed:
            res = VmSerializer(request, vms, many=True).data
        else:
            res = [vm.hostname for vm in vms]

        return SuccessTaskResponse(request, res, dc_bound=self.dc_bound)
class CRCEngine(Module):
    """Combinatorial CRC engine (migen).

    Unrolls ``width`` rounds of the LFSR defined by *polynom* into a single
    combinatorial XOR network: ``next = crc_round^width(last ^ data)``.

    :param width: CRC width in bits (e.g. 32 for CRC-32).
    :param polynom: generator polynomial as a bitmask.

    Signals: ``data`` (input word), ``last`` (previous CRC), ``next``
    (resulting CRC).
    """

    def __init__(self, width, polynom):
        self.data = Signal(width)
        self.last = Signal(width)
        self.next = Signal(width)

        def _optimize_xors(terms):
            # Keep only terms appearing an odd number of times (x ^ x == 0).
            # OrderedDict preserves term order for deterministic netlists;
            # dict.get replaces the original bare `except:` counter.
            counts = OrderedDict()
            for term in terms:
                counts[term] = counts.get(term, 0) + 1
            return [term for term, n in counts.items() if n % 2]

        # BUG FIX: was Signal(32), silently wrong for any width > 32.
        new = Signal(width)
        self.comb += new.eq(self.last ^ self.data)

        # Tap positions from the polynomial bitmask.
        taps = [x for x in range(width) if (1 << x) & polynom]

        # Symbolically shift the register `width` times, folding the feedback
        # bit into every tap position and cancelling duplicate XOR terms.
        curval = [[('new', i)] for i in range(width)]
        for _ in range(width):
            feedback = curval.pop()
            for j in range(width - 1):
                if (j + 1) in taps:
                    curval[j] += feedback
                curval[j] = _optimize_xors(curval[j])
            curval.insert(0, feedback)

        # Emit one XOR reduction per output bit.
        for i in range(width):
            xors = [new[n] for t, n in curval[i] if t == 'new']
            self.comb += self.next[i].eq(reduce(xor, xors))
def automl_classification_train_async_arguments(provider_name: str) -> Dict:
    """Return the test arguments (project id) for the given AutoML provider.

    Raises NotImplementedError for providers without a registered project id.
    """
    # Guard clause: unknown providers fail fast with an actionable message.
    if provider_name != 'nyckel':
        raise NotImplementedError(f'Please add a project id for test arguments of provider: {provider_name}')
    return {'project_id': 'function_yfisrgk70k1iuroq'}
def assert_new_alerts(new_alerts):
    """Assert that *new_alerts* was partitioned into the expected send/skip sets.

    BUG FIX: the original compared ``json.dumps(ids, sort_keys=True)`` strings,
    but ``sort_keys`` only sorts dict keys -- it never sorts lists -- so the
    comparison was silently order-sensitive despite the apparent intent.
    Comparing sorted id lists makes the check genuinely order-independent.

    :param new_alerts: object with ``send`` and ``skip`` iterables of alerts,
        each alert exposing an ``id`` attribute.
    :raises AssertionError: if either partition's ids differ from the expected set.
    """
    send_ids = sorted(alert.id for alert in new_alerts.send)
    skip_ids = sorted(alert.id for alert in new_alerts.skip)
    assert send_ids == ['alert_id_2', 'alert_id_3', 'alert_id_4']
    assert skip_ids == ['alert_id_1', 'alert_id_5']
def extractLittlelightbeaconWordpressCom(item):
    """Map a littlelightbeacon.wordpress.com feed item to a release message.

    Returns None for items with no volume/chapter info or previews, a release
    message when a known group tag matches, and False otherwise.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])

    # Skip untitled chapters and preview posts outright.
    if not (chp or vol) or 'preview' in item['title'].lower():
        return None

    # (feed tag, release group name, translation type)
    release_map = (
        ('PRC', 'PRC', 'translated'),
        ('Loiterous', 'Loiterous', 'oel'),
    )
    for tag, group_name, tl_type in release_map:
        if tag in item['tags']:
            return buildReleaseMessageWithType(item, group_name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
class JsScreen():
    """Expose ``window.screen`` metrics as JavaScript-side ``JsNumber`` expressions.

    Each accessor returns a ``JsNumber`` wrapping the corresponding
    ``screen.*`` attribute name; nothing is evaluated on the Python side.
    These read like ``@property`` accessors whose decorators were stripped
    -- TODO confirm.
    """

    def availHeight(self) -> JsNumber.JsNumber:
        return JsNumber.JsNumber('screen.availHeight')

    def availWidth(self) -> JsNumber.JsNumber:
        return JsNumber.JsNumber('screen.availWidth')

    def colorDepth(self) -> JsNumber.JsNumber:
        return JsNumber.JsNumber('screen.colorDepth')

    def height(self) -> JsNumber.JsNumber:
        return JsNumber.JsNumber('screen.height')

    def pixelDepth(self) -> JsNumber.JsNumber:
        return JsNumber.JsNumber('screen.pixelDepth')

    def width(self) -> JsNumber.JsNumber:
        return JsNumber.JsNumber('screen.width')
class TestDummyValidator(Validator):
    """Test double: a Validator that always yields a pre-built report.

    NOTE(review): ``name`` may originally have been a ``@property``
    (decorator stripped) -- confirm against the Validator interface.
    """

    def __init__(self, dummy_report: ValidationReport) -> None:
        # Canned report returned verbatim by name()/__validate__().
        self.dummy_report: ValidationReport = dummy_report

    def name(self) -> str:
        # Mirrors the validator name recorded in the canned report.
        return self.dummy_report.validator_name

    def __validate__(self) -> ValidationReport:
        return self.dummy_report
# NOTE(review): the two bare expressions below look like stripped
# ``@nox.session(...)`` and ``@nox.parametrize('action', [...])`` decorators
# -- TODO confirm; as written they are not valid decorators.
()
('action', [nox.param('dry', id='dry'), nox.param('push', id='push')])
def tag(session: nox.Session, action: str) -> None:
    """Generate the next release tag; with action='push', create and push it.

    action='dry' only logs the tag that would be created; action='push'
    creates the tag locally and pushes it to origin; anything else errors.
    """
    # Local import keeps GitPython out of sessions that do not need it.
    from git.repo import Repo
    repo = Repo()
    all_tags = get_all_tags(repo)
    generated_tag = generate_tag(session, repo.active_branch.name, all_tags)
    if (action == 'dry'):
        session.log(f'Dry-run -- would generate tag: {generated_tag}')
    elif (action == 'push'):
        repo.create_tag(generated_tag)
        session.log(f'Pushing tag {generated_tag} to remote (origin)')
        repo.remotes.origin.push(generated_tag)
    else:
        session.error(f'Invalid action: {action}')
def get_local_user():
    """Best-effort lookup of the current OS user name.

    Uses ``getpass.getuser()``; falls back to the Win32 API when getpass
    cannot import its POSIX dependencies, and returns ``None`` when no name
    can be determined.

    :returns: the user name as a string, or ``None``.
    """
    import getpass
    username = None
    try:
        username = getpass.getuser()
    except KeyError:
        # uid has no passwd entry; leave username as None.
        pass
    except ImportError:
        # getpass needs the pwd module on POSIX; on Windows fall back to the
        # Win32 API.  ``win32`` is a module-level flag -- presumably set at
        # import time; TODO confirm.
        if win32:
            import win32api
            # Removed unused win32security/win32profile imports (never referenced).
            username = win32api.GetUserName()
    return username
class TestGetRankDetails(unittest.TestCase):
    """Unit test for ``comms_utils.get_rank_details`` against a mocked backend."""

    def test_mock_backend(self):
        mockBackend = MockBackendFunction()
        # Expected return tuple.  ``device`` appears twice -- presumably
        # get_rank_details returns both a logical and a hardware device slot
        # and the mock uses the same object for both; TODO confirm against
        # the get_rank_details implementation.
        mockTuple = (mockBackend.local_rank, mockBackend.global_rank, mockBackend.world_size, mockBackend.group, mockBackend.device, mockBackend.device)
        self.assertEqual(comms_utils.get_rank_details(mockBackend), mockTuple)
class Customer(ModelNormal):
    """OpenAPI-generated model for a (Fastly-style) customer record.

    NOTE(review): this looks like openapi-generator output whose decorators
    were mangled -- the bare ``_property`` lines read like stripped
    ``@cached_property`` decorators and ``_js_args_to_python_args`` like a
    stripped ``@convert_js_args_to_python_args`` (plus ``@classmethod`` on
    ``_from_openapi_data``) -- TODO confirm against the generator template.
    Code kept byte-for-byte.
    """

    # Enumerated values per (attribute-path) tuple key.
    allowed_values = {('billing_network_type',): {'PUBLIC': 'public', 'PRIVATE': 'private'}}
    validations = {}

    _property
    def additional_properties_type():
        # Types accepted for properties not listed in attribute_map.
        return (bool, date, datetime, dict, float, int, list, str, none_type)

    _nullable = False

    _property
    def openapi_types():
        # attribute name -> tuple of accepted Python types.
        return {
            'billing_contact_id': (str, none_type),
            'billing_network_type': (str,),
            'billing_ref': (str, none_type),
            'can_configure_wordpress': (bool, none_type),
            'can_reset_passwords': (bool,),
            'can_upload_vcl': (bool,),
            'force_2fa': (bool,),
            'force_sso': (bool,),
            'has_account_panel': (bool,),
            'has_improved_events': (bool,),
            'has_improved_ssl_config': (bool,),
            'has_openstack_logging': (bool,),
            'has_pci': (bool,),
            'has_pci_passwords': (bool,),
            'ip_whitelist': (str,),
            'legal_contact_id': (str, none_type),
            'name': (str,),
            'owner_id': (str,),
            'phone_number': (str,),
            'postal_address': (str, none_type),
            'pricing_plan': (str,),
            'pricing_plan_id': (str,),
            'security_contact_id': (str, none_type),
            'technical_contact_id': (str, none_type),
        }

    _property
    def discriminator():
        return None

    # Python attribute name -> JSON field name (identical here).
    attribute_map = {
        'billing_contact_id': 'billing_contact_id',
        'billing_network_type': 'billing_network_type',
        'billing_ref': 'billing_ref',
        'can_configure_wordpress': 'can_configure_wordpress',
        'can_reset_passwords': 'can_reset_passwords',
        'can_upload_vcl': 'can_upload_vcl',
        'force_2fa': 'force_2fa',
        'force_sso': 'force_sso',
        'has_account_panel': 'has_account_panel',
        'has_improved_events': 'has_improved_events',
        'has_improved_ssl_config': 'has_improved_ssl_config',
        'has_openstack_logging': 'has_openstack_logging',
        'has_pci': 'has_pci',
        'has_pci_passwords': 'has_pci_passwords',
        'ip_whitelist': 'ip_whitelist',
        'legal_contact_id': 'legal_contact_id',
        'name': 'name',
        'owner_id': 'owner_id',
        'phone_number': 'phone_number',
        'postal_address': 'postal_address',
        'pricing_plan': 'pricing_plan',
        'pricing_plan_id': 'pricing_plan_id',
        'security_contact_id': 'security_contact_id',
        'technical_contact_id': 'technical_contact_id',
    }

    # Attributes that may only be set server-side (see __init__'s guard).
    read_only_vars = {'can_configure_wordpress', 'can_reset_passwords', 'can_upload_vcl', 'has_improved_ssl_config', 'has_pci_passwords'}
    _composed_schemas = {}

    _js_args_to_python_args
    def _from_openapi_data(cls, *args, **kwargs):
        """Build an instance from server data (read-only attributes allowed)."""
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        # Bypass __init__ and populate internals directly.
        self = super(OpenApiModel, cls).__new__(cls)
        if args:
            raise ApiTypeError(('Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments.' % (args, self.__class__.__name__)), path_to_item=_path_to_item, valid_classes=(self.__class__,))
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = (_visited_composed_classes + (self.__class__,))
        for (var_name, var_value) in kwargs.items():
            # Optionally drop unknown keys when the configuration says so.
            if ((var_name not in self.attribute_map) and (self._configuration is not None) and self._configuration.discard_unknown_keys and (self.additional_properties_type is None)):
                continue
            setattr(self, var_name, var_value)
        return self

    required_properties = set(['_data_store', '_check_type', '_spec_property_naming', '_path_to_item', '_configuration', '_visited_composed_classes'])

    _js_args_to_python_args
    def __init__(self, *args, **kwargs):
        """Client-side constructor; rejects read-only attributes."""
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        if args:
            raise ApiTypeError(('Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments.' % (args, self.__class__.__name__)), path_to_item=_path_to_item, valid_classes=(self.__class__,))
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = (_visited_composed_classes + (self.__class__,))
        for (var_name, var_value) in kwargs.items():
            if ((var_name not in self.attribute_map) and (self._configuration is not None) and self._configuration.discard_unknown_keys and (self.additional_properties_type is None)):
                continue
            setattr(self, var_name, var_value)
            # Read-only attributes may only come through _from_openapi_data.
            if (var_name in self.read_only_vars):
                raise ApiAttributeError(f'`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate class with read only attributes.')
class SpeakersCallDetail(ResourceDetail):
    """JSON:API detail resource for an event's speakers call.

    Both hooks resolve the SpeakersCall from an event identifier and rewrite
    the kwargs to carry its primary key before the data layer runs.  Kept
    as-is rather than restyled: ``data_layer``/``decorators`` are class
    attributes that reference the method objects defined above them, so
    statement order in the class body matters.
    """

    def before_patch(self, args, kwargs, data):
        # Resolve the call by event id and validate its dates against the event.
        if kwargs.get('event_id'):
            try:
                speakers_call = SpeakersCall.query.filter_by(event_id=kwargs['event_id']).one()
                event = speakers_call.event
                # A speakers call must close before the event starts.
                if ((speakers_call.starts_at > event.starts_at) or (speakers_call.ends_at > event.starts_at)):
                    raise ForbiddenError({'source': ''}, "Speakers call date can't be after the event start date")
            except NoResultFound:
                raise ObjectNotFound({'source': ''}, 'Object: not found')
            # Rewrite kwargs so the data layer patches by primary key.
            kwargs['id'] = speakers_call.id

    def before_get_object(self, view_kwargs):
        # Accept either an event identifier (slug) or an event id.
        if view_kwargs.get('event_identifier'):
            try:
                event = self.session.query(Event).filter_by(identifier=view_kwargs['event_identifier']).one()
            except NoResultFound:
                raise ObjectNotFound({'parameter': 'event_identifier'}, 'Event: {} not found'.format(view_kwargs['event_identifier']))
            else:
                view_kwargs['event_id'] = event.id
        if view_kwargs.get('event_id'):
            try:
                speakers_call = self.session.query(SpeakersCall).filter_by(event_id=view_kwargs['event_id']).one()
            except NoResultFound:
                raise ObjectNotFound({'parameter': 'event_identifier'}, 'Object: not found')
            view_kwargs['id'] = speakers_call.id

    # Only event co-organizers may modify/delete; reads are unrestricted here.
    decorators = (api.has_permission('is_coorganizer', fetch='event_id', model=SpeakersCall, methods='PATCH,DELETE'),)
    schema = SpeakersCallSchema
    data_layer = {'session': db.session, 'model': SpeakersCall, 'methods': {'before_get_object': before_get_object}}