code
stringlengths
281
23.7M
def update(moi_object_data, data, operator):
    """Apply a modification operator to a Django queryset of MOI objects.

    ``data['modificationList']`` is a pair of parallel lists: index 0 holds
    attribute names, index 1 holds the matching values (for ADD_VALUES /
    REMOVE_VALUES the values are primary keys of related model instances,
    looked up via ``globals()`` by capitalizing the attribute name).

    operator: one of 'REPLACE', 'ADD_VALUES', 'REMOVE_VALUES',
    'SET_TO_DEFAULT'.

    Side effects: updates the module-level ``modification_list`` and
    ``moi_object_list`` globals, mirroring the original implementation.
    """
    global modification_list
    global moi_object_list
    attr_names = data['modificationList'][0]
    attr_values = data['modificationList'][1]
    if operator != 'SET_TO_DEFAULT':
        # Build {attribute: value} once for the REPLACE/ADD/REMOVE paths.
        modification_list = dict(zip(attr_names, attr_values))
    if operator == 'REPLACE':
        moi_object_data.update(**modification_list)
    elif operator in ('ADD_VALUES', 'REMOVE_VALUES'):
        # ADD_VALUES and REMOVE_VALUES were duplicated blocks differing only
        # in the manager call; merged into one branch.
        moi_object_list = list(moi_object_data)
        for moi_object in moi_object_list:
            for attr_name, pk in zip(attr_names, attr_values):
                # Model class name is the attribute name with its first
                # letter upper-cased (e.g. 'nssi' -> 'Nssi').
                model_class_name = attr_name[0].upper() + attr_name[1:]
                related = globals()[model_class_name].objects.get(pk=pk)
                manager = getattr(moi_object, attr_name)
                if operator == 'ADD_VALUES':
                    manager.add(related)
                else:
                    manager.remove(related)
    elif operator == 'SET_TO_DEFAULT':
        moi_object_list = list(moi_object_data)
        for i in range(len(moi_object_list)):
            # NOTE(review): mirrors the original — indexes modificationList by
            # the *object* index i, which assumes one attribute entry per
            # object in the queryset; confirm against callers.
            default_value = moi_object_data.model._meta.get_field(attr_names[i]).default
            modification_list = {attr_names[i]: default_value}
            moi_object_data.update(**modification_list)
def extractTlbasilCom(item):
    """Parse a tlbasil.com feed item into a release message.

    Returns None for previews or items with no chapter/volume, a release
    message when a known tag matches, and False when nothing matches.
    """
    title = item['title']
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(title)
    if not (chp or vol) or 'preview' in title.lower():
        return None
    known_tags = (
        ('PRC', 'PRC', 'translated'),
        ('Loiterous', 'Loiterous', 'oel'),
    )
    for tag, series_name, tl_type in known_tags:
        if tag in item['tags']:
            return buildReleaseMessageWithType(
                item, series_name, vol, chp,
                frag=frag, postfix=postfix, tl_type=tl_type)
    return False
# Unit tests for FledgeMicroservice: abstract-method enforcement, successful
# construction with every management/storage client patched out, constructor
# exception logging, and the ping() handler (uptime computed from a patched
# time.time()).
# NOTE(review): this block was flattened by extraction — statements are jammed
# onto a few physical lines, and the leading '@allure' of the '.feature' /
# '.story' / '.asyncio' decorators appears truncated; code kept byte-identical.
# NOTE(review): `fm._core_management_host is 'corehost'` and
# `fm._name is 'sname'` compare identity against string literals and only pass
# via CPython interning — these should use `==`.
.feature('unit') .story('common', 'fledge-microservice') class TestFledgeMicroservice(): def test_constructor_abstract_method_missing(self): with pytest.raises(TypeError): fm = FledgeMicroservice() with pytest.raises(TypeError): class FledgeMicroserviceImp(FledgeMicroservice): pass fm = FledgeMicroserviceImp() with pytest.raises(TypeError): class FledgeMicroserviceImp(FledgeMicroservice): async def change(self): pass async def shutdown(self): pass fm = FledgeMicroserviceImp() with pytest.raises(TypeError): class FledgeMicroserviceImp(FledgeMicroservice): def run(self): pass async def shutdown(self): pass fm = FledgeMicroserviceImp() with pytest.raises(TypeError): class FledgeMicroserviceImp(FledgeMicroservice): def run(self): pass async def change(self): pass fm = FledgeMicroserviceImp() def test_constructor_good(self, loop): class FledgeMicroserviceImp(FledgeMicroservice): def __init__(self): super().__init__() def run(self): pass async def change(self): pass async def shutdown(self): pass async def get_track(self): pass async def add_track(self): pass with patch.object(asyncio, 'get_event_loop', return_value=loop): with patch.object(sys, 'argv', ['pytest', '--address', 'corehost', '--port', '32333', '--name', 'sname']): with patch.object(MicroserviceManagementClient, '__init__', return_value=None) as mmc_patch: with patch.object(MicroserviceManagementClient, 'create_configuration_category', return_value=None): with patch.object(MicroserviceManagementClient, 'create_child_category', return_value=None): with patch.object(MicroserviceManagementClient, 'get_configuration_category', return_value=_DEFAULT_CONFIG): with patch.object(ReadingsStorageClientAsync, '__init__', return_value=None) as rsc_async_patch: with patch.object(StorageClientAsync, '__init__', return_value=None) as sc_async_patch: with patch.object(FledgeMicroservice, '_make_microservice_management_app', return_value=None) as make_patch: with patch.object(FledgeMicroservice, 
'_run_microservice_management_app', side_effect=None) as run_patch: with patch.object(FledgeProcess, 'register_service_with_core', return_value={'id': 'bla'}) as reg_patch: with patch.object(FledgeMicroservice, '_get_service_registration_payload', return_value=None) as payload_patch: fm = FledgeMicroserviceImp() assert (fm._core_management_host is 'corehost') assert (fm._core_management_port == 32333) assert (fm._name is 'sname') assert hasattr(fm, '_core_microservice_management_client') assert hasattr(fm, '_readings_storage_async') assert hasattr(fm, '_storage_async') assert hasattr(fm, '_start_time') assert hasattr(fm, '_microservice_management_app') assert hasattr(fm, '_microservice_management_handler') assert hasattr(fm, '_microservice_management_server') assert hasattr(fm, '_microservice_management_host') assert hasattr(fm, '_microservice_management_port') assert hasattr(fm, '_microservice_id') assert hasattr(fm, '_type') assert hasattr(fm, '_protocol') def test_constructor_exception(self, loop): class FledgeMicroserviceImp(FledgeMicroservice): def __init__(self): super().__init__() def run(self): pass async def change(self): pass async def shutdown(self): pass async def get_track(self): pass async def add_track(self): pass with patch.object(asyncio, 'get_event_loop', return_value=loop): with patch.object(sys, 'argv', ['pytest', '--address', 'corehost', '--port', '32333', '--name', 'sname']): with patch.object(MicroserviceManagementClient, '__init__', return_value=None) as mmc_patch: with patch.object(MicroserviceManagementClient, 'create_configuration_category', return_value=None): with patch.object(MicroserviceManagementClient, 'create_child_category', return_value=None): with patch.object(MicroserviceManagementClient, 'get_configuration_category', return_value=_DEFAULT_CONFIG): with patch.object(ReadingsStorageClientAsync, '__init__', return_value=None) as rsc_async_patch: with patch.object(StorageClientAsync, '__init__', return_value=None) as 
sc_async_patch: with patch.object(FledgeMicroservice, '_make_microservice_management_app', side_effect=Exception()) as make_patch: with patch.object(_logger, 'exception') as logger_patch: with pytest.raises(Exception) as excinfo: fm = FledgeMicroserviceImp() args = logger_patch.call_args assert ('Unable to initialize FledgeMicroservice' == args[0][1]) .asyncio async def test_ping(self, loop): class FledgeMicroserviceImp(FledgeMicroservice): def __init__(self): super().__init__() def run(self): pass async def change(self): pass async def shutdown(self): pass async def get_track(self): pass async def add_track(self): pass with patch.object(asyncio, 'get_event_loop', return_value=loop): with patch.object(sys, 'argv', ['pytest', '--address', 'corehost', '--port', '32333', '--name', 'sname']): with patch.object(MicroserviceManagementClient, '__init__', return_value=None) as mmc_patch: with patch.object(MicroserviceManagementClient, 'create_configuration_category', return_value=None): with patch.object(MicroserviceManagementClient, 'create_child_category', return_value=None): with patch.object(MicroserviceManagementClient, 'get_configuration_category', return_value=_DEFAULT_CONFIG): with patch.object(ReadingsStorageClientAsync, '__init__', return_value=None) as rsc_async_patch: with patch.object(StorageClientAsync, '__init__', return_value=None) as sc_async_patch: with patch.object(FledgeMicroservice, '_make_microservice_management_app', return_value=None) as make_patch: with patch.object(FledgeMicroservice, '_run_microservice_management_app', side_effect=None) as run_patch: with patch.object(FledgeProcess, 'register_service_with_core', return_value={'id': 'bla'}) as reg_patch: with patch.object(FledgeMicroservice, '_get_service_registration_payload', return_value=None) as payload_patch: with patch.object(web, 'json_response', return_value=None) as response_patch: with patch.object(time, 'time', return_value=1) as time_patch: fm = FledgeMicroserviceImp() (await 
fm.ping(None)) response_patch.assert_called_once_with({'uptime': 0})
# Flet-based DatePicker user control. Supports single/multiple/range date
# selection, optional hour:minute picking, disabled date ranges (disable_to /
# disable_from), holiday highlighting, hiding prev/next-month days, a custom
# first weekday, an optional three-month layout, and locale-aware month names.
# NOTE(review): this block was flattened by extraction — statements are jammed
# onto a few physical lines; code kept byte-identical because the rendering
# logic is too order-sensitive to reflow safely here.
# NOTE(review): in __init__, `(selection_type if (not type(int)) else
# SelectionType.from_value(selection_type))` — `not type(int)` is always
# False, so the `from_value` branch is always taken; the condition was
# probably meant to test `isinstance(selection_type, int)` (or SelectionType).
# NOTE(review): `selected_data` appears before the class constants and simply
# returns `self.selected`; `_calculate_heigth` is a misspelling of
# "_calculate_height" kept for compatibility.
class DatePicker(UserControl): def selected_data(self): return self.selected PREV_MONTH = 'PM' NEXT_MONTH = 'NM' PREV_YEAR = 'PY' NEXT_YEAR = 'NY' PREV_HOUR = 'PH' NEXT_HOUR = 'NH' PREV_MINUTE = 'PMIN' NEXT_MINUTE = 'NMIN' EMPTY = '' WHITE_SPACE = ' ' DELTA_MONTH_WEEK = 5 DELTA_YEAR_WEEK = 52 DELTA_HOUR = 1 DELTA_MINUTE = 1 WEEKEND_DAYS = [5, 6] CELL_SIZE = 32 LAYOUT_WIDTH = 340 LAYOUT_MIN_HEIGHT = 280 LAYOUT_MAX_HEIGHT = 320 LAYOUT_DT_MIN_HEIGHT = 320 LAYOUT_DT_MAX_HEIGHT = 360 def __init__(self, hour_minute: bool=False, selected_date: (list[datetime] | None)=None, selection_type: (SelectionType | int)=SelectionType.SINGLE, disable_to: datetime=None, disable_from: datetime=None, holidays: list[datetime]=None, hide_prev_next_month_days: bool=False, first_weekday: int=0, show_three_months: bool=False, locale: str=None, on_change: Callable=None): super().__init__() self.selected = (selected_date if selected_date else []) self.selection_type = (selection_type if (not type(int)) else SelectionType.from_value(selection_type)) self.hour_minute = hour_minute self.disable_to = disable_to self.disable_from = disable_from self.holidays = holidays self.hide_prev_next_month_days = hide_prev_next_month_days self.first_weekday = first_weekday self.show_three_months = show_three_months if locale: loc.setlocale(loc.LC_ALL, locale) self.on_change = (on_change or (lambda x: None)) self.now = datetime.now() self.yy = self.now.year self.mm = self.now.month self.dd = self.now.day self.hour = self.now.hour self.minute = self.now.minute self.cal = calendar.Calendar(first_weekday) def _on_change(self, e) -> None: self.on_change(e) def _get_current_month(self, year, month): return self.cal.monthdatescalendar(year, month) def _create_calendar(self, year, month, hour, minute, hide_ymhm=False): week_rows_controls = [] week_rows_days_controls = [] today = datetime.now() days = self._get_current_month(year, month) ym = self._year_month_selectors(year, month, hide_ymhm) 
week_rows_controls.append(Column([ym], alignment=MainAxisAlignment.START)) labels = Row(self._row_labels(), spacing=18) week_rows_controls.append(Column([labels], alignment=MainAxisAlignment.START)) weeks_rows_num = len(self._get_current_month(year, month)) for w in range(0, weeks_rows_num): row = [] for d in days[w]: d = (datetime(d.year, d.month, d.day, self.hour, self.minute) if self.hour_minute else datetime(d.year, d.month, d.day)) month = d.month is_main_month = (True if (month == self.mm) else False) if (self.hide_prev_next_month_days and (not is_main_month)): row.append(Text('', width=self.CELL_SIZE, height=self.CELL_SIZE)) continue dt_weekday = d.weekday() day = d.day is_weekend = False is_holiday = False is_day_disabled = False if (self.disable_from and (self._reset_time(d) > self._reset_time(self.disable_from))): is_day_disabled = True if (self.disable_to and (self._reset_time(d) < self._reset_time(self.disable_to))): is_day_disabled = True text_color = None border_side = None bg = None if (dt_weekday in self.WEEKEND_DAYS): text_color = colors.RED_500 is_weekend = True if (self.holidays and (d in self.holidays)): text_color = colors.RED_500 is_holiday = True if (is_main_month and (day == self.dd) and (self.dd == today.day) and (self.mm == today.month) and (self.yy == today.year)): border_side = BorderSide(2, colors.BLUE) elif ((is_weekend or is_holiday) and ((not is_main_month) or is_day_disabled)): text_color = colors.RED_200 bg = None elif ((not is_main_month) and is_day_disabled): text_color = colors.BLACK38 bg = None elif (not is_main_month): text_color = colors.BLUE_200 bg = None else: bg = None selected_numbers = len(self.selected) if (self.selection_type != SelectionType.RANGE): if ((selected_numbers > 0) and (d in self.selected)): bg = colors.BLUE_400 text_color = colors.WHITE elif ((selected_numbers > 0) and (selected_numbers < 3) and (d in self.selected)): bg = colors.BLUE_400 text_color = colors.WHITE if ((self.selection_type == 
SelectionType.RANGE) and (selected_numbers > 1)): if ((d > self.selected[0]) and (d < self.selected[(- 1)])): bg = colors.BLUE_300 text_color = colors.WHITE row.append(TextButton(text=str(day), data=d, width=self.CELL_SIZE, height=self.CELL_SIZE, disabled=is_day_disabled, style=ButtonStyle(color=text_color, bgcolor=bg, padding=0, shape={MaterialState.DEFAULT: RoundedRectangleBorder(radius=20)}, side=border_side), on_click=self._select_date)) week_rows_days_controls.append(Row(row, spacing=18)) week_rows_controls.append(Column(week_rows_days_controls, alignment=MainAxisAlignment.START, spacing=0)) if (self.hour_minute and (not hide_ymhm)): hm = self._hour_minute_selector(hour, minute) week_rows_controls.append(Row([hm], alignment=MainAxisAlignment.CENTER)) return week_rows_controls def _year_month_selectors(self, year, month, hide_ymhm=False): prev_year = (IconButton(icon=icons.ARROW_BACK, data=self.PREV_YEAR, on_click=self._adjust_calendar) if (not hide_ymhm) else Text(self.EMPTY, height=self.CELL_SIZE)) next_year = (IconButton(icon=icons.ARROW_FORWARD, data=self.NEXT_YEAR, on_click=self._adjust_calendar) if (not hide_ymhm) else Text(self.EMPTY)) prev_month = (IconButton(icon=icons.ARROW_BACK, data=self.PREV_MONTH, on_click=self._adjust_calendar) if (not hide_ymhm) else Text(self.EMPTY)) next_month = (IconButton(icon=icons.ARROW_FORWARD, data=self.NEXT_MONTH, on_click=self._adjust_calendar) if (not hide_ymhm) else Text(self.EMPTY)) ym = Row([Row([prev_year, Text(year), next_year], spacing=0), Row([prev_month, Text(calendar.month_name[month], text_align=alignment.center), next_month], spacing=0)], spacing=0, alignment=MainAxisAlignment.SPACE_BETWEEN) return ym def _row_labels(self): label_row = [] days_label = calendar.weekheader(2).split(self.WHITE_SPACE) for i in range(0, self.first_weekday): days_label.append(days_label.pop(0)) for l in days_label: label_row.append(TextButton(text=l, width=self.CELL_SIZE, height=self.CELL_SIZE, disabled=True, 
style=ButtonStyle(padding=0, color=colors.BLACK, bgcolor=colors.GREY_300, shape={MaterialState.DEFAULT: RoundedRectangleBorder(radius=20)}))) return label_row def _hour_minute_selector(self, hour, minute): hm = Row([Row([IconButton(icon=icons.ARROW_BACK, data=self.PREV_HOUR, on_click=self._adjust_hh_min), Text(hour), IconButton(icon=icons.ARROW_FORWARD, data=self.NEXT_HOUR, on_click=self._adjust_hh_min)]), Text(':'), Row([IconButton(icon=icons.ARROW_BACK, data=self.PREV_MINUTE, on_click=self._adjust_hh_min), Text(minute), IconButton(icon=icons.ARROW_FORWARD, data=self.NEXT_MINUTE, on_click=self._adjust_hh_min)])], spacing=48, alignment=MainAxisAlignment.SPACE_EVENLY) return hm def build(self): rows = self._create_layout(self.yy, self.mm, self.hour, self.minute) cal_height = self._calculate_heigth(self.yy, self.mm) self.cal_container = Container(content=Row(rows), bgcolor=colors.WHITE, padding=12, height=self._cal_height(cal_height)) return self.cal_container def _calculate_heigth(self, year, month): if self.show_three_months: (prev, next) = self._prev_next_month(year, month) cal_height = max(len(self._get_current_month(year, month)), len(self._get_current_month(prev.year, prev.month)), len(self._get_current_month(next.year, next.month))) else: cal_height = len(self._get_current_month(year, month)) return cal_height def _create_layout(self, year, month, hour, minute): rows = [] (prev, next) = self._prev_next_month(year, month) if self.show_three_months: week_rows_controls_prev = self._create_calendar(prev.year, prev.month, hour, minute, True) rows.append(Column(week_rows_controls_prev, width=self.LAYOUT_WIDTH, spacing=10)) rows.append(VerticalDivider()) week_rows_controls = self._create_calendar(year, month, hour, minute) rows.append(Column(week_rows_controls, width=self.LAYOUT_WIDTH, spacing=10)) if self.show_three_months: rows.append(VerticalDivider()) week_rows_controls_next = self._create_calendar(next.year, next.month, hour, minute, True) 
rows.append(Column(week_rows_controls_next, width=self.LAYOUT_WIDTH, spacing=10)) return rows def _prev_next_month(self, year, month): delta = timedelta(weeks=self.DELTA_MONTH_WEEK) current = datetime(year, month, 15) prev = (current - delta) next = (current + delta) return (prev, next) def _select_date(self, e: ControlEvent): result: datetime = e.control.data if (self.selection_type == SelectionType.RANGE): if (len(self.selected) == 2): self.selected = [] if (len(self.selected) > 0): if (self.selected[0] == result): self.selected = [] elif (result > self.selected[0]): if (len(self.selected) == 1): self.selected.append(result) else: return else: return else: self.selected.append(result) elif (self.selection_type == SelectionType.MULTIPLE): if ((len(self.selected) > 0) and (result in self.selected)): self.selected.remove(result) else: if self.hour_minute: result = datetime(result.year, result.month, result.day, self.hour, self.minute) self.selected.append(result) elif ((len(self.selected) == 1) and (result in self.selected)): self.selected.remove(result) else: self.selected = [] if self.hour_minute: result = datetime(result.year, result.month, result.day, self.hour, self.minute) self.selected.append(result) self._on_change(self.selected) self._update_calendar() def _adjust_calendar(self, e: ControlEvent): if ((e.control.data == self.PREV_MONTH) or (e.control.data == self.NEXT_MONTH)): delta = timedelta(weeks=self.DELTA_MONTH_WEEK) if ((e.control.data == self.PREV_YEAR) or (e.control.data == self.NEXT_YEAR)): delta = timedelta(weeks=self.DELTA_YEAR_WEEK) if ((e.control.data == self.PREV_MONTH) or (e.control.data == self.PREV_YEAR)): self.now = (self.now - delta) if ((e.control.data == self.NEXT_MONTH) or (e.control.data == self.NEXT_YEAR)): self.now = (self.now + delta) self.mm = self.now.month self.yy = self.now.year self._update_calendar() def _adjust_hh_min(self, e: ControlEvent): if ((e.control.data == self.PREV_HOUR) or (e.control.data == self.NEXT_HOUR)): delta 
= timedelta(hours=self.DELTA_HOUR) if ((e.control.data == self.PREV_MINUTE) or (e.control.data == self.NEXT_MINUTE)): delta = timedelta(minutes=self.DELTA_MINUTE) if ((e.control.data == self.PREV_HOUR) or (e.control.data == self.PREV_MINUTE)): self.now = (self.now - delta) if ((e.control.data == self.NEXT_HOUR) or (e.control.data == self.NEXT_MINUTE)): self.now = (self.now + delta) self.hour = self.now.hour self.minute = self.now.minute self._update_calendar() def _update_calendar(self): self.cal_container.content = Row(self._create_layout(self.yy, self.mm, self.hour, self.minute)) cal_height = self._calculate_heigth(self.yy, self.mm) self.cal_container.height = self._cal_height(cal_height) self.update() def _cal_height(self, weeks_number): if self.hour_minute: return (self.LAYOUT_DT_MIN_HEIGHT if (weeks_number == 5) else self.LAYOUT_DT_MAX_HEIGHT) else: return (self.LAYOUT_MIN_HEIGHT if (weeks_number == 5) else self.LAYOUT_MAX_HEIGHT) def _reset_time(self, date): return date.replace(hour=0, minute=0, second=0, microsecond=0)
class Calibration():
    """Wrapper around an Azure Kinect (k4a) calibration handle.

    Provides 2D<->3D coordinate conversions between the depth and color
    cameras, lazy creation of a transformation handle, and access to
    intrinsic/extrinsic camera parameters via ``k4a_module``.
    """

    def __init__(self, handle: object, depth_mode: DepthMode, color_resolution: ColorResolution,
                 thread_safe: bool = True):
        self._calibration_handle = handle
        self._transformation_handle: Optional[object] = None
        self.thread_safe = thread_safe
        self._depth_mode = depth_mode
        self._color_resolution = color_resolution
        self._raw: Optional[str] = None

    @classmethod
    def from_raw(cls, value: str, depth_mode: DepthMode, color_resolution: ColorResolution,
                 thread_safe: bool = True) -> 'Calibration':
        """Build a Calibration from a raw calibration string.

        BUG FIX: the method takes ``cls`` as its first parameter but had no
        ``@classmethod`` decorator, so ``Calibration.from_raw(value, ...)``
        would have bound ``value`` to ``cls``; the decorator is restored.
        """
        (res, handle) = k4a_module.calibration_get_from_raw(thread_safe, value, depth_mode,
                                                            color_resolution)
        _verify_error(res)
        return Calibration(handle=handle, depth_mode=depth_mode,
                           color_resolution=color_resolution, thread_safe=thread_safe)

    # NOTE(review): these two accessors may have been @property in the
    # original (other decorators were stripped by extraction) — confirm
    # against callers before changing their call style.
    def depth_mode(self) -> DepthMode:
        return self._depth_mode

    def color_resolution(self) -> ColorResolution:
        return self._color_resolution

    def _convert_3d_to_3d(self, source_point_3d: Tuple[(float, float, float)],
                          source_camera: CalibrationType,
                          target_camera: CalibrationType) -> Tuple[(float, float, float)]:
        """Transform a 3D point from one camera's coordinate system to another's."""
        (res, target_point_3d) = k4a_module.calibration_3d_to_3d(
            self._calibration_handle, self.thread_safe, source_point_3d,
            source_camera, target_camera)
        _verify_error(res)
        return target_point_3d

    def depth_to_color_3d(self, point_3d: Tuple[(float, float, float)]) -> Tuple[(float, float, float)]:
        return self._convert_3d_to_3d(point_3d, CalibrationType.DEPTH, CalibrationType.COLOR)

    def color_to_depth_3d(self, point_3d: Tuple[(float, float, float)]) -> Tuple[(float, float, float)]:
        return self._convert_3d_to_3d(point_3d, CalibrationType.COLOR, CalibrationType.DEPTH)

    def _convert_2d_to_3d(self, source_pixel_2d: Tuple[(float, float)], source_depth: float,
                          source_camera: CalibrationType,
                          target_camera: CalibrationType) -> Tuple[(float, float, float)]:
        """Project a 2D pixel plus depth into a 3D point.

        Raises ValueError when the pixel is invalid in the calibration model.
        """
        (res, valid, target_point_3d) = k4a_module.calibration_2d_to_3d(
            self._calibration_handle, self.thread_safe, source_pixel_2d,
            source_depth, source_camera, target_camera)
        _verify_error(res)
        if valid == 0:
            raise ValueError(f'Coordinates {source_pixel_2d} are not valid in the calibration model')
        return target_point_3d

    def convert_2d_to_3d(self, coordinates: Tuple[(float, float)], depth: float,
                         source_camera: CalibrationType,
                         target_camera: Optional[CalibrationType] = None):
        """Like _convert_2d_to_3d, defaulting target_camera to source_camera."""
        if target_camera is None:
            target_camera = source_camera
        return self._convert_2d_to_3d(coordinates, depth, source_camera, target_camera)

    def _convert_3d_to_2d(self, source_point_3d: Tuple[(float, float, float)],
                          source_camera: CalibrationType,
                          target_camera: CalibrationType) -> Tuple[(float, float)]:
        """Project a 3D point to a 2D pixel; ValueError when outside the model."""
        (res, valid, target_px_2d) = k4a_module.calibration_3d_to_2d(
            self._calibration_handle, self.thread_safe, source_point_3d,
            source_camera, target_camera)
        _verify_error(res)
        if valid == 0:
            raise ValueError(f'Coordinates {source_point_3d} are not valid in the calibration model')
        return target_px_2d

    def convert_3d_to_2d(self, coordinates: Tuple[(float, float, float)],
                         source_camera: CalibrationType,
                         target_camera: Optional[CalibrationType] = None):
        """Like _convert_3d_to_2d, defaulting target_camera to source_camera."""
        if target_camera is None:
            target_camera = source_camera
        return self._convert_3d_to_2d(coordinates, source_camera, target_camera)

    def transformation_handle(self) -> object:
        """Lazily create and cache the k4a transformation handle."""
        if not self._transformation_handle:
            handle = k4a_module.transformation_create(self._calibration_handle, self.thread_safe)
            if not handle:
                raise K4AException('Cannot create transformation handle')
            self._transformation_handle = handle
        return self._transformation_handle

    def get_camera_matrix(self, camera: CalibrationType) -> np.ndarray:
        """Return the 3x3 pinhole camera matrix for the color or depth camera."""
        if camera not in [CalibrationType.COLOR, CalibrationType.DEPTH]:
            raise ValueError('Camera matrix only available for color and depth cameras.')
        params = k4a_module.calibration_get_intrinsics(self._calibration_handle,
                                                       self.thread_safe, camera)
        if len(params) != 14:
            raise ValueError('Unknown camera calibration type')
        (cx, cy, fx, fy) = params[:4]
        return np.array([[fx, 0, cx], [0, fy, cy], [0, 0, 1]])

    def get_distortion_coefficients(self, camera: CalibrationType) -> np.ndarray:
        """Return distortion coefficients for the color or depth camera.

        The reordering of params[13]/params[12] presumably maps the k4a
        intrinsics layout to OpenCV's coefficient order — confirm against the
        k4a intrinsics documentation.
        """
        if camera not in [CalibrationType.COLOR, CalibrationType.DEPTH]:
            raise ValueError('Distortion coefficients only available for color and depth cameras.')
        params = k4a_module.calibration_get_intrinsics(self._calibration_handle,
                                                       self.thread_safe, camera)
        if len(params) != 14:
            raise ValueError('Unknown camera calibration type')
        return np.array([params[4], params[5], params[13], params[12], *params[6:10]])

    def get_extrinsic_parameters(self, source_camera: CalibrationType,
                                 target_camera: CalibrationType) -> Tuple[(np.ndarray, np.ndarray)]:
        """Return (3x3 rotation, 1x3 translation) between two cameras.

        Translation is divided by 1000 (presumably mm -> m — confirm).
        """
        params = k4a_module.calibration_get_extrinsics(
            self._calibration_handle, self.thread_safe, source_camera, target_camera)
        rotation = np.reshape(np.array(params[0]), [3, 3])
        translation = np.reshape(np.array(params[1]), [1, 3]) / 1000
        return (rotation, translation)
class OptionPlotoptionsPolygonSonificationTracks(Options):
    """Option wrapper for Highcharts ``plotOptions.polygon.sonification.tracks``.

    BUG FIX: each getter/setter pair was emitted as two plain methods with
    the same name, so the setter silently shadowed the getter. The pairs are
    restored as properties with setters (decorators were stripped by
    extraction — cf. stray ``_property`` markers elsewhere in this dump).
    """

    @property
    def activeWhen(self) -> 'OptionPlotoptionsPolygonSonificationTracksActivewhen':
        # Sub-option object; read-only accessor.
        return self._config_sub_data('activeWhen', OptionPlotoptionsPolygonSonificationTracksActivewhen)

    @property
    def instrument(self):
        # Defaults to 'piano'.
        return self._config_get('piano')

    @instrument.setter
    def instrument(self, text: str):
        self._config(text, js_type=False)

    @property
    def mapping(self) -> 'OptionPlotoptionsPolygonSonificationTracksMapping':
        return self._config_sub_data('mapping', OptionPlotoptionsPolygonSonificationTracksMapping)

    @property
    def midiName(self):
        return self._config_get(None)

    @midiName.setter
    def midiName(self, text: str):
        self._config(text, js_type=False)

    @property
    def pointGrouping(self) -> 'OptionPlotoptionsPolygonSonificationTracksPointgrouping':
        return self._config_sub_data('pointGrouping', OptionPlotoptionsPolygonSonificationTracksPointgrouping)

    @property
    def roundToMusicalNotes(self):
        # Defaults to True.
        return self._config_get(True)

    @roundToMusicalNotes.setter
    def roundToMusicalNotes(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def showPlayMarker(self):
        # Defaults to True.
        return self._config_get(True)

    @showPlayMarker.setter
    def showPlayMarker(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def type(self):
        # Defaults to 'instrument'.
        return self._config_get('instrument')

    @type.setter
    def type(self, text: str):
        self._config(text, js_type=False)
def export_playlist_files(playlist, parent=None):
    """Ask the user for a directory and copy every file of *playlist* there.

    Does nothing when *playlist* is None. The copy starts once the user picks
    a destination in the directory chooser.
    """
    if playlist is None:
        return

    def _copy_to(destination_uri):
        # Accept either a playlist container or a plain iterable of tracks.
        if hasattr(playlist, 'get_playlist'):
            tracks = playlist.get_playlist()
        else:
            tracks = playlist
        locations = [track.get_loc_for_io() for track in tracks]
        copy_dialog = FileCopyDialog(
            locations, destination_uri,
            _('Exporting %s') % playlist.name, parent=parent)
        copy_dialog.do_copy()

    chooser = DirectoryOpenDialog(
        title=_('Choose directory to export files to'), parent=parent)
    chooser.set_select_multiple(False)
    chooser.connect('uris-selected', lambda widget, uris: _copy_to(uris[0]))
    chooser.run()
    chooser.destroy()
def extractMaynoveltranslationsWordpressCom(item):
    """Parse a maynoveltranslations.wordpress.com feed item into a release message.

    Returns None for previews or items with no chapter/volume, a release
    message when a known tag matches, and False when nothing matches.
    """
    title = item['title']
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(title)
    if not (chp or vol) or 'preview' in title.lower():
        return None
    known_tags = (
        ('Rebirth of Chen An', 'Rebirth of Chen An', 'translated'),
        ('Rebirth of Brotherly Love', 'Rebirth of Brotherly Love', 'translated'),
        ('PRC', 'PRC', 'translated'),
        ('Loiterous', 'Loiterous', 'oel'),
    )
    for tag, series_name, tl_type in known_tags:
        if tag in item['tags']:
            return buildReleaseMessageWithType(
                item, series_name, vol, chp,
                frag=frag, postfix=postfix, tl_type=tl_type)
    return False
class OptionPlotoptionsSplineSonificationTracksPointgrouping(Options):
    """Option wrapper for Highcharts ``plotOptions.spline.sonification.tracks.pointGrouping``.

    BUG FIX: each getter/setter pair was emitted as two plain methods with
    the same name, so the setter silently shadowed the getter. The pairs are
    restored as properties with setters (decorators were stripped by
    extraction — cf. stray ``_property`` markers elsewhere in this dump).
    """

    @property
    def algorithm(self):
        # Defaults to 'minmax'.
        return self._config_get('minmax')

    @algorithm.setter
    def algorithm(self, text: str):
        self._config(text, js_type=False)

    @property
    def enabled(self):
        # Defaults to True.
        return self._config_get(True)

    @enabled.setter
    def enabled(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def groupTimespan(self):
        # Defaults to 15.
        return self._config_get(15)

    @groupTimespan.setter
    def groupTimespan(self, num: float):
        self._config(num, js_type=False)

    @property
    def prop(self):
        # Defaults to 'y'.
        return self._config_get('y')

    @prop.setter
    def prop(self, text: str):
        self._config(text, js_type=False)
# Deep Q-Network model wrapper for Atari (online net plus optional target net
# for double-DQN), flattened onto one line by extraction — kept byte-identical
# because the true decorator cannot be recovered from here.
# NOTE(review): the bare `_method(batch_size=256)` before act() is a truncated
# decorator (the `@`-prefix and module were lost, e.g. a remote/batched-method
# decorator) — restore from the original source.
# NOTE(review): act() returns a 3-tuple (action, q, v) but is annotated as
# Tuple[(torch.Tensor, torch.Tensor)]; the annotation appears stale.
class AtariDQNModel(DQNModel): def __init__(self, num_actions: int, network: str='nature', dueling_dqn: bool=True, spectral_norm: bool=True, double_dqn: bool=False) -> None: super().__init__() self._num_actions = num_actions self._network = network.lower() self._dueling_dqn = dueling_dqn self._spectral_norm = spectral_norm self._double_dqn = double_dqn self._online_net = AtariDQNNet(num_actions, network=network, dueling_dqn=dueling_dqn, spectral_norm=spectral_norm) self._target_net = (copy.deepcopy(self._online_net) if double_dqn else None) def init_model(self) -> None: self._online_net.init_model() if (self._target_net is not None): self._target_net.init_model() def forward(self, observation: torch.Tensor) -> torch.Tensor: return self._online_net(observation) def q(self, s: torch.Tensor, a: torch.Tensor) -> torch.Tensor: q = self._online_net(s) q = q.gather(dim=(- 1), index=a) return q _method(batch_size=256) def act(self, observation: torch.Tensor, eps: torch.Tensor) -> Tuple[(torch.Tensor, torch.Tensor)]: with torch.no_grad(): q = self._online_net(observation) (_, action_dim) = q.size() greedy_action = q.argmax((- 1), keepdim=True) pi = (torch.ones_like(q) * (eps / action_dim)) pi.scatter_(dim=(- 1), index=greedy_action, src=(1.0 - ((eps * (action_dim - 1)) / action_dim))) action = pi.multinomial(1) v = self._value(observation, q) q = q.gather(dim=(- 1), index=action) return (action, q, v) def sync_target_net(self) -> None: if (self._target_net is not None): self._target_net.load_state_dict(self._online_net.state_dict()) def _value(self, observation: torch.Tensor, q: Optional[torch.Tensor]=None) -> torch.Tensor: if (q is None): q = self._online_net(observation) if (not self._double_dqn): v = q.max((- 1), keepdim=True)[0] else: a = q.argmax((- 1), keepdim=True) q = self._target_net(observation) v = q.gather(dim=(- 1), index=a) return v
class UdpTransport():
    """Transport that ships events to a host:port endpoint over UDP.

    A fresh socket is opened for every send() call and torn down afterwards
    (``_keep_connection`` is False), after waiting for the kernel write
    buffer to drain where that can be checked via fcntl/TIOCOUTQ.
    """

    _keep_connection = False

    def __init__(self, host, port, timeout=TimeoutNotSet, **kwargs):
        self._host = host
        self._port = port
        self._timeout = timeout
        self._sock = None

    def send(self, events, use_logging=False):
        """Send each event as its own UDP datagram, closing the socket afterwards."""
        self._create_socket()
        try:
            self._send(events)
        finally:
            self._close()

    def _create_socket(self):
        # Reuse an already-open socket if one exists.
        if self._sock is not None:
            return
        sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
        if self._timeout is not TimeoutNotSet:
            sock.settimeout(self._timeout)
        self._sock = sock

    def _send(self, events):
        for single_event in events:
            self._send_via_socket(single_event)

    def _send_via_socket(self, data):
        payload = self._convert_data_to_send(data)
        self._sock.sendto(payload, (self._host, self._port))

    def _convert_data_to_send(self, data):
        # Datagrams must be bytes; encode text payloads as UTF-8.
        if isinstance(data, bytes):
            return data
        return bytes(data, 'utf-8')

    def _close(self, force=False):
        # Keep the socket open only when configured to and not forced.
        if self._keep_connection and not force:
            return
        if self._sock:
            self._wait_for_socket_buffer_empty()
            self._sock.shutdown(socket.SHUT_WR)
            self._sock.close()
            self._sock = None

    def _wait_for_socket_buffer_empty(self):
        # Poll until the outgoing buffer drains or the timeout elapses.
        wait_timeout = constants.SOCKET_CLOSE_WAIT_TIMEOUT
        poll_interval = 0.05
        waited = 0
        while waited < wait_timeout and not self._is_sock_write_buff_empty():
            waited += poll_interval
            time.sleep(poll_interval)

    def _is_sock_write_buff_empty(self):
        # Without fcntl (non-Unix platforms) assume the buffer is empty.
        if fcntl is None:
            return True
        packed_zero = struct.pack('I', 0)
        ioctl_result = fcntl.ioctl(self._sock.fileno(), termios.TIOCOUTQ, packed_zero)
        pending_bytes = struct.unpack('I', ioctl_result)[0]
        return not pending_bytes

    def close(self):
        """Force-close the socket regardless of ``_keep_connection``."""
        self._close(force=True)
class OptionSeriesColumnrangeSonificationTracksMappingLowpassFrequency(Options):
    """Option wrapper for Highcharts ``series.columnrange.sonification.tracks.mapping.lowpass.frequency``.

    BUG FIX: each getter/setter pair was emitted as two plain methods with
    the same name, so the setter silently shadowed the getter. The pairs are
    restored as properties with setters (decorators were stripped by
    extraction — cf. stray ``_property`` markers elsewhere in this dump).
    """

    @property
    def mapFunction(self):
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        return self._config_get(None)

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def within(self):
        return self._config_get(None)

    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)
class OptionSeriesFunnel3dSonificationTracksPointgrouping(Options):
    """Option wrapper for Highcharts ``series.funnel3d.sonification.tracks.pointGrouping``.

    BUG FIX: each getter/setter pair was emitted as two plain methods with
    the same name, so the setter silently shadowed the getter. The pairs are
    restored as properties with setters (decorators were stripped by
    extraction — cf. stray ``_property`` markers elsewhere in this dump).
    """

    @property
    def algorithm(self):
        # Defaults to 'minmax'.
        return self._config_get('minmax')

    @algorithm.setter
    def algorithm(self, text: str):
        self._config(text, js_type=False)

    @property
    def enabled(self):
        # Defaults to True.
        return self._config_get(True)

    @enabled.setter
    def enabled(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def groupTimespan(self):
        # Defaults to 15.
        return self._config_get(15)

    @groupTimespan.setter
    def groupTimespan(self, num: float):
        self._config(num, js_type=False)

    @property
    def prop(self):
        # Defaults to 'y'.
        return self._config_get('y')

    @prop.setter
    def prop(self, text: str):
        self._config(text, js_type=False)
# NOTE(review): a decorator preceded this test ('.signal_handling' — the
# leading '@pytest.mark...' was truncated by extraction); restore it from the
# original source.
def test_dispatching_by_message_attribute_succeeds_with_job_args_and_job_kwargs(fake_sqs_queue):
    """Dispatch by the 'work_type' message attribute, passing both positional
    job args (the message body) and keyword job args (category)."""
    queue = fake_sqs_queue
    message_attr = {'work_type': {'DataType': 'String', 'StringValue': 'a'}}
    queue.send_message(MessageBody=1234, MessageAttributes=message_attr)
    dispatcher = SQSWorkDispatcher(queue, worker_process_name='Test Worker Process',
                                   long_poll_seconds=1, monitor_sleep_time=1)

    def one_work(task_id, category):
        assert (task_id == 1234)
        assert (category == 'one work')
        queue_in_use = fake_sqs_queue
        queue_in_use.send_message(MessageBody=1)

    def two_work(task_id, category):
        assert (task_id == 1234)
        assert (category == 'two work')
        queue_in_use = fake_sqs_queue
        queue_in_use.send_message(MessageBody=2)

    def work_one_or_two(message):
        msg_attr = message.message_attributes
        if (msg_attr and (msg_attr.get('work_type', {}).get('StringValue') == 'a')):
            return {'_job': one_work, '_job_args': (message.body,), 'category': 'one work'}
        else:
            # BUG FIX: the original returned one_work here with category
            # 'two work'; one_work asserts category == 'one work', so that
            # path could never succeed — return two_work instead.
            return {'_job': two_work, '_job_args': (message.body,), 'category': 'two work'}

    dispatcher.dispatch_by_message_attribute(work_one_or_two)
    dispatcher._worker_process.join(5)
    # Only the worker's own confirmation message (body 1) should remain.
    messages = queue.receive_messages(WaitTimeSeconds=1, MaxNumberOfMessages=10)
    assert (len(messages) == 1)
    assert (messages[0].body == 1)
    assert (dispatcher._worker_process.exitcode == 0)
class TlsConfigurationsResponse(ModelComposed):
    """Composed OpenAPI model for a paginated TLS-configurations response.

    Generated-client style (openapi-generator). NOTE(review): the bare
    `_property` and `_js_args_to_python_args` names below look like stripped
    decorators (presumably `@cached_property` and
    `@convert_js_args_to_python_args`) lost during extraction — confirm
    against the generator template before relying on this block.
    """

    # Enum constraints and property validations (none declared for this model).
    allowed_values = {}
    validations = {}

    _property
    def additional_properties_type():
        """Types accepted for undeclared (additional) properties."""
        lazy_import()
        return (bool, date, datetime, dict, float, int, list, str, none_type)

    # Whether the model itself may be null.
    _nullable = False

    _property
    def openapi_types():
        """Map declared property name -> tuple of accepted types."""
        lazy_import()
        return {'links': (PaginationLinks,), 'meta': (PaginationMeta,), 'data': ([TlsConfigurationResponseData],)}

    _property
    def discriminator():
        # No polymorphic discriminator for this model.
        return None

    # JSON attribute-name mapping (identity mapping here).
    attribute_map = {'links': 'links', 'meta': 'meta', 'data': 'data'}

    read_only_vars = {}

    _js_args_to_python_args
    def _from_openapi_data(cls, *args, **kwargs):
        """Alternate constructor used when deserializing data received from the server."""
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        # Build the instance directly (bypasses __init__'s read-only checks).
        self = super(OpenApiModel, cls).__new__(cls)
        if args:
            raise ApiTypeError(('Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments.' % (args, self.__class__.__name__)), path_to_item=_path_to_item, valid_classes=(self.__class__,))
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        # Track visited classes to break recursion through composed schemas.
        self._visited_composed_classes = (_visited_composed_classes + (self.__class__,))
        constant_args = {'_check_type': _check_type, '_path_to_item': _path_to_item, '_spec_property_naming': _spec_property_naming, '_configuration': _configuration, '_visited_composed_classes': self._visited_composed_classes}
        composed_info = validate_get_composed_info(constant_args, kwargs, self)
        self._composed_instances = composed_info[0]
        self._var_name_to_model_instances = composed_info[1]
        self._additional_properties_model_instances = composed_info[2]
        discarded_args = composed_info[3]
        for (var_name, var_value) in kwargs.items():
            # Optionally drop keys the composed schemas did not claim.
            if ((var_name in discarded_args) and (self._configuration is not None) and self._configuration.discard_unknown_keys and self._additional_properties_model_instances):
                continue
            setattr(self, var_name, var_value)
        return self

    # Internal attribute names that must never be treated as model properties.
    required_properties = set(['_data_store', '_check_type', '_spec_property_naming', '_path_to_item', '_configuration', '_visited_composed_classes', '_composed_instances', '_var_name_to_model_instances', '_additional_properties_model_instances'])

    _js_args_to_python_args
    def __init__(self, *args, **kwargs):
        """Public constructor; rejects positional args and read-only attributes."""
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        if args:
            raise ApiTypeError(('Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments.' % (args, self.__class__.__name__)), path_to_item=_path_to_item, valid_classes=(self.__class__,))
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = (_visited_composed_classes + (self.__class__,))
        constant_args = {'_check_type': _check_type, '_path_to_item': _path_to_item, '_spec_property_naming': _spec_property_naming, '_configuration': _configuration, '_visited_composed_classes': self._visited_composed_classes}
        composed_info = validate_get_composed_info(constant_args, kwargs, self)
        self._composed_instances = composed_info[0]
        self._var_name_to_model_instances = composed_info[1]
        self._additional_properties_model_instances = composed_info[2]
        discarded_args = composed_info[3]
        for (var_name, var_value) in kwargs.items():
            if ((var_name in discarded_args) and (self._configuration is not None) and self._configuration.discard_unknown_keys and self._additional_properties_model_instances):
                continue
            setattr(self, var_name, var_value)
            # Read-only attributes may only be set via _from_openapi_data.
            if (var_name in self.read_only_vars):
                raise ApiAttributeError(f'`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate class with read only attributes.')

    _property
    def _composed_schemas():
        # allOf composition: Pagination + this model's own properties.
        lazy_import()
        return {'anyOf': [], 'allOf': [Pagination, TlsConfigurationsResponseAllOf], 'oneOf': []}
def extractSimplypinkDreamwidthOrg(item):
    """Build a release message for simplypink.dreamwidth.org feed items.

    Returns None for previews or items without a chapter/volume, a release
    message when a known tag matches, and False otherwise.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if 'preview' in item['title'].lower() or not (chp or vol):
        return None
    # (feed tag, series name, translation type)
    tagmap = [
        ('how to divorce the male lead', 'how to divorce the male lead', 'translated'),
        ('PRC', 'PRC', 'translated'),
        ('Loiterous', 'Loiterous', 'oel'),
    ]
    for tagname, name, tl_type in tagmap:
        if tagname in item['tags']:
            return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
# NOTE(review): '.django_db' looks like a stripped '@pytest.mark.django_db'
# decorator lost in extraction — confirm and restore.
.django_db def test_tas_unparsable_no_sub(client, monkeypatch, elasticsearch_award_index, award_with_tas):
    """A TAS filter containing an unparsable component must be rejected with HTTP 422."""
    _setup_es(client, monkeypatch, elasticsearch_award_index)
    # '011-2000/2000-0990' is presumably not a parsable TAS string — verify intent.
    resp = query_by_tas(client, {'require': [['011', '011-0990', '011-2000/2000-0990']]})
    assert (resp.status_code == status.HTTP_422_UNPROCESSABLE_ENTITY), 'Failed to return 422 Response'
class SimpleVizGroup(lg.Group):
    """Labgraph group wiring a random-message topic into a bar plot shown in a window."""

    INPUT = lg.Topic(RandomMessage)
    PLOT: BarPlot
    WINDOW: Window

    def setup(self) -> None:
        """Configure the bar plot and hand it to the window."""
        labels = {
            'setLabels': {
                'title': 'A really cool title',
                'left': "I'm a label on the left!",
                'bottom': "I'm a label on the bottom!",
            }
        }
        plot_config = BarPlotConfig(x_field='domain', y_field='range', style=labels)
        self.PLOT.configure(plot_config)
        self.WINDOW.PLOT = self.PLOT

    def connections(self) -> lg.Connections:
        """Route the group's input topic into the plot."""
        return ((self.INPUT, self.PLOT.INPUT),)
def _create_asset(name, asset_type, parent_name, data_dict, iam_policy_dict): resource = {'name': name, 'asset_type': asset_type, 'resource': {'data': data_dict}} if parent_name: resource['resource']['parent'] = parent_name resource_data = json.dumps(resource, separators=(',', ':'), sort_keys=True) if iam_policy_dict: iam_policy = {'name': name, 'asset_type': asset_type, 'iam_policy': iam_policy_dict} iam_policy_data = json.dumps(iam_policy, separators=(',', ':'), sort_keys=True) else: iam_policy_data = None return (resource_data, iam_policy_data)
def _get_service(masteruri, name, wait=True, check_host=True):
    """Look up services whose name ends with `name` on the given ROS master.

    :param masteruri: XML-RPC URI of the ROS master to query.
    :param name: service name suffix to match.
    :param wait: if True, retry (1 s sleep) until a match is found or rospy shuts down.
    :param check_host: if True, only accept services provided from the master's own host.
    :return: list of matching service names (possibly empty when wait is False).
    """
    result = []
    # Retry until a service is found, unless rospy is shutting down.
    while ((not result) and (not rospy.is_shutdown())):
        master = xmlrpcclient.ServerProxy(masteruri)
        (code, msg, val) = master.getSystemState(rospy.get_name())
        if (code == 1):
            (pubs, subs, srvs) = val
            own_host = get_hostname(masteruri)
            # Hosts of matching services that were rejected by the host check.
            nodes_host = []
            for (srv, providers) in srvs:
                if srv.endswith(name):
                    if check_host:
                        # Resolve the provider URI to compare its host with ours.
                        (code, msg, val) = master.lookupService(rospy.get_name(), srv)
                        if (code == 1):
                            hode_host = get_hostname(val)
                            if (hode_host == own_host):
                                result.append(srv)
                            else:
                                nodes_host.append(hode_host)
                    else:
                        result.append(srv)
            if ((not result) and wait):
                rospy.logwarn(("master_discovery node appear not to running %s, only found on %s. Wait for service '%s' %s." % (own_host, nodes_host, name, own_host)))
                time.sleep(1)
        elif ((not result) and wait):
            # Master state query failed; warn and retry.
            rospy.logwarn(("can't get state from ROS master: %s, %s" % (code, msg)))
            time.sleep(1)
        if (not wait):
            # Single-shot mode: return whatever was found in the first pass.
            return result
    return result
class EnvisageEngine(Engine):
    """Mayavi engine embedded in an Envisage workbench window.

    NOTE(review): the bare `_trait_change('...')` expressions below look like
    stripped `@on_trait_change('...')` decorators lost during extraction —
    confirm against the original source.
    """

    # Serialized-state version marker.
    __version__ = 0

    # The workbench window this engine is attached to.
    window = Instance(WorkbenchWindow)

    name = Str('Mayavi Envisage Engine')

    def __get_pure_state__(self):
        # Exclude the window from the pickled state; it cannot be persisted.
        d = super(EnvisageEngine, self).__get_pure_state__()
        for x in ['window']:
            d.pop(x, None)
        return d

    def start(self):
        """Adopt the window's existing scenes and listen for scene additions/removals."""
        if self.running:
            return
        scene_manager = self.window.get_service(ISceneManager)
        for scene in scene_manager.scenes:
            self.add_scene(scene)
        scene_manager.on_trait_change(self._scene_editors_changed, 'scenes_items')
        super(EnvisageEngine, self).start()
        logger.debug(' EnvisageEngine started ')

    def stop(self):
        super(EnvisageEngine, self).stop()

    def new_scene(self, viewer=None, name=None, **kwargs):
        """Open a new scene editor and return the newly created scene."""
        action = NewScene(window=self.window)
        editor = action.perform(None)
        if (name is not None):
            editor.name = name
        # Flush pending UI events so self.scenes reflects the new editor.
        GUI.process_events()
        return self.scenes[(- 1)]

    def close_scene(self, scene):
        """Close the editor that is displaying the given scene."""
        active_window = self.window
        s = scene.scene
        # Iterate a copy: closing an editor mutates the editors list.
        for editor in active_window.editors[:]:
            if isinstance(editor, scene_editor.SceneEditor):
                # Identity comparison: same underlying TVTK scene object.
                if (id(editor.scene) == id(s)):
                    editor.close()
                    break
        GUI.process_events()

    def _scene_editors_changed(self, list_event):
        # Keep the engine's scene list in sync with the scene manager.
        for scene in list_event.removed:
            self.remove_scene(scene)
        for scene in list_event.added:
            self.add_scene(scene)

    _trait_change('window:opened')
    def _on_window_opened(self, obj, trait_name, old, new):
        if (trait_name == 'opened'):
            self.start()

    _trait_change('window:closed')
    def _on_window_closed(self, obj, trait_name, old, new):
        if (trait_name == 'closed'):
            self.stop()

    def _window_changed(self, old, new):
        # Start immediately if the new window already has a scene manager service.
        sm = new.get_service(ISceneManager)
        if (sm is not None):
            self.start()

    _trait_change('window:editors[]')
    def _sync_scene_editor_name(self, obj, trait_name, old, new):
        """Copy scene names onto their matching editors and keep them synchronized."""
        if trait_name.startswith('editors'):
            # Reverse so the most recently added scene is matched first.
            scenes = list(self.scenes)
            scenes.reverse()
            for editor in new:
                if (not hasattr(editor, 'scene')):
                    continue
                for scene in scenes:
                    if (id(editor.scene) == id(scene.scene)):
                        editor.name = scene.name
                        scene.sync_trait('name', editor, 'name')
                        break
def test_internal_segy_import_full_vs_partial():
    """Full-trace and partial-trace SEG-Y imports must yield identical attributes.

    CLEANUP: the original bound ``segyfile = xtgeo._XTGeoFile(SFILE5)`` and then
    immediately shadowed the name with the ``with`` target below; the dead
    assignment has been removed.
    """
    with segyio.open(SFILE5, 'r') as segyfile:
        attrs1 = _import_segy_all_traces(segyfile)
    with segyio.open(SFILE5, 'r') as segyfile:
        attrs2 = _import_segy_incomplete_traces(segyfile)
    for (key, val) in attrs1.items():
        if isinstance(val, np.ndarray):
            np.testing.assert_array_equal(val, attrs2[key])
        else:
            assert (val == attrs2[key])
def match_ip(address, pattern) -> bool:
    """Return True when `address` matches `pattern`.

    `pattern` may be a single IP literal or a CIDR network (contains '/').
    Any unparsable address or pattern simply yields False.
    """
    try:
        candidate = ipaddress.ip_address(address)
    except ValueError:
        # Not a valid IP literal.
        return False
    try:
        if '/' not in pattern:
            return candidate == ipaddress.ip_address(pattern)
        # strict=False tolerates host bits set in the network pattern.
        return candidate in ipaddress.ip_network(pattern, strict=False)
    except ValueError:
        return False
class AsyncCollection():
    """Async facade binding a storage backend to one named collection.

    Every call simply forwards to the backend with the collection name filled in.
    """

    def __init__(self, storage_backend: AbstractStorageBackend, collection_name: str) -> None:
        self._storage_backend = storage_backend
        self._collection_name = collection_name

    async def put(self, object_id: str, object_body: JSON_TYPES) -> None:
        """Store `object_body` under `object_id` in this collection."""
        backend = self._storage_backend
        return await backend.put(self._collection_name, object_id, object_body)

    async def get(self, object_id: str) -> Optional[JSON_TYPES]:
        """Fetch the object stored under `object_id`, or None if absent."""
        backend = self._storage_backend
        return await backend.get(self._collection_name, object_id)

    async def remove(self, object_id: str) -> None:
        """Delete the object stored under `object_id`."""
        backend = self._storage_backend
        return await backend.remove(self._collection_name, object_id)

    async def find(self, field: str, equals: EQUALS_TYPE) -> List[OBJECT_ID_AND_BODY]:
        """Return (id, body) pairs whose `field` equals the given value."""
        backend = self._storage_backend
        return await backend.find(self._collection_name, field, equals)

    async def list(self) -> List[OBJECT_ID_AND_BODY]:
        """Return every (id, body) pair in this collection."""
        backend = self._storage_backend
        return await backend.list(self._collection_name)
def _clean_code_of_thread_stuff(code):
    """Strip Python-threading constructs out of transpiled JavaScript source.

    Rewrites compiled ``with self._lock:`` blocks
    (``this._lock.__enter__ ... try { body } finally { ... }``) into just the
    body, then neutralizes thread-id / thread-local helper calls so the result
    behaves in single-threaded JS.

    NOTE(review): relies on `_exit_scope` (defined elsewhere in this file) to
    return the position after a closing brace plus the enclosed text.
    """
    parts = []
    i = iend = 0
    while True:
        # Next compiled `with self._lock:` entry point.
        i0 = code.find('this._lock.__enter__', i)
        if (i0 < 0):
            break
        i = code.find('try {', i0)
        # `sub` is the guarded body we want to keep.
        (i, sub) = _exit_scope(code, i)
        i = code.find('finally {', i)
        # Skip the finally-block (lock release) entirely.
        (i, _) = _exit_scope(code, i)
        parts.append(code[iend:i0])
        parts.append('/* with lock */\n')
        parts.append(sub)
        iend = i
    parts.append(code[iend:])
    jscode = ''.join(parts)
    # Disable thread bookkeeping helpers and flatten thread-local access.
    jscode = jscode.replace('this._ensure_thread_', '//this._ensure_thread_')
    jscode = jscode.replace('threading.get_ident()', '0')
    jscode = jscode.replace('._local.', '.')
    jscode = jscode.replace('this._thread_match(true);\n', '')
    jscode = jscode.replace('if (_pyfunc_truthy(this._thread_match(false)))', '')
    # Any remaining thread-match call always succeeds.
    jscode = jscode.replace('this._thread_match', '(function() { return true})')
    return jscode
class OptionSeriesFunnel3dDataDragdropDraghandle(Options):
    """`series.funnel3d.data.dragDrop.dragHandle` options wrapper.

    BUG FIX: getter/setter pairs were plain same-named `def`s, so each getter
    was silently shadowed (F811). Restored as ``@property`` pairs —
    NOTE(review): confirm against the generator template.
    """

    @property
    def className(self):
        return self._config_get('highcharts-drag-handle')

    @className.setter
    def className(self, text: str):
        self._config(text, js_type=False)

    @property
    def color(self):
        return self._config_get('#fff')

    @color.setter
    def color(self, text: str):
        self._config(text, js_type=False)

    @property
    def lineColor(self):
        return self._config_get('rgba(0, 0, 0, 0.6)')

    @lineColor.setter
    def lineColor(self, text: str):
        self._config(text, js_type=False)

    @property
    def lineWidth(self):
        return self._config_get(1)

    @lineWidth.setter
    def lineWidth(self, num: float):
        self._config(num, js_type=False)

    @property
    def zIndex(self):
        return self._config_get(901)

    @zIndex.setter
    def zIndex(self, num: float):
        self._config(num, js_type=False)
def test_main_one_file_protein():
    """hicMergeDomains smoke test: merge TAD domains with a protein (CTCF) file."""
    merged_output = NamedTemporaryFile(suffix='.txt', delete=True)
    cli_args = '-d {} -om {} -p {} '.format(
        ROOT + 'hicMergeDomains/10kbtad_domains.bed',
        merged_output.name,
        ROOT + 'hicMergeDomains/ctcf_sorted.bed',
    ).split()
    compute(hicMergeDomains.main, cli_args, 5)
    # delta=2: tolerate small line-count differences against the reference file.
    are_files_equal(merged_output.name, ROOT + 'hicMergeDomains/one_file', delta=2)
class NaiveAlgorithm(BaseAlgorithm):
    """Item-based collaborative filtering computed naively (no precomputation tricks).

    Item-item similarity uses `sim_distance`; recommendations are
    similarity-weighted averages of a user's existing ratings.
    """

    # Similarity function applied between two items' rating vectors.
    similarity = sim_distance

    def top_matches(self, prefs, p1):
        # Similarity of p1 against every other item in prefs.
        return [(p2, self.similarity(prefs[p1], prefs[p2])) for p2 in prefs if (p2 != p1)]

    def calculate_similarities(self, vote_list, verbose=0):
        """Return (item, [(other_item, similarity), ...]) pairs for every item."""
        itemPrefs = convert_vote_list_to_itemprefs(vote_list)
        itemMatch = {}
        for item in itemPrefs:
            itemMatch[item] = self.top_matches(itemPrefs, item)
        iteritems = itemMatch.items()
        return iteritems

    def get_recommended_items(self, vote_list, itemMatch, itemIgnored, user):
        """Score unseen items for `user` by similarity-weighted ratings.

        Returns a lazy generator of (item, score) pairs, or [] for unknown users.
        """
        prefs = convert_vote_list_to_userprefs(vote_list)
        itemMatch = dict(itemMatch)
        if (user in prefs):
            userRatings = prefs[user]
            scores = defaultdict(int)
            totalSim = defaultdict(int)
            for (item, rating) in userRatings.items():
                for (item2, similarity) in itemMatch[item]:
                    # Skip items the user explicitly ignored.
                    if ((user.pk in itemIgnored) and (item2 in itemIgnored[user.pk])):
                        continue
                    # Only unrated items count; NaN similarities are dropped.
                    if ((not math.isnan(similarity)) and (item2 not in userRatings)):
                        scores[item2] += (similarity * rating)
                        totalSim[item2] += similarity
            # Normalize each score by the total similarity mass behind it.
            rankings = ((item, (score / totalSim[item])) for (item, score) in scores.items() if (totalSim[item] != 0))
            return rankings
        return []

    def calculate_recommendations(self, vote_list, itemMatch, itemIgnored):
        """Compute (user, rankings) for every distinct user in vote_list."""
        recommendations = []
        users = set(map((lambda x: x[0]), vote_list))
        for user in users:
            rankings = self.get_recommended_items(vote_list, itemMatch, itemIgnored, user)
            recommendations.append((user, rankings))
        return recommendations
class EventCounter(models.Model):
    """Named persistent counter row.

    `value` holds the running count; `updated` is refreshed automatically on save.
    """

    id = models.AutoField(primary_key=True, serialize=False, verbose_name='ID')
    name = models.CharField(max_length=255, unique=True)
    value = models.BigIntegerField(default=0)
    updated = models.DateTimeField(db_index=True, auto_now=True)

    @classmethod
    def get_or_create(cls, name):
        """Fetch the counter row for `name`, creating it if absent.

        BUG FIX: the method receives `cls` but was not declared a classmethod
        (decorator missing), so calling it on the class would have bound the
        instance as `cls`. Restored the `@classmethod` decorator.
        """
        try:
            en = cls.objects.get(name=name)
        except cls.DoesNotExist:
            try:
                en = cls(name=name)
                en.save()
            except IntegrityError:
                # Lost a creation race: another process inserted the row first.
                en = cls.objects.get(name=name)
        return en
def create_command_line(options: Options, tmpdir: str, tmp2dir: typing.Optional[str], dstdir: str, farmer_public_key: typing.Optional[str], pool_public_key: typing.Optional[str], pool_contract_address: typing.Optional[str]) -> typing.List[str]:
    """Assemble the `chia plots create` argv for one plotting job.

    Optional arguments (second temp dir, keys, contract address) are appended
    only when provided; boolean options `-e`/`-x` come from `options`.
    """
    command = [
        options.executable, 'plots', 'create',
        '-k', str(options.k),
        '-r', str(options.n_threads),
        '-u', str(options.n_buckets),
        '-b', str(options.job_buffer),
        '-t', tmpdir,
        '-d', dstdir,
    ]
    if options.e:
        command.append('-e')
    if options.x:
        command.append('-x')
    if tmp2dir is not None:
        command.extend(['-2', tmp2dir])
    if farmer_public_key is not None:
        command.extend(['-f', farmer_public_key])
    if pool_public_key is not None:
        command.extend(['-p', pool_public_key])
    if pool_contract_address is not None:
        command.extend(['-c', pool_contract_address])
    return command
class TestGMSH(unittest.TestCase):
    """Mesh-generation regression tests: run gmsh, convert to simplex files,
    and compare entity counts against stored reference values within THRESHOLD.
    """

    def test_gmsh_generation_2D(self):
        """2D PSLG square -> .geo -> gmsh msh2 -> node/edge/ele counts."""
        domain = Domain.PlanarStraightLineGraphDomain()
        domain.vertices = [[0.0, 0.0, 0.0], [5.0, 0.0, 0.0], [5.0, 5.0, 0.0], [0.0, 5.0, 0.0]]
        domain.segments = [[0, 1], [1, 2], [2, 3], [3, 0]]
        domain.facets = [[[0, 1, 2, 3]]]
        domain.writeGeo('gmsh_mesh_test_2D', he_max=0.1)
        # -2: generate a 2D mesh; msh2 format is required by msh2simplex.
        gmsh_cmd = 'gmsh {0:s} -v 10 -2 -o {1:s} -format msh2'.format((domain.geofile + '.geo'), (domain.geofile + '.msh'))
        check_call(gmsh_cmd, shell=True)
        MeshTools.msh2simplex(domain.geofile, nd=2)
        # Each file's first token is the entity count; compare relative error.
        with open('gmsh_mesh_test_2D.node', 'r') as nodefile:
            numNodes = int(nodefile.readline().split(' ', 1)[0])
            assert (abs((1.0 - old_div(numNodes, numNodes_reference_2D))) < THRESHOLD)
        with open('gmsh_mesh_test_2D.edge', 'r') as edgefile:
            numEdges = int(edgefile.readline().split(' ', 1)[0])
            assert (abs((1.0 - old_div(numEdges, numEdges_reference_2D))) < THRESHOLD)
        with open('gmsh_mesh_test_2D.ele', 'r') as elefile:
            numElements = int(elefile.readline().split(' ', 1)[0])
            assert (abs((1.0 - old_div(numElements, numElements_reference_2D))) < THRESHOLD)

    def test_gmsh_generation_3D(self):
        """3D PLC cube -> .geo -> gmsh msh2 -> node/edge/face/ele counts."""
        domain = Domain.PiecewiseLinearComplexDomain()
        cube = st.Cuboid(domain, dim=[2.0, 2.0, 2.0])
        st.assembleDomain(domain)
        domain.writeGeo('gmsh_mesh_test_3D', he_max=0.1)
        gmsh_cmd = 'gmsh {0:s} -v 10 -3 -o {1:s} -format msh2'.format((domain.geofile + '.geo'), (domain.geofile + '.msh'))
        check_call(gmsh_cmd, shell=True)
        MeshTools.msh2simplex(domain.geofile, nd=3)
        with open('gmsh_mesh_test_3D.node', 'r') as nodefile:
            assert (old_div(abs((int(nodefile.readline().split()[0]) - numNodes_reference_3D)), numNodes_reference_3D) < THRESHOLD)
        with open('gmsh_mesh_test_3D.edge', 'r') as edgefile:
            assert (old_div(abs((int(edgefile.readline().split()[0]) - numEdges_reference_3D)), numEdges_reference_3D) < THRESHOLD)
        with open('gmsh_mesh_test_3D.face', 'r') as facefile:
            assert (old_div(abs((int(facefile.readline().split()[0]) - numFaces_reference_3D)), numFaces_reference_3D) < THRESHOLD)
        with open('gmsh_mesh_test_3D.ele', 'r') as elefile:
            assert (old_div(abs((int(elefile.readline().split()[0]) - numElements_reference_3D)), numElements_reference_3D) < THRESHOLD)
class ResidualGroup(nn.Module):
    """Residual group: a stack of RCAB blocks plus one conv, wrapped in a long
    skip connection.

    NOTE(review): the `act` and `res_scale` parameters are accepted but the
    RCAB blocks are built with hard-coded `act=nn.ReLU(True)` and `res_scale=1`,
    exactly as in the original — confirm whether that is intentional.
    """

    def __init__(self, bam, conv, n_feat, kernel_size, reduction, act, res_scale, n_resblocks):
        super(ResidualGroup, self).__init__()
        blocks = []
        for _ in range(n_resblocks):
            blocks.append(RCAB(bam, conv, n_feat, kernel_size, reduction, bias=True, bn=False, act=nn.ReLU(True), res_scale=1))
        # Trailing conv keeps the channel count unchanged.
        blocks.append(conv(n_feat, n_feat, kernel_size))
        self.body = nn.Sequential(*blocks)

    def forward(self, x):
        # Long skip connection around the whole group.
        return self.body(x) + x
def test_time_modulation():
    """Continuous-wave time modulation: amplitude at one period and max modulation."""
    sample_time = 1 / FREQ_MODULATE
    assert isclose(np.real(CW.amp_time(sample_time)), AMP_TIME)
    assert isclose(CW.max_modulation, AMP_TIME)
    # A 45-degree phase with amplitude 10 gives a real part of 10*sqrt(2)/2.
    shifted = CW.updated_copy(phase=np.pi / 4, amplitude=10)
    assert isclose(np.real(shifted.amp_time(sample_time)), np.sqrt(2) / 2 * 10)
    assert isclose(shifted.max_modulation, 10)
class SalesReceipt(DeleteMixin, QuickbooksPdfDownloadable, QuickbooksManagedObject, QuickbooksTransactionEntity, LinkedTxnMixin, VoidMixin):
    """QuickBooks Online SalesReceipt transaction object."""

    # Nested single-object fields -> their deserialization classes.
    class_dict = {'DepartmentRef': Ref, 'CurrencyRef': Ref, 'TxnTaxDetail': TxnTaxDetail, 'DepositToAccountRef': Ref, 'CustomerRef': Ref, 'BillAddr': Address, 'ShipAddr': Address, 'ClassRef': Ref, 'BillEmail': EmailAddress, 'PaymentMethodRef': Ref, 'ShipMethodRef': Ref}
    # List-valued fields -> element classes.
    list_dict = {'CustomField': CustomField, 'Line': DetailLine, 'LinkedTxn': LinkedTxn}
    detail_dict = {}
    qbo_object_name = 'SalesReceipt'

    def __init__(self):
        """Initialize every QBO field to its neutral/default value."""
        super(SalesReceipt, self).__init__()
        # Scalar fields.
        self.DocNumber = ''
        self.TxnDate = ''
        self.PrivateNote = ''
        self.ShipDate = ''
        self.TrackingNum = ''
        self.TotalAmt = 0
        self.PrintStatus = 'NotSet'
        self.EmailStatus = 'NotSet'
        self.Balance = 0
        self.PaymentRefNum = ''
        self.ApplyTaxAfterDiscount = False
        self.ExchangeRate = 1
        self.GlobalTaxCalculation = 'TaxExcluded'
        # Optional nested objects (populated on read / before save).
        self.CustomerMemo = None
        self.DeliveryInfo = None
        self.CreditCardPayment = None
        self.TxnSource = None
        self.DepartmentRef = None
        self.CurrencyRef = None
        self.TxnTaxDetail = None
        self.DepositToAccountRef = None
        self.BillAddr = None
        self.ShipAddr = None
        self.ShipMethodRef = None
        self.BillEmail = None
        self.CustomerRef = None
        self.ClassRef = None
        self.PaymentMethodRef = None
        # List-valued fields.
        self.CustomField = []
        self.Line = []
        self.LinkedTxn = []

    def __str__(self):
        # String form is just the receipt total.
        return str(self.TotalAmt)
class OptionSeriesPolygonSonificationDefaultinstrumentoptionsMappingLowpass(Options):
    """`series.polygon.sonification.defaultInstrumentOptions.mapping.lowpass` sub-options."""

    def frequency(self) -> 'OptionSeriesPolygonSonificationDefaultinstrumentoptionsMappingLowpassFrequency':
        """Lowpass filter frequency sub-options."""
        return self._config_sub_data('frequency', OptionSeriesPolygonSonificationDefaultinstrumentoptionsMappingLowpassFrequency)

    def resonance(self) -> 'OptionSeriesPolygonSonificationDefaultinstrumentoptionsMappingLowpassResonance':
        """Lowpass filter resonance sub-options."""
        return self._config_sub_data('resonance', OptionSeriesPolygonSonificationDefaultinstrumentoptionsMappingLowpassResonance)
class DefinedStyleItems():
    """Reusable style snippets derived from the page theme."""

    def __init__(self, page: primitives.HtmlModel, component: primitives.HtmlModel=None):
        self.page = page
        self.component = component

    def selected_text_background_color(self, background=None, color=None):
        """CSS for selected text; defaults come from the page theme palette."""
        bg = background or self.page.theme.colors[0]
        fg = color or self.page.theme.greys[(- 1)]
        return {'background': ('%s !IMPORTANT' % bg), 'color': ('%s !IMPORTANT' % fg)}
def create_initial_df(input_filename, groups, cols, aggregations, fit_distribution=False):
    """Load a CSV and aggregate `cols` by `groups`.

    :param input_filename: path to the input CSV file.
    :param groups: column names to group by.
    :param cols: value columns to aggregate.
    :param aggregations: aggregation names applied to every value column.
    :param fit_distribution: when True, additionally fit the best distribution
        per group/column via `best_fit_distribution` and record its name/params.
    :return: flat DataFrame with '<col>_<agg>' columns (NaN replaced by None).
    """
    df_raw = pd.read_csv(input_filename)
    all_cols = (groups.copy() + cols)
    csv_headers = []
    for c in cols:
        for a in aggregations:
            csv_headers.append(f'{c}_{a}')
    dis_df = df_raw[all_cols].copy()
    df = df_raw[all_cols].groupby(groups).agg(aggregations)
    # Round-trip through CSV to flatten the MultiIndex aggregation columns
    # into flat '<col>_<agg>' names.
    s_buf = io.StringIO()
    df.to_csv(s_buf, header=csv_headers)
    s_buf.seek(0)
    df = pd.read_csv(s_buf)
    # BUG FIX: the `pandas.np` alias (`pd.np`) was removed in pandas 1.0;
    # use numpy directly (np is already used elsewhere in this function).
    df = df.replace({np.nan: None})
    if (fit_distribution is True):
        dis_df = dis_df.groupby(groups)
        for key in dis_df.groups.keys():
            for c in cols:
                data = dis_df.get_group(key)[c]
                (best_fit_name, best_fit_params) = best_fit_distribution(data)
                # Row mask: rows of the flattened frame matching this group key.
                index = (df[groups] == pd.DataFrame([key], columns=groups).loc[0]).apply((lambda x: np.all(x)), axis=1)
                print(((('Column: ' + c) + ' is best fit for ') + best_fit_name))
                df.loc[(index, (c + '_best_fit_distribution'))] = best_fit_name
                df.loc[(index, (c + '_fit_parameter'))] = ','.join(map(str, best_fit_params))
    return df
def veryfi_invoice_parser(original_response: dict) -> InvoiceParserDataClass:
    """Map a raw Veryfi OCR invoice response onto the provider-agnostic dataclasses."""
    vendor = original_response['vendor']
    bill_to = original_response['bill_to']
    ship_to = original_response['ship_to']

    ship_name = ship_to['name']
    ship_address = ship_to['address']
    if ship_name is not None and ship_address is not None:
        # NOTE(review): concatenated without a separator, as in the original.
        ship_address = ship_name + ship_address

    customer_information = CustomerInformationInvoice(
        customer_name=bill_to['name'],
        customer_address=bill_to['address'],
        customer_email=None,
        customer_id=original_response['account_number'],
        customer_tax_id=bill_to['vat_number'],
        customer_mailing_address=None,
        customer_billing_address=bill_to['address'],
        customer_shipping_address=ship_address,
        customer_service_address=None,
        customer_remittance_address=None,
        abn_number=None,
        gst_number=None,
        pan_number=None,
        vat_number=None,
    )
    merchant_information = MerchantInformationInvoice(
        merchant_name=vendor['name'],
        merchant_address=vendor['address'],
        merchant_phone=vendor['phone_number'],
        merchant_email=vendor['email'],
        merchant_tax_id=vendor['vat_number'],
        merchant_website=vendor['web'],
        merchant_fax=vendor['fax_number'],
        merchant_siren=None,
        merchant_siret=None,
        abn_number=None,
        gst_number=None,
        pan_number=None,
        vat_number=None,
    )
    bank_informations = BankInvoice(
        account_number=vendor['account_number'],
        iban=vendor['iban'],
        swift=vendor['bank_swift'],
        vat_number=vendor['vat_number'],
        bsb=None,
        sort_code=None,
        rooting_number=None,
    )
    item_lines = [
        ItemLinesInvoice(
            description=item['description'],
            quantity=item['quantity'],
            discount=item['discount'],
            unit_price=item['price'],
            tax_item=item['tax'],
            tax_rate=item['tax_rate'],
            amount=item['total'],
            date_item=item['date'],
            product_code=item['sku'],
        )
        for item in original_response['line_items']
    ]
    info_invoice = [
        InfosInvoiceParserDataClass(
            customer_information=customer_information,
            merchant_information=merchant_information,
            taxes=[TaxesInvoice(value=original_response['tax'], rate=None)],
            invoice_total=original_response['total'],
            invoice_subtotal=original_response['subtotal'],
            invoice_number=original_response['invoice_number'],
            date=original_response['date'],
            purchase_order=original_response['purchase_order_number'],
            item_lines=item_lines,
            locale=LocaleInvoice(currency=original_response['currency_code'], language=None),
            bank_informations=bank_informations,
        )
    ]
    return InvoiceParserDataClass(extracted_data=info_invoice)
class MessageRemoval(Status):
    """Status payload asking the destination channel to remove `message`."""

    def __init__(self, source_channel: 'Channel', destination_channel: 'Channel', message: 'Message'):
        self.source_channel: 'Channel' = source_channel
        self.destination_channel: 'Channel' = destination_channel
        self.message: 'Message' = message
        # Fail fast on malformed payloads.
        self.verify()

    def __str__(self):
        return '<MessageRemoval: {s.message}; {s.source_channel.channel_name} -> {s.destination_channel.channel_name}>'.format(s=self)

    def verify(self):
        """Assert the payload carries valid channels and an addressable message."""
        assert isinstance(self.source_channel, Channel), f'Source channel {self.source_channel!r} is not valid.'
        assert isinstance(self.destination_channel, Channel), f'Destination channel {self.destination_channel!r} is not valid.'
        assert isinstance(self.message, Message), f'Message {self.message!r} is not valid.'
        assert (self.message.chat.module_id and self.message.chat.uid and self.message.uid), f'Message does not contain the minimum information required: {self.message!r}'

    def __getstate__(self):
        # Channels are not picklable; persist the source channel by its ID.
        state = super(MessageRemoval, self).__getstate__()
        if (state['source_channel'] is not None):
            state['source_channel'] = state['source_channel'].channel_id
        return state

    def __setstate__(self, state: Dict[(str, Any)]):
        super(MessageRemoval, self).__setstate__(state)
        # Resolve the stored channel ID back to a live channel, if available.
        with suppress(NameError):
            resolved = coordinator.get_module_by_id(state['source_channel'])
            if isinstance(resolved, Channel):
                self.source_channel = resolved
def range_zero_one_observation_value_range_test(wrapper_factory: Callable[([], ObservationNormalizationWrapper)]) -> None:
    """Check that every normalized observation stays within [0, 1] over 10 steps."""
    env: ObservationNormalizationWrapper = wrapper_factory()

    def within_unit_interval(obs):
        return np.all(0.0 <= obs) and np.all(obs <= 1.0)

    conduct_observation_normalization_test(env, within_unit_interval, n_steps=10)
    env.close()
# NOTE(review): the '.parametrize(...)' lines below appear to be stripped
# '@pytest.mark.parametrize' decorators lost in extraction — confirm and restore.
.parametrize('params', (['t', 'u'], ['u', 't'])) .parametrize('levels', ([500, 850], [850, 500])) .parametrize('source_name', ['indexed-directory']) def test_indexing_order_by_with_keyword(params, levels, source_name):
    """Ensure an indexed source honors a keyword 'order_by' for variable/level."""
    # FIXME: 'date=' lost its value during extraction (currently a syntax
    # error) — restore the original date literal from the source repository.
    request = dict(variable=params, level=levels, date=, time='1200')
    request['order_by'] = dict(level=levels, variable=params)
    (ds, _, total, n) = get_fixtures(source_name, request)
    assert (len(ds) == n), len(ds)
    check_sel_and_order(ds, params, levels)
# NOTE(review): '_log_on_failure_all' / '_log_on_failure' below look like
# stripped '@libp2p_log_on_failure_all' / '@libp2p_log_on_failure' decorators
# lost in extraction — confirm and restore.
_log_on_failure_all
class TestLibp2pConnectionRelayNodeRestartIncomingEnvelopes(BaseTestLibp2pRelay):
    """Envelopes routed through a relay node must still be delivered after the
    relay's multiplexer is disconnected and reconnected (relay restart).

    Topology: genesis <- relay <- {connection, connection2}; the two leaf
    connections are non-relaying and reach the network only via the relay.
    """

    _log_on_failure
    def setup(self):
        super().setup()
        # Genesis node: the network bootstrap peer.
        temp_dir_gen = os.path.join(self.t, 'temp_dir_gen')
        os.mkdir(temp_dir_gen)
        self.genesis = _make_libp2p_connection(data_dir=temp_dir_gen, port=(DEFAULT_PORT + 1), build_directory=self.t)
        self.multiplexer_genesis = Multiplexer([self.genesis], protocols=[DefaultMessage])
        self.multiplexer_genesis.connect()
        self.log_files.append(self.genesis.node.log_file)
        self.multiplexers.append(self.multiplexer_genesis)
        genesis_peer = self.genesis.node.multiaddrs[0]
        # Persist the relay's key so it keeps its identity across restarts.
        file = 'node_key'
        make_crypto(DEFAULT_LEDGER).dump(file)
        self.relay_key_path = file
        # Relay node, joined to genesis.
        temp_dir_rel = os.path.join(self.t, 'temp_dir_rel')
        os.mkdir(temp_dir_rel)
        self.relay = _make_libp2p_connection(data_dir=temp_dir_rel, port=(DEFAULT_PORT + 2), entry_peers=[genesis_peer], node_key_file=self.relay_key_path, build_directory=self.t)
        self.multiplexer_relay = Multiplexer([self.relay], protocols=[DefaultMessage])
        self.multiplexer_relay.connect()
        self.log_files.append(self.relay.node.log_file)
        self.multiplexers.append(self.multiplexer_relay)
        relay_peer = self.relay.node.multiaddrs[0]
        # Two non-relaying leaf connections behind the relay.
        temp_dir_1 = os.path.join(self.t, 'temp_dir_1')
        os.mkdir(temp_dir_1)
        self.connection = _make_libp2p_connection(data_dir=temp_dir_1, port=(DEFAULT_PORT + 3), relay=False, entry_peers=[relay_peer], build_directory=self.t)
        self.multiplexer = Multiplexer([self.connection], protocols=[DefaultMessage])
        self.multiplexer.connect()
        self.log_files.append(self.connection.node.log_file)
        self.multiplexers.append(self.multiplexer)
        temp_dir_2 = os.path.join(self.t, 'temp_dir_2')
        os.mkdir(temp_dir_2)
        self.connection2 = _make_libp2p_connection(data_dir=temp_dir_2, port=(DEFAULT_PORT + 4), relay=False, entry_peers=[relay_peer], build_directory=self.t)
        self.multiplexer2 = Multiplexer([self.connection2], protocols=[DefaultMessage])
        self.multiplexer2.connect()
        self.log_files.append(self.connection2.node.log_file)
        self.multiplexers.append(self.multiplexer2)

    def test_connection_is_established(self):
        assert (self.relay.is_connected is True)
        assert (self.connection.is_connected is True)
        assert (self.connection2.is_connected is True)

    def test_envelope_routed_from_peer_after_relay_restart(self):
        """Genesis -> leaf delivery keeps working after the relay restarts."""
        addr_1 = self.genesis.address
        addr_2 = self.connection.address
        msg = DefaultMessage(dialogue_reference=('', ''), message_id=1, target=0, performative=DefaultMessage.Performative.BYTES, content=b'hello')
        envelope = Envelope(to=addr_2, sender=addr_1, protocol_specification_id=DefaultMessage.protocol_specification_id, message=DefaultSerializer().encode(msg))
        self.multiplexer_genesis.put(envelope)
        delivered_envelope = self.multiplexer.get(block=True, timeout=20)
        assert (delivered_envelope is not None)
        assert (delivered_envelope.to == envelope.to)
        assert (delivered_envelope.sender == envelope.sender)
        assert (delivered_envelope.protocol_specification_id == envelope.protocol_specification_id)
        assert (delivered_envelope.message_bytes == envelope.message_bytes)
        # Restart the relay's multiplexer.
        self.multiplexer_relay.disconnect()
        self.change_state_and_wait(self.multiplexer_relay, expected_is_connected=False)
        self.multiplexer_relay = Multiplexer([self.relay], protocols=[DefaultMessage])
        self.multiplexer_relay.connect()
        self.change_state_and_wait(self.multiplexer_relay, expected_is_connected=True)
        self.multiplexers.append(self.multiplexer_relay)
        msg = DefaultMessage(dialogue_reference=('', ''), message_id=1, target=0, performative=DefaultMessage.Performative.BYTES, content=b'helloAfterRestart')
        envelope = Envelope(to=addr_2, sender=addr_1, protocol_specification_id=DefaultMessage.protocol_specification_id, message=DefaultSerializer().encode(msg))
        # Allow the restarted relay time to rejoin the DHT before sending.
        time.sleep(10)
        self.multiplexer_genesis.put(envelope)
        delivered_envelope = self.multiplexer.get(block=True, timeout=20)
        assert (delivered_envelope is not None)
        assert (delivered_envelope.to == envelope.to)
        assert (delivered_envelope.sender == envelope.sender)
        assert (delivered_envelope.protocol_specification_id == envelope.protocol_specification_id)
        assert (delivered_envelope.message_bytes == envelope.message_bytes)

    def test_envelope_routed_from_client_after_relay_restart(self):
        """Leaf -> leaf delivery keeps working after the relay restarts."""
        addr_1 = self.connection.address
        addr_2 = self.connection2.address
        msg = DefaultMessage(dialogue_reference=('', ''), message_id=1, target=0, performative=DefaultMessage.Performative.BYTES, content=b'hello')
        envelope = Envelope(to=addr_1, sender=addr_2, protocol_specification_id=DefaultMessage.protocol_specification_id, message=DefaultSerializer().encode(msg))
        self.multiplexer2.put(envelope)
        delivered_envelope = self.multiplexer.get(block=True, timeout=20)
        assert (delivered_envelope is not None)
        assert (delivered_envelope.to == envelope.to)
        assert (delivered_envelope.sender == envelope.sender)
        assert (delivered_envelope.protocol_specification_id == envelope.protocol_specification_id)
        assert (delivered_envelope.message_bytes == envelope.message_bytes)
        # Restart the relay's multiplexer.
        self.multiplexer_relay.disconnect()
        self.change_state_and_wait(self.multiplexer_relay, expected_is_connected=False)
        self.multiplexer_relay = Multiplexer([self.relay], protocols=[DefaultMessage])
        self.multiplexer_relay.connect()
        self.change_state_and_wait(self.multiplexer_relay, expected_is_connected=True)
        self.multiplexers.append(self.multiplexer_relay)
        msg = DefaultMessage(dialogue_reference=('', ''), message_id=1, target=0, performative=DefaultMessage.Performative.BYTES, content=b'helloAfterRestart')
        envelope = Envelope(to=addr_1, sender=addr_2, protocol_specification_id=DefaultMessage.protocol_specification_id, message=DefaultSerializer().encode(msg))
        time.sleep(10)
        self.multiplexer2.put(envelope)
        delivered_envelope = self.multiplexer.get(block=True, timeout=20)
        assert (delivered_envelope is not None)
        assert (delivered_envelope.to == envelope.to)
        assert (delivered_envelope.sender == envelope.sender)
        assert (delivered_envelope.protocol_specification_id == envelope.protocol_specification_id)
        assert (delivered_envelope.message_bytes == envelope.message_bytes)
def test_condition_else_int():
    """A conditional whose else-branch fails returns the matching branch's value."""

    def multiplier_3(my_input: int) -> float:
        # Name the two range predicates instead of inlining them in the chain.
        in_unit_interval = (my_input >= 0) & (my_input < 1.0)
        in_open_decade = (my_input > 1.0) & (my_input < 10.0)
        return (
            conditional('fractions')
            .if_(in_unit_interval)
            .then(double(n=my_input))
            .elif_(in_open_decade)
            .then(square(n=my_input))
            .else_()
            .fail('The input must be between 0 and 10')
        )

    assert multiplier_3(my_input=0) == 0
('/')
def index(search_service: SearchService = Provide[Container.search_service],
          default_query: str = Provide[Container.config.default.query],
          default_limit: int = Provide[Container.config.default.limit.as_int()]):
    """Render the repository search page.

    Query-string parameters ``query`` and ``limit`` override the injected
    configuration defaults.
    """
    # NOTE(review): the bare ('/') above looks like a route decorator whose
    # callable was lost in extraction -- confirm against the original file.
    query = request.args.get('query', default_query)
    limit = request.args.get('limit', default_limit, int)
    found_repositories = search_service.search_repositories(query, limit)
    return render_template('index.html', query=query, limit=limit,
                           repositories=found_repositories)
class OptionSeriesFunnel3dSonificationContexttracksMappingLowpass(Options):
    """Lowpass-filter mapping options for funnel3d sonification context tracks."""

    # NOTE(review): these accessors read like stripped @property getters
    # (pattern elsewhere in this file) -- confirm against the original source.
    def frequency(self) -> 'OptionSeriesFunnel3dSonificationContexttracksMappingLowpassFrequency':
        """Sub-configuration for the lowpass filter frequency."""
        return self._config_sub_data(
            'frequency',
            OptionSeriesFunnel3dSonificationContexttracksMappingLowpassFrequency)

    def resonance(self) -> 'OptionSeriesFunnel3dSonificationContexttracksMappingLowpassResonance':
        """Sub-configuration for the lowpass filter resonance."""
        return self._config_sub_data(
            'resonance',
            OptionSeriesFunnel3dSonificationContexttracksMappingLowpassResonance)
class ForkingWSGIServer(ForkingMixIn, BaseWSGIServer):
    """WSGI server that serves each request from a forked child process."""

    multiprocess = True

    def __init__(self, host, port, app, processes=40, handler=None,
                 passthrough_errors=False, ssl_context=None, fd=None):
        # Let the base server do all socket/app wiring, then cap the number
        # of concurrent forked children.
        BaseWSGIServer.__init__(self, host, port, app, handler,
                                passthrough_errors, ssl_context, fd)
        self.max_children = processes
class PathConverterExtension(Extension):
    """Markdown extension that rewrites src/href paths in the rendered output."""

    def __init__(self, *args, **kwargs):
        # Each config entry is [default value, help text], per the Extension
        # configuration protocol.
        self.config = {
            'base_path': ['', 'Base path used to find files - Default: ""'],
            'relative_path': ['', 'Path that files will be relative to (not needed if using absolute) - Default: ""'],
            'absolute': [False, 'Paths are absolute by default; disable for relative - Default: False'],
            'tags': ['img script a link', "tags to convert src and/or href in - Default: 'img scripts a link'"],
            'file_scheme': [False, 'Use file:// scheme for absolute paths - Default: False'],
        }
        super().__init__(*args, **kwargs)

    def extendMarkdown(self, md):
        """Register the path-converting postprocessor on *md*."""
        postprocessor = PathConverterPostprocessor(md)
        postprocessor.config = self.getConfigs()
        md.postprocessors.register(postprocessor, 'path-converter', 2)
        md.registerExtension(self)
def test_empty_graph_with_conditions_2():
    """Removing empty blocks from two stacked diamond patterns empties the CFG."""
    graph = ControlFlowGraph()
    blocks = [
        BasicBlock(0, instructions=[Branch(Condition(OperationType.less, [variable('a'), Constant(2)]))]),
        BasicBlock(1, instructions=[Branch(Condition(OperationType.less, [variable('b'), Constant(2)]))]),
        BasicBlock(2, instructions=[]),
        BasicBlock(3, instructions=[]),
        BasicBlock(4, instructions=[]),
        BasicBlock(5, instructions=[Branch(Condition(OperationType.less, [variable('b'), Constant(10)]))]),
        BasicBlock(6, instructions=[]),
        BasicBlock(7, instructions=[]),
        BasicBlock(8, instructions=[]),
    ]
    graph.add_nodes_from(blocks)
    graph.add_edges_from([
        TrueCase(blocks[0], blocks[1]),
        FalseCase(blocks[0], blocks[5]),
        TrueCase(blocks[1], blocks[2]),
        FalseCase(blocks[1], blocks[3]),
        UnconditionalEdge(blocks[2], blocks[4]),
        UnconditionalEdge(blocks[3], blocks[4]),
        TrueCase(blocks[5], blocks[6]),
        FalseCase(blocks[5], blocks[7]),
        UnconditionalEdge(blocks[6], blocks[8]),
        UnconditionalEdge(blocks[7], blocks[8]),
    ])
    EmptyBasicBlockRemover(graph).remove()
    assert graph.nodes == tuple()
def tokenize_regex(text):
    """Split *text* into tokens around RE_TOK matches.

    A '.' immediately following an honorific token ('Mr'/'Ms') is merged
    into it so abbreviations stay whole.
    """
    tokens = []
    last_end = 0
    for match in RE_TOK.finditer(text):
        gap = text[last_end:match.start()].strip()
        if gap:
            tokens.append(gap)
        tok = match.group().strip()
        if tok:
            if tok == '.' and tokens and tokens[-1] in {'Mr', 'Ms'}:
                # Fuse "Mr" + "." into "Mr.".
                tokens[-1] += tok
            else:
                tokens.append(tok)
        last_end = match.end()
    # Trailing text after the final match is kept unstripped, as-is.
    tail = text[last_end:]
    if tail:
        tokens.append(tail)
    return tokens
class LiteDRAMUSDDRPHYCRG(Module):
    """Clock/reset generation for an UltraScale DDR PHY core (DDR4 only)."""

    def __init__(self, platform, core_config):
        assert core_config['memtype'] in ['DDR4']
        self.rst = Signal()
        self.clock_domains.cd_por = ClockDomain(reset_less=True)
        self.clock_domains.cd_sys = ClockDomain()
        self.clock_domains.cd_sys4x = ClockDomain()
        self.clock_domains.cd_sys4x_pll = ClockDomain()
        self.clock_domains.cd_iodelay = ClockDomain()

        clk = platform.request('clk')
        rst = platform.request('rst')

        # Power-on reset: count down a fixed number of input-clock cycles
        # before enabling the output buffers.
        por_count = Signal(32, reset=int(core_config['input_clk_freq'] * 100 / 1000.0))
        por_done = Signal()
        self.comb += self.cd_por.clk.eq(clk)
        self.comb += por_done.eq(por_count == 0)
        self.sync.por += If(~por_done, por_count.eq(por_count - 1))

        # Main MMCM: produces the iodelay clock and a 4x system clock.
        self.submodules.pll = pll = USMMCM(speedgrade=core_config['speedgrade'])
        self.comb += pll.reset.eq(rst | self.rst)
        pll.register_clkin(clk, core_config['input_clk_freq'])
        pll.create_clkout(self.cd_iodelay, core_config['iodelay_clk_freq'])
        pll.create_clkout(self.cd_sys4x_pll, 4 * core_config['sys_clk_freq'], buf=None)
        self.comb += platform.request('pll_locked').eq(pll.locked)

        # Derive sys (divide-by-4) and sys4x from the PLL clock, gated on POR.
        self.specials += [
            Instance('BUFGCE_DIV', name='main_bufgce_div', p_BUFGCE_DIVIDE=4,
                     i_CE=por_done, i_I=self.cd_sys4x_pll.clk, o_O=self.cd_sys.clk),
            Instance('BUFGCE', name='main_bufgce',
                     i_CE=por_done, i_I=self.cd_sys4x_pll.clk, o_O=self.cd_sys4x.clk),
        ]
        self.submodules.idelayctrl = USIDELAYCTRL(self.cd_iodelay, cd_sys=self.cd_sys)
def encode(obj):
    """Recursively serialize bytes/str/int/list/Serializable with length prefixes."""
    if isinstance(obj, bytes):
        return add_length_prefix(obj)
    if isinstance(obj, str):
        return add_length_prefix(obj.encode('utf-8'))
    if isinstance(obj, int):
        # Only non-negative integers have a big-endian byte encoding here.
        assert obj >= 0
        return add_length_prefix(int_to_big_endian(obj))
    if isinstance(obj, list):
        payload = b''.join(encode(item) for item in obj)
        return add_length_prefix(payload, True)
    if isinstance(obj, Serializable):
        return encode(obj._s(obj))
    # NOTE(review): unsupported types fall through and return None -- confirm
    # callers never pass anything else.
class BigtableClusterIterator(ResourceIterator):
    """Yields bigtable_cluster resources for the current Bigtable instance."""

    def iter(self):
        client = self.client
        # Without an instance id there is nothing to enumerate.
        if not getattr(self.resource, 'instance_id', ''):
            return
        try:
            clusters = client.iter_bigtable_clusters(
                project_id=self.resource.parent()['projectId'],
                instance_id=self.resource.instance_id)
            for data, metadata in clusters:
                yield FACTORIES['bigtable_cluster'].create_new(data, metadata=metadata)
        except ResourceNotSupported as e:
            # Unsupported resource types are logged, not treated as fatal.
            LOGGER.debug(e)
def fortios_dlp(data, fos, check_mode):
    """Apply the DLP sensor task and report (failed, changed, response, diff)."""
    fos.do_member_operation('dlp', 'sensor')
    if data['dlp_sensor']:
        resp = dlp_sensor(data, fos, check_mode)
    else:
        fos._module.fail_json(msg='missing task body: %s' % 'dlp_sensor')
    if check_mode:
        return resp
    succeeded = is_successful_status(resp)
    revision_changed = resp['revision_changed'] if 'revision_changed' in resp else True
    return (not succeeded, succeeded and revision_changed, resp, {})
((MAGICK_VERSION_NUMBER < 1672), reason='Not supported until after ImageMagick-6.8.8') def test_rotational_blur(): with Image(filename='rose:') as img: was = img.signature img.rotational_blur(45.0) now = img.signature assert (was != now) was = now img.rotational_blur(180, 'blue') assert (was != img.signature)
class BooleanFilter:
    """Composable boolean query filter building an Elasticsearch-style dict.

    Fix: restored the ``@property`` decorator on :meth:`subtree` -- the class
    consistently reads ``self.subtree`` / ``x.subtree`` as an attribute
    (``tuple(self.subtree.keys())``, ``self.subtree['must']``), which raises
    on a plain bound method.
    """

    def __init__(self) -> None:
        self._filter: Dict[str, Any] = {}

    def __and__(self, x: 'BooleanFilter') -> 'BooleanFilter':
        # Merge into an existing 'must' clause when either side has one;
        # otherwise wrap both operands in a fresh AndFilter.
        if tuple(self.subtree.keys()) == ('must',):
            # NOTE(review): this branch inspects self._filter but appends x,
            # while the mirrored branch below inspects x._filter -- confirm
            # the asymmetry with __or__ is intended.
            if 'bool' in self._filter:
                self.subtree['must'].append(x.build())
            else:
                self.subtree['must'].append(x.subtree)
            return self
        elif tuple(x.subtree.keys()) == ('must',):
            if 'bool' in x._filter:
                x.subtree['must'].append(self.build())
            else:
                x.subtree['must'].append(self.subtree)
            return x
        return AndFilter(self, x)

    def __or__(self, x: 'BooleanFilter') -> 'BooleanFilter':
        # Merge into an existing 'should' clause when either side has one;
        # otherwise wrap both operands in a fresh OrFilter.
        if tuple(self.subtree.keys()) == ('should',):
            if 'bool' in x._filter:
                self.subtree['should'].append(x.build())
            else:
                self.subtree['should'].append(x.subtree)
            return self
        elif tuple(x.subtree.keys()) == ('should',):
            if 'bool' in self._filter:
                x.subtree['should'].append(self.build())
            else:
                x.subtree['should'].append(self.subtree)
            return x
        return OrFilter(self, x)

    def __invert__(self) -> 'BooleanFilter':
        return NotFilter(self)

    def empty(self) -> bool:
        """True when no clauses have been added yet."""
        return not bool(self._filter)

    def __repr__(self) -> str:
        return str(self.build())

    @property
    def subtree(self) -> Dict[str, Any]:
        """The clause dict inside an enclosing 'bool', or the raw filter dict."""
        if 'bool' in self._filter:
            return cast(Dict[str, Any], self._filter['bool'])
        return self._filter

    def build(self) -> Dict[str, Any]:
        """Return the raw filter dict."""
        return self._filter
('HFWrapper.v1')
def HFWrapper(hf_model: HFObjects, convert_inputs: Optional[Callable] = None,
              convert_outputs: Optional[Callable] = None, mixed_precision: bool = False,
              grad_scaler_config: dict = {}, config_cls=AutoConfig, model_cls=AutoModel,
              tokenizer_cls=AutoTokenizer) -> Model[(Any, Any)]:
    """Wrap a HuggingFace model in a Thinc Model with pytorch conversion shims."""
    # NOTE(review): the bare ('HFWrapper.v1') above looks like a registry
    # decorator whose callable was lost in extraction -- confirm.
    # NOTE(review): the mutable default for grad_scaler_config is shared
    # across calls -- confirm this is intended before changing the signature.
    inputs_fn = convert_pytorch_default_inputs if convert_inputs is None else convert_inputs
    outputs_fn = convert_pytorch_default_outputs if convert_outputs is None else convert_outputs
    shim = HFShim(hf_model, mixed_precision=mixed_precision,
                  grad_scaler_config=grad_scaler_config, config_cls=config_cls,
                  model_cls=model_cls, tokenizer_cls=tokenizer_cls)
    return Model('hf-pytorch', pt_forward,
                 attrs={'convert_inputs': inputs_fn, 'convert_outputs': outputs_fn},
                 shims=[shim], dims={'nI': None, 'nO': None})
def parse(s):
    """Parse a quantity expression string into a number or Quantity."""
    if not isinstance(s, str):
        raise ValueError(f'expected a str, received {type(s).__name__}')
    # Split off the leading numeric factor from the unit expression.
    # NOTE(review): the lstrip set '+-.' contains no digits -- confirm digit
    # characters were not lost from this call in the original source.
    tail = s.lstrip('+-.')
    result = float(s[:len(s) - len(tail)] or 1)
    for expr, power, isnumer in _split_factors(tail):
        unit_name = expr.lstrip('+-.')
        try:
            coeff = float(expr[:len(expr) - len(unit_name)] or 1)
            factor = coeff * getattr(units, unit_name) ** power
        except (ValueError, AttributeError):
            raise ValueError(f'invalid (sub)expression {expr!r}') from None
        result = result * factor if isnumer else result / factor
    if isinstance(result, Quantity):
        # Remember the source text for round-tripping/diagnostics.
        result._parsed_from = s
    return result
class LocalExperimentAccessor(LocalExperimentReader):
    """Write-side accessor for an experiment stored on the local filesystem."""

    def __init__(self, storage: LocalStorageAccessor, uuid: UUID, path: Path,
                 parameters: Optional[List[ParameterConfig]] = None,
                 responses: Optional[List[ResponseConfig]] = None,
                 observations: Optional[Dict[str, xr.Dataset]] = None,
                 name: Optional[str] = None) -> None:
        self._storage: LocalStorageAccessor = storage
        self._id = uuid
        self._path = path
        # Fall back to today's date as a human-readable default name.
        self._name = name if name is not None else datetime.today().strftime('%Y-%m-%d')

        # Merge parameter configurations into the on-disk parameter file.
        parameters = [] if parameters is None else parameters
        parameter_file = self.mount_point / self._parameter_file
        parameter_data = (json.loads(parameter_file.read_text(encoding='utf-8'))
                          if parameter_file.exists() else {})
        for parameter in parameters:
            parameter.save_experiment_data(self._path)
            parameter_data.update({parameter.name: parameter.to_dict()})
        with open(parameter_file, 'w', encoding='utf-8') as f:
            json.dump(parameter_data, f)

        # Merge response configurations into the on-disk response file.
        responses = [] if responses is None else responses
        response_file = self.mount_point / self._responses_file
        response_data = (json.loads(response_file.read_text(encoding='utf-8'))
                         if response_file.exists() else {})
        for response in responses:
            response_data.update({response.name: response.to_dict()})
        with open(response_file, 'w', encoding='utf-8') as f:
            json.dump(response_data, f, default=str)

        # Persist observation datasets, one NetCDF file per observation.
        if observations:
            output_path = self.mount_point / 'observations'
            Path.mkdir(output_path, parents=True, exist_ok=True)
            for name, dataset in observations.items():
                dataset.to_netcdf(output_path / f'{name}', engine='scipy')

        with open(path / 'index.json', 'w', encoding='utf-8') as f:
            print(_Index(id=uuid, name=self._name).model_dump_json(), file=f)

    def name(self) -> str:
        # NOTE(review): reads like a stripped @property -- confirm against the
        # original source before calling it as a method.
        return self._name

    def create_ensemble(self, *, ensemble_size: int, name: str, iteration: int = 0,
                        prior_ensemble: Optional[LocalEnsembleReader] = None) -> LocalEnsembleAccessor:
        """Create a new ensemble belonging to this experiment."""
        return self._storage.create_ensemble(self, ensemble_size=ensemble_size,
                                             iteration=iteration, name=name,
                                             prior_ensemble=prior_ensemble)

    def write_simulation_arguments(self, info: Union[SingleTestRunArguments,
                                                     EnsembleExperimentRunArguments,
                                                     ESRunArguments, ESMDARunArguments,
                                                     SIESRunArguments]) -> None:
        """Serialize the run-argument dataclass next to the experiment data."""
        with open(self.mount_point / self._simulation_arguments_file, 'w',
                  encoding='utf-8') as f:
            json.dump(dataclasses.asdict(info), f)
def test_dates_are_fiscal_year_bookends():
    """Only (Oct 1, a later Sep 30) given in that order counts as bookends."""
    sep_30_2000 = date(2000, 9, 30)
    oct_1_2001 = date(2001, 10, 1)
    mar_2_2020 = date(2020, 3, 2)
    may_30_2017 = date(2017, 5, 30)
    oct_30_2019 = date(2019, 10, 30)
    oct_1_1998 = date(1998, 10, 1)
    assert fyh.dates_are_fiscal_year_bookends(sep_30_2000, oct_1_2001) is False
    assert fyh.dates_are_fiscal_year_bookends(sep_30_2000, mar_2_2020) is False
    assert fyh.dates_are_fiscal_year_bookends(oct_1_2001, may_30_2017) is False
    assert fyh.dates_are_fiscal_year_bookends(sep_30_2000, oct_30_2019) is False
    assert fyh.dates_are_fiscal_year_bookends(oct_1_1998, sep_30_2000) is True
class Kinetic3d(Function):
    """Kinetic-energy integral over 3D Cartesian Gaussians, factored per axis."""

    @classmethod
    def eval(cls, La, Lb, a, b, A, B):
        """Return T = Tx*Sy*Sz + Sx*Ty*Sz + Sx*Sy*Tz from 1D factors.

        Fix: restored the ``@classmethod`` decorator implied by the ``cls``
        first parameter (the ``Function.eval`` auto-evaluation convention).
        """
        # 1D kinetic and overlap factors along x, y, z.
        Tx, Ty, Tz = [Kinetic1d(a, A[i], b, B[i]).eval(La[i], Lb[i]) for i in range(3)]
        Sx, Sy, Sz = [Overlap1d(a, A[i], b, B[i]).eval(La[i], Lb[i]) for i in range(3)]
        return Tx * Sy * Sz + Sx * Ty * Sz + Sx * Sy * Tz
def test_agent_info():
    """AgentsInfo exposes per-agent dicts and rejects a None body."""
    agents_info = OefSearchMessage.AgentsInfo({
        'agent_address_1': {'key_1': b'value_1', 'key_2': b'value_2'},
        'agent_address_2': {'key_3': b'value_3', 'key_4': b'value_4'},
    })
    expected_first = {'key_1': b'value_1', 'key_2': b'value_2'}
    assert agents_info.get_info_for_agent('agent_address_1') == expected_first
    with pytest.raises(ValueError, match='body must not be None'):
        OefSearchMessage.AgentsInfo(None)
def get_fyp_notation(relation_name=None):
    """Build a "FY{year}P{period}" annotation, optionally through a relation."""
    prefix = f'{relation_name}__' if relation_name else ''
    fiscal_year = Cast(f'{prefix}reporting_fiscal_year', output_field=CharField())
    # Zero-pad the period to two characters, e.g. P03.
    fiscal_period = LPad(
        Cast(f'{prefix}reporting_fiscal_period', output_field=CharField()), 2, Value('0'))
    return Concat(Value('FY'), fiscal_year, Value('P'), fiscal_period)
class TestProxyResolver(tests.LimitedTestCase):
    """Tests for greendns.ResolverProxy: hosts-file lookups and DNS fallbacks."""

    def test_clear(self):
        proxy = greendns.ResolverProxy()
        assert proxy._cached_resolver is None
        first_resolver = proxy._resolver
        assert first_resolver is not None
        proxy.clear()
        # clear() must replace the underlying resolver with a fresh one.
        assert proxy._resolver is not None
        assert proxy._resolver != first_resolver

    def _make_mock_hostsresolver(self):
        # Hosts-file resolver that always answers 1.2.3.4.
        base_resolver = _make_mock_base_resolver()
        base_resolver.rr.address = '1.2.3.4'
        return base_resolver()

    def _make_mock_resolver(self):
        # Network resolver that always answers 5.6.7.8.
        base_resolver = _make_mock_base_resolver()
        base_resolver.rr.address = '5.6.7.8'
        return base_resolver()

    def test_hosts(self):
        proxy = greendns.ResolverProxy(self._make_mock_hostsresolver())
        # str queries (twice, exercising any caching) and a bytes query.
        for hostname in ('host.example.com', 'host.example.com', b'host.example.com'):
            answer = proxy.query(hostname)
            assert answer[0].address == '1.2.3.4'

    def test_hosts_noanswer(self):
        hosts_resolver = self._make_mock_hostsresolver()
        proxy = greendns.ResolverProxy(hosts_resolver)
        proxy._resolver = self._make_mock_resolver()
        # When the hosts file has no answer, fall through to the resolver.
        hosts_resolver.raises = greendns.dns.resolver.NoAnswer
        answer = proxy.query('host.example.com')
        assert answer[0].address == '5.6.7.8'

    def test_resolver(self):
        proxy = greendns.ResolverProxy()
        proxy._resolver = resolver = self._make_mock_resolver()
        for hostname in ('host.example.com', 'host.example.com', b'host.example.com'):
            answer = proxy.query(hostname)
            assert answer[0].address == '5.6.7.8'
            # The proxy must hand the resolver a parsed dns.name.Name.
            assert isinstance(resolver.args[0], dns.name.Name)

    def test_noanswer(self):
        resolver = self._make_mock_resolver()
        proxy = greendns.ResolverProxy()
        proxy._resolver = resolver
        resolver.raises = greendns.dns.resolver.NoAnswer
        with tests.assert_raises(greendns.dns.resolver.NoAnswer):
            proxy.query('host.example.com')

    def test_nxdomain(self):
        resolver = self._make_mock_resolver()
        proxy = greendns.ResolverProxy()
        proxy._resolver = resolver
        resolver.raises = greendns.dns.resolver.NXDOMAIN
        with tests.assert_raises(greendns.dns.resolver.NXDOMAIN):
            proxy.query('host.example.com')

    def test_noanswer_hosts(self):
        hosts_resolver = self._make_mock_hostsresolver()
        resolver = self._make_mock_resolver()
        proxy = greendns.ResolverProxy(hosts_resolver)
        proxy._resolver = resolver
        # Both layers failing must propagate NoAnswer.
        hosts_resolver.raises = greendns.dns.resolver.NoAnswer
        resolver.raises = greendns.dns.resolver.NoAnswer
        with tests.assert_raises(greendns.dns.resolver.NoAnswer):
            proxy.query('host.example.com')

    def _make_mock_resolver_aliases(self):
        # First call answers one CNAME to host.example.com; later calls raise.
        class RR:
            target = 'host.example.com'

        class Resolver:
            call_count = 0
            exc_type = greendns.dns.resolver.NoAnswer

            def query(self, *args, **kwargs):
                self.args = args
                self.kwargs = kwargs
                self.call_count += 1
                if self.call_count < 2:
                    return greendns.HostsAnswer(args[0], 1, 5, [RR()], False)
                raise self.exc_type()

        return Resolver()

    def test_getaliases(self):
        proxy = greendns.ResolverProxy()
        proxy._resolver = self._make_mock_resolver_aliases()
        assert set(proxy.getaliases('alias.example.com')) == {'host.example.com'}

    def test_getaliases_fqdn(self):
        proxy = greendns.ResolverProxy()
        proxy._resolver = self._make_mock_resolver_aliases()
        # A name with no CNAME record yields no aliases.
        proxy._resolver.call_count = 1
        assert proxy.getaliases('host.example.com') == []

    def test_getaliases_nxdomain(self):
        proxy = greendns.ResolverProxy()
        proxy._resolver = self._make_mock_resolver_aliases()
        proxy._resolver.call_count = 1
        proxy._resolver.exc_type = greendns.dns.resolver.NXDOMAIN
        assert proxy.getaliases('host.example.com') == []
class TemporaryCharacterSheet:
    """Rolls a complete Knave-style character, to be applied to an account.

    Fix: in :meth:`apply`, the ``charisma`` attribute was stored from
    ``self.wisdom`` instead of ``self.charisma``.
    """

    def _random_ability(self):
        # Abilities take the lowest of three d6 rolls.
        return min(dice.roll('1d6'), dice.roll('1d6'), dice.roll('1d6'))

    def __init__(self):
        # NOTE(review): '1d282' is an unusual die spec -- confirm the name
        # table really has 282 entries.
        self.name = dice.roll_random_table('1d282', chargen_tables['name'])
        self.strength = self._random_ability()
        self.dexterity = self._random_ability()
        self.constitution = self._random_ability()
        self.intelligence = self._random_ability()
        self.wisdom = self._random_ability()
        self.charisma = self._random_ability()
        # Appearance, history and personality rolls.
        physique = dice.roll_random_table('1d20', chargen_tables['physique'])
        face = dice.roll_random_table('1d20', chargen_tables['face'])
        skin = dice.roll_random_table('1d20', chargen_tables['skin'])
        hair = dice.roll_random_table('1d20', chargen_tables['hair'])
        clothing = dice.roll_random_table('1d20', chargen_tables['clothing'])
        speech = dice.roll_random_table('1d20', chargen_tables['speech'])
        virtue = dice.roll_random_table('1d20', chargen_tables['virtue'])
        vice = dice.roll_random_table('1d20', chargen_tables['vice'])
        background = dice.roll_random_table('1d20', chargen_tables['background'])
        misfortune = dice.roll_random_table('1d20', chargen_tables['misfortune'])
        alignment = dice.roll_random_table('1d20', chargen_tables['alignment'])
        self.ability_changes = 0
        self.desc = f'You are {physique} with a {face} face, {skin} skin, {hair} hair, {speech} speech, and {clothing} clothing. You were a {background.title()}, but you were {misfortune} and ended up a knave. You are {virtue} but also {vice}. You tend towards {alignment}.'
        # Hit points and equipment.
        self.hp_max = max(5, dice.roll('1d8'))
        self.hp = self.hp_max
        self.armor = dice.roll_random_table('1d20', chargen_tables['armor'])
        _helmet_and_shield = dice.roll_random_table('1d20', chargen_tables['helmets and shields'])
        self.helmet = 'helmet' if 'helmet' in _helmet_and_shield else 'none'
        self.shield = 'shield' if 'shield' in _helmet_and_shield else 'none'
        self.weapon = dice.roll_random_table('1d20', chargen_tables['starting weapon'])
        self.backpack = [
            'ration',
            'ration',
            dice.roll_random_table('1d20', chargen_tables['dungeoning gear']),
            dice.roll_random_table('1d20', chargen_tables['dungeoning gear']),
            dice.roll_random_table('1d20', chargen_tables['general gear 1']),
            dice.roll_random_table('1d20', chargen_tables['general gear 2']),
        ]

    def show_sheet(self):
        """Render the sheet template with stats, description and equipment."""
        equipment = (
            str(item)
            for item in [self.armor, self.helmet, self.shield, self.weapon] + self.backpack
            if item
        )
        return _TEMP_SHEET.format(
            name=self.name, strength=self.strength, dexterity=self.dexterity,
            constitution=self.constitution, intelligence=self.intelligence,
            wisdom=self.wisdom, charisma=self.charisma, description=self.desc,
            equipment=', '.join(equipment))

    def apply(self, account):
        """Create and return the persistent character for *account*."""
        start_location = ObjectDB.objects.get_id(settings.START_LOCATION)
        default_home = ObjectDB.objects.get_id(settings.DEFAULT_HOME)
        permissions = settings.PERMISSION_ACCOUNT_DEFAULT
        new_character = create_object(
            EvAdventureCharacter, key=self.name, location=start_location,
            home=default_home, permissions=permissions,
            attributes=(
                ('strength', self.strength),
                ('dexterity', self.dexterity),
                ('constitution', self.constitution),
                ('intelligence', self.intelligence),
                ('wisdom', self.wisdom),
                # Fix: previously stored self.wisdom under 'charisma'.
                ('charisma', self.charisma),
                ('hp', self.hp),
                ('hp_max', self.hp_max),
                ('desc', self.desc),
            ))
        new_character.locks.add(
            'puppet:id(%i) or pid(%i) or perm(Developer) or pperm(Developer);delete:id(%i) or perm(Admin)'
            % (new_character.id, account.id, account.id))
        # Spawn and equip starting gear.
        if self.weapon:
            weapon = spawn(self.weapon)
            new_character.equipment.move(weapon[0])
        if self.armor:
            armor = spawn(self.armor)
            new_character.equipment.move(armor[0])
        if self.shield:
            shield = spawn(self.shield)
            new_character.equipment.move(shield[0])
        if self.helmet:
            helmet = spawn(self.helmet)
            new_character.equipment.move(helmet[0])
        for item in self.backpack:
            item = spawn(item)
            new_character.equipment.move(item[0])
        return new_character
class TestBugTesting(BasePyTestCase):
    """Tests for Bug.testing()."""

    ('bodhi.server.models.bugs.bugtracker.on_qa')
    ('bodhi.server.models.log.debug')
    def test_parent_security_bug(self, debug, on_qa):
        # NOTE(review): the two bare strings above look like mock.patch
        # decorators whose callables were lost in extraction -- confirm.
        update = model.Update.query.first()
        update.type = model.UpdateType.security
        bug = model.Bug.query.first()
        bug.parent = True
        bug.testing(update)
        # Parent security bugs are only logged, never moved to ON_QA.
        debug.assert_called_once_with('Not modifying parent security bug %s', bug.bug_id)
        assert on_qa.call_count == 0
def test_calcCubicBounds():
    """Bounding boxes of three reference cubic Bezier curves."""
    bounds = calcCubicBounds((0, 0), (25, 100), (75, 100), (100, 0))
    assert bounds == (0, 0, 100, 75.0)
    bounds = calcCubicBounds((0, 0), (50, 0), (100, 50), (100, 100))
    assert bounds == (0.0, 0.0, 100, 100)
    # Self-intersecting curve: compare with a tolerance.
    bounds = calcCubicBounds((50, 0), (0, 100), (100, 100), (50, 0))
    assert bounds == pytest.approx((35.566243, 0.0, 64.433757, 75.0))
def V3HTTPFilter(irfilter: IRFilter, v3config: 'V3Config'):
    """Dispatch an IRFilter to its v3 config generator based on its kind.

    Fix: the original dict literal was corrupted in extraction
    (``'ir.grpc_ V3HTTPFilter_grpc_``) and was not valid Python.
    """
    # NOTE(review): the damaged entry was reconstructed as grpc_http1_bridge
    # from the surviving text -- confirm against the upstream source.
    fn = {
        'ir.grpc_http1_bridge': V3HTTPFilter_grpc_http1_bridge,
        'ir.grpc_web': V3HTTPFilter_grpc_web,
        'ir.grpc_stats': V3HTTPFilter_grpc_stats,
        'ir.cors': V3HTTPFilter_cors,
        'ir.router': V3HTTPFilter_router,
        'ir.lua_scripts': V3HTTPFilter_lua,
    }[irfilter.kind]
    return fn(irfilter, v3config)
class OptionsWithTemplates(Options):
    """Options mixin exposing template / templateLoading / templateError hooks.

    Fix: restored the ``@property`` / setter decorators -- each name was
    defined twice (getter then setter), so the later ``def`` silently
    shadowed the earlier one and the getters were unreachable.
    """

    @property
    def template(self):
        return self._config_get(None)

    @template.setter
    def template(self, value: str):
        # Wrap the user expression in a JS callback body.
        self._config('function(data){return %s}' % value, js_type=True)

    @property
    def templateLoading(self):
        return self._config_get(None)

    @templateLoading.setter
    def templateLoading(self, value: str):
        self._config('function(data){return %s}' % value, js_type=True)

    @property
    def templateError(self):
        return self._config_get(None)

    @templateError.setter
    def templateError(self, value: str):
        self._config('function(data){return %s}' % value, js_type=True)
def createArnoldTextureSettings():
    """Build the Arnold texture settings section of the render globals UI."""
    import pymel.core as pm
    import maya.cmds as cmds
    import pymel.versions as versions
    from mtoa.ui.globals import settings

    pm.setUITemplate('attributeEditorTemplate', pushTemplate=True)
    pm.columnLayout(adjustableColumn=True)
    # Auto-TX conversion is shown but deliberately disabled and forced off.
    pm.attrControlGrp('autotx', cc=settings.updateAutoTxSettings,
                      label='Auto-convert Textures to TX (Disabled in Anima)',
                      attribute='defaultArnoldRenderOptions.autotx', enable=False)
    pm.attrControlGrp('use_existing_tiled_textures', label='Use Existing TX Textures',
                      attribute='defaultArnoldRenderOptions.use_existing_tiled_textures')
    pm.setAttr('defaultArnoldRenderOptions.autotx', 0)
    settings.updateAutoTxSettings()
    cmds.separator()
    # The auto-mipmap controls only exist prior to Maya 2017.
    maya_version = versions.shortName()
    if int(float(maya_version)) < 2017:
        pm.attrControlGrp('texture_automip', label='Auto-mipmap',
                          attribute='defaultArnoldRenderOptions.textureAutomip')
        pm.attrControlGrp('texture_accept_unmipped', label='Accept Unmipped',
                          attribute='defaultArnoldRenderOptions.textureAcceptUnmipped')
    cmds.separator()
    pm.checkBoxGrp('ts_autotile', cc=settings.updateAutotileSettings,
                   label='', label1='Auto-tile')
    pm.connectControl('ts_autotile', 'defaultArnoldRenderOptions.autotile', index=2)
    pm.intSliderGrp('ts_texture_autotile', label='Tile Size', minValue=16, maxValue=64,
                    fieldMinValue=16, fieldMaxValue=1024)
    # Bind label, field and slider of the tile-size group to the same attribute.
    for control_index in (1, 2, 3):
        pm.connectControl('ts_texture_autotile',
                          'defaultArnoldRenderOptions.textureAutotile',
                          index=control_index)
    pm.attrControlGrp('texture_accept_untiled', label='Accept Untiled',
                      attribute='defaultArnoldRenderOptions.textureAcceptUntiled')
    pm.attrControlGrp('texture_max_memory_MB', label='Max Cache Size (MB)',
                      attribute='defaultArnoldRenderOptions.textureMaxMemoryMB')
    pm.attrControlGrp('texture_max_open_files', label='Max Open Files',
                      attribute='defaultArnoldRenderOptions.textureMaxOpenFiles')
    cmds.separator()
    cmds.attrControlGrp('texture_diffuse_blur', label='Diffuse Blur',
                        attribute='defaultArnoldRenderOptions.textureDiffuseBlur')
    pm.setParent('..')
    pm.setUITemplate(popTemplate=True)
class Sequence(Sequence):
    """Fixed-length sequence wrapper (note: shadows the imported Sequence base)."""

    def __init__(self, length):
        self.length = length

    def __len__(self):
        return self.length

    def __iter__(self):
        # Delegate iteration to full-slice indexing.
        return self[:]

    def wrapindex(self, i):
        """Normalize a possibly-negative index, raising IndexError if out of range."""
        if i < 0:
            i += len(self)
        if not (0 <= i < len(self)):
            # Presumably forces a TypeError for non-numeric indices before the
            # IndexError -- confirm the intent before removing.
            _ = i + 0
            raise IndexError('trace index out of range')
        return i
('gaussian16')
def test_yaml_oniom():
    """An ONIOM optimization built from a YAML-style dict converges in 7 cycles."""
    # NOTE(review): the bare ('gaussian16') above looks like a marker/decorator
    # whose callable was lost in extraction -- confirm.
    run_dict = {
        'geom': {'type': 'redund', 'fn': 'lib:acetaldehyd_oniom.xyz'},
        'calc': {
            'type': 'oniom',
            'calcs': {
                'real': {'type': 'g16', 'route': 'hf sto-3g', 'pal': 2},
                'high': {'type': 'g16', 'route': 'b3lyp d95v', 'pal': 2},
            },
            'models': {'high': {'inds': [4, 5, 6], 'calc': 'high'}},
        },
        'opt': {'thresh': 'gau_tight'},
    }
    results = run_from_dict(run_dict)
    opt = results.opt
    assert opt.is_converged
    assert opt.cur_cycle == 7
    assert results.opt_geom.energy == pytest.approx(-153.0)
def test_federal_account_c_defaults_success(client, download_test_data):
    """A federal_account download request with default options returns a zip URL."""
    download_generation.retrieve_db_string = Mock(return_value=get_database_dsn_string())
    payload = {
        'account_level': 'federal_account',
        'filters': {'submission_types': ['award_financial'], 'fy': '2016', 'quarter': '4'},
        'file_format': 'csv',
    }
    resp = client.post('/api/v2/download/accounts/',
                       content_type='application/json', data=json.dumps(payload))
    assert resp.status_code == status.HTTP_200_OK
    assert '.zip' in resp.json()['file_url']
class flow_monitor_entry(loxi.OFObject):
    """OpenFlow flow-monitor entry: monitor id, filter fields, command, match.

    Fix: restored ``@staticmethod`` on :meth:`unpack` -- it takes no ``self``
    and builds a fresh object (the generated-loxi convention); an instance
    call would otherwise mis-bind ``reader``.
    """

    def __init__(self, monitor_id=None, out_port=None, out_group=None, flags=None,
                 table_id=None, command=None, match=None):
        # Scalar fields default to 0; the match defaults to an empty ofp.match.
        self.monitor_id = monitor_id if monitor_id is not None else 0
        self.out_port = out_port if out_port is not None else 0
        self.out_group = out_group if out_group is not None else 0
        self.flags = flags if flags is not None else 0
        self.table_id = table_id if table_id is not None else 0
        self.command = command if command is not None else 0
        self.match = match if match is not None else ofp.match()

    def pack(self):
        """Serialize this entry to its wire format."""
        packed = []
        packed.append(struct.pack('!L', self.monitor_id))
        packed.append(struct.pack('!L', self.out_port))
        packed.append(struct.pack('!L', self.out_group))
        packed.append(struct.pack('!H', self.flags))
        packed.append(struct.pack('!B', self.table_id))
        packed.append(struct.pack('!B', self.command))
        packed.append(self.match.pack())
        # NOTE(review): ''.join only works when struct.pack returns str
        # (Python 2); under Python 3 this would need b''.join -- confirm the
        # target runtime before changing.
        return ''.join(packed)

    @staticmethod
    def unpack(reader):
        """Deserialize an entry from *reader*."""
        obj = flow_monitor_entry()
        obj.monitor_id = reader.read('!L')[0]
        obj.out_port = reader.read('!L')[0]
        obj.out_group = reader.read('!L')[0]
        obj.flags = reader.read('!H')[0]
        obj.table_id = reader.read('!B')[0]
        obj.command = reader.read('!B')[0]
        obj.match = ofp.match.unpack(reader)
        return obj

    def __eq__(self, other):
        if type(self) != type(other):
            return False
        if self.monitor_id != other.monitor_id:
            return False
        if self.out_port != other.out_port:
            return False
        if self.out_group != other.out_group:
            return False
        if self.flags != other.flags:
            return False
        if self.table_id != other.table_id:
            return False
        if self.command != other.command:
            return False
        if self.match != other.match:
            return False
        return True

    def pretty_print(self, q):
        """Pretty-print this entry via the loxi pretty-printer *q*."""
        q.text('flow_monitor_entry {')
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text('monitor_id = ')
                q.text('%#x' % self.monitor_id)
                q.text(',')
                q.breakable()
                q.text('out_port = ')
                q.text('%#x' % self.out_port)
                q.text(',')
                q.breakable()
                q.text('out_group = ')
                q.text('%#x' % self.out_group)
                q.text(',')
                q.breakable()
                q.text('flags = ')
                value_name_map = {1: 'OFPFMF_INITIAL', 2: 'OFPFMF_ADD', 4: 'OFPFMF_REMOVED',
                                  8: 'OFPFMF_MODIFY', 16: 'OFPFMF_INSTRUCTIONS',
                                  32: 'OFPFMF_NO_ABBREV', 64: 'OFPFMF_ONLY_OWN'}
                q.text(util.pretty_flags(self.flags, value_name_map.values()))
                q.text(',')
                q.breakable()
                q.text('table_id = ')
                q.text('%#x' % self.table_id)
                q.text(',')
                q.breakable()
                q.text('command = ')
                value_name_map = {0: 'OFPFMC_ADD', 1: 'OFPFMC_MODIFY', 2: 'OFPFMC_DELETE'}
                if self.command in value_name_map:
                    q.text('%s(%d)' % (value_name_map[self.command], self.command))
                else:
                    q.text('%#x' % self.command)
                q.text(',')
                q.breakable()
                q.text('match = ')
                q.pp(self.match)
            q.breakable()
        q.text('}')
class OptionPlotoptionsParetoSonificationDefaultspeechoptionsMappingPlaydelay(Options):
    """Play-delay mapping options for pareto sonification default speech options.

    Fix: restored the ``@property`` / setter decorators -- each name was
    defined twice (getter then setter), so the later ``def`` silently
    shadowed the earlier one and the getters were unreachable.
    """

    @property
    def mapFunction(self):
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        return self._config_get(None)

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def within(self):
        return self._config_get(None)

    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)
def test_dependency_but_no_circle_some_same_values_simple(graph_dependency_but_not_circular,
                                                          aliased_variable_y, variable_u):
    """Out-of-SSA (simple) on a phi with repeated origins inserts one copy block."""
    nodes, instructions, cfg = graph_dependency_but_not_circular
    replacement_phi = Phi(variable_u[3],
                          [aliased_variable_y[1], aliased_variable_y[4], aliased_variable_y[4]])
    replacement_phi._origin_block = {
        nodes[0]: aliased_variable_y[1],
        nodes[3]: aliased_variable_y[4],
        nodes[4]: aliased_variable_y[4],
    }
    nodes[1].instructions[0] = replacement_phi
    run_out_of_ssa(cfg, SSAOptions.simple)
    inserted_block = [node for node in cfg.nodes if node not in set(nodes)][0]
    # Non-phi instructions remain where they were.
    assert (
        nodes[0].instructions[0:3] == instructions[0:3]
        and nodes[1].instructions == [instructions[5]]
        and nodes[2].instructions == instructions[6:8]
        and nodes[3].instructions[0:5] == instructions[8:12]
        and nodes[4].instructions[0] == instructions[12]
    )
    # Phi functions are lowered to per-predecessor copy assignments.
    assert (
        nodes[0].instructions[3:] == [Assignment(phi.definitions[0], phi.origin_block[nodes[0]])
                                      for phi in instructions[3:5]]
        and inserted_block.instructions == [Assignment(phi.definitions[0], phi.origin_block[nodes[3]])
                                            for phi in instructions[3:5]]
        and nodes[4].instructions[1:] == [Assignment(phi.definitions[0], phi.origin_block[nodes[4]])
                                          for phi in instructions[3:5]]
    )
    # The new block is spliced onto the back edge out of nodes[3].
    assert (
        len(cfg.edges) == 7
        and isinstance(cfg.get_edge(nodes[0], nodes[1]), UnconditionalEdge)
        and isinstance(cfg.get_edge(nodes[1], nodes[2]), FalseCase)
        and isinstance(cfg.get_edge(nodes[1], nodes[3]), TrueCase)
        and isinstance(cfg.get_edge(nodes[3], inserted_block), TrueCase)
        and isinstance(cfg.get_edge(inserted_block, nodes[1]), UnconditionalEdge)
        and isinstance(cfg.get_edge(nodes[3], nodes[4]), FalseCase)
        and isinstance(cfg.get_edge(nodes[4], nodes[1]), UnconditionalEdge)
    )
class DecompilerPipeline():
    """Ordered collection of pipeline stages that decompiles a single task.

    NOTE(review): several methods read as if decorators were stripped during
    extraction (``from_strings`` takes ``cls``, ``stages`` is accessed
    attribute-style by ``run``/``validate``, and the ``_show_stage`` /
    ``_assert_*`` helpers take no ``self``) -- presumably @classmethod /
    @property / @staticmethod.  Confirm against the original module.
    """

    def __init__(self, stages: List[PipelineStage]):
        # Stages run strictly in the order supplied here.
        self._stages = stages

    def from_strings(cls, cfg_stage_names: List[str], ast_stage_names: List[str]) -> DecompilerPipeline:
        """Build a pipeline from CFG/AST stage names, wrapped by the fixed
        pre- and post-processing stages.  Unknown names are logged and
        skipped rather than raising."""
        name_to_stage = {stage.name: stage for stage in (CFG_STAGES + AST_STAGES)}
        stages = PREPROCESSING_STAGES.copy()
        for stage_name in cfg_stage_names:
            if (stage := name_to_stage.get(stage_name)):
                stages.append(stage)
            else:
                warning(f'Could not find a CFG PipelineStage named "{stage_name}"')
        stages.extend(POSTPROCESSING_STAGES.copy())
        for stage_name in ast_stage_names:
            if (stage := name_to_stage.get(stage_name)):
                stages.append(stage)
            else:
                warning(f'Could not find a AST PipelineStage named "{stage_name}"')
        return cls(stages)

    def stages(self) -> List[PipelineStage]:
        # Accessor for the configured stage list.
        return self._stages

    def validate(self):
        """Raise ValueError when a stage is ordered before one of its
        declared dependencies."""
        stages_run = []
        for stage in self.stages:
            for dependency in stage.dependencies:
                if (dependency not in stages_run):
                    raise ValueError(f'Invalid pipeline: {stage.name} requires {dependency}!')
            stages_run.append(stage.name)

    def run(self, task: DecompilerTask):
        """Run every stage on ``task``, honouring the logging/debug options.

        On a stage failure the task is marked failed, the error is logged and
        the remaining stages are skipped (the exception is re-raised instead
        when ``pipeline.debug`` is set).
        """
        output_format = task.options.getstring('logging.stage_output')
        show_all = task.options.getboolean('logging.show_all_stages', fallback=False)
        show_starting_point = task.options.getboolean('logging.show_starting_point', fallback=False)
        showed_stages = task.options.getlist('logging.show_selected', fallback=[])
        # 'ascii_and_tabs' enables both output channels at once.
        print_ascii = ((output_format == 'ascii') or (output_format == 'ascii_and_tabs'))
        show_in_tabs = ((output_format == 'tabs') or (output_format == 'ascii_and_tabs'))
        debug_mode = task.options.getboolean('pipeline.debug', fallback=False)
        validate_no_dataflow_dup = task.options.getboolean('pipeline.validate_no_dataflow_dup', fallback=False)
        self.validate()
        if show_starting_point:
            self._show_stage(task, 'Starting point', print_ascii, show_in_tabs)
        if task.failed:
            # Task may already have failed before the pipeline runs.
            return
        for stage in self.stages:
            debug(f'stage {stage.name}')
            instance = stage()
            try:
                instance.run(task)
                if (show_all or (stage.name in showed_stages)):
                    self._show_stage(task, f'After {stage.name}', print_ascii, show_in_tabs)
            except Exception as e:
                task.fail(origin=stage.name)
                error(f'Failed to decompile {task.name}, error during stage {stage.name}: {e}')
                if debug_mode:
                    raise e
                break
        if validate_no_dataflow_dup:
            # Optional post-run sanity check for duplicated dataflow objects.
            if (task.graph is not None):
                self._assert_no_dataflow_duplicates(list(task.graph.instructions))
            if (task.syntax_tree is not None):
                self._assert_no_ast_duplicates(task.syntax_tree)

    def _show_stage(task: DecompilerTask, stage_name: str, print_ascii: bool, show_in_tabs: bool):
        """Render the task's AST (preferred) or CFG in the requested formats."""
        if (task.syntax_tree is not None):
            if print_ascii:
                DecoratedAST.print_ascii(task.syntax_tree, f'(AST) {stage_name}')
            if show_in_tabs:
                DecoratedAST.show_flowgraph(task.syntax_tree, f'(AST) {stage_name}')
        elif (task.graph is not None):
            if print_ascii:
                DecoratedCFG.print_ascii(task.graph, stage_name)
            if show_in_tabs:
                DecoratedCFG.show_flowgraph(task.graph, stage_name)

    def _assert_no_ast_duplicates(ast: AbstractSyntaxTree):
        """Collect all CodeNode instructions and delegate the duplicate check."""
        instructions = []
        for node in ast.topological_order():
            if isinstance(node, CodeNode):
                instructions.extend(node.instructions)
        DecompilerPipeline._assert_no_dataflow_duplicates(instructions)

    def _assert_no_dataflow_duplicates(instructions: list[Instruction]):
        """Raise AssertionError if any DataflowObject instance (by identity)
        appears more than once across ``instructions``."""
        encountered_ids: set[int] = set()
        for instruction in instructions:
            for obj in instruction.subexpressions():
                if (id(obj) in encountered_ids):
                    raise AssertionError(f'Found duplicated DataflowObject in cfg: {obj}')
                encountered_ids.add(id(obj))
class OptionDateRangePicker(Options):
    """Option wrapper for a date-range-picker widget.

    NOTE(review): each getter/setter pair below shares one name; the original
    presumably used ``@property`` / ``@<name>.setter`` decorators that were
    stripped during extraction, so as written the later def shadows the
    earlier one -- confirm against the original module.
    """

    def input(self):
        # Getter: no explicit default.
        return self._config_get()

    def input(self, val):
        self._config(val)

    def container(self):
        return self._config_get()

    def container(self, val):
        self._config(val)

    def date(self):
        return self._config_get()

    def date(self, val):
        self._config(val)

    def dates(self):
        # Enumeration helper bound to the 'date' option.
        return EnumDates(self, 'date')

    def weekStartDay(self):
        # Getter: defaults to 'Sun' when unset.
        return self._config_get('Sun')

    def weekStartDay(self, val):
        self._config(val)
class RedisBackend(BroadcastBackend):
    """Pub/sub broadcast backend backed by two asyncio_redis connections:
    one dedicated to publishing, one to subscribing."""

    def __init__(self, url: str):
        # Pull connection details out of a redis:// URL, with sane fallbacks.
        components = urlparse(url)
        self._host = components.hostname or 'localhost'
        self._port = components.port or 6379
        self._password = components.password or None

    async def connect(self) -> None:
        # Separate connections: a subscribing connection cannot publish.
        connection_kwargs = {'host': self._host, 'port': self._port, 'password': self._password}
        self._pub_conn = await asyncio_redis.Connection.create(**connection_kwargs)
        self._sub_conn = await asyncio_redis.Connection.create(**connection_kwargs)
        self._subscriber = await self._sub_conn.start_subscribe()

    async def disconnect(self) -> None:
        self._pub_conn.close()
        self._sub_conn.close()

    async def subscribe(self, channel: str) -> None:
        await self._subscriber.subscribe([channel])

    async def unsubscribe(self, channel: str) -> None:
        await self._subscriber.unsubscribe([channel])

    async def publish(self, channel: str, message: typing.Any) -> None:
        await self._pub_conn.publish(channel, message)

    async def next_published(self) -> Event:
        # Blocks until the subscriber connection delivers the next message.
        published = await self._subscriber.next_published()
        return Event(channel=published.channel, message=published.value)
class OptionPlotoptionsArcdiagramSonificationContexttracksMappingLowpass(Options):
    """Generated Highcharts option wrapper for
    ``plotOptions.arcdiagram.sonification.contextTracks.mapping.lowpass``.

    NOTE(review): these accessors were presumably ``@property``-decorated in
    the generated original; decorators appear stripped file-wide.
    """

    def frequency(self) -> 'OptionPlotoptionsArcdiagramSonificationContexttracksMappingLowpassFrequency':
        # Lazily-created sub-options object for the lowpass cutoff frequency.
        return self._config_sub_data('frequency', OptionPlotoptionsArcdiagramSonificationContexttracksMappingLowpassFrequency)

    def resonance(self) -> 'OptionPlotoptionsArcdiagramSonificationContexttracksMappingLowpassResonance':
        # Lazily-created sub-options object for the lowpass resonance.
        return self._config_sub_data('resonance', OptionPlotoptionsArcdiagramSonificationContexttracksMappingLowpassResonance)
def extractAveragecatWordpressCom(item):
    """Parse 'averagecat.wordpress.com' feed items into release messages.

    Returns None for previews or posts without a chapter/volume, a release
    message when a known tag (or, for untagged posts, a title fragment)
    matches, and False otherwise.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if not (chp or vol) or 'preview' in item['title'].lower():
        return None
    tagged_series = (
        ('PRC', 'PRC', 'translated'),
        ('mitsuha', 'Rougo ni Sonaete Isekai de 8-manmai no Kinka wo Tamemasu', 'translated'),
        ('Loiterous', 'Loiterous', 'oel'),
    )
    for tag, series, tl_type in tagged_series:
        if tag in item['tags']:
            return buildReleaseMessageWithType(item, series, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    if item['tags'] == ['Uncategorized']:
        # No useful tags: fall back to sniffing the title text.
        title_series = (
            ('Mitsuha ', 'Rougo ni Sonaete Isekai de 8-manmai no Kinka wo Tamemasu', 'translated'),
            ('Master of Dungeon', 'Master of Dungeon', 'oel'),
        )
        lowered_title = item['title'].lower()
        for fragment, series, tl_type in title_series:
            if fragment.lower() in lowered_title:
                return buildReleaseMessageWithType(item, series, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
class OptionSeriesTilemapSonificationDefaultspeechoptionsMappingRate(Options):
    """Generated Highcharts option wrapper for
    ``series.tilemap.sonification.defaultSpeechOptions.mapping.rate``.

    NOTE(review): every getter/setter pair below shares one name; the
    generated original presumably carried ``@property`` / ``@<name>.setter``
    decorators that were stripped during extraction -- as written each
    second def shadows the first.  Confirm against the generator output.
    """

    def mapFunction(self):
        # Getter: current value, defaulting to None when unset.
        return self._config_get(None)

    def mapFunction(self, value: Any):
        # Setter: store the raw value without JavaScript-type wrapping.
        self._config(value, js_type=False)

    def mapTo(self):
        return self._config_get(None)

    def mapTo(self, text: str):
        self._config(text, js_type=False)

    def max(self):
        return self._config_get(None)

    def max(self, num: float):
        self._config(num, js_type=False)

    def min(self):
        return self._config_get(None)

    def min(self, num: float):
        self._config(num, js_type=False)

    def within(self):
        return self._config_get(None)

    def within(self, value: Any):
        self._config(value, js_type=False)
def xn_mod_poly(p):
    """Compute the inverse of polynomial ``p`` modulo ``x**len(p)`` by
    recursive Newton iteration over the Galois field implemented by
    ``galois_div`` / ``karatsuba_mul``.

    ``p`` is a coefficient list; a list of the same length is returned.
    Fix: removed a leftover debug ``print`` of the intermediate values,
    which polluted stdout on every call.
    """
    if len(p) == 1:
        # Base case: the inverse of a constant is its field reciprocal.
        return [galois_div(1, p[0])]
    halflen = len(p) // 2
    # Lift an inverse mod x**halflen to an inverse mod x**len(p).
    lowinv = xn_mod_poly(p[:halflen])
    submod_high = karatsuba_mul(lowinv, p[:halflen])[halflen:]
    med = karatsuba_mul(p[halflen:], lowinv)[:halflen]
    # XOR is addition in GF(2**k), so this sums the two correction terms.
    med_plus_high = [(x ^ y) for (x, y) in zip(med, submod_high)]
    highinv = karatsuba_mul(med_plus_high, lowinv)
    return (lowinv + highinv)[:len(p)]
class ABIFunction(TypedDict, total=False):
    """Shape of one function entry in an Ethereum contract ABI.

    ``total=False``: every key is optional, matching real-world ABIs where
    older compiler versions omit newer fields.
    """

    # Legacy (pre-0.6) mutability flag, superseded by stateMutability.
    constant: bool
    inputs: Sequence['ABIFunctionParams']
    name: str
    outputs: Sequence['ABIFunctionParams']
    # Legacy payability flag, superseded by stateMutability.
    payable: bool
    stateMutability: Literal[('pure', 'view', 'nonpayable', 'payable')]
    type: Literal[('function', 'constructor', 'fallback', 'receive')]
# NOTE(review): the extracted source showed a bare `.parametrize(...)` with no
# `@pytest.mark` prefix -- the decorator head was evidently lost during
# extraction; restored here.  `pytest` must be imported at the top of this
# module.
@pytest.mark.parametrize('decomp', ['PartialPivLU', 'FullPivLU'])
def test_local_solve(decomp):
    """Check Slate's element-local solve with an explicit factorisation.

    Solves A x = b where A is the DG mass matrix and b the load vector of the
    constant function f = 1; the solution must reproduce f for both Eigen LU
    decompositions.
    """
    V = FunctionSpace(UnitSquareMesh(3, 3), 'DG', 3)
    f = Function(V).assign(1.0)
    u = TrialFunction(V)
    v = TestFunction(V)
    A = Tensor(inner(u, v) * dx)
    b = Tensor(inner(f, v) * dx)
    # A mass-matrix solve should recover f up to round-off.
    x = assemble(A.solve(b, decomposition=decomp))
    assert np.allclose(x.dat.data, f.dat.data, rtol=1e-13)
class LocationMenu(models.Model):
    """Navigation menu belonging to a Location."""

    # on_delete made explicit: deleting a Location removes its menus.  This
    # matches the pre-Django-2.0 implicit default and is mandatory from 2.0 on.
    location = models.ForeignKey(Location, on_delete=models.CASCADE)
    name = models.CharField(max_length=15, help_text=('A short title for your menu. Note: If there is only one page in the menu, it will be used as a ' + 'top level nav item, and the menu name will not be used.'))

    def page_count(self):
        """Return the number of pages attached to this menu."""
        # COUNT(*) at the database instead of fetching every row into memory.
        return self.pages.count()

    def __str__(self):
        return self.name
class ValidatorStatus(Enum):
    """Validator bonding state, with values matching the Cosmos SDK staking
    module's string encoding."""

    UNSPECIFIED = 'BOND_STATUS_UNSPECIFIED'
    BONDED = 'BOND_STATUS_BONDED'
    UNBONDING = 'BOND_STATUS_UNBONDING'
    UNBONDED = 'BOND_STATUS_UNBONDED'

    # NOTE(review): the @classmethod decorator was missing in the extracted
    # source even though the body uses ``cls`` -- restored so that
    # ValidatorStatus.from_proto(n) is callable as clearly intended.
    @classmethod
    def from_proto(cls, value: int) -> 'ValidatorStatus':
        """Map a protobuf BondStatus integer (0-3) to the matching member.

        Raises RuntimeError for any unknown integer.
        """
        mapping = {0: cls.UNSPECIFIED, 1: cls.UNBONDED, 2: cls.UNBONDING, 3: cls.BONDED}
        try:
            return mapping[value]
        except KeyError:
            raise RuntimeError(f'Unable to decode validator status: {value}') from None
def readable_size(nbytes):
    """Format a byte count as a human-readable string, e.g. 1536 -> '1.5 KB'.

    Uses 1024-based units up to PB; trailing zeros (and a dangling decimal
    point) are trimmed from the two-decimal rounding.
    """
    units = ('B', 'KB', 'MB', 'GB', 'TB', 'PB')
    if nbytes == 0:
        return '0 B'
    value = nbytes
    unit_index = 0
    # Scale down until the value fits the unit, capping at the largest unit.
    while value >= 1024 and unit_index < len(units) - 1:
        value /= 1024.0
        unit_index += 1
    trimmed = ('%.2f' % value).rstrip('0').rstrip('.')
    return '%s %s' % (trimmed, units[unit_index])
def extractOrokincentralCom(item):
    """Parse 'orokincentral.com' feed items into release messages.

    Returns None for previews or posts without a chapter/volume, a release
    message when a known tag matches, and False otherwise.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if not (chp or vol) or 'preview' in item['title'].lower():
        return None
    known_tags = (
        ('demon king executive', 'As The Heros Mother, I Became An Executive Of The Demon Kings Army', 'translated'),
        ('deported for innocent charges', 'Former Operations Chief Exiled For Innocent Charges Becomes The Strongest Adventurer', 'translated'),
        ('northwest gas station', 'I Run A Gas Station In The Northwest', 'translated'),
        ('after rebirth, he married his childhood sweetheart', 'after rebirth, he married his childhood sweetheart', 'translated'),
        ('PRC', 'PRC', 'translated'),
        ('Loiterous', 'Loiterous', 'oel'),
    )
    for tag, series, tl_type in known_tags:
        if tag in item['tags']:
            return buildReleaseMessageWithType(item, series, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
class clade():
    """Leaf-like node used when representing a collapsed subtree in a
    phylogenetic tree plot."""

    def __init__(self, givenName):
        # Fields that simply start out unset.
        for attr in ('subtree', 'leaves', 'height', 'absoluteTime', 'parent',
                     'index', 'x', 'y', 'lastHeight', 'lastAbsoluteTime'):
            setattr(self, attr, None)
        self.branchType = 'leaf'
        self.length = 0.0
        self.traits = {}
        self.name = givenName
        self.width = 1

    def is_leaflike(self):
        """Clades behave like leaves for traversal purposes."""
        return True

    def is_leaf(self):
        """A clade is not a true leaf."""
        return False

    def is_node(self):
        """A clade is not an internal node."""
        return False
def extractSakuraidreaderWordpressCom(item):
    """Parse 'sakuraidreader.wordpress.com' feed items into release messages.

    Returns None for previews or posts without a chapter/volume, a release
    message when a known tag matches, and False otherwise.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if not (chp or vol) or 'preview' in item['title'].lower():
        return None
    known_tags = (
        ('ring ring', 'ring ring', 'translated'),
        ('Graceling: The Girl With Time', 'Graceling: The Girl With Time', 'oel'),
        ('Imagine Online: The Game', 'Imagine Online: The Game', 'oel'),
        ('Suzaku: The Phoenix God of Fire', 'Suzaku: The Phoenix God of Fire', 'oel'),
        ('1 Soul, 2 lives', '1 Soul, 2 lives', 'oel'),
    )
    for tag, series, tl_type in known_tags:
        if tag in item['tags']:
            return buildReleaseMessageWithType(item, series, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
class LChChroma(Fit):
    """Gamut-map a color by bisecting LCh chroma until the clipped result is
    within a delta-E 2000 tolerance ("lch-chroma" fitting)."""

    NAME = 'lch-chroma'
    # Stop refining once delta-E is within EPSILON below LIMIT.
    EPSILON = 0.1
    # Maximum acceptable delta-E between mapped and clipped color.
    LIMIT = 2.0
    DE = '2000'
    DE_OPTIONS = {}
    # Mapping is performed in LCh D65.
    SPACE = 'lch-d65'
    MIN_LIGHTNESS = 0
    MAX_LIGHTNESS = 100
    # Bisection terminates when the chroma window is this narrow.
    MIN_CONVERGENCE = 0.0001

    def fit(self, color: 'Color', **kwargs: Any) -> None:
        """Mutate ``color`` in place so it lies within its own gamut."""
        space = color.space()
        mapcolor = color.convert(self.SPACE, norm=False)
        # First two channel indexes of LCh: lightness and chroma.
        (l, c) = mapcolor._space.indexes()[:2]
        lightness = mapcolor[l]
        sdr = (color._space.DYNAMIC_RANGE == 'sdr')
        # SDR fast paths: lightness at/above white or at/below black maps
        # straight to white/black (alpha preserved via mapcolor[-1]).
        if (sdr and ((lightness >= self.MAX_LIGHTNESS) or alg.isclose(lightness, self.MAX_LIGHTNESS, abs_tol=1e-06, dims=alg.SC))):
            clip_channels(color.update('xyz-d65', WHITE, mapcolor[(- 1)]))
            return
        elif (lightness <= self.MIN_LIGHTNESS):
            clip_channels(color.update('xyz-d65', BLACK, mapcolor[(- 1)]))
            return
        # Bisect chroma between zero and the current (possibly excessive) value.
        low = 0.0
        high = mapcolor[c]
        clip_channels(color._hotswap(mapcolor.convert(space, norm=False)))
        # Only refine when plain clipping already deviates too much.
        if (mapcolor.delta_e(color, method=self.DE, **self.DE_OPTIONS) >= self.LIMIT):
            # Tracks whether `low` still corresponds to an in-gamut chroma.
            lower_in_gamut = True
            while ((high - low) > self.MIN_CONVERGENCE):
                mapcolor[c] = ((high + low) * 0.5)
                if (lower_in_gamut and mapcolor.in_gamut(space, tolerance=0)):
                    low = mapcolor[c]
                else:
                    clip_channels(color._hotswap(mapcolor.convert(space, norm=False)))
                    de = mapcolor.delta_e(color, method=self.DE, **self.DE_OPTIONS)
                    if (de < self.LIMIT):
                        # Close enough to the limit: accept and stop.
                        if ((self.LIMIT - de) < self.EPSILON):
                            break
                        # `low` now holds an out-of-gamut (but acceptable)
                        # chroma, so stop gamut-testing the lower bound.
                        if lower_in_gamut:
                            lower_in_gamut = False
                        low = mapcolor[c]
                    else:
                        high = mapcolor[c]
        color.normalize()
def make_dropdown(text, options=None, command=None, scale=ani.menu_text_scale):
    """Create a labelled DirectOptionMenu dropdown.

    ``text`` becomes the label (rendered as 'text:'), ``options`` the menu
    items (defaults to a single 'None' entry), ``command`` the selection
    callback and ``scale`` the widget scale.

    Fix: the previous signature used a mutable default (``options=['None']``)
    that was shared across every call; a fresh list is now built per call.
    """
    if options is None:
        options = ['None']
    dropdown = DirectOptionMenu(scale=scale, items=options, highlightColor=(0.65, 0.65, 0.65, 1), command=command, textMayChange=1, text_align=TextNode.ALeft)
    # Label parented to the dropdown so it moves with it.
    DirectLabel(text=(text + ':'), relief=None, text_fg=(0, 0, 0, 1), text_align=TextNode.ALeft, parent=dropdown, pos=(0, 0, 1))
    return dropdown
class HTTP1ThenHTTP2Backend(MockBackend):
    """Mock network backend that hands out HTTP1ThenHTTP2Stream instances
    fed from the backend's canned byte buffer."""

    def connect_tcp(self, host: str, port: int, timeout: typing.Optional[float]=None, local_address: typing.Optional[str]=None, socket_options: typing.Optional[typing.Iterable[SOCKET_OPTION]]=None) -> NetworkStream:
        # Each stream gets its own copy of the buffered bytes so streams
        # cannot interfere with one another.
        buffered = list(self._buffer)
        return HTTP1ThenHTTP2Stream(buffered)
# NOTE(review): this class body was flattened during extraction and its
# decorators were stripped -- the repeated same-name defs (default/default,
# required/required, help/help) are plainly @property getter/setter pairs, and
# arg_options/dest/nargs/type/option_strings/etc. read as @property accessors.
# The code is left byte-identical below; reformat only with the original
# decorators in hand, since as written each later def silently shadows the
# earlier one.
class FieldWrapper(Wrapper[dataclasses.Field]): def __init__(self, field: dataclasses.Field, parent: Any=None, prefix: str=''): super().__init__(wrapped=field, name=field.name) self.field: dataclasses.Field = field self.prefix: str = prefix self._parent: Any = parent self._option_strings: Optional[Set[str]] = None self._required: Optional[bool] = None self._docstring: docstring.AttributeDocString = docstring.AttributeDocString() self._help: Optional[str] = None self._default: Optional[Union[(Any, List[Any])]] = None self._dest: Optional[str] = None self._arg_options: Dict[(str, Any)] = {} self._type: Optional[Type[Any]] = None self._results: Dict[(str, Any)] = {} def arg_options(self) -> Dict[(str, Any)]: if self._arg_options: return self._arg_options options = self.get_arg_options() options.update(self.custom_arg_options) action = options.get('action', 'store') self._arg_options = only_keep_action_args(options, action) return self._arg_options def get_arg_options(self) -> Dict[(str, Any)]: if (not self.field.init): return {} _arg_options: Dict[(str, Any)] = {} _arg_options['required'] = False _arg_options['dest'] = self.dest _arg_options['default'] = self.default if self.help: _arg_options['help'] = self.help elif (self.default is not None): _arg_options['help'] = ' ' _arg_options['type'] = self.type try: _arg_options['type'].__name__ = self.type.__repr__().replace('typing.', '') except Exception as e: pass return _arg_options def action(self) -> Union[(str, Type[argparse.Action])]: return self.custom_arg_options.get('action', 'store') def action_str(self) -> str: if isinstance(self.action, str): return self.action return self.action.__name__ def custom_arg_options(self) -> Dict[(str, Any)]: return self.field.metadata.get('custom_args', {}) def option_strings(self) -> List[str]: dashes: List[str] = [] options: List[str] = [] dashes.append('--') options.append(self.dest) option_strings = set((f'{dash}{option}' for (dash, option) in zip(dashes, options))) return 
list(sorted(option_strings, key=len)) def dest(self) -> str: self._dest = super().dest return self._dest def nargs(self): return self.custom_arg_options.get('nargs', None) def default(self) -> Any: if (self._default is not None): return self._default default: Any = utils.default_value(self.field) if (default is dataclasses.MISSING): default = None self._default = default return self._default def default(self, value: Any): self._default = value def required(self) -> bool: if (self._required is not None): return self._required if self.action_str.startswith('store_'): self._required = False elif self.is_optional: self._required = False elif self.parent.required: self._required = True elif (self.nargs in {'?', '*'}): self._required = False elif (self.nargs == '+'): self._required = True elif (self.default is None): self._required = True else: self._required = False return self._required def required(self, value: bool): self._required = value def type(self) -> Type[Any]: if (self._type is None): self._type = self.field.type return self._type def __str__(self): return f"<FieldWrapper for field '{self.dest}'>" def help(self) -> Optional[str]: if self._help: return self._help try: self._docstring = docstring.get_attribute_docstring(self.parent.dataclass, self.field.name) except (SystemExit, Exception) as e: logger.debug(f"Couldn't find attribute docstring for field {self.name}, {e}") self._docstring = docstring.AttributeDocString() if self._docstring.docstring_below: self._help = self._docstring.docstring_below elif self._docstring.comment_above: self._help = self._docstring.comment_above elif self._docstring.comment_inline: self._help = self._docstring.comment_inline return self._help def help(self, value: str): self._help = value def name(self) -> str: return self.field.name def is_list(self): return utils.is_list(self.type) def is_enum(self) -> bool: return utils.is_enum(self.type) def is_tuple(self) -> bool: return utils.is_tuple(self.type) def is_bool(self) -> bool: 
return utils.is_bool(self.type) def is_optional(self) -> bool: return utils.is_optional(self.field.type) def is_union(self) -> bool: return utils.is_union(self.field.type) def type_arguments(self) -> Optional[Tuple[(Type, ...)]]: return utils.get_type_arguments(self.type) def parent(self) -> 'DataclassWrapper': return self._parent
class Version(namedtuple('Version', ['major', 'minor', 'micro', 'release', 'pre', 'post', 'dev'])):
    """Immutable version tuple with PEP 440-style release validation.

    NOTE(review): the branches below order release-type names
    lexicographically (e.g. '.dev' < '.dev-candidate' < 'alpha' < ... <
    'final'); valid names come from the module-level REL_MAP -- confirm the
    exact set against that mapping.
    """

    def __new__(cls, major: int, minor: int, micro: int, release: str='final', pre: int=0, post: int=0, dev: int=0) -> Version:
        """Validate the part combination before constructing the tuple."""
        # All numeric parts must be non-negative integers.
        for value in (major, minor, micro, pre, post):
            if (not (isinstance(value, int) and (value >= 0))):
                raise ValueError("All version parts except 'release' should be integers.")
        if (release not in REL_MAP):
            raise ValueError(f"'{release}' is not a valid release type.")
        # Pre-release types sort lexically between '.dev-candidate' and 'final'.
        if ('.dev-candidate' < release < 'final'):
            if (pre == 0):
                raise ValueError('Implicit pre-releases not allowed.')
            elif dev:
                raise ValueError('Version is not a development release.')
            elif post:
                raise ValueError('Post-releases are not allowed with pre-releases.')
        # Development release types sort lexically before 'alpha'.
        elif (release < 'alpha'):
            if ((release > '.dev') and (pre == 0)):
                raise ValueError('Implicit pre-release not allowed.')
            elif post:
                raise ValueError('Post-releases are not allowed with pre-releases.')
        # Final releases may not carry pre/dev parts.
        elif pre:
            raise ValueError('Version is not a pre-release.')
        elif dev:
            raise ValueError('Version is not a development release.')
        return super().__new__(cls, major, minor, micro, release, pre, post, dev)

    def _is_pre(self) -> bool:
        # True for any explicit pre-release number.
        return bool((self.pre > 0))

    def _is_dev(self) -> bool:
        # Dev release types sort lexically before 'alpha'.
        return bool((self.release < 'alpha'))

    def _is_post(self) -> bool:
        return bool((self.post > 0))

    def _get_dev_status(self) -> str:
        # Trove "Development Status" classifier text for this release type.
        return DEV_STATUS[self.release]

    def _get_canonical(self) -> str:
        """Return the canonical version string (micro omitted when zero)."""
        if (self.micro == 0):
            ver = f'{self.major}.{self.minor}'
        else:
            ver = f'{self.major}.{self.minor}.{self.micro}'
        if self._is_pre():
            ver += f'{REL_MAP[self.release]}{self.pre}'
        if self._is_post():
            ver += f'.post{self.post}'
        if self._is_dev():
            ver += f'.dev{self.dev}'
        return ver
class IntercomMock(CommonIntercomMock):
    """Intercom test double that records submitted tasks and advertises one
    extra, deliberately failing analysis plugin."""

    def add_single_file_task(self, task):
        # Record the task so tests can inspect what was scheduled.
        self.task_list.append(task)

    def get_available_analysis_plugins(self):
        available = super().get_available_analysis_plugins()
        # Extra plugin entry used to exercise failure handling.
        available['failed_analysis'] = ('plugin description', False, {'default': True}, *self._common_fields)
        return available
class OptionSeriesBoxplotSonificationContexttracksMappingHighpass(Options):
    """Generated Highcharts option wrapper for
    ``series.boxplot.sonification.contextTracks.mapping.highpass``.

    NOTE(review): these accessors were presumably ``@property``-decorated in
    the generated original; decorators appear stripped file-wide.
    """

    def frequency(self) -> 'OptionSeriesBoxplotSonificationContexttracksMappingHighpassFrequency':
        # Lazily-created sub-options object for the highpass cutoff frequency.
        return self._config_sub_data('frequency', OptionSeriesBoxplotSonificationContexttracksMappingHighpassFrequency)

    def resonance(self) -> 'OptionSeriesBoxplotSonificationContexttracksMappingHighpassResonance':
        # Lazily-created sub-options object for the highpass resonance.
        return self._config_sub_data('resonance', OptionSeriesBoxplotSonificationContexttracksMappingHighpassResonance)
class PythonFileFinder():
    """Walks directories and collects the Python (and optionally Jupyter
    notebook) files that should be scanned for imports.

    NOTE(review): bare annotated attributes with no __init__ strongly suggest
    this was declared as a @dataclass whose decorator was lost during
    extraction -- confirm before instantiating directly.
    """

    # Regex fragments for paths to always skip.
    exclude: tuple[(str, ...)]
    extend_exclude: tuple[(str, ...)]
    # True when the caller kept the default excludes; only then is .gitignore
    # honoured (see _generate_gitignore_pathspec).
    using_default_exclude: bool
    ignore_notebooks: bool = False

    def get_all_python_files_in(self, directories: tuple[(Path, ...)]) -> list[Path]:
        """Return the de-duplicated list of source files under ``directories``."""
        logging.debug('Collecting Python files to scan...')
        source_files = set()
        # One combined alternation regex covering all exclude patterns.
        ignore_regex = re.compile('|'.join((self.exclude + self.extend_exclude)))
        file_lookup_suffixes = ({'.py'} if self.ignore_notebooks else {'.py', '.ipynb'})
        gitignore_spec = self._generate_gitignore_pathspec(Path())
        for directory in directories:
            for (root_str, dirs, files) in os.walk(directory, topdown=True):
                root = Path(root_str)
                if self._is_directory_ignored(root, ignore_regex):
                    # Prune the walk: do not descend into ignored directories.
                    dirs[:] = []
                    continue
                for file_str in files:
                    file = (root / file_str)
                    if (not self._is_file_ignored(file, file_lookup_suffixes, ignore_regex, gitignore_spec)):
                        source_files.add(file)
        source_files_list = list(source_files)
        logging.debug('Python files to scan for imports:\n%s\n', '\n'.join([str(file) for file in source_files_list]))
        return source_files_list

    def _is_directory_ignored(self, directory: Path, ignore_regex: Pattern[str]) -> bool:
        # Truthiness guard: an empty pattern tuple compiles to '' which would
        # match every path.
        return bool(((self.exclude + self.extend_exclude) and ignore_regex.match(str(directory))))

    def _is_file_ignored(self, file: Path, file_lookup_suffixes: set[str], ignore_regex: Pattern[str], gitignore_spec: (PathSpec | None)) -> bool:
        # Ignored when: wrong suffix, exclude-regex hit, or gitignore hit.
        return bool(((file.suffix not in file_lookup_suffixes) or ((self.exclude + self.extend_exclude) and ignore_regex.match(file.as_posix())) or (gitignore_spec and gitignore_spec.match_file(file))))

    def _generate_gitignore_pathspec(self, directory: Path) -> (PathSpec | None):
        """Parse ``directory``'s .gitignore into a PathSpec, or return None
        when custom excludes are in use or no .gitignore exists."""
        if (not self.using_default_exclude):
            # Custom excludes take full control; .gitignore is then ignored.
            return None
        try:
            with (directory / '.gitignore').open() as gitignore:
                return PathSpec.from_lines('gitwildmatch', gitignore)
        except FileNotFoundError:
            return None
# NOTE(review): the stray `_deserializable` token preceding the class is
# almost certainly the tail of a stripped decorator (embedchain decorates its
# loaders with `@register_deserializable`); as extracted this line does not
# parse -- restore the decorator from the original source.  The class itself
# renders a local (question, answer) pair into a document dict keyed by a
# sha256 of the rendered text plus the pseudo-URL 'local'.
_deserializable class LocalQnaPairLoader(BaseLoader): def load_data(self, content): (question, answer) = content content = f'''Q: {question} A: {answer}''' url = 'local' meta_data = {'url': url, 'question': question} doc_id = hashlib.sha256((content + url).encode()).hexdigest() return {'doc_id': doc_id, 'data': [{'content': content, 'meta_data': meta_data}]}
class Output(base_tests.SimpleDataPlane):
    """OFTest case: a flow with a single output action must forward matching
    packets to exactly that port."""

    def runTest(self):
        ingress, egress = openflow_ports(2)
        actions = [ofp.action.output(egress)]
        pkt = simple_tcp_packet()
        logging.info('Running actions test for %s', pp(actions))
        # Start from a clean flow table.
        delete_all_flows(self.controller)
        logging.info('Inserting flow')
        flow_mod = ofp.message.flow_add(table_id=test_param_get('table', 0), match=packet_to_flow_match(self, pkt), instructions=[ofp.instruction.apply_actions(actions)], buffer_id=ofp.OFP_NO_BUFFER, priority=1000)
        self.controller.message_send(flow_mod)
        # Ensure the flow is installed before sending traffic.
        do_barrier(self.controller)
        raw = str(pkt)
        logging.info('Sending packet, expecting output to port %d', egress)
        self.dataplane.send(ingress, raw)
        verify_packets(self, raw, [egress])