code stringlengths 281 23.7M |
|---|
def extractAnotherWorldTranslations(item):
    """Build a release message for an Another World Translations feed item.

    Returns None for previews or items without chapter/volume/fragment info,
    a release message for known series tags, and False when no tag matches.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if (not (chp or vol or frag)) or 'preview' in item['title'].lower():
        return None
    # Known tag -> canonical series title, checked in priority order.
    series_by_tag = [
        ('Depths of Labyrinth', "Aim for the Deepest Part of the Different World's Labyrinth"),
        ('Because, Janitor-san Is Not a Hero', 'Because, Janitor-san Is Not a Hero'),
        ('World Death Game', 'The World is Fun as it has Become a Death Game'),
    ]
    for tag, title in series_by_tag:
        if tag in item['tags']:
            return buildReleaseMessageWithType(item, title, vol, chp, frag=frag, postfix=postfix)
    return False
class DataTable(ConstrainedControl):
    """A Material Design data table control.

    ``columns`` and ``rows`` are child controls; scalar options are stored as
    plain control attributes and complex objects (borders, state colors, text
    styles) are serialized to JSON in ``_before_build_command``.

    Fix: the ``@property``/``@<name>.setter`` decorators had been stripped
    (leaving stray ``_radius.setter``-style expression statements that raise
    NameError, and duplicate ``def``s that shadow each other, so the private
    backing fields were never initialized). They are restored here.
    """

    def __init__(
        self,
        columns: Optional[List[DataColumn]] = None,
        rows: Optional[List[DataRow]] = None,
        ref: Optional[Ref] = None,
        key: Optional[str] = None,
        width: OptionalNumber = None,
        height: OptionalNumber = None,
        left: OptionalNumber = None,
        top: OptionalNumber = None,
        right: OptionalNumber = None,
        bottom: OptionalNumber = None,
        expand: Union[(None, bool, int)] = None,
        col: Optional[ResponsiveNumber] = None,
        opacity: OptionalNumber = None,
        rotate: RotateValue = None,
        scale: ScaleValue = None,
        offset: OffsetValue = None,
        aspect_ratio: OptionalNumber = None,
        animate_opacity: AnimationValue = None,
        animate_size: AnimationValue = None,
        animate_position: AnimationValue = None,
        animate_rotation: AnimationValue = None,
        animate_scale: AnimationValue = None,
        animate_offset: AnimationValue = None,
        on_animation_end=None,
        tooltip: Optional[str] = None,
        visible: Optional[bool] = None,
        disabled: Optional[bool] = None,
        data: Any = None,
        border: Optional[Border] = None,
        border_radius: BorderRadiusValue = None,
        horizontal_lines: Optional[BorderSide] = None,
        vertical_lines: Optional[BorderSide] = None,
        checkbox_horizontal_margin: OptionalNumber = None,
        column_spacing: OptionalNumber = None,
        data_row_color: Union[(None, str, Dict[(MaterialState, str)])] = None,
        data_row_min_height: OptionalNumber = None,
        data_row_max_height: OptionalNumber = None,
        data_text_style: Optional[TextStyle] = None,
        bgcolor: Optional[str] = None,
        gradient: Optional[Gradient] = None,
        divider_thickness: OptionalNumber = None,
        heading_row_color: Union[(None, str, Dict[(MaterialState, str)])] = None,
        heading_row_height: OptionalNumber = None,
        heading_text_style: Optional[TextStyle] = None,
        horizontal_margin: OptionalNumber = None,
        show_bottom_border: Optional[bool] = None,
        show_checkbox_column: Optional[bool] = None,
        sort_ascending: Optional[bool] = None,
        sort_column_index: Optional[int] = None,
        on_select_all=None,
    ):
        ConstrainedControl.__init__(self, ref=ref, key=key, width=width, height=height, left=left, top=top, right=right, bottom=bottom, expand=expand, col=col, opacity=opacity, rotate=rotate, scale=scale, offset=offset, aspect_ratio=aspect_ratio, animate_opacity=animate_opacity, animate_size=animate_size, animate_position=animate_position, animate_rotation=animate_rotation, animate_scale=animate_scale, animate_offset=animate_offset, on_animation_end=on_animation_end, tooltip=tooltip, visible=visible, disabled=disabled, data=data)
        # These assignments run through the property setters below, which
        # populate the private backing fields (e.g. self.__columns) and
        # control attributes.
        self.columns = columns
        self.rows = rows
        self.border = border
        self.border_radius = border_radius
        self.horizontal_lines = horizontal_lines
        self.vertical_lines = vertical_lines
        self.bgcolor = bgcolor
        self.gradient = gradient
        self.divider_thickness = divider_thickness
        self.checkbox_horizontal_margin = checkbox_horizontal_margin
        self.column_spacing = column_spacing
        self.data_row_color = data_row_color
        self.data_row_min_height = data_row_min_height
        self.data_row_max_height = data_row_max_height
        self.data_text_style = data_text_style
        self.heading_row_color = heading_row_color
        self.heading_row_height = heading_row_height
        self.heading_text_style = heading_text_style
        self.horizontal_margin = horizontal_margin
        self.show_bottom_border = show_bottom_border
        self.show_checkbox_column = show_checkbox_column
        self.sort_ascending = sort_ascending
        self.sort_column_index = sort_column_index
        self.on_select_all = on_select_all

    def _get_control_name(self):
        return 'datatable'

    def _before_build_command(self):
        """Serialize complex option objects to JSON control attributes."""
        super()._before_build_command()
        self._set_attr_json('border', self.__border)
        self._set_attr_json('gradient', self.__gradient)
        self._set_attr_json('borderRadius', self.__border_radius)
        self._set_attr_json('horizontalLines', self.__horizontal_lines)
        self._set_attr_json('verticalLines', self.__vertical_lines)
        self._set_attr_json('dataRowColor', self.__data_row_color)
        self._set_attr_json('headingRowColor', self.__heading_row_color)
        self._set_attr_json('dataTextStyle', self.__data_text_style)
        self._set_attr_json('headingTextStyle', self.__heading_text_style)

    def _get_children(self):
        """Children are all columns followed by all rows."""
        children = []
        children.extend(self.__columns)
        children.extend(self.__rows)
        return children

    # columns
    @property
    def columns(self):
        return self.__columns

    @columns.setter
    def columns(self, value: Optional[List[DataColumn]]):
        self.__columns = value if value is not None else []

    # rows
    @property
    def rows(self):
        return self.__rows

    @rows.setter
    def rows(self, value: Optional[List[DataRow]]):
        self.__rows = value if value is not None else []

    # border
    @property
    def border(self) -> Optional[Border]:
        return self.__border

    @border.setter
    def border(self, value: Optional[Border]):
        self.__border = value

    # border_radius
    @property
    def border_radius(self) -> BorderRadiusValue:
        return self.__border_radius

    @border_radius.setter
    def border_radius(self, value: BorderRadiusValue):
        self.__border_radius = value

    # horizontal_lines
    @property
    def horizontal_lines(self) -> Optional[BorderSide]:
        return self.__horizontal_lines

    @horizontal_lines.setter
    def horizontal_lines(self, value: Optional[BorderSide]):
        self.__horizontal_lines = value

    # vertical_lines
    @property
    def vertical_lines(self) -> Optional[BorderSide]:
        return self.__vertical_lines

    @vertical_lines.setter
    def vertical_lines(self, value: Optional[BorderSide]):
        self.__vertical_lines = value

    # checkbox_horizontal_margin
    @property
    def checkbox_horizontal_margin(self) -> OptionalNumber:
        return self._get_attr('checkboxHorizontalMargin')

    @checkbox_horizontal_margin.setter
    def checkbox_horizontal_margin(self, value: OptionalNumber):
        self._set_attr('checkboxHorizontalMargin', value)

    # column_spacing
    @property
    def column_spacing(self) -> OptionalNumber:
        return self._get_attr('columnSpacing')

    @column_spacing.setter
    def column_spacing(self, value: OptionalNumber):
        self._set_attr('columnSpacing', value)

    # divider_thickness
    @property
    def divider_thickness(self) -> OptionalNumber:
        return self._get_attr('dividerThickness', data_type='float', def_value=1.0)

    @divider_thickness.setter
    def divider_thickness(self, value: OptionalNumber):
        self._set_attr('dividerThickness', value)

    # horizontal_margin
    @property
    def horizontal_margin(self) -> OptionalNumber:
        return self._get_attr('horizontalMargin')

    @horizontal_margin.setter
    def horizontal_margin(self, value: OptionalNumber):
        self._set_attr('horizontalMargin', value)

    # data_row_color
    @property
    def data_row_color(self) -> Union[(None, str, Dict[(MaterialState, str)])]:
        return self.__data_row_color

    @data_row_color.setter
    def data_row_color(self, value: Union[(None, str, Dict[(MaterialState, str)])]):
        self.__data_row_color = value

    # data_row_min_height
    @property
    def data_row_min_height(self) -> OptionalNumber:
        return self._get_attr('dataRowMinHeight')

    @data_row_min_height.setter
    def data_row_min_height(self, value: OptionalNumber):
        self._set_attr('dataRowMinHeight', value)

    # data_row_max_height
    @property
    def data_row_max_height(self) -> OptionalNumber:
        return self._get_attr('dataRowMaxHeight')

    @data_row_max_height.setter
    def data_row_max_height(self, value: OptionalNumber):
        self._set_attr('dataRowMaxHeight', value)

    # data_text_style
    @property
    def data_text_style(self):
        return self.__data_text_style

    @data_text_style.setter
    def data_text_style(self, value: Optional[TextStyle]):
        self.__data_text_style = value

    # bgcolor
    @property
    def bgcolor(self):
        return self._get_attr('bgColor')

    @bgcolor.setter
    def bgcolor(self, value):
        self._set_attr('bgColor', value)

    # gradient
    @property
    def gradient(self) -> Optional[Gradient]:
        return self.__gradient

    @gradient.setter
    def gradient(self, value: Optional[Gradient]):
        self.__gradient = value

    # heading_row_color
    @property
    def heading_row_color(self) -> Union[(None, str, Dict[(MaterialState, str)])]:
        return self.__heading_row_color

    @heading_row_color.setter
    def heading_row_color(self, value: Union[(None, str, Dict[(MaterialState, str)])]):
        self.__heading_row_color = value

    # heading_row_height
    @property
    def heading_row_height(self) -> OptionalNumber:
        return self._get_attr('headingRowHeight')

    @heading_row_height.setter
    def heading_row_height(self, value: OptionalNumber):
        self._set_attr('headingRowHeight', value)

    # heading_text_style
    @property
    def heading_text_style(self):
        return self.__heading_text_style

    @heading_text_style.setter
    def heading_text_style(self, value: Optional[TextStyle]):
        self.__heading_text_style = value

    # show_bottom_border
    @property
    def show_bottom_border(self) -> Optional[bool]:
        return self._get_attr('showBottomBorder', data_type='bool', def_value=False)

    @show_bottom_border.setter
    def show_bottom_border(self, value: Optional[bool]):
        self._set_attr('showBottomBorder', value)

    # show_checkbox_column
    @property
    def show_checkbox_column(self) -> Optional[bool]:
        return self._get_attr('showCheckboxColumn', data_type='bool', def_value=False)

    @show_checkbox_column.setter
    def show_checkbox_column(self, value: Optional[bool]):
        self._set_attr('showCheckboxColumn', value)

    # sort_ascending
    @property
    def sort_ascending(self) -> Optional[bool]:
        return self._get_attr('sortAscending', data_type='bool', def_value=False)

    @sort_ascending.setter
    def sort_ascending(self, value: Optional[bool]):
        self._set_attr('sortAscending', value)

    # sort_column_index
    @property
    def sort_column_index(self) -> Optional[int]:
        return self._get_attr('sortColumnIndex')

    @sort_column_index.setter
    def sort_column_index(self, value: Optional[int]):
        self._set_attr('sortColumnIndex', value)

    # on_select_all
    @property
    def on_select_all(self):
        return self._get_event_handler('select_all')

    @on_select_all.setter
    def on_select_all(self, handler):
        self._add_event_handler('select_all', handler)
        # Flutter side only wires the callback when the attribute is set.
        self._set_attr('onSelectAll', True if handler is not None else None)
class GenCheckDriver():
    """Simulation driver for a generator/checker DUT.

    Each method is a simulation coroutine: ``yield sig.eq(value)`` schedules a
    signal assignment and a bare ``yield`` advances the simulation one cycle.
    """

    def __init__(self, module):
        self.module = module

    def reset(self):
        """Pulse the DUT reset for one cycle, then release it."""
        dut = self.module
        yield dut.reset.eq(1)
        yield
        yield dut.reset.eq(0)
        yield

    def configure(self, base, length, end=None, random_addr=None, random_data=None):
        """Program the address window and transfer length.

        ``end`` defaults to ``base + 1 MiB``; the random-mode knobs are only
        touched when explicitly provided.
        """
        dut = self.module
        if end is None:
            end = base + 0x100000
        yield dut.base.eq(base)
        yield dut.end.eq(end)
        yield dut.length.eq(length)
        if random_addr is not None:
            yield dut.random_addr.eq(random_addr)
        if random_data is not None:
            yield dut.random_data.eq(random_data)

    def run(self):
        """Start the DUT, wait for ``done``, and capture ``errors`` if present."""
        dut = self.module
        yield dut.start.eq(1)
        yield
        yield dut.start.eq(0)
        yield
        while (yield dut.done) == 0:
            yield
        if hasattr(dut, 'errors'):
            self.errors = (yield dut.errors)
class ViewMode(BaseMode):
    """Free-camera 'view' mode: rotate/zoom the camera and dispatch to the
    other interaction modes (aim, stroke, ball-in-hand, ...) via the keymap."""
    name = Mode.view
    # All actions start inactive; key events registered in enter() flip them.
    keymap = {Action.aim: False, Action.call_shot: False, Action.fine_control: False, Action.move: False, Action.stroke: False, Action.quit: False, Action.zoom: False, Action.cam_save: False, Action.cam_load: False, Action.show_help: False, Action.pick_ball: False, Action.ball_in_hand: False, Action.power: False, Action.elevation: False, Action.english: False, Action.prev_shot: False, Action.introspect: False, Action.hide_cue: False, Action.exec_shot: False}
    def __init__(self):
        super().__init__()
        # When True, elevation snaps ("magnets") to the minimum collision-free
        # angle; threshold is the snap distance in the same angle units.
        self.magnet_theta = True
        self.magnet_threshold = 0.2
    def enter(self, move_active=False, load_prev_cam=False):
        """Enter view mode: relative mouse, hide the cue, bind keys, start tasks.

        move_active pre-activates camera panning; load_prev_cam restores the
        camera state saved the last time view mode exited.
        """
        mouse.mode(MouseMode.RELATIVE)
        if (multisystem.active is not None):
            visual.cue.hide_nodes(ignore=('cue_cseg',))
        if load_prev_cam:
            cam.load_saved_state(Mode.view)
        if move_active:
            self.keymap[Action.move] = True
        # Keyboard/mouse bindings; '-up' variants fire on release.
        self.register_keymap_event('escape', Action.quit, True)
        self.register_keymap_event('mouse1', Action.zoom, True)
        self.register_keymap_event('mouse1-up', Action.zoom, False)
        self.register_keymap_event('a', Action.aim, True)
        self.register_keymap_event('s', Action.stroke, True)
        self.register_keymap_event('v', Action.move, True)
        self.register_keymap_event('v-up', Action.move, False)
        self.register_keymap_event('1', Action.cam_save, True)
        self.register_keymap_event('2', Action.cam_load, True)
        self.register_keymap_event('h', Action.show_help, True)
        self.register_keymap_event('q', Action.pick_ball, True)
        self.register_keymap_event('g', Action.ball_in_hand, True)
        self.register_keymap_event('c', Action.call_shot, True)
        self.register_keymap_event('i', Action.introspect, True)
        self.register_keymap_event('i-up', Action.introspect, False)
        # b/e/x hold-to-adjust elevation/english/power; releasing re-hides the cue.
        self.register_keymap_event('b', Action.elevation, True)
        self.register_keymap_event('b-up', Action.hide_cue, True)
        self.register_keymap_event('e', Action.english, True)
        self.register_keymap_event('e-up', Action.hide_cue, True)
        self.register_keymap_event('x', Action.power, True)
        self.register_keymap_event('x-up', Action.hide_cue, True)
        self.register_keymap_event('p-up', Action.prev_shot, True)
        self.register_keymap_event('space', Action.exec_shot, True)
        self.register_keymap_event('space-up', Action.exec_shot, False)
        tasks.add(self.view_task, 'view_task')
        tasks.add(self.shared_task, 'shared_task')
        if ani.settings['gameplay']['cue_collision']:
            tasks.add(cue_avoid.collision_task, 'collision_task')
    def exit(self):
        """Tear down tasks and save the camera state for later restoration."""
        tasks.remove('view_task')
        tasks.remove('shared_task')
        if ani.settings['gameplay']['cue_collision']:
            tasks.remove('collision_task')
        cam.store_state(Mode.view, overwrite=True)
    def view_task(self, task):
        """Per-frame dispatcher: checks the keymap in priority order and either
        switches mode, adjusts cue parameters, or rotates the camera."""
        if self.keymap[Action.stroke]:
            Global.mode_mgr.change_mode(Mode.stroke)
        elif self.keymap[Action.pick_ball]:
            Global.mode_mgr.change_mode(Mode.pick_ball)
        elif self.keymap[Action.call_shot]:
            Global.mode_mgr.change_mode(Mode.call_shot)
        elif self.keymap[Action.ball_in_hand]:
            Global.mode_mgr.change_mode(Mode.ball_in_hand)
        elif self.keymap[Action.zoom]:
            cam.zoom_via_mouse()
        elif self.keymap[Action.move]:
            cam.move_fixation_via_mouse()
        elif self.keymap[Action.hide_cue]:
            # One-shot: clear all hold-to-adjust flags and re-hide the cue.
            self.keymap[Action.hide_cue] = False
            self.keymap[Action.english] = False
            self.keymap[Action.elevation] = False
            self.keymap[Action.power] = False
            if (multisystem.active is not None):
                visual.cue.hide_nodes(ignore=('cue_cseg',))
        elif self.keymap[Action.elevation]:
            self.view_elevate_cue()
        elif self.keymap[Action.english]:
            self.view_apply_english()
        elif self.keymap[Action.power]:
            self.view_apply_power()
        elif self.keymap[Action.aim]:
            Global.mode_mgr.change_mode(Mode.aim, enter_kwargs=dict(load_prev_cam=True))
        elif self.keymap[Action.exec_shot]:
            # One-shot: strike only when the game rules currently allow a shot.
            self.keymap[Action.exec_shot] = False
            if Global.game.shot_constraints.can_shoot():
                Global.mode_mgr.mode_stroked_from = Mode.aim
                visual.cue.set_object_state_as_render_state(skip_V0=True)
                multisystem.active.strike()
                Global.mode_mgr.change_mode(Mode.calculate)
        elif self.keymap[Action.prev_shot]:
            # One-shot: replay the previous shot when more than one exists.
            self.keymap[Action.prev_shot] = False
            if (len(multisystem) > 1):
                self.change_animation((multisystem.active_index - 1))
                Global.mode_mgr.change_mode(Mode.shot, enter_kwargs=dict(build_animations=False))
                return task.done
        else:
            # Default behavior: mouse rotates the camera.
            cam.rotate_via_mouse()
        return task.cont
    def view_apply_power(self):
        """Adjust shot speed V0 with vertical mouse motion, clamped to limits."""
        visual.cue.show_nodes(ignore=('cue_cseg',))
        with mouse:
            dy = mouse.get_dy()
        V0 = (multisystem.active.cue.V0 + (dy * ani.power_sensitivity))
        if (V0 < ani.min_stroke_speed):
            V0 = ani.min_stroke_speed
        if (V0 > ani.max_stroke_speed):
            V0 = ani.max_stroke_speed
        multisystem.active.cue.set_state(V0=V0)
        hud.update_cue(multisystem.active.cue)
    def view_elevate_cue(self):
        """Adjust cue elevation with the mouse; snap to the minimum
        collision-free angle when within magnet_threshold of it."""
        visual.cue.show_nodes(ignore=('cue_cseg',))
        cue = visual.cue.get_node('cue_stick_focus')
        with mouse:
            delta_elevation = (mouse.get_dy() * ani.elevate_sensitivity)
        # Node roll R is the negative of the elevation angle.
        old_elevation = (- cue.getR())
        new_elevation = max(0, min(ani.max_elevate, (old_elevation + delta_elevation)))
        if (cue_avoid.min_theta >= (new_elevation - self.magnet_threshold)):
            self.magnet_theta = True
            new_elevation = cue_avoid.min_theta
        else:
            self.magnet_theta = False
        cue.setR((- new_elevation))
        multisystem.active.cue.set_state(theta=new_elevation)
        hud.update_cue(multisystem.active.cue)
    def view_apply_english(self):
        """Adjust english (side/vertical spin offset) with the mouse, keeping
        the tip within max_english of the ball radius and re-snapping elevation."""
        visual.cue.show_nodes(ignore=('cue_cseg',))
        with mouse:
            (dx, dy) = (mouse.get_dx(), mouse.get_dy())
        cue = visual.cue.get_node('cue_stick')
        cue_focus = visual.cue.get_node('cue_stick_focus')
        R = visual.cue.follow._ball.params.R
        (delta_y, delta_z) = ((dx * ani.english_sensitivity), (dy * ani.english_sensitivity))
        new_y = (cue.getY() + delta_y)
        new_z = (cue.getZ() + delta_z)
        # Clamp the contact point to the allowed fraction of the ball radius.
        norm = np.sqrt(((new_y ** 2) + (new_z ** 2)))
        if (norm > (ani.max_english * R)):
            new_y *= ((ani.max_english * R) / norm)
            new_z *= ((ani.max_english * R) / norm)
        cue.setY(new_y)
        cue.setZ(new_z)
        if (self.magnet_theta or (cue_avoid.min_theta >= ((- cue_focus.getR()) - self.magnet_threshold))):
            cue_focus.setR((- cue_avoid.min_theta))
        # a/b are the spin offsets normalized by ball radius; theta from node roll.
        multisystem.active.cue.set_state(a=((- new_y) / R), b=(new_z / R), theta=(- visual.cue.get_node('cue_stick_focus').getR()))
        hud.update_cue(multisystem.active.cue)
    def change_animation(self, shot_index):
        """Switch the active system/animation to shot_index.

        NOTE(review): everything after the ``raise NotImplementedError()`` is
        unreachable dead code (looks like a WIP cue-refocus path) — confirm
        whether it should be deleted or the raise removed.
        """
        multisystem.set_active(shot_index)
        visual.attach_system(multisystem.active)
        visual.buildup()
        visual.build_shot_animation()
        visual.animate(PlaybackMode.LOOP)
        raise NotImplementedError()
        dummy = Ball('dummy')
        dummy.R = multisystem.active.cue.cueing_ball.params.R
        dummy.rvw = multisystem.active.cue.cueing_ball.history.rvw[0]
        dummy.render()
        visual.cue.init_focus(dummy)
        multisystem.active.cue.set_render_state_as_object_state()
        visual.cue.follow = None
        dummy.remove_nodes()
        del dummy
        cue_avoid.init_collisions()
        hud.update_cue(multisystem.active.cue)
@pytest.mark.parametrize('interpol_cls', [Interpolator, LST, IDPP, Redund])
def test_ala_dipeptide_interpol(interpol_cls):
    """Each interpolator yields 28 images between two geometries (30 incl. endpoints)."""
    initial = geom_loader('lib:dipeptide_init.xyz')
    final = geom_loader('lib:dipeptide_fin.xyz')
    geoms = (initial, final)
    interpolator = interpol_cls(geoms, 28, align=True)
    geoms = interpolator.interpolate_all()
    # 2 endpoints + 28 interpolated images
    assert (len(geoms) == 30)
class PubsubSubscription(resource_class_factory('pubsub_subscription', 'name', hash_key=True)):
    """Pub/Sub Subscription resource, keyed (and hashed) on its ``name``."""

    # NOTE(review): restored what looks like a stripped caching decorator
    # (the bare string statement "('iam_policy')" was its remnant) — confirm
    # the decorator name against the crawler base module.
    @cached('iam_policy')
    def get_iam_policy(self, client=None):
        """Fetch the subscription's IAM policy.

        Returns the policy data, or None after logging and recording a
        warning when the API call fails or is unsupported.
        """
        try:
            (data, _) = client.fetch_pubsub_subscription_iam_policy(self['name'])
            return data
        except (api_errors.ApiExecutionError, ResourceNotSupported) as e:
            err_msg = ('Could not get PubSub Subscription IAM Policy for %s in project %s: %s' % (self['name'], self.parent().key(), e))
            LOGGER.warning(err_msg)
            self.add_warning(err_msg)
            return None
class OptionSeriesCylinderSonificationDefaultinstrumentoptionsMapping(Options):
    """Sonification mapping sub-options for cylinder series default instruments.

    Fix: these accessors are properties in the upstream API; the ``@property``
    decorators had been stripped, which made them plain methods.
    """

    @property
    def frequency(self) -> 'OptionSeriesCylinderSonificationDefaultinstrumentoptionsMappingFrequency':
        """Frequency mapping sub-options."""
        return self._config_sub_data('frequency', OptionSeriesCylinderSonificationDefaultinstrumentoptionsMappingFrequency)

    @property
    def gapBetweenNotes(self) -> 'OptionSeriesCylinderSonificationDefaultinstrumentoptionsMappingGapbetweennotes':
        """Gap-between-notes mapping sub-options."""
        return self._config_sub_data('gapBetweenNotes', OptionSeriesCylinderSonificationDefaultinstrumentoptionsMappingGapbetweennotes)

    @property
    def highpass(self) -> 'OptionSeriesCylinderSonificationDefaultinstrumentoptionsMappingHighpass':
        """High-pass filter mapping sub-options."""
        return self._config_sub_data('highpass', OptionSeriesCylinderSonificationDefaultinstrumentoptionsMappingHighpass)

    @property
    def lowpass(self) -> 'OptionSeriesCylinderSonificationDefaultinstrumentoptionsMappingLowpass':
        """Low-pass filter mapping sub-options."""
        return self._config_sub_data('lowpass', OptionSeriesCylinderSonificationDefaultinstrumentoptionsMappingLowpass)

    @property
    def noteDuration(self) -> 'OptionSeriesCylinderSonificationDefaultinstrumentoptionsMappingNoteduration':
        """Note-duration mapping sub-options."""
        return self._config_sub_data('noteDuration', OptionSeriesCylinderSonificationDefaultinstrumentoptionsMappingNoteduration)

    @property
    def pan(self) -> 'OptionSeriesCylinderSonificationDefaultinstrumentoptionsMappingPan':
        """Stereo-pan mapping sub-options."""
        return self._config_sub_data('pan', OptionSeriesCylinderSonificationDefaultinstrumentoptionsMappingPan)

    @property
    def pitch(self) -> 'OptionSeriesCylinderSonificationDefaultinstrumentoptionsMappingPitch':
        """Pitch mapping sub-options."""
        return self._config_sub_data('pitch', OptionSeriesCylinderSonificationDefaultinstrumentoptionsMappingPitch)

    @property
    def playDelay(self) -> 'OptionSeriesCylinderSonificationDefaultinstrumentoptionsMappingPlaydelay':
        """Play-delay mapping sub-options."""
        return self._config_sub_data('playDelay', OptionSeriesCylinderSonificationDefaultinstrumentoptionsMappingPlaydelay)

    @property
    def time(self) -> 'OptionSeriesCylinderSonificationDefaultinstrumentoptionsMappingTime':
        """Time mapping sub-options."""
        return self._config_sub_data('time', OptionSeriesCylinderSonificationDefaultinstrumentoptionsMappingTime)

    @property
    def tremolo(self) -> 'OptionSeriesCylinderSonificationDefaultinstrumentoptionsMappingTremolo':
        """Tremolo mapping sub-options."""
        return self._config_sub_data('tremolo', OptionSeriesCylinderSonificationDefaultinstrumentoptionsMappingTremolo)

    @property
    def volume(self) -> 'OptionSeriesCylinderSonificationDefaultinstrumentoptionsMappingVolume':
        """Volume mapping sub-options."""
        return self._config_sub_data('volume', OptionSeriesCylinderSonificationDefaultinstrumentoptionsMappingVolume)
class OptionSeriesTilemapStatesSelect(Options):
    """Select-state options for tilemap series.

    Fix: ``@property``/``@<name>.setter`` decorators had been stripped — the
    duplicate getter/setter ``def``s were shadowing each other.
    """

    @property
    def animation(self) -> 'OptionSeriesTilemapStatesSelectAnimation':
        """Animation sub-options for entering the select state."""
        return self._config_sub_data('animation', OptionSeriesTilemapStatesSelectAnimation)

    @property
    def enabled(self):
        """Whether the select state is enabled (default: True)."""
        return self._config_get(True)

    @enabled.setter
    def enabled(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def halo(self) -> 'OptionSeriesTilemapStatesSelectHalo':
        """Halo sub-options for the select state."""
        return self._config_sub_data('halo', OptionSeriesTilemapStatesSelectHalo)

    @property
    def lineWidth(self):
        """Absolute line width in the select state (default: unset)."""
        return self._config_get(None)

    @lineWidth.setter
    def lineWidth(self, num: float):
        self._config(num, js_type=False)

    @property
    def lineWidthPlus(self):
        """Line-width increment applied in the select state (default: 1)."""
        return self._config_get(1)

    @lineWidthPlus.setter
    def lineWidthPlus(self, num: float):
        self._config(num, js_type=False)

    @property
    def marker(self) -> 'OptionSeriesTilemapStatesSelectMarker':
        """Marker sub-options for the select state."""
        return self._config_sub_data('marker', OptionSeriesTilemapStatesSelectMarker)
def _genenv_ignoring_key_case_with_prefixes(env_key: str, env_prefixes: List[str]=None, default_value=None) -> str:
    """Look up an environment variable case-insensitively, trying prefixes first.

    Returns the first non-empty prefixed match, otherwise falls back to the
    unprefixed lookup with *default_value*.
    """
    for env_prefix in (env_prefixes or []):
        prefixed_value = _genenv_ignoring_key_case(env_key, env_prefix)
        if prefixed_value:
            return prefixed_value
    return _genenv_ignoring_key_case(env_key, default_value=default_value)
def vel_u_AFBC(x, flag):
    """Advective flux BC for the u-velocity component.

    Returns None (no flux condition imposed) on no-slip box walls and on an
    open top boundary; otherwise a zero-flux function of (x, t).
    """
    box_walls = ('box_left', 'box_right', 'box_top', 'box_front', 'box_back')
    if non_slip_BCs and any(flag == boundaryTags[wall] for wall in box_walls):
        return None
    if openTop and flag == boundaryTags['top']:
        return None
    return lambda x, t: 0.0
class TimeEdit(QtWidgets.QTimeEdit):
    """QTimeEdit that steps minutes in fixed increments and clamps hours to 0-23.

    Accepts an extra ``resolution`` keyword (minutes per step) that is stripped
    before forwarding the remaining arguments to QTimeEdit.
    """
    def __init__(self, *args, **kwargs):
        # Minutes-per-step increment; NOTE(review): stays None when the caller
        # omits ``resolution``, which would make stepBy() fail on the minute
        # section — confirm callers always pass it.
        self.resolution = None
        if ('resolution' in kwargs):
            self.resolution = kwargs['resolution']
            kwargs.pop('resolution')
        super(TimeEdit, self).__init__(*args, **kwargs)
    def stepBy(self, step):
        """Step the focused section: minutes move by ``resolution`` (carrying
        into the hour), hours use the default stepping clamped at 0 and 23."""
        # Section index 1 is presumably the minute field (0 = hours) — confirm
        # against the widget's display format.
        if (self.currentSectionIndex() == 1):
            if (step < 0):
                minute = self.time().minute()
                if (minute == 0):
                    # Borrow from the hour when stepping below :00.
                    self.setTime(QtCore.QTime((self.time().hour() - 1), (60 - self.resolution)))
                else:
                    self.setTime(QtCore.QTime(self.time().hour(), (minute - self.resolution)))
            else:
                minute = self.time().minute()
                if (minute == (60 - self.resolution)):
                    # Carry into the hour when stepping past the last increment.
                    self.setTime(QtCore.QTime((self.time().hour() + 1), 0))
                else:
                    self.setTime(QtCore.QTime(self.time().hour(), (minute + self.resolution)))
        elif (step < 0):
            # Hour section: block stepping below 0...
            if (self.time().hour() != 0):
                super(TimeEdit, self).stepBy(step)
        elif (self.time().hour() != 23):
            # ...and above 23.
            super(TimeEdit, self).stepBy(step)
class OptionSeriesPolygonMarkerStatesHover(Options):
    """Hover-state marker options for polygon series.

    Fix: ``@property``/``@<name>.setter`` decorators had been stripped — the
    duplicate getter/setter ``def``s were shadowing each other.
    """

    @property
    def animation(self) -> 'OptionSeriesPolygonMarkerStatesHoverAnimation':
        """Animation sub-options for entering the hover state."""
        return self._config_sub_data('animation', OptionSeriesPolygonMarkerStatesHoverAnimation)

    @property
    def enabled(self):
        """Whether the hover state is enabled (default: False)."""
        return self._config_get(False)

    @enabled.setter
    def enabled(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def fillColor(self):
        """Marker fill color in the hover state (default: unset)."""
        return self._config_get(None)

    @fillColor.setter
    def fillColor(self, text: str):
        self._config(text, js_type=False)

    @property
    def lineColor(self):
        """Marker line color in the hover state (default: unset)."""
        return self._config_get(None)

    @lineColor.setter
    def lineColor(self, text: str):
        self._config(text, js_type=False)

    @property
    def lineWidth(self):
        """Absolute marker line width in the hover state (default: unset)."""
        return self._config_get(None)

    @lineWidth.setter
    def lineWidth(self, num: float):
        self._config(num, js_type=False)

    @property
    def lineWidthPlus(self):
        """Line-width increment applied on hover (default: 1)."""
        return self._config_get(1)

    @lineWidthPlus.setter
    def lineWidthPlus(self, num: float):
        self._config(num, js_type=False)

    @property
    def radius(self):
        """Absolute marker radius in the hover state (default: unset)."""
        return self._config_get(None)

    @radius.setter
    def radius(self, num: float):
        self._config(num, js_type=False)

    @property
    def radiusPlus(self):
        """Radius increment applied on hover (default: 2)."""
        return self._config_get(2)

    @radiusPlus.setter
    def radiusPlus(self, num: float):
        self._config(num, js_type=False)
class TestContrast(util.ColorAsserts, unittest.TestCase):
    """Tests for Color.contrast()."""

    def test_bad_indirect_input(self):
        """A non-color argument raises TypeError."""
        self.assertRaises(TypeError, Color('red').contrast, 3)

    def test_contrast_dict(self):
        """A color dict behaves the same as its string equivalent."""
        by_name = Color('white').contrast('blue')
        by_dict = Color('white').contrast({'space': 'srgb', 'coords': [0, 0, 1]})
        self.assertEqual(by_name, by_dict)

    def test_bad_method(self):
        """An unknown contrast method raises ValueError."""
        self.assertRaises(ValueError, Color('white').contrast, 'blue', method='bad')
def create_homestead_header_from_parent(parent_header: BlockHeaderAPI, **header_params: Any) -> BlockHeader:
    """Build a Homestead child header from *parent_header*.

    When no explicit difficulty is supplied, the timestamp defaults to one
    second after the parent and the Homestead difficulty is computed from it.
    The remaining construction is delegated to the Frontier helper.
    """
    if 'difficulty' not in header_params:
        # Difficulty depends on the child timestamp, so fix that first.
        header_params.setdefault('timestamp', parent_header.timestamp + 1)
        header_params['difficulty'] = compute_homestead_difficulty(parent_header, header_params['timestamp'])
    return create_frontier_header_from_parent(parent_header, **header_params)
def get_interpolated_tones(text, tones):
    """Return the per-phone tone list for whitespace-split *text*/*tones*.

    Both strings must contain the same number of whitespace-separated tokens.
    Fix: removed leftover debug ``print``s, the unused ``chars`` list, and the
    manual copy loop (the zip over equal-length splits just copied the tones).
    """
    phones = text.split()
    tone_list = tones.split()
    assert len(phones) == len(tone_list)
    # One tone per phone, as a list for downstream interpolation steps.
    return tone_list
class Test_svlan(unittest.TestCase):
    """Tests for the svlan (802.1ad S-VLAN tag) packet class.

    Fix: restored the stripped nose ``@raises(Exception)`` decorator on
    ``test_malformed_svlan`` (its remnant was a bare ``(Exception)``
    expression); without it the test raises and fails instead of passing.
    """
    # Reference tag field values: tci = pcp | cfi | vid packed into 16 bits.
    pcp = 0
    cfi = 0
    vid = 32
    tci = (((pcp << 15) | (cfi << 12)) | vid)
    ethertype = ether.ETH_TYPE_8021Q
    buf = pack(svlan._PACK_STR, tci, ethertype)
    sv = svlan(pcp, cfi, vid, ethertype)

    def setUp(self):
        pass

    def tearDown(self):
        pass

    def find_protocol(self, pkt, name):
        """Return the first protocol in *pkt* with the given name, else None."""
        for p in pkt.protocols:
            if (p.protocol_name == name):
                return p

    def test_init(self):
        eq_(self.pcp, self.sv.pcp)
        eq_(self.cfi, self.sv.cfi)
        eq_(self.vid, self.sv.vid)
        eq_(self.ethertype, self.sv.ethertype)

    def test_parser(self):
        (res, ptype, _) = self.sv.parser(self.buf)
        eq_(res.pcp, self.pcp)
        eq_(res.cfi, self.cfi)
        eq_(res.vid, self.vid)
        eq_(res.ethertype, self.ethertype)
        # An 802.1Q ethertype means the next protocol is a C-VLAN tag.
        eq_(ptype, vlan)

    def test_serialize(self):
        data = bytearray()
        prev = None
        buf = self.sv.serialize(data, prev)
        fmt = svlan._PACK_STR
        res = struct.unpack(fmt, buf)
        eq_(res[0], self.tci)
        eq_(res[1], self.ethertype)

    def _build_svlan(self):
        """Build an ethernet/svlan/vlan/ipv4 packet around self.sv."""
        src_mac = '00:07:0d:af:f4:54'
        dst_mac = '00:00:00:00:00:00'
        ethertype = ether.ETH_TYPE_8021AD
        e = ethernet(dst_mac, src_mac, ethertype)
        pcp = 0
        cfi = 0
        vid = 32
        tci = (((pcp << 15) | (cfi << 12)) | vid)
        ethertype = ether.ETH_TYPE_IP
        v = vlan(pcp, cfi, vid, ethertype)
        version = 4
        header_length = 20
        tos = 0
        total_length = 24
        identification = 35421
        flags = 0
        offset = 1480
        ttl = 64
        proto = inet.IPPROTO_ICMP
        csum = 42994
        src = '131.151.32.21'
        dst = '131.151.32.129'
        option = b'TEST'
        ip = ipv4(version, header_length, tos, total_length, identification, flags, offset, ttl, proto, csum, src, dst, option)
        p = Packet()
        p.add_protocol(e)
        p.add_protocol(self.sv)
        p.add_protocol(v)
        p.add_protocol(ip)
        p.serialize()
        return p

    def test_build_svlan(self):
        p = self._build_svlan()
        e = self.find_protocol(p, 'ethernet')
        ok_(e)
        eq_(e.ethertype, ether.ETH_TYPE_8021AD)
        sv = self.find_protocol(p, 'svlan')
        ok_(sv)
        eq_(sv.ethertype, ether.ETH_TYPE_8021Q)
        v = self.find_protocol(p, 'vlan')
        ok_(v)
        eq_(v.ethertype, ether.ETH_TYPE_IP)
        ip = self.find_protocol(p, 'ipv4')
        ok_(ip)
        eq_(sv.pcp, self.pcp)
        eq_(sv.cfi, self.cfi)
        eq_(sv.vid, self.vid)
        eq_(sv.ethertype, self.ethertype)

    @raises(Exception)
    def test_malformed_svlan(self):
        # Truncated header must make the parser raise.
        m_short_buf = self.buf[1:svlan._MIN_LEN]
        svlan.parser(m_short_buf)

    def test_json(self):
        jsondict = self.sv.to_jsondict()
        sv = svlan.from_jsondict(jsondict['svlan'])
        eq_(str(self.sv), str(sv))
def extractAnykatranslationWordpressCom(item):
    """Build a release message for an anykatranslation.wordpress.com feed item.

    Returns None for previews or chapterless items, a typed release message
    for known tags, and False when nothing matches.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if (not (chp or vol)) or 'preview' in item['title'].lower():
        return None
    # (feed tag, canonical series name, translation type), checked in order.
    tagmap = [
        ('face slapping the slag gong system', 'face slapping the slag gong system', 'translated'),
        ('PRC', 'PRC', 'translated'),
        ('Loiterous', 'Loiterous', 'oel'),
    ]
    for tagname, name, tl_type in tagmap:
        if tagname in item['tags']:
            return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
def test_generate_probe_features_normal():
    """Normal-distribution probe features are reproducible for a fixed seed."""
    selector = ProbeFeatureSelection(estimator=DecisionTreeClassifier(), n_probes=2, distribution='normal', random_state=1)
    generated = selector._generate_probe_features(3).round(3)
    expected = pd.DataFrame({
        'gaussian_probe_0': [4.873, -1.835, -1.585],
        'gaussian_probe_1': [-3.219, 2.596, -6.905],
    })
    pd.testing.assert_frame_equal(generated, expected, check_dtype=False)
def test():
    """Exercise checker: verifies the learner built a two-token Matcher pattern
    for "iPhone X" and ran it on the doc.

    Relies on globals injected by the exercise runner: ``matcher``,
    ``pattern``, ``__solution__`` (the submitted source) and ``__msg__``.
    """
    import spacy.matcher
    assert isinstance(matcher, spacy.matcher.Matcher), 'matcher?'
    # Source-level checks: the solution must literally contain these calls.
    assert ('Matcher(nlp.vocab)' in __solution__), 'matcher?'
    assert (len(pattern) == 2), '2(2)'
    assert (isinstance(pattern[0], dict) and isinstance(pattern[1], dict)), ''
    assert ((len(pattern[0]) == 1) and (len(pattern[1]) == 1)), '1'
    # Accept either lowercase or uppercase TEXT attribute keys.
    assert any(((pattern[0].get(key) == 'iPhone') for key in ['text', 'TEXT'])), '?'
    assert any(((pattern[1].get(key) == 'X') for key in ['text', 'TEXT'])), '?'
    assert ('matcher.add("IPHONE_X_PATTERN"' in __solution__), 'matcher?'
    assert ('matches = matcher(doc)' in __solution__), 'matcherdoc?'
    __msg__.good('!doc[2:4]iPhone X')
# NOTE(review): this bare call looks like the remnant of a stripped decorator
# that registers the function below for the Sum/Product heads — confirm the
# original decorator name in the LaTeX conversion module.
_heads([Sum, Product])
def tex_Sum_Product(head, args, **kwargs):
    """Render a Sum or Product expression as LaTeX.

    Supported shapes: (func), (func, For(var, low, high)[, cond]) and
    (func, ForElement(var, S)[, cond]); raises ValueError otherwise.
    """
    if (head == Sum):
        ss = '\\sum'
    else:
        ss = '\\prod'
    if (len(args) == 0):
        raise ValueError
    # First argument is the summand/factor expression.
    func = args[0].latex(**kwargs)
    if (len(args) == 1):
        return ((ss + ' ') + func)
    if (args[1].head() == For):
        forargs = args[1].args()
        if (len(forargs) == 3):
            # Bounded form: \sum_{var=low}^{high}
            (var, low, high) = forargs
            var = var.latex()
            low = low.latex(in_small=True)
            high = high.latex(in_small=True)
            if (len(args) == 2):
                return (ss + ('_{%s=%s}^{%s} %s' % (var, low, high, func)))
            if (len(args) == 3):
                # Extra condition stacked under the lower bound via \atop.
                cond = args[2].latex(in_small=True)
                return (ss + ('_{\\textstyle{%s=%s \\atop %s}}^{%s} %s' % (var, low, cond, high, func)))
        if (len(forargs) == 1):
            if (len(args) == 2):
                var = forargs[0].latex(in_small=True)
                return (ss + ('_{%s} %s' % (var, func)))
            if (len(args) == 3):
                # With a condition, the condition alone is the subscript.
                cond = args[2].latex(in_small=True)
                return (ss + ('_{%s} %s' % (cond, func)))
    if (args[1].head() == ForElement):
        # Set-membership form: \sum_{var \in S}
        (var, S) = args[1].args()
        var = Element(var, S).latex(in_small=True)
        if (len(args) == 2):
            return (ss + ('_{%s} %s' % (var, func)))
        if (len(args) == 3):
            cond = args[2].latex(in_small=True)
            return (ss + ('_{\\textstyle{%s \\atop %s}} %s' % (var, cond, func)))
    raise ValueError
class MuxAccountLookCommand(COMMAND_DEFAULT_CLASS):
    """Account-level 'look' parsing: resolve which playable character is meant."""

    def parse(self):
        """Set self.playable to all characters, or the one named in self.args (None if no match)."""
        super().parse()
        characters = self.account.characters
        if not self.args:
            self.playable = characters
            return
        # Case-insensitive lookup by character key.
        by_key = {utils.to_str(char.key.lower()): char for char in characters}
        self.playable = by_key.get(self.args.lower(), None)
class ExtractSummary(BaseChat):
    """Chat scene that extracts a summary from the user-selected input."""

    # NOTE(review): ``.value()`` here vs ``.value`` in chat_type() below looks
    # inconsistent — confirm which form ChatScene actually exposes.
    chat_scene: str = ChatScene.ExtractSummary.value()

    def __init__(self, chat_param: Dict):
        """Force the chat mode to ExtractSummary and keep the selected input."""
        chat_param['chat_mode'] = ChatScene.ExtractSummary
        super().__init__(chat_param=chat_param)
        self.user_input = chat_param['select_param']

    async def generate_input_values(self):
        """Prompt inputs: the raw text to summarize, passed as ``context``."""
        return {'context': self.user_input}

    def chat_type(self) -> str:
        return ChatScene.ExtractSummary.value
class TestFilmAdvanceLever(object):
    """Behavioral tests for the film-advance lever."""

    def test_lever_not_on_camera_cannot_be_pressed(self):
        """A lever not attached to a camera refuses to wind."""
        lever = FilmAdvanceLever()
        with pytest.raises(FilmAdvanceLever.CannotBeWound):
            lever.wind()

    def test_lever_advances_mechanism(self):
        """Winding the lever cocks the shutter and advances the film."""
        camera = Camera()
        assert not camera.exposure_control_system.shutter.cocked
        assert not camera.film_advance_mechanism.advanced
        camera.film_advance_lever.wind()
        assert camera.exposure_control_system.shutter.cocked
        assert camera.film_advance_mechanism.advanced
class Sensor(GenericSensor):
    """Temperature sensor read over I2C/SMBus from an LM75-style register."""

    def setup_module(self) -> None:
        # Imported lazily so smbus2 is only required when this sensor is used.
        from smbus2 import SMBus
        self.bus = SMBus(self.config['i2c_bus_num'])
        self.address = self.config['chip_addr']

    def get_value(self, sens_conf: ConfigType) -> SensorValueType:
        """Read the temperature register and convert it to degrees Celsius."""
        # read_word_data returns the two register bytes in SMBus (little-endian)
        # order; mask to 16 bits, then swap the bytes to recover the
        # big-endian register value.
        value: int = (self.bus.read_word_data(self.address, LM75_TEMP_REGISTER) & 65535)
        value = (((value << 8) & 65280) + (value >> 8))
        # Scale the raw register to Celsius (overall divide by 256).
        # NOTE(review): assumes the LM75 temperature register layout — confirm
        # for the actual chip variant.
        celsius: float = ((value / 32.0) / 8.0)
        return celsius
class FormulaFile():
    """In-memory view of a parsed formula file plus staleness metadata."""

    def __init__(self, formulas, contents, mtime, filename):
        self.formulas = formulas    # mapping: formula name -> formula object
        self.contents = contents    # raw file text at load time
        self.mtime = mtime          # file mtime captured at load time
        self.filename = filename
        self.file_backed = True     # False for purely in-memory formula sets

    def out_of_date(self):
        """True when the backing file changed on disk since it was loaded."""
        if not self.file_backed:
            return False
        return os.stat(self.filename)[stat.ST_MTIME] > self.mtime

    def get_formula(self, formula):
        """Return the formula object for *formula*, or None when unknown."""
        return self.formulas.get(formula)

    def get_formula_names(self, skip_type=None):
        """Names of formulas whose symmetry is not *skip_type*.

        Formulas with symmetry None or 'BOTH' are always included.
        """
        return [
            name for name, f in self.formulas.items()
            if f.symmetry is None or f.symmetry == 'BOTH' or f.symmetry != skip_type
        ]
def test_angle_limit(setup):
    """Print the configured min and max angle of each of the six joints."""
    print('')
    print('get joint min angle value:')
    for joint_id in range(1, 7):
        print(mc.get_joint_min_angle(joint_id))
    print('get joint max angle value:')
    for joint_id in range(1, 7):
        print(mc.get_joint_max_angle(joint_id))
# NOTE(review): the bare "(scope='function')" line was the remnant of a
# stripped decorator; restored as @pytest.fixture, which the yield/teardown
# shape and injected db/policy fixtures clearly imply.
@pytest.fixture(scope='function')
def privacy_request_with_custom_fields(db: Session, policy: Policy) -> PrivacyRequest:
    """Yield a persisted privacy request carrying two custom fields
    (first/last name); the request is deleted again on teardown."""
    privacy_request = _create_privacy_request_for_policy(db, policy)
    privacy_request.persist_custom_privacy_request_fields(
        db=db,
        custom_privacy_request_fields={
            'first_name': CustomPrivacyRequestField(label='First name', value='John'),
            'last_name': CustomPrivacyRequestField(label='Last name', value='Doe'),
        },
    )
    privacy_request.save(db)
    yield privacy_request
    # Teardown: keep the test database isolated.
    privacy_request.delete(db)
def bm_focal_loss() -> None:
    """Benchmark focal loss (eager and JIT variants) on CUDA; no-op without a GPU."""
    if not torch.cuda.is_available():
        print('Skipped: CUDA unavailable')
        return
    # For each size, benchmark both the default alpha and alpha=0.
    kwargs_list = []
    for n in (100, 1000, 10000):
        kwargs_list.append({'N': n})
        kwargs_list.append({'N': n, 'alpha': 0})
    benchmark(TestFocalLoss.focal_loss_with_init, 'Focal_loss', kwargs_list, warmup_iters=1)
    benchmark(TestFocalLoss.focal_loss_jit_with_init, 'Focal_loss_JIT', kwargs_list, warmup_iters=1)
def extract_source_code_release(identifier):
    """Extract the downloaded .tar.gz source release for *identifier* into its
    extraction directory, rejecting path-traversal member names.

    Guards against tar path traversal (CVE-2007-4559): every member path must
    resolve inside the destination directory before anything is extracted.

    Raises:
        Exception: if any archive member would land outside the destination.
    """
    source_code_archive_path = get_source_code_archive_path(identifier)
    source_code_extract_path = get_source_code_extract_path(identifier)
    ensure_path_exists(source_code_extract_path)
    print(f'Extracting archive: {source_code_archive_path} -> {source_code_extract_path}')
    with tarfile.open(source_code_archive_path, 'r:gz') as archive_file:

        def is_within_directory(directory, target):
            # Fixed: os.path.commonprefix is character-wise and wrongly accepts
            # e.g. '/tmp/foobar' as being inside '/tmp/foo'; commonpath compares
            # whole path components.
            abs_directory = os.path.abspath(directory)
            abs_target = os.path.abspath(target)
            return os.path.commonpath([abs_directory, abs_target]) == abs_directory

        def safe_extract(tar, path='.'):
            # Validate every member first, then extract in one pass.
            for member in tar.getmembers():
                member_path = os.path.join(path, member.name)
                if not is_within_directory(path, member_path):
                    raise Exception('Attempted Path Traversal in Tar File')
            tar.extractall(path)

        safe_extract(archive_file, source_code_extract_path)
def main():
    """Parse command-line flags and run the selected LiteEth Etherbone tests."""
    parser = argparse.ArgumentParser(description='LiteEth Etherbone test utility')
    parser.add_argument('--port', default='1234', help='Host bind port')
    parser.add_argument('--udp', action='store_true', help='Use CommUDP directly instead of RemoteClient')
    parser.add_argument('--ident', action='store_true', help='Read FPGA identifier')
    parser.add_argument('--access', action='store_true', help='Test single Write/Read access over Etherbone')
    parser.add_argument('--sram', action='store_true', help='Test SRAM access over Etherbone')
    parser.add_argument('--speed', action='store_true', help='Test speed over Etherbone')
    args = parser.parse_args()
    port = int(args.port, 0)  # base 0: accepts decimal, 0x.., 0o.., 0b..
    comm = 'udp' if args.udp else 'cli'
    # Run every test whose flag was given, in a fixed order.
    selected = (
        (args.ident, ident_test),
        (args.access, access_test),
        (args.sram, sram_test),
        (args.speed, speed_test),
    )
    for enabled, run_test in selected:
        if enabled:
            run_test(comm=comm, port=port)
class OptionPlotoptionsPictorialSonificationDefaultspeechoptionsMappingPitch(Options):
    """Sonification pitch-mapping options.

    NOTE(review): each duplicated getter/setter ``def`` pair below was a
    ``@property`` / ``@x.setter`` pair whose decorators were lost in
    extraction; without them the second ``def`` silently shadows the getter,
    making reads impossible. Decorators restored.
    """

    @property
    def mapFunction(self):
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        return self._config_get('undefined')

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        return self._config_get('undefined')

    @max.setter
    def max(self, text: str):
        self._config(text, js_type=False)

    @property
    def min(self):
        return self._config_get('undefined')

    @min.setter
    def min(self, text: str):
        self._config(text, js_type=False)

    @property
    def within(self):
        return self._config_get('undefined')

    @within.setter
    def within(self, text: str):
        self._config(text, js_type=False)
def interleaved_tokenlist(required_item, other_item, sep, allow_trailing=False, at_least_two=False):
    """Build a pyparsing expression matching required/other items separated by
    *sep*, grouped into runs of the same kind.

    allow_trailing: permit one trailing separator.
    at_least_two: require at least two items overall.
    """
    sep = sep.suppress()
    if at_least_two:
        required_then_others = Group(required_item) + Group(OneOrMore(sep + other_item))
        others_then_required = (
            Group(other_item + ZeroOrMore(sep + other_item))
            + Group(OneOrMore(sep + required_item))
        )
        required_pair = Group(required_item + OneOrMore(sep + required_item))
        out = (required_then_others | others_then_required) | required_pair
    else:
        leading_others = Optional(Group(OneOrMore(other_item + sep)))
        required_run = Group(required_item + ZeroOrMore(sep + required_item))
        trailing_others = Optional(Group(OneOrMore(sep + other_item)))
        out = (leading_others + required_run) + trailing_others
    # Any number of further same-kind runs may follow.
    out += ZeroOrMore(Group(OneOrMore(sep + required_item)) | Group(OneOrMore(sep + other_item)))
    if allow_trailing:
        out += Optional(sep)
    return out
class SimpleLogin():
    """Flask extension providing minimal login/logout views and helpers."""

    # Default user-facing messages; overridable via init_app(messages=...).
    messages = {'login_success': Message('login success!', 'success'), 'login_failure': Message('invalid credentials', 'danger'), 'is_logged_in': Message('already logged in'), 'logout': Message('Logged out!'), 'login_required': Message('You need to login first', 'warning'), 'access_denied': Message('Access Denied'), 'auth_error': Message('Authentication Error: {0}')}

    @staticmethod
    def flash(label, *args, **kwargs):
        """Flash the configured message for *label*, formatting it when args given.

        NOTE(review): defined without ``self`` yet called as ``self.flash(...)``
        throughout — the ``@staticmethod`` decorator was lost in extraction and
        is restored here; without it *label* would receive the instance.
        """
        msg = Message.from_current_app(label)
        if (not msg):
            return
        if (args or kwargs):
            flash(msg.format(*args, **kwargs), msg.category)
        else:
            flash(msg.text, msg.category)

    def __init__(self, app=None, login_checker=None, login_form=None, messages=None):
        self.config = {'blueprint': 'simplelogin', 'login_url': '/login/', 'logout_url': '/logout/', 'home_url': '/'}
        self.app = None
        self._login_checker = (login_checker or default_login_checker)
        self._login_form = (login_form or LoginForm)
        if (app is not None):
            self.init_app(app=app, login_checker=login_checker, login_form=login_form, messages=messages)

    def login_checker(self, f):
        """Decorator registering *f* as the credential-validation callable."""
        self._login_checker = f
        return f

    def init_app(self, app, login_checker=None, login_form=None, messages=None):
        """Bind the extension to *app*: config, secret key, views, template globals."""
        if login_checker:
            self._login_checker = login_checker
        if login_form:
            self._login_form = login_form
        if (messages and isinstance(messages, dict)):
            # Keep only known message keys; promote bare strings to Message.
            cleaned = {k: v for (k, v) in messages.items() if (k in self.messages.keys())}
            for key in cleaned.keys():
                if isinstance(cleaned[key], str):
                    cleaned[key] = Message(cleaned[key])
            self.messages.update(cleaned)
        self._register(app)
        self._load_config()
        self._set_default_secret()
        self._register_views()
        self._register_extras()

    def _register(self, app):
        """Record the extension on the app; refuse double initialization."""
        if (not hasattr(app, 'extensions')):
            app.extensions = {}
        if ('simplelogin' in app.extensions):
            raise RuntimeError('Flask extension already initialized')
        app.extensions['simplelogin'] = self
        self.app = app

    def _load_config(self):
        """Merge SIMPLELOGIN_* (and deprecated SIMPLE_LOGIN_*) app settings."""
        config = self.app.config.get_namespace(namespace='SIMPLELOGIN_', lowercase=True, trim_namespace=True)
        old_config = self.app.config.get_namespace(namespace='SIMPLE_LOGIN_', lowercase=True, trim_namespace=True)
        config.update(old_config)
        if old_config:
            msg = 'Settings defined as SIMPLE_LOGIN_ will be deprecated. Please, use SIMPLELOGIN_ instead.'
            warn(msg, FutureWarning)
            self.config.update(old_config)
        # Only truthy values override the defaults.
        self.config.update(dict(((key, value) for (key, value) in config.items() if value)))

    def _set_default_secret(self):
        """Generate a random SECRET_KEY when the app does not define one."""
        if (self.app.config.get('SECRET_KEY') is None):
            secret_key = str(uuid4())
            logger.warning('Using random SECRET_KEY: {0}, please set it on your app.config["SECRET_KEY"]'.format(secret_key))
            self.app.config['SECRET_KEY'] = secret_key

    def _register_views(self):
        """Mount the login/logout routes on a dedicated blueprint."""
        self.blueprint = Blueprint(self.config['blueprint'], __name__, template_folder='templates')
        self.blueprint.add_url_rule(self.config['login_url'], endpoint='login', view_func=self.login, methods=['GET', 'POST'])
        self.blueprint.add_url_rule(self.config['logout_url'], endpoint='logout', view_func=self.logout, methods=['GET'])
        self.app.register_blueprint(self.blueprint)

    def _register_extras(self):
        """Expose helpers to Jinja templates."""
        self.app.add_template_global(is_logged_in)
        self.app.add_template_global(get_username)

    def basic_auth(self, response=None):
        """Authenticate via HTTP Basic auth; on success return *response* (or
        True), otherwise a 401 challenge."""
        auth = request.authorization
        if (auth and self._login_checker({'username': auth.username, 'password': auth.password})):
            session['simple_logged_in'] = True
            session['simple_basic_auth'] = True
            session['simple_username'] = auth.username
            return (response or True)
        else:
            headers = {'WWW-Authenticate': 'Basic realm="Login Required"'}
            return ('Invalid credentials', 401, headers)

    def login(self):
        """Login view: validates the 'next' target, the form, and credentials."""
        destiny = request.args.get('next', default=request.form.get('next', default=self.config.get('home_url', '/')))
        # Open-redirect guard: only same-host or explicitly allowed hosts.
        host_url = urlparse(request.host_url)
        redirect_url = urlparse(urljoin(request.host_url, destiny))
        if ((not (host_url.netloc == redirect_url.netloc)) and (redirect_url.netloc not in self.app.config.get('ALLOWED_HOSTS', []))):
            return abort(400, 'Invalid next url, can only redirect to the same host')
        if is_logged_in():
            self.flash('is_logged_in')
            return redirect(destiny)
        if request.is_json:
            # Fixed: basic_auth has no 'destiny' parameter — the original
            # keyword would raise TypeError; pass it as 'response'.
            return self.basic_auth(response=redirect(destiny))
        form = self._login_form()
        ret_code = 200
        if form.validate_on_submit():
            if self._login_checker(form.data):
                self.flash('login_success')
                session['simple_logged_in'] = True
                session['simple_username'] = form.data.get('username')
                return redirect(destiny)
            else:
                self.flash('login_failure')
                ret_code = 401
        return (render_template('login.html', form=form, next=destiny), ret_code)

    def logout(self):
        """Logout view: clears the whole session and redirects home."""
        session.clear()
        self.flash('logout')
        return redirect(self.config.get('home_url', '/'))
class OptionPlotoptionsBulletSonificationDefaultinstrumentoptionsMappingLowpassFrequency(Options):
    """Sonification lowpass-frequency mapping options.

    NOTE(review): each duplicated getter/setter ``def`` pair below was a
    ``@property`` / ``@x.setter`` pair whose decorators were lost in
    extraction; without them the setter shadows the getter. Restored.
    """

    @property
    def mapFunction(self):
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        return self._config_get(None)

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def within(self):
        return self._config_get(None)

    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)
class TaskActionManagerBuilder(ActionManagerBuilder):
    """Builds menu-bar and tool-bar action managers from a Task's schemas."""

    task = Instance(Task)
    additions = Property(List(SchemaAddition), observe='task.extra_actions')

    def create_menu_bar_manager(self):
        """Action manager for the task's menu bar, or None when it has none."""
        menu_bar_schema = self.task.menu_bar
        if not menu_bar_schema:
            return None
        return self.create_action_manager(menu_bar_schema)

    def create_tool_bar_managers(self):
        """Action managers for the task's tool bars plus schema additions."""
        schemas = self.task.tool_bars + self.get_additional_toolbar_schemas()
        ordered = self._get_ordered_schemas(schemas)
        return [self.create_action_manager(schema) for schema in ordered]

    def _controller_default(self):
        # Imported lazily to avoid a circular import at module load time.
        from .task_action_controller import TaskActionController
        return TaskActionController(task=self.task)

    def _get_additions(self):
        return self.task.extra_actions if self.task else []
class TreeNodeModel(models.Model):
    """Abstract Django model that materializes tree relations (ancestors,
    children, descendants, siblings, depth, level, order) into denormalized
    fields, recomputed in bulk by ``update_tree``.

    NOTE(review): methods taking ``cls`` as first argument lost their
    ``@classmethod`` decorators in extraction; restored below (without them,
    calls such as ``self.update_tree()`` bind the instance as ``cls`` and
    fail on ``cls.objects``). The thin accessor wrappers near the end were
    presumably ``@property``/classproperty upstream — left as plain methods
    here to avoid depending on a helper not visible in this file.
    """

    # Name of the field used by get_display_text(); set on subclasses.
    treenode_display_field = None

    tn_ancestors_pks = models.TextField(blank=True, default='', editable=False, verbose_name=_('Ancestors pks'))
    tn_ancestors_count = models.PositiveIntegerField(default=0, editable=False, verbose_name=_('Ancestors count'))
    tn_children_pks = models.TextField(blank=True, default='', editable=False, verbose_name=_('Children pks'))
    tn_children_count = models.PositiveIntegerField(default=0, editable=False, verbose_name=_('Children count'))
    tn_depth = models.PositiveIntegerField(default=0, editable=False, validators=[MinValueValidator(0), MaxValueValidator(10)], verbose_name=_('Depth'))
    tn_descendants_pks = models.TextField(blank=True, default='', editable=False, verbose_name=_('Descendants pks'))
    tn_descendants_count = models.PositiveIntegerField(default=0, editable=False, verbose_name=_('Descendants count'))
    tn_index = models.PositiveIntegerField(default=0, editable=False, verbose_name=_('Index'))
    tn_level = models.PositiveIntegerField(default=1, editable=False, validators=[MinValueValidator(1), MaxValueValidator(10)], verbose_name=_('Level'))
    tn_parent = models.ForeignKey('self', related_name='tn_children', on_delete=models.CASCADE, blank=True, null=True, verbose_name=_('Parent'))
    tn_priority = models.PositiveIntegerField(default=0, validators=[MinValueValidator(0), MaxValueValidator(9999)], verbose_name=_('Priority'))
    tn_order = models.PositiveIntegerField(default=0, editable=False, verbose_name=_('Order'))
    tn_siblings_pks = models.TextField(blank=True, default='', editable=False, verbose_name=_('Siblings pks'))
    tn_siblings_count = models.PositiveIntegerField(default=0, editable=False, verbose_name=_('Siblings count'))

    def delete(self, using=None, keep_parents=False, cascade=True):
        """Delete this node; with ``cascade=False`` children are first
        re-parented to root instead of being cascade-deleted."""
        with no_signals():
            if (not cascade):
                children_qs = self.get_children_queryset()
                children_qs.update(tn_parent=None)
            self.__class__.objects.filter(pk=self.pk).delete()
        self.update_tree()

    @classmethod
    def delete_tree(cls):
        """Delete every node of this model and clear refs/caches."""
        with no_signals():
            with transaction.atomic(using=router.db_for_write(cls)):
                cls.objects.all().delete()
            clear_refs(cls)
            clear_cache(cls)

    def get_ancestors(self, cache=True):
        if cache:
            return query_cache(self.__class__, pks=self.tn_ancestors_pks)
        else:
            return list(self.get_ancestors_queryset())

    def get_ancestors_count(self):
        return self.tn_ancestors_count

    def get_ancestors_pks(self):
        return split_pks(self.tn_ancestors_pks)

    def get_ancestors_queryset(self):
        return self.__class__.objects.filter(pk__in=self.get_ancestors_pks())

    def get_breadcrumbs(self, attr=None, cache=True):
        """Ancestors (root first) plus self; values of *attr* when given."""
        objs = ((self.get_ancestors(cache=cache) if self.tn_parent_id else []) + [self])
        return ([getattr(obj, attr) for obj in objs] if attr else objs)

    def get_children(self, cache=True):
        if cache:
            return query_cache(self.__class__, pks=self.tn_children_pks)
        else:
            return list(self.get_children_queryset())

    def get_children_count(self):
        return self.tn_children_count

    def get_children_pks(self):
        return split_pks(self.tn_children_pks)

    def get_children_queryset(self):
        return self.__class__.objects.filter(pk__in=self.get_children_pks())

    def get_depth(self):
        return self.tn_depth

    def get_descendants(self, cache=True):
        if cache:
            return query_cache(self.__class__, pks=self.tn_descendants_pks)
        else:
            return list(self.get_descendants_queryset())

    def get_descendants_count(self):
        return self.tn_descendants_count

    def get_descendants_pks(self):
        return split_pks(self.tn_descendants_pks)

    def get_descendants_queryset(self):
        return self.__class__.objects.filter(pk__in=self.get_descendants_pks())

    def get_descendants_tree(self, cache=True):
        return self.__get_nodes_tree(instance=self, cache=cache)

    def get_descendants_tree_display(self, cache=True):
        objs = self.get_descendants(cache=cache)
        strs = [f'{obj}' for obj in objs]
        d = '\n'.join(strs)
        return d

    def get_display(self, indent=True, mark=' '):
        """Display text, optionally indented by one *mark* per ancestor."""
        indentation = ((mark * self.tn_ancestors_count) if indent else '')
        indentation = force_str(indentation)
        text = self.get_display_text()
        text = conditional_escape(force_str(text))
        return (indentation + text)

    def get_display_text(self):
        """Value of ``treenode_display_field`` when set, else the pk."""
        text = ''
        if (hasattr(self, 'treenode_display_field') and (self.treenode_display_field is not None)):
            field_name = self.treenode_display_field
            text = getattr(self, field_name, '')
        if ((not text) and self.pk):
            text = self.pk
        return force_str(text)

    def get_first_child(self, cache=True):
        return (self.get_children(cache=cache)[0] if self.get_children_count() else None)

    def get_index(self):
        return self.tn_index

    def get_last_child(self, cache=True):
        return (self.get_children(cache=cache)[(- 1)] if self.get_children_count() else None)

    def get_level(self):
        return self.tn_level

    def get_order(self):
        return self.tn_order

    def get_parent(self):
        return self.tn_parent

    def get_parent_pk(self):
        return self.tn_parent_id

    def set_parent(self, obj):
        """Attach this node under *obj*; when *obj* is currently a descendant
        of self it is first moved up to self's parent to avoid a cycle."""
        with no_signals():
            if obj:
                obj_cls = obj.__class__
                cls = self.__class__
                if (obj_cls != cls):
                    raise ValueError(f"obj can't be set as parent, it is istance of {obj_cls.__name__}, expected instance of {cls.__name__}.")
                if (obj == self):
                    raise ValueError("obj can't be set as parent of itself.")
                if (not obj.pk):
                    obj.save()
                if (obj.pk in split_pks(self.tn_descendants_pks)):
                    obj.tn_parent = self.tn_parent
                    obj.save()
            self.tn_parent = obj
            self.save()
        self.update_tree()

    def get_priority(self):
        return self.tn_priority

    def set_priority(self, val):
        self.tn_priority = val
        self.save()

    def get_root(self, cache=True):
        root_pk = self.get_root_pk()
        if cache:
            root_obj = query_cache(self.__class__, pk=root_pk)
        else:
            root_obj = self.__class__.objects.get(pk=root_pk)
        return root_obj

    def get_root_pk(self):
        # First pk along the ancestor chain; self when this node is a root.
        return (split_pks(self.tn_ancestors_pks) + [self.pk])[0]

    @classmethod
    def get_roots(cls, cache=True):
        if cache:
            return [obj for obj in query_cache(cls) if (obj.tn_ancestors_count == 0)]
        else:
            return list(cls.get_roots_queryset())

    @classmethod
    def get_roots_queryset(cls):
        return cls.objects.filter(tn_ancestors_count=0)

    def get_siblings(self, cache=True):
        if cache:
            return query_cache(self.__class__, pks=self.tn_siblings_pks)
        else:
            return list(self.get_siblings_queryset())

    def get_siblings_count(self):
        return self.tn_siblings_count

    def get_siblings_pks(self):
        return split_pks(self.tn_siblings_pks)

    def get_siblings_queryset(self):
        return self.__class__.objects.filter(pk__in=self.get_siblings_pks())

    @classmethod
    def get_tree(cls, cache=True):
        return cls.__get_nodes_tree(instance=None, cache=cache)

    @classmethod
    def get_tree_display(cls, cache=True):
        if cache:
            objs = query_cache(cls)
        else:
            objs = list(cls.objects.all())
        strs = [f'{obj}' for obj in objs]
        d = '\n'.join(strs)
        return d

    def is_ancestor_of(self, obj):
        return ((self.__class__ == obj.__class__) and self.pk and (self.pk != obj.pk) and (str(self.pk) in split_pks(obj.tn_ancestors_pks)))

    def is_child_of(self, obj):
        return ((self.__class__ == obj.__class__) and self.pk and (self.pk != obj.pk) and (str(self.pk) in split_pks(obj.tn_children_pks)))

    def is_descendant_of(self, obj):
        return ((self.__class__ == obj.__class__) and self.pk and (self.pk != obj.pk) and (str(self.pk) in split_pks(obj.tn_descendants_pks)))

    def is_first_child(self):
        return (self.pk and (self.tn_index == 0))

    def is_last_child(self):
        # Siblings exclude self, so the last index equals the siblings count.
        return (self.pk and (self.tn_index == self.tn_siblings_count))

    def is_leaf(self):
        return (self.pk and (self.tn_children_count == 0))

    def is_parent_of(self, obj):
        return ((self.__class__ == obj.__class__) and self.pk and (self.pk != obj.pk) and (obj.tn_ancestors_count > 0) and (str(self.pk) == split_pks(obj.tn_ancestors_pks)[(- 1)]))

    def is_root(self):
        return (self.pk and (self.tn_ancestors_count == 0))

    def is_root_of(self, obj):
        return (self.is_root() and self.is_ancestor_of(obj))

    def is_sibling_of(self, obj):
        return ((self.__class__ == obj.__class__) and self.pk and (self.pk != obj.pk) and (self.tn_ancestors_pks == obj.tn_ancestors_pks))

    @classmethod
    def update_tree(cls):
        """Recompute all denormalized tree fields for every node, writing only
        rows whose values changed, then refresh refs and cache."""
        debug_message_prefix = f'[treenode] update {cls.__module__}.{cls.__name__} tree: '
        with debug_performance(debug_message_prefix):
            objs_data = cls.__get_nodes_data()
            with transaction.atomic(using=router.db_for_write(cls)):
                obj_manager = cls.objects
                for (obj_pk, obj_data) in objs_data.items():
                    obj_manager.filter(pk=obj_pk).update(**obj_data)
            update_refs(cls, objs_data)
            update_cache(cls)

    def __get_node_order_str(self):
        """Fixed-width sort key: inverted priority (higher priority sorts
        first), then slugified display text, then pk."""
        # NOTE(review): the value was lost in extraction ('priority_max =');
        # restored to 9999 from MaxValueValidator(9999) on tn_priority.
        priority_max = 9999
        priority_len = len(str(priority_max))
        priority_val = (priority_max - min(self.tn_priority, priority_max))
        priority_key = str(priority_val).zfill(priority_len)
        alphabetical_val = slugify(str(self))
        alphabetical_key = alphabetical_val.ljust(priority_len, 'z')
        alphabetical_key = alphabetical_key[0:priority_len]
        if isinstance(self.pk, uuid.UUID):
            pk_val = self.pk.int
            pk_val = int(str(pk_val)[:priority_len])
        else:
            try:
                pk_val = min(self.pk, priority_max)
            except TypeError:
                pk_val = str(self.pk)
        pk_key = str(pk_val).zfill(priority_len)
        s = f'{priority_key}{alphabetical_key}{pk_key}'
        s = s.upper()
        return s

    def __get_node_data(self, objs_list, objs_dict):
        """Seed this node's data dict (ancestors and order key); sibling,
        child and descendant fields are filled in by __get_nodes_data."""
        obj_dict = {}
        parent_pk = self.get_parent_pk()
        ancestors_list = []
        ancestor_pk = parent_pk
        while ancestor_pk:
            ancestor_obj = objs_dict.get(str(ancestor_pk))
            if (not ancestor_obj):
                break
            ancestors_list.insert(0, ancestor_obj)
            ancestor_pk = ancestor_obj.get_parent_pk()
        ancestors_pks = [obj.pk for obj in ancestors_list]
        ancestors_count = len(ancestors_pks)
        # Concatenated order keys of the ancestor chain give depth-first order.
        order_objs = (list(ancestors_list) + [self])
        order_strs = [obj.__get_node_order_str() for obj in order_objs]
        order_str = ''.join(order_strs)
        obj_dict = {'instance': self, 'pk': self.pk, 'tn_parent_pk': parent_pk, 'tn_ancestors_pks': ancestors_pks, 'tn_ancestors_count': ancestors_count, 'tn_children_pks': [], 'tn_children_count': 0, 'tn_descendants_pks': [], 'tn_descendants_count': 0, 'tn_siblings_pks': [], 'tn_siblings_count': 0, 'tn_depth': 0, 'tn_level': (ancestors_count + 1), 'tn_order': 0, 'tn_order_str': order_str}
        return obj_dict

    @classmethod
    def __get_nodes_data(cls):
        """Compute the full per-node field data, returning only nodes whose
        stored values differ (keyed by str(pk))."""
        objs_qs = cls.objects.select_related('tn_parent')
        objs_list = list(objs_qs)
        objs_dict = {str(obj.pk): obj for obj in objs_list}
        objs_data_dict = {str(obj.pk): obj.__get_node_data(objs_list, objs_dict) for obj in objs_list}

        def objs_data_sort(obj):
            return objs_data_dict[str(obj['pk'])]['tn_order_str']
        objs_data_list = list(objs_data_dict.values())
        objs_data_list.sort(key=objs_data_sort)
        # First pass (depth-first order): global order and per-parent index.
        objs_pks_by_parent = {}
        objs_order_cursor = 0
        objs_index_cursors = {}
        objs_index_cursor = 0
        for obj_data in objs_data_list:
            obj_parent_key = str(obj_data['tn_parent_pk'])
            objs_pks_by_parent.setdefault(obj_parent_key, [])
            objs_pks_by_parent[obj_parent_key].append(obj_data['pk'])
            obj_data['tn_order'] = objs_order_cursor
            objs_order_cursor += 1
            obj_parent_key = str(obj_data['tn_parent_pk'])
            objs_index_cursor = objs_index_cursors.get(obj_parent_key, 0)
            obj_data['tn_index'] = objs_index_cursor
            objs_index_cursor += 1
            objs_index_cursors[obj_parent_key] = objs_index_cursor
        # Second pass (deepest levels first): children, siblings, descendants, depth.
        for obj_data in sorted(objs_data_list, key=(lambda obj: obj['tn_level']), reverse=True):
            children_parent_key = str(obj_data['pk'])
            obj_data['tn_children_pks'] = list(objs_pks_by_parent.get(children_parent_key, []))
            obj_data['tn_children_count'] = len(obj_data['tn_children_pks'])
            siblings_parent_key = str(obj_data['tn_parent_pk'])
            obj_data['tn_siblings_pks'] = list(objs_pks_by_parent.get(siblings_parent_key, []))
            obj_data['tn_siblings_pks'].remove(obj_data['pk'])
            obj_data['tn_siblings_count'] = len(obj_data['tn_siblings_pks'])
            if (obj_data['tn_children_count'] > 0):
                obj_children_pks = obj_data['tn_children_pks']
                obj_descendants_pks = list(obj_children_pks)
                obj_depth = 1
                for obj_child_pk in obj_children_pks:
                    obj_child_key = str(obj_child_pk)
                    obj_child_data = objs_data_dict[obj_child_key]
                    obj_child_descendants_pks = obj_child_data.get('tn_descendants_pks', [])
                    if obj_child_descendants_pks:
                        obj_descendants_pks += obj_child_descendants_pks
                    obj_depth = max(obj_depth, (obj_child_data['tn_depth'] + 1))
                if obj_descendants_pks:
                    def obj_descendants_sort(obj_pk):
                        return objs_data_dict[str(obj_pk)]['tn_order']
                    obj_descendants_pks.sort(key=obj_descendants_sort)
                obj_data['tn_descendants_pks'] = obj_descendants_pks
                obj_data['tn_descendants_count'] = len(obj_data['tn_descendants_pks'])
                obj_data['tn_depth'] = obj_depth
        # Final pass: serialize pk lists, drop bookkeeping keys and unchanged fields.
        for obj_data in objs_data_list:
            obj = obj_data['instance']
            obj_key = str(obj_data['pk'])
            obj_data['tn_ancestors_pks'] = join_pks(obj_data['tn_ancestors_pks'])
            obj_data['tn_children_pks'] = join_pks(obj_data['tn_children_pks'])
            obj_data['tn_descendants_pks'] = join_pks(obj_data['tn_descendants_pks'])
            obj_data['tn_siblings_pks'] = join_pks(obj_data['tn_siblings_pks'])
            obj_data.pop('instance', None)
            obj_data.pop('pk', None)
            obj_data.pop('tn_parent_pk', None)
            obj_data.pop('tn_order_str', None)
            keys = ['tn_ancestors_count', 'tn_ancestors_pks', 'tn_children_count', 'tn_children_pks', 'tn_depth', 'tn_descendants_count', 'tn_descendants_pks', 'tn_index', 'tn_level', 'tn_order', 'tn_siblings_count', 'tn_siblings_pks']
            for key in keys:
                if (obj_data[key] == getattr(obj, key, None)):
                    obj_data.pop(key, None)
            if (len(obj_data) == 0):
                objs_data_dict.pop(obj_key, None)
        return objs_data_dict

    @classmethod
    def __get_nodes_tree(cls, instance=None, cache=True):
        """Nested {'node': obj, 'tree': [...]} structure for the whole model
        (instance=None) or for *instance*'s descendants."""
        def __get_node_tree(obj):
            child_tree = {'node': obj, 'tree': []}
            child_tree_append = child_tree['tree'].append
            if obj.tn_children_pks:
                children_pks = split_pks(obj.tn_children_pks)
                for child_pk in children_pks:
                    child_key = str(child_pk)
                    child_obj = objs_dict.get(child_key)
                    if child_obj:
                        child_tree_append(__get_node_tree(child_obj))
            return child_tree
        if instance:
            objs_pks = instance.tn_descendants_pks
            if cache:
                objs_list = query_cache(cls, pks=objs_pks)
            else:
                objs_list = list(cls.objects.filter(pk__in=split_pks(objs_pks)))
            objs_dict = {str(obj.pk): obj for obj in objs_list}
            objs_tree = __get_node_tree(instance)['tree']
        else:
            if cache:
                objs_list = query_cache(cls)
            else:
                objs_list = list(cls.objects.all())
            objs_dict = {str(obj.pk): obj for obj in objs_list}
            objs_tree = [__get_node_tree(obj) for obj in objs_list if (obj.tn_level == 1)]
        return objs_tree

    # --- Thin accessor wrappers over the getters above ---

    def ancestors(self):
        return self.get_ancestors()

    def ancestors_count(self):
        return self.get_ancestors_count()

    def ancestors_pks(self):
        return self.get_ancestors_pks()

    def breadcrumbs(self):
        return self.get_breadcrumbs()

    def children(self):
        return self.get_children()

    def children_count(self):
        return self.get_children_count()

    def children_pks(self):
        return self.get_children_pks()

    def depth(self):
        return self.get_depth()

    def descendants(self):
        return self.get_descendants()

    def descendants_count(self):
        return self.get_descendants_count()

    def descendants_pks(self):
        return self.get_descendants_pks()

    def descendants_tree(self):
        return self.get_descendants_tree()

    def descendants_tree_display(self):
        return self.get_descendants_tree_display()

    def first_child(self):
        return self.get_first_child()

    def index(self):
        return self.get_index()

    def last_child(self):
        return self.get_last_child()

    def level(self):
        return self.get_level()

    def order(self):
        return self.get_order()

    def parent(self):
        return self.get_parent()

    def parent_pk(self):
        return self.get_parent_pk()

    def priority(self):
        return self.get_priority()

    @classmethod
    def roots(cls):
        return cls.get_roots()

    def root(self):
        return self.get_root()

    def root_pk(self):
        return self.get_root_pk()

    def siblings(self):
        return self.get_siblings()

    def siblings_count(self):
        return self.get_siblings_count()

    def siblings_pks(self):
        return self.get_siblings_pks()

    @classmethod
    def tree(cls):
        return cls.get_tree()

    @classmethod
    def tree_display(cls):
        return cls.get_tree_display()

    class Meta():
        abstract = True
        ordering = ['tn_order']

    def __str__(self):
        return conditional_escape(self.get_display(indent=True))
# NOTE(review): the bare string on the next line is the remnant of a stripped
# routing decorator (e.g. @plugin.route('/m3u8/...')) — confirm upstream.
('/m3u8/<cid>/<offset>/<star>/<typefilter>/<searchstr>/<name>')
def m3u8(cid, offset, star, typefilter='0', searchstr='0', name='0'):
    # Kodi plugin handler: let the user pick a bitrate, then generate an m3u8
    # playlist (plus an HTML index) for the selected 115.com folder.
    stm = '0'
    # (label, bitrate) choices shown in the selection dialog; last entry means
    # "no bitrate filter".
    qtyps = []
    qtyps.append(('', '800000'))
    qtyps.append(('', '1200000'))
    qtyps.append(('', '1800000'))
    qtyps.append(('1080p', '3000000'))
    qtyps.append(('4K', '7500000'))
    qtyps.append(('', ''))
    dialog = xbmcgui.Dialog()
    sel = dialog.select('', [q[0] for q in qtyps])
    if (sel == (- 1)):
        # User cancelled the dialog.
        return '-1'
    stm = str(qtyps[sel][1])
    # Reset the module-level counter used by genm3u8 for progress reporting.
    global milkvrcount
    milkvrcount = 0
    basepath = '/sdcard/Download/115/'
    # Truncate the target path to 40 chars to avoid over-long directory names.
    savepath = xbmc.translatePath(os.path.join(basepath, name))[0:40]
    if (not os.path.exists(savepath)):
        os.makedirs(savepath)
    htmlfname = xbmc.translatePath(os.path.join(basepath, (name[0:20] + '.html')))
    with open(htmlfname, 'wb') as htmlFile:
        # NOTE(review): file is opened 'wb' but write() is given str — valid on
        # Python 2 (Kodi legacy) only; would raise TypeError on Python 3.
        htmlFile.write(('<META HTTP-EQUIV="Content-Type" CONTENT="text/html; charset=UTF-8"><TITLE>%s</TITLE><h3>%s</h3>' % (name, name)))
        htmlFile.write('\r\n')
        # Redundant close(): the with-block closes the file anyway.
        htmlFile.close()
    genm3u8(cid, offset, star, typefilter, searchstr, savepath, stm, name)
    notify(msg=(('/Download/115/' + str(milkvrcount)) + 'M3U8!'))
def test_hover_pointer_attr():
    """Hovering a pointer attribute reports its declared Fortran type."""
    request = write_rpc_request(1, 'initialize', {'rootPath': str(test_dir)})
    file_path = test_dir / 'hover' / 'pointers.f90'
    request += hover_req(file_path, 1, 26)
    errcode, results = run_request(request, fortls_args=['--sort_keywords'])
    assert errcode == 0
    expected = ['```fortran90\nINTEGER, POINTER :: val1\n```']
    validate_hover(results, expected)
class OptionsChartData(Options):
    """Chart data options: category labels plus one or more data series.

    NOTE(review): the duplicated ``categories`` defs were a ``@property`` /
    ``@categories.setter`` pair whose decorators were lost in extraction
    (the setter shadowed the getter); restored.
    """

    @property
    def categories(self):
        return self._config_get()

    @categories.setter
    def categories(self, values):
        self._config(values)

    def add_series(self, name, data) -> OptionsChartDataSeries:
        """Append a new series with the given *name* and *data* and return it."""
        new_series = self._config_sub_data_enum('series', OptionsChartDataSeries)
        new_series.name = name
        new_series.data = data
        return new_series
# NOTE(review): the bare '.authentication' / '.asyncio' lines were
# '@pytest.mark.*' decorators whose '@pytest.mark' prefixes were lost in
# extraction (bare '.asyncio' is a syntax error); restored below.
@pytest.mark.authentication
class TestReadToken():
    """read_token behavior of the Redis authentication strategy."""

    @pytest.mark.asyncio
    async def test_missing_token(self, redis_strategy: RedisStrategy[(UserModel, IDType)], user_manager):
        authenticated_user = (await redis_strategy.read_token(None, user_manager))
        assert (authenticated_user is None)

    @pytest.mark.asyncio
    async def test_invalid_token(self, redis_strategy: RedisStrategy[(UserModel, IDType)], user_manager):
        authenticated_user = (await redis_strategy.read_token('TOKEN', user_manager))
        assert (authenticated_user is None)

    @pytest.mark.asyncio
    async def test_valid_token_invalid_uuid(self, redis_strategy: RedisStrategy[(UserModel, IDType)], redis: RedisMock, user_manager):
        (await redis.set(f'{redis_strategy.key_prefix}TOKEN', 'bar'))
        authenticated_user = (await redis_strategy.read_token('TOKEN', user_manager))
        assert (authenticated_user is None)

    @pytest.mark.asyncio
    async def test_valid_token_not_existing_user(self, redis_strategy: RedisStrategy[(UserModel, IDType)], redis: RedisMock, user_manager):
        (await redis.set(f'{redis_strategy.key_prefix}TOKEN', 'd35d213e-f3d8-4f08-954a-7e0d1bea286f'))
        authenticated_user = (await redis_strategy.read_token('TOKEN', user_manager))
        assert (authenticated_user is None)

    @pytest.mark.asyncio
    async def test_valid_token(self, redis_strategy: RedisStrategy[(UserModel, IDType)], redis: RedisMock, user_manager, user):
        (await redis.set(f'{redis_strategy.key_prefix}TOKEN', str(user.id)))
        authenticated_user = (await redis_strategy.read_token('TOKEN', user_manager))
        assert (authenticated_user is not None)
        assert (authenticated_user.id == user.id)
class _ZebraBfdDestination(_ZebraMessageBody):
_HEADER_FMT = '!I'
HEADER_SIZE = struct.calcsize(_HEADER_FMT)
_FAMILY_FMT = '!H'
FAMILY_SIZE = struct.calcsize(_FAMILY_FMT)
_BODY_FMT = '!IIBB'
BODY_SIZE = struct.calcsize(_BODY_FMT)
_FOOTER_FMT = '!B'
FOOTER_SIZE = struct.calcsize(_FOOTER_FMT)
def __init__(self, pid, dst_family, dst_prefix, min_rx_timer, min_tx_timer, detect_mult, multi_hop, src_family, src_prefix, multi_hop_count=None, ifname=None):
super(_ZebraBfdDestination, self).__init__()
self.pid = pid
self.dst_family = dst_family
assert (ip.valid_ipv4(dst_prefix) or ip.valid_ipv6(dst_prefix))
self.dst_prefix = dst_prefix
self.min_rx_timer = min_rx_timer
self.min_tx_timer = min_tx_timer
self.detect_mult = detect_mult
self.multi_hop = multi_hop
self.src_family = src_family
assert (ip.valid_ipv4(src_prefix) or ip.valid_ipv6(src_prefix))
self.src_prefix = src_prefix
self.multi_hop_count = multi_hop_count
self.ifname = ifname
def _parse_family_prefix(cls, buf):
(family,) = struct.unpack_from(cls._FAMILY_FMT, buf)
rest = buf[cls.FAMILY_SIZE:]
if (socket.AF_INET == family):
return (family, addrconv.ipv4.bin_to_text(rest[:4]), rest[4:])
elif (socket.AF_INET6 == family):
return (family, addrconv.ipv6.bin_to_text(rest[:16]), rest[16:])
raise struct.error(('Unsupported family: %d' % family))
def parse(cls, buf, version=_DEFAULT_VERSION):
(pid,) = struct.unpack_from(cls._HEADER_FMT, buf)
rest = buf[cls.HEADER_SIZE:]
(dst_family, dst_prefix, rest) = cls._parse_family_prefix(rest)
(min_rx_timer, min_tx_timer, detect_mult, multi_hop) = struct.unpack_from(cls._BODY_FMT, rest)
rest = rest[cls.BODY_SIZE:]
(src_family, src_prefix, rest) = cls._parse_family_prefix(rest)
multi_hop_count = None
ifname = None
if multi_hop:
(multi_hop_count,) = struct.unpack_from(cls._FOOTER_FMT, rest)
else:
(ifname_len,) = struct.unpack_from(cls._FOOTER_FMT, rest)
ifname_bin = rest[cls.FOOTER_SIZE:(cls.FOOTER_SIZE + ifname_len)]
ifname = str(six.text_type(ifname_bin.strip(b'\x00'), 'ascii'))
return cls(pid, dst_family, dst_prefix, min_rx_timer, min_tx_timer, detect_mult, multi_hop, src_family, src_prefix, multi_hop_count, ifname)
def _serialize_family_prefix(self, prefix):
if ip.valid_ipv4(prefix):
family = socket.AF_INET
return (family, (struct.pack(self._FAMILY_FMT, family) + addrconv.ipv4.text_to_bin(prefix)))
elif ip.valid_ipv6(prefix):
family = socket.AF_INET6
return (family, (struct.pack(self._FAMILY_FMT, family) + addrconv.ipv6.text_to_bin(prefix)))
raise ValueError(('Invalid prefix: %s' % prefix))
def serialize(self, version=_DEFAULT_VERSION):
    """Serialize this message to bytes.

    Side effect: refreshes ``dst_family``/``src_family`` from the
    configured prefixes while encoding them.
    """
    self.dst_family, dst_bin = self._serialize_family_prefix(self.dst_prefix)
    self.src_family, src_bin = self._serialize_family_prefix(self.src_prefix)
    body_bin = struct.pack(self._BODY_FMT, self.min_rx_timer,
                           self.min_tx_timer, self.detect_mult, self.multi_hop)
    if self.multi_hop:
        footer_bin = struct.pack(self._FOOTER_FMT, self.multi_hop_count)
    else:
        encoded_name = self.ifname.encode('ascii')
        footer_bin = struct.pack(self._FOOTER_FMT, len(encoded_name)) + encoded_name
    header_bin = struct.pack(self._HEADER_FMT, self.pid)
    return b''.join((header_bin, dst_bin, body_bin, src_bin, footer_bin))
_eh
class GrimoireHandler(THBEventHandler):
    """Enforce Grimoire usage limits and record a successful use.

    On shoot-down it vetoes repeated use in one turn and use without
    vitality; after a successful launch it burns one vitality and marks
    the turn tag.
    """

    interested = ['action_after', 'action_shootdown']

    def handle(self, evt_type, act):
        if evt_type == 'action_shootdown':
            if not isinstance(act, LaunchCard):
                return act
            if act.card.is_card(GrimoireSkill):
                turn_tags = ttags(act.source)
                # Already used this turn -> hard veto.
                if turn_tags['grimoire_tag']:
                    raise ActionLimitExceeded
                # No vitality left -> cannot pay the cost.
                if turn_tags['vitality'] <= 0:
                    raise VitalityLimitExceeded
        elif evt_type == 'action_after' and isinstance(act, LaunchCard):
            if act.card.is_card(GrimoireSkill):
                turn_tags = ttags(act.source)
                turn_tags['vitality'] -= 1
                turn_tags['grimoire_tag'] = True
        return act
def test_wrong_zcornsv_size():
    """An undersized zcornsv output buffer must make the C routine return -4."""
    split_enz = np.full(8, fill_value=4, dtype=np.uint8).tobytes()
    zvals = np.ones(300, dtype=np.float32)
    too_small = np.zeros(5, dtype=np.float32)  # deliberately far too short
    status = _cxtgeo.grd3d_roff2xtgeo_splitenz(3, 1.0, 1.0, split_enz, zvals, too_small)
    assert status == -4
def test_record_none_exc_info(django_elasticapm_client):
    """A record whose exc_info is (None, None, None) must not attach an exception."""
    record = logging.LogRecord(
        'foo',
        logging.INFO,
        pathname=None,
        lineno=None,
        msg='test',
        args=(),
        exc_info=(None, None, None),
    )
    LoggingHandler().emit(record)
    errors = django_elasticapm_client.events[ERROR]
    assert len(errors) == 1
    event = errors[0]
    assert event['log']['param_message'] == 'test'
    assert event['log']['logger_name'] == 'foo'
    assert event['log']['level'] == 'info'
    assert 'exception' not in event
class IBCCoreConnectionRestClient(IBCCoreConnection):
    """REST-backed implementation of the IBC core connection query API."""

    API_URL = '/ibc/core/connection/v1beta1'

    def __init__(self, rest_api: RestClient) -> None:
        """Store the REST client used for all queries."""
        self._rest_api = rest_api

    def Connection(self, request: QueryConnectionRequest) -> QueryConnectionResponse:
        """Query one connection end by its identifier."""
        url = f'{self.API_URL}/connections/{request.connection_id}'
        return Parse(self._rest_api.get(url), QueryConnectionResponse())

    def Connections(self, request: QueryConnectionsRequest) -> QueryConnectionsResponse:
        """Query all connection ends (request carries pagination)."""
        url = f'{self.API_URL}/connections'
        return Parse(self._rest_api.get(url, request), QueryConnectionsResponse())

    def ClientConnections(self, request: QueryClientConnectionsRequest) -> QueryClientConnectionsResponse:
        """Query the connections associated with one client."""
        url = f'{self.API_URL}/client_connections/{request.client_id}'
        return Parse(self._rest_api.get(url), QueryClientConnectionsResponse())

    def ConnectionClientState(self, request: QueryConnectionClientStateRequest) -> QueryConnectionClientStateResponse:
        """Query the client state associated with a connection."""
        url = f'{self.API_URL}/connections/{request.connection_id}/client_state'
        return Parse(self._rest_api.get(url), QueryConnectionClientStateResponse())

    def ConnectionConsensusState(self, request: QueryConnectionConsensusStateRequest) -> QueryConnectionConsensusStateResponse:
        """Query the consensus state at a given revision height for a connection."""
        url = (
            f'{self.API_URL}/connections/{request.connection_id}'
            f'/consensus_state/revision/{request.revision_number}'
            f'/height/{request.revision_height}'
        )
        return Parse(self._rest_api.get(url), QueryConnectionConsensusStateResponse())
def test_machine_should_use_and_model_attr_other_than_state(campaign_machine):
    """A custom ``state_field`` must be tracked instead of the default ``state``."""
    model = MyModel(status='producing')
    machine = campaign_machine(model, state_field='status')
    # The default 'state' attribute is never created; 'status' carries state.
    assert getattr(model, 'state', None) is None
    assert model.status == 'producing'
    assert machine.current_state == machine.producing
    # A transition must update the custom field too.
    machine.deliver()
    assert model.status == 'closed'
    assert machine.current_state == machine.closed
def get_special_exec_cond_and_kernel(func_attrs, input_type, output_type, acc_type, output_accessors, reduction_op, reduction_identity) -> Tuple[str, str]:
    """Render the vectorized exec-condition guards and the reduction kernel source.

    For every vector type usable with *input_type*, renders one EXEC_COND
    guard (largest-vector templates are tried in the order given by
    ``vector_types``), then renders the reduction kernel itself.

    Returns a ``(exec_conditions, kernel_source)`` pair of strings.
    (Fix: the original annotated the return as the tuple literal
    ``(str, str)``, which is not a valid PEP 484 type hint.)
    """
    exec_conds = []
    for vector_type, vec_bytesize in vector_types[input_type]:
        # Number of input elements packed into one vector load; the vector
        # byte size is an exact multiple of the element size, so use exact
        # integer division instead of truncating a float quotient.
        vlen = vec_bytesize // bytesize[input_type]
        exec_cond = EXEC_COND_TEMPLATE.render(
            indent='  ',
            func_name=func_attrs['name'],
            elem_input_type=input_type,
            elem_output_type=output_type,
            elem_compute_type=acc_type,
            vector_type=vector_type,
            vlen=vlen,
        )
        exec_conds.append(exec_cond)
    special_reduction_code = SRC_TEMPLATE.render(
        func_name=func_attrs['name'],
        reduction_op=reduction_op,
        reduction_identity=reduction_identity,
    )
    return (''.join(exec_conds), special_reduction_code)
def train(trn_data: List[List[Tuple[str, str]]], dev_data: List[List[Tuple[str, str]]]) -> Tuple:
    """Grid-search the four feature weights and return the best argument tuple.

    Builds the feature dictionaries once from the training data, then
    evaluates every weight combination on the dev data, printing each score.
    """
    feature_dicts = (
        create_cw_dict(trn_data),
        create_pp_dict(trn_data),
        create_pw_dict(trn_data),
        create_nw_dict(trn_data),
    )
    grid = [0.1, 0.5, 1.0]
    best_acc = -1
    best_args = None
    for cw_w in grid:
        for pp_w in grid:
            for pw_w in grid:
                for nw_w in grid:
                    args = feature_dicts + (cw_w, pp_w, pw_w, nw_w)
                    acc = evaluate(dev_data, *args)
                    print('{:5.2f}% - cw: {:3.1f}, pp: {:3.1f}, pw: {:3.1f}, nw: {:3.1f}'.format(acc, cw_w, pp_w, pw_w, nw_w))
                    if acc > best_acc:
                        best_acc, best_args = acc, args
    return best_args
class SQLAlchemyDefaultImages(DefaultImages):
    """Default flytekit-with-SQLAlchemy container images, keyed by Python version."""
    # Registry prefixes only -- presumably the version tag suffix is appended
    # by the DefaultImages base class; TODO confirm against DefaultImages.
    _DEFAULT_IMAGE_PREFIXES = {PythonVersion.PYTHON_3_8: 'cr.flyte.org/flyteorg/flytekit:py3.8-sqlalchemy-', PythonVersion.PYTHON_3_9: 'cr.flyte.org/flyteorg/flytekit:py3.9-sqlalchemy-', PythonVersion.PYTHON_3_10: 'cr.flyte.org/flyteorg/flytekit:py3.10-sqlalchemy-', PythonVersion.PYTHON_3_11: 'cr.flyte.org/flyteorg/flytekit:py3.11-sqlalchemy-'}
class TestLoggingCoprPermissionsLogic(CoprsTestCase):
    """Verify that permission changes are audit-logged with the expected messages."""
    # NOTE(review): the parenthesized tuples and leading-dot lines below look
    # like the remains of stripped decorators -- presumably
    # ``@mock.patch('coprs.app.logger', return_value=MagicMock())`` and
    # ``@pytest.mark.usefixtures(...)`` -- TODO restore them; as written they
    # are not valid statements.
    ('coprs.app.logger', return_value=MagicMock())
    .usefixtures('f_users', 'f_coprs', 'f_copr_permissions', 'f_db')
    def test_update_permissions(self, log):
        """Approving/removing permissions logs who authorized what for whom."""
        perm = models.CoprPermission(copr=self.c2, user=self.u3, copr_builder=helpers.PermissionEnum('request'), copr_admin=helpers.PermissionEnum('request'))
        CoprPermissionsLogic.update_permissions(self.u2, self.c2, perm, new_builder=helpers.PermissionEnum('approved'), new_admin=helpers.PermissionEnum('nothing'))
        log.info.assert_called_with("User '%s' authorized permission change for project '%s' - The '%s' user is now 'builder=%s', 'admin=%s'", 'user2', 'user2/foocopr', 'user3', 'approved', 'nothing')
    ('coprs.app.logger', return_value=MagicMock())
    .usefixtures('f_users', 'f_coprs', 'f_copr_permissions', 'f_db')
    def test_update_permissions_by_applier(self, log):
        """A user's own permission request is logged exactly once."""
        CoprPermissionsLogic.update_permissions_by_applier(self.u2, self.c2, None, new_builder=helpers.PermissionEnum('request'), new_admin=helpers.PermissionEnum('nothing'))
        msg = "User '%s' requests 'builder=%s', 'admin=%s' permissions for project '%s'"
        log.info.assert_called_once_with(msg, 'user2', 'request', 'nothing', 'user2/foocopr')
    ('coprs.app.logger', return_value=MagicMock())
    .usefixtures('f_users', 'f_coprs', 'f_copr_permissions', 'f_db')
    def test_set_permissions(self, log):
        """Directly setting a single permission logs the authorizing user."""
        CoprPermissionsLogic.set_permissions(self.u2, self.c2, self.u3, 'builder', 'approved')
        log.info.assert_called_with("User '%s' authorized permission change for project '%s' - The '%s' user is now '%s=%s'", 'user2', 'user2/foocopr', 'user3', 'builder', 'approved')
    ('coprs.app.logger', return_value=MagicMock())
    .usefixtures('f_users', 'f_coprs', 'f_db')
    def test_request_permission(self, log):
        """Requesting a permission logs the requester and target project."""
        CoprPermissionsLogic.request_permission(self.c1, self.u2, 'builder', True)
        log.info.assert_called_once_with("User '%s' requests '%s=%s' permission for project '%s'", 'user2', 'builder', 'request', 'user1/foocopr')
# NOTE(review): the bare ``('/')`` below looks like a route decorator whose
# name was stripped -- presumably ``@app.route('/')`` or similar -- TODO
# confirm and restore.
('/')
def index():
    """Serve a static page of links into the admin UI, one per supported language."""
    tmp = u'\n<p><a href="/admin/?lang=en">Click me to get to Admin! (English)</a></p>\n<p><a href="/admin/?lang=cs">Click me to get to Admin! (Czech)</a></p>\n<p><a href="/admin/?lang=de">Click me to get to Admin! (German)</a></p>\n<p><a href="/admin/?lang=es">Click me to get to Admin! (Spanish)</a></p>\n<p><a href="/admin/?lang=fa">Click me to get to Admin! (Farsi)</a></p>\n<p><a href="/admin/?lang=fr">Click me to get to Admin! (French)</a></p>\n<p><a href="/admin/?lang=pt">Click me to get to Admin! (Portuguese)</a></p>\n<p><a href="/admin/?lang=ru">Click me to get to Admin! (Russian)</a></p>\n<p><a href="/admin/?lang=pa">Click me to get to Admin! (Punjabi)</a></p>\n<p><a href="/admin/?lang=zh_CN">Click me to get to Admin! (Chinese - Simplified)</a></p>\n<p><a href="/admin/?lang=zh_TW">Click me to get to Admin! (Chinese - Traditional)</a></p>\n'
    return tmp
class TestPluginRoutes():
    """Integration tests for FACT plugin route discovery and registration."""

    def setup_method(self):
        # Fresh Flask app and REST API wrapper for every test.
        self.app = Flask(__name__)
        self.app.config.from_object(__name__)
        self.api = Api(self.app)

    def test_get_modules_in_path(self):
        """Every known plugin category directory must be discovered as a module."""
        plugin_dir_path = os.path.join(get_src_dir(), 'plugins')
        plugin_folder_modules = _get_modules_in_path(plugin_dir_path)
        assert (len(plugin_folder_modules) >= 3)
        for category in PLUGIN_CATEGORIES:
            assert (category in plugin_folder_modules)

    def test_find_plugins(self):
        """Plugin discovery must yield all categories and the known test plugins."""
        result = _find_plugins()
        (categories, plugins) = zip(*result)
        plugins = chain(*plugins)
        assert all(((c in categories) for c in PLUGIN_CATEGORIES))
        assert ('dummy' in plugins)
        assert ('file_coverage' in plugins)

    def test_module_has_routes(self):
        """Only plugins that define a routes module are reported as having routes."""
        assert (_module_has_routes('dummy', 'analysis') is True)
        assert (_module_has_routes('file_type', 'analysis') is False)

    def test_import_module_routes(self):
        """Importing a plugin's routes must register its view endpoint."""
        dummy_endpoint = 'plugins/dummy'
        plugin_routes = PluginRoutesMock(self.app, api=self.api)
        assert (dummy_endpoint not in self._get_app_endpoints(self.app))
        plugin_routes._import_module_routes('dummy', 'analysis')
        assert (dummy_endpoint in self._get_app_endpoints(self.app))
        test_client = self.app.test_client()
        result = test_client.get(dummy_endpoint)
        assert (result.data == b'dummy')

    def test_import_module_routes__rest(self):
        """Importing a plugin's routes must also register its REST endpoint."""
        dummy_endpoint = 'plugins/dummy/rest'
        plugin_routes = PluginRoutesMock(self.app, api=self.api)
        assert (dummy_endpoint not in self._get_app_endpoints(self.app))
        plugin_routes._import_module_routes('dummy', 'analysis')
        test_client = self.app.test_client()
        result = test_client.get(dummy_endpoint).json
        assert ('dummy' in result)
        assert ('rest' in result['dummy'])

    # NOTE(review): this helper takes ``app`` with no ``self`` yet is called
    # as ``self._get_app_endpoints(self.app)`` -- presumably a stripped
    # ``@staticmethod`` decorator; TODO confirm and restore.
    def _get_app_endpoints(app):
        # Collect the endpoint names of every registered URL rule.
        rules = []
        for rule in app.url_map.iter_rules():
            rules.append(rule.endpoint)
        return rules
class NumberVersion(ModelSimple):
    """Auto-generated OpenAPI model wrapping a single integer version value.

    The wrapped ``value`` is validated to be >= 1 (see ``validations``).
    NOTE(review): the bare ``_property`` and ``_js_args_to_python_args``
    names below look like decorators whose ``@`` (and possibly prefix) was
    stripped -- presumably ``@cached_property`` and
    ``@convert_js_args_to_python_args`` -- TODO restore them.
    """
    # No enum restriction on the value.
    allowed_values = {}
    # The wrapped value must be at least 1.
    validations = {('value',): {'inclusive_minimum': 1}}
    additional_properties_type = None
    _nullable = False
    _property
    def openapi_types():
        # The model's single attribute and its accepted type(s).
        return {'value': (int,)}
    _property
    def discriminator():
        # Simple model: no polymorphic discriminator.
        return None
    attribute_map = {}
    read_only_vars = set()
    _composed_schemas = None
    # Internal bookkeeping attributes that must always exist on an instance.
    required_properties = set(['_data_store', '_check_type', '_spec_property_naming', '_path_to_item', '_configuration', '_visited_composed_classes'])
    _js_args_to_python_args
    def __init__(self, *args, **kwargs):
        """Accept the wrapped value positionally or as ``value=``; reject anything else.

        Raises ApiTypeError when the value is missing or when extra
        positional/keyword arguments remain after consuming the known ones.
        """
        _path_to_item = kwargs.pop('_path_to_item', ())
        if ('value' in kwargs):
            value = kwargs.pop('value')
        elif args:
            args = list(args)
            value = args.pop(0)
        else:
            raise ApiTypeError("value is required, but not passed in args or kwargs and doesn't have default", path_to_item=_path_to_item, valid_classes=(self.__class__,))
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        if args:
            raise ApiTypeError(('Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments.' % (args, self.__class__.__name__)), path_to_item=_path_to_item, valid_classes=(self.__class__,))
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        # Track the classes already visited to guard composed-schema recursion.
        self._visited_composed_classes = (_visited_composed_classes + (self.__class__,))
        self.value = value
        if kwargs:
            raise ApiTypeError(('Invalid named arguments=%s passed to %s. Remove those invalid named arguments.' % (kwargs, self.__class__.__name__)), path_to_item=_path_to_item, valid_classes=(self.__class__,))
    _js_args_to_python_args
    def _from_openapi_data(cls, *args, **kwargs):
        """Alternate constructor used when deserializing server data.

        Takes ``cls`` -- presumably a stripped ``@classmethod`` decorator;
        TODO confirm. Mirrors ``__init__`` but builds the instance via
        ``__new__`` to bypass client-side defaulting.
        """
        _path_to_item = kwargs.pop('_path_to_item', ())
        self = super(OpenApiModel, cls).__new__(cls)
        if ('value' in kwargs):
            value = kwargs.pop('value')
        elif args:
            args = list(args)
            value = args.pop(0)
        else:
            raise ApiTypeError("value is required, but not passed in args or kwargs and doesn't have default", path_to_item=_path_to_item, valid_classes=(self.__class__,))
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        if args:
            raise ApiTypeError(('Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments.' % (args, self.__class__.__name__)), path_to_item=_path_to_item, valid_classes=(self.__class__,))
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = (_visited_composed_classes + (self.__class__,))
        self.value = value
        if kwargs:
            raise ApiTypeError(('Invalid named arguments=%s passed to %s. Remove those invalid named arguments.' % (kwargs, self.__class__.__name__)), path_to_item=_path_to_item, valid_classes=(self.__class__,))
        return self
def on_call_check_tokens(request: _Request) -> _OnCallTokenVerification:
    """Validate the Auth and App Check tokens attached to a callable request.

    Classifies each token as MISSING/VALID (INVALID is set by the check
    helpers), logs a structured summary, and returns the combined
    verification result; the accept/reject decision is left to the caller.
    """
    verifications = _OnCallTokenVerification()

    auth_token = _on_call_check_auth_token(request)
    if auth_token is None:
        verifications.auth = OnCallTokenState.MISSING
    elif isinstance(auth_token, dict):
        verifications.auth = OnCallTokenState.VALID
        verifications.auth_token = auth_token

    app_token = _on_call_check_app_token(request)
    if app_token is None:
        verifications.app = OnCallTokenState.MISSING
    elif isinstance(app_token, dict):
        verifications.app = OnCallTokenState.VALID
        verifications.app_token = app_token

    # Structured payload for Google Cloud Logging label-based filtering.
    log_payload = {**verifications.as_dict(), 'logging.googleapis.com/labels': {'firebase-log-type': 'callable-request-verification'}}

    errs = []
    if verifications.app == OnCallTokenState.INVALID:
        errs.append(('AppCheck token was rejected.', log_payload))
    if verifications.auth == OnCallTokenState.INVALID:
        errs.append(('Auth token was rejected.', log_payload))
    if len(errs) == 0:
        _logging.info('Callable request verification passed: %s', log_payload)
    else:
        # BUG FIX: the original message used the JavaScript template syntax
        # ``${errs}`` inside a Python f-string (printing a literal '$') and
        # additionally passed ``log_payload`` as a %-format argument with no
        # placeholder, which breaks log record formatting. Use lazy %-style
        # formatting; each entry of ``errs`` already carries the payload.
        _logging.warning('Callable request verification failed: %s', errs)
    return verifications
class Server(object):
    """Base class for a PySOA service server (visible portion of the class)."""
    # Hook points a service subclass may override:
    settings_class = ServerSettings          # settings schema used to configure the server
    request_class = EnrichedActionRequest    # per-action request wrapper built in execute_job
    client_class = Client                    # client used for service-to-service calls (make_client)
    use_django = False                       # enables Django connection/cache housekeeping helpers
    service_name = None                      # subclasses MUST set this (enforced in __init__)
    action_class_map = {}                    # maps action name -> Action class (see execute_job)
    introspection_action = None              # optional custom 'introspect' action factory
def __init__(self, settings, forked_process_id=None):
    """Build metrics, transport, middleware and logging from *settings*.

    :param settings: the validated server settings mapping
    :param forked_process_id: worker index when running forked, else None
    :raises AttributeError: if the subclass did not set ``service_name``
    """
    if (not self.service_name):
        raise AttributeError('Server subclass must set service_name')
    self.settings = settings
    # Substitute the fork ID into metrics publisher settings so that each
    # forked worker publishes under a distinct identity.
    if self.settings['metrics'].get('kwargs', {}).get('config', {}).get('publishers', {}):
        fid = ('main' if (forked_process_id is None) else six.text_type(forked_process_id))
        for publisher in self.settings['metrics']['kwargs']['config']['publishers']:
            if (self.settings['metrics']['kwargs']['config']['version'] == 1):
                _replace_fid(publisher, fid)
            elif publisher.get('kwargs', {}):
                _replace_fid(publisher['kwargs'], fid)
    self.metrics = self.settings['metrics']['object'](**self.settings['metrics'].get('kwargs', {}))
    self.transport = self.settings['transport']['object'](self.service_name, self.metrics, (forked_process_id or 1), **self.settings['transport'].get('kwargs', {}))
    # Optional asyncio support: a single event-loop thread configured with
    # the coroutine middleware, if the feature is available at all.
    self._async_event_loop_thread = None
    if AsyncEventLoopThread:
        self._async_event_loop_thread = AsyncEventLoopThread([m['object'](**m.get('kwargs', {})) for m in self.settings['coroutine_middleware']])
    self.shutting_down = False
    self._shutdown_lock = threading.Lock()
    self._last_signal = 0
    self._last_signal_received = 0.0
    # Instantiate the (synchronous) middleware and pre-compose the job
    # pipeline around execute_job.
    self._middleware = [m['object'](**m.get('kwargs', {})) for m in self.settings['middleware']]
    self._middleware_job_wrapper = self.make_middleware_stack([m.job for m in self._middleware], self.execute_job)
    self.logger = logging.getLogger('pysoa.server')
    self.job_logger = logging.getLogger('pysoa.server.job')
    self.request_log_success_level = logging.getLevelName(self.settings['request_log_success_level'])
    self.request_log_error_level = logging.getLevelName(self.settings['request_log_error_level'])

    # Request/response dicts are censored before logging; extend the default
    # redaction set with any configured extra fields.
    class DictWrapper(RecursivelyCensoredDictWrapper):
        SENSITIVE_FIELDS = frozenset((RecursivelyCensoredDictWrapper.SENSITIVE_FIELDS | settings['extra_fields_to_redact']))
    self.logging_dict_wrapper_class = DictWrapper
    self._default_status_action_class = None
    self._idle_timer = None
    self._heartbeat_file = None
    self._heartbeat_file_path = None
    self._heartbeat_file_last_update = 0.0
    self._forked_process_id = forked_process_id
    self._skip_django_database_cleanup = False
def handle_next_request(self):
    """Receive one job request from the transport, process it, send the response.

    Also maintains the idle timer and busy gauges, logs the (censored)
    request/response, and recovers from oversized or unserializable
    responses by sending an error response instead.
    """
    # Time spent waiting for work is tracked as idle time.
    if (not self._idle_timer):
        self._idle_timer = self.metrics.timer('server.idle_time', resolution=TimerResolution.MICROSECONDS)
        self._idle_timer.start()
    try:
        (request_id, meta, job_request) = self.transport.receive_request_message()
        if ((request_id is None) or (meta is None) or (job_request is None)):
            self.logger.warning('Thought to be impossible, but the transport returned None')
            raise MessageReceiveTimeout()
    except MessageReceiveTimeout:
        # No work available: run idle housekeeping and keep waiting.
        self._idle_timer.stop()
        self.perform_idle_actions()
        self._set_busy_metrics(False)
        self._idle_timer.start()
        return
    self._idle_timer.stop()
    self._idle_timer = None
    self._set_busy_metrics(True)
    self.metrics.publish_all()
    try:
        PySOALogContextFilter.set_logging_request_context(request_id=request_id, **job_request.get('context', {}))
    except TypeError:
        # Fallback for contexts whose keys are not unicode (Python 2 bytes).
        PySOALogContextFilter.set_logging_request_context(request_id=request_id, **{six.text_type(k): v for (k, v) in six.iteritems(job_request['context'])})
    request_for_logging = self.logging_dict_wrapper_class(job_request)
    self.job_logger.log(self.request_log_success_level, 'Job request: %s', request_for_logging)
    # Older clients (pre-0.40.0, presumably -- TODO confirm) do not send a
    # version; response fields newer than the client are filtered out below.
    client_version = (tuple(meta['client_version']) if ('client_version' in meta) else (0, 40, 0))
    def attr_filter(attrib, _value):
        # Keep a response field only if the client is new enough to know it.
        return ((not attrib.metadata) or ('added_in_version' not in attrib.metadata) or (client_version >= attrib.metadata['added_in_version']))
    try:
        self.perform_pre_request_actions()
        job_response = self.process_job(job_request)
        try:
            response_message = attr.asdict(job_response, dict_factory=UnicodeKeysDict, filter=attr_filter)
        except Exception as e:
            # The response itself could not be converted; replace it with an
            # unhandled-error response and convert that instead.
            self.metrics.counter('server.error.response_conversion_failure').increment()
            job_response = self.handle_unhandled_exception(e, JobResponse, variables={'job_response': job_response})
            response_message = attr.asdict(job_response, dict_factory=UnicodeKeysDict, filter=attr_filter)
        response_for_logging = self.logging_dict_wrapper_class(response_message)
        try:
            if (not job_request.get('control', {}).get('suppress_response', False)):
                self.transport.send_response_message(request_id, meta, response_message)
        except MessageTooLarge as e:
            # Replace the oversized response with a small error response.
            self.metrics.counter('server.error.response_too_large').increment()
            job_response = self.handle_job_error_code(ERROR_CODE_RESPONSE_TOO_LARGE, 'Could not send the response because it was too large', request_for_logging, response_for_logging, extra={'serialized_length_in_bytes': e.message_size_in_bytes})
            self.transport.send_response_message(request_id, meta, attr.asdict(job_response, dict_factory=UnicodeKeysDict, filter=attr_filter))
        except InvalidField:
            self.metrics.counter('server.error.response_not_serializable').increment()
            job_response = self.handle_job_error_code(ERROR_CODE_RESPONSE_NOT_SERIALIZABLE, 'Could not send the response because it failed to serialize', request_for_logging, response_for_logging)
            self.transport.send_response_message(request_id, meta, attr.asdict(job_response, dict_factory=UnicodeKeysDict, filter=attr_filter))
        finally:
            # Log the response; errored responses may be re-logged (with the
            # request) at the error level if that level is otherwise filtered.
            if (job_response.errors or any((a.errors for a in job_response.actions))):
                if ((self.request_log_error_level > self.request_log_success_level) and (self.job_logger.getEffectiveLevel() > self.request_log_success_level)):
                    self.job_logger.log(self.request_log_error_level, 'Job request: %s', request_for_logging)
                self.job_logger.log(self.request_log_error_level, 'Job response: %s', response_for_logging)
            else:
                self.job_logger.log(self.request_log_success_level, 'Job response: %s', response_for_logging)
    finally:
        PySOALogContextFilter.clear_logging_request_context()
        self.perform_post_request_actions()
        self._set_busy_metrics(False)
def make_client(self, context, extra_context=None, **kwargs):
    """Create a client for calling other services, tagged with this service's name.

    *context* is copied (and optionally merged with *extra_context*) so the
    caller's dict is never mutated.
    """
    merged_context = dict(context)
    if extra_context:
        merged_context.update(extra_context)
    merged_context['calling_service'] = self.service_name
    return self.client_class(self.settings['client_routing'], context=merged_context, **kwargs)
def make_middleware_stack(middleware, base):
    """Compose *middleware* callables around *base*, outermost first.

    Wrapping in reverse order makes ``middleware[0]`` the outermost layer.
    """
    stacked = base
    for layer in reversed(middleware):
        stacked = layer(stacked)
    return stacked
def process_job(self, job_request):
    """Validate and run one job through the middleware pipeline.

    Returns a JobResponse; schema violations, harakiri interrupts, job
    errors and unexpected exceptions are all converted into error responses
    rather than propagated.
    """
    try:
        # Schema-validate the raw request dict before enriching it.
        validation_errors = [Error(code=error.code, message=error.message, field=error.pointer, is_caller_error=False) for error in (JobRequestSchema.errors(job_request) or [])]
        if validation_errors:
            raise JobError(errors=validation_errors, set_is_caller_error_to=None)
        # Enrich the request with a service client and coroutine runner.
        job_request['client'] = self.make_client(job_request['context'])
        if self._async_event_loop_thread:
            job_request['run_coroutine'] = self._async_event_loop_thread.run_coroutine
        else:
            job_request['run_coroutine'] = None
        job_response = self._middleware_job_wrapper(EnrichedJobRequest(**job_request))
        # Echo the correlation ID back so callers can match responses.
        if ('correlation_id' in job_request['context']):
            job_response.context['correlation_id'] = job_request['context']['correlation_id']
    except HarakiriInterrupt:
        self.metrics.counter('server.error.harakiri', harakiri_level='job')
        job_response = JobResponse(errors=[Error(code=ERROR_CODE_JOB_TIMEOUT, message='The service job ran for too long and had to be interrupted (probably a middleware issue).', is_caller_error=False)])
    except JobError as e:
        self.metrics.counter('server.error.job_error').increment()
        job_response = JobResponse(errors=e.errors)
    except Exception as e:
        self.metrics.counter('server.error.unhandled_error').increment()
        return self.handle_unhandled_exception(e, JobResponse)
    return job_response
def handle_unhandled_exception(self, exception, response_type, variables=None, **kwargs):
    """Convert an unexpected exception into a server-error response.

    :param exception: the caught exception
    :param response_type: JobResponse or ActionResponse class to build
    :param variables: optional dict of local variables to include (repr'd)
    :param kwargs: extra keyword arguments forwarded to *response_type*
    """
    try:
        (error_str, traceback_str) = (six.text_type(exception), traceback.format_exc())
    except Exception:
        # Even stringifying the exception failed; report that instead.
        self.metrics.counter('server.error.error_formatting_failure').increment()
        (error_str, traceback_str) = ('Error formatting error', traceback.format_exc())
    self.logger.exception(exception)
    # Python 2 may yield a byte-string traceback; normalize to unicode.
    if (not isinstance(traceback_str, six.text_type)):
        try:
            traceback_str = traceback_str.decode('utf-8')
        except UnicodeDecodeError:
            traceback_str = 'UnicodeDecodeError: Traceback could not be decoded'
    error_dict = {'code': ERROR_CODE_SERVER_ERROR, 'message': ('Internal server error: %s' % error_str), 'traceback': traceback_str, 'is_caller_error': False}
    if (variables is not None):
        try:
            # repr() each variable so arbitrary objects can be serialized.
            error_dict['variables'] = {key: repr(value) for (key, value) in variables.items()}
        except Exception:
            self.metrics.counter('server.error.variable_formatting_failure').increment()
            error_dict['variables'] = 'Error formatting variables'
    return response_type(errors=[Error(**error_dict)], **kwargs)
def handle_job_error_code(self, code, message, request_for_logging, response_for_logging, extra=None):
    """Log a job-level failure with request/response context and build the error response."""
    data = {'request': request_for_logging, 'response': response_for_logging}
    if extra:
        data.update(extra)
    self.logger.error(message, exc_info=True, extra={'data': data})
    return JobResponse(errors=[Error(code=code, message=message, is_caller_error=False)])
def execute_job(self, job_request):
    """Run every action in the job, collecting one ActionResponse per action.

    Stops early on harakiri or when an action errors and the job did not set
    ``continue_on_error``. Unknown actions yield an UNKNOWN error response.
    """
    harakiri = False
    job_response = JobResponse()
    job_switches = RequestSwitchSet(job_request.context['switches'])
    for (i, simple_action_request) in enumerate(job_request.actions):
        action_request = self.request_class(action=simple_action_request.action, body=simple_action_request.body, switches=job_switches, context=job_request.context, control=job_request.control, client=job_request.client, run_coroutine=job_request.run_coroutine)
        action_request._server = self
        action_in_class_map = (action_request.action in self.action_class_map)
        # 'status' and 'introspect' are built-in actions with fallbacks.
        if (action_in_class_map or (action_request.action in ('status', 'introspect'))):
            if action_in_class_map:
                action = self.action_class_map[action_request.action](self.settings)
            elif (action_request.action == 'introspect'):
                if (self.introspection_action is not None):
                    action = self.introspection_action(self)
                else:
                    # Deferred import avoids a circular dependency at module load.
                    from pysoa.server.action.introspection import IntrospectionAction
                    action = IntrospectionAction(server=self)
            else:
                if (not self._default_status_action_class):
                    from pysoa.server.action.status import make_default_status_action_class
                    self._default_status_action_class = make_default_status_action_class(self.__class__)
                action = self._default_status_action_class(self.settings)
            # Wrap the action with the per-action middleware chain.
            wrapper = self.make_middleware_stack([m.action for m in self._middleware], action)
            try:
                PySOALogContextFilter.set_logging_action_name(action_request.action)
                action_response = wrapper(action_request)
            except HarakiriInterrupt:
                self.metrics.counter('server.error.harakiri', harakiri_level='action')
                action_response = ActionResponse(action=action_request.action, errors=[Error(code=ERROR_CODE_ACTION_TIMEOUT, message='The action "{}" ran for too long and had to be interrupted.'.format(action_request.action), is_caller_error=False)])
                harakiri = True
            except ActionError as e:
                # Expected, structured action failure.
                action_response = ActionResponse(action=action_request.action, errors=e.errors)
            except JobError:
                # Job-level errors are handled by process_job, not here.
                raise
            except Exception as e:
                self.metrics.counter('server.error.unhandled_error').increment()
                action_response = self.handle_unhandled_exception(e, ActionResponse, action=action_request.action)
            finally:
                PySOALogContextFilter.clear_logging_action_name()
        else:
            action_response = ActionResponse(action=action_request.action, errors=[Error(code=ERROR_CODE_UNKNOWN, message='The action "{}" was not found on this server.'.format(action_request.action), field='action', is_caller_error=True)])
        job_response.actions.append(action_response)
        if (harakiri or (action_response.errors and (not job_request.control.get('continue_on_error', False)))):
            break
    return job_response
def handle_shutdown_signal(self, signal_number, _stack_frame):
    """SIGINT/SIGTERM handler: request graceful shutdown; force-exit on a repeat.

    A second, *different* interrupt within one second is treated as a
    duplicate (e.g. a process manager sending both signals) and ignored.
    """
    if (not self._shutdown_lock.acquire(False)):
        # Another signal handler is already running; do nothing.
        return
    try:
        if self.shutting_down:
            if ((self._last_signal in (signal.SIGINT, signal.SIGTERM)) and (self._last_signal != signal_number) and ((time.time() - self._last_signal_received) < 1)):
                self.logger.info('Ignoring duplicate shutdown signal received within one second of original signal')
            else:
                self.logger.warning('Received double interrupt, forcing shutdown')
                sys.exit(1)
        else:
            self.logger.warning('Received interrupt, initiating shutdown')
            self.shutting_down = True
        self._last_signal = signal_number
        self._last_signal_received = time.time()
    finally:
        self._shutdown_lock.release()
def harakiri(self, signal_number, _stack_frame):
    """SIGALRM handler: interrupt a stuck worker, force-exiting on the second alarm.

    First alarm: log all thread stacks, start a grace-period alarm, and raise
    HarakiriInterrupt to unwind the current work. Second alarm (grace period
    expired): log, run atexit handlers with a timeout, and hard-exit.
    """
    if (not self._shutdown_lock.acquire(False)):
        return
    # Snapshot a stack trace for every live thread for diagnostics.
    threads = {cast(int, t.ident): {'name': t.name, 'traceback': ['Unknown']} for t in threading.enumerate()}
    for (thread_id, frame) in sys._current_frames().items():
        stack = []
        for f in traceback.format_stack(frame):
            stack.extend(f.rstrip().split('\n'))
        # Trim this handler's own frames from its thread's stack.
        if (('for f in traceback.format_stack(frame):' in stack[(- 1)]) and ('in harakiri' in stack[(- 2)])):
            stack = stack[:(- 2)]
        threads.setdefault(thread_id, {'name': thread_id})['traceback'] = stack
    extra = {'data': {'thread_status': {t['name']: [line.rstrip() for line in t['traceback']] for t in threads.values()}}}
    details = 'Current thread status at harakiri trigger:\n{}'.format('\n'.join(('Thread {}:\n{}'.format(t['name'], '\n'.join(t['traceback'])) for t in threads.values())))
    try:
        self._last_signal = signal_number
        self._last_signal_received = time.time()
        if self.shutting_down:
            # Grace period already elapsed; emergency exit.
            self.logger.error('Graceful shutdown failed {} seconds after harakiri. Exiting now!'.format(self.settings['harakiri']['shutdown_grace']), extra=extra)
            self.logger.info(details)
            try:
                self.metrics.counter('server.error.harakiri', harakiri_level='emergency')
                self.metrics.publish_all()
            finally:
                try:
                    # Give atexit handlers a bounded chance to run before the
                    # unconditional os._exit below.
                    exit_func = getattr(atexit, '_run_exitfuncs', None)
                    if exit_func:
                        thread = threading.Thread(target=exit_func)
                        thread.start()
                        thread.join(5.0)
                    else:
                        time.sleep(2.0)
                finally:
                    os._exit(1)
        else:
            self.logger.warning('No activity for {} seconds, triggering harakiri with grace period of {} seconds'.format(self.settings['harakiri']['timeout'], self.settings['harakiri']['shutdown_grace']), extra=extra)
            self.logger.info(details)
            # Arm the grace-period alarm before unwinding the current work.
            signal.alarm(self.settings['harakiri']['shutdown_grace'])
            self.shutting_down = True
            raise HarakiriInterrupt()
    finally:
        self._shutdown_lock.release()
def setup(self):
    """Hook invoked once before the run loop starts; subclasses may override. (No-op.)"""
def teardown(self):
    """Hook invoked once during shutdown; subclasses may override. (No-op.)"""
def _close_old_django_connections(self):
    """Close stale Django DB connections, unless Django support or cleanup is disabled."""
    if (self.use_django and (not self._skip_django_database_cleanup)):
        django_close_old_database_connections()
def _close_django_caches(self, shutdown=False):
    """Close every configured Django cache; *shutdown* marks the final close."""
    if (self.use_django and django_caches):
        if shutdown:
            self.logger.info('Closing all Django caches')
        for cache in django_caches.all():
            cache.close(for_shutdown=shutdown)
def _create_heartbeat_file(self):
    """Create the liveness heartbeat file, if one is configured.

    The configured path may contain ``{{pid}}`` and ``{{fid}}`` placeholders,
    substituted with the process ID and forked-process ID respectively.
    """
    if self.settings['heartbeat_file']:
        heartbeat_file_path = self.settings['heartbeat_file'].replace('{{pid}}', six.text_type(os.getpid()))
        if (('{{fid}}' in heartbeat_file_path) and (self._forked_process_id is not None)):
            heartbeat_file_path = heartbeat_file_path.replace('{{fid}}', six.text_type(self._forked_process_id))
        self.logger.info('Creating heartbeat file {}'.format(heartbeat_file_path))
        file_path = os.path.abspath(heartbeat_file_path)
        self._heartbeat_file_path = file_path
        self._heartbeat_file = codecs.open(filename=file_path, mode='wb', encoding='utf-8')
        self._update_heartbeat_file()
def _delete_heartbeat_file(self):
    """Close and remove the heartbeat file, logging (not raising) any failure."""
    if self._heartbeat_file:
        self.logger.info('Closing and removing heartbeat file')
        try:
            self._heartbeat_file.close()
        except Exception:
            self.logger.warning('Error while closing heartbeat file', exc_info=True)
        finally:
            # Attempt removal even if closing failed.
            try:
                if self._heartbeat_file_path:
                    os.remove(self._heartbeat_file_path)
            except Exception:
                self.logger.warning('Error while removing heartbeat file', exc_info=True)
def _update_heartbeat_file(self):
    """Rewrite the heartbeat timestamp, throttled to at most once per 2.5 seconds."""
    if (self._heartbeat_file and ((time.time() - self._heartbeat_file_last_update) > 2.5)):
        self._heartbeat_file.seek(0)
        self._heartbeat_file.write(six.text_type(time.time()))
        self._heartbeat_file.flush()
        self._heartbeat_file_last_update = time.time()
def perform_pre_request_actions(self):
    """Housekeeping run before each request: flush metrics, refresh Django DB state."""
    self.metrics.publish_all()
    if self.use_django:
        django_reset_database_queries()
        self._close_old_django_connections()
def perform_post_request_actions(self):
    """Housekeeping run after each request: Django cleanup and heartbeat refresh."""
    self._close_old_django_connections()
    self._close_django_caches()
    self._update_heartbeat_file()
def perform_idle_actions(self):
    """Housekeeping run while waiting for work: Django cleanup and heartbeat refresh."""
    self._close_old_django_connections()
    self._update_heartbeat_file()
def _set_busy_metrics(self, busy, running=True):
    """Publish the worker's running/busy state as 0/1 gauges."""
    self.metrics.gauge('server.worker.running').set((1 if running else 0))
    self.metrics.gauge('server.worker.busy').set((1 if busy else 0))
def run(self):
    """Main worker loop: set up the server, process requests until shutdown is
    requested (or a fatal error occurs), then tear everything down.

    Transient transport errors are retried with exponential backoff and
    jitter; more than five consecutive failures shut the worker down.
    A harakiri alarm is armed before each request so hung handlers are
    interrupted.
    """
    self.logger.info('Service "{service}" server starting up, pysoa version {pysoa}, listening on transport {transport}.'.format(service=self.service_name, pysoa=pysoa.version.__version__, transport=self.transport))
    self.setup()
    self.metrics.counter('server.worker.startup').increment()
    self._set_busy_metrics(False)
    self.metrics.publish_all()
    if self._async_event_loop_thread:
        self._async_event_loop_thread.start()
    self._create_heartbeat_file()
    signal.signal(signal.SIGINT, self.handle_shutdown_signal)
    signal.signal(signal.SIGTERM, self.handle_shutdown_signal)
    signal.signal(signal.SIGALRM, self.harakiri)
    transient_failures = 0
    try:
        while (not self.shutting_down):
            # Arm the watchdog: SIGALRM fires if the request takes too long.
            signal.alarm(self.settings['harakiri']['timeout'])
            try:
                self.handle_next_request()
                # A success slowly decays the transient-failure count.
                if (transient_failures > 0):
                    transient_failures -= 1
            except TransientPySOATransportError:
                if (transient_failures > 5):
                    self.logger.exception('Too many errors receiving message from transport; shutting down!')
                    break
                # Exponential backoff with jitter before retrying.
                sleep = (((2 ** transient_failures) + random.random()) / 4.0)
                self.logger.info('Transient error receiving message from transport, sleeping {} seconds and continuing.'.format(sleep))
                time.sleep(sleep)
                transient_failures += 1
            finally:
                self.metrics.publish_all()
    except HarakiriInterrupt:
        # BUG FIX: the counter was created but never incremented (compare the
        # 'server.error.unknown' handling below).
        self.metrics.counter('server.error.harakiri', harakiri_level='server').increment()
        self.logger.error('Harakiri interrupt occurred outside of action or job handling')
    except Exception:
        self.metrics.counter('server.error.unknown').increment()
        self.logger.exception('Unhandled server error; shutting down')
    finally:
        self.teardown()
        self.metrics.counter('server.worker.shutdown').increment()
        self._set_busy_metrics(False, False)
        self.metrics.publish_all()
        self.logger.info('Server shutting down')
        if self._async_event_loop_thread:
            self._async_event_loop_thread.join()
        self._close_django_caches(shutdown=True)
        self._delete_heartbeat_file()
        self.logger.info('Server shutdown complete')
def pre_fork(cls):
    """Hook invoked before worker processes are forked; no-op by default.

    NOTE(review): the bare ``cls`` first parameter suggests a stripped
    ``@classmethod`` decorator — confirm against the original source.
    """

def initialize(cls, settings):
    """Return the server class to instantiate; subclasses may override to
    customize construction based on ``settings``.

    NOTE(review): likely also a stripped ``@classmethod`` — confirm.
    """
    return cls
def main(cls, forked_process_id=None):
    """Command-line entry point: parse options, load settings, optionally
    daemonize, then construct and run the server.

    Settings come from Django's ``SOA_SERVER_SETTINGS`` when Django support is
    enabled, otherwise from a ``--settings`` module exposing
    ``SOA_SERVER_SETTINGS`` (or a ``settings`` fallback variable).

    NOTE(review): the bare ``cls`` first parameter suggests a stripped
    ``@classmethod`` decorator — confirm against the original source.
    """
    parser = argparse.ArgumentParser(description='Server for the {} SOA service'.format(cls.service_name))
    parser.add_argument('-d', '--daemon', action='store_true', help='run the server process as a daemon')
    if (not cls.use_django):
        # Without Django, an explicit settings module is mandatory.
        parser.add_argument('-s', '--settings', help='The settings module to use', required=True)
    (cmd_options, _) = parser.parse_known_args(sys.argv[1:])
    if cls.use_django:
        if (not django_settings):
            raise ImportError('Could not import Django. You must install Django if you enable Django support in your service.')
        try:
            settings = cls.settings_class(django_settings.SOA_SERVER_SETTINGS)
        except AttributeError:
            raise ValueError('Cannot find `SOA_SERVER_SETTINGS` in the Django settings.')
    else:
        try:
            settings_module = importlib.import_module(cmd_options.settings)
        except ImportError as e:
            raise ValueError(('Cannot import settings module `%s`: %s' % (cmd_options.settings, e)))
        try:
            settings_dict = getattr(settings_module, 'SOA_SERVER_SETTINGS')
        except AttributeError:
            try:
                settings_dict = getattr(settings_module, 'settings')
            except AttributeError:
                raise ValueError('Cannot find `SOA_SERVER_SETTINGS` or `settings` variable in settings module `{}`.'.format(cmd_options.settings))
        settings = cls.settings_class(settings_dict)
    if (not cls.service_name):
        raise AttributeError('Server subclass must set service_name')
    PySOALogContextFilter.set_service_name(cls.service_name)
    logging.config.dictConfig(settings['logging'])
    if cmd_options.daemon:
        # Daemonize: the parent prints the child's PID and exits; the child
        # continues as the server process.
        pid = os.fork()
        if (pid > 0):
            print('PID={}'.format(pid))
            sys.exit()
    server = cls.initialize(settings)(settings, forked_process_id)
    server.run()
.compilertest
def test_irauth_grpcservice_version_default():
    """An AuthService with proto: grpc but no protocol_version must be rejected:
    no ext_authz config is generated and a v2-unsupported error is recorded."""
    if EDGE_STACK:
        pytest.xfail('XFailing for now, custom AuthServices not supported in Edge Stack')
    yaml = '\n---\napiVersion: getambassador.io/v3alpha1\nkind: AuthService\nmetadata:\n  name: mycoolauthservice\n  namespace: default\nspec:\n  auth_service: someservice\n  proto: grpc\n'
    econf = _get_envoy_config(yaml)
    conf = econf.as_dict()
    ext_auth_config = _get_ext_auth_config(conf)
    # No ext_auth filter should be emitted for the invalid AuthService.
    assert (ext_auth_config == False)
    errors = econf.ir.aconf.errors['mycoolauthservice.default.1']
    assert (errors[0]['error'] == 'AuthService: protocol_version v2 is unsupported, protocol_version must be "v3"')
class CatalogItemAppealStatus(AbstractObject):
    """Generated Graph-API object describing the appeal status of a catalog item.

    NOTE(review): this looks like auto-generated SDK code; ``_get_field_enum_info``
    takes ``cls`` and appears to have had a ``@classmethod`` decorator stripped
    during extraction — confirm against the original source.
    """
    def __init__(self, api=None):
        super(CatalogItemAppealStatus, self).__init__()
        self._isCatalogItemAppealStatus = True
        self._api = api

    class Field(AbstractObject.Field):
        # Names of the API fields exposed by this object.
        handle = 'handle'
        item_id = 'item_id'
        status = 'status'
        use_cases = 'use_cases'

    class Status():
        # Enum of human-readable status strings returned by the API.
        this_item_cannot_be_appealed_as_it_is_either_approved_or_already_has_an_appeal = 'This item cannot be appealed as it is either approved or already has an appeal'
        this_item_is_not_rejected_for_any_of_channels = 'This item is not rejected for any of channels'
        we_ve_encountered_unexpected_error_while_processing_this_request_please_try_again_later_ = "We've encountered unexpected error while processing this request. Please try again later !"
        you_ve_reached_the_maximum_number_of_item_requests_you_can_make_this_week_you_ll_be_able_to_request_item_reviews_again_within_the_next_7_days_ = "You've reached the maximum number of item requests you can make this week. You'll be able to request item reviews again within the next 7 days."
        your_request_was_received_see_information_below_to_learn_more_ = 'Your request was received. See information below to learn more.'

    # Maps field name -> wire type, used by the SDK's (de)serialization layer.
    _field_types = {'handle': 'string', 'item_id': 'int', 'status': 'Status', 'use_cases': 'list<Object>'}

    def _get_field_enum_info(cls):
        """Return the enum value sets for fields typed as enums."""
        field_enum_info = {}
        field_enum_info['Status'] = CatalogItemAppealStatus.Status.__dict__.values()
        return field_enum_info
def process_data(data, tokens):
    """Notify (via Telegram) about each previously-unseen house listing.

    Args:
        data: iterable of raw house entries; each is parsed by
            ``extract_house_data`` (entries it cannot parse are skipped).
        tokens: list of already-processed tokens; new tokens are appended
            in place.

    Returns:
        The (mutated) ``tokens`` list, for caller convenience.
    """
    # Keep a set alongside the list: membership tests on the growing list
    # made the loop O(n^2) overall; the set makes each check O(1).
    seen = set(tokens)
    for house in data:
        house_data = extract_house_data(house)
        if (house_data is None):
            continue
        token = house_data['token']
        if (token in seen):
            continue
        seen.add(token)
        tokens.append(token)
        send_telegram_message(house_data)
        # Crude rate limit between outgoing messages.
        time.sleep(1)
    return tokens
class Ticket(BaseObject):
    """Support-ticket data object with lazy lookups of related objects via the API.

    NOTE(review): every read accessor below is immediately followed by a
    same-named mutator; the ``@property`` / ``@<name>.setter`` decorators
    appear to have been stripped during extraction (stray ``_topic.setter``
    and ``_agreements.setter`` lines remain as evidence). As written, the
    later definition of each name shadows the earlier one — confirm against
    the original source before relying on attribute-style access.
    """
    def __init__(self, api=None, assignee_id=None, brand_id=None, collaborator_ids=None, created_at=None, custom_fields=None, description=None, due_at=None, external_id=None, fields=None, forum_topic_id=None, group_id=None, has_incidents=None, id=None, organization_id=None, priority=None, problem_id=None, raw_subject=None, recipient=None, requester_id=None, satisfaction_rating=None, sharing_agreement_ids=None, status=None, subject=None, submitter_id=None, tags=None, type=None, updated_at=None, url=None, via=None, **kwargs):
        self.api = api
        self.assignee_id = assignee_id
        self.brand_id = brand_id
        self.collaborator_ids = collaborator_ids
        self.created_at = created_at
        self.custom_fields = custom_fields
        self.description = description
        self.due_at = due_at
        self.external_id = external_id
        self.fields = fields
        self.forum_topic_id = forum_topic_id
        self.group_id = group_id
        self.has_incidents = has_incidents
        self.id = id
        self.organization_id = organization_id
        self.priority = priority
        self.problem_id = problem_id
        self.raw_subject = raw_subject
        self.recipient = recipient
        self.requester_id = requester_id
        self.satisfaction_rating = satisfaction_rating
        self.sharing_agreement_ids = sharing_agreement_ids
        self.status = status
        self.subject = subject
        self.submitter_id = submitter_id
        self.tags = tags
        self.type = type
        self.updated_at = updated_at
        self.url = url
        self.via = via
        # Accept and set any extra attributes the API returned.
        for (key, value) in kwargs.items():
            setattr(self, key, value)
        # Attributes still None were not explicitly provided; drop them from
        # the dirty-attribute tracking so they are not sent back on update.
        for key in self.to_dict():
            if (getattr(self, key) is None):
                try:
                    self._dirty_attributes.remove(key)
                except KeyError:
                    continue
    def assignee(self):
        # Lazily fetch the assignee user object via the API.
        if (self.api and self.assignee_id):
            return self.api._get_user(self.assignee_id)
    def assignee(self, assignee):
        if assignee:
            self.assignee_id = assignee.id
            self._assignee = assignee
    def brand(self):
        if (self.api and self.brand_id):
            return self.api._get_brand(self.brand_id)
    def brand(self, brand):
        if brand:
            self.brand_id = brand.id
            self._brand = brand
    def collaborators(self):
        if (self.api and self.collaborator_ids):
            return self.api._get_users(self.collaborator_ids)
    def collaborators(self, collaborators):
        if collaborators:
            self.collaborator_ids = [o.id for o in collaborators]
            self._collaborators = collaborators
    def created(self):
        # Parse the ISO timestamp string into a datetime on access.
        if self.created_at:
            return dateutil.parser.parse(self.created_at)
    def created(self, created):
        if created:
            self.created_at = created
    def due(self):
        if self.due_at:
            return dateutil.parser.parse(self.due_at)
    def due(self, due):
        if due:
            self.due_at = due
    def forum_topic(self):
        if (self.api and self.forum_topic_id):
            return self.api._get_topic(self.forum_topic_id)
    # NOTE(review): stray remnant of a stripped setter decorator — confirm.
    _topic.setter
    def forum_topic(self, forum_topic):
        if forum_topic:
            self.forum_topic_id = forum_topic.id
            self._forum_topic = forum_topic
    def group(self):
        if (self.api and self.group_id):
            return self.api._get_group(self.group_id)
    def group(self, group):
        if group:
            self.group_id = group.id
            self._group = group
    def organization(self):
        if (self.api and self.organization_id):
            return self.api._get_organization(self.organization_id)
    def organization(self, organization):
        if organization:
            self.organization_id = organization.id
            self._organization = organization
    def problem(self):
        if (self.api and self.problem_id):
            return self.api._get_problem(self.problem_id)
    def problem(self, problem):
        if problem:
            self.problem_id = problem.id
            self._problem = problem
    def requester(self):
        if (self.api and self.requester_id):
            return self.api._get_user(self.requester_id)
    def requester(self, requester):
        if requester:
            self.requester_id = requester.id
            self._requester = requester
    def sharing_agreements(self):
        if (self.api and self.sharing_agreement_ids):
            return self.api._get_sharing_agreements(self.sharing_agreement_ids)
    # NOTE(review): stray remnant of a stripped setter decorator — confirm.
    _agreements.setter
    def sharing_agreements(self, sharing_agreements):
        if sharing_agreements:
            self.sharing_agreement_ids = [o.id for o in sharing_agreements]
            self._sharing_agreements = sharing_agreements
    def submitter(self):
        if (self.api and self.submitter_id):
            return self.api._get_user(self.submitter_id)
    def submitter(self, submitter):
        if submitter:
            self.submitter_id = submitter.id
            self._submitter = submitter
    def updated(self):
        if self.updated_at:
            return dateutil.parser.parse(self.updated_at)
    def updated(self, updated):
        if updated:
            self.updated_at = updated
class SlackNotification():
    """Sends pipeline-prepare notifications to Slack.

    Renders the ``slack/pipeline-prepare-ran.j2`` template with app, env,
    short config-commit SHA and a UTC timestamp collected at construction.
    """

    def __init__(self, app=None, env=None, prop_path=None):
        """Load pipeline properties and assemble the template context."""
        stamp = time.strftime('%B %d, %Y %H:%M:%S %Z', time.gmtime())
        self.settings = get_properties(prop_path)
        commit_short = self.settings['pipeline']['config_commit'][0:11]
        self.info = {'app': app, 'env': env, 'config_commit_short': commit_short, 'timestamp': stamp}

    def post_message(self):
        """Post the rendered notification to the fixed production channel."""
        rendered = get_template(template_file='slack/pipeline-prepare-ran.j2', info=self.info)
        post_slack_message(message=rendered, channel='#deployments-prod', username='pipeline-bot', icon_emoji=':gear:')

    def notify_slack_channel(self):
        """Post the rendered notification to the channel configured under
        pipeline.notifications.slack, if one is set."""
        rendered = get_template(template_file='slack/pipeline-prepare-ran.j2', info=self.info)
        if self.settings['pipeline']['notifications']['slack']:
            post_slack_message(message=rendered, channel=self.settings['pipeline']['notifications']['slack'], username='pipeline-bot', icon_emoji=':gear:')
def success(args, reactor, snmpEngine):
    """Callback for an SNMP GETNEXT (walk) response.

    On error, prints the error status against the module-level ``hostname``;
    otherwise prints every variable binding in the returned table. Continues
    the walk with another GETNEXT until end-of-MIB is reached.
    """
    (errorStatus, errorIndex, varBindTable) = args
    if errorStatus:
        # errorIndex is 1-based; a falsy index falls back to '?'.
        print(('%s: %s at %s' % (hostname, errorStatus.prettyPrint(), ((errorIndex and varBindTable[0][(int(errorIndex) - 1)][0]) or '?'))))
    else:
        for varBindRow in varBindTable:
            for varBind in varBindRow:
                print(' = '.join([x.prettyPrint() for x in varBind]))
    if (not isEndOfMib(varBindTable[(- 1)])):
        # Chain the next GETNEXT request from the last row of this response.
        return getnext(reactor, snmpEngine, *varBindTable[(- 1)])
class OptionSeriesHeatmapSonificationDefaultinstrumentoptionsActivewhen(Options):
    """Generated Highcharts option wrapper for sonification activeWhen settings.

    NOTE(review): each option appears as a same-named getter/setter pair; the
    ``@property`` / ``@<name>.setter`` decorators appear to have been stripped
    during extraction — confirm against the original source.
    """
    def crossingDown(self):
        return self._config_get(None)
    def crossingDown(self, num: float):
        self._config(num, js_type=False)
    def crossingUp(self):
        return self._config_get(None)
    def crossingUp(self, num: float):
        self._config(num, js_type=False)
    def max(self):
        return self._config_get(None)
    def max(self, num: float):
        self._config(num, js_type=False)
    def min(self):
        return self._config_get(None)
    def min(self, num: float):
        self._config(num, js_type=False)
    def prop(self):
        return self._config_get(None)
    def prop(self, text: str):
        self._config(text, js_type=False)
def _sign_in_with_password(email, password, api_key):
    """Exchange an email/password pair for an ID token.

    NOTE(review): the ``returnSecureToken``/``idToken`` fields match the
    Firebase verifyPassword identity endpoint — confirm.

    Raises:
        requests.HTTPError: if the identity endpoint rejects the request.
    """
    payload = {'email': email, 'password': password, 'returnSecureToken': True}
    response = requests.request('post', _verify_password_url, params={'key': api_key}, json=payload)
    response.raise_for_status()
    token = response.json().get('idToken')
    return token
def test_map_value_judged_only():
    """MAP@k over three users with ranked predictions; with
    no_feedback_users=True, users without positive feedback still count.
    Expected: 1/3 at k=1 and k=2, 4/9 at k=3 (user 'c' hits at rank 3)."""
    current = pd.DataFrame(data=dict(user_id=['a', 'a', 'a', 'b', 'b', 'b', 'c', 'c', 'c'], prediction=[1, 2, 3, 1, 2, 3, 1, 2, 3], target=[1, 0, 0, 0, 0, 0, 0, 0, 1]))
    metric = MAPKMetric(k=3, no_feedback_users=True)
    report = Report(metrics=[metric])
    column_mapping = ColumnMapping(recommendations_type=RecomType.RANK)
    report.run(reference_data=None, current_data=current, column_mapping=column_mapping)
    results = metric.get_result()
    # One MAP value per cutoff k = 1, 2, 3.
    assert (len(results.current) == 3)
    assert np.isclose(results.current[1], 0.3333333)
    assert np.isclose(results.current[2], 0.3333333)
    assert np.isclose(results.current[3], 0.4444444)
(NOTICES_SERVED, status_code=HTTP_200_OK, response_model=List[LastServedConsentSchema])
_limiter.limit(CONFIG.security.public_request_rate_limit)
def save_consent_served_to_user(*, db: Session=Depends(get_db), data: RecordConsentServedRequest, request: Request, response: Response) -> List[LastServedNotice]:
    """Record that consent notices were served, keyed to a fides user device id.

    Validates the supplied privacy-notice history ids, resolves (or creates)
    the device-id-based identity, then persists the served-consent records.
    Known domain errors surface as HTTP 400.
    """
    verify_privacy_notice_and_historical_records(db=db, notice_history_list=data.privacy_notice_history_ids)
    fides_user_provided_identity = get_or_create_fides_user_device_id_provided_identity(db=db, identity_data=data.browser_identity)
    logger.info('Recording consent served with respect to fides user device id')
    try:
        return save_consent_served_for_identities(db=db, verified_provided_identity=None, fides_user_provided_identity=fides_user_provided_identity, request=request, original_request_data=data)
    except (IdentityNotFoundException, PrivacyNoticeHistoryNotFound, SystemNotFound) as exc:
        raise HTTPException(status_code=400, detail=exc.args[0])
class APILogin():
    """Detects login/logout endpoints in a parsed Postman collection, performs
    the login request, and persists auth material (token or cookie) via
    ``update_value`` for later scan stages."""
    def __init__(self):
        self.api_logger = zapscan.logger()
        self.parse_data = parsers.PostmanParser()
    def fetch_logintoken(self, url, method, headers, body=None, relogin=None):
        """Perform the login request and record the auth token/cookie on success.

        TODO(review): if ``method`` is GET, or ``json.loads`` fails, the
        ``login_response`` variable used in the token loop below is never
        assigned — potential NameError. Confirm and harden.
        """
        if (method.upper() == 'GET'):
            login_request = requests.get(url, headers=headers)
        elif (method.upper() == 'POST'):
            login_request = requests.post(url, headers=headers, json=body)
            logs.logging.info('HTTP response of login API : %s %s %s', login_request.status_code, headers, body)
        else:
            print('[-]Invalid request')
            sys.exit(1)
        try:
            login_response = json.loads(login_request.text)
        except:
            # Non-JSON login responses are tolerated (cookie-based auth).
            pass
        if (relogin is not None):
            print("Session fixation attack won't be tested since it failed to re-login.")
            return
        auth_names = get_value('config.property', 'login', 'auth_names')
        auth_type = get_value('config.property', 'login', 'auth_type')
        auth_names = auth_names.split(',')
        auth_status = False
        if (auth_type == 'cookie'):
            if login_request.headers['Set-Cookie']:
                auth_cookie = {'cookie': login_request.headers['Set-Cookie']}
                print('[+]Login successful')
                update_value('login', 'auth_success', 'Y')
                update_value('login', 'cookie', auth_cookie)
                auth_status = True
        else:
            # Token-based auth: look for any configured token name in the body.
            for auth_name in auth_names:
                if (auth_name in login_response):
                    auth_success_token = login_response[auth_name]
                    print('[+]Login successful')
                    update_value('login', 'auth_success', 'Y')
                    update_value('login', 'auth_success_param', auth_name)
                    update_value('login', 'auth_success_token', auth_success_token)
                    auth_status = True
                    break
        if (not auth_status):
            # Ask the operator whether to continue unauthenticated.
            login_response = input((((('Failed to login. Do you want to continue scanning without cookie(y/n),' + self.api_logger.G) + url) + ': ') + self.api_logger.W))
            if ((login_response == 'Y') or (login_response == 'y')):
                return
            elif ((login_response == 'n') or (login_response == 'N')):
                sys.exit(1)
    def create_urllist(self, collection_data):
        """Extract the raw URL strings from the collection entries."""
        url_list = []
        for data in collection_data:
            try:
                url = data['url']['raw']
                url_list.append(url)
            except:
                # Some entries store the URL directly as a string.
                url = data['url']
                url_list.append(url)
        return url_list
    def verify_logout(self, collection_data, api_url):
        """Persist the logout request details for the matched logout URL.

        TODO(review): if no entry matches ``api_url``, ``logout_data`` is
        referenced while unbound — potential NameError. Confirm and harden.
        """
        if (api_url is not None):
            for data in collection_data:
                if (data['url'] == api_url):
                    (url, method, headers, body) = (data['url'], data['method'], data['headers'], data['body'])
                    logout_data = {'logouturl': url, 'logoutmethod': method, 'logoutheaders': headers, 'logoutbody': body, 'logoutresult': 'Y'}
                    break
            for (key, value) in list(logout_data.items()):
                update_value('logout', key, value)
            return
        else:
            print('Failed')
    def auth_verify(self, collection_data, api):
        """Find a login/logout URL by keyword match, confirmed interactively.

        TODO(review): ``api_types`` is unbound if ``api`` is neither 'login'
        nor 'logout'; also the else-branch returns after the first
        non-matching keyword, skipping remaining URLs — confirm intent.
        """
        login_names = ['login', 'signin', 'authenticate']
        logout_names = ['logout', 'signout']
        if (api == 'login'):
            api_types = login_names
        elif (api == 'logout'):
            api_types = logout_names
        url_list = self.create_urllist(collection_data)
        for url in url_list:
            for name in api_types:
                if (name in url):
                    if (api == 'login'):
                        result = input((((('Is it a correct login URL(y/n),' + self.api_logger.G) + url) + ': ') + self.api_logger.W))
                    elif (api == 'logout'):
                        result = input((((('Is it a correct logout URL(y/n),' + self.api_logger.G) + url) + ': ') + self.api_logger.W))
                    if ((result == 'y') or (result == 'Y')):
                        return (url, api)
                    else:
                        return (None, None)
        return (None, None)
    def verify_login(self, collection_data):
        """Locate the login URL, perform login, then record logout details.

        TODO(review): ``fetch_logintoken`` has no explicit return on success,
        so ``login_token == True`` below can never be satisfied — the logout
        detection/persistence branch looks unreachable. Confirm.
        """
        (api_url, api_type) = self.auth_verify(collection_data, 'login')
        logs.logging.info('API URL for login is : %s', api_url)
        if (api_url is None):
            auth_response = input(((self.api_logger.Y + '[-]Failed to detect login url. Do you want to contiune without authentication?(y/n):') + self.api_logger.W))
            if ((auth_response == 'y') or (auth_response == 'Y')):
                return
            else:
                sys.exit(1)
        for data in collection_data:
            if (data['url'] == api_url):
                (url, method, headers, body) = (data['url'], data['method'], data['headers'], data['body'])
                if (api_type == 'login'):
                    if body:
                        # Request bodies are stored base64-encoded in the collection.
                        body = json.loads(base64.b64decode(body))
                        login_token = self.fetch_logintoken(url, method, headers, body)
                    else:
                        login_token = self.fetch_logintoken(url, method, headers)
                    if (login_token == True):
                        logs.logging.info('Login successfully : %s', url)
                        (logout_url, api_type) = self.auth_verify(collection_data, 'logout')
                        self.verify_logout(collection_data, logout_url)
                    auth_data = {'loginurl': url, 'loginmethod': method, 'loginheaders': headers, 'loginbody': body, 'loginresult': 'Y'}
                    for (key, value) in list(auth_data.items()):
                        update_value('login', key, value)
                    return
    def parse_logindata(self, loginurl):
        """Return (url, headers, method, body) for the entry matching ``loginurl``.

        TODO(review): returns None implicitly when no entry matches — callers
        unpacking the tuple would raise. Confirm expected behavior.
        """
        for data in self.parse_data.api_lst:
            if (loginurl == data['url']):
                (headers, method, body) = (data['headers'], data['method'], '')
                if (data['body'] != ''):
                    body = json.loads(base64.b64decode(data['body']))
                return (loginurl, headers, method, body)
class IDispatch(IUnknown):
    """ctypes/COM definition of the IDispatch automation interface.

    Provides late-bound access: name-to-DISPID resolution (GetIDsOfNames)
    and dynamic method/property invocation (Invoke).

    NOTE(review): the ``_iid_`` GUID string looks truncated (missing digit
    groups) — likely mangled during extraction; confirm against the original
    source (IDispatch's canonical IID is {00020400-0000-0000-C000-000000000046}).
    """
    if TYPE_CHECKING:
        _disp_methods_ = hints.AnnoField()
        _GetTypeInfo = hints.AnnoField()
        __com_GetIDsOfNames = hints.AnnoField()
        __com_Invoke = hints.AnnoField()
    _iid_ = GUID('{-0000-0000-C000-}')
    _methods_ = [COMMETHOD([], HRESULT, 'GetTypeInfoCount', (['out'], POINTER(UINT))), COMMETHOD([], HRESULT, 'GetTypeInfo', (['in'], UINT, 'index'), (['in'], LCID, 'lcid', 0), (['out'], POINTER(POINTER(IUnknown)))), STDMETHOD(HRESULT, 'GetIDsOfNames', [POINTER(IID), POINTER(c_wchar_p), UINT, LCID, POINTER(DISPID)]), STDMETHOD(HRESULT, 'Invoke', [DISPID, POINTER(IID), LCID, WORD, POINTER(DISPPARAMS), POINTER(VARIANT), POINTER(EXCEPINFO), POINTER(UINT)])]
    def GetTypeInfo(self, index, lcid=0):
        """Return the ITypeInfo for the given type-info index."""
        # Imported lazily to avoid an import cycle with comtypes.typeinfo.
        import comtypes.typeinfo
        result = self._GetTypeInfo(index, lcid)
        return result.QueryInterface(comtypes.typeinfo.ITypeInfo)
    def GetIDsOfNames(self, *names, **kw):
        """Map member names to their DISPIDs; accepts an optional ``lcid`` kwarg."""
        lcid = kw.pop('lcid', 0)
        assert (not kw)
        arr = (c_wchar_p * len(names))(*names)
        ids = (DISPID * len(names))()
        self.__com_GetIDsOfNames(riid_null, arr, len(names), lcid, ids)
        return ids[:]
    def _invoke(self, memid, invkind, lcid, *args):
        """Low-level Invoke helper: pack ``args`` and return the result value."""
        var = VARIANT()
        argerr = c_uint()
        dp = DISPPARAMS()
        if args:
            array = (VARIANT * len(args))()
            # DISPPARAMS expects arguments in reverse order.
            for (i, a) in enumerate(args[::(- 1)]):
                array[i].value = a
            dp.cArgs = len(args)
            if (invkind in (DISPATCH_PROPERTYPUT, DISPATCH_PROPERTYPUTREF)):
                # Property puts pass the new value as the named DISPID_PROPERTYPUT arg.
                dp.cNamedArgs = 1
                dp.rgdispidNamedArgs = pointer(DISPID(DISPID_PROPERTYPUT))
            dp.rgvarg = array
        self.__com_Invoke(memid, riid_null, lcid, invkind, dp, var, None, argerr)
        return var._get_value(dynamic=True)
    def Invoke(self, dispid, *args, **kw):
        """Invoke a member by DISPID, translating COM errors to richer COMError
        details (exception info, bad-parameter index, or type-mismatch info)."""
        _invkind = kw.pop('_invkind', 1)
        _lcid = kw.pop('_lcid', 0)
        if kw:
            raise ValueError('named parameters not yet implemented')
        result = VARIANT()
        excepinfo = EXCEPINFO()
        argerr = c_uint()
        if (_invkind in (DISPATCH_PROPERTYPUT, DISPATCH_PROPERTYPUTREF)):
            array = (VARIANT * len(args))()
            # Arguments are passed in reverse order per DISPPARAMS convention.
            for (i, a) in enumerate(args[::(- 1)]):
                array[i].value = a
            dp = DISPPARAMS()
            dp.cArgs = len(args)
            dp.cNamedArgs = 1
            dp.rgvarg = array
            dp.rgdispidNamedArgs = pointer(DISPID(DISPID_PROPERTYPUT))
        else:
            array = (VARIANT * len(args))()
            for (i, a) in enumerate(args[::(- 1)]):
                array[i].value = a
            dp = DISPPARAMS()
            dp.cArgs = len(args)
            dp.cNamedArgs = 0
            dp.rgvarg = array
        try:
            self.__com_Invoke(dispid, riid_null, _lcid, _invkind, byref(dp), byref(result), byref(excepinfo), byref(argerr))
        except COMError as err:
            (hresult, text, details) = err.args
            if (hresult == DISP_E_EXCEPTION):
                # Replace generic details with the server-supplied exception info.
                details = (excepinfo.bstrDescription, excepinfo.bstrSource, excepinfo.bstrHelpFile, excepinfo.dwHelpContext, excepinfo.scode)
                raise COMError(hresult, text, details)
            elif (hresult == DISP_E_PARAMNOTFOUND):
                raise COMError(hresult, text, argerr.value)
            elif (hresult == DISP_E_TYPEMISMATCH):
                # argerr is 0-based; report a 1-based parameter position.
                raise COMError(hresult, text, (('TypeError: Parameter %s' % (argerr.value + 1)), args))
            raise
        return result._get_value(dynamic=True)
class TestSuperFencesClassesIdsAttrListNoPygmentsOnPre(util.MdCase):
    """SuperFences + attr_list with Pygments disabled and ``code_attr_on_pre``:
    classes, ids, and attributes from the fence header should land on the
    <pre> element rather than the inner <code>."""
    extension = ['pymdownx.highlight', 'pymdownx.superfences', 'markdown.extensions.attr_list']
    extension_configs = {'pymdownx.highlight': {'use_pygments': False, 'code_attr_on_pre': True}}
    def test_classes(self):
        """Custom classes are appended after the language and highlight classes."""
        self.check_markdown('\n            ```{.python .more}\n            import test\n            ```\n            ', '\n            <pre class="language-python highlight more"><code>import test</code></pre>\n            ', True)
    def test_id(self):
        """An id from the fence header is emitted on the <pre> tag."""
        self.check_markdown('\n            ```{.python #id}\n            import test\n            ```\n            ', '\n            <pre id="id" class="language-python highlight"><code>import test</code></pre>\n            ', True)
    def test_attr(self):
        """Arbitrary key="value" attributes are emitted on the <pre> tag."""
        self.check_markdown('\n            ```{.python #id attr="test"}\n            import test\n            ```\n            ', '\n            <pre id="id" class="language-python highlight" attr="test"><code>import test</code></pre>\n            ', True)
class OptionPlotoptionsSolidgaugeOnpointConnectoroptions(Options):
    """Generated Highcharts option wrapper for solid-gauge onPoint connector options.

    NOTE(review): each option appears as a same-named getter/setter pair; the
    ``@property`` / ``@<name>.setter`` decorators appear to have been stripped
    during extraction — confirm against the original source.
    """
    def dashstyle(self):
        return self._config_get(None)
    def dashstyle(self, text: str):
        self._config(text, js_type=False)
    def stroke(self):
        return self._config_get(None)
    def stroke(self, text: str):
        self._config(text, js_type=False)
    def width(self):
        # Default connector width is 1.
        return self._config_get(1)
    def width(self, num: float):
        self._config(num, js_type=False)
def test_extruded_periodic_1_layer():
    """Mass matrices on a single-layer periodic extruded interval mesh match
    the expected reference values for vertical CG degrees 1 and 2."""
    base = UnitIntervalMesh(1)
    extruded = ExtrudedMesh(base, layers=1, extrusion_type='uniform', periodic=True)
    # Expected mass matrices keyed by the vertical CG degree.
    cases = [
        (1, np.array([[1.0]], dtype=ScalarType)),
        (2, np.array([[(1.0 / 5.0), (2.0 / 15.0)], [(2.0 / 15.0), (8.0 / 15)]], dtype=ScalarType)),
    ]
    for degree, expected in cases:
        elem = TensorProductElement(FiniteElement('DG', base.ufl_cell(), 0), FiniteElement('CG', 'interval', degree))
        space = FunctionSpace(extruded, elem)
        trial = TrialFunction(space)
        test = TestFunction(space)
        mass = assemble((inner(trial, test) * dx))
        assert np.allclose(mass.M.values, expected)
def test_integration_output_psv(capsys):
    """CISAudit.output(format='psv') emits pipe-separated rows: a header line
    followed by section/subsection headers and individual test results."""
    CISAudit().output(data=data, format='psv')
    (output, error) = capsys.readouterr()
    assert (error == '')
    # Split the captured output once instead of re-splitting for every row.
    lines = output.split('\n')
    assert (lines[0] == 'ID|Description|Level|Result|Duration')
    assert (lines[1] == '1|"section header"|||')
    assert (lines[2] == '1.1|"subsection header"|||')
    assert (lines[3] == '1.1.1|"test 1.1.1"|1|Pass|1ms')
    assert (lines[4] == '2|"section header"|||')
    assert (lines[5] == '2.1|"test 2.1"|1|Fail|10ms')
    assert (lines[6] == '2.2|"test 2.2"|2|Pass|100ms')
    assert (lines[7] == '2.3|"test 2.3"|1|Not Implemented|')
class ServiceIdAndVersionString(ModelNormal):
    """Generated OpenAPI model holding a service id plus a version string.

    NOTE(review): the bare ``_property`` and ``_js_args_to_python_args`` lines
    look like stripped ``@cached_property`` / ``@convert_js_args_to_python_args``
    decorators from the generator template, and ``_from_openapi_data`` appears
    to have lost its ``@classmethod`` decorator — confirm against the original.
    """
    allowed_values = {}
    validations = {}
    _property
    def additional_properties_type():
        # Any type is accepted for additional (undeclared) properties.
        return (bool, date, datetime, dict, float, int, list, str, none_type)
    _nullable = False
    _property
    def openapi_types():
        # Declared property name -> accepted type tuple.
        return {'service_id': (str,), 'version': (str,)}
    _property
    def discriminator():
        return None
    attribute_map = {'service_id': 'service_id', 'version': 'version'}
    read_only_vars = {'service_id', 'version'}
    _composed_schemas = {}
    _js_args_to_python_args
    def _from_openapi_data(cls, *args, **kwargs):
        """Construct an instance from deserialized API data (read-only fields allowed)."""
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        self = super(OpenApiModel, cls).__new__(cls)
        if args:
            raise ApiTypeError(('Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments.' % (args, self.__class__.__name__)), path_to_item=_path_to_item, valid_classes=(self.__class__,))
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = (_visited_composed_classes + (self.__class__,))
        for (var_name, var_value) in kwargs.items():
            # Optionally drop unknown keys when the configuration says so.
            if ((var_name not in self.attribute_map) and (self._configuration is not None) and self._configuration.discard_unknown_keys and (self.additional_properties_type is None)):
                continue
            setattr(self, var_name, var_value)
        return self
    required_properties = set(['_data_store', '_check_type', '_spec_property_naming', '_path_to_item', '_configuration', '_visited_composed_classes'])
    _js_args_to_python_args
    def __init__(self, *args, **kwargs):
        """Construct an instance from user input; read-only fields are rejected."""
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        if args:
            raise ApiTypeError(('Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments.' % (args, self.__class__.__name__)), path_to_item=_path_to_item, valid_classes=(self.__class__,))
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = (_visited_composed_classes + (self.__class__,))
        for (var_name, var_value) in kwargs.items():
            if ((var_name not in self.attribute_map) and (self._configuration is not None) and self._configuration.discard_unknown_keys and (self.additional_properties_type is None)):
                continue
            setattr(self, var_name, var_value)
            # Read-only fields may only come through _from_openapi_data.
            if (var_name in self.read_only_vars):
                raise ApiAttributeError(f'`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate class with read only attributes.')
def syncpwm(bcmpin, value):
    """Propagate a PWM value change to the first enabled plugin-213 task bound
    to the given BCM pin, updating its user variable and firing rule processing.
    """
    for x in range(0, len(Settings.Tasks)):
        # Task slots may hold False/None placeholders; skip those.
        if (Settings.Tasks[x] and (type(Settings.Tasks[x]) is not bool)):
            if Settings.Tasks[x].enabled:
                # Plugin 213 tasks whose first device pin matches the BCM pin.
                if ((Settings.Tasks[x].pluginid == 213) and (Settings.Tasks[x].taskdevicepin[0] == bcmpin)):
                    try:
                        Settings.Tasks[x].uservar[0] = value
                    except:
                        pass
                    if (Settings.Tasks[x].valuenames[0] != ''):
                        # Trigger rules with "taskname#valuename=value".
                        commands.rulesProcessing(((((Settings.Tasks[x].taskname + '#') + Settings.Tasks[x].valuenames[0]) + '=') + str(value)), rpieGlobals.RULE_USER)
                        Settings.Tasks[x].plugin_senddata()
                    break
class OptionSeriesVectorSonificationDefaultspeechoptionsMappingPlaydelay(Options):
    """Generated Highcharts option wrapper for sonification playDelay mapping.

    NOTE(review): each option appears as a same-named getter/setter pair; the
    ``@property`` / ``@<name>.setter`` decorators appear to have been stripped
    during extraction — confirm against the original source.
    """
    def mapFunction(self):
        return self._config_get(None)
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)
    def mapTo(self):
        return self._config_get(None)
    def mapTo(self, text: str):
        self._config(text, js_type=False)
    def max(self):
        return self._config_get(None)
    def max(self, num: float):
        self._config(num, js_type=False)
    def min(self):
        return self._config_get(None)
    def min(self, num: float):
        self._config(num, js_type=False)
    def within(self):
        return self._config_get(None)
    def within(self, value: Any):
        self._config(value, js_type=False)
def fine(node):
    """Return a short string label describing the IR node's category/identity.

    Unknown node types yield None, mirroring the original isinstance chain.
    """
    # Ordered dispatch table: the isinstance checks must run in this exact
    # order, since earlier classes take precedence over later ones.
    dispatch = (
        (IntConst, lambda n: 'INT({})({})'.format(n.width, n.value)),
        (StringConst, lambda n: 'STR'),
        (SwitchTable, lambda n: 'SWITCH'),
        (Flag, lambda n: '{}:{}'.format(n.base_flag, n.index)),
        (Insn, lambda n: n.name),
        (CodeOffset, lambda n: 'CODE'),
        (VirtualElm, lambda n: 'VIRTUAL'),
        (IndirectOffset, lambda n: '{}:O:{}'.format(n.base_pointer, n.offset)),
        (TempOffset, lambda n: '{}:T:{}'.format(n.base_pointer, n.offset)),
        (GivOffset, lambda n: n.offset),
        (DirectOffset, lambda n: 'DIRECT'),
        (StringArrayOffset, lambda n: 'SARRAY'),
        (GivReg, lambda n: '{}:GIVR:{}'.format(n.base_register, n.index)),
        (Reg, lambda n: '{}:R:{}'.format(n.base_register, n.index)),
        (Function, lambda n: 'FUNC'),
        (Ttype, lambda n: '{}:TTYPE'.format(fine(n.owner))),
    )
    for node_class, describe in dispatch:
        if isinstance(node, node_class):
            return describe(node)
    return None
class TestDialogueBase():
def setup(cls):
    """Build fixtures: a self-initiated dialogue, an opponent-initiated one,
    and three valid DefaultMessages forming a short exchange.

    NOTE(review): the bare ``cls`` parameter suggests a stripped classmethod
    or pytest setup decorator — confirm against the original source.
    """
    cls.incomplete_reference = (str(1), '')
    cls.complete_reference = (str(1), str(1))
    cls.opponent_address = 'agent 2'
    cls.agent_address = 'agent 1'
    cls.dialogue_label = DialogueLabel(dialogue_reference=cls.incomplete_reference, dialogue_opponent_addr=cls.opponent_address, dialogue_starter_addr=cls.agent_address)
    cls.dialogue = Dialogue(dialogue_label=cls.dialogue_label)
    cls.dialogue_label_opponent_started = DialogueLabel(dialogue_reference=cls.complete_reference, dialogue_opponent_addr=cls.opponent_address, dialogue_starter_addr=cls.opponent_address)
    cls.dialogue_opponent_started = Dialogue(dialogue_label=cls.dialogue_label_opponent_started)
    # Message 1: sent by self, opens the dialogue.
    cls.valid_message_1_by_self = DefaultMessage(dialogue_reference=(str(1), ''), performative=DefaultMessage.Performative.BYTES, content=b'Hello')
    cls.valid_message_1_by_self.sender = cls.agent_address
    cls.valid_message_1_by_self.to = cls.opponent_address
    # Message 2: reply from the opponent.
    cls.valid_message_2_by_other = DefaultMessage(dialogue_reference=(str(1), str(1)), message_id=(- 1), target=1, performative=DefaultMessage.Performative.BYTES, content=b'Hello back')
    cls.valid_message_2_by_other.sender = cls.opponent_address
    cls.valid_message_2_by_other.to = cls.agent_address
    # Message 3: second message from self, targeting the opponent's reply.
    cls.valid_message_3_by_self = DefaultMessage(dialogue_reference=(str(1), str(1)), message_id=2, target=(- 1), performative=DefaultMessage.Performative.BYTES, content=b'Hello back 2')
    cls.valid_message_3_by_self.sender = cls.agent_address
    cls.valid_message_3_by_self.to = cls.opponent_address
def test_inner_classes(self):
    """Dialogue.Role and Dialogue.EndState stringify to their expected values."""
    assert (str(Dialogue.Role.ROLE1) == 'role1')
    assert (str(Dialogue.Role.ROLE2) == 'role2')
    assert (str(Dialogue.EndState.SUCCESSFUL) == '0')
    assert (str(Dialogue.EndState.FAILED) == '1')
def test_dialogue_properties(self):
    """A fresh self-initiated dialogue exposes the expected labels, role,
    rules, message class, and empty message history."""
    assert (self.dialogue.dialogue_label == self.dialogue_label)
    assert (self.dialogue.incomplete_dialogue_label == self.dialogue_label)
    assert (self.dialogue.dialogue_labels == {self.dialogue_label})
    assert (self.dialogue.self_address == self.agent_address)
    assert (self.dialogue.role == Dialogue.Role.ROLE1)
    assert (str(self.dialogue.role) == 'role1')
    # Rules: dialogues open with BYTES, terminate with ERROR.
    assert (self.dialogue.rules.initial_performatives == frozenset({DefaultMessage.Performative.BYTES}))
    assert (self.dialogue.rules.terminal_performatives == frozenset({DefaultMessage.Performative.ERROR}))
    assert (self.dialogue.rules.valid_replies == {DefaultMessage.Performative.BYTES: frozenset({DefaultMessage.Performative.BYTES, DefaultMessage.Performative.ERROR}), DefaultMessage.Performative.ERROR: frozenset()})
    assert (self.dialogue.rules.get_valid_replies(DefaultMessage.Performative.BYTES) == frozenset({DefaultMessage.Performative.BYTES, DefaultMessage.Performative.ERROR}))
    assert (self.dialogue.rules.get_valid_replies(DefaultMessage.Performative.ERROR) == frozenset({}))
    assert (self.dialogue.message_class == DefaultMessage)
    assert self.dialogue.is_self_initiated
    # No messages have been exchanged yet.
    assert (self.dialogue.last_incoming_message is None)
    assert (self.dialogue.last_outgoing_message is None)
    assert (self.dialogue.last_message is None)
    assert self.dialogue.is_empty
def test_counterparty_from_message(self):
assert (self.dialogue._counterparty_from_message(self.valid_message_1_by_self) == self.opponent_address)
assert (self.dialogue._counterparty_from_message(self.valid_message_2_by_other) == self.opponent_address)
def test_is_message_by_self(self):
assert self.dialogue._is_message_by_self(self.valid_message_1_by_self)
assert (not self.dialogue._is_message_by_self(self.valid_message_2_by_other))
def test_is_message_by_other(self):
assert (not self.dialogue._is_message_by_other(self.valid_message_1_by_self))
assert self.dialogue._is_message_by_other(self.valid_message_2_by_other)
def test_try_get_message(self):
assert (self.dialogue.get_message_by_id(self.valid_message_1_by_self.message_id) is None)
self.dialogue._update(self.valid_message_1_by_self)
assert (self.dialogue.get_message_by_id(self.valid_message_1_by_self.message_id) == self.valid_message_1_by_self)
assert (self.dialogue.get_message_by_id(self.valid_message_2_by_other.message_id) is None)
self.dialogue._update(self.valid_message_2_by_other)
assert (self.dialogue.get_message_by_id(self.valid_message_2_by_other.message_id) == self.valid_message_2_by_other)
def test_has_message_id(self):
assert (self.dialogue._has_message_id(1) is False)
self.dialogue._update(self.valid_message_1_by_self)
assert (self.dialogue._has_message_id(1) is True)
assert (self.dialogue._has_message_id(2) is False)
def test_update_positive(self):
self.dialogue._update(self.valid_message_1_by_self)
assert (self.dialogue.last_outgoing_message == self.valid_message_1_by_self)
def test_update_positive_multiple_messages_by_self(self):
self.dialogue._update(self.valid_message_1_by_self)
valid_message_2_by_self = DefaultMessage(dialogue_reference=(str(1), ''), message_id=2, target=1, performative=DefaultMessage.Performative.BYTES, content=b'Hello back')
valid_message_2_by_self.sender = self.agent_address
valid_message_2_by_self.to = self.opponent_address
self.dialogue._update(valid_message_2_by_self)
assert (self.dialogue.last_message.message_id == 2)
def test_terminal_state_callback(self):
called = False
def callback(dialogue):
nonlocal called
called = True
self.dialogue.add_terminal_state_callback(callback)
self.dialogue._update(self.valid_message_1_by_self)
self.dialogue.reply(target_message=self.valid_message_1_by_self, performative=DefaultMessage.Performative.ERROR, error_code=ErrorCode.UNSUPPORTED_PROTOCOL, error_msg='oops', error_data={})
assert called
def test_update_negative_is_valid_next_message_fails(self):
invalid_message_1_by_self = DefaultMessage(dialogue_reference=(str(1), ''), message_id=200, target=1, performative=DefaultMessage.Performative.BYTES, content=b'Hello')
invalid_message_1_by_self.sender = self.agent_address
invalid_message_1_by_self.to = self.opponent_address
with pytest.raises(InvalidDialogueMessage, match='Message .* is invalid with respect to this dialogue. Error: Invalid message_id. Expected .*. Found 200.'):
self.dialogue._update(invalid_message_1_by_self)
assert (self.dialogue.last_outgoing_message is None)
def test_update_dialogue_negative_message_does_not_belong_to_dialogue(self):
invalid_message_1_by_self = DefaultMessage(dialogue_reference=(str(2), ''), performative=DefaultMessage.Performative.BYTES, content=b'Hello')
invalid_message_1_by_self.sender = self.agent_address
invalid_message_1_by_self.to = self.opponent_address
with pytest.raises(InvalidDialogueMessage) as cm:
self.dialogue._update(invalid_message_1_by_self)
assert (str(cm.value) == 'The message 1 does not belong to this dialogue.The dialogue reference of the message is {}, while the dialogue reference of the dialogue is {}'.format(invalid_message_1_by_self.dialogue_reference, self.dialogue.dialogue_label.dialogue_reference))
assert self.dialogue.is_empty
def test_is_belonging_to_dialogue(self):
valid_message_2_by_self = DefaultMessage(dialogue_reference=(str(2), ''), performative=DefaultMessage.Performative.BYTES, content=b'Hello')
valid_message_2_by_self.sender = self.agent_address
valid_message_2_by_self.to = self.opponent_address
assert self.dialogue._is_belonging_to_dialogue(self.valid_message_1_by_self)
assert (not self.dialogue._is_belonging_to_dialogue(valid_message_2_by_self))
def test_reply_positive(self):
self.dialogue._update(self.valid_message_1_by_self)
self.dialogue.reply(target_message=self.valid_message_1_by_self, performative=DefaultMessage.Performative.BYTES, content=b'Hello Back')
assert (self.dialogue.last_message.message_id == 2)
def test_reply_negative_empty_dialogue(self):
with pytest.raises(ValueError) as cm:
self.dialogue.reply(target_message=self.valid_message_1_by_self, performative=DefaultMessage.Performative.BYTES, content=b'Hello Back')
assert (str(cm.value) == 'Cannot reply in an empty dialogue!')
assert self.dialogue.is_empty
def test_reply_negative_target_does_not_exist(self):
self.dialogue._update(self.valid_message_1_by_self)
with pytest.raises(ValueError) as cm:
self.dialogue.reply(target=10, performative=DefaultMessage.Performative.BYTES, content=b'Hello Back')
assert (str(cm.value) == 'No target message found!')
def test_reply_negative_target_message_target_mismatch(self):
self.dialogue._update(self.valid_message_1_by_self)
assert (self.dialogue.last_message.message_id == 1)
with pytest.raises(AEAEnforceError) as cm:
self.dialogue.reply(target_message=self.valid_message_1_by_self, target=2, performative=DefaultMessage.Performative.BYTES, content=b'Hello Back')
assert (str(cm.value) == 'The provided target and target_message do not match.')
assert (self.dialogue.last_message.message_id == 1)
def test_reply_negative_invalid_target(self):
self.dialogue._update(self.valid_message_1_by_self)
assert (self.dialogue.last_message.message_id == 1)
invalid_message_1_by_self = DefaultMessage(dialogue_reference=(str(1), ''), message_id=2, target=1, performative=DefaultMessage.Performative.BYTES, content=b'Hello There')
invalid_message_1_by_self.sender = self.agent_address
invalid_message_1_by_self.to = self.opponent_address
with pytest.raises(AEAEnforceError) as cm:
self.dialogue.reply(target_message=invalid_message_1_by_self, performative=DefaultMessage.Performative.BYTES, content=b'Hello Back')
assert (str(cm.value) == 'The target message does not exist in this dialogue.')
assert (self.dialogue.last_message.message_id == 1)
def test_is_valid_next_message_positive(self):
self.dialogue._update(self.valid_message_1_by_self)
self.dialogue._update(self.valid_message_2_by_other)
(result, msg) = self.dialogue._validate_next_message(self.valid_message_3_by_self)
assert (result is True)
assert (msg == 'Message is valid with respect to this dialogue.')
def test_is_valid_next_message_negative_basic_validation_fails(self):
invalid_message_1_by_self = DefaultMessage(dialogue_reference=(str(1), ''), message_id=2, target=0, performative=DefaultMessage.Performative.BYTES, content=b'Hello')
invalid_message_1_by_self.sender = self.agent_address
invalid_message_1_by_self.to = self.opponent_address
(result, msg) = self.dialogue._validate_next_message(invalid_message_1_by_self)
assert (result is False)
assert (msg == 'Invalid message_id. Expected 1. Found 2.')
def test_is_valid_next_message_negative_additional_validation_fails(self):
self.dialogue._update(self.valid_message_1_by_self)
self.dialogue._update(self.valid_message_2_by_other)
invalid_message_3_by_self = DefaultMessage(dialogue_reference=(str(1), str(1)), message_id=2, target=3, performative=DefaultMessage.Performative.BYTES, content=b'Hello back 2')
invalid_message_3_by_self.sender = self.agent_address
invalid_message_3_by_self.to = self.opponent_address
(result, msg) = self.dialogue._validate_next_message(invalid_message_3_by_self)
assert (result is False)
assert ('Invalid target' in msg)
def test_is_valid_next_message_negative_is_valid_fails(self):
def failing_custom_validation(self, message: Message) -> Tuple[(bool, str)]:
return (False, 'some reason')
with patch.object(self.dialogue.__class__, '_custom_validation', failing_custom_validation):
(result, msg) = self.dialogue._validate_next_message(self.valid_message_1_by_self)
assert (result is False)
assert (msg == 'some reason')
def test_basic_validation_positive(self):
(result, msg) = self.dialogue._basic_validation(self.valid_message_1_by_self)
assert (result is True)
assert (msg == 'The initial message passes basic validation.')
self.dialogue._update(self.valid_message_1_by_self)
(result, msg) = self.dialogue._basic_validation(self.valid_message_2_by_other)
assert (result is True)
assert (msg == 'The non-initial message passes basic validation.')
def test_basic_validation_negative_initial_message_invalid(self):
invalid_message_1_by_self = DefaultMessage(dialogue_reference=(str(1), ''), message_id=2, target=1, performative=DefaultMessage.Performative.BYTES, content=b'Hello')
invalid_message_1_by_self.sender = self.agent_address
invalid_message_1_by_self.to = self.opponent_address
assert self.dialogue.is_empty
(result, msg) = self.dialogue._basic_validation(invalid_message_1_by_self)
assert (result is False)
assert (msg == 'Invalid message_id. Expected 1. Found 2.')
(BaseDialogue, '_validate_message_id', return_value=None)
def test_basic_validation_negative_non_initial_message_invalid(self, *mocks):
self.dialogue._update(self.valid_message_1_by_self)
invalid_message_2_by_other = DefaultMessage(dialogue_reference=(str(1), str(1)), message_id=(- 1), target=0, performative=DefaultMessage.Performative.BYTES, content=b'Hello back')
invalid_message_2_by_other.sender = self.opponent_address
invalid_message_2_by_other.to = self.agent_address
(result, msg) = self.dialogue._basic_validation(invalid_message_2_by_other)
assert (result is False)
assert (msg == 'Invalid target. Expected a non-zero integer. Found 0.')
def test_basic_validation_initial_message_positive(self):
(result, msg) = self.dialogue._basic_validation_initial_message(self.valid_message_1_by_self)
assert (result is True)
assert (msg == 'The initial message passes basic validation.')
def test_basic_validation_initial_message_negative_invalid_dialogue_reference(self):
invalid_message_1_by_self = DefaultMessage(dialogue_reference=(str(2), ''), performative=DefaultMessage.Performative.BYTES, content=b'Hello')
invalid_message_1_by_self.sender = self.agent_address
invalid_message_1_by_self.to = self.opponent_address
(result, msg) = self.dialogue._basic_validation_initial_message(invalid_message_1_by_self)
assert (result is False)
assert (msg == 'Invalid dialogue_reference[0]. Expected 1. Found 2.')
def test_basic_validation_initial_message_negative_invalid_message_id(self):
invalid_message_1_by_self = DefaultMessage(dialogue_reference=(str(1), ''), message_id=200, target=1, performative=DefaultMessage.Performative.BYTES, content=b'Hello')
invalid_message_1_by_self.sender = self.agent_address
invalid_message_1_by_self.to = self.opponent_address
(result, msg) = self.dialogue._basic_validation_initial_message(invalid_message_1_by_self)
assert (result is False)
assert re.match('Invalid message_id. Expected .*. Found 200.', msg)
def test_basic_validation_initial_message_negative_invalid_target(self):
invalid_message_1_by_self = DefaultMessage(dialogue_reference=(str(1), ''), message_id=1, target=1, performative=DefaultMessage.Performative.BYTES, content=b'Hello')
invalid_message_1_by_self.sender = self.agent_address
invalid_message_1_by_self.to = self.opponent_address
(result, msg) = self.dialogue._basic_validation_initial_message(invalid_message_1_by_self)
assert (result is False)
assert (msg == 'Invalid target. Expected 0. Found 1.')
def test_basic_validation_initial_message_negative_invalid_performative(self):
invalid_initial_msg = DefaultMessage(dialogue_reference=(str(1), ''), performative=DefaultMessage.Performative.ERROR, error_code=DefaultMessage.ErrorCode.INVALID_MESSAGE, error_msg='some_error_message', error_data={'some_data': b'some_bytes'})
invalid_initial_msg.sender = self.agent_address
invalid_initial_msg.to = self.opponent_address
(result, msg) = self.dialogue._basic_validation_initial_message(invalid_initial_msg)
assert (result is False)
assert (msg == 'Invalid initial performative. Expected one of {}. Found error.'.format(self.dialogue.rules.initial_performatives))
def test_basic_validation_non_initial_message_positive(self):
self.dialogue._update(self.valid_message_1_by_self)
(result, msg) = self.dialogue._basic_validation_non_initial_message(self.valid_message_2_by_other)
assert (result is True)
assert (msg == 'The non-initial message passes basic validation.')
def test_basic_validation_non_initial_message_negative_invalid_dialogue_reference(self):
self.dialogue._update(self.valid_message_1_by_self)
invalid_message_2_by_other = DefaultMessage(dialogue_reference=(str(2), str(1)), message_id=2, target=1, performative=DefaultMessage.Performative.BYTES, content=b'Hello back')
invalid_message_2_by_other.sender = self.opponent_address
invalid_message_2_by_other.to = self.agent_address
(result, msg) = self.dialogue._basic_validation_non_initial_message(invalid_message_2_by_other)
assert (result is False)
assert (msg == 'Invalid dialogue_reference[0]. Expected 1. Found 2.')
def test_basic_validation_non_initial_message_negative_invalid_message_id(self):
self.dialogue._update(self.valid_message_1_by_self)
invalid_message_2_by_other = DefaultMessage(dialogue_reference=(str(1), str(1)), message_id=, target=1, performative=DefaultMessage.Performative.BYTES, content=b'Hello back')
invalid_message_2_by_other.sender = self.opponent_address
invalid_message_2_by_other.to = self.agent_address
(result, msg) = self.dialogue._basic_validation_non_initial_message(invalid_message_2_by_other)
assert (result is False)
assert re.match('Invalid message_id. Expected .*. Found ', msg)
(BaseDialogue, '_validate_message_id', return_value=None)
def test_basic_validation_non_initial_message_negative_invalid_target_1(self, *mocks):
self.dialogue._update(self.valid_message_1_by_self)
invalid_message_2_by_other = DefaultMessage(dialogue_reference=(str(1), str(1)), message_id=2, target=0, performative=DefaultMessage.Performative.BYTES, content=b'Hello back')
invalid_message_2_by_other.sender = self.opponent_address
invalid_message_2_by_other.to = self.agent_address
(result, msg) = self.dialogue._basic_validation_non_initial_message(invalid_message_2_by_other)
assert (result is False)
assert (msg == 'Invalid target. Expected a non-zero integer. Found 0.')
def test_basic_validation_non_initial_message_negative_invalid_target_2(self):
self.dialogue._update(self.valid_message_1_by_self)
invalid_message_2_by_other = DefaultMessage(dialogue_reference=(str(1), str(1)), message_id=(- 1), target=2, performative=DefaultMessage.Performative.BYTES, content=b'Hello back')
invalid_message_2_by_other.sender = self.opponent_address
invalid_message_2_by_other.to = self.agent_address
(result, msg) = self.dialogue._basic_validation_non_initial_message(invalid_message_2_by_other)
assert (result is False)
assert ('Invalid target. Expected a value less than ' in msg)
(BaseDialogue, '_validate_message_id', return_value=None)
def test_basic_validation_non_initial_message_negative_invalid_performative(self, *mocks):
self.dialogue._update(self.valid_message_1_by_self)
invalid_message_2_by_other = StateUpdateMessage(dialogue_reference=(str(1), str(1)), message_id=Mock(), target=1, performative=StateUpdateMessage.Performative.APPLY, amount_by_currency_id={}, quantities_by_good_id={})
invalid_message_2_by_other.sender = self.opponent_address
invalid_message_2_by_other.to = self.agent_address
(result, msg) = self.dialogue._basic_validation_non_initial_message(invalid_message_2_by_other)
assert (result is False)
assert (msg == 'Invalid performative. Expected one of {}. Found {}.'.format(self.dialogue.rules.get_valid_replies(self.valid_message_1_by_self.performative), invalid_message_2_by_other.performative))
def test_update_dialogue_label_positive(self):
self.dialogue._update(self.valid_message_1_by_self)
new_label = DialogueLabel((str(1), str(1)), self.valid_message_1_by_self.to, self.agent_address)
self.dialogue._update_dialogue_label(new_label)
assert (self.dialogue.dialogue_label == new_label)
def test_update_dialogue_label_negative_invalid_existing_label(self):
self.dialogue._update(self.valid_message_1_by_self)
self.dialogue._update(self.valid_message_2_by_other)
new_label = DialogueLabel((str(1), str(1)), self.valid_message_1_by_self.to, self.agent_address)
self.dialogue._update_dialogue_label(new_label)
assert (self.dialogue.dialogue_label == new_label)
new_label = DialogueLabel((str(1), str(2)), self.valid_message_1_by_self.to, self.agent_address)
with pytest.raises(AEAEnforceError) as cm:
self.dialogue._update_dialogue_label(new_label)
assert (str(cm.value) == 'Dialogue label cannot be updated.')
assert (self.dialogue.dialogue_label != new_label)
def test_update_dialogue_label_negative_invalid_input_label(self):
self.dialogue._update(self.valid_message_1_by_self)
new_label = DialogueLabel((str(2), ''), self.valid_message_1_by_self.to, self.agent_address)
with pytest.raises(AEAEnforceError) as cm:
self.dialogue._update_dialogue_label(new_label)
assert (str(cm.value) == 'Dialogue label cannot be updated.')
assert (self.dialogue.dialogue_label != new_label)
def test___str__1(self):
self.dialogue._update(self.valid_message_1_by_self)
self.dialogue._update(self.valid_message_2_by_other)
self.dialogue._update(self.valid_message_3_by_self)
dialogue_str = 'Dialogue Label:\n1__agent 2_agent 1\nMessages:\nmessage_id=1, target=0, performative=bytes\nmessage_id=-1, target=1, performative=bytes\nmessage_id=2, target=-1, performative=bytes\n'
assert (str(self.dialogue) == dialogue_str)
def test___str__2(self):
valid_message_1_by_other = DefaultMessage(dialogue_reference=(str(1), ''), performative=DefaultMessage.Performative.BYTES, content=b'Hello')
valid_message_1_by_other.sender = self.opponent_address
valid_message_1_by_other.to = self.agent_address
self.dialogue_opponent_started._update(valid_message_1_by_other)
valid_message_2_by_self = DefaultMessage(dialogue_reference=(str(1), str(1)), message_id=(- 1), target=1, performative=DefaultMessage.Performative.BYTES, content=b'Hello back')
valid_message_2_by_self.sender = self.agent_address
valid_message_2_by_self.to = self.opponent_address
self.dialogue_opponent_started._update(valid_message_2_by_self)
valid_message_3_by_other = DefaultMessage(dialogue_reference=(str(1), str(1)), message_id=2, target=(- 1), performative=DefaultMessage.Performative.BYTES, content=b'Hello back 2')
valid_message_3_by_other.sender = self.opponent_address
valid_message_3_by_other.to = self.agent_address
self.dialogue_opponent_started._update(valid_message_3_by_other)
dialogue_str = 'Dialogue Label:\n1_1_agent 2_agent 2\nMessages:\nmessage_id=1, target=0, performative=bytes\nmessage_id=-1, target=1, performative=bytes\nmessage_id=2, target=-1, performative=bytes\n'
assert (str(self.dialogue_opponent_started) == dialogue_str) |
class _XorMatcher(_AlreadyChainedMatcher):
    """Matcher combinator that matches when exactly one operand matches."""

    def __init__(self, a: Matcher, b: Matcher) -> None:
        self.a = a
        self.b = b

    def __eq__(self, other: AnyType) -> bool:
        # Exclusive or: exactly one of the two sub-matchers must accept
        # `other`.  The previous expression, (A or not B) and (not A or B)
        # with A = (a == other), B = (b == other), computed XNOR — true when
        # both or neither matched — contradicting the `^` semantics shown
        # by __repr__.
        return (self.a == other) != (self.b == other)

    def __repr__(self) -> str:
        return f'{self.a} ^ {self.b}'
class OptionSeriesXrangeSonificationTracksMappingFrequency(Options):
    """Highcharts `series.xrange.sonification.tracks.mapping.frequency` options.

    Each option is exposed as a read/write property: reads go through
    ``_config_get`` (with the documented default) and writes through
    ``_config``.  The original file had the ``@property`` / ``@x.setter``
    decorators stripped, so every getter was silently shadowed by its
    same-named setter; the decorators are restored here.
    """

    @property
    def mapFunction(self):
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        return self._config_get(None)

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def within(self):
        return self._config_get(None)

    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)
class QRDialog(WindowModalDialog):
    """Modal dialog that renders `data` as a QR code with Copy/Save/Close.

    :param data: payload encoded into the QR code (and optionally shown).
    :param parent: optional parent widget.
    :param title: window title.
    :param show_text: when True, also show `data` in a read-only text box.
    """

    def __init__(self, data, parent=None, title='', show_text=False):
        WindowModalDialog.__init__(self, parent, title)
        vbox = QVBoxLayout()
        qrw = QRCodeWidget(data)
        # (Removed a dead `qscreen = QApplication.primaryScreen()` local
        # that was never used.)
        vbox.addWidget(qrw, 1)
        if show_text:
            text = QTextEdit()
            text.setText(data)
            text.setReadOnly(True)
            vbox.addWidget(text)
        hbox = QHBoxLayout()
        hbox.addStretch(1)
        # The QR image is always saved to the same well-known config path.
        filename = os.path.join(app_state.config.path, 'qrcode.png')

        def print_qr():
            # Grab the rendered widget and persist it as a PNG.
            pixmap = qrw.grab()
            pixmap.save(filename, 'png')
            self.show_message(((_('QR code saved to file') + ' ') + filename))

        def copy_to_clipboard():
            pixmap = qrw.grab()
            QApplication.clipboard().setPixmap(pixmap)
            self.show_message(_('QR code copied to clipboard'))

        b = QPushButton(_('Copy'))
        hbox.addWidget(b)
        b.clicked.connect(copy_to_clipboard)
        b = QPushButton(_('Save'))
        hbox.addWidget(b)
        b.clicked.connect(print_qr)
        b = QPushButton(_('Close'))
        hbox.addWidget(b)
        b.clicked.connect(self.accept)
        b.setDefault(True)
        vbox.addLayout(hbox)
        self.setLayout(vbox)
def pytest_generate_tests(metafunc):
    """Parametrize tests over language packs (and optionally their strings)."""
    fixtures = metafunc.fixturenames
    if 'language_pack' not in fixtures:
        return
    packs = get_language_packs()
    if 'language_string' in fixtures:
        # Pair every pack with each of its strings known to the structure.
        cases = [
            (pack, string)
            for pack in packs
            for string in get_language_pack(pack).keys()
            if string in LANGUAGE_STRUCTURE
        ]
        metafunc.parametrize(['language_pack', 'language_string'], cases)
    else:
        metafunc.parametrize('language_pack', packs)
def build_index(force_rebuild=False, execute_after_end=None):
    """Kick off background (re)building of the note search index.

    Markdown files are scanned only when a folder is configured and exists;
    the scan is incremental, keyed on the last recorded index timestamp.
    The build runs only while no index exists yet.
    """
    config = mw.addonManager.getConfig(__name__)
    changed_md_files = []
    folder = config['md.folder_path']
    if folder is not None and len(folder) > 0 and os.path.exists(folder):
        last_stamp = config['md.last_index_stamp'] or ''
        changed_md_files = scan_folder_for_changed_files(folder, last_stamp)
    note_data = get_notes_in_collection()
    if get_index() is None:
        runner = ProcessRunnable(_build_index, note_data, changed_md_files, force_rebuild)
        if execute_after_end is not None:
            runner.after_end = execute_after_end
        runner.start()
class WebUIRequests(_RequestsInterface):
    """Drive Copr's web-UI form endpoints through the Flask test client.

    Convention used throughout: a successful form submission redirects
    (HTTP 302), while a rejected one re-renders the form with HTTP 200.
    """

    def new_project(self, name, chroots, **kwargs):
        """Submit the "new project" form; only recognized kwargs are sent."""
        data = {'name': name, 'chroots': chroots}
        for config in ['bootstrap', 'isolation', 'contact', 'homepage', 'appstream', 'follow_fedora_branching']:
            if config not in kwargs:
                continue
            data[config] = kwargs[config]
        resp = self.client.post('/coprs/{0}/new/'.format(self.transaction_username), data=data, follow_redirects=False)
        # BUGFIX: the original line asserted
        # `(resp.status_code == 302) if self.success_expected else 200`,
        # which degenerates to `assert 200` (always true) when a failure is
        # expected.  Compare against the expected status code instead.
        assert resp.status_code == (302 if self.success_expected else 200)
        return resp

    def edit_chroot(self, project, chroot, bootstrap=None, bootstrap_image=None, owner=None, isolation=None, additional_packages=None, reset_fields=None, comps_filename=None):
        """Submit the "update chroot" form for `project`/`chroot`."""
        route = '/coprs/{user}/{project}/update_chroot/{chroot}/'.format(user=(owner or self.transaction_username), project=project, chroot=chroot)
        data = {'submit': 'update'}
        if bootstrap is not None:
            data['bootstrap'] = bootstrap
        if bootstrap_image is not None:
            data['bootstrap_image'] = bootstrap_image
        if isolation is not None:
            data['isolation'] = isolation
        if comps_filename:
            # Uploaded as multipart form-data below.
            data['comps'] = FileStorage(stream=open(comps_filename, 'rb'), filename=os.path.basename(comps_filename), content_type='application/text')
        resp = self.client.post(route, data=data, content_type='multipart/form-data')
        if self.success_expected:
            assert resp.status_code == 302
        return resp

    def create_distgit_package(self, project, pkgname, options=None, expected_status_code=None):
        """Create a dist-git package in `project`; `options` is unsupported."""
        if options:
            raise NotImplementedError
        data = {'package_name': pkgname}
        route = '/coprs/{user}/{project}/package/new/distgit'.format(user=self.transaction_username, project=project)
        resp = self.client.post(route, data=data)
        assert resp.status_code == (expected_status_code or 302)
        return resp

    def cancel_build(self, project, build_id):
        """Cancel a running build via the web form."""
        route = '/coprs/{user}/{project}/cancel_build/{build_id}/'.format(user=self.transaction_username, project=project, build_id=build_id)
        resp = self.client.post(route, data={})
        assert resp.status_code == 302
        return resp

    @staticmethod
    def _form_data_from_build_options(build_options):
        """Translate a build-options dict into web-form fields.

        BUGFIX: this was declared without `self` yet invoked as
        `self._form_data_from_build_options(...)`, which raised TypeError;
        it is now an explicit staticmethod.
        """
        if build_options is None:
            build_options = {}
        form_data = {'chroots': build_options.get('chroots')}
        for attr in ['bootstrap', 'with_build_id', 'after_build_id', 'isolation']:
            value = build_options.get(attr)
            if value is None:
                continue
            form_data[attr] = value
        return form_data

    def _submit_url_build(self, project, urls, build_options):
        """Submit a new build of the given source URLs; must succeed (302)."""
        form_data = self._form_data_from_build_options(build_options)
        form_data['pkgs'] = urls
        route = '/coprs/{user}/{project}/new_build/'.format(user=self.transaction_username, project=project)
        resp = self.client.post(route, data=form_data)
        if resp.status_code != 302:
            # Surface the form validation errors before failing the assert.
            print(parse_web_form_error(resp.data))
        assert resp.status_code == 302
        return resp

    def rebuild_all_packages(self, project_id, package_names=None):
        """Rebuild all (or the named) packages of a project in all chroots."""
        copr = models.Copr.query.get(project_id)
        if not package_names:
            package_names = [p.name for p in copr.packages]
        chroots = [mch.name for mch in copr.mock_chroots]
        route = '/coprs/{}/packages/rebuild-all/'.format(copr.full_name)
        form_data = {'packages': package_names}
        for ch in chroots:
            form_data[ch] = 'y'
        return self.client.post(route, data=form_data)

    def resubmit_build_id(self, build_id):
        """Resubmit an existing build by id."""
        build = models.Build.query.get(build_id)
        path = f'/coprs/{build.copr.full_name}/new_build_rebuild/{build_id}'
        response = self.client.post(path, data={})
        assert response.status_code == 302
        return response
def simulate(shot: System, engine: Optional[PhysicsEngine]=None, inplace: bool=False, continuous: bool=False, dt: Optional[float]=None, t_final: Optional[float]=None, quartic_solver: QuarticSolver=QuarticSolver.HYBRID, include: Set[EventType]=INCLUDED_EVENTS) -> System:
    """Run the event-based simulation of `shot` until no events remain.

    Args:
        shot: The system to simulate; copied first unless `inplace` is True.
        engine: Physics engine used to resolve events; default-constructed
            when omitted.
        inplace: Mutate `shot` directly instead of working on a copy.
        continuous: After the event loop, densify the trajectory via
            `continuize` with timestep `dt`.
        dt: Timestep for continuization (0.01 is used when None).
        t_final: Optional simulation-time cutoff; the loop stops once
            `shot.t` reaches it.
        quartic_solver: Root solver used by event detection.
        include: Event types whose effects are resolved and fed back into the
            transition cache; other detected events still advance time and
            are appended to the history.

    Returns:
        The simulated system (the same object as `shot` when `inplace`).
    """
    if (not inplace):
        shot = shot.copy()
    if (not engine):
        engine = PhysicsEngine()
    shot.reset_history()
    shot.update_history(null_event(time=0))
    # If the system is at rest but the cue stick has speed, open with the
    # stick-ball collision so the strike is part of the recorded history.
    if ((shot.get_system_energy() == 0) and (shot.cue.V0 > 0)):
        event = stick_ball_collision(stick=shot.cue, ball=shot.balls[shot.cue.cue_ball_id], time=0, set_initial=True)
        engine.resolver.resolve(shot, event)
        shot.update_history(event)
    transition_cache = TransitionCache.create(shot)
    while True:
        event = get_next_event(shot, transition_cache=transition_cache, quartic_solver=quartic_solver)
        if (event.time == np.inf):
            # No further events: close the history at the current time.
            shot.update_history(null_event(time=shot.t))
            break
        # Advance the system state to the moment of the event.
        _evolve(shot, (event.time - shot.t))
        if (event.event_type in include):
            engine.resolver.resolve(shot, event)
            transition_cache.update(event)
        shot.update_history(event)
        if ((t_final is not None) and (shot.t >= t_final)):
            # Time cutoff reached; close the history and stop.
            shot.update_history(null_event(time=shot.t))
            break
    if continuous:
        continuize(shot, dt=(0.01 if (dt is None) else dt), inplace=True)
    return shot
def test_gzip_streaming_response(test_client_factory):
    """GZip middleware compresses a streamed response and drops Content-Length.

    Fixes in passing: the generator parameter was named ``bytes`` (shadowing
    the builtin) and the loop index was unused.
    """
    def homepage(request):
        async def generator(chunk, count):
            # Emit `count` copies of `chunk`.
            for _ in range(count):
                yield chunk
        streaming = generator(chunk=b'x' * 400, count=10)
        return StreamingResponse(streaming, status_code=200)
    app = Starlette(routes=[Route('/', endpoint=homepage)], middleware=[Middleware(GZipMiddleware)])
    client = test_client_factory(app)
    response = client.get('/', headers={'accept-encoding': 'gzip'})
    assert response.status_code == 200
    assert response.text == 'x' * 4000
    assert response.headers['Content-Encoding'] == 'gzip'
    # Streamed gzip responses cannot know their final length up front.
    assert 'Content-Length' not in response.headers
class OptionPlotoptionsWordcloudSonificationDefaultinstrumentoptions(Options):
    """Highcharts wordcloud `sonification.defaultInstrumentOptions` options.

    Scalar options are exposed as read/write properties (the decorators had
    been stripped, leaving each getter shadowed by its same-named setter);
    nested option groups are read-only properties returning sub-options.
    """

    @property
    def activeWhen(self) -> 'OptionPlotoptionsWordcloudSonificationDefaultinstrumentoptionsActivewhen':
        return self._config_sub_data('activeWhen', OptionPlotoptionsWordcloudSonificationDefaultinstrumentoptionsActivewhen)

    @property
    def instrument(self):
        return self._config_get('piano')

    @instrument.setter
    def instrument(self, text: str):
        self._config(text, js_type=False)

    @property
    def mapping(self) -> 'OptionPlotoptionsWordcloudSonificationDefaultinstrumentoptionsMapping':
        return self._config_sub_data('mapping', OptionPlotoptionsWordcloudSonificationDefaultinstrumentoptionsMapping)

    @property
    def midiName(self):
        return self._config_get(None)

    @midiName.setter
    def midiName(self, text: str):
        self._config(text, js_type=False)

    @property
    def pointGrouping(self) -> 'OptionPlotoptionsWordcloudSonificationDefaultinstrumentoptionsPointgrouping':
        return self._config_sub_data('pointGrouping', OptionPlotoptionsWordcloudSonificationDefaultinstrumentoptionsPointgrouping)

    @property
    def roundToMusicalNotes(self):
        return self._config_get(True)

    @roundToMusicalNotes.setter
    def roundToMusicalNotes(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def showPlayMarker(self):
        return self._config_get(True)

    @showPlayMarker.setter
    def showPlayMarker(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def type(self):
        return self._config_get('instrument')

    @type.setter
    def type(self, text: str):
        self._config(text, js_type=False)
class WheelKeys:
    """Persistent registry of signing/verifying keys for wheel signatures.

    The registry is a JSON document (``wheel.json``) with two lists,
    ``signers`` and ``verifiers``; each entry maps a *scope* (package-name
    pattern, ``'+'`` meaning "any package") to a verify key.  All mutators
    return ``self`` so calls can be chained.
    """

    SCHEMA = 1               # current wheel.json schema version
    CONFIG_NAME = 'wheel.json'

    def __init__(self):
        self.data = {'signers': [], 'verifiers': []}

    def load(self):
        """Load the first wheel.json found on the config path, if any."""
        for path in load_config_paths('wheel'):
            conf = os.path.join(native(path), self.CONFIG_NAME)
            if os.path.exists(conf):
                with open(conf, 'r') as infile:
                    self.data = json.load(infile)
                # Backfill keys that older files may lack.
                for x in ('signers', 'verifiers'):
                    if x not in self.data:
                        self.data[x] = []
                if 'schema' not in self.data:
                    self.data['schema'] = self.SCHEMA
                elif self.data['schema'] != self.SCHEMA:
                    raise ValueError('Bad wheel.json version {0}, expected {1}'.format(self.data['schema'], self.SCHEMA))
                break
        return self

    def save(self):
        """Write the registry to the user's config directory."""
        path = save_config_path('wheel')
        conf = os.path.join(native(path), self.CONFIG_NAME)
        with open(conf, 'w+') as out:
            json.dump(self.data, out, indent=2)
        return self

    def trust(self, scope, vk):
        """Start trusting key *vk* within *scope*."""
        self.data['verifiers'].append({'scope': scope, 'vk': vk})
        return self

    def untrust(self, scope, vk):
        """Stop trusting key *vk*; raises ValueError if not currently trusted."""
        self.data['verifiers'].remove({'scope': scope, 'vk': vk})
        return self

    def trusted(self, scope=None):
        """Return (scope, key) pairs trusted for *scope*, most specific first."""
        trust = [(x['scope'], x['vk']) for x in self.data['verifiers'] if x['scope'] in (scope, '+')]
        trust.sort(key=lambda x: x[0])
        trust.reverse()
        return trust

    def signers(self, scope):
        """Return (scope, key) signing pairs for *scope*, most specific first."""
        sign = [(x['scope'], x['vk']) for x in self.data['signers'] if x['scope'] in (scope, '+')]
        sign.sort(key=lambda x: x[0])
        sign.reverse()
        return sign

    def add_signer(self, scope, vk):
        """Remember signing key *vk* for *scope*.

        Now returns ``self`` like every other mutator in this class (it
        previously returned ``None``, breaking call chaining).
        """
        self.data['signers'].append({'scope': scope, 'vk': vk})
        return self
def all_blogs(request):
    """Render the listing of public blogs plus the current user's own blogs."""
    # Anonymous visitors have no personal blogs to show.
    user_blogs = (
        Blog.objects.filter(owner=request.user)
        if request.user.is_authenticated
        else []
    )
    context = {
        'public_blogs': Blog.objects.filter(public=True),
        'user_blogs': user_blogs,
    }
    return render(request, 'blogs/all_blogs.html', context=context)
def create_click_web_app(module, command: click.BaseCommand, root='/'):
    """Create the singleton Flask app exposing `command` as a web UI.

    Args:
        module: Module containing the click command (passed to `_register`
            for introspection).
        command: The root click command/group to serve.
        root: URL prefix under which the app is mounted.

    Returns:
        The configured Flask application.  The app is also stored in the
        module-global `_flask_app`; calling this twice raises AssertionError.
    """
    global _flask_app, logger
    assert (_flask_app is None), 'Flask App already created.'
    _register(module, command)
    _flask_app = Flask(__name__, static_url_path=(root.rstrip('/') + '/static'))
    _flask_app.config['APPLICATION_ROOT'] = root
    root = root.rstrip('/')
    _flask_app.jinja_env.add_extension('jinja2.ext.do')
    # GET routes: landing page and per-command input form.
    _flask_app.add_url_rule((root + '/'), 'index', click_web.resources.index.index)
    _flask_app.add_url_rule((root + '/<path:command_path>'), 'command', click_web.resources.cmd_form.get_form_for)
    # POST on the same path executes the command.
    executor = click_web.resources.cmd_exec.Executor()
    _flask_app.add_url_rule((root + '/<path:command_path>'), 'command_execute', executor.exec, methods=['POST'])
    _flask_app.logger.info(f'OUTPUT_FOLDER: {OUTPUT_FOLDER}')
    # Generated output files are served statically from OUTPUT_FOLDER.
    results_blueprint = Blueprint('results', __name__, static_url_path=(root + '/static/results'), static_folder=OUTPUT_FOLDER)
    _flask_app.register_blueprint(results_blueprint)
    logger = _flask_app.logger
    return _flask_app
def test_value_mean_error_test_render_json() -> None:
    """TestValueMeanError serializes to the expected JSON payload."""
    frame = pd.DataFrame({
        'category_feature': ['n', 'd', 'p', 'n'],
        'numerical_feature': [0, 1, 2, 5],
        'target': [0, 0, 0, 1],
        'prediction': [0, 1, 0, 0],
    })
    suite = TestSuite(tests=[TestValueMeanError()])
    suite.run(current_data=frame, reference_data=frame, column_mapping=ColumnMapping())
    serialized = suite.json()
    assert isinstance(serialized, str)
    first_test = json.loads(serialized)['tests'][0]
    expected = {
        'description': 'The ME is 0.0. The test threshold is eq=0 0.0816.',
        'group': 'regression',
        'name': 'Mean Error (ME)',
        'parameters': {'condition': {'eq': {'absolute': 0.0, 'relative': 1e-06, 'value': 0}}, 'value': 0.0},
        'status': 'SUCCESS',
    }
    assert first_test == expected
class CliParser(argparse.ArgumentParser):
    """ArgumentParser that supports a default sub-command and raises instead
    of exiting the interpreter on parse errors."""

    def set_default_subparser(self, name, args=None):
        """Insert sub-command *name* when the command line names none.

        The default is inserted at sys.argv[1] (or prepended to *args*
        when an explicit argument list is given).
        """
        subparser_found = False
        for arg in sys.argv[1:]:
            if (arg in ['-h', '--help']):
                # Stop scanning at the first help flag.
                break
            else:
                # Check whether any known sub-command name appears anywhere
                # in the arguments.
                # NOTE(review): this 'else' binds to the 'if', not the 'for',
                # so the scan repeats for every leading non-help argument, and
                # the default is still inserted when -h/--help is the first
                # argument. The widely used recipe attaches the 'else' to the
                # 'for' loop instead — confirm this variant is intentional.
                for x in self._subparsers._actions:
                    if (not isinstance(x, argparse._SubParsersAction)):
                        continue
                    for sp_name in x._name_parser_map.keys():
                        if (sp_name in sys.argv[1:]):
                            subparser_found = True
        if (not subparser_found):
            if (args is None):
                sys.argv.insert(1, name)
            else:
                args.insert(0, name)

    def error(self, message):
        """Write the usage/error banner to stderr and raise ArgparseError
        instead of calling sys.exit() like the base class does."""
        sys.stderr.write((messages.generic.weevely_s_error_s_usage % (messages.version, message)))
        raise ArgparseError(message)
def format_quote(username, content):
    """Return *content* as a markdown blockquote attributed to *username*.

    The attribution line links to the user's profile; every line of the
    quoted content is prefixed with '> '.
    """
    profile_url = url_for('user.profile', username=username)
    # Prefix continuation lines; the template below adds '> ' to the first one.
    quoted_body = '\n> '.join(content.strip().split('\n'))
    return u'**[{username}]({profile_url}) wrote:**\n> {content}\n'.format(username=username, profile_url=profile_url, content=quoted_body)
def main(argv):
    """Entry point: parse CLI options, prepare SSH host/authorized keys,
    then bind a listening socket and run the PyRexecd tray-app server.

    Options: -d debug logging, -l logfile, -s sshdir, -L listen addr,
    -p port, -c command line, -u username, -a authorized_keys file,
    -h homedir; positional arguments are host key files.
    Returns 100 on usage error, otherwise None.
    """
    import getopt
    def usage():
        # Report usage and return the conventional CLI-error code.
        error(f'Usage: {argv[0]} [-d] [-l logfile] [-s sshdir] [-L addr] [-p port] [-c cmdexe] [-u username] [-a authkeys] [-h homedir] ssh_host_key ...')
        return 100
    try:
        (opts, args) = getopt.getopt(argv[1:], 'dl:s:L:p:u:a:h:c:')
    except getopt.GetoptError:
        return usage()
    # Defaults: user profile as home dir, per-user AppData for state and logs.
    homedir = getpath(shellcon.CSIDL_PROFILE)
    appdata = os.path.join(getpath(shellcon.CSIDL_APPDATA), 'PyRexecd')
    loglevel = logging.INFO
    logfile = None
    sshdir = appdata
    if windows:
        logfile = os.path.join(appdata, 'pyrexecd.log')
    port = 2200
    addr = '127.0.0.1'
    # NOTE(review): reuseaddr is never set to True (no CLI flag toggles it),
    # so the SO_REUSEADDR branch below is currently dead — confirm intended.
    reuseaddr = False
    username = win32api.GetUserName()
    authkeys = []
    cmdexe = ['cmd', '/Q']
    for (k, v) in opts:
        if (k == '-d'):
            loglevel = logging.DEBUG
        elif (k == '-l'):
            logfile = v
        elif (k == '-L'):
            addr = v
        elif (k == '-s'):
            sshdir = v
        elif (k == '-p'):
            port = int(v)
        elif (k == '-u'):
            username = v
        elif (k == '-a'):
            authkeys.append(v)
        elif (k == '-h'):
            homedir = v
        elif (k == '-c'):
            cmdexe = v.split(' ')
    # Ensure the ssh state directory exists (ignore "already exists").
    try:
        os.makedirs(sshdir)
    except OSError:
        pass
    logging.basicConfig(level=loglevel, filename=logfile, filemode='a')
    logging.info(f'Sshdir: {sshdir!r}')
    # Collect host keys: explicit positional files first, then any parseable
    # key file found in sshdir.
    hostkeys = []
    for path in args:
        if os.path.isfile(path):
            hostkeys.append(get_host_key(path))
    for name in os.listdir(sshdir):
        path = os.path.join(sshdir, name)
        if os.path.isfile(path):
            try:
                hostkeys.append(get_host_key(path))
            except ValueError:
                # Not a parseable key file; skip it.
                pass
    if (not hostkeys):
        # First run: generate and persist a fresh 2048-bit RSA host key.
        path = os.path.join(sshdir, 'ssh_host_rsa_key')
        key = paramiko.RSAKey.generate(2048)
        key.write_private_key_file(path)
        sig = ':'.join((('%02x' % b) for b in key.get_fingerprint()))
        logging.info(f'Hostkey is created: {sig!r}')
        error(f'Hostkey is created: {sig!r}')
        hostkeys.append(key)
    logging.info(f'Hostkeys: {len(hostkeys)}')
    # Load the client public keys that are allowed to log in.
    if (not authkeys):
        authkeys = [os.path.join(sshdir, 'authorized_keys')]
    pubkeys = []
    for path in authkeys:
        if os.path.isfile(path):
            pubkeys.extend(get_authorized_keys(path))
    if (not pubkeys):
        # Nobody could authenticate: open the folder for the user and bail out.
        shellopen('explore', sshdir)
        logging.error('No authorized_keys found!')
        error('No authorized_keys found!')
        return
    logging.info(f'Username: {username!r} (pubkeys:{len(pubkeys)})')
    logging.info(f'Homedir: {homedir!r}')
    logging.info(f'Cmd.exe: {cmdexe!r}')
    logging.info(f'Listening: {addr}:{port}...')
    PyRexecTrayApp.initialize(os.path.dirname(__file__))
    try:
        sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        if reuseaddr:
            ra = sock.getsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR)
            sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, (ra | 1))
        sock.bind((addr, port))
        sock.listen(5)
        # Short accept timeout keeps the tray app's message loop responsive.
        sock.settimeout(0.05)
        app = PyRexecTrayApp()
        run_server(app, sock, hostkeys, username, pubkeys, homedir, cmdexe, msg=f'Listening: {addr}:{port}...')
    except (OSError, socket.error) as e:
        logging.error(f'Error: {e!r}')
        error(f'Error: {e!r}')
        return
class LMTPMultiProcessingClient():
    """Client for an LMTP model server running in a child process.

    Spawns a daemon subprocess hosting the model and talks to it over a
    duplex multiprocessing Pipe; an asyncio polling task dispatches replies
    into per-stream token queues (TOKEN) and one-shot futures (MSG).
    """

    def __init__(self, model_identifier, **kwargs):
        ensure_picklable(kwargs, 'lmtp.inprocess kwargs must be pickleable as it has to be sent to a subprocess')
        self.model_identifier = model_identifier
        # Guard against constructing the client inside the worker process
        # itself, which would recursively spawn model servers.
        if (multiprocessing.current_process().name != 'MainProcess'):
            import sys
            sys.stderr.write('Error: Detected an access to a lmql.model(..., inprocess=True)/local:<MODEL> inside the multiprocessing worker process itself. This is not supported and may lead to unexpected behavior.\n\nTo avoid this, please make sure to not call lmql.model(..., inprocess=True) on the top level of your script, but only inside functions or the __main__ block.')
            sys.stderr.flush()
            assert False
        (c2, c1) = multiprocessing.Pipe(duplex=True)
        self.subprocess = multiprocessing.Process(target=multiprocessing_main, args=(c1, kwargs), name='lmtp-model-server', daemon=True)
        self.subprocess.start()
        self.connection = c2
        # Monotonically increasing id shared by requests and generate/score streams.
        self.stream_id = 0
        # stream_id -> list of asyncio.Queue consumers receiving TOKEN messages.
        self.iterators = {}
        # stream_id -> Future resolved by a matching MSG reply.
        self.request_futures = {}
        self.poll_task = asyncio.create_task(self.poll_messages())
        self.poll_running = asyncio.Event()

    def ref(self):
        # Hand out a reference-counted handle to this client.
        return LMTPMultiProcessingClientRef(self).ref()

    async def request(self, name, payload):
        """Send a one-shot request *name* and await its reply (5s timeout)."""
        self.stream_id += 1
        payload = {'stream_id': self.stream_id, 'model': self.model_identifier, 'data': payload}
        self.connection.send((name, payload))
        fut = asyncio.Future()
        self.request_futures[self.stream_id] = fut
        try:
            result = (await asyncio.wait_for(fut, timeout=5))
        except TimeoutError as e:
            # NOTE(review): before Python 3.11, asyncio.wait_for raises
            # asyncio.TimeoutError, which is NOT the builtin TimeoutError,
            # so this clause would not catch it there — confirm the
            # supported Python versions.
            raise TimeoutError("LMTP request '{}' timed out after 5 seconds".format(name))
        self._model_info = result
        return result

    def __del__(self):
        # Best-effort cancellation of the poller when the client is GC'd.
        if ((self.poll_task is not None) and self.poll_running.is_set()):
            self.poll_task.cancel()

    async def poll_messages(self):
        """Pump pipe messages into stream queues (TOKEN) / request futures (MSG)."""
        try:
            self.poll_running.set()
            while True:
                if (not self.connection.poll()):
                    # Nothing pending; yield briefly instead of busy-waiting.
                    (await asyncio.sleep(0.001))
                    continue
                try:
                    msg = self.connection.recv()
                    if (msg is None):
                        continue
                    (type, d) = msg
                    if (type == 'TOKEN'):
                        # Fan the token payload out to every stream consumer.
                        stream_id = d['stream_id']
                        consumers = self.iterators.get(stream_id, [])
                        for q in consumers:
                            q.put_nowait(d)
                    elif (type == 'MSG'):
                        stream_id = d['stream_id']
                        fut = self.request_futures.pop(stream_id, None)
                        if (fut is not None):
                            fut.set_result(d)
                except Exception as e:
                    warnings.warn('failed to handle msg {}: {}'.format(msg, e))
        except asyncio.CancelledError:
            return

    async def close(self):
        """Stop polling, unblock all stream consumers and terminate the subprocess."""
        if ((self.poll_task is not None) and self.poll_running.is_set()):
            self.poll_task.cancel()
        for itr_list in self.iterators.values():
            for it in itr_list:
                # The None sentinel terminates stream_iterator loops.
                it.put_nowait(None)
        self.subprocess.terminate()

    async def generate(self, prompt, **kwargs):
        """Stream generated tokens for *prompt* from the model subprocess."""
        self.stream_id += 1
        payload = {**kwargs, 'model': self.model_identifier, 'prompt': prompt, 'stream_id': self.stream_id}
        # Drop an explicit logit_bias=None so the server sees no key at all.
        if (payload.get('logit_bias', None) is None):
            payload.pop('logit_bias', None)
        self.connection.send(('GENERATE', payload))
        async for token in self.stream_iterator(self.stream_id):
            (yield token)

    async def score(self, prompt, scored_prompt, **kwargs):
        """Stream scoring results for *scored_prompt* given *prompt*."""
        self.stream_id += 1
        payload = {**kwargs, 'model': self.model_identifier, 'prompt': prompt, 'scored': scored_prompt, 'stream_id': self.stream_id}
        self.connection.send(('SCORE', payload))
        async for token in self.stream_iterator(self.stream_id):
            (yield token)

    async def stream_iterator(self, stream_id):
        """Yield TOKEN payloads for *stream_id* until finish/error/None sentinel.

        NOTE(review): the queue registered here is never removed from
        self.iterators, so entries accumulate for the lifetime of the client —
        confirm whether cleanup is handled elsewhere.
        """
        q = asyncio.Queue()
        self.iterators.setdefault(stream_id, []).append(q)
        while True:
            item = (await q.get())
            if (item is None):
                break
            if (item.get('error') is not None):
                raise LMTPStreamError(item['error'])
            if (item.get('finish_reason') is not None):
                # Final token of the stream: yield it, then stop.
                (yield item)
                break
            (yield item)
class Node():
    """A simple tree node holding a payload dict, a type tag and child nodes.

    Children are kept in insertion order, keyed by an integer id; ``row``
    reports the node's position among its siblings (or an explicit 'index'
    entry from the payload, when present).
    """

    def __init__(self, id_: int, data: Dict, type_: NodeType) -> None:
        self.id = id_
        self.type = type_
        self.data: Dict = data
        # Set by the parent's add_child(); roots keep None.
        self.parent: Optional['Node'] = None
        self.children: Dict[int, 'Node'] = {}

    def __repr__(self) -> str:
        parent_part = '' if self.parent else 'no '
        child_part = f'{len(self.children)} ' if self.children else 'no '
        return f'Node<{self.type}>{self.id} with {parent_part}parent and {child_part}children'

    def add_child(self, node: 'Node', node_id: Optional[int]=None) -> None:
        """Attach *node* as a child, keyed by *node_id* (defaults to node.id)."""
        node.parent = self
        key = node.id if (node_id is None) else node_id
        self.children[key] = node

    def row(self) -> int:
        """Return this node's sibling position, preferring data['index']."""
        if ('index' in self.data):
            return int(self.data['index'])
        if (self.parent is None):
            raise ValueError(f'{self} had no parent')
        return list(self.parent.children.keys()).index(self.id)
# BUGFIX: the decorator previously appeared as a bare `.skipif(...)` line,
# which is a syntax error — it had lost its `@pytest.mark` prefix.
@pytest.mark.skipif(('pandas' not in sys.modules), reason='Pandas is not installed.')
def test_schema_in_dataclass():
    """Round-trip a dataclass holding a FlyteSchema through literal conversion.

    Writes a small dataframe into the schema, converts the nested dataclass
    to a Flyte literal and back, and checks the value survives unchanged.
    """
    import pandas as pd
    schema = TestSchema()
    df = pd.DataFrame(data={'some_str': ['a', 'b', 'c']})
    schema.open().write(df)
    o = Result(result=InnerResult(number=1, schema=schema), schema=schema)
    ctx = FlyteContext.current_context()
    tf = DataclassTransformer()
    lt = tf.get_literal_type(Result)
    lv = tf.to_literal(ctx, o, Result, lt)
    ot = tf.to_python_value(ctx, lv=lv, expected_python_type=Result)
    assert (o == ot)
def common_config(request, docker_mount_base_dir) -> config.Common:
    """Build a `config.Common` for tests from the loaded FACT configuration.

    Uses the test databases, /tmp paths and dedicated test log files, and
    applies any overrides declared via the `common_config_overwrite` marker.
    Raises ValueError if the marker tries to change docker_mount_base_dir.
    """
    overwrite_config = merge_markers(request, 'common_config_overwrite', dict)
    if ('docker_mount_base_dir' in overwrite_config):
        raise ValueError('docker-mount-base-dir may not be changed with `.common_config_overwrite`')
    config.load()
    # Redis: point both databases at the test db; pass the password only if set.
    redis_section = {'fact_db': config.common.redis.test_db, 'test_db': config.common.redis.test_db, 'host': config.common.redis.host, 'port': config.common.redis.port}
    if (config.common.redis.password is not None):
        redis_section['password'] = config.common.redis.password
    logging_section = {'file_backend': '/tmp/fact_tests_backend.log', 'file_frontend': '/tmp/fact_tests_frontend.log', 'file_database': '/tmp/fact_tests_database.log', 'level': 'DEBUG'}
    # Postgres: same credentials as the real config, but the test database.
    pg = config.common.postgres
    postgres_section = {'server': pg.server, 'port': pg.port, 'database': pg.test_database, 'test_database': pg.test_database, 'ro_user': pg.ro_user, 'ro_pw': pg.ro_pw, 'rw_user': pg.rw_user, 'rw_pw': pg.rw_pw, 'del_user': pg.del_user, 'del_pw': pg.del_pw, 'admin_user': pg.admin_user, 'admin_pw': pg.admin_pw}
    preset_section = {'default': {'name': 'default', 'plugins': []}, 'minimal': {'name': 'minimal', 'plugins': []}}
    test_config = {'temp_dir_path': '/tmp', 'docker_mount_base_dir': docker_mount_base_dir, 'redis': redis_section, 'logging': logging_section, 'postgres': postgres_section, 'analysis_preset': preset_section}
    return config.Common(**deep_update(test_config, overwrite_config))
def gridproperties_dataframe(gridproperties: Iterable[GridProperties], grid: (Grid | None)=None, activeonly: bool=True, ijk: bool=False, xyz: bool=False, doubleformat: bool=False) -> pd.DataFrame:
    """Return a pandas DataFrame with one column per grid property.

    Args:
        gridproperties: Properties to export; any iterable, including a
            one-shot generator.
        grid: Grid used for IJK/XYZ geometry columns (required for some modes).
        activeonly: Only include active cells; otherwise include all cells
            (with an ACTNUM column in ijk mode).
        ijk: Add IX/JY/KZ index columns.
        xyz: Add X_UTME/Y_UTMN/Z_TVDSS coordinate columns (requires grid).
        doubleformat: Use float64 for property columns instead of float32.

    Raises:
        ValueError: If requested geometry columns need a grid but none is given.
    """
    # Materialize the iterable exactly once: it is needed both for the IJK
    # fallback below and for the property loop. BUGFIX: the property loop
    # previously re-iterated `gridproperties`, which is already exhausted
    # here for generator inputs, silently producing no property columns.
    proplist = list(gridproperties)
    dataframe_dict = {}
    if ijk:
        if activeonly:
            if grid:
                (ix, jy, kz) = _grid_etc1.get_ijk(grid)
                dataframe_dict['IX'] = ix.get_active_npvalues1d()
                dataframe_dict['JY'] = jy.get_active_npvalues1d()
                dataframe_dict['KZ'] = kz.get_active_npvalues1d()
            elif proplist:
                # No grid given: derive the IJK indices from the first property.
                (ix, jy, kz) = _grid_etc1.get_ijk(proplist[0])
                dataframe_dict['IX'] = ix.get_active_npvalues1d()
                dataframe_dict['JY'] = jy.get_active_npvalues1d()
                dataframe_dict['KZ'] = kz.get_active_npvalues1d()
        else:
            if (not grid):
                raise ValueError('You ask for active_only but no Grid is present. Use grid=...')
            act = grid.get_actnum(dual=True)
            (ix, jy, kz) = grid.get_ijk(asmasked=False)
            dataframe_dict['ACTNUM'] = act.values1d
            dataframe_dict['IX'] = ix.values1d
            dataframe_dict['JY'] = jy.values1d
            dataframe_dict['KZ'] = kz.values1d
    if xyz:
        if (not grid):
            raise ValueError('You ask for xyz but no Grid is present. Use grid=...')
        (xc, yc, zc) = grid.get_xyz(asmasked=activeonly)
        if activeonly:
            dataframe_dict['X_UTME'] = xc.get_active_npvalues1d()
            dataframe_dict['Y_UTMN'] = yc.get_active_npvalues1d()
            dataframe_dict['Z_TVDSS'] = zc.get_active_npvalues1d()
        else:
            dataframe_dict['X_UTME'] = xc.values1d
            dataframe_dict['Y_UTMN'] = yc.values1d
            dataframe_dict['Z_TVDSS'] = zc.values1d
    for prop in proplist:
        if activeonly:
            vector = prop.get_active_npvalues1d()
        else:
            vector = prop.values1d.copy()
        # Fill masked (inactive) cells: 0 for discrete codes, NaN for floats.
        if prop.isdiscrete:
            vector = vector.filled(fill_value=0)
        else:
            vector = vector.filled(fill_value=np.nan)
        vector = vector.astype(np.float64 if doubleformat else np.float32)
        dataframe_dict[prop.name] = vector
    return pd.DataFrame.from_dict(dataframe_dict)
class CmdUnconnectedCreate(MuxCommand):
    """Create a new account from the unconnected (login) screen.

    Usage: create "<accountname>" <email> <password>
    """
    key = 'create'
    aliases = ['cre', 'cr']
    locks = 'cmd:all()'

    def parse(self):
        """Split raw input into (username, email, password) in self.accountinfo.

        A username containing spaces consumes all leading tokens; the last two
        tokens are always taken as email and password.
        """
        super().parse()
        self.accountinfo = []
        if (len(self.arglist) < 3):
            # Too few arguments; func() will report usage via its ValueError path.
            return
        if (len(self.arglist) > 3):
            # Multi-word username: peel email/password off the end.
            password = self.arglist.pop()
            email = self.arglist.pop()
            username = ' '.join(self.arglist)
        else:
            (username, email, password) = self.arglist
        # Strip surrounding quotes from the username.
        username = username.replace('"', '')
        username = username.replace("'", '')
        self.accountinfo = (username, email, password)

    def func(self):
        """Validate the parsed input, confirm with the player, create the account.

        Runs as a generator command: `yield` prompts the player and resumes
        with their answer.
        """
        Account = class_from_module(settings.BASE_ACCOUNT_TYPECLASS)
        address = self.session.address
        session = self.caller
        try:
            (username, email, password) = self.accountinfo
        except ValueError:
            # parse() left an empty list: input did not have three parts.
            string = '\n\r Usage (without <>): create "<accountname>" <email> <password>'
            session.msg(string)
            return
        if ((not email) or (not password)):
            session.msg('\n\r You have to supply an e-mail address followed by a password.')
            return
        if (not utils.validate_email_address(email)):
            session.msg(("'%s' is not a valid e-mail address." % email))
            return
        non_normalized_username = username
        username = Account.normalize_username(username)
        if (non_normalized_username != username):
            session.msg('Note: your username was normalized to strip spaces and remove characters that could be visually confusing.')
        answer = (yield f'''You want to create an account '{username}' with email '{email}' and password '{password}'.
Is this what you intended? [Y]/N?''')
        if (answer.lower() in ('n', 'no')):
            session.msg('Aborted. If your user name contains spaces, surround it by quotes.')
            return
        (account, errors) = Account.create(username=username, email=email, password=password, ip=address, session=session)
        if account:
            string = "A new account '%s' was created. Welcome!"
            if (' ' in username):
                string += '\n\nYou can now log in with the command \'connect "%s" <your password>\'.'
            else:
                string += "\n\nYou can now log with the command 'connect %s <your password>'."
            session.msg((string % (username, username)))
        else:
            # Account.create failed; show the collected error messages in red.
            session.msg(('|R%s|n' % '\n'.join(errors)))
def test_transformer_pipeline_tagger_internal():
    """Train a transformer-backed tagger and verify its transformer tensors
    round-trip through to_disk/from_disk, while a freshly initialized
    pipeline produces different tensors."""
    orig_config = Config().from_str(cfg_string)
    nlp = util.load_model_from_config(orig_config, auto_fill=True, validate=True)
    assert nlp.pipe_names == ['tagger']
    tagger = nlp.get_pipe('tagger')
    tagger_trf = tagger.model.get_ref('tok2vec').layers[0]
    assert isinstance(tagger_trf, Model)
    train_examples = [Example.from_dict(nlp.make_doc(text), annotations) for (text, annotations) in TRAIN_DATA]
    for (_, annotations) in TRAIN_DATA:
        for tag in annotations['tags']:
            tagger.add_label(tag)
    optimizer = nlp.initialize(lambda: train_examples)
    for _ in range(2):
        nlp.update(train_examples, sgd=optimizer, losses={})
    doc = nlp("We're interested at underwater basket weaving.")
    doc_tensor = tagger_trf.predict([doc])
    with make_tempdir() as d:
        file_path = d / 'trained_nlp'
        nlp.to_disk(file_path)
        # A re-initialized (untrained) pipeline must NOT reproduce the tensors.
        nlp2 = util.load_model_from_config(orig_config, auto_fill=True, validate=True)
        nlp2.initialize(lambda: train_examples)
        doc2 = nlp2("We're interested at underwater basket weaving.")
        tagger_trf2 = nlp2.get_pipe('tagger').model.get_ref('tok2vec').layers[0]
        doc_tensor2 = tagger_trf2.predict([doc2])
        with pytest.raises(AssertionError):
            _assert_equal_tensors(doc_tensor2.doc_data[0].tensors, doc_tensor.doc_data[0].tensors)
        # Loading the trained pipeline back from disk must reproduce them exactly.
        nlp3 = util.load_model_from_config(orig_config, auto_fill=True, validate=True)
        nlp3.from_disk(file_path)
        doc3 = nlp3("We're interested at underwater basket weaving.")
        tagger_trf3 = nlp3.get_pipe('tagger').model.get_ref('tok2vec').layers[0]
        doc_tensor3 = tagger_trf3.predict([doc3])
        _assert_equal_tensors(doc_tensor3.doc_data[0].tensors, doc_tensor.doc_data[0].tensors)
class LightPeerChain(PeerSubscriber, Service, BaseLightPeerChain):
    """Light-client (LES) chain: fetch headers, bodies, receipts, accounts
    and contract code on demand from connected LES peers.

    Replies are matched to requests by request_id; responses that fail
    validation cause the offending peer to be disconnected and the request
    retried against another peer.

    BUGFIX notes: the ``@_cache``/``@_timeout`` decorators on the ``coro_*``
    methods previously appeared as bare expressions (missing the ``@``), so
    neither caching nor timeouts applied; and one BadLESResponse f-string
    used doubled braces, emitting literal ``{peer}`` text instead of values.
    """
    reply_timeout = REPLY_TIMEOUT
    headerdb: BaseAsyncHeaderDB = None
    _pending_replies: 'weakref.WeakValueDictionary[int, asyncio.Future[CommandAPI[Any]]]'

    def __init__(self, headerdb: BaseAsyncHeaderDB, peer_pool: LESPeerPool) -> None:
        PeerSubscriber.__init__(self)
        self.logger = get_logger('trinity.sync.light.LightPeerChain')
        self.headerdb = headerdb
        self.peer_pool = peer_pool
        # Weak values: when a waiter future is dropped (e.g. after a timeout),
        # its entry disappears automatically instead of leaking.
        self._pending_replies = weakref.WeakValueDictionary()

    # Subscribe to every LES command; run() filters by request_id.
    subscription_msg_types: FrozenSet[Type[CommandAPI[Any]]] = frozenset({BaseCommand})
    msg_queue_maxsize = 500

    async def run(self) -> None:
        """Dispatch incoming peer messages to their pending reply futures."""
        with self.subscribe(self.peer_pool):
            while self.manager.is_running:
                (peer, cmd) = (await self.msg_queue.get())
                request_id = getattr(cmd.payload, 'request_id', None)
                if ((request_id is not None) and (request_id in self._pending_replies)):
                    self._pending_replies.pop(request_id).set_result(cmd)

    async def _wait_for_reply(self, request_id: int) -> CommandAPI[Any]:
        # Register a future keyed by request_id and wait (bounded) for run()
        # to resolve it with the matching reply command.
        fut: 'asyncio.Future[CommandAPI[Any]]' = asyncio.Future()
        self._pending_replies[request_id] = fut
        return (await asyncio.wait_for(fut, timeout=self.reply_timeout))

    @_cache(maxsize=1024, cache_exceptions=False)
    @_timeout(COMPLETION_TIMEOUT)
    async def coro_get_block_header_by_hash(self, block_hash: Hash32) -> BlockHeader:
        """Return the header with *block_hash*, retrying across peers."""
        return (await self._retry_on_bad_response(partial(self._get_block_header_by_hash, block_hash)))

    @_cache(maxsize=1024, cache_exceptions=False)
    @_timeout(COMPLETION_TIMEOUT)
    async def coro_get_block_body_by_hash(self, block_hash: Hash32) -> BlockBody:
        """Return the block body with *block_hash* from the best peer."""
        peer = cast(LESPeer, self.peer_pool.highest_td_peer)
        self.logger.debug('Fetching block %s from %s', encode_hex(block_hash), peer)
        request_id = peer.les_api.send_get_block_bodies([block_hash])
        block_bodies = (await self._wait_for_reply(request_id))
        if (not block_bodies.payload.bodies):
            raise BlockNotFound(f'Peer {peer} has no block with hash {block_hash.hex()}')
        return block_bodies.payload.bodies[0]

    @_cache(maxsize=1024, cache_exceptions=False)
    @_timeout(COMPLETION_TIMEOUT)
    async def coro_get_receipts(self, block_hash: Hash32) -> List[Receipt]:
        """Return the receipts of the block with *block_hash* from the best peer."""
        peer = cast(LESPeer, self.peer_pool.highest_td_peer)
        self.logger.debug('Fetching %s receipts from %s', encode_hex(block_hash), peer)
        request_id = peer.les_api.send_get_receipts((block_hash,))
        receipts = (await self._wait_for_reply(request_id))
        if (not receipts.payload.receipts):
            raise BlockNotFound(f'No block with hash {block_hash.hex()} found')
        return receipts.payload.receipts[0]

    @_cache(maxsize=1024, cache_exceptions=False)
    @_timeout(COMPLETION_TIMEOUT)
    async def coro_get_account(self, block_hash: Hash32, address: ETHAddress) -> Account:
        """Return the account at *address* as of *block_hash*, with proof checking."""
        return (await self._retry_on_bad_response(partial(self._get_account_from_peer, block_hash, address)))

    async def _get_account_from_peer(self, block_hash: Hash32, address: ETHAddress, peer: LESPeer) -> Account:
        # Ask the peer for a Merkle proof and verify it against the header's
        # state root before decoding the account.
        state_key = keccak(address)
        proof = (await self._get_proof(peer, block_hash, state_key=state_key, storage_key=None))
        header = (await self._get_block_header_by_hash(block_hash, peer))
        try:
            rlp_account = HexaryTrie.get_from_proof(header.state_root, state_key, proof)
        except BadTrieProof as exc:
            raise BadLESResponse(f'Peer {peer} returned an invalid proof for account {encode_hex(address)} at block {encode_hex(block_hash)}') from exc
        return rlp.decode(rlp_account, sedes=Account)

    @_cache(maxsize=1024, cache_exceptions=False)
    @_timeout(COMPLETION_TIMEOUT)
    async def coro_get_contract_code(self, block_hash: Hash32, address: ETHAddress) -> bytes:
        """Return the contract bytecode at *address* as of *block_hash*."""
        try:
            account = (await self.coro_get_account(block_hash, address))
        except HeaderNotFound as exc:
            raise NoEligiblePeers(f'Our best peer does not have header {block_hash.hex()}') from exc
        code_hash = account.code_hash
        return (await self._retry_on_bad_response(partial(self._get_contract_code_from_peer, block_hash, address, code_hash)))

    async def _get_contract_code_from_peer(self, block_hash: Hash32, address: ETHAddress, code_hash: Hash32, peer: LESPeer) -> bytes:
        # Fetch the code and verify it hashes to the account's code_hash;
        # empty replies need extra disambiguation (truly empty vs. bad peer).
        request_id = peer.les_api.send_get_contract_code(block_hash, keccak(address))
        contract_codes = (await self._wait_for_reply(request_id))
        if (not contract_codes.payload.codes):
            bytecode = b''
        else:
            bytecode = contract_codes.payload.codes[0]
        if (code_hash == keccak(bytecode)):
            return bytecode
        elif (bytecode == b''):
            (await self._raise_for_empty_code(block_hash, address, code_hash, peer))
            raise RuntimeError('Unreachable, _raise_for_empty_code must raise its own exception')
        else:
            # BUGFIX: braces were doubled here, producing literal '{peer}' text.
            raise BadLESResponse(f'Peer {peer} sent code {encode_hex(bytecode)} that did not match hash {encode_hex(code_hash)} in account {encode_hex(address)}')

    async def _raise_for_empty_code(self, block_hash: Hash32, address: ETHAddress, code_hash: Hash32, peer: LESPeer) -> None:
        """Decide whether an empty-code reply means a bad peer or an ineligible one."""
        try:
            header = (await self._get_block_header_by_hash(block_hash, peer))
        except HeaderNotFound:
            raise NoEligiblePeers(f'Our best peer does not have the header {block_hash.hex()}')
        head_number = peer.head_info.head_number
        if ((head_number - header.block_number) > MAX_REORG_DEPTH):
            # Deep enough that the block should be canonical if valid.
            if ((await self.headerdb.coro_get_canonical_block_hash(header.block_number)) == block_hash):
                raise BadLESResponse(f'Peer {peer} sent empty code that did not match hash {encode_hex(code_hash)} in account {encode_hex(address)}')
            else:
                raise NoEligiblePeers(f'Our best peer does not have the non-canonical header {block_hash.hex()}')
        elif ((head_number - header.block_number) < 0):
            raise NoEligiblePeers(f"Our best peer's head does include header {block_hash.hex()}")
        else:
            raise NoEligiblePeers(f'Peer {peer} claims to be ahead of {header}, but returned empty code with hash {code_hash.hex()}. It is on number {head_number}, maybe an uncle. Retry with an older block hash.')

    async def _get_block_header_by_hash(self, block_hash: Hash32, peer: LESPeer) -> BlockHeaderAPI:
        self.logger.debug('Fetching header %s from %s', encode_hex(block_hash), peer)
        max_headers = 1
        headers = (await peer.chain_api.get_block_headers(block_hash, max_headers, skip=0, reverse=False))
        if (not headers):
            raise HeaderNotFound(f'Peer {peer} has no block with hash {block_hash.hex()}')
        header = headers[0]
        if (header.hash != block_hash):
            raise BadLESResponse(f'Received header hash ({header.hex_hash}) does not match what we requested ({encode_hex(block_hash)})')
        return header

    async def _get_proof(self, peer: LESPeer, block_hash: Hash32, state_key: Hash32, storage_key: Optional[Hash32], from_level: int=0) -> Tuple[(bytes, ...)]:
        # Handle both LES proof reply formats (V1 carries a list, V2 a single proof).
        request_id = peer.les_api.send_get_proof(block_hash, state_key, storage_key, from_level)
        proofs = (await self._wait_for_reply(request_id))
        if isinstance(proofs, ProofsV1):
            if proofs.payload.proofs:
                return proofs.payload.proofs[0]
            else:
                return ()
        elif isinstance(proofs, ProofsV2):
            return proofs.payload.proof
        else:
            raise Exception('Unreachable')

    async def _retry_on_bad_response(self, make_request_to_peer: Callable[([LESPeer], Any)]) -> Any:
        """Run *make_request_to_peer* against the best peer, disconnecting and
        retrying (up to MAX_REQUEST_ATTEMPTS) on a bad response."""
        for _ in range(MAX_REQUEST_ATTEMPTS):
            try:
                peer = cast(LESPeer, self.peer_pool.highest_td_peer)
            except NoConnectedPeers as exc:
                raise NoEligiblePeers() from exc
            try:
                return (await make_request_to_peer(peer))
            except BadLESResponse as exc:
                self.logger.warning('Disconnecting from peer, because: %s', exc)
                (await peer.disconnect(DisconnectReason.SUBPROTOCOL_ERROR))
        raise asyncio.TimeoutError(f'Could not complete peer request in {MAX_REQUEST_ATTEMPTS} attempts')
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.