code
stringlengths 75
104k
| code_sememe
stringlengths 47
309k
| token_type
stringlengths 215
214k
| code_dependency
stringlengths 75
155k
|
|---|---|---|---|
def update_extent_from_map_canvas(self):
"""Update extent value in GUI based from value in map.
.. note:: Delegates to update_extent()
"""
self.bounding_box_group.setTitle(
self.tr('Bounding box from the map canvas'))
# Get the extent as [xmin, ymin, xmax, ymax]
extent = viewport_geo_array(self.iface.mapCanvas())
self.update_extent(extent)
|
def function[update_extent_from_map_canvas, parameter[self]]:
constant[Update extent value in GUI based from value in map.
.. note:: Delegates to update_extent()
]
call[name[self].bounding_box_group.setTitle, parameter[call[name[self].tr, parameter[constant[Bounding box from the map canvas]]]]]
variable[extent] assign[=] call[name[viewport_geo_array], parameter[call[name[self].iface.mapCanvas, parameter[]]]]
call[name[self].update_extent, parameter[name[extent]]]
|
keyword[def] identifier[update_extent_from_map_canvas] ( identifier[self] ):
literal[string]
identifier[self] . identifier[bounding_box_group] . identifier[setTitle] (
identifier[self] . identifier[tr] ( literal[string] ))
identifier[extent] = identifier[viewport_geo_array] ( identifier[self] . identifier[iface] . identifier[mapCanvas] ())
identifier[self] . identifier[update_extent] ( identifier[extent] )
|
def update_extent_from_map_canvas(self):
"""Update extent value in GUI based from value in map.
.. note:: Delegates to update_extent()
"""
self.bounding_box_group.setTitle(self.tr('Bounding box from the map canvas'))
# Get the extent as [xmin, ymin, xmax, ymax]
extent = viewport_geo_array(self.iface.mapCanvas())
self.update_extent(extent)
|
def get_intersections(self):
"""
Return a list of unordered intersection points.
"""
if Real is float:
return list(self.intersections.keys())
else:
return [(float(p[0]), float(p[1])) for p in self.intersections.keys()]
|
def function[get_intersections, parameter[self]]:
constant[
Return a list of unordered intersection points.
]
if compare[name[Real] is name[float]] begin[:]
return[call[name[list], parameter[call[name[self].intersections.keys, parameter[]]]]]
|
keyword[def] identifier[get_intersections] ( identifier[self] ):
literal[string]
keyword[if] identifier[Real] keyword[is] identifier[float] :
keyword[return] identifier[list] ( identifier[self] . identifier[intersections] . identifier[keys] ())
keyword[else] :
keyword[return] [( identifier[float] ( identifier[p] [ literal[int] ]), identifier[float] ( identifier[p] [ literal[int] ])) keyword[for] identifier[p] keyword[in] identifier[self] . identifier[intersections] . identifier[keys] ()]
|
def get_intersections(self):
"""
Return a list of unordered intersection points.
"""
if Real is float:
return list(self.intersections.keys()) # depends on [control=['if'], data=[]]
else:
return [(float(p[0]), float(p[1])) for p in self.intersections.keys()]
|
def email_embed_image(email, img_content_id, img_data):
"""
email is a django.core.mail.EmailMessage object
"""
img = MIMEImage(img_data)
img.add_header('Content-ID', '<%s>' % img_content_id)
img.add_header('Content-Disposition', 'inline')
email.attach(img)
|
def function[email_embed_image, parameter[email, img_content_id, img_data]]:
constant[
email is a django.core.mail.EmailMessage object
]
variable[img] assign[=] call[name[MIMEImage], parameter[name[img_data]]]
call[name[img].add_header, parameter[constant[Content-ID], binary_operation[constant[<%s>] <ast.Mod object at 0x7da2590d6920> name[img_content_id]]]]
call[name[img].add_header, parameter[constant[Content-Disposition], constant[inline]]]
call[name[email].attach, parameter[name[img]]]
|
keyword[def] identifier[email_embed_image] ( identifier[email] , identifier[img_content_id] , identifier[img_data] ):
literal[string]
identifier[img] = identifier[MIMEImage] ( identifier[img_data] )
identifier[img] . identifier[add_header] ( literal[string] , literal[string] % identifier[img_content_id] )
identifier[img] . identifier[add_header] ( literal[string] , literal[string] )
identifier[email] . identifier[attach] ( identifier[img] )
|
def email_embed_image(email, img_content_id, img_data):
"""
email is a django.core.mail.EmailMessage object
"""
img = MIMEImage(img_data)
img.add_header('Content-ID', '<%s>' % img_content_id)
img.add_header('Content-Disposition', 'inline')
email.attach(img)
|
def get_lang_class(lang):
"""Import and load a Language class.
lang (unicode): Two-letter language code, e.g. 'en'.
RETURNS (Language): Language class.
"""
global LANGUAGES
# Check if an entry point is exposed for the language code
entry_point = get_entry_point("spacy_languages", lang)
if entry_point is not None:
LANGUAGES[lang] = entry_point
return entry_point
if lang not in LANGUAGES:
try:
module = importlib.import_module(".lang.%s" % lang, "spacy")
except ImportError as err:
raise ImportError(Errors.E048.format(lang=lang, err=err))
LANGUAGES[lang] = getattr(module, module.__all__[0])
return LANGUAGES[lang]
|
def function[get_lang_class, parameter[lang]]:
constant[Import and load a Language class.
lang (unicode): Two-letter language code, e.g. 'en'.
RETURNS (Language): Language class.
]
<ast.Global object at 0x7da1b1ef8820>
variable[entry_point] assign[=] call[name[get_entry_point], parameter[constant[spacy_languages], name[lang]]]
if compare[name[entry_point] is_not constant[None]] begin[:]
call[name[LANGUAGES]][name[lang]] assign[=] name[entry_point]
return[name[entry_point]]
if compare[name[lang] <ast.NotIn object at 0x7da2590d7190> name[LANGUAGES]] begin[:]
<ast.Try object at 0x7da1b1efbaf0>
call[name[LANGUAGES]][name[lang]] assign[=] call[name[getattr], parameter[name[module], call[name[module].__all__][constant[0]]]]
return[call[name[LANGUAGES]][name[lang]]]
|
keyword[def] identifier[get_lang_class] ( identifier[lang] ):
literal[string]
keyword[global] identifier[LANGUAGES]
identifier[entry_point] = identifier[get_entry_point] ( literal[string] , identifier[lang] )
keyword[if] identifier[entry_point] keyword[is] keyword[not] keyword[None] :
identifier[LANGUAGES] [ identifier[lang] ]= identifier[entry_point]
keyword[return] identifier[entry_point]
keyword[if] identifier[lang] keyword[not] keyword[in] identifier[LANGUAGES] :
keyword[try] :
identifier[module] = identifier[importlib] . identifier[import_module] ( literal[string] % identifier[lang] , literal[string] )
keyword[except] identifier[ImportError] keyword[as] identifier[err] :
keyword[raise] identifier[ImportError] ( identifier[Errors] . identifier[E048] . identifier[format] ( identifier[lang] = identifier[lang] , identifier[err] = identifier[err] ))
identifier[LANGUAGES] [ identifier[lang] ]= identifier[getattr] ( identifier[module] , identifier[module] . identifier[__all__] [ literal[int] ])
keyword[return] identifier[LANGUAGES] [ identifier[lang] ]
|
def get_lang_class(lang):
"""Import and load a Language class.
lang (unicode): Two-letter language code, e.g. 'en'.
RETURNS (Language): Language class.
"""
global LANGUAGES
# Check if an entry point is exposed for the language code
entry_point = get_entry_point('spacy_languages', lang)
if entry_point is not None:
LANGUAGES[lang] = entry_point
return entry_point # depends on [control=['if'], data=['entry_point']]
if lang not in LANGUAGES:
try:
module = importlib.import_module('.lang.%s' % lang, 'spacy') # depends on [control=['try'], data=[]]
except ImportError as err:
raise ImportError(Errors.E048.format(lang=lang, err=err)) # depends on [control=['except'], data=['err']]
LANGUAGES[lang] = getattr(module, module.__all__[0]) # depends on [control=['if'], data=['lang', 'LANGUAGES']]
return LANGUAGES[lang]
|
def get_attention(config: AttentionConfig, max_seq_len: int, prefix: str = C.ATTENTION_PREFIX) -> 'Attention':
"""
Returns an Attention instance based on attention_type.
:param config: Attention configuration.
:param max_seq_len: Maximum length of source sequences.
:param prefix: Name prefix.
:return: Instance of Attention.
"""
att_cls = Attention.get_attention_cls(config.type)
params = config.__dict__.copy()
params.pop('_frozen')
params['max_seq_len'] = max_seq_len
params['prefix'] = prefix
return _instantiate(att_cls, params)
|
def function[get_attention, parameter[config, max_seq_len, prefix]]:
constant[
Returns an Attention instance based on attention_type.
:param config: Attention configuration.
:param max_seq_len: Maximum length of source sequences.
:param prefix: Name prefix.
:return: Instance of Attention.
]
variable[att_cls] assign[=] call[name[Attention].get_attention_cls, parameter[name[config].type]]
variable[params] assign[=] call[name[config].__dict__.copy, parameter[]]
call[name[params].pop, parameter[constant[_frozen]]]
call[name[params]][constant[max_seq_len]] assign[=] name[max_seq_len]
call[name[params]][constant[prefix]] assign[=] name[prefix]
return[call[name[_instantiate], parameter[name[att_cls], name[params]]]]
|
keyword[def] identifier[get_attention] ( identifier[config] : identifier[AttentionConfig] , identifier[max_seq_len] : identifier[int] , identifier[prefix] : identifier[str] = identifier[C] . identifier[ATTENTION_PREFIX] )-> literal[string] :
literal[string]
identifier[att_cls] = identifier[Attention] . identifier[get_attention_cls] ( identifier[config] . identifier[type] )
identifier[params] = identifier[config] . identifier[__dict__] . identifier[copy] ()
identifier[params] . identifier[pop] ( literal[string] )
identifier[params] [ literal[string] ]= identifier[max_seq_len]
identifier[params] [ literal[string] ]= identifier[prefix]
keyword[return] identifier[_instantiate] ( identifier[att_cls] , identifier[params] )
|
def get_attention(config: AttentionConfig, max_seq_len: int, prefix: str=C.ATTENTION_PREFIX) -> 'Attention':
"""
Returns an Attention instance based on attention_type.
:param config: Attention configuration.
:param max_seq_len: Maximum length of source sequences.
:param prefix: Name prefix.
:return: Instance of Attention.
"""
att_cls = Attention.get_attention_cls(config.type)
params = config.__dict__.copy()
params.pop('_frozen')
params['max_seq_len'] = max_seq_len
params['prefix'] = prefix
return _instantiate(att_cls, params)
|
def conversions(self):
"""
Returns a string showing the available conversions.
Useful tool in interactive mode.
"""
return "\n".join(str(self.to(unit)) for unit in self.supported_units)
|
def function[conversions, parameter[self]]:
constant[
Returns a string showing the available conversions.
Useful tool in interactive mode.
]
return[call[constant[
].join, parameter[<ast.GeneratorExp object at 0x7da1b21a1600>]]]
|
keyword[def] identifier[conversions] ( identifier[self] ):
literal[string]
keyword[return] literal[string] . identifier[join] ( identifier[str] ( identifier[self] . identifier[to] ( identifier[unit] )) keyword[for] identifier[unit] keyword[in] identifier[self] . identifier[supported_units] )
|
def conversions(self):
"""
Returns a string showing the available conversions.
Useful tool in interactive mode.
"""
return '\n'.join((str(self.to(unit)) for unit in self.supported_units))
|
def _config_chooser_dialog(self, title_text, description):
"""Dialog to select which config shall be exported
:param title_text: Title text
:param description: Description
"""
dialog = Gtk.Dialog(title_text, self.view["preferences_window"],
flags=0, buttons=
(Gtk.STOCK_CANCEL, Gtk.ResponseType.REJECT,
Gtk.STOCK_OK, Gtk.ResponseType.ACCEPT))
label = Gtk.Label(label=description)
label.set_padding(xpad=10, ypad=10)
dialog.vbox.pack_start(label, True, True, 0)
label.show()
self._gui_checkbox = Gtk.CheckButton(label="GUI Config")
dialog.vbox.pack_start(self._gui_checkbox, True, True, 0)
self._gui_checkbox.show()
self._core_checkbox = Gtk.CheckButton(label="Core Config")
self._core_checkbox.show()
dialog.vbox.pack_start(self._core_checkbox, True, True, 0)
response = dialog.run()
dialog.destroy()
return response
|
def function[_config_chooser_dialog, parameter[self, title_text, description]]:
constant[Dialog to select which config shall be exported
:param title_text: Title text
:param description: Description
]
variable[dialog] assign[=] call[name[Gtk].Dialog, parameter[name[title_text], call[name[self].view][constant[preferences_window]]]]
variable[label] assign[=] call[name[Gtk].Label, parameter[]]
call[name[label].set_padding, parameter[]]
call[name[dialog].vbox.pack_start, parameter[name[label], constant[True], constant[True], constant[0]]]
call[name[label].show, parameter[]]
name[self]._gui_checkbox assign[=] call[name[Gtk].CheckButton, parameter[]]
call[name[dialog].vbox.pack_start, parameter[name[self]._gui_checkbox, constant[True], constant[True], constant[0]]]
call[name[self]._gui_checkbox.show, parameter[]]
name[self]._core_checkbox assign[=] call[name[Gtk].CheckButton, parameter[]]
call[name[self]._core_checkbox.show, parameter[]]
call[name[dialog].vbox.pack_start, parameter[name[self]._core_checkbox, constant[True], constant[True], constant[0]]]
variable[response] assign[=] call[name[dialog].run, parameter[]]
call[name[dialog].destroy, parameter[]]
return[name[response]]
|
keyword[def] identifier[_config_chooser_dialog] ( identifier[self] , identifier[title_text] , identifier[description] ):
literal[string]
identifier[dialog] = identifier[Gtk] . identifier[Dialog] ( identifier[title_text] , identifier[self] . identifier[view] [ literal[string] ],
identifier[flags] = literal[int] , identifier[buttons] =
( identifier[Gtk] . identifier[STOCK_CANCEL] , identifier[Gtk] . identifier[ResponseType] . identifier[REJECT] ,
identifier[Gtk] . identifier[STOCK_OK] , identifier[Gtk] . identifier[ResponseType] . identifier[ACCEPT] ))
identifier[label] = identifier[Gtk] . identifier[Label] ( identifier[label] = identifier[description] )
identifier[label] . identifier[set_padding] ( identifier[xpad] = literal[int] , identifier[ypad] = literal[int] )
identifier[dialog] . identifier[vbox] . identifier[pack_start] ( identifier[label] , keyword[True] , keyword[True] , literal[int] )
identifier[label] . identifier[show] ()
identifier[self] . identifier[_gui_checkbox] = identifier[Gtk] . identifier[CheckButton] ( identifier[label] = literal[string] )
identifier[dialog] . identifier[vbox] . identifier[pack_start] ( identifier[self] . identifier[_gui_checkbox] , keyword[True] , keyword[True] , literal[int] )
identifier[self] . identifier[_gui_checkbox] . identifier[show] ()
identifier[self] . identifier[_core_checkbox] = identifier[Gtk] . identifier[CheckButton] ( identifier[label] = literal[string] )
identifier[self] . identifier[_core_checkbox] . identifier[show] ()
identifier[dialog] . identifier[vbox] . identifier[pack_start] ( identifier[self] . identifier[_core_checkbox] , keyword[True] , keyword[True] , literal[int] )
identifier[response] = identifier[dialog] . identifier[run] ()
identifier[dialog] . identifier[destroy] ()
keyword[return] identifier[response]
|
def _config_chooser_dialog(self, title_text, description):
"""Dialog to select which config shall be exported
:param title_text: Title text
:param description: Description
"""
dialog = Gtk.Dialog(title_text, self.view['preferences_window'], flags=0, buttons=(Gtk.STOCK_CANCEL, Gtk.ResponseType.REJECT, Gtk.STOCK_OK, Gtk.ResponseType.ACCEPT))
label = Gtk.Label(label=description)
label.set_padding(xpad=10, ypad=10)
dialog.vbox.pack_start(label, True, True, 0)
label.show()
self._gui_checkbox = Gtk.CheckButton(label='GUI Config')
dialog.vbox.pack_start(self._gui_checkbox, True, True, 0)
self._gui_checkbox.show()
self._core_checkbox = Gtk.CheckButton(label='Core Config')
self._core_checkbox.show()
dialog.vbox.pack_start(self._core_checkbox, True, True, 0)
response = dialog.run()
dialog.destroy()
return response
|
def close_remote(self):
"""Cleans up and closes connection to remote server if defined.
"""
if self._remote:
try:
# first see if remote dir is still there
self._remote.execute('ls %s' % (self._remote_id,))
if self.status != 'Completed':
self.remove()
self._remote.execute('rm -rf %s' % (self._remote_id,))
except RuntimeError:
pass
self._remote.close()
del self._remote
|
def function[close_remote, parameter[self]]:
constant[Cleans up and closes connection to remote server if defined.
]
if name[self]._remote begin[:]
<ast.Try object at 0x7da1b1f9ef80>
call[name[self]._remote.close, parameter[]]
<ast.Delete object at 0x7da1b1f9ed70>
|
keyword[def] identifier[close_remote] ( identifier[self] ):
literal[string]
keyword[if] identifier[self] . identifier[_remote] :
keyword[try] :
identifier[self] . identifier[_remote] . identifier[execute] ( literal[string] %( identifier[self] . identifier[_remote_id] ,))
keyword[if] identifier[self] . identifier[status] != literal[string] :
identifier[self] . identifier[remove] ()
identifier[self] . identifier[_remote] . identifier[execute] ( literal[string] %( identifier[self] . identifier[_remote_id] ,))
keyword[except] identifier[RuntimeError] :
keyword[pass]
identifier[self] . identifier[_remote] . identifier[close] ()
keyword[del] identifier[self] . identifier[_remote]
|
def close_remote(self):
"""Cleans up and closes connection to remote server if defined.
"""
if self._remote:
try:
# first see if remote dir is still there
self._remote.execute('ls %s' % (self._remote_id,))
if self.status != 'Completed':
self.remove() # depends on [control=['if'], data=[]]
self._remote.execute('rm -rf %s' % (self._remote_id,)) # depends on [control=['try'], data=[]]
except RuntimeError:
pass # depends on [control=['except'], data=[]]
self._remote.close()
del self._remote # depends on [control=['if'], data=[]]
|
def from_location(cls, location, north_angle=0, daylight_saving_period=None):
"""Create a sun path from a LBlocation."""
location = Location.from_location(location)
return cls(location.latitude, location.longitude,
location.time_zone, north_angle, daylight_saving_period)
|
def function[from_location, parameter[cls, location, north_angle, daylight_saving_period]]:
constant[Create a sun path from a LBlocation.]
variable[location] assign[=] call[name[Location].from_location, parameter[name[location]]]
return[call[name[cls], parameter[name[location].latitude, name[location].longitude, name[location].time_zone, name[north_angle], name[daylight_saving_period]]]]
|
keyword[def] identifier[from_location] ( identifier[cls] , identifier[location] , identifier[north_angle] = literal[int] , identifier[daylight_saving_period] = keyword[None] ):
literal[string]
identifier[location] = identifier[Location] . identifier[from_location] ( identifier[location] )
keyword[return] identifier[cls] ( identifier[location] . identifier[latitude] , identifier[location] . identifier[longitude] ,
identifier[location] . identifier[time_zone] , identifier[north_angle] , identifier[daylight_saving_period] )
|
def from_location(cls, location, north_angle=0, daylight_saving_period=None):
"""Create a sun path from a LBlocation."""
location = Location.from_location(location)
return cls(location.latitude, location.longitude, location.time_zone, north_angle, daylight_saving_period)
|
def delete_saved_search(self, id, **kwargs): # noqa: E501
"""Delete a specific saved search # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_saved_search(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: (required)
:return: ResponseContainerSavedSearch
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.delete_saved_search_with_http_info(id, **kwargs) # noqa: E501
else:
(data) = self.delete_saved_search_with_http_info(id, **kwargs) # noqa: E501
return data
|
def function[delete_saved_search, parameter[self, id]]:
constant[Delete a specific saved search # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_saved_search(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: (required)
:return: ResponseContainerSavedSearch
If the method is called asynchronously,
returns the request thread.
]
call[name[kwargs]][constant[_return_http_data_only]] assign[=] constant[True]
if call[name[kwargs].get, parameter[constant[async_req]]] begin[:]
return[call[name[self].delete_saved_search_with_http_info, parameter[name[id]]]]
|
keyword[def] identifier[delete_saved_search] ( identifier[self] , identifier[id] ,** identifier[kwargs] ):
literal[string]
identifier[kwargs] [ literal[string] ]= keyword[True]
keyword[if] identifier[kwargs] . identifier[get] ( literal[string] ):
keyword[return] identifier[self] . identifier[delete_saved_search_with_http_info] ( identifier[id] ,** identifier[kwargs] )
keyword[else] :
( identifier[data] )= identifier[self] . identifier[delete_saved_search_with_http_info] ( identifier[id] ,** identifier[kwargs] )
keyword[return] identifier[data]
|
def delete_saved_search(self, id, **kwargs): # noqa: E501
'Delete a specific saved search # noqa: E501\n\n # noqa: E501\n This method makes a synchronous HTTP request by default. To make an\n asynchronous HTTP request, please pass async_req=True\n >>> thread = api.delete_saved_search(id, async_req=True)\n >>> result = thread.get()\n\n :param async_req bool\n :param str id: (required)\n :return: ResponseContainerSavedSearch\n If the method is called asynchronously,\n returns the request thread.\n '
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.delete_saved_search_with_http_info(id, **kwargs) # noqa: E501 # depends on [control=['if'], data=[]]
else:
data = self.delete_saved_search_with_http_info(id, **kwargs) # noqa: E501
return data
|
def list(self, filter_args=None):
"""
List the faked resources of this manager.
Parameters:
filter_args (dict):
Filter arguments. `None` causes no filtering to happen. See
:meth:`~zhmcclient.BaseManager.list()` for details.
Returns:
list of FakedBaseResource: The faked resource objects of this
manager.
"""
res = list()
for oid in self._resources:
resource = self._resources[oid]
if self._matches_filters(resource, filter_args):
res.append(resource)
return res
|
def function[list, parameter[self, filter_args]]:
constant[
List the faked resources of this manager.
Parameters:
filter_args (dict):
Filter arguments. `None` causes no filtering to happen. See
:meth:`~zhmcclient.BaseManager.list()` for details.
Returns:
list of FakedBaseResource: The faked resource objects of this
manager.
]
variable[res] assign[=] call[name[list], parameter[]]
for taget[name[oid]] in starred[name[self]._resources] begin[:]
variable[resource] assign[=] call[name[self]._resources][name[oid]]
if call[name[self]._matches_filters, parameter[name[resource], name[filter_args]]] begin[:]
call[name[res].append, parameter[name[resource]]]
return[name[res]]
|
keyword[def] identifier[list] ( identifier[self] , identifier[filter_args] = keyword[None] ):
literal[string]
identifier[res] = identifier[list] ()
keyword[for] identifier[oid] keyword[in] identifier[self] . identifier[_resources] :
identifier[resource] = identifier[self] . identifier[_resources] [ identifier[oid] ]
keyword[if] identifier[self] . identifier[_matches_filters] ( identifier[resource] , identifier[filter_args] ):
identifier[res] . identifier[append] ( identifier[resource] )
keyword[return] identifier[res]
|
def list(self, filter_args=None):
"""
List the faked resources of this manager.
Parameters:
filter_args (dict):
Filter arguments. `None` causes no filtering to happen. See
:meth:`~zhmcclient.BaseManager.list()` for details.
Returns:
list of FakedBaseResource: The faked resource objects of this
manager.
"""
res = list()
for oid in self._resources:
resource = self._resources[oid]
if self._matches_filters(resource, filter_args):
res.append(resource) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['oid']]
return res
|
def drawForeground(self, painter, rect, showGrid, showColumns, showRows):
"""
Draws the grid on the inputed painter
:param painter | <QPainter>
rect | <QRect>
showGrid | <bool>
showColumns | <bool>
showRows | <bool>
"""
painter.save()
center = self.buildData('center')
radius = self.buildData('radius') / 2.0
palette = QtGui.QApplication.palette()
pen = QtGui.QPen(palette.color(palette.Text))
pen.setWidthF(0.75)
painter.setBrush(palette.color(palette.Base))
painter.setPen(pen)
painter.setRenderHint(painter.Antialiasing)
painter.drawEllipse(center, radius, radius)
painter.restore()
|
def function[drawForeground, parameter[self, painter, rect, showGrid, showColumns, showRows]]:
constant[
Draws the grid on the inputed painter
:param painter | <QPainter>
rect | <QRect>
showGrid | <bool>
showColumns | <bool>
showRows | <bool>
]
call[name[painter].save, parameter[]]
variable[center] assign[=] call[name[self].buildData, parameter[constant[center]]]
variable[radius] assign[=] binary_operation[call[name[self].buildData, parameter[constant[radius]]] / constant[2.0]]
variable[palette] assign[=] call[name[QtGui].QApplication.palette, parameter[]]
variable[pen] assign[=] call[name[QtGui].QPen, parameter[call[name[palette].color, parameter[name[palette].Text]]]]
call[name[pen].setWidthF, parameter[constant[0.75]]]
call[name[painter].setBrush, parameter[call[name[palette].color, parameter[name[palette].Base]]]]
call[name[painter].setPen, parameter[name[pen]]]
call[name[painter].setRenderHint, parameter[name[painter].Antialiasing]]
call[name[painter].drawEllipse, parameter[name[center], name[radius], name[radius]]]
call[name[painter].restore, parameter[]]
|
keyword[def] identifier[drawForeground] ( identifier[self] , identifier[painter] , identifier[rect] , identifier[showGrid] , identifier[showColumns] , identifier[showRows] ):
literal[string]
identifier[painter] . identifier[save] ()
identifier[center] = identifier[self] . identifier[buildData] ( literal[string] )
identifier[radius] = identifier[self] . identifier[buildData] ( literal[string] )/ literal[int]
identifier[palette] = identifier[QtGui] . identifier[QApplication] . identifier[palette] ()
identifier[pen] = identifier[QtGui] . identifier[QPen] ( identifier[palette] . identifier[color] ( identifier[palette] . identifier[Text] ))
identifier[pen] . identifier[setWidthF] ( literal[int] )
identifier[painter] . identifier[setBrush] ( identifier[palette] . identifier[color] ( identifier[palette] . identifier[Base] ))
identifier[painter] . identifier[setPen] ( identifier[pen] )
identifier[painter] . identifier[setRenderHint] ( identifier[painter] . identifier[Antialiasing] )
identifier[painter] . identifier[drawEllipse] ( identifier[center] , identifier[radius] , identifier[radius] )
identifier[painter] . identifier[restore] ()
|
def drawForeground(self, painter, rect, showGrid, showColumns, showRows):
"""
Draws the grid on the inputed painter
:param painter | <QPainter>
rect | <QRect>
showGrid | <bool>
showColumns | <bool>
showRows | <bool>
"""
painter.save()
center = self.buildData('center')
radius = self.buildData('radius') / 2.0
palette = QtGui.QApplication.palette()
pen = QtGui.QPen(palette.color(palette.Text))
pen.setWidthF(0.75)
painter.setBrush(palette.color(palette.Base))
painter.setPen(pen)
painter.setRenderHint(painter.Antialiasing)
painter.drawEllipse(center, radius, radius)
painter.restore()
|
def get_proxy_manager(self, osid=None, implementation=None, version=None):
"""Finds, loads and instantiates providers of OSID managers.
Providers must conform to an ``OsidManager`` interface. The
interfaces are defined in the OSID enumeration. For all OSID
requests, an instance of ``OsidManager`` that implements the
``OsidManager`` interface is returned. In bindings where
permitted, this can be safely cast into the requested manager.
arg: osid (osid.OSID): represents the OSID
arg: implementation (string): the name of the implementation
arg: version (osid.installation.Version): the minimum
required OSID specification version
return: (osid.OsidProxyManager) - the manager of the service
raise: ConfigurationError - an error in configuring the
implementation
raise: NotFound - the implementation class was not found
raise: NullArgument - ``implementation`` or ``version`` is
``null``
raise: OperationFailed - unable to complete request
raise: Unsupported - ``implementation`` does not support the
requested OSID
*compliance: mandatory -- This method must be implemented.*
*implementation notes*: After finding and instantiating the
requested ``OsidManager,`` providers must invoke
``OsidManager.initialize(OsidRuntimeManager)`` where the
environment is an instance of the current environment that
includes the configuration for the service being initialized.
The ``OsidRuntimeManager`` passed may include information useful
for the configuration such as the identity of the service being
instantiated.
"""
# This implementation assumes that all osid impls reside as seperate
# packages in the dlkit library, so that for instance the proxy manager for an
# OSID = 'osidpackage' in an implementation named 'impl_name' manager can
# be found in the python path for the module: dlkit.impl_name.osid.managers
# Also this implementation currently ignores the OSID specification version.
try:
manager_module = import_module('dlkit.' + implementation + '.' + osid.lower() + '.managers')
except ImportError:
raise NotFound()
try:
proxy_manager = getattr(manager_module, osid.title() + 'ProxyManager')
except AttributeError:
raise Unsupported()
return proxy_manager
|
def function[get_proxy_manager, parameter[self, osid, implementation, version]]:
constant[Finds, loads and instantiates providers of OSID managers.
Providers must conform to an ``OsidManager`` interface. The
interfaces are defined in the OSID enumeration. For all OSID
requests, an instance of ``OsidManager`` that implements the
``OsidManager`` interface is returned. In bindings where
permitted, this can be safely cast into the requested manager.
arg: osid (osid.OSID): represents the OSID
arg: implementation (string): the name of the implementation
arg: version (osid.installation.Version): the minimum
required OSID specification version
return: (osid.OsidProxyManager) - the manager of the service
raise: ConfigurationError - an error in configuring the
implementation
raise: NotFound - the implementation class was not found
raise: NullArgument - ``implementation`` or ``version`` is
``null``
raise: OperationFailed - unable to complete request
raise: Unsupported - ``implementation`` does not support the
requested OSID
*compliance: mandatory -- This method must be implemented.*
*implementation notes*: After finding and instantiating the
requested ``OsidManager,`` providers must invoke
``OsidManager.initialize(OsidRuntimeManager)`` where the
environment is an instance of the current environment that
includes the configuration for the service being initialized.
The ``OsidRuntimeManager`` passed may include information useful
for the configuration such as the identity of the service being
instantiated.
]
<ast.Try object at 0x7da2054a5600>
<ast.Try object at 0x7da20e9540d0>
return[name[proxy_manager]]
|
keyword[def] identifier[get_proxy_manager] ( identifier[self] , identifier[osid] = keyword[None] , identifier[implementation] = keyword[None] , identifier[version] = keyword[None] ):
literal[string]
keyword[try] :
identifier[manager_module] = identifier[import_module] ( literal[string] + identifier[implementation] + literal[string] + identifier[osid] . identifier[lower] ()+ literal[string] )
keyword[except] identifier[ImportError] :
keyword[raise] identifier[NotFound] ()
keyword[try] :
identifier[proxy_manager] = identifier[getattr] ( identifier[manager_module] , identifier[osid] . identifier[title] ()+ literal[string] )
keyword[except] identifier[AttributeError] :
keyword[raise] identifier[Unsupported] ()
keyword[return] identifier[proxy_manager]
|
def get_proxy_manager(self, osid=None, implementation=None, version=None):
"""Finds, loads and instantiates providers of OSID managers.
Providers must conform to an ``OsidManager`` interface. The
interfaces are defined in the OSID enumeration. For all OSID
requests, an instance of ``OsidManager`` that implements the
``OsidManager`` interface is returned. In bindings where
permitted, this can be safely cast into the requested manager.
arg: osid (osid.OSID): represents the OSID
arg: implementation (string): the name of the implementation
arg: version (osid.installation.Version): the minimum
required OSID specification version
return: (osid.OsidProxyManager) - the manager of the service
raise: ConfigurationError - an error in configuring the
implementation
raise: NotFound - the implementation class was not found
raise: NullArgument - ``implementation`` or ``version`` is
``null``
raise: OperationFailed - unable to complete request
raise: Unsupported - ``implementation`` does not support the
requested OSID
*compliance: mandatory -- This method must be implemented.*
*implementation notes*: After finding and instantiating the
requested ``OsidManager,`` providers must invoke
``OsidManager.initialize(OsidRuntimeManager)`` where the
environment is an instance of the current environment that
includes the configuration for the service being initialized.
The ``OsidRuntimeManager`` passed may include information useful
for the configuration such as the identity of the service being
instantiated.
"""
# This implementation assumes that all osid impls reside as seperate
# packages in the dlkit library, so that for instance the proxy manager for an
# OSID = 'osidpackage' in an implementation named 'impl_name' manager can
# be found in the python path for the module: dlkit.impl_name.osid.managers
# Also this implementation currently ignores the OSID specification version.
try:
manager_module = import_module('dlkit.' + implementation + '.' + osid.lower() + '.managers') # depends on [control=['try'], data=[]]
except ImportError:
raise NotFound() # depends on [control=['except'], data=[]]
try:
proxy_manager = getattr(manager_module, osid.title() + 'ProxyManager') # depends on [control=['try'], data=[]]
except AttributeError:
raise Unsupported() # depends on [control=['except'], data=[]]
return proxy_manager
|
def delete_types(self, base_key, out_key, *types):
"""
Method to delete a parameter from a parameter documentation.
This method deletes the given `param` from the `base_key` item in the
:attr:`params` dictionary and creates a new item with the original
documentation without the description of the param. This method works
for ``'Results'`` like sections.
See the :meth:`keep_types` method for an example.
Parameters
----------
base_key: str
key in the :attr:`params` dictionary
out_key: str
Extension for the base key (the final key will be like
``'%s.%s' % (base_key, out_key)``
``*types``
str. The type identifier of which the documentations shall deleted
See Also
--------
delete_params"""
self.params['%s.%s' % (base_key, out_key)] = self.delete_types_s(
self.params[base_key], types)
|
def function[delete_types, parameter[self, base_key, out_key]]:
constant[
Method to delete a parameter from a parameter documentation.
This method deletes the given `param` from the `base_key` item in the
:attr:`params` dictionary and creates a new item with the original
documentation without the description of the param. This method works
for ``'Results'`` like sections.
See the :meth:`keep_types` method for an example.
Parameters
----------
base_key: str
key in the :attr:`params` dictionary
out_key: str
Extension for the base key (the final key will be like
``'%s.%s' % (base_key, out_key)``
``*types``
str. The type identifier of which the documentations shall deleted
See Also
--------
delete_params]
call[name[self].params][binary_operation[constant[%s.%s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1b1b6bfd0>, <ast.Name object at 0x7da1b1b69240>]]]] assign[=] call[name[self].delete_types_s, parameter[call[name[self].params][name[base_key]], name[types]]]
|
keyword[def] identifier[delete_types] ( identifier[self] , identifier[base_key] , identifier[out_key] ,* identifier[types] ):
literal[string]
identifier[self] . identifier[params] [ literal[string] %( identifier[base_key] , identifier[out_key] )]= identifier[self] . identifier[delete_types_s] (
identifier[self] . identifier[params] [ identifier[base_key] ], identifier[types] )
|
def delete_types(self, base_key, out_key, *types):
"""
Method to delete a parameter from a parameter documentation.
This method deletes the given `param` from the `base_key` item in the
:attr:`params` dictionary and creates a new item with the original
documentation without the description of the param. This method works
for ``'Results'`` like sections.
See the :meth:`keep_types` method for an example.
Parameters
----------
base_key: str
key in the :attr:`params` dictionary
out_key: str
Extension for the base key (the final key will be like
``'%s.%s' % (base_key, out_key)``
``*types``
str. The type identifier of which the documentations shall deleted
See Also
--------
delete_params"""
self.params['%s.%s' % (base_key, out_key)] = self.delete_types_s(self.params[base_key], types)
|
def _find_unpurge_targets(desired, **kwargs):
'''
Find packages which are marked to be purged but can't yet be removed
because they are dependencies for other installed packages. These are the
packages which will need to be 'unpurged' because they are part of
pkg.installed states. This really just applies to Debian-based Linuxes.
'''
return [
x for x in desired
if x in __salt__['pkg.list_pkgs'](purge_desired=True, **kwargs)
]
|
def function[_find_unpurge_targets, parameter[desired]]:
constant[
Find packages which are marked to be purged but can't yet be removed
because they are dependencies for other installed packages. These are the
packages which will need to be 'unpurged' because they are part of
pkg.installed states. This really just applies to Debian-based Linuxes.
]
return[<ast.ListComp object at 0x7da1b212c460>]
|
keyword[def] identifier[_find_unpurge_targets] ( identifier[desired] ,** identifier[kwargs] ):
literal[string]
keyword[return] [
identifier[x] keyword[for] identifier[x] keyword[in] identifier[desired]
keyword[if] identifier[x] keyword[in] identifier[__salt__] [ literal[string] ]( identifier[purge_desired] = keyword[True] ,** identifier[kwargs] )
]
|
def _find_unpurge_targets(desired, **kwargs):
"""
Find packages which are marked to be purged but can't yet be removed
because they are dependencies for other installed packages. These are the
packages which will need to be 'unpurged' because they are part of
pkg.installed states. This really just applies to Debian-based Linuxes.
"""
return [x for x in desired if x in __salt__['pkg.list_pkgs'](purge_desired=True, **kwargs)]
|
def fix_config(self, options):
"""
Fixes the options, if necessary. I.e., it adds all required elements to the dictionary.
:param options: the options to fix
:type options: dict
:return: the (potentially) fixed options
:rtype: dict
"""
options = super(Console, self).fix_config(options)
opt = "prefix"
if opt not in options:
options[opt] = ""
if opt not in self.help:
self.help[opt] = "The prefix for the output (string)."
return options
|
def function[fix_config, parameter[self, options]]:
constant[
Fixes the options, if necessary. I.e., it adds all required elements to the dictionary.
:param options: the options to fix
:type options: dict
:return: the (potentially) fixed options
:rtype: dict
]
variable[options] assign[=] call[call[name[super], parameter[name[Console], name[self]]].fix_config, parameter[name[options]]]
variable[opt] assign[=] constant[prefix]
if compare[name[opt] <ast.NotIn object at 0x7da2590d7190> name[options]] begin[:]
call[name[options]][name[opt]] assign[=] constant[]
if compare[name[opt] <ast.NotIn object at 0x7da2590d7190> name[self].help] begin[:]
call[name[self].help][name[opt]] assign[=] constant[The prefix for the output (string).]
return[name[options]]
|
keyword[def] identifier[fix_config] ( identifier[self] , identifier[options] ):
literal[string]
identifier[options] = identifier[super] ( identifier[Console] , identifier[self] ). identifier[fix_config] ( identifier[options] )
identifier[opt] = literal[string]
keyword[if] identifier[opt] keyword[not] keyword[in] identifier[options] :
identifier[options] [ identifier[opt] ]= literal[string]
keyword[if] identifier[opt] keyword[not] keyword[in] identifier[self] . identifier[help] :
identifier[self] . identifier[help] [ identifier[opt] ]= literal[string]
keyword[return] identifier[options]
|
def fix_config(self, options):
"""
Fixes the options, if necessary. I.e., it adds all required elements to the dictionary.
:param options: the options to fix
:type options: dict
:return: the (potentially) fixed options
:rtype: dict
"""
options = super(Console, self).fix_config(options)
opt = 'prefix'
if opt not in options:
options[opt] = '' # depends on [control=['if'], data=['opt', 'options']]
if opt not in self.help:
self.help[opt] = 'The prefix for the output (string).' # depends on [control=['if'], data=['opt']]
return options
|
def row_major(self, value):
"""Validate and set row-major format for multidimensional arrays."""
if value is not None:
if not isinstance(value, bool):
raise TypeError(
'f90nml: error: row_major must be a logical value.')
else:
self._row_major = value
|
def function[row_major, parameter[self, value]]:
constant[Validate and set row-major format for multidimensional arrays.]
if compare[name[value] is_not constant[None]] begin[:]
if <ast.UnaryOp object at 0x7da1b06ca3b0> begin[:]
<ast.Raise object at 0x7da1b06c9510>
|
keyword[def] identifier[row_major] ( identifier[self] , identifier[value] ):
literal[string]
keyword[if] identifier[value] keyword[is] keyword[not] keyword[None] :
keyword[if] keyword[not] identifier[isinstance] ( identifier[value] , identifier[bool] ):
keyword[raise] identifier[TypeError] (
literal[string] )
keyword[else] :
identifier[self] . identifier[_row_major] = identifier[value]
|
def row_major(self, value):
"""Validate and set row-major format for multidimensional arrays."""
if value is not None:
if not isinstance(value, bool):
raise TypeError('f90nml: error: row_major must be a logical value.') # depends on [control=['if'], data=[]]
else:
self._row_major = value # depends on [control=['if'], data=['value']]
|
def stop(self, task_is_resetting=False):
"""
Stop a task. This stops all processes for the task. The approach
is to mark the task as "stopping" , send a SIGTERM to each process,
and schedule a SIGKILL for some time later.
If the legion or task is resetting and a "restart" event is in scope,
that event will be fired rather than sending the SIGTERM. Otherwise,
if a "stop" event is in scope, that event will be fired. In
either case, the SIGKILL escalation will still occur so the
recipient needs to process the event and exit promptly.
Returns True to request a shorter period before the next call,
False if nothing special is needed.
"""
log = self._params.get('log', self._discard)
if self._stopped:
log.debug("'%s' is already stopped", self._name)
return False
now = time.time()
running = len(self.get_pids())
if self._stopping and running == 0:
log.debug("All '%s' processes are now stopped", self._name)
self._reset_state()
self._stopped = now
return False
if self._config_running:
control = self._config_running.get('control')
else:
control = None
if self._terminated:
# These are tasks that have been explicitly terminated but have not yet stopped
#
if self._killed:
log.warning("%d '%s' process%s still running %s after SIGKILL escalation",
running, self._name, ses(running, 'es'), deltafmt(now - self._killed))
elif self._terminated + sigkill_escalation < now:
log.warning("Excalating to SIGKILL with %d '%s' process%s still running",
running, self._name, ses(running, 'es'))
self._signal(signal.SIGKILL)
self._killed = now
else:
log.debug("%d '%s' process%s still running %s after being terminated",
running, self._name, ses(running, 'es'), deltafmt(now - self._terminated))
return True
if self._limit and now > self._limit:
# These are tasks that have a time limit set and it has expired.
# This case falls through to the stop code.
log.info("Stopping task '%s', time limit exceeded %s ago", self._name, deltafmt(now - self._limit))
elif self._stopping and not self._legion.is_exiting():
# These are tasks that are expected to stop soon but have not been explicitly
# terminated. These are typically tasks with 'once' or 'event' controls.
# Unless there is a time limit set, they are allowed to run indefinitely
#
log.debug("%d '%s' '%s' process%s still running %s",
running, self._name, control, ses(running, 'es'), deltafmt(now - self._stopping))
return False
if not self._stopping:
self._stopping = now
self._terminated = now
restart_target = None
stop_target = None
resetting = self._legion.is_resetting() or task_is_resetting
if self._config_running:
for event in self._config_running.get('events', []):
ev_type = self._get(event.get('type'))
if resetting and ev_type == 'restart':
restart_target = self._make_event_target(event, control)
elif ev_type == 'stop':
stop_target = self._make_event_target(event, control)
if restart_target:
log.debug("Restart event on %d '%s' process%s", running, self._name, ses(running, 'es'))
restart_target.handle()
elif stop_target:
log.debug("Stop event on %d '%s' process%s", running, self._name, ses(running, 'es'))
stop_target.handle()
else:
log.debug("Stopping %d '%s' process%s with SIGTERM", running, self._name, ses(running, 'es'))
self._signal(signal.SIGTERM)
return True
|
def function[stop, parameter[self, task_is_resetting]]:
constant[
Stop a task. This stops all processes for the task. The approach
is to mark the task as "stopping" , send a SIGTERM to each process,
and schedule a SIGKILL for some time later.
If the legion or task is resetting and a "restart" event is in scope,
that event will be fired rather than sending the SIGTERM. Otherwise,
if a "stop" event is in scope, that event will be fired. In
either case, the SIGKILL escalation will still occur so the
recipient needs to process the event and exit promptly.
Returns True to request a shorter period before the next call,
False if nothing special is needed.
]
variable[log] assign[=] call[name[self]._params.get, parameter[constant[log], name[self]._discard]]
if name[self]._stopped begin[:]
call[name[log].debug, parameter[constant['%s' is already stopped], name[self]._name]]
return[constant[False]]
variable[now] assign[=] call[name[time].time, parameter[]]
variable[running] assign[=] call[name[len], parameter[call[name[self].get_pids, parameter[]]]]
if <ast.BoolOp object at 0x7da20c6e6e60> begin[:]
call[name[log].debug, parameter[constant[All '%s' processes are now stopped], name[self]._name]]
call[name[self]._reset_state, parameter[]]
name[self]._stopped assign[=] name[now]
return[constant[False]]
if name[self]._config_running begin[:]
variable[control] assign[=] call[name[self]._config_running.get, parameter[constant[control]]]
if name[self]._terminated begin[:]
if name[self]._killed begin[:]
call[name[log].warning, parameter[constant[%d '%s' process%s still running %s after SIGKILL escalation], name[running], name[self]._name, call[name[ses], parameter[name[running], constant[es]]], call[name[deltafmt], parameter[binary_operation[name[now] - name[self]._killed]]]]]
return[constant[True]]
if <ast.BoolOp object at 0x7da20c6e4fd0> begin[:]
call[name[log].info, parameter[constant[Stopping task '%s', time limit exceeded %s ago], name[self]._name, call[name[deltafmt], parameter[binary_operation[name[now] - name[self]._limit]]]]]
if <ast.UnaryOp object at 0x7da20c6e5780> begin[:]
name[self]._stopping assign[=] name[now]
name[self]._terminated assign[=] name[now]
variable[restart_target] assign[=] constant[None]
variable[stop_target] assign[=] constant[None]
variable[resetting] assign[=] <ast.BoolOp object at 0x7da20c6e5b70>
if name[self]._config_running begin[:]
for taget[name[event]] in starred[call[name[self]._config_running.get, parameter[constant[events], list[[]]]]] begin[:]
variable[ev_type] assign[=] call[name[self]._get, parameter[call[name[event].get, parameter[constant[type]]]]]
if <ast.BoolOp object at 0x7da20c992650> begin[:]
variable[restart_target] assign[=] call[name[self]._make_event_target, parameter[name[event], name[control]]]
if name[restart_target] begin[:]
call[name[log].debug, parameter[constant[Restart event on %d '%s' process%s], name[running], name[self]._name, call[name[ses], parameter[name[running], constant[es]]]]]
call[name[restart_target].handle, parameter[]]
return[constant[True]]
|
keyword[def] identifier[stop] ( identifier[self] , identifier[task_is_resetting] = keyword[False] ):
literal[string]
identifier[log] = identifier[self] . identifier[_params] . identifier[get] ( literal[string] , identifier[self] . identifier[_discard] )
keyword[if] identifier[self] . identifier[_stopped] :
identifier[log] . identifier[debug] ( literal[string] , identifier[self] . identifier[_name] )
keyword[return] keyword[False]
identifier[now] = identifier[time] . identifier[time] ()
identifier[running] = identifier[len] ( identifier[self] . identifier[get_pids] ())
keyword[if] identifier[self] . identifier[_stopping] keyword[and] identifier[running] == literal[int] :
identifier[log] . identifier[debug] ( literal[string] , identifier[self] . identifier[_name] )
identifier[self] . identifier[_reset_state] ()
identifier[self] . identifier[_stopped] = identifier[now]
keyword[return] keyword[False]
keyword[if] identifier[self] . identifier[_config_running] :
identifier[control] = identifier[self] . identifier[_config_running] . identifier[get] ( literal[string] )
keyword[else] :
identifier[control] = keyword[None]
keyword[if] identifier[self] . identifier[_terminated] :
keyword[if] identifier[self] . identifier[_killed] :
identifier[log] . identifier[warning] ( literal[string] ,
identifier[running] , identifier[self] . identifier[_name] , identifier[ses] ( identifier[running] , literal[string] ), identifier[deltafmt] ( identifier[now] - identifier[self] . identifier[_killed] ))
keyword[elif] identifier[self] . identifier[_terminated] + identifier[sigkill_escalation] < identifier[now] :
identifier[log] . identifier[warning] ( literal[string] ,
identifier[running] , identifier[self] . identifier[_name] , identifier[ses] ( identifier[running] , literal[string] ))
identifier[self] . identifier[_signal] ( identifier[signal] . identifier[SIGKILL] )
identifier[self] . identifier[_killed] = identifier[now]
keyword[else] :
identifier[log] . identifier[debug] ( literal[string] ,
identifier[running] , identifier[self] . identifier[_name] , identifier[ses] ( identifier[running] , literal[string] ), identifier[deltafmt] ( identifier[now] - identifier[self] . identifier[_terminated] ))
keyword[return] keyword[True]
keyword[if] identifier[self] . identifier[_limit] keyword[and] identifier[now] > identifier[self] . identifier[_limit] :
identifier[log] . identifier[info] ( literal[string] , identifier[self] . identifier[_name] , identifier[deltafmt] ( identifier[now] - identifier[self] . identifier[_limit] ))
keyword[elif] identifier[self] . identifier[_stopping] keyword[and] keyword[not] identifier[self] . identifier[_legion] . identifier[is_exiting] ():
identifier[log] . identifier[debug] ( literal[string] ,
identifier[running] , identifier[self] . identifier[_name] , identifier[control] , identifier[ses] ( identifier[running] , literal[string] ), identifier[deltafmt] ( identifier[now] - identifier[self] . identifier[_stopping] ))
keyword[return] keyword[False]
keyword[if] keyword[not] identifier[self] . identifier[_stopping] :
identifier[self] . identifier[_stopping] = identifier[now]
identifier[self] . identifier[_terminated] = identifier[now]
identifier[restart_target] = keyword[None]
identifier[stop_target] = keyword[None]
identifier[resetting] = identifier[self] . identifier[_legion] . identifier[is_resetting] () keyword[or] identifier[task_is_resetting]
keyword[if] identifier[self] . identifier[_config_running] :
keyword[for] identifier[event] keyword[in] identifier[self] . identifier[_config_running] . identifier[get] ( literal[string] ,[]):
identifier[ev_type] = identifier[self] . identifier[_get] ( identifier[event] . identifier[get] ( literal[string] ))
keyword[if] identifier[resetting] keyword[and] identifier[ev_type] == literal[string] :
identifier[restart_target] = identifier[self] . identifier[_make_event_target] ( identifier[event] , identifier[control] )
keyword[elif] identifier[ev_type] == literal[string] :
identifier[stop_target] = identifier[self] . identifier[_make_event_target] ( identifier[event] , identifier[control] )
keyword[if] identifier[restart_target] :
identifier[log] . identifier[debug] ( literal[string] , identifier[running] , identifier[self] . identifier[_name] , identifier[ses] ( identifier[running] , literal[string] ))
identifier[restart_target] . identifier[handle] ()
keyword[elif] identifier[stop_target] :
identifier[log] . identifier[debug] ( literal[string] , identifier[running] , identifier[self] . identifier[_name] , identifier[ses] ( identifier[running] , literal[string] ))
identifier[stop_target] . identifier[handle] ()
keyword[else] :
identifier[log] . identifier[debug] ( literal[string] , identifier[running] , identifier[self] . identifier[_name] , identifier[ses] ( identifier[running] , literal[string] ))
identifier[self] . identifier[_signal] ( identifier[signal] . identifier[SIGTERM] )
keyword[return] keyword[True]
|
def stop(self, task_is_resetting=False):
"""
Stop a task. This stops all processes for the task. The approach
is to mark the task as "stopping" , send a SIGTERM to each process,
and schedule a SIGKILL for some time later.
If the legion or task is resetting and a "restart" event is in scope,
that event will be fired rather than sending the SIGTERM. Otherwise,
if a "stop" event is in scope, that event will be fired. In
either case, the SIGKILL escalation will still occur so the
recipient needs to process the event and exit promptly.
Returns True to request a shorter period before the next call,
False if nothing special is needed.
"""
log = self._params.get('log', self._discard)
if self._stopped:
log.debug("'%s' is already stopped", self._name)
return False # depends on [control=['if'], data=[]]
now = time.time()
running = len(self.get_pids())
if self._stopping and running == 0:
log.debug("All '%s' processes are now stopped", self._name)
self._reset_state()
self._stopped = now
return False # depends on [control=['if'], data=[]]
if self._config_running:
control = self._config_running.get('control') # depends on [control=['if'], data=[]]
else:
control = None
if self._terminated:
# These are tasks that have been explicitly terminated but have not yet stopped
#
if self._killed:
log.warning("%d '%s' process%s still running %s after SIGKILL escalation", running, self._name, ses(running, 'es'), deltafmt(now - self._killed)) # depends on [control=['if'], data=[]]
elif self._terminated + sigkill_escalation < now:
log.warning("Excalating to SIGKILL with %d '%s' process%s still running", running, self._name, ses(running, 'es'))
self._signal(signal.SIGKILL)
self._killed = now # depends on [control=['if'], data=['now']]
else:
log.debug("%d '%s' process%s still running %s after being terminated", running, self._name, ses(running, 'es'), deltafmt(now - self._terminated))
return True # depends on [control=['if'], data=[]]
if self._limit and now > self._limit:
# These are tasks that have a time limit set and it has expired.
# This case falls through to the stop code.
log.info("Stopping task '%s', time limit exceeded %s ago", self._name, deltafmt(now - self._limit)) # depends on [control=['if'], data=[]]
elif self._stopping and (not self._legion.is_exiting()):
# These are tasks that are expected to stop soon but have not been explicitly
# terminated. These are typically tasks with 'once' or 'event' controls.
# Unless there is a time limit set, they are allowed to run indefinitely
#
log.debug("%d '%s' '%s' process%s still running %s", running, self._name, control, ses(running, 'es'), deltafmt(now - self._stopping))
return False # depends on [control=['if'], data=[]]
if not self._stopping:
self._stopping = now # depends on [control=['if'], data=[]]
self._terminated = now
restart_target = None
stop_target = None
resetting = self._legion.is_resetting() or task_is_resetting
if self._config_running:
for event in self._config_running.get('events', []):
ev_type = self._get(event.get('type'))
if resetting and ev_type == 'restart':
restart_target = self._make_event_target(event, control) # depends on [control=['if'], data=[]]
elif ev_type == 'stop':
stop_target = self._make_event_target(event, control) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['event']] # depends on [control=['if'], data=[]]
if restart_target:
log.debug("Restart event on %d '%s' process%s", running, self._name, ses(running, 'es'))
restart_target.handle() # depends on [control=['if'], data=[]]
elif stop_target:
log.debug("Stop event on %d '%s' process%s", running, self._name, ses(running, 'es'))
stop_target.handle() # depends on [control=['if'], data=[]]
else:
log.debug("Stopping %d '%s' process%s with SIGTERM", running, self._name, ses(running, 'es'))
self._signal(signal.SIGTERM)
return True
|
def start(self):
"""
Starts the worker threads if they are not already started.
This method is thread-safe and will be called automatically
when executing an operation.
"""
# Check whether we are already started, skip if we are.
if not self._started.is_set():
# If we are not started, try to capture the lock.
if self._lock.acquire(False):
# If we got the lock, go ahead and start the worker
# threads, set the started flag, and release the lock.
for i in range(self._size):
name = "riak.client.multi-worker-{0}-{1}".format(
self._name, i)
worker = Thread(target=self._worker_method, name=name)
worker.daemon = False
worker.start()
self._workers.append(worker)
self._started.set()
self._lock.release()
else:
# We didn't get the lock, so someone else is already
# starting the worker threads. Wait until they have
# signaled that the threads are started.
self._started.wait()
|
def function[start, parameter[self]]:
constant[
Starts the worker threads if they are not already started.
This method is thread-safe and will be called automatically
when executing an operation.
]
if <ast.UnaryOp object at 0x7da20c7c9c60> begin[:]
if call[name[self]._lock.acquire, parameter[constant[False]]] begin[:]
for taget[name[i]] in starred[call[name[range], parameter[name[self]._size]]] begin[:]
variable[name] assign[=] call[constant[riak.client.multi-worker-{0}-{1}].format, parameter[name[self]._name, name[i]]]
variable[worker] assign[=] call[name[Thread], parameter[]]
name[worker].daemon assign[=] constant[False]
call[name[worker].start, parameter[]]
call[name[self]._workers.append, parameter[name[worker]]]
call[name[self]._started.set, parameter[]]
call[name[self]._lock.release, parameter[]]
|
keyword[def] identifier[start] ( identifier[self] ):
literal[string]
keyword[if] keyword[not] identifier[self] . identifier[_started] . identifier[is_set] ():
keyword[if] identifier[self] . identifier[_lock] . identifier[acquire] ( keyword[False] ):
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[self] . identifier[_size] ):
identifier[name] = literal[string] . identifier[format] (
identifier[self] . identifier[_name] , identifier[i] )
identifier[worker] = identifier[Thread] ( identifier[target] = identifier[self] . identifier[_worker_method] , identifier[name] = identifier[name] )
identifier[worker] . identifier[daemon] = keyword[False]
identifier[worker] . identifier[start] ()
identifier[self] . identifier[_workers] . identifier[append] ( identifier[worker] )
identifier[self] . identifier[_started] . identifier[set] ()
identifier[self] . identifier[_lock] . identifier[release] ()
keyword[else] :
identifier[self] . identifier[_started] . identifier[wait] ()
|
def start(self):
"""
Starts the worker threads if they are not already started.
This method is thread-safe and will be called automatically
when executing an operation.
"""
# Check whether we are already started, skip if we are.
if not self._started.is_set():
# If we are not started, try to capture the lock.
if self._lock.acquire(False):
# If we got the lock, go ahead and start the worker
# threads, set the started flag, and release the lock.
for i in range(self._size):
name = 'riak.client.multi-worker-{0}-{1}'.format(self._name, i)
worker = Thread(target=self._worker_method, name=name)
worker.daemon = False
worker.start()
self._workers.append(worker) # depends on [control=['for'], data=['i']]
self._started.set()
self._lock.release() # depends on [control=['if'], data=[]]
else:
# We didn't get the lock, so someone else is already
# starting the worker threads. Wait until they have
# signaled that the threads are started.
self._started.wait() # depends on [control=['if'], data=[]]
|
def _text_checker(job, interval, _interval_set=False, quiet=False, output=sys.stdout):
"""A text-based job status checker
Args:
job (BaseJob): The job to check.
interval (int): The interval at which to check.
_interval_set (bool): Was interval time set by user?
quiet (bool): If True, do not print status messages.
output (file): The file like object to write status messages to.
By default this is sys.stdout.
"""
status = job.status()
msg = status.value
prev_msg = msg
msg_len = len(msg)
if not quiet:
print('\r%s: %s' % ('Job Status', msg), end='', file=output)
while status.name not in ['DONE', 'CANCELLED', 'ERROR']:
time.sleep(interval)
status = job.status()
msg = status.value
if status.name == 'QUEUED':
msg += ' (%s)' % job.queue_position()
if not _interval_set:
interval = max(job.queue_position(), 2)
else:
if not _interval_set:
interval = 2
# Adjust length of message so there are no artifacts
if len(msg) < msg_len:
msg += ' ' * (msg_len - len(msg))
elif len(msg) > msg_len:
msg_len = len(msg)
if msg != prev_msg and not quiet:
print('\r%s: %s' % ('Job Status', msg), end='', file=output)
prev_msg = msg
if not quiet:
print('', file=output)
|
def function[_text_checker, parameter[job, interval, _interval_set, quiet, output]]:
constant[A text-based job status checker
Args:
job (BaseJob): The job to check.
interval (int): The interval at which to check.
_interval_set (bool): Was interval time set by user?
quiet (bool): If True, do not print status messages.
output (file): The file like object to write status messages to.
By default this is sys.stdout.
]
variable[status] assign[=] call[name[job].status, parameter[]]
variable[msg] assign[=] name[status].value
variable[prev_msg] assign[=] name[msg]
variable[msg_len] assign[=] call[name[len], parameter[name[msg]]]
if <ast.UnaryOp object at 0x7da1b05ac3d0> begin[:]
call[name[print], parameter[binary_operation[constant[
%s: %s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Constant object at 0x7da1b05adb10>, <ast.Name object at 0x7da1b05af640>]]]]]
while compare[name[status].name <ast.NotIn object at 0x7da2590d7190> list[[<ast.Constant object at 0x7da1b05afd30>, <ast.Constant object at 0x7da1b05af490>, <ast.Constant object at 0x7da1b05ad5a0>]]] begin[:]
call[name[time].sleep, parameter[name[interval]]]
variable[status] assign[=] call[name[job].status, parameter[]]
variable[msg] assign[=] name[status].value
if compare[name[status].name equal[==] constant[QUEUED]] begin[:]
<ast.AugAssign object at 0x7da1b05ac190>
if <ast.UnaryOp object at 0x7da1b05ac940> begin[:]
variable[interval] assign[=] call[name[max], parameter[call[name[job].queue_position, parameter[]], constant[2]]]
if compare[call[name[len], parameter[name[msg]]] less[<] name[msg_len]] begin[:]
<ast.AugAssign object at 0x7da1b05affd0>
if <ast.BoolOp object at 0x7da1b05adea0> begin[:]
call[name[print], parameter[binary_operation[constant[
%s: %s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Constant object at 0x7da1b05ad9c0>, <ast.Name object at 0x7da1b05acaf0>]]]]]
variable[prev_msg] assign[=] name[msg]
if <ast.UnaryOp object at 0x7da1b05ae2f0> begin[:]
call[name[print], parameter[constant[]]]
|
keyword[def] identifier[_text_checker] ( identifier[job] , identifier[interval] , identifier[_interval_set] = keyword[False] , identifier[quiet] = keyword[False] , identifier[output] = identifier[sys] . identifier[stdout] ):
literal[string]
identifier[status] = identifier[job] . identifier[status] ()
identifier[msg] = identifier[status] . identifier[value]
identifier[prev_msg] = identifier[msg]
identifier[msg_len] = identifier[len] ( identifier[msg] )
keyword[if] keyword[not] identifier[quiet] :
identifier[print] ( literal[string] %( literal[string] , identifier[msg] ), identifier[end] = literal[string] , identifier[file] = identifier[output] )
keyword[while] identifier[status] . identifier[name] keyword[not] keyword[in] [ literal[string] , literal[string] , literal[string] ]:
identifier[time] . identifier[sleep] ( identifier[interval] )
identifier[status] = identifier[job] . identifier[status] ()
identifier[msg] = identifier[status] . identifier[value]
keyword[if] identifier[status] . identifier[name] == literal[string] :
identifier[msg] += literal[string] % identifier[job] . identifier[queue_position] ()
keyword[if] keyword[not] identifier[_interval_set] :
identifier[interval] = identifier[max] ( identifier[job] . identifier[queue_position] (), literal[int] )
keyword[else] :
keyword[if] keyword[not] identifier[_interval_set] :
identifier[interval] = literal[int]
keyword[if] identifier[len] ( identifier[msg] )< identifier[msg_len] :
identifier[msg] += literal[string] *( identifier[msg_len] - identifier[len] ( identifier[msg] ))
keyword[elif] identifier[len] ( identifier[msg] )> identifier[msg_len] :
identifier[msg_len] = identifier[len] ( identifier[msg] )
keyword[if] identifier[msg] != identifier[prev_msg] keyword[and] keyword[not] identifier[quiet] :
identifier[print] ( literal[string] %( literal[string] , identifier[msg] ), identifier[end] = literal[string] , identifier[file] = identifier[output] )
identifier[prev_msg] = identifier[msg]
keyword[if] keyword[not] identifier[quiet] :
identifier[print] ( literal[string] , identifier[file] = identifier[output] )
|
def _text_checker(job, interval, _interval_set=False, quiet=False, output=sys.stdout):
"""A text-based job status checker
Args:
job (BaseJob): The job to check.
interval (int): The interval at which to check.
_interval_set (bool): Was interval time set by user?
quiet (bool): If True, do not print status messages.
output (file): The file like object to write status messages to.
By default this is sys.stdout.
"""
status = job.status()
msg = status.value
prev_msg = msg
msg_len = len(msg)
if not quiet:
print('\r%s: %s' % ('Job Status', msg), end='', file=output) # depends on [control=['if'], data=[]]
while status.name not in ['DONE', 'CANCELLED', 'ERROR']:
time.sleep(interval)
status = job.status()
msg = status.value
if status.name == 'QUEUED':
msg += ' (%s)' % job.queue_position()
if not _interval_set:
interval = max(job.queue_position(), 2) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
elif not _interval_set:
interval = 2 # depends on [control=['if'], data=[]]
# Adjust length of message so there are no artifacts
if len(msg) < msg_len:
msg += ' ' * (msg_len - len(msg)) # depends on [control=['if'], data=['msg_len']]
elif len(msg) > msg_len:
msg_len = len(msg) # depends on [control=['if'], data=['msg_len']]
if msg != prev_msg and (not quiet):
print('\r%s: %s' % ('Job Status', msg), end='', file=output)
prev_msg = msg # depends on [control=['if'], data=[]] # depends on [control=['while'], data=[]]
if not quiet:
print('', file=output) # depends on [control=['if'], data=[]]
|
def _create_tree(self, endpoint=None, index=0):
"""
This will return a string of the endpoint tree structure
:param endpoint: Endpoint's Current path of the source
:param index: int number of tabs to space over
:return: str
"""
tab = '' # '\t' * index
ret = ''
if endpoint:
name = endpoint.path.split('.', 1)[1].replace('.', '/') + '/'
ret += tab + name + '\n'
ret += endpoint.method_calls(' ' * len(tab + name))
else:
endpoint = self
for child_name in endpoint._endpoints:
child = getattr(endpoint, child_name, None)
if child:
ret += self._create_tree(child, index + 1)
return ret
|
def function[_create_tree, parameter[self, endpoint, index]]:
constant[
This will return a string of the endpoint tree structure
:param endpoint: Endpoint's Current path of the source
:param index: int number of tabs to space over
:return: str
]
variable[tab] assign[=] constant[]
variable[ret] assign[=] constant[]
if name[endpoint] begin[:]
variable[name] assign[=] binary_operation[call[call[call[name[endpoint].path.split, parameter[constant[.], constant[1]]]][constant[1]].replace, parameter[constant[.], constant[/]]] + constant[/]]
<ast.AugAssign object at 0x7da20e957610>
<ast.AugAssign object at 0x7da20e957ac0>
for taget[name[child_name]] in starred[name[endpoint]._endpoints] begin[:]
variable[child] assign[=] call[name[getattr], parameter[name[endpoint], name[child_name], constant[None]]]
if name[child] begin[:]
<ast.AugAssign object at 0x7da20e9559c0>
return[name[ret]]
|
keyword[def] identifier[_create_tree] ( identifier[self] , identifier[endpoint] = keyword[None] , identifier[index] = literal[int] ):
literal[string]
identifier[tab] = literal[string]
identifier[ret] = literal[string]
keyword[if] identifier[endpoint] :
identifier[name] = identifier[endpoint] . identifier[path] . identifier[split] ( literal[string] , literal[int] )[ literal[int] ]. identifier[replace] ( literal[string] , literal[string] )+ literal[string]
identifier[ret] += identifier[tab] + identifier[name] + literal[string]
identifier[ret] += identifier[endpoint] . identifier[method_calls] ( literal[string] * identifier[len] ( identifier[tab] + identifier[name] ))
keyword[else] :
identifier[endpoint] = identifier[self]
keyword[for] identifier[child_name] keyword[in] identifier[endpoint] . identifier[_endpoints] :
identifier[child] = identifier[getattr] ( identifier[endpoint] , identifier[child_name] , keyword[None] )
keyword[if] identifier[child] :
identifier[ret] += identifier[self] . identifier[_create_tree] ( identifier[child] , identifier[index] + literal[int] )
keyword[return] identifier[ret]
|
def _create_tree(self, endpoint=None, index=0):
"""
This will return a string of the endpoint tree structure
:param endpoint: Endpoint's Current path of the source
:param index: int number of tabs to space over
:return: str
"""
tab = '' # '\t' * index
ret = ''
if endpoint:
name = endpoint.path.split('.', 1)[1].replace('.', '/') + '/'
ret += tab + name + '\n'
ret += endpoint.method_calls(' ' * len(tab + name)) # depends on [control=['if'], data=[]]
else:
endpoint = self
for child_name in endpoint._endpoints:
child = getattr(endpoint, child_name, None)
if child:
ret += self._create_tree(child, index + 1) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['child_name']]
return ret
|
def blockSignals(self, state):
"""
Sets whether or not updates will be enabled.
:param state | <bool>
"""
super(XGanttWidget, self).blockSignals(state)
self.treeWidget().blockSignals(state)
self.viewWidget().blockSignals(state)
|
def function[blockSignals, parameter[self, state]]:
constant[
Sets whether or not updates will be enabled.
:param state | <bool>
]
call[call[name[super], parameter[name[XGanttWidget], name[self]]].blockSignals, parameter[name[state]]]
call[call[name[self].treeWidget, parameter[]].blockSignals, parameter[name[state]]]
call[call[name[self].viewWidget, parameter[]].blockSignals, parameter[name[state]]]
|
keyword[def] identifier[blockSignals] ( identifier[self] , identifier[state] ):
literal[string]
identifier[super] ( identifier[XGanttWidget] , identifier[self] ). identifier[blockSignals] ( identifier[state] )
identifier[self] . identifier[treeWidget] (). identifier[blockSignals] ( identifier[state] )
identifier[self] . identifier[viewWidget] (). identifier[blockSignals] ( identifier[state] )
|
def blockSignals(self, state):
"""
Sets whether or not updates will be enabled.
:param state | <bool>
"""
super(XGanttWidget, self).blockSignals(state)
self.treeWidget().blockSignals(state)
self.viewWidget().blockSignals(state)
|
def _sample_action_fluent(self,
name: str,
dtype: tf.DType,
size: Sequence[int],
constraints: Dict[str, Constraints],
default_value: tf.Tensor,
prob: float) -> tf.Tensor:
'''Samples the action fluent with given `name`, `dtype`, and `size`.
With probability `prob` it chooses the action fluent `default_value`,
with probability 1-`prob` it samples the fluent w.r.t. its `constraints`.
Args:
name (str): The name of the action fluent.
dtype (tf.DType): The data type of the action fluent.
size (Sequence[int]): The size and shape of the action fluent.
constraints (Dict[str, Tuple[Optional[TensorFluent], Optional[TensorFluent]]]): The bounds for each action fluent.
default_value (tf.Tensor): The default value for the action fluent.
prob (float): A probability measure.
Returns:
tf.Tensor: A tensor for sampling the action fluent.
'''
shape = [self.batch_size] + list(size)
if dtype == tf.float32:
bounds = constraints.get(name)
if bounds is None:
low, high = -self.MAX_REAL_VALUE, self.MAX_REAL_VALUE
dist = tf.distributions.Uniform(low=low, high=high)
sampled_fluent = dist.sample(shape)
else:
low, high = bounds
batch = (low is not None and low.batch) or (high is not None and high.batch)
low = tf.cast(low.tensor, tf.float32) if low is not None else -self.MAX_REAL_VALUE
high = tf.cast(high.tensor, tf.float32) if high is not None else self.MAX_REAL_VALUE
dist = tf.distributions.Uniform(low=low, high=high)
if batch:
sampled_fluent = dist.sample()
elif isinstance(low, tf.Tensor) or isinstance(high, tf.Tensor):
if (low+high).shape.as_list() == list(size):
sampled_fluent = dist.sample([self.batch_size])
else:
raise ValueError('bounds are not compatible with action fluent.')
else:
sampled_fluent = dist.sample(shape)
elif dtype == tf.int32:
logits = [1.0] * self.MAX_INT_VALUE
dist = tf.distributions.Categorical(logits=logits, dtype=tf.int32)
sampled_fluent = dist.sample(shape)
elif dtype == tf.bool:
probs = 0.5
dist = tf.distributions.Bernoulli(probs=probs, dtype=tf.bool)
sampled_fluent = dist.sample(shape)
select_default = tf.distributions.Bernoulli(prob, dtype=tf.bool).sample(self.batch_size)
action_fluent = tf.where(select_default, default_value, sampled_fluent)
return action_fluent
|
def function[_sample_action_fluent, parameter[self, name, dtype, size, constraints, default_value, prob]]:
constant[Samples the action fluent with given `name`, `dtype`, and `size`.
With probability `prob` it chooses the action fluent `default_value`,
with probability 1-`prob` it samples the fluent w.r.t. its `constraints`.
Args:
name (str): The name of the action fluent.
dtype (tf.DType): The data type of the action fluent.
size (Sequence[int]): The size and shape of the action fluent.
constraints (Dict[str, Tuple[Optional[TensorFluent], Optional[TensorFluent]]]): The bounds for each action fluent.
default_value (tf.Tensor): The default value for the action fluent.
prob (float): A probability measure.
Returns:
tf.Tensor: A tensor for sampling the action fluent.
]
variable[shape] assign[=] binary_operation[list[[<ast.Attribute object at 0x7da20e956110>]] + call[name[list], parameter[name[size]]]]
if compare[name[dtype] equal[==] name[tf].float32] begin[:]
variable[bounds] assign[=] call[name[constraints].get, parameter[name[name]]]
if compare[name[bounds] is constant[None]] begin[:]
<ast.Tuple object at 0x7da20e954af0> assign[=] tuple[[<ast.UnaryOp object at 0x7da20e956f50>, <ast.Attribute object at 0x7da20e957730>]]
variable[dist] assign[=] call[name[tf].distributions.Uniform, parameter[]]
variable[sampled_fluent] assign[=] call[name[dist].sample, parameter[name[shape]]]
variable[select_default] assign[=] call[call[name[tf].distributions.Bernoulli, parameter[name[prob]]].sample, parameter[name[self].batch_size]]
variable[action_fluent] assign[=] call[name[tf].where, parameter[name[select_default], name[default_value], name[sampled_fluent]]]
return[name[action_fluent]]
|
keyword[def] identifier[_sample_action_fluent] ( identifier[self] ,
identifier[name] : identifier[str] ,
identifier[dtype] : identifier[tf] . identifier[DType] ,
identifier[size] : identifier[Sequence] [ identifier[int] ],
identifier[constraints] : identifier[Dict] [ identifier[str] , identifier[Constraints] ],
identifier[default_value] : identifier[tf] . identifier[Tensor] ,
identifier[prob] : identifier[float] )-> identifier[tf] . identifier[Tensor] :
literal[string]
identifier[shape] =[ identifier[self] . identifier[batch_size] ]+ identifier[list] ( identifier[size] )
keyword[if] identifier[dtype] == identifier[tf] . identifier[float32] :
identifier[bounds] = identifier[constraints] . identifier[get] ( identifier[name] )
keyword[if] identifier[bounds] keyword[is] keyword[None] :
identifier[low] , identifier[high] =- identifier[self] . identifier[MAX_REAL_VALUE] , identifier[self] . identifier[MAX_REAL_VALUE]
identifier[dist] = identifier[tf] . identifier[distributions] . identifier[Uniform] ( identifier[low] = identifier[low] , identifier[high] = identifier[high] )
identifier[sampled_fluent] = identifier[dist] . identifier[sample] ( identifier[shape] )
keyword[else] :
identifier[low] , identifier[high] = identifier[bounds]
identifier[batch] =( identifier[low] keyword[is] keyword[not] keyword[None] keyword[and] identifier[low] . identifier[batch] ) keyword[or] ( identifier[high] keyword[is] keyword[not] keyword[None] keyword[and] identifier[high] . identifier[batch] )
identifier[low] = identifier[tf] . identifier[cast] ( identifier[low] . identifier[tensor] , identifier[tf] . identifier[float32] ) keyword[if] identifier[low] keyword[is] keyword[not] keyword[None] keyword[else] - identifier[self] . identifier[MAX_REAL_VALUE]
identifier[high] = identifier[tf] . identifier[cast] ( identifier[high] . identifier[tensor] , identifier[tf] . identifier[float32] ) keyword[if] identifier[high] keyword[is] keyword[not] keyword[None] keyword[else] identifier[self] . identifier[MAX_REAL_VALUE]
identifier[dist] = identifier[tf] . identifier[distributions] . identifier[Uniform] ( identifier[low] = identifier[low] , identifier[high] = identifier[high] )
keyword[if] identifier[batch] :
identifier[sampled_fluent] = identifier[dist] . identifier[sample] ()
keyword[elif] identifier[isinstance] ( identifier[low] , identifier[tf] . identifier[Tensor] ) keyword[or] identifier[isinstance] ( identifier[high] , identifier[tf] . identifier[Tensor] ):
keyword[if] ( identifier[low] + identifier[high] ). identifier[shape] . identifier[as_list] ()== identifier[list] ( identifier[size] ):
identifier[sampled_fluent] = identifier[dist] . identifier[sample] ([ identifier[self] . identifier[batch_size] ])
keyword[else] :
keyword[raise] identifier[ValueError] ( literal[string] )
keyword[else] :
identifier[sampled_fluent] = identifier[dist] . identifier[sample] ( identifier[shape] )
keyword[elif] identifier[dtype] == identifier[tf] . identifier[int32] :
identifier[logits] =[ literal[int] ]* identifier[self] . identifier[MAX_INT_VALUE]
identifier[dist] = identifier[tf] . identifier[distributions] . identifier[Categorical] ( identifier[logits] = identifier[logits] , identifier[dtype] = identifier[tf] . identifier[int32] )
identifier[sampled_fluent] = identifier[dist] . identifier[sample] ( identifier[shape] )
keyword[elif] identifier[dtype] == identifier[tf] . identifier[bool] :
identifier[probs] = literal[int]
identifier[dist] = identifier[tf] . identifier[distributions] . identifier[Bernoulli] ( identifier[probs] = identifier[probs] , identifier[dtype] = identifier[tf] . identifier[bool] )
identifier[sampled_fluent] = identifier[dist] . identifier[sample] ( identifier[shape] )
identifier[select_default] = identifier[tf] . identifier[distributions] . identifier[Bernoulli] ( identifier[prob] , identifier[dtype] = identifier[tf] . identifier[bool] ). identifier[sample] ( identifier[self] . identifier[batch_size] )
identifier[action_fluent] = identifier[tf] . identifier[where] ( identifier[select_default] , identifier[default_value] , identifier[sampled_fluent] )
keyword[return] identifier[action_fluent]
|
def _sample_action_fluent(self, name: str, dtype: tf.DType, size: Sequence[int], constraints: Dict[str, Constraints], default_value: tf.Tensor, prob: float) -> tf.Tensor:
"""Samples the action fluent with given `name`, `dtype`, and `size`.
With probability `prob` it chooses the action fluent `default_value`,
with probability 1-`prob` it samples the fluent w.r.t. its `constraints`.
Args:
name (str): The name of the action fluent.
dtype (tf.DType): The data type of the action fluent.
size (Sequence[int]): The size and shape of the action fluent.
constraints (Dict[str, Tuple[Optional[TensorFluent], Optional[TensorFluent]]]): The bounds for each action fluent.
default_value (tf.Tensor): The default value for the action fluent.
prob (float): A probability measure.
Returns:
tf.Tensor: A tensor for sampling the action fluent.
"""
shape = [self.batch_size] + list(size)
if dtype == tf.float32:
bounds = constraints.get(name)
if bounds is None:
(low, high) = (-self.MAX_REAL_VALUE, self.MAX_REAL_VALUE)
dist = tf.distributions.Uniform(low=low, high=high)
sampled_fluent = dist.sample(shape) # depends on [control=['if'], data=[]]
else:
(low, high) = bounds
batch = low is not None and low.batch or (high is not None and high.batch)
low = tf.cast(low.tensor, tf.float32) if low is not None else -self.MAX_REAL_VALUE
high = tf.cast(high.tensor, tf.float32) if high is not None else self.MAX_REAL_VALUE
dist = tf.distributions.Uniform(low=low, high=high)
if batch:
sampled_fluent = dist.sample() # depends on [control=['if'], data=[]]
elif isinstance(low, tf.Tensor) or isinstance(high, tf.Tensor):
if (low + high).shape.as_list() == list(size):
sampled_fluent = dist.sample([self.batch_size]) # depends on [control=['if'], data=[]]
else:
raise ValueError('bounds are not compatible with action fluent.') # depends on [control=['if'], data=[]]
else:
sampled_fluent = dist.sample(shape) # depends on [control=['if'], data=[]]
elif dtype == tf.int32:
logits = [1.0] * self.MAX_INT_VALUE
dist = tf.distributions.Categorical(logits=logits, dtype=tf.int32)
sampled_fluent = dist.sample(shape) # depends on [control=['if'], data=[]]
elif dtype == tf.bool:
probs = 0.5
dist = tf.distributions.Bernoulli(probs=probs, dtype=tf.bool)
sampled_fluent = dist.sample(shape) # depends on [control=['if'], data=[]]
select_default = tf.distributions.Bernoulli(prob, dtype=tf.bool).sample(self.batch_size)
action_fluent = tf.where(select_default, default_value, sampled_fluent)
return action_fluent
|
def _create_producer(self, settings):
"""Tries to establish a Kafka consumer connection"""
try:
brokers = settings['KAFKA_HOSTS']
self.logger.debug("Creating new kafka producer using brokers: " +
str(brokers))
return KafkaProducer(bootstrap_servers=brokers,
value_serializer=lambda m: json.dumps(m),
retries=3,
linger_ms=settings['KAFKA_PRODUCER_BATCH_LINGER_MS'],
buffer_memory=settings['KAFKA_PRODUCER_BUFFER_BYTES'])
except KeyError as e:
self.logger.error('Missing setting named ' + str(e),
{'ex': traceback.format_exc()})
except:
self.logger.error("Couldn't initialize kafka producer in plugin.",
{'ex': traceback.format_exc()})
raise
|
def function[_create_producer, parameter[self, settings]]:
constant[Tries to establish a Kafka consumer connection]
<ast.Try object at 0x7da1b18dd1b0>
|
keyword[def] identifier[_create_producer] ( identifier[self] , identifier[settings] ):
literal[string]
keyword[try] :
identifier[brokers] = identifier[settings] [ literal[string] ]
identifier[self] . identifier[logger] . identifier[debug] ( literal[string] +
identifier[str] ( identifier[brokers] ))
keyword[return] identifier[KafkaProducer] ( identifier[bootstrap_servers] = identifier[brokers] ,
identifier[value_serializer] = keyword[lambda] identifier[m] : identifier[json] . identifier[dumps] ( identifier[m] ),
identifier[retries] = literal[int] ,
identifier[linger_ms] = identifier[settings] [ literal[string] ],
identifier[buffer_memory] = identifier[settings] [ literal[string] ])
keyword[except] identifier[KeyError] keyword[as] identifier[e] :
identifier[self] . identifier[logger] . identifier[error] ( literal[string] + identifier[str] ( identifier[e] ),
{ literal[string] : identifier[traceback] . identifier[format_exc] ()})
keyword[except] :
identifier[self] . identifier[logger] . identifier[error] ( literal[string] ,
{ literal[string] : identifier[traceback] . identifier[format_exc] ()})
keyword[raise]
|
def _create_producer(self, settings):
"""Tries to establish a Kafka consumer connection"""
try:
brokers = settings['KAFKA_HOSTS']
self.logger.debug('Creating new kafka producer using brokers: ' + str(brokers))
return KafkaProducer(bootstrap_servers=brokers, value_serializer=lambda m: json.dumps(m), retries=3, linger_ms=settings['KAFKA_PRODUCER_BATCH_LINGER_MS'], buffer_memory=settings['KAFKA_PRODUCER_BUFFER_BYTES']) # depends on [control=['try'], data=[]]
except KeyError as e:
self.logger.error('Missing setting named ' + str(e), {'ex': traceback.format_exc()}) # depends on [control=['except'], data=['e']]
except:
self.logger.error("Couldn't initialize kafka producer in plugin.", {'ex': traceback.format_exc()})
raise # depends on [control=['except'], data=[]]
|
def update(self, buffer, length):
"""
Add buffer into digest calculation
"""
return lib.zdigest_update(self._as_parameter_, buffer, length)
|
def function[update, parameter[self, buffer, length]]:
constant[
Add buffer into digest calculation
]
return[call[name[lib].zdigest_update, parameter[name[self]._as_parameter_, name[buffer], name[length]]]]
|
keyword[def] identifier[update] ( identifier[self] , identifier[buffer] , identifier[length] ):
literal[string]
keyword[return] identifier[lib] . identifier[zdigest_update] ( identifier[self] . identifier[_as_parameter_] , identifier[buffer] , identifier[length] )
|
def update(self, buffer, length):
"""
Add buffer into digest calculation
"""
return lib.zdigest_update(self._as_parameter_, buffer, length)
|
def get_upcoming_events(self, days_to_look_ahead):
'''Returns the events from the calendar for the next days_to_look_ahead days.'''
now = datetime.now(tz=self.timezone) # timezone?
start_time = datetime(year=now.year, month=now.month, day=now.day, hour=now.hour, minute=now.minute, second=now.second, tzinfo=self.timezone)
end_time = start_time + timedelta(days = days_to_look_ahead)
start_time = start_time.isoformat()
end_time = end_time.isoformat()
return self.get_events(start_time, end_time)
|
def function[get_upcoming_events, parameter[self, days_to_look_ahead]]:
constant[Returns the events from the calendar for the next days_to_look_ahead days.]
variable[now] assign[=] call[name[datetime].now, parameter[]]
variable[start_time] assign[=] call[name[datetime], parameter[]]
variable[end_time] assign[=] binary_operation[name[start_time] + call[name[timedelta], parameter[]]]
variable[start_time] assign[=] call[name[start_time].isoformat, parameter[]]
variable[end_time] assign[=] call[name[end_time].isoformat, parameter[]]
return[call[name[self].get_events, parameter[name[start_time], name[end_time]]]]
|
keyword[def] identifier[get_upcoming_events] ( identifier[self] , identifier[days_to_look_ahead] ):
literal[string]
identifier[now] = identifier[datetime] . identifier[now] ( identifier[tz] = identifier[self] . identifier[timezone] )
identifier[start_time] = identifier[datetime] ( identifier[year] = identifier[now] . identifier[year] , identifier[month] = identifier[now] . identifier[month] , identifier[day] = identifier[now] . identifier[day] , identifier[hour] = identifier[now] . identifier[hour] , identifier[minute] = identifier[now] . identifier[minute] , identifier[second] = identifier[now] . identifier[second] , identifier[tzinfo] = identifier[self] . identifier[timezone] )
identifier[end_time] = identifier[start_time] + identifier[timedelta] ( identifier[days] = identifier[days_to_look_ahead] )
identifier[start_time] = identifier[start_time] . identifier[isoformat] ()
identifier[end_time] = identifier[end_time] . identifier[isoformat] ()
keyword[return] identifier[self] . identifier[get_events] ( identifier[start_time] , identifier[end_time] )
|
def get_upcoming_events(self, days_to_look_ahead):
"""Returns the events from the calendar for the next days_to_look_ahead days."""
now = datetime.now(tz=self.timezone) # timezone?
start_time = datetime(year=now.year, month=now.month, day=now.day, hour=now.hour, minute=now.minute, second=now.second, tzinfo=self.timezone)
end_time = start_time + timedelta(days=days_to_look_ahead)
start_time = start_time.isoformat()
end_time = end_time.isoformat()
return self.get_events(start_time, end_time)
|
def crl_distribution_points(self):
"""
Returns complete CRL URLs - does not include delta CRLs
:return:
A list of zero or more DistributionPoint objects
"""
if self._crl_distribution_points is None:
self._crl_distribution_points = self._get_http_crl_distribution_points(self.crl_distribution_points_value)
return self._crl_distribution_points
|
def function[crl_distribution_points, parameter[self]]:
constant[
Returns complete CRL URLs - does not include delta CRLs
:return:
A list of zero or more DistributionPoint objects
]
if compare[name[self]._crl_distribution_points is constant[None]] begin[:]
name[self]._crl_distribution_points assign[=] call[name[self]._get_http_crl_distribution_points, parameter[name[self].crl_distribution_points_value]]
return[name[self]._crl_distribution_points]
|
keyword[def] identifier[crl_distribution_points] ( identifier[self] ):
literal[string]
keyword[if] identifier[self] . identifier[_crl_distribution_points] keyword[is] keyword[None] :
identifier[self] . identifier[_crl_distribution_points] = identifier[self] . identifier[_get_http_crl_distribution_points] ( identifier[self] . identifier[crl_distribution_points_value] )
keyword[return] identifier[self] . identifier[_crl_distribution_points]
|
def crl_distribution_points(self):
"""
Returns complete CRL URLs - does not include delta CRLs
:return:
A list of zero or more DistributionPoint objects
"""
if self._crl_distribution_points is None:
self._crl_distribution_points = self._get_http_crl_distribution_points(self.crl_distribution_points_value) # depends on [control=['if'], data=[]]
return self._crl_distribution_points
|
def _clean_lazymodule(module):
"""Removes all lazy behavior from a module's class, for loading.
Also removes all module attributes listed under the module's class deletion
dictionaries. Deletion dictionaries are class attributes with names
specified in `_DELETION_DICT`.
Parameters
----------
module: LazyModule
Returns
-------
dict
A dictionary of deleted class attributes, that can be used to reset the
lazy state using :func:`_reset_lazymodule`.
"""
modclass = type(module)
_clean_lazy_submod_refs(module)
modclass.__getattribute__ = ModuleType.__getattribute__
modclass.__setattr__ = ModuleType.__setattr__
cls_attrs = {}
for cls_attr in _CLS_ATTRS:
try:
cls_attrs[cls_attr] = getattr(modclass, cls_attr)
delattr(modclass, cls_attr)
except AttributeError:
pass
return cls_attrs
|
def function[_clean_lazymodule, parameter[module]]:
constant[Removes all lazy behavior from a module's class, for loading.
Also removes all module attributes listed under the module's class deletion
dictionaries. Deletion dictionaries are class attributes with names
specified in `_DELETION_DICT`.
Parameters
----------
module: LazyModule
Returns
-------
dict
A dictionary of deleted class attributes, that can be used to reset the
lazy state using :func:`_reset_lazymodule`.
]
variable[modclass] assign[=] call[name[type], parameter[name[module]]]
call[name[_clean_lazy_submod_refs], parameter[name[module]]]
name[modclass].__getattribute__ assign[=] name[ModuleType].__getattribute__
name[modclass].__setattr__ assign[=] name[ModuleType].__setattr__
variable[cls_attrs] assign[=] dictionary[[], []]
for taget[name[cls_attr]] in starred[name[_CLS_ATTRS]] begin[:]
<ast.Try object at 0x7da1b0fc5000>
return[name[cls_attrs]]
|
keyword[def] identifier[_clean_lazymodule] ( identifier[module] ):
literal[string]
identifier[modclass] = identifier[type] ( identifier[module] )
identifier[_clean_lazy_submod_refs] ( identifier[module] )
identifier[modclass] . identifier[__getattribute__] = identifier[ModuleType] . identifier[__getattribute__]
identifier[modclass] . identifier[__setattr__] = identifier[ModuleType] . identifier[__setattr__]
identifier[cls_attrs] ={}
keyword[for] identifier[cls_attr] keyword[in] identifier[_CLS_ATTRS] :
keyword[try] :
identifier[cls_attrs] [ identifier[cls_attr] ]= identifier[getattr] ( identifier[modclass] , identifier[cls_attr] )
identifier[delattr] ( identifier[modclass] , identifier[cls_attr] )
keyword[except] identifier[AttributeError] :
keyword[pass]
keyword[return] identifier[cls_attrs]
|
def _clean_lazymodule(module):
"""Removes all lazy behavior from a module's class, for loading.
Also removes all module attributes listed under the module's class deletion
dictionaries. Deletion dictionaries are class attributes with names
specified in `_DELETION_DICT`.
Parameters
----------
module: LazyModule
Returns
-------
dict
A dictionary of deleted class attributes, that can be used to reset the
lazy state using :func:`_reset_lazymodule`.
"""
modclass = type(module)
_clean_lazy_submod_refs(module)
modclass.__getattribute__ = ModuleType.__getattribute__
modclass.__setattr__ = ModuleType.__setattr__
cls_attrs = {}
for cls_attr in _CLS_ATTRS:
try:
cls_attrs[cls_attr] = getattr(modclass, cls_attr)
delattr(modclass, cls_attr) # depends on [control=['try'], data=[]]
except AttributeError:
pass # depends on [control=['except'], data=[]] # depends on [control=['for'], data=['cls_attr']]
return cls_attrs
|
def density_sampling(data, local_densities = None, metric = 'manhattan',
kernel_mult = 2.0, outlier_percentile = 0.01,
target_percentile = 0.05, desired_samples = None):
"""The i-th sample point of the data-set 'data' is selected by density sampling
with a probability given by:
| 0 if outlier_density > LD[i];
P(keep the i-th data-point) = | 1 if outlier_density <= LD[i] <= target_density;
| target_density / LD[i] if LD[i] > target_density.
Here 'LD[i]' denotes the local density of the i-th sample point of the data-set,
whereas 'outlier_density' and 'target_density' are computed as particular percentiles
of that distribution of local densities.
Parameters
----------
data : array of shape (n_samples, n_features)
The data-set, a fraction of whose sample points will be extracted
by density sampling.
local_densities : array of shape (n_samples,), optional (default = None)
The i-th entry of this vector corresponds to the local density of the i-th sample
point in the order of the rows of 'data'.
metric : string, optional (default = 'manhattan')
The distance metric used to determine the nearest-neighbor to each data-point.
The DistanceMetric class defined in scikit-learn's library lists all available
metrics.
kernel_mult : float, optional (default = 2.0)
The kernel multiplier, which determine (in terms of the median of the distribution
of distances among nearest neighbors) the extent of the regions centered
around each sample point to consider for the computation of the local density
associated to that particular sample point.
outlier_percentile : float, optional (default = 0.01)
Specify the outlier density as a percentile of the distribution of local densities.
target_percentile : float, optional (default = 0.05)
Specifiy the target density as a percentile of the distribution of local densities.
Relevant only if 'desired_samples' is left unspecified.
desired_samples : int, optional (default = None)
The number of samples to be selected from the whole data-set such that members
of rare populations and members of more common populations are roughly
equally represented. To that purpose, a target density is computed that to selects about
'desired_samples' data-points.
Returns
-------
samples_kept : array of shape (n_selected_samples,)
If the 'i'-th sample point of 'data' has been selected by a given instance of
density sampling, number 'i' is featured in the array returned by
the present function.
"""
random_state = np.random.RandomState()
data = np.atleast_2d(data)
for x in (kernel_mult, outlier_percentile, target_percentile):
assert isinstance(x, numbers.Real) and x > 0
for x in (outlier_percentile, target_percentile):
assert x <= 1.0
if local_densities is None:
local_densities = get_local_densities(data, kernel_mult, metric)
if reduce(operator.mul, local_densities.shape, 1) != max(local_densities.shape):
raise ValueError("\nERROR: Density_Sampling: density_sampling: problem with "
"the dimensions of the vector of local densities provided.\n")
else:
local_densities = np.reshape(local_densities, local_densities.size)
outlier_density = np.percentile(local_densities, outlier_percentile)
target_density = np.percentile(local_densities, target_percentile)
samples_kept = np.where(local_densities > outlier_density)[0]
N_kept = samples_kept.size
local_densities = local_densities[samples_kept]
if desired_samples is None:
probs = np.divide(target_density + 0.0, local_densities)
ind = np.where(probs > random_state.uniform(size = N_kept))[0]
samples_kept = samples_kept[ind]
elif desired_samples <= N_kept:
sorted_densities = np.sort(local_densities)
temp = np.reciprocal(sorted_densities[::-1].astype(float))
cdf = np.cumsum(temp)[::-1]
target_density = (desired_samples + 0.0) / cdf[0]
if target_density > sorted_densities[0]:
temp = desired_samples - np.arange(1.0, N_kept + 1.0)
possible_targets = np.divide(temp, cdf)
ind = np.argmax(possible_targets < sorted_densities)
target_density = possible_targets[ind]
probs = np.divide(target_density + 0.0, local_densities)
ind = np.where(probs > random_state.uniform(size = N_kept))[0]
samples_kept = samples_kept[ind]
else:
print("\nERROR: Density_Sampling: density_sampling: 'desired_samples' has been "
"assigned a value of {desired_samples}, larger than {N_kept}, "
"the number of samples whose local densities are high enough "
"(i.e. excluded are the local densities in the lowest {outlier_percentile} "
"percentile).\n".format(**locals()))
exit(1)
return samples_kept
|
def function[density_sampling, parameter[data, local_densities, metric, kernel_mult, outlier_percentile, target_percentile, desired_samples]]:
constant[The i-th sample point of the data-set 'data' is selected by density sampling
with a probability given by:
| 0 if outlier_density > LD[i];
P(keep the i-th data-point) = | 1 if outlier_density <= LD[i] <= target_density;
| target_density / LD[i] if LD[i] > target_density.
Here 'LD[i]' denotes the local density of the i-th sample point of the data-set,
whereas 'outlier_density' and 'target_density' are computed as particular percentiles
of that distribution of local densities.
Parameters
----------
data : array of shape (n_samples, n_features)
The data-set, a fraction of whose sample points will be extracted
by density sampling.
local_densities : array of shape (n_samples,), optional (default = None)
The i-th entry of this vector corresponds to the local density of the i-th sample
point in the order of the rows of 'data'.
metric : string, optional (default = 'manhattan')
The distance metric used to determine the nearest-neighbor to each data-point.
The DistanceMetric class defined in scikit-learn's library lists all available
metrics.
kernel_mult : float, optional (default = 2.0)
The kernel multiplier, which determine (in terms of the median of the distribution
of distances among nearest neighbors) the extent of the regions centered
around each sample point to consider for the computation of the local density
associated to that particular sample point.
outlier_percentile : float, optional (default = 0.01)
Specify the outlier density as a percentile of the distribution of local densities.
target_percentile : float, optional (default = 0.05)
Specifiy the target density as a percentile of the distribution of local densities.
Relevant only if 'desired_samples' is left unspecified.
desired_samples : int, optional (default = None)
The number of samples to be selected from the whole data-set such that members
of rare populations and members of more common populations are roughly
equally represented. To that purpose, a target density is computed that to selects about
'desired_samples' data-points.
Returns
-------
samples_kept : array of shape (n_selected_samples,)
If the 'i'-th sample point of 'data' has been selected by a given instance of
density sampling, number 'i' is featured in the array returned by
the present function.
]
variable[random_state] assign[=] call[name[np].random.RandomState, parameter[]]
variable[data] assign[=] call[name[np].atleast_2d, parameter[name[data]]]
for taget[name[x]] in starred[tuple[[<ast.Name object at 0x7da204567610>, <ast.Name object at 0x7da204564cd0>, <ast.Name object at 0x7da204565240>]]] begin[:]
assert[<ast.BoolOp object at 0x7da2045677f0>]
for taget[name[x]] in starred[tuple[[<ast.Name object at 0x7da18f723790>, <ast.Name object at 0x7da18f721ff0>]]] begin[:]
assert[compare[name[x] less_or_equal[<=] constant[1.0]]]
if compare[name[local_densities] is constant[None]] begin[:]
variable[local_densities] assign[=] call[name[get_local_densities], parameter[name[data], name[kernel_mult], name[metric]]]
if compare[call[name[reduce], parameter[name[operator].mul, name[local_densities].shape, constant[1]]] not_equal[!=] call[name[max], parameter[name[local_densities].shape]]] begin[:]
<ast.Raise object at 0x7da18f720820>
variable[outlier_density] assign[=] call[name[np].percentile, parameter[name[local_densities], name[outlier_percentile]]]
variable[target_density] assign[=] call[name[np].percentile, parameter[name[local_densities], name[target_percentile]]]
variable[samples_kept] assign[=] call[call[name[np].where, parameter[compare[name[local_densities] greater[>] name[outlier_density]]]]][constant[0]]
variable[N_kept] assign[=] name[samples_kept].size
variable[local_densities] assign[=] call[name[local_densities]][name[samples_kept]]
if compare[name[desired_samples] is constant[None]] begin[:]
variable[probs] assign[=] call[name[np].divide, parameter[binary_operation[name[target_density] + constant[0.0]], name[local_densities]]]
variable[ind] assign[=] call[call[name[np].where, parameter[compare[name[probs] greater[>] call[name[random_state].uniform, parameter[]]]]]][constant[0]]
variable[samples_kept] assign[=] call[name[samples_kept]][name[ind]]
return[name[samples_kept]]
|
keyword[def] identifier[density_sampling] ( identifier[data] , identifier[local_densities] = keyword[None] , identifier[metric] = literal[string] ,
identifier[kernel_mult] = literal[int] , identifier[outlier_percentile] = literal[int] ,
identifier[target_percentile] = literal[int] , identifier[desired_samples] = keyword[None] ):
literal[string]
identifier[random_state] = identifier[np] . identifier[random] . identifier[RandomState] ()
identifier[data] = identifier[np] . identifier[atleast_2d] ( identifier[data] )
keyword[for] identifier[x] keyword[in] ( identifier[kernel_mult] , identifier[outlier_percentile] , identifier[target_percentile] ):
keyword[assert] identifier[isinstance] ( identifier[x] , identifier[numbers] . identifier[Real] ) keyword[and] identifier[x] > literal[int]
keyword[for] identifier[x] keyword[in] ( identifier[outlier_percentile] , identifier[target_percentile] ):
keyword[assert] identifier[x] <= literal[int]
keyword[if] identifier[local_densities] keyword[is] keyword[None] :
identifier[local_densities] = identifier[get_local_densities] ( identifier[data] , identifier[kernel_mult] , identifier[metric] )
keyword[if] identifier[reduce] ( identifier[operator] . identifier[mul] , identifier[local_densities] . identifier[shape] , literal[int] )!= identifier[max] ( identifier[local_densities] . identifier[shape] ):
keyword[raise] identifier[ValueError] ( literal[string]
literal[string] )
keyword[else] :
identifier[local_densities] = identifier[np] . identifier[reshape] ( identifier[local_densities] , identifier[local_densities] . identifier[size] )
identifier[outlier_density] = identifier[np] . identifier[percentile] ( identifier[local_densities] , identifier[outlier_percentile] )
identifier[target_density] = identifier[np] . identifier[percentile] ( identifier[local_densities] , identifier[target_percentile] )
identifier[samples_kept] = identifier[np] . identifier[where] ( identifier[local_densities] > identifier[outlier_density] )[ literal[int] ]
identifier[N_kept] = identifier[samples_kept] . identifier[size]
identifier[local_densities] = identifier[local_densities] [ identifier[samples_kept] ]
keyword[if] identifier[desired_samples] keyword[is] keyword[None] :
identifier[probs] = identifier[np] . identifier[divide] ( identifier[target_density] + literal[int] , identifier[local_densities] )
identifier[ind] = identifier[np] . identifier[where] ( identifier[probs] > identifier[random_state] . identifier[uniform] ( identifier[size] = identifier[N_kept] ))[ literal[int] ]
identifier[samples_kept] = identifier[samples_kept] [ identifier[ind] ]
keyword[elif] identifier[desired_samples] <= identifier[N_kept] :
identifier[sorted_densities] = identifier[np] . identifier[sort] ( identifier[local_densities] )
identifier[temp] = identifier[np] . identifier[reciprocal] ( identifier[sorted_densities] [::- literal[int] ]. identifier[astype] ( identifier[float] ))
identifier[cdf] = identifier[np] . identifier[cumsum] ( identifier[temp] )[::- literal[int] ]
identifier[target_density] =( identifier[desired_samples] + literal[int] )/ identifier[cdf] [ literal[int] ]
keyword[if] identifier[target_density] > identifier[sorted_densities] [ literal[int] ]:
identifier[temp] = identifier[desired_samples] - identifier[np] . identifier[arange] ( literal[int] , identifier[N_kept] + literal[int] )
identifier[possible_targets] = identifier[np] . identifier[divide] ( identifier[temp] , identifier[cdf] )
identifier[ind] = identifier[np] . identifier[argmax] ( identifier[possible_targets] < identifier[sorted_densities] )
identifier[target_density] = identifier[possible_targets] [ identifier[ind] ]
identifier[probs] = identifier[np] . identifier[divide] ( identifier[target_density] + literal[int] , identifier[local_densities] )
identifier[ind] = identifier[np] . identifier[where] ( identifier[probs] > identifier[random_state] . identifier[uniform] ( identifier[size] = identifier[N_kept] ))[ literal[int] ]
identifier[samples_kept] = identifier[samples_kept] [ identifier[ind] ]
keyword[else] :
identifier[print] ( literal[string]
literal[string]
literal[string]
literal[string]
literal[string] . identifier[format] (** identifier[locals] ()))
identifier[exit] ( literal[int] )
keyword[return] identifier[samples_kept]
|
def density_sampling(data, local_densities=None, metric='manhattan', kernel_mult=2.0, outlier_percentile=0.01, target_percentile=0.05, desired_samples=None):
"""The i-th sample point of the data-set 'data' is selected by density sampling
with a probability given by:
| 0 if outlier_density > LD[i];
P(keep the i-th data-point) = | 1 if outlier_density <= LD[i] <= target_density;
| target_density / LD[i] if LD[i] > target_density.
Here 'LD[i]' denotes the local density of the i-th sample point of the data-set,
whereas 'outlier_density' and 'target_density' are computed as particular percentiles
of that distribution of local densities.
Parameters
----------
data : array of shape (n_samples, n_features)
The data-set, a fraction of whose sample points will be extracted
by density sampling.
local_densities : array of shape (n_samples,), optional (default = None)
The i-th entry of this vector corresponds to the local density of the i-th sample
point in the order of the rows of 'data'.
metric : string, optional (default = 'manhattan')
The distance metric used to determine the nearest-neighbor to each data-point.
The DistanceMetric class defined in scikit-learn's library lists all available
metrics.
kernel_mult : float, optional (default = 2.0)
The kernel multiplier, which determine (in terms of the median of the distribution
of distances among nearest neighbors) the extent of the regions centered
around each sample point to consider for the computation of the local density
associated to that particular sample point.
outlier_percentile : float, optional (default = 0.01)
Specify the outlier density as a percentile of the distribution of local densities.
target_percentile : float, optional (default = 0.05)
Specifiy the target density as a percentile of the distribution of local densities.
Relevant only if 'desired_samples' is left unspecified.
desired_samples : int, optional (default = None)
The number of samples to be selected from the whole data-set such that members
of rare populations and members of more common populations are roughly
equally represented. To that purpose, a target density is computed that to selects about
'desired_samples' data-points.
Returns
-------
samples_kept : array of shape (n_selected_samples,)
If the 'i'-th sample point of 'data' has been selected by a given instance of
density sampling, number 'i' is featured in the array returned by
the present function.
"""
random_state = np.random.RandomState()
data = np.atleast_2d(data)
for x in (kernel_mult, outlier_percentile, target_percentile):
assert isinstance(x, numbers.Real) and x > 0 # depends on [control=['for'], data=['x']]
for x in (outlier_percentile, target_percentile):
assert x <= 1.0 # depends on [control=['for'], data=['x']]
if local_densities is None:
local_densities = get_local_densities(data, kernel_mult, metric) # depends on [control=['if'], data=['local_densities']]
if reduce(operator.mul, local_densities.shape, 1) != max(local_densities.shape):
raise ValueError('\nERROR: Density_Sampling: density_sampling: problem with the dimensions of the vector of local densities provided.\n') # depends on [control=['if'], data=[]]
else:
local_densities = np.reshape(local_densities, local_densities.size)
outlier_density = np.percentile(local_densities, outlier_percentile)
target_density = np.percentile(local_densities, target_percentile)
samples_kept = np.where(local_densities > outlier_density)[0]
N_kept = samples_kept.size
local_densities = local_densities[samples_kept]
if desired_samples is None:
probs = np.divide(target_density + 0.0, local_densities)
ind = np.where(probs > random_state.uniform(size=N_kept))[0]
samples_kept = samples_kept[ind] # depends on [control=['if'], data=[]]
elif desired_samples <= N_kept:
sorted_densities = np.sort(local_densities)
temp = np.reciprocal(sorted_densities[::-1].astype(float))
cdf = np.cumsum(temp)[::-1]
target_density = (desired_samples + 0.0) / cdf[0]
if target_density > sorted_densities[0]:
temp = desired_samples - np.arange(1.0, N_kept + 1.0)
possible_targets = np.divide(temp, cdf)
ind = np.argmax(possible_targets < sorted_densities)
target_density = possible_targets[ind] # depends on [control=['if'], data=['target_density']]
probs = np.divide(target_density + 0.0, local_densities)
ind = np.where(probs > random_state.uniform(size=N_kept))[0]
samples_kept = samples_kept[ind] # depends on [control=['if'], data=['desired_samples', 'N_kept']]
else:
print("\nERROR: Density_Sampling: density_sampling: 'desired_samples' has been assigned a value of {desired_samples}, larger than {N_kept}, the number of samples whose local densities are high enough (i.e. excluded are the local densities in the lowest {outlier_percentile} percentile).\n".format(**locals()))
exit(1)
return samples_kept
|
def _import_submodules(
__all__, __path__, __name__, include=None, exclude=None,
include_private_modules=False, require__all__=True, recursive=True):
"""
Import all available submodules, all objects defined in the `__all__` lists
of those submodules, and extend `__all__` with the imported objects.
Args:
__all__ (list): The list of public objects in the "root" module
__path__ (str): The path where the ``__init__.py`` file for the "root"
module is located in the file system (every module has a global
`__path__` variable which should be passed here)
__name__ (str): The full name of the "root" module. Again, every module
has a global `__name__` variable.
include (list or None): If not None, list of full module names to be
included. That is, every module not in the `include` list is
ignored
exclude (list or None): List of full module names to be
excluded from the (recursive) input
include_private_modules (bool): Whether to include modules whose name
starts with an underscore
recursive (bool): Whether to recursively act on submodules of the
"root" module. This will make sub-submodules available both in the
submodule, and in the "root" module
"""
mod = sys.modules[__name__]
if exclude is None:
exclude = []
for (_, submodname, ispkg) in pkgutil.iter_modules(path=__path__):
if submodname.startswith('_') and not include_private_modules:
continue
submod = importlib.import_module('.' + submodname, __name__)
if submod.__name__ in exclude:
continue
if include is not None:
if submod.__name__ not in include:
continue
if not hasattr(submod, '__all__'):
setattr(submod, '__all__', [])
if recursive and ispkg:
_import_submodules(
submod.__all__, submod.__path__, submod.__name__)
setattr(mod, submodname, submod)
for obj_name in submod.__all__:
obj = getattr(submod, obj_name)
if hasattr(mod, obj_name):
existing_obj = getattr(mod, obj_name)
if existing_obj is obj:
continue
else:
raise ImportError(
"{mod}.{attr} points to {submod1}.{attr}. "
"Cannot set to {submod2}.{attr}".format(
mod=mod.__name__, attr=obj_name,
submod1=existing_obj.__module__,
submod2=obj.__module__))
setattr(mod, obj_name, obj)
__all__.append(obj_name)
__all__.sort()
|
def function[_import_submodules, parameter[__all__, __path__, __name__, include, exclude, include_private_modules, require__all__, recursive]]:
constant[
Import all available submodules, all objects defined in the `__all__` lists
of those submodules, and extend `__all__` with the imported objects.
Args:
__all__ (list): The list of public objects in the "root" module
__path__ (str): The path where the ``__init__.py`` file for the "root"
module is located in the file system (every module has a global
`__path__` variable which should be passed here)
__name__ (str): The full name of the "root" module. Again, every module
has a global `__name__` variable.
include (list or None): If not None, list of full module names to be
included. That is, every module not in the `include` list is
ignored
exclude (list or None): List of full module names to be
excluded from the (recursive) input
include_private_modules (bool): Whether to include modules whose name
starts with an underscore
recursive (bool): Whether to recursively act on submodules of the
"root" module. This will make sub-submodules available both in the
submodule, and in the "root" module
]
variable[mod] assign[=] call[name[sys].modules][name[__name__]]
if compare[name[exclude] is constant[None]] begin[:]
variable[exclude] assign[=] list[[]]
for taget[tuple[[<ast.Name object at 0x7da18ede75b0>, <ast.Name object at 0x7da18ede4d30>, <ast.Name object at 0x7da18ede5150>]]] in starred[call[name[pkgutil].iter_modules, parameter[]]] begin[:]
if <ast.BoolOp object at 0x7da18ede6d10> begin[:]
continue
variable[submod] assign[=] call[name[importlib].import_module, parameter[binary_operation[constant[.] + name[submodname]], name[__name__]]]
if compare[name[submod].__name__ in name[exclude]] begin[:]
continue
if compare[name[include] is_not constant[None]] begin[:]
if compare[name[submod].__name__ <ast.NotIn object at 0x7da2590d7190> name[include]] begin[:]
continue
if <ast.UnaryOp object at 0x7da18ede4f10> begin[:]
call[name[setattr], parameter[name[submod], constant[__all__], list[[]]]]
if <ast.BoolOp object at 0x7da18ede7be0> begin[:]
call[name[_import_submodules], parameter[name[submod].__all__, name[submod].__path__, name[submod].__name__]]
call[name[setattr], parameter[name[mod], name[submodname], name[submod]]]
for taget[name[obj_name]] in starred[name[submod].__all__] begin[:]
variable[obj] assign[=] call[name[getattr], parameter[name[submod], name[obj_name]]]
if call[name[hasattr], parameter[name[mod], name[obj_name]]] begin[:]
variable[existing_obj] assign[=] call[name[getattr], parameter[name[mod], name[obj_name]]]
if compare[name[existing_obj] is name[obj]] begin[:]
continue
call[name[setattr], parameter[name[mod], name[obj_name], name[obj]]]
call[name[__all__].append, parameter[name[obj_name]]]
call[name[__all__].sort, parameter[]]
|
keyword[def] identifier[_import_submodules] (
identifier[__all__] , identifier[__path__] , identifier[__name__] , identifier[include] = keyword[None] , identifier[exclude] = keyword[None] ,
identifier[include_private_modules] = keyword[False] , identifier[require__all__] = keyword[True] , identifier[recursive] = keyword[True] ):
literal[string]
identifier[mod] = identifier[sys] . identifier[modules] [ identifier[__name__] ]
keyword[if] identifier[exclude] keyword[is] keyword[None] :
identifier[exclude] =[]
keyword[for] ( identifier[_] , identifier[submodname] , identifier[ispkg] ) keyword[in] identifier[pkgutil] . identifier[iter_modules] ( identifier[path] = identifier[__path__] ):
keyword[if] identifier[submodname] . identifier[startswith] ( literal[string] ) keyword[and] keyword[not] identifier[include_private_modules] :
keyword[continue]
identifier[submod] = identifier[importlib] . identifier[import_module] ( literal[string] + identifier[submodname] , identifier[__name__] )
keyword[if] identifier[submod] . identifier[__name__] keyword[in] identifier[exclude] :
keyword[continue]
keyword[if] identifier[include] keyword[is] keyword[not] keyword[None] :
keyword[if] identifier[submod] . identifier[__name__] keyword[not] keyword[in] identifier[include] :
keyword[continue]
keyword[if] keyword[not] identifier[hasattr] ( identifier[submod] , literal[string] ):
identifier[setattr] ( identifier[submod] , literal[string] ,[])
keyword[if] identifier[recursive] keyword[and] identifier[ispkg] :
identifier[_import_submodules] (
identifier[submod] . identifier[__all__] , identifier[submod] . identifier[__path__] , identifier[submod] . identifier[__name__] )
identifier[setattr] ( identifier[mod] , identifier[submodname] , identifier[submod] )
keyword[for] identifier[obj_name] keyword[in] identifier[submod] . identifier[__all__] :
identifier[obj] = identifier[getattr] ( identifier[submod] , identifier[obj_name] )
keyword[if] identifier[hasattr] ( identifier[mod] , identifier[obj_name] ):
identifier[existing_obj] = identifier[getattr] ( identifier[mod] , identifier[obj_name] )
keyword[if] identifier[existing_obj] keyword[is] identifier[obj] :
keyword[continue]
keyword[else] :
keyword[raise] identifier[ImportError] (
literal[string]
literal[string] . identifier[format] (
identifier[mod] = identifier[mod] . identifier[__name__] , identifier[attr] = identifier[obj_name] ,
identifier[submod1] = identifier[existing_obj] . identifier[__module__] ,
identifier[submod2] = identifier[obj] . identifier[__module__] ))
identifier[setattr] ( identifier[mod] , identifier[obj_name] , identifier[obj] )
identifier[__all__] . identifier[append] ( identifier[obj_name] )
identifier[__all__] . identifier[sort] ()
|
def _import_submodules(__all__, __path__, __name__, include=None, exclude=None, include_private_modules=False, require__all__=True, recursive=True):
"""
Import all available submodules, all objects defined in the `__all__` lists
of those submodules, and extend `__all__` with the imported objects.
Args:
__all__ (list): The list of public objects in the "root" module
__path__ (str): The path where the ``__init__.py`` file for the "root"
module is located in the file system (every module has a global
`__path__` variable which should be passed here)
__name__ (str): The full name of the "root" module. Again, every module
has a global `__name__` variable.
include (list or None): If not None, list of full module names to be
included. That is, every module not in the `include` list is
ignored
exclude (list or None): List of full module names to be
excluded from the (recursive) input
include_private_modules (bool): Whether to include modules whose name
starts with an underscore
recursive (bool): Whether to recursively act on submodules of the
"root" module. This will make sub-submodules available both in the
submodule, and in the "root" module
"""
mod = sys.modules[__name__]
if exclude is None:
exclude = [] # depends on [control=['if'], data=['exclude']]
for (_, submodname, ispkg) in pkgutil.iter_modules(path=__path__):
if submodname.startswith('_') and (not include_private_modules):
continue # depends on [control=['if'], data=[]]
submod = importlib.import_module('.' + submodname, __name__)
if submod.__name__ in exclude:
continue # depends on [control=['if'], data=[]]
if include is not None:
if submod.__name__ not in include:
continue # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['include']]
if not hasattr(submod, '__all__'):
setattr(submod, '__all__', []) # depends on [control=['if'], data=[]]
if recursive and ispkg:
_import_submodules(submod.__all__, submod.__path__, submod.__name__) # depends on [control=['if'], data=[]]
setattr(mod, submodname, submod)
for obj_name in submod.__all__:
obj = getattr(submod, obj_name)
if hasattr(mod, obj_name):
existing_obj = getattr(mod, obj_name)
if existing_obj is obj:
continue # depends on [control=['if'], data=[]]
else:
raise ImportError('{mod}.{attr} points to {submod1}.{attr}. Cannot set to {submod2}.{attr}'.format(mod=mod.__name__, attr=obj_name, submod1=existing_obj.__module__, submod2=obj.__module__)) # depends on [control=['if'], data=[]]
setattr(mod, obj_name, obj)
__all__.append(obj_name) # depends on [control=['for'], data=['obj_name']] # depends on [control=['for'], data=[]]
__all__.sort()
|
def add_boundary_regions(regions=None, faces=['front', 'back', 'left',
'right', 'top', 'bottom']):
r"""
Given an image partitioned into regions, pads specified faces with new
regions
Parameters
----------
regions : ND-array
An image of the pore space partitioned into regions and labeled
faces : list of strings
The faces of ``regions`` which should have boundaries added. Options
are:
*'right'* - Adds boundaries to the x=0 face (``im[0, :, :]``)
*'left'* - Adds boundaries to the x=X face (``im[-1, :, :]``)
*'front'* - Adds boundaries to the y=0 face (``im[:, ), :]``)
*'back'* - Adds boundaries to the x=0 face (``im[:, -1, :]``)
*'bottom'* - Adds boundaries to the x=0 face (``im[:, :, 0]``)
*'top'* - Adds boundaries to the x=0 face (``im[:, :, -1]``)
The default is all faces.
Returns
-------
image : ND-array
A copy of ``regions`` with the specified boundaries added, so will be
slightly larger in each direction where boundaries were added.
"""
# -------------------------------------------------------------------------
# Edge pad segmentation and distance transform
if faces is not None:
regions = sp.pad(regions, 1, 'edge')
# ---------------------------------------------------------------------
if regions.ndim == 3:
# Remove boundary nodes interconnection
regions[:, :, 0] = regions[:, :, 0] + regions.max()
regions[:, :, -1] = regions[:, :, -1] + regions.max()
regions[0, :, :] = regions[0, :, :] + regions.max()
regions[-1, :, :] = regions[-1, :, :] + regions.max()
regions[:, 0, :] = regions[:, 0, :] + regions.max()
regions[:, -1, :] = regions[:, -1, :] + regions.max()
regions[:, :, 0] = (~find_boundaries(regions[:, :, 0],
mode='outer')) * regions[:, :, 0]
regions[:, :, -1] = (~find_boundaries(regions[:, :, -1],
mode='outer')) * regions[:, :, -1]
regions[0, :, :] = (~find_boundaries(regions[0, :, :],
mode='outer')) * regions[0, :, :]
regions[-1, :, :] = (~find_boundaries(regions[-1, :, :],
mode='outer')) * regions[-1, :, :]
regions[:, 0, :] = (~find_boundaries(regions[:, 0, :],
mode='outer')) * regions[:, 0, :]
regions[:, -1, :] = (~find_boundaries(regions[:, -1, :],
mode='outer')) * regions[:, -1, :]
# -----------------------------------------------------------------
regions = sp.pad(regions, 2, 'edge')
# Remove unselected faces
if 'front' not in faces:
regions = regions[:, 3:, :] # y
if 'back' not in faces:
regions = regions[:, :-3, :]
if 'left' not in faces:
regions = regions[3:, :, :] # x
if 'right' not in faces:
regions = regions[:-3, :, :]
if 'bottom' not in faces:
regions = regions[:, :, 3:] # z
if 'top' not in faces:
regions = regions[:, :, :-3]
elif regions.ndim == 2:
# Remove boundary nodes interconnection
regions[0, :] = regions[0, :] + regions.max()
regions[-1, :] = regions[-1, :] + regions.max()
regions[:, 0] = regions[:, 0] + regions.max()
regions[:, -1] = regions[:, -1] + regions.max()
regions[0, :] = (~find_boundaries(regions[0, :],
mode='outer')) * regions[0, :]
regions[-1, :] = (~find_boundaries(regions[-1, :],
mode='outer')) * regions[-1, :]
regions[:, 0] = (~find_boundaries(regions[:, 0],
mode='outer')) * regions[:, 0]
regions[:, -1] = (~find_boundaries(regions[:, -1],
mode='outer')) * regions[:, -1]
# -----------------------------------------------------------------
regions = sp.pad(regions, 2, 'edge')
# Remove unselected faces
if 'left' not in faces:
regions = regions[3:, :] # x
if 'right' not in faces:
regions = regions[:-3, :]
if 'front' not in faces and 'bottom' not in faces:
regions = regions[:, 3:] # y
if 'back' not in faces and 'top' not in faces:
regions = regions[:, :-3]
else:
print('add_boundary_regions works only on 2D and 3D images')
# ---------------------------------------------------------------------
# Make labels contiguous
regions = make_contiguous(regions)
else:
regions = regions
return regions
|
def function[add_boundary_regions, parameter[regions, faces]]:
constant[
Given an image partitioned into regions, pads specified faces with new
regions
Parameters
----------
regions : ND-array
An image of the pore space partitioned into regions and labeled
faces : list of strings
The faces of ``regions`` which should have boundaries added. Options
are:
*'right'* - Adds boundaries to the x=0 face (``im[0, :, :]``)
*'left'* - Adds boundaries to the x=X face (``im[-1, :, :]``)
*'front'* - Adds boundaries to the y=0 face (``im[:, ), :]``)
*'back'* - Adds boundaries to the x=0 face (``im[:, -1, :]``)
*'bottom'* - Adds boundaries to the x=0 face (``im[:, :, 0]``)
*'top'* - Adds boundaries to the x=0 face (``im[:, :, -1]``)
The default is all faces.
Returns
-------
image : ND-array
A copy of ``regions`` with the specified boundaries added, so will be
slightly larger in each direction where boundaries were added.
]
if compare[name[faces] is_not constant[None]] begin[:]
variable[regions] assign[=] call[name[sp].pad, parameter[name[regions], constant[1], constant[edge]]]
if compare[name[regions].ndim equal[==] constant[3]] begin[:]
call[name[regions]][tuple[[<ast.Slice object at 0x7da1b0527670>, <ast.Slice object at 0x7da1b0527640>, <ast.Constant object at 0x7da1b0527610>]]] assign[=] binary_operation[call[name[regions]][tuple[[<ast.Slice object at 0x7da1b0527520>, <ast.Slice object at 0x7da1b05274f0>, <ast.Constant object at 0x7da1b05274c0>]]] + call[name[regions].max, parameter[]]]
call[name[regions]][tuple[[<ast.Slice object at 0x7da1b0527340>, <ast.Slice object at 0x7da1b0527310>, <ast.UnaryOp object at 0x7da1b05272e0>]]] assign[=] binary_operation[call[name[regions]][tuple[[<ast.Slice object at 0x7da1b05271c0>, <ast.Slice object at 0x7da1b0527190>, <ast.UnaryOp object at 0x7da1b0527160>]]] + call[name[regions].max, parameter[]]]
call[name[regions]][tuple[[<ast.Constant object at 0x7da1b0526fb0>, <ast.Slice object at 0x7da1b0526f80>, <ast.Slice object at 0x7da1b0526f50>]]] assign[=] binary_operation[call[name[regions]][tuple[[<ast.Constant object at 0x7da1b0526e60>, <ast.Slice object at 0x7da1b0526e30>, <ast.Slice object at 0x7da1b0526e00>]]] + call[name[regions].max, parameter[]]]
call[name[regions]][tuple[[<ast.UnaryOp object at 0x7da1b0526c80>, <ast.Slice object at 0x7da1b0526c20>, <ast.Slice object at 0x7da1b0526bf0>]]] assign[=] binary_operation[call[name[regions]][tuple[[<ast.UnaryOp object at 0x7da1b0526b00>, <ast.Slice object at 0x7da1b0526aa0>, <ast.Slice object at 0x7da1b0526a70>]]] + call[name[regions].max, parameter[]]]
call[name[regions]][tuple[[<ast.Slice object at 0x7da1b05268f0>, <ast.Constant object at 0x7da1b05268c0>, <ast.Slice object at 0x7da1b0526890>]]] assign[=] binary_operation[call[name[regions]][tuple[[<ast.Slice object at 0x7da1b05267a0>, <ast.Constant object at 0x7da1b0526770>, <ast.Slice object at 0x7da1b0526740>]]] + call[name[regions].max, parameter[]]]
call[name[regions]][tuple[[<ast.Slice object at 0x7da1b05265c0>, <ast.UnaryOp object at 0x7da1b0526590>, <ast.Slice object at 0x7da1b0526530>]]] assign[=] binary_operation[call[name[regions]][tuple[[<ast.Slice object at 0x7da1b0526440>, <ast.UnaryOp object at 0x7da1b0526410>, <ast.Slice object at 0x7da1b05263b0>]]] + call[name[regions].max, parameter[]]]
call[name[regions]][tuple[[<ast.Slice object at 0x7da1b0526230>, <ast.Slice object at 0x7da1b0526200>, <ast.Constant object at 0x7da1b05261d0>]]] assign[=] binary_operation[<ast.UnaryOp object at 0x7da1b0526170> * call[name[regions]][tuple[[<ast.Slice object at 0x7da1b0525ed0>, <ast.Slice object at 0x7da1b0525ea0>, <ast.Constant object at 0x7da1b0525e70>]]]]
call[name[regions]][tuple[[<ast.Slice object at 0x7da1b0525d80>, <ast.Slice object at 0x7da1b0525d50>, <ast.UnaryOp object at 0x7da1b0525d20>]]] assign[=] binary_operation[<ast.UnaryOp object at 0x7da1b0525c90> * call[name[regions]][tuple[[<ast.Slice object at 0x7da1b05259c0>, <ast.Slice object at 0x7da1b0525990>, <ast.UnaryOp object at 0x7da1b0525960>]]]]
call[name[regions]][tuple[[<ast.Constant object at 0x7da1b0525840>, <ast.Slice object at 0x7da1b0525810>, <ast.Slice object at 0x7da1b05257e0>]]] assign[=] binary_operation[<ast.UnaryOp object at 0x7da1b0525780> * call[name[regions]][tuple[[<ast.Constant object at 0x7da1b05254e0>, <ast.Slice object at 0x7da1b05254b0>, <ast.Slice object at 0x7da1b0525480>]]]]
call[name[regions]][tuple[[<ast.UnaryOp object at 0x7da1b0525390>, <ast.Slice object at 0x7da1b0525330>, <ast.Slice object at 0x7da1b0525300>]]] assign[=] binary_operation[<ast.UnaryOp object at 0x7da1b05252a0> * call[name[regions]][tuple[[<ast.UnaryOp object at 0x7da1b0524fd0>, <ast.Slice object at 0x7da1b0524f70>, <ast.Slice object at 0x7da1b0524f40>]]]]
call[name[regions]][tuple[[<ast.Slice object at 0x7da1b0524e50>, <ast.Constant object at 0x7da1b0524e20>, <ast.Slice object at 0x7da1b0524df0>]]] assign[=] binary_operation[<ast.UnaryOp object at 0x7da1b0524d90> * call[name[regions]][tuple[[<ast.Slice object at 0x7da1b0524af0>, <ast.Constant object at 0x7da1b0524ac0>, <ast.Slice object at 0x7da1b0524a90>]]]]
call[name[regions]][tuple[[<ast.Slice object at 0x7da1b05249a0>, <ast.UnaryOp object at 0x7da1b0524970>, <ast.Slice object at 0x7da1b0524910>]]] assign[=] binary_operation[<ast.UnaryOp object at 0x7da1b05248b0> * call[name[regions]][tuple[[<ast.Slice object at 0x7da1b05245e0>, <ast.UnaryOp object at 0x7da1b05245b0>, <ast.Slice object at 0x7da1b0524550>]]]]
variable[regions] assign[=] call[name[sp].pad, parameter[name[regions], constant[2], constant[edge]]]
if compare[constant[front] <ast.NotIn object at 0x7da2590d7190> name[faces]] begin[:]
variable[regions] assign[=] call[name[regions]][tuple[[<ast.Slice object at 0x7da1b05241f0>, <ast.Slice object at 0x7da1b05241c0>, <ast.Slice object at 0x7da1b0524160>]]]
if compare[constant[back] <ast.NotIn object at 0x7da2590d7190> name[faces]] begin[:]
variable[regions] assign[=] call[name[regions]][tuple[[<ast.Slice object at 0x7da1b0556980>, <ast.Slice object at 0x7da1b0554070>, <ast.Slice object at 0x7da1b0554100>]]]
if compare[constant[left] <ast.NotIn object at 0x7da2590d7190> name[faces]] begin[:]
variable[regions] assign[=] call[name[regions]][tuple[[<ast.Slice object at 0x7da1b05543d0>, <ast.Slice object at 0x7da1b0555c30>, <ast.Slice object at 0x7da1b0555c60>]]]
if compare[constant[right] <ast.NotIn object at 0x7da2590d7190> name[faces]] begin[:]
variable[regions] assign[=] call[name[regions]][tuple[[<ast.Slice object at 0x7da1b0556920>, <ast.Slice object at 0x7da1b0556770>, <ast.Slice object at 0x7da1b0556740>]]]
if compare[constant[bottom] <ast.NotIn object at 0x7da2590d7190> name[faces]] begin[:]
variable[regions] assign[=] call[name[regions]][tuple[[<ast.Slice object at 0x7da1b0556560>, <ast.Slice object at 0x7da1b0556530>, <ast.Slice object at 0x7da1b0556500>]]]
if compare[constant[top] <ast.NotIn object at 0x7da2590d7190> name[faces]] begin[:]
variable[regions] assign[=] call[name[regions]][tuple[[<ast.Slice object at 0x7da1b0556f20>, <ast.Slice object at 0x7da1b0556ef0>, <ast.Slice object at 0x7da1b0556ec0>]]]
variable[regions] assign[=] call[name[make_contiguous], parameter[name[regions]]]
return[name[regions]]
|
keyword[def] identifier[add_boundary_regions] ( identifier[regions] = keyword[None] , identifier[faces] =[ literal[string] , literal[string] , literal[string] ,
literal[string] , literal[string] , literal[string] ]):
literal[string]
keyword[if] identifier[faces] keyword[is] keyword[not] keyword[None] :
identifier[regions] = identifier[sp] . identifier[pad] ( identifier[regions] , literal[int] , literal[string] )
keyword[if] identifier[regions] . identifier[ndim] == literal[int] :
identifier[regions] [:,:, literal[int] ]= identifier[regions] [:,:, literal[int] ]+ identifier[regions] . identifier[max] ()
identifier[regions] [:,:,- literal[int] ]= identifier[regions] [:,:,- literal[int] ]+ identifier[regions] . identifier[max] ()
identifier[regions] [ literal[int] ,:,:]= identifier[regions] [ literal[int] ,:,:]+ identifier[regions] . identifier[max] ()
identifier[regions] [- literal[int] ,:,:]= identifier[regions] [- literal[int] ,:,:]+ identifier[regions] . identifier[max] ()
identifier[regions] [:, literal[int] ,:]= identifier[regions] [:, literal[int] ,:]+ identifier[regions] . identifier[max] ()
identifier[regions] [:,- literal[int] ,:]= identifier[regions] [:,- literal[int] ,:]+ identifier[regions] . identifier[max] ()
identifier[regions] [:,:, literal[int] ]=(~ identifier[find_boundaries] ( identifier[regions] [:,:, literal[int] ],
identifier[mode] = literal[string] ))* identifier[regions] [:,:, literal[int] ]
identifier[regions] [:,:,- literal[int] ]=(~ identifier[find_boundaries] ( identifier[regions] [:,:,- literal[int] ],
identifier[mode] = literal[string] ))* identifier[regions] [:,:,- literal[int] ]
identifier[regions] [ literal[int] ,:,:]=(~ identifier[find_boundaries] ( identifier[regions] [ literal[int] ,:,:],
identifier[mode] = literal[string] ))* identifier[regions] [ literal[int] ,:,:]
identifier[regions] [- literal[int] ,:,:]=(~ identifier[find_boundaries] ( identifier[regions] [- literal[int] ,:,:],
identifier[mode] = literal[string] ))* identifier[regions] [- literal[int] ,:,:]
identifier[regions] [:, literal[int] ,:]=(~ identifier[find_boundaries] ( identifier[regions] [:, literal[int] ,:],
identifier[mode] = literal[string] ))* identifier[regions] [:, literal[int] ,:]
identifier[regions] [:,- literal[int] ,:]=(~ identifier[find_boundaries] ( identifier[regions] [:,- literal[int] ,:],
identifier[mode] = literal[string] ))* identifier[regions] [:,- literal[int] ,:]
identifier[regions] = identifier[sp] . identifier[pad] ( identifier[regions] , literal[int] , literal[string] )
keyword[if] literal[string] keyword[not] keyword[in] identifier[faces] :
identifier[regions] = identifier[regions] [:, literal[int] :,:]
keyword[if] literal[string] keyword[not] keyword[in] identifier[faces] :
identifier[regions] = identifier[regions] [:,:- literal[int] ,:]
keyword[if] literal[string] keyword[not] keyword[in] identifier[faces] :
identifier[regions] = identifier[regions] [ literal[int] :,:,:]
keyword[if] literal[string] keyword[not] keyword[in] identifier[faces] :
identifier[regions] = identifier[regions] [:- literal[int] ,:,:]
keyword[if] literal[string] keyword[not] keyword[in] identifier[faces] :
identifier[regions] = identifier[regions] [:,:, literal[int] :]
keyword[if] literal[string] keyword[not] keyword[in] identifier[faces] :
identifier[regions] = identifier[regions] [:,:,:- literal[int] ]
keyword[elif] identifier[regions] . identifier[ndim] == literal[int] :
identifier[regions] [ literal[int] ,:]= identifier[regions] [ literal[int] ,:]+ identifier[regions] . identifier[max] ()
identifier[regions] [- literal[int] ,:]= identifier[regions] [- literal[int] ,:]+ identifier[regions] . identifier[max] ()
identifier[regions] [:, literal[int] ]= identifier[regions] [:, literal[int] ]+ identifier[regions] . identifier[max] ()
identifier[regions] [:,- literal[int] ]= identifier[regions] [:,- literal[int] ]+ identifier[regions] . identifier[max] ()
identifier[regions] [ literal[int] ,:]=(~ identifier[find_boundaries] ( identifier[regions] [ literal[int] ,:],
identifier[mode] = literal[string] ))* identifier[regions] [ literal[int] ,:]
identifier[regions] [- literal[int] ,:]=(~ identifier[find_boundaries] ( identifier[regions] [- literal[int] ,:],
identifier[mode] = literal[string] ))* identifier[regions] [- literal[int] ,:]
identifier[regions] [:, literal[int] ]=(~ identifier[find_boundaries] ( identifier[regions] [:, literal[int] ],
identifier[mode] = literal[string] ))* identifier[regions] [:, literal[int] ]
identifier[regions] [:,- literal[int] ]=(~ identifier[find_boundaries] ( identifier[regions] [:,- literal[int] ],
identifier[mode] = literal[string] ))* identifier[regions] [:,- literal[int] ]
identifier[regions] = identifier[sp] . identifier[pad] ( identifier[regions] , literal[int] , literal[string] )
keyword[if] literal[string] keyword[not] keyword[in] identifier[faces] :
identifier[regions] = identifier[regions] [ literal[int] :,:]
keyword[if] literal[string] keyword[not] keyword[in] identifier[faces] :
identifier[regions] = identifier[regions] [:- literal[int] ,:]
keyword[if] literal[string] keyword[not] keyword[in] identifier[faces] keyword[and] literal[string] keyword[not] keyword[in] identifier[faces] :
identifier[regions] = identifier[regions] [:, literal[int] :]
keyword[if] literal[string] keyword[not] keyword[in] identifier[faces] keyword[and] literal[string] keyword[not] keyword[in] identifier[faces] :
identifier[regions] = identifier[regions] [:,:- literal[int] ]
keyword[else] :
identifier[print] ( literal[string] )
identifier[regions] = identifier[make_contiguous] ( identifier[regions] )
keyword[else] :
identifier[regions] = identifier[regions]
keyword[return] identifier[regions]
|
def add_boundary_regions(regions=None, faces=['front', 'back', 'left', 'right', 'top', 'bottom']):
"""
Given an image partitioned into regions, pads specified faces with new
regions
Parameters
----------
regions : ND-array
An image of the pore space partitioned into regions and labeled
faces : list of strings
The faces of ``regions`` which should have boundaries added. Options
are:
*'right'* - Adds boundaries to the x=0 face (``im[0, :, :]``)
*'left'* - Adds boundaries to the x=X face (``im[-1, :, :]``)
*'front'* - Adds boundaries to the y=0 face (``im[:, ), :]``)
*'back'* - Adds boundaries to the x=0 face (``im[:, -1, :]``)
*'bottom'* - Adds boundaries to the x=0 face (``im[:, :, 0]``)
*'top'* - Adds boundaries to the x=0 face (``im[:, :, -1]``)
The default is all faces.
Returns
-------
image : ND-array
A copy of ``regions`` with the specified boundaries added, so will be
slightly larger in each direction where boundaries were added.
"""
# -------------------------------------------------------------------------
# Edge pad segmentation and distance transform
if faces is not None:
regions = sp.pad(regions, 1, 'edge')
# ---------------------------------------------------------------------
if regions.ndim == 3:
# Remove boundary nodes interconnection
regions[:, :, 0] = regions[:, :, 0] + regions.max()
regions[:, :, -1] = regions[:, :, -1] + regions.max()
regions[0, :, :] = regions[0, :, :] + regions.max()
regions[-1, :, :] = regions[-1, :, :] + regions.max()
regions[:, 0, :] = regions[:, 0, :] + regions.max()
regions[:, -1, :] = regions[:, -1, :] + regions.max()
regions[:, :, 0] = ~find_boundaries(regions[:, :, 0], mode='outer') * regions[:, :, 0]
regions[:, :, -1] = ~find_boundaries(regions[:, :, -1], mode='outer') * regions[:, :, -1]
regions[0, :, :] = ~find_boundaries(regions[0, :, :], mode='outer') * regions[0, :, :]
regions[-1, :, :] = ~find_boundaries(regions[-1, :, :], mode='outer') * regions[-1, :, :]
regions[:, 0, :] = ~find_boundaries(regions[:, 0, :], mode='outer') * regions[:, 0, :]
regions[:, -1, :] = ~find_boundaries(regions[:, -1, :], mode='outer') * regions[:, -1, :]
# -----------------------------------------------------------------
regions = sp.pad(regions, 2, 'edge')
# Remove unselected faces
if 'front' not in faces:
regions = regions[:, 3:, :] # y # depends on [control=['if'], data=[]]
if 'back' not in faces:
regions = regions[:, :-3, :] # depends on [control=['if'], data=[]]
if 'left' not in faces:
regions = regions[3:, :, :] # x # depends on [control=['if'], data=[]]
if 'right' not in faces:
regions = regions[:-3, :, :] # depends on [control=['if'], data=[]]
if 'bottom' not in faces:
regions = regions[:, :, 3:] # z # depends on [control=['if'], data=[]]
if 'top' not in faces:
regions = regions[:, :, :-3] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
elif regions.ndim == 2:
# Remove boundary nodes interconnection
regions[0, :] = regions[0, :] + regions.max()
regions[-1, :] = regions[-1, :] + regions.max()
regions[:, 0] = regions[:, 0] + regions.max()
regions[:, -1] = regions[:, -1] + regions.max()
regions[0, :] = ~find_boundaries(regions[0, :], mode='outer') * regions[0, :]
regions[-1, :] = ~find_boundaries(regions[-1, :], mode='outer') * regions[-1, :]
regions[:, 0] = ~find_boundaries(regions[:, 0], mode='outer') * regions[:, 0]
regions[:, -1] = ~find_boundaries(regions[:, -1], mode='outer') * regions[:, -1]
# -----------------------------------------------------------------
regions = sp.pad(regions, 2, 'edge')
# Remove unselected faces
if 'left' not in faces:
regions = regions[3:, :] # x # depends on [control=['if'], data=[]]
if 'right' not in faces:
regions = regions[:-3, :] # depends on [control=['if'], data=[]]
if 'front' not in faces and 'bottom' not in faces:
regions = regions[:, 3:] # y # depends on [control=['if'], data=[]]
if 'back' not in faces and 'top' not in faces:
regions = regions[:, :-3] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
else:
print('add_boundary_regions works only on 2D and 3D images')
# ---------------------------------------------------------------------
# Make labels contiguous
regions = make_contiguous(regions) # depends on [control=['if'], data=['faces']]
else:
regions = regions
return regions
|
def message_stanza_handler(stanza_type = None, payload_class = None,
payload_key = None, usage_restriction = "post-auth"):
"""Method decorator generator for decorating <message/>
stanza handler methods in `XMPPFeatureHandler` subclasses.
:Parameters:
- `payload_class`: payload class expected
- `stanza_type`: expected value of the 'type' attribute of the stanza.
`None` means all types except 'error'
- `payload_key`: payload class specific filtering key
- `usage_restriction`: optional usage restriction: "pre-auth" or
"post-auth"
:Types:
- `payload_class`: subclass of `StanzaPayload`
- `stanza_type`: `unicode`
- `usage_restriction`: `unicode`
"""
if stanza_type is None:
stanza_type = "normal"
return _stanza_handler("message", stanza_type, payload_class, payload_key,
usage_restriction)
|
def function[message_stanza_handler, parameter[stanza_type, payload_class, payload_key, usage_restriction]]:
constant[Method decorator generator for decorating <message/>
stanza handler methods in `XMPPFeatureHandler` subclasses.
:Parameters:
- `payload_class`: payload class expected
- `stanza_type`: expected value of the 'type' attribute of the stanza.
`None` means all types except 'error'
- `payload_key`: payload class specific filtering key
- `usage_restriction`: optional usage restriction: "pre-auth" or
"post-auth"
:Types:
- `payload_class`: subclass of `StanzaPayload`
- `stanza_type`: `unicode`
- `usage_restriction`: `unicode`
]
if compare[name[stanza_type] is constant[None]] begin[:]
variable[stanza_type] assign[=] constant[normal]
return[call[name[_stanza_handler], parameter[constant[message], name[stanza_type], name[payload_class], name[payload_key], name[usage_restriction]]]]
|
keyword[def] identifier[message_stanza_handler] ( identifier[stanza_type] = keyword[None] , identifier[payload_class] = keyword[None] ,
identifier[payload_key] = keyword[None] , identifier[usage_restriction] = literal[string] ):
literal[string]
keyword[if] identifier[stanza_type] keyword[is] keyword[None] :
identifier[stanza_type] = literal[string]
keyword[return] identifier[_stanza_handler] ( literal[string] , identifier[stanza_type] , identifier[payload_class] , identifier[payload_key] ,
identifier[usage_restriction] )
|
def message_stanza_handler(stanza_type=None, payload_class=None, payload_key=None, usage_restriction='post-auth'):
"""Method decorator generator for decorating <message/>
stanza handler methods in `XMPPFeatureHandler` subclasses.
:Parameters:
- `payload_class`: payload class expected
- `stanza_type`: expected value of the 'type' attribute of the stanza.
`None` means all types except 'error'
- `payload_key`: payload class specific filtering key
- `usage_restriction`: optional usage restriction: "pre-auth" or
"post-auth"
:Types:
- `payload_class`: subclass of `StanzaPayload`
- `stanza_type`: `unicode`
- `usage_restriction`: `unicode`
"""
if stanza_type is None:
stanza_type = 'normal' # depends on [control=['if'], data=['stanza_type']]
return _stanza_handler('message', stanza_type, payload_class, payload_key, usage_restriction)
|
def get_ticker(self, symbol=None):
"""Get symbol tick
https://docs.kucoin.com/#get-ticker
:param symbol: (optional) Name of symbol e.g. KCS-BTC
:type symbol: string
.. code:: python
all_ticks = client.get_ticker()
ticker = client.get_ticker('ETH-BTC')
:returns: ApiResponse
.. code:: python
{
"sequence": "1545825031840", # now sequence
"price": "3494.367783", # last trade price
"size": "0.05027185", # last trade size
"bestBid": "3494.367783", # best bid price
"bestBidSize": "2.60323254", # size at best bid price
"bestAsk": "3499.12", # best ask price
"bestAskSize": "0.01474011" # size at best ask price
}
:raises: KucoinResponseException, KucoinAPIException
"""
data = {}
tick_path = 'market/allTickers'
if symbol is not None:
tick_path = 'market/orderbook/level1'
data = {
'symbol': symbol
}
return self._get(tick_path, False, data=data)
|
def function[get_ticker, parameter[self, symbol]]:
constant[Get symbol tick
https://docs.kucoin.com/#get-ticker
:param symbol: (optional) Name of symbol e.g. KCS-BTC
:type symbol: string
.. code:: python
all_ticks = client.get_ticker()
ticker = client.get_ticker('ETH-BTC')
:returns: ApiResponse
.. code:: python
{
"sequence": "1545825031840", # now sequence
"price": "3494.367783", # last trade price
"size": "0.05027185", # last trade size
"bestBid": "3494.367783", # best bid price
"bestBidSize": "2.60323254", # size at best bid price
"bestAsk": "3499.12", # best ask price
"bestAskSize": "0.01474011" # size at best ask price
}
:raises: KucoinResponseException, KucoinAPIException
]
variable[data] assign[=] dictionary[[], []]
variable[tick_path] assign[=] constant[market/allTickers]
if compare[name[symbol] is_not constant[None]] begin[:]
variable[tick_path] assign[=] constant[market/orderbook/level1]
variable[data] assign[=] dictionary[[<ast.Constant object at 0x7da1b084d270>], [<ast.Name object at 0x7da1b084eb60>]]
return[call[name[self]._get, parameter[name[tick_path], constant[False]]]]
|
keyword[def] identifier[get_ticker] ( identifier[self] , identifier[symbol] = keyword[None] ):
literal[string]
identifier[data] ={}
identifier[tick_path] = literal[string]
keyword[if] identifier[symbol] keyword[is] keyword[not] keyword[None] :
identifier[tick_path] = literal[string]
identifier[data] ={
literal[string] : identifier[symbol]
}
keyword[return] identifier[self] . identifier[_get] ( identifier[tick_path] , keyword[False] , identifier[data] = identifier[data] )
|
def get_ticker(self, symbol=None):
"""Get symbol tick
https://docs.kucoin.com/#get-ticker
:param symbol: (optional) Name of symbol e.g. KCS-BTC
:type symbol: string
.. code:: python
all_ticks = client.get_ticker()
ticker = client.get_ticker('ETH-BTC')
:returns: ApiResponse
.. code:: python
{
"sequence": "1545825031840", # now sequence
"price": "3494.367783", # last trade price
"size": "0.05027185", # last trade size
"bestBid": "3494.367783", # best bid price
"bestBidSize": "2.60323254", # size at best bid price
"bestAsk": "3499.12", # best ask price
"bestAskSize": "0.01474011" # size at best ask price
}
:raises: KucoinResponseException, KucoinAPIException
"""
data = {}
tick_path = 'market/allTickers'
if symbol is not None:
tick_path = 'market/orderbook/level1'
data = {'symbol': symbol} # depends on [control=['if'], data=['symbol']]
return self._get(tick_path, False, data=data)
|
def process_message(message):
"""
Process a message dict and return a Message Object
:param message: Message dict returned by `parse_xml` function
:return: Message Object
"""
message["type"] = message.pop("MsgType").lower()
if message["type"] == 'event':
message["type"] = str(message.pop("Event")).lower() + '_event'
message_type = EventMetaClass.TYPES.get(message["type"], UnknownEvent)
else:
message_type = MessageMetaClass.TYPES.get(
message["type"], UnknownMessage
)
return message_type(message)
|
def function[process_message, parameter[message]]:
constant[
Process a message dict and return a Message Object
:param message: Message dict returned by `parse_xml` function
:return: Message Object
]
call[name[message]][constant[type]] assign[=] call[call[name[message].pop, parameter[constant[MsgType]]].lower, parameter[]]
if compare[call[name[message]][constant[type]] equal[==] constant[event]] begin[:]
call[name[message]][constant[type]] assign[=] binary_operation[call[call[name[str], parameter[call[name[message].pop, parameter[constant[Event]]]]].lower, parameter[]] + constant[_event]]
variable[message_type] assign[=] call[name[EventMetaClass].TYPES.get, parameter[call[name[message]][constant[type]], name[UnknownEvent]]]
return[call[name[message_type], parameter[name[message]]]]
|
keyword[def] identifier[process_message] ( identifier[message] ):
literal[string]
identifier[message] [ literal[string] ]= identifier[message] . identifier[pop] ( literal[string] ). identifier[lower] ()
keyword[if] identifier[message] [ literal[string] ]== literal[string] :
identifier[message] [ literal[string] ]= identifier[str] ( identifier[message] . identifier[pop] ( literal[string] )). identifier[lower] ()+ literal[string]
identifier[message_type] = identifier[EventMetaClass] . identifier[TYPES] . identifier[get] ( identifier[message] [ literal[string] ], identifier[UnknownEvent] )
keyword[else] :
identifier[message_type] = identifier[MessageMetaClass] . identifier[TYPES] . identifier[get] (
identifier[message] [ literal[string] ], identifier[UnknownMessage]
)
keyword[return] identifier[message_type] ( identifier[message] )
|
def process_message(message):
"""
Process a message dict and return a Message Object
:param message: Message dict returned by `parse_xml` function
:return: Message Object
"""
message['type'] = message.pop('MsgType').lower()
if message['type'] == 'event':
message['type'] = str(message.pop('Event')).lower() + '_event'
message_type = EventMetaClass.TYPES.get(message['type'], UnknownEvent) # depends on [control=['if'], data=[]]
else:
message_type = MessageMetaClass.TYPES.get(message['type'], UnknownMessage)
return message_type(message)
|
def getTextualNode(
self,
textId: str,
subreference: Union[str, BaseReference]=None,
prevnext: bool=False,
metadata: bool=False
) -> DtsResolverDocument:
""" Retrieve a text node from the API
:param textId: CtsTextMetadata Identifier
:type textId: str
:param subreference: CapitainsCtsPassage Reference
:type subreference: str
:param prevnext: Retrieve graph representing previous and next passage
:type prevnext: boolean
:param metadata: Retrieve metadata about the passage and the text
:type metadata: boolean
:return: CapitainsCtsPassage
:rtype: CapitainsCtsPassage
"""
return DtsResolverDocument.parse(
identifier=textId,
reference=subreference,
resolver=self,
response=self.endpoint.get_document(collection_id=textId, ref=subreference)
)
|
def function[getTextualNode, parameter[self, textId, subreference, prevnext, metadata]]:
constant[ Retrieve a text node from the API
:param textId: CtsTextMetadata Identifier
:type textId: str
:param subreference: CapitainsCtsPassage Reference
:type subreference: str
:param prevnext: Retrieve graph representing previous and next passage
:type prevnext: boolean
:param metadata: Retrieve metadata about the passage and the text
:type metadata: boolean
:return: CapitainsCtsPassage
:rtype: CapitainsCtsPassage
]
return[call[name[DtsResolverDocument].parse, parameter[]]]
|
keyword[def] identifier[getTextualNode] (
identifier[self] ,
identifier[textId] : identifier[str] ,
identifier[subreference] : identifier[Union] [ identifier[str] , identifier[BaseReference] ]= keyword[None] ,
identifier[prevnext] : identifier[bool] = keyword[False] ,
identifier[metadata] : identifier[bool] = keyword[False]
)-> identifier[DtsResolverDocument] :
literal[string]
keyword[return] identifier[DtsResolverDocument] . identifier[parse] (
identifier[identifier] = identifier[textId] ,
identifier[reference] = identifier[subreference] ,
identifier[resolver] = identifier[self] ,
identifier[response] = identifier[self] . identifier[endpoint] . identifier[get_document] ( identifier[collection_id] = identifier[textId] , identifier[ref] = identifier[subreference] )
)
|
def getTextualNode(self, textId: str, subreference: Union[str, BaseReference]=None, prevnext: bool=False, metadata: bool=False) -> DtsResolverDocument:
""" Retrieve a text node from the API
:param textId: CtsTextMetadata Identifier
:type textId: str
:param subreference: CapitainsCtsPassage Reference
:type subreference: str
:param prevnext: Retrieve graph representing previous and next passage
:type prevnext: boolean
:param metadata: Retrieve metadata about the passage and the text
:type metadata: boolean
:return: CapitainsCtsPassage
:rtype: CapitainsCtsPassage
"""
return DtsResolverDocument.parse(identifier=textId, reference=subreference, resolver=self, response=self.endpoint.get_document(collection_id=textId, ref=subreference))
|
def _expval(self, f, nopdf):
""" Return integrand using the tan mapping. """
def ff(theta, nopdf=nopdf):
tan_theta = numpy.tan(theta)
x = self.scale * tan_theta
jac = self.scale * (tan_theta ** 2 + 1.)
if nopdf:
pdf = jac * self.pdf.pjac[None, :]
else:
pdf = jac * numpy.exp(-(x ** 2) / 2.) / numpy.sqrt(2 * numpy.pi)
dp = self.pdf.x2dpflat(x)
parg = None
ans = None
fparg_is_dict = False
# iterate through the batch
for i, (dpi, pdfi) in enumerate(zip(dp, pdf)):
p = self.pdf.meanflat + dpi
if parg is None:
# first time only
if self.pdf.shape is None:
parg = _gvar.BufferDict(self.pdf.g, buf=p)
else:
parg = p.reshape(self.pdf.shape)
else:
if parg.shape is None:
parg.buf = p
else:
parg.flat[:] = p
fparg = 1. if f is None else f(parg)
if ans is None:
# first time only
if hasattr(fparg, 'keys'):
fparg_is_dict = True
if not isinstance(fparg, _gvar.BufferDict):
fparg = _gvar.BufferDict(fparg)
ans = _gvar.BufferDict()
for k in fparg:
ans[k] = numpy.empty(
(len(pdf),) + fparg.slice_shape(k)[1], float
)
else:
if numpy.shape(fparg) == ():
ans = numpy.empty(len(pdf), float)
else:
ans = numpy.empty(
(len(pdf),) + numpy.shape(fparg), float
)
if fparg_is_dict:
prod_pdfi = numpy.prod(pdfi)
for k in ans:
ans[k][i] = fparg[k]
ans[k][i] *= prod_pdfi
else:
if not isinstance(fparg, numpy.ndarray):
fparg = numpy.asarray(fparg)
ans[i] = fparg * numpy.prod(pdfi)
return ans
return ff
|
def function[_expval, parameter[self, f, nopdf]]:
constant[ Return integrand using the tan mapping. ]
def function[ff, parameter[theta, nopdf]]:
variable[tan_theta] assign[=] call[name[numpy].tan, parameter[name[theta]]]
variable[x] assign[=] binary_operation[name[self].scale * name[tan_theta]]
variable[jac] assign[=] binary_operation[name[self].scale * binary_operation[binary_operation[name[tan_theta] ** constant[2]] + constant[1.0]]]
if name[nopdf] begin[:]
variable[pdf] assign[=] binary_operation[name[jac] * call[name[self].pdf.pjac][tuple[[<ast.Constant object at 0x7da1b04ca050>, <ast.Slice object at 0x7da1b04ca380>]]]]
variable[dp] assign[=] call[name[self].pdf.x2dpflat, parameter[name[x]]]
variable[parg] assign[=] constant[None]
variable[ans] assign[=] constant[None]
variable[fparg_is_dict] assign[=] constant[False]
for taget[tuple[[<ast.Name object at 0x7da1b04ca2c0>, <ast.Tuple object at 0x7da1b04cbd60>]]] in starred[call[name[enumerate], parameter[call[name[zip], parameter[name[dp], name[pdf]]]]]] begin[:]
variable[p] assign[=] binary_operation[name[self].pdf.meanflat + name[dpi]]
if compare[name[parg] is constant[None]] begin[:]
if compare[name[self].pdf.shape is constant[None]] begin[:]
variable[parg] assign[=] call[name[_gvar].BufferDict, parameter[name[self].pdf.g]]
variable[fparg] assign[=] <ast.IfExp object at 0x7da1b04c8e80>
if compare[name[ans] is constant[None]] begin[:]
if call[name[hasattr], parameter[name[fparg], constant[keys]]] begin[:]
variable[fparg_is_dict] assign[=] constant[True]
if <ast.UnaryOp object at 0x7da1b04ca320> begin[:]
variable[fparg] assign[=] call[name[_gvar].BufferDict, parameter[name[fparg]]]
variable[ans] assign[=] call[name[_gvar].BufferDict, parameter[]]
for taget[name[k]] in starred[name[fparg]] begin[:]
call[name[ans]][name[k]] assign[=] call[name[numpy].empty, parameter[binary_operation[tuple[[<ast.Call object at 0x7da1b04c81f0>]] + call[call[name[fparg].slice_shape, parameter[name[k]]]][constant[1]]], name[float]]]
if name[fparg_is_dict] begin[:]
variable[prod_pdfi] assign[=] call[name[numpy].prod, parameter[name[pdfi]]]
for taget[name[k]] in starred[name[ans]] begin[:]
call[call[name[ans]][name[k]]][name[i]] assign[=] call[name[fparg]][name[k]]
<ast.AugAssign object at 0x7da1b06d0640>
return[name[ans]]
return[name[ff]]
|
keyword[def] identifier[_expval] ( identifier[self] , identifier[f] , identifier[nopdf] ):
literal[string]
keyword[def] identifier[ff] ( identifier[theta] , identifier[nopdf] = identifier[nopdf] ):
identifier[tan_theta] = identifier[numpy] . identifier[tan] ( identifier[theta] )
identifier[x] = identifier[self] . identifier[scale] * identifier[tan_theta]
identifier[jac] = identifier[self] . identifier[scale] *( identifier[tan_theta] ** literal[int] + literal[int] )
keyword[if] identifier[nopdf] :
identifier[pdf] = identifier[jac] * identifier[self] . identifier[pdf] . identifier[pjac] [ keyword[None] ,:]
keyword[else] :
identifier[pdf] = identifier[jac] * identifier[numpy] . identifier[exp] (-( identifier[x] ** literal[int] )/ literal[int] )/ identifier[numpy] . identifier[sqrt] ( literal[int] * identifier[numpy] . identifier[pi] )
identifier[dp] = identifier[self] . identifier[pdf] . identifier[x2dpflat] ( identifier[x] )
identifier[parg] = keyword[None]
identifier[ans] = keyword[None]
identifier[fparg_is_dict] = keyword[False]
keyword[for] identifier[i] ,( identifier[dpi] , identifier[pdfi] ) keyword[in] identifier[enumerate] ( identifier[zip] ( identifier[dp] , identifier[pdf] )):
identifier[p] = identifier[self] . identifier[pdf] . identifier[meanflat] + identifier[dpi]
keyword[if] identifier[parg] keyword[is] keyword[None] :
keyword[if] identifier[self] . identifier[pdf] . identifier[shape] keyword[is] keyword[None] :
identifier[parg] = identifier[_gvar] . identifier[BufferDict] ( identifier[self] . identifier[pdf] . identifier[g] , identifier[buf] = identifier[p] )
keyword[else] :
identifier[parg] = identifier[p] . identifier[reshape] ( identifier[self] . identifier[pdf] . identifier[shape] )
keyword[else] :
keyword[if] identifier[parg] . identifier[shape] keyword[is] keyword[None] :
identifier[parg] . identifier[buf] = identifier[p]
keyword[else] :
identifier[parg] . identifier[flat] [:]= identifier[p]
identifier[fparg] = literal[int] keyword[if] identifier[f] keyword[is] keyword[None] keyword[else] identifier[f] ( identifier[parg] )
keyword[if] identifier[ans] keyword[is] keyword[None] :
keyword[if] identifier[hasattr] ( identifier[fparg] , literal[string] ):
identifier[fparg_is_dict] = keyword[True]
keyword[if] keyword[not] identifier[isinstance] ( identifier[fparg] , identifier[_gvar] . identifier[BufferDict] ):
identifier[fparg] = identifier[_gvar] . identifier[BufferDict] ( identifier[fparg] )
identifier[ans] = identifier[_gvar] . identifier[BufferDict] ()
keyword[for] identifier[k] keyword[in] identifier[fparg] :
identifier[ans] [ identifier[k] ]= identifier[numpy] . identifier[empty] (
( identifier[len] ( identifier[pdf] ),)+ identifier[fparg] . identifier[slice_shape] ( identifier[k] )[ literal[int] ], identifier[float]
)
keyword[else] :
keyword[if] identifier[numpy] . identifier[shape] ( identifier[fparg] )==():
identifier[ans] = identifier[numpy] . identifier[empty] ( identifier[len] ( identifier[pdf] ), identifier[float] )
keyword[else] :
identifier[ans] = identifier[numpy] . identifier[empty] (
( identifier[len] ( identifier[pdf] ),)+ identifier[numpy] . identifier[shape] ( identifier[fparg] ), identifier[float]
)
keyword[if] identifier[fparg_is_dict] :
identifier[prod_pdfi] = identifier[numpy] . identifier[prod] ( identifier[pdfi] )
keyword[for] identifier[k] keyword[in] identifier[ans] :
identifier[ans] [ identifier[k] ][ identifier[i] ]= identifier[fparg] [ identifier[k] ]
identifier[ans] [ identifier[k] ][ identifier[i] ]*= identifier[prod_pdfi]
keyword[else] :
keyword[if] keyword[not] identifier[isinstance] ( identifier[fparg] , identifier[numpy] . identifier[ndarray] ):
identifier[fparg] = identifier[numpy] . identifier[asarray] ( identifier[fparg] )
identifier[ans] [ identifier[i] ]= identifier[fparg] * identifier[numpy] . identifier[prod] ( identifier[pdfi] )
keyword[return] identifier[ans]
keyword[return] identifier[ff]
|
def _expval(self, f, nopdf):
""" Return integrand using the tan mapping. """
def ff(theta, nopdf=nopdf):
tan_theta = numpy.tan(theta)
x = self.scale * tan_theta
jac = self.scale * (tan_theta ** 2 + 1.0)
if nopdf:
pdf = jac * self.pdf.pjac[None, :] # depends on [control=['if'], data=[]]
else:
pdf = jac * numpy.exp(-x ** 2 / 2.0) / numpy.sqrt(2 * numpy.pi)
dp = self.pdf.x2dpflat(x)
parg = None
ans = None
fparg_is_dict = False
# iterate through the batch
for (i, (dpi, pdfi)) in enumerate(zip(dp, pdf)):
p = self.pdf.meanflat + dpi
if parg is None:
# first time only
if self.pdf.shape is None:
parg = _gvar.BufferDict(self.pdf.g, buf=p) # depends on [control=['if'], data=[]]
else:
parg = p.reshape(self.pdf.shape) # depends on [control=['if'], data=['parg']]
elif parg.shape is None:
parg.buf = p # depends on [control=['if'], data=[]]
else:
parg.flat[:] = p
fparg = 1.0 if f is None else f(parg)
if ans is None:
# first time only
if hasattr(fparg, 'keys'):
fparg_is_dict = True
if not isinstance(fparg, _gvar.BufferDict):
fparg = _gvar.BufferDict(fparg) # depends on [control=['if'], data=[]]
ans = _gvar.BufferDict()
for k in fparg:
ans[k] = numpy.empty((len(pdf),) + fparg.slice_shape(k)[1], float) # depends on [control=['for'], data=['k']] # depends on [control=['if'], data=[]]
elif numpy.shape(fparg) == ():
ans = numpy.empty(len(pdf), float) # depends on [control=['if'], data=[]]
else:
ans = numpy.empty((len(pdf),) + numpy.shape(fparg), float) # depends on [control=['if'], data=['ans']]
if fparg_is_dict:
prod_pdfi = numpy.prod(pdfi)
for k in ans:
ans[k][i] = fparg[k]
ans[k][i] *= prod_pdfi # depends on [control=['for'], data=['k']] # depends on [control=['if'], data=[]]
else:
if not isinstance(fparg, numpy.ndarray):
fparg = numpy.asarray(fparg) # depends on [control=['if'], data=[]]
ans[i] = fparg * numpy.prod(pdfi) # depends on [control=['for'], data=[]]
return ans
return ff
|
def mouseReleaseEvent(self, event):
"""
Overloads the mouse release event to apply the current changes.
:param event | <QEvent>
"""
super(XGanttViewItem, self).mouseReleaseEvent(event)
if not self.flags() & self.ItemIsMovable:
return
# force the x position to snap to the nearest date
scene = self.scene()
if scene:
gantt = scene.ganttWidget()
curr_x = self.pos().x() + gantt.cellWidth() / 2.0
new_x = curr_x - curr_x % gantt.cellWidth()
self.setPos(new_x, self.pos().y())
# look for date based times
gantt = self.scene().ganttWidget()
# determine hour/minute information
if gantt.timescale() in (gantt.Timescale.Minute,
gantt.Timescale.Hour,
gantt.Timescale.Day):
dstart = self.scene().datetimeAt(self.pos().x())
dend = self.scene().datetimeAt(self.pos().x() + self.rect().width())
dend.addSecs(-60)
else:
dstart = self.scene().dateAt(self.pos().x())
dend = self.scene().dateAt(self.pos().x() + self.rect().width())
dend = dend.addDays(-1)
item = self._treeItem()
if item:
item.viewChanged(dstart, dend)
|
def function[mouseReleaseEvent, parameter[self, event]]:
constant[
Overloads the mouse release event to apply the current changes.
:param event | <QEvent>
]
call[call[name[super], parameter[name[XGanttViewItem], name[self]]].mouseReleaseEvent, parameter[name[event]]]
if <ast.UnaryOp object at 0x7da204347cd0> begin[:]
return[None]
variable[scene] assign[=] call[name[self].scene, parameter[]]
if name[scene] begin[:]
variable[gantt] assign[=] call[name[scene].ganttWidget, parameter[]]
variable[curr_x] assign[=] binary_operation[call[call[name[self].pos, parameter[]].x, parameter[]] + binary_operation[call[name[gantt].cellWidth, parameter[]] / constant[2.0]]]
variable[new_x] assign[=] binary_operation[name[curr_x] - binary_operation[name[curr_x] <ast.Mod object at 0x7da2590d6920> call[name[gantt].cellWidth, parameter[]]]]
call[name[self].setPos, parameter[name[new_x], call[call[name[self].pos, parameter[]].y, parameter[]]]]
variable[gantt] assign[=] call[call[name[self].scene, parameter[]].ganttWidget, parameter[]]
if compare[call[name[gantt].timescale, parameter[]] in tuple[[<ast.Attribute object at 0x7da204347400>, <ast.Attribute object at 0x7da204346230>, <ast.Attribute object at 0x7da204344b80>]]] begin[:]
variable[dstart] assign[=] call[call[name[self].scene, parameter[]].datetimeAt, parameter[call[call[name[self].pos, parameter[]].x, parameter[]]]]
variable[dend] assign[=] call[call[name[self].scene, parameter[]].datetimeAt, parameter[binary_operation[call[call[name[self].pos, parameter[]].x, parameter[]] + call[call[name[self].rect, parameter[]].width, parameter[]]]]]
call[name[dend].addSecs, parameter[<ast.UnaryOp object at 0x7da204347ca0>]]
variable[item] assign[=] call[name[self]._treeItem, parameter[]]
if name[item] begin[:]
call[name[item].viewChanged, parameter[name[dstart], name[dend]]]
|
keyword[def] identifier[mouseReleaseEvent] ( identifier[self] , identifier[event] ):
literal[string]
identifier[super] ( identifier[XGanttViewItem] , identifier[self] ). identifier[mouseReleaseEvent] ( identifier[event] )
keyword[if] keyword[not] identifier[self] . identifier[flags] ()& identifier[self] . identifier[ItemIsMovable] :
keyword[return]
identifier[scene] = identifier[self] . identifier[scene] ()
keyword[if] identifier[scene] :
identifier[gantt] = identifier[scene] . identifier[ganttWidget] ()
identifier[curr_x] = identifier[self] . identifier[pos] (). identifier[x] ()+ identifier[gantt] . identifier[cellWidth] ()/ literal[int]
identifier[new_x] = identifier[curr_x] - identifier[curr_x] % identifier[gantt] . identifier[cellWidth] ()
identifier[self] . identifier[setPos] ( identifier[new_x] , identifier[self] . identifier[pos] (). identifier[y] ())
identifier[gantt] = identifier[self] . identifier[scene] (). identifier[ganttWidget] ()
keyword[if] identifier[gantt] . identifier[timescale] () keyword[in] ( identifier[gantt] . identifier[Timescale] . identifier[Minute] ,
identifier[gantt] . identifier[Timescale] . identifier[Hour] ,
identifier[gantt] . identifier[Timescale] . identifier[Day] ):
identifier[dstart] = identifier[self] . identifier[scene] (). identifier[datetimeAt] ( identifier[self] . identifier[pos] (). identifier[x] ())
identifier[dend] = identifier[self] . identifier[scene] (). identifier[datetimeAt] ( identifier[self] . identifier[pos] (). identifier[x] ()+ identifier[self] . identifier[rect] (). identifier[width] ())
identifier[dend] . identifier[addSecs] (- literal[int] )
keyword[else] :
identifier[dstart] = identifier[self] . identifier[scene] (). identifier[dateAt] ( identifier[self] . identifier[pos] (). identifier[x] ())
identifier[dend] = identifier[self] . identifier[scene] (). identifier[dateAt] ( identifier[self] . identifier[pos] (). identifier[x] ()+ identifier[self] . identifier[rect] (). identifier[width] ())
identifier[dend] = identifier[dend] . identifier[addDays] (- literal[int] )
identifier[item] = identifier[self] . identifier[_treeItem] ()
keyword[if] identifier[item] :
identifier[item] . identifier[viewChanged] ( identifier[dstart] , identifier[dend] )
|
def mouseReleaseEvent(self, event):
"""
Overloads the mouse release event to apply the current changes.
:param event | <QEvent>
"""
super(XGanttViewItem, self).mouseReleaseEvent(event)
if not self.flags() & self.ItemIsMovable:
return # depends on [control=['if'], data=[]] # force the x position to snap to the nearest date
scene = self.scene()
if scene:
gantt = scene.ganttWidget()
curr_x = self.pos().x() + gantt.cellWidth() / 2.0
new_x = curr_x - curr_x % gantt.cellWidth()
self.setPos(new_x, self.pos().y()) # depends on [control=['if'], data=[]] # look for date based times
gantt = self.scene().ganttWidget() # determine hour/minute information
if gantt.timescale() in (gantt.Timescale.Minute, gantt.Timescale.Hour, gantt.Timescale.Day):
dstart = self.scene().datetimeAt(self.pos().x())
dend = self.scene().datetimeAt(self.pos().x() + self.rect().width())
dend.addSecs(-60) # depends on [control=['if'], data=[]]
else:
dstart = self.scene().dateAt(self.pos().x())
dend = self.scene().dateAt(self.pos().x() + self.rect().width())
dend = dend.addDays(-1)
item = self._treeItem()
if item:
item.viewChanged(dstart, dend) # depends on [control=['if'], data=[]]
|
def write(self, start_position: int, size: int, value: bytes) -> None:
"""
Write `value` into memory.
"""
if size:
validate_uint256(start_position)
validate_uint256(size)
validate_is_bytes(value)
validate_length(value, length=size)
validate_lte(start_position + size, maximum=len(self))
for idx, v in enumerate(value):
self._bytes[start_position + idx] = v
|
def function[write, parameter[self, start_position, size, value]]:
constant[
Write `value` into memory.
]
if name[size] begin[:]
call[name[validate_uint256], parameter[name[start_position]]]
call[name[validate_uint256], parameter[name[size]]]
call[name[validate_is_bytes], parameter[name[value]]]
call[name[validate_length], parameter[name[value]]]
call[name[validate_lte], parameter[binary_operation[name[start_position] + name[size]]]]
for taget[tuple[[<ast.Name object at 0x7da1b175e9e0>, <ast.Name object at 0x7da1b175c2b0>]]] in starred[call[name[enumerate], parameter[name[value]]]] begin[:]
call[name[self]._bytes][binary_operation[name[start_position] + name[idx]]] assign[=] name[v]
|
keyword[def] identifier[write] ( identifier[self] , identifier[start_position] : identifier[int] , identifier[size] : identifier[int] , identifier[value] : identifier[bytes] )-> keyword[None] :
literal[string]
keyword[if] identifier[size] :
identifier[validate_uint256] ( identifier[start_position] )
identifier[validate_uint256] ( identifier[size] )
identifier[validate_is_bytes] ( identifier[value] )
identifier[validate_length] ( identifier[value] , identifier[length] = identifier[size] )
identifier[validate_lte] ( identifier[start_position] + identifier[size] , identifier[maximum] = identifier[len] ( identifier[self] ))
keyword[for] identifier[idx] , identifier[v] keyword[in] identifier[enumerate] ( identifier[value] ):
identifier[self] . identifier[_bytes] [ identifier[start_position] + identifier[idx] ]= identifier[v]
|
def write(self, start_position: int, size: int, value: bytes) -> None:
"""
Write `value` into memory.
"""
if size:
validate_uint256(start_position)
validate_uint256(size)
validate_is_bytes(value)
validate_length(value, length=size)
validate_lte(start_position + size, maximum=len(self))
for (idx, v) in enumerate(value):
self._bytes[start_position + idx] = v # depends on [control=['for'], data=[]] # depends on [control=['if'], data=[]]
|
def save_form(self, form):
"""
Save a valid form. If there is a parent attribute,
this will make sure that the parent object is added
to the saved object. Either as a relationship before
saving or in the case of many to many relations after
saving. Any forced instance values are set as well.
Returns the saved object.
"""
# Add any force_instance_values
force = self.get_force_instance_values()
if force:
for k, v in force.items():
setattr(form.instance, k, v)
# Are we adding to an attr or manager
should_add = False
if self.parent_object:
m2ms = [f.name for f in form.instance._meta.many_to_many]
m2ms.extend(
[f.field.rel.related_name for f in
[
f for f in form.instance._meta.get_fields(include_hidden=True)
if f.many_to_many and f.auto_created
]
]
)
if self.parent_field in m2ms:
should_add = True
else:
try:
form.instance._meta.get_field(self.parent_field)
setattr(form.instance, self.parent_field,
self.parent_object)
except FieldDoesNotExist:
pass
obj = form.save()
# Do we need to add this to a m2m
if should_add:
getattr(obj, self.parent_field).add(self.parent_object)
return obj
|
def function[save_form, parameter[self, form]]:
constant[
Save a valid form. If there is a parent attribute,
this will make sure that the parent object is added
to the saved object. Either as a relationship before
saving or in the case of many to many relations after
saving. Any forced instance values are set as well.
Returns the saved object.
]
variable[force] assign[=] call[name[self].get_force_instance_values, parameter[]]
if name[force] begin[:]
for taget[tuple[[<ast.Name object at 0x7da18f00ece0>, <ast.Name object at 0x7da18f00ccd0>]]] in starred[call[name[force].items, parameter[]]] begin[:]
call[name[setattr], parameter[name[form].instance, name[k], name[v]]]
variable[should_add] assign[=] constant[False]
if name[self].parent_object begin[:]
variable[m2ms] assign[=] <ast.ListComp object at 0x7da18f00d930>
call[name[m2ms].extend, parameter[<ast.ListComp object at 0x7da18f00f130>]]
if compare[name[self].parent_field in name[m2ms]] begin[:]
variable[should_add] assign[=] constant[True]
variable[obj] assign[=] call[name[form].save, parameter[]]
if name[should_add] begin[:]
call[call[name[getattr], parameter[name[obj], name[self].parent_field]].add, parameter[name[self].parent_object]]
return[name[obj]]
|
keyword[def] identifier[save_form] ( identifier[self] , identifier[form] ):
literal[string]
identifier[force] = identifier[self] . identifier[get_force_instance_values] ()
keyword[if] identifier[force] :
keyword[for] identifier[k] , identifier[v] keyword[in] identifier[force] . identifier[items] ():
identifier[setattr] ( identifier[form] . identifier[instance] , identifier[k] , identifier[v] )
identifier[should_add] = keyword[False]
keyword[if] identifier[self] . identifier[parent_object] :
identifier[m2ms] =[ identifier[f] . identifier[name] keyword[for] identifier[f] keyword[in] identifier[form] . identifier[instance] . identifier[_meta] . identifier[many_to_many] ]
identifier[m2ms] . identifier[extend] (
[ identifier[f] . identifier[field] . identifier[rel] . identifier[related_name] keyword[for] identifier[f] keyword[in]
[
identifier[f] keyword[for] identifier[f] keyword[in] identifier[form] . identifier[instance] . identifier[_meta] . identifier[get_fields] ( identifier[include_hidden] = keyword[True] )
keyword[if] identifier[f] . identifier[many_to_many] keyword[and] identifier[f] . identifier[auto_created]
]
]
)
keyword[if] identifier[self] . identifier[parent_field] keyword[in] identifier[m2ms] :
identifier[should_add] = keyword[True]
keyword[else] :
keyword[try] :
identifier[form] . identifier[instance] . identifier[_meta] . identifier[get_field] ( identifier[self] . identifier[parent_field] )
identifier[setattr] ( identifier[form] . identifier[instance] , identifier[self] . identifier[parent_field] ,
identifier[self] . identifier[parent_object] )
keyword[except] identifier[FieldDoesNotExist] :
keyword[pass]
identifier[obj] = identifier[form] . identifier[save] ()
keyword[if] identifier[should_add] :
identifier[getattr] ( identifier[obj] , identifier[self] . identifier[parent_field] ). identifier[add] ( identifier[self] . identifier[parent_object] )
keyword[return] identifier[obj]
|
def save_form(self, form):
"""
Save a valid form. If there is a parent attribute,
this will make sure that the parent object is added
to the saved object. Either as a relationship before
saving or in the case of many to many relations after
saving. Any forced instance values are set as well.
Returns the saved object.
"""
# Add any force_instance_values
force = self.get_force_instance_values()
if force:
for (k, v) in force.items():
setattr(form.instance, k, v) # depends on [control=['for'], data=[]] # depends on [control=['if'], data=[]]
# Are we adding to an attr or manager
should_add = False
if self.parent_object:
m2ms = [f.name for f in form.instance._meta.many_to_many]
m2ms.extend([f.field.rel.related_name for f in [f for f in form.instance._meta.get_fields(include_hidden=True) if f.many_to_many and f.auto_created]])
if self.parent_field in m2ms:
should_add = True # depends on [control=['if'], data=[]]
else:
try:
form.instance._meta.get_field(self.parent_field)
setattr(form.instance, self.parent_field, self.parent_object) # depends on [control=['try'], data=[]]
except FieldDoesNotExist:
pass # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]]
obj = form.save()
# Do we need to add this to a m2m
if should_add:
getattr(obj, self.parent_field).add(self.parent_object) # depends on [control=['if'], data=[]]
return obj
|
def segments(self):
"""A dictionary of lists of contours keyed by z-index"""
segments = dict()
for i in xrange(len(self)):
image = self[i]
for z, contour in image.as_segments.iteritems():
for byte_value, contour_set in contour.iteritems():
if byte_value not in segments:
segments[byte_value] = dict()
if z not in segments[byte_value]:
segments[byte_value][z] = contour_set
else:
segments[byte_value][z] += contour_set
return segments
|
def function[segments, parameter[self]]:
constant[A dictionary of lists of contours keyed by z-index]
variable[segments] assign[=] call[name[dict], parameter[]]
for taget[name[i]] in starred[call[name[xrange], parameter[call[name[len], parameter[name[self]]]]]] begin[:]
variable[image] assign[=] call[name[self]][name[i]]
for taget[tuple[[<ast.Name object at 0x7da1b0955ae0>, <ast.Name object at 0x7da1b0956bf0>]]] in starred[call[name[image].as_segments.iteritems, parameter[]]] begin[:]
for taget[tuple[[<ast.Name object at 0x7da1b0957d30>, <ast.Name object at 0x7da1b0955210>]]] in starred[call[name[contour].iteritems, parameter[]]] begin[:]
if compare[name[byte_value] <ast.NotIn object at 0x7da2590d7190> name[segments]] begin[:]
call[name[segments]][name[byte_value]] assign[=] call[name[dict], parameter[]]
if compare[name[z] <ast.NotIn object at 0x7da2590d7190> call[name[segments]][name[byte_value]]] begin[:]
call[call[name[segments]][name[byte_value]]][name[z]] assign[=] name[contour_set]
return[name[segments]]
|
keyword[def] identifier[segments] ( identifier[self] ):
literal[string]
identifier[segments] = identifier[dict] ()
keyword[for] identifier[i] keyword[in] identifier[xrange] ( identifier[len] ( identifier[self] )):
identifier[image] = identifier[self] [ identifier[i] ]
keyword[for] identifier[z] , identifier[contour] keyword[in] identifier[image] . identifier[as_segments] . identifier[iteritems] ():
keyword[for] identifier[byte_value] , identifier[contour_set] keyword[in] identifier[contour] . identifier[iteritems] ():
keyword[if] identifier[byte_value] keyword[not] keyword[in] identifier[segments] :
identifier[segments] [ identifier[byte_value] ]= identifier[dict] ()
keyword[if] identifier[z] keyword[not] keyword[in] identifier[segments] [ identifier[byte_value] ]:
identifier[segments] [ identifier[byte_value] ][ identifier[z] ]= identifier[contour_set]
keyword[else] :
identifier[segments] [ identifier[byte_value] ][ identifier[z] ]+= identifier[contour_set]
keyword[return] identifier[segments]
|
def segments(self):
"""A dictionary of lists of contours keyed by z-index"""
segments = dict()
for i in xrange(len(self)):
image = self[i]
for (z, contour) in image.as_segments.iteritems():
for (byte_value, contour_set) in contour.iteritems():
if byte_value not in segments:
segments[byte_value] = dict() # depends on [control=['if'], data=['byte_value', 'segments']]
if z not in segments[byte_value]:
segments[byte_value][z] = contour_set # depends on [control=['if'], data=['z']]
else:
segments[byte_value][z] += contour_set # depends on [control=['for'], data=[]] # depends on [control=['for'], data=[]] # depends on [control=['for'], data=['i']]
return segments
|
def click(self):
"""Click the element
:returns: page element instance
"""
try:
self.wait_until_clickable().web_element.click()
except StaleElementReferenceException:
# Retry if element has changed
self.web_element.click()
return self
|
def function[click, parameter[self]]:
constant[Click the element
:returns: page element instance
]
<ast.Try object at 0x7da1b23ecb50>
return[name[self]]
|
keyword[def] identifier[click] ( identifier[self] ):
literal[string]
keyword[try] :
identifier[self] . identifier[wait_until_clickable] (). identifier[web_element] . identifier[click] ()
keyword[except] identifier[StaleElementReferenceException] :
identifier[self] . identifier[web_element] . identifier[click] ()
keyword[return] identifier[self]
|
def click(self):
"""Click the element
:returns: page element instance
"""
try:
self.wait_until_clickable().web_element.click() # depends on [control=['try'], data=[]]
except StaleElementReferenceException:
# Retry if element has changed
self.web_element.click() # depends on [control=['except'], data=[]]
return self
|
def get_kernel_spec(self, kernel_name):
"""Returns a :class:`KernelSpec` instance for the given kernel_name.
Raises :exc:`NoSuchKernel` if the given kernel name is not found.
"""
if kernel_name == CURRENT_ENV_KERNEL_NAME:
return self.kernel_spec_class(
resource_dir=ipykernel.kernelspec.RESOURCES,
**ipykernel.kernelspec.get_kernel_dict())
else:
return super(NbvalKernelspecManager, self).get_kernel_spec(kernel_name)
|
def function[get_kernel_spec, parameter[self, kernel_name]]:
constant[Returns a :class:`KernelSpec` instance for the given kernel_name.
Raises :exc:`NoSuchKernel` if the given kernel name is not found.
]
if compare[name[kernel_name] equal[==] name[CURRENT_ENV_KERNEL_NAME]] begin[:]
return[call[name[self].kernel_spec_class, parameter[]]]
|
keyword[def] identifier[get_kernel_spec] ( identifier[self] , identifier[kernel_name] ):
literal[string]
keyword[if] identifier[kernel_name] == identifier[CURRENT_ENV_KERNEL_NAME] :
keyword[return] identifier[self] . identifier[kernel_spec_class] (
identifier[resource_dir] = identifier[ipykernel] . identifier[kernelspec] . identifier[RESOURCES] ,
** identifier[ipykernel] . identifier[kernelspec] . identifier[get_kernel_dict] ())
keyword[else] :
keyword[return] identifier[super] ( identifier[NbvalKernelspecManager] , identifier[self] ). identifier[get_kernel_spec] ( identifier[kernel_name] )
|
def get_kernel_spec(self, kernel_name):
"""Returns a :class:`KernelSpec` instance for the given kernel_name.
Raises :exc:`NoSuchKernel` if the given kernel name is not found.
"""
if kernel_name == CURRENT_ENV_KERNEL_NAME:
return self.kernel_spec_class(resource_dir=ipykernel.kernelspec.RESOURCES, **ipykernel.kernelspec.get_kernel_dict()) # depends on [control=['if'], data=[]]
else:
return super(NbvalKernelspecManager, self).get_kernel_spec(kernel_name)
|
def expect_exitstatus(self, exit_status):
"""Wait for the running program to finish and expect some exit status.
Args:
exit_status (int): The expected exit status.
Raises:
WrongExitStatusException: The produced exit status is not the expected one.
"""
self.expect_end()
logger.debug("Checking exit status of '{0}', output so far: {1}".format(
self.name, self.get_output()))
if self._spawn.exitstatus is None:
raise WrongExitStatusException(
instance=self, expected=exit_status, output=self.get_output())
if self._spawn.exitstatus is not exit_status:
raise WrongExitStatusException(
instance=self,
expected=exit_status,
got=self._spawn.exitstatus,
output=self.get_output())
|
def function[expect_exitstatus, parameter[self, exit_status]]:
constant[Wait for the running program to finish and expect some exit status.
Args:
exit_status (int): The expected exit status.
Raises:
WrongExitStatusException: The produced exit status is not the expected one.
]
call[name[self].expect_end, parameter[]]
call[name[logger].debug, parameter[call[constant[Checking exit status of '{0}', output so far: {1}].format, parameter[name[self].name, call[name[self].get_output, parameter[]]]]]]
if compare[name[self]._spawn.exitstatus is constant[None]] begin[:]
<ast.Raise object at 0x7da20c6c5c60>
if compare[name[self]._spawn.exitstatus is_not name[exit_status]] begin[:]
<ast.Raise object at 0x7da20c6c6cb0>
|
keyword[def] identifier[expect_exitstatus] ( identifier[self] , identifier[exit_status] ):
literal[string]
identifier[self] . identifier[expect_end] ()
identifier[logger] . identifier[debug] ( literal[string] . identifier[format] (
identifier[self] . identifier[name] , identifier[self] . identifier[get_output] ()))
keyword[if] identifier[self] . identifier[_spawn] . identifier[exitstatus] keyword[is] keyword[None] :
keyword[raise] identifier[WrongExitStatusException] (
identifier[instance] = identifier[self] , identifier[expected] = identifier[exit_status] , identifier[output] = identifier[self] . identifier[get_output] ())
keyword[if] identifier[self] . identifier[_spawn] . identifier[exitstatus] keyword[is] keyword[not] identifier[exit_status] :
keyword[raise] identifier[WrongExitStatusException] (
identifier[instance] = identifier[self] ,
identifier[expected] = identifier[exit_status] ,
identifier[got] = identifier[self] . identifier[_spawn] . identifier[exitstatus] ,
identifier[output] = identifier[self] . identifier[get_output] ())
|
def expect_exitstatus(self, exit_status):
"""Wait for the running program to finish and expect some exit status.
Args:
exit_status (int): The expected exit status.
Raises:
WrongExitStatusException: The produced exit status is not the expected one.
"""
self.expect_end()
logger.debug("Checking exit status of '{0}', output so far: {1}".format(self.name, self.get_output()))
if self._spawn.exitstatus is None:
raise WrongExitStatusException(instance=self, expected=exit_status, output=self.get_output()) # depends on [control=['if'], data=[]]
if self._spawn.exitstatus is not exit_status:
raise WrongExitStatusException(instance=self, expected=exit_status, got=self._spawn.exitstatus, output=self.get_output()) # depends on [control=['if'], data=['exit_status']]
|
def initialize():
"""
Function to initialize settings from command line and/or custom settings file
:return: Returns str with operation type
"""
if len(sys.argv) == 1:
usage()
sys.exit()
command = _get_command(sys.argv[1])
try:
opts, args = getopt.getopt(sys.argv[2:], 'h:e:p:u:l:P:s:m:',
['help', 'email=', 'password=', 'url=', 'locale=',
'po-path=', 'settings=', 'message='])
except getopt.GetoptError:
usage()
sys.exit()
params = _get_params_from_options(opts)
_set_settings_file(settings, params)
if command == 'push':
if 'GIT_MESSAGE' in params:
return 'push', params['GIT_MESSAGE']
return 'push', None
return command, None
|
def function[initialize, parameter[]]:
constant[
Function to initialize settings from command line and/or custom settings file
:return: Returns str with operation type
]
if compare[call[name[len], parameter[name[sys].argv]] equal[==] constant[1]] begin[:]
call[name[usage], parameter[]]
call[name[sys].exit, parameter[]]
variable[command] assign[=] call[name[_get_command], parameter[call[name[sys].argv][constant[1]]]]
<ast.Try object at 0x7da2054a6c80>
variable[params] assign[=] call[name[_get_params_from_options], parameter[name[opts]]]
call[name[_set_settings_file], parameter[name[settings], name[params]]]
if compare[name[command] equal[==] constant[push]] begin[:]
if compare[constant[GIT_MESSAGE] in name[params]] begin[:]
return[tuple[[<ast.Constant object at 0x7da2054a6e30>, <ast.Subscript object at 0x7da1b021c490>]]]
return[tuple[[<ast.Constant object at 0x7da1b021ded0>, <ast.Constant object at 0x7da1b021f610>]]]
return[tuple[[<ast.Name object at 0x7da1b021fa30>, <ast.Constant object at 0x7da1b021d6c0>]]]
|
keyword[def] identifier[initialize] ():
literal[string]
keyword[if] identifier[len] ( identifier[sys] . identifier[argv] )== literal[int] :
identifier[usage] ()
identifier[sys] . identifier[exit] ()
identifier[command] = identifier[_get_command] ( identifier[sys] . identifier[argv] [ literal[int] ])
keyword[try] :
identifier[opts] , identifier[args] = identifier[getopt] . identifier[getopt] ( identifier[sys] . identifier[argv] [ literal[int] :], literal[string] ,
[ literal[string] , literal[string] , literal[string] , literal[string] , literal[string] ,
literal[string] , literal[string] , literal[string] ])
keyword[except] identifier[getopt] . identifier[GetoptError] :
identifier[usage] ()
identifier[sys] . identifier[exit] ()
identifier[params] = identifier[_get_params_from_options] ( identifier[opts] )
identifier[_set_settings_file] ( identifier[settings] , identifier[params] )
keyword[if] identifier[command] == literal[string] :
keyword[if] literal[string] keyword[in] identifier[params] :
keyword[return] literal[string] , identifier[params] [ literal[string] ]
keyword[return] literal[string] , keyword[None]
keyword[return] identifier[command] , keyword[None]
|
def initialize():
"""
Function to initialize settings from command line and/or custom settings file
:return: Returns str with operation type
"""
if len(sys.argv) == 1:
usage()
sys.exit() # depends on [control=['if'], data=[]]
command = _get_command(sys.argv[1])
try:
(opts, args) = getopt.getopt(sys.argv[2:], 'h:e:p:u:l:P:s:m:', ['help', 'email=', 'password=', 'url=', 'locale=', 'po-path=', 'settings=', 'message=']) # depends on [control=['try'], data=[]]
except getopt.GetoptError:
usage()
sys.exit() # depends on [control=['except'], data=[]]
params = _get_params_from_options(opts)
_set_settings_file(settings, params)
if command == 'push':
if 'GIT_MESSAGE' in params:
return ('push', params['GIT_MESSAGE']) # depends on [control=['if'], data=['params']]
return ('push', None) # depends on [control=['if'], data=[]]
return (command, None)
|
def service_search(auth=None, **kwargs):
'''
Search services
CLI Example:
.. code-block:: bash
salt '*' keystoneng.service_search
salt '*' keystoneng.service_search name=glance
salt '*' keystoneng.service_search name=135f0403f8e544dc9008c6739ecda860
'''
cloud = get_operator_cloud(auth)
kwargs = _clean_kwargs(**kwargs)
return cloud.search_services(**kwargs)
|
def function[service_search, parameter[auth]]:
constant[
Search services
CLI Example:
.. code-block:: bash
salt '*' keystoneng.service_search
salt '*' keystoneng.service_search name=glance
salt '*' keystoneng.service_search name=135f0403f8e544dc9008c6739ecda860
]
variable[cloud] assign[=] call[name[get_operator_cloud], parameter[name[auth]]]
variable[kwargs] assign[=] call[name[_clean_kwargs], parameter[]]
return[call[name[cloud].search_services, parameter[]]]
|
keyword[def] identifier[service_search] ( identifier[auth] = keyword[None] ,** identifier[kwargs] ):
literal[string]
identifier[cloud] = identifier[get_operator_cloud] ( identifier[auth] )
identifier[kwargs] = identifier[_clean_kwargs] (** identifier[kwargs] )
keyword[return] identifier[cloud] . identifier[search_services] (** identifier[kwargs] )
|
def service_search(auth=None, **kwargs):
"""
Search services
CLI Example:
.. code-block:: bash
salt '*' keystoneng.service_search
salt '*' keystoneng.service_search name=glance
salt '*' keystoneng.service_search name=135f0403f8e544dc9008c6739ecda860
"""
cloud = get_operator_cloud(auth)
kwargs = _clean_kwargs(**kwargs)
return cloud.search_services(**kwargs)
|
def get_external_tools_in_course(self, course_id, params={}):
"""
Return external tools for the passed canvas course id.
https://canvas.instructure.com/doc/api/external_tools.html#method.external_tools.index
"""
url = COURSES_API.format(course_id) + "/external_tools"
external_tools = []
for data in self._get_paged_resource(url, params=params):
external_tools.append(data)
return external_tools
|
def function[get_external_tools_in_course, parameter[self, course_id, params]]:
constant[
Return external tools for the passed canvas course id.
https://canvas.instructure.com/doc/api/external_tools.html#method.external_tools.index
]
variable[url] assign[=] binary_operation[call[name[COURSES_API].format, parameter[name[course_id]]] + constant[/external_tools]]
variable[external_tools] assign[=] list[[]]
for taget[name[data]] in starred[call[name[self]._get_paged_resource, parameter[name[url]]]] begin[:]
call[name[external_tools].append, parameter[name[data]]]
return[name[external_tools]]
|
keyword[def] identifier[get_external_tools_in_course] ( identifier[self] , identifier[course_id] , identifier[params] ={}):
literal[string]
identifier[url] = identifier[COURSES_API] . identifier[format] ( identifier[course_id] )+ literal[string]
identifier[external_tools] =[]
keyword[for] identifier[data] keyword[in] identifier[self] . identifier[_get_paged_resource] ( identifier[url] , identifier[params] = identifier[params] ):
identifier[external_tools] . identifier[append] ( identifier[data] )
keyword[return] identifier[external_tools]
|
def get_external_tools_in_course(self, course_id, params={}):
"""
Return external tools for the passed canvas course id.
https://canvas.instructure.com/doc/api/external_tools.html#method.external_tools.index
"""
url = COURSES_API.format(course_id) + '/external_tools'
external_tools = []
for data in self._get_paged_resource(url, params=params):
external_tools.append(data) # depends on [control=['for'], data=['data']]
return external_tools
|
def visit_starred(self, node):
"""Check that a Starred expression is used in an assignment target."""
if isinstance(node.parent, astroid.Call):
# f(*args) is converted to Call(args=[Starred]), so ignore
# them for this check.
return
if PY35 and isinstance(
node.parent, (astroid.List, astroid.Tuple, astroid.Set, astroid.Dict)
):
# PEP 448 unpacking.
return
stmt = node.statement()
if not isinstance(stmt, astroid.Assign):
return
if stmt.value is node or stmt.value.parent_of(node):
self.add_message("star-needs-assignment-target", node=node)
|
def function[visit_starred, parameter[self, node]]:
constant[Check that a Starred expression is used in an assignment target.]
if call[name[isinstance], parameter[name[node].parent, name[astroid].Call]] begin[:]
return[None]
if <ast.BoolOp object at 0x7da1b03155d0> begin[:]
return[None]
variable[stmt] assign[=] call[name[node].statement, parameter[]]
if <ast.UnaryOp object at 0x7da1b0314d90> begin[:]
return[None]
if <ast.BoolOp object at 0x7da1b0316bf0> begin[:]
call[name[self].add_message, parameter[constant[star-needs-assignment-target]]]
|
keyword[def] identifier[visit_starred] ( identifier[self] , identifier[node] ):
literal[string]
keyword[if] identifier[isinstance] ( identifier[node] . identifier[parent] , identifier[astroid] . identifier[Call] ):
keyword[return]
keyword[if] identifier[PY35] keyword[and] identifier[isinstance] (
identifier[node] . identifier[parent] ,( identifier[astroid] . identifier[List] , identifier[astroid] . identifier[Tuple] , identifier[astroid] . identifier[Set] , identifier[astroid] . identifier[Dict] )
):
keyword[return]
identifier[stmt] = identifier[node] . identifier[statement] ()
keyword[if] keyword[not] identifier[isinstance] ( identifier[stmt] , identifier[astroid] . identifier[Assign] ):
keyword[return]
keyword[if] identifier[stmt] . identifier[value] keyword[is] identifier[node] keyword[or] identifier[stmt] . identifier[value] . identifier[parent_of] ( identifier[node] ):
identifier[self] . identifier[add_message] ( literal[string] , identifier[node] = identifier[node] )
|
def visit_starred(self, node):
"""Check that a Starred expression is used in an assignment target."""
if isinstance(node.parent, astroid.Call):
# f(*args) is converted to Call(args=[Starred]), so ignore
# them for this check.
return # depends on [control=['if'], data=[]]
if PY35 and isinstance(node.parent, (astroid.List, astroid.Tuple, astroid.Set, astroid.Dict)):
# PEP 448 unpacking.
return # depends on [control=['if'], data=[]]
stmt = node.statement()
if not isinstance(stmt, astroid.Assign):
return # depends on [control=['if'], data=[]]
if stmt.value is node or stmt.value.parent_of(node):
self.add_message('star-needs-assignment-target', node=node) # depends on [control=['if'], data=[]]
|
def get_header(self, header_name):
"""
Returns a header with that name, creates it if it does not exist.
"""
if header_name in self.headers:
return self.headers[header_name]
return self.add_header_name(header_name)
|
def function[get_header, parameter[self, header_name]]:
constant[
Returns a header with that name, creates it if it does not exist.
]
if compare[name[header_name] in name[self].headers] begin[:]
return[call[name[self].headers][name[header_name]]]
return[call[name[self].add_header_name, parameter[name[header_name]]]]
|
keyword[def] identifier[get_header] ( identifier[self] , identifier[header_name] ):
literal[string]
keyword[if] identifier[header_name] keyword[in] identifier[self] . identifier[headers] :
keyword[return] identifier[self] . identifier[headers] [ identifier[header_name] ]
keyword[return] identifier[self] . identifier[add_header_name] ( identifier[header_name] )
|
def get_header(self, header_name):
"""
Returns a header with that name, creates it if it does not exist.
"""
if header_name in self.headers:
return self.headers[header_name] # depends on [control=['if'], data=['header_name']]
return self.add_header_name(header_name)
|
def _validate_dependencies(self, dependencies, field, value):
""" {'type': ('dict', 'hashable', 'list'),
'check_with': 'dependencies'} """
if isinstance(dependencies, _str_type) or not isinstance(
dependencies, (Iterable, Mapping)
):
dependencies = (dependencies,)
if isinstance(dependencies, Sequence):
self.__validate_dependencies_sequence(dependencies, field)
elif isinstance(dependencies, Mapping):
self.__validate_dependencies_mapping(dependencies, field)
if (
self.document_error_tree.fetch_node_from(
self.schema_path + (field, 'dependencies')
)
is not None
):
return True
|
def function[_validate_dependencies, parameter[self, dependencies, field, value]]:
constant[ {'type': ('dict', 'hashable', 'list'),
'check_with': 'dependencies'} ]
if <ast.BoolOp object at 0x7da2054a4430> begin[:]
variable[dependencies] assign[=] tuple[[<ast.Name object at 0x7da2054a7eb0>]]
if call[name[isinstance], parameter[name[dependencies], name[Sequence]]] begin[:]
call[name[self].__validate_dependencies_sequence, parameter[name[dependencies], name[field]]]
if compare[call[name[self].document_error_tree.fetch_node_from, parameter[binary_operation[name[self].schema_path + tuple[[<ast.Name object at 0x7da2054a61a0>, <ast.Constant object at 0x7da2054a51b0>]]]]] is_not constant[None]] begin[:]
return[constant[True]]
|
keyword[def] identifier[_validate_dependencies] ( identifier[self] , identifier[dependencies] , identifier[field] , identifier[value] ):
literal[string]
keyword[if] identifier[isinstance] ( identifier[dependencies] , identifier[_str_type] ) keyword[or] keyword[not] identifier[isinstance] (
identifier[dependencies] ,( identifier[Iterable] , identifier[Mapping] )
):
identifier[dependencies] =( identifier[dependencies] ,)
keyword[if] identifier[isinstance] ( identifier[dependencies] , identifier[Sequence] ):
identifier[self] . identifier[__validate_dependencies_sequence] ( identifier[dependencies] , identifier[field] )
keyword[elif] identifier[isinstance] ( identifier[dependencies] , identifier[Mapping] ):
identifier[self] . identifier[__validate_dependencies_mapping] ( identifier[dependencies] , identifier[field] )
keyword[if] (
identifier[self] . identifier[document_error_tree] . identifier[fetch_node_from] (
identifier[self] . identifier[schema_path] +( identifier[field] , literal[string] )
)
keyword[is] keyword[not] keyword[None]
):
keyword[return] keyword[True]
|
def _validate_dependencies(self, dependencies, field, value):
""" {'type': ('dict', 'hashable', 'list'),
'check_with': 'dependencies'} """
if isinstance(dependencies, _str_type) or not isinstance(dependencies, (Iterable, Mapping)):
dependencies = (dependencies,) # depends on [control=['if'], data=[]]
if isinstance(dependencies, Sequence):
self.__validate_dependencies_sequence(dependencies, field) # depends on [control=['if'], data=[]]
elif isinstance(dependencies, Mapping):
self.__validate_dependencies_mapping(dependencies, field) # depends on [control=['if'], data=[]]
if self.document_error_tree.fetch_node_from(self.schema_path + (field, 'dependencies')) is not None:
return True # depends on [control=['if'], data=[]]
|
def _content_function(self):
"""
This returns a set containing the actively shown module.
This is so we only get update events triggered for these modules.
"""
# ensure that active is valid
self.active = self.active % len(self.items)
return set([self.items[self.active]])
|
def function[_content_function, parameter[self]]:
constant[
This returns a set containing the actively shown module.
This is so we only get update events triggered for these modules.
]
name[self].active assign[=] binary_operation[name[self].active <ast.Mod object at 0x7da2590d6920> call[name[len], parameter[name[self].items]]]
return[call[name[set], parameter[list[[<ast.Subscript object at 0x7da1b1de1a80>]]]]]
|
keyword[def] identifier[_content_function] ( identifier[self] ):
literal[string]
identifier[self] . identifier[active] = identifier[self] . identifier[active] % identifier[len] ( identifier[self] . identifier[items] )
keyword[return] identifier[set] ([ identifier[self] . identifier[items] [ identifier[self] . identifier[active] ]])
|
def _content_function(self):
"""
This returns a set containing the actively shown module.
This is so we only get update events triggered for these modules.
"""
# ensure that active is valid
self.active = self.active % len(self.items)
return set([self.items[self.active]])
|
def simulate_feeddata(self, feedid, data, mime=None, time=None):
"""Simulate the last feeddata received for given feedid
Calls the registered callback for the feed with the last recieved feed data. Allows you to test your code
without having to wait for the remote thing to share again.
`feedid` (required) (string) local id of your Feed
`data` (optional) (as applicable) The data you want to use to simulate the arrival of remote feed data
`mime` (optional) (string) The mime type of your data. See also:
[share()](./Point.m.html#IoticAgent.IOT.Point.Feed.share)
`time` (optional) (datetime) UTC timestamp for share. See also:
[share()](./Point.m.html#IoticAgent.IOT.Point.Feed.share)
"""
self.__client.simulate_feeddata(feedid, data, mime, time)
|
def function[simulate_feeddata, parameter[self, feedid, data, mime, time]]:
constant[Simulate the last feeddata received for given feedid
Calls the registered callback for the feed with the last recieved feed data. Allows you to test your code
without having to wait for the remote thing to share again.
`feedid` (required) (string) local id of your Feed
`data` (optional) (as applicable) The data you want to use to simulate the arrival of remote feed data
`mime` (optional) (string) The mime type of your data. See also:
[share()](./Point.m.html#IoticAgent.IOT.Point.Feed.share)
`time` (optional) (datetime) UTC timestamp for share. See also:
[share()](./Point.m.html#IoticAgent.IOT.Point.Feed.share)
]
call[name[self].__client.simulate_feeddata, parameter[name[feedid], name[data], name[mime], name[time]]]
|
keyword[def] identifier[simulate_feeddata] ( identifier[self] , identifier[feedid] , identifier[data] , identifier[mime] = keyword[None] , identifier[time] = keyword[None] ):
literal[string]
identifier[self] . identifier[__client] . identifier[simulate_feeddata] ( identifier[feedid] , identifier[data] , identifier[mime] , identifier[time] )
|
def simulate_feeddata(self, feedid, data, mime=None, time=None):
"""Simulate the last feeddata received for given feedid
Calls the registered callback for the feed with the last recieved feed data. Allows you to test your code
without having to wait for the remote thing to share again.
`feedid` (required) (string) local id of your Feed
`data` (optional) (as applicable) The data you want to use to simulate the arrival of remote feed data
`mime` (optional) (string) The mime type of your data. See also:
[share()](./Point.m.html#IoticAgent.IOT.Point.Feed.share)
`time` (optional) (datetime) UTC timestamp for share. See also:
[share()](./Point.m.html#IoticAgent.IOT.Point.Feed.share)
"""
self.__client.simulate_feeddata(feedid, data, mime, time)
|
def resolve_out(self, ins):
"""
Determine which stream the output is synchronised with. If the incoming streams have different sync values, then
it is unknown what synchronisation the outgoing stream should have.
:param ins: dictionary of the incoming streams' sync values
:return:
"""
values = set()
for value in ins.values():
values.update(value)
if len(values) > 1:
msg = 'Unable to resolve sync stream. Consider adding a custom resolver to {}.'
raise ValueError(msg.format(self.step.name))
return {key: values for key in self.step.outs}
|
def function[resolve_out, parameter[self, ins]]:
constant[
Determine which stream the output is synchronised with. If the incoming streams have different sync values, then
it is unknown what synchronisation the outgoing stream should have.
:param ins: dictionary of the incoming streams' sync values
:return:
]
variable[values] assign[=] call[name[set], parameter[]]
for taget[name[value]] in starred[call[name[ins].values, parameter[]]] begin[:]
call[name[values].update, parameter[name[value]]]
if compare[call[name[len], parameter[name[values]]] greater[>] constant[1]] begin[:]
variable[msg] assign[=] constant[Unable to resolve sync stream. Consider adding a custom resolver to {}.]
<ast.Raise object at 0x7da18f722bc0>
return[<ast.DictComp object at 0x7da18f723790>]
|
keyword[def] identifier[resolve_out] ( identifier[self] , identifier[ins] ):
literal[string]
identifier[values] = identifier[set] ()
keyword[for] identifier[value] keyword[in] identifier[ins] . identifier[values] ():
identifier[values] . identifier[update] ( identifier[value] )
keyword[if] identifier[len] ( identifier[values] )> literal[int] :
identifier[msg] = literal[string]
keyword[raise] identifier[ValueError] ( identifier[msg] . identifier[format] ( identifier[self] . identifier[step] . identifier[name] ))
keyword[return] { identifier[key] : identifier[values] keyword[for] identifier[key] keyword[in] identifier[self] . identifier[step] . identifier[outs] }
|
def resolve_out(self, ins):
"""
Determine which stream the output is synchronised with. If the incoming streams have different sync values, then
it is unknown what synchronisation the outgoing stream should have.
:param ins: dictionary of the incoming streams' sync values
:return:
"""
values = set()
for value in ins.values():
values.update(value) # depends on [control=['for'], data=['value']]
if len(values) > 1:
msg = 'Unable to resolve sync stream. Consider adding a custom resolver to {}.'
raise ValueError(msg.format(self.step.name)) # depends on [control=['if'], data=[]]
return {key: values for key in self.step.outs}
|
def do_daemon_init_and_start(self, set_proc_title=True):
"""Main daemon function.
Clean, allocates, initializes and starts all necessary resources to go in daemon mode.
The set_proc_title parameter is mainly useful for the Alignak unit tests.
This to avoid changing the test process name!
:param set_proc_title: if set (default), the process title is changed to the daemon name
:type set_proc_title: bool
:return: False if the HTTP daemon can not be initialized, else True
"""
if set_proc_title:
self.set_proctitle(self.name)
# Change to configured user/group account
self.change_to_user_group()
# Change the working directory
self.change_to_workdir()
# Check if I am still running
self.check_parallel_run()
# If we must daemonize, let's do it!
if self.is_daemon:
if not self.daemonize():
logger.error("I could not daemonize myself :(")
return False
else:
# Else, I set my own pid as the reference one
self.write_pid(os.getpid())
# # TODO: check if really necessary!
# # -------
# # Set ownership on some default log files. It may happen that these default
# # files are owned by a privileged user account
# try:
# for log_file in ['alignak.log', 'alignak-events.log']:
# if os.path.exists('/tmp/%s' % log_file):
# with open('/tmp/%s' % log_file, "w") as file_log_file:
# os.fchown(file_log_file.fileno(), self.uid, self.gid)
# if os.path.exists('/tmp/monitoring-log/%s' % log_file):
# with open('/tmp/monitoring-log/%s' % log_file, "w") as file_log_file:
# os.fchown(file_log_file.fileno(), self.uid, self.gid)
# except Exception as exp: # pylint: disable=broad-except
# # pragma: no cover
# print("Could not set default log files ownership, exception: %s" % str(exp))
# Configure the daemon logger
self.setup_alignak_logger()
# Setup the Web Services daemon
if not self.setup_communication_daemon():
logger.error("I could not setup my communication daemon :(")
return False
# Creating synchonisation manager (inter-daemon queues...)
self.sync_manager = self._create_manager()
# Start the CherryPy server through a detached thread
logger.info("Starting http_daemon thread")
# pylint: disable=bad-thread-instantiation
self.http_thread = threading.Thread(target=self.http_daemon_thread,
name='%s-http_thread' % self.name)
# Setting the thread as a daemon allows to Ctrl+C to kill the main daemon
self.http_thread.daemon = True
self.http_thread.start()
# time.sleep(1)
logger.info("HTTP daemon thread started")
return True
|
def function[do_daemon_init_and_start, parameter[self, set_proc_title]]:
constant[Main daemon function.
Clean, allocates, initializes and starts all necessary resources to go in daemon mode.
The set_proc_title parameter is mainly useful for the Alignak unit tests.
This to avoid changing the test process name!
:param set_proc_title: if set (default), the process title is changed to the daemon name
:type set_proc_title: bool
:return: False if the HTTP daemon can not be initialized, else True
]
if name[set_proc_title] begin[:]
call[name[self].set_proctitle, parameter[name[self].name]]
call[name[self].change_to_user_group, parameter[]]
call[name[self].change_to_workdir, parameter[]]
call[name[self].check_parallel_run, parameter[]]
if name[self].is_daemon begin[:]
if <ast.UnaryOp object at 0x7da18fe92410> begin[:]
call[name[logger].error, parameter[constant[I could not daemonize myself :(]]]
return[constant[False]]
call[name[self].setup_alignak_logger, parameter[]]
if <ast.UnaryOp object at 0x7da18fe910f0> begin[:]
call[name[logger].error, parameter[constant[I could not setup my communication daemon :(]]]
return[constant[False]]
name[self].sync_manager assign[=] call[name[self]._create_manager, parameter[]]
call[name[logger].info, parameter[constant[Starting http_daemon thread]]]
name[self].http_thread assign[=] call[name[threading].Thread, parameter[]]
name[self].http_thread.daemon assign[=] constant[True]
call[name[self].http_thread.start, parameter[]]
call[name[logger].info, parameter[constant[HTTP daemon thread started]]]
return[constant[True]]
|
keyword[def] identifier[do_daemon_init_and_start] ( identifier[self] , identifier[set_proc_title] = keyword[True] ):
literal[string]
keyword[if] identifier[set_proc_title] :
identifier[self] . identifier[set_proctitle] ( identifier[self] . identifier[name] )
identifier[self] . identifier[change_to_user_group] ()
identifier[self] . identifier[change_to_workdir] ()
identifier[self] . identifier[check_parallel_run] ()
keyword[if] identifier[self] . identifier[is_daemon] :
keyword[if] keyword[not] identifier[self] . identifier[daemonize] ():
identifier[logger] . identifier[error] ( literal[string] )
keyword[return] keyword[False]
keyword[else] :
identifier[self] . identifier[write_pid] ( identifier[os] . identifier[getpid] ())
identifier[self] . identifier[setup_alignak_logger] ()
keyword[if] keyword[not] identifier[self] . identifier[setup_communication_daemon] ():
identifier[logger] . identifier[error] ( literal[string] )
keyword[return] keyword[False]
identifier[self] . identifier[sync_manager] = identifier[self] . identifier[_create_manager] ()
identifier[logger] . identifier[info] ( literal[string] )
identifier[self] . identifier[http_thread] = identifier[threading] . identifier[Thread] ( identifier[target] = identifier[self] . identifier[http_daemon_thread] ,
identifier[name] = literal[string] % identifier[self] . identifier[name] )
identifier[self] . identifier[http_thread] . identifier[daemon] = keyword[True]
identifier[self] . identifier[http_thread] . identifier[start] ()
identifier[logger] . identifier[info] ( literal[string] )
keyword[return] keyword[True]
|
def do_daemon_init_and_start(self, set_proc_title=True):
"""Main daemon function.
Clean, allocates, initializes and starts all necessary resources to go in daemon mode.
The set_proc_title parameter is mainly useful for the Alignak unit tests.
This to avoid changing the test process name!
:param set_proc_title: if set (default), the process title is changed to the daemon name
:type set_proc_title: bool
:return: False if the HTTP daemon can not be initialized, else True
"""
if set_proc_title:
self.set_proctitle(self.name) # depends on [control=['if'], data=[]]
# Change to configured user/group account
self.change_to_user_group()
# Change the working directory
self.change_to_workdir()
# Check if I am still running
self.check_parallel_run()
# If we must daemonize, let's do it!
if self.is_daemon:
if not self.daemonize():
logger.error('I could not daemonize myself :(')
return False # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
else:
# Else, I set my own pid as the reference one
self.write_pid(os.getpid())
# # TODO: check if really necessary!
# # -------
# # Set ownership on some default log files. It may happen that these default
# # files are owned by a privileged user account
# try:
# for log_file in ['alignak.log', 'alignak-events.log']:
# if os.path.exists('/tmp/%s' % log_file):
# with open('/tmp/%s' % log_file, "w") as file_log_file:
# os.fchown(file_log_file.fileno(), self.uid, self.gid)
# if os.path.exists('/tmp/monitoring-log/%s' % log_file):
# with open('/tmp/monitoring-log/%s' % log_file, "w") as file_log_file:
# os.fchown(file_log_file.fileno(), self.uid, self.gid)
# except Exception as exp: # pylint: disable=broad-except
# # pragma: no cover
# print("Could not set default log files ownership, exception: %s" % str(exp))
# Configure the daemon logger
self.setup_alignak_logger()
# Setup the Web Services daemon
if not self.setup_communication_daemon():
logger.error('I could not setup my communication daemon :(')
return False # depends on [control=['if'], data=[]]
# Creating synchonisation manager (inter-daemon queues...)
self.sync_manager = self._create_manager()
# Start the CherryPy server through a detached thread
logger.info('Starting http_daemon thread')
# pylint: disable=bad-thread-instantiation
self.http_thread = threading.Thread(target=self.http_daemon_thread, name='%s-http_thread' % self.name)
# Setting the thread as a daemon allows to Ctrl+C to kill the main daemon
self.http_thread.daemon = True
self.http_thread.start()
# time.sleep(1)
logger.info('HTTP daemon thread started')
return True
|
def registerParentFlag(self, optionName, value):
'''Register a flag of a parent command
:Parameters:
- `optionName`: String. Name of option
- `value`: Mixed. Value of parsed flag`
'''
self.parentFlags.update({optionName: value})
return self
|
def function[registerParentFlag, parameter[self, optionName, value]]:
constant[Register a flag of a parent command
:Parameters:
- `optionName`: String. Name of option
- `value`: Mixed. Value of parsed flag`
]
call[name[self].parentFlags.update, parameter[dictionary[[<ast.Name object at 0x7da20c795960>], [<ast.Name object at 0x7da20c7957b0>]]]]
return[name[self]]
|
keyword[def] identifier[registerParentFlag] ( identifier[self] , identifier[optionName] , identifier[value] ):
literal[string]
identifier[self] . identifier[parentFlags] . identifier[update] ({ identifier[optionName] : identifier[value] })
keyword[return] identifier[self]
|
def registerParentFlag(self, optionName, value):
"""Register a flag of a parent command
:Parameters:
- `optionName`: String. Name of option
- `value`: Mixed. Value of parsed flag`
"""
self.parentFlags.update({optionName: value})
return self
|
def get_type(obj, **kwargs):
"""Return the type of an object. Do some regex to remove the "<class..." bit."""
t = type(obj)
s = extract_type(str(t))
return 'Type: {}'.format(s)
|
def function[get_type, parameter[obj]]:
constant[Return the type of an object. Do some regex to remove the "<class..." bit.]
variable[t] assign[=] call[name[type], parameter[name[obj]]]
variable[s] assign[=] call[name[extract_type], parameter[call[name[str], parameter[name[t]]]]]
return[call[constant[Type: {}].format, parameter[name[s]]]]
|
keyword[def] identifier[get_type] ( identifier[obj] ,** identifier[kwargs] ):
literal[string]
identifier[t] = identifier[type] ( identifier[obj] )
identifier[s] = identifier[extract_type] ( identifier[str] ( identifier[t] ))
keyword[return] literal[string] . identifier[format] ( identifier[s] )
|
def get_type(obj, **kwargs):
"""Return the type of an object. Do some regex to remove the "<class..." bit."""
t = type(obj)
s = extract_type(str(t))
return 'Type: {}'.format(s)
|
def set_tts(self, level):
"""
Set the values for
:data:`~aeneas.runtimeconfiguration.RuntimeConfiguration.TTS`
and
:data:`~aeneas.runtimeconfiguration.RuntimeConfiguration.TTS_PATH`
matching the given granularity level.
Currently supported levels:
* ``1`` (paragraph)
* ``2`` (sentence)
* ``3`` (word)
:param int level: the desired granularity level
"""
if level in self.TTS_GRANULARITY_MAP.keys():
tts_key, tts_path_key = self.TTS_GRANULARITY_MAP[level]
self[self.TTS] = self[tts_key]
self[self.TTS_PATH] = self[tts_path_key]
|
def function[set_tts, parameter[self, level]]:
constant[
Set the values for
:data:`~aeneas.runtimeconfiguration.RuntimeConfiguration.TTS`
and
:data:`~aeneas.runtimeconfiguration.RuntimeConfiguration.TTS_PATH`
matching the given granularity level.
Currently supported levels:
* ``1`` (paragraph)
* ``2`` (sentence)
* ``3`` (word)
:param int level: the desired granularity level
]
if compare[name[level] in call[name[self].TTS_GRANULARITY_MAP.keys, parameter[]]] begin[:]
<ast.Tuple object at 0x7da20c993d00> assign[=] call[name[self].TTS_GRANULARITY_MAP][name[level]]
call[name[self]][name[self].TTS] assign[=] call[name[self]][name[tts_key]]
call[name[self]][name[self].TTS_PATH] assign[=] call[name[self]][name[tts_path_key]]
|
keyword[def] identifier[set_tts] ( identifier[self] , identifier[level] ):
literal[string]
keyword[if] identifier[level] keyword[in] identifier[self] . identifier[TTS_GRANULARITY_MAP] . identifier[keys] ():
identifier[tts_key] , identifier[tts_path_key] = identifier[self] . identifier[TTS_GRANULARITY_MAP] [ identifier[level] ]
identifier[self] [ identifier[self] . identifier[TTS] ]= identifier[self] [ identifier[tts_key] ]
identifier[self] [ identifier[self] . identifier[TTS_PATH] ]= identifier[self] [ identifier[tts_path_key] ]
|
def set_tts(self, level):
"""
Set the values for
:data:`~aeneas.runtimeconfiguration.RuntimeConfiguration.TTS`
and
:data:`~aeneas.runtimeconfiguration.RuntimeConfiguration.TTS_PATH`
matching the given granularity level.
Currently supported levels:
* ``1`` (paragraph)
* ``2`` (sentence)
* ``3`` (word)
:param int level: the desired granularity level
"""
if level in self.TTS_GRANULARITY_MAP.keys():
(tts_key, tts_path_key) = self.TTS_GRANULARITY_MAP[level]
self[self.TTS] = self[tts_key]
self[self.TTS_PATH] = self[tts_path_key] # depends on [control=['if'], data=['level']]
|
def xy_reading_order(e1, e2):
"""
A comparator to sort bboxes from left to right, top to bottom
"""
b1 = e1.bbox
b2 = e2.bbox
if round(b1[x0]) == round(b2[x0]):
return float_cmp(b1[y0], b2[y0])
return float_cmp(b1[x0], b2[x0])
|
def function[xy_reading_order, parameter[e1, e2]]:
constant[
A comparator to sort bboxes from left to right, top to bottom
]
variable[b1] assign[=] name[e1].bbox
variable[b2] assign[=] name[e2].bbox
if compare[call[name[round], parameter[call[name[b1]][name[x0]]]] equal[==] call[name[round], parameter[call[name[b2]][name[x0]]]]] begin[:]
return[call[name[float_cmp], parameter[call[name[b1]][name[y0]], call[name[b2]][name[y0]]]]]
return[call[name[float_cmp], parameter[call[name[b1]][name[x0]], call[name[b2]][name[x0]]]]]
|
keyword[def] identifier[xy_reading_order] ( identifier[e1] , identifier[e2] ):
literal[string]
identifier[b1] = identifier[e1] . identifier[bbox]
identifier[b2] = identifier[e2] . identifier[bbox]
keyword[if] identifier[round] ( identifier[b1] [ identifier[x0] ])== identifier[round] ( identifier[b2] [ identifier[x0] ]):
keyword[return] identifier[float_cmp] ( identifier[b1] [ identifier[y0] ], identifier[b2] [ identifier[y0] ])
keyword[return] identifier[float_cmp] ( identifier[b1] [ identifier[x0] ], identifier[b2] [ identifier[x0] ])
|
def xy_reading_order(e1, e2):
"""
A comparator to sort bboxes from left to right, top to bottom
"""
b1 = e1.bbox
b2 = e2.bbox
if round(b1[x0]) == round(b2[x0]):
return float_cmp(b1[y0], b2[y0]) # depends on [control=['if'], data=[]]
return float_cmp(b1[x0], b2[x0])
|
def sample(records, k, random_seed=None):
"""Choose a length-``k`` subset of ``records``, retaining the input
order. If k > len(records), all are returned. If an integer
``random_seed`` is provided, sets ``random.seed()``
"""
if random_seed is not None:
random.seed(random_seed)
result = []
for i, record in enumerate(records):
if len(result) < k:
result.append(record)
else:
r = random.randint(0, i)
if r < k:
result[r] = record
return result
|
def function[sample, parameter[records, k, random_seed]]:
constant[Choose a length-``k`` subset of ``records``, retaining the input
order. If k > len(records), all are returned. If an integer
``random_seed`` is provided, sets ``random.seed()``
]
if compare[name[random_seed] is_not constant[None]] begin[:]
call[name[random].seed, parameter[name[random_seed]]]
variable[result] assign[=] list[[]]
for taget[tuple[[<ast.Name object at 0x7da1b1a46fe0>, <ast.Name object at 0x7da1b1a47640>]]] in starred[call[name[enumerate], parameter[name[records]]]] begin[:]
if compare[call[name[len], parameter[name[result]]] less[<] name[k]] begin[:]
call[name[result].append, parameter[name[record]]]
return[name[result]]
|
keyword[def] identifier[sample] ( identifier[records] , identifier[k] , identifier[random_seed] = keyword[None] ):
literal[string]
keyword[if] identifier[random_seed] keyword[is] keyword[not] keyword[None] :
identifier[random] . identifier[seed] ( identifier[random_seed] )
identifier[result] =[]
keyword[for] identifier[i] , identifier[record] keyword[in] identifier[enumerate] ( identifier[records] ):
keyword[if] identifier[len] ( identifier[result] )< identifier[k] :
identifier[result] . identifier[append] ( identifier[record] )
keyword[else] :
identifier[r] = identifier[random] . identifier[randint] ( literal[int] , identifier[i] )
keyword[if] identifier[r] < identifier[k] :
identifier[result] [ identifier[r] ]= identifier[record]
keyword[return] identifier[result]
|
def sample(records, k, random_seed=None):
"""Choose a length-``k`` subset of ``records``, retaining the input
order. If k > len(records), all are returned. If an integer
``random_seed`` is provided, sets ``random.seed()``
"""
if random_seed is not None:
random.seed(random_seed) # depends on [control=['if'], data=['random_seed']]
result = []
for (i, record) in enumerate(records):
if len(result) < k:
result.append(record) # depends on [control=['if'], data=[]]
else:
r = random.randint(0, i)
if r < k:
result[r] = record # depends on [control=['if'], data=['r']] # depends on [control=['for'], data=[]]
return result
|
def _send_mbus(self, frame):
"""Send modbus frame
:param frame: modbus frame to send (with MBAP for TCP/CRC for RTU)
:type frame: str (Python2) or class bytes (Python3)
:returns: number of bytes send or None if error
:rtype: int or None
"""
# for auto_open mode, check TCP and open if need
if self.__auto_open and not self.is_open():
self.open()
# send request
bytes_send = self._send(frame)
if bytes_send:
if self.__debug:
self._pretty_dump('Tx', frame)
return bytes_send
else:
return None
|
def function[_send_mbus, parameter[self, frame]]:
constant[Send modbus frame
:param frame: modbus frame to send (with MBAP for TCP/CRC for RTU)
:type frame: str (Python2) or class bytes (Python3)
:returns: number of bytes send or None if error
:rtype: int or None
]
if <ast.BoolOp object at 0x7da1b1721ea0> begin[:]
call[name[self].open, parameter[]]
variable[bytes_send] assign[=] call[name[self]._send, parameter[name[frame]]]
if name[bytes_send] begin[:]
if name[self].__debug begin[:]
call[name[self]._pretty_dump, parameter[constant[Tx], name[frame]]]
return[name[bytes_send]]
|
keyword[def] identifier[_send_mbus] ( identifier[self] , identifier[frame] ):
literal[string]
keyword[if] identifier[self] . identifier[__auto_open] keyword[and] keyword[not] identifier[self] . identifier[is_open] ():
identifier[self] . identifier[open] ()
identifier[bytes_send] = identifier[self] . identifier[_send] ( identifier[frame] )
keyword[if] identifier[bytes_send] :
keyword[if] identifier[self] . identifier[__debug] :
identifier[self] . identifier[_pretty_dump] ( literal[string] , identifier[frame] )
keyword[return] identifier[bytes_send]
keyword[else] :
keyword[return] keyword[None]
|
def _send_mbus(self, frame):
"""Send modbus frame
:param frame: modbus frame to send (with MBAP for TCP/CRC for RTU)
:type frame: str (Python2) or class bytes (Python3)
:returns: number of bytes send or None if error
:rtype: int or None
"""
# for auto_open mode, check TCP and open if need
if self.__auto_open and (not self.is_open()):
self.open() # depends on [control=['if'], data=[]]
# send request
bytes_send = self._send(frame)
if bytes_send:
if self.__debug:
self._pretty_dump('Tx', frame) # depends on [control=['if'], data=[]]
return bytes_send # depends on [control=['if'], data=[]]
else:
return None
|
def list_datastores(kwargs=None, call=None):
'''
List all the datastores for this VMware environment
CLI Example:
.. code-block:: bash
salt-cloud -f list_datastores my-vmware-config
'''
if call != 'function':
raise SaltCloudSystemExit(
'The list_datastores function must be called with '
'-f or --function.'
)
return {'Datastores': salt.utils.vmware.list_datastores(_get_si())}
|
def function[list_datastores, parameter[kwargs, call]]:
constant[
List all the datastores for this VMware environment
CLI Example:
.. code-block:: bash
salt-cloud -f list_datastores my-vmware-config
]
if compare[name[call] not_equal[!=] constant[function]] begin[:]
<ast.Raise object at 0x7da18f813010>
return[dictionary[[<ast.Constant object at 0x7da18f8133d0>], [<ast.Call object at 0x7da18f812800>]]]
|
keyword[def] identifier[list_datastores] ( identifier[kwargs] = keyword[None] , identifier[call] = keyword[None] ):
literal[string]
keyword[if] identifier[call] != literal[string] :
keyword[raise] identifier[SaltCloudSystemExit] (
literal[string]
literal[string]
)
keyword[return] { literal[string] : identifier[salt] . identifier[utils] . identifier[vmware] . identifier[list_datastores] ( identifier[_get_si] ())}
|
def list_datastores(kwargs=None, call=None):
"""
List all the datastores for this VMware environment
CLI Example:
.. code-block:: bash
salt-cloud -f list_datastores my-vmware-config
"""
if call != 'function':
raise SaltCloudSystemExit('The list_datastores function must be called with -f or --function.') # depends on [control=['if'], data=[]]
return {'Datastores': salt.utils.vmware.list_datastores(_get_si())}
|
def find_entries(self, users, start, *args, **kwargs):
"""
Find all entries for all users, from a given starting point.
If no starting point is provided, all entries are returned.
"""
forever = kwargs.get('all', False)
for user in users:
if forever:
entries = Entry.objects.filter(user=user).order_by('start_time')
else:
entries = Entry.objects.filter(
user=user, start_time__gte=start).order_by(
'start_time')
yield entries
|
def function[find_entries, parameter[self, users, start]]:
constant[
Find all entries for all users, from a given starting point.
If no starting point is provided, all entries are returned.
]
variable[forever] assign[=] call[name[kwargs].get, parameter[constant[all], constant[False]]]
for taget[name[user]] in starred[name[users]] begin[:]
if name[forever] begin[:]
variable[entries] assign[=] call[call[name[Entry].objects.filter, parameter[]].order_by, parameter[constant[start_time]]]
<ast.Yield object at 0x7da1b106a260>
|
keyword[def] identifier[find_entries] ( identifier[self] , identifier[users] , identifier[start] ,* identifier[args] ,** identifier[kwargs] ):
literal[string]
identifier[forever] = identifier[kwargs] . identifier[get] ( literal[string] , keyword[False] )
keyword[for] identifier[user] keyword[in] identifier[users] :
keyword[if] identifier[forever] :
identifier[entries] = identifier[Entry] . identifier[objects] . identifier[filter] ( identifier[user] = identifier[user] ). identifier[order_by] ( literal[string] )
keyword[else] :
identifier[entries] = identifier[Entry] . identifier[objects] . identifier[filter] (
identifier[user] = identifier[user] , identifier[start_time__gte] = identifier[start] ). identifier[order_by] (
literal[string] )
keyword[yield] identifier[entries]
|
def find_entries(self, users, start, *args, **kwargs):
"""
Find all entries for all users, from a given starting point.
If no starting point is provided, all entries are returned.
"""
forever = kwargs.get('all', False)
for user in users:
if forever:
entries = Entry.objects.filter(user=user).order_by('start_time') # depends on [control=['if'], data=[]]
else:
entries = Entry.objects.filter(user=user, start_time__gte=start).order_by('start_time')
yield entries # depends on [control=['for'], data=['user']]
|
def stop(self):
"""Closes the websocket connection and ensures it won't reconnect."""
self._logger.debug("The Slack RTMClient is shutting down.")
self._stopped = True
self._close_websocket()
|
def function[stop, parameter[self]]:
constant[Closes the websocket connection and ensures it won't reconnect.]
call[name[self]._logger.debug, parameter[constant[The Slack RTMClient is shutting down.]]]
name[self]._stopped assign[=] constant[True]
call[name[self]._close_websocket, parameter[]]
|
keyword[def] identifier[stop] ( identifier[self] ):
literal[string]
identifier[self] . identifier[_logger] . identifier[debug] ( literal[string] )
identifier[self] . identifier[_stopped] = keyword[True]
identifier[self] . identifier[_close_websocket] ()
|
def stop(self):
"""Closes the websocket connection and ensures it won't reconnect."""
self._logger.debug('The Slack RTMClient is shutting down.')
self._stopped = True
self._close_websocket()
|
def getPublicBundle(self):
"""
Fill a PublicBundle object with the public bundle data of this State.
:returns: An instance of PublicBundle, filled with the public data of this State.
"""
self.__checkSPKTimestamp()
ik_pub = self.__ik.pub
spk_pub = self.__spk["key"].pub
spk_sig = self.__spk["signature"]
otpk_pubs = [ otpk.pub for otpk in self.__otpks ]
return PublicBundle(ik_pub, spk_pub, spk_sig, otpk_pubs)
|
def function[getPublicBundle, parameter[self]]:
constant[
Fill a PublicBundle object with the public bundle data of this State.
:returns: An instance of PublicBundle, filled with the public data of this State.
]
call[name[self].__checkSPKTimestamp, parameter[]]
variable[ik_pub] assign[=] name[self].__ik.pub
variable[spk_pub] assign[=] call[name[self].__spk][constant[key]].pub
variable[spk_sig] assign[=] call[name[self].__spk][constant[signature]]
variable[otpk_pubs] assign[=] <ast.ListComp object at 0x7da18f09f940>
return[call[name[PublicBundle], parameter[name[ik_pub], name[spk_pub], name[spk_sig], name[otpk_pubs]]]]
|
keyword[def] identifier[getPublicBundle] ( identifier[self] ):
literal[string]
identifier[self] . identifier[__checkSPKTimestamp] ()
identifier[ik_pub] = identifier[self] . identifier[__ik] . identifier[pub]
identifier[spk_pub] = identifier[self] . identifier[__spk] [ literal[string] ]. identifier[pub]
identifier[spk_sig] = identifier[self] . identifier[__spk] [ literal[string] ]
identifier[otpk_pubs] =[ identifier[otpk] . identifier[pub] keyword[for] identifier[otpk] keyword[in] identifier[self] . identifier[__otpks] ]
keyword[return] identifier[PublicBundle] ( identifier[ik_pub] , identifier[spk_pub] , identifier[spk_sig] , identifier[otpk_pubs] )
|
def getPublicBundle(self):
"""
Fill a PublicBundle object with the public bundle data of this State.
:returns: An instance of PublicBundle, filled with the public data of this State.
"""
self.__checkSPKTimestamp()
ik_pub = self.__ik.pub
spk_pub = self.__spk['key'].pub
spk_sig = self.__spk['signature']
otpk_pubs = [otpk.pub for otpk in self.__otpks]
return PublicBundle(ik_pub, spk_pub, spk_sig, otpk_pubs)
|
def validate_request_row_key(self, request):
'''
Validates that all requests have the different RowKey and adds RowKey
to existing RowKey list.
request:
the request to insert, update or delete entity
'''
if self.batch_row_keys:
if self.get_request_row_key(request) in self.batch_row_keys:
raise AzureBatchValidationError(_ERROR_DUPLICATE_ROW_KEY_IN_BATCH)
else:
self.batch_row_keys.append(self.get_request_row_key(request))
|
def function[validate_request_row_key, parameter[self, request]]:
constant[
Validates that all requests have the different RowKey and adds RowKey
to existing RowKey list.
request:
the request to insert, update or delete entity
]
if name[self].batch_row_keys begin[:]
if compare[call[name[self].get_request_row_key, parameter[name[request]]] in name[self].batch_row_keys] begin[:]
<ast.Raise object at 0x7da18dc99660>
|
keyword[def] identifier[validate_request_row_key] ( identifier[self] , identifier[request] ):
literal[string]
keyword[if] identifier[self] . identifier[batch_row_keys] :
keyword[if] identifier[self] . identifier[get_request_row_key] ( identifier[request] ) keyword[in] identifier[self] . identifier[batch_row_keys] :
keyword[raise] identifier[AzureBatchValidationError] ( identifier[_ERROR_DUPLICATE_ROW_KEY_IN_BATCH] )
keyword[else] :
identifier[self] . identifier[batch_row_keys] . identifier[append] ( identifier[self] . identifier[get_request_row_key] ( identifier[request] ))
|
def validate_request_row_key(self, request):
"""
Validates that all requests have the different RowKey and adds RowKey
to existing RowKey list.
request:
the request to insert, update or delete entity
"""
if self.batch_row_keys:
if self.get_request_row_key(request) in self.batch_row_keys:
raise AzureBatchValidationError(_ERROR_DUPLICATE_ROW_KEY_IN_BATCH) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
else:
self.batch_row_keys.append(self.get_request_row_key(request))
|
def bilinear_interpolation_weights(self, lon, lat):
"""
Get the four neighbours for each (lon, lat) position and the weight
associated with each one for bilinear interpolation.
Parameters
----------
lon, lat : :class:`~astropy.units.Quantity`
The longitude and latitude values as
:class:`~astropy.units.Quantity` instances with angle units.
Returns
-------
indices : `~numpy.ndarray`
2-D array with shape (4, N) giving the four indices to use for the
interpolation
weights : `~numpy.ndarray`
2-D array with shape (4, N) giving the four weights to use for the
interpolation
"""
return bilinear_interpolation_weights(lon, lat, self.nside, order=self.order)
|
def function[bilinear_interpolation_weights, parameter[self, lon, lat]]:
constant[
Get the four neighbours for each (lon, lat) position and the weight
associated with each one for bilinear interpolation.
Parameters
----------
lon, lat : :class:`~astropy.units.Quantity`
The longitude and latitude values as
:class:`~astropy.units.Quantity` instances with angle units.
Returns
-------
indices : `~numpy.ndarray`
2-D array with shape (4, N) giving the four indices to use for the
interpolation
weights : `~numpy.ndarray`
2-D array with shape (4, N) giving the four weights to use for the
interpolation
]
return[call[name[bilinear_interpolation_weights], parameter[name[lon], name[lat], name[self].nside]]]
|
keyword[def] identifier[bilinear_interpolation_weights] ( identifier[self] , identifier[lon] , identifier[lat] ):
literal[string]
keyword[return] identifier[bilinear_interpolation_weights] ( identifier[lon] , identifier[lat] , identifier[self] . identifier[nside] , identifier[order] = identifier[self] . identifier[order] )
|
def bilinear_interpolation_weights(self, lon, lat):
"""
Get the four neighbours for each (lon, lat) position and the weight
associated with each one for bilinear interpolation.
Parameters
----------
lon, lat : :class:`~astropy.units.Quantity`
The longitude and latitude values as
:class:`~astropy.units.Quantity` instances with angle units.
Returns
-------
indices : `~numpy.ndarray`
2-D array with shape (4, N) giving the four indices to use for the
interpolation
weights : `~numpy.ndarray`
2-D array with shape (4, N) giving the four weights to use for the
interpolation
"""
return bilinear_interpolation_weights(lon, lat, self.nside, order=self.order)
|
def delete_status(app, user, status_id):
"""
Deletes a status with given ID.
https://github.com/tootsuite/documentation/blob/master/Using-the-API/API.md#deleting-a-status
"""
return http.delete(app, user, '/api/v1/statuses/{}'.format(status_id))
|
def function[delete_status, parameter[app, user, status_id]]:
constant[
Deletes a status with given ID.
https://github.com/tootsuite/documentation/blob/master/Using-the-API/API.md#deleting-a-status
]
return[call[name[http].delete, parameter[name[app], name[user], call[constant[/api/v1/statuses/{}].format, parameter[name[status_id]]]]]]
|
keyword[def] identifier[delete_status] ( identifier[app] , identifier[user] , identifier[status_id] ):
literal[string]
keyword[return] identifier[http] . identifier[delete] ( identifier[app] , identifier[user] , literal[string] . identifier[format] ( identifier[status_id] ))
|
def delete_status(app, user, status_id):
"""
Deletes a status with given ID.
https://github.com/tootsuite/documentation/blob/master/Using-the-API/API.md#deleting-a-status
"""
return http.delete(app, user, '/api/v1/statuses/{}'.format(status_id))
|
def _consume_python_token(self, token):
"""Consume the buffer up to the given token (from _peek_python_tokens).
Returns a single string that was consumed.
"""
ret = []
line, col = token[3]
if line > 1:
ret = self._buffer[:line-1]
self._buffer[:line-1] = []
ret.append(self._buffer[0][:col])
self._buffer[0] = self._buffer[0][col:]
return ''.join(ret)
|
def function[_consume_python_token, parameter[self, token]]:
constant[Consume the buffer up to the given token (from _peek_python_tokens).
Returns a single string that was consumed.
]
variable[ret] assign[=] list[[]]
<ast.Tuple object at 0x7da1b2533d90> assign[=] call[name[token]][constant[3]]
if compare[name[line] greater[>] constant[1]] begin[:]
variable[ret] assign[=] call[name[self]._buffer][<ast.Slice object at 0x7da1b2491de0>]
call[name[self]._buffer][<ast.Slice object at 0x7da1b2492470>] assign[=] list[[]]
call[name[ret].append, parameter[call[call[name[self]._buffer][constant[0]]][<ast.Slice object at 0x7da1b255aa40>]]]
call[name[self]._buffer][constant[0]] assign[=] call[call[name[self]._buffer][constant[0]]][<ast.Slice object at 0x7da1b255a6e0>]
return[call[constant[].join, parameter[name[ret]]]]
|
keyword[def] identifier[_consume_python_token] ( identifier[self] , identifier[token] ):
literal[string]
identifier[ret] =[]
identifier[line] , identifier[col] = identifier[token] [ literal[int] ]
keyword[if] identifier[line] > literal[int] :
identifier[ret] = identifier[self] . identifier[_buffer] [: identifier[line] - literal[int] ]
identifier[self] . identifier[_buffer] [: identifier[line] - literal[int] ]=[]
identifier[ret] . identifier[append] ( identifier[self] . identifier[_buffer] [ literal[int] ][: identifier[col] ])
identifier[self] . identifier[_buffer] [ literal[int] ]= identifier[self] . identifier[_buffer] [ literal[int] ][ identifier[col] :]
keyword[return] literal[string] . identifier[join] ( identifier[ret] )
|
def _consume_python_token(self, token):
"""Consume the buffer up to the given token (from _peek_python_tokens).
Returns a single string that was consumed.
"""
ret = []
(line, col) = token[3]
if line > 1:
ret = self._buffer[:line - 1]
self._buffer[:line - 1] = [] # depends on [control=['if'], data=['line']]
ret.append(self._buffer[0][:col])
self._buffer[0] = self._buffer[0][col:]
return ''.join(ret)
|
def Execute(cmd,
args,
time_limit=-1,
bypass_whitelist=False,
daemon=False,
use_client_context=False,
cwd=None):
"""Executes commands on the client.
This function is the only place where commands will be executed
by the GRR client. This makes sure that all issued commands are compared to a
white list and no malicious commands are issued on the client machine.
Args:
cmd: The command to be executed.
args: List of arguments.
time_limit: Time in seconds the process is allowed to run.
bypass_whitelist: Allow execution of things that are not in the whitelist.
Note that this should only ever be called on a binary that passes the
VerifySignedBlob check.
daemon: Start the new process in the background.
use_client_context: Run this script in the client's context. Defaults to
system context.
cwd: Current working directory for the command.
Returns:
A tuple of stdout, stderr, return value and time taken.
"""
if not bypass_whitelist and not IsExecutionWhitelisted(cmd, args):
# Whitelist doesn't contain this cmd/arg pair
logging.info("Execution disallowed by whitelist: %s %s.", cmd,
" ".join(args))
return (b"", b"Execution disallowed by whitelist.", -1, -1)
if daemon:
pid = os.fork()
if pid == 0:
# This is the child, it will run the daemon process. We call os.setsid
# here to become the session leader of this new session and the process
# group leader of the new process group so we don't get killed when the
# main process exits.
try:
os.setsid()
except OSError:
# This only works if the process is running as root.
pass
_Execute(
cmd, args, time_limit, use_client_context=use_client_context, cwd=cwd)
os._exit(0) # pylint: disable=protected-access
else:
return _Execute(
cmd, args, time_limit, use_client_context=use_client_context, cwd=cwd)
|
def function[Execute, parameter[cmd, args, time_limit, bypass_whitelist, daemon, use_client_context, cwd]]:
constant[Executes commands on the client.
This function is the only place where commands will be executed
by the GRR client. This makes sure that all issued commands are compared to a
white list and no malicious commands are issued on the client machine.
Args:
cmd: The command to be executed.
args: List of arguments.
time_limit: Time in seconds the process is allowed to run.
bypass_whitelist: Allow execution of things that are not in the whitelist.
Note that this should only ever be called on a binary that passes the
VerifySignedBlob check.
daemon: Start the new process in the background.
use_client_context: Run this script in the client's context. Defaults to
system context.
cwd: Current working directory for the command.
Returns:
A tuple of stdout, stderr, return value and time taken.
]
if <ast.BoolOp object at 0x7da2044c06d0> begin[:]
call[name[logging].info, parameter[constant[Execution disallowed by whitelist: %s %s.], name[cmd], call[constant[ ].join, parameter[name[args]]]]]
return[tuple[[<ast.Constant object at 0x7da2044c3730>, <ast.Constant object at 0x7da2044c3490>, <ast.UnaryOp object at 0x7da2044c1450>, <ast.UnaryOp object at 0x7da2044c2020>]]]
if name[daemon] begin[:]
variable[pid] assign[=] call[name[os].fork, parameter[]]
if compare[name[pid] equal[==] constant[0]] begin[:]
<ast.Try object at 0x7da2044c1150>
call[name[_Execute], parameter[name[cmd], name[args], name[time_limit]]]
call[name[os]._exit, parameter[constant[0]]]
|
keyword[def] identifier[Execute] ( identifier[cmd] ,
identifier[args] ,
identifier[time_limit] =- literal[int] ,
identifier[bypass_whitelist] = keyword[False] ,
identifier[daemon] = keyword[False] ,
identifier[use_client_context] = keyword[False] ,
identifier[cwd] = keyword[None] ):
literal[string]
keyword[if] keyword[not] identifier[bypass_whitelist] keyword[and] keyword[not] identifier[IsExecutionWhitelisted] ( identifier[cmd] , identifier[args] ):
identifier[logging] . identifier[info] ( literal[string] , identifier[cmd] ,
literal[string] . identifier[join] ( identifier[args] ))
keyword[return] ( literal[string] , literal[string] ,- literal[int] ,- literal[int] )
keyword[if] identifier[daemon] :
identifier[pid] = identifier[os] . identifier[fork] ()
keyword[if] identifier[pid] == literal[int] :
keyword[try] :
identifier[os] . identifier[setsid] ()
keyword[except] identifier[OSError] :
keyword[pass]
identifier[_Execute] (
identifier[cmd] , identifier[args] , identifier[time_limit] , identifier[use_client_context] = identifier[use_client_context] , identifier[cwd] = identifier[cwd] )
identifier[os] . identifier[_exit] ( literal[int] )
keyword[else] :
keyword[return] identifier[_Execute] (
identifier[cmd] , identifier[args] , identifier[time_limit] , identifier[use_client_context] = identifier[use_client_context] , identifier[cwd] = identifier[cwd] )
|
def Execute(cmd, args, time_limit=-1, bypass_whitelist=False, daemon=False, use_client_context=False, cwd=None):
"""Executes commands on the client.
This function is the only place where commands will be executed
by the GRR client. This makes sure that all issued commands are compared to a
white list and no malicious commands are issued on the client machine.
Args:
cmd: The command to be executed.
args: List of arguments.
time_limit: Time in seconds the process is allowed to run.
bypass_whitelist: Allow execution of things that are not in the whitelist.
Note that this should only ever be called on a binary that passes the
VerifySignedBlob check.
daemon: Start the new process in the background.
use_client_context: Run this script in the client's context. Defaults to
system context.
cwd: Current working directory for the command.
Returns:
A tuple of stdout, stderr, return value and time taken.
"""
if not bypass_whitelist and (not IsExecutionWhitelisted(cmd, args)):
# Whitelist doesn't contain this cmd/arg pair
logging.info('Execution disallowed by whitelist: %s %s.', cmd, ' '.join(args))
return (b'', b'Execution disallowed by whitelist.', -1, -1) # depends on [control=['if'], data=[]]
if daemon:
pid = os.fork()
if pid == 0:
# This is the child, it will run the daemon process. We call os.setsid
# here to become the session leader of this new session and the process
# group leader of the new process group so we don't get killed when the
# main process exits.
try:
os.setsid() # depends on [control=['try'], data=[]]
except OSError:
# This only works if the process is running as root.
pass # depends on [control=['except'], data=[]]
_Execute(cmd, args, time_limit, use_client_context=use_client_context, cwd=cwd)
os._exit(0) # pylint: disable=protected-access # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
else:
return _Execute(cmd, args, time_limit, use_client_context=use_client_context, cwd=cwd)
|
def vsan_add_disks(host, username, password, protocol=None, port=None, host_names=None):
'''
Add any VSAN-eligible disks to the VSAN System for the given host or list of host_names.
host
The location of the host.
username
The username used to login to the host, such as ``root``.
password
The password used to login to the host.
protocol
Optionally set to alternate protocol if the host is not using the default
protocol. Default protocol is ``https``.
port
Optionally set to alternate port if the host is not using the default
port. Default port is ``443``.
host_names
List of ESXi host names. When the host, username, and password credentials
are provided for a vCenter Server, the host_names argument is required to
tell vCenter which hosts need to add any VSAN-eligible disks to the host's
VSAN system.
If host_names is not provided, VSAN-eligible disks will be added to the hosts's
VSAN system for the ``host`` location instead. This is useful for when service
instance connection information is used for a single ESXi host.
CLI Example:
.. code-block:: bash
# Used for single ESXi host connection information
salt '*' vsphere.vsan_add_disks my.esxi.host root bad-password
# Used for connecting to a vCenter Server
salt '*' vsphere.vsan_add_disks my.vcenter.location root bad-password \
host_names='[esxi-1.host.com, esxi-2.host.com]'
'''
service_instance = salt.utils.vmware.get_service_instance(host=host,
username=username,
password=password,
protocol=protocol,
port=port)
host_names = _check_hosts(service_instance, host, host_names)
response = _get_vsan_eligible_disks(service_instance, host, host_names)
ret = {}
for host_name, value in six.iteritems(response):
host_ref = _get_host_ref(service_instance, host, host_name=host_name)
vsan_system = host_ref.configManager.vsanSystem
# We must have a VSAN Config in place before we can manipulate it.
if vsan_system is None:
msg = 'VSAN System Config Manager is unset for host \'{0}\'. ' \
'VSAN configuration cannot be changed without a configured ' \
'VSAN System.'.format(host_name)
log.debug(msg)
ret.update({host_name: {'Error': msg}})
else:
eligible = value.get('Eligible')
error = value.get('Error')
if eligible and isinstance(eligible, list):
# If we have eligible, matching disks, add them to VSAN.
try:
task = vsan_system.AddDisks(eligible)
salt.utils.vmware.wait_for_task(task, host_name, 'Adding disks to VSAN', sleep_seconds=3)
except vim.fault.InsufficientDisks as err:
log.debug(err.msg)
ret.update({host_name: {'Error': err.msg}})
continue
except Exception as err:
msg = '\'vsphere.vsan_add_disks\' failed for host {0}: {1}'.format(host_name, err)
log.debug(msg)
ret.update({host_name: {'Error': msg}})
continue
log.debug(
'Successfully added disks to the VSAN system for host \'%s\'.',
host_name
)
# We need to return ONLY the disk names, otherwise Message Pack can't deserialize the disk objects.
disk_names = []
for disk in eligible:
disk_names.append(disk.canonicalName)
ret.update({host_name: {'Disks Added': disk_names}})
elif eligible and isinstance(eligible, six.string_types):
# If we have a string type in the eligible value, we don't
# have any VSAN-eligible disks. Pull the message through.
ret.update({host_name: {'Disks Added': eligible}})
elif error:
# If we hit an error, populate the Error return dict for state functions.
ret.update({host_name: {'Error': error}})
else:
# If we made it this far, we somehow have eligible disks, but they didn't
# match the disk list and just got an empty list of matching disks.
ret.update({host_name: {'Disks Added': 'No new VSAN-eligible disks were found to add.'}})
return ret
|
def function[vsan_add_disks, parameter[host, username, password, protocol, port, host_names]]:
constant[
Add any VSAN-eligible disks to the VSAN System for the given host or list of host_names.
host
The location of the host.
username
The username used to login to the host, such as ``root``.
password
The password used to login to the host.
protocol
Optionally set to alternate protocol if the host is not using the default
protocol. Default protocol is ``https``.
port
Optionally set to alternate port if the host is not using the default
port. Default port is ``443``.
host_names
List of ESXi host names. When the host, username, and password credentials
are provided for a vCenter Server, the host_names argument is required to
tell vCenter which hosts need to add any VSAN-eligible disks to the host's
VSAN system.
If host_names is not provided, VSAN-eligible disks will be added to the hosts's
VSAN system for the ``host`` location instead. This is useful for when service
instance connection information is used for a single ESXi host.
CLI Example:
.. code-block:: bash
# Used for single ESXi host connection information
salt '*' vsphere.vsan_add_disks my.esxi.host root bad-password
# Used for connecting to a vCenter Server
salt '*' vsphere.vsan_add_disks my.vcenter.location root bad-password host_names='[esxi-1.host.com, esxi-2.host.com]'
]
variable[service_instance] assign[=] call[name[salt].utils.vmware.get_service_instance, parameter[]]
variable[host_names] assign[=] call[name[_check_hosts], parameter[name[service_instance], name[host], name[host_names]]]
variable[response] assign[=] call[name[_get_vsan_eligible_disks], parameter[name[service_instance], name[host], name[host_names]]]
variable[ret] assign[=] dictionary[[], []]
for taget[tuple[[<ast.Name object at 0x7da2043473d0>, <ast.Name object at 0x7da204346a70>]]] in starred[call[name[six].iteritems, parameter[name[response]]]] begin[:]
variable[host_ref] assign[=] call[name[_get_host_ref], parameter[name[service_instance], name[host]]]
variable[vsan_system] assign[=] name[host_ref].configManager.vsanSystem
if compare[name[vsan_system] is constant[None]] begin[:]
variable[msg] assign[=] call[constant[VSAN System Config Manager is unset for host '{0}'. VSAN configuration cannot be changed without a configured VSAN System.].format, parameter[name[host_name]]]
call[name[log].debug, parameter[name[msg]]]
call[name[ret].update, parameter[dictionary[[<ast.Name object at 0x7da20c6c5b40>], [<ast.Dict object at 0x7da20c6c4340>]]]]
return[name[ret]]
|
keyword[def] identifier[vsan_add_disks] ( identifier[host] , identifier[username] , identifier[password] , identifier[protocol] = keyword[None] , identifier[port] = keyword[None] , identifier[host_names] = keyword[None] ):
literal[string]
identifier[service_instance] = identifier[salt] . identifier[utils] . identifier[vmware] . identifier[get_service_instance] ( identifier[host] = identifier[host] ,
identifier[username] = identifier[username] ,
identifier[password] = identifier[password] ,
identifier[protocol] = identifier[protocol] ,
identifier[port] = identifier[port] )
identifier[host_names] = identifier[_check_hosts] ( identifier[service_instance] , identifier[host] , identifier[host_names] )
identifier[response] = identifier[_get_vsan_eligible_disks] ( identifier[service_instance] , identifier[host] , identifier[host_names] )
identifier[ret] ={}
keyword[for] identifier[host_name] , identifier[value] keyword[in] identifier[six] . identifier[iteritems] ( identifier[response] ):
identifier[host_ref] = identifier[_get_host_ref] ( identifier[service_instance] , identifier[host] , identifier[host_name] = identifier[host_name] )
identifier[vsan_system] = identifier[host_ref] . identifier[configManager] . identifier[vsanSystem]
keyword[if] identifier[vsan_system] keyword[is] keyword[None] :
identifier[msg] = literal[string] literal[string] literal[string] . identifier[format] ( identifier[host_name] )
identifier[log] . identifier[debug] ( identifier[msg] )
identifier[ret] . identifier[update] ({ identifier[host_name] :{ literal[string] : identifier[msg] }})
keyword[else] :
identifier[eligible] = identifier[value] . identifier[get] ( literal[string] )
identifier[error] = identifier[value] . identifier[get] ( literal[string] )
keyword[if] identifier[eligible] keyword[and] identifier[isinstance] ( identifier[eligible] , identifier[list] ):
keyword[try] :
identifier[task] = identifier[vsan_system] . identifier[AddDisks] ( identifier[eligible] )
identifier[salt] . identifier[utils] . identifier[vmware] . identifier[wait_for_task] ( identifier[task] , identifier[host_name] , literal[string] , identifier[sleep_seconds] = literal[int] )
keyword[except] identifier[vim] . identifier[fault] . identifier[InsufficientDisks] keyword[as] identifier[err] :
identifier[log] . identifier[debug] ( identifier[err] . identifier[msg] )
identifier[ret] . identifier[update] ({ identifier[host_name] :{ literal[string] : identifier[err] . identifier[msg] }})
keyword[continue]
keyword[except] identifier[Exception] keyword[as] identifier[err] :
identifier[msg] = literal[string] . identifier[format] ( identifier[host_name] , identifier[err] )
identifier[log] . identifier[debug] ( identifier[msg] )
identifier[ret] . identifier[update] ({ identifier[host_name] :{ literal[string] : identifier[msg] }})
keyword[continue]
identifier[log] . identifier[debug] (
literal[string] ,
identifier[host_name]
)
identifier[disk_names] =[]
keyword[for] identifier[disk] keyword[in] identifier[eligible] :
identifier[disk_names] . identifier[append] ( identifier[disk] . identifier[canonicalName] )
identifier[ret] . identifier[update] ({ identifier[host_name] :{ literal[string] : identifier[disk_names] }})
keyword[elif] identifier[eligible] keyword[and] identifier[isinstance] ( identifier[eligible] , identifier[six] . identifier[string_types] ):
identifier[ret] . identifier[update] ({ identifier[host_name] :{ literal[string] : identifier[eligible] }})
keyword[elif] identifier[error] :
identifier[ret] . identifier[update] ({ identifier[host_name] :{ literal[string] : identifier[error] }})
keyword[else] :
identifier[ret] . identifier[update] ({ identifier[host_name] :{ literal[string] : literal[string] }})
keyword[return] identifier[ret]
|
def vsan_add_disks(host, username, password, protocol=None, port=None, host_names=None):
"""
Add any VSAN-eligible disks to the VSAN System for the given host or list of host_names.
host
The location of the host.
username
The username used to login to the host, such as ``root``.
password
The password used to login to the host.
protocol
Optionally set to alternate protocol if the host is not using the default
protocol. Default protocol is ``https``.
port
Optionally set to alternate port if the host is not using the default
port. Default port is ``443``.
host_names
List of ESXi host names. When the host, username, and password credentials
are provided for a vCenter Server, the host_names argument is required to
tell vCenter which hosts need to add any VSAN-eligible disks to the host's
VSAN system.
If host_names is not provided, VSAN-eligible disks will be added to the hosts's
VSAN system for the ``host`` location instead. This is useful for when service
instance connection information is used for a single ESXi host.
CLI Example:
.. code-block:: bash
# Used for single ESXi host connection information
salt '*' vsphere.vsan_add_disks my.esxi.host root bad-password
# Used for connecting to a vCenter Server
salt '*' vsphere.vsan_add_disks my.vcenter.location root bad-password host_names='[esxi-1.host.com, esxi-2.host.com]'
"""
service_instance = salt.utils.vmware.get_service_instance(host=host, username=username, password=password, protocol=protocol, port=port)
host_names = _check_hosts(service_instance, host, host_names)
response = _get_vsan_eligible_disks(service_instance, host, host_names)
ret = {}
for (host_name, value) in six.iteritems(response):
host_ref = _get_host_ref(service_instance, host, host_name=host_name)
vsan_system = host_ref.configManager.vsanSystem
# We must have a VSAN Config in place before we can manipulate it.
if vsan_system is None:
msg = "VSAN System Config Manager is unset for host '{0}'. VSAN configuration cannot be changed without a configured VSAN System.".format(host_name)
log.debug(msg)
ret.update({host_name: {'Error': msg}}) # depends on [control=['if'], data=[]]
else:
eligible = value.get('Eligible')
error = value.get('Error')
if eligible and isinstance(eligible, list):
# If we have eligible, matching disks, add them to VSAN.
try:
task = vsan_system.AddDisks(eligible)
salt.utils.vmware.wait_for_task(task, host_name, 'Adding disks to VSAN', sleep_seconds=3) # depends on [control=['try'], data=[]]
except vim.fault.InsufficientDisks as err:
log.debug(err.msg)
ret.update({host_name: {'Error': err.msg}})
continue # depends on [control=['except'], data=['err']]
except Exception as err:
msg = "'vsphere.vsan_add_disks' failed for host {0}: {1}".format(host_name, err)
log.debug(msg)
ret.update({host_name: {'Error': msg}})
continue # depends on [control=['except'], data=['err']]
log.debug("Successfully added disks to the VSAN system for host '%s'.", host_name)
# We need to return ONLY the disk names, otherwise Message Pack can't deserialize the disk objects.
disk_names = []
for disk in eligible:
disk_names.append(disk.canonicalName) # depends on [control=['for'], data=['disk']]
ret.update({host_name: {'Disks Added': disk_names}}) # depends on [control=['if'], data=[]]
elif eligible and isinstance(eligible, six.string_types):
# If we have a string type in the eligible value, we don't
# have any VSAN-eligible disks. Pull the message through.
ret.update({host_name: {'Disks Added': eligible}}) # depends on [control=['if'], data=[]]
elif error:
# If we hit an error, populate the Error return dict for state functions.
ret.update({host_name: {'Error': error}}) # depends on [control=['if'], data=[]]
else:
# If we made it this far, we somehow have eligible disks, but they didn't
# match the disk list and just got an empty list of matching disks.
ret.update({host_name: {'Disks Added': 'No new VSAN-eligible disks were found to add.'}}) # depends on [control=['for'], data=[]]
return ret
|
def get_merge_command(self, revision):
"""Get the command to merge a revision into the current branch (without committing the result)."""
return [
'git',
'-c', 'user.name=%s' % self.author.name,
'-c', 'user.email=%s' % self.author.email,
'merge', '--no-commit', '--no-ff',
revision,
]
|
def function[get_merge_command, parameter[self, revision]]:
constant[Get the command to merge a revision into the current branch (without committing the result).]
return[list[[<ast.Constant object at 0x7da1b0a37d30>, <ast.Constant object at 0x7da1b0a37d60>, <ast.BinOp object at 0x7da1b0a36710>, <ast.Constant object at 0x7da1b0a36e30>, <ast.BinOp object at 0x7da1b0a37250>, <ast.Constant object at 0x7da1b0a34370>, <ast.Constant object at 0x7da1b0a375e0>, <ast.Constant object at 0x7da1b0a37f40>, <ast.Name object at 0x7da1b0a34d00>]]]
|
keyword[def] identifier[get_merge_command] ( identifier[self] , identifier[revision] ):
literal[string]
keyword[return] [
literal[string] ,
literal[string] , literal[string] % identifier[self] . identifier[author] . identifier[name] ,
literal[string] , literal[string] % identifier[self] . identifier[author] . identifier[email] ,
literal[string] , literal[string] , literal[string] ,
identifier[revision] ,
]
|
def get_merge_command(self, revision):
"""Get the command to merge a revision into the current branch (without committing the result)."""
return ['git', '-c', 'user.name=%s' % self.author.name, '-c', 'user.email=%s' % self.author.email, 'merge', '--no-commit', '--no-ff', revision]
|
def add_field(self, field_instance_or_string):
"""
Appends a field, can be a :class:`~es_fluent.fields.Field` or string.
"""
if isinstance(field_instance_or_string, basestring):
field_instance = Field(field_instance_or_string)
elif isinstance(field_instance_or_string, Field):
field_instance_or_string = field_instance
else:
raise ValueError('Expected a basetring or Field instance')
self.fields.append(field_instance)
return self
|
def function[add_field, parameter[self, field_instance_or_string]]:
constant[
Appends a field, can be a :class:`~es_fluent.fields.Field` or string.
]
if call[name[isinstance], parameter[name[field_instance_or_string], name[basestring]]] begin[:]
variable[field_instance] assign[=] call[name[Field], parameter[name[field_instance_or_string]]]
call[name[self].fields.append, parameter[name[field_instance]]]
return[name[self]]
|
keyword[def] identifier[add_field] ( identifier[self] , identifier[field_instance_or_string] ):
literal[string]
keyword[if] identifier[isinstance] ( identifier[field_instance_or_string] , identifier[basestring] ):
identifier[field_instance] = identifier[Field] ( identifier[field_instance_or_string] )
keyword[elif] identifier[isinstance] ( identifier[field_instance_or_string] , identifier[Field] ):
identifier[field_instance_or_string] = identifier[field_instance]
keyword[else] :
keyword[raise] identifier[ValueError] ( literal[string] )
identifier[self] . identifier[fields] . identifier[append] ( identifier[field_instance] )
keyword[return] identifier[self]
|
def add_field(self, field_instance_or_string):
"""
Appends a field, can be a :class:`~es_fluent.fields.Field` or string.
"""
if isinstance(field_instance_or_string, basestring):
field_instance = Field(field_instance_or_string) # depends on [control=['if'], data=[]]
elif isinstance(field_instance_or_string, Field):
field_instance_or_string = field_instance # depends on [control=['if'], data=[]]
else:
raise ValueError('Expected a basetring or Field instance')
self.fields.append(field_instance)
return self
|
def start_worker(self, row):
'''为task新建一个后台下载线程, 并开始下载.'''
def on_worker_started(worker, fs_id):
pass
def on_worker_received(worker, fs_id, received, received_total):
GLib.idle_add(do_worker_received, fs_id, received, received_total)
def do_worker_received(fs_id, received, received_total):
self.download_speed_add(received)
row = None
if fs_id in self.workers:
row = self.workers[fs_id][1]
else:
row = self.get_row_by_fsid(fs_id)
if not row:
return
row[CURRSIZE_COL] = received_total
curr_size = util.get_human_size(row[CURRSIZE_COL], False)[0]
total_size = util.get_human_size(row[SIZE_COL])[0]
row[PERCENT_COL] = int(row[CURRSIZE_COL] / row[SIZE_COL] * 100)
row[HUMANSIZE_COL] = '{0} / {1}'.format(curr_size, total_size)
self.update_task_db(row)
def on_worker_downloaded(worker, fs_id):
GLib.idle_add(do_worker_downloaded, fs_id)
def do_worker_downloaded(fs_id):
row = None
if fs_id in self.workers:
row = self.workers[fs_id][1]
else:
row = self.get_row_by_fsid(fs_id)
if not row:
return
row[CURRSIZE_COL] = row[SIZE_COL]
row[STATE_COL] = State.FINISHED
row[PERCENT_COL] = 100
total_size = util.get_human_size(row[SIZE_COL])[0]
row[HUMANSIZE_COL] = '{0} / {1}'.format(total_size, total_size)
row[STATENAME_COL] = StateNames[State.FINISHED]
self.update_task_db(row)
self.check_commit(force=True)
self.workers.pop(row[FSID_COL], None)
self.app.toast(_('{0} downloaded'.format(row[NAME_COL])))
self.launch_app(fs_id)
self.scan_tasks()
def on_worker_network_error(worker, fs_id):
GLib.idle_add(do_worker_network_error, fs_id)
def do_worker_network_error(fs_id):
row = self.workers.get(fs_id, None)
if row:
row = row[1]
else:
row = self.get_row_by_fsid(fs_id)
if not row:
return
row[STATE_COL] = State.ERROR
row[STATENAME_COL] = StateNames[State.ERROR]
self.update_task_db(row)
self.remove_worker(row[FSID_COL], stop=False)
if self.app.profile['retries-each']:
GLib.timeout_add(self.app.profile['retries-each'] * 60000,
self.restart_task, row)
else:
self.app.toast(_('Error occurs will downloading {0}').format(
row[NAME_COL]))
self.scan_tasks()
def do_worker_disk_error(fs_id, tmp_filepath):
# do not retry on disk-error
self.app.toast(_('Disk Error: failed to read/write {0}').format(
tmp_filepath))
def on_worker_disk_error(worker, fs_id, tmp_filepath):
GLib.idle_add(do_worker_disk_error, fs_id, tmp_filepath)
if not row or row[FSID_COL] in self.workers:
return
row[STATE_COL] = State.DOWNLOADING
row[STATENAME_COL] = StateNames[State.DOWNLOADING]
worker = Downloader(self, row)
self.workers[row[FSID_COL]] = (worker, row)
worker.connect('started', on_worker_started)
worker.connect('received', on_worker_received)
worker.connect('downloaded', on_worker_downloaded)
worker.connect('network-error', on_worker_network_error)
worker.connect('disk-error', on_worker_disk_error)
worker.start()
|
def function[start_worker, parameter[self, row]]:
constant[为task新建一个后台下载线程, 并开始下载.]
def function[on_worker_started, parameter[worker, fs_id]]:
pass
def function[on_worker_received, parameter[worker, fs_id, received, received_total]]:
call[name[GLib].idle_add, parameter[name[do_worker_received], name[fs_id], name[received], name[received_total]]]
def function[do_worker_received, parameter[fs_id, received, received_total]]:
call[name[self].download_speed_add, parameter[name[received]]]
variable[row] assign[=] constant[None]
if compare[name[fs_id] in name[self].workers] begin[:]
variable[row] assign[=] call[call[name[self].workers][name[fs_id]]][constant[1]]
if <ast.UnaryOp object at 0x7da1b1d60070> begin[:]
return[None]
call[name[row]][name[CURRSIZE_COL]] assign[=] name[received_total]
variable[curr_size] assign[=] call[call[name[util].get_human_size, parameter[call[name[row]][name[CURRSIZE_COL]], constant[False]]]][constant[0]]
variable[total_size] assign[=] call[call[name[util].get_human_size, parameter[call[name[row]][name[SIZE_COL]]]]][constant[0]]
call[name[row]][name[PERCENT_COL]] assign[=] call[name[int], parameter[binary_operation[binary_operation[call[name[row]][name[CURRSIZE_COL]] / call[name[row]][name[SIZE_COL]]] * constant[100]]]]
call[name[row]][name[HUMANSIZE_COL]] assign[=] call[constant[{0} / {1}].format, parameter[name[curr_size], name[total_size]]]
call[name[self].update_task_db, parameter[name[row]]]
def function[on_worker_downloaded, parameter[worker, fs_id]]:
call[name[GLib].idle_add, parameter[name[do_worker_downloaded], name[fs_id]]]
def function[do_worker_downloaded, parameter[fs_id]]:
variable[row] assign[=] constant[None]
if compare[name[fs_id] in name[self].workers] begin[:]
variable[row] assign[=] call[call[name[self].workers][name[fs_id]]][constant[1]]
if <ast.UnaryOp object at 0x7da1b1d61ae0> begin[:]
return[None]
call[name[row]][name[CURRSIZE_COL]] assign[=] call[name[row]][name[SIZE_COL]]
call[name[row]][name[STATE_COL]] assign[=] name[State].FINISHED
call[name[row]][name[PERCENT_COL]] assign[=] constant[100]
variable[total_size] assign[=] call[call[name[util].get_human_size, parameter[call[name[row]][name[SIZE_COL]]]]][constant[0]]
call[name[row]][name[HUMANSIZE_COL]] assign[=] call[constant[{0} / {1}].format, parameter[name[total_size], name[total_size]]]
call[name[row]][name[STATENAME_COL]] assign[=] call[name[StateNames]][name[State].FINISHED]
call[name[self].update_task_db, parameter[name[row]]]
call[name[self].check_commit, parameter[]]
call[name[self].workers.pop, parameter[call[name[row]][name[FSID_COL]], constant[None]]]
call[name[self].app.toast, parameter[call[name[_], parameter[call[constant[{0} downloaded].format, parameter[call[name[row]][name[NAME_COL]]]]]]]]
call[name[self].launch_app, parameter[name[fs_id]]]
call[name[self].scan_tasks, parameter[]]
def function[on_worker_network_error, parameter[worker, fs_id]]:
call[name[GLib].idle_add, parameter[name[do_worker_network_error], name[fs_id]]]
def function[do_worker_network_error, parameter[fs_id]]:
variable[row] assign[=] call[name[self].workers.get, parameter[name[fs_id], constant[None]]]
if name[row] begin[:]
variable[row] assign[=] call[name[row]][constant[1]]
call[name[row]][name[STATE_COL]] assign[=] name[State].ERROR
call[name[row]][name[STATENAME_COL]] assign[=] call[name[StateNames]][name[State].ERROR]
call[name[self].update_task_db, parameter[name[row]]]
call[name[self].remove_worker, parameter[call[name[row]][name[FSID_COL]]]]
if call[name[self].app.profile][constant[retries-each]] begin[:]
call[name[GLib].timeout_add, parameter[binary_operation[call[name[self].app.profile][constant[retries-each]] * constant[60000]], name[self].restart_task, name[row]]]
call[name[self].scan_tasks, parameter[]]
def function[do_worker_disk_error, parameter[fs_id, tmp_filepath]]:
call[name[self].app.toast, parameter[call[call[name[_], parameter[constant[Disk Error: failed to read/write {0}]]].format, parameter[name[tmp_filepath]]]]]
def function[on_worker_disk_error, parameter[worker, fs_id, tmp_filepath]]:
call[name[GLib].idle_add, parameter[name[do_worker_disk_error], name[fs_id], name[tmp_filepath]]]
if <ast.BoolOp object at 0x7da1b1dfaa10> begin[:]
return[None]
call[name[row]][name[STATE_COL]] assign[=] name[State].DOWNLOADING
call[name[row]][name[STATENAME_COL]] assign[=] call[name[StateNames]][name[State].DOWNLOADING]
variable[worker] assign[=] call[name[Downloader], parameter[name[self], name[row]]]
call[name[self].workers][call[name[row]][name[FSID_COL]]] assign[=] tuple[[<ast.Name object at 0x7da1b1dfa410>, <ast.Name object at 0x7da1b1df90f0>]]
call[name[worker].connect, parameter[constant[started], name[on_worker_started]]]
call[name[worker].connect, parameter[constant[received], name[on_worker_received]]]
call[name[worker].connect, parameter[constant[downloaded], name[on_worker_downloaded]]]
call[name[worker].connect, parameter[constant[network-error], name[on_worker_network_error]]]
call[name[worker].connect, parameter[constant[disk-error], name[on_worker_disk_error]]]
call[name[worker].start, parameter[]]
|
keyword[def] identifier[start_worker] ( identifier[self] , identifier[row] ):
literal[string]
keyword[def] identifier[on_worker_started] ( identifier[worker] , identifier[fs_id] ):
keyword[pass]
keyword[def] identifier[on_worker_received] ( identifier[worker] , identifier[fs_id] , identifier[received] , identifier[received_total] ):
identifier[GLib] . identifier[idle_add] ( identifier[do_worker_received] , identifier[fs_id] , identifier[received] , identifier[received_total] )
keyword[def] identifier[do_worker_received] ( identifier[fs_id] , identifier[received] , identifier[received_total] ):
identifier[self] . identifier[download_speed_add] ( identifier[received] )
identifier[row] = keyword[None]
keyword[if] identifier[fs_id] keyword[in] identifier[self] . identifier[workers] :
identifier[row] = identifier[self] . identifier[workers] [ identifier[fs_id] ][ literal[int] ]
keyword[else] :
identifier[row] = identifier[self] . identifier[get_row_by_fsid] ( identifier[fs_id] )
keyword[if] keyword[not] identifier[row] :
keyword[return]
identifier[row] [ identifier[CURRSIZE_COL] ]= identifier[received_total]
identifier[curr_size] = identifier[util] . identifier[get_human_size] ( identifier[row] [ identifier[CURRSIZE_COL] ], keyword[False] )[ literal[int] ]
identifier[total_size] = identifier[util] . identifier[get_human_size] ( identifier[row] [ identifier[SIZE_COL] ])[ literal[int] ]
identifier[row] [ identifier[PERCENT_COL] ]= identifier[int] ( identifier[row] [ identifier[CURRSIZE_COL] ]/ identifier[row] [ identifier[SIZE_COL] ]* literal[int] )
identifier[row] [ identifier[HUMANSIZE_COL] ]= literal[string] . identifier[format] ( identifier[curr_size] , identifier[total_size] )
identifier[self] . identifier[update_task_db] ( identifier[row] )
keyword[def] identifier[on_worker_downloaded] ( identifier[worker] , identifier[fs_id] ):
identifier[GLib] . identifier[idle_add] ( identifier[do_worker_downloaded] , identifier[fs_id] )
keyword[def] identifier[do_worker_downloaded] ( identifier[fs_id] ):
identifier[row] = keyword[None]
keyword[if] identifier[fs_id] keyword[in] identifier[self] . identifier[workers] :
identifier[row] = identifier[self] . identifier[workers] [ identifier[fs_id] ][ literal[int] ]
keyword[else] :
identifier[row] = identifier[self] . identifier[get_row_by_fsid] ( identifier[fs_id] )
keyword[if] keyword[not] identifier[row] :
keyword[return]
identifier[row] [ identifier[CURRSIZE_COL] ]= identifier[row] [ identifier[SIZE_COL] ]
identifier[row] [ identifier[STATE_COL] ]= identifier[State] . identifier[FINISHED]
identifier[row] [ identifier[PERCENT_COL] ]= literal[int]
identifier[total_size] = identifier[util] . identifier[get_human_size] ( identifier[row] [ identifier[SIZE_COL] ])[ literal[int] ]
identifier[row] [ identifier[HUMANSIZE_COL] ]= literal[string] . identifier[format] ( identifier[total_size] , identifier[total_size] )
identifier[row] [ identifier[STATENAME_COL] ]= identifier[StateNames] [ identifier[State] . identifier[FINISHED] ]
identifier[self] . identifier[update_task_db] ( identifier[row] )
identifier[self] . identifier[check_commit] ( identifier[force] = keyword[True] )
identifier[self] . identifier[workers] . identifier[pop] ( identifier[row] [ identifier[FSID_COL] ], keyword[None] )
identifier[self] . identifier[app] . identifier[toast] ( identifier[_] ( literal[string] . identifier[format] ( identifier[row] [ identifier[NAME_COL] ])))
identifier[self] . identifier[launch_app] ( identifier[fs_id] )
identifier[self] . identifier[scan_tasks] ()
keyword[def] identifier[on_worker_network_error] ( identifier[worker] , identifier[fs_id] ):
identifier[GLib] . identifier[idle_add] ( identifier[do_worker_network_error] , identifier[fs_id] )
keyword[def] identifier[do_worker_network_error] ( identifier[fs_id] ):
identifier[row] = identifier[self] . identifier[workers] . identifier[get] ( identifier[fs_id] , keyword[None] )
keyword[if] identifier[row] :
identifier[row] = identifier[row] [ literal[int] ]
keyword[else] :
identifier[row] = identifier[self] . identifier[get_row_by_fsid] ( identifier[fs_id] )
keyword[if] keyword[not] identifier[row] :
keyword[return]
identifier[row] [ identifier[STATE_COL] ]= identifier[State] . identifier[ERROR]
identifier[row] [ identifier[STATENAME_COL] ]= identifier[StateNames] [ identifier[State] . identifier[ERROR] ]
identifier[self] . identifier[update_task_db] ( identifier[row] )
identifier[self] . identifier[remove_worker] ( identifier[row] [ identifier[FSID_COL] ], identifier[stop] = keyword[False] )
keyword[if] identifier[self] . identifier[app] . identifier[profile] [ literal[string] ]:
identifier[GLib] . identifier[timeout_add] ( identifier[self] . identifier[app] . identifier[profile] [ literal[string] ]* literal[int] ,
identifier[self] . identifier[restart_task] , identifier[row] )
keyword[else] :
identifier[self] . identifier[app] . identifier[toast] ( identifier[_] ( literal[string] ). identifier[format] (
identifier[row] [ identifier[NAME_COL] ]))
identifier[self] . identifier[scan_tasks] ()
keyword[def] identifier[do_worker_disk_error] ( identifier[fs_id] , identifier[tmp_filepath] ):
identifier[self] . identifier[app] . identifier[toast] ( identifier[_] ( literal[string] ). identifier[format] (
identifier[tmp_filepath] ))
keyword[def] identifier[on_worker_disk_error] ( identifier[worker] , identifier[fs_id] , identifier[tmp_filepath] ):
identifier[GLib] . identifier[idle_add] ( identifier[do_worker_disk_error] , identifier[fs_id] , identifier[tmp_filepath] )
keyword[if] keyword[not] identifier[row] keyword[or] identifier[row] [ identifier[FSID_COL] ] keyword[in] identifier[self] . identifier[workers] :
keyword[return]
identifier[row] [ identifier[STATE_COL] ]= identifier[State] . identifier[DOWNLOADING]
identifier[row] [ identifier[STATENAME_COL] ]= identifier[StateNames] [ identifier[State] . identifier[DOWNLOADING] ]
identifier[worker] = identifier[Downloader] ( identifier[self] , identifier[row] )
identifier[self] . identifier[workers] [ identifier[row] [ identifier[FSID_COL] ]]=( identifier[worker] , identifier[row] )
identifier[worker] . identifier[connect] ( literal[string] , identifier[on_worker_started] )
identifier[worker] . identifier[connect] ( literal[string] , identifier[on_worker_received] )
identifier[worker] . identifier[connect] ( literal[string] , identifier[on_worker_downloaded] )
identifier[worker] . identifier[connect] ( literal[string] , identifier[on_worker_network_error] )
identifier[worker] . identifier[connect] ( literal[string] , identifier[on_worker_disk_error] )
identifier[worker] . identifier[start] ()
|
def start_worker(self, row):
"""为task新建一个后台下载线程, 并开始下载."""
def on_worker_started(worker, fs_id):
pass
def on_worker_received(worker, fs_id, received, received_total):
GLib.idle_add(do_worker_received, fs_id, received, received_total)
def do_worker_received(fs_id, received, received_total):
self.download_speed_add(received)
row = None
if fs_id in self.workers:
row = self.workers[fs_id][1] # depends on [control=['if'], data=['fs_id']]
else:
row = self.get_row_by_fsid(fs_id)
if not row:
return # depends on [control=['if'], data=[]]
row[CURRSIZE_COL] = received_total
curr_size = util.get_human_size(row[CURRSIZE_COL], False)[0]
total_size = util.get_human_size(row[SIZE_COL])[0]
row[PERCENT_COL] = int(row[CURRSIZE_COL] / row[SIZE_COL] * 100)
row[HUMANSIZE_COL] = '{0} / {1}'.format(curr_size, total_size)
self.update_task_db(row)
def on_worker_downloaded(worker, fs_id):
GLib.idle_add(do_worker_downloaded, fs_id)
def do_worker_downloaded(fs_id):
row = None
if fs_id in self.workers:
row = self.workers[fs_id][1] # depends on [control=['if'], data=['fs_id']]
else:
row = self.get_row_by_fsid(fs_id)
if not row:
return # depends on [control=['if'], data=[]]
row[CURRSIZE_COL] = row[SIZE_COL]
row[STATE_COL] = State.FINISHED
row[PERCENT_COL] = 100
total_size = util.get_human_size(row[SIZE_COL])[0]
row[HUMANSIZE_COL] = '{0} / {1}'.format(total_size, total_size)
row[STATENAME_COL] = StateNames[State.FINISHED]
self.update_task_db(row)
self.check_commit(force=True)
self.workers.pop(row[FSID_COL], None)
self.app.toast(_('{0} downloaded'.format(row[NAME_COL])))
self.launch_app(fs_id)
self.scan_tasks()
def on_worker_network_error(worker, fs_id):
GLib.idle_add(do_worker_network_error, fs_id)
def do_worker_network_error(fs_id):
row = self.workers.get(fs_id, None)
if row:
row = row[1] # depends on [control=['if'], data=[]]
else:
row = self.get_row_by_fsid(fs_id)
if not row:
return # depends on [control=['if'], data=[]]
row[STATE_COL] = State.ERROR
row[STATENAME_COL] = StateNames[State.ERROR]
self.update_task_db(row)
self.remove_worker(row[FSID_COL], stop=False)
if self.app.profile['retries-each']:
GLib.timeout_add(self.app.profile['retries-each'] * 60000, self.restart_task, row) # depends on [control=['if'], data=[]]
else:
self.app.toast(_('Error occurs will downloading {0}').format(row[NAME_COL]))
self.scan_tasks()
def do_worker_disk_error(fs_id, tmp_filepath):
# do not retry on disk-error
self.app.toast(_('Disk Error: failed to read/write {0}').format(tmp_filepath))
def on_worker_disk_error(worker, fs_id, tmp_filepath):
GLib.idle_add(do_worker_disk_error, fs_id, tmp_filepath)
if not row or row[FSID_COL] in self.workers:
return # depends on [control=['if'], data=[]]
row[STATE_COL] = State.DOWNLOADING
row[STATENAME_COL] = StateNames[State.DOWNLOADING]
worker = Downloader(self, row)
self.workers[row[FSID_COL]] = (worker, row)
worker.connect('started', on_worker_started)
worker.connect('received', on_worker_received)
worker.connect('downloaded', on_worker_downloaded)
worker.connect('network-error', on_worker_network_error)
worker.connect('disk-error', on_worker_disk_error)
worker.start()
|
def add_websocket_route(
self, handler, uri, host=None, version=None, name=None
):
"""Create a blueprint websocket route from a function.
:param handler: function for handling uri requests. Accepts function,
or class instance with a view_class method.
:param uri: endpoint at which the route will be accessible.
:param host: IP Address of FQDN for the sanic server to use.
:param version: Blueprint Version
:param name: Unique name to identify the Websocket Route
:return: function or class instance
"""
self.websocket(uri=uri, host=host, version=version, name=name)(handler)
return handler
|
def function[add_websocket_route, parameter[self, handler, uri, host, version, name]]:
constant[Create a blueprint websocket route from a function.
:param handler: function for handling uri requests. Accepts function,
or class instance with a view_class method.
:param uri: endpoint at which the route will be accessible.
:param host: IP Address of FQDN for the sanic server to use.
:param version: Blueprint Version
:param name: Unique name to identify the Websocket Route
:return: function or class instance
]
call[call[name[self].websocket, parameter[]], parameter[name[handler]]]
return[name[handler]]
|
keyword[def] identifier[add_websocket_route] (
identifier[self] , identifier[handler] , identifier[uri] , identifier[host] = keyword[None] , identifier[version] = keyword[None] , identifier[name] = keyword[None]
):
literal[string]
identifier[self] . identifier[websocket] ( identifier[uri] = identifier[uri] , identifier[host] = identifier[host] , identifier[version] = identifier[version] , identifier[name] = identifier[name] )( identifier[handler] )
keyword[return] identifier[handler]
|
def add_websocket_route(self, handler, uri, host=None, version=None, name=None):
"""Create a blueprint websocket route from a function.
:param handler: function for handling uri requests. Accepts function,
or class instance with a view_class method.
:param uri: endpoint at which the route will be accessible.
:param host: IP Address of FQDN for the sanic server to use.
:param version: Blueprint Version
:param name: Unique name to identify the Websocket Route
:return: function or class instance
"""
self.websocket(uri=uri, host=host, version=version, name=name)(handler)
return handler
|
def read_var_uint32(self):
"""Reads a varint from the stream, interprets this varint
as an unsigned, 32-bit integer, and returns the integer.
"""
i = self.read_var_uint64()
if i > wire_format.UINT32_MAX:
raise errors.DecodeError('Value out of range for uint32: %d' % i)
return i
|
def function[read_var_uint32, parameter[self]]:
constant[Reads a varint from the stream, interprets this varint
as an unsigned, 32-bit integer, and returns the integer.
]
variable[i] assign[=] call[name[self].read_var_uint64, parameter[]]
if compare[name[i] greater[>] name[wire_format].UINT32_MAX] begin[:]
<ast.Raise object at 0x7da2045678b0>
return[name[i]]
|
keyword[def] identifier[read_var_uint32] ( identifier[self] ):
literal[string]
identifier[i] = identifier[self] . identifier[read_var_uint64] ()
keyword[if] identifier[i] > identifier[wire_format] . identifier[UINT32_MAX] :
keyword[raise] identifier[errors] . identifier[DecodeError] ( literal[string] % identifier[i] )
keyword[return] identifier[i]
|
def read_var_uint32(self):
"""Reads a varint from the stream, interprets this varint
as an unsigned, 32-bit integer, and returns the integer.
"""
i = self.read_var_uint64()
if i > wire_format.UINT32_MAX:
raise errors.DecodeError('Value out of range for uint32: %d' % i) # depends on [control=['if'], data=['i']]
return i
|
async def pin_message(self, message_id: int, disable_notification: bool = False):
"""
Use this method to pin a message in a supergroup.
The bot must be an administrator in the chat for this to work and must have the appropriate admin rights.
Source: https://core.telegram.org/bots/api#pinchatmessage
:param message_id: Identifier of a message to pin
:type message_id: :obj:`base.Integer`
:param disable_notification: Pass True, if it is not necessary to send a notification to
all group members about the new pinned message
:type disable_notification: :obj:`typing.Union[base.Boolean, None]`
:return: Returns True on success.
:rtype: :obj:`base.Boolean`
"""
return await self.bot.pin_chat_message(self.id, message_id, disable_notification)
|
<ast.AsyncFunctionDef object at 0x7da1b1844be0>
|
keyword[async] keyword[def] identifier[pin_message] ( identifier[self] , identifier[message_id] : identifier[int] , identifier[disable_notification] : identifier[bool] = keyword[False] ):
literal[string]
keyword[return] keyword[await] identifier[self] . identifier[bot] . identifier[pin_chat_message] ( identifier[self] . identifier[id] , identifier[message_id] , identifier[disable_notification] )
|
async def pin_message(self, message_id: int, disable_notification: bool=False):
"""
Use this method to pin a message in a supergroup.
The bot must be an administrator in the chat for this to work and must have the appropriate admin rights.
Source: https://core.telegram.org/bots/api#pinchatmessage
:param message_id: Identifier of a message to pin
:type message_id: :obj:`base.Integer`
:param disable_notification: Pass True, if it is not necessary to send a notification to
all group members about the new pinned message
:type disable_notification: :obj:`typing.Union[base.Boolean, None]`
:return: Returns True on success.
:rtype: :obj:`base.Boolean`
"""
return await self.bot.pin_chat_message(self.id, message_id, disable_notification)
|
def setPhysicalMinimum(self, edfsignal, physical_minimum):
"""
Sets the physical_minimum of signal edfsignal.
Parameters
----------
edfsignal: int
signal number
physical_minimum: float
Sets the physical minimum
Notes
-----
This function is required for every signal and can be called only after opening a file in writemode and before the first sample write action.
"""
if (edfsignal < 0 or edfsignal > self.n_channels):
raise ChannelDoesNotExist(edfsignal)
self.channels[edfsignal]['physical_min'] = physical_minimum
self.update_header()
|
def function[setPhysicalMinimum, parameter[self, edfsignal, physical_minimum]]:
constant[
Sets the physical_minimum of signal edfsignal.
Parameters
----------
edfsignal: int
signal number
physical_minimum: float
Sets the physical minimum
Notes
-----
This function is required for every signal and can be called only after opening a file in writemode and before the first sample write action.
]
if <ast.BoolOp object at 0x7da18f58fdc0> begin[:]
<ast.Raise object at 0x7da18f58d420>
call[call[name[self].channels][name[edfsignal]]][constant[physical_min]] assign[=] name[physical_minimum]
call[name[self].update_header, parameter[]]
|
keyword[def] identifier[setPhysicalMinimum] ( identifier[self] , identifier[edfsignal] , identifier[physical_minimum] ):
literal[string]
keyword[if] ( identifier[edfsignal] < literal[int] keyword[or] identifier[edfsignal] > identifier[self] . identifier[n_channels] ):
keyword[raise] identifier[ChannelDoesNotExist] ( identifier[edfsignal] )
identifier[self] . identifier[channels] [ identifier[edfsignal] ][ literal[string] ]= identifier[physical_minimum]
identifier[self] . identifier[update_header] ()
|
def setPhysicalMinimum(self, edfsignal, physical_minimum):
"""
Sets the physical_minimum of signal edfsignal.
Parameters
----------
edfsignal: int
signal number
physical_minimum: float
Sets the physical minimum
Notes
-----
This function is required for every signal and can be called only after opening a file in writemode and before the first sample write action.
"""
if edfsignal < 0 or edfsignal > self.n_channels:
raise ChannelDoesNotExist(edfsignal) # depends on [control=['if'], data=[]]
self.channels[edfsignal]['physical_min'] = physical_minimum
self.update_header()
|
def export_gltf(scene,
extras=None,
include_normals=False):
"""
Export a scene object as a GLTF directory.
This puts each mesh into a separate file (i.e. a `buffer`)
as opposed to one larger file.
Parameters
-----------
scene : trimesh.Scene
Scene to be exported
Returns
----------
export : dict
Format: {file name : file data}
"""
# if we were passed a bare Trimesh or Path3D object
if (not util.is_instance_named(scene, "Scene")
and hasattr(scene, "scene")):
scene = scene.scene()
# create the header and buffer data
tree, buffer_items = _create_gltf_structure(
scene=scene,
extras=extras,
include_normals=include_normals)
# store files as {name : data}
files = {}
# make one buffer per buffer_items
buffers = [None] * len(buffer_items)
# A bufferView is a slice of a file
views = [None] * len(buffer_items)
# create the buffer views
for i, item in enumerate(buffer_items):
views[i] = {
"buffer": i,
"byteOffset": 0,
"byteLength": len(item)}
buffer_data = _byte_pad(bytes().join(buffer_items[i: i + 2]))
buffer_name = "gltf_buffer_{}.bin".format(i)
buffers[i] = {
"uri": buffer_name,
"byteLength": len(buffer_data)}
files[buffer_name] = buffer_data
tree["buffers"] = buffers
tree["bufferViews"] = views
files["model.gltf"] = json.dumps(tree).encode("utf-8")
return files
|
def function[export_gltf, parameter[scene, extras, include_normals]]:
constant[
Export a scene object as a GLTF directory.
This puts each mesh into a separate file (i.e. a `buffer`)
as opposed to one larger file.
Parameters
-----------
scene : trimesh.Scene
Scene to be exported
Returns
----------
export : dict
Format: {file name : file data}
]
if <ast.BoolOp object at 0x7da1b22ba1a0> begin[:]
variable[scene] assign[=] call[name[scene].scene, parameter[]]
<ast.Tuple object at 0x7da1b220e3e0> assign[=] call[name[_create_gltf_structure], parameter[]]
variable[files] assign[=] dictionary[[], []]
variable[buffers] assign[=] binary_operation[list[[<ast.Constant object at 0x7da1b220e140>]] * call[name[len], parameter[name[buffer_items]]]]
variable[views] assign[=] binary_operation[list[[<ast.Constant object at 0x7da1b220e1a0>]] * call[name[len], parameter[name[buffer_items]]]]
for taget[tuple[[<ast.Name object at 0x7da1b220ed40>, <ast.Name object at 0x7da1b220f310>]]] in starred[call[name[enumerate], parameter[name[buffer_items]]]] begin[:]
call[name[views]][name[i]] assign[=] dictionary[[<ast.Constant object at 0x7da1b220f1f0>, <ast.Constant object at 0x7da1b220d600>, <ast.Constant object at 0x7da1b220ecb0>], [<ast.Name object at 0x7da1b220f520>, <ast.Constant object at 0x7da1b220f010>, <ast.Call object at 0x7da1b220e920>]]
variable[buffer_data] assign[=] call[name[_byte_pad], parameter[call[call[name[bytes], parameter[]].join, parameter[call[name[buffer_items]][<ast.Slice object at 0x7da2045646a0>]]]]]
variable[buffer_name] assign[=] call[constant[gltf_buffer_{}.bin].format, parameter[name[i]]]
call[name[buffers]][name[i]] assign[=] dictionary[[<ast.Constant object at 0x7da204566cb0>, <ast.Constant object at 0x7da204566b30>], [<ast.Name object at 0x7da204566770>, <ast.Call object at 0x7da204565de0>]]
call[name[files]][name[buffer_name]] assign[=] name[buffer_data]
call[name[tree]][constant[buffers]] assign[=] name[buffers]
call[name[tree]][constant[bufferViews]] assign[=] name[views]
call[name[files]][constant[model.gltf]] assign[=] call[call[name[json].dumps, parameter[name[tree]]].encode, parameter[constant[utf-8]]]
return[name[files]]
|
keyword[def] identifier[export_gltf] ( identifier[scene] ,
identifier[extras] = keyword[None] ,
identifier[include_normals] = keyword[False] ):
literal[string]
keyword[if] ( keyword[not] identifier[util] . identifier[is_instance_named] ( identifier[scene] , literal[string] )
keyword[and] identifier[hasattr] ( identifier[scene] , literal[string] )):
identifier[scene] = identifier[scene] . identifier[scene] ()
identifier[tree] , identifier[buffer_items] = identifier[_create_gltf_structure] (
identifier[scene] = identifier[scene] ,
identifier[extras] = identifier[extras] ,
identifier[include_normals] = identifier[include_normals] )
identifier[files] ={}
identifier[buffers] =[ keyword[None] ]* identifier[len] ( identifier[buffer_items] )
identifier[views] =[ keyword[None] ]* identifier[len] ( identifier[buffer_items] )
keyword[for] identifier[i] , identifier[item] keyword[in] identifier[enumerate] ( identifier[buffer_items] ):
identifier[views] [ identifier[i] ]={
literal[string] : identifier[i] ,
literal[string] : literal[int] ,
literal[string] : identifier[len] ( identifier[item] )}
identifier[buffer_data] = identifier[_byte_pad] ( identifier[bytes] (). identifier[join] ( identifier[buffer_items] [ identifier[i] : identifier[i] + literal[int] ]))
identifier[buffer_name] = literal[string] . identifier[format] ( identifier[i] )
identifier[buffers] [ identifier[i] ]={
literal[string] : identifier[buffer_name] ,
literal[string] : identifier[len] ( identifier[buffer_data] )}
identifier[files] [ identifier[buffer_name] ]= identifier[buffer_data]
identifier[tree] [ literal[string] ]= identifier[buffers]
identifier[tree] [ literal[string] ]= identifier[views]
identifier[files] [ literal[string] ]= identifier[json] . identifier[dumps] ( identifier[tree] ). identifier[encode] ( literal[string] )
keyword[return] identifier[files]
|
def export_gltf(scene, extras=None, include_normals=False):
"""
Export a scene object as a GLTF directory.
This puts each mesh into a separate file (i.e. a `buffer`)
as opposed to one larger file.
Parameters
-----------
scene : trimesh.Scene
Scene to be exported
Returns
----------
export : dict
Format: {file name : file data}
"""
# if we were passed a bare Trimesh or Path3D object
if not util.is_instance_named(scene, 'Scene') and hasattr(scene, 'scene'):
scene = scene.scene() # depends on [control=['if'], data=[]]
# create the header and buffer data
(tree, buffer_items) = _create_gltf_structure(scene=scene, extras=extras, include_normals=include_normals)
# store files as {name : data}
files = {}
# make one buffer per buffer_items
buffers = [None] * len(buffer_items)
# A bufferView is a slice of a file
views = [None] * len(buffer_items)
# create the buffer views
for (i, item) in enumerate(buffer_items):
views[i] = {'buffer': i, 'byteOffset': 0, 'byteLength': len(item)}
buffer_data = _byte_pad(bytes().join(buffer_items[i:i + 2]))
buffer_name = 'gltf_buffer_{}.bin'.format(i)
buffers[i] = {'uri': buffer_name, 'byteLength': len(buffer_data)}
files[buffer_name] = buffer_data # depends on [control=['for'], data=[]]
tree['buffers'] = buffers
tree['bufferViews'] = views
files['model.gltf'] = json.dumps(tree).encode('utf-8')
return files
|
def _process_msg(self, client, state, reward, isOver):
"""
Process a message sent from some client.
"""
# in the first message, only state is valid,
# reward&isOver should be discarded
if len(client.memory) > 0:
client.memory[-1].reward = reward
if isOver:
# should clear client's memory and put to queue
self._parse_memory(0, client, True)
else:
if len(client.memory) == LOCAL_TIME_MAX + 1:
R = client.memory[-1].value
self._parse_memory(R, client, False)
# feed state and return action
self._on_state(state, client)
|
def function[_process_msg, parameter[self, client, state, reward, isOver]]:
constant[
Process a message sent from some client.
]
if compare[call[name[len], parameter[name[client].memory]] greater[>] constant[0]] begin[:]
call[name[client].memory][<ast.UnaryOp object at 0x7da18f58eec0>].reward assign[=] name[reward]
if name[isOver] begin[:]
call[name[self]._parse_memory, parameter[constant[0], name[client], constant[True]]]
call[name[self]._on_state, parameter[name[state], name[client]]]
|
keyword[def] identifier[_process_msg] ( identifier[self] , identifier[client] , identifier[state] , identifier[reward] , identifier[isOver] ):
literal[string]
keyword[if] identifier[len] ( identifier[client] . identifier[memory] )> literal[int] :
identifier[client] . identifier[memory] [- literal[int] ]. identifier[reward] = identifier[reward]
keyword[if] identifier[isOver] :
identifier[self] . identifier[_parse_memory] ( literal[int] , identifier[client] , keyword[True] )
keyword[else] :
keyword[if] identifier[len] ( identifier[client] . identifier[memory] )== identifier[LOCAL_TIME_MAX] + literal[int] :
identifier[R] = identifier[client] . identifier[memory] [- literal[int] ]. identifier[value]
identifier[self] . identifier[_parse_memory] ( identifier[R] , identifier[client] , keyword[False] )
identifier[self] . identifier[_on_state] ( identifier[state] , identifier[client] )
|
def _process_msg(self, client, state, reward, isOver):
"""
Process a message sent from some client.
"""
# in the first message, only state is valid,
# reward&isOver should be discarded
if len(client.memory) > 0:
client.memory[-1].reward = reward
if isOver:
# should clear client's memory and put to queue
self._parse_memory(0, client, True) # depends on [control=['if'], data=[]]
elif len(client.memory) == LOCAL_TIME_MAX + 1:
R = client.memory[-1].value
self._parse_memory(R, client, False) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
# feed state and return action
self._on_state(state, client)
|
def simxSetObjectOrientation(clientID, objectHandle, relativeToObjectHandle, eulerAngles, operationMode):
'''
Please have a look at the function description/documentation in the V-REP user manual
'''
angles = (ct.c_float*3)(*eulerAngles)
return c_SetObjectOrientation(clientID, objectHandle, relativeToObjectHandle, angles, operationMode)
|
def function[simxSetObjectOrientation, parameter[clientID, objectHandle, relativeToObjectHandle, eulerAngles, operationMode]]:
constant[
Please have a look at the function description/documentation in the V-REP user manual
]
variable[angles] assign[=] call[binary_operation[name[ct].c_float * constant[3]], parameter[<ast.Starred object at 0x7da207f03520>]]
return[call[name[c_SetObjectOrientation], parameter[name[clientID], name[objectHandle], name[relativeToObjectHandle], name[angles], name[operationMode]]]]
|
keyword[def] identifier[simxSetObjectOrientation] ( identifier[clientID] , identifier[objectHandle] , identifier[relativeToObjectHandle] , identifier[eulerAngles] , identifier[operationMode] ):
literal[string]
identifier[angles] =( identifier[ct] . identifier[c_float] * literal[int] )(* identifier[eulerAngles] )
keyword[return] identifier[c_SetObjectOrientation] ( identifier[clientID] , identifier[objectHandle] , identifier[relativeToObjectHandle] , identifier[angles] , identifier[operationMode] )
|
def simxSetObjectOrientation(clientID, objectHandle, relativeToObjectHandle, eulerAngles, operationMode):
"""
Please have a look at the function description/documentation in the V-REP user manual
"""
angles = (ct.c_float * 3)(*eulerAngles)
return c_SetObjectOrientation(clientID, objectHandle, relativeToObjectHandle, angles, operationMode)
|
def delete(self, ids):
"""
Method to delete equipments by their id's
:param ids: Identifiers of equipments
:return: None
"""
url = build_uri_with_ids('api/v4/equipment/%s/', ids)
return super(ApiV4Equipment, self).delete(url)
|
def function[delete, parameter[self, ids]]:
constant[
Method to delete equipments by their id's
:param ids: Identifiers of equipments
:return: None
]
variable[url] assign[=] call[name[build_uri_with_ids], parameter[constant[api/v4/equipment/%s/], name[ids]]]
return[call[call[name[super], parameter[name[ApiV4Equipment], name[self]]].delete, parameter[name[url]]]]
|
keyword[def] identifier[delete] ( identifier[self] , identifier[ids] ):
literal[string]
identifier[url] = identifier[build_uri_with_ids] ( literal[string] , identifier[ids] )
keyword[return] identifier[super] ( identifier[ApiV4Equipment] , identifier[self] ). identifier[delete] ( identifier[url] )
|
def delete(self, ids):
"""
Method to delete equipments by their id's
:param ids: Identifiers of equipments
:return: None
"""
url = build_uri_with_ids('api/v4/equipment/%s/', ids)
return super(ApiV4Equipment, self).delete(url)
|
def to_method(func):
"""
Lift :func:`func` to a method; it will be called with the first argument
'self' ignored.
:param func: Any callable object
"""
@functools.wraps(func)
def wrapper(*args, **kwargs):
"""Wrapper function.
"""
return func(*args[1:], **kwargs)
return wrapper
|
def function[to_method, parameter[func]]:
constant[
Lift :func:`func` to a method; it will be called with the first argument
'self' ignored.
:param func: Any callable object
]
def function[wrapper, parameter[]]:
constant[Wrapper function.
]
return[call[name[func], parameter[<ast.Starred object at 0x7da20e955cf0>]]]
return[name[wrapper]]
|
keyword[def] identifier[to_method] ( identifier[func] ):
literal[string]
@ identifier[functools] . identifier[wraps] ( identifier[func] )
keyword[def] identifier[wrapper] (* identifier[args] ,** identifier[kwargs] ):
literal[string]
keyword[return] identifier[func] (* identifier[args] [ literal[int] :],** identifier[kwargs] )
keyword[return] identifier[wrapper]
|
def to_method(func):
"""
Lift :func:`func` to a method; it will be called with the first argument
'self' ignored.
:param func: Any callable object
"""
@functools.wraps(func)
def wrapper(*args, **kwargs):
"""Wrapper function.
"""
return func(*args[1:], **kwargs)
return wrapper
|
def get_users(self):
"""Return the configuration of the users."""
users = {}
_JUNOS_CLASS_CISCO_PRIVILEGE_LEVEL_MAP = {
"super-user": 15,
"superuser": 15,
"operator": 5,
"read-only": 1,
"unauthorized": 0,
}
_DEFAULT_USER_DETAILS = {"level": 0, "password": "", "sshkeys": []}
users_table = junos_views.junos_users_table(self.device)
users_table.get()
users_items = users_table.items()
root_user = self._get_root()
for user_entry in users_items:
username = user_entry[0]
user_details = _DEFAULT_USER_DETAILS.copy()
user_details.update({d[0]: d[1] for d in user_entry[1] if d[1]})
user_class = user_details.pop("class", "")
user_details = {
key: py23_compat.text_type(user_details[key])
for key in user_details.keys()
}
level = _JUNOS_CLASS_CISCO_PRIVILEGE_LEVEL_MAP.get(user_class, 0)
user_details.update({"level": level})
user_details["sshkeys"] = [
user_details.pop(key)
for key in ["ssh_rsa", "ssh_dsa", "ssh_ecdsa"]
if user_details.get(key, "")
]
users[username] = user_details
users.update(root_user)
return users
|
def function[get_users, parameter[self]]:
constant[Return the configuration of the users.]
variable[users] assign[=] dictionary[[], []]
variable[_JUNOS_CLASS_CISCO_PRIVILEGE_LEVEL_MAP] assign[=] dictionary[[<ast.Constant object at 0x7da1b1d93b80>, <ast.Constant object at 0x7da1b1d91c60>, <ast.Constant object at 0x7da1b1d90430>, <ast.Constant object at 0x7da1b1d904c0>, <ast.Constant object at 0x7da1b1d903d0>], [<ast.Constant object at 0x7da1b1d93700>, <ast.Constant object at 0x7da1b1d93a30>, <ast.Constant object at 0x7da1b1d93ac0>, <ast.Constant object at 0x7da1b1d93940>, <ast.Constant object at 0x7da1b1d93880>]]
variable[_DEFAULT_USER_DETAILS] assign[=] dictionary[[<ast.Constant object at 0x7da1b1d93790>, <ast.Constant object at 0x7da1b1d93760>, <ast.Constant object at 0x7da1b1d91ea0>], [<ast.Constant object at 0x7da1b1d91e10>, <ast.Constant object at 0x7da1b1d91e40>, <ast.List object at 0x7da1b1d93d90>]]
variable[users_table] assign[=] call[name[junos_views].junos_users_table, parameter[name[self].device]]
call[name[users_table].get, parameter[]]
variable[users_items] assign[=] call[name[users_table].items, parameter[]]
variable[root_user] assign[=] call[name[self]._get_root, parameter[]]
for taget[name[user_entry]] in starred[name[users_items]] begin[:]
variable[username] assign[=] call[name[user_entry]][constant[0]]
variable[user_details] assign[=] call[name[_DEFAULT_USER_DETAILS].copy, parameter[]]
call[name[user_details].update, parameter[<ast.DictComp object at 0x7da1b1d92350>]]
variable[user_class] assign[=] call[name[user_details].pop, parameter[constant[class], constant[]]]
variable[user_details] assign[=] <ast.DictComp object at 0x7da1b1d921a0>
variable[level] assign[=] call[name[_JUNOS_CLASS_CISCO_PRIVILEGE_LEVEL_MAP].get, parameter[name[user_class], constant[0]]]
call[name[user_details].update, parameter[dictionary[[<ast.Constant object at 0x7da1b1b87100>], [<ast.Name object at 0x7da1b1b86e30>]]]]
call[name[user_details]][constant[sshkeys]] assign[=] <ast.ListComp object at 0x7da1b1cef730>
call[name[users]][name[username]] assign[=] name[user_details]
call[name[users].update, parameter[name[root_user]]]
return[name[users]]
|
keyword[def] identifier[get_users] ( identifier[self] ):
literal[string]
identifier[users] ={}
identifier[_JUNOS_CLASS_CISCO_PRIVILEGE_LEVEL_MAP] ={
literal[string] : literal[int] ,
literal[string] : literal[int] ,
literal[string] : literal[int] ,
literal[string] : literal[int] ,
literal[string] : literal[int] ,
}
identifier[_DEFAULT_USER_DETAILS] ={ literal[string] : literal[int] , literal[string] : literal[string] , literal[string] :[]}
identifier[users_table] = identifier[junos_views] . identifier[junos_users_table] ( identifier[self] . identifier[device] )
identifier[users_table] . identifier[get] ()
identifier[users_items] = identifier[users_table] . identifier[items] ()
identifier[root_user] = identifier[self] . identifier[_get_root] ()
keyword[for] identifier[user_entry] keyword[in] identifier[users_items] :
identifier[username] = identifier[user_entry] [ literal[int] ]
identifier[user_details] = identifier[_DEFAULT_USER_DETAILS] . identifier[copy] ()
identifier[user_details] . identifier[update] ({ identifier[d] [ literal[int] ]: identifier[d] [ literal[int] ] keyword[for] identifier[d] keyword[in] identifier[user_entry] [ literal[int] ] keyword[if] identifier[d] [ literal[int] ]})
identifier[user_class] = identifier[user_details] . identifier[pop] ( literal[string] , literal[string] )
identifier[user_details] ={
identifier[key] : identifier[py23_compat] . identifier[text_type] ( identifier[user_details] [ identifier[key] ])
keyword[for] identifier[key] keyword[in] identifier[user_details] . identifier[keys] ()
}
identifier[level] = identifier[_JUNOS_CLASS_CISCO_PRIVILEGE_LEVEL_MAP] . identifier[get] ( identifier[user_class] , literal[int] )
identifier[user_details] . identifier[update] ({ literal[string] : identifier[level] })
identifier[user_details] [ literal[string] ]=[
identifier[user_details] . identifier[pop] ( identifier[key] )
keyword[for] identifier[key] keyword[in] [ literal[string] , literal[string] , literal[string] ]
keyword[if] identifier[user_details] . identifier[get] ( identifier[key] , literal[string] )
]
identifier[users] [ identifier[username] ]= identifier[user_details]
identifier[users] . identifier[update] ( identifier[root_user] )
keyword[return] identifier[users]
|
def get_users(self):
"""Return the configuration of the users."""
users = {}
_JUNOS_CLASS_CISCO_PRIVILEGE_LEVEL_MAP = {'super-user': 15, 'superuser': 15, 'operator': 5, 'read-only': 1, 'unauthorized': 0}
_DEFAULT_USER_DETAILS = {'level': 0, 'password': '', 'sshkeys': []}
users_table = junos_views.junos_users_table(self.device)
users_table.get()
users_items = users_table.items()
root_user = self._get_root()
for user_entry in users_items:
username = user_entry[0]
user_details = _DEFAULT_USER_DETAILS.copy()
user_details.update({d[0]: d[1] for d in user_entry[1] if d[1]})
user_class = user_details.pop('class', '')
user_details = {key: py23_compat.text_type(user_details[key]) for key in user_details.keys()}
level = _JUNOS_CLASS_CISCO_PRIVILEGE_LEVEL_MAP.get(user_class, 0)
user_details.update({'level': level})
user_details['sshkeys'] = [user_details.pop(key) for key in ['ssh_rsa', 'ssh_dsa', 'ssh_ecdsa'] if user_details.get(key, '')]
users[username] = user_details # depends on [control=['for'], data=['user_entry']]
users.update(root_user)
return users
|
def needs_to_be_resolved(parent_obj, attr_name):
"""
This function determines, if a reference (CrossReference) needs to be
resolved or not (while creating the model, while resolving references).
Args:
parent_obj: the object containing the attribute to be resolved.
attr_name: the attribute identification object.
Returns:
True if the attribute needs to be resolved. Else False.
In case of lists of references, this function return true if any of the
references in the list needs to be resolved.
Note: outside the model building process (from_file or from_str) this
function always returns False.
"""
if hasattr(get_model(parent_obj), "_tx_reference_resolver"):
return get_model(parent_obj)._tx_reference_resolver. \
has_unresolved_crossrefs(parent_obj, attr_name)
else:
return False
|
def function[needs_to_be_resolved, parameter[parent_obj, attr_name]]:
constant[
This function determines, if a reference (CrossReference) needs to be
resolved or not (while creating the model, while resolving references).
Args:
parent_obj: the object containing the attribute to be resolved.
attr_name: the attribute identification object.
Returns:
True if the attribute needs to be resolved. Else False.
In case of lists of references, this function return true if any of the
references in the list needs to be resolved.
Note: outside the model building process (from_file or from_str) this
function always returns False.
]
if call[name[hasattr], parameter[call[name[get_model], parameter[name[parent_obj]]], constant[_tx_reference_resolver]]] begin[:]
return[call[call[name[get_model], parameter[name[parent_obj]]]._tx_reference_resolver.has_unresolved_crossrefs, parameter[name[parent_obj], name[attr_name]]]]
|
keyword[def] identifier[needs_to_be_resolved] ( identifier[parent_obj] , identifier[attr_name] ):
literal[string]
keyword[if] identifier[hasattr] ( identifier[get_model] ( identifier[parent_obj] ), literal[string] ):
keyword[return] identifier[get_model] ( identifier[parent_obj] ). identifier[_tx_reference_resolver] . identifier[has_unresolved_crossrefs] ( identifier[parent_obj] , identifier[attr_name] )
keyword[else] :
keyword[return] keyword[False]
|
def needs_to_be_resolved(parent_obj, attr_name):
"""
This function determines, if a reference (CrossReference) needs to be
resolved or not (while creating the model, while resolving references).
Args:
parent_obj: the object containing the attribute to be resolved.
attr_name: the attribute identification object.
Returns:
True if the attribute needs to be resolved. Else False.
In case of lists of references, this function return true if any of the
references in the list needs to be resolved.
Note: outside the model building process (from_file or from_str) this
function always returns False.
"""
if hasattr(get_model(parent_obj), '_tx_reference_resolver'):
return get_model(parent_obj)._tx_reference_resolver.has_unresolved_crossrefs(parent_obj, attr_name) # depends on [control=['if'], data=[]]
else:
return False
|
def GetProperties(cls, path_spec):
"""Retrieves a dictionary containing the path specification properties.
Args:
path_spec (PathSpec): path specification.
Returns:
dict[str, str]: path specification properties.
Raises:
dict: path specification properties.
"""
properties = {}
for property_name in cls.PROPERTY_NAMES:
# Note that we do not want to set the properties when not used.
if hasattr(path_spec, property_name):
properties[property_name] = getattr(path_spec, property_name)
return properties
|
def function[GetProperties, parameter[cls, path_spec]]:
constant[Retrieves a dictionary containing the path specification properties.
Args:
path_spec (PathSpec): path specification.
Returns:
dict[str, str]: path specification properties.
Raises:
dict: path specification properties.
]
variable[properties] assign[=] dictionary[[], []]
for taget[name[property_name]] in starred[name[cls].PROPERTY_NAMES] begin[:]
if call[name[hasattr], parameter[name[path_spec], name[property_name]]] begin[:]
call[name[properties]][name[property_name]] assign[=] call[name[getattr], parameter[name[path_spec], name[property_name]]]
return[name[properties]]
|
keyword[def] identifier[GetProperties] ( identifier[cls] , identifier[path_spec] ):
literal[string]
identifier[properties] ={}
keyword[for] identifier[property_name] keyword[in] identifier[cls] . identifier[PROPERTY_NAMES] :
keyword[if] identifier[hasattr] ( identifier[path_spec] , identifier[property_name] ):
identifier[properties] [ identifier[property_name] ]= identifier[getattr] ( identifier[path_spec] , identifier[property_name] )
keyword[return] identifier[properties]
|
def GetProperties(cls, path_spec):
"""Retrieves a dictionary containing the path specification properties.
Args:
path_spec (PathSpec): path specification.
Returns:
dict[str, str]: path specification properties.
Raises:
dict: path specification properties.
"""
properties = {}
for property_name in cls.PROPERTY_NAMES:
# Note that we do not want to set the properties when not used.
if hasattr(path_spec, property_name):
properties[property_name] = getattr(path_spec, property_name) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['property_name']]
return properties
|
def get_graderoster(section, instructor, requestor):
"""
Returns a restclients.GradeRoster for the passed Section model and
instructor Person.
"""
label = GradeRoster(section=section,
instructor=instructor).graderoster_label()
url = "{}/{}".format(graderoster_url, encode_section_label(label))
headers = {"Accept": "text/xhtml",
"Connection": "keep-alive",
"X-UW-Act-as": requestor.uwnetid}
response = SWS_GradeRoster_DAO().getURL(url, headers)
if response.status != 200:
root = etree.fromstring(response.data)
msg = root.find(".//*[@class='status_description']").text.strip()
raise DataFailureException(url, response.status, msg)
return GradeRoster(data=etree.fromstring(response.data.strip()),
section=section,
instructor=instructor)
|
def function[get_graderoster, parameter[section, instructor, requestor]]:
constant[
Returns a restclients.GradeRoster for the passed Section model and
instructor Person.
]
variable[label] assign[=] call[call[name[GradeRoster], parameter[]].graderoster_label, parameter[]]
variable[url] assign[=] call[constant[{}/{}].format, parameter[name[graderoster_url], call[name[encode_section_label], parameter[name[label]]]]]
variable[headers] assign[=] dictionary[[<ast.Constant object at 0x7da18bcc8d00>, <ast.Constant object at 0x7da18bcca110>, <ast.Constant object at 0x7da18bcc9a80>], [<ast.Constant object at 0x7da18bcc9390>, <ast.Constant object at 0x7da18bccb5b0>, <ast.Attribute object at 0x7da18bccb9a0>]]
variable[response] assign[=] call[call[name[SWS_GradeRoster_DAO], parameter[]].getURL, parameter[name[url], name[headers]]]
if compare[name[response].status not_equal[!=] constant[200]] begin[:]
variable[root] assign[=] call[name[etree].fromstring, parameter[name[response].data]]
variable[msg] assign[=] call[call[name[root].find, parameter[constant[.//*[@class='status_description']]]].text.strip, parameter[]]
<ast.Raise object at 0x7da18bccabf0>
return[call[name[GradeRoster], parameter[]]]
|
keyword[def] identifier[get_graderoster] ( identifier[section] , identifier[instructor] , identifier[requestor] ):
literal[string]
identifier[label] = identifier[GradeRoster] ( identifier[section] = identifier[section] ,
identifier[instructor] = identifier[instructor] ). identifier[graderoster_label] ()
identifier[url] = literal[string] . identifier[format] ( identifier[graderoster_url] , identifier[encode_section_label] ( identifier[label] ))
identifier[headers] ={ literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : identifier[requestor] . identifier[uwnetid] }
identifier[response] = identifier[SWS_GradeRoster_DAO] (). identifier[getURL] ( identifier[url] , identifier[headers] )
keyword[if] identifier[response] . identifier[status] != literal[int] :
identifier[root] = identifier[etree] . identifier[fromstring] ( identifier[response] . identifier[data] )
identifier[msg] = identifier[root] . identifier[find] ( literal[string] ). identifier[text] . identifier[strip] ()
keyword[raise] identifier[DataFailureException] ( identifier[url] , identifier[response] . identifier[status] , identifier[msg] )
keyword[return] identifier[GradeRoster] ( identifier[data] = identifier[etree] . identifier[fromstring] ( identifier[response] . identifier[data] . identifier[strip] ()),
identifier[section] = identifier[section] ,
identifier[instructor] = identifier[instructor] )
|
def get_graderoster(section, instructor, requestor):
"""
Returns a restclients.GradeRoster for the passed Section model and
instructor Person.
"""
label = GradeRoster(section=section, instructor=instructor).graderoster_label()
url = '{}/{}'.format(graderoster_url, encode_section_label(label))
headers = {'Accept': 'text/xhtml', 'Connection': 'keep-alive', 'X-UW-Act-as': requestor.uwnetid}
response = SWS_GradeRoster_DAO().getURL(url, headers)
if response.status != 200:
root = etree.fromstring(response.data)
msg = root.find(".//*[@class='status_description']").text.strip()
raise DataFailureException(url, response.status, msg) # depends on [control=['if'], data=[]]
return GradeRoster(data=etree.fromstring(response.data.strip()), section=section, instructor=instructor)
|
def _submit_metrics(self, metrics, metric_name_and_type_by_property):
"""
Resolve metric names and types and submit it.
"""
for metric in metrics:
if (
metric.name not in metric_name_and_type_by_property
and metric.name.lower() not in metric_name_and_type_by_property
):
# Only report the metrics that were specified in the configration
# Ignore added properties like 'Timestamp_Sys100NS', `Frequency_Sys100NS`, etc ...
continue
if metric_name_and_type_by_property.get(metric.name):
metric_name, metric_type = metric_name_and_type_by_property[metric.name]
elif metric_name_and_type_by_property.get(metric.name.lower()):
metric_name, metric_type = metric_name_and_type_by_property[metric.name.lower()]
else:
continue
try:
func = getattr(self, metric_type.lower())
except AttributeError:
raise Exception(u"Invalid metric type: {0}".format(metric_type))
func(metric_name, metric.value, metric.tags)
|
def function[_submit_metrics, parameter[self, metrics, metric_name_and_type_by_property]]:
constant[
Resolve metric names and types and submit it.
]
for taget[name[metric]] in starred[name[metrics]] begin[:]
if <ast.BoolOp object at 0x7da20c6e79d0> begin[:]
continue
if call[name[metric_name_and_type_by_property].get, parameter[name[metric].name]] begin[:]
<ast.Tuple object at 0x7da20c6e43a0> assign[=] call[name[metric_name_and_type_by_property]][name[metric].name]
<ast.Try object at 0x7da204347370>
call[name[func], parameter[name[metric_name], name[metric].value, name[metric].tags]]
|
keyword[def] identifier[_submit_metrics] ( identifier[self] , identifier[metrics] , identifier[metric_name_and_type_by_property] ):
literal[string]
keyword[for] identifier[metric] keyword[in] identifier[metrics] :
keyword[if] (
identifier[metric] . identifier[name] keyword[not] keyword[in] identifier[metric_name_and_type_by_property]
keyword[and] identifier[metric] . identifier[name] . identifier[lower] () keyword[not] keyword[in] identifier[metric_name_and_type_by_property]
):
keyword[continue]
keyword[if] identifier[metric_name_and_type_by_property] . identifier[get] ( identifier[metric] . identifier[name] ):
identifier[metric_name] , identifier[metric_type] = identifier[metric_name_and_type_by_property] [ identifier[metric] . identifier[name] ]
keyword[elif] identifier[metric_name_and_type_by_property] . identifier[get] ( identifier[metric] . identifier[name] . identifier[lower] ()):
identifier[metric_name] , identifier[metric_type] = identifier[metric_name_and_type_by_property] [ identifier[metric] . identifier[name] . identifier[lower] ()]
keyword[else] :
keyword[continue]
keyword[try] :
identifier[func] = identifier[getattr] ( identifier[self] , identifier[metric_type] . identifier[lower] ())
keyword[except] identifier[AttributeError] :
keyword[raise] identifier[Exception] ( literal[string] . identifier[format] ( identifier[metric_type] ))
identifier[func] ( identifier[metric_name] , identifier[metric] . identifier[value] , identifier[metric] . identifier[tags] )
|
def _submit_metrics(self, metrics, metric_name_and_type_by_property):
"""
Resolve metric names and types and submit it.
"""
for metric in metrics:
if metric.name not in metric_name_and_type_by_property and metric.name.lower() not in metric_name_and_type_by_property:
# Only report the metrics that were specified in the configration
# Ignore added properties like 'Timestamp_Sys100NS', `Frequency_Sys100NS`, etc ...
continue # depends on [control=['if'], data=[]]
if metric_name_and_type_by_property.get(metric.name):
(metric_name, metric_type) = metric_name_and_type_by_property[metric.name] # depends on [control=['if'], data=[]]
elif metric_name_and_type_by_property.get(metric.name.lower()):
(metric_name, metric_type) = metric_name_and_type_by_property[metric.name.lower()] # depends on [control=['if'], data=[]]
else:
continue
try:
func = getattr(self, metric_type.lower()) # depends on [control=['try'], data=[]]
except AttributeError:
raise Exception(u'Invalid metric type: {0}'.format(metric_type)) # depends on [control=['except'], data=[]]
func(metric_name, metric.value, metric.tags) # depends on [control=['for'], data=['metric']]
|
def __strip_extra_attributes(self, node: yaml.Node,
known_attrs: List[str]) -> None:
"""Strips tags from extra attributes.
This prevents nodes under attributes that are not part of our \
data model from being converted to objects. They'll be plain \
CommentedMaps instead, which then get converted to OrderedDicts \
for the user.
Args:
node: The node to process
known_attrs: The attributes to not strip
"""
known_keys = list(known_attrs)
known_keys.remove('self')
if 'yatiml_extra' in known_keys:
known_keys.remove('yatiml_extra')
for key_node, value_node in node.value:
if (not isinstance(key_node, yaml.ScalarNode)
or key_node.tag != 'tag:yaml.org,2002:str'):
raise RecognitionError(
('{}{}Mapping keys that are not of type'
' string are not supported by YAtiML.').format(
node.start_mark, os.linesep))
if key_node.value not in known_keys:
self.__strip_tags(value_node)
|
def function[__strip_extra_attributes, parameter[self, node, known_attrs]]:
constant[Strips tags from extra attributes.
This prevents nodes under attributes that are not part of our data model from being converted to objects. They'll be plain CommentedMaps instead, which then get converted to OrderedDicts for the user.
Args:
node: The node to process
known_attrs: The attributes to not strip
]
variable[known_keys] assign[=] call[name[list], parameter[name[known_attrs]]]
call[name[known_keys].remove, parameter[constant[self]]]
if compare[constant[yatiml_extra] in name[known_keys]] begin[:]
call[name[known_keys].remove, parameter[constant[yatiml_extra]]]
for taget[tuple[[<ast.Name object at 0x7da20cabd8a0>, <ast.Name object at 0x7da20cabdd20>]]] in starred[name[node].value] begin[:]
if <ast.BoolOp object at 0x7da20cabf910> begin[:]
<ast.Raise object at 0x7da20c6c5750>
if compare[name[key_node].value <ast.NotIn object at 0x7da2590d7190> name[known_keys]] begin[:]
call[name[self].__strip_tags, parameter[name[value_node]]]
|
keyword[def] identifier[__strip_extra_attributes] ( identifier[self] , identifier[node] : identifier[yaml] . identifier[Node] ,
identifier[known_attrs] : identifier[List] [ identifier[str] ])-> keyword[None] :
literal[string]
identifier[known_keys] = identifier[list] ( identifier[known_attrs] )
identifier[known_keys] . identifier[remove] ( literal[string] )
keyword[if] literal[string] keyword[in] identifier[known_keys] :
identifier[known_keys] . identifier[remove] ( literal[string] )
keyword[for] identifier[key_node] , identifier[value_node] keyword[in] identifier[node] . identifier[value] :
keyword[if] ( keyword[not] identifier[isinstance] ( identifier[key_node] , identifier[yaml] . identifier[ScalarNode] )
keyword[or] identifier[key_node] . identifier[tag] != literal[string] ):
keyword[raise] identifier[RecognitionError] (
( literal[string]
literal[string] ). identifier[format] (
identifier[node] . identifier[start_mark] , identifier[os] . identifier[linesep] ))
keyword[if] identifier[key_node] . identifier[value] keyword[not] keyword[in] identifier[known_keys] :
identifier[self] . identifier[__strip_tags] ( identifier[value_node] )
|
def __strip_extra_attributes(self, node: yaml.Node, known_attrs: List[str]) -> None:
"""Strips tags from extra attributes.
This prevents nodes under attributes that are not part of our data model from being converted to objects. They'll be plain CommentedMaps instead, which then get converted to OrderedDicts for the user.
Args:
node: The node to process
known_attrs: The attributes to not strip
"""
known_keys = list(known_attrs)
known_keys.remove('self')
if 'yatiml_extra' in known_keys:
known_keys.remove('yatiml_extra') # depends on [control=['if'], data=['known_keys']]
for (key_node, value_node) in node.value:
if not isinstance(key_node, yaml.ScalarNode) or key_node.tag != 'tag:yaml.org,2002:str':
raise RecognitionError('{}{}Mapping keys that are not of type string are not supported by YAtiML.'.format(node.start_mark, os.linesep)) # depends on [control=['if'], data=[]]
if key_node.value not in known_keys:
self.__strip_tags(value_node) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]]
|
def update_user_group(self, id, **kwargs): # noqa: E501
"""Update a specific user group # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.update_user_group(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: (required)
:param UserGroupWrite body: Example Body: <pre>{ \"id\": \"UserGroup identifier\", \"name\": \"UserGroup name\", \"permissions\": [ \"permission1\", \"permission2\", \"permission3\" ] }</pre>
:return: ResponseContainerUserGroup
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.update_user_group_with_http_info(id, **kwargs) # noqa: E501
else:
(data) = self.update_user_group_with_http_info(id, **kwargs) # noqa: E501
return data
|
def function[update_user_group, parameter[self, id]]:
constant[Update a specific user group # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.update_user_group(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: (required)
:param UserGroupWrite body: Example Body: <pre>{ "id": "UserGroup identifier", "name": "UserGroup name", "permissions": [ "permission1", "permission2", "permission3" ] }</pre>
:return: ResponseContainerUserGroup
If the method is called asynchronously,
returns the request thread.
]
call[name[kwargs]][constant[_return_http_data_only]] assign[=] constant[True]
if call[name[kwargs].get, parameter[constant[async_req]]] begin[:]
return[call[name[self].update_user_group_with_http_info, parameter[name[id]]]]
|
keyword[def] identifier[update_user_group] ( identifier[self] , identifier[id] ,** identifier[kwargs] ):
literal[string]
identifier[kwargs] [ literal[string] ]= keyword[True]
keyword[if] identifier[kwargs] . identifier[get] ( literal[string] ):
keyword[return] identifier[self] . identifier[update_user_group_with_http_info] ( identifier[id] ,** identifier[kwargs] )
keyword[else] :
( identifier[data] )= identifier[self] . identifier[update_user_group_with_http_info] ( identifier[id] ,** identifier[kwargs] )
keyword[return] identifier[data]
|
def update_user_group(self, id, **kwargs): # noqa: E501
'Update a specific user group # noqa: E501\n\n # noqa: E501\n This method makes a synchronous HTTP request by default. To make an\n asynchronous HTTP request, please pass async_req=True\n >>> thread = api.update_user_group(id, async_req=True)\n >>> result = thread.get()\n\n :param async_req bool\n :param str id: (required)\n :param UserGroupWrite body: Example Body: <pre>{ "id": "UserGroup identifier", "name": "UserGroup name", "permissions": [ "permission1", "permission2", "permission3" ] }</pre>\n :return: ResponseContainerUserGroup\n If the method is called asynchronously,\n returns the request thread.\n '
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.update_user_group_with_http_info(id, **kwargs) # noqa: E501 # depends on [control=['if'], data=[]]
else:
data = self.update_user_group_with_http_info(id, **kwargs) # noqa: E501
return data
|
def list_algorithms(self, page_size=None):
"""
Lists the algorithms visible to this client.
Algorithms are returned in lexicographical order.
:rtype: :class:`.Algorithm` iterator
"""
params = {}
if page_size is not None:
params['limit'] = page_size
return pagination.Iterator(
client=self._client,
path='/mdb/{}/algorithms'.format(self._instance),
params=params,
response_class=mdb_pb2.ListAlgorithmsResponse,
items_key='algorithm',
item_mapper=Algorithm,
)
|
def function[list_algorithms, parameter[self, page_size]]:
constant[
Lists the algorithms visible to this client.
Algorithms are returned in lexicographical order.
:rtype: :class:`.Algorithm` iterator
]
variable[params] assign[=] dictionary[[], []]
if compare[name[page_size] is_not constant[None]] begin[:]
call[name[params]][constant[limit]] assign[=] name[page_size]
return[call[name[pagination].Iterator, parameter[]]]
|
keyword[def] identifier[list_algorithms] ( identifier[self] , identifier[page_size] = keyword[None] ):
literal[string]
identifier[params] ={}
keyword[if] identifier[page_size] keyword[is] keyword[not] keyword[None] :
identifier[params] [ literal[string] ]= identifier[page_size]
keyword[return] identifier[pagination] . identifier[Iterator] (
identifier[client] = identifier[self] . identifier[_client] ,
identifier[path] = literal[string] . identifier[format] ( identifier[self] . identifier[_instance] ),
identifier[params] = identifier[params] ,
identifier[response_class] = identifier[mdb_pb2] . identifier[ListAlgorithmsResponse] ,
identifier[items_key] = literal[string] ,
identifier[item_mapper] = identifier[Algorithm] ,
)
|
def list_algorithms(self, page_size=None):
"""
Lists the algorithms visible to this client.
Algorithms are returned in lexicographical order.
:rtype: :class:`.Algorithm` iterator
"""
params = {}
if page_size is not None:
params['limit'] = page_size # depends on [control=['if'], data=['page_size']]
return pagination.Iterator(client=self._client, path='/mdb/{}/algorithms'.format(self._instance), params=params, response_class=mdb_pb2.ListAlgorithmsResponse, items_key='algorithm', item_mapper=Algorithm)
|
def scale_up(self, n, pods=None, **kwargs):
"""
Make sure we have n dask-workers available for this cluster
Examples
--------
>>> cluster.scale_up(20) # ask for twenty workers
"""
maximum = dask.config.get('kubernetes.count.max')
if maximum is not None and maximum < n:
logger.info("Tried to scale beyond maximum number of workers %d > %d",
n, maximum)
n = maximum
pods = pods or self._cleanup_terminated_pods(self.pods())
to_create = n - len(pods)
new_pods = []
for i in range(3):
try:
for _ in range(to_create):
new_pods.append(self.core_api.create_namespaced_pod(
self.namespace, self.pod_template))
to_create -= 1
break
except kubernetes.client.rest.ApiException as e:
if e.status == 500 and 'ServerTimeout' in e.body:
logger.info("Server timeout, retry #%d", i + 1)
time.sleep(1)
last_exception = e
continue
else:
raise
else:
raise last_exception
return new_pods
|
def function[scale_up, parameter[self, n, pods]]:
constant[
Make sure we have n dask-workers available for this cluster
Examples
--------
>>> cluster.scale_up(20) # ask for twenty workers
]
variable[maximum] assign[=] call[name[dask].config.get, parameter[constant[kubernetes.count.max]]]
if <ast.BoolOp object at 0x7da207f01960> begin[:]
call[name[logger].info, parameter[constant[Tried to scale beyond maximum number of workers %d > %d], name[n], name[maximum]]]
variable[n] assign[=] name[maximum]
variable[pods] assign[=] <ast.BoolOp object at 0x7da207f00640>
variable[to_create] assign[=] binary_operation[name[n] - call[name[len], parameter[name[pods]]]]
variable[new_pods] assign[=] list[[]]
for taget[name[i]] in starred[call[name[range], parameter[constant[3]]]] begin[:]
<ast.Try object at 0x7da18bc73d60>
return[name[new_pods]]
|
keyword[def] identifier[scale_up] ( identifier[self] , identifier[n] , identifier[pods] = keyword[None] ,** identifier[kwargs] ):
literal[string]
identifier[maximum] = identifier[dask] . identifier[config] . identifier[get] ( literal[string] )
keyword[if] identifier[maximum] keyword[is] keyword[not] keyword[None] keyword[and] identifier[maximum] < identifier[n] :
identifier[logger] . identifier[info] ( literal[string] ,
identifier[n] , identifier[maximum] )
identifier[n] = identifier[maximum]
identifier[pods] = identifier[pods] keyword[or] identifier[self] . identifier[_cleanup_terminated_pods] ( identifier[self] . identifier[pods] ())
identifier[to_create] = identifier[n] - identifier[len] ( identifier[pods] )
identifier[new_pods] =[]
keyword[for] identifier[i] keyword[in] identifier[range] ( literal[int] ):
keyword[try] :
keyword[for] identifier[_] keyword[in] identifier[range] ( identifier[to_create] ):
identifier[new_pods] . identifier[append] ( identifier[self] . identifier[core_api] . identifier[create_namespaced_pod] (
identifier[self] . identifier[namespace] , identifier[self] . identifier[pod_template] ))
identifier[to_create] -= literal[int]
keyword[break]
keyword[except] identifier[kubernetes] . identifier[client] . identifier[rest] . identifier[ApiException] keyword[as] identifier[e] :
keyword[if] identifier[e] . identifier[status] == literal[int] keyword[and] literal[string] keyword[in] identifier[e] . identifier[body] :
identifier[logger] . identifier[info] ( literal[string] , identifier[i] + literal[int] )
identifier[time] . identifier[sleep] ( literal[int] )
identifier[last_exception] = identifier[e]
keyword[continue]
keyword[else] :
keyword[raise]
keyword[else] :
keyword[raise] identifier[last_exception]
keyword[return] identifier[new_pods]
|
def scale_up(self, n, pods=None, **kwargs):
"""
Make sure we have n dask-workers available for this cluster
Examples
--------
>>> cluster.scale_up(20) # ask for twenty workers
"""
maximum = dask.config.get('kubernetes.count.max')
if maximum is not None and maximum < n:
logger.info('Tried to scale beyond maximum number of workers %d > %d', n, maximum)
n = maximum # depends on [control=['if'], data=[]]
pods = pods or self._cleanup_terminated_pods(self.pods())
to_create = n - len(pods)
new_pods = []
for i in range(3):
try:
for _ in range(to_create):
new_pods.append(self.core_api.create_namespaced_pod(self.namespace, self.pod_template))
to_create -= 1 # depends on [control=['for'], data=[]]
break # depends on [control=['try'], data=[]]
except kubernetes.client.rest.ApiException as e:
if e.status == 500 and 'ServerTimeout' in e.body:
logger.info('Server timeout, retry #%d', i + 1)
time.sleep(1)
last_exception = e
continue # depends on [control=['if'], data=[]]
else:
raise # depends on [control=['except'], data=['e']] # depends on [control=['for'], data=['i']]
else:
raise last_exception
return new_pods
|
def annihilate(predicate: tuple, stack: tuple) -> tuple:
'''Squash and reduce the input stack.
Removes the elements of input that match predicate and only keeps the last
match at the end of the stack.
'''
extra = tuple(filter(lambda x: x not in predicate, stack))
head = reduce(lambda x, y: y if y in predicate else x, stack, None)
return extra + (head,) if head else extra
|
def function[annihilate, parameter[predicate, stack]]:
constant[Squash and reduce the input stack.
Removes the elements of input that match predicate and only keeps the last
match at the end of the stack.
]
variable[extra] assign[=] call[name[tuple], parameter[call[name[filter], parameter[<ast.Lambda object at 0x7da1b02843d0>, name[stack]]]]]
variable[head] assign[=] call[name[reduce], parameter[<ast.Lambda object at 0x7da1b0285120>, name[stack], constant[None]]]
return[<ast.IfExp object at 0x7da1b0286b30>]
|
keyword[def] identifier[annihilate] ( identifier[predicate] : identifier[tuple] , identifier[stack] : identifier[tuple] )-> identifier[tuple] :
literal[string]
identifier[extra] = identifier[tuple] ( identifier[filter] ( keyword[lambda] identifier[x] : identifier[x] keyword[not] keyword[in] identifier[predicate] , identifier[stack] ))
identifier[head] = identifier[reduce] ( keyword[lambda] identifier[x] , identifier[y] : identifier[y] keyword[if] identifier[y] keyword[in] identifier[predicate] keyword[else] identifier[x] , identifier[stack] , keyword[None] )
keyword[return] identifier[extra] +( identifier[head] ,) keyword[if] identifier[head] keyword[else] identifier[extra]
|
def annihilate(predicate: tuple, stack: tuple) -> tuple:
"""Squash and reduce the input stack.
Removes the elements of input that match predicate and only keeps the last
match at the end of the stack.
"""
extra = tuple(filter(lambda x: x not in predicate, stack))
head = reduce(lambda x, y: y if y in predicate else x, stack, None)
return extra + (head,) if head else extra
|
def get_stock(self, symbol: str) -> Commodity:
"""Returns the stock/commodity object for the given symbol"""
# Check if we have the exchange name (namespace).
if ":" in symbol:
# We have a namespace
symbol_parts = symbol.split(":")
exchange = symbol_parts[0]
symbol = symbol_parts[1]
security = self.book.get(Commodity, namespace=exchange, mnemonic=symbol)
else:
#with database.Database().open_book() as book:
security = self.book.get(Commodity, mnemonic=symbol)
return security
|
def function[get_stock, parameter[self, symbol]]:
constant[Returns the stock/commodity object for the given symbol]
if compare[constant[:] in name[symbol]] begin[:]
variable[symbol_parts] assign[=] call[name[symbol].split, parameter[constant[:]]]
variable[exchange] assign[=] call[name[symbol_parts]][constant[0]]
variable[symbol] assign[=] call[name[symbol_parts]][constant[1]]
variable[security] assign[=] call[name[self].book.get, parameter[name[Commodity]]]
return[name[security]]
|
keyword[def] identifier[get_stock] ( identifier[self] , identifier[symbol] : identifier[str] )-> identifier[Commodity] :
literal[string]
keyword[if] literal[string] keyword[in] identifier[symbol] :
identifier[symbol_parts] = identifier[symbol] . identifier[split] ( literal[string] )
identifier[exchange] = identifier[symbol_parts] [ literal[int] ]
identifier[symbol] = identifier[symbol_parts] [ literal[int] ]
identifier[security] = identifier[self] . identifier[book] . identifier[get] ( identifier[Commodity] , identifier[namespace] = identifier[exchange] , identifier[mnemonic] = identifier[symbol] )
keyword[else] :
identifier[security] = identifier[self] . identifier[book] . identifier[get] ( identifier[Commodity] , identifier[mnemonic] = identifier[symbol] )
keyword[return] identifier[security]
|
def get_stock(self, symbol: str) -> Commodity:
"""Returns the stock/commodity object for the given symbol"""
# Check if we have the exchange name (namespace).
if ':' in symbol:
# We have a namespace
symbol_parts = symbol.split(':')
exchange = symbol_parts[0]
symbol = symbol_parts[1]
security = self.book.get(Commodity, namespace=exchange, mnemonic=symbol) # depends on [control=['if'], data=['symbol']]
else:
#with database.Database().open_book() as book:
security = self.book.get(Commodity, mnemonic=symbol)
return security
|
def get_artist_location(self, cache=True):
"""Get the location of a song's artist.
Args:
cache (bool): A boolean indicating whether or not the cached value should be used (if available). Defaults to True.
Returns:
An artist location object.
Example:
>>> s = song.Song('SOQKVPH12A58A7AF4D')
>>> s.artist_location
{u'latitude': 34.053489999999996, u'location': u'Los Angeles, CA', u'longitude': -118.24532000000001}
>>>
"""
if not (cache and ('artist_location' in self.cache)):
response = self.get_attribute('profile', bucket='artist_location')
self.cache['artist_location'] = response['songs'][0]['artist_location']
return self.cache['artist_location']
|
def function[get_artist_location, parameter[self, cache]]:
constant[Get the location of a song's artist.
Args:
cache (bool): A boolean indicating whether or not the cached value should be used (if available). Defaults to True.
Returns:
An artist location object.
Example:
>>> s = song.Song('SOQKVPH12A58A7AF4D')
>>> s.artist_location
{u'latitude': 34.053489999999996, u'location': u'Los Angeles, CA', u'longitude': -118.24532000000001}
>>>
]
if <ast.UnaryOp object at 0x7da1b040e860> begin[:]
variable[response] assign[=] call[name[self].get_attribute, parameter[constant[profile]]]
call[name[self].cache][constant[artist_location]] assign[=] call[call[call[name[response]][constant[songs]]][constant[0]]][constant[artist_location]]
return[call[name[self].cache][constant[artist_location]]]
|
keyword[def] identifier[get_artist_location] ( identifier[self] , identifier[cache] = keyword[True] ):
literal[string]
keyword[if] keyword[not] ( identifier[cache] keyword[and] ( literal[string] keyword[in] identifier[self] . identifier[cache] )):
identifier[response] = identifier[self] . identifier[get_attribute] ( literal[string] , identifier[bucket] = literal[string] )
identifier[self] . identifier[cache] [ literal[string] ]= identifier[response] [ literal[string] ][ literal[int] ][ literal[string] ]
keyword[return] identifier[self] . identifier[cache] [ literal[string] ]
|
def get_artist_location(self, cache=True):
"""Get the location of a song's artist.
Args:
cache (bool): A boolean indicating whether or not the cached value should be used (if available). Defaults to True.
Returns:
An artist location object.
Example:
>>> s = song.Song('SOQKVPH12A58A7AF4D')
>>> s.artist_location
{u'latitude': 34.053489999999996, u'location': u'Los Angeles, CA', u'longitude': -118.24532000000001}
>>>
"""
if not (cache and 'artist_location' in self.cache):
response = self.get_attribute('profile', bucket='artist_location')
self.cache['artist_location'] = response['songs'][0]['artist_location'] # depends on [control=['if'], data=[]]
return self.cache['artist_location']
|
def _check_convergence(self, F):
""" Checks if the solution has converged to within the specified
tolerance.
"""
normF = linalg.norm(F, Inf)
if normF < self.tolerance:
converged = True
else:
converged = False
if self.verbose:
logger.info("Difference: %.3f" % (normF - self.tolerance))
return converged
|
def function[_check_convergence, parameter[self, F]]:
constant[ Checks if the solution has converged to within the specified
tolerance.
]
variable[normF] assign[=] call[name[linalg].norm, parameter[name[F], name[Inf]]]
if compare[name[normF] less[<] name[self].tolerance] begin[:]
variable[converged] assign[=] constant[True]
return[name[converged]]
|
keyword[def] identifier[_check_convergence] ( identifier[self] , identifier[F] ):
literal[string]
identifier[normF] = identifier[linalg] . identifier[norm] ( identifier[F] , identifier[Inf] )
keyword[if] identifier[normF] < identifier[self] . identifier[tolerance] :
identifier[converged] = keyword[True]
keyword[else] :
identifier[converged] = keyword[False]
keyword[if] identifier[self] . identifier[verbose] :
identifier[logger] . identifier[info] ( literal[string] %( identifier[normF] - identifier[self] . identifier[tolerance] ))
keyword[return] identifier[converged]
|
def _check_convergence(self, F):
""" Checks if the solution has converged to within the specified
tolerance.
"""
normF = linalg.norm(F, Inf)
if normF < self.tolerance:
converged = True # depends on [control=['if'], data=[]]
else:
converged = False
if self.verbose:
logger.info('Difference: %.3f' % (normF - self.tolerance)) # depends on [control=['if'], data=[]]
return converged
|
def gates_in_isa(isa):
"""
Generate the full gateset associated with an ISA.
:param ISA isa: The instruction set architecture for a QPU.
:return: A sequence of Gate objects encapsulating all gates compatible with the ISA.
:rtype: Sequence[Gate]
"""
gates = []
for q in isa.qubits:
if q.dead:
# TODO: dead qubits may in the future lead to some implicit re-indexing
continue
if q.type in ["Xhalves"]:
gates.extend([
Gate("I", [], [unpack_qubit(q.id)]),
Gate("RX", [np.pi / 2], [unpack_qubit(q.id)]),
Gate("RX", [-np.pi / 2], [unpack_qubit(q.id)]),
Gate("RX", [np.pi], [unpack_qubit(q.id)]),
Gate("RX", [-np.pi], [unpack_qubit(q.id)]),
Gate("RZ", [THETA], [unpack_qubit(q.id)]),
])
else: # pragma no coverage
raise ValueError("Unknown qubit type: {}".format(q.type))
for e in isa.edges:
if e.dead:
continue
targets = [unpack_qubit(t) for t in e.targets]
if e.type in ["CZ", "ISWAP"]:
gates.append(Gate(e.type, [], targets))
gates.append(Gate(e.type, [], targets[::-1]))
elif e.type in ["CPHASE"]:
gates.append(Gate(e.type, [THETA], targets))
gates.append(Gate(e.type, [THETA], targets[::-1]))
else: # pragma no coverage
raise ValueError("Unknown edge type: {}".format(e.type))
return gates
|
def function[gates_in_isa, parameter[isa]]:
constant[
Generate the full gateset associated with an ISA.
:param ISA isa: The instruction set architecture for a QPU.
:return: A sequence of Gate objects encapsulating all gates compatible with the ISA.
:rtype: Sequence[Gate]
]
variable[gates] assign[=] list[[]]
for taget[name[q]] in starred[name[isa].qubits] begin[:]
if name[q].dead begin[:]
continue
if compare[name[q].type in list[[<ast.Constant object at 0x7da1b1bfb040>]]] begin[:]
call[name[gates].extend, parameter[list[[<ast.Call object at 0x7da1b1bf9780>, <ast.Call object at 0x7da1b1bf93c0>, <ast.Call object at 0x7da1b1bfb700>, <ast.Call object at 0x7da1b1bfa410>, <ast.Call object at 0x7da1b1bf9c90>, <ast.Call object at 0x7da1b1bf9720>]]]]
for taget[name[e]] in starred[name[isa].edges] begin[:]
if name[e].dead begin[:]
continue
variable[targets] assign[=] <ast.ListComp object at 0x7da1b1bf95d0>
if compare[name[e].type in list[[<ast.Constant object at 0x7da1b1bfa020>, <ast.Constant object at 0x7da1b1bf82b0>]]] begin[:]
call[name[gates].append, parameter[call[name[Gate], parameter[name[e].type, list[[]], name[targets]]]]]
call[name[gates].append, parameter[call[name[Gate], parameter[name[e].type, list[[]], call[name[targets]][<ast.Slice object at 0x7da1b1bf93f0>]]]]]
return[name[gates]]
|
keyword[def] identifier[gates_in_isa] ( identifier[isa] ):
literal[string]
identifier[gates] =[]
keyword[for] identifier[q] keyword[in] identifier[isa] . identifier[qubits] :
keyword[if] identifier[q] . identifier[dead] :
keyword[continue]
keyword[if] identifier[q] . identifier[type] keyword[in] [ literal[string] ]:
identifier[gates] . identifier[extend] ([
identifier[Gate] ( literal[string] ,[],[ identifier[unpack_qubit] ( identifier[q] . identifier[id] )]),
identifier[Gate] ( literal[string] ,[ identifier[np] . identifier[pi] / literal[int] ],[ identifier[unpack_qubit] ( identifier[q] . identifier[id] )]),
identifier[Gate] ( literal[string] ,[- identifier[np] . identifier[pi] / literal[int] ],[ identifier[unpack_qubit] ( identifier[q] . identifier[id] )]),
identifier[Gate] ( literal[string] ,[ identifier[np] . identifier[pi] ],[ identifier[unpack_qubit] ( identifier[q] . identifier[id] )]),
identifier[Gate] ( literal[string] ,[- identifier[np] . identifier[pi] ],[ identifier[unpack_qubit] ( identifier[q] . identifier[id] )]),
identifier[Gate] ( literal[string] ,[ identifier[THETA] ],[ identifier[unpack_qubit] ( identifier[q] . identifier[id] )]),
])
keyword[else] :
keyword[raise] identifier[ValueError] ( literal[string] . identifier[format] ( identifier[q] . identifier[type] ))
keyword[for] identifier[e] keyword[in] identifier[isa] . identifier[edges] :
keyword[if] identifier[e] . identifier[dead] :
keyword[continue]
identifier[targets] =[ identifier[unpack_qubit] ( identifier[t] ) keyword[for] identifier[t] keyword[in] identifier[e] . identifier[targets] ]
keyword[if] identifier[e] . identifier[type] keyword[in] [ literal[string] , literal[string] ]:
identifier[gates] . identifier[append] ( identifier[Gate] ( identifier[e] . identifier[type] ,[], identifier[targets] ))
identifier[gates] . identifier[append] ( identifier[Gate] ( identifier[e] . identifier[type] ,[], identifier[targets] [::- literal[int] ]))
keyword[elif] identifier[e] . identifier[type] keyword[in] [ literal[string] ]:
identifier[gates] . identifier[append] ( identifier[Gate] ( identifier[e] . identifier[type] ,[ identifier[THETA] ], identifier[targets] ))
identifier[gates] . identifier[append] ( identifier[Gate] ( identifier[e] . identifier[type] ,[ identifier[THETA] ], identifier[targets] [::- literal[int] ]))
keyword[else] :
keyword[raise] identifier[ValueError] ( literal[string] . identifier[format] ( identifier[e] . identifier[type] ))
keyword[return] identifier[gates]
|
def gates_in_isa(isa):
"""
Generate the full gateset associated with an ISA.
:param ISA isa: The instruction set architecture for a QPU.
:return: A sequence of Gate objects encapsulating all gates compatible with the ISA.
:rtype: Sequence[Gate]
"""
gates = []
for q in isa.qubits:
if q.dead:
# TODO: dead qubits may in the future lead to some implicit re-indexing
continue # depends on [control=['if'], data=[]]
if q.type in ['Xhalves']:
gates.extend([Gate('I', [], [unpack_qubit(q.id)]), Gate('RX', [np.pi / 2], [unpack_qubit(q.id)]), Gate('RX', [-np.pi / 2], [unpack_qubit(q.id)]), Gate('RX', [np.pi], [unpack_qubit(q.id)]), Gate('RX', [-np.pi], [unpack_qubit(q.id)]), Gate('RZ', [THETA], [unpack_qubit(q.id)])]) # depends on [control=['if'], data=[]]
else: # pragma no coverage
raise ValueError('Unknown qubit type: {}'.format(q.type)) # depends on [control=['for'], data=['q']]
for e in isa.edges:
if e.dead:
continue # depends on [control=['if'], data=[]]
targets = [unpack_qubit(t) for t in e.targets]
if e.type in ['CZ', 'ISWAP']:
gates.append(Gate(e.type, [], targets))
gates.append(Gate(e.type, [], targets[::-1])) # depends on [control=['if'], data=[]]
elif e.type in ['CPHASE']:
gates.append(Gate(e.type, [THETA], targets))
gates.append(Gate(e.type, [THETA], targets[::-1])) # depends on [control=['if'], data=[]]
else: # pragma no coverage
raise ValueError('Unknown edge type: {}'.format(e.type)) # depends on [control=['for'], data=['e']]
return gates
|
def new_edge(self, node_a, node_b, cost=1):
"""Adds a new, undirected edge between node_a and node_b with a cost.
Returns the edge id of the new edge."""
edge_id = super(UndirectedGraph, self).new_edge(node_a, node_b, cost)
self.nodes[node_b]['edges'].append(edge_id)
return edge_id
|
def function[new_edge, parameter[self, node_a, node_b, cost]]:
constant[Adds a new, undirected edge between node_a and node_b with a cost.
Returns the edge id of the new edge.]
variable[edge_id] assign[=] call[call[name[super], parameter[name[UndirectedGraph], name[self]]].new_edge, parameter[name[node_a], name[node_b], name[cost]]]
call[call[call[name[self].nodes][name[node_b]]][constant[edges]].append, parameter[name[edge_id]]]
return[name[edge_id]]
|
keyword[def] identifier[new_edge] ( identifier[self] , identifier[node_a] , identifier[node_b] , identifier[cost] = literal[int] ):
literal[string]
identifier[edge_id] = identifier[super] ( identifier[UndirectedGraph] , identifier[self] ). identifier[new_edge] ( identifier[node_a] , identifier[node_b] , identifier[cost] )
identifier[self] . identifier[nodes] [ identifier[node_b] ][ literal[string] ]. identifier[append] ( identifier[edge_id] )
keyword[return] identifier[edge_id]
|
def new_edge(self, node_a, node_b, cost=1):
"""Adds a new, undirected edge between node_a and node_b with a cost.
Returns the edge id of the new edge."""
edge_id = super(UndirectedGraph, self).new_edge(node_a, node_b, cost)
self.nodes[node_b]['edges'].append(edge_id)
return edge_id
|
def chk_statement(ctx, stmt, grammar, canonical=False):
"""Validate `stmt` according to `grammar`.
Marks each statement in the hierearchy with stmt.is_grammatically_valid,
which is a boolean.
Return True if stmt is valid, False otherwise.
"""
n = len(ctx.errors)
if canonical == True:
canspec = grammar
else:
canspec = []
_chk_stmts(ctx, stmt.pos, [stmt], None, (grammar, canspec), canonical)
return n == len(ctx.errors)
|
def function[chk_statement, parameter[ctx, stmt, grammar, canonical]]:
constant[Validate `stmt` according to `grammar`.
Marks each statement in the hierearchy with stmt.is_grammatically_valid,
which is a boolean.
Return True if stmt is valid, False otherwise.
]
variable[n] assign[=] call[name[len], parameter[name[ctx].errors]]
if compare[name[canonical] equal[==] constant[True]] begin[:]
variable[canspec] assign[=] name[grammar]
call[name[_chk_stmts], parameter[name[ctx], name[stmt].pos, list[[<ast.Name object at 0x7da18eb55150>]], constant[None], tuple[[<ast.Name object at 0x7da18eb55c90>, <ast.Name object at 0x7da18eb56680>]], name[canonical]]]
return[compare[name[n] equal[==] call[name[len], parameter[name[ctx].errors]]]]
|
keyword[def] identifier[chk_statement] ( identifier[ctx] , identifier[stmt] , identifier[grammar] , identifier[canonical] = keyword[False] ):
literal[string]
identifier[n] = identifier[len] ( identifier[ctx] . identifier[errors] )
keyword[if] identifier[canonical] == keyword[True] :
identifier[canspec] = identifier[grammar]
keyword[else] :
identifier[canspec] =[]
identifier[_chk_stmts] ( identifier[ctx] , identifier[stmt] . identifier[pos] ,[ identifier[stmt] ], keyword[None] ,( identifier[grammar] , identifier[canspec] ), identifier[canonical] )
keyword[return] identifier[n] == identifier[len] ( identifier[ctx] . identifier[errors] )
|
def chk_statement(ctx, stmt, grammar, canonical=False):
"""Validate `stmt` according to `grammar`.
Marks each statement in the hierearchy with stmt.is_grammatically_valid,
which is a boolean.
Return True if stmt is valid, False otherwise.
"""
n = len(ctx.errors)
if canonical == True:
canspec = grammar # depends on [control=['if'], data=[]]
else:
canspec = []
_chk_stmts(ctx, stmt.pos, [stmt], None, (grammar, canspec), canonical)
return n == len(ctx.errors)
|
def accept(self):
"""Do PetaBencana download and display it in QGIS.
.. versionadded: 3.3
"""
self.save_state()
try:
self.require_directory()
except CanceledImportDialogError:
return
QgsApplication.instance().setOverrideCursor(
QtGui.QCursor(QtCore.Qt.WaitCursor)
)
source = self.define_url()
# save the file as json first
name = 'jakarta_flood.json'
output_directory = self.output_directory.text()
output_prefix = self.filename_prefix.text()
overwrite = self.overwrite_flag.isChecked()
date_stamp_flag = self.include_date_flag.isChecked()
output_base_file_path = self.get_output_base_path(
output_directory,
output_prefix,
date_stamp_flag,
name,
overwrite)
title = self.tr("Can't access API")
try:
self.download(source, output_base_file_path)
# Open downloaded file as QgsMapLayer
options = QgsVectorLayer.LayerOptions(False)
layer = QgsVectorLayer(
output_base_file_path, 'flood', 'ogr', options)
except Exception as e:
disable_busy_cursor()
QMessageBox.critical(self, title, str(e))
return
self.time_stamp = time.strftime('%d-%b-%Y %H:%M:%S')
# Now save as shp
name = 'jakarta_flood.shp'
output_base_file_path = self.get_output_base_path(
output_directory,
output_prefix,
date_stamp_flag,
name,
overwrite)
QgsVectorFileWriter.writeAsVectorFormat(
layer,
output_base_file_path,
'CP1250',
QgsCoordinateTransform(),
'ESRI Shapefile')
# Get rid of the GeoJSON layer and rather use local shp
del layer
self.copy_style(output_base_file_path)
self.copy_keywords(output_base_file_path)
layer = self.add_flooded_field(output_base_file_path)
# check if the layer has feature or not
if layer.featureCount() <= 0:
city = self.city_combo_box.currentText()
message = self.tr(
'There are no floods data available on {city} '
'at this time.').format(city=city)
display_warning_message_box(
self,
self.tr('No data'),
message)
disable_busy_cursor()
else:
# add the layer to the map
project = QgsProject.instance()
project.addMapLayer(layer)
disable_busy_cursor()
self.done(QDialog.Accepted)
|
def function[accept, parameter[self]]:
constant[Do PetaBencana download and display it in QGIS.
.. versionadded: 3.3
]
call[name[self].save_state, parameter[]]
<ast.Try object at 0x7da1b0b0a350>
call[call[name[QgsApplication].instance, parameter[]].setOverrideCursor, parameter[call[name[QtGui].QCursor, parameter[name[QtCore].Qt.WaitCursor]]]]
variable[source] assign[=] call[name[self].define_url, parameter[]]
variable[name] assign[=] constant[jakarta_flood.json]
variable[output_directory] assign[=] call[name[self].output_directory.text, parameter[]]
variable[output_prefix] assign[=] call[name[self].filename_prefix.text, parameter[]]
variable[overwrite] assign[=] call[name[self].overwrite_flag.isChecked, parameter[]]
variable[date_stamp_flag] assign[=] call[name[self].include_date_flag.isChecked, parameter[]]
variable[output_base_file_path] assign[=] call[name[self].get_output_base_path, parameter[name[output_directory], name[output_prefix], name[date_stamp_flag], name[name], name[overwrite]]]
variable[title] assign[=] call[name[self].tr, parameter[constant[Can't access API]]]
<ast.Try object at 0x7da1b0c47a30>
name[self].time_stamp assign[=] call[name[time].strftime, parameter[constant[%d-%b-%Y %H:%M:%S]]]
variable[name] assign[=] constant[jakarta_flood.shp]
variable[output_base_file_path] assign[=] call[name[self].get_output_base_path, parameter[name[output_directory], name[output_prefix], name[date_stamp_flag], name[name], name[overwrite]]]
call[name[QgsVectorFileWriter].writeAsVectorFormat, parameter[name[layer], name[output_base_file_path], constant[CP1250], call[name[QgsCoordinateTransform], parameter[]], constant[ESRI Shapefile]]]
<ast.Delete object at 0x7da1b0c44460>
call[name[self].copy_style, parameter[name[output_base_file_path]]]
call[name[self].copy_keywords, parameter[name[output_base_file_path]]]
variable[layer] assign[=] call[name[self].add_flooded_field, parameter[name[output_base_file_path]]]
if compare[call[name[layer].featureCount, parameter[]] less_or_equal[<=] constant[0]] begin[:]
variable[city] assign[=] call[name[self].city_combo_box.currentText, parameter[]]
variable[message] assign[=] call[call[name[self].tr, parameter[constant[There are no floods data available on {city} at this time.]]].format, parameter[]]
call[name[display_warning_message_box], parameter[name[self], call[name[self].tr, parameter[constant[No data]]], name[message]]]
call[name[disable_busy_cursor], parameter[]]
|
keyword[def] identifier[accept] ( identifier[self] ):
literal[string]
identifier[self] . identifier[save_state] ()
keyword[try] :
identifier[self] . identifier[require_directory] ()
keyword[except] identifier[CanceledImportDialogError] :
keyword[return]
identifier[QgsApplication] . identifier[instance] (). identifier[setOverrideCursor] (
identifier[QtGui] . identifier[QCursor] ( identifier[QtCore] . identifier[Qt] . identifier[WaitCursor] )
)
identifier[source] = identifier[self] . identifier[define_url] ()
identifier[name] = literal[string]
identifier[output_directory] = identifier[self] . identifier[output_directory] . identifier[text] ()
identifier[output_prefix] = identifier[self] . identifier[filename_prefix] . identifier[text] ()
identifier[overwrite] = identifier[self] . identifier[overwrite_flag] . identifier[isChecked] ()
identifier[date_stamp_flag] = identifier[self] . identifier[include_date_flag] . identifier[isChecked] ()
identifier[output_base_file_path] = identifier[self] . identifier[get_output_base_path] (
identifier[output_directory] ,
identifier[output_prefix] ,
identifier[date_stamp_flag] ,
identifier[name] ,
identifier[overwrite] )
identifier[title] = identifier[self] . identifier[tr] ( literal[string] )
keyword[try] :
identifier[self] . identifier[download] ( identifier[source] , identifier[output_base_file_path] )
identifier[options] = identifier[QgsVectorLayer] . identifier[LayerOptions] ( keyword[False] )
identifier[layer] = identifier[QgsVectorLayer] (
identifier[output_base_file_path] , literal[string] , literal[string] , identifier[options] )
keyword[except] identifier[Exception] keyword[as] identifier[e] :
identifier[disable_busy_cursor] ()
identifier[QMessageBox] . identifier[critical] ( identifier[self] , identifier[title] , identifier[str] ( identifier[e] ))
keyword[return]
identifier[self] . identifier[time_stamp] = identifier[time] . identifier[strftime] ( literal[string] )
identifier[name] = literal[string]
identifier[output_base_file_path] = identifier[self] . identifier[get_output_base_path] (
identifier[output_directory] ,
identifier[output_prefix] ,
identifier[date_stamp_flag] ,
identifier[name] ,
identifier[overwrite] )
identifier[QgsVectorFileWriter] . identifier[writeAsVectorFormat] (
identifier[layer] ,
identifier[output_base_file_path] ,
literal[string] ,
identifier[QgsCoordinateTransform] (),
literal[string] )
keyword[del] identifier[layer]
identifier[self] . identifier[copy_style] ( identifier[output_base_file_path] )
identifier[self] . identifier[copy_keywords] ( identifier[output_base_file_path] )
identifier[layer] = identifier[self] . identifier[add_flooded_field] ( identifier[output_base_file_path] )
keyword[if] identifier[layer] . identifier[featureCount] ()<= literal[int] :
identifier[city] = identifier[self] . identifier[city_combo_box] . identifier[currentText] ()
identifier[message] = identifier[self] . identifier[tr] (
literal[string]
literal[string] ). identifier[format] ( identifier[city] = identifier[city] )
identifier[display_warning_message_box] (
identifier[self] ,
identifier[self] . identifier[tr] ( literal[string] ),
identifier[message] )
identifier[disable_busy_cursor] ()
keyword[else] :
identifier[project] = identifier[QgsProject] . identifier[instance] ()
identifier[project] . identifier[addMapLayer] ( identifier[layer] )
identifier[disable_busy_cursor] ()
identifier[self] . identifier[done] ( identifier[QDialog] . identifier[Accepted] )
|
def accept(self):
"""Do PetaBencana download and display it in QGIS.
.. versionadded: 3.3
"""
self.save_state()
try:
self.require_directory() # depends on [control=['try'], data=[]]
except CanceledImportDialogError:
return # depends on [control=['except'], data=[]]
QgsApplication.instance().setOverrideCursor(QtGui.QCursor(QtCore.Qt.WaitCursor))
source = self.define_url()
# save the file as json first
name = 'jakarta_flood.json'
output_directory = self.output_directory.text()
output_prefix = self.filename_prefix.text()
overwrite = self.overwrite_flag.isChecked()
date_stamp_flag = self.include_date_flag.isChecked()
output_base_file_path = self.get_output_base_path(output_directory, output_prefix, date_stamp_flag, name, overwrite)
title = self.tr("Can't access API")
try:
self.download(source, output_base_file_path)
# Open downloaded file as QgsMapLayer
options = QgsVectorLayer.LayerOptions(False)
layer = QgsVectorLayer(output_base_file_path, 'flood', 'ogr', options) # depends on [control=['try'], data=[]]
except Exception as e:
disable_busy_cursor()
QMessageBox.critical(self, title, str(e))
return # depends on [control=['except'], data=['e']]
self.time_stamp = time.strftime('%d-%b-%Y %H:%M:%S')
# Now save as shp
name = 'jakarta_flood.shp'
output_base_file_path = self.get_output_base_path(output_directory, output_prefix, date_stamp_flag, name, overwrite)
QgsVectorFileWriter.writeAsVectorFormat(layer, output_base_file_path, 'CP1250', QgsCoordinateTransform(), 'ESRI Shapefile')
# Get rid of the GeoJSON layer and rather use local shp
del layer
self.copy_style(output_base_file_path)
self.copy_keywords(output_base_file_path)
layer = self.add_flooded_field(output_base_file_path)
# check if the layer has feature or not
if layer.featureCount() <= 0:
city = self.city_combo_box.currentText()
message = self.tr('There are no floods data available on {city} at this time.').format(city=city)
display_warning_message_box(self, self.tr('No data'), message)
disable_busy_cursor() # depends on [control=['if'], data=[]]
else:
# add the layer to the map
project = QgsProject.instance()
project.addMapLayer(layer)
disable_busy_cursor()
self.done(QDialog.Accepted)
|
def authenticated(function):
"""Re-authenticate if session expired."""
def wrapped(*args):
"""Wrap function."""
try:
return function(*args)
except FedexError:
_login(*args)
return function(*args)
return wrapped
|
def function[authenticated, parameter[function]]:
constant[Re-authenticate if session expired.]
def function[wrapped, parameter[]]:
constant[Wrap function.]
<ast.Try object at 0x7da18f58ee90>
return[name[wrapped]]
|
keyword[def] identifier[authenticated] ( identifier[function] ):
literal[string]
keyword[def] identifier[wrapped] (* identifier[args] ):
literal[string]
keyword[try] :
keyword[return] identifier[function] (* identifier[args] )
keyword[except] identifier[FedexError] :
identifier[_login] (* identifier[args] )
keyword[return] identifier[function] (* identifier[args] )
keyword[return] identifier[wrapped]
|
def authenticated(function):
"""Re-authenticate if session expired."""
def wrapped(*args):
"""Wrap function."""
try:
return function(*args) # depends on [control=['try'], data=[]]
except FedexError:
_login(*args)
return function(*args) # depends on [control=['except'], data=[]]
return wrapped
|
def naive(
year, month, day, hour=0, minute=0, second=0, microsecond=0
): # type: (int, int, int, int, int, int, int) -> DateTime
"""
Return a naive DateTime.
"""
return DateTime(year, month, day, hour, minute, second, microsecond)
|
def function[naive, parameter[year, month, day, hour, minute, second, microsecond]]:
constant[
Return a naive DateTime.
]
return[call[name[DateTime], parameter[name[year], name[month], name[day], name[hour], name[minute], name[second], name[microsecond]]]]
|
keyword[def] identifier[naive] (
identifier[year] , identifier[month] , identifier[day] , identifier[hour] = literal[int] , identifier[minute] = literal[int] , identifier[second] = literal[int] , identifier[microsecond] = literal[int]
):
literal[string]
keyword[return] identifier[DateTime] ( identifier[year] , identifier[month] , identifier[day] , identifier[hour] , identifier[minute] , identifier[second] , identifier[microsecond] )
|
def naive(year, month, day, hour=0, minute=0, second=0, microsecond=0): # type: (int, int, int, int, int, int, int) -> DateTime
'\n Return a naive DateTime.\n '
return DateTime(year, month, day, hour, minute, second, microsecond)
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.