code
stringlengths 75
104k
| code_sememe
stringlengths 47
309k
| token_type
stringlengths 215
214k
| code_dependency
stringlengths 75
155k
|
|---|---|---|---|
def get_layer_description_from_browser(self, category):
    """Obtain the description of the browser layer selected by user.

    :param category: The category of the layer to get the description.
        Expected values are 'hazard', 'exposure' or 'aggregation'.
    :type category: string

    :returns: Tuple of boolean and string. Boolean is true if layer is
        validated as compatible for current role (impact function and
        category) and false otherwise. String contains a description
        of the selected layer or an error message.
    :rtype: tuple

    :raises InaSAFEError: If ``category`` is none of the three
        recognised values above.
    """
    # Pick the browser tree view matching the requested category.
    if category == 'hazard':
        browser = self.tvBrowserHazard
    elif category == 'exposure':
        browser = self.tvBrowserExposure
    elif category == 'aggregation':
        browser = self.tvBrowserAggregation
    else:
        raise InaSAFEError
    index = browser.selectionModel().currentIndex()
    # NOTE(review): a QModelIndex instance is normally truthy even when
    # invalid, so this guard may only catch a literal None here —
    # confirm whether `not index.isValid()` was intended.
    if not index:
        return False, ''
    # Map the proxy model index to the source model index
    index = browser.model().mapToSource(index)
    item = browser.model().sourceModel().dataItem(index)
    if not item:
        return False, ''
    item_class_name = item.metaObject().className()
    # if not itemClassName.endswith('LayerItem'):
    if not item.type() == QgsDataItem.Layer:
        # A PostGIS root item with no children means no PostGIS
        # connection is configured yet; surface a hint for that case.
        if item_class_name == 'QgsPGRootItem' and not item.children():
            return False, create_postGIS_connection_first
        else:
            return False, ''
    # Only these browser item classes are supported as layer sources.
    if item_class_name not in [
        'QgsOgrLayerItem', 'QgsGdalLayerItem', 'QgsPGLayerItem',
        'QgsLayerItem', ]:
        return False, ''
    path = item.path()
    # File-backed items must still exist on disk (PostGIS items carry a
    # connection path instead, so they are not checked here).
    if item_class_name in ['QgsOgrLayerItem', 'QgsGdalLayerItem',
                           'QgsLayerItem'] and not os.path.exists(path):
        return False, ''
    # try to create the layer
    if item_class_name == 'QgsOgrLayerItem':
        layer = QgsVectorLayer(path, '', 'ogr')
    elif item_class_name == 'QgsPGLayerItem':
        uri = self.postgis_path_to_uri(path)
        if uri:
            layer = QgsVectorLayer(uri.uri(), uri.table(), 'postgres')
        else:
            layer = None
    else:
        layer = QgsRasterLayer(path, '', 'gdal')
    if not layer or not layer.isValid():
        return False, self.tr('Not a valid layer.')
    # Read the InaSAFE keywords; any of the listed errors simply means
    # the layer has no usable keywords, which is handled further below.
    try:
        keywords = self.keyword_io.read_keywords(layer)
        # Keywords without a layer_purpose are treated as absent.
        if 'layer_purpose' not in keywords:
            keywords = None
    except (HashNotFoundError,
            OperationalError,
            NoKeywordsFoundError,
            KeywordNotFoundError,
            InvalidParameterError,
            UnsupportedProviderError,
            MissingMetadata):
        keywords = None
    # set the layer name for further use in the step_fc_summary
    if keywords:
        # setLayerName() was replaced by setName() as of QGIS 2.18.
        if qgis_version() >= 21800:
            layer.setName(keywords.get('title'))
        else:
            layer.setLayerName(keywords.get('title'))
    if not self.parent.is_layer_compatible(layer, category, keywords):
        label_text = '%s<br/>%s' % (
            self.tr(
                'This layer\'s keywords or type are not suitable:'),
            self.unsuitable_layer_description_html(
                layer, category, keywords))
        return False, label_text
    # set the current layer (e.g. for the keyword creation sub-thread
    # or for adding the layer to mapCanvas)
    self.parent.layer = layer
    if category == 'hazard':
        self.parent.hazard_layer = layer
    elif category == 'exposure':
        self.parent.exposure_layer = layer
    else:
        self.parent.aggregation_layer = layer
    # Check if the layer is keywordless
    if keywords and 'keyword_version' in keywords:
        kw_ver = str(keywords['keyword_version'])
        self.parent.is_selected_layer_keywordless = (
            not is_keyword_version_supported(kw_ver))
    else:
        self.parent.is_selected_layer_keywordless = True
    desc = layer_description_html(layer, keywords)
    return True, desc
|
def function[get_layer_description_from_browser, parameter[self, category]]:
constant[Obtain the description of the browser layer selected by user.
:param category: The category of the layer to get the description.
:type category: string
:returns: Tuple of boolean and string. Boolean is true if layer is
validated as compatible for current role (impact function and
category) and false otherwise. String contains a description
of the selected layer or an error message.
:rtype: tuple
]
if compare[name[category] equal[==] constant[hazard]] begin[:]
variable[browser] assign[=] name[self].tvBrowserHazard
variable[index] assign[=] call[call[name[browser].selectionModel, parameter[]].currentIndex, parameter[]]
if <ast.UnaryOp object at 0x7da204623790> begin[:]
return[tuple[[<ast.Constant object at 0x7da2046228c0>, <ast.Constant object at 0x7da204623760>]]]
variable[index] assign[=] call[call[name[browser].model, parameter[]].mapToSource, parameter[name[index]]]
variable[item] assign[=] call[call[call[name[browser].model, parameter[]].sourceModel, parameter[]].dataItem, parameter[name[index]]]
if <ast.UnaryOp object at 0x7da204623550> begin[:]
return[tuple[[<ast.Constant object at 0x7da204621870>, <ast.Constant object at 0x7da204621e10>]]]
variable[item_class_name] assign[=] call[call[name[item].metaObject, parameter[]].className, parameter[]]
if <ast.UnaryOp object at 0x7da2046224d0> begin[:]
if <ast.BoolOp object at 0x7da204623730> begin[:]
return[tuple[[<ast.Constant object at 0x7da204623130>, <ast.Name object at 0x7da2046202e0>]]]
if compare[name[item_class_name] <ast.NotIn object at 0x7da2590d7190> list[[<ast.Constant object at 0x7da2046226e0>, <ast.Constant object at 0x7da204620310>, <ast.Constant object at 0x7da204622aa0>, <ast.Constant object at 0x7da204622560>]]] begin[:]
return[tuple[[<ast.Constant object at 0x7da2046220b0>, <ast.Constant object at 0x7da2046217b0>]]]
variable[path] assign[=] call[name[item].path, parameter[]]
if <ast.BoolOp object at 0x7da204621ed0> begin[:]
return[tuple[[<ast.Constant object at 0x7da2046238b0>, <ast.Constant object at 0x7da204621f30>]]]
if compare[name[item_class_name] equal[==] constant[QgsOgrLayerItem]] begin[:]
variable[layer] assign[=] call[name[QgsVectorLayer], parameter[name[path], constant[], constant[ogr]]]
if <ast.BoolOp object at 0x7da204622e00> begin[:]
return[tuple[[<ast.Constant object at 0x7da2046218d0>, <ast.Call object at 0x7da204622650>]]]
<ast.Try object at 0x7da2046226b0>
if name[keywords] begin[:]
if compare[call[name[qgis_version], parameter[]] greater_or_equal[>=] constant[21800]] begin[:]
call[name[layer].setName, parameter[call[name[keywords].get, parameter[constant[title]]]]]
if <ast.UnaryOp object at 0x7da2046237c0> begin[:]
variable[label_text] assign[=] binary_operation[constant[%s<br/>%s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Call object at 0x7da204621ff0>, <ast.Call object at 0x7da2046200a0>]]]
return[tuple[[<ast.Constant object at 0x7da204620a60>, <ast.Name object at 0x7da204621240>]]]
name[self].parent.layer assign[=] name[layer]
if compare[name[category] equal[==] constant[hazard]] begin[:]
name[self].parent.hazard_layer assign[=] name[layer]
if <ast.BoolOp object at 0x7da1b0c3fd60> begin[:]
variable[kw_ver] assign[=] call[name[str], parameter[call[name[keywords]][constant[keyword_version]]]]
name[self].parent.is_selected_layer_keywordless assign[=] <ast.UnaryOp object at 0x7da1b0c3f3d0>
variable[desc] assign[=] call[name[layer_description_html], parameter[name[layer], name[keywords]]]
return[tuple[[<ast.Constant object at 0x7da1b0c3fdc0>, <ast.Name object at 0x7da1b0c3ceb0>]]]
|
keyword[def] identifier[get_layer_description_from_browser] ( identifier[self] , identifier[category] ):
literal[string]
keyword[if] identifier[category] == literal[string] :
identifier[browser] = identifier[self] . identifier[tvBrowserHazard]
keyword[elif] identifier[category] == literal[string] :
identifier[browser] = identifier[self] . identifier[tvBrowserExposure]
keyword[elif] identifier[category] == literal[string] :
identifier[browser] = identifier[self] . identifier[tvBrowserAggregation]
keyword[else] :
keyword[raise] identifier[InaSAFEError]
identifier[index] = identifier[browser] . identifier[selectionModel] (). identifier[currentIndex] ()
keyword[if] keyword[not] identifier[index] :
keyword[return] keyword[False] , literal[string]
identifier[index] = identifier[browser] . identifier[model] (). identifier[mapToSource] ( identifier[index] )
identifier[item] = identifier[browser] . identifier[model] (). identifier[sourceModel] (). identifier[dataItem] ( identifier[index] )
keyword[if] keyword[not] identifier[item] :
keyword[return] keyword[False] , literal[string]
identifier[item_class_name] = identifier[item] . identifier[metaObject] (). identifier[className] ()
keyword[if] keyword[not] identifier[item] . identifier[type] ()== identifier[QgsDataItem] . identifier[Layer] :
keyword[if] identifier[item_class_name] == literal[string] keyword[and] keyword[not] identifier[item] . identifier[children] ():
keyword[return] keyword[False] , identifier[create_postGIS_connection_first]
keyword[else] :
keyword[return] keyword[False] , literal[string]
keyword[if] identifier[item_class_name] keyword[not] keyword[in] [
literal[string] , literal[string] , literal[string] ,
literal[string] ,]:
keyword[return] keyword[False] , literal[string]
identifier[path] = identifier[item] . identifier[path] ()
keyword[if] identifier[item_class_name] keyword[in] [ literal[string] , literal[string] ,
literal[string] ] keyword[and] keyword[not] identifier[os] . identifier[path] . identifier[exists] ( identifier[path] ):
keyword[return] keyword[False] , literal[string]
keyword[if] identifier[item_class_name] == literal[string] :
identifier[layer] = identifier[QgsVectorLayer] ( identifier[path] , literal[string] , literal[string] )
keyword[elif] identifier[item_class_name] == literal[string] :
identifier[uri] = identifier[self] . identifier[postgis_path_to_uri] ( identifier[path] )
keyword[if] identifier[uri] :
identifier[layer] = identifier[QgsVectorLayer] ( identifier[uri] . identifier[uri] (), identifier[uri] . identifier[table] (), literal[string] )
keyword[else] :
identifier[layer] = keyword[None]
keyword[else] :
identifier[layer] = identifier[QgsRasterLayer] ( identifier[path] , literal[string] , literal[string] )
keyword[if] keyword[not] identifier[layer] keyword[or] keyword[not] identifier[layer] . identifier[isValid] ():
keyword[return] keyword[False] , identifier[self] . identifier[tr] ( literal[string] )
keyword[try] :
identifier[keywords] = identifier[self] . identifier[keyword_io] . identifier[read_keywords] ( identifier[layer] )
keyword[if] literal[string] keyword[not] keyword[in] identifier[keywords] :
identifier[keywords] = keyword[None]
keyword[except] ( identifier[HashNotFoundError] ,
identifier[OperationalError] ,
identifier[NoKeywordsFoundError] ,
identifier[KeywordNotFoundError] ,
identifier[InvalidParameterError] ,
identifier[UnsupportedProviderError] ,
identifier[MissingMetadata] ):
identifier[keywords] = keyword[None]
keyword[if] identifier[keywords] :
keyword[if] identifier[qgis_version] ()>= literal[int] :
identifier[layer] . identifier[setName] ( identifier[keywords] . identifier[get] ( literal[string] ))
keyword[else] :
identifier[layer] . identifier[setLayerName] ( identifier[keywords] . identifier[get] ( literal[string] ))
keyword[if] keyword[not] identifier[self] . identifier[parent] . identifier[is_layer_compatible] ( identifier[layer] , identifier[category] , identifier[keywords] ):
identifier[label_text] = literal[string] %(
identifier[self] . identifier[tr] (
literal[string] ),
identifier[self] . identifier[unsuitable_layer_description_html] (
identifier[layer] , identifier[category] , identifier[keywords] ))
keyword[return] keyword[False] , identifier[label_text]
identifier[self] . identifier[parent] . identifier[layer] = identifier[layer]
keyword[if] identifier[category] == literal[string] :
identifier[self] . identifier[parent] . identifier[hazard_layer] = identifier[layer]
keyword[elif] identifier[category] == literal[string] :
identifier[self] . identifier[parent] . identifier[exposure_layer] = identifier[layer]
keyword[else] :
identifier[self] . identifier[parent] . identifier[aggregation_layer] = identifier[layer]
keyword[if] identifier[keywords] keyword[and] literal[string] keyword[in] identifier[keywords] :
identifier[kw_ver] = identifier[str] ( identifier[keywords] [ literal[string] ])
identifier[self] . identifier[parent] . identifier[is_selected_layer_keywordless] =(
keyword[not] identifier[is_keyword_version_supported] ( identifier[kw_ver] ))
keyword[else] :
identifier[self] . identifier[parent] . identifier[is_selected_layer_keywordless] = keyword[True]
identifier[desc] = identifier[layer_description_html] ( identifier[layer] , identifier[keywords] )
keyword[return] keyword[True] , identifier[desc]
|
def get_layer_description_from_browser(self, category):
"""Obtain the description of the browser layer selected by user.
:param category: The category of the layer to get the description.
:type category: string
:returns: Tuple of boolean and string. Boolean is true if layer is
validated as compatible for current role (impact function and
category) and false otherwise. String contains a description
of the selected layer or an error message.
:rtype: tuple
"""
if category == 'hazard':
browser = self.tvBrowserHazard # depends on [control=['if'], data=[]]
elif category == 'exposure':
browser = self.tvBrowserExposure # depends on [control=['if'], data=[]]
elif category == 'aggregation':
browser = self.tvBrowserAggregation # depends on [control=['if'], data=[]]
else:
raise InaSAFEError
index = browser.selectionModel().currentIndex()
if not index:
return (False, '') # depends on [control=['if'], data=[]]
# Map the proxy model index to the source model index
index = browser.model().mapToSource(index)
item = browser.model().sourceModel().dataItem(index)
if not item:
return (False, '') # depends on [control=['if'], data=[]]
item_class_name = item.metaObject().className()
# if not itemClassName.endswith('LayerItem'):
if not item.type() == QgsDataItem.Layer:
if item_class_name == 'QgsPGRootItem' and (not item.children()):
return (False, create_postGIS_connection_first) # depends on [control=['if'], data=[]]
else:
return (False, '') # depends on [control=['if'], data=[]]
if item_class_name not in ['QgsOgrLayerItem', 'QgsGdalLayerItem', 'QgsPGLayerItem', 'QgsLayerItem']:
return (False, '') # depends on [control=['if'], data=[]]
path = item.path()
if item_class_name in ['QgsOgrLayerItem', 'QgsGdalLayerItem', 'QgsLayerItem'] and (not os.path.exists(path)):
return (False, '') # depends on [control=['if'], data=[]]
# try to create the layer
if item_class_name == 'QgsOgrLayerItem':
layer = QgsVectorLayer(path, '', 'ogr') # depends on [control=['if'], data=[]]
elif item_class_name == 'QgsPGLayerItem':
uri = self.postgis_path_to_uri(path)
if uri:
layer = QgsVectorLayer(uri.uri(), uri.table(), 'postgres') # depends on [control=['if'], data=[]]
else:
layer = None # depends on [control=['if'], data=[]]
else:
layer = QgsRasterLayer(path, '', 'gdal')
if not layer or not layer.isValid():
return (False, self.tr('Not a valid layer.')) # depends on [control=['if'], data=[]]
try:
keywords = self.keyword_io.read_keywords(layer)
if 'layer_purpose' not in keywords:
keywords = None # depends on [control=['if'], data=['keywords']] # depends on [control=['try'], data=[]]
except (HashNotFoundError, OperationalError, NoKeywordsFoundError, KeywordNotFoundError, InvalidParameterError, UnsupportedProviderError, MissingMetadata):
keywords = None # depends on [control=['except'], data=[]]
# set the layer name for further use in the step_fc_summary
if keywords:
if qgis_version() >= 21800:
layer.setName(keywords.get('title')) # depends on [control=['if'], data=[]]
else:
layer.setLayerName(keywords.get('title')) # depends on [control=['if'], data=[]]
if not self.parent.is_layer_compatible(layer, category, keywords):
label_text = '%s<br/>%s' % (self.tr("This layer's keywords or type are not suitable:"), self.unsuitable_layer_description_html(layer, category, keywords))
return (False, label_text) # depends on [control=['if'], data=[]]
# set the current layer (e.g. for the keyword creation sub-thread
# or for adding the layer to mapCanvas)
self.parent.layer = layer
if category == 'hazard':
self.parent.hazard_layer = layer # depends on [control=['if'], data=[]]
elif category == 'exposure':
self.parent.exposure_layer = layer # depends on [control=['if'], data=[]]
else:
self.parent.aggregation_layer = layer
# Check if the layer is keywordless
if keywords and 'keyword_version' in keywords:
kw_ver = str(keywords['keyword_version'])
self.parent.is_selected_layer_keywordless = not is_keyword_version_supported(kw_ver) # depends on [control=['if'], data=[]]
else:
self.parent.is_selected_layer_keywordless = True
desc = layer_description_html(layer, keywords)
return (True, desc)
|
def on_drag_data_received(self, widget, context, x, y, data, info, time):
    """Receives state_id from LibraryTree and moves the state to the position of the mouse

    Signature matches the GTK "drag-data-received" callback convention;
    only ``x``, ``y`` and ``data`` are used here.

    :param widget: Widget the drop landed on (unused)
    :param context: Drag context (unused)
    :param x: Integer: x-position of mouse
    :param y: Integer: y-position of mouse
    :param data: SelectionData: contains state_id
    :param info: Target info of the drop (unused)
    :param time: Timestamp of the drop event (unused)
    """
    state_id_insert = data.get_text()
    parent_m = self.model.selection.get_selected_state()
    # Only container states can hold child states; ignore the drop otherwise.
    if not isinstance(parent_m, ContainerStateModel):
        return
    state_v = self.canvas.get_view_for_model(parent_m.states[state_id_insert])
    # Current relative position of the state, from its meta data.
    pos_start = state_v.model.get_meta_data_editor()['rel_pos']
    motion = InMotion(state_v, self.view.editor)
    # Convert the start position into view coordinates, then replay a
    # move of the state view to the mouse drop point.
    motion.start_move(self.view.editor.get_matrix_i2v(state_v).transform_point(pos_start[0], pos_start[1]))
    motion.move((x, y))
    motion.stop_move()
    # Persist the new position, redraw, and record the meta-data change.
    state_v.model.set_meta_data_editor('rel_pos', motion.item.position)
    self.canvas.wait_for_update(trigger_update=True)
    self._meta_data_changed(None, state_v.model, 'append_to_last_change', True)
|
def function[on_drag_data_received, parameter[self, widget, context, x, y, data, info, time]]:
constant[Receives state_id from LibraryTree and moves the state to the position of the mouse
:param widget:
:param context:
:param x: Integer: x-position of mouse
:param y: Integer: y-position of mouse
:param data: SelectionData: contains state_id
:param info:
:param time:
]
variable[state_id_insert] assign[=] call[name[data].get_text, parameter[]]
variable[parent_m] assign[=] call[name[self].model.selection.get_selected_state, parameter[]]
if <ast.UnaryOp object at 0x7da1b1acb3a0> begin[:]
return[None]
variable[state_v] assign[=] call[name[self].canvas.get_view_for_model, parameter[call[name[parent_m].states][name[state_id_insert]]]]
variable[pos_start] assign[=] call[call[name[state_v].model.get_meta_data_editor, parameter[]]][constant[rel_pos]]
variable[motion] assign[=] call[name[InMotion], parameter[name[state_v], name[self].view.editor]]
call[name[motion].start_move, parameter[call[call[name[self].view.editor.get_matrix_i2v, parameter[name[state_v]]].transform_point, parameter[call[name[pos_start]][constant[0]], call[name[pos_start]][constant[1]]]]]]
call[name[motion].move, parameter[tuple[[<ast.Name object at 0x7da1b1c7e110>, <ast.Name object at 0x7da1b1c7f7f0>]]]]
call[name[motion].stop_move, parameter[]]
call[name[state_v].model.set_meta_data_editor, parameter[constant[rel_pos], name[motion].item.position]]
call[name[self].canvas.wait_for_update, parameter[]]
call[name[self]._meta_data_changed, parameter[constant[None], name[state_v].model, constant[append_to_last_change], constant[True]]]
|
keyword[def] identifier[on_drag_data_received] ( identifier[self] , identifier[widget] , identifier[context] , identifier[x] , identifier[y] , identifier[data] , identifier[info] , identifier[time] ):
literal[string]
identifier[state_id_insert] = identifier[data] . identifier[get_text] ()
identifier[parent_m] = identifier[self] . identifier[model] . identifier[selection] . identifier[get_selected_state] ()
keyword[if] keyword[not] identifier[isinstance] ( identifier[parent_m] , identifier[ContainerStateModel] ):
keyword[return]
identifier[state_v] = identifier[self] . identifier[canvas] . identifier[get_view_for_model] ( identifier[parent_m] . identifier[states] [ identifier[state_id_insert] ])
identifier[pos_start] = identifier[state_v] . identifier[model] . identifier[get_meta_data_editor] ()[ literal[string] ]
identifier[motion] = identifier[InMotion] ( identifier[state_v] , identifier[self] . identifier[view] . identifier[editor] )
identifier[motion] . identifier[start_move] ( identifier[self] . identifier[view] . identifier[editor] . identifier[get_matrix_i2v] ( identifier[state_v] ). identifier[transform_point] ( identifier[pos_start] [ literal[int] ], identifier[pos_start] [ literal[int] ]))
identifier[motion] . identifier[move] (( identifier[x] , identifier[y] ))
identifier[motion] . identifier[stop_move] ()
identifier[state_v] . identifier[model] . identifier[set_meta_data_editor] ( literal[string] , identifier[motion] . identifier[item] . identifier[position] )
identifier[self] . identifier[canvas] . identifier[wait_for_update] ( identifier[trigger_update] = keyword[True] )
identifier[self] . identifier[_meta_data_changed] ( keyword[None] , identifier[state_v] . identifier[model] , literal[string] , keyword[True] )
|
def on_drag_data_received(self, widget, context, x, y, data, info, time):
"""Receives state_id from LibraryTree and moves the state to the position of the mouse
:param widget:
:param context:
:param x: Integer: x-position of mouse
:param y: Integer: y-position of mouse
:param data: SelectionData: contains state_id
:param info:
:param time:
"""
state_id_insert = data.get_text()
parent_m = self.model.selection.get_selected_state()
if not isinstance(parent_m, ContainerStateModel):
return # depends on [control=['if'], data=[]]
state_v = self.canvas.get_view_for_model(parent_m.states[state_id_insert])
pos_start = state_v.model.get_meta_data_editor()['rel_pos']
motion = InMotion(state_v, self.view.editor)
motion.start_move(self.view.editor.get_matrix_i2v(state_v).transform_point(pos_start[0], pos_start[1]))
motion.move((x, y))
motion.stop_move()
state_v.model.set_meta_data_editor('rel_pos', motion.item.position)
self.canvas.wait_for_update(trigger_update=True)
self._meta_data_changed(None, state_v.model, 'append_to_last_change', True)
|
def id(opts):
    '''
    Return a unique ID for this proxy minion. This ID MUST NOT CHANGE.
    If it changes while the proxy is running the salt-master will get
    really confused and may stop talking to this minion
    '''
    # Ask the proxied REST endpoint for its identifier.
    endpoint = opts['proxy']['url'] + 'id'
    response = salt.utils.http.query(endpoint, decode_type='json', decode=True)
    # Strip any non-ASCII characters from the returned id.
    return response['dict']['id'].encode('ascii', 'ignore')
|
def function[id, parameter[opts]]:
constant[
Return a unique ID for this proxy minion. This ID MUST NOT CHANGE.
If it changes while the proxy is running the salt-master will get
really confused and may stop talking to this minion
]
variable[r] assign[=] call[name[salt].utils.http.query, parameter[binary_operation[call[call[name[opts]][constant[proxy]]][constant[url]] + constant[id]]]]
return[call[call[call[name[r]][constant[dict]]][constant[id]].encode, parameter[constant[ascii], constant[ignore]]]]
|
keyword[def] identifier[id] ( identifier[opts] ):
literal[string]
identifier[r] = identifier[salt] . identifier[utils] . identifier[http] . identifier[query] ( identifier[opts] [ literal[string] ][ literal[string] ]+ literal[string] , identifier[decode_type] = literal[string] , identifier[decode] = keyword[True] )
keyword[return] identifier[r] [ literal[string] ][ literal[string] ]. identifier[encode] ( literal[string] , literal[string] )
|
def id(opts):
"""
Return a unique ID for this proxy minion. This ID MUST NOT CHANGE.
If it changes while the proxy is running the salt-master will get
really confused and may stop talking to this minion
"""
r = salt.utils.http.query(opts['proxy']['url'] + 'id', decode_type='json', decode=True)
return r['dict']['id'].encode('ascii', 'ignore')
|
def build_error_response(self, version, reason, message):
    """
    Build a simple ResponseMessage with a single error result.
    Args:
        version (ProtocolVersion): The protocol version the response
            should be addressed with.
        reason (ResultReason): An enumeration classifying the type of
            error occurred.
        message (str): A string providing additional information about
            the error.
    Returns:
        ResponseMessage: The simple ResponseMessage containing a
        single error result.
    """
    # Every error response reports a failed operation status.
    failed_status = contents.ResultStatus(
        enums.ResultStatus.OPERATION_FAILED
    )
    error_item = messages.ResponseBatchItem(
        result_status=failed_status,
        result_reason=contents.ResultReason(reason),
        result_message=contents.ResultMessage(message),
    )
    # Wrap the single error batch item into a full response message.
    return self._build_response(version, [error_item])
|
def function[build_error_response, parameter[self, version, reason, message]]:
constant[
Build a simple ResponseMessage with a single error result.
Args:
version (ProtocolVersion): The protocol version the response
should be addressed with.
reason (ResultReason): An enumeration classifying the type of
error occurred.
message (str): A string providing additional information about
the error.
Returns:
ResponseMessage: The simple ResponseMessage containing a
single error result.
]
variable[batch_item] assign[=] call[name[messages].ResponseBatchItem, parameter[]]
return[call[name[self]._build_response, parameter[name[version], list[[<ast.Name object at 0x7da18bc726b0>]]]]]
|
keyword[def] identifier[build_error_response] ( identifier[self] , identifier[version] , identifier[reason] , identifier[message] ):
literal[string]
identifier[batch_item] = identifier[messages] . identifier[ResponseBatchItem] (
identifier[result_status] = identifier[contents] . identifier[ResultStatus] (
identifier[enums] . identifier[ResultStatus] . identifier[OPERATION_FAILED]
),
identifier[result_reason] = identifier[contents] . identifier[ResultReason] ( identifier[reason] ),
identifier[result_message] = identifier[contents] . identifier[ResultMessage] ( identifier[message] )
)
keyword[return] identifier[self] . identifier[_build_response] ( identifier[version] ,[ identifier[batch_item] ])
|
def build_error_response(self, version, reason, message):
"""
Build a simple ResponseMessage with a single error result.
Args:
version (ProtocolVersion): The protocol version the response
should be addressed with.
reason (ResultReason): An enumeration classifying the type of
error occurred.
message (str): A string providing additional information about
the error.
Returns:
ResponseMessage: The simple ResponseMessage containing a
single error result.
"""
batch_item = messages.ResponseBatchItem(result_status=contents.ResultStatus(enums.ResultStatus.OPERATION_FAILED), result_reason=contents.ResultReason(reason), result_message=contents.ResultMessage(message))
return self._build_response(version, [batch_item])
|
def parse_source(self, filename):
    """
    Extract the statements from the given file, look for function calls
    `sass_processor(scss_file)` and compile the filename into CSS.

    :param filename: Path of the Python source file to scan for
        ``sass_processor(...)`` calls.
    """
    callvisitor = FuncCallVisitor('sass_processor')
    # Use a context manager so the file handle is closed deterministically
    # (the original leaked the handle returned by open()). Reading bytes
    # lets ast.parse honour any PEP 263 encoding declaration.
    with open(filename, 'rb') as source_file:
        tree = ast.parse(source_file.read())
    callvisitor.visit(tree)
    for sass_fileurl in callvisitor.sass_files:
        sass_filename = find_file(sass_fileurl)
        # Skip URLs that do not resolve to a file or were already handled.
        if not sass_filename or sass_filename in self.processed_files:
            continue
        if self.delete_files:
            self.delete_file(sass_filename, sass_fileurl)
        else:
            self.compile_sass(sass_filename, sass_fileurl)
|
def function[parse_source, parameter[self, filename]]:
constant[
Extract the statements from the given file, look for function calls
`sass_processor(scss_file)` and compile the filename into CSS.
]
variable[callvisitor] assign[=] call[name[FuncCallVisitor], parameter[constant[sass_processor]]]
variable[tree] assign[=] call[name[ast].parse, parameter[call[call[name[open], parameter[name[filename], constant[rb]]].read, parameter[]]]]
call[name[callvisitor].visit, parameter[name[tree]]]
for taget[name[sass_fileurl]] in starred[name[callvisitor].sass_files] begin[:]
variable[sass_filename] assign[=] call[name[find_file], parameter[name[sass_fileurl]]]
if <ast.BoolOp object at 0x7da20cabf820> begin[:]
continue
if name[self].delete_files begin[:]
call[name[self].delete_file, parameter[name[sass_filename], name[sass_fileurl]]]
|
keyword[def] identifier[parse_source] ( identifier[self] , identifier[filename] ):
literal[string]
identifier[callvisitor] = identifier[FuncCallVisitor] ( literal[string] )
identifier[tree] = identifier[ast] . identifier[parse] ( identifier[open] ( identifier[filename] , literal[string] ). identifier[read] ())
identifier[callvisitor] . identifier[visit] ( identifier[tree] )
keyword[for] identifier[sass_fileurl] keyword[in] identifier[callvisitor] . identifier[sass_files] :
identifier[sass_filename] = identifier[find_file] ( identifier[sass_fileurl] )
keyword[if] keyword[not] identifier[sass_filename] keyword[or] identifier[sass_filename] keyword[in] identifier[self] . identifier[processed_files] :
keyword[continue]
keyword[if] identifier[self] . identifier[delete_files] :
identifier[self] . identifier[delete_file] ( identifier[sass_filename] , identifier[sass_fileurl] )
keyword[else] :
identifier[self] . identifier[compile_sass] ( identifier[sass_filename] , identifier[sass_fileurl] )
|
def parse_source(self, filename):
"""
Extract the statements from the given file, look for function calls
`sass_processor(scss_file)` and compile the filename into CSS.
"""
callvisitor = FuncCallVisitor('sass_processor')
tree = ast.parse(open(filename, 'rb').read())
callvisitor.visit(tree)
for sass_fileurl in callvisitor.sass_files:
sass_filename = find_file(sass_fileurl)
if not sass_filename or sass_filename in self.processed_files:
continue # depends on [control=['if'], data=[]]
if self.delete_files:
self.delete_file(sass_filename, sass_fileurl) # depends on [control=['if'], data=[]]
else:
self.compile_sass(sass_filename, sass_fileurl) # depends on [control=['for'], data=['sass_fileurl']]
|
def run_parallel_map_providers_query(data, queue=None):
    '''
    This function will be called from another process when building the
    providers map.
    '''
    # Re-seed crypto state since we are running in a forked process.
    salt.utils.crypt.reinit_crypto()
    cloud = Cloud(data['opts'])
    alias = data['alias']
    driver = data['driver']
    query_fn = data['fun']
    try:
        provider_name = ':'.join([alias, driver])
        with salt.utils.context.func_globals_inject(
                cloud.clouds[query_fn],
                __active_provider_name__=provider_name):
            nodes = salt.utils.data.simple_types_filter(
                cloud.clouds[query_fn]()
            )
        return alias, driver, nodes
    except Exception as err:
        log.debug(
            'Failed to execute \'%s()\' while querying for running nodes: %s',
            data['fun'], err, exc_info_on_loglevel=logging.DEBUG
        )
        # Failed to communicate with the provider, don't list any nodes
        return alias, driver, ()
|
def function[run_parallel_map_providers_query, parameter[data, queue]]:
constant[
This function will be called from another process when building the
providers map.
]
call[name[salt].utils.crypt.reinit_crypto, parameter[]]
variable[cloud] assign[=] call[name[Cloud], parameter[call[name[data]][constant[opts]]]]
<ast.Try object at 0x7da1b20ece20>
|
keyword[def] identifier[run_parallel_map_providers_query] ( identifier[data] , identifier[queue] = keyword[None] ):
literal[string]
identifier[salt] . identifier[utils] . identifier[crypt] . identifier[reinit_crypto] ()
identifier[cloud] = identifier[Cloud] ( identifier[data] [ literal[string] ])
keyword[try] :
keyword[with] identifier[salt] . identifier[utils] . identifier[context] . identifier[func_globals_inject] (
identifier[cloud] . identifier[clouds] [ identifier[data] [ literal[string] ]],
identifier[__active_provider_name__] = literal[string] . identifier[join] ([
identifier[data] [ literal[string] ],
identifier[data] [ literal[string] ]
])
):
keyword[return] (
identifier[data] [ literal[string] ],
identifier[data] [ literal[string] ],
identifier[salt] . identifier[utils] . identifier[data] . identifier[simple_types_filter] (
identifier[cloud] . identifier[clouds] [ identifier[data] [ literal[string] ]]()
)
)
keyword[except] identifier[Exception] keyword[as] identifier[err] :
identifier[log] . identifier[debug] (
literal[string] ,
identifier[data] [ literal[string] ], identifier[err] , identifier[exc_info_on_loglevel] = identifier[logging] . identifier[DEBUG]
)
keyword[return] identifier[data] [ literal[string] ], identifier[data] [ literal[string] ],()
|
def run_parallel_map_providers_query(data, queue=None):
"""
This function will be called from another process when building the
providers map.
"""
salt.utils.crypt.reinit_crypto()
cloud = Cloud(data['opts'])
try:
with salt.utils.context.func_globals_inject(cloud.clouds[data['fun']], __active_provider_name__=':'.join([data['alias'], data['driver']])):
return (data['alias'], data['driver'], salt.utils.data.simple_types_filter(cloud.clouds[data['fun']]())) # depends on [control=['with'], data=[]] # depends on [control=['try'], data=[]]
except Exception as err:
log.debug("Failed to execute '%s()' while querying for running nodes: %s", data['fun'], err, exc_info_on_loglevel=logging.DEBUG)
# Failed to communicate with the provider, don't list any nodes
return (data['alias'], data['driver'], ()) # depends on [control=['except'], data=['err']]
|
def install(**kwargs):
    """setup entry point"""
    # "--force-manifest" is a distutils-only flag; setuptools rejects it.
    if USE_SETUPTOOLS and "--force-manifest" in sys.argv:
        sys.argv.remove("--force-manifest")
    packages = [modname] + get_packages(join(base_dir, "pylint"), modname)
    if USE_SETUPTOOLS:
        if install_requires:
            kwargs["install_requires"] = install_requires
            kwargs["dependency_links"] = dependency_links
        kwargs["entry_points"] = {
            "console_scripts": [
                "pylint = pylint:run_pylint",
                "epylint = pylint:run_epylint",
                "pyreverse = pylint:run_pyreverse",
                "symilar = pylint:run_symilar",
            ]
        }
    kwargs["packages"] = packages
    cmdclass = dict(install_lib=MyInstallLib, build_py=build_py)
    if easy_install_lib:
        cmdclass["easy_install"] = easy_install
    return setup(
        name=distname,
        version=__pkginfo__["version"],
        license=__pkginfo__["license"],
        description=__pkginfo__["description"],
        long_description=long_description,
        author=__pkginfo__["author"],
        author_email=__pkginfo__["author_email"],
        url=__pkginfo__["web"],
        scripts=ensure_scripts(scripts),
        classifiers=__pkginfo__["classifiers"],
        data_files=data_files,
        ext_modules=ext_modules,
        cmdclass=cmdclass,
        extras_require=extras_require,
        test_suite="test",
        python_requires=">=3.4.*",
        setup_requires=["pytest-runner"],
        tests_require=["pytest"],
        **kwargs
    )
|
def function[install, parameter[]]:
constant[setup entry point]
if name[USE_SETUPTOOLS] begin[:]
if compare[constant[--force-manifest] in name[sys].argv] begin[:]
call[name[sys].argv.remove, parameter[constant[--force-manifest]]]
variable[packages] assign[=] binary_operation[list[[<ast.Name object at 0x7da1b020e410>]] + call[name[get_packages], parameter[call[name[join], parameter[name[base_dir], constant[pylint]]], name[modname]]]]
if name[USE_SETUPTOOLS] begin[:]
if name[install_requires] begin[:]
call[name[kwargs]][constant[install_requires]] assign[=] name[install_requires]
call[name[kwargs]][constant[dependency_links]] assign[=] name[dependency_links]
call[name[kwargs]][constant[entry_points]] assign[=] dictionary[[<ast.Constant object at 0x7da1b020e080>], [<ast.List object at 0x7da1b020dc30>]]
call[name[kwargs]][constant[packages]] assign[=] name[packages]
variable[cmdclass] assign[=] dictionary[[<ast.Constant object at 0x7da1b020ee00>, <ast.Constant object at 0x7da1b020e980>], [<ast.Name object at 0x7da1b020dd80>, <ast.Name object at 0x7da1b020feb0>]]
if name[easy_install_lib] begin[:]
call[name[cmdclass]][constant[easy_install]] assign[=] name[easy_install]
return[call[name[setup], parameter[]]]
|
keyword[def] identifier[install] (** identifier[kwargs] ):
literal[string]
keyword[if] identifier[USE_SETUPTOOLS] :
keyword[if] literal[string] keyword[in] identifier[sys] . identifier[argv] :
identifier[sys] . identifier[argv] . identifier[remove] ( literal[string] )
identifier[packages] =[ identifier[modname] ]+ identifier[get_packages] ( identifier[join] ( identifier[base_dir] , literal[string] ), identifier[modname] )
keyword[if] identifier[USE_SETUPTOOLS] :
keyword[if] identifier[install_requires] :
identifier[kwargs] [ literal[string] ]= identifier[install_requires]
identifier[kwargs] [ literal[string] ]= identifier[dependency_links]
identifier[kwargs] [ literal[string] ]={
literal[string] :[
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ,
]
}
identifier[kwargs] [ literal[string] ]= identifier[packages]
identifier[cmdclass] ={ literal[string] : identifier[MyInstallLib] , literal[string] : identifier[build_py] }
keyword[if] identifier[easy_install_lib] :
identifier[cmdclass] [ literal[string] ]= identifier[easy_install]
keyword[return] identifier[setup] (
identifier[name] = identifier[distname] ,
identifier[version] = identifier[__pkginfo__] [ literal[string] ],
identifier[license] = identifier[__pkginfo__] [ literal[string] ],
identifier[description] = identifier[__pkginfo__] [ literal[string] ],
identifier[long_description] = identifier[long_description] ,
identifier[author] = identifier[__pkginfo__] [ literal[string] ],
identifier[author_email] = identifier[__pkginfo__] [ literal[string] ],
identifier[url] = identifier[__pkginfo__] [ literal[string] ],
identifier[scripts] = identifier[ensure_scripts] ( identifier[scripts] ),
identifier[classifiers] = identifier[__pkginfo__] [ literal[string] ],
identifier[data_files] = identifier[data_files] ,
identifier[ext_modules] = identifier[ext_modules] ,
identifier[cmdclass] = identifier[cmdclass] ,
identifier[extras_require] = identifier[extras_require] ,
identifier[test_suite] = literal[string] ,
identifier[python_requires] = literal[string] ,
identifier[setup_requires] =[ literal[string] ],
identifier[tests_require] =[ literal[string] ],
** identifier[kwargs]
)
|
def install(**kwargs):
"""setup entry point"""
if USE_SETUPTOOLS:
if '--force-manifest' in sys.argv:
sys.argv.remove('--force-manifest') # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
packages = [modname] + get_packages(join(base_dir, 'pylint'), modname)
if USE_SETUPTOOLS:
if install_requires:
kwargs['install_requires'] = install_requires
kwargs['dependency_links'] = dependency_links # depends on [control=['if'], data=[]]
kwargs['entry_points'] = {'console_scripts': ['pylint = pylint:run_pylint', 'epylint = pylint:run_epylint', 'pyreverse = pylint:run_pyreverse', 'symilar = pylint:run_symilar']} # depends on [control=['if'], data=[]]
kwargs['packages'] = packages
cmdclass = {'install_lib': MyInstallLib, 'build_py': build_py}
if easy_install_lib:
cmdclass['easy_install'] = easy_install # depends on [control=['if'], data=[]]
return setup(name=distname, version=__pkginfo__['version'], license=__pkginfo__['license'], description=__pkginfo__['description'], long_description=long_description, author=__pkginfo__['author'], author_email=__pkginfo__['author_email'], url=__pkginfo__['web'], scripts=ensure_scripts(scripts), classifiers=__pkginfo__['classifiers'], data_files=data_files, ext_modules=ext_modules, cmdclass=cmdclass, extras_require=extras_require, test_suite='test', python_requires='>=3.4.*', setup_requires=['pytest-runner'], tests_require=['pytest'], **kwargs)
|
def read_json(cls, url, orient='records'):
    """
    Read a JSON file representing a table into an SFrame.

    Parameters
    ----------
    url : string
        Location of the file or directory to load. A directory or a
        "glob" pattern loads all matching files.
    orient : string, optional. Either "records" or "lines"
        With orient="records" the file must hold a single JSON array whose
        elements are dictionaries (one per row). With orient="lines" the
        file holds one JSON element per line; dictionary lines are
        automatically unpacked into columns, while non-dictionary lines
        are kept as-is in a single column named ``X1``.
    """
    # Reject unknown orientations up front.
    if orient not in ("records", "lines"):
        raise ValueError(
            "Invalid value for orient parameter (" + str(orient) + ")")
    if orient == "records":
        elements = SArray.read_json(url)
        if len(elements) == 0:
            return SFrame()
        # Wrap the array of dicts in one column and unpack it to columns.
        return SFrame({'X1': elements}).unpack('X1', '')
    # orient == "lines": parse each line as a raw JSON element.
    frame = cls.read_csv(url, header=False, na_values=['null'],
                         true_values=['true'], false_values=['false'],
                         _only_raw_string_substitutions=True)
    if frame.num_rows() == 0:
        return SFrame()
    if frame.num_columns() != 1:
        raise RuntimeError("Input JSON not of expected format")
    if frame['X1'].dtype == dict:
        return frame.unpack('X1', '')
    return frame
|
def function[read_json, parameter[cls, url, orient]]:
constant[
Reads a JSON file representing a table into an SFrame.
Parameters
----------
url : string
Location of the CSV file or directory to load. If URL is a directory
or a "glob" pattern, all matching files will be loaded.
orient : string, optional. Either "records" or "lines"
If orient="records" the file is expected to contain a single JSON
array, where each array element is a dictionary. If orient="lines",
the file is expected to contain a JSON element per line.
Examples
--------
The orient parameter describes the expected input format of the JSON
file.
If orient="records", the JSON file is expected to contain a single
JSON Array where each array element is a dictionary describing the row.
For instance:
>>> !cat input.json
[{'a':1,'b':1}, {'a':2,'b':2}, {'a':3,'b':3}]
>>> SFrame.read_json('input.json', orient='records')
Columns:
a int
b int
Rows: 3
Data:
+---+---+
| a | b |
+---+---+
| 1 | 1 |
| 2 | 2 |
| 3 | 3 |
+---+---+
If orient="lines", the JSON file is expected to contain a JSON element
per line. If each line contains a dictionary, it is automatically
unpacked.
>>> !cat input.json
{'a':1,'b':1}
{'a':2,'b':2}
{'a':3,'b':3}
>>> g = SFrame.read_json('input.json', orient='lines')
Columns:
a int
b int
Rows: 3
Data:
+---+---+
| a | b |
+---+---+
| 1 | 1 |
| 2 | 2 |
| 3 | 3 |
+---+---+
If the lines are not dictionaries, the original format is maintained.
>>> !cat input.json
['a','b','c']
['d','e','f']
['g','h','i']
[1,2,3]
>>> g = SFrame.read_json('input.json', orient='lines')
Columns:
X1 list
Rows: 3
Data:
+-----------+
| X1 |
+-----------+
| [a, b, c] |
| [d, e, f] |
| [g, h, i] |
+-----------+
[3 rows x 1 columns]
]
if compare[name[orient] equal[==] constant[records]] begin[:]
variable[g] assign[=] call[name[SArray].read_json, parameter[name[url]]]
if compare[call[name[len], parameter[name[g]]] equal[==] constant[0]] begin[:]
return[call[name[SFrame], parameter[]]]
variable[g] assign[=] call[name[SFrame], parameter[dictionary[[<ast.Constant object at 0x7da1b2013130>], [<ast.Name object at 0x7da1b2011e10>]]]]
return[call[name[g].unpack, parameter[constant[X1], constant[]]]]
|
keyword[def] identifier[read_json] ( identifier[cls] ,
identifier[url] ,
identifier[orient] = literal[string] ):
literal[string]
keyword[if] identifier[orient] == literal[string] :
identifier[g] = identifier[SArray] . identifier[read_json] ( identifier[url] )
keyword[if] identifier[len] ( identifier[g] )== literal[int] :
keyword[return] identifier[SFrame] ()
identifier[g] = identifier[SFrame] ({ literal[string] : identifier[g] })
keyword[return] identifier[g] . identifier[unpack] ( literal[string] , literal[string] )
keyword[elif] identifier[orient] == literal[string] :
identifier[g] = identifier[cls] . identifier[read_csv] ( identifier[url] , identifier[header] = keyword[False] , identifier[na_values] =[ literal[string] ], identifier[true_values] =[ literal[string] ], identifier[false_values] =[ literal[string] ],
identifier[_only_raw_string_substitutions] = keyword[True] )
keyword[if] identifier[g] . identifier[num_rows] ()== literal[int] :
keyword[return] identifier[SFrame] ()
keyword[if] identifier[g] . identifier[num_columns] ()!= literal[int] :
keyword[raise] identifier[RuntimeError] ( literal[string] )
keyword[if] identifier[g] [ literal[string] ]. identifier[dtype] == identifier[dict] :
keyword[return] identifier[g] . identifier[unpack] ( literal[string] , literal[string] )
keyword[else] :
keyword[return] identifier[g]
keyword[else] :
keyword[raise] identifier[ValueError] ( literal[string] + identifier[str] ( identifier[orient] )+ literal[string] )
|
def read_json(cls, url, orient='records'):
"""
Reads a JSON file representing a table into an SFrame.
Parameters
----------
url : string
Location of the CSV file or directory to load. If URL is a directory
or a "glob" pattern, all matching files will be loaded.
orient : string, optional. Either "records" or "lines"
If orient="records" the file is expected to contain a single JSON
array, where each array element is a dictionary. If orient="lines",
the file is expected to contain a JSON element per line.
Examples
--------
The orient parameter describes the expected input format of the JSON
file.
If orient="records", the JSON file is expected to contain a single
JSON Array where each array element is a dictionary describing the row.
For instance:
>>> !cat input.json
[{'a':1,'b':1}, {'a':2,'b':2}, {'a':3,'b':3}]
>>> SFrame.read_json('input.json', orient='records')
Columns:
a int
b int
Rows: 3
Data:
+---+---+
| a | b |
+---+---+
| 1 | 1 |
| 2 | 2 |
| 3 | 3 |
+---+---+
If orient="lines", the JSON file is expected to contain a JSON element
per line. If each line contains a dictionary, it is automatically
unpacked.
>>> !cat input.json
{'a':1,'b':1}
{'a':2,'b':2}
{'a':3,'b':3}
>>> g = SFrame.read_json('input.json', orient='lines')
Columns:
a int
b int
Rows: 3
Data:
+---+---+
| a | b |
+---+---+
| 1 | 1 |
| 2 | 2 |
| 3 | 3 |
+---+---+
If the lines are not dictionaries, the original format is maintained.
>>> !cat input.json
['a','b','c']
['d','e','f']
['g','h','i']
[1,2,3]
>>> g = SFrame.read_json('input.json', orient='lines')
Columns:
X1 list
Rows: 3
Data:
+-----------+
| X1 |
+-----------+
| [a, b, c] |
| [d, e, f] |
| [g, h, i] |
+-----------+
[3 rows x 1 columns]
"""
if orient == 'records':
g = SArray.read_json(url)
if len(g) == 0:
return SFrame() # depends on [control=['if'], data=[]]
g = SFrame({'X1': g})
return g.unpack('X1', '') # depends on [control=['if'], data=[]]
elif orient == 'lines':
g = cls.read_csv(url, header=False, na_values=['null'], true_values=['true'], false_values=['false'], _only_raw_string_substitutions=True)
if g.num_rows() == 0:
return SFrame() # depends on [control=['if'], data=[]]
if g.num_columns() != 1:
raise RuntimeError('Input JSON not of expected format') # depends on [control=['if'], data=[]]
if g['X1'].dtype == dict:
return g.unpack('X1', '') # depends on [control=['if'], data=[]]
else:
return g # depends on [control=['if'], data=[]]
else:
raise ValueError('Invalid value for orient parameter (' + str(orient) + ')')
|
def stor(ftp=None):
    """Same as ftplib's storbinary() but just sends dummy data
    instead of reading it from a real file.
    """
    # When no connection is supplied, open our own and close it at the end.
    own_connection = ftp is None
    if own_connection:
        ftp = connect()
    ftp.voidcmd('TYPE I')
    with contextlib.closing(ftp.transfercmd("STOR " + TESTFN)) as conn:
        payload = b'x' * BUFFER_LEN
        sent_so_far = 0
        while True:
            sent_so_far += conn.send(payload)
            if sent_so_far >= FILE_SIZE:
                break
    ftp.voidresp()
    if own_connection:
        ftp.quit()
    return ftp
|
def function[stor, parameter[ftp]]:
constant[Same as ftplib's storbinary() but just sends dummy data
instead of reading it from a real file.
]
if compare[name[ftp] is constant[None]] begin[:]
variable[ftp] assign[=] call[name[connect], parameter[]]
variable[quit] assign[=] constant[True]
call[name[ftp].voidcmd, parameter[constant[TYPE I]]]
with call[name[contextlib].closing, parameter[call[name[ftp].transfercmd, parameter[binary_operation[constant[STOR ] + name[TESTFN]]]]]] begin[:]
variable[chunk] assign[=] binary_operation[constant[b'x'] * name[BUFFER_LEN]]
variable[total_sent] assign[=] constant[0]
while constant[True] begin[:]
variable[sent] assign[=] call[name[conn].send, parameter[name[chunk]]]
<ast.AugAssign object at 0x7da1b0089c90>
if compare[name[total_sent] greater_or_equal[>=] name[FILE_SIZE]] begin[:]
break
call[name[ftp].voidresp, parameter[]]
if name[quit] begin[:]
call[name[ftp].quit, parameter[]]
return[name[ftp]]
|
keyword[def] identifier[stor] ( identifier[ftp] = keyword[None] ):
literal[string]
keyword[if] identifier[ftp] keyword[is] keyword[None] :
identifier[ftp] = identifier[connect] ()
identifier[quit] = keyword[True]
keyword[else] :
identifier[quit] = keyword[False]
identifier[ftp] . identifier[voidcmd] ( literal[string] )
keyword[with] identifier[contextlib] . identifier[closing] ( identifier[ftp] . identifier[transfercmd] ( literal[string] + identifier[TESTFN] )) keyword[as] identifier[conn] :
identifier[chunk] = literal[string] * identifier[BUFFER_LEN]
identifier[total_sent] = literal[int]
keyword[while] keyword[True] :
identifier[sent] = identifier[conn] . identifier[send] ( identifier[chunk] )
identifier[total_sent] += identifier[sent]
keyword[if] identifier[total_sent] >= identifier[FILE_SIZE] :
keyword[break]
identifier[ftp] . identifier[voidresp] ()
keyword[if] identifier[quit] :
identifier[ftp] . identifier[quit] ()
keyword[return] identifier[ftp]
|
def stor(ftp=None):
"""Same as ftplib's storbinary() but just sends dummy data
instead of reading it from a real file.
"""
if ftp is None:
ftp = connect()
quit = True # depends on [control=['if'], data=['ftp']]
else:
quit = False
ftp.voidcmd('TYPE I')
with contextlib.closing(ftp.transfercmd('STOR ' + TESTFN)) as conn:
chunk = b'x' * BUFFER_LEN
total_sent = 0
while True:
sent = conn.send(chunk)
total_sent += sent
if total_sent >= FILE_SIZE:
break # depends on [control=['if'], data=[]] # depends on [control=['while'], data=[]] # depends on [control=['with'], data=['conn']]
ftp.voidresp()
if quit:
ftp.quit() # depends on [control=['if'], data=[]]
return ftp
|
def _get_hash_object(hash_algo_name):
    """Create a hash object based on given algorithm.
    :param hash_algo_name: name of the hashing algorithm.
    :raises: InvalidInputError, on unsupported or invalid input.
    :returns: a hash object based on the given named algorithm.
    """
    # hashlib's catalogue attribute was renamed between Python 2 and 3.
    if six.PY3:
        supported = hashlib.algorithms_guaranteed
    else:
        supported = hashlib.algorithms
    if hash_algo_name not in supported:
        raise exception.InvalidInputError(
            "Unsupported/Invalid hash name '%s' provided." % hash_algo_name)
    return getattr(hashlib, hash_algo_name)()
|
def function[_get_hash_object, parameter[hash_algo_name]]:
constant[Create a hash object based on given algorithm.
:param hash_algo_name: name of the hashing algorithm.
:raises: InvalidInputError, on unsupported or invalid input.
:returns: a hash object based on the given named algorithm.
]
variable[algorithms] assign[=] <ast.IfExp object at 0x7da1b1a79330>
if compare[name[hash_algo_name] <ast.NotIn object at 0x7da2590d7190> name[algorithms]] begin[:]
variable[msg] assign[=] binary_operation[constant[Unsupported/Invalid hash name '%s' provided.] <ast.Mod object at 0x7da2590d6920> name[hash_algo_name]]
<ast.Raise object at 0x7da1b1a7ad40>
return[call[call[name[getattr], parameter[name[hashlib], name[hash_algo_name]]], parameter[]]]
|
keyword[def] identifier[_get_hash_object] ( identifier[hash_algo_name] ):
literal[string]
identifier[algorithms] =( identifier[hashlib] . identifier[algorithms_guaranteed] keyword[if] identifier[six] . identifier[PY3]
keyword[else] identifier[hashlib] . identifier[algorithms] )
keyword[if] identifier[hash_algo_name] keyword[not] keyword[in] identifier[algorithms] :
identifier[msg] =( literal[string]
% identifier[hash_algo_name] )
keyword[raise] identifier[exception] . identifier[InvalidInputError] ( identifier[msg] )
keyword[return] identifier[getattr] ( identifier[hashlib] , identifier[hash_algo_name] )()
|
def _get_hash_object(hash_algo_name):
"""Create a hash object based on given algorithm.
:param hash_algo_name: name of the hashing algorithm.
:raises: InvalidInputError, on unsupported or invalid input.
:returns: a hash object based on the given named algorithm.
"""
algorithms = hashlib.algorithms_guaranteed if six.PY3 else hashlib.algorithms
if hash_algo_name not in algorithms:
msg = "Unsupported/Invalid hash name '%s' provided." % hash_algo_name
raise exception.InvalidInputError(msg) # depends on [control=['if'], data=['hash_algo_name']]
return getattr(hashlib, hash_algo_name)()
|
def update_sis_id(self, course_id, sis_course_id):
    """
    Updates the SIS ID for the course identified by the passed course ID.
    https://canvas.instructure.com/doc/api/courses.html#method.courses.update
    """
    payload = {"course": {"sis_course_id": sis_course_id}}
    response = self._put_resource(COURSES_API.format(course_id), payload)
    return CanvasCourse(data=response)
|
def function[update_sis_id, parameter[self, course_id, sis_course_id]]:
constant[
Updates the SIS ID for the course identified by the passed course ID.
https://canvas.instructure.com/doc/api/courses.html#method.courses.update
]
variable[url] assign[=] call[name[COURSES_API].format, parameter[name[course_id]]]
variable[body] assign[=] dictionary[[<ast.Constant object at 0x7da1b020ae60>], [<ast.Dict object at 0x7da1b0209e70>]]
return[call[name[CanvasCourse], parameter[]]]
|
keyword[def] identifier[update_sis_id] ( identifier[self] , identifier[course_id] , identifier[sis_course_id] ):
literal[string]
identifier[url] = identifier[COURSES_API] . identifier[format] ( identifier[course_id] )
identifier[body] ={ literal[string] :{ literal[string] : identifier[sis_course_id] }}
keyword[return] identifier[CanvasCourse] ( identifier[data] = identifier[self] . identifier[_put_resource] ( identifier[url] , identifier[body] ))
|
def update_sis_id(self, course_id, sis_course_id):
"""
Updates the SIS ID for the course identified by the passed course ID.
https://canvas.instructure.com/doc/api/courses.html#method.courses.update
"""
url = COURSES_API.format(course_id)
body = {'course': {'sis_course_id': sis_course_id}}
return CanvasCourse(data=self._put_resource(url, body))
|
def _load_properties(self):
    """Load User properties from Flickr."""
    data = _doget('flickr.people.getInfo', user_id=self.__id)
    self.__loaded = True
    person = data.rsp.person
    self.__isadmin = person.isadmin
    self.__ispro = person.ispro
    self.__icon_server = person.iconserver
    # An icon server of 0 means the user has no custom buddy icon.
    if int(person.iconserver) > 0:
        self.__icon_url = (
            'http://photos%s.flickr.com/buddyicons/%s.jpg'
            % (person.iconserver, self.__id)
        )
    else:
        self.__icon_url = 'http://www.flickr.com/images/buddyicon.jpg'
    self.__username = person.username.text
    self.__realname = person.realname.text
    self.__location = person.location.text
    self.__photos_firstdate = person.photos.firstdate.text
    self.__photos_firstdatetaken = person.photos.firstdatetaken.text
    self.__photos_count = person.photos.count.text
|
def function[_load_properties, parameter[self]]:
constant[Load User properties from Flickr.]
variable[method] assign[=] constant[flickr.people.getInfo]
variable[data] assign[=] call[name[_doget], parameter[name[method]]]
name[self].__loaded assign[=] constant[True]
variable[person] assign[=] name[data].rsp.person
name[self].__isadmin assign[=] name[person].isadmin
name[self].__ispro assign[=] name[person].ispro
name[self].__icon_server assign[=] name[person].iconserver
if compare[call[name[int], parameter[name[person].iconserver]] greater[>] constant[0]] begin[:]
name[self].__icon_url assign[=] binary_operation[constant[http://photos%s.flickr.com/buddyicons/%s.jpg] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Attribute object at 0x7da1b2652c20>, <ast.Attribute object at 0x7da1b2651960>]]]
name[self].__username assign[=] name[person].username.text
name[self].__realname assign[=] name[person].realname.text
name[self].__location assign[=] name[person].location.text
name[self].__photos_firstdate assign[=] name[person].photos.firstdate.text
name[self].__photos_firstdatetaken assign[=] name[person].photos.firstdatetaken.text
name[self].__photos_count assign[=] name[person].photos.count.text
|
keyword[def] identifier[_load_properties] ( identifier[self] ):
literal[string]
identifier[method] = literal[string]
identifier[data] = identifier[_doget] ( identifier[method] , identifier[user_id] = identifier[self] . identifier[__id] )
identifier[self] . identifier[__loaded] = keyword[True]
identifier[person] = identifier[data] . identifier[rsp] . identifier[person]
identifier[self] . identifier[__isadmin] = identifier[person] . identifier[isadmin]
identifier[self] . identifier[__ispro] = identifier[person] . identifier[ispro]
identifier[self] . identifier[__icon_server] = identifier[person] . identifier[iconserver]
keyword[if] identifier[int] ( identifier[person] . identifier[iconserver] )> literal[int] :
identifier[self] . identifier[__icon_url] = literal[string] %( identifier[person] . identifier[iconserver] , identifier[self] . identifier[__id] )
keyword[else] :
identifier[self] . identifier[__icon_url] = literal[string]
identifier[self] . identifier[__username] = identifier[person] . identifier[username] . identifier[text]
identifier[self] . identifier[__realname] = identifier[person] . identifier[realname] . identifier[text]
identifier[self] . identifier[__location] = identifier[person] . identifier[location] . identifier[text]
identifier[self] . identifier[__photos_firstdate] = identifier[person] . identifier[photos] . identifier[firstdate] . identifier[text]
identifier[self] . identifier[__photos_firstdatetaken] = identifier[person] . identifier[photos] . identifier[firstdatetaken] . identifier[text]
identifier[self] . identifier[__photos_count] = identifier[person] . identifier[photos] . identifier[count] . identifier[text]
|
def _load_properties(self):
"""Load User properties from Flickr."""
method = 'flickr.people.getInfo'
data = _doget(method, user_id=self.__id)
self.__loaded = True
person = data.rsp.person
self.__isadmin = person.isadmin
self.__ispro = person.ispro
self.__icon_server = person.iconserver
if int(person.iconserver) > 0:
self.__icon_url = 'http://photos%s.flickr.com/buddyicons/%s.jpg' % (person.iconserver, self.__id) # depends on [control=['if'], data=[]]
else:
self.__icon_url = 'http://www.flickr.com/images/buddyicon.jpg'
self.__username = person.username.text
self.__realname = person.realname.text
self.__location = person.location.text
self.__photos_firstdate = person.photos.firstdate.text
self.__photos_firstdatetaken = person.photos.firstdatetaken.text
self.__photos_count = person.photos.count.text
|
def check_is_fitted(estimator, attributes, msg=None, all_or_any=all):
    """Perform is_fitted validation for estimator.

    Verifies that "all_or_any" of the given attributes are present on
    *estimator* and raises a NotFittedError with *msg* otherwise.

    Parameters
    ----------
    estimator : estimator instance.
        estimator instance for which the check is performed.
    attributes : attribute name(s) given as string or a list/tuple of strings
        Eg.: ``["coef_", "estimator_", ...], "coef_"``
    msg : string
        Custom error message; any "%(name)s" placeholder is substituted
        with the estimator class name. Defaults to a generic
        "not fitted yet" message.
    all_or_any : callable, {all, any}, default all
        Specify whether all or any of the given attributes must exist.

    Returns
    -------
    None

    Raises
    ------
    NotFittedError
        If the attributes are not found.
    """
    if msg is None:
        msg = ("This %(name)s instance is not fitted yet. Call 'fit' with "
               "appropriate arguments before using this method.")
    if not hasattr(estimator, 'fit'):
        raise TypeError("%s is not an estimator instance." % (estimator))
    # Normalise a single attribute name to a one-element list.
    if not isinstance(attributes, (list, tuple)):
        attributes = [attributes]
    present = [hasattr(estimator, attr) for attr in attributes]
    if not all_or_any(present):
        raise NotFittedError(msg % {'name': type(estimator).__name__})
|
def function[check_is_fitted, parameter[estimator, attributes, msg, all_or_any]]:
constant[Perform is_fitted validation for estimator.
Checks if the estimator is fitted by verifying the presence of
"all_or_any" of the passed attributes and raises a NotFittedError with the
given message.
Parameters
----------
estimator : estimator instance.
estimator instance for which the check is performed.
attributes : attribute name(s) given as string or a list/tuple of strings
Eg.:
``["coef_", "estimator_", ...], "coef_"``
msg : string
The default error message is, "This %(name)s instance is not fitted
yet. Call 'fit' with appropriate arguments before using this method."
For custom messages if "%(name)s" is present in the message string,
it is substituted for the estimator name.
Eg. : "Estimator, %(name)s, must be fitted before sparsifying".
all_or_any : callable, {all, any}, default all
Specify whether all or any of the given attributes must exist.
Returns
-------
None
Raises
------
NotFittedError
If the attributes are not found.
]
if compare[name[msg] is constant[None]] begin[:]
variable[msg] assign[=] constant[This %(name)s instance is not fitted yet. Call 'fit' with appropriate arguments before using this method.]
if <ast.UnaryOp object at 0x7da20c6e5690> begin[:]
<ast.Raise object at 0x7da20c6e77c0>
if <ast.UnaryOp object at 0x7da20c6e6410> begin[:]
variable[attributes] assign[=] list[[<ast.Name object at 0x7da20c6e5480>]]
if <ast.UnaryOp object at 0x7da20c6e6980> begin[:]
<ast.Raise object at 0x7da20c6e40a0>
|
keyword[def] identifier[check_is_fitted] ( identifier[estimator] , identifier[attributes] , identifier[msg] = keyword[None] , identifier[all_or_any] = identifier[all] ):
literal[string]
keyword[if] identifier[msg] keyword[is] keyword[None] :
identifier[msg] =( literal[string]
literal[string] )
keyword[if] keyword[not] identifier[hasattr] ( identifier[estimator] , literal[string] ):
keyword[raise] identifier[TypeError] ( literal[string] %( identifier[estimator] ))
keyword[if] keyword[not] identifier[isinstance] ( identifier[attributes] ,( identifier[list] , identifier[tuple] )):
identifier[attributes] =[ identifier[attributes] ]
keyword[if] keyword[not] identifier[all_or_any] ([ identifier[hasattr] ( identifier[estimator] , identifier[attr] ) keyword[for] identifier[attr] keyword[in] identifier[attributes] ]):
keyword[raise] identifier[NotFittedError] ( identifier[msg] %{ literal[string] : identifier[type] ( identifier[estimator] ). identifier[__name__] })
|
def check_is_fitted(estimator, attributes, msg=None, all_or_any=all):
"""Perform is_fitted validation for estimator.
Checks if the estimator is fitted by verifying the presence of
"all_or_any" of the passed attributes and raises a NotFittedError with the
given message.
Parameters
----------
estimator : estimator instance.
estimator instance for which the check is performed.
attributes : attribute name(s) given as string or a list/tuple of strings
Eg.:
``["coef_", "estimator_", ...], "coef_"``
msg : string
The default error message is, "This %(name)s instance is not fitted
yet. Call 'fit' with appropriate arguments before using this method."
For custom messages if "%(name)s" is present in the message string,
it is substituted for the estimator name.
Eg. : "Estimator, %(name)s, must be fitted before sparsifying".
all_or_any : callable, {all, any}, default all
Specify whether all or any of the given attributes must exist.
Returns
-------
None
Raises
------
NotFittedError
If the attributes are not found.
"""
if msg is None:
msg = "This %(name)s instance is not fitted yet. Call 'fit' with appropriate arguments before using this method." # depends on [control=['if'], data=['msg']]
if not hasattr(estimator, 'fit'):
raise TypeError('%s is not an estimator instance.' % estimator) # depends on [control=['if'], data=[]]
if not isinstance(attributes, (list, tuple)):
attributes = [attributes] # depends on [control=['if'], data=[]]
if not all_or_any([hasattr(estimator, attr) for attr in attributes]):
raise NotFittedError(msg % {'name': type(estimator).__name__}) # depends on [control=['if'], data=[]]
|
def save_code(self, authorization_code):
    """
    Stores the data belonging to an authorization code token in memcache.
    See :class:`oauth2.store.AuthCodeStore`.
    """
    # Serialise the token attributes we need to restore it later.
    fields = ("client_id", "code", "expires_at", "redirect_uri",
              "scopes", "data", "user_id")
    payload = {name: getattr(authorization_code, name) for name in fields}
    cache_key = self._generate_cache_key(authorization_code.code)
    self.mc.set(cache_key, payload)
|
def function[save_code, parameter[self, authorization_code]]:
constant[
Stores the data belonging to an authorization code token in memcache.
See :class:`oauth2.store.AuthCodeStore`.
]
variable[key] assign[=] call[name[self]._generate_cache_key, parameter[name[authorization_code].code]]
call[name[self].mc.set, parameter[name[key], dictionary[[<ast.Constant object at 0x7da1b11a1c60>, <ast.Constant object at 0x7da1b11a0a00>, <ast.Constant object at 0x7da1b11a07c0>, <ast.Constant object at 0x7da1b11a1b70>, <ast.Constant object at 0x7da1b11a0cd0>, <ast.Constant object at 0x7da1b11a36d0>, <ast.Constant object at 0x7da1b11a3a60>], [<ast.Attribute object at 0x7da1b11a3670>, <ast.Attribute object at 0x7da1b11a3b50>, <ast.Attribute object at 0x7da1b11a0c10>, <ast.Attribute object at 0x7da1b11a3340>, <ast.Attribute object at 0x7da1b11a3430>, <ast.Attribute object at 0x7da1b11a23e0>, <ast.Attribute object at 0x7da1b11a2620>]]]]
|
keyword[def] identifier[save_code] ( identifier[self] , identifier[authorization_code] ):
literal[string]
identifier[key] = identifier[self] . identifier[_generate_cache_key] ( identifier[authorization_code] . identifier[code] )
identifier[self] . identifier[mc] . identifier[set] ( identifier[key] ,{ literal[string] : identifier[authorization_code] . identifier[client_id] ,
literal[string] : identifier[authorization_code] . identifier[code] ,
literal[string] : identifier[authorization_code] . identifier[expires_at] ,
literal[string] : identifier[authorization_code] . identifier[redirect_uri] ,
literal[string] : identifier[authorization_code] . identifier[scopes] ,
literal[string] : identifier[authorization_code] . identifier[data] ,
literal[string] : identifier[authorization_code] . identifier[user_id] })
|
def save_code(self, authorization_code):
"""
Stores the data belonging to an authorization code token in memcache.
See :class:`oauth2.store.AuthCodeStore`.
"""
key = self._generate_cache_key(authorization_code.code)
self.mc.set(key, {'client_id': authorization_code.client_id, 'code': authorization_code.code, 'expires_at': authorization_code.expires_at, 'redirect_uri': authorization_code.redirect_uri, 'scopes': authorization_code.scopes, 'data': authorization_code.data, 'user_id': authorization_code.user_id})
|
def get_ordered_entries(self, queryset=False):
"""
Custom ordering. First we get the average views and rating for
the categories's entries. Second we created a rank by multiplying
both. Last, we sort categories by this rank from top to bottom.
Example:
- Cat_1
- Entry_1 (500 Views, Rating 2)
- Entry_2 (200 Views, Rating -4)
- Entry_3 (100 Views, Rating 3)
- Cat_2
- Entry_1 (200 Views, Rating 7)
- Entry_2 (50 Views, Rating 2)
Result:
Cat_1 has a rank by: 88.88 (avg. views: 266.66, avg. rating: 0.33)
Cat_2 has a rank by: 562.5 (avg. views: 125, avg. rating: 4.5)
Cat_2 will be displayed at the top. The algorithm is quality-oriented,
as you can see.
"""
if queryset:
self.queryset = queryset
else:
self.queryset = EntryCategory.objects.all()
if self.queryset:
for category in self.queryset:
entries = category.get_entries()
if entries:
amount_list = [e.amount_of_views for e in entries]
rating_list = [e.rating() for e in entries]
views_per_entry = fsum(amount_list) / len(amount_list)
rating_per_entry = fsum(rating_list) / len(rating_list)
category.last_rank = views_per_entry * rating_per_entry
category.save()
else:
self.queryset = self.queryset.exclude(pk=category.pk)
self.queryset = sorted(self.queryset, key=lambda c: c.last_rank,
reverse=True)
return self.queryset
|
def function[get_ordered_entries, parameter[self, queryset]]:
constant[
Custom ordering. First we get the average views and rating for
the categories's entries. Second we created a rank by multiplying
both. Last, we sort categories by this rank from top to bottom.
Example:
- Cat_1
- Entry_1 (500 Views, Rating 2)
- Entry_2 (200 Views, Rating -4)
- Entry_3 (100 Views, Rating 3)
- Cat_2
- Entry_1 (200 Views, Rating 7)
- Entry_2 (50 Views, Rating 2)
Result:
Cat_1 has a rank by: 88.88 (avg. views: 266.66, avg. rating: 0.33)
Cat_2 has a rank by: 562.5 (avg. views: 125, avg. rating: 4.5)
Cat_2 will be displayed at the top. The algorithm is quality-oriented,
as you can see.
]
if name[queryset] begin[:]
name[self].queryset assign[=] name[queryset]
if name[self].queryset begin[:]
for taget[name[category]] in starred[name[self].queryset] begin[:]
variable[entries] assign[=] call[name[category].get_entries, parameter[]]
if name[entries] begin[:]
variable[amount_list] assign[=] <ast.ListComp object at 0x7da20c796620>
variable[rating_list] assign[=] <ast.ListComp object at 0x7da20c796a70>
variable[views_per_entry] assign[=] binary_operation[call[name[fsum], parameter[name[amount_list]]] / call[name[len], parameter[name[amount_list]]]]
variable[rating_per_entry] assign[=] binary_operation[call[name[fsum], parameter[name[rating_list]]] / call[name[len], parameter[name[rating_list]]]]
name[category].last_rank assign[=] binary_operation[name[views_per_entry] * name[rating_per_entry]]
call[name[category].save, parameter[]]
name[self].queryset assign[=] call[name[sorted], parameter[name[self].queryset]]
return[name[self].queryset]
|
keyword[def] identifier[get_ordered_entries] ( identifier[self] , identifier[queryset] = keyword[False] ):
literal[string]
keyword[if] identifier[queryset] :
identifier[self] . identifier[queryset] = identifier[queryset]
keyword[else] :
identifier[self] . identifier[queryset] = identifier[EntryCategory] . identifier[objects] . identifier[all] ()
keyword[if] identifier[self] . identifier[queryset] :
keyword[for] identifier[category] keyword[in] identifier[self] . identifier[queryset] :
identifier[entries] = identifier[category] . identifier[get_entries] ()
keyword[if] identifier[entries] :
identifier[amount_list] =[ identifier[e] . identifier[amount_of_views] keyword[for] identifier[e] keyword[in] identifier[entries] ]
identifier[rating_list] =[ identifier[e] . identifier[rating] () keyword[for] identifier[e] keyword[in] identifier[entries] ]
identifier[views_per_entry] = identifier[fsum] ( identifier[amount_list] )/ identifier[len] ( identifier[amount_list] )
identifier[rating_per_entry] = identifier[fsum] ( identifier[rating_list] )/ identifier[len] ( identifier[rating_list] )
identifier[category] . identifier[last_rank] = identifier[views_per_entry] * identifier[rating_per_entry]
identifier[category] . identifier[save] ()
keyword[else] :
identifier[self] . identifier[queryset] = identifier[self] . identifier[queryset] . identifier[exclude] ( identifier[pk] = identifier[category] . identifier[pk] )
identifier[self] . identifier[queryset] = identifier[sorted] ( identifier[self] . identifier[queryset] , identifier[key] = keyword[lambda] identifier[c] : identifier[c] . identifier[last_rank] ,
identifier[reverse] = keyword[True] )
keyword[return] identifier[self] . identifier[queryset]
|
def get_ordered_entries(self, queryset=False):
"""
Custom ordering. First we get the average views and rating for
the categories's entries. Second we created a rank by multiplying
both. Last, we sort categories by this rank from top to bottom.
Example:
- Cat_1
- Entry_1 (500 Views, Rating 2)
- Entry_2 (200 Views, Rating -4)
- Entry_3 (100 Views, Rating 3)
- Cat_2
- Entry_1 (200 Views, Rating 7)
- Entry_2 (50 Views, Rating 2)
Result:
Cat_1 has a rank by: 88.88 (avg. views: 266.66, avg. rating: 0.33)
Cat_2 has a rank by: 562.5 (avg. views: 125, avg. rating: 4.5)
Cat_2 will be displayed at the top. The algorithm is quality-oriented,
as you can see.
"""
if queryset:
self.queryset = queryset # depends on [control=['if'], data=[]]
else:
self.queryset = EntryCategory.objects.all()
if self.queryset:
for category in self.queryset:
entries = category.get_entries()
if entries:
amount_list = [e.amount_of_views for e in entries]
rating_list = [e.rating() for e in entries]
views_per_entry = fsum(amount_list) / len(amount_list)
rating_per_entry = fsum(rating_list) / len(rating_list)
category.last_rank = views_per_entry * rating_per_entry
category.save() # depends on [control=['if'], data=[]]
else:
self.queryset = self.queryset.exclude(pk=category.pk) # depends on [control=['for'], data=['category']]
self.queryset = sorted(self.queryset, key=lambda c: c.last_rank, reverse=True) # depends on [control=['if'], data=[]]
return self.queryset
|
def create_knowledge_base(project_id, display_name):
"""Creates a Knowledge base.
Args:
project_id: The GCP project linked with the agent.
display_name: The display name of the Knowledge base."""
import dialogflow_v2beta1 as dialogflow
client = dialogflow.KnowledgeBasesClient()
project_path = client.project_path(project_id)
knowledge_base = dialogflow.types.KnowledgeBase(
display_name=display_name)
response = client.create_knowledge_base(project_path, knowledge_base)
print('Knowledge Base created:\n')
print('Display Name: {}\n'.format(response.display_name))
print('Knowledge ID: {}\n'.format(response.name))
|
def function[create_knowledge_base, parameter[project_id, display_name]]:
constant[Creates a Knowledge base.
Args:
project_id: The GCP project linked with the agent.
display_name: The display name of the Knowledge base.]
import module[dialogflow_v2beta1] as alias[dialogflow]
variable[client] assign[=] call[name[dialogflow].KnowledgeBasesClient, parameter[]]
variable[project_path] assign[=] call[name[client].project_path, parameter[name[project_id]]]
variable[knowledge_base] assign[=] call[name[dialogflow].types.KnowledgeBase, parameter[]]
variable[response] assign[=] call[name[client].create_knowledge_base, parameter[name[project_path], name[knowledge_base]]]
call[name[print], parameter[constant[Knowledge Base created:
]]]
call[name[print], parameter[call[constant[Display Name: {}
].format, parameter[name[response].display_name]]]]
call[name[print], parameter[call[constant[Knowledge ID: {}
].format, parameter[name[response].name]]]]
|
keyword[def] identifier[create_knowledge_base] ( identifier[project_id] , identifier[display_name] ):
literal[string]
keyword[import] identifier[dialogflow_v2beta1] keyword[as] identifier[dialogflow]
identifier[client] = identifier[dialogflow] . identifier[KnowledgeBasesClient] ()
identifier[project_path] = identifier[client] . identifier[project_path] ( identifier[project_id] )
identifier[knowledge_base] = identifier[dialogflow] . identifier[types] . identifier[KnowledgeBase] (
identifier[display_name] = identifier[display_name] )
identifier[response] = identifier[client] . identifier[create_knowledge_base] ( identifier[project_path] , identifier[knowledge_base] )
identifier[print] ( literal[string] )
identifier[print] ( literal[string] . identifier[format] ( identifier[response] . identifier[display_name] ))
identifier[print] ( literal[string] . identifier[format] ( identifier[response] . identifier[name] ))
|
def create_knowledge_base(project_id, display_name):
"""Creates a Knowledge base.
Args:
project_id: The GCP project linked with the agent.
display_name: The display name of the Knowledge base."""
import dialogflow_v2beta1 as dialogflow
client = dialogflow.KnowledgeBasesClient()
project_path = client.project_path(project_id)
knowledge_base = dialogflow.types.KnowledgeBase(display_name=display_name)
response = client.create_knowledge_base(project_path, knowledge_base)
print('Knowledge Base created:\n')
print('Display Name: {}\n'.format(response.display_name))
print('Knowledge ID: {}\n'.format(response.name))
|
def read(self, filename=None):
"""Read and parse mdp file *filename*."""
self._init_filename(filename)
def BLANK(i):
return "B{0:04d}".format(i)
def COMMENT(i):
return "C{0:04d}".format(i)
data = odict()
iblank = icomment = 0
with open(self.real_filename) as mdp:
for line in mdp:
line = line.strip()
if len(line) == 0:
iblank += 1
data[BLANK(iblank)] = ''
continue
m = self.COMMENT.match(line)
if m:
icomment += 1
data[COMMENT(icomment)] = m.group('value')
continue
# parameter
m = self.PARAMETER.match(line)
if m:
# check for comments after parameter?? -- currently discarded
parameter = m.group('parameter')
value = self._transform(m.group('value'))
data[parameter] = value
else:
errmsg = '{filename!r}: unknown line in mdp file, {line!r}'.format(**vars())
self.logger.error(errmsg)
raise ParseError(errmsg)
super(MDP,self).update(data)
|
def function[read, parameter[self, filename]]:
constant[Read and parse mdp file *filename*.]
call[name[self]._init_filename, parameter[name[filename]]]
def function[BLANK, parameter[i]]:
return[call[constant[B{0:04d}].format, parameter[name[i]]]]
def function[COMMENT, parameter[i]]:
return[call[constant[C{0:04d}].format, parameter[name[i]]]]
variable[data] assign[=] call[name[odict], parameter[]]
variable[iblank] assign[=] constant[0]
with call[name[open], parameter[name[self].real_filename]] begin[:]
for taget[name[line]] in starred[name[mdp]] begin[:]
variable[line] assign[=] call[name[line].strip, parameter[]]
if compare[call[name[len], parameter[name[line]]] equal[==] constant[0]] begin[:]
<ast.AugAssign object at 0x7da18bccad70>
call[name[data]][call[name[BLANK], parameter[name[iblank]]]] assign[=] constant[]
continue
variable[m] assign[=] call[name[self].COMMENT.match, parameter[name[line]]]
if name[m] begin[:]
<ast.AugAssign object at 0x7da18bccb8e0>
call[name[data]][call[name[COMMENT], parameter[name[icomment]]]] assign[=] call[name[m].group, parameter[constant[value]]]
continue
variable[m] assign[=] call[name[self].PARAMETER.match, parameter[name[line]]]
if name[m] begin[:]
variable[parameter] assign[=] call[name[m].group, parameter[constant[parameter]]]
variable[value] assign[=] call[name[self]._transform, parameter[call[name[m].group, parameter[constant[value]]]]]
call[name[data]][name[parameter]] assign[=] name[value]
call[call[name[super], parameter[name[MDP], name[self]]].update, parameter[name[data]]]
|
keyword[def] identifier[read] ( identifier[self] , identifier[filename] = keyword[None] ):
literal[string]
identifier[self] . identifier[_init_filename] ( identifier[filename] )
keyword[def] identifier[BLANK] ( identifier[i] ):
keyword[return] literal[string] . identifier[format] ( identifier[i] )
keyword[def] identifier[COMMENT] ( identifier[i] ):
keyword[return] literal[string] . identifier[format] ( identifier[i] )
identifier[data] = identifier[odict] ()
identifier[iblank] = identifier[icomment] = literal[int]
keyword[with] identifier[open] ( identifier[self] . identifier[real_filename] ) keyword[as] identifier[mdp] :
keyword[for] identifier[line] keyword[in] identifier[mdp] :
identifier[line] = identifier[line] . identifier[strip] ()
keyword[if] identifier[len] ( identifier[line] )== literal[int] :
identifier[iblank] += literal[int]
identifier[data] [ identifier[BLANK] ( identifier[iblank] )]= literal[string]
keyword[continue]
identifier[m] = identifier[self] . identifier[COMMENT] . identifier[match] ( identifier[line] )
keyword[if] identifier[m] :
identifier[icomment] += literal[int]
identifier[data] [ identifier[COMMENT] ( identifier[icomment] )]= identifier[m] . identifier[group] ( literal[string] )
keyword[continue]
identifier[m] = identifier[self] . identifier[PARAMETER] . identifier[match] ( identifier[line] )
keyword[if] identifier[m] :
identifier[parameter] = identifier[m] . identifier[group] ( literal[string] )
identifier[value] = identifier[self] . identifier[_transform] ( identifier[m] . identifier[group] ( literal[string] ))
identifier[data] [ identifier[parameter] ]= identifier[value]
keyword[else] :
identifier[errmsg] = literal[string] . identifier[format] (** identifier[vars] ())
identifier[self] . identifier[logger] . identifier[error] ( identifier[errmsg] )
keyword[raise] identifier[ParseError] ( identifier[errmsg] )
identifier[super] ( identifier[MDP] , identifier[self] ). identifier[update] ( identifier[data] )
|
def read(self, filename=None):
"""Read and parse mdp file *filename*."""
self._init_filename(filename)
def BLANK(i):
return 'B{0:04d}'.format(i)
def COMMENT(i):
return 'C{0:04d}'.format(i)
data = odict()
iblank = icomment = 0
with open(self.real_filename) as mdp:
for line in mdp:
line = line.strip()
if len(line) == 0:
iblank += 1
data[BLANK(iblank)] = ''
continue # depends on [control=['if'], data=[]]
m = self.COMMENT.match(line)
if m:
icomment += 1
data[COMMENT(icomment)] = m.group('value')
continue # depends on [control=['if'], data=[]]
# parameter
m = self.PARAMETER.match(line)
if m:
# check for comments after parameter?? -- currently discarded
parameter = m.group('parameter')
value = self._transform(m.group('value'))
data[parameter] = value # depends on [control=['if'], data=[]]
else:
errmsg = '{filename!r}: unknown line in mdp file, {line!r}'.format(**vars())
self.logger.error(errmsg)
raise ParseError(errmsg) # depends on [control=['for'], data=['line']] # depends on [control=['with'], data=['mdp']]
super(MDP, self).update(data)
|
def disable_cloud_integration(self, id, **kwargs): # noqa: E501
"""Disable a specific cloud integration # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.disable_cloud_integration(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: (required)
:return: ResponseContainerCloudIntegration
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.disable_cloud_integration_with_http_info(id, **kwargs) # noqa: E501
else:
(data) = self.disable_cloud_integration_with_http_info(id, **kwargs) # noqa: E501
return data
|
def function[disable_cloud_integration, parameter[self, id]]:
constant[Disable a specific cloud integration # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.disable_cloud_integration(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: (required)
:return: ResponseContainerCloudIntegration
If the method is called asynchronously,
returns the request thread.
]
call[name[kwargs]][constant[_return_http_data_only]] assign[=] constant[True]
if call[name[kwargs].get, parameter[constant[async_req]]] begin[:]
return[call[name[self].disable_cloud_integration_with_http_info, parameter[name[id]]]]
|
keyword[def] identifier[disable_cloud_integration] ( identifier[self] , identifier[id] ,** identifier[kwargs] ):
literal[string]
identifier[kwargs] [ literal[string] ]= keyword[True]
keyword[if] identifier[kwargs] . identifier[get] ( literal[string] ):
keyword[return] identifier[self] . identifier[disable_cloud_integration_with_http_info] ( identifier[id] ,** identifier[kwargs] )
keyword[else] :
( identifier[data] )= identifier[self] . identifier[disable_cloud_integration_with_http_info] ( identifier[id] ,** identifier[kwargs] )
keyword[return] identifier[data]
|
def disable_cloud_integration(self, id, **kwargs): # noqa: E501
'Disable a specific cloud integration # noqa: E501\n\n # noqa: E501\n This method makes a synchronous HTTP request by default. To make an\n asynchronous HTTP request, please pass async_req=True\n >>> thread = api.disable_cloud_integration(id, async_req=True)\n >>> result = thread.get()\n\n :param async_req bool\n :param str id: (required)\n :return: ResponseContainerCloudIntegration\n If the method is called asynchronously,\n returns the request thread.\n '
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.disable_cloud_integration_with_http_info(id, **kwargs) # noqa: E501 # depends on [control=['if'], data=[]]
else:
data = self.disable_cloud_integration_with_http_info(id, **kwargs) # noqa: E501
return data
|
def fetch_liked_projects(self, **kwargs):
"""
List liked projects
Fetch projects that the currently authenticated user likes.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.fetch_liked_projects(callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:return: PaginatedProjectResults
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.fetch_liked_projects_with_http_info(**kwargs)
else:
(data) = self.fetch_liked_projects_with_http_info(**kwargs)
return data
|
def function[fetch_liked_projects, parameter[self]]:
constant[
List liked projects
Fetch projects that the currently authenticated user likes.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.fetch_liked_projects(callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:return: PaginatedProjectResults
If the method is called asynchronously,
returns the request thread.
]
call[name[kwargs]][constant[_return_http_data_only]] assign[=] constant[True]
if call[name[kwargs].get, parameter[constant[callback]]] begin[:]
return[call[name[self].fetch_liked_projects_with_http_info, parameter[]]]
|
keyword[def] identifier[fetch_liked_projects] ( identifier[self] ,** identifier[kwargs] ):
literal[string]
identifier[kwargs] [ literal[string] ]= keyword[True]
keyword[if] identifier[kwargs] . identifier[get] ( literal[string] ):
keyword[return] identifier[self] . identifier[fetch_liked_projects_with_http_info] (** identifier[kwargs] )
keyword[else] :
( identifier[data] )= identifier[self] . identifier[fetch_liked_projects_with_http_info] (** identifier[kwargs] )
keyword[return] identifier[data]
|
def fetch_liked_projects(self, **kwargs):
"""
List liked projects
Fetch projects that the currently authenticated user likes.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.fetch_liked_projects(callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:return: PaginatedProjectResults
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.fetch_liked_projects_with_http_info(**kwargs) # depends on [control=['if'], data=[]]
else:
data = self.fetch_liked_projects_with_http_info(**kwargs)
return data
|
def comunicar_certificado_icpbrasil(self, certificado):
"""Sobrepõe :meth:`~satcfe.base.FuncoesSAT.comunicar_certificado_icpbrasil`.
:return: Uma resposta SAT padrão.
:rtype: satcfe.resposta.padrao.RespostaSAT
"""
retorno = super(ClienteSATLocal, self).\
comunicar_certificado_icpbrasil(certificado)
return RespostaSAT.comunicar_certificado_icpbrasil(retorno)
|
def function[comunicar_certificado_icpbrasil, parameter[self, certificado]]:
constant[Sobrepõe :meth:`~satcfe.base.FuncoesSAT.comunicar_certificado_icpbrasil`.
:return: Uma resposta SAT padrão.
:rtype: satcfe.resposta.padrao.RespostaSAT
]
variable[retorno] assign[=] call[call[name[super], parameter[name[ClienteSATLocal], name[self]]].comunicar_certificado_icpbrasil, parameter[name[certificado]]]
return[call[name[RespostaSAT].comunicar_certificado_icpbrasil, parameter[name[retorno]]]]
|
keyword[def] identifier[comunicar_certificado_icpbrasil] ( identifier[self] , identifier[certificado] ):
literal[string]
identifier[retorno] = identifier[super] ( identifier[ClienteSATLocal] , identifier[self] ). identifier[comunicar_certificado_icpbrasil] ( identifier[certificado] )
keyword[return] identifier[RespostaSAT] . identifier[comunicar_certificado_icpbrasil] ( identifier[retorno] )
|
def comunicar_certificado_icpbrasil(self, certificado):
"""Sobrepõe :meth:`~satcfe.base.FuncoesSAT.comunicar_certificado_icpbrasil`.
:return: Uma resposta SAT padrão.
:rtype: satcfe.resposta.padrao.RespostaSAT
"""
retorno = super(ClienteSATLocal, self).comunicar_certificado_icpbrasil(certificado)
return RespostaSAT.comunicar_certificado_icpbrasil(retorno)
|
def _message_symbol(self, msgid):
"""Get the message symbol of the given message id
Return the original message id if the message does not
exist.
"""
try:
return [md.symbol for md in self.msgs_store.get_message_definitions(msgid)]
except UnknownMessageError:
return msgid
|
def function[_message_symbol, parameter[self, msgid]]:
constant[Get the message symbol of the given message id
Return the original message id if the message does not
exist.
]
<ast.Try object at 0x7da1b02585e0>
|
keyword[def] identifier[_message_symbol] ( identifier[self] , identifier[msgid] ):
literal[string]
keyword[try] :
keyword[return] [ identifier[md] . identifier[symbol] keyword[for] identifier[md] keyword[in] identifier[self] . identifier[msgs_store] . identifier[get_message_definitions] ( identifier[msgid] )]
keyword[except] identifier[UnknownMessageError] :
keyword[return] identifier[msgid]
|
def _message_symbol(self, msgid):
"""Get the message symbol of the given message id
Return the original message id if the message does not
exist.
"""
try:
return [md.symbol for md in self.msgs_store.get_message_definitions(msgid)] # depends on [control=['try'], data=[]]
except UnknownMessageError:
return msgid # depends on [control=['except'], data=[]]
|
def read(self, callback=None):
"""
The device returns an MSB and LSB (in that order) for each axis.
These are 12 bit values - that is only the upper 4 bits of the LSB are used.
To make things more confusing, firmata returns each axis as 4 bytes, and reverses the order because
it looks at the world as lsb, msb order.
:param callback: Callback function
:returns: callback data is set with x,y,z raw (integers) followed by x,y,z corrected ( floating point)
Call available() first to make sure new data is really available.
"""
register = self.MMA8452Q_Register['OUT_X_MSB']
self.board.i2c_read_request(self.address, register, 6,
Constants.I2C_READ | Constants.I2C_END_TX_MASK,
self.data_val, Constants.CB_TYPE_DIRECT)
# get x y z data
xyz = self.wait_for_read_result()
self.board.sleep(.001) # string off address and register bytes
xyz = xyz[2:]
xmsb = xyz[0]
xlsb = xyz[1]
ymsb = xyz[2]
ylsb = xyz[3]
zmsb = xyz[4]
zlsb = xyz[5]
xa = int((xmsb << 8) | xlsb) >> 4
if xmsb > 127:
xa = 4095 - xa
xa = ~xa + 1
ya = int(((ymsb << 8) | ylsb)) >> 4
if ymsb > 127:
ya = 4095 - ya
ya = ~ya + 1
za = int((zmsb << 8) | zlsb) >> 4
if zmsb > 127:
za = 4095 - za
za = ~za + 1
cx = xa / 2048 * self.scale
cy = ya / 2048 * self.scale
cz = za / 2048 * self.scale
angle_xz = 180 * math.atan2(xa, za) / math.pi
angle_xy = 180 * math.atan2(xa, ya) / math.pi
angle_yz = 180 * math.atan2(ya, za) / math.pi
if callback:
callback([xa, ya, za, cx, cy, cz, angle_xz, angle_yz, angle_xy])
self.board.sleep(.001)
return [xa, ya, za, cx, cy, cz, angle_xz, angle_yz, angle_xy]
|
def function[read, parameter[self, callback]]:
constant[
The device returns an MSB and LSB (in that order) for each axis.
These are 12 bit values - that is only the upper 4 bits of the LSB are used.
To make things more confusing, firmata returns each axis as 4 bytes, and reverses the order because
it looks at the world as lsb, msb order.
:param callback: Callback function
:returns: callback data is set with x,y,z raw (integers) followed by x,y,z corrected ( floating point)
Call available() first to make sure new data is really available.
]
variable[register] assign[=] call[name[self].MMA8452Q_Register][constant[OUT_X_MSB]]
call[name[self].board.i2c_read_request, parameter[name[self].address, name[register], constant[6], binary_operation[name[Constants].I2C_READ <ast.BitOr object at 0x7da2590d6aa0> name[Constants].I2C_END_TX_MASK], name[self].data_val, name[Constants].CB_TYPE_DIRECT]]
variable[xyz] assign[=] call[name[self].wait_for_read_result, parameter[]]
call[name[self].board.sleep, parameter[constant[0.001]]]
variable[xyz] assign[=] call[name[xyz]][<ast.Slice object at 0x7da2049621a0>]
variable[xmsb] assign[=] call[name[xyz]][constant[0]]
variable[xlsb] assign[=] call[name[xyz]][constant[1]]
variable[ymsb] assign[=] call[name[xyz]][constant[2]]
variable[ylsb] assign[=] call[name[xyz]][constant[3]]
variable[zmsb] assign[=] call[name[xyz]][constant[4]]
variable[zlsb] assign[=] call[name[xyz]][constant[5]]
variable[xa] assign[=] binary_operation[call[name[int], parameter[binary_operation[binary_operation[name[xmsb] <ast.LShift object at 0x7da2590d69e0> constant[8]] <ast.BitOr object at 0x7da2590d6aa0> name[xlsb]]]] <ast.RShift object at 0x7da2590d6a40> constant[4]]
if compare[name[xmsb] greater[>] constant[127]] begin[:]
variable[xa] assign[=] binary_operation[constant[4095] - name[xa]]
variable[xa] assign[=] binary_operation[<ast.UnaryOp object at 0x7da20e74b1c0> + constant[1]]
variable[ya] assign[=] binary_operation[call[name[int], parameter[binary_operation[binary_operation[name[ymsb] <ast.LShift object at 0x7da2590d69e0> constant[8]] <ast.BitOr object at 0x7da2590d6aa0> name[ylsb]]]] <ast.RShift object at 0x7da2590d6a40> constant[4]]
if compare[name[ymsb] greater[>] constant[127]] begin[:]
variable[ya] assign[=] binary_operation[constant[4095] - name[ya]]
variable[ya] assign[=] binary_operation[<ast.UnaryOp object at 0x7da20c9905e0> + constant[1]]
variable[za] assign[=] binary_operation[call[name[int], parameter[binary_operation[binary_operation[name[zmsb] <ast.LShift object at 0x7da2590d69e0> constant[8]] <ast.BitOr object at 0x7da2590d6aa0> name[zlsb]]]] <ast.RShift object at 0x7da2590d6a40> constant[4]]
if compare[name[zmsb] greater[>] constant[127]] begin[:]
variable[za] assign[=] binary_operation[constant[4095] - name[za]]
variable[za] assign[=] binary_operation[<ast.UnaryOp object at 0x7da20c991cf0> + constant[1]]
variable[cx] assign[=] binary_operation[binary_operation[name[xa] / constant[2048]] * name[self].scale]
variable[cy] assign[=] binary_operation[binary_operation[name[ya] / constant[2048]] * name[self].scale]
variable[cz] assign[=] binary_operation[binary_operation[name[za] / constant[2048]] * name[self].scale]
variable[angle_xz] assign[=] binary_operation[binary_operation[constant[180] * call[name[math].atan2, parameter[name[xa], name[za]]]] / name[math].pi]
variable[angle_xy] assign[=] binary_operation[binary_operation[constant[180] * call[name[math].atan2, parameter[name[xa], name[ya]]]] / name[math].pi]
variable[angle_yz] assign[=] binary_operation[binary_operation[constant[180] * call[name[math].atan2, parameter[name[ya], name[za]]]] / name[math].pi]
if name[callback] begin[:]
call[name[callback], parameter[list[[<ast.Name object at 0x7da20c992e30>, <ast.Name object at 0x7da20c991c30>, <ast.Name object at 0x7da20c9908e0>, <ast.Name object at 0x7da20c992b00>, <ast.Name object at 0x7da20c9903d0>, <ast.Name object at 0x7da20c992440>, <ast.Name object at 0x7da20c990910>, <ast.Name object at 0x7da20c991de0>, <ast.Name object at 0x7da20c9911e0>]]]]
call[name[self].board.sleep, parameter[constant[0.001]]]
return[list[[<ast.Name object at 0x7da20c993e20>, <ast.Name object at 0x7da20c992bf0>, <ast.Name object at 0x7da20c990f70>, <ast.Name object at 0x7da20c9937c0>, <ast.Name object at 0x7da20c990f10>, <ast.Name object at 0x7da20c992c80>, <ast.Name object at 0x7da20c992e00>, <ast.Name object at 0x7da20c9935e0>, <ast.Name object at 0x7da20c991cc0>]]]
|
keyword[def] identifier[read] ( identifier[self] , identifier[callback] = keyword[None] ):
literal[string]
identifier[register] = identifier[self] . identifier[MMA8452Q_Register] [ literal[string] ]
identifier[self] . identifier[board] . identifier[i2c_read_request] ( identifier[self] . identifier[address] , identifier[register] , literal[int] ,
identifier[Constants] . identifier[I2C_READ] | identifier[Constants] . identifier[I2C_END_TX_MASK] ,
identifier[self] . identifier[data_val] , identifier[Constants] . identifier[CB_TYPE_DIRECT] )
identifier[xyz] = identifier[self] . identifier[wait_for_read_result] ()
identifier[self] . identifier[board] . identifier[sleep] ( literal[int] )
identifier[xyz] = identifier[xyz] [ literal[int] :]
identifier[xmsb] = identifier[xyz] [ literal[int] ]
identifier[xlsb] = identifier[xyz] [ literal[int] ]
identifier[ymsb] = identifier[xyz] [ literal[int] ]
identifier[ylsb] = identifier[xyz] [ literal[int] ]
identifier[zmsb] = identifier[xyz] [ literal[int] ]
identifier[zlsb] = identifier[xyz] [ literal[int] ]
identifier[xa] = identifier[int] (( identifier[xmsb] << literal[int] )| identifier[xlsb] )>> literal[int]
keyword[if] identifier[xmsb] > literal[int] :
identifier[xa] = literal[int] - identifier[xa]
identifier[xa] =~ identifier[xa] + literal[int]
identifier[ya] = identifier[int] ((( identifier[ymsb] << literal[int] )| identifier[ylsb] ))>> literal[int]
keyword[if] identifier[ymsb] > literal[int] :
identifier[ya] = literal[int] - identifier[ya]
identifier[ya] =~ identifier[ya] + literal[int]
identifier[za] = identifier[int] (( identifier[zmsb] << literal[int] )| identifier[zlsb] )>> literal[int]
keyword[if] identifier[zmsb] > literal[int] :
identifier[za] = literal[int] - identifier[za]
identifier[za] =~ identifier[za] + literal[int]
identifier[cx] = identifier[xa] / literal[int] * identifier[self] . identifier[scale]
identifier[cy] = identifier[ya] / literal[int] * identifier[self] . identifier[scale]
identifier[cz] = identifier[za] / literal[int] * identifier[self] . identifier[scale]
identifier[angle_xz] = literal[int] * identifier[math] . identifier[atan2] ( identifier[xa] , identifier[za] )/ identifier[math] . identifier[pi]
identifier[angle_xy] = literal[int] * identifier[math] . identifier[atan2] ( identifier[xa] , identifier[ya] )/ identifier[math] . identifier[pi]
identifier[angle_yz] = literal[int] * identifier[math] . identifier[atan2] ( identifier[ya] , identifier[za] )/ identifier[math] . identifier[pi]
keyword[if] identifier[callback] :
identifier[callback] ([ identifier[xa] , identifier[ya] , identifier[za] , identifier[cx] , identifier[cy] , identifier[cz] , identifier[angle_xz] , identifier[angle_yz] , identifier[angle_xy] ])
identifier[self] . identifier[board] . identifier[sleep] ( literal[int] )
keyword[return] [ identifier[xa] , identifier[ya] , identifier[za] , identifier[cx] , identifier[cy] , identifier[cz] , identifier[angle_xz] , identifier[angle_yz] , identifier[angle_xy] ]
|
def read(self, callback=None):
"""
The device returns an MSB and LSB (in that order) for each axis.
These are 12 bit values - that is only the upper 4 bits of the LSB are used.
To make things more confusing, firmata returns each axis as 4 bytes, and reverses the order because
it looks at the world as lsb, msb order.
:param callback: Callback function
:returns: callback data is set with x,y,z raw (integers) followed by x,y,z corrected ( floating point)
Call available() first to make sure new data is really available.
"""
register = self.MMA8452Q_Register['OUT_X_MSB']
self.board.i2c_read_request(self.address, register, 6, Constants.I2C_READ | Constants.I2C_END_TX_MASK, self.data_val, Constants.CB_TYPE_DIRECT)
# get x y z data
xyz = self.wait_for_read_result()
self.board.sleep(0.001) # string off address and register bytes
xyz = xyz[2:]
xmsb = xyz[0]
xlsb = xyz[1]
ymsb = xyz[2]
ylsb = xyz[3]
zmsb = xyz[4]
zlsb = xyz[5]
xa = int(xmsb << 8 | xlsb) >> 4
if xmsb > 127:
xa = 4095 - xa
xa = ~xa + 1 # depends on [control=['if'], data=[]]
ya = int(ymsb << 8 | ylsb) >> 4
if ymsb > 127:
ya = 4095 - ya
ya = ~ya + 1 # depends on [control=['if'], data=[]]
za = int(zmsb << 8 | zlsb) >> 4
if zmsb > 127:
za = 4095 - za
za = ~za + 1 # depends on [control=['if'], data=[]]
cx = xa / 2048 * self.scale
cy = ya / 2048 * self.scale
cz = za / 2048 * self.scale
angle_xz = 180 * math.atan2(xa, za) / math.pi
angle_xy = 180 * math.atan2(xa, ya) / math.pi
angle_yz = 180 * math.atan2(ya, za) / math.pi
if callback:
callback([xa, ya, za, cx, cy, cz, angle_xz, angle_yz, angle_xy]) # depends on [control=['if'], data=[]]
self.board.sleep(0.001)
return [xa, ya, za, cx, cy, cz, angle_xz, angle_yz, angle_xy]
|
def parse_group(self, stream):
    """Parse a Begin/End Group statement pair into a named PVLGroup.

    Block Name must match Block Name in paired End Group Statement if
    Block Name is present in End Group Statement.

    BeginGroupStmt ::=
        BeginGroupKeywd WSC AssignmentSymbol WSC BlockName StatementDelim
    """
    # Header: BEGIN_GROUP keyword, an assignment symbol, then the group name.
    self.expect_in(stream, self.begin_group_tokens)
    self.ensure_assignment(stream)
    group_name = self.next_token(stream)
    self.skip_statement_delimiter(stream)
    # Body: every statement up to the matching END_GROUP token.
    body = self.parse_block(stream, self.has_end_group)
    # Footer: END_GROUP, optionally followed by '= <name>' which must
    # match the name captured from the header.
    self.expect_in(stream, self.end_group_tokens)
    self.parse_end_assignment(stream, group_name)
    self.skip_statement_delimiter(stream)
    return group_name.decode('utf-8'), PVLGroup(body)
|
def function[parse_group, parameter[self, stream]]:
constant[Block Name must match Block Name in paired End Group Statement if
Block Name is present in End Group Statement.
BeginGroupStmt ::=
BeginGroupKeywd WSC AssignmentSymbol WSC BlockName StatementDelim
]
call[name[self].expect_in, parameter[name[stream], name[self].begin_group_tokens]]
call[name[self].ensure_assignment, parameter[name[stream]]]
variable[name] assign[=] call[name[self].next_token, parameter[name[stream]]]
call[name[self].skip_statement_delimiter, parameter[name[stream]]]
variable[statements] assign[=] call[name[self].parse_block, parameter[name[stream], name[self].has_end_group]]
call[name[self].expect_in, parameter[name[stream], name[self].end_group_tokens]]
call[name[self].parse_end_assignment, parameter[name[stream], name[name]]]
call[name[self].skip_statement_delimiter, parameter[name[stream]]]
return[tuple[[<ast.Call object at 0x7da1b0506260>, <ast.Call object at 0x7da1b0505720>]]]
|
keyword[def] identifier[parse_group] ( identifier[self] , identifier[stream] ):
literal[string]
identifier[self] . identifier[expect_in] ( identifier[stream] , identifier[self] . identifier[begin_group_tokens] )
identifier[self] . identifier[ensure_assignment] ( identifier[stream] )
identifier[name] = identifier[self] . identifier[next_token] ( identifier[stream] )
identifier[self] . identifier[skip_statement_delimiter] ( identifier[stream] )
identifier[statements] = identifier[self] . identifier[parse_block] ( identifier[stream] , identifier[self] . identifier[has_end_group] )
identifier[self] . identifier[expect_in] ( identifier[stream] , identifier[self] . identifier[end_group_tokens] )
identifier[self] . identifier[parse_end_assignment] ( identifier[stream] , identifier[name] )
identifier[self] . identifier[skip_statement_delimiter] ( identifier[stream] )
keyword[return] identifier[name] . identifier[decode] ( literal[string] ), identifier[PVLGroup] ( identifier[statements] )
|
def parse_group(self, stream):
"""Block Name must match Block Name in paired End Group Statement if
Block Name is present in End Group Statement.
BeginGroupStmt ::=
BeginGroupKeywd WSC AssignmentSymbol WSC BlockName StatementDelim
"""
self.expect_in(stream, self.begin_group_tokens)
self.ensure_assignment(stream)
name = self.next_token(stream)
self.skip_statement_delimiter(stream)
statements = self.parse_block(stream, self.has_end_group)
self.expect_in(stream, self.end_group_tokens)
self.parse_end_assignment(stream, name)
self.skip_statement_delimiter(stream)
return (name.decode('utf-8'), PVLGroup(statements))
|
def main(manual_args=None):
    """Foremast, your ship's support."""
    # Top-level parser; the default action prints help when no subcommand
    # (and no --version flag) selects another handler.
    parser = argparse.ArgumentParser(
        description=main.__doc__,
        formatter_class=argparse.ArgumentDefaultsHelpFormatter)
    parser.set_defaults(func=parser.print_help)
    add_debug(parser)
    parser.add_argument(
        '-s',
        '--short-log',
        action='store_const',
        const=SHORT_LOGGING_FORMAT,
        default=LOGGING_FORMAT,
        help='Truncated logging format')
    parser.add_argument('-v', '--version', action='store_true', help=print_version.__doc__)
    # BUGFIX: corrected "activies" -> "activities" in the user-facing help text.
    subparsers = parser.add_subparsers(title='Commands', description='Available activities')
    # Register every subcommand group on the shared subparsers object.
    for register in (add_infra, add_pipeline, add_rebuild, add_autoscaling, add_validate):
        register(subparsers)
    CliArgs = collections.namedtuple('CliArgs', ['parsed', 'extra'])
    parsed, extra = parser.parse_known_args(args=manual_args)
    args = CliArgs(parsed, extra)
    logging.basicConfig(format=args.parsed.short_log)
    # Configure the package-root logger (e.g. 'foremast' from 'foremast.cli').
    package, *_ = __package__.split('.')
    logging.getLogger(package).setLevel(args.parsed.debug)
    LOG.debug('Arguments: %s', args)
    if args.parsed.version:
        args.parsed.func = print_version
    try:
        args.parsed.func(args)
    except (AttributeError, TypeError):
        # Handlers that take no arguments (e.g. parser.print_help) land here.
        args.parsed.func()
|
def function[main, parameter[manual_args]]:
constant[Foremast, your ship's support.]
variable[parser] assign[=] call[name[argparse].ArgumentParser, parameter[]]
call[name[parser].set_defaults, parameter[]]
call[name[add_debug], parameter[name[parser]]]
call[name[parser].add_argument, parameter[constant[-s], constant[--short-log]]]
call[name[parser].add_argument, parameter[constant[-v], constant[--version]]]
variable[subparsers] assign[=] call[name[parser].add_subparsers, parameter[]]
call[name[add_infra], parameter[name[subparsers]]]
call[name[add_pipeline], parameter[name[subparsers]]]
call[name[add_rebuild], parameter[name[subparsers]]]
call[name[add_autoscaling], parameter[name[subparsers]]]
call[name[add_validate], parameter[name[subparsers]]]
variable[CliArgs] assign[=] call[name[collections].namedtuple, parameter[constant[CliArgs], list[[<ast.Constant object at 0x7da207f00640>, <ast.Constant object at 0x7da207f00190>]]]]
<ast.Tuple object at 0x7da207f02a70> assign[=] call[name[parser].parse_known_args, parameter[]]
variable[args] assign[=] call[name[CliArgs], parameter[name[parsed], name[extra]]]
call[name[logging].basicConfig, parameter[]]
<ast.Tuple object at 0x7da207f00460> assign[=] call[name[__package__].split, parameter[constant[.]]]
call[call[name[logging].getLogger, parameter[name[package]]].setLevel, parameter[name[args].parsed.debug]]
call[name[LOG].debug, parameter[constant[Arguments: %s], name[args]]]
if name[args].parsed.version begin[:]
name[args].parsed.func assign[=] name[print_version]
<ast.Try object at 0x7da207f025c0>
|
keyword[def] identifier[main] ( identifier[manual_args] = keyword[None] ):
literal[string]
identifier[parser] = identifier[argparse] . identifier[ArgumentParser] ( identifier[description] = identifier[main] . identifier[__doc__] , identifier[formatter_class] = identifier[argparse] . identifier[ArgumentDefaultsHelpFormatter] )
identifier[parser] . identifier[set_defaults] ( identifier[func] = identifier[parser] . identifier[print_help] )
identifier[add_debug] ( identifier[parser] )
identifier[parser] . identifier[add_argument] (
literal[string] ,
literal[string] ,
identifier[action] = literal[string] ,
identifier[const] = identifier[SHORT_LOGGING_FORMAT] ,
identifier[default] = identifier[LOGGING_FORMAT] ,
identifier[help] = literal[string] )
identifier[parser] . identifier[add_argument] ( literal[string] , literal[string] , identifier[action] = literal[string] , identifier[help] = identifier[print_version] . identifier[__doc__] )
identifier[subparsers] = identifier[parser] . identifier[add_subparsers] ( identifier[title] = literal[string] , identifier[description] = literal[string] )
identifier[add_infra] ( identifier[subparsers] )
identifier[add_pipeline] ( identifier[subparsers] )
identifier[add_rebuild] ( identifier[subparsers] )
identifier[add_autoscaling] ( identifier[subparsers] )
identifier[add_validate] ( identifier[subparsers] )
identifier[CliArgs] = identifier[collections] . identifier[namedtuple] ( literal[string] ,[ literal[string] , literal[string] ])
identifier[parsed] , identifier[extra] = identifier[parser] . identifier[parse_known_args] ( identifier[args] = identifier[manual_args] )
identifier[args] = identifier[CliArgs] ( identifier[parsed] , identifier[extra] )
identifier[logging] . identifier[basicConfig] ( identifier[format] = identifier[args] . identifier[parsed] . identifier[short_log] )
identifier[package] ,* identifier[_] = identifier[__package__] . identifier[split] ( literal[string] )
identifier[logging] . identifier[getLogger] ( identifier[package] ). identifier[setLevel] ( identifier[args] . identifier[parsed] . identifier[debug] )
identifier[LOG] . identifier[debug] ( literal[string] , identifier[args] )
keyword[if] identifier[args] . identifier[parsed] . identifier[version] :
identifier[args] . identifier[parsed] . identifier[func] = identifier[print_version]
keyword[try] :
identifier[args] . identifier[parsed] . identifier[func] ( identifier[args] )
keyword[except] ( identifier[AttributeError] , identifier[TypeError] ):
identifier[args] . identifier[parsed] . identifier[func] ()
|
def main(manual_args=None):
"""Foremast, your ship's support."""
parser = argparse.ArgumentParser(description=main.__doc__, formatter_class=argparse.ArgumentDefaultsHelpFormatter)
parser.set_defaults(func=parser.print_help)
add_debug(parser)
parser.add_argument('-s', '--short-log', action='store_const', const=SHORT_LOGGING_FORMAT, default=LOGGING_FORMAT, help='Truncated logging format')
parser.add_argument('-v', '--version', action='store_true', help=print_version.__doc__)
subparsers = parser.add_subparsers(title='Commands', description='Available activies')
add_infra(subparsers)
add_pipeline(subparsers)
add_rebuild(subparsers)
add_autoscaling(subparsers)
add_validate(subparsers)
CliArgs = collections.namedtuple('CliArgs', ['parsed', 'extra'])
(parsed, extra) = parser.parse_known_args(args=manual_args)
args = CliArgs(parsed, extra)
logging.basicConfig(format=args.parsed.short_log)
(package, *_) = __package__.split('.')
logging.getLogger(package).setLevel(args.parsed.debug)
LOG.debug('Arguments: %s', args)
if args.parsed.version:
args.parsed.func = print_version # depends on [control=['if'], data=[]]
try:
args.parsed.func(args) # depends on [control=['try'], data=[]]
except (AttributeError, TypeError):
args.parsed.func() # depends on [control=['except'], data=[]]
|
def convert_to_process_params_dict(opt):
    """
    Takes the namespace object (opt) from the multi-detector interface and
    returns a dictionary of command line options that will be handled correctly
    by the register_to_process_params ligolw function.
    """
    # Deep-copy so the caller's namespace is never mutated.
    opt = copy.deepcopy(opt)
    for attr_name, attr_val in vars(opt).items():
        if not isinstance(attr_val, DictWithDefaultReturn):
            continue
        # Flatten the per-detector mapping into "ifo:value" strings,
        # skipping None entries entirely.
        flattened = []
        for ifo in attr_val.keys():
            entry = attr_val[ifo]
            if isinstance(entry, list):
                flattened.extend(
                    ':'.join([ifo, str(item)]) for item in entry if item is not None)
            elif entry is not None:
                flattened.append(':'.join([ifo, str(entry)]))
        setattr(opt, attr_name, flattened)
    return vars(opt)
|
def function[convert_to_process_params_dict, parameter[opt]]:
constant[
Takes the namespace object (opt) from the multi-detector interface and
returns a dictionary of command line options that will be handled correctly
by the register_to_process_params ligolw function.
]
variable[opt] assign[=] call[name[copy].deepcopy, parameter[name[opt]]]
for taget[tuple[[<ast.Name object at 0x7da20c991a80>, <ast.Name object at 0x7da20c990eb0>]]] in starred[call[call[name[vars], parameter[name[opt]]].items, parameter[]]] begin[:]
if call[name[isinstance], parameter[name[val], name[DictWithDefaultReturn]]] begin[:]
variable[new_val] assign[=] list[[]]
for taget[name[key]] in starred[call[name[val].keys, parameter[]]] begin[:]
if call[name[isinstance], parameter[call[name[val]][name[key]], name[list]]] begin[:]
for taget[name[item]] in starred[call[name[val]][name[key]]] begin[:]
if compare[name[item] is_not constant[None]] begin[:]
call[name[new_val].append, parameter[call[constant[:].join, parameter[list[[<ast.Name object at 0x7da20c7c9db0>, <ast.Call object at 0x7da20c7c9330>]]]]]]
call[name[setattr], parameter[name[opt], name[arg], name[new_val]]]
return[call[name[vars], parameter[name[opt]]]]
|
keyword[def] identifier[convert_to_process_params_dict] ( identifier[opt] ):
literal[string]
identifier[opt] = identifier[copy] . identifier[deepcopy] ( identifier[opt] )
keyword[for] identifier[arg] , identifier[val] keyword[in] identifier[vars] ( identifier[opt] ). identifier[items] ():
keyword[if] identifier[isinstance] ( identifier[val] , identifier[DictWithDefaultReturn] ):
identifier[new_val] =[]
keyword[for] identifier[key] keyword[in] identifier[val] . identifier[keys] ():
keyword[if] identifier[isinstance] ( identifier[val] [ identifier[key] ], identifier[list] ):
keyword[for] identifier[item] keyword[in] identifier[val] [ identifier[key] ]:
keyword[if] identifier[item] keyword[is] keyword[not] keyword[None] :
identifier[new_val] . identifier[append] ( literal[string] . identifier[join] ([ identifier[key] , identifier[str] ( identifier[item] )]))
keyword[else] :
keyword[if] identifier[val] [ identifier[key] ] keyword[is] keyword[not] keyword[None] :
identifier[new_val] . identifier[append] ( literal[string] . identifier[join] ([ identifier[key] , identifier[str] ( identifier[val] [ identifier[key] ])]))
identifier[setattr] ( identifier[opt] , identifier[arg] , identifier[new_val] )
keyword[return] identifier[vars] ( identifier[opt] )
|
def convert_to_process_params_dict(opt):
"""
Takes the namespace object (opt) from the multi-detector interface and
returns a dictionary of command line options that will be handled correctly
by the register_to_process_params ligolw function.
"""
opt = copy.deepcopy(opt)
for (arg, val) in vars(opt).items():
if isinstance(val, DictWithDefaultReturn):
new_val = []
for key in val.keys():
if isinstance(val[key], list):
for item in val[key]:
if item is not None:
new_val.append(':'.join([key, str(item)])) # depends on [control=['if'], data=['item']] # depends on [control=['for'], data=['item']] # depends on [control=['if'], data=[]]
elif val[key] is not None:
new_val.append(':'.join([key, str(val[key])])) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['key']]
setattr(opt, arg, new_val) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]]
return vars(opt)
|
def get(self, workflow_id, email_id):
    """
    Get information about an individual Automation workflow email.

    :param workflow_id: The unique id for the Automation workflow.
    :type workflow_id: :py:class:`str`
    :param email_id: The unique id for the Automation workflow email.
    :type email_id: :py:class:`str`
    """
    # Remember both ids on this endpoint object for any follow-up calls.
    self.workflow_id = workflow_id
    self.email_id = email_id
    target_url = self._build_path(workflow_id, 'emails', email_id)
    return self._mc_client._get(url=target_url)
|
def function[get, parameter[self, workflow_id, email_id]]:
constant[
Get information about an individual Automation workflow email.
:param workflow_id: The unique id for the Automation workflow.
:type workflow_id: :py:class:`str`
:param email_id: The unique id for the Automation workflow email.
:type email_id: :py:class:`str`
]
name[self].workflow_id assign[=] name[workflow_id]
name[self].email_id assign[=] name[email_id]
return[call[name[self]._mc_client._get, parameter[]]]
|
keyword[def] identifier[get] ( identifier[self] , identifier[workflow_id] , identifier[email_id] ):
literal[string]
identifier[self] . identifier[workflow_id] = identifier[workflow_id]
identifier[self] . identifier[email_id] = identifier[email_id]
keyword[return] identifier[self] . identifier[_mc_client] . identifier[_get] ( identifier[url] = identifier[self] . identifier[_build_path] ( identifier[workflow_id] , literal[string] , identifier[email_id] ))
|
def get(self, workflow_id, email_id):
"""
Get information about an individual Automation workflow email.
:param workflow_id: The unique id for the Automation workflow.
:type workflow_id: :py:class:`str`
:param email_id: The unique id for the Automation workflow email.
:type email_id: :py:class:`str`
"""
self.workflow_id = workflow_id
self.email_id = email_id
return self._mc_client._get(url=self._build_path(workflow_id, 'emails', email_id))
|
def add_health_monitor(self, type, delay=10, timeout=10,
        attemptsBeforeDeactivation=3, path="/", statusRegex=None,
        bodyRegex=None, hostHeader=None):
    """
    Adds a health monitor to the load balancer. If a monitor already
    exists, it is updated with the supplied settings.
    """
    # Pure delegation: the manager performs the API call on our behalf.
    return self.manager.add_health_monitor(
        self,
        type=type,
        delay=delay,
        timeout=timeout,
        attemptsBeforeDeactivation=attemptsBeforeDeactivation,
        path=path,
        statusRegex=statusRegex,
        bodyRegex=bodyRegex,
        hostHeader=hostHeader)
|
def function[add_health_monitor, parameter[self, type, delay, timeout, attemptsBeforeDeactivation, path, statusRegex, bodyRegex, hostHeader]]:
constant[
Adds a health monitor to the load balancer. If a monitor already
exists, it is updated with the supplied settings.
]
variable[abd] assign[=] name[attemptsBeforeDeactivation]
return[call[name[self].manager.add_health_monitor, parameter[name[self]]]]
|
keyword[def] identifier[add_health_monitor] ( identifier[self] , identifier[type] , identifier[delay] = literal[int] , identifier[timeout] = literal[int] ,
identifier[attemptsBeforeDeactivation] = literal[int] , identifier[path] = literal[string] , identifier[statusRegex] = keyword[None] ,
identifier[bodyRegex] = keyword[None] , identifier[hostHeader] = keyword[None] ):
literal[string]
identifier[abd] = identifier[attemptsBeforeDeactivation]
keyword[return] identifier[self] . identifier[manager] . identifier[add_health_monitor] ( identifier[self] , identifier[type] = identifier[type] , identifier[delay] = identifier[delay] ,
identifier[timeout] = identifier[timeout] , identifier[attemptsBeforeDeactivation] = identifier[abd] ,
identifier[path] = identifier[path] , identifier[statusRegex] = identifier[statusRegex] , identifier[bodyRegex] = identifier[bodyRegex] ,
identifier[hostHeader] = identifier[hostHeader] )
|
def add_health_monitor(self, type, delay=10, timeout=10, attemptsBeforeDeactivation=3, path='/', statusRegex=None, bodyRegex=None, hostHeader=None):
"""
Adds a health monitor to the load balancer. If a monitor already
exists, it is updated with the supplied settings.
"""
abd = attemptsBeforeDeactivation
return self.manager.add_health_monitor(self, type=type, delay=delay, timeout=timeout, attemptsBeforeDeactivation=abd, path=path, statusRegex=statusRegex, bodyRegex=bodyRegex, hostHeader=hostHeader)
|
def get(cls, resolvable_string, options_builder=None, interpreter=None):
    """Get a :class:`Resolvable` from a string.

    :returns: A :class:`Resolvable` or ``None`` if no implementation was appropriate.
    """
    builder = options_builder or ResolverOptionsBuilder()
    # First registered implementation that accepts the string wins;
    # rejections are signalled via cls.InvalidRequirement.
    for impl in cls._REGISTRY:
        try:
            return impl.from_string(resolvable_string, builder,
                                    interpreter=interpreter)
        except cls.InvalidRequirement:
            continue
    raise cls.InvalidRequirement('Unknown requirement type: %s' % resolvable_string)
|
def function[get, parameter[cls, resolvable_string, options_builder, interpreter]]:
constant[Get a :class:`Resolvable` from a string.
:returns: A :class:`Resolvable` or ``None`` if no implementation was appropriate.
]
variable[options_builder] assign[=] <ast.BoolOp object at 0x7da2041dafb0>
for taget[name[resolvable_impl]] in starred[name[cls]._REGISTRY] begin[:]
<ast.Try object at 0x7da2047e97e0>
<ast.Raise object at 0x7da20e955f30>
|
keyword[def] identifier[get] ( identifier[cls] , identifier[resolvable_string] , identifier[options_builder] = keyword[None] , identifier[interpreter] = keyword[None] ):
literal[string]
identifier[options_builder] = identifier[options_builder] keyword[or] identifier[ResolverOptionsBuilder] ()
keyword[for] identifier[resolvable_impl] keyword[in] identifier[cls] . identifier[_REGISTRY] :
keyword[try] :
keyword[return] identifier[resolvable_impl] . identifier[from_string] ( identifier[resolvable_string] ,
identifier[options_builder] ,
identifier[interpreter] = identifier[interpreter] )
keyword[except] identifier[cls] . identifier[InvalidRequirement] :
keyword[continue]
keyword[raise] identifier[cls] . identifier[InvalidRequirement] ( literal[string] % identifier[resolvable_string] )
|
def get(cls, resolvable_string, options_builder=None, interpreter=None):
"""Get a :class:`Resolvable` from a string.
:returns: A :class:`Resolvable` or ``None`` if no implementation was appropriate.
"""
options_builder = options_builder or ResolverOptionsBuilder()
for resolvable_impl in cls._REGISTRY:
try:
return resolvable_impl.from_string(resolvable_string, options_builder, interpreter=interpreter) # depends on [control=['try'], data=[]]
except cls.InvalidRequirement:
continue # depends on [control=['except'], data=[]] # depends on [control=['for'], data=['resolvable_impl']]
raise cls.InvalidRequirement('Unknown requirement type: %s' % resolvable_string)
|
def pltnp(point, v1, v2, v3):
    """
    Find the nearest point on a triangular plate to a given point.

    https://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/pltnp_c.html

    :param point: A point in 3-dimensional space.
    :type point: 3-Element Array of floats
    :param v1: Vertices of a triangular plate.
    :type v1: 3-Element Array of floats
    :param v2: Vertices of a triangular plate.
    :type v2: 3-Element Array of floats
    :param v3: Vertices of a triangular plate.
    :type v3: 3-Element Array of floats
    :return: the nearest point on a triangular plate to a given point and distance
    :rtype: tuple
    """
    # Marshal the Python sequences into C double arrays for CSPICE.
    c_point = stypes.toDoubleVector(point)
    c_v1 = stypes.toDoubleVector(v1)
    c_v2 = stypes.toDoubleVector(v2)
    c_v3 = stypes.toDoubleVector(v3)
    # Output parameters filled in by pltnp_c: nearest point and its distance.
    c_pnear = stypes.emptyDoubleVector(3)
    c_dist = ctypes.c_double()
    libspice.pltnp_c(c_point, c_v1, c_v2, c_v3, c_pnear, ctypes.byref(c_dist))
    return stypes.cVectorToPython(c_pnear), c_dist.value
|
def function[pltnp, parameter[point, v1, v2, v3]]:
constant[
Find the nearest point on a triangular plate to a given point.
https://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/pltnp_c.html
:param point: A point in 3-dimensional space.
:type point: 3-Element Array of floats
:param v1: Vertices of a triangular plate.
:type v1: 3-Element Array of floats
:param v2: Vertices of a triangular plate.
:type v2: 3-Element Array of floats
:param v3: Vertices of a triangular plate.
:type v3: 3-Element Array of floats
:return: the nearest point on a triangular plate to a given point and distance
:rtype: tuple
]
variable[point] assign[=] call[name[stypes].toDoubleVector, parameter[name[point]]]
variable[v1] assign[=] call[name[stypes].toDoubleVector, parameter[name[v1]]]
variable[v2] assign[=] call[name[stypes].toDoubleVector, parameter[name[v2]]]
variable[v3] assign[=] call[name[stypes].toDoubleVector, parameter[name[v3]]]
variable[pnear] assign[=] call[name[stypes].emptyDoubleVector, parameter[constant[3]]]
variable[dist] assign[=] call[name[ctypes].c_double, parameter[]]
call[name[libspice].pltnp_c, parameter[name[point], name[v1], name[v2], name[v3], name[pnear], call[name[ctypes].byref, parameter[name[dist]]]]]
return[tuple[[<ast.Call object at 0x7da2054a5240>, <ast.Attribute object at 0x7da2054a4b80>]]]
|
keyword[def] identifier[pltnp] ( identifier[point] , identifier[v1] , identifier[v2] , identifier[v3] ):
literal[string]
identifier[point] = identifier[stypes] . identifier[toDoubleVector] ( identifier[point] )
identifier[v1] = identifier[stypes] . identifier[toDoubleVector] ( identifier[v1] )
identifier[v2] = identifier[stypes] . identifier[toDoubleVector] ( identifier[v2] )
identifier[v3] = identifier[stypes] . identifier[toDoubleVector] ( identifier[v3] )
identifier[pnear] = identifier[stypes] . identifier[emptyDoubleVector] ( literal[int] )
identifier[dist] = identifier[ctypes] . identifier[c_double] ()
identifier[libspice] . identifier[pltnp_c] ( identifier[point] , identifier[v1] , identifier[v2] , identifier[v3] , identifier[pnear] , identifier[ctypes] . identifier[byref] ( identifier[dist] ))
keyword[return] identifier[stypes] . identifier[cVectorToPython] ( identifier[pnear] ), identifier[dist] . identifier[value]
|
def pltnp(point, v1, v2, v3):
"""
Find the nearest point on a triangular plate to a given point.
https://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/pltnp_c.html
:param point: A point in 3-dimensional space.
:type point: 3-Element Array of floats
:param v1: Vertices of a triangular plate.
:type v1: 3-Element Array of floats
:param v2: Vertices of a triangular plate.
:type v2: 3-Element Array of floats
:param v3: Vertices of a triangular plate.
:type v3: 3-Element Array of floats
:return: the nearest point on a triangular plate to a given point and distance
:rtype: tuple
"""
point = stypes.toDoubleVector(point)
v1 = stypes.toDoubleVector(v1)
v2 = stypes.toDoubleVector(v2)
v3 = stypes.toDoubleVector(v3)
pnear = stypes.emptyDoubleVector(3)
dist = ctypes.c_double()
libspice.pltnp_c(point, v1, v2, v3, pnear, ctypes.byref(dist))
return (stypes.cVectorToPython(pnear), dist.value)
|
def mode(self, predicate, args, recall=1, head=False):
    '''
    Emits mode declarations in Aleph-like format.

    :param predicate: predicate name
    :param args: predicate arguments with input/output specification, e.g.:

        >>> [('+', 'train'), ('-', 'car')]

    :param recall: recall setting (see `Aleph manual <http://www.cs.ox.ac.uk/activities/machinelearning/Aleph/aleph>`_)
    :param head: set to True for head clauses
    '''
    # 'modeh' declares head literals, 'modeb' declares body literals.
    keyword = 'modeh' if head else 'modeb'
    # Each argument becomes "<io-spec><type>", e.g. '+train'.
    typed_args = ','.join(io_spec + arg_type for io_spec, arg_type in args)
    return ':- %s(%s, %s(%s)).' % (keyword, str(recall), predicate, typed_args)
|
def function[mode, parameter[self, predicate, args, recall, head]]:
constant[
Emits mode declarations in Aleph-like format.
:param predicate: predicate name
:param args: predicate arguments with input/output specification, e.g.:
>>> [('+', 'train'), ('-', 'car')]
:param recall: recall setting (see `Aleph manual <http://www.cs.ox.ac.uk/activities/machinelearning/Aleph/aleph>`_)
:param head: set to True for head clauses
]
return[binary_operation[constant[:- mode%s(%s, %s(%s)).] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.IfExp object at 0x7da18ede7340>, <ast.Call object at 0x7da18ede5900>, <ast.Name object at 0x7da18ede47f0>, <ast.Call object at 0x7da18ede7d60>]]]]
|
keyword[def] identifier[mode] ( identifier[self] , identifier[predicate] , identifier[args] , identifier[recall] = literal[int] , identifier[head] = keyword[False] ):
literal[string]
keyword[return] literal[string] %(
literal[string] keyword[if] identifier[head] keyword[else] literal[string] , identifier[str] ( identifier[recall] ), identifier[predicate] , literal[string] . identifier[join] ([ identifier[t] + identifier[arg] keyword[for] identifier[t] , identifier[arg] keyword[in] identifier[args] ]))
|
def mode(self, predicate, args, recall=1, head=False):
"""
Emits mode declarations in Aleph-like format.
:param predicate: predicate name
:param args: predicate arguments with input/output specification, e.g.:
>>> [('+', 'train'), ('-', 'car')]
:param recall: recall setting (see `Aleph manual <http://www.cs.ox.ac.uk/activities/machinelearning/Aleph/aleph>`_)
:param head: set to True for head clauses
"""
return ':- mode%s(%s, %s(%s)).' % ('h' if head else 'b', str(recall), predicate, ','.join([t + arg for (t, arg) in args]))
|
def _ends_with_vowel(self, letter_group: str) -> bool:
"""Check if a string ends with a vowel."""
if len(letter_group) == 0:
return False
return self._contains_vowels(letter_group[-1])
|
def function[_ends_with_vowel, parameter[self, letter_group]]:
constant[Check if a string ends with a vowel.]
if compare[call[name[len], parameter[name[letter_group]]] equal[==] constant[0]] begin[:]
return[constant[False]]
return[call[name[self]._contains_vowels, parameter[call[name[letter_group]][<ast.UnaryOp object at 0x7da20e954a60>]]]]
|
keyword[def] identifier[_ends_with_vowel] ( identifier[self] , identifier[letter_group] : identifier[str] )-> identifier[bool] :
literal[string]
keyword[if] identifier[len] ( identifier[letter_group] )== literal[int] :
keyword[return] keyword[False]
keyword[return] identifier[self] . identifier[_contains_vowels] ( identifier[letter_group] [- literal[int] ])
|
def _ends_with_vowel(self, letter_group: str) -> bool:
"""Check if a string ends with a vowel."""
if len(letter_group) == 0:
return False # depends on [control=['if'], data=[]]
return self._contains_vowels(letter_group[-1])
|
def get_abbreviations(self):
    """
    Get abbreviations of the names of the author.

    Looks up the RDF resource that types a name form as an "abbreviation",
    then collects the labels of all alternative name forms carrying that
    type. Any failure during the lookup is logged and an empty list is
    returned (best-effort semantics).

    :return: a list of strings (empty list if no abbreviations available).
    """
    abbreviations = []
    try:
        # Resolve the E55_Type resource used to tag abbreviation name forms.
        type_abbreviation = self.session.get_resource(BASE_URI_TYPES % "abbreviation"
                , self.session.get_class(surf.ns.ECRM['E55_Type']))
        # NOTE(review): `unicode` implies this module targets Python 2 —
        # confirm before porting.
        abbreviations = [unicode(label)
                         for name in self.ecrm_P1_is_identified_by
                         for abbreviation in name.ecrm_P139_has_alternative_form
                         for label in abbreviation.rdfs_label
                         if name.uri == surf.ns.EFRBROO['F12_Name']
                         and abbreviation.ecrm_P2_has_type.first == type_abbreviation]
    except Exception as e:
        # Best-effort: any lookup failure leaves `abbreviations` empty.
        # NOTE(review): '%a' is not a valid Python 2 %-format code, so this
        # debug call may itself raise inside the handler — verify; the
        # `finally` return below would mask that secondary error.
        logger.debug("Exception raised when getting abbreviations for %a"%self)
    finally:
        # `return` in `finally` swallows any in-flight exception; this appears
        # intentional (always return whatever was collected so far).
        return abbreviations
|
def function[get_abbreviations, parameter[self]]:
constant[
Get abbreviations of the names of the author.
:return: a list of strings (empty list if no abbreviations available).
]
variable[abbreviations] assign[=] list[[]]
<ast.Try object at 0x7da2041d99c0>
|
keyword[def] identifier[get_abbreviations] ( identifier[self] ):
literal[string]
identifier[abbreviations] =[]
keyword[try] :
identifier[type_abbreviation] = identifier[self] . identifier[session] . identifier[get_resource] ( identifier[BASE_URI_TYPES] % literal[string]
, identifier[self] . identifier[session] . identifier[get_class] ( identifier[surf] . identifier[ns] . identifier[ECRM] [ literal[string] ]))
identifier[abbreviations] =[ identifier[unicode] ( identifier[label] )
keyword[for] identifier[name] keyword[in] identifier[self] . identifier[ecrm_P1_is_identified_by]
keyword[for] identifier[abbreviation] keyword[in] identifier[name] . identifier[ecrm_P139_has_alternative_form]
keyword[for] identifier[label] keyword[in] identifier[abbreviation] . identifier[rdfs_label]
keyword[if] identifier[name] . identifier[uri] == identifier[surf] . identifier[ns] . identifier[EFRBROO] [ literal[string] ]
keyword[and] identifier[abbreviation] . identifier[ecrm_P2_has_type] . identifier[first] == identifier[type_abbreviation] ]
keyword[except] identifier[Exception] keyword[as] identifier[e] :
identifier[logger] . identifier[debug] ( literal[string] % identifier[self] )
keyword[finally] :
keyword[return] identifier[abbreviations]
|
def get_abbreviations(self):
"""
Get abbreviations of the names of the author.
:return: a list of strings (empty list if no abbreviations available).
"""
abbreviations = []
try:
type_abbreviation = self.session.get_resource(BASE_URI_TYPES % 'abbreviation', self.session.get_class(surf.ns.ECRM['E55_Type']))
abbreviations = [unicode(label) for name in self.ecrm_P1_is_identified_by for abbreviation in name.ecrm_P139_has_alternative_form for label in abbreviation.rdfs_label if name.uri == surf.ns.EFRBROO['F12_Name'] and abbreviation.ecrm_P2_has_type.first == type_abbreviation] # depends on [control=['try'], data=[]]
except Exception as e:
logger.debug('Exception raised when getting abbreviations for %a' % self) # depends on [control=['except'], data=[]]
finally:
return abbreviations
|
def update_consumer_group(self, project, logstore, consumer_group, timeout=None, in_order=None):
    """ Update a consumer group's settings.

    At least one of `timeout` and `in_order` must be supplied; only the
    fields that were supplied are sent to the service.

    :type project: string
    :param project: project name
    :type logstore: string
    :param logstore: logstore name
    :type consumer_group: string
    :param consumer_group: consumer group name
    :type timeout: int
    :param timeout: heartbeat timeout (optional)
    :type in_order: bool
    :param in_order: whether consumption must happen in order (optional)
    :return: UpdateConsumerGroupResponse
    :raise: ValueError when both `timeout` and `in_order` are None
    """
    # Build the request body from only the parameters that were provided.
    body_dict = {}
    if in_order is not None:
        body_dict['order'] = in_order
    if timeout is not None:
        body_dict['timeout'] = timeout
    if not body_dict:
        raise ValueError("in_order and timeout can't all be None")
    body_str = six.b(json.dumps(body_dict))
    headers = {
        "x-log-bodyrawsize": str(len(body_str)),
        "Content-Type": "application/json"
    }
    resource = "/logstores/%s/consumergroups/%s" % (logstore, consumer_group)
    (resp, header) = self._send("PUT", project, body_str, resource, {}, headers)
    return UpdateConsumerGroupResponse(header, resp)
|
def function[update_consumer_group, parameter[self, project, logstore, consumer_group, timeout, in_order]]:
constant[ Update consumer group
:type project: string
:param project: project name
:type logstore: string
:param logstore: logstore name
:type consumer_group: string
:param consumer_group: consumer group name
:type timeout: int
:param timeout: timeout
:type in_order: bool
:param in_order: order
:return: None
]
if <ast.BoolOp object at 0x7da1b0862740> begin[:]
<ast.Raise object at 0x7da1b08626e0>
variable[body_str] assign[=] call[name[six].b, parameter[call[name[json].dumps, parameter[name[body_dict]]]]]
variable[headers] assign[=] dictionary[[<ast.Constant object at 0x7da2043459c0>, <ast.Constant object at 0x7da204346380>], [<ast.Call object at 0x7da204346ec0>, <ast.Constant object at 0x7da204347250>]]
variable[params] assign[=] dictionary[[], []]
variable[resource] assign[=] binary_operation[binary_operation[binary_operation[constant[/logstores/] + name[logstore]] + constant[/consumergroups/]] + name[consumer_group]]
<ast.Tuple object at 0x7da2043454e0> assign[=] call[name[self]._send, parameter[constant[PUT], name[project], name[body_str], name[resource], name[params], name[headers]]]
return[call[name[UpdateConsumerGroupResponse], parameter[name[header], name[resp]]]]
|
keyword[def] identifier[update_consumer_group] ( identifier[self] , identifier[project] , identifier[logstore] , identifier[consumer_group] , identifier[timeout] = keyword[None] , identifier[in_order] = keyword[None] ):
literal[string]
keyword[if] identifier[in_order] keyword[is] keyword[None] keyword[and] identifier[timeout] keyword[is] keyword[None] :
keyword[raise] identifier[ValueError] ( literal[string] )
keyword[elif] identifier[in_order] keyword[is] keyword[not] keyword[None] keyword[and] identifier[timeout] keyword[is] keyword[not] keyword[None] :
identifier[body_dict] ={
literal[string] : identifier[in_order] ,
literal[string] : identifier[timeout]
}
keyword[elif] identifier[in_order] keyword[is] keyword[not] keyword[None] :
identifier[body_dict] ={
literal[string] : identifier[in_order]
}
keyword[else] :
identifier[body_dict] ={
literal[string] : identifier[timeout]
}
identifier[body_str] = identifier[six] . identifier[b] ( identifier[json] . identifier[dumps] ( identifier[body_dict] ))
identifier[headers] ={
literal[string] : identifier[str] ( identifier[len] ( identifier[body_str] )),
literal[string] : literal[string]
}
identifier[params] ={}
identifier[resource] = literal[string] + identifier[logstore] + literal[string] + identifier[consumer_group]
( identifier[resp] , identifier[header] )= identifier[self] . identifier[_send] ( literal[string] , identifier[project] , identifier[body_str] , identifier[resource] , identifier[params] , identifier[headers] )
keyword[return] identifier[UpdateConsumerGroupResponse] ( identifier[header] , identifier[resp] )
|
def update_consumer_group(self, project, logstore, consumer_group, timeout=None, in_order=None):
""" Update consumer group
:type project: string
:param project: project name
:type logstore: string
:param logstore: logstore name
:type consumer_group: string
:param consumer_group: consumer group name
:type timeout: int
:param timeout: timeout
:type in_order: bool
:param in_order: order
:return: None
"""
if in_order is None and timeout is None:
raise ValueError("in_order and timeout can't all be None") # depends on [control=['if'], data=[]]
elif in_order is not None and timeout is not None:
body_dict = {'order': in_order, 'timeout': timeout} # depends on [control=['if'], data=[]]
elif in_order is not None:
body_dict = {'order': in_order} # depends on [control=['if'], data=['in_order']]
else:
body_dict = {'timeout': timeout}
body_str = six.b(json.dumps(body_dict))
headers = {'x-log-bodyrawsize': str(len(body_str)), 'Content-Type': 'application/json'}
params = {}
resource = '/logstores/' + logstore + '/consumergroups/' + consumer_group
(resp, header) = self._send('PUT', project, body_str, resource, params, headers)
return UpdateConsumerGroupResponse(header, resp)
|
def run_to_selected_state(self, path, state_machine_id=None):
    """Run the state machine up to, but not including, the state at `path`.

    This is an asynchronous task; the call returns immediately.
    """
    # Resume any paused states of the currently active state machine first.
    if self.state_machine_manager.get_active_state_machine() is not None:
        self.state_machine_manager.get_active_state_machine().root_state.recursively_resume_states()
    if self.finished_or_stopped():
        # Engine is idle: select the machine, switch mode, set the target and start.
        logger.debug("Start execution engine and run to selected state!")
        if state_machine_id is not None:
            self.state_machine_manager.active_state_machine_id = state_machine_id
        self.set_execution_mode(StateMachineExecutionStatus.RUN_TO_SELECTED_STATE)
        self.run_to_states = [path]
        self._run_active_state_machine()
    else:
        # Engine is already running: retarget it, then switch mode.
        logger.debug("Resume execution engine and run to selected state!")
        self.run_to_states = [path]
        self.set_execution_mode(StateMachineExecutionStatus.RUN_TO_SELECTED_STATE)
|
def function[run_to_selected_state, parameter[self, path, state_machine_id]]:
constant[Execute the state machine until a specific state. This state won't be executed. This is an asynchronous task
]
if compare[call[name[self].state_machine_manager.get_active_state_machine, parameter[]] is_not constant[None]] begin[:]
call[call[name[self].state_machine_manager.get_active_state_machine, parameter[]].root_state.recursively_resume_states, parameter[]]
if <ast.UnaryOp object at 0x7da18eb57730> begin[:]
call[name[logger].debug, parameter[constant[Resume execution engine and run to selected state!]]]
name[self].run_to_states assign[=] list[[]]
call[name[self].run_to_states.append, parameter[name[path]]]
call[name[self].set_execution_mode, parameter[name[StateMachineExecutionStatus].RUN_TO_SELECTED_STATE]]
|
keyword[def] identifier[run_to_selected_state] ( identifier[self] , identifier[path] , identifier[state_machine_id] = keyword[None] ):
literal[string]
keyword[if] identifier[self] . identifier[state_machine_manager] . identifier[get_active_state_machine] () keyword[is] keyword[not] keyword[None] :
identifier[self] . identifier[state_machine_manager] . identifier[get_active_state_machine] (). identifier[root_state] . identifier[recursively_resume_states] ()
keyword[if] keyword[not] identifier[self] . identifier[finished_or_stopped] ():
identifier[logger] . identifier[debug] ( literal[string] )
identifier[self] . identifier[run_to_states] =[]
identifier[self] . identifier[run_to_states] . identifier[append] ( identifier[path] )
identifier[self] . identifier[set_execution_mode] ( identifier[StateMachineExecutionStatus] . identifier[RUN_TO_SELECTED_STATE] )
keyword[else] :
identifier[logger] . identifier[debug] ( literal[string] )
keyword[if] identifier[state_machine_id] keyword[is] keyword[not] keyword[None] :
identifier[self] . identifier[state_machine_manager] . identifier[active_state_machine_id] = identifier[state_machine_id]
identifier[self] . identifier[set_execution_mode] ( identifier[StateMachineExecutionStatus] . identifier[RUN_TO_SELECTED_STATE] )
identifier[self] . identifier[run_to_states] =[]
identifier[self] . identifier[run_to_states] . identifier[append] ( identifier[path] )
identifier[self] . identifier[_run_active_state_machine] ()
|
def run_to_selected_state(self, path, state_machine_id=None):
"""Execute the state machine until a specific state. This state won't be executed. This is an asynchronous task
"""
if self.state_machine_manager.get_active_state_machine() is not None:
self.state_machine_manager.get_active_state_machine().root_state.recursively_resume_states() # depends on [control=['if'], data=[]]
if not self.finished_or_stopped():
logger.debug('Resume execution engine and run to selected state!')
self.run_to_states = []
self.run_to_states.append(path)
self.set_execution_mode(StateMachineExecutionStatus.RUN_TO_SELECTED_STATE) # depends on [control=['if'], data=[]]
else:
logger.debug('Start execution engine and run to selected state!')
if state_machine_id is not None:
self.state_machine_manager.active_state_machine_id = state_machine_id # depends on [control=['if'], data=['state_machine_id']]
self.set_execution_mode(StateMachineExecutionStatus.RUN_TO_SELECTED_STATE)
self.run_to_states = []
self.run_to_states.append(path)
self._run_active_state_machine()
|
def callMLlibFunc(name, *args):
    """Invoke the method `name` on the JVM-side PythonMLLibAPI helper."""
    spark_context = SparkContext.getOrCreate()
    java_api = getattr(spark_context._jvm.PythonMLLibAPI(), name)
    return callJavaFunc(spark_context, java_api, *args)
|
def function[callMLlibFunc, parameter[name]]:
constant[ Call API in PythonMLLibAPI ]
variable[sc] assign[=] call[name[SparkContext].getOrCreate, parameter[]]
variable[api] assign[=] call[name[getattr], parameter[call[name[sc]._jvm.PythonMLLibAPI, parameter[]], name[name]]]
return[call[name[callJavaFunc], parameter[name[sc], name[api], <ast.Starred object at 0x7da18f58ccd0>]]]
|
keyword[def] identifier[callMLlibFunc] ( identifier[name] ,* identifier[args] ):
literal[string]
identifier[sc] = identifier[SparkContext] . identifier[getOrCreate] ()
identifier[api] = identifier[getattr] ( identifier[sc] . identifier[_jvm] . identifier[PythonMLLibAPI] (), identifier[name] )
keyword[return] identifier[callJavaFunc] ( identifier[sc] , identifier[api] ,* identifier[args] )
|
def callMLlibFunc(name, *args):
""" Call API in PythonMLLibAPI """
sc = SparkContext.getOrCreate()
api = getattr(sc._jvm.PythonMLLibAPI(), name)
return callJavaFunc(sc, api, *args)
|
def _humanize_bytes(num_bytes, precision=1):
    """Return `num_bytes` as a human-readable string (e.g. '12.1 MB').

    Based on the ActiveState recipe 577081 ("humanized representation of a
    number of bytes"). Relies on the module-level ABBREVS table of
    (factor, suffix) pairs; the first pair whose factor is <= `num_bytes`
    determines the unit, so ABBREVS is assumed to be ordered from the
    largest factor down - TODO confirm.

    :param num_bytes: byte count to format
    :param precision: decimal places to show; forced to 0 when the scaled
        value is exactly 1, so exact unit multiples render as e.g. '1 kB'
    :return: formatted string such as '123.0 kB'
    """
    if num_bytes == 0:
        return 'no bytes'
    if num_bytes == 1:
        return '1 byte'
    scaled = 0
    unit = 'bytes'
    for threshold, name in ABBREVS:
        if num_bytes >= threshold:
            scaled = num_bytes / threshold
            unit = name
            break
    # Exact unit multiples are shown without decimals ('1 kB', not '1.0 kB').
    digits = 0 if scaled == 1 else precision
    return '{:.{prec}f} {}'.format(scaled, unit, prec=digits)
|
def function[_humanize_bytes, parameter[num_bytes, precision]]:
constant[
Return a humanized string representation of a number of num_bytes.
from:
http://code.activestate.com/recipes/
577081-humanized-representation-of-a-number-of-num_bytes/
Assumes `from __future__ import division`.
>>> humanize_bytes(1)
'1 byte'
>>> humanize_bytes(1024)
'1.0 kB'
>>> humanize_bytes(1024*123)
'123.0 kB'
>>> humanize_bytes(1024*12342)
'12.1 MB'
>>> humanize_bytes(1024*12342,2)
'12.05 MB'
>>> humanize_bytes(1024*1234,2)
'1.21 MB'
>>> humanize_bytes(1024*1234*1111,2)
'1.31 GB'
>>> humanize_bytes(1024*1234*1111,1)
'1.3 GB'
]
if compare[name[num_bytes] equal[==] constant[0]] begin[:]
return[constant[no bytes]]
if compare[name[num_bytes] equal[==] constant[1]] begin[:]
return[constant[1 byte]]
variable[factored_bytes] assign[=] constant[0]
variable[factor_suffix] assign[=] constant[bytes]
for taget[tuple[[<ast.Name object at 0x7da1b26acc10>, <ast.Name object at 0x7da1b196f790>]]] in starred[name[ABBREVS]] begin[:]
if compare[name[num_bytes] greater_or_equal[>=] name[factor]] begin[:]
variable[factored_bytes] assign[=] binary_operation[name[num_bytes] / name[factor]]
variable[factor_suffix] assign[=] name[suffix]
break
if compare[name[factored_bytes] equal[==] constant[1]] begin[:]
variable[precision] assign[=] constant[0]
return[call[constant[{:.{prec}f} {}].format, parameter[name[factored_bytes], name[factor_suffix]]]]
|
keyword[def] identifier[_humanize_bytes] ( identifier[num_bytes] , identifier[precision] = literal[int] ):
literal[string]
keyword[if] identifier[num_bytes] == literal[int] :
keyword[return] literal[string]
keyword[if] identifier[num_bytes] == literal[int] :
keyword[return] literal[string]
identifier[factored_bytes] = literal[int]
identifier[factor_suffix] = literal[string]
keyword[for] identifier[factor] , identifier[suffix] keyword[in] identifier[ABBREVS] :
keyword[if] identifier[num_bytes] >= identifier[factor] :
identifier[factored_bytes] = identifier[num_bytes] / identifier[factor]
identifier[factor_suffix] = identifier[suffix]
keyword[break]
keyword[if] identifier[factored_bytes] == literal[int] :
identifier[precision] = literal[int]
keyword[return] literal[string] . identifier[format] ( identifier[factored_bytes] , identifier[factor_suffix] ,
identifier[prec] = identifier[precision] )
|
def _humanize_bytes(num_bytes, precision=1):
"""
Return a humanized string representation of a number of num_bytes.
from:
http://code.activestate.com/recipes/
577081-humanized-representation-of-a-number-of-num_bytes/
Assumes `from __future__ import division`.
>>> humanize_bytes(1)
'1 byte'
>>> humanize_bytes(1024)
'1.0 kB'
>>> humanize_bytes(1024*123)
'123.0 kB'
>>> humanize_bytes(1024*12342)
'12.1 MB'
>>> humanize_bytes(1024*12342,2)
'12.05 MB'
>>> humanize_bytes(1024*1234,2)
'1.21 MB'
>>> humanize_bytes(1024*1234*1111,2)
'1.31 GB'
>>> humanize_bytes(1024*1234*1111,1)
'1.3 GB'
"""
if num_bytes == 0:
return 'no bytes' # depends on [control=['if'], data=[]]
if num_bytes == 1:
return '1 byte' # depends on [control=['if'], data=[]]
factored_bytes = 0
factor_suffix = 'bytes'
for (factor, suffix) in ABBREVS:
if num_bytes >= factor:
factored_bytes = num_bytes / factor
factor_suffix = suffix
break # depends on [control=['if'], data=['num_bytes', 'factor']] # depends on [control=['for'], data=[]]
if factored_bytes == 1:
precision = 0 # depends on [control=['if'], data=[]]
return '{:.{prec}f} {}'.format(factored_bytes, factor_suffix, prec=precision)
|
def pdb_downloader_and_metadata(self, outdir=None, pdb_file_type=None, force_rerun=False):
    """Download ALL mapped experimental structures into each protein's structures directory.

    Args:
        outdir (str): Output directory path, when the GEM-PRO directories were
            not set or another location is wanted
        pdb_file_type (str): PDB file format to download, when it differs from
            the one already configured on this object
        force_rerun (bool): Re-download files even when they already exist
    """
    # Fall back to the instance-wide default file type when none is given.
    file_type = pdb_file_type if pdb_file_type else self.pdb_file_type
    total_saved = 0
    for gene in tqdm(self.genes):
        downloaded = gene.protein.pdb_downloader_and_metadata(
            outdir=outdir, pdb_file_type=file_type, force_rerun=force_rerun)
        if downloaded:
            total_saved += len(downloaded)
    log.info('Updated PDB metadata dataframe. See the "df_pdb_metadata" attribute for a summary dataframe.')
    log.info('Saved {} structures total'.format(total_saved))
|
def function[pdb_downloader_and_metadata, parameter[self, outdir, pdb_file_type, force_rerun]]:
constant[Download ALL mapped experimental structures to each protein's structures directory.
Args:
outdir (str): Path to output directory, if GEM-PRO directories were not set or other output directory is
desired
pdb_file_type (str): Type of PDB file to download, if not already set or other format is desired
force_rerun (bool): If files should be re-downloaded if they already exist
]
if <ast.UnaryOp object at 0x7da1b0e2e500> begin[:]
variable[pdb_file_type] assign[=] name[self].pdb_file_type
variable[counter] assign[=] constant[0]
for taget[name[g]] in starred[call[name[tqdm], parameter[name[self].genes]]] begin[:]
variable[pdbs] assign[=] call[name[g].protein.pdb_downloader_and_metadata, parameter[]]
if name[pdbs] begin[:]
<ast.AugAssign object at 0x7da1b0e2e4a0>
call[name[log].info, parameter[constant[Updated PDB metadata dataframe. See the "df_pdb_metadata" attribute for a summary dataframe.]]]
call[name[log].info, parameter[call[constant[Saved {} structures total].format, parameter[name[counter]]]]]
|
keyword[def] identifier[pdb_downloader_and_metadata] ( identifier[self] , identifier[outdir] = keyword[None] , identifier[pdb_file_type] = keyword[None] , identifier[force_rerun] = keyword[False] ):
literal[string]
keyword[if] keyword[not] identifier[pdb_file_type] :
identifier[pdb_file_type] = identifier[self] . identifier[pdb_file_type]
identifier[counter] = literal[int]
keyword[for] identifier[g] keyword[in] identifier[tqdm] ( identifier[self] . identifier[genes] ):
identifier[pdbs] = identifier[g] . identifier[protein] . identifier[pdb_downloader_and_metadata] ( identifier[outdir] = identifier[outdir] , identifier[pdb_file_type] = identifier[pdb_file_type] , identifier[force_rerun] = identifier[force_rerun] )
keyword[if] identifier[pdbs] :
identifier[counter] += identifier[len] ( identifier[pdbs] )
identifier[log] . identifier[info] ( literal[string] )
identifier[log] . identifier[info] ( literal[string] . identifier[format] ( identifier[counter] ))
|
def pdb_downloader_and_metadata(self, outdir=None, pdb_file_type=None, force_rerun=False):
"""Download ALL mapped experimental structures to each protein's structures directory.
Args:
outdir (str): Path to output directory, if GEM-PRO directories were not set or other output directory is
desired
pdb_file_type (str): Type of PDB file to download, if not already set or other format is desired
force_rerun (bool): If files should be re-downloaded if they already exist
"""
if not pdb_file_type:
pdb_file_type = self.pdb_file_type # depends on [control=['if'], data=[]]
counter = 0
for g in tqdm(self.genes):
pdbs = g.protein.pdb_downloader_and_metadata(outdir=outdir, pdb_file_type=pdb_file_type, force_rerun=force_rerun)
if pdbs:
counter += len(pdbs) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['g']]
log.info('Updated PDB metadata dataframe. See the "df_pdb_metadata" attribute for a summary dataframe.')
log.info('Saved {} structures total'.format(counter))
|
def wrap(query, container, namespace):
    """
    NORMALIZE QUERY SO IT CAN STILL BE JSON

    Expands a (possibly partial) query description into a fully-populated
    QueryOp against `container`: resolves the source table, normalizes the
    select/edges/groupby/where/window/sort clauses and caps the limit.

    :param query: raw query (dict-like) or an already-normalized QueryOp
    :param container: provides get_table() to resolve the query's `from` clause
    :param namespace: unused in this body
    :return: a QueryOp instance (or the input unchanged if already normalized)
    """
    # Pass through queries that are already normalized, and null queries.
    # NOTE(review): `== None` (not `is None`) looks deliberate - it presumably
    # also matches the library's Null sentinel; confirm before "fixing".
    if is_op(query, QueryOp) or query == None:
        return query
    # NOTE(review): as written this calls the enclosing function with one
    # argument (which would raise TypeError); presumably `wrap` here refers to
    # an imported dict-wrapping helper (e.g. mo_dots.wrap) that this definition
    # shadows in the original module layout - verify.
    query = wrap(query)
    table = container.get_table(query['from'])
    schema = table.schema
    output = QueryOp(
        frum=table,
        format=query.format,
        # Cap the requested limit; a missing limit falls back to DEFAULT_LIMIT.
        limit=mo_math.min(MAX_LIMIT, coalesce(query.limit, DEFAULT_LIMIT))
    )
    if query.select or isinstance(query.select, (Mapping, list)):
        output.select = _normalize_selects(query.select, query.frum, schema=schema)
    else:
        # No explicit select: aggregations get the default select clause,
        # otherwise select the whole record (".").
        if query.edges or query.groupby:
            output.select = DEFAULT_SELECT
        else:
            output.select = _normalize_selects(".", query.frum)
    # `edges` and `groupby` are mutually exclusive aggregation clauses.
    if query.groupby and query.edges:
        Log.error("You can not use both the `groupby` and `edges` clauses in the same query!")
    elif query.edges:
        output.edges = _normalize_edges(query.edges, limit=output.limit, schema=schema)
        output.groupby = Null
    elif query.groupby:
        output.edges = Null
        output.groupby = _normalize_groupby(query.groupby, limit=output.limit, schema=schema)
    else:
        output.edges = Null
        output.groupby = Null
    output.where = _normalize_where(query.where, schema=schema)
    output.window = [_normalize_window(w) for w in listwrap(query.window)]
    # `having` is accepted but not normalized here.
    output.having = None
    output.sort = _normalize_sort(query.sort)
    if not mo_math.is_integer(output.limit) or output.limit < 0:
        Log.error("Expecting limit >= 0")
    output.isLean = query.isLean
    return output
|
def function[wrap, parameter[query, container, namespace]]:
constant[
NORMALIZE QUERY SO IT CAN STILL BE JSON
]
if <ast.BoolOp object at 0x7da1b0ab48e0> begin[:]
return[name[query]]
variable[query] assign[=] call[name[wrap], parameter[name[query]]]
variable[table] assign[=] call[name[container].get_table, parameter[call[name[query]][constant[from]]]]
variable[schema] assign[=] name[table].schema
variable[output] assign[=] call[name[QueryOp], parameter[]]
if <ast.BoolOp object at 0x7da1b0acbfd0> begin[:]
name[output].select assign[=] call[name[_normalize_selects], parameter[name[query].select, name[query].frum]]
if <ast.BoolOp object at 0x7da1b0ac88e0> begin[:]
call[name[Log].error, parameter[constant[You can not use both the `groupby` and `edges` clauses in the same query!]]]
name[output].where assign[=] call[name[_normalize_where], parameter[name[query].where]]
name[output].window assign[=] <ast.ListComp object at 0x7da1b0a061d0>
name[output].having assign[=] constant[None]
name[output].sort assign[=] call[name[_normalize_sort], parameter[name[query].sort]]
if <ast.BoolOp object at 0x7da1b0a83910> begin[:]
call[name[Log].error, parameter[constant[Expecting limit >= 0]]]
name[output].isLean assign[=] name[query].isLean
return[name[output]]
|
keyword[def] identifier[wrap] ( identifier[query] , identifier[container] , identifier[namespace] ):
literal[string]
keyword[if] identifier[is_op] ( identifier[query] , identifier[QueryOp] ) keyword[or] identifier[query] == keyword[None] :
keyword[return] identifier[query]
identifier[query] = identifier[wrap] ( identifier[query] )
identifier[table] = identifier[container] . identifier[get_table] ( identifier[query] [ literal[string] ])
identifier[schema] = identifier[table] . identifier[schema]
identifier[output] = identifier[QueryOp] (
identifier[frum] = identifier[table] ,
identifier[format] = identifier[query] . identifier[format] ,
identifier[limit] = identifier[mo_math] . identifier[min] ( identifier[MAX_LIMIT] , identifier[coalesce] ( identifier[query] . identifier[limit] , identifier[DEFAULT_LIMIT] ))
)
keyword[if] identifier[query] . identifier[select] keyword[or] identifier[isinstance] ( identifier[query] . identifier[select] ,( identifier[Mapping] , identifier[list] )):
identifier[output] . identifier[select] = identifier[_normalize_selects] ( identifier[query] . identifier[select] , identifier[query] . identifier[frum] , identifier[schema] = identifier[schema] )
keyword[else] :
keyword[if] identifier[query] . identifier[edges] keyword[or] identifier[query] . identifier[groupby] :
identifier[output] . identifier[select] = identifier[DEFAULT_SELECT]
keyword[else] :
identifier[output] . identifier[select] = identifier[_normalize_selects] ( literal[string] , identifier[query] . identifier[frum] )
keyword[if] identifier[query] . identifier[groupby] keyword[and] identifier[query] . identifier[edges] :
identifier[Log] . identifier[error] ( literal[string] )
keyword[elif] identifier[query] . identifier[edges] :
identifier[output] . identifier[edges] = identifier[_normalize_edges] ( identifier[query] . identifier[edges] , identifier[limit] = identifier[output] . identifier[limit] , identifier[schema] = identifier[schema] )
identifier[output] . identifier[groupby] = identifier[Null]
keyword[elif] identifier[query] . identifier[groupby] :
identifier[output] . identifier[edges] = identifier[Null]
identifier[output] . identifier[groupby] = identifier[_normalize_groupby] ( identifier[query] . identifier[groupby] , identifier[limit] = identifier[output] . identifier[limit] , identifier[schema] = identifier[schema] )
keyword[else] :
identifier[output] . identifier[edges] = identifier[Null]
identifier[output] . identifier[groupby] = identifier[Null]
identifier[output] . identifier[where] = identifier[_normalize_where] ( identifier[query] . identifier[where] , identifier[schema] = identifier[schema] )
identifier[output] . identifier[window] =[ identifier[_normalize_window] ( identifier[w] ) keyword[for] identifier[w] keyword[in] identifier[listwrap] ( identifier[query] . identifier[window] )]
identifier[output] . identifier[having] = keyword[None]
identifier[output] . identifier[sort] = identifier[_normalize_sort] ( identifier[query] . identifier[sort] )
keyword[if] keyword[not] identifier[mo_math] . identifier[is_integer] ( identifier[output] . identifier[limit] ) keyword[or] identifier[output] . identifier[limit] < literal[int] :
identifier[Log] . identifier[error] ( literal[string] )
identifier[output] . identifier[isLean] = identifier[query] . identifier[isLean]
keyword[return] identifier[output]
|
def wrap(query, container, namespace):
"""
NORMALIZE QUERY SO IT CAN STILL BE JSON
"""
if is_op(query, QueryOp) or query == None:
return query # depends on [control=['if'], data=[]]
query = wrap(query)
table = container.get_table(query['from'])
schema = table.schema
output = QueryOp(frum=table, format=query.format, limit=mo_math.min(MAX_LIMIT, coalesce(query.limit, DEFAULT_LIMIT)))
if query.select or isinstance(query.select, (Mapping, list)):
output.select = _normalize_selects(query.select, query.frum, schema=schema) # depends on [control=['if'], data=[]]
elif query.edges or query.groupby:
output.select = DEFAULT_SELECT # depends on [control=['if'], data=[]]
else:
output.select = _normalize_selects('.', query.frum)
if query.groupby and query.edges:
Log.error('You can not use both the `groupby` and `edges` clauses in the same query!') # depends on [control=['if'], data=[]]
elif query.edges:
output.edges = _normalize_edges(query.edges, limit=output.limit, schema=schema)
output.groupby = Null # depends on [control=['if'], data=[]]
elif query.groupby:
output.edges = Null
output.groupby = _normalize_groupby(query.groupby, limit=output.limit, schema=schema) # depends on [control=['if'], data=[]]
else:
output.edges = Null
output.groupby = Null
output.where = _normalize_where(query.where, schema=schema)
output.window = [_normalize_window(w) for w in listwrap(query.window)]
output.having = None
output.sort = _normalize_sort(query.sort)
if not mo_math.is_integer(output.limit) or output.limit < 0:
Log.error('Expecting limit >= 0') # depends on [control=['if'], data=[]]
output.isLean = query.isLean
return output
|
def load(cls, data):
    """Construct a Constant class from it's dict data.

    `data` must be a single-entry dict mapping the class name to a spec dict
    that carries a '__classname__' marker; nested specs with their own
    '__classname__' are loaded recursively into nested classes.

    .. versionadded:: 0.0.2
    """
    if len(data) != 1:  # pragma: no cover
        raise ValueError
    for class_name, spec in data.items():
        if "__classname__" not in spec:  # pragma: no cover
            raise ValueError
        members = dict()
        for attr, attr_value in spec.items():
            # Dicts carrying the marker become nested Constant classes.
            if isinstance(attr_value, dict) and "__classname__" in attr_value:
                members[attr] = cls.load({attr: attr_value})
            else:
                members[attr] = attr_value
        return type(class_name, (Constant,), members)
|
def function[load, parameter[cls, data]]:
constant[Construct a Constant class from it's dict data.
.. versionadded:: 0.0.2
]
if compare[call[name[len], parameter[name[data]]] equal[==] constant[1]] begin[:]
for taget[tuple[[<ast.Name object at 0x7da1b0b3a380>, <ast.Name object at 0x7da1b0b38490>]]] in starred[call[name[data].items, parameter[]]] begin[:]
if compare[constant[__classname__] <ast.NotIn object at 0x7da2590d7190> name[value]] begin[:]
<ast.Raise object at 0x7da18ede6b00>
variable[name] assign[=] name[key]
variable[bases] assign[=] tuple[[<ast.Name object at 0x7da1b0b38460>]]
variable[attrs] assign[=] call[name[dict], parameter[]]
for taget[tuple[[<ast.Name object at 0x7da1b0b3b4c0>, <ast.Name object at 0x7da1b0b3a2f0>]]] in starred[call[name[value].items, parameter[]]] begin[:]
if call[name[isinstance], parameter[name[v], name[dict]]] begin[:]
if compare[constant[__classname__] in name[v]] begin[:]
call[name[attrs]][name[k]] assign[=] call[name[cls].load, parameter[dictionary[[<ast.Name object at 0x7da18c4cc250>], [<ast.Name object at 0x7da18c4cddb0>]]]]
return[call[name[type], parameter[name[name], name[bases], name[attrs]]]]
|
keyword[def] identifier[load] ( identifier[cls] , identifier[data] ):
literal[string]
keyword[if] identifier[len] ( identifier[data] )== literal[int] :
keyword[for] identifier[key] , identifier[value] keyword[in] identifier[data] . identifier[items] ():
keyword[if] literal[string] keyword[not] keyword[in] identifier[value] :
keyword[raise] identifier[ValueError]
identifier[name] = identifier[key]
identifier[bases] =( identifier[Constant] ,)
identifier[attrs] = identifier[dict] ()
keyword[for] identifier[k] , identifier[v] keyword[in] identifier[value] . identifier[items] ():
keyword[if] identifier[isinstance] ( identifier[v] , identifier[dict] ):
keyword[if] literal[string] keyword[in] identifier[v] :
identifier[attrs] [ identifier[k] ]= identifier[cls] . identifier[load] ({ identifier[k] : identifier[v] })
keyword[else] :
identifier[attrs] [ identifier[k] ]= identifier[v]
keyword[else] :
identifier[attrs] [ identifier[k] ]= identifier[v]
keyword[return] identifier[type] ( identifier[name] , identifier[bases] , identifier[attrs] )
keyword[else] :
keyword[raise] identifier[ValueError]
|
def load(cls, data):
"""Construct a Constant class from it's dict data.
.. versionadded:: 0.0.2
"""
if len(data) == 1:
for (key, value) in data.items():
if '__classname__' not in value: # pragma: no cover
raise ValueError # depends on [control=['if'], data=[]]
name = key
bases = (Constant,)
attrs = dict()
for (k, v) in value.items():
if isinstance(v, dict):
if '__classname__' in v:
attrs[k] = cls.load({k: v}) # depends on [control=['if'], data=['v']]
else:
attrs[k] = v # depends on [control=['if'], data=[]]
else:
attrs[k] = v # depends on [control=['for'], data=[]] # depends on [control=['for'], data=[]]
return type(name, bases, attrs) # depends on [control=['if'], data=[]]
else: # pragma: no cover
raise ValueError
|
def _parse_decorated_functions(self, code):
    """Extract and process each @rest-decorated function found in `code`.

    For every run of @rest decorators followed by a docstring, the docstring
    is fed to _parse_docstring and the decorator text to _add_function_paths.
    """
    pattern = re.compile(r"""
        # @rest decorators
        (?P<decorators>
            (?:@rest\(.+?\)\n)+ # one or more @rest decorators inside
        )
        # docstring delimited by 3 double quotes
        .+?"{3}(?P<docstring>.+?)"{3}
        """, re.VERBOSE | re.DOTALL)
    for match in pattern.finditer(code):
        groups = match.groupdict()
        self._parse_docstring(groups['docstring'])
        self._add_function_paths(groups['decorators'])
|
def function[_parse_decorated_functions, parameter[self, code]]:
constant[Return URL rule, HTTP methods and docstring.]
variable[matches] assign[=] call[name[re].finditer, parameter[constant[
# @rest decorators
(?P<decorators>
(?:@rest\(.+?\)\n)+ # one or more @rest decorators inside
)
# docstring delimited by 3 double quotes
.+?"{3}(?P<docstring>.+?)"{3}
], name[code], binary_operation[name[re].VERBOSE <ast.BitOr object at 0x7da2590d6aa0> name[re].DOTALL]]]
for taget[name[function_match]] in starred[name[matches]] begin[:]
variable[m_dict] assign[=] call[name[function_match].groupdict, parameter[]]
call[name[self]._parse_docstring, parameter[call[name[m_dict]][constant[docstring]]]]
call[name[self]._add_function_paths, parameter[call[name[m_dict]][constant[decorators]]]]
|
keyword[def] identifier[_parse_decorated_functions] ( identifier[self] , identifier[code] ):
literal[string]
identifier[matches] = identifier[re] . identifier[finditer] ( literal[string] , identifier[code] , identifier[re] . identifier[VERBOSE] | identifier[re] . identifier[DOTALL] )
keyword[for] identifier[function_match] keyword[in] identifier[matches] :
identifier[m_dict] = identifier[function_match] . identifier[groupdict] ()
identifier[self] . identifier[_parse_docstring] ( identifier[m_dict] [ literal[string] ])
identifier[self] . identifier[_add_function_paths] ( identifier[m_dict] [ literal[string] ])
|
def _parse_decorated_functions(self, code):
"""Return URL rule, HTTP methods and docstring."""
matches = re.finditer('\n # @rest decorators\n (?P<decorators>\n (?:@rest\\(.+?\\)\\n)+ # one or more @rest decorators inside\n )\n # docstring delimited by 3 double quotes\n .+?"{3}(?P<docstring>.+?)"{3}\n ', code, re.VERBOSE | re.DOTALL)
for function_match in matches:
m_dict = function_match.groupdict()
self._parse_docstring(m_dict['docstring'])
self._add_function_paths(m_dict['decorators']) # depends on [control=['for'], data=['function_match']]
|
def remove_members(self, project, params=None, **options):
    """Removes the specified list of members from the project. Returns the updated project record.
    Parameters
    ----------
    project : {Id} The project to remove members from.
    [data] : {Object} Data for the request
      - members : {Array} An array of members to remove from the project.
    """
    # Use a None sentinel instead of a mutable default argument ({}), which
    # would be shared across every call of this method.
    if params is None:
        params = {}
    path = "/projects/%s/removeMembers" % (project)
    return self.client.post(path, params, **options)
|
def function[remove_members, parameter[self, project, params]]:
constant[Removes the specified list of members from the project. Returns the updated project record.
Parameters
----------
project : {Id} The project to remove members from.
[data] : {Object} Data for the request
- members : {Array} An array of members to remove from the project.
]
variable[path] assign[=] binary_operation[constant[/projects/%s/removeMembers] <ast.Mod object at 0x7da2590d6920> name[project]]
return[call[name[self].client.post, parameter[name[path], name[params]]]]
|
keyword[def] identifier[remove_members] ( identifier[self] , identifier[project] , identifier[params] ={},** identifier[options] ):
literal[string]
identifier[path] = literal[string] %( identifier[project] )
keyword[return] identifier[self] . identifier[client] . identifier[post] ( identifier[path] , identifier[params] ,** identifier[options] )
|
def remove_members(self, project, params={}, **options):
"""Removes the specified list of members from the project. Returns the updated project record.
Parameters
----------
project : {Id} The project to remove members from.
[data] : {Object} Data for the request
- members : {Array} An array of members to remove from the project.
"""
path = '/projects/%s/removeMembers' % project
return self.client.post(path, params, **options)
|
def encode_request(request_line, **headers):
    '''Creates the data for a SSDP request.
    Args:
        request_line (string): The request line for the request (e.g.
            ``"M-SEARCH * HTTP/1.1"``).
        headers (dict of string -> string): Dictionary of header name - header
            value pairs to present in the request.
    Returns:
        bytes: The encoded request.
    '''
    header_lines = ['%s: %s' % item for item in headers.items()]
    message = '\r\n'.join([request_line] + header_lines)
    return (message + '\r\n\r\n').encode('utf-8')
|
def function[encode_request, parameter[request_line]]:
constant[Creates the data for a SSDP request.
Args:
request_line (string): The request line for the request (e.g.
``"M-SEARCH * HTTP/1.1"``).
headers (dict of string -> string): Dictionary of header name - header
value pairs to present in the request.
Returns:
bytes: The encoded request.
]
variable[lines] assign[=] list[[<ast.Name object at 0x7da1b08054e0>]]
call[name[lines].extend, parameter[<ast.ListComp object at 0x7da207f990c0>]]
return[call[binary_operation[call[constant[
].join, parameter[name[lines]]] + constant[
]].encode, parameter[constant[utf-8]]]]
|
keyword[def] identifier[encode_request] ( identifier[request_line] ,** identifier[headers] ):
literal[string]
identifier[lines] =[ identifier[request_line] ]
identifier[lines] . identifier[extend] ([ literal[string] % identifier[kv] keyword[for] identifier[kv] keyword[in] identifier[headers] . identifier[items] ()])
keyword[return] ( literal[string] . identifier[join] ( identifier[lines] )+ literal[string] ). identifier[encode] ( literal[string] )
|
def encode_request(request_line, **headers):
"""Creates the data for a SSDP request.
Args:
request_line (string): The request line for the request (e.g.
``"M-SEARCH * HTTP/1.1"``).
headers (dict of string -> string): Dictionary of header name - header
value pairs to present in the request.
Returns:
bytes: The encoded request.
"""
lines = [request_line]
lines.extend(['%s: %s' % kv for kv in headers.items()])
return ('\r\n'.join(lines) + '\r\n\r\n').encode('utf-8')
|
def figure_chronological(self):
    """plot every sweep of an ABF file (with comments)."""
    self.log.debug("creating chronological plot")
    self.figure()
    for sweep_number in range(self.abf.sweeps):
        self.abf.setsweep(sweep_number)
        self.setColorBySweep()
        # plot the derivative trace when enabled, otherwise the raw trace
        trace = self.abf.sweepD if self.abf.derivative else self.abf.sweepY
        plt.plot(self.abf.sweepX, trace, **self.kwargs)
    self.comments()
    self.decorate()
|
def function[figure_chronological, parameter[self]]:
constant[plot every sweep of an ABF file (with comments).]
call[name[self].log.debug, parameter[constant[creating chronological plot]]]
call[name[self].figure, parameter[]]
for taget[name[sweep]] in starred[call[name[range], parameter[name[self].abf.sweeps]]] begin[:]
call[name[self].abf.setsweep, parameter[name[sweep]]]
call[name[self].setColorBySweep, parameter[]]
if name[self].abf.derivative begin[:]
call[name[plt].plot, parameter[name[self].abf.sweepX, name[self].abf.sweepD]]
call[name[self].comments, parameter[]]
call[name[self].decorate, parameter[]]
|
keyword[def] identifier[figure_chronological] ( identifier[self] ):
literal[string]
identifier[self] . identifier[log] . identifier[debug] ( literal[string] )
identifier[self] . identifier[figure] ()
keyword[for] identifier[sweep] keyword[in] identifier[range] ( identifier[self] . identifier[abf] . identifier[sweeps] ):
identifier[self] . identifier[abf] . identifier[setsweep] ( identifier[sweep] )
identifier[self] . identifier[setColorBySweep] ()
keyword[if] identifier[self] . identifier[abf] . identifier[derivative] :
identifier[plt] . identifier[plot] ( identifier[self] . identifier[abf] . identifier[sweepX] , identifier[self] . identifier[abf] . identifier[sweepD] ,** identifier[self] . identifier[kwargs] )
keyword[else] :
identifier[plt] . identifier[plot] ( identifier[self] . identifier[abf] . identifier[sweepX] , identifier[self] . identifier[abf] . identifier[sweepY] ,** identifier[self] . identifier[kwargs] )
identifier[self] . identifier[comments] ()
identifier[self] . identifier[decorate] ()
|
def figure_chronological(self):
"""plot every sweep of an ABF file (with comments)."""
self.log.debug('creating chronological plot')
self.figure()
for sweep in range(self.abf.sweeps):
self.abf.setsweep(sweep)
self.setColorBySweep()
if self.abf.derivative:
plt.plot(self.abf.sweepX, self.abf.sweepD, **self.kwargs) # depends on [control=['if'], data=[]]
else:
plt.plot(self.abf.sweepX, self.abf.sweepY, **self.kwargs) # depends on [control=['for'], data=['sweep']]
self.comments()
self.decorate()
|
def data_size(self):
    """ Returns the size of the :attr:`data` object in bytes (read-only)."""
    data = self._data
    # Container: whole bytes plus any leftover bits rounded up to one byte.
    if is_container(data):
        byte_length, bit_length = data.container_size()
        return byte_length + math.ceil(bit_length / 8)
    # Field: bit size rounded up to whole bytes.
    if is_field(data):
        return math.ceil(data.bit_size / 8)
    # Anything else has no measurable size.
    return 0
|
def function[data_size, parameter[self]]:
constant[ Returns the size of the :attr:`data` object in bytes (read-only).]
if call[name[is_container], parameter[name[self]._data]] begin[:]
<ast.Tuple object at 0x7da1b209d4e0> assign[=] call[name[self]._data.container_size, parameter[]]
return[binary_operation[name[byte_length] + call[name[math].ceil, parameter[binary_operation[name[bit_length] / constant[8]]]]]]
|
keyword[def] identifier[data_size] ( identifier[self] ):
literal[string]
keyword[if] identifier[is_container] ( identifier[self] . identifier[_data] ):
identifier[byte_length] , identifier[bit_length] = identifier[self] . identifier[_data] . identifier[container_size] ()
keyword[return] identifier[byte_length] + identifier[math] . identifier[ceil] ( identifier[bit_length] / literal[int] )
keyword[elif] identifier[is_field] ( identifier[self] . identifier[_data] ):
keyword[return] identifier[math] . identifier[ceil] ( identifier[self] . identifier[_data] . identifier[bit_size] / literal[int] )
keyword[else] :
keyword[return] literal[int]
|
def data_size(self):
""" Returns the size of the :attr:`data` object in bytes (read-only)."""
# Container
if is_container(self._data):
(byte_length, bit_length) = self._data.container_size()
return byte_length + math.ceil(bit_length / 8) # depends on [control=['if'], data=[]]
# Field
elif is_field(self._data):
return math.ceil(self._data.bit_size / 8) # depends on [control=['if'], data=[]]
else:
return 0
|
async def vcx_agent_provision(config: str) -> str:
    """
    Provision an agent in the agency, populate configuration and wallet for this agent.
    Example:
    import json
    enterprise_config = {
        'agency_url': 'http://localhost:8080',
        'agency_did': 'VsKV7grR1BUE29mG2Fm2kX',
        'agency_verkey': "Hezce2UWMZ3wUhVkh2LfKSs8nDzWwzs2Win7EzNN3YaR",
        'wallet_name': 'LIBVCX_SDK_WALLET',
        'agent_seed': '00000000000000000000000001234561',
        'enterprise_seed': '000000000000000000000000Trustee1',
        'wallet_key': '1234'
    }
    vcx_config = await vcx_agent_provision(json.dumps(enterprise_config))
    :param config: JSON configuration
    :return: Configuration for vcx_init call.
    """
    logger = logging.getLogger(__name__)
    # Create the ctypes callback once and cache it on the function object so
    # it stays alive (not garbage-collected) while the native call runs.
    if not hasattr(vcx_agent_provision, "cb"):
        logger.debug("vcx_agent_provision: Creating callback")
        vcx_agent_provision.cb = create_cb(CFUNCTYPE(None, c_uint32, c_uint32, c_char_p))
    c_config = c_char_p(config.encode('utf-8'))
    result = await do_call('vcx_agent_provision_async',
                           c_config,
                           vcx_agent_provision.cb)
    logger.debug("vcx_agent_provision completed")
    # ``result`` is raw bytes from the native layer; decode it to str.
    # (Return annotation fixed: the original declared ``-> None`` although a
    # decoded configuration string is returned, as the docstring states.)
    return result.decode()
|
<ast.AsyncFunctionDef object at 0x7da18f09e650>
|
keyword[async] keyword[def] identifier[vcx_agent_provision] ( identifier[config] : identifier[str] )-> keyword[None] :
literal[string]
identifier[logger] = identifier[logging] . identifier[getLogger] ( identifier[__name__] )
keyword[if] keyword[not] identifier[hasattr] ( identifier[vcx_agent_provision] , literal[string] ):
identifier[logger] . identifier[debug] ( literal[string] )
identifier[vcx_agent_provision] . identifier[cb] = identifier[create_cb] ( identifier[CFUNCTYPE] ( keyword[None] , identifier[c_uint32] , identifier[c_uint32] , identifier[c_char_p] ))
identifier[c_config] = identifier[c_char_p] ( identifier[config] . identifier[encode] ( literal[string] ))
identifier[result] = keyword[await] identifier[do_call] ( literal[string] ,
identifier[c_config] ,
identifier[vcx_agent_provision] . identifier[cb] )
identifier[logger] . identifier[debug] ( literal[string] )
keyword[return] identifier[result] . identifier[decode] ()
|
async def vcx_agent_provision(config: str) -> None:
"""
Provision an agent in the agency, populate configuration and wallet for this agent.
Example:
import json
enterprise_config = {
'agency_url': 'http://localhost:8080',
'agency_did': 'VsKV7grR1BUE29mG2Fm2kX',
'agency_verkey': "Hezce2UWMZ3wUhVkh2LfKSs8nDzWwzs2Win7EzNN3YaR",
'wallet_name': 'LIBVCX_SDK_WALLET',
'agent_seed': '00000000000000000000000001234561',
'enterprise_seed': '000000000000000000000000Trustee1',
'wallet_key': '1234'
}
vcx_config = await vcx_agent_provision(json.dumps(enterprise_config))
:param config: JSON configuration
:return: Configuration for vcx_init call.
"""
logger = logging.getLogger(__name__)
if not hasattr(vcx_agent_provision, 'cb'):
logger.debug('vcx_agent_provision: Creating callback')
vcx_agent_provision.cb = create_cb(CFUNCTYPE(None, c_uint32, c_uint32, c_char_p)) # depends on [control=['if'], data=[]]
c_config = c_char_p(config.encode('utf-8'))
result = await do_call('vcx_agent_provision_async', c_config, vcx_agent_provision.cb)
logger.debug('vcx_agent_provision completed')
return result.decode()
|
def as_mpl_artists(shape_list,
                   properties_func=None,
                   text_offset=5.0, origin=1):
    """
    Converts a region list to a list of patches and a list of artists.

    Parameters
    ----------
    shape_list : iterable
        Region shapes; each shape exposes ``name``, ``coord_list``,
        ``continued`` and ``attr`` (an ``(attr_list, attr_dict)`` pair).
    properties_func : callable, optional
        ``properties_func(shape, attrs)`` returning a dict of matplotlib
        keyword arguments for the shape. Defaults to
        ``properties_func_default``.
    text_offset : float
        If there is text associated with the regions, add
        some vertical offset (in pixels) to the text so that it doesn't
        overlap with the regions.
    origin : int
        Often, the regions files implicitly assume the lower-left corner
        of the image as a coordinate (1,1). However, the python convention
        is that the array index starts from 0. By default (origin = 1),
        coordinates of the returned mpl artists have coordinate shifted by
        (1, 1). If you do not want this shift, set origin=0.

    Returns
    -------
    (patch_list, artist_list) : tuple of lists
        ``patch_list`` holds matplotlib patches for the shapes;
        ``artist_list`` holds the non-patch artists (text labels and
        Line2D point markers).
    """
    patch_list = []
    artist_list = []
    if properties_func is None:
        properties_func = properties_func_default
    # properties for continued(? multiline?) regions
    saved_attrs = None
    for shape in shape_list:
        patches = []
        # Inherit attributes saved from a preceding "composite"/continued
        # region, if any; otherwise start with empty attributes.
        if saved_attrs is None:
            _attrs = [], {}
        else:
            _attrs = copy.copy(saved_attrs[0]), copy.copy(saved_attrs[1])
        kwargs = properties_func(shape, _attrs)
        if shape.name == "composite":
            # A composite only contributes attributes to following shapes.
            saved_attrs = shape.attr
            continue
        if saved_attrs is None and shape.continued:
            saved_attrs = shape.attr
        # elif (shape.name in shape.attr[1]):
        #     if (shape.attr[1][shape.name] != "ignore"):
        #         saved_attrs = shape.attr
        if not shape.continued:
            saved_attrs = None
        # text associated with the shape
        txt = shape.attr[1].get("text")
        if shape.name == "polygon":
            xy = np.array(shape.coord_list)
            # reinterpret the flat coordinate list as (N, 2) vertex pairs
            xy.shape = -1, 2
            # -1 for change origin to 0,0
            patches = [mpatches.Polygon(xy - origin, closed=True, **kwargs)]
        elif shape.name == "rotbox" or shape.name == "box":
            xc, yc, w, h, rot = shape.coord_list
            # -1 for change origin to 0,0
            xc, yc = xc - origin, yc - origin
            # axis-aligned corners around (0, 0), then shift and rotate
            _box = np.array([[-w / 2., -h / 2.],
                             [-w / 2., h / 2.],
                             [w / 2., h / 2.],
                             [w / 2., -h / 2.]])
            box = _box + [xc, yc]
            rotbox = rotated_polygon(box, xc, yc, rot)
            patches = [mpatches.Polygon(rotbox, closed=True, **kwargs)]
        elif shape.name == "ellipse":
            xc, yc = shape.coord_list[:2]
            # -1 for change origin to 0,0
            xc, yc = xc - origin, yc - origin
            angle = shape.coord_list[-1]
            # alternating major/minor semi-axes between center and angle
            # NOTE(review): ``min`` shadows the builtin inside this comprehension.
            maj_list, min_list = shape.coord_list[2:-1:2], shape.coord_list[3:-1:2]
            patches = [mpatches.Ellipse((xc, yc), 2 * maj, 2 * min,
                                        angle=angle, **kwargs)
                       for maj, min in zip(maj_list, min_list)]
        elif shape.name == "annulus":
            xc, yc = shape.coord_list[:2]
            # -1 for change origin to 0,0
            xc, yc = xc - origin, yc - origin
            r_list = shape.coord_list[2:]
            # one circle (as an Ellipse) per annulus radius
            patches = [mpatches.Ellipse((xc, yc), 2 * r, 2 * r, **kwargs) for r in r_list]
        elif shape.name == "circle":
            xc, yc, major = shape.coord_list
            # -1 for change origin to 0,0
            xc, yc = xc - origin, yc - origin
            patches = [mpatches.Ellipse((xc, yc), 2 * major, 2 * major, angle=0, **kwargs)]
        elif shape.name == "panda":
            xc, yc, a1, a2, an, r1, r2, rn = shape.coord_list
            # -1 for change origin to 0,0
            xc, yc = xc - origin, yc - origin
            # concentric arcs between r1 and r2 ...
            patches = [mpatches.Arc((xc, yc), rr * 2, rr * 2, angle=0,
                                    theta1=a1, theta2=a2, **kwargs)
                       for rr in np.linspace(r1, r2, rn + 1)]
            # ... plus the radial divider segments
            for aa in np.linspace(a1, a2, an + 1):
                xx = np.array([r1, r2]) * np.cos(aa / 180. * np.pi) + xc
                yy = np.array([r1, r2]) * np.sin(aa / 180. * np.pi) + yc
                p = Path(np.transpose([xx, yy]))
                patches.append(mpatches.PathPatch(p, **kwargs))
        elif shape.name == "pie":
            xc, yc, r1, r2, a1, a2 = shape.coord_list
            # -1 for change origin to 0,0
            xc, yc = xc - origin, yc - origin
            # the two bounding arcs plus the two radial edges
            patches = [mpatches.Arc((xc, yc), rr * 2, rr * 2, angle=0,
                                    theta1=a1, theta2=a2, **kwargs)
                       for rr in [r1, r2]]
            for aa in [a1, a2]:
                xx = np.array([r1, r2]) * np.cos(aa / 180. * np.pi) + xc
                yy = np.array([r1, r2]) * np.sin(aa / 180. * np.pi) + yc
                p = Path(np.transpose([xx, yy]))
                patches.append(mpatches.PathPatch(p, **kwargs))
        elif shape.name == "epanda":
            xc, yc, a1, a2, an, r11, r12, r21, r22, rn, angle = shape.coord_list
            # -1 for change origin to 0,0
            xc, yc = xc - origin, yc - origin
            # mpl takes angle a1, a2 as angle as in circle before
            # transformation to ellipse.
            x1, y1 = cos(a1 / 180. * pi), sin(a1 / 180. * pi) * r11 / r12
            x2, y2 = cos(a2 / 180. * pi), sin(a2 / 180. * pi) * r11 / r12
            a1, a2 = atan2(y1, x1) / pi * 180., atan2(y2, x2) / pi * 180.
            patches = [mpatches.Arc((xc, yc), rr1 * 2, rr2 * 2,
                                    angle=angle, theta1=a1, theta2=a2,
                                    **kwargs)
                       for rr1, rr2 in zip(np.linspace(r11, r21, rn + 1),
                                           np.linspace(r12, r22, rn + 1))]
            # radial dividers drawn in circle space, then transformed to
            # the ellipse (scale + rotate + translate)
            for aa in np.linspace(a1, a2, an + 1):
                xx = np.array([r11, r21]) * np.cos(aa / 180. * np.pi)
                yy = np.array([r11, r21]) * np.sin(aa / 180. * np.pi)
                p = Path(np.transpose([xx, yy]))
                tr = Affine2D().scale(1, r12 / r11).rotate_deg(angle).translate(xc, yc)
                p2 = tr.transform_path(p)
                patches.append(mpatches.PathPatch(p2, **kwargs))
        elif shape.name == "text":
            # standalone text region: emits a Text artist only, no patch
            xc, yc = shape.coord_list[:2]
            # -1 for change origin to 0,0
            xc, yc = xc - origin, yc - origin
            if txt:
                _t = _get_text(txt, xc, yc, 0, 0, **kwargs)
                artist_list.append(_t)
        elif shape.name == "point":
            # points become Line2D markers rather than patches
            xc, yc = shape.coord_list[:2]
            # -1 for change origin to 0,0
            xc, yc = xc - origin, yc - origin
            artist_list.append(Line2D([xc], [yc],
                                      **kwargs))
            if txt:
                textshape = copy.copy(shape)
                textshape.name = "text"
                textkwargs = properties_func(textshape, _attrs)
                _t = _get_text(txt, xc, yc, 0, text_offset,
                               va="bottom",
                               **textkwargs)
                artist_list.append(_t)
        elif shape.name in ["line", "vector"]:
            if shape.name == "line":
                x1, y1, x2, y2 = shape.coord_list[:4]
                # -1 for change origin to 0,0
                x1, y1, x2, y2 = x1 - origin, y1 - origin, x2 - origin, y2 - origin
                # the "line" attribute encodes arrowheads at each end
                a1, a2 = shape.attr[1].get("line", "0 0").strip().split()[:2]
                arrowstyle = "-"
                if int(a1):
                    arrowstyle = "<" + arrowstyle
                if int(a2):
                    arrowstyle = arrowstyle + ">"
            else:  # shape.name == "vector"
                # vector is given as start point, length and angle (degrees)
                x1, y1, l, a = shape.coord_list[:4]
                # -1 for change origin to 0,0
                x1, y1 = x1 - origin, y1 - origin
                x2, y2 = x1 + l * np.cos(a / 180. * np.pi), y1 + l * np.sin(a / 180. * np.pi)
                v1 = int(shape.attr[1].get("vector", "0").strip())
                if v1:
                    arrowstyle = "->"
                else:
                    arrowstyle = "-"
            patches = [mpatches.FancyArrowPatch(posA=(x1, y1),
                                                posB=(x2, y2),
                                                arrowstyle=arrowstyle,
                                                arrow_transmuter=None,
                                                connectionstyle="arc3",
                                                patchA=None, patchB=None,
                                                shrinkA=0, shrinkB=0,
                                                connector=None,
                                                **kwargs)]
        else:
            warnings.warn("'as_mpl_artists' does not know how to convert {0} "
                          "to mpl artist".format(shape.name))
        patch_list.extend(patches)
        if txt and patches:
            # the text associated with a shape uses different
            # matplotlib keywords than the shape itself for, e.g.,
            # color
            textshape = copy.copy(shape)
            textshape.name = "text"
            textkwargs = properties_func(textshape, _attrs)
            # calculate the text position
            _bb = [p.get_window_extent() for p in patches]
            # this is to work around backward-incompatible change made
            # in matplotlib 1.2. This change is later reverted so only
            # some versions are affected. With affected version of
            # matplotlib, get_window_extent method calls get_transform
            # method which sets the _transformSet to True, which is
            # not desired.
            for p in patches:
                p._transformSet = False
            # place the label centered above the union of the patch bboxes
            _bbox = Bbox.union(_bb)
            x0, y0, x1, y1 = _bbox.extents
            xc = .5 * (x0 + x1)
            _t = _get_text(txt, xc, y1, 0, text_offset,
                           va="bottom",
                           **textkwargs)
            artist_list.append(_t)
    return patch_list, artist_list
|
def function[as_mpl_artists, parameter[shape_list, properties_func, text_offset, origin]]:
constant[
Converts a region list to a list of patches and a list of artists.
Optional Keywords:
[ text_offset ] - If there is text associated with the regions, add
some vertical offset (in pixels) to the text so that it doesn't overlap
with the regions.
Often, the regions files implicitly assume the lower-left corner
of the image as a coordinate (1,1). However, the python convetion
is that the array index starts from 0. By default (origin = 1),
coordinates of the returned mpl artists have coordinate shifted by
(1, 1). If you do not want this shift, set origin=0.
]
variable[patch_list] assign[=] list[[]]
variable[artist_list] assign[=] list[[]]
if compare[name[properties_func] is constant[None]] begin[:]
variable[properties_func] assign[=] name[properties_func_default]
variable[saved_attrs] assign[=] constant[None]
for taget[name[shape]] in starred[name[shape_list]] begin[:]
variable[patches] assign[=] list[[]]
if compare[name[saved_attrs] is constant[None]] begin[:]
variable[_attrs] assign[=] tuple[[<ast.List object at 0x7da18f09d060>, <ast.Dict object at 0x7da18f09c100>]]
variable[kwargs] assign[=] call[name[properties_func], parameter[name[shape], name[_attrs]]]
if compare[name[shape].name equal[==] constant[composite]] begin[:]
variable[saved_attrs] assign[=] name[shape].attr
continue
if <ast.BoolOp object at 0x7da18f09e2f0> begin[:]
variable[saved_attrs] assign[=] name[shape].attr
if <ast.UnaryOp object at 0x7da18f09cca0> begin[:]
variable[saved_attrs] assign[=] constant[None]
variable[txt] assign[=] call[call[name[shape].attr][constant[1]].get, parameter[constant[text]]]
if compare[name[shape].name equal[==] constant[polygon]] begin[:]
variable[xy] assign[=] call[name[np].array, parameter[name[shape].coord_list]]
name[xy].shape assign[=] tuple[[<ast.UnaryOp object at 0x7da18f09c7f0>, <ast.Constant object at 0x7da18f09fe20>]]
variable[patches] assign[=] list[[<ast.Call object at 0x7da18f09d750>]]
call[name[patch_list].extend, parameter[name[patches]]]
if <ast.BoolOp object at 0x7da18eb55b40> begin[:]
variable[textshape] assign[=] call[name[copy].copy, parameter[name[shape]]]
name[textshape].name assign[=] constant[text]
variable[textkwargs] assign[=] call[name[properties_func], parameter[name[textshape], name[_attrs]]]
variable[_bb] assign[=] <ast.ListComp object at 0x7da18eb559c0>
for taget[name[p]] in starred[name[patches]] begin[:]
name[p]._transformSet assign[=] constant[False]
variable[_bbox] assign[=] call[name[Bbox].union, parameter[name[_bb]]]
<ast.Tuple object at 0x7da18eb57340> assign[=] name[_bbox].extents
variable[xc] assign[=] binary_operation[constant[0.5] * binary_operation[name[x0] + name[x1]]]
variable[_t] assign[=] call[name[_get_text], parameter[name[txt], name[xc], name[y1], constant[0], name[text_offset]]]
call[name[artist_list].append, parameter[name[_t]]]
return[tuple[[<ast.Name object at 0x7da18eb54520>, <ast.Name object at 0x7da18eb54580>]]]
|
keyword[def] identifier[as_mpl_artists] ( identifier[shape_list] ,
identifier[properties_func] = keyword[None] ,
identifier[text_offset] = literal[int] , identifier[origin] = literal[int] ):
literal[string]
identifier[patch_list] =[]
identifier[artist_list] =[]
keyword[if] identifier[properties_func] keyword[is] keyword[None] :
identifier[properties_func] = identifier[properties_func_default]
identifier[saved_attrs] = keyword[None]
keyword[for] identifier[shape] keyword[in] identifier[shape_list] :
identifier[patches] =[]
keyword[if] identifier[saved_attrs] keyword[is] keyword[None] :
identifier[_attrs] =[],{}
keyword[else] :
identifier[_attrs] = identifier[copy] . identifier[copy] ( identifier[saved_attrs] [ literal[int] ]), identifier[copy] . identifier[copy] ( identifier[saved_attrs] [ literal[int] ])
identifier[kwargs] = identifier[properties_func] ( identifier[shape] , identifier[_attrs] )
keyword[if] identifier[shape] . identifier[name] == literal[string] :
identifier[saved_attrs] = identifier[shape] . identifier[attr]
keyword[continue]
keyword[if] identifier[saved_attrs] keyword[is] keyword[None] keyword[and] identifier[shape] . identifier[continued] :
identifier[saved_attrs] = identifier[shape] . identifier[attr]
keyword[if] keyword[not] identifier[shape] . identifier[continued] :
identifier[saved_attrs] = keyword[None]
identifier[txt] = identifier[shape] . identifier[attr] [ literal[int] ]. identifier[get] ( literal[string] )
keyword[if] identifier[shape] . identifier[name] == literal[string] :
identifier[xy] = identifier[np] . identifier[array] ( identifier[shape] . identifier[coord_list] )
identifier[xy] . identifier[shape] =- literal[int] , literal[int]
identifier[patches] =[ identifier[mpatches] . identifier[Polygon] ( identifier[xy] - identifier[origin] , identifier[closed] = keyword[True] ,** identifier[kwargs] )]
keyword[elif] identifier[shape] . identifier[name] == literal[string] keyword[or] identifier[shape] . identifier[name] == literal[string] :
identifier[xc] , identifier[yc] , identifier[w] , identifier[h] , identifier[rot] = identifier[shape] . identifier[coord_list]
identifier[xc] , identifier[yc] = identifier[xc] - identifier[origin] , identifier[yc] - identifier[origin]
identifier[_box] = identifier[np] . identifier[array] ([[- identifier[w] / literal[int] ,- identifier[h] / literal[int] ],
[- identifier[w] / literal[int] , identifier[h] / literal[int] ],
[ identifier[w] / literal[int] , identifier[h] / literal[int] ],
[ identifier[w] / literal[int] ,- identifier[h] / literal[int] ]])
identifier[box] = identifier[_box] +[ identifier[xc] , identifier[yc] ]
identifier[rotbox] = identifier[rotated_polygon] ( identifier[box] , identifier[xc] , identifier[yc] , identifier[rot] )
identifier[patches] =[ identifier[mpatches] . identifier[Polygon] ( identifier[rotbox] , identifier[closed] = keyword[True] ,** identifier[kwargs] )]
keyword[elif] identifier[shape] . identifier[name] == literal[string] :
identifier[xc] , identifier[yc] = identifier[shape] . identifier[coord_list] [: literal[int] ]
identifier[xc] , identifier[yc] = identifier[xc] - identifier[origin] , identifier[yc] - identifier[origin]
identifier[angle] = identifier[shape] . identifier[coord_list] [- literal[int] ]
identifier[maj_list] , identifier[min_list] = identifier[shape] . identifier[coord_list] [ literal[int] :- literal[int] : literal[int] ], identifier[shape] . identifier[coord_list] [ literal[int] :- literal[int] : literal[int] ]
identifier[patches] =[ identifier[mpatches] . identifier[Ellipse] (( identifier[xc] , identifier[yc] ), literal[int] * identifier[maj] , literal[int] * identifier[min] ,
identifier[angle] = identifier[angle] ,** identifier[kwargs] )
keyword[for] identifier[maj] , identifier[min] keyword[in] identifier[zip] ( identifier[maj_list] , identifier[min_list] )]
keyword[elif] identifier[shape] . identifier[name] == literal[string] :
identifier[xc] , identifier[yc] = identifier[shape] . identifier[coord_list] [: literal[int] ]
identifier[xc] , identifier[yc] = identifier[xc] - identifier[origin] , identifier[yc] - identifier[origin]
identifier[r_list] = identifier[shape] . identifier[coord_list] [ literal[int] :]
identifier[patches] =[ identifier[mpatches] . identifier[Ellipse] (( identifier[xc] , identifier[yc] ), literal[int] * identifier[r] , literal[int] * identifier[r] ,** identifier[kwargs] ) keyword[for] identifier[r] keyword[in] identifier[r_list] ]
keyword[elif] identifier[shape] . identifier[name] == literal[string] :
identifier[xc] , identifier[yc] , identifier[major] = identifier[shape] . identifier[coord_list]
identifier[xc] , identifier[yc] = identifier[xc] - identifier[origin] , identifier[yc] - identifier[origin]
identifier[patches] =[ identifier[mpatches] . identifier[Ellipse] (( identifier[xc] , identifier[yc] ), literal[int] * identifier[major] , literal[int] * identifier[major] , identifier[angle] = literal[int] ,** identifier[kwargs] )]
keyword[elif] identifier[shape] . identifier[name] == literal[string] :
identifier[xc] , identifier[yc] , identifier[a1] , identifier[a2] , identifier[an] , identifier[r1] , identifier[r2] , identifier[rn] = identifier[shape] . identifier[coord_list]
identifier[xc] , identifier[yc] = identifier[xc] - identifier[origin] , identifier[yc] - identifier[origin]
identifier[patches] =[ identifier[mpatches] . identifier[Arc] (( identifier[xc] , identifier[yc] ), identifier[rr] * literal[int] , identifier[rr] * literal[int] , identifier[angle] = literal[int] ,
identifier[theta1] = identifier[a1] , identifier[theta2] = identifier[a2] ,** identifier[kwargs] )
keyword[for] identifier[rr] keyword[in] identifier[np] . identifier[linspace] ( identifier[r1] , identifier[r2] , identifier[rn] + literal[int] )]
keyword[for] identifier[aa] keyword[in] identifier[np] . identifier[linspace] ( identifier[a1] , identifier[a2] , identifier[an] + literal[int] ):
identifier[xx] = identifier[np] . identifier[array] ([ identifier[r1] , identifier[r2] ])* identifier[np] . identifier[cos] ( identifier[aa] / literal[int] * identifier[np] . identifier[pi] )+ identifier[xc]
identifier[yy] = identifier[np] . identifier[array] ([ identifier[r1] , identifier[r2] ])* identifier[np] . identifier[sin] ( identifier[aa] / literal[int] * identifier[np] . identifier[pi] )+ identifier[yc]
identifier[p] = identifier[Path] ( identifier[np] . identifier[transpose] ([ identifier[xx] , identifier[yy] ]))
identifier[patches] . identifier[append] ( identifier[mpatches] . identifier[PathPatch] ( identifier[p] ,** identifier[kwargs] ))
keyword[elif] identifier[shape] . identifier[name] == literal[string] :
identifier[xc] , identifier[yc] , identifier[r1] , identifier[r2] , identifier[a1] , identifier[a2] = identifier[shape] . identifier[coord_list]
identifier[xc] , identifier[yc] = identifier[xc] - identifier[origin] , identifier[yc] - identifier[origin]
identifier[patches] =[ identifier[mpatches] . identifier[Arc] (( identifier[xc] , identifier[yc] ), identifier[rr] * literal[int] , identifier[rr] * literal[int] , identifier[angle] = literal[int] ,
identifier[theta1] = identifier[a1] , identifier[theta2] = identifier[a2] ,** identifier[kwargs] )
keyword[for] identifier[rr] keyword[in] [ identifier[r1] , identifier[r2] ]]
keyword[for] identifier[aa] keyword[in] [ identifier[a1] , identifier[a2] ]:
identifier[xx] = identifier[np] . identifier[array] ([ identifier[r1] , identifier[r2] ])* identifier[np] . identifier[cos] ( identifier[aa] / literal[int] * identifier[np] . identifier[pi] )+ identifier[xc]
identifier[yy] = identifier[np] . identifier[array] ([ identifier[r1] , identifier[r2] ])* identifier[np] . identifier[sin] ( identifier[aa] / literal[int] * identifier[np] . identifier[pi] )+ identifier[yc]
identifier[p] = identifier[Path] ( identifier[np] . identifier[transpose] ([ identifier[xx] , identifier[yy] ]))
identifier[patches] . identifier[append] ( identifier[mpatches] . identifier[PathPatch] ( identifier[p] ,** identifier[kwargs] ))
keyword[elif] identifier[shape] . identifier[name] == literal[string] :
identifier[xc] , identifier[yc] , identifier[a1] , identifier[a2] , identifier[an] , identifier[r11] , identifier[r12] , identifier[r21] , identifier[r22] , identifier[rn] , identifier[angle] = identifier[shape] . identifier[coord_list]
identifier[xc] , identifier[yc] = identifier[xc] - identifier[origin] , identifier[yc] - identifier[origin]
identifier[x1] , identifier[y1] = identifier[cos] ( identifier[a1] / literal[int] * identifier[pi] ), identifier[sin] ( identifier[a1] / literal[int] * identifier[pi] )* identifier[r11] / identifier[r12]
identifier[x2] , identifier[y2] = identifier[cos] ( identifier[a2] / literal[int] * identifier[pi] ), identifier[sin] ( identifier[a2] / literal[int] * identifier[pi] )* identifier[r11] / identifier[r12]
identifier[a1] , identifier[a2] = identifier[atan2] ( identifier[y1] , identifier[x1] )/ identifier[pi] * literal[int] , identifier[atan2] ( identifier[y2] , identifier[x2] )/ identifier[pi] * literal[int]
identifier[patches] =[ identifier[mpatches] . identifier[Arc] (( identifier[xc] , identifier[yc] ), identifier[rr1] * literal[int] , identifier[rr2] * literal[int] ,
identifier[angle] = identifier[angle] , identifier[theta1] = identifier[a1] , identifier[theta2] = identifier[a2] ,
** identifier[kwargs] )
keyword[for] identifier[rr1] , identifier[rr2] keyword[in] identifier[zip] ( identifier[np] . identifier[linspace] ( identifier[r11] , identifier[r21] , identifier[rn] + literal[int] ),
identifier[np] . identifier[linspace] ( identifier[r12] , identifier[r22] , identifier[rn] + literal[int] ))]
keyword[for] identifier[aa] keyword[in] identifier[np] . identifier[linspace] ( identifier[a1] , identifier[a2] , identifier[an] + literal[int] ):
identifier[xx] = identifier[np] . identifier[array] ([ identifier[r11] , identifier[r21] ])* identifier[np] . identifier[cos] ( identifier[aa] / literal[int] * identifier[np] . identifier[pi] )
identifier[yy] = identifier[np] . identifier[array] ([ identifier[r11] , identifier[r21] ])* identifier[np] . identifier[sin] ( identifier[aa] / literal[int] * identifier[np] . identifier[pi] )
identifier[p] = identifier[Path] ( identifier[np] . identifier[transpose] ([ identifier[xx] , identifier[yy] ]))
identifier[tr] = identifier[Affine2D] (). identifier[scale] ( literal[int] , identifier[r12] / identifier[r11] ). identifier[rotate_deg] ( identifier[angle] ). identifier[translate] ( identifier[xc] , identifier[yc] )
identifier[p2] = identifier[tr] . identifier[transform_path] ( identifier[p] )
identifier[patches] . identifier[append] ( identifier[mpatches] . identifier[PathPatch] ( identifier[p2] ,** identifier[kwargs] ))
keyword[elif] identifier[shape] . identifier[name] == literal[string] :
identifier[xc] , identifier[yc] = identifier[shape] . identifier[coord_list] [: literal[int] ]
identifier[xc] , identifier[yc] = identifier[xc] - identifier[origin] , identifier[yc] - identifier[origin]
keyword[if] identifier[txt] :
identifier[_t] = identifier[_get_text] ( identifier[txt] , identifier[xc] , identifier[yc] , literal[int] , literal[int] ,** identifier[kwargs] )
identifier[artist_list] . identifier[append] ( identifier[_t] )
keyword[elif] identifier[shape] . identifier[name] == literal[string] :
identifier[xc] , identifier[yc] = identifier[shape] . identifier[coord_list] [: literal[int] ]
identifier[xc] , identifier[yc] = identifier[xc] - identifier[origin] , identifier[yc] - identifier[origin]
identifier[artist_list] . identifier[append] ( identifier[Line2D] ([ identifier[xc] ],[ identifier[yc] ],
** identifier[kwargs] ))
keyword[if] identifier[txt] :
identifier[textshape] = identifier[copy] . identifier[copy] ( identifier[shape] )
identifier[textshape] . identifier[name] = literal[string]
identifier[textkwargs] = identifier[properties_func] ( identifier[textshape] , identifier[_attrs] )
identifier[_t] = identifier[_get_text] ( identifier[txt] , identifier[xc] , identifier[yc] , literal[int] , identifier[text_offset] ,
identifier[va] = literal[string] ,
** identifier[textkwargs] )
identifier[artist_list] . identifier[append] ( identifier[_t] )
keyword[elif] identifier[shape] . identifier[name] keyword[in] [ literal[string] , literal[string] ]:
keyword[if] identifier[shape] . identifier[name] == literal[string] :
identifier[x1] , identifier[y1] , identifier[x2] , identifier[y2] = identifier[shape] . identifier[coord_list] [: literal[int] ]
identifier[x1] , identifier[y1] , identifier[x2] , identifier[y2] = identifier[x1] - identifier[origin] , identifier[y1] - identifier[origin] , identifier[x2] - identifier[origin] , identifier[y2] - identifier[origin]
identifier[a1] , identifier[a2] = identifier[shape] . identifier[attr] [ literal[int] ]. identifier[get] ( literal[string] , literal[string] ). identifier[strip] (). identifier[split] ()[: literal[int] ]
identifier[arrowstyle] = literal[string]
keyword[if] identifier[int] ( identifier[a1] ):
identifier[arrowstyle] = literal[string] + identifier[arrowstyle]
keyword[if] identifier[int] ( identifier[a2] ):
identifier[arrowstyle] = identifier[arrowstyle] + literal[string]
keyword[else] :
identifier[x1] , identifier[y1] , identifier[l] , identifier[a] = identifier[shape] . identifier[coord_list] [: literal[int] ]
identifier[x1] , identifier[y1] = identifier[x1] - identifier[origin] , identifier[y1] - identifier[origin]
identifier[x2] , identifier[y2] = identifier[x1] + identifier[l] * identifier[np] . identifier[cos] ( identifier[a] / literal[int] * identifier[np] . identifier[pi] ), identifier[y1] + identifier[l] * identifier[np] . identifier[sin] ( identifier[a] / literal[int] * identifier[np] . identifier[pi] )
identifier[v1] = identifier[int] ( identifier[shape] . identifier[attr] [ literal[int] ]. identifier[get] ( literal[string] , literal[string] ). identifier[strip] ())
keyword[if] identifier[v1] :
identifier[arrowstyle] = literal[string]
keyword[else] :
identifier[arrowstyle] = literal[string]
identifier[patches] =[ identifier[mpatches] . identifier[FancyArrowPatch] ( identifier[posA] =( identifier[x1] , identifier[y1] ),
identifier[posB] =( identifier[x2] , identifier[y2] ),
identifier[arrowstyle] = identifier[arrowstyle] ,
identifier[arrow_transmuter] = keyword[None] ,
identifier[connectionstyle] = literal[string] ,
identifier[patchA] = keyword[None] , identifier[patchB] = keyword[None] ,
identifier[shrinkA] = literal[int] , identifier[shrinkB] = literal[int] ,
identifier[connector] = keyword[None] ,
** identifier[kwargs] )]
keyword[else] :
identifier[warnings] . identifier[warn] ( literal[string]
literal[string] . identifier[format] ( identifier[shape] . identifier[name] ))
identifier[patch_list] . identifier[extend] ( identifier[patches] )
keyword[if] identifier[txt] keyword[and] identifier[patches] :
identifier[textshape] = identifier[copy] . identifier[copy] ( identifier[shape] )
identifier[textshape] . identifier[name] = literal[string]
identifier[textkwargs] = identifier[properties_func] ( identifier[textshape] , identifier[_attrs] )
identifier[_bb] =[ identifier[p] . identifier[get_window_extent] () keyword[for] identifier[p] keyword[in] identifier[patches] ]
keyword[for] identifier[p] keyword[in] identifier[patches] :
identifier[p] . identifier[_transformSet] = keyword[False]
identifier[_bbox] = identifier[Bbox] . identifier[union] ( identifier[_bb] )
identifier[x0] , identifier[y0] , identifier[x1] , identifier[y1] = identifier[_bbox] . identifier[extents]
identifier[xc] = literal[int] *( identifier[x0] + identifier[x1] )
identifier[_t] = identifier[_get_text] ( identifier[txt] , identifier[xc] , identifier[y1] , literal[int] , identifier[text_offset] ,
identifier[va] = literal[string] ,
** identifier[textkwargs] )
identifier[artist_list] . identifier[append] ( identifier[_t] )
keyword[return] identifier[patch_list] , identifier[artist_list]
|
def as_mpl_artists(shape_list, properties_func=None, text_offset=5.0, origin=1):
    """
    Converts a region list to a list of patches and a list of artists.

    Optional Keywords:
    [ text_offset ] - If there is text associated with the regions, add
    some vertical offset (in pixels) to the text so that it doesn't overlap
    with the regions.

    Often, the regions files implicitly assume the lower-left corner
    of the image as a coordinate (1,1). However, the python convention
    is that the array index starts from 0. By default (origin = 1),
    coordinates of the returned mpl artists have coordinate shifted by
    (1, 1). If you do not want this shift, set origin=0.

    Returns a ``(patch_list, artist_list)`` tuple.
    """
    patch_list = []
    artist_list = []
    if properties_func is None:
        properties_func = properties_func_default
    # properties for continued(? multiline?) regions
    saved_attrs = None
    for shape in shape_list:
        patches = []
        # Start from attributes inherited from a preceding composite or
        # continued region, if any; otherwise an empty attribute set.
        if saved_attrs is None:
            _attrs = ([], {})
        else:
            _attrs = (copy.copy(saved_attrs[0]), copy.copy(saved_attrs[1]))
        kwargs = properties_func(shape, _attrs)
        if shape.name == 'composite':
            # A composite region only supplies attributes for the shapes
            # that follow; it produces no artist of its own.
            saved_attrs = shape.attr
            continue
        if saved_attrs is None and shape.continued:
            saved_attrs = shape.attr
        if not shape.continued:
            saved_attrs = None
        # text associated with the shape
        txt = shape.attr[1].get('text')
        if shape.name == 'polygon':
            xy = np.array(shape.coord_list)
            xy.shape = (-1, 2)
            # -1 for change origin to 0,0
            patches = [mpatches.Polygon(xy - origin, closed=True, **kwargs)]
        elif shape.name == 'rotbox' or shape.name == 'box':
            (xc, yc, w, h, rot) = shape.coord_list
            # -1 for change origin to 0,0
            (xc, yc) = (xc - origin, yc - origin)
            # Axis-aligned box corners around (0, 0), then shifted and rotated.
            _box = np.array([[-w / 2.0, -h / 2.0], [-w / 2.0, h / 2.0], [w / 2.0, h / 2.0], [w / 2.0, -h / 2.0]])
            box = _box + [xc, yc]
            rotbox = rotated_polygon(box, xc, yc, rot)
            patches = [mpatches.Polygon(rotbox, closed=True, **kwargs)]
        elif shape.name == 'ellipse':
            (xc, yc) = shape.coord_list[:2]
            # -1 for change origin to 0,0
            (xc, yc) = (xc - origin, yc - origin)
            angle = shape.coord_list[-1]
            # Alternating semi-major/semi-minor radii sit between the center
            # coordinates and the trailing angle.
            (maj_list, min_list) = (shape.coord_list[2:-1:2], shape.coord_list[3:-1:2])
            patches = [mpatches.Ellipse((xc, yc), 2 * maj, 2 * min, angle=angle, **kwargs) for (maj, min) in zip(maj_list, min_list)]
        elif shape.name == 'annulus':
            (xc, yc) = shape.coord_list[:2]
            # -1 for change origin to 0,0
            (xc, yc) = (xc - origin, yc - origin)
            r_list = shape.coord_list[2:]
            # One circle (as a degenerate Ellipse) per annulus radius.
            patches = [mpatches.Ellipse((xc, yc), 2 * r, 2 * r, **kwargs) for r in r_list]
        elif shape.name == 'circle':
            (xc, yc, major) = shape.coord_list
            # -1 for change origin to 0,0
            (xc, yc) = (xc - origin, yc - origin)
            patches = [mpatches.Ellipse((xc, yc), 2 * major, 2 * major, angle=0, **kwargs)]
        elif shape.name == 'panda':
            (xc, yc, a1, a2, an, r1, r2, rn) = shape.coord_list
            # -1 for change origin to 0,0
            (xc, yc) = (xc - origin, yc - origin)
            # Concentric arcs between r1 and r2 ...
            patches = [mpatches.Arc((xc, yc), rr * 2, rr * 2, angle=0, theta1=a1, theta2=a2, **kwargs) for rr in np.linspace(r1, r2, rn + 1)]
            # ... plus the radial spokes between angles a1 and a2.
            for aa in np.linspace(a1, a2, an + 1):
                xx = np.array([r1, r2]) * np.cos(aa / 180.0 * np.pi) + xc
                yy = np.array([r1, r2]) * np.sin(aa / 180.0 * np.pi) + yc
                p = Path(np.transpose([xx, yy]))
                patches.append(mpatches.PathPatch(p, **kwargs))
        elif shape.name == 'pie':
            (xc, yc, r1, r2, a1, a2) = shape.coord_list
            # -1 for change origin to 0,0
            (xc, yc) = (xc - origin, yc - origin)
            # Inner and outer arcs, joined by the two bounding radial lines.
            patches = [mpatches.Arc((xc, yc), rr * 2, rr * 2, angle=0, theta1=a1, theta2=a2, **kwargs) for rr in [r1, r2]]
            for aa in [a1, a2]:
                xx = np.array([r1, r2]) * np.cos(aa / 180.0 * np.pi) + xc
                yy = np.array([r1, r2]) * np.sin(aa / 180.0 * np.pi) + yc
                p = Path(np.transpose([xx, yy]))
                patches.append(mpatches.PathPatch(p, **kwargs))
        elif shape.name == 'epanda':
            (xc, yc, a1, a2, an, r11, r12, r21, r22, rn, angle) = shape.coord_list
            # -1 for change origin to 0,0
            (xc, yc) = (xc - origin, yc - origin)
            # mpl takes angle a1, a2 as angle as in circle before
            # transformation to ellipse.
            (x1, y1) = (cos(a1 / 180.0 * pi), sin(a1 / 180.0 * pi) * r11 / r12)
            (x2, y2) = (cos(a2 / 180.0 * pi), sin(a2 / 180.0 * pi) * r11 / r12)
            (a1, a2) = (atan2(y1, x1) / pi * 180.0, atan2(y2, x2) / pi * 180.0)
            patches = [mpatches.Arc((xc, yc), rr1 * 2, rr2 * 2, angle=angle, theta1=a1, theta2=a2, **kwargs) for (rr1, rr2) in zip(np.linspace(r11, r21, rn + 1), np.linspace(r12, r22, rn + 1))]
            # Radial spokes drawn in circle space, then scaled/rotated/shifted
            # into the ellipse frame.
            for aa in np.linspace(a1, a2, an + 1):
                xx = np.array([r11, r21]) * np.cos(aa / 180.0 * np.pi)
                yy = np.array([r11, r21]) * np.sin(aa / 180.0 * np.pi)
                p = Path(np.transpose([xx, yy]))
                tr = Affine2D().scale(1, r12 / r11).rotate_deg(angle).translate(xc, yc)
                p2 = tr.transform_path(p)
                patches.append(mpatches.PathPatch(p2, **kwargs))
        elif shape.name == 'text':
            (xc, yc) = shape.coord_list[:2]
            # -1 for change origin to 0,0
            (xc, yc) = (xc - origin, yc - origin)
            if txt:
                _t = _get_text(txt, xc, yc, 0, 0, **kwargs)
                artist_list.append(_t)
        elif shape.name == 'point':
            (xc, yc) = shape.coord_list[:2]
            # -1 for change origin to 0,0
            (xc, yc) = (xc - origin, yc - origin)
            artist_list.append(Line2D([xc], [yc], **kwargs))
            if txt:
                # Text for a point gets text-specific properties.
                textshape = copy.copy(shape)
                textshape.name = 'text'
                textkwargs = properties_func(textshape, _attrs)
                _t = _get_text(txt, xc, yc, 0, text_offset, va='bottom', **textkwargs)
                artist_list.append(_t)
        elif shape.name in ['line', 'vector']:
            if shape.name == 'line':
                (x1, y1, x2, y2) = shape.coord_list[:4]
                # -1 for change origin to 0,0
                (x1, y1, x2, y2) = (x1 - origin, y1 - origin, x2 - origin, y2 - origin)
                # 'line' attribute flags arrowheads at either end, e.g. "1 0".
                (a1, a2) = shape.attr[1].get('line', '0 0').strip().split()[:2]
                arrowstyle = '-'
                if int(a1):
                    arrowstyle = '<' + arrowstyle
                if int(a2):
                    arrowstyle = arrowstyle + '>'
            else:  # shape.name == "vector"
                (x1, y1, l, a) = shape.coord_list[:4]
                # -1 for change origin to 0,0
                (x1, y1) = (x1 - origin, y1 - origin)
                # End point from start + (length, angle in degrees).
                (x2, y2) = (x1 + l * np.cos(a / 180.0 * np.pi), y1 + l * np.sin(a / 180.0 * np.pi))
                v1 = int(shape.attr[1].get('vector', '0').strip())
                if v1:
                    arrowstyle = '->'
                else:
                    arrowstyle = '-'
            # NOTE(review): arrow_transmuter/connector look like legacy
            # FancyArrowPatch arguments — confirm against the targeted
            # matplotlib version.
            patches = [mpatches.FancyArrowPatch(posA=(x1, y1), posB=(x2, y2), arrowstyle=arrowstyle, arrow_transmuter=None, connectionstyle='arc3', patchA=None, patchB=None, shrinkA=0, shrinkB=0, connector=None, **kwargs)]
        else:
            warnings.warn("'as_mpl_artists' does not know how to convert {0} to mpl artist".format(shape.name))
        patch_list.extend(patches)
        if txt and patches:
            # the text associated with a shape uses different
            # matplotlib keywords than the shape itself for, e.g.,
            # color
            textshape = copy.copy(shape)
            textshape.name = 'text'
            textkwargs = properties_func(textshape, _attrs)
            # calculate the text position
            _bb = [p.get_window_extent() for p in patches]
            # this is to work around backward-incompatible change made
            # in matplotlib 1.2. This change is later reverted so only
            # some versions are affected. With affected version of
            # matplotlib, get_window_extent method calls get_transform
            # method which sets the _transformSet to True, which is
            # not desired.
            for p in patches:
                p._transformSet = False
            _bbox = Bbox.union(_bb)
            (x0, y0, x1, y1) = _bbox.extents
            # Center the text horizontally over the union of the patches.
            xc = 0.5 * (x0 + x1)
            _t = _get_text(txt, xc, y1, 0, text_offset, va='bottom', **textkwargs)
            artist_list.append(_t)
    return (patch_list, artist_list)
|
def Grieves_Thodos(zs, Tcs, Aijs):
    r'''Calculates critical temperature of a mixture according to
    mixing rules in [1]_.
    .. math::
        T_{cm} = \sum_{i} \frac{T_{ci}}{1 + (1/x_i)\sum_j A_{ij} x_j}
    For a binary mixture, this simplifies to:
    .. math::
        T_{cm} = \frac{T_{c1}}{1 + (x_2/x_1)A_{12}} + \frac{T_{c2}}
        {1 + (x_1/x_2)A_{21}}
    Parameters
    ----------
    zs : array-like
        Mole fractions of all components
    Tcs : array-like
        Critical temperatures of all components, [K]
    Aijs : array-like of shape `zs` by `zs`
        Interaction parameters
    Returns
    -------
    Tcm : float
        Critical temperatures of the mixture, [K]
    Notes
    -----
    All parameters, even if zero, must be given to this function.
    Giving 0s gives really bad results however.
    Examples
    --------
    butane/pentane/hexane 0.6449/0.2359/0.1192 mixture, exp: 450.22 K.
    >>> Grieves_Thodos([0.6449, 0.2359, 0.1192], [425.12, 469.7, 507.6], [[0, 1.2503, 1.516], [0.799807, 0, 1.23843], [0.659633, 0.807474, 0]])
    450.1839618758971
    References
    ----------
    .. [1] Grieves, Robert B., and George Thodos. "The Critical Temperatures of
       Multicomponent Hydrocarbon Systems." AIChE Journal 8, no. 4
       (September 1, 1962): 550-53. doi:10.1002/aic.690080426.
    .. [2] Najafi, Hamidreza, Babak Maghbooli, and Mohammad Amin Sobati.
       "Prediction of True Critical Temperature of Multi-Component Mixtures:
       Extending Fast Estimation Methods." Fluid Phase Equilibria 392
       (April 25, 2015): 104-26. doi:10.1016/j.fluid.2015.02.001.
    '''
    if not none_and_length_check([zs, Tcs]):
        # ValueError is more precise than a bare Exception and is still
        # caught by any caller handling Exception.
        raise ValueError('Function inputs are incorrect format')
    Tcm = 0.0
    # Each component contributes Tc_i / (1 + (1/z_i) * sum_j A_ij * z_j).
    for zi, Tci, Ai in zip(zs, Tcs, Aijs):
        interaction = sum(Aij*zj for Aij, zj in zip(Ai, zs))
        Tcm += Tci/(1. + 1./zi*interaction)
    return Tcm
|
def function[Grieves_Thodos, parameter[zs, Tcs, Aijs]]:
constant[Calculates critical temperature of a mixture according to
mixing rules in [1]_.
.. math::
T_{cm} = \sum_{i} \frac{T_{ci}}{1 + (1/x_i)\sum_j A_{ij} x_j}
For a binary mxiture, this simplifies to:
.. math::
T_{cm} = \frac{T_{c1}}{1 + (x_2/x_1)A_{12}} + \frac{T_{c2}}
{1 + (x_1/x_2)A_{21}}
Parameters
----------
zs : array-like
Mole fractions of all components
Tcs : array-like
Critical temperatures of all components, [K]
Aijs : array-like of shape `zs` by `zs`
Interaction parameters
Returns
-------
Tcm : float
Critical temperatures of the mixture, [K]
Notes
-----
All parameters, even if zero, must be given to this function.
Giving 0s gives really bad results however.
Examples
--------
butane/pentane/hexane 0.6449/0.2359/0.1192 mixture, exp: 450.22 K.
>>> Grieves_Thodos([0.6449, 0.2359, 0.1192], [425.12, 469.7, 507.6], [[0, 1.2503, 1.516], [0.799807, 0, 1.23843], [0.659633, 0.807474, 0]])
450.1839618758971
References
----------
.. [1] Grieves, Robert B., and George Thodos. "The Critical Temperatures of
Multicomponent Hydrocarbon Systems." AIChE Journal 8, no. 4
(September 1, 1962): 550-53. doi:10.1002/aic.690080426.
.. [2] Najafi, Hamidreza, Babak Maghbooli, and Mohammad Amin Sobati.
"Prediction of True Critical Temperature of Multi-Component Mixtures:
Extending Fast Estimation Methods." Fluid Phase Equilibria 392
(April 25, 2015): 104-26. doi:10.1016/j.fluid.2015.02.001.
]
if <ast.UnaryOp object at 0x7da18f00ceb0> begin[:]
<ast.Raise object at 0x7da18f00c850>
variable[Tcm] assign[=] constant[0]
for taget[name[i]] in starred[call[name[range], parameter[call[name[len], parameter[name[zs]]]]]] begin[:]
<ast.AugAssign object at 0x7da18f00f2e0>
return[name[Tcm]]
|
keyword[def] identifier[Grieves_Thodos] ( identifier[zs] , identifier[Tcs] , identifier[Aijs] ):
literal[string]
keyword[if] keyword[not] identifier[none_and_length_check] ([ identifier[zs] , identifier[Tcs] ]):
keyword[raise] identifier[Exception] ( literal[string] )
identifier[Tcm] = literal[int]
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[len] ( identifier[zs] )):
identifier[Tcm] += identifier[Tcs] [ identifier[i] ]/( literal[int] + literal[int] / identifier[zs] [ identifier[i] ]* identifier[sum] ( identifier[Aijs] [ identifier[i] ][ identifier[j] ]* identifier[zs] [ identifier[j] ] keyword[for] identifier[j] keyword[in] identifier[range] ( identifier[len] ( identifier[zs] ))))
keyword[return] identifier[Tcm]
|
def Grieves_Thodos(zs, Tcs, Aijs):
    """Calculates critical temperature of a mixture according to
    mixing rules in [1]_.
    .. math::
        T_{cm} = \\sum_{i} \\frac{T_{ci}}{1 + (1/x_i)\\sum_j A_{ij} x_j}
    For a binary mixture, this simplifies to:
    .. math::
        T_{cm} = \\frac{T_{c1}}{1 + (x_2/x_1)A_{12}} + \\frac{T_{c2}}
        {1 + (x_1/x_2)A_{21}}
    Parameters
    ----------
    zs : array-like
        Mole fractions of all components
    Tcs : array-like
        Critical temperatures of all components, [K]
    Aijs : array-like of shape `zs` by `zs`
        Interaction parameters
    Returns
    -------
    Tcm : float
        Critical temperatures of the mixture, [K]
    Notes
    -----
    All parameters, even if zero, must be given to this function.
    Giving 0s gives really bad results however.
    Examples
    --------
    butane/pentane/hexane 0.6449/0.2359/0.1192 mixture, exp: 450.22 K.
    >>> Grieves_Thodos([0.6449, 0.2359, 0.1192], [425.12, 469.7, 507.6], [[0, 1.2503, 1.516], [0.799807, 0, 1.23843], [0.659633, 0.807474, 0]])
    450.1839618758971
    References
    ----------
    .. [1] Grieves, Robert B., and George Thodos. "The Critical Temperatures of
       Multicomponent Hydrocarbon Systems." AIChE Journal 8, no. 4
       (September 1, 1962): 550-53. doi:10.1002/aic.690080426.
    .. [2] Najafi, Hamidreza, Babak Maghbooli, and Mohammad Amin Sobati.
       "Prediction of True Critical Temperature of Multi-Component Mixtures:
       Extending Fast Estimation Methods." Fluid Phase Equilibria 392
       (April 25, 2015): 104-26. doi:10.1016/j.fluid.2015.02.001.
    """
    if not none_and_length_check([zs, Tcs]):
        raise Exception('Function inputs are incorrect format')
    Tcm = 0
    # Each component contributes Tc_i / (1 + (1/z_i) * sum_j A_ij * z_j).
    for i in range(len(zs)):
        Tcm += Tcs[i] / (1.0 + 1.0 / zs[i] * sum((Aijs[i][j] * zs[j] for j in range(len(zs)))))
    return Tcm
|
def import_or_die(module_name, entrypoint_names):
    '''
    Import user code; return reference to usercode function.
    (str) -> function reference

    module_name may be a path to a .py file; its directory is appended to
    sys.path so the import can succeed. If the module is already loaded it
    is reloaded so code edits take effect.

    Returns None when entrypoint_names is empty (import-only use); raises
    ImportError when the module cannot be imported or none of the requested
    entrypoints exist in it.
    '''
    log_debug("Importing {}".format(module_name))
    module_name = os.path.abspath(module_name)
    if module_name.endswith('.py'):
        module_name, _ext = os.path.splitext(module_name)
    modname = os.path.basename(module_name)
    dirname = os.path.dirname(module_name)
    # Make the module's directory importable (only once).
    if dirname and dirname not in sys.path:
        sys.path.append(dirname)
    # first, try to reload code
    if modname in sys.modules:
        user_module = sys.modules.get(modname)
        user_module = importlib.reload(user_module)
    # if it isn't in sys.modules, load it for the first time, or
    # try to.
    else:
        try:
            user_module = importlib.__import__(modname)
        except ImportError as e:
            log_failure("Fatal error: couldn't import module (error: {}) while executing {}".format(str(e), modname))
            raise ImportError(e)
    # if there aren't any functions to call into, then the caller
    # just wanted the module/code to be imported, and that's it.
    if not entrypoint_names:
        return
    existing_names = dir(user_module)
    # Return the first requested entrypoint that the module defines.
    for method in entrypoint_names:
        if method in existing_names:
            return getattr(user_module, method)
    if len(entrypoint_names) > 1:
        entrypoints = "one of {}".format(', '.join(entrypoint_names))
    else:
        entrypoints = entrypoint_names[0]
    raise ImportError("Required entrypoint function or symbol ({}) not found in your code".format(entrypoints))
|
def function[import_or_die, parameter[module_name, entrypoint_names]]:
constant[
Import user code; return reference to usercode function.
(str) -> function reference
]
call[name[log_debug], parameter[call[constant[Importing {}].format, parameter[name[module_name]]]]]
variable[module_name] assign[=] call[name[os].path.abspath, parameter[name[module_name]]]
if call[name[module_name].endswith, parameter[constant[.py]]] begin[:]
<ast.Tuple object at 0x7da18eb57820> assign[=] call[name[os].path.splitext, parameter[name[module_name]]]
variable[modname] assign[=] call[name[os].path.basename, parameter[name[module_name]]]
variable[dirname] assign[=] call[name[os].path.dirname, parameter[name[module_name]]]
if <ast.BoolOp object at 0x7da18eb54340> begin[:]
call[name[sys].path.append, parameter[name[dirname]]]
if compare[name[modname] in name[sys].modules] begin[:]
variable[user_module] assign[=] call[name[sys].modules.get, parameter[name[modname]]]
variable[user_module] assign[=] call[name[importlib].reload, parameter[name[user_module]]]
if <ast.UnaryOp object at 0x7da18eb55f90> begin[:]
return[None]
variable[existing_names] assign[=] call[name[dir], parameter[name[user_module]]]
for taget[name[method]] in starred[name[entrypoint_names]] begin[:]
if compare[name[method] in name[existing_names]] begin[:]
return[call[name[getattr], parameter[name[user_module], name[method]]]]
if compare[call[name[len], parameter[name[entrypoint_names]]] greater[>] constant[1]] begin[:]
variable[entrypoints] assign[=] call[constant[one of {}].format, parameter[call[constant[, ].join, parameter[name[entrypoint_names]]]]]
<ast.Raise object at 0x7da18eb55270>
|
keyword[def] identifier[import_or_die] ( identifier[module_name] , identifier[entrypoint_names] ):
literal[string]
identifier[log_debug] ( literal[string] . identifier[format] ( identifier[module_name] ))
identifier[module_name] = identifier[os] . identifier[path] . identifier[abspath] ( identifier[module_name] )
keyword[if] identifier[module_name] . identifier[endswith] ( literal[string] ):
identifier[module_name] , identifier[ext] = identifier[os] . identifier[path] . identifier[splitext] ( identifier[module_name] )
identifier[modname] = identifier[os] . identifier[path] . identifier[basename] ( identifier[module_name] )
identifier[dirname] = identifier[os] . identifier[path] . identifier[dirname] ( identifier[module_name] )
keyword[if] identifier[dirname] keyword[and] identifier[dirname] keyword[not] keyword[in] identifier[sys] . identifier[path] :
identifier[sys] . identifier[path] . identifier[append] ( identifier[dirname] )
keyword[if] identifier[modname] keyword[in] identifier[sys] . identifier[modules] :
identifier[user_module] = identifier[sys] . identifier[modules] . identifier[get] ( identifier[modname] )
identifier[user_module] = identifier[importlib] . identifier[reload] ( identifier[user_module] )
keyword[else] :
keyword[try] :
identifier[mypaths] =[ identifier[x] keyword[for] identifier[x] keyword[in] identifier[sys] . identifier[path] keyword[if] ( literal[string] keyword[not] keyword[in] identifier[x] keyword[and] literal[string] keyword[not] keyword[in] identifier[x] )]
identifier[user_module] = identifier[importlib] . identifier[__import__] ( identifier[modname] )
keyword[except] identifier[ImportError] keyword[as] identifier[e] :
identifier[log_failure] ( literal[string] . identifier[format] ( identifier[str] ( identifier[e] ), identifier[modname] ))
keyword[raise] identifier[ImportError] ( identifier[e] )
keyword[if] keyword[not] identifier[entrypoint_names] :
keyword[return]
identifier[existing_names] = identifier[dir] ( identifier[user_module] )
keyword[for] identifier[method] keyword[in] identifier[entrypoint_names] :
keyword[if] identifier[method] keyword[in] identifier[existing_names] :
keyword[return] identifier[getattr] ( identifier[user_module] , identifier[method] )
keyword[if] identifier[len] ( identifier[entrypoint_names] )> literal[int] :
identifier[entrypoints] = literal[string] . identifier[format] ( literal[string] . identifier[join] ( identifier[entrypoint_names] ))
keyword[else] :
identifier[entrypoints] = identifier[entrypoint_names] [ literal[int] ]
keyword[raise] identifier[ImportError] ( literal[string] . identifier[format] ( identifier[entrypoints] ))
|
def import_or_die(module_name, entrypoint_names):
    """
    Import user code; return reference to usercode function.
    (str) -> function reference
    """
    log_debug('Importing {}'.format(module_name))
    module_name = os.path.abspath(module_name)
    if module_name.endswith('.py'):
        (module_name, ext) = os.path.splitext(module_name)
    modname = os.path.basename(module_name)
    dirname = os.path.dirname(module_name)
    # Make the module's directory importable (only once).
    if dirname and dirname not in sys.path:
        sys.path.append(dirname)
    # first, try to reload code
    if modname in sys.modules:
        user_module = sys.modules.get(modname)
        user_module = importlib.reload(user_module)
    else:
        # if it isn't in sys.modules, load it for the first time, or
        # try to.
        try:
            # NOTE(review): mypaths is computed but never used afterwards —
            # looks like leftover debugging state; confirm before removing.
            mypaths = [x for x in sys.path if 'Cellar' not in x and 'packages' not in x]
            user_module = importlib.__import__(modname)
        except ImportError as e:
            log_failure("Fatal error: couldn't import module (error: {}) while executing {}".format(str(e), modname))
            raise ImportError(e)
    # if there aren't any functions to call into, then the caller
    # just wanted the module/code to be imported, and that's it.
    if not entrypoint_names:
        return
    existing_names = dir(user_module)
    # Return the first requested entrypoint that the module defines.
    for method in entrypoint_names:
        if method in existing_names:
            return getattr(user_module, method)
    if len(entrypoint_names) > 1:
        entrypoints = 'one of {}'.format(', '.join(entrypoint_names))
    else:
        entrypoints = entrypoint_names[0]
    raise ImportError('Required entrypoint function or symbol ({}) not found in your code'.format(entrypoints))
|
def _getBlobFromURL(cls, url, exists=False):
    """
    Gets the blob specified by the url.
    caution: makes no api request. blob may not ACTUALLY exist
    :param urlparse.ParseResult url: the URL
    :param bool exists: if True, then syncs local blob object with cloud
           and raises exceptions if it doesn't exist remotely
    :return: the blob requested
    :rtype: :class:`~google.cloud.storage.blob.Blob`
    :raises NoSuchFileException: if `exists` is True and the blob is absent
    """
    # gs://bucket/path/to/file -> netloc is the bucket, path is the key.
    bucketName = url.netloc
    fileName = url.path
    # remove leading '/', which can cause problems if fileName is a path
    if fileName.startswith('/'):
        fileName = fileName[1:]
    storageClient = storage.Client()
    bucket = storageClient.get_bucket(bucketName)
    # NOTE(review): bytes(fileName) is a Python-2-ism; on Python 3,
    # bytes(str) without an encoding raises TypeError — confirm the
    # targeted interpreter version.
    blob = bucket.blob(bytes(fileName))
    if exists:
        if not blob.exists():
            raise NoSuchFileException
        # sync with cloud so info like size is available
        blob.reload()
    return blob
|
def function[_getBlobFromURL, parameter[cls, url, exists]]:
constant[
Gets the blob specified by the url.
caution: makes no api request. blob may not ACTUALLY exist
:param urlparse.ParseResult url: the URL
:param bool exists: if True, then syncs local blob object with cloud
and raises exceptions if it doesn't exist remotely
:return: the blob requested
:rtype: :class:`~google.cloud.storage.blob.Blob`
]
variable[bucketName] assign[=] name[url].netloc
variable[fileName] assign[=] name[url].path
if call[name[fileName].startswith, parameter[constant[/]]] begin[:]
variable[fileName] assign[=] call[name[fileName]][<ast.Slice object at 0x7da18f58ded0>]
variable[storageClient] assign[=] call[name[storage].Client, parameter[]]
variable[bucket] assign[=] call[name[storageClient].get_bucket, parameter[name[bucketName]]]
variable[blob] assign[=] call[name[bucket].blob, parameter[call[name[bytes], parameter[name[fileName]]]]]
if name[exists] begin[:]
if <ast.UnaryOp object at 0x7da20c7cb490> begin[:]
<ast.Raise object at 0x7da20c7cb730>
call[name[blob].reload, parameter[]]
return[name[blob]]
|
keyword[def] identifier[_getBlobFromURL] ( identifier[cls] , identifier[url] , identifier[exists] = keyword[False] ):
literal[string]
identifier[bucketName] = identifier[url] . identifier[netloc]
identifier[fileName] = identifier[url] . identifier[path]
keyword[if] identifier[fileName] . identifier[startswith] ( literal[string] ):
identifier[fileName] = identifier[fileName] [ literal[int] :]
identifier[storageClient] = identifier[storage] . identifier[Client] ()
identifier[bucket] = identifier[storageClient] . identifier[get_bucket] ( identifier[bucketName] )
identifier[blob] = identifier[bucket] . identifier[blob] ( identifier[bytes] ( identifier[fileName] ))
keyword[if] identifier[exists] :
keyword[if] keyword[not] identifier[blob] . identifier[exists] ():
keyword[raise] identifier[NoSuchFileException]
identifier[blob] . identifier[reload] ()
keyword[return] identifier[blob]
|
def _getBlobFromURL(cls, url, exists=False):
    """
    Gets the blob specified by the url.
    caution: makes no api request. blob may not ACTUALLY exist
    :param urlparse.ParseResult url: the URL
    :param bool exists: if True, then syncs local blob object with cloud
    and raises exceptions if it doesn't exist remotely
    :return: the blob requested
    :rtype: :class:`~google.cloud.storage.blob.Blob`
    """
    # gs://bucket/path/to/file -> netloc is the bucket, path is the key.
    bucketName = url.netloc
    fileName = url.path
    # remove leading '/', which can cause problems if fileName is a path
    if fileName.startswith('/'):
        fileName = fileName[1:]
    storageClient = storage.Client()
    bucket = storageClient.get_bucket(bucketName)
    # NOTE(review): bytes(fileName) is a Python-2-ism; on Python 3,
    # bytes(str) without an encoding raises TypeError — confirm the
    # targeted interpreter version.
    blob = bucket.blob(bytes(fileName))
    if exists:
        if not blob.exists():
            raise NoSuchFileException
        # sync with cloud so info like size is available
        blob.reload()
    return blob
|
def text(self, x, y, text):
    """Print a text on ASCII canvas.
    Args:
        x (int): x coordinate where the text should start.
        y (int): y coordinate where the text should start.
        text (str): string that should be printed.
    """
    # Render one character per column, advancing left to right.
    col = x
    for char in text:
        self.point(col, y, char)
        col += 1
|
def function[text, parameter[self, x, y, text]]:
constant[Print a text on ASCII canvas.
Args:
x (int): x coordinate where the text should start.
y (int): y coordinate where the text should start.
text (str): string that should be printed.
]
for taget[tuple[[<ast.Name object at 0x7da1b1f187f0>, <ast.Name object at 0x7da1b1f1bd60>]]] in starred[call[name[enumerate], parameter[name[text]]]] begin[:]
call[name[self].point, parameter[binary_operation[name[x] + name[i]], name[y], name[char]]]
|
keyword[def] identifier[text] ( identifier[self] , identifier[x] , identifier[y] , identifier[text] ):
literal[string]
keyword[for] identifier[i] , identifier[char] keyword[in] identifier[enumerate] ( identifier[text] ):
identifier[self] . identifier[point] ( identifier[x] + identifier[i] , identifier[y] , identifier[char] )
|
def text(self, x, y, text):
"""Print a text on ASCII canvas.
Args:
x (int): x coordinate where the text should start.
y (int): y coordinate where the text should start.
text (str): string that should be printed.
"""
for (i, char) in enumerate(text):
self.point(x + i, y, char) # depends on [control=['for'], data=[]]
|
def localize(self):
    """
    Return a module descriptor pointing at a worker-local copy of this
    module, if the module was saved as a resource.  Should only be called
    on a worker node; on the leader (or when no matching resource exists)
    this descriptor itself is returned.
    :rtype: toil.resource.Resource
    """
    if not self._runningOnWorker():
        log.warn('The localize() method should only be invoked on a worker.')
    resource = Resource.lookup(self._resourcePath)
    if resource is None:
        return self

    def stash(tmpDirPath):
        # Record the original dirPath (and the venv flag) so that
        # globalize() can restore them later.
        stashPath = os.path.join(tmpDirPath, '.stash')
        with open(stashPath, 'w') as stashFile:
            stashFile.write('1' if self.fromVirtualEnv else '0')
            stashFile.write(self.dirPath)

    resource.download(callback=stash)
    return self.__class__(dirPath=resource.localDirPath,
                          name=self.name,
                          fromVirtualEnv=self.fromVirtualEnv)
|
def function[localize, parameter[self]]:
constant[
Check if this module was saved as a resource. If it was, return a new module descriptor
that points to a local copy of that resource. Should only be called on a worker node. On
the leader, this method returns this resource, i.e. self.
:rtype: toil.resource.Resource
]
if <ast.UnaryOp object at 0x7da18dc989a0> begin[:]
call[name[log].warn, parameter[constant[The localize() method should only be invoked on a worker.]]]
variable[resource] assign[=] call[name[Resource].lookup, parameter[name[self]._resourcePath]]
if compare[name[resource] is constant[None]] begin[:]
return[name[self]]
|
keyword[def] identifier[localize] ( identifier[self] ):
literal[string]
keyword[if] keyword[not] identifier[self] . identifier[_runningOnWorker] ():
identifier[log] . identifier[warn] ( literal[string] )
identifier[resource] = identifier[Resource] . identifier[lookup] ( identifier[self] . identifier[_resourcePath] )
keyword[if] identifier[resource] keyword[is] keyword[None] :
keyword[return] identifier[self]
keyword[else] :
keyword[def] identifier[stash] ( identifier[tmpDirPath] ):
keyword[with] identifier[open] ( identifier[os] . identifier[path] . identifier[join] ( identifier[tmpDirPath] , literal[string] ), literal[string] ) keyword[as] identifier[f] :
identifier[f] . identifier[write] ( literal[string] keyword[if] identifier[self] . identifier[fromVirtualEnv] keyword[else] literal[string] )
identifier[f] . identifier[write] ( identifier[self] . identifier[dirPath] )
identifier[resource] . identifier[download] ( identifier[callback] = identifier[stash] )
keyword[return] identifier[self] . identifier[__class__] ( identifier[dirPath] = identifier[resource] . identifier[localDirPath] ,
identifier[name] = identifier[self] . identifier[name] ,
identifier[fromVirtualEnv] = identifier[self] . identifier[fromVirtualEnv] )
|
def localize(self):
"""
Check if this module was saved as a resource. If it was, return a new module descriptor
that points to a local copy of that resource. Should only be called on a worker node. On
the leader, this method returns this resource, i.e. self.
:rtype: toil.resource.Resource
"""
if not self._runningOnWorker():
log.warn('The localize() method should only be invoked on a worker.') # depends on [control=['if'], data=[]]
resource = Resource.lookup(self._resourcePath)
if resource is None:
return self # depends on [control=['if'], data=[]]
else:
def stash(tmpDirPath):
# Save the original dirPath such that we can restore it in globalize()
with open(os.path.join(tmpDirPath, '.stash'), 'w') as f:
f.write('1' if self.fromVirtualEnv else '0')
f.write(self.dirPath) # depends on [control=['with'], data=['f']]
resource.download(callback=stash)
return self.__class__(dirPath=resource.localDirPath, name=self.name, fromVirtualEnv=self.fromVirtualEnv)
|
def color_normalize(src, mean, std=None):
    """Subtract *mean* from ``src`` and optionally divide by *std*.

    Note: augmented assignment is used, so for mutable array types the
    normalization happens in place on ``src``; the (possibly mutated)
    value is also returned.

    Parameters
    ----------
    src : NDArray
        Input image.
    mean : NDArray
        RGB mean to be subtracted; skipped when ``None``.
    std : NDArray, optional
        RGB standard deviation to be divided; skipped when ``None``.

    Returns
    -------
    NDArray
        An `NDArray` containing the normalized image.
    """
    if mean is not None:
        src -= mean
    if std is None:
        return src
    src /= std
    return src
|
def function[color_normalize, parameter[src, mean, std]]:
constant[Normalize src with mean and std.
Parameters
----------
src : NDArray
Input image
mean : NDArray
RGB mean to be subtracted
std : NDArray
RGB standard deviation to be divided
Returns
-------
NDArray
An `NDArray` containing the normalized image.
]
if compare[name[mean] is_not constant[None]] begin[:]
<ast.AugAssign object at 0x7da204962cb0>
if compare[name[std] is_not constant[None]] begin[:]
<ast.AugAssign object at 0x7da204963100>
return[name[src]]
|
keyword[def] identifier[color_normalize] ( identifier[src] , identifier[mean] , identifier[std] = keyword[None] ):
literal[string]
keyword[if] identifier[mean] keyword[is] keyword[not] keyword[None] :
identifier[src] -= identifier[mean]
keyword[if] identifier[std] keyword[is] keyword[not] keyword[None] :
identifier[src] /= identifier[std]
keyword[return] identifier[src]
|
def color_normalize(src, mean, std=None):
"""Normalize src with mean and std.
Parameters
----------
src : NDArray
Input image
mean : NDArray
RGB mean to be subtracted
std : NDArray
RGB standard deviation to be divided
Returns
-------
NDArray
An `NDArray` containing the normalized image.
"""
if mean is not None:
src -= mean # depends on [control=['if'], data=['mean']]
if std is not None:
src /= std # depends on [control=['if'], data=['std']]
return src
|
def trending(limit=DEFAULT_SEARCH_LIMIT, api_key=GIPHY_PUBLIC_KEY,
             strict=False, rating=None):
    """
    Convenience wrapper: build a Giphy api client with the given api key
    and return its trending results.  Note that this returns a generator.
    """
    client = Giphy(api_key=api_key, strict=strict)
    return client.trending(limit=limit, rating=rating)
|
def function[trending, parameter[limit, api_key, strict, rating]]:
constant[
Shorthand for creating a Giphy api wrapper with the given api key
and then calling the trending method. Note that this will return
a generator
]
return[call[call[name[Giphy], parameter[]].trending, parameter[]]]
|
keyword[def] identifier[trending] ( identifier[limit] = identifier[DEFAULT_SEARCH_LIMIT] , identifier[api_key] = identifier[GIPHY_PUBLIC_KEY] ,
identifier[strict] = keyword[False] , identifier[rating] = keyword[None] ):
literal[string]
keyword[return] identifier[Giphy] ( identifier[api_key] = identifier[api_key] , identifier[strict] = identifier[strict] ). identifier[trending] (
identifier[limit] = identifier[limit] , identifier[rating] = identifier[rating] )
|
def trending(limit=DEFAULT_SEARCH_LIMIT, api_key=GIPHY_PUBLIC_KEY, strict=False, rating=None):
"""
Shorthand for creating a Giphy api wrapper with the given api key
and then calling the trending method. Note that this will return
a generator
"""
return Giphy(api_key=api_key, strict=strict).trending(limit=limit, rating=rating)
|
def get_rules(scenario_id, **kwargs):
    """
    Return every active (status 'A') rule attached to the given scenario.
    """
    query = db.DBSession.query(Rule).filter(
        Rule.scenario_id == scenario_id,
        Rule.status == 'A')
    return query.all()
|
def function[get_rules, parameter[scenario_id]]:
constant[
Get all the rules for a given scenario.
]
variable[rules] assign[=] call[call[call[name[db].DBSession.query, parameter[name[Rule]]].filter, parameter[compare[name[Rule].scenario_id equal[==] name[scenario_id]], compare[name[Rule].status equal[==] constant[A]]]].all, parameter[]]
return[name[rules]]
|
keyword[def] identifier[get_rules] ( identifier[scenario_id] ,** identifier[kwargs] ):
literal[string]
identifier[rules] = identifier[db] . identifier[DBSession] . identifier[query] ( identifier[Rule] ). identifier[filter] ( identifier[Rule] . identifier[scenario_id] == identifier[scenario_id] , identifier[Rule] . identifier[status] == literal[string] ). identifier[all] ()
keyword[return] identifier[rules]
|
def get_rules(scenario_id, **kwargs):
"""
Get all the rules for a given scenario.
"""
rules = db.DBSession.query(Rule).filter(Rule.scenario_id == scenario_id, Rule.status == 'A').all()
return rules
|
def switch(self):
    """Activate any eAgc whose switch-on time has been reached."""
    t = self.system.dae.t
    for idx in range(self.n):
        # Only flip units that are past their switch time and still off.
        if t >= self.tl[idx] and self.en[idx] == 0:
            self.en[idx] = 1
            logger.info(
                'Extended ACE <{}> activated at t = {}.'.format(
                    self.idx[idx], t))
|
def function[switch, parameter[self]]:
constant[Switch if time for eAgc has come]
variable[t] assign[=] name[self].system.dae.t
for taget[name[idx]] in starred[call[name[range], parameter[constant[0], name[self].n]]] begin[:]
if compare[name[t] greater_or_equal[>=] call[name[self].tl][name[idx]]] begin[:]
if compare[call[name[self].en][name[idx]] equal[==] constant[0]] begin[:]
call[name[self].en][name[idx]] assign[=] constant[1]
call[name[logger].info, parameter[call[constant[Extended ACE <{}> activated at t = {}.].format, parameter[call[name[self].idx][name[idx]], name[t]]]]]
|
keyword[def] identifier[switch] ( identifier[self] ):
literal[string]
identifier[t] = identifier[self] . identifier[system] . identifier[dae] . identifier[t]
keyword[for] identifier[idx] keyword[in] identifier[range] ( literal[int] , identifier[self] . identifier[n] ):
keyword[if] identifier[t] >= identifier[self] . identifier[tl] [ identifier[idx] ]:
keyword[if] identifier[self] . identifier[en] [ identifier[idx] ]== literal[int] :
identifier[self] . identifier[en] [ identifier[idx] ]= literal[int]
identifier[logger] . identifier[info] (
literal[string] . identifier[format] (
identifier[self] . identifier[idx] [ identifier[idx] ], identifier[t] ))
|
def switch(self):
"""Switch if time for eAgc has come"""
t = self.system.dae.t
for idx in range(0, self.n):
if t >= self.tl[idx]:
if self.en[idx] == 0:
self.en[idx] = 1
logger.info('Extended ACE <{}> activated at t = {}.'.format(self.idx[idx], t)) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['t']] # depends on [control=['for'], data=['idx']]
|
def send(self, peer, typename, data):
    """
    Send a packet to *peer*, connecting first if no connection exists.

    Returns a Deferred that fires once the packet has been handed to an
    established connection.
    """
    def _try_send(_):
        if peer not in self._connections:
            # Not connected yet: connect and retry once that fires.
            connect_d = self._connect(peer)
            connect_d.addCallback(_try_send)
            return connect_d
        connection = self._connections[peer][0]
        connection.send_packet(typename, data)
        return defer.succeed(None)

    outcome = _try_send(None)
    self._ongoing_sends.add(outcome)

    def _done(result):
        # Drop the bookkeeping entry once the send settles either way.
        if outcome in self._ongoing_sends:
            self._ongoing_sends.remove(outcome)
        return result

    outcome.addBoth(_done)
    return outcome
|
def function[send, parameter[self, peer, typename, data]]:
constant[
Sends a packet to a peer.
]
def function[attempt_to_send, parameter[_]]:
if compare[name[peer] <ast.NotIn object at 0x7da2590d7190> name[self]._connections] begin[:]
variable[d] assign[=] call[name[self]._connect, parameter[name[peer]]]
call[name[d].addCallback, parameter[name[attempt_to_send]]]
return[name[d]]
variable[d] assign[=] call[name[attempt_to_send], parameter[constant[None]]]
call[name[self]._ongoing_sends.add, parameter[name[d]]]
def function[send_completed, parameter[result]]:
if compare[name[d] in name[self]._ongoing_sends] begin[:]
call[name[self]._ongoing_sends.remove, parameter[name[d]]]
return[name[result]]
call[name[d].addBoth, parameter[name[send_completed]]]
return[name[d]]
|
keyword[def] identifier[send] ( identifier[self] , identifier[peer] , identifier[typename] , identifier[data] ):
literal[string]
keyword[def] identifier[attempt_to_send] ( identifier[_] ):
keyword[if] identifier[peer] keyword[not] keyword[in] identifier[self] . identifier[_connections] :
identifier[d] = identifier[self] . identifier[_connect] ( identifier[peer] )
identifier[d] . identifier[addCallback] ( identifier[attempt_to_send] )
keyword[return] identifier[d]
keyword[else] :
identifier[conn] = identifier[self] . identifier[_connections] [ identifier[peer] ][ literal[int] ]
identifier[conn] . identifier[send_packet] ( identifier[typename] , identifier[data] )
keyword[return] identifier[defer] . identifier[succeed] ( keyword[None] )
identifier[d] = identifier[attempt_to_send] ( keyword[None] )
identifier[self] . identifier[_ongoing_sends] . identifier[add] ( identifier[d] )
keyword[def] identifier[send_completed] ( identifier[result] ):
keyword[if] identifier[d] keyword[in] identifier[self] . identifier[_ongoing_sends] :
identifier[self] . identifier[_ongoing_sends] . identifier[remove] ( identifier[d] )
keyword[return] identifier[result]
identifier[d] . identifier[addBoth] ( identifier[send_completed] )
keyword[return] identifier[d]
|
def send(self, peer, typename, data):
"""
Sends a packet to a peer.
"""
def attempt_to_send(_):
if peer not in self._connections:
d = self._connect(peer)
d.addCallback(attempt_to_send)
return d # depends on [control=['if'], data=['peer']]
else:
conn = self._connections[peer][0]
conn.send_packet(typename, data)
return defer.succeed(None)
d = attempt_to_send(None)
self._ongoing_sends.add(d)
def send_completed(result):
if d in self._ongoing_sends:
self._ongoing_sends.remove(d) # depends on [control=['if'], data=['d']]
return result
d.addBoth(send_completed)
return d
|
def write_hdf5_dict(tsdict, h5f, group=None, **kwargs):
    """Write a `TimeSeriesBaseDict` to HDF5.

    Each series in the dict becomes a dataset (named after its key) inside
    *group*, which is created on demand; with no group the datasets are
    written directly under *h5f*.
    """
    # Resolve (or create) the target group.
    if not group:
        target = h5f
    elif group in h5f:
        target = h5f[group]
    else:
        target = h5f.create_group(group)
    kwargs.setdefault('format', 'hdf5')
    # Write each timeseries under its dict key.
    for key, series in tsdict.items():
        series.write(target, path=str(key), **kwargs)
|
def function[write_hdf5_dict, parameter[tsdict, h5f, group]]:
constant[Write a `TimeSeriesBaseDict` to HDF5
Each series in the dict is written as a dataset in the group
]
if <ast.BoolOp object at 0x7da1b060ace0> begin[:]
variable[h5g] assign[=] call[name[h5f].create_group, parameter[name[group]]]
call[name[kwargs].setdefault, parameter[constant[format], constant[hdf5]]]
for taget[tuple[[<ast.Name object at 0x7da1b060a8c0>, <ast.Name object at 0x7da1b0608670>]]] in starred[call[name[tsdict].items, parameter[]]] begin[:]
call[name[series].write, parameter[name[h5g]]]
|
keyword[def] identifier[write_hdf5_dict] ( identifier[tsdict] , identifier[h5f] , identifier[group] = keyword[None] ,** identifier[kwargs] ):
literal[string]
keyword[if] identifier[group] keyword[and] identifier[group] keyword[not] keyword[in] identifier[h5f] :
identifier[h5g] = identifier[h5f] . identifier[create_group] ( identifier[group] )
keyword[elif] identifier[group] :
identifier[h5g] = identifier[h5f] [ identifier[group] ]
keyword[else] :
identifier[h5g] = identifier[h5f]
identifier[kwargs] . identifier[setdefault] ( literal[string] , literal[string] )
keyword[for] identifier[key] , identifier[series] keyword[in] identifier[tsdict] . identifier[items] ():
identifier[series] . identifier[write] ( identifier[h5g] , identifier[path] = identifier[str] ( identifier[key] ),** identifier[kwargs] )
|
def write_hdf5_dict(tsdict, h5f, group=None, **kwargs):
"""Write a `TimeSeriesBaseDict` to HDF5
Each series in the dict is written as a dataset in the group
"""
# create group if needed
if group and group not in h5f:
h5g = h5f.create_group(group) # depends on [control=['if'], data=[]]
elif group:
h5g = h5f[group] # depends on [control=['if'], data=[]]
else:
h5g = h5f
# write each timeseries
kwargs.setdefault('format', 'hdf5')
for (key, series) in tsdict.items():
series.write(h5g, path=str(key), **kwargs) # depends on [control=['for'], data=[]]
|
def get_form(self, request, obj=None, **kwargs):
    """Get a :class:`Page <pages.admin.forms.PageForm>` for the
    :class:`Page <pages.models.Page>` and modify its fields depending on
    the request."""
    # Build a form class whose placeholder fields match the template
    # resolved for this request/object.
    template = get_template_from_request(request, obj)
    #model = create_page_model(get_placeholders(template))
    form = make_form(self.model, get_placeholders(template))
    # bound the form
    language = get_language_from_request(request)
    form.base_fields['language'].initial = language
    if obj:
        # Editing an existing page: pre-fill slug/title for the current
        # language only (fallback=False keeps other languages from leaking in).
        initial_slug = obj.slug(language=language, fallback=False)
        initial_title = obj.title(language=language, fallback=False)
        form.base_fields['slug'].initial = initial_slug
        form.base_fields['title'].initial = initial_title
    # NOTE(review): template was already resolved above; this second call
    # looks redundant — confirm get_template_from_request has no side
    # effects before deduplicating.
    template = get_template_from_request(request, obj)
    page_templates = settings.get_page_templates()
    template_choices = list(page_templates)
    # if the default template is not in the list, add it as the first choice
    if not [tpl for tpl in template_choices if tpl[0] == settings.PAGE_DEFAULT_TEMPLATE]:
        template_choices.insert(0, (settings.PAGE_DEFAULT_TEMPLATE,
                                    _('Default template')))
    form.base_fields['template'].choices = template_choices
    form.base_fields['template'].initial = force_text(template)
    # Add one content field per placeholder defined in the template,
    # seeded with the page's existing content when editing.
    for placeholder in get_placeholders(template):
        ctype = placeholder.ctype
        if obj:
            initial = placeholder.get_content(obj, language, lang_fallback=False)
        else:
            initial = None
        form.base_fields[ctype] = placeholder.get_field(obj,
                language, initial=initial)
    return form
|
def function[get_form, parameter[self, request, obj]]:
constant[Get a :class:`Page <pages.admin.forms.PageForm>` for the
:class:`Page <pages.models.Page>` and modify its fields depending on
the request.]
variable[template] assign[=] call[name[get_template_from_request], parameter[name[request], name[obj]]]
variable[form] assign[=] call[name[make_form], parameter[name[self].model, call[name[get_placeholders], parameter[name[template]]]]]
variable[language] assign[=] call[name[get_language_from_request], parameter[name[request]]]
call[name[form].base_fields][constant[language]].initial assign[=] name[language]
if name[obj] begin[:]
variable[initial_slug] assign[=] call[name[obj].slug, parameter[]]
variable[initial_title] assign[=] call[name[obj].title, parameter[]]
call[name[form].base_fields][constant[slug]].initial assign[=] name[initial_slug]
call[name[form].base_fields][constant[title]].initial assign[=] name[initial_title]
variable[template] assign[=] call[name[get_template_from_request], parameter[name[request], name[obj]]]
variable[page_templates] assign[=] call[name[settings].get_page_templates, parameter[]]
variable[template_choices] assign[=] call[name[list], parameter[name[page_templates]]]
if <ast.UnaryOp object at 0x7da18f810820> begin[:]
call[name[template_choices].insert, parameter[constant[0], tuple[[<ast.Attribute object at 0x7da18f813dc0>, <ast.Call object at 0x7da18f8102e0>]]]]
call[name[form].base_fields][constant[template]].choices assign[=] name[template_choices]
call[name[form].base_fields][constant[template]].initial assign[=] call[name[force_text], parameter[name[template]]]
for taget[name[placeholder]] in starred[call[name[get_placeholders], parameter[name[template]]]] begin[:]
variable[ctype] assign[=] name[placeholder].ctype
if name[obj] begin[:]
variable[initial] assign[=] call[name[placeholder].get_content, parameter[name[obj], name[language]]]
call[name[form].base_fields][name[ctype]] assign[=] call[name[placeholder].get_field, parameter[name[obj], name[language]]]
return[name[form]]
|
keyword[def] identifier[get_form] ( identifier[self] , identifier[request] , identifier[obj] = keyword[None] ,** identifier[kwargs] ):
literal[string]
identifier[template] = identifier[get_template_from_request] ( identifier[request] , identifier[obj] )
identifier[form] = identifier[make_form] ( identifier[self] . identifier[model] , identifier[get_placeholders] ( identifier[template] ))
identifier[language] = identifier[get_language_from_request] ( identifier[request] )
identifier[form] . identifier[base_fields] [ literal[string] ]. identifier[initial] = identifier[language]
keyword[if] identifier[obj] :
identifier[initial_slug] = identifier[obj] . identifier[slug] ( identifier[language] = identifier[language] , identifier[fallback] = keyword[False] )
identifier[initial_title] = identifier[obj] . identifier[title] ( identifier[language] = identifier[language] , identifier[fallback] = keyword[False] )
identifier[form] . identifier[base_fields] [ literal[string] ]. identifier[initial] = identifier[initial_slug]
identifier[form] . identifier[base_fields] [ literal[string] ]. identifier[initial] = identifier[initial_title]
identifier[template] = identifier[get_template_from_request] ( identifier[request] , identifier[obj] )
identifier[page_templates] = identifier[settings] . identifier[get_page_templates] ()
identifier[template_choices] = identifier[list] ( identifier[page_templates] )
keyword[if] keyword[not] [ identifier[tpl] keyword[for] identifier[tpl] keyword[in] identifier[template_choices] keyword[if] identifier[tpl] [ literal[int] ]== identifier[settings] . identifier[PAGE_DEFAULT_TEMPLATE] ]:
identifier[template_choices] . identifier[insert] ( literal[int] ,( identifier[settings] . identifier[PAGE_DEFAULT_TEMPLATE] ,
identifier[_] ( literal[string] )))
identifier[form] . identifier[base_fields] [ literal[string] ]. identifier[choices] = identifier[template_choices]
identifier[form] . identifier[base_fields] [ literal[string] ]. identifier[initial] = identifier[force_text] ( identifier[template] )
keyword[for] identifier[placeholder] keyword[in] identifier[get_placeholders] ( identifier[template] ):
identifier[ctype] = identifier[placeholder] . identifier[ctype]
keyword[if] identifier[obj] :
identifier[initial] = identifier[placeholder] . identifier[get_content] ( identifier[obj] , identifier[language] , identifier[lang_fallback] = keyword[False] )
keyword[else] :
identifier[initial] = keyword[None]
identifier[form] . identifier[base_fields] [ identifier[ctype] ]= identifier[placeholder] . identifier[get_field] ( identifier[obj] ,
identifier[language] , identifier[initial] = identifier[initial] )
keyword[return] identifier[form]
|
def get_form(self, request, obj=None, **kwargs):
"""Get a :class:`Page <pages.admin.forms.PageForm>` for the
:class:`Page <pages.models.Page>` and modify its fields depending on
the request."""
template = get_template_from_request(request, obj)
#model = create_page_model(get_placeholders(template))
form = make_form(self.model, get_placeholders(template))
# bound the form
language = get_language_from_request(request)
form.base_fields['language'].initial = language
if obj:
initial_slug = obj.slug(language=language, fallback=False)
initial_title = obj.title(language=language, fallback=False)
form.base_fields['slug'].initial = initial_slug
form.base_fields['title'].initial = initial_title # depends on [control=['if'], data=[]]
template = get_template_from_request(request, obj)
page_templates = settings.get_page_templates()
template_choices = list(page_templates)
# is default template is not in the list add it
if not [tpl for tpl in template_choices if tpl[0] == settings.PAGE_DEFAULT_TEMPLATE]:
template_choices.insert(0, (settings.PAGE_DEFAULT_TEMPLATE, _('Default template'))) # depends on [control=['if'], data=[]]
form.base_fields['template'].choices = template_choices
form.base_fields['template'].initial = force_text(template)
for placeholder in get_placeholders(template):
ctype = placeholder.ctype
if obj:
initial = placeholder.get_content(obj, language, lang_fallback=False) # depends on [control=['if'], data=[]]
else:
initial = None
form.base_fields[ctype] = placeholder.get_field(obj, language, initial=initial) # depends on [control=['for'], data=['placeholder']]
return form
|
def _classify_section(cls, section):
"""Attempt to find the canonical name of this section."""
name = section.lower()
if name in frozenset(['args', 'arguments', "params", "parameters"]):
return cls.ARGS_SECTION
if name in frozenset(['returns', 'return']):
return cls.RETURN_SECTION
if name in frozenset(['main']):
return cls.MAIN_SECTION
return None
|
def function[_classify_section, parameter[cls, section]]:
constant[Attempt to find the canonical name of this section.]
variable[name] assign[=] call[name[section].lower, parameter[]]
if compare[name[name] in call[name[frozenset], parameter[list[[<ast.Constant object at 0x7da1b026ebc0>, <ast.Constant object at 0x7da1b026eb60>, <ast.Constant object at 0x7da1b026f3a0>, <ast.Constant object at 0x7da1b026f400>]]]]] begin[:]
return[name[cls].ARGS_SECTION]
if compare[name[name] in call[name[frozenset], parameter[list[[<ast.Constant object at 0x7da1b026f8e0>, <ast.Constant object at 0x7da1b026f610>]]]]] begin[:]
return[name[cls].RETURN_SECTION]
if compare[name[name] in call[name[frozenset], parameter[list[[<ast.Constant object at 0x7da1b026ceb0>]]]]] begin[:]
return[name[cls].MAIN_SECTION]
return[constant[None]]
|
keyword[def] identifier[_classify_section] ( identifier[cls] , identifier[section] ):
literal[string]
identifier[name] = identifier[section] . identifier[lower] ()
keyword[if] identifier[name] keyword[in] identifier[frozenset] ([ literal[string] , literal[string] , literal[string] , literal[string] ]):
keyword[return] identifier[cls] . identifier[ARGS_SECTION]
keyword[if] identifier[name] keyword[in] identifier[frozenset] ([ literal[string] , literal[string] ]):
keyword[return] identifier[cls] . identifier[RETURN_SECTION]
keyword[if] identifier[name] keyword[in] identifier[frozenset] ([ literal[string] ]):
keyword[return] identifier[cls] . identifier[MAIN_SECTION]
keyword[return] keyword[None]
|
def _classify_section(cls, section):
"""Attempt to find the canonical name of this section."""
name = section.lower()
if name in frozenset(['args', 'arguments', 'params', 'parameters']):
return cls.ARGS_SECTION # depends on [control=['if'], data=[]]
if name in frozenset(['returns', 'return']):
return cls.RETURN_SECTION # depends on [control=['if'], data=[]]
if name in frozenset(['main']):
return cls.MAIN_SECTION # depends on [control=['if'], data=[]]
return None
|
def _init_session(self):
    """Initialise ``self.session`` as a requests.Session with a pooled
    HTTP adapter mounted for plain-http URLs."""
    adapter = requests.adapters.HTTPAdapter(pool_connections=10,
                                            pool_maxsize=100)
    session = requests.Session()
    session.mount('http://', adapter=adapter)
    self.session = session
|
def function[_init_session, parameter[self]]:
constant[ 初始化 requests.Session
]
name[self].session assign[=] call[name[requests].Session, parameter[]]
call[name[self].session.mount, parameter[constant[http://]]]
|
keyword[def] identifier[_init_session] ( identifier[self] ):
literal[string]
identifier[self] . identifier[session] = identifier[requests] . identifier[Session] ()
identifier[self] . identifier[session] . identifier[mount] ( literal[string] , identifier[adapter] = identifier[requests] . identifier[adapters] . identifier[HTTPAdapter] (
identifier[pool_connections] = literal[int] ,
identifier[pool_maxsize] = literal[int] ))
|
def _init_session(self):
""" 初始化 requests.Session
"""
self.session = requests.Session()
self.session.mount('http://', adapter=requests.adapters.HTTPAdapter(pool_connections=10, pool_maxsize=100))
|
def make_elements(tokens, text, start=0, end=None, fallback=None):
    """Make inline elements from a list of parsed tokens.

    Any unmatched gap between consecutive tokens (or before the first /
    after the last token) is wrapped in a *fallback* element.

    :param tokens: a list of parsed tokens, ordered by position.
    :param text: the original text.
    :param start: the offset of where parsing starts. Defaults to the start
        of text.
    :param end: the offset of where parsing ends. Defaults to the end of
        text.  ``None`` means "end of text"; an explicit ``end=0`` is now
        honoured instead of being treated as "end of text".
    :param fallback: fallback element type.
    :returns: a list of inline elements.
    """
    result = []
    # Identity check, not truthiness: `end or len(text)` silently replaced
    # an explicit end=0 with len(text).
    if end is None:
        end = len(text)
    prev_end = start
    for token in tokens:
        # Fill the hole between the previous token and this one.
        if prev_end < token.start:
            result.append(fallback(text[prev_end:token.start]))
        result.append(token.as_element())
        prev_end = token.end
    # Trailing hole after the last token.
    if prev_end < end:
        result.append(fallback(text[prev_end:end]))
    return result
|
def function[make_elements, parameter[tokens, text, start, end, fallback]]:
constant[Make elements from a list of parsed tokens.
It will turn all unmatched holes into fallback elements.
:param tokens: a list of parsed tokens.
:param text: the original tet.
:param start: the offset of where parsing starts. Defaults to the start of text.
:param end: the offset of where parsing ends. Defauls to the end of text.
:param fallback: fallback element type.
:returns: a list of inline elements.
]
variable[result] assign[=] list[[]]
variable[end] assign[=] <ast.BoolOp object at 0x7da18ede4f70>
variable[prev_end] assign[=] name[start]
for taget[name[token]] in starred[name[tokens]] begin[:]
if compare[name[prev_end] less[<] name[token].start] begin[:]
call[name[result].append, parameter[call[name[fallback], parameter[call[name[text]][<ast.Slice object at 0x7da18ede6ce0>]]]]]
call[name[result].append, parameter[call[name[token].as_element, parameter[]]]]
variable[prev_end] assign[=] name[token].end
if compare[name[prev_end] less[<] name[end]] begin[:]
call[name[result].append, parameter[call[name[fallback], parameter[call[name[text]][<ast.Slice object at 0x7da18ede72b0>]]]]]
return[name[result]]
|
keyword[def] identifier[make_elements] ( identifier[tokens] , identifier[text] , identifier[start] = literal[int] , identifier[end] = keyword[None] , identifier[fallback] = keyword[None] ):
literal[string]
identifier[result] =[]
identifier[end] = identifier[end] keyword[or] identifier[len] ( identifier[text] )
identifier[prev_end] = identifier[start]
keyword[for] identifier[token] keyword[in] identifier[tokens] :
keyword[if] identifier[prev_end] < identifier[token] . identifier[start] :
identifier[result] . identifier[append] ( identifier[fallback] ( identifier[text] [ identifier[prev_end] : identifier[token] . identifier[start] ]))
identifier[result] . identifier[append] ( identifier[token] . identifier[as_element] ())
identifier[prev_end] = identifier[token] . identifier[end]
keyword[if] identifier[prev_end] < identifier[end] :
identifier[result] . identifier[append] ( identifier[fallback] ( identifier[text] [ identifier[prev_end] : identifier[end] ]))
keyword[return] identifier[result]
|
def make_elements(tokens, text, start=0, end=None, fallback=None):
"""Make elements from a list of parsed tokens.
It will turn all unmatched holes into fallback elements.
:param tokens: a list of parsed tokens.
:param text: the original tet.
:param start: the offset of where parsing starts. Defaults to the start of text.
:param end: the offset of where parsing ends. Defauls to the end of text.
:param fallback: fallback element type.
:returns: a list of inline elements.
"""
result = []
end = end or len(text)
prev_end = start
for token in tokens:
if prev_end < token.start:
result.append(fallback(text[prev_end:token.start])) # depends on [control=['if'], data=['prev_end']]
result.append(token.as_element())
prev_end = token.end # depends on [control=['for'], data=['token']]
if prev_end < end:
result.append(fallback(text[prev_end:end])) # depends on [control=['if'], data=['prev_end', 'end']]
return result
|
def raise_right_error(response):
    """Raise the error matching a bad API response; no-op on HTTP 200."""
    status = response.status_code
    if status == 200:
        return
    if status == 500:
        raise ServerError('Clef servers are down.')
    if status == 403:
        message = response.json().get('error')
        error_class = MESSAGE_TO_ERROR_MAP[message]
        if error_class == InvalidOAuthTokenError:
            # Replace the server message with a friendlier explanation.
            message = 'Something went wrong at Clef. Unable to retrieve user information with this token.'
        raise error_class(message)
    if status == 400:
        message = response.json().get('error')
        error_class = MESSAGE_TO_ERROR_MAP[message]
        if not error_class:
            raise InvalidLogoutTokenError(message)
        raise error_class(message)
    if status == 404:
        raise NotFoundError('Unable to retrieve the page. Are you sure the Clef API endpoint is configured right?')
    raise APIError
|
def function[raise_right_error, parameter[response]]:
constant[Raise appropriate error when bad response received.]
if compare[name[response].status_code equal[==] constant[200]] begin[:]
return[None]
if compare[name[response].status_code equal[==] constant[500]] begin[:]
<ast.Raise object at 0x7da1b1454040>
if compare[name[response].status_code equal[==] constant[403]] begin[:]
variable[message] assign[=] call[call[name[response].json, parameter[]].get, parameter[constant[error]]]
variable[error_class] assign[=] call[name[MESSAGE_TO_ERROR_MAP]][name[message]]
if compare[name[error_class] equal[==] name[InvalidOAuthTokenError]] begin[:]
variable[message] assign[=] constant[Something went wrong at Clef. Unable to retrieve user information with this token.]
<ast.Raise object at 0x7da1b14722f0>
if compare[name[response].status_code equal[==] constant[400]] begin[:]
variable[message] assign[=] call[call[name[response].json, parameter[]].get, parameter[constant[error]]]
variable[error_class] assign[=] call[name[MESSAGE_TO_ERROR_MAP]][name[message]]
if name[error_class] begin[:]
<ast.Raise object at 0x7da1b1473490>
if compare[name[response].status_code equal[==] constant[404]] begin[:]
<ast.Raise object at 0x7da1b14703d0>
<ast.Raise object at 0x7da1b14734c0>
|
keyword[def] identifier[raise_right_error] ( identifier[response] ):
literal[string]
keyword[if] identifier[response] . identifier[status_code] == literal[int] :
keyword[return]
keyword[if] identifier[response] . identifier[status_code] == literal[int] :
keyword[raise] identifier[ServerError] ( literal[string] )
keyword[if] identifier[response] . identifier[status_code] == literal[int] :
identifier[message] = identifier[response] . identifier[json] (). identifier[get] ( literal[string] )
identifier[error_class] = identifier[MESSAGE_TO_ERROR_MAP] [ identifier[message] ]
keyword[if] identifier[error_class] == identifier[InvalidOAuthTokenError] :
identifier[message] = literal[string]
keyword[raise] identifier[error_class] ( identifier[message] )
keyword[if] identifier[response] . identifier[status_code] == literal[int] :
identifier[message] = identifier[response] . identifier[json] (). identifier[get] ( literal[string] )
identifier[error_class] = identifier[MESSAGE_TO_ERROR_MAP] [ identifier[message] ]
keyword[if] identifier[error_class] :
keyword[raise] identifier[error_class] ( identifier[message] )
keyword[else] :
keyword[raise] identifier[InvalidLogoutTokenError] ( identifier[message] )
keyword[if] identifier[response] . identifier[status_code] == literal[int] :
keyword[raise] identifier[NotFoundError] ( literal[string] )
keyword[raise] identifier[APIError]
|
def raise_right_error(response):
"""Raise appropriate error when bad response received."""
if response.status_code == 200:
return # depends on [control=['if'], data=[]]
if response.status_code == 500:
raise ServerError('Clef servers are down.') # depends on [control=['if'], data=[]]
if response.status_code == 403:
message = response.json().get('error')
error_class = MESSAGE_TO_ERROR_MAP[message]
if error_class == InvalidOAuthTokenError:
message = 'Something went wrong at Clef. Unable to retrieve user information with this token.' # depends on [control=['if'], data=[]]
raise error_class(message) # depends on [control=['if'], data=[]]
if response.status_code == 400:
message = response.json().get('error')
error_class = MESSAGE_TO_ERROR_MAP[message]
if error_class:
raise error_class(message) # depends on [control=['if'], data=[]]
else:
raise InvalidLogoutTokenError(message) # depends on [control=['if'], data=[]]
if response.status_code == 404:
raise NotFoundError('Unable to retrieve the page. Are you sure the Clef API endpoint is configured right?') # depends on [control=['if'], data=[]]
raise APIError
|
def make_diffuse_comp_info_dict(**kwargs):
    """Build and return the information about the diffuse components.

    :param library: path to the model library yaml file
        (default ``'models/library.yaml'``; ``None`` or the string
        ``'None'`` disables reading it).
    :param components: pre-built list of components; if omitted they are
        built from the ``comp`` yaml file.
    :param comp: path to the binning yaml file
        (default ``'config/binning.yaml'``); only used when
        ``components`` is not supplied.
    :param GalpropMapManager: optional pre-built manager instance.
    :param DiffuseModelManager: optional pre-built manager instance.
    :returns: dict with keys ``comp_info_dict``, ``GalpropMapManager``
        and ``DiffuseModelManager``.
    """
    library_yamlfile = kwargs.pop('library', 'models/library.yaml')
    components = kwargs.pop('components', None)
    if components is None:
        comp_yamlfile = kwargs.pop('comp', 'config/binning.yaml')
        components = Component.build_from_yamlfile(comp_yamlfile)
    # kwargs.get(key, default) evaluates the default eagerly, which would
    # construct (and discard) a fresh manager even when one is supplied;
    # build the default only when the key is actually absent.
    if 'GalpropMapManager' in kwargs:
        gmm = kwargs['GalpropMapManager']
    else:
        gmm = GalpropMapManager(**kwargs)
    if 'DiffuseModelManager' in kwargs:
        dmm = kwargs['DiffuseModelManager']
    else:
        dmm = DiffuseModelManager(**kwargs)
    if library_yamlfile is None or library_yamlfile == 'None':
        diffuse_comps = {}
    else:
        diffuse_comps = DiffuseModelManager.read_diffuse_component_yaml(
            library_yamlfile)
    diffuse_comp_info_dict = dmm.make_diffuse_comp_info_dict(
        diffuse_comps, components)
    # Expand galprop_rings models: each requested version contributes its
    # own set of component infos.
    for diffuse_value in diffuse_comps.values():
        if diffuse_value is None:
            continue
        if diffuse_value['model_type'] != 'galprop_rings':
            continue
        versions = diffuse_value['versions']
        for version in versions:
            galprop_dict = gmm.make_diffuse_comp_info_dict(version)
            diffuse_comp_info_dict.update(galprop_dict)
    return dict(comp_info_dict=diffuse_comp_info_dict,
                GalpropMapManager=gmm,
                DiffuseModelManager=dmm)
|
def function[make_diffuse_comp_info_dict, parameter[]]:
constant[Build and return the information about the diffuse components
]
variable[library_yamlfile] assign[=] call[name[kwargs].pop, parameter[constant[library], constant[models/library.yaml]]]
variable[components] assign[=] call[name[kwargs].pop, parameter[constant[components], constant[None]]]
if compare[name[components] is constant[None]] begin[:]
variable[comp_yamlfile] assign[=] call[name[kwargs].pop, parameter[constant[comp], constant[config/binning.yaml]]]
variable[components] assign[=] call[name[Component].build_from_yamlfile, parameter[name[comp_yamlfile]]]
variable[gmm] assign[=] call[name[kwargs].get, parameter[constant[GalpropMapManager], call[name[GalpropMapManager], parameter[]]]]
variable[dmm] assign[=] call[name[kwargs].get, parameter[constant[DiffuseModelManager], call[name[DiffuseModelManager], parameter[]]]]
if <ast.BoolOp object at 0x7da18bcca020> begin[:]
variable[diffuse_comps] assign[=] dictionary[[], []]
variable[diffuse_comp_info_dict] assign[=] call[name[dmm].make_diffuse_comp_info_dict, parameter[name[diffuse_comps], name[components]]]
for taget[name[diffuse_value]] in starred[call[name[diffuse_comps].values, parameter[]]] begin[:]
if compare[name[diffuse_value] is constant[None]] begin[:]
continue
if compare[call[name[diffuse_value]][constant[model_type]] not_equal[!=] constant[galprop_rings]] begin[:]
continue
variable[versions] assign[=] call[name[diffuse_value]][constant[versions]]
for taget[name[version]] in starred[name[versions]] begin[:]
variable[galprop_dict] assign[=] call[name[gmm].make_diffuse_comp_info_dict, parameter[name[version]]]
call[name[diffuse_comp_info_dict].update, parameter[name[galprop_dict]]]
return[call[name[dict], parameter[]]]
|
keyword[def] identifier[make_diffuse_comp_info_dict] (** identifier[kwargs] ):
literal[string]
identifier[library_yamlfile] = identifier[kwargs] . identifier[pop] ( literal[string] , literal[string] )
identifier[components] = identifier[kwargs] . identifier[pop] ( literal[string] , keyword[None] )
keyword[if] identifier[components] keyword[is] keyword[None] :
identifier[comp_yamlfile] = identifier[kwargs] . identifier[pop] ( literal[string] , literal[string] )
identifier[components] = identifier[Component] . identifier[build_from_yamlfile] ( identifier[comp_yamlfile] )
identifier[gmm] = identifier[kwargs] . identifier[get] ( literal[string] , identifier[GalpropMapManager] (** identifier[kwargs] ))
identifier[dmm] = identifier[kwargs] . identifier[get] ( literal[string] , identifier[DiffuseModelManager] (** identifier[kwargs] ))
keyword[if] identifier[library_yamlfile] keyword[is] keyword[None] keyword[or] identifier[library_yamlfile] == literal[string] :
identifier[diffuse_comps] ={}
keyword[else] :
identifier[diffuse_comps] = identifier[DiffuseModelManager] . identifier[read_diffuse_component_yaml] (
identifier[library_yamlfile] )
identifier[diffuse_comp_info_dict] = identifier[dmm] . identifier[make_diffuse_comp_info_dict] (
identifier[diffuse_comps] , identifier[components] )
keyword[for] identifier[diffuse_value] keyword[in] identifier[diffuse_comps] . identifier[values] ():
keyword[if] identifier[diffuse_value] keyword[is] keyword[None] :
keyword[continue]
keyword[if] identifier[diffuse_value] [ literal[string] ]!= literal[string] :
keyword[continue]
identifier[versions] = identifier[diffuse_value] [ literal[string] ]
keyword[for] identifier[version] keyword[in] identifier[versions] :
identifier[galprop_dict] = identifier[gmm] . identifier[make_diffuse_comp_info_dict] ( identifier[version] )
identifier[diffuse_comp_info_dict] . identifier[update] ( identifier[galprop_dict] )
keyword[return] identifier[dict] ( identifier[comp_info_dict] = identifier[diffuse_comp_info_dict] ,
identifier[GalpropMapManager] = identifier[gmm] ,
identifier[DiffuseModelManager] = identifier[dmm] )
|
def make_diffuse_comp_info_dict(**kwargs):
"""Build and return the information about the diffuse components
"""
library_yamlfile = kwargs.pop('library', 'models/library.yaml')
components = kwargs.pop('components', None)
if components is None:
comp_yamlfile = kwargs.pop('comp', 'config/binning.yaml')
components = Component.build_from_yamlfile(comp_yamlfile) # depends on [control=['if'], data=['components']]
gmm = kwargs.get('GalpropMapManager', GalpropMapManager(**kwargs))
dmm = kwargs.get('DiffuseModelManager', DiffuseModelManager(**kwargs))
if library_yamlfile is None or library_yamlfile == 'None':
diffuse_comps = {} # depends on [control=['if'], data=[]]
else:
diffuse_comps = DiffuseModelManager.read_diffuse_component_yaml(library_yamlfile)
diffuse_comp_info_dict = dmm.make_diffuse_comp_info_dict(diffuse_comps, components)
for diffuse_value in diffuse_comps.values():
if diffuse_value is None:
continue # depends on [control=['if'], data=[]]
if diffuse_value['model_type'] != 'galprop_rings':
continue # depends on [control=['if'], data=[]]
versions = diffuse_value['versions']
for version in versions:
galprop_dict = gmm.make_diffuse_comp_info_dict(version)
diffuse_comp_info_dict.update(galprop_dict) # depends on [control=['for'], data=['version']] # depends on [control=['for'], data=['diffuse_value']]
return dict(comp_info_dict=diffuse_comp_info_dict, GalpropMapManager=gmm, DiffuseModelManager=dmm)
|
def _setup_chassis(self):
    """
    Sets up the router with the corresponding chassis
    (create slots and insert default adapters).
    """
    # On the 1751 and 1760, WICs in WIC slot 1 show up as their own slot
    # (e.g. s1/0 rather than s0/2), so those chassis get two slots.
    if self._chassis not in ('1751', '1760'):
        self._create_slots(1)
        self._slots[0] = C1700_MB_1FE()
    else:
        self._create_slots(2)
        self._slots[1] = C1700_MB_WIC1()
|
def function[_setup_chassis, parameter[self]]:
constant[
Sets up the router with the corresponding chassis
(create slots and insert default adapters).
]
if compare[name[self]._chassis in list[[<ast.Constant object at 0x7da2047e92d0>, <ast.Constant object at 0x7da2047e9240>]]] begin[:]
call[name[self]._create_slots, parameter[constant[2]]]
call[name[self]._slots][constant[1]] assign[=] call[name[C1700_MB_WIC1], parameter[]]
call[name[self]._slots][constant[0]] assign[=] call[name[C1700_MB_1FE], parameter[]]
|
keyword[def] identifier[_setup_chassis] ( identifier[self] ):
literal[string]
keyword[if] identifier[self] . identifier[_chassis] keyword[in] [ literal[string] , literal[string] ]:
identifier[self] . identifier[_create_slots] ( literal[int] )
identifier[self] . identifier[_slots] [ literal[int] ]= identifier[C1700_MB_WIC1] ()
keyword[else] :
identifier[self] . identifier[_create_slots] ( literal[int] )
identifier[self] . identifier[_slots] [ literal[int] ]= identifier[C1700_MB_1FE] ()
|
def _setup_chassis(self):
"""
Sets up the router with the corresponding chassis
(create slots and insert default adapters).
"""
# With 1751 and 1760, WICs in WIC slot 1 show up as in slot 1, not 0
# e.g. s1/0 not s0/2
if self._chassis in ['1751', '1760']:
self._create_slots(2)
self._slots[1] = C1700_MB_WIC1() # depends on [control=['if'], data=[]]
else:
self._create_slots(1)
self._slots[0] = C1700_MB_1FE()
|
def first_derivative(f, **kwargs):
    """Calculate the first derivative of a grid of values.
    Works for both regularly-spaced data and grids with varying spacing.
    Either `x` or `delta` must be specified, or `f` must be given as an `xarray.DataArray` with
    attached coordinate and projection information. If `f` is an `xarray.DataArray`, and `x` or
    `delta` are given, `f` will be converted to a `pint.Quantity` and the derivative returned
    as a `pint.Quantity`, otherwise, if neither `x` nor `delta` are given, the attached
    coordinate information belonging to `axis` will be used and the derivative will be returned
    as an `xarray.DataArray`.
    This uses 3 points to calculate the derivative, using forward or backward at the edges of
    the grid as appropriate, and centered elsewhere. The irregular spacing is handled
    explicitly, using the formulation as specified by [Bowen2005]_.
    Parameters
    ----------
    f : array-like
        Array of values of which to calculate the derivative
    axis : int or str, optional
        The array axis along which to take the derivative. If `f` is ndarray-like, must be an
        integer. If `f` is a `DataArray`, can be a string (referring to either the coordinate
        dimension name or the axis type) or integer (referring to axis number), unless using
        implicit conversion to `pint.Quantity`, in which case it must be an integer. Defaults
        to 0.
    x : array-like, optional
        The coordinate values corresponding to the grid points in `f`.
    delta : array-like, optional
        Spacing between the grid points in `f`. Should be one item less than the size
        of `f` along `axis`.
    Returns
    -------
    array-like
        The first derivative calculated along the selected axis.
    See Also
    --------
    second_derivative
    """
    # _process_deriv_args resolves the axis/x/delta keywords into the
    # number of dimensions, the integer axis, and the spacing array.
    n, axis, delta = _process_deriv_args(f, kwargs)
    # create slice objects --- initially all are [:, :, ..., :]
    slice0 = [slice(None)] * n
    slice1 = [slice(None)] * n
    slice2 = [slice(None)] * n
    delta_slice0 = [slice(None)] * n
    delta_slice1 = [slice(None)] * n
    # First handle centered case
    # slice0/slice1/slice2 select the left/center/right points of each
    # 3-point stencil; delta_slice0/delta_slice1 select the spacings on
    # either side of the center point.
    slice0[axis] = slice(None, -2)
    slice1[axis] = slice(1, -1)
    slice2[axis] = slice(2, None)
    delta_slice0[axis] = slice(None, -1)
    delta_slice1[axis] = slice(1, None)
    combined_delta = delta[tuple(delta_slice0)] + delta[tuple(delta_slice1)]
    delta_diff = delta[tuple(delta_slice1)] - delta[tuple(delta_slice0)]
    # Non-uniform centered difference: the three weights below reduce to
    # the familiar (f[i+1] - f[i-1]) / (2*delta) when the spacing is equal.
    center = (- delta[tuple(delta_slice1)] / (combined_delta * delta[tuple(delta_slice0)])
              * f[tuple(slice0)]
              + delta_diff / (delta[tuple(delta_slice0)] * delta[tuple(delta_slice1)])
              * f[tuple(slice1)]
              + delta[tuple(delta_slice0)] / (combined_delta * delta[tuple(delta_slice1)])
              * f[tuple(slice2)])
    # Fill in "left" edge with forward difference
    # One-sided 3-point stencil built from the first three grid points.
    slice0[axis] = slice(None, 1)
    slice1[axis] = slice(1, 2)
    slice2[axis] = slice(2, 3)
    delta_slice0[axis] = slice(None, 1)
    delta_slice1[axis] = slice(1, 2)
    combined_delta = delta[tuple(delta_slice0)] + delta[tuple(delta_slice1)]
    big_delta = combined_delta + delta[tuple(delta_slice0)]
    left = (- big_delta / (combined_delta * delta[tuple(delta_slice0)])
            * f[tuple(slice0)]
            + combined_delta / (delta[tuple(delta_slice0)] * delta[tuple(delta_slice1)])
            * f[tuple(slice1)]
            - delta[tuple(delta_slice0)] / (combined_delta * delta[tuple(delta_slice1)])
            * f[tuple(slice2)])
    # Now the "right" edge with backward difference
    # Mirror image of the left edge, built from the last three grid points.
    slice0[axis] = slice(-3, -2)
    slice1[axis] = slice(-2, -1)
    slice2[axis] = slice(-1, None)
    delta_slice0[axis] = slice(-2, -1)
    delta_slice1[axis] = slice(-1, None)
    combined_delta = delta[tuple(delta_slice0)] + delta[tuple(delta_slice1)]
    big_delta = combined_delta + delta[tuple(delta_slice1)]
    right = (delta[tuple(delta_slice1)] / (combined_delta * delta[tuple(delta_slice0)])
             * f[tuple(slice0)]
             - combined_delta / (delta[tuple(delta_slice0)] * delta[tuple(delta_slice1)])
             * f[tuple(slice1)]
             + big_delta / (combined_delta * delta[tuple(delta_slice1)])
             * f[tuple(slice2)])
    # Stitch the one-point edge results onto the interior along `axis`.
    return concatenate((left, center, right), axis=axis)
|
def function[first_derivative, parameter[f]]:
constant[Calculate the first derivative of a grid of values.
Works for both regularly-spaced data and grids with varying spacing.
Either `x` or `delta` must be specified, or `f` must be given as an `xarray.DataArray` with
attached coordinate and projection information. If `f` is an `xarray.DataArray`, and `x` or
`delta` are given, `f` will be converted to a `pint.Quantity` and the derivative returned
as a `pint.Quantity`, otherwise, if neither `x` nor `delta` are given, the attached
coordinate information belonging to `axis` will be used and the derivative will be returned
as an `xarray.DataArray`.
This uses 3 points to calculate the derivative, using forward or backward at the edges of
the grid as appropriate, and centered elsewhere. The irregular spacing is handled
explicitly, using the formulation as specified by [Bowen2005]_.
Parameters
----------
f : array-like
Array of values of which to calculate the derivative
axis : int or str, optional
The array axis along which to take the derivative. If `f` is ndarray-like, must be an
integer. If `f` is a `DataArray`, can be a string (referring to either the coordinate
dimension name or the axis type) or integer (referring to axis number), unless using
implicit conversion to `pint.Quantity`, in which case it must be an integer. Defaults
to 0.
x : array-like, optional
The coordinate values corresponding to the grid points in `f`.
delta : array-like, optional
Spacing between the grid points in `f`. Should be one item less than the size
of `f` along `axis`.
Returns
-------
array-like
The first derivative calculated along the selected axis.
See Also
--------
second_derivative
]
<ast.Tuple object at 0x7da1b22c79d0> assign[=] call[name[_process_deriv_args], parameter[name[f], name[kwargs]]]
variable[slice0] assign[=] binary_operation[list[[<ast.Call object at 0x7da1b22c7eb0>]] * name[n]]
variable[slice1] assign[=] binary_operation[list[[<ast.Call object at 0x7da1b22c7bb0>]] * name[n]]
variable[slice2] assign[=] binary_operation[list[[<ast.Call object at 0x7da1b1d5c2e0>]] * name[n]]
variable[delta_slice0] assign[=] binary_operation[list[[<ast.Call object at 0x7da1b1d5dfc0>]] * name[n]]
variable[delta_slice1] assign[=] binary_operation[list[[<ast.Call object at 0x7da1b1d5cfd0>]] * name[n]]
call[name[slice0]][name[axis]] assign[=] call[name[slice], parameter[constant[None], <ast.UnaryOp object at 0x7da1b1d5de70>]]
call[name[slice1]][name[axis]] assign[=] call[name[slice], parameter[constant[1], <ast.UnaryOp object at 0x7da1b1d5ec50>]]
call[name[slice2]][name[axis]] assign[=] call[name[slice], parameter[constant[2], constant[None]]]
call[name[delta_slice0]][name[axis]] assign[=] call[name[slice], parameter[constant[None], <ast.UnaryOp object at 0x7da1b1d5d8d0>]]
call[name[delta_slice1]][name[axis]] assign[=] call[name[slice], parameter[constant[1], constant[None]]]
variable[combined_delta] assign[=] binary_operation[call[name[delta]][call[name[tuple], parameter[name[delta_slice0]]]] + call[name[delta]][call[name[tuple], parameter[name[delta_slice1]]]]]
variable[delta_diff] assign[=] binary_operation[call[name[delta]][call[name[tuple], parameter[name[delta_slice1]]]] - call[name[delta]][call[name[tuple], parameter[name[delta_slice0]]]]]
variable[center] assign[=] binary_operation[binary_operation[binary_operation[binary_operation[<ast.UnaryOp object at 0x7da1b1d5d960> / binary_operation[name[combined_delta] * call[name[delta]][call[name[tuple], parameter[name[delta_slice0]]]]]] * call[name[f]][call[name[tuple], parameter[name[slice0]]]]] + binary_operation[binary_operation[name[delta_diff] / binary_operation[call[name[delta]][call[name[tuple], parameter[name[delta_slice0]]]] * call[name[delta]][call[name[tuple], parameter[name[delta_slice1]]]]]] * call[name[f]][call[name[tuple], parameter[name[slice1]]]]]] + binary_operation[binary_operation[call[name[delta]][call[name[tuple], parameter[name[delta_slice0]]]] / binary_operation[name[combined_delta] * call[name[delta]][call[name[tuple], parameter[name[delta_slice1]]]]]] * call[name[f]][call[name[tuple], parameter[name[slice2]]]]]]
call[name[slice0]][name[axis]] assign[=] call[name[slice], parameter[constant[None], constant[1]]]
call[name[slice1]][name[axis]] assign[=] call[name[slice], parameter[constant[1], constant[2]]]
call[name[slice2]][name[axis]] assign[=] call[name[slice], parameter[constant[2], constant[3]]]
call[name[delta_slice0]][name[axis]] assign[=] call[name[slice], parameter[constant[None], constant[1]]]
call[name[delta_slice1]][name[axis]] assign[=] call[name[slice], parameter[constant[1], constant[2]]]
variable[combined_delta] assign[=] binary_operation[call[name[delta]][call[name[tuple], parameter[name[delta_slice0]]]] + call[name[delta]][call[name[tuple], parameter[name[delta_slice1]]]]]
variable[big_delta] assign[=] binary_operation[name[combined_delta] + call[name[delta]][call[name[tuple], parameter[name[delta_slice0]]]]]
variable[left] assign[=] binary_operation[binary_operation[binary_operation[binary_operation[<ast.UnaryOp object at 0x7da1b2297820> / binary_operation[name[combined_delta] * call[name[delta]][call[name[tuple], parameter[name[delta_slice0]]]]]] * call[name[f]][call[name[tuple], parameter[name[slice0]]]]] + binary_operation[binary_operation[name[combined_delta] / binary_operation[call[name[delta]][call[name[tuple], parameter[name[delta_slice0]]]] * call[name[delta]][call[name[tuple], parameter[name[delta_slice1]]]]]] * call[name[f]][call[name[tuple], parameter[name[slice1]]]]]] - binary_operation[binary_operation[call[name[delta]][call[name[tuple], parameter[name[delta_slice0]]]] / binary_operation[name[combined_delta] * call[name[delta]][call[name[tuple], parameter[name[delta_slice1]]]]]] * call[name[f]][call[name[tuple], parameter[name[slice2]]]]]]
call[name[slice0]][name[axis]] assign[=] call[name[slice], parameter[<ast.UnaryOp object at 0x7da1b2295150>, <ast.UnaryOp object at 0x7da1b2294490>]]
call[name[slice1]][name[axis]] assign[=] call[name[slice], parameter[<ast.UnaryOp object at 0x7da1b2296260>, <ast.UnaryOp object at 0x7da1b2296d70>]]
call[name[slice2]][name[axis]] assign[=] call[name[slice], parameter[<ast.UnaryOp object at 0x7da1b2297d00>, constant[None]]]
call[name[delta_slice0]][name[axis]] assign[=] call[name[slice], parameter[<ast.UnaryOp object at 0x7da1b22950f0>, <ast.UnaryOp object at 0x7da1b2296500>]]
call[name[delta_slice1]][name[axis]] assign[=] call[name[slice], parameter[<ast.UnaryOp object at 0x7da1b2297ee0>, constant[None]]]
variable[combined_delta] assign[=] binary_operation[call[name[delta]][call[name[tuple], parameter[name[delta_slice0]]]] + call[name[delta]][call[name[tuple], parameter[name[delta_slice1]]]]]
variable[big_delta] assign[=] binary_operation[name[combined_delta] + call[name[delta]][call[name[tuple], parameter[name[delta_slice1]]]]]
variable[right] assign[=] binary_operation[binary_operation[binary_operation[binary_operation[call[name[delta]][call[name[tuple], parameter[name[delta_slice1]]]] / binary_operation[name[combined_delta] * call[name[delta]][call[name[tuple], parameter[name[delta_slice0]]]]]] * call[name[f]][call[name[tuple], parameter[name[slice0]]]]] - binary_operation[binary_operation[name[combined_delta] / binary_operation[call[name[delta]][call[name[tuple], parameter[name[delta_slice0]]]] * call[name[delta]][call[name[tuple], parameter[name[delta_slice1]]]]]] * call[name[f]][call[name[tuple], parameter[name[slice1]]]]]] + binary_operation[binary_operation[name[big_delta] / binary_operation[name[combined_delta] * call[name[delta]][call[name[tuple], parameter[name[delta_slice1]]]]]] * call[name[f]][call[name[tuple], parameter[name[slice2]]]]]]
return[call[name[concatenate], parameter[tuple[[<ast.Name object at 0x7da1b1d04e50>, <ast.Name object at 0x7da1b1d064a0>, <ast.Name object at 0x7da1b1d05b70>]]]]]
|
keyword[def] identifier[first_derivative] ( identifier[f] ,** identifier[kwargs] ):
literal[string]
identifier[n] , identifier[axis] , identifier[delta] = identifier[_process_deriv_args] ( identifier[f] , identifier[kwargs] )
identifier[slice0] =[ identifier[slice] ( keyword[None] )]* identifier[n]
identifier[slice1] =[ identifier[slice] ( keyword[None] )]* identifier[n]
identifier[slice2] =[ identifier[slice] ( keyword[None] )]* identifier[n]
identifier[delta_slice0] =[ identifier[slice] ( keyword[None] )]* identifier[n]
identifier[delta_slice1] =[ identifier[slice] ( keyword[None] )]* identifier[n]
identifier[slice0] [ identifier[axis] ]= identifier[slice] ( keyword[None] ,- literal[int] )
identifier[slice1] [ identifier[axis] ]= identifier[slice] ( literal[int] ,- literal[int] )
identifier[slice2] [ identifier[axis] ]= identifier[slice] ( literal[int] , keyword[None] )
identifier[delta_slice0] [ identifier[axis] ]= identifier[slice] ( keyword[None] ,- literal[int] )
identifier[delta_slice1] [ identifier[axis] ]= identifier[slice] ( literal[int] , keyword[None] )
identifier[combined_delta] = identifier[delta] [ identifier[tuple] ( identifier[delta_slice0] )]+ identifier[delta] [ identifier[tuple] ( identifier[delta_slice1] )]
identifier[delta_diff] = identifier[delta] [ identifier[tuple] ( identifier[delta_slice1] )]- identifier[delta] [ identifier[tuple] ( identifier[delta_slice0] )]
identifier[center] =(- identifier[delta] [ identifier[tuple] ( identifier[delta_slice1] )]/( identifier[combined_delta] * identifier[delta] [ identifier[tuple] ( identifier[delta_slice0] )])
* identifier[f] [ identifier[tuple] ( identifier[slice0] )]
+ identifier[delta_diff] /( identifier[delta] [ identifier[tuple] ( identifier[delta_slice0] )]* identifier[delta] [ identifier[tuple] ( identifier[delta_slice1] )])
* identifier[f] [ identifier[tuple] ( identifier[slice1] )]
+ identifier[delta] [ identifier[tuple] ( identifier[delta_slice0] )]/( identifier[combined_delta] * identifier[delta] [ identifier[tuple] ( identifier[delta_slice1] )])
* identifier[f] [ identifier[tuple] ( identifier[slice2] )])
identifier[slice0] [ identifier[axis] ]= identifier[slice] ( keyword[None] , literal[int] )
identifier[slice1] [ identifier[axis] ]= identifier[slice] ( literal[int] , literal[int] )
identifier[slice2] [ identifier[axis] ]= identifier[slice] ( literal[int] , literal[int] )
identifier[delta_slice0] [ identifier[axis] ]= identifier[slice] ( keyword[None] , literal[int] )
identifier[delta_slice1] [ identifier[axis] ]= identifier[slice] ( literal[int] , literal[int] )
identifier[combined_delta] = identifier[delta] [ identifier[tuple] ( identifier[delta_slice0] )]+ identifier[delta] [ identifier[tuple] ( identifier[delta_slice1] )]
identifier[big_delta] = identifier[combined_delta] + identifier[delta] [ identifier[tuple] ( identifier[delta_slice0] )]
identifier[left] =(- identifier[big_delta] /( identifier[combined_delta] * identifier[delta] [ identifier[tuple] ( identifier[delta_slice0] )])
* identifier[f] [ identifier[tuple] ( identifier[slice0] )]
+ identifier[combined_delta] /( identifier[delta] [ identifier[tuple] ( identifier[delta_slice0] )]* identifier[delta] [ identifier[tuple] ( identifier[delta_slice1] )])
* identifier[f] [ identifier[tuple] ( identifier[slice1] )]
- identifier[delta] [ identifier[tuple] ( identifier[delta_slice0] )]/( identifier[combined_delta] * identifier[delta] [ identifier[tuple] ( identifier[delta_slice1] )])
* identifier[f] [ identifier[tuple] ( identifier[slice2] )])
identifier[slice0] [ identifier[axis] ]= identifier[slice] (- literal[int] ,- literal[int] )
identifier[slice1] [ identifier[axis] ]= identifier[slice] (- literal[int] ,- literal[int] )
identifier[slice2] [ identifier[axis] ]= identifier[slice] (- literal[int] , keyword[None] )
identifier[delta_slice0] [ identifier[axis] ]= identifier[slice] (- literal[int] ,- literal[int] )
identifier[delta_slice1] [ identifier[axis] ]= identifier[slice] (- literal[int] , keyword[None] )
identifier[combined_delta] = identifier[delta] [ identifier[tuple] ( identifier[delta_slice0] )]+ identifier[delta] [ identifier[tuple] ( identifier[delta_slice1] )]
identifier[big_delta] = identifier[combined_delta] + identifier[delta] [ identifier[tuple] ( identifier[delta_slice1] )]
identifier[right] =( identifier[delta] [ identifier[tuple] ( identifier[delta_slice1] )]/( identifier[combined_delta] * identifier[delta] [ identifier[tuple] ( identifier[delta_slice0] )])
* identifier[f] [ identifier[tuple] ( identifier[slice0] )]
- identifier[combined_delta] /( identifier[delta] [ identifier[tuple] ( identifier[delta_slice0] )]* identifier[delta] [ identifier[tuple] ( identifier[delta_slice1] )])
* identifier[f] [ identifier[tuple] ( identifier[slice1] )]
+ identifier[big_delta] /( identifier[combined_delta] * identifier[delta] [ identifier[tuple] ( identifier[delta_slice1] )])
* identifier[f] [ identifier[tuple] ( identifier[slice2] )])
keyword[return] identifier[concatenate] (( identifier[left] , identifier[center] , identifier[right] ), identifier[axis] = identifier[axis] )
|
def first_derivative(f, **kwargs):
"""Calculate the first derivative of a grid of values.
Works for both regularly-spaced data and grids with varying spacing.
Either `x` or `delta` must be specified, or `f` must be given as an `xarray.DataArray` with
attached coordinate and projection information. If `f` is an `xarray.DataArray`, and `x` or
`delta` are given, `f` will be converted to a `pint.Quantity` and the derivative returned
as a `pint.Quantity`, otherwise, if neither `x` nor `delta` are given, the attached
coordinate information belonging to `axis` will be used and the derivative will be returned
as an `xarray.DataArray`.
This uses 3 points to calculate the derivative, using forward or backward at the edges of
the grid as appropriate, and centered elsewhere. The irregular spacing is handled
explicitly, using the formulation as specified by [Bowen2005]_.
Parameters
----------
f : array-like
Array of values of which to calculate the derivative
axis : int or str, optional
The array axis along which to take the derivative. If `f` is ndarray-like, must be an
integer. If `f` is a `DataArray`, can be a string (referring to either the coordinate
dimension name or the axis type) or integer (referring to axis number), unless using
implicit conversion to `pint.Quantity`, in which case it must be an integer. Defaults
to 0.
x : array-like, optional
The coordinate values corresponding to the grid points in `f`.
delta : array-like, optional
Spacing between the grid points in `f`. Should be one item less than the size
of `f` along `axis`.
Returns
-------
array-like
The first derivative calculated along the selected axis.
See Also
--------
second_derivative
"""
(n, axis, delta) = _process_deriv_args(f, kwargs)
# create slice objects --- initially all are [:, :, ..., :]
slice0 = [slice(None)] * n
slice1 = [slice(None)] * n
slice2 = [slice(None)] * n
delta_slice0 = [slice(None)] * n
delta_slice1 = [slice(None)] * n
# First handle centered case
slice0[axis] = slice(None, -2)
slice1[axis] = slice(1, -1)
slice2[axis] = slice(2, None)
delta_slice0[axis] = slice(None, -1)
delta_slice1[axis] = slice(1, None)
combined_delta = delta[tuple(delta_slice0)] + delta[tuple(delta_slice1)]
delta_diff = delta[tuple(delta_slice1)] - delta[tuple(delta_slice0)]
center = -delta[tuple(delta_slice1)] / (combined_delta * delta[tuple(delta_slice0)]) * f[tuple(slice0)] + delta_diff / (delta[tuple(delta_slice0)] * delta[tuple(delta_slice1)]) * f[tuple(slice1)] + delta[tuple(delta_slice0)] / (combined_delta * delta[tuple(delta_slice1)]) * f[tuple(slice2)]
# Fill in "left" edge with forward difference
slice0[axis] = slice(None, 1)
slice1[axis] = slice(1, 2)
slice2[axis] = slice(2, 3)
delta_slice0[axis] = slice(None, 1)
delta_slice1[axis] = slice(1, 2)
combined_delta = delta[tuple(delta_slice0)] + delta[tuple(delta_slice1)]
big_delta = combined_delta + delta[tuple(delta_slice0)]
left = -big_delta / (combined_delta * delta[tuple(delta_slice0)]) * f[tuple(slice0)] + combined_delta / (delta[tuple(delta_slice0)] * delta[tuple(delta_slice1)]) * f[tuple(slice1)] - delta[tuple(delta_slice0)] / (combined_delta * delta[tuple(delta_slice1)]) * f[tuple(slice2)]
# Now the "right" edge with backward difference
slice0[axis] = slice(-3, -2)
slice1[axis] = slice(-2, -1)
slice2[axis] = slice(-1, None)
delta_slice0[axis] = slice(-2, -1)
delta_slice1[axis] = slice(-1, None)
combined_delta = delta[tuple(delta_slice0)] + delta[tuple(delta_slice1)]
big_delta = combined_delta + delta[tuple(delta_slice1)]
right = delta[tuple(delta_slice1)] / (combined_delta * delta[tuple(delta_slice0)]) * f[tuple(slice0)] - combined_delta / (delta[tuple(delta_slice0)] * delta[tuple(delta_slice1)]) * f[tuple(slice1)] + big_delta / (combined_delta * delta[tuple(delta_slice1)]) * f[tuple(slice2)]
return concatenate((left, center, right), axis=axis)
|
def _get_candidate_names():
    """Common setup sequence for all user-callable interfaces.

    Lazily creates the module-wide random name sequence exactly once,
    even when first called from several threads at the same time.
    """
    global _name_sequence
    # Fast path: the sequence was already built by an earlier call.
    if _name_sequence is not None:
        return _name_sequence
    # Slow path: serialize creation so only one sequence is ever made.
    _once_lock.acquire()
    try:
        if _name_sequence is None:
            _name_sequence = _RandomNameSequence()
    finally:
        _once_lock.release()
    return _name_sequence
|
def function[_get_candidate_names, parameter[]]:
constant[Common setup sequence for all user-callable interfaces.]
<ast.Global object at 0x7da1b25934c0>
if compare[name[_name_sequence] is constant[None]] begin[:]
call[name[_once_lock].acquire, parameter[]]
<ast.Try object at 0x7da1b25917b0>
return[name[_name_sequence]]
|
keyword[def] identifier[_get_candidate_names] ():
literal[string]
keyword[global] identifier[_name_sequence]
keyword[if] identifier[_name_sequence] keyword[is] keyword[None] :
identifier[_once_lock] . identifier[acquire] ()
keyword[try] :
keyword[if] identifier[_name_sequence] keyword[is] keyword[None] :
identifier[_name_sequence] = identifier[_RandomNameSequence] ()
keyword[finally] :
identifier[_once_lock] . identifier[release] ()
keyword[return] identifier[_name_sequence]
|
def _get_candidate_names():
"""Common setup sequence for all user-callable interfaces."""
global _name_sequence
if _name_sequence is None:
_once_lock.acquire()
try:
if _name_sequence is None:
_name_sequence = _RandomNameSequence() # depends on [control=['if'], data=['_name_sequence']] # depends on [control=['try'], data=[]]
finally:
_once_lock.release() # depends on [control=['if'], data=['_name_sequence']]
return _name_sequence
|
def _start_callbacks(self, result, exception):
"""Perform the callback chain going back and forth between the callback
and errback as needed.
If an exception is raised and the entire chain is gone through without a valid
errback then its simply logged.
"""
if self._cancelled:
raise CancelledError()
if self.called:
raise AlreadyCalledError()
self._result = result
self._exception = exception
if self._exception:
self._tb_info = ''.join(traceback.format_tb(sys.exc_info()[2]))
self.called = True
self._do_callbacks()
|
def function[_start_callbacks, parameter[self, result, exception]]:
constant[Perform the callback chain going back and forth between the callback
and errback as needed.
If an exception is raised and the entire chain is gone through without a valid
errback then its simply logged.
]
if name[self]._cancelled begin[:]
<ast.Raise object at 0x7da1b09ea5f0>
if name[self].called begin[:]
<ast.Raise object at 0x7da1b09eada0>
name[self]._result assign[=] name[result]
name[self]._exception assign[=] name[exception]
if name[self]._exception begin[:]
name[self]._tb_info assign[=] call[constant[].join, parameter[call[name[traceback].format_tb, parameter[call[call[name[sys].exc_info, parameter[]]][constant[2]]]]]]
name[self].called assign[=] constant[True]
call[name[self]._do_callbacks, parameter[]]
|
keyword[def] identifier[_start_callbacks] ( identifier[self] , identifier[result] , identifier[exception] ):
literal[string]
keyword[if] identifier[self] . identifier[_cancelled] :
keyword[raise] identifier[CancelledError] ()
keyword[if] identifier[self] . identifier[called] :
keyword[raise] identifier[AlreadyCalledError] ()
identifier[self] . identifier[_result] = identifier[result]
identifier[self] . identifier[_exception] = identifier[exception]
keyword[if] identifier[self] . identifier[_exception] :
identifier[self] . identifier[_tb_info] = literal[string] . identifier[join] ( identifier[traceback] . identifier[format_tb] ( identifier[sys] . identifier[exc_info] ()[ literal[int] ]))
identifier[self] . identifier[called] = keyword[True]
identifier[self] . identifier[_do_callbacks] ()
|
def _start_callbacks(self, result, exception):
"""Perform the callback chain going back and forth between the callback
and errback as needed.
If an exception is raised and the entire chain is gone through without a valid
errback then its simply logged.
"""
if self._cancelled:
raise CancelledError() # depends on [control=['if'], data=[]]
if self.called:
raise AlreadyCalledError() # depends on [control=['if'], data=[]]
self._result = result
self._exception = exception
if self._exception:
self._tb_info = ''.join(traceback.format_tb(sys.exc_info()[2])) # depends on [control=['if'], data=[]]
self.called = True
self._do_callbacks()
|
def connect(self):
    """Connect to device.

    Returns whatever ``loop.create_connection`` produces; this object
    acts as its own protocol factory.
    """
    def protocol_factory():
        # The instance itself is the asyncio protocol.
        return self
    return self.loop.create_connection(protocol_factory, self.host, self.port)
|
def function[connect, parameter[self]]:
constant[Connect to device.]
return[call[name[self].loop.create_connection, parameter[<ast.Lambda object at 0x7da18fe91840>, name[self].host, name[self].port]]]
|
keyword[def] identifier[connect] ( identifier[self] ):
literal[string]
keyword[return] identifier[self] . identifier[loop] . identifier[create_connection] ( keyword[lambda] : identifier[self] , identifier[self] . identifier[host] , identifier[self] . identifier[port] )
|
def connect(self):
"""Connect to device."""
return self.loop.create_connection(lambda : self, self.host, self.port)
|
def forwards(self, orm):
    """Lowercase the app label on any 'DublinCore' content-type rows."""
    # Note: Remember to use orm['appname.ModelName'] rather than "from appname.models..."
    content_types = orm["contenttypes.contenttype"].objects.all()
    for content_type in content_types:
        if content_type.app_label != "DublinCore":
            continue
        content_type.app_label = "dublincore"
        content_type.save()
|
def function[forwards, parameter[self, orm]]:
constant[Write your forwards methods here.]
for taget[name[c]] in starred[call[call[name[orm]][constant[contenttypes.contenttype]].objects.all, parameter[]]] begin[:]
if compare[name[c].app_label equal[==] constant[DublinCore]] begin[:]
name[c].app_label assign[=] constant[dublincore]
call[name[c].save, parameter[]]
|
keyword[def] identifier[forwards] ( identifier[self] , identifier[orm] ):
literal[string]
keyword[for] identifier[c] keyword[in] identifier[orm] [ literal[string] ]. identifier[objects] . identifier[all] ():
keyword[if] identifier[c] . identifier[app_label] == literal[string] :
identifier[c] . identifier[app_label] = literal[string]
identifier[c] . identifier[save] ()
|
def forwards(self, orm):
"""Write your forwards methods here."""
# Note: Remember to use orm['appname.ModelName'] rather than "from appname.models..."
for c in orm['contenttypes.contenttype'].objects.all():
if c.app_label == 'DublinCore':
c.app_label = 'dublincore'
c.save() # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['c']]
|
def update_variogram_model(self, variogram_model, variogram_parameters=None,
                           variogram_function=None, nlags=6, weight=False,
                           anisotropy_scaling=1., anisotropy_angle=0.):
    """Allows user to update variogram type and/or
    variogram model parameters.
    Parameters
    ----------
    variogram_model : str
        May be any of the variogram models listed above.
        May also be 'custom', in which case variogram_parameters and
        variogram_function must be specified.
    variogram_parameters : list or dict, optional
        List or dict of variogram model parameters, as explained above.
        If not provided, a best fit model will be calculated as
        described above.
    variogram_function : callable, optional
        A callable function that must be provided if variogram_model is
        specified as 'custom'. See above for more information.
    nlags : int, optional
        Number of averaging bins for the semivariogram. Default is 6.
    weight : boolean, optional
        Flag that specifies if semivariance at smaller lags should be
        weighted more heavily when automatically calculating the
        variogram model. See above for more information. True indicates
        that weights will be applied. Default is False.
    anisotropy_scaling : float, optional
        Scalar stretching value to take into account anisotropy.
        Default is 1 (effectively no stretching).
        Scaling is applied in the y-direction.
    anisotropy_angle : float, optional
        CCW angle (in degrees) by which to rotate coordinate system in
        order to take into account anisotropy. Default is 0 (no rotation).

    Raises
    ------
    ValueError
        If the specified variogram model is unsupported, or if 'custom'
        is requested without a callable variogram_function.
    """
    # Only re-rotate/re-scale the input coordinates when the anisotropy
    # setup actually changed; the adjustment is relatively expensive.
    if anisotropy_scaling != self.anisotropy_scaling or \
            anisotropy_angle != self.anisotropy_angle:
        if self.verbose:
            print("Adjusting data for anisotropy...")
        self.anisotropy_scaling = anisotropy_scaling
        self.anisotropy_angle = anisotropy_angle
        self.X_ADJUSTED, self.Y_ADJUSTED = \
            _adjust_for_anisotropy(np.vstack((self.X_ORIG, self.Y_ORIG)).T,
                                   [self.XCENTER, self.YCENTER],
                                   [self.anisotropy_scaling],
                                   [self.anisotropy_angle]).T
    self.variogram_model = variogram_model
    # Resolve the variogram function: built-in model, user callable, or error.
    if self.variogram_model not in self.variogram_dict and \
            self.variogram_model != 'custom':
        raise ValueError("Specified variogram model '%s' is not supported."
                         % variogram_model)
    elif self.variogram_model == 'custom':
        if variogram_function is None or not callable(variogram_function):
            raise ValueError("Must specify callable function for "
                             "custom variogram model.")
        else:
            self.variogram_function = variogram_function
    else:
        self.variogram_function = self.variogram_dict[self.variogram_model]
    if self.verbose:
        # BUG FIX: message previously read "Updating variogram mode...".
        print("Updating variogram model...")
    # See note above about the 'use_psill' kwarg...
    vp_temp = _make_variogram_parameter_list(self.variogram_model,
                                             variogram_parameters)
    self.lags, self.semivariance, self.variogram_model_parameters = \
        _initialize_variogram_model(np.vstack((self.X_ADJUSTED,
                                               self.Y_ADJUSTED)).T,
                                    self.Z, self.variogram_model, vp_temp,
                                    self.variogram_function, nlags,
                                    weight, 'euclidean')
    if self.verbose:
        # Report the fitted parameters; their meaning differs per model.
        if self.variogram_model == 'linear':
            print("Using '%s' Variogram Model" % 'linear')
            print("Slope:", self.variogram_model_parameters[0])
            print("Nugget:", self.variogram_model_parameters[1], '\n')
        elif self.variogram_model == 'power':
            print("Using '%s' Variogram Model" % 'power')
            print("Scale:", self.variogram_model_parameters[0])
            print("Exponent:", self.variogram_model_parameters[1])
            print("Nugget:", self.variogram_model_parameters[2], '\n')
        elif self.variogram_model == 'custom':
            print("Using Custom Variogram Model")
        else:
            print("Using '%s' Variogram Model" % self.variogram_model)
            print("Partial Sill:", self.variogram_model_parameters[0])
            print("Full Sill:", self.variogram_model_parameters[0] +
                  self.variogram_model_parameters[2])
            print("Range:", self.variogram_model_parameters[1])
            print("Nugget:", self.variogram_model_parameters[2], '\n')
    if self.enable_plotting:
        self.display_variogram_model()
    if self.verbose:
        print("Calculating statistics on variogram model fit...")
    # Goodness-of-fit statistics for the (possibly new) variogram model.
    self.delta, self.sigma, self.epsilon = \
        _find_statistics(np.vstack((self.X_ADJUSTED, self.Y_ADJUSTED)).T,
                         self.Z, self.variogram_function,
                         self.variogram_model_parameters,
                         'euclidean')
    self.Q1 = core.calcQ1(self.epsilon)
    self.Q2 = core.calcQ2(self.epsilon)
    self.cR = core.calc_cR(self.Q2, self.sigma)
    if self.verbose:
        print("Q1 =", self.Q1)
        print("Q2 =", self.Q2)
        print("cR =", self.cR, '\n')
|
def function[update_variogram_model, parameter[self, variogram_model, variogram_parameters, variogram_function, nlags, weight, anisotropy_scaling, anisotropy_angle]]:
constant[Allows user to update variogram type and/or
variogram model parameters.
Parameters
----------
variogram_model : str
May be any of the variogram models listed above.
May also be 'custom', in which case variogram_parameters and
variogram_function must be specified.
variogram_parameters : list or dict, optional
List or dict of variogram model parameters, as explained above.
If not provided, a best fit model will be calculated as
described above.
variogram_function : callable, optional
A callable function that must be provided if variogram_model is
specified as 'custom'. See above for more information.
nlags : int, optional
Number of averaging bins for the semivariogram. Defualt is 6.
weight : boolean, optional
Flag that specifies if semivariance at smaller lags should be
weighted more heavily when automatically calculating the
variogram model. See above for more information. True indicates
that weights will be applied. Default is False.
anisotropy_scaling : float, optional
Scalar stretching value to take into account anisotropy.
Default is 1 (effectively no stretching).
Scaling is applied in the y-direction.
anisotropy_angle : float, optional
CCW angle (in degrees) by which to rotate coordinate system in
order to take into account anisotropy. Default is 0 (no rotation).
]
if <ast.BoolOp object at 0x7da1b26ad5d0> begin[:]
if name[self].verbose begin[:]
call[name[print], parameter[constant[Adjusting data for anisotropy...]]]
name[self].anisotropy_scaling assign[=] name[anisotropy_scaling]
name[self].anisotropy_angle assign[=] name[anisotropy_angle]
<ast.Tuple object at 0x7da1b26afe50> assign[=] call[name[_adjust_for_anisotropy], parameter[call[name[np].vstack, parameter[tuple[[<ast.Attribute object at 0x7da1b26ad810>, <ast.Attribute object at 0x7da1b26aee60>]]]].T, list[[<ast.Attribute object at 0x7da1b26adf30>, <ast.Attribute object at 0x7da1b26af3a0>]], list[[<ast.Attribute object at 0x7da1b26aec20>]], list[[<ast.Attribute object at 0x7da1b26ae230>]]]].T
name[self].variogram_model assign[=] name[variogram_model]
if <ast.BoolOp object at 0x7da1b26ade70> begin[:]
<ast.Raise object at 0x7da1b26afc40>
if name[self].verbose begin[:]
call[name[print], parameter[constant[Updating variogram mode...]]]
variable[vp_temp] assign[=] call[name[_make_variogram_parameter_list], parameter[name[self].variogram_model, name[variogram_parameters]]]
<ast.Tuple object at 0x7da1b26ae680> assign[=] call[name[_initialize_variogram_model], parameter[call[name[np].vstack, parameter[tuple[[<ast.Attribute object at 0x7da1b26afd90>, <ast.Attribute object at 0x7da1b26affa0>]]]].T, name[self].Z, name[self].variogram_model, name[vp_temp], name[self].variogram_function, name[nlags], name[weight], constant[euclidean]]]
if name[self].verbose begin[:]
if compare[name[self].variogram_model equal[==] constant[linear]] begin[:]
call[name[print], parameter[binary_operation[constant[Using '%s' Variogram Model] <ast.Mod object at 0x7da2590d6920> constant[linear]]]]
call[name[print], parameter[constant[Slope:], call[name[self].variogram_model_parameters][constant[0]]]]
call[name[print], parameter[constant[Nugget:], call[name[self].variogram_model_parameters][constant[1]], constant[
]]]
if name[self].enable_plotting begin[:]
call[name[self].display_variogram_model, parameter[]]
if name[self].verbose begin[:]
call[name[print], parameter[constant[Calculating statistics on variogram model fit...]]]
<ast.Tuple object at 0x7da2054a71f0> assign[=] call[name[_find_statistics], parameter[call[name[np].vstack, parameter[tuple[[<ast.Attribute object at 0x7da2054a7970>, <ast.Attribute object at 0x7da2054a6d10>]]]].T, name[self].Z, name[self].variogram_function, name[self].variogram_model_parameters, constant[euclidean]]]
name[self].Q1 assign[=] call[name[core].calcQ1, parameter[name[self].epsilon]]
name[self].Q2 assign[=] call[name[core].calcQ2, parameter[name[self].epsilon]]
name[self].cR assign[=] call[name[core].calc_cR, parameter[name[self].Q2, name[self].sigma]]
if name[self].verbose begin[:]
call[name[print], parameter[constant[Q1 =], name[self].Q1]]
call[name[print], parameter[constant[Q2 =], name[self].Q2]]
call[name[print], parameter[constant[cR =], name[self].cR, constant[
]]]
|
keyword[def] identifier[update_variogram_model] ( identifier[self] , identifier[variogram_model] , identifier[variogram_parameters] = keyword[None] ,
identifier[variogram_function] = keyword[None] , identifier[nlags] = literal[int] , identifier[weight] = keyword[False] ,
identifier[anisotropy_scaling] = literal[int] , identifier[anisotropy_angle] = literal[int] ):
literal[string]
keyword[if] identifier[anisotropy_scaling] != identifier[self] . identifier[anisotropy_scaling] keyword[or] identifier[anisotropy_angle] != identifier[self] . identifier[anisotropy_angle] :
keyword[if] identifier[self] . identifier[verbose] :
identifier[print] ( literal[string] )
identifier[self] . identifier[anisotropy_scaling] = identifier[anisotropy_scaling]
identifier[self] . identifier[anisotropy_angle] = identifier[anisotropy_angle]
identifier[self] . identifier[X_ADJUSTED] , identifier[self] . identifier[Y_ADJUSTED] = identifier[_adjust_for_anisotropy] ( identifier[np] . identifier[vstack] (( identifier[self] . identifier[X_ORIG] , identifier[self] . identifier[Y_ORIG] )). identifier[T] ,
[ identifier[self] . identifier[XCENTER] , identifier[self] . identifier[YCENTER] ],
[ identifier[self] . identifier[anisotropy_scaling] ],
[ identifier[self] . identifier[anisotropy_angle] ]). identifier[T]
identifier[self] . identifier[variogram_model] = identifier[variogram_model]
keyword[if] identifier[self] . identifier[variogram_model] keyword[not] keyword[in] identifier[self] . identifier[variogram_dict] . identifier[keys] () keyword[and] identifier[self] . identifier[variogram_model] != literal[string] :
keyword[raise] identifier[ValueError] ( literal[string] % identifier[variogram_model] )
keyword[elif] identifier[self] . identifier[variogram_model] == literal[string] :
keyword[if] identifier[variogram_function] keyword[is] keyword[None] keyword[or] keyword[not] identifier[callable] ( identifier[variogram_function] ):
keyword[raise] identifier[ValueError] ( literal[string]
literal[string] )
keyword[else] :
identifier[self] . identifier[variogram_function] = identifier[variogram_function]
keyword[else] :
identifier[self] . identifier[variogram_function] = identifier[self] . identifier[variogram_dict] [ identifier[self] . identifier[variogram_model] ]
keyword[if] identifier[self] . identifier[verbose] :
identifier[print] ( literal[string] )
identifier[vp_temp] = identifier[_make_variogram_parameter_list] ( identifier[self] . identifier[variogram_model] ,
identifier[variogram_parameters] )
identifier[self] . identifier[lags] , identifier[self] . identifier[semivariance] , identifier[self] . identifier[variogram_model_parameters] = identifier[_initialize_variogram_model] ( identifier[np] . identifier[vstack] (( identifier[self] . identifier[X_ADJUSTED] ,
identifier[self] . identifier[Y_ADJUSTED] )). identifier[T] ,
identifier[self] . identifier[Z] , identifier[self] . identifier[variogram_model] , identifier[vp_temp] ,
identifier[self] . identifier[variogram_function] , identifier[nlags] ,
identifier[weight] , literal[string] )
keyword[if] identifier[self] . identifier[verbose] :
keyword[if] identifier[self] . identifier[variogram_model] == literal[string] :
identifier[print] ( literal[string] % literal[string] )
identifier[print] ( literal[string] , identifier[self] . identifier[variogram_model_parameters] [ literal[int] ])
identifier[print] ( literal[string] , identifier[self] . identifier[variogram_model_parameters] [ literal[int] ], literal[string] )
keyword[elif] identifier[self] . identifier[variogram_model] == literal[string] :
identifier[print] ( literal[string] % literal[string] )
identifier[print] ( literal[string] , identifier[self] . identifier[variogram_model_parameters] [ literal[int] ])
identifier[print] ( literal[string] , identifier[self] . identifier[variogram_model_parameters] [ literal[int] ])
identifier[print] ( literal[string] , identifier[self] . identifier[variogram_model_parameters] [ literal[int] ], literal[string] )
keyword[elif] identifier[self] . identifier[variogram_model] == literal[string] :
identifier[print] ( literal[string] )
keyword[else] :
identifier[print] ( literal[string] % identifier[self] . identifier[variogram_model] )
identifier[print] ( literal[string] , identifier[self] . identifier[variogram_model_parameters] [ literal[int] ])
identifier[print] ( literal[string] , identifier[self] . identifier[variogram_model_parameters] [ literal[int] ]+
identifier[self] . identifier[variogram_model_parameters] [ literal[int] ])
identifier[print] ( literal[string] , identifier[self] . identifier[variogram_model_parameters] [ literal[int] ])
identifier[print] ( literal[string] , identifier[self] . identifier[variogram_model_parameters] [ literal[int] ], literal[string] )
keyword[if] identifier[self] . identifier[enable_plotting] :
identifier[self] . identifier[display_variogram_model] ()
keyword[if] identifier[self] . identifier[verbose] :
identifier[print] ( literal[string] )
identifier[self] . identifier[delta] , identifier[self] . identifier[sigma] , identifier[self] . identifier[epsilon] = identifier[_find_statistics] ( identifier[np] . identifier[vstack] (( identifier[self] . identifier[X_ADJUSTED] , identifier[self] . identifier[Y_ADJUSTED] )). identifier[T] ,
identifier[self] . identifier[Z] , identifier[self] . identifier[variogram_function] ,
identifier[self] . identifier[variogram_model_parameters] ,
literal[string] )
identifier[self] . identifier[Q1] = identifier[core] . identifier[calcQ1] ( identifier[self] . identifier[epsilon] )
identifier[self] . identifier[Q2] = identifier[core] . identifier[calcQ2] ( identifier[self] . identifier[epsilon] )
identifier[self] . identifier[cR] = identifier[core] . identifier[calc_cR] ( identifier[self] . identifier[Q2] , identifier[self] . identifier[sigma] )
keyword[if] identifier[self] . identifier[verbose] :
identifier[print] ( literal[string] , identifier[self] . identifier[Q1] )
identifier[print] ( literal[string] , identifier[self] . identifier[Q2] )
identifier[print] ( literal[string] , identifier[self] . identifier[cR] , literal[string] )
|
def update_variogram_model(self, variogram_model, variogram_parameters=None, variogram_function=None, nlags=6, weight=False, anisotropy_scaling=1.0, anisotropy_angle=0.0):
"""Allows user to update variogram type and/or
variogram model parameters.
Parameters
----------
variogram_model : str
May be any of the variogram models listed above.
May also be 'custom', in which case variogram_parameters and
variogram_function must be specified.
variogram_parameters : list or dict, optional
List or dict of variogram model parameters, as explained above.
If not provided, a best fit model will be calculated as
described above.
variogram_function : callable, optional
A callable function that must be provided if variogram_model is
specified as 'custom'. See above for more information.
nlags : int, optional
Number of averaging bins for the semivariogram. Defualt is 6.
weight : boolean, optional
Flag that specifies if semivariance at smaller lags should be
weighted more heavily when automatically calculating the
variogram model. See above for more information. True indicates
that weights will be applied. Default is False.
anisotropy_scaling : float, optional
Scalar stretching value to take into account anisotropy.
Default is 1 (effectively no stretching).
Scaling is applied in the y-direction.
anisotropy_angle : float, optional
CCW angle (in degrees) by which to rotate coordinate system in
order to take into account anisotropy. Default is 0 (no rotation).
"""
if anisotropy_scaling != self.anisotropy_scaling or anisotropy_angle != self.anisotropy_angle:
if self.verbose:
print('Adjusting data for anisotropy...') # depends on [control=['if'], data=[]]
self.anisotropy_scaling = anisotropy_scaling
self.anisotropy_angle = anisotropy_angle
(self.X_ADJUSTED, self.Y_ADJUSTED) = _adjust_for_anisotropy(np.vstack((self.X_ORIG, self.Y_ORIG)).T, [self.XCENTER, self.YCENTER], [self.anisotropy_scaling], [self.anisotropy_angle]).T # depends on [control=['if'], data=[]]
self.variogram_model = variogram_model
if self.variogram_model not in self.variogram_dict.keys() and self.variogram_model != 'custom':
raise ValueError("Specified variogram model '%s' is not supported." % variogram_model) # depends on [control=['if'], data=[]]
elif self.variogram_model == 'custom':
if variogram_function is None or not callable(variogram_function):
raise ValueError('Must specify callable function for custom variogram model.') # depends on [control=['if'], data=[]]
else:
self.variogram_function = variogram_function # depends on [control=['if'], data=[]]
else:
self.variogram_function = self.variogram_dict[self.variogram_model]
if self.verbose:
print('Updating variogram mode...') # depends on [control=['if'], data=[]] # See note above about the 'use_psill' kwarg...
vp_temp = _make_variogram_parameter_list(self.variogram_model, variogram_parameters)
(self.lags, self.semivariance, self.variogram_model_parameters) = _initialize_variogram_model(np.vstack((self.X_ADJUSTED, self.Y_ADJUSTED)).T, self.Z, self.variogram_model, vp_temp, self.variogram_function, nlags, weight, 'euclidean')
if self.verbose:
if self.variogram_model == 'linear':
print("Using '%s' Variogram Model" % 'linear')
print('Slope:', self.variogram_model_parameters[0])
print('Nugget:', self.variogram_model_parameters[1], '\n') # depends on [control=['if'], data=[]]
elif self.variogram_model == 'power':
print("Using '%s' Variogram Model" % 'power')
print('Scale:', self.variogram_model_parameters[0])
print('Exponent:', self.variogram_model_parameters[1])
print('Nugget:', self.variogram_model_parameters[2], '\n') # depends on [control=['if'], data=[]]
elif self.variogram_model == 'custom':
print('Using Custom Variogram Model') # depends on [control=['if'], data=[]]
else:
print("Using '%s' Variogram Model" % self.variogram_model)
print('Partial Sill:', self.variogram_model_parameters[0])
print('Full Sill:', self.variogram_model_parameters[0] + self.variogram_model_parameters[2])
print('Range:', self.variogram_model_parameters[1])
print('Nugget:', self.variogram_model_parameters[2], '\n') # depends on [control=['if'], data=[]]
if self.enable_plotting:
self.display_variogram_model() # depends on [control=['if'], data=[]]
if self.verbose:
print('Calculating statistics on variogram model fit...') # depends on [control=['if'], data=[]]
(self.delta, self.sigma, self.epsilon) = _find_statistics(np.vstack((self.X_ADJUSTED, self.Y_ADJUSTED)).T, self.Z, self.variogram_function, self.variogram_model_parameters, 'euclidean')
self.Q1 = core.calcQ1(self.epsilon)
self.Q2 = core.calcQ2(self.epsilon)
self.cR = core.calc_cR(self.Q2, self.sigma)
if self.verbose:
print('Q1 =', self.Q1)
print('Q2 =', self.Q2)
print('cR =', self.cR, '\n') # depends on [control=['if'], data=[]]
|
def get_workers_with_qualification(self, qualification_id):
    """Yield granted workers for a qualification, one API page at a time.

    Each yielded item is ``{'id': worker_id, 'score': integer_value}``.
    Pagination follows the service's ``NextToken`` until it is absent.
    """
    next_token = None
    while True:
        request = {
            "QualificationTypeId": qualification_id,
            "MaxResults": MAX_SUPPORTED_BATCH_SIZE,
            "Status": "Granted",
        }
        # Only the follow-up pages carry a continuation token.
        if next_token is not None:
            request["NextToken"] = next_token
        response = self.mturk.list_workers_with_qualification_type(**request)
        if response:
            for record in response["Qualifications"]:
                yield {"id": record["WorkerId"], "score": record["IntegerValue"]}
        if "NextToken" not in response:
            break
        next_token = response["NextToken"]
|
def function[get_workers_with_qualification, parameter[self, qualification_id]]:
constant[Get workers with the given qualification.]
variable[done] assign[=] constant[False]
variable[next_token] assign[=] constant[None]
while <ast.UnaryOp object at 0x7da1b03833d0> begin[:]
if compare[name[next_token] is_not constant[None]] begin[:]
variable[response] assign[=] call[name[self].mturk.list_workers_with_qualification_type, parameter[]]
if name[response] begin[:]
for taget[name[r]] in starred[call[name[response]][constant[Qualifications]]] begin[:]
<ast.Yield object at 0x7da1b03807f0>
if compare[constant[NextToken] in name[response]] begin[:]
variable[next_token] assign[=] call[name[response]][constant[NextToken]]
|
keyword[def] identifier[get_workers_with_qualification] ( identifier[self] , identifier[qualification_id] ):
literal[string]
identifier[done] = keyword[False]
identifier[next_token] = keyword[None]
keyword[while] keyword[not] identifier[done] :
keyword[if] identifier[next_token] keyword[is] keyword[not] keyword[None] :
identifier[response] = identifier[self] . identifier[mturk] . identifier[list_workers_with_qualification_type] (
identifier[QualificationTypeId] = identifier[qualification_id] ,
identifier[MaxResults] = identifier[MAX_SUPPORTED_BATCH_SIZE] ,
identifier[Status] = literal[string] ,
identifier[NextToken] = identifier[next_token] ,
)
keyword[else] :
identifier[response] = identifier[self] . identifier[mturk] . identifier[list_workers_with_qualification_type] (
identifier[QualificationTypeId] = identifier[qualification_id] ,
identifier[MaxResults] = identifier[MAX_SUPPORTED_BATCH_SIZE] ,
identifier[Status] = literal[string] ,
)
keyword[if] identifier[response] :
keyword[for] identifier[r] keyword[in] identifier[response] [ literal[string] ]:
keyword[yield] { literal[string] : identifier[r] [ literal[string] ], literal[string] : identifier[r] [ literal[string] ]}
keyword[if] literal[string] keyword[in] identifier[response] :
identifier[next_token] = identifier[response] [ literal[string] ]
keyword[else] :
identifier[done] = keyword[True]
|
def get_workers_with_qualification(self, qualification_id):
"""Get workers with the given qualification."""
done = False
next_token = None
while not done:
if next_token is not None:
response = self.mturk.list_workers_with_qualification_type(QualificationTypeId=qualification_id, MaxResults=MAX_SUPPORTED_BATCH_SIZE, Status='Granted', NextToken=next_token) # depends on [control=['if'], data=['next_token']]
else:
response = self.mturk.list_workers_with_qualification_type(QualificationTypeId=qualification_id, MaxResults=MAX_SUPPORTED_BATCH_SIZE, Status='Granted')
if response:
for r in response['Qualifications']:
yield {'id': r['WorkerId'], 'score': r['IntegerValue']} # depends on [control=['for'], data=['r']] # depends on [control=['if'], data=[]]
if 'NextToken' in response:
next_token = response['NextToken'] # depends on [control=['if'], data=['response']]
else:
done = True # depends on [control=['while'], data=[]]
|
def _deserialize(
    self, data, fields_dict, error_store, many=False, partial=False,
    unknown=RAISE, dict_class=dict, index_errors=True, index=None,
):
    """Deserialize ``data`` based on the schema defined by ``fields_dict``.
    :param dict data: The data to deserialize.
    :param dict fields_dict: Mapping of field names to :class:`Field` objects.
    :param ErrorStore error_store: Structure to store errors.
    :param bool many: Set to `True` if ``data`` should be deserialized as
        a collection.
    :param bool|tuple partial: Whether to ignore missing fields and not require
        any fields declared. Propagates down to ``Nested`` fields as well. If
        its value is an iterable, only missing fields listed in that iterable
        will be ignored. Use dot delimiters to specify nested fields.
    :param unknown: Whether to exclude, include, or raise an error for unknown
        fields in the data. Use `EXCLUDE`, `INCLUDE` or `RAISE`.
    :param type dict_class: Dictionary class used to construct the output.
    :param bool index_errors: Whether to store the index of invalid items in
        ``self.errors`` when ``many=True``.
    :param int index: Index of the item being serialized (for storing errors) if
        serializing a collection, otherwise `None`.
    :return: A dictionary of the deserialized data.
    """
    # Errors are keyed by item index only when index tracking is enabled.
    index = index if index_errors else None
    if many:
        if not is_collection(data):
            error_store.store_error([self.error_messages['type']], index=index)
            ret = []
        else:
            # Recurse once per item with many=False; _pending flags that a
            # collection-level pass is in progress.
            self._pending = True
            ret = [
                self._deserialize(
                    d, fields_dict, error_store, many=False,
                    partial=partial, unknown=unknown,
                    dict_class=dict_class, index=idx,
                    index_errors=index_errors,
                )
                for idx, d in enumerate(data)
            ]
            self._pending = False
        return ret
    ret = dict_class()
    # Check data is a dict
    if not isinstance(data, Mapping):
        error_store.store_error([self.error_messages['type']], index=index)
    else:
        partial_is_collection = is_collection(partial)
        for attr_name, field_obj in iteritems(fields_dict):
            # Load-only direction: dump-only fields never consume input.
            if field_obj.dump_only:
                continue
            # data_key, when set, overrides the attribute name used to
            # look the field up in the incoming mapping.
            field_name = attr_name
            if field_obj.data_key:
                field_name = field_obj.data_key
            raw_value = data.get(field_name, missing)
            if raw_value is missing:
                # Ignore missing field if we're allowed to.
                if (
                    partial is True or
                    (partial_is_collection and attr_name in partial)
                ):
                    continue
            d_kwargs = {}
            if isinstance(field_obj, Nested):
                # Allow partial loading of nested schemas.
                if partial_is_collection:
                    # Strip the 'field.' prefix so dotted entries propagate
                    # to the nested schema's own partial list.
                    prefix = field_name + '.'
                    len_prefix = len(prefix)
                    sub_partial = [f[len_prefix:]
                                   for f in partial if f.startswith(prefix)]
                else:
                    sub_partial = partial
                d_kwargs['partial'] = sub_partial
            # Closure over the current field; _call_and_store invokes it and
            # routes any ValidationError into error_store.
            getter = lambda val: field_obj.deserialize(
                val, field_name,
                data, **d_kwargs
            )
            value = self._call_and_store(
                getter_func=getter,
                data=raw_value,
                field_name=field_name,
                error_store=error_store,
                index=index,
            )
            if value is not missing:
                key = fields_dict[attr_name].attribute or attr_name
                set_value(ret, key, value)
        if unknown != EXCLUDE:
            # Keys present in the input but not declared by any loadable field.
            fields = {
                field_obj.data_key or field_name
                for field_name, field_obj in fields_dict.items()
                if not field_obj.dump_only
            }
            for key in set(data) - fields:
                value = data[key]
                if unknown == INCLUDE:
                    set_value(ret, key, value)
                elif unknown == RAISE:
                    error_store.store_error(
                        [self.error_messages['unknown']],
                        key,
                        (index if index_errors else None),
                    )
    return ret
|
def function[_deserialize, parameter[self, data, fields_dict, error_store, many, partial, unknown, dict_class, index_errors, index]]:
constant[Deserialize ``data`` based on the schema defined by ``fields_dict``.
:param dict data: The data to deserialize.
:param dict fields_dict: Mapping of field names to :class:`Field` objects.
:param ErrorStore error_store: Structure to store errors.
:param bool many: Set to `True` if ``data`` should be deserialized as
a collection.
:param bool|tuple partial: Whether to ignore missing fields and not require
any fields declared. Propagates down to ``Nested`` fields as well. If
its value is an iterable, only missing fields listed in that iterable
will be ignored. Use dot delimiters to specify nested fields.
:param unknown: Whether to exclude, include, or raise an error for unknown
fields in the data. Use `EXCLUDE`, `INCLUDE` or `RAISE`.
:param type dict_class: Dictionary class used to construct the output.
:param bool index_errors: Whether to store the index of invalid items in
``self.errors`` when ``many=True``.
:param int index: Index of the item being serialized (for storing errors) if
serializing a collection, otherwise `None`.
:return: A dictionary of the deserialized data.
]
variable[index] assign[=] <ast.IfExp object at 0x7da204960310>
if name[many] begin[:]
if <ast.UnaryOp object at 0x7da204962500> begin[:]
call[name[error_store].store_error, parameter[list[[<ast.Subscript object at 0x7da204961ba0>]]]]
variable[ret] assign[=] list[[]]
return[name[ret]]
variable[ret] assign[=] call[name[dict_class], parameter[]]
if <ast.UnaryOp object at 0x7da20c795270> begin[:]
call[name[error_store].store_error, parameter[list[[<ast.Subscript object at 0x7da20c795180>]]]]
return[name[ret]]
|
keyword[def] identifier[_deserialize] (
identifier[self] , identifier[data] , identifier[fields_dict] , identifier[error_store] , identifier[many] = keyword[False] , identifier[partial] = keyword[False] ,
identifier[unknown] = identifier[RAISE] , identifier[dict_class] = identifier[dict] , identifier[index_errors] = keyword[True] , identifier[index] = keyword[None] ,
):
literal[string]
identifier[index] = identifier[index] keyword[if] identifier[index_errors] keyword[else] keyword[None]
keyword[if] identifier[many] :
keyword[if] keyword[not] identifier[is_collection] ( identifier[data] ):
identifier[error_store] . identifier[store_error] ([ identifier[self] . identifier[error_messages] [ literal[string] ]], identifier[index] = identifier[index] )
identifier[ret] =[]
keyword[else] :
identifier[self] . identifier[_pending] = keyword[True]
identifier[ret] =[
identifier[self] . identifier[_deserialize] (
identifier[d] , identifier[fields_dict] , identifier[error_store] , identifier[many] = keyword[False] ,
identifier[partial] = identifier[partial] , identifier[unknown] = identifier[unknown] ,
identifier[dict_class] = identifier[dict_class] , identifier[index] = identifier[idx] ,
identifier[index_errors] = identifier[index_errors] ,
)
keyword[for] identifier[idx] , identifier[d] keyword[in] identifier[enumerate] ( identifier[data] )
]
identifier[self] . identifier[_pending] = keyword[False]
keyword[return] identifier[ret]
identifier[ret] = identifier[dict_class] ()
keyword[if] keyword[not] identifier[isinstance] ( identifier[data] , identifier[Mapping] ):
identifier[error_store] . identifier[store_error] ([ identifier[self] . identifier[error_messages] [ literal[string] ]], identifier[index] = identifier[index] )
keyword[else] :
identifier[partial_is_collection] = identifier[is_collection] ( identifier[partial] )
keyword[for] identifier[attr_name] , identifier[field_obj] keyword[in] identifier[iteritems] ( identifier[fields_dict] ):
keyword[if] identifier[field_obj] . identifier[dump_only] :
keyword[continue]
identifier[field_name] = identifier[attr_name]
keyword[if] identifier[field_obj] . identifier[data_key] :
identifier[field_name] = identifier[field_obj] . identifier[data_key]
identifier[raw_value] = identifier[data] . identifier[get] ( identifier[field_name] , identifier[missing] )
keyword[if] identifier[raw_value] keyword[is] identifier[missing] :
keyword[if] (
identifier[partial] keyword[is] keyword[True] keyword[or]
( identifier[partial_is_collection] keyword[and] identifier[attr_name] keyword[in] identifier[partial] )
):
keyword[continue]
identifier[d_kwargs] ={}
keyword[if] identifier[isinstance] ( identifier[field_obj] , identifier[Nested] ):
keyword[if] identifier[partial_is_collection] :
identifier[prefix] = identifier[field_name] + literal[string]
identifier[len_prefix] = identifier[len] ( identifier[prefix] )
identifier[sub_partial] =[ identifier[f] [ identifier[len_prefix] :]
keyword[for] identifier[f] keyword[in] identifier[partial] keyword[if] identifier[f] . identifier[startswith] ( identifier[prefix] )]
keyword[else] :
identifier[sub_partial] = identifier[partial]
identifier[d_kwargs] [ literal[string] ]= identifier[sub_partial]
identifier[getter] = keyword[lambda] identifier[val] : identifier[field_obj] . identifier[deserialize] (
identifier[val] , identifier[field_name] ,
identifier[data] ,** identifier[d_kwargs]
)
identifier[value] = identifier[self] . identifier[_call_and_store] (
identifier[getter_func] = identifier[getter] ,
identifier[data] = identifier[raw_value] ,
identifier[field_name] = identifier[field_name] ,
identifier[error_store] = identifier[error_store] ,
identifier[index] = identifier[index] ,
)
keyword[if] identifier[value] keyword[is] keyword[not] identifier[missing] :
identifier[key] = identifier[fields_dict] [ identifier[attr_name] ]. identifier[attribute] keyword[or] identifier[attr_name]
identifier[set_value] ( identifier[ret] , identifier[key] , identifier[value] )
keyword[if] identifier[unknown] != identifier[EXCLUDE] :
identifier[fields] ={
identifier[field_obj] . identifier[data_key] keyword[or] identifier[field_name]
keyword[for] identifier[field_name] , identifier[field_obj] keyword[in] identifier[fields_dict] . identifier[items] ()
keyword[if] keyword[not] identifier[field_obj] . identifier[dump_only]
}
keyword[for] identifier[key] keyword[in] identifier[set] ( identifier[data] )- identifier[fields] :
identifier[value] = identifier[data] [ identifier[key] ]
keyword[if] identifier[unknown] == identifier[INCLUDE] :
identifier[set_value] ( identifier[ret] , identifier[key] , identifier[value] )
keyword[elif] identifier[unknown] == identifier[RAISE] :
identifier[error_store] . identifier[store_error] (
[ identifier[self] . identifier[error_messages] [ literal[string] ]],
identifier[key] ,
( identifier[index] keyword[if] identifier[index_errors] keyword[else] keyword[None] ),
)
keyword[return] identifier[ret]
|
def _deserialize(self, data, fields_dict, error_store, many=False, partial=False, unknown=RAISE, dict_class=dict, index_errors=True, index=None):
    """Deserialize ``data`` based on the schema defined by ``fields_dict``.

    :param dict data: The data to deserialize.
    :param dict fields_dict: Mapping of field names to :class:`Field` objects.
    :param ErrorStore error_store: Structure to store errors.
    :param bool many: Set to `True` if ``data`` should be deserialized as
        a collection.
    :param bool|tuple partial: Whether to ignore missing fields and not require
        any fields declared. Propagates down to ``Nested`` fields as well. If
        its value is an iterable, only missing fields listed in that iterable
        will be ignored. Use dot delimiters to specify nested fields.
    :param unknown: Whether to exclude, include, or raise an error for unknown
        fields in the data. Use `EXCLUDE`, `INCLUDE` or `RAISE`.
    :param type dict_class: Dictionary class used to construct the output.
    :param bool index_errors: Whether to store the index of invalid items in
        ``self.errors`` when ``many=True``.
    :param int index: Index of the item being serialized (for storing errors) if
        serializing a collection, otherwise `None`.
    :return: A dictionary of the deserialized data.
    """
    # Drop the per-item index from stored errors when index_errors is off.
    index = index if index_errors else None
    if many:
        if not is_collection(data):
            error_store.store_error([self.error_messages['type']], index=index)
            ret = [] # depends on [control=['if'], data=[]]
        else:
            # Recurse once per item with many=False; _pending flags that a
            # collection is mid-deserialization while the items are processed.
            self._pending = True
            ret = [self._deserialize(d, fields_dict, error_store, many=False, partial=partial, unknown=unknown, dict_class=dict_class, index=idx, index_errors=index_errors) for (idx, d) in enumerate(data)]
            self._pending = False
        return ret # depends on [control=['if'], data=[]]
    ret = dict_class()
    # Check data is a dict
    if not isinstance(data, Mapping):
        error_store.store_error([self.error_messages['type']], index=index) # depends on [control=['if'], data=[]]
    else:
        partial_is_collection = is_collection(partial)
        for (attr_name, field_obj) in iteritems(fields_dict):
            if field_obj.dump_only:
                continue # depends on [control=['if'], data=[]]
            # data_key, when set, is the key looked up in the raw input.
            field_name = attr_name
            if field_obj.data_key:
                field_name = field_obj.data_key # depends on [control=['if'], data=[]]
            raw_value = data.get(field_name, missing)
            if raw_value is missing:
                # Ignore missing field if we're allowed to.
                if partial is True or (partial_is_collection and attr_name in partial):
                    continue # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
            d_kwargs = {}
            if isinstance(field_obj, Nested):
                # Allow partial loading of nested schemas.
                if partial_is_collection:
                    # Keep only partial entries addressed to this nested field,
                    # with the "<field_name>." prefix stripped off.
                    prefix = field_name + '.'
                    len_prefix = len(prefix)
                    sub_partial = [f[len_prefix:] for f in partial if f.startswith(prefix)] # depends on [control=['if'], data=[]]
                else:
                    sub_partial = partial
                d_kwargs['partial'] = sub_partial # depends on [control=['if'], data=[]]
            # NOTE: the lambda closes over loop variables, but it is invoked
            # immediately by _call_and_store below, so late binding is safe.
            getter = lambda val: field_obj.deserialize(val, field_name, data, **d_kwargs)
            value = self._call_and_store(getter_func=getter, data=raw_value, field_name=field_name, error_store=error_store, index=index)
            if value is not missing:
                # Store under the field's attribute name when one is declared.
                key = fields_dict[attr_name].attribute or attr_name
                set_value(ret, key, value) # depends on [control=['if'], data=['value']] # depends on [control=['for'], data=[]]
        if unknown != EXCLUDE:
            # Input keys that no loadable schema field claims.
            fields = {field_obj.data_key or field_name for (field_name, field_obj) in fields_dict.items() if not field_obj.dump_only}
            for key in set(data) - fields:
                value = data[key]
                if unknown == INCLUDE:
                    set_value(ret, key, value) # depends on [control=['if'], data=[]]
                elif unknown == RAISE:
                    error_store.store_error([self.error_messages['unknown']], key, index if index_errors else None) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['key']] # depends on [control=['if'], data=['unknown']]
    return ret
|
def isel_variable_and_index(
    name: Hashable,
    variable: Variable,
    index: pd.Index,
    indexers: Mapping[Any, Union[slice, Variable]],
) -> Tuple[Variable, Optional[pd.Index]]:
    """Index a Variable and pandas.Index together.

    Returns the indexed variable plus the correspondingly indexed pandas
    Index, or ``None`` in place of the index when it cannot be preserved.
    """
    if not indexers:
        # No selection requested: return a shallow copy and the index as-is.
        return variable.copy(deep=False), index
    if len(variable.dims) > 1:
        raise NotImplementedError(
            'indexing multi-dimensional variable with indexes is not '
            'supported yet')
    indexed = variable.isel(indexers)
    if indexed.dims != (name,):
        # The result acquired new dimensions, so no index can be kept.
        return indexed, None
    # Apply the same selection to the pandas index.
    dim, = variable.dims
    selection = indexers[dim]
    if isinstance(selection, Variable):
        selection = selection.data
    return indexed, index[selection]
|
def function[isel_variable_and_index, parameter[name, variable, index, indexers]]:
constant[Index a Variable and pandas.Index together.]
if <ast.UnaryOp object at 0x7da18f09e650> begin[:]
return[tuple[[<ast.Call object at 0x7da18f09ff40>, <ast.Name object at 0x7da18f09d480>]]]
if compare[call[name[len], parameter[name[variable].dims]] greater[>] constant[1]] begin[:]
<ast.Raise object at 0x7da18f09dc00>
variable[new_variable] assign[=] call[name[variable].isel, parameter[name[indexers]]]
if compare[name[new_variable].dims not_equal[!=] tuple[[<ast.Name object at 0x7da18f09ce80>]]] begin[:]
return[tuple[[<ast.Name object at 0x7da18f09c9d0>, <ast.Constant object at 0x7da18f09e380>]]]
<ast.Tuple object at 0x7da18f09ded0> assign[=] name[variable].dims
variable[indexer] assign[=] call[name[indexers]][name[dim]]
if call[name[isinstance], parameter[name[indexer], name[Variable]]] begin[:]
variable[indexer] assign[=] name[indexer].data
variable[new_index] assign[=] call[name[index]][name[indexer]]
return[tuple[[<ast.Name object at 0x7da18f09ec50>, <ast.Name object at 0x7da18f09cca0>]]]
|
keyword[def] identifier[isel_variable_and_index] (
identifier[name] : identifier[Hashable] ,
identifier[variable] : identifier[Variable] ,
identifier[index] : identifier[pd] . identifier[Index] ,
identifier[indexers] : identifier[Mapping] [ identifier[Any] , identifier[Union] [ identifier[slice] , identifier[Variable] ]],
)-> identifier[Tuple] [ identifier[Variable] , identifier[Optional] [ identifier[pd] . identifier[Index] ]]:
literal[string]
keyword[if] keyword[not] identifier[indexers] :
keyword[return] identifier[variable] . identifier[copy] ( identifier[deep] = keyword[False] ), identifier[index]
keyword[if] identifier[len] ( identifier[variable] . identifier[dims] )> literal[int] :
keyword[raise] identifier[NotImplementedError] (
literal[string]
literal[string] )
identifier[new_variable] = identifier[variable] . identifier[isel] ( identifier[indexers] )
keyword[if] identifier[new_variable] . identifier[dims] !=( identifier[name] ,):
keyword[return] identifier[new_variable] , keyword[None]
( identifier[dim] ,)= identifier[variable] . identifier[dims]
identifier[indexer] = identifier[indexers] [ identifier[dim] ]
keyword[if] identifier[isinstance] ( identifier[indexer] , identifier[Variable] ):
identifier[indexer] = identifier[indexer] . identifier[data]
identifier[new_index] = identifier[index] [ identifier[indexer] ]
keyword[return] identifier[new_variable] , identifier[new_index]
|
def isel_variable_and_index(name: Hashable, variable: Variable, index: pd.Index, indexers: Mapping[Any, Union[slice, Variable]]) -> Tuple[Variable, Optional[pd.Index]]:
    """Index a Variable and pandas.Index together.

    Returns the indexed variable and, when the result still has the single
    original dimension ``name``, the correspondingly indexed pandas Index;
    otherwise ``None`` in place of the index.
    """
    if not indexers:
        # nothing to index
        return (variable.copy(deep=False), index) # depends on [control=['if'], data=[]]
    if len(variable.dims) > 1:
        raise NotImplementedError('indexing multi-dimensional variable with indexes is not supported yet') # depends on [control=['if'], data=[]]
    new_variable = variable.isel(indexers)
    if new_variable.dims != (name,):
        # can't preserve an index if the result has new dimensions
        return (new_variable, None) # depends on [control=['if'], data=[]]
    # we need to compute the new index: apply the same selection to it
    (dim,) = variable.dims
    indexer = indexers[dim]
    if isinstance(indexer, Variable):
        # unwrap a Variable indexer to its underlying array
        indexer = indexer.data # depends on [control=['if'], data=[]]
    new_index = index[indexer]
    return (new_variable, new_index)
|
def lonlat_to_laea(lon, lat, lon0, lat0, f_e=0.0, f_n=0.0):
    """
    Converts vectors of longitude and latitude into Lambert Azimuthal
    Equal Area projection (km), with respect to an origin point
    :param numpy.ndarray lon:
        Longitudes
    :param numpy.ndarray lat:
        Latitude
    :param float lon0:
        Central longitude
    :param float lat0:
        Central latitude
    :param float f_e:
        False easting (km)
    :param float f_n:
        False northing (km)
    :returns:
        * easting (km)
        * northing (km)
    """
    # Work in radians throughout.
    lon = np.radians(lon)
    lat = np.radians(lat)
    lon0 = np.radians(lon0)
    lat0 = np.radians(lat0)
    # NOTE(review): TO_Q presumably computes the authalic quantity q for a
    # latitude (Snyder's ellipsoidal LAEA forward equations) -- confirm.
    q_0 = TO_Q(lat0)
    q_p = TO_Q(np.pi / 2.)
    q_val = TO_Q(lat)
    # Authalic latitudes of the points and of the origin.
    beta = np.arcsin(q_val / q_p)
    beta0 = np.arcsin(q_0 / q_p)
    # Radius of the authalic sphere.
    r_q = WGS84["a"] * np.sqrt(q_p / 2.)
    dval = WGS84["a"] * (
        np.cos(lat0) / np.sqrt(1.0 - (WGS84["e2"] * (np.sin(lat0) ** 2.))) /
        (r_q * np.cos(beta0)))
    bval = r_q * np.sqrt(
        2. / (1.0 + (np.sin(beta0) * np.sin(beta)) + (np.cos(beta) *
              np.cos(beta0) * np.cos(lon - lon0))))
    easting = f_e + ((bval * dval) * (np.cos(beta) * np.sin(lon - lon0)))
    northing = f_n + (bval / dval) * ((np.cos(beta0) * np.sin(beta)) -
        (np.sin(beta0) * np.cos(beta) * np.cos(lon - lon0)))
    return easting, northing
|
def function[lonlat_to_laea, parameter[lon, lat, lon0, lat0, f_e, f_n]]:
constant[
Converts vectors of longitude and latitude into Lambert Azimuthal
Equal Area projection (km), with respect to an origin point
:param numpy.ndarray lon:
Longitudes
:param numpy.ndarray lat:
Latitude
:param float lon0:
Central longitude
:param float lat0:
Central latitude
:param float f_e:
False easting (km)
:param float f_e:
False northing (km)
:returns:
* easting (km)
* northing (km)
]
variable[lon] assign[=] call[name[np].radians, parameter[name[lon]]]
variable[lat] assign[=] call[name[np].radians, parameter[name[lat]]]
variable[lon0] assign[=] call[name[np].radians, parameter[name[lon0]]]
variable[lat0] assign[=] call[name[np].radians, parameter[name[lat0]]]
variable[q_0] assign[=] call[name[TO_Q], parameter[name[lat0]]]
variable[q_p] assign[=] call[name[TO_Q], parameter[binary_operation[name[np].pi / constant[2.0]]]]
variable[q_val] assign[=] call[name[TO_Q], parameter[name[lat]]]
variable[beta] assign[=] call[name[np].arcsin, parameter[binary_operation[name[q_val] / name[q_p]]]]
variable[beta0] assign[=] call[name[np].arcsin, parameter[binary_operation[name[q_0] / name[q_p]]]]
variable[r_q] assign[=] binary_operation[call[name[WGS84]][constant[a]] * call[name[np].sqrt, parameter[binary_operation[name[q_p] / constant[2.0]]]]]
variable[dval] assign[=] binary_operation[call[name[WGS84]][constant[a]] * binary_operation[binary_operation[call[name[np].cos, parameter[name[lat0]]] / call[name[np].sqrt, parameter[binary_operation[constant[1.0] - binary_operation[call[name[WGS84]][constant[e2]] * binary_operation[call[name[np].sin, parameter[name[lat0]]] ** constant[2.0]]]]]]] / binary_operation[name[r_q] * call[name[np].cos, parameter[name[beta0]]]]]]
variable[bval] assign[=] binary_operation[name[r_q] * call[name[np].sqrt, parameter[binary_operation[constant[2.0] / binary_operation[binary_operation[constant[1.0] + binary_operation[call[name[np].sin, parameter[name[beta0]]] * call[name[np].sin, parameter[name[beta]]]]] + binary_operation[binary_operation[call[name[np].cos, parameter[name[beta]]] * call[name[np].cos, parameter[name[beta0]]]] * call[name[np].cos, parameter[binary_operation[name[lon] - name[lon0]]]]]]]]]]
variable[easting] assign[=] binary_operation[name[f_e] + binary_operation[binary_operation[name[bval] * name[dval]] * binary_operation[call[name[np].cos, parameter[name[beta]]] * call[name[np].sin, parameter[binary_operation[name[lon] - name[lon0]]]]]]]
variable[northing] assign[=] binary_operation[name[f_n] + binary_operation[binary_operation[name[bval] / name[dval]] * binary_operation[binary_operation[call[name[np].cos, parameter[name[beta0]]] * call[name[np].sin, parameter[name[beta]]]] - binary_operation[binary_operation[call[name[np].sin, parameter[name[beta0]]] * call[name[np].cos, parameter[name[beta]]]] * call[name[np].cos, parameter[binary_operation[name[lon] - name[lon0]]]]]]]]
return[tuple[[<ast.Name object at 0x7da207f03310>, <ast.Name object at 0x7da207f020e0>]]]
|
keyword[def] identifier[lonlat_to_laea] ( identifier[lon] , identifier[lat] , identifier[lon0] , identifier[lat0] , identifier[f_e] = literal[int] , identifier[f_n] = literal[int] ):
literal[string]
identifier[lon] = identifier[np] . identifier[radians] ( identifier[lon] )
identifier[lat] = identifier[np] . identifier[radians] ( identifier[lat] )
identifier[lon0] = identifier[np] . identifier[radians] ( identifier[lon0] )
identifier[lat0] = identifier[np] . identifier[radians] ( identifier[lat0] )
identifier[q_0] = identifier[TO_Q] ( identifier[lat0] )
identifier[q_p] = identifier[TO_Q] ( identifier[np] . identifier[pi] / literal[int] )
identifier[q_val] = identifier[TO_Q] ( identifier[lat] )
identifier[beta] = identifier[np] . identifier[arcsin] ( identifier[q_val] / identifier[q_p] )
identifier[beta0] = identifier[np] . identifier[arcsin] ( identifier[q_0] / identifier[q_p] )
identifier[r_q] = identifier[WGS84] [ literal[string] ]* identifier[np] . identifier[sqrt] ( identifier[q_p] / literal[int] )
identifier[dval] = identifier[WGS84] [ literal[string] ]*(
identifier[np] . identifier[cos] ( identifier[lat0] )/ identifier[np] . identifier[sqrt] ( literal[int] -( identifier[WGS84] [ literal[string] ]*( identifier[np] . identifier[sin] ( identifier[lat0] )** literal[int] )))/
( identifier[r_q] * identifier[np] . identifier[cos] ( identifier[beta0] )))
identifier[bval] = identifier[r_q] * identifier[np] . identifier[sqrt] (
literal[int] /( literal[int] +( identifier[np] . identifier[sin] ( identifier[beta0] )* identifier[np] . identifier[sin] ( identifier[beta] ))+( identifier[np] . identifier[cos] ( identifier[beta] )*
identifier[np] . identifier[cos] ( identifier[beta0] )* identifier[np] . identifier[cos] ( identifier[lon] - identifier[lon0] ))))
identifier[easting] = identifier[f_e] +(( identifier[bval] * identifier[dval] )*( identifier[np] . identifier[cos] ( identifier[beta] )* identifier[np] . identifier[sin] ( identifier[lon] - identifier[lon0] )))
identifier[northing] = identifier[f_n] +( identifier[bval] / identifier[dval] )*(( identifier[np] . identifier[cos] ( identifier[beta0] )* identifier[np] . identifier[sin] ( identifier[beta] ))-
( identifier[np] . identifier[sin] ( identifier[beta0] )* identifier[np] . identifier[cos] ( identifier[beta] )* identifier[np] . identifier[cos] ( identifier[lon] - identifier[lon0] )))
keyword[return] identifier[easting] , identifier[northing]
|
def lonlat_to_laea(lon, lat, lon0, lat0, f_e=0.0, f_n=0.0):
    """
    Converts vectors of longitude and latitude into Lambert Azimuthal
    Equal Area projection (km), with respect to an origin point
    :param numpy.ndarray lon:
        Longitudes
    :param numpy.ndarray lat:
        Latitude
    :param float lon0:
        Central longitude
    :param float lat0:
        Central latitude
    :param float f_e:
        False easting (km)
    :param float f_n:
        False northing (km)
    :returns:
        * easting (km)
        * northing (km)
    """
    # Convert every angle to radians before applying the projection.
    lon, lat = np.radians(lon), np.radians(lat)
    lon0, lat0 = np.radians(lon0), np.radians(lat0)
    q_origin = TO_Q(lat0)
    q_pole = TO_Q(np.pi / 2.0)
    q_point = TO_Q(lat)
    # Authalic latitudes of the points and of the projection origin.
    beta = np.arcsin(q_point / q_pole)
    beta_0 = np.arcsin(q_origin / q_pole)
    radius_q = WGS84['a'] * np.sqrt(q_pole / 2.0)
    d_fact = WGS84['a'] * (np.cos(lat0) / np.sqrt(1.0 - WGS84['e2'] * np.sin(lat0) ** 2.0) / (radius_q * np.cos(beta_0)))
    b_fact = radius_q * np.sqrt(2.0 / (1.0 + np.sin(beta_0) * np.sin(beta) + np.cos(beta) * np.cos(beta_0) * np.cos(lon - lon0)))
    # Forward equations: offset by the false easting/northing.
    easting = f_e + b_fact * d_fact * (np.cos(beta) * np.sin(lon - lon0))
    northing = f_n + b_fact / d_fact * (np.cos(beta_0) * np.sin(beta) - np.sin(beta_0) * np.cos(beta) * np.cos(lon - lon0))
    return (easting, northing)
|
def _term_size(self):
"""
Method returns lines and columns according to terminal size
"""
for fd in (0, 1, 2):
try:
return self._ioctl_GWINSZ(fd)
except:
pass
# try os.ctermid()
try:
fd = os.open(os.ctermid(), os.O_RDONLY)
try:
return self._ioctl_GWINSZ(fd)
finally:
os.close(fd)
except:
pass
# try `stty size`
try:
return tuple(int(x) for x in os.popen("stty size", "r").read().split())
except:
pass
# try environment variables
try:
return tuple(int(os.getenv(var)) for var in ("LINES", "COLUMNS"))
except:
pass
# i give up. return default.
return (25, 80)
|
def function[_term_size, parameter[self]]:
constant[
Method returns lines and columns according to terminal size
]
for taget[name[fd]] in starred[tuple[[<ast.Constant object at 0x7da18f09e5c0>, <ast.Constant object at 0x7da18f09d180>, <ast.Constant object at 0x7da18f09edd0>]]] begin[:]
<ast.Try object at 0x7da18f09e1a0>
<ast.Try object at 0x7da18f09fb50>
<ast.Try object at 0x7da18f09fdc0>
<ast.Try object at 0x7da18f09c910>
return[tuple[[<ast.Constant object at 0x7da18f09eec0>, <ast.Constant object at 0x7da20c6c5330>]]]
|
keyword[def] identifier[_term_size] ( identifier[self] ):
literal[string]
keyword[for] identifier[fd] keyword[in] ( literal[int] , literal[int] , literal[int] ):
keyword[try] :
keyword[return] identifier[self] . identifier[_ioctl_GWINSZ] ( identifier[fd] )
keyword[except] :
keyword[pass]
keyword[try] :
identifier[fd] = identifier[os] . identifier[open] ( identifier[os] . identifier[ctermid] (), identifier[os] . identifier[O_RDONLY] )
keyword[try] :
keyword[return] identifier[self] . identifier[_ioctl_GWINSZ] ( identifier[fd] )
keyword[finally] :
identifier[os] . identifier[close] ( identifier[fd] )
keyword[except] :
keyword[pass]
keyword[try] :
keyword[return] identifier[tuple] ( identifier[int] ( identifier[x] ) keyword[for] identifier[x] keyword[in] identifier[os] . identifier[popen] ( literal[string] , literal[string] ). identifier[read] (). identifier[split] ())
keyword[except] :
keyword[pass]
keyword[try] :
keyword[return] identifier[tuple] ( identifier[int] ( identifier[os] . identifier[getenv] ( identifier[var] )) keyword[for] identifier[var] keyword[in] ( literal[string] , literal[string] ))
keyword[except] :
keyword[pass]
keyword[return] ( literal[int] , literal[int] )
|
def _term_size(self):
"""
Method returns lines and columns according to terminal size
"""
for fd in (0, 1, 2):
try:
return self._ioctl_GWINSZ(fd) # depends on [control=['try'], data=[]]
except:
pass # depends on [control=['except'], data=[]] # depends on [control=['for'], data=['fd']]
# try os.ctermid()
try:
fd = os.open(os.ctermid(), os.O_RDONLY)
try:
return self._ioctl_GWINSZ(fd) # depends on [control=['try'], data=[]]
finally:
os.close(fd) # depends on [control=['try'], data=[]]
except:
pass # depends on [control=['except'], data=[]]
# try `stty size`
try:
return tuple((int(x) for x in os.popen('stty size', 'r').read().split())) # depends on [control=['try'], data=[]]
except:
pass # depends on [control=['except'], data=[]]
# try environment variables
try:
return tuple((int(os.getenv(var)) for var in ('LINES', 'COLUMNS'))) # depends on [control=['try'], data=[]]
except:
pass # depends on [control=['except'], data=[]]
# i give up. return default.
return (25, 80)
|
def stack(recs, fields=None):
    """Stack common fields in multiple record arrays (concatenate them).

    Parameters
    ----------
    recs : list
        List of NumPy record arrays
    fields : list of strings, optional (default=None)
        The list of fields to include in the stacked array. If None, then
        include the fields in common to all the record arrays.

    Returns
    -------
    rec : NumPy record array
        The stacked array.
    """
    if fields is None:
        # Intersect the field names of every input array.
        common = set(recs[0].dtype.names)
        for rec in recs[1:]:
            common.intersection_update(rec.dtype.names)
        fields = list(common)
    # preserve order of fields wrt first record array
    if set(fields) == set(recs[0].dtype.names):
        fields = list(recs[0].dtype.names)
    subsets = [rec[fields] for rec in recs]
    return np.hstack(subsets)
|
def function[stack, parameter[recs, fields]]:
constant[Stack common fields in multiple record arrays (concatenate them).
Parameters
----------
recs : list
List of NumPy record arrays
fields : list of strings, optional (default=None)
The list of fields to include in the stacked array. If None, then
include the fields in common to all the record arrays.
Returns
-------
rec : NumPy record array
The stacked array.
]
if compare[name[fields] is constant[None]] begin[:]
variable[fields] assign[=] call[name[list], parameter[call[name[set].intersection, parameter[<ast.Starred object at 0x7da20e956d40>]]]]
if compare[call[name[set], parameter[name[fields]]] equal[==] call[name[set], parameter[call[name[recs]][constant[0]].dtype.names]]] begin[:]
variable[fields] assign[=] call[name[list], parameter[call[name[recs]][constant[0]].dtype.names]]
return[call[name[np].hstack, parameter[<ast.ListComp object at 0x7da20e957a90>]]]
|
keyword[def] identifier[stack] ( identifier[recs] , identifier[fields] = keyword[None] ):
literal[string]
keyword[if] identifier[fields] keyword[is] keyword[None] :
identifier[fields] = identifier[list] ( identifier[set] . identifier[intersection] (
*[ identifier[set] ( identifier[rec] . identifier[dtype] . identifier[names] ) keyword[for] identifier[rec] keyword[in] identifier[recs] ]))
keyword[if] identifier[set] ( identifier[fields] )== identifier[set] ( identifier[recs] [ literal[int] ]. identifier[dtype] . identifier[names] ):
identifier[fields] = identifier[list] ( identifier[recs] [ literal[int] ]. identifier[dtype] . identifier[names] )
keyword[return] identifier[np] . identifier[hstack] ([ identifier[rec] [ identifier[fields] ] keyword[for] identifier[rec] keyword[in] identifier[recs] ])
|
def stack(recs, fields=None):
    """Stack common fields in multiple record arrays (concatenate them).

    Parameters
    ----------
    recs : list
        List of NumPy record arrays
    fields : list of strings, optional (default=None)
        The list of fields to include in the stacked array. If None, then
        include the fields in common to all the record arrays.

    Returns
    -------
    rec : NumPy record array
        The stacked array.

    Notes
    -----
    When ``fields`` is derived from the set intersection and is a strict
    subset of the first array's fields, its order is arbitrary (set
    iteration order).  An empty ``recs`` raises (unbound
    ``set.intersection`` with no arguments is a TypeError).
    """
    if fields is None:
        fields = list(set.intersection(*[set(rec.dtype.names) for rec in recs]))
    # preserve order of fields wrt first record array
    # NOTE(review): this reordering also applies to an explicitly passed
    # ``fields`` list when it covers all of the first array's fields.
    if set(fields) == set(recs[0].dtype.names):
        fields = list(recs[0].dtype.names) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['fields']]
    return np.hstack([rec[fields] for rec in recs])
|
def search(self):  # pylint: disable=no-self-use
    """
    Request available queries

    Posted data: {u'target': u''}

    Return the list of available target queries

    :return: See upper comment
    :rtype: list
    """
    logger.debug("Grafana search... %s", cherrypy.request.method)
    if cherrypy.request.method == 'OPTIONS':
        # CORS preflight: advertise allowed methods/headers and skip the
        # normal request handler.
        headers = cherrypy.response.headers
        headers['Access-Control-Allow-Methods'] = 'GET,POST,PATCH,PUT,DELETE'
        headers['Access-Control-Allow-Headers'] = 'Content-Type,Authorization'
        headers['Access-Control-Allow-Origin'] = '*'
        cherrypy.request.handler = None
        return {}
    posted = getattr(cherrypy.request, 'json', None)
    if posted:
        logger.debug("Posted data: %s", cherrypy.request.json)
    logger.debug("Grafana search returns: %s", GRAFANA_TARGETS)
    return GRAFANA_TARGETS
|
def function[search, parameter[self]]:
constant[
Request available queries
Posted data: {u'target': u''}
Return the list of available target queries
:return: See upper comment
:rtype: list
]
call[name[logger].debug, parameter[constant[Grafana search... %s], name[cherrypy].request.method]]
if compare[name[cherrypy].request.method equal[==] constant[OPTIONS]] begin[:]
call[name[cherrypy].response.headers][constant[Access-Control-Allow-Methods]] assign[=] constant[GET,POST,PATCH,PUT,DELETE]
call[name[cherrypy].response.headers][constant[Access-Control-Allow-Headers]] assign[=] constant[Content-Type,Authorization]
call[name[cherrypy].response.headers][constant[Access-Control-Allow-Origin]] assign[=] constant[*]
name[cherrypy].request.handler assign[=] constant[None]
return[dictionary[[], []]]
if call[name[getattr], parameter[name[cherrypy].request, constant[json], constant[None]]] begin[:]
call[name[logger].debug, parameter[constant[Posted data: %s], name[cherrypy].request.json]]
call[name[logger].debug, parameter[constant[Grafana search returns: %s], name[GRAFANA_TARGETS]]]
return[name[GRAFANA_TARGETS]]
|
keyword[def] identifier[search] ( identifier[self] ):
literal[string]
identifier[logger] . identifier[debug] ( literal[string] , identifier[cherrypy] . identifier[request] . identifier[method] )
keyword[if] identifier[cherrypy] . identifier[request] . identifier[method] == literal[string] :
identifier[cherrypy] . identifier[response] . identifier[headers] [ literal[string] ]= literal[string]
identifier[cherrypy] . identifier[response] . identifier[headers] [ literal[string] ]= literal[string]
identifier[cherrypy] . identifier[response] . identifier[headers] [ literal[string] ]= literal[string]
identifier[cherrypy] . identifier[request] . identifier[handler] = keyword[None]
keyword[return] {}
keyword[if] identifier[getattr] ( identifier[cherrypy] . identifier[request] , literal[string] , keyword[None] ):
identifier[logger] . identifier[debug] ( literal[string] , identifier[cherrypy] . identifier[request] . identifier[json] )
identifier[logger] . identifier[debug] ( literal[string] , identifier[GRAFANA_TARGETS] )
keyword[return] identifier[GRAFANA_TARGETS]
|
def search(self):  # pylint: disable=no-self-use
    """Request available queries.

    Posted data: {u'target': u''}

    Return the list of available target queries.

    :return: the list of available Grafana target queries
    :rtype: list
    """
    logger.debug('Grafana search... %s', cherrypy.request.method)
    if cherrypy.request.method == 'OPTIONS':
        # CORS pre-flight request: advertise the allowed methods/headers,
        # disable the normal page handler and return an empty body.
        cherrypy.response.headers['Access-Control-Allow-Methods'] = 'GET,POST,PATCH,PUT,DELETE'
        cherrypy.response.headers['Access-Control-Allow-Headers'] = 'Content-Type,Authorization'
        cherrypy.response.headers['Access-Control-Allow-Origin'] = '*'
        cherrypy.request.handler = None
        return {}
    if getattr(cherrypy.request, 'json', None):
        # Grafana posts {'target': ...}; only logged, not used for filtering.
        logger.debug('Posted data: %s', cherrypy.request.json)
    logger.debug('Grafana search returns: %s', GRAFANA_TARGETS)
    return GRAFANA_TARGETS
|
def close(self, session):
    """Closes the specified session, event, or find list.

    Corresponds to viClose function of the VISA library.

    :param session: Unique logical identifier to a session, event, or find list.
    :return: return value of the library call.
    :rtype: VISAStatus
    """
    try:
        sess = self.sessions[session]
        # Do not call close() on ourselves; that would recurse forever.
        if sess is not self:
            sess.close()
        # The original implementation fell off the end here and returned
        # None on success despite the documented VISAStatus return type;
        # report success explicitly.
        return StatusCode.success
    except KeyError:
        # Unknown session id: mirror viClose's invalid-object status.
        return StatusCode.error_invalid_object
|
def function[close, parameter[self, session]]:
constant[Closes the specified session, event, or find list.
Corresponds to viClose function of the VISA library.
:param session: Unique logical identifier to a session, event, or find list.
:return: return value of the library call.
:rtype: VISAStatus
]
<ast.Try object at 0x7da20c6c6470>
|
keyword[def] identifier[close] ( identifier[self] , identifier[session] ):
literal[string]
keyword[try] :
identifier[sess] = identifier[self] . identifier[sessions] [ identifier[session] ]
keyword[if] identifier[sess] keyword[is] keyword[not] identifier[self] :
identifier[sess] . identifier[close] ()
keyword[except] identifier[KeyError] :
keyword[return] identifier[StatusCode] . identifier[error_invalid_object]
|
def close(self, session):
"""Closes the specified session, event, or find list.
Corresponds to viClose function of the VISA library.
:param session: Unique logical identifier to a session, event, or find list.
:return: return value of the library call.
:rtype: VISAStatus
"""
try:
sess = self.sessions[session]
if sess is not self:
sess.close() # depends on [control=['if'], data=['sess']] # depends on [control=['try'], data=[]]
except KeyError:
return StatusCode.error_invalid_object # depends on [control=['except'], data=[]]
|
def _get_win_argv():
    """Return the command line as a list of unicode arguments on Windows.

    Returns:
        List[`fsnative`]
    """
    assert is_win

    argc = ctypes.c_int()
    try:
        raw_argv = winapi.CommandLineToArgvW(
            winapi.GetCommandLineW(), ctypes.byref(argc))
    except WindowsError:
        return []
    if not raw_argv:
        return []

    # Keep only the trailing entries that correspond to sys.argv, skipping
    # any interpreter/launcher arguments at the front of the command line.
    start = max(0, argc.value - len(sys.argv))
    result = raw_argv[start:argc.value]
    winapi.LocalFree(raw_argv)
    return result
|
def function[_get_win_argv, parameter[]]:
constant[Returns a unicode argv under Windows and standard sys.argv otherwise
Returns:
List[`fsnative`]
]
assert[name[is_win]]
variable[argc] assign[=] call[name[ctypes].c_int, parameter[]]
<ast.Try object at 0x7da1b1e97730>
if <ast.UnaryOp object at 0x7da1b1e942e0> begin[:]
return[list[[]]]
variable[res] assign[=] call[name[argv]][<ast.Slice object at 0x7da1b20409a0>]
call[name[winapi].LocalFree, parameter[name[argv]]]
return[name[res]]
|
keyword[def] identifier[_get_win_argv] ():
literal[string]
keyword[assert] identifier[is_win]
identifier[argc] = identifier[ctypes] . identifier[c_int] ()
keyword[try] :
identifier[argv] = identifier[winapi] . identifier[CommandLineToArgvW] (
identifier[winapi] . identifier[GetCommandLineW] (), identifier[ctypes] . identifier[byref] ( identifier[argc] ))
keyword[except] identifier[WindowsError] :
keyword[return] []
keyword[if] keyword[not] identifier[argv] :
keyword[return] []
identifier[res] = identifier[argv] [ identifier[max] ( literal[int] , identifier[argc] . identifier[value] - identifier[len] ( identifier[sys] . identifier[argv] )): identifier[argc] . identifier[value] ]
identifier[winapi] . identifier[LocalFree] ( identifier[argv] )
keyword[return] identifier[res]
|
def _get_win_argv():
"""Returns a unicode argv under Windows and standard sys.argv otherwise
Returns:
List[`fsnative`]
"""
assert is_win
argc = ctypes.c_int()
try:
argv = winapi.CommandLineToArgvW(winapi.GetCommandLineW(), ctypes.byref(argc)) # depends on [control=['try'], data=[]]
except WindowsError:
return [] # depends on [control=['except'], data=[]]
if not argv:
return [] # depends on [control=['if'], data=[]]
res = argv[max(0, argc.value - len(sys.argv)):argc.value]
winapi.LocalFree(argv)
return res
|
def _load_disktype_data(self):
    """Calls the :command:`disktype` command and obtains the disk GUID from GPT volume systems. As we
    are running the tool anyway, the label is also extracted from the tool if it is not yet set.
    The disktype data is only loaded and not assigned to volumes yet.
    """
    # disktype is an optional external dependency; degrade gracefully
    # when it is not installed instead of failing the whole scan.
    if not _util.command_exists('disktype'):
        logger.warning("disktype not installed, could not detect volume type")
        return None
    disktype = _util.check_output_(['disktype', self.parent.get_raw_path()]).strip()
    # Tracks which "Partition N:" section of the disktype output we are
    # currently inside; attribute lines only make sense within a section.
    current_partition = None
    for line in disktype.splitlines():
        if not line:
            continue
        # noinspection PyBroadException
        try:
            line = line.strip()
            find_partition_nr = re.match(r"^Partition (\d+):", line)
            if find_partition_nr:
                current_partition = int(find_partition_nr.group(1))
            elif current_partition is not None:
                # "Type ... GUID (xxxx-...)" — the +5/-1 slice strips the
                # "GUID " prefix and the surrounding parentheses.
                if line.startswith("Type ") and "GUID" in line:
                    self._disktype[current_partition]['guid'] = \
                        line[line.index('GUID') + 5:-1].strip()  # output is between ()
                # 'Partition Name "label"' — the +6/-1 slice strips the
                # 'Name "' prefix and the trailing quote.
                elif line.startswith("Partition Name "):
                    self._disktype[current_partition]['label'] = \
                        line[line.index('Name ') + 6:-1].strip()  # output is between ""
        except Exception:
            # Deliberately broad: parsing is best-effort and must never
            # abort the caller; log the problem and stop parsing.
            logger.exception("Error while parsing disktype output")
            return
|
def function[_load_disktype_data, parameter[self]]:
constant[Calls the :command:`disktype` command and obtains the disk GUID from GPT volume systems. As we
are running the tool anyway, the label is also extracted from the tool if it is not yet set.
The disktype data is only loaded and not assigned to volumes yet.
]
if <ast.UnaryOp object at 0x7da1b045c3a0> begin[:]
call[name[logger].warning, parameter[constant[disktype not installed, could not detect volume type]]]
return[constant[None]]
variable[disktype] assign[=] call[call[name[_util].check_output_, parameter[list[[<ast.Constant object at 0x7da1b045f640>, <ast.Call object at 0x7da1b045d900>]]]].strip, parameter[]]
variable[current_partition] assign[=] constant[None]
for taget[name[line]] in starred[call[name[disktype].splitlines, parameter[]]] begin[:]
if <ast.UnaryOp object at 0x7da1b045fc40> begin[:]
continue
<ast.Try object at 0x7da1b045dea0>
|
keyword[def] identifier[_load_disktype_data] ( identifier[self] ):
literal[string]
keyword[if] keyword[not] identifier[_util] . identifier[command_exists] ( literal[string] ):
identifier[logger] . identifier[warning] ( literal[string] )
keyword[return] keyword[None]
identifier[disktype] = identifier[_util] . identifier[check_output_] ([ literal[string] , identifier[self] . identifier[parent] . identifier[get_raw_path] ()]). identifier[strip] ()
identifier[current_partition] = keyword[None]
keyword[for] identifier[line] keyword[in] identifier[disktype] . identifier[splitlines] ():
keyword[if] keyword[not] identifier[line] :
keyword[continue]
keyword[try] :
identifier[line] = identifier[line] . identifier[strip] ()
identifier[find_partition_nr] = identifier[re] . identifier[match] ( literal[string] , identifier[line] )
keyword[if] identifier[find_partition_nr] :
identifier[current_partition] = identifier[int] ( identifier[find_partition_nr] . identifier[group] ( literal[int] ))
keyword[elif] identifier[current_partition] keyword[is] keyword[not] keyword[None] :
keyword[if] identifier[line] . identifier[startswith] ( literal[string] ) keyword[and] literal[string] keyword[in] identifier[line] :
identifier[self] . identifier[_disktype] [ identifier[current_partition] ][ literal[string] ]= identifier[line] [ identifier[line] . identifier[index] ( literal[string] )+ literal[int] :- literal[int] ]. identifier[strip] ()
keyword[elif] identifier[line] . identifier[startswith] ( literal[string] ):
identifier[self] . identifier[_disktype] [ identifier[current_partition] ][ literal[string] ]= identifier[line] [ identifier[line] . identifier[index] ( literal[string] )+ literal[int] :- literal[int] ]. identifier[strip] ()
keyword[except] identifier[Exception] :
identifier[logger] . identifier[exception] ( literal[string] )
keyword[return]
|
def _load_disktype_data(self):
"""Calls the :command:`disktype` command and obtains the disk GUID from GPT volume systems. As we
are running the tool anyway, the label is also extracted from the tool if it is not yet set.
The disktype data is only loaded and not assigned to volumes yet.
"""
if not _util.command_exists('disktype'):
logger.warning('disktype not installed, could not detect volume type')
return None # depends on [control=['if'], data=[]]
disktype = _util.check_output_(['disktype', self.parent.get_raw_path()]).strip()
current_partition = None
for line in disktype.splitlines():
if not line:
continue # depends on [control=['if'], data=[]]
# noinspection PyBroadException
try:
line = line.strip()
find_partition_nr = re.match('^Partition (\\d+):', line)
if find_partition_nr:
current_partition = int(find_partition_nr.group(1)) # depends on [control=['if'], data=[]]
elif current_partition is not None:
if line.startswith('Type ') and 'GUID' in line:
self._disktype[current_partition]['guid'] = line[line.index('GUID') + 5:-1].strip() # output is between () # depends on [control=['if'], data=[]]
elif line.startswith('Partition Name '):
self._disktype[current_partition]['label'] = line[line.index('Name ') + 6:-1].strip() # output is between "" # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['current_partition']] # depends on [control=['try'], data=[]]
except Exception:
logger.exception('Error while parsing disktype output')
return # depends on [control=['except'], data=[]] # depends on [control=['for'], data=['line']]
|
def _get_filename(self, path):
"""
This function gets the base filename from the path, if a language code
is present the filename will start from there.
"""
match = re.search("[a-z]{2,3}_[A-Z]{2}", path)
if match:
start = match.start(0)
filename = path[start:]
else:
filename = os.path.basename(path)
return filename
|
def function[_get_filename, parameter[self, path]]:
constant[
This function gets the base filename from the path, if a language code
is present the filename will start from there.
]
variable[match] assign[=] call[name[re].search, parameter[constant[[a-z]{2,3}_[A-Z]{2}], name[path]]]
if name[match] begin[:]
variable[start] assign[=] call[name[match].start, parameter[constant[0]]]
variable[filename] assign[=] call[name[path]][<ast.Slice object at 0x7da1b23d7fa0>]
return[name[filename]]
|
keyword[def] identifier[_get_filename] ( identifier[self] , identifier[path] ):
literal[string]
identifier[match] = identifier[re] . identifier[search] ( literal[string] , identifier[path] )
keyword[if] identifier[match] :
identifier[start] = identifier[match] . identifier[start] ( literal[int] )
identifier[filename] = identifier[path] [ identifier[start] :]
keyword[else] :
identifier[filename] = identifier[os] . identifier[path] . identifier[basename] ( identifier[path] )
keyword[return] identifier[filename]
|
def _get_filename(self, path):
"""
This function gets the base filename from the path, if a language code
is present the filename will start from there.
"""
match = re.search('[a-z]{2,3}_[A-Z]{2}', path)
if match:
start = match.start(0)
filename = path[start:] # depends on [control=['if'], data=[]]
else:
filename = os.path.basename(path)
return filename
|
def unique(input_list):
    """
    Return a list of unique items (similar to set functionality), keeping
    the order of first occurrence.
    Parameters
    ----------
    input_list : list
        A list containing some items that can occur more than once.
    Returns
    -------
    list
        A list with only unique occurrences of an item.
    """
    output = []
    seen = set()  # O(1) membership tests for hashable items
    for item in input_list:
        try:
            if item in seen:
                continue
            seen.add(item)
        except TypeError:
            # Unhashable item (e.g. a list): fall back to the O(n) scan of
            # the output list, which the original used for everything.
            if item in output:
                continue
        output.append(item)
    return output
|
def function[unique, parameter[input_list]]:
constant[
Return a list of unique items (similar to set functionality).
Parameters
----------
input_list : list
A list containg some items that can occur more than once.
Returns
-------
list
A list with only unique occurances of an item.
]
variable[output] assign[=] list[[]]
for taget[name[item]] in starred[name[input_list]] begin[:]
if compare[name[item] <ast.NotIn object at 0x7da2590d7190> name[output]] begin[:]
call[name[output].append, parameter[name[item]]]
return[name[output]]
|
keyword[def] identifier[unique] ( identifier[input_list] ):
literal[string]
identifier[output] =[]
keyword[for] identifier[item] keyword[in] identifier[input_list] :
keyword[if] identifier[item] keyword[not] keyword[in] identifier[output] :
identifier[output] . identifier[append] ( identifier[item] )
keyword[return] identifier[output]
|
def unique(input_list):
"""
Return a list of unique items (similar to set functionality).
Parameters
----------
input_list : list
A list containg some items that can occur more than once.
Returns
-------
list
A list with only unique occurances of an item.
"""
output = []
for item in input_list:
if item not in output:
output.append(item) # depends on [control=['if'], data=['item', 'output']] # depends on [control=['for'], data=['item']]
return output
|
def _deriv_growth(z, **cosmo):
    """ Returns derivative of the linear growth factor at z
    for a given cosmology **cosmo """
    # 1/H(z) (in units of 1/H0) for a flat matter + lambda cosmology.
    inv_h = (cosmo['omega_M_0'] * (1 + z) ** 3 +
             cosmo['omega_lambda_0']) ** (-0.5)
    fz = (1 + z) * inv_h ** 3
    # growthfactor() involves the normalised growth integral and is needed
    # twice below; evaluate it once instead of twice.
    gf = growthfactor(z, norm=True, **cosmo)
    deriv_g = (gf * inv_h ** 2 * 1.5 * cosmo['omega_M_0'] * (1 + z) ** 2 -
               fz * gf / _int_growth(z, **cosmo))
    return deriv_g
|
def function[_deriv_growth, parameter[z]]:
constant[ Returns derivative of the linear growth factor at z
for a given cosmology **cosmo ]
variable[inv_h] assign[=] binary_operation[binary_operation[binary_operation[call[name[cosmo]][constant[omega_M_0]] * binary_operation[binary_operation[constant[1] + name[z]] ** constant[3]]] + call[name[cosmo]][constant[omega_lambda_0]]] ** <ast.UnaryOp object at 0x7da1b106bb50>]
variable[fz] assign[=] binary_operation[binary_operation[constant[1] + name[z]] * binary_operation[name[inv_h] ** constant[3]]]
variable[deriv_g] assign[=] binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[call[name[growthfactor], parameter[name[z]]] * binary_operation[name[inv_h] ** constant[2]]] * constant[1.5]] * call[name[cosmo]][constant[omega_M_0]]] * binary_operation[binary_operation[constant[1] + name[z]] ** constant[2]]] - binary_operation[binary_operation[name[fz] * call[name[growthfactor], parameter[name[z]]]] / call[name[_int_growth], parameter[name[z]]]]]
return[name[deriv_g]]
|
keyword[def] identifier[_deriv_growth] ( identifier[z] ,** identifier[cosmo] ):
literal[string]
identifier[inv_h] =( identifier[cosmo] [ literal[string] ]*( literal[int] + identifier[z] )** literal[int] + identifier[cosmo] [ literal[string] ])**(- literal[int] )
identifier[fz] =( literal[int] + identifier[z] )* identifier[inv_h] ** literal[int]
identifier[deriv_g] = identifier[growthfactor] ( identifier[z] , identifier[norm] = keyword[True] ,** identifier[cosmo] )*( identifier[inv_h] ** literal[int] )* literal[int] * identifier[cosmo] [ literal[string] ]*( literal[int] + identifier[z] )** literal[int] - identifier[fz] * identifier[growthfactor] ( identifier[z] , identifier[norm] = keyword[True] ,** identifier[cosmo] )/ identifier[_int_growth] ( identifier[z] ,** identifier[cosmo] )
keyword[return] ( identifier[deriv_g] )
|
def _deriv_growth(z, **cosmo):
""" Returns derivative of the linear growth factor at z
for a given cosmology **cosmo """
inv_h = (cosmo['omega_M_0'] * (1 + z) ** 3 + cosmo['omega_lambda_0']) ** (-0.5)
fz = (1 + z) * inv_h ** 3
deriv_g = growthfactor(z, norm=True, **cosmo) * inv_h ** 2 * 1.5 * cosmo['omega_M_0'] * (1 + z) ** 2 - fz * growthfactor(z, norm=True, **cosmo) / _int_growth(z, **cosmo)
return deriv_g
|
def read(self, n=-1):
    """Read and return up to n bytes.

    If the argument is omitted, None, or negative, data is read and
    returned until EOF is reached.
    """
    # Unbounded read: take everything buffered, then pull chunks until EOF.
    if n is None or n < 0:
        pieces = [self._readbuffer[self._offset:]]
        self._readbuffer = b''
        self._offset = 0
        while not self._eof:
            pieces.append(self._read1(self.MAX_N))
        return b''.join(pieces)

    # Fast path: the request is satisfied entirely from the buffer.
    stop = n + self._offset
    if stop < len(self._readbuffer):
        chunk = self._readbuffer[self._offset:stop]
        self._offset = stop
        return chunk

    # Slow path: drain the buffer, then read the remainder from the stream.
    remaining = stop - len(self._readbuffer)
    pieces = [self._readbuffer[self._offset:]]
    self._readbuffer = b''
    self._offset = 0
    while remaining > 0 and not self._eof:
        data = self._read1(remaining)
        if remaining < len(data):
            # Got more than requested: keep the excess buffered.
            self._readbuffer = data
            self._offset = remaining
            pieces.append(data[:remaining])
            break
        pieces.append(data)
        remaining -= len(data)
    return b''.join(pieces)
|
def function[read, parameter[self, n]]:
constant[Read and return up to n bytes.
If the argument is omitted, None, or negative, data is read and returned until EOF is reached..
]
if <ast.BoolOp object at 0x7da18c4ccbe0> begin[:]
variable[buf] assign[=] call[name[self]._readbuffer][<ast.Slice object at 0x7da18c4ce770>]
name[self]._readbuffer assign[=] constant[b'']
name[self]._offset assign[=] constant[0]
while <ast.UnaryOp object at 0x7da18c4cecb0> begin[:]
<ast.AugAssign object at 0x7da18c4cff70>
return[name[buf]]
variable[end] assign[=] binary_operation[name[n] + name[self]._offset]
if compare[name[end] less[<] call[name[len], parameter[name[self]._readbuffer]]] begin[:]
variable[buf] assign[=] call[name[self]._readbuffer][<ast.Slice object at 0x7da18c4ccd00>]
name[self]._offset assign[=] name[end]
return[name[buf]]
variable[n] assign[=] binary_operation[name[end] - call[name[len], parameter[name[self]._readbuffer]]]
variable[buf] assign[=] call[name[self]._readbuffer][<ast.Slice object at 0x7da20c7962f0>]
name[self]._readbuffer assign[=] constant[b'']
name[self]._offset assign[=] constant[0]
while <ast.BoolOp object at 0x7da20c796890> begin[:]
variable[data] assign[=] call[name[self]._read1, parameter[name[n]]]
if compare[name[n] less[<] call[name[len], parameter[name[data]]]] begin[:]
name[self]._readbuffer assign[=] name[data]
name[self]._offset assign[=] name[n]
<ast.AugAssign object at 0x7da20c796a70>
break
<ast.AugAssign object at 0x7da2046203d0>
<ast.AugAssign object at 0x7da204623190>
return[name[buf]]
|
keyword[def] identifier[read] ( identifier[self] , identifier[n] =- literal[int] ):
literal[string]
keyword[if] identifier[n] keyword[is] keyword[None] keyword[or] identifier[n] < literal[int] :
identifier[buf] = identifier[self] . identifier[_readbuffer] [ identifier[self] . identifier[_offset] :]
identifier[self] . identifier[_readbuffer] = literal[string]
identifier[self] . identifier[_offset] = literal[int]
keyword[while] keyword[not] identifier[self] . identifier[_eof] :
identifier[buf] += identifier[self] . identifier[_read1] ( identifier[self] . identifier[MAX_N] )
keyword[return] identifier[buf]
identifier[end] = identifier[n] + identifier[self] . identifier[_offset]
keyword[if] identifier[end] < identifier[len] ( identifier[self] . identifier[_readbuffer] ):
identifier[buf] = identifier[self] . identifier[_readbuffer] [ identifier[self] . identifier[_offset] : identifier[end] ]
identifier[self] . identifier[_offset] = identifier[end]
keyword[return] identifier[buf]
identifier[n] = identifier[end] - identifier[len] ( identifier[self] . identifier[_readbuffer] )
identifier[buf] = identifier[self] . identifier[_readbuffer] [ identifier[self] . identifier[_offset] :]
identifier[self] . identifier[_readbuffer] = literal[string]
identifier[self] . identifier[_offset] = literal[int]
keyword[while] identifier[n] > literal[int] keyword[and] keyword[not] identifier[self] . identifier[_eof] :
identifier[data] = identifier[self] . identifier[_read1] ( identifier[n] )
keyword[if] identifier[n] < identifier[len] ( identifier[data] ):
identifier[self] . identifier[_readbuffer] = identifier[data]
identifier[self] . identifier[_offset] = identifier[n]
identifier[buf] += identifier[data] [: identifier[n] ]
keyword[break]
identifier[buf] += identifier[data]
identifier[n] -= identifier[len] ( identifier[data] )
keyword[return] identifier[buf]
|
def read(self, n=-1):
"""Read and return up to n bytes.
If the argument is omitted, None, or negative, data is read and returned until EOF is reached..
"""
if n is None or n < 0:
buf = self._readbuffer[self._offset:]
self._readbuffer = b''
self._offset = 0
while not self._eof:
buf += self._read1(self.MAX_N) # depends on [control=['while'], data=[]]
return buf # depends on [control=['if'], data=[]]
end = n + self._offset
if end < len(self._readbuffer):
buf = self._readbuffer[self._offset:end]
self._offset = end
return buf # depends on [control=['if'], data=['end']]
n = end - len(self._readbuffer)
buf = self._readbuffer[self._offset:]
self._readbuffer = b''
self._offset = 0
while n > 0 and (not self._eof):
data = self._read1(n)
if n < len(data):
self._readbuffer = data
self._offset = n
buf += data[:n]
break # depends on [control=['if'], data=['n']]
buf += data
n -= len(data) # depends on [control=['while'], data=[]]
return buf
|
def update_channels(cls, installation_id, channels_to_add=None,
                    channels_to_remove=None, **kw):
    """
    Allow an application to manually subscribe or unsubscribe an
    installation to a certain push channel in a unified operation.

    this is based on:
    https://www.parse.com/docs/rest#installations-updating

    installation_id: the installation id you'd like to add a channel to
    channels_to_add: iterable of channel names to subscribe the user to
    channels_to_remove: iterable of channel names to unsubscribe the user from
    """
    # Avoid mutable default arguments (previously `=set()`); a None
    # sentinel with an empty-tuple fallback preserves the old behaviour.
    channels_to_add = channels_to_add if channels_to_add is not None else ()
    channels_to_remove = (channels_to_remove
                          if channels_to_remove is not None else ())
    installation_url = cls._get_installation_url(installation_id)
    current_config = cls.GET(installation_url)
    # (current U to_add) \ to_remove — removal wins over addition.
    new_channels = list(
        set(current_config['channels'])
        .union(channels_to_add)
        .difference(channels_to_remove))
    cls.PUT(installation_url, channels=new_channels)
|
def function[update_channels, parameter[cls, installation_id, channels_to_add, channels_to_remove]]:
constant[
Allow an application to manually subscribe or unsubscribe an
installation to a certain push channel in a unified operation.
this is based on:
https://www.parse.com/docs/rest#installations-updating
installation_id: the installation id you'd like to add a channel to
channels_to_add: the name of the channel you'd like to subscribe the user to
channels_to_remove: the name of the channel you'd like to unsubscribe the user from
]
variable[installation_url] assign[=] call[name[cls]._get_installation_url, parameter[name[installation_id]]]
variable[current_config] assign[=] call[name[cls].GET, parameter[name[installation_url]]]
variable[new_channels] assign[=] call[name[list], parameter[call[call[call[name[set], parameter[call[name[current_config]][constant[channels]]]].union, parameter[name[channels_to_add]]].difference, parameter[name[channels_to_remove]]]]]
call[name[cls].PUT, parameter[name[installation_url]]]
|
keyword[def] identifier[update_channels] ( identifier[cls] , identifier[installation_id] , identifier[channels_to_add] = identifier[set] (),
identifier[channels_to_remove] = identifier[set] (),** identifier[kw] ):
literal[string]
identifier[installation_url] = identifier[cls] . identifier[_get_installation_url] ( identifier[installation_id] )
identifier[current_config] = identifier[cls] . identifier[GET] ( identifier[installation_url] )
identifier[new_channels] = identifier[list] ( identifier[set] ( identifier[current_config] [ literal[string] ]). identifier[union] ( identifier[channels_to_add] ). identifier[difference] ( identifier[channels_to_remove] ))
identifier[cls] . identifier[PUT] ( identifier[installation_url] , identifier[channels] = identifier[new_channels] )
|
def update_channels(cls, installation_id, channels_to_add=set(), channels_to_remove=set(), **kw):
"""
Allow an application to manually subscribe or unsubscribe an
installation to a certain push channel in a unified operation.
this is based on:
https://www.parse.com/docs/rest#installations-updating
installation_id: the installation id you'd like to add a channel to
channels_to_add: the name of the channel you'd like to subscribe the user to
channels_to_remove: the name of the channel you'd like to unsubscribe the user from
"""
installation_url = cls._get_installation_url(installation_id)
current_config = cls.GET(installation_url)
new_channels = list(set(current_config['channels']).union(channels_to_add).difference(channels_to_remove))
cls.PUT(installation_url, channels=new_channels)
|
def connect(self, broker, port=1883, client_id="", clean_session=True):
    """ Connect to an MQTT broker. This is a pre-requisite step for publish
    and subscribe keywords.
    `broker` MQTT broker host
    `port` broker port (default 1883)
    `client_id` if not specified, a random id is generated
    `clean_session` specifies the clean session flag for the connection
    Examples:
    Connect to a broker with default port and client id
    | Connect | 127.0.0.1 |
    Connect to a broker by specifying the port and client id explicitly
    | Connect | 127.0.0.1 | 1883 | test.client |
    Connect to a broker with clean session flag set to false
    | Connect | 127.0.0.1 | clean_session=${false} |
    """
    logger.info('Connecting to %s at port %s' % (broker, port))
    # State flags updated asynchronously by the on_connect/on_disconnect
    # callbacks registered below.
    self._connected = False
    self._unexpected_disconnect = False
    self._mqttc = mqtt.Client(client_id, clean_session)
    # set callbacks
    self._mqttc.on_connect = self._on_connect
    self._mqttc.on_disconnect = self._on_disconnect
    if self._username:
        self._mqttc.username_pw_set(self._username, self._password)
    self._mqttc.connect(broker, int(port))
    timer_start = time.time()
    # Pump the client's network loop until the broker acknowledges the
    # connection, the client unexpectedly disconnects, or the configured
    # timeout elapses.
    while time.time() < timer_start + self._loop_timeout:
        if self._connected or self._unexpected_disconnect:
            break;
        self._mqttc.loop()
    if self._unexpected_disconnect:
        raise RuntimeError("The client disconnected unexpectedly")
    # NOTE(review): _client_id is a private paho-mqtt attribute — confirm
    # it still exists in the paho version in use.
    logger.debug('client_id: %s' % self._mqttc._client_id)
    return self._mqttc
|
def function[connect, parameter[self, broker, port, client_id, clean_session]]:
constant[ Connect to an MQTT broker. This is a pre-requisite step for publish
and subscribe keywords.
`broker` MQTT broker host
`port` broker port (default 1883)
`client_id` if not specified, a random id is generated
`clean_session` specifies the clean session flag for the connection
Examples:
Connect to a broker with default port and client id
| Connect | 127.0.0.1 |
Connect to a broker by specifying the port and client id explicitly
| Connect | 127.0.0.1 | 1883 | test.client |
Connect to a broker with clean session flag set to false
| Connect | 127.0.0.1 | clean_session=${false} |
]
call[name[logger].info, parameter[binary_operation[constant[Connecting to %s at port %s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1b1be4ee0>, <ast.Name object at 0x7da1b1be5a20>]]]]]
name[self]._connected assign[=] constant[False]
name[self]._unexpected_disconnect assign[=] constant[False]
name[self]._mqttc assign[=] call[name[mqtt].Client, parameter[name[client_id], name[clean_session]]]
name[self]._mqttc.on_connect assign[=] name[self]._on_connect
name[self]._mqttc.on_disconnect assign[=] name[self]._on_disconnect
if name[self]._username begin[:]
call[name[self]._mqttc.username_pw_set, parameter[name[self]._username, name[self]._password]]
call[name[self]._mqttc.connect, parameter[name[broker], call[name[int], parameter[name[port]]]]]
variable[timer_start] assign[=] call[name[time].time, parameter[]]
while compare[call[name[time].time, parameter[]] less[<] binary_operation[name[timer_start] + name[self]._loop_timeout]] begin[:]
if <ast.BoolOp object at 0x7da1b1a2a7d0> begin[:]
break
call[name[self]._mqttc.loop, parameter[]]
if name[self]._unexpected_disconnect begin[:]
<ast.Raise object at 0x7da1b1a28460>
call[name[logger].debug, parameter[binary_operation[constant[client_id: %s] <ast.Mod object at 0x7da2590d6920> name[self]._mqttc._client_id]]]
return[name[self]._mqttc]
|
keyword[def] identifier[connect] ( identifier[self] , identifier[broker] , identifier[port] = literal[int] , identifier[client_id] = literal[string] , identifier[clean_session] = keyword[True] ):
literal[string]
identifier[logger] . identifier[info] ( literal[string] %( identifier[broker] , identifier[port] ))
identifier[self] . identifier[_connected] = keyword[False]
identifier[self] . identifier[_unexpected_disconnect] = keyword[False]
identifier[self] . identifier[_mqttc] = identifier[mqtt] . identifier[Client] ( identifier[client_id] , identifier[clean_session] )
identifier[self] . identifier[_mqttc] . identifier[on_connect] = identifier[self] . identifier[_on_connect]
identifier[self] . identifier[_mqttc] . identifier[on_disconnect] = identifier[self] . identifier[_on_disconnect]
keyword[if] identifier[self] . identifier[_username] :
identifier[self] . identifier[_mqttc] . identifier[username_pw_set] ( identifier[self] . identifier[_username] , identifier[self] . identifier[_password] )
identifier[self] . identifier[_mqttc] . identifier[connect] ( identifier[broker] , identifier[int] ( identifier[port] ))
identifier[timer_start] = identifier[time] . identifier[time] ()
keyword[while] identifier[time] . identifier[time] ()< identifier[timer_start] + identifier[self] . identifier[_loop_timeout] :
keyword[if] identifier[self] . identifier[_connected] keyword[or] identifier[self] . identifier[_unexpected_disconnect] :
keyword[break] ;
identifier[self] . identifier[_mqttc] . identifier[loop] ()
keyword[if] identifier[self] . identifier[_unexpected_disconnect] :
keyword[raise] identifier[RuntimeError] ( literal[string] )
identifier[logger] . identifier[debug] ( literal[string] % identifier[self] . identifier[_mqttc] . identifier[_client_id] )
keyword[return] identifier[self] . identifier[_mqttc]
|
def connect(self, broker, port=1883, client_id='', clean_session=True):
""" Connect to an MQTT broker. This is a pre-requisite step for publish
and subscribe keywords.
`broker` MQTT broker host
`port` broker port (default 1883)
`client_id` if not specified, a random id is generated
`clean_session` specifies the clean session flag for the connection
Examples:
Connect to a broker with default port and client id
| Connect | 127.0.0.1 |
Connect to a broker by specifying the port and client id explicitly
| Connect | 127.0.0.1 | 1883 | test.client |
Connect to a broker with clean session flag set to false
| Connect | 127.0.0.1 | clean_session=${false} |
"""
logger.info('Connecting to %s at port %s' % (broker, port))
self._connected = False
self._unexpected_disconnect = False
self._mqttc = mqtt.Client(client_id, clean_session)
# set callbacks
self._mqttc.on_connect = self._on_connect
self._mqttc.on_disconnect = self._on_disconnect
if self._username:
self._mqttc.username_pw_set(self._username, self._password) # depends on [control=['if'], data=[]]
self._mqttc.connect(broker, int(port))
timer_start = time.time()
while time.time() < timer_start + self._loop_timeout:
if self._connected or self._unexpected_disconnect:
break # depends on [control=['if'], data=[]]
self._mqttc.loop() # depends on [control=['while'], data=[]]
if self._unexpected_disconnect:
raise RuntimeError('The client disconnected unexpectedly') # depends on [control=['if'], data=[]]
logger.debug('client_id: %s' % self._mqttc._client_id)
return self._mqttc
|
def rpc_get_calltip(self, filename, source, offset):
    """Get the calltip for the function at the offset."""
    normalized = get_source(source)
    return self._call_backend(
        "rpc_get_calltip", None, filename, normalized, offset)
|
def function[rpc_get_calltip, parameter[self, filename, source, offset]]:
constant[Get the calltip for the function at the offset.
]
return[call[name[self]._call_backend, parameter[constant[rpc_get_calltip], constant[None], name[filename], call[name[get_source], parameter[name[source]]], name[offset]]]]
|
keyword[def] identifier[rpc_get_calltip] ( identifier[self] , identifier[filename] , identifier[source] , identifier[offset] ):
literal[string]
keyword[return] identifier[self] . identifier[_call_backend] ( literal[string] , keyword[None] , identifier[filename] ,
identifier[get_source] ( identifier[source] ), identifier[offset] )
|
def rpc_get_calltip(self, filename, source, offset):
"""Get the calltip for the function at the offset.
"""
return self._call_backend('rpc_get_calltip', None, filename, get_source(source), offset)
|
def post_run_cell(self):
    """Runs after the user-entered code in a cell has been executed. It
    detects any new, decoratable objects that haven't been decorated yet and
    then decorates them.
    """
    # Decorate any new, decoratable objects that haven't been decorated yet.
    # (A previous `decorlist` accumulator was built here but never read; it
    # has been removed as dead code.)
    for atype in self.atypes:
        for n, o in self._get_decoratables(atype):
            self._decorate(atype, n, o)
    # Next, check whether we have an outstanding "loop intercept" that we
    # "wrapped" with respect to acorn by enabling streamlining.
    if self.pre is not None:
        # Re-enable the acorn logging systems so that it gets back to normal.
        from acorn.logging.decoration import set_streamlining
        set_streamlining(False)
        from acorn import msg
        from acorn.logging.database import record
        from time import time
        # Determine the elapsed time for the execution of the entire cell.
        entry = self.pre
        entry["e"] = time() - entry["s"]
        # See if we can match the executed cell's code up with one that we
        # intercepted in the past.
        cellid = self._find_cellid(entry["c"])
        if cellid is None:
            cellid = self.cellid
        # Store the contents of the cell *before* they get overwritten by a
        # diff.
        self.cellids[cellid] = entry["c"]
        record("__main__.{0:d}".format(cellid), entry, diff=True)
        msg.info(entry, 1)
        self.pre = None
    # Finally, check whether any new variables have shown up, or have had
    # their values changed; log UUIDs for tracked instances.
    from acorn.logging.database import tracker, active_db, Instance
    varchange = self._var_changes()
    taskdb = active_db()
    for n, o in varchange:
        otrack = tracker(o)
        if isinstance(otrack, Instance):
            taskdb.log_uuid(otrack.uuid)
    global thumb_uuid
    if thumb_uuid is not None:
        # Images were captured during this cell; hand them to _log_images.
        self._log_images()
        # Reset the image tracker list so that we don't save these images
        # again next cell execution.
        thumb_uuid = None
    self.cellid = None
|
def function[post_run_cell, parameter[self]]:
constant[Runs after the user-entered code in a cell has been executed. It
detects any new, decoratable objects that haven't been decorated yet and
then decorates them.
]
variable[decorlist] assign[=] <ast.DictComp object at 0x7da1b14e7190>
for taget[name[atype]] in starred[name[self].atypes] begin[:]
for taget[tuple[[<ast.Name object at 0x7da1b14e4190>, <ast.Name object at 0x7da1b14e4dc0>]]] in starred[call[name[self]._get_decoratables, parameter[name[atype]]]] begin[:]
call[name[self]._decorate, parameter[name[atype], name[n], name[o]]]
if compare[name[self].pre is_not constant[None]] begin[:]
from relative_module[acorn.logging.decoration] import module[set_streamlining]
call[name[set_streamlining], parameter[constant[False]]]
from relative_module[acorn] import module[msg]
from relative_module[acorn.logging.database] import module[record]
from relative_module[time] import module[time]
variable[entry] assign[=] name[self].pre
call[name[entry]][constant[e]] assign[=] binary_operation[call[name[time], parameter[]] - call[name[entry]][constant[s]]]
variable[cellid] assign[=] call[name[self]._find_cellid, parameter[call[name[entry]][constant[c]]]]
if compare[name[cellid] is constant[None]] begin[:]
variable[cellid] assign[=] name[self].cellid
call[name[self].cellids][name[cellid]] assign[=] call[name[entry]][constant[c]]
call[name[record], parameter[call[constant[__main__.{0:d}].format, parameter[name[cellid]]], name[entry]]]
call[name[msg].info, parameter[name[entry], constant[1]]]
name[self].pre assign[=] constant[None]
from relative_module[acorn.logging.database] import module[tracker], module[active_db], module[Instance]
variable[varchange] assign[=] call[name[self]._var_changes, parameter[]]
variable[taskdb] assign[=] call[name[active_db], parameter[]]
for taget[tuple[[<ast.Name object at 0x7da1b14e5960>, <ast.Name object at 0x7da1b14e5ed0>]]] in starred[name[varchange]] begin[:]
variable[otrack] assign[=] call[name[tracker], parameter[name[o]]]
if call[name[isinstance], parameter[name[otrack], name[Instance]]] begin[:]
call[name[taskdb].log_uuid, parameter[name[otrack].uuid]]
<ast.Global object at 0x7da1b14e46a0>
if compare[name[thumb_uuid] is_not constant[None]] begin[:]
call[name[self]._log_images, parameter[]]
variable[thumb_uuid] assign[=] constant[None]
name[self].cellid assign[=] constant[None]
|
keyword[def] identifier[post_run_cell] ( identifier[self] ):
literal[string]
identifier[decorlist] ={ identifier[k] :[] keyword[for] identifier[k] keyword[in] identifier[self] . identifier[atypes] }
keyword[for] identifier[atype] keyword[in] identifier[self] . identifier[atypes] :
keyword[for] identifier[n] , identifier[o] keyword[in] identifier[self] . identifier[_get_decoratables] ( identifier[atype] ):
identifier[self] . identifier[_decorate] ( identifier[atype] , identifier[n] , identifier[o] )
keyword[if] identifier[self] . identifier[pre] keyword[is] keyword[not] keyword[None] :
keyword[from] identifier[acorn] . identifier[logging] . identifier[decoration] keyword[import] identifier[set_streamlining]
identifier[set_streamlining] ( keyword[False] )
keyword[from] identifier[acorn] keyword[import] identifier[msg]
keyword[from] identifier[acorn] . identifier[logging] . identifier[database] keyword[import] identifier[record]
keyword[from] identifier[time] keyword[import] identifier[time]
identifier[entry] = identifier[self] . identifier[pre]
identifier[entry] [ literal[string] ]= identifier[time] ()- identifier[entry] [ literal[string] ]
identifier[cellid] = identifier[self] . identifier[_find_cellid] ( identifier[entry] [ literal[string] ])
keyword[if] identifier[cellid] keyword[is] keyword[None] :
identifier[cellid] = identifier[self] . identifier[cellid]
identifier[self] . identifier[cellids] [ identifier[cellid] ]= identifier[entry] [ literal[string] ]
identifier[record] ( literal[string] . identifier[format] ( identifier[cellid] ), identifier[entry] , identifier[diff] = keyword[True] )
identifier[msg] . identifier[info] ( identifier[entry] , literal[int] )
identifier[self] . identifier[pre] = keyword[None]
keyword[from] identifier[acorn] . identifier[logging] . identifier[database] keyword[import] identifier[tracker] , identifier[active_db] , identifier[Instance]
identifier[varchange] = identifier[self] . identifier[_var_changes] ()
identifier[taskdb] = identifier[active_db] ()
keyword[for] identifier[n] , identifier[o] keyword[in] identifier[varchange] :
identifier[otrack] = identifier[tracker] ( identifier[o] )
keyword[if] identifier[isinstance] ( identifier[otrack] , identifier[Instance] ):
identifier[taskdb] . identifier[log_uuid] ( identifier[otrack] . identifier[uuid] )
keyword[global] identifier[thumb_uuid]
keyword[if] identifier[thumb_uuid] keyword[is] keyword[not] keyword[None] :
identifier[self] . identifier[_log_images] ()
identifier[thumb_uuid] = keyword[None]
identifier[self] . identifier[cellid] = keyword[None]
|
def post_run_cell(self):
"""Runs after the user-entered code in a cell has been executed. It
detects any new, decoratable objects that haven't been decorated yet and
then decorates them.
"""
#We just want to detect any new, decoratable objects that haven't been
#decorated yet.
decorlist = {k: [] for k in self.atypes}
for atype in self.atypes:
for (n, o) in self._get_decoratables(atype):
self._decorate(atype, n, o) # depends on [control=['for'], data=[]] # depends on [control=['for'], data=['atype']]
#Next, check whether we have an outstanding "loop intercept" that we
#"wrapped" with respect to acorn by enabling streamlining.
if self.pre is not None:
#Re-enable the acorn logging systems so that it gets back to normal.
from acorn.logging.decoration import set_streamlining
set_streamlining(False)
from acorn import msg
from acorn.logging.database import record
from time import time
#Determine the elapsed time for the execution of the entire cell.
entry = self.pre
entry['e'] = time() - entry['s']
#See if we can match the executed cell's code up with one that we
#intercepted in the past..
cellid = self._find_cellid(entry['c'])
if cellid is None:
cellid = self.cellid # depends on [control=['if'], data=['cellid']]
#Store the contents of the cell *before* they get overwritten by a
#diff.
self.cellids[cellid] = entry['c']
record('__main__.{0:d}'.format(cellid), entry, diff=True)
msg.info(entry, 1)
self.pre = None # depends on [control=['if'], data=[]]
#Finally, check whether any new variables have shown up, or have had
#their values changed.
from acorn.logging.database import tracker, active_db, Instance
varchange = self._var_changes()
taskdb = active_db()
for (n, o) in varchange:
otrack = tracker(o)
if isinstance(otrack, Instance):
taskdb.log_uuid(otrack.uuid) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]]
global thumb_uuid
if thumb_uuid is not None:
self._log_images()
#Reset the image tracker list so that we don't save these images
#again next cell execution.
thumb_uuid = None # depends on [control=['if'], data=['thumb_uuid']]
self.cellid = None
|
def channel_axis(self, batch):
    """Interface to model.channel_axis for attacks.

    Parameters
    ----------
    batch : bool
        Controls whether the index of the axis for a batch of images
        (4 dimensions) or a single image (3 dimensions) should be returned.
    """
    model_axis = self.__model.channel_axis()
    # A single image drops the leading batch dimension, shifting the axis
    # index down by one.
    return model_axis if batch else model_axis - 1
|
def function[channel_axis, parameter[self, batch]]:
constant[Interface to model.channel_axis for attacks.
Parameters
----------
batch : bool
Controls whether the index of the axis for a batch of images
(4 dimensions) or a single image (3 dimensions) should be returned.
]
variable[axis] assign[=] call[name[self].__model.channel_axis, parameter[]]
if <ast.UnaryOp object at 0x7da20c76e230> begin[:]
variable[axis] assign[=] binary_operation[name[axis] - constant[1]]
return[name[axis]]
|
keyword[def] identifier[channel_axis] ( identifier[self] , identifier[batch] ):
literal[string]
identifier[axis] = identifier[self] . identifier[__model] . identifier[channel_axis] ()
keyword[if] keyword[not] identifier[batch] :
identifier[axis] = identifier[axis] - literal[int]
keyword[return] identifier[axis]
|
def channel_axis(self, batch):
"""Interface to model.channel_axis for attacks.
Parameters
----------
batch : bool
Controls whether the index of the axis for a batch of images
(4 dimensions) or a single image (3 dimensions) should be returned.
"""
axis = self.__model.channel_axis()
if not batch:
axis = axis - 1 # depends on [control=['if'], data=[]]
return axis
|
def has_function(function_name, libraries=None):
    """Return whether *function_name* can be compiled and linked on the
    current platform (optionally against *libraries*)."""
    cc = distutils.ccompiler.new_compiler()
    # Silence the compiler's chatter on stdout/stderr during the probe.
    with muted(sys.stdout, sys.stderr):
        found = cc.has_function(function_name, libraries=libraries)
    # The probe may leave a stray executable behind; clean it up.
    artifact = 'a.out'
    if os.path.exists(artifact):
        os.remove(artifact)
    return found
|
def function[has_function, parameter[function_name, libraries]]:
constant[Checks if a given functions exists in the current platform.]
variable[compiler] assign[=] call[name[distutils].ccompiler.new_compiler, parameter[]]
with call[name[muted], parameter[name[sys].stdout, name[sys].stderr]] begin[:]
variable[result] assign[=] call[name[compiler].has_function, parameter[name[function_name]]]
if call[name[os].path.exists, parameter[constant[a.out]]] begin[:]
call[name[os].remove, parameter[constant[a.out]]]
return[name[result]]
|
keyword[def] identifier[has_function] ( identifier[function_name] , identifier[libraries] = keyword[None] ):
literal[string]
identifier[compiler] = identifier[distutils] . identifier[ccompiler] . identifier[new_compiler] ()
keyword[with] identifier[muted] ( identifier[sys] . identifier[stdout] , identifier[sys] . identifier[stderr] ):
identifier[result] = identifier[compiler] . identifier[has_function] (
identifier[function_name] , identifier[libraries] = identifier[libraries] )
keyword[if] identifier[os] . identifier[path] . identifier[exists] ( literal[string] ):
identifier[os] . identifier[remove] ( literal[string] )
keyword[return] identifier[result]
|
def has_function(function_name, libraries=None):
"""Checks if a given functions exists in the current platform."""
compiler = distutils.ccompiler.new_compiler()
with muted(sys.stdout, sys.stderr):
result = compiler.has_function(function_name, libraries=libraries) # depends on [control=['with'], data=[]]
if os.path.exists('a.out'):
os.remove('a.out') # depends on [control=['if'], data=[]]
return result
|
def delete_event_ticket_class(self, id, ticket_class_id, **data):
    """
    DELETE /events/:id/ticket_classes/:ticket_class_id/
    Deletes the ticket class. Returns ``{"deleted": true}``.

    :param id: the event id
    :param ticket_class_id: the ticket class to delete
    """
    # Bug fix: the second placeholder must be {1}, otherwise the event id was
    # interpolated twice and ticket_class_id never reached the URL.
    return self.delete(
        "/events/{0}/ticket_classes/{1}/".format(id, ticket_class_id),
        data=data)
|
def function[delete_event_ticket_class, parameter[self, id, ticket_class_id]]:
constant[
DELETE /events/:id/ticket_classes/:ticket_class_id/
Deletes the ticket class. Returns ``{"deleted": true}``.
]
return[call[name[self].delete, parameter[call[constant[/events/{0}/ticket_classes/{0}/].format, parameter[name[id], name[ticket_class_id]]]]]]
|
keyword[def] identifier[delete_event_ticket_class] ( identifier[self] , identifier[id] , identifier[ticket_class_id] ,** identifier[data] ):
literal[string]
keyword[return] identifier[self] . identifier[delete] ( literal[string] . identifier[format] ( identifier[id] , identifier[ticket_class_id] ), identifier[data] = identifier[data] )
|
def delete_event_ticket_class(self, id, ticket_class_id, **data):
    """
    DELETE /events/:id/ticket_classes/:ticket_class_id/
    Deletes the ticket class. Returns ``{"deleted": true}``.
    """
    # Bug fix: use {1} for the second placeholder so ticket_class_id (not the
    # event id again) lands in the URL path.
    return self.delete('/events/{0}/ticket_classes/{1}/'.format(id, ticket_class_id), data=data)
|
def insert_order(self, order):
    """Register *order* in the in-memory order book, keyed by its order_id.

    :param order: a QA_Order instance (must expose ``order_id``), or None
    :return: the same order on success; ``None`` when *order* is None
    """
    if order is None:
        # Keep the legacy behaviour: complain loudly but do not raise.
        print('QAERROR Wrong for get None type while insert order to Queue')
        return None
    self.order_list[order.order_id] = order
    return order
|
def function[insert_order, parameter[self, order]]:
constant[
:param order: QA_Order类型
:return:
]
if compare[name[order] is_not constant[None]] begin[:]
call[name[self].order_list][name[order].order_id] assign[=] name[order]
return[name[order]]
|
keyword[def] identifier[insert_order] ( identifier[self] , identifier[order] ):
literal[string]
keyword[if] identifier[order] keyword[is] keyword[not] keyword[None] :
identifier[self] . identifier[order_list] [ identifier[order] . identifier[order_id] ]= identifier[order]
keyword[return] identifier[order]
keyword[else] :
identifier[print] ( literal[string] )
|
def insert_order(self, order):
"""
:param order: QA_Order类型
:return:
"""
#print(" *>> QAOrder!insert_order {}".format(order))
# QUEUED = 300 # queued 用于表示在order_queue中 实际表达的意思是订单存活 待成交
#order.status = ORDER_STATUS.QUEUED
# 🛠 todo 是为了速度快把order对象转换成 df 对象的吗?
#self.queue_df = self.queue_df.append(order.to_df(), ignore_index=True)
#self.queue_df.set_index('order_id', drop=True, inplace=True)
if order is not None:
self.order_list[order.order_id] = order
return order # depends on [control=['if'], data=['order']]
else:
print('QAERROR Wrong for get None type while insert order to Queue')
|
def set_time_items(glob):
    """
    This function prepares the benchmark items for inclusion in main script's
    global scope.
    :param glob: main script's global scope dictionary reference
    """
    a = glob['args']
    l = glob['logger']
    # Holds all wall-clock timing state; one instance is published into the
    # main script's scope under the key 'time_manager'.
    class __TimeManager(object):
        def __init__(self):
            c = a._collisions
            # Honour argument-name collisions: fall back to the default
            # attribute names when no remapped name is registered.
            self._stats = getattr(a, c.get("stats") or "stats", False)
            self._timings = getattr(a, c.get("timings") or "timings", False)
            self.enabled = self._stats or self._timings
            self.last = self.start = time.time()
            self.times = []
        def stats(self):
            # Log the total elapsed time plus one line per recorded span.
            end = time.time()
            b = ""
            for d, s, e in self.times:
                b += "\n{}\n> {} seconds".format(d, e - s)
            l.time("Total time: {} seconds{}".format(end - self.start, b))
    glob['time_manager'] = manager = __TimeManager()
    # Record "now", optionally append a (description, start, now) span to the
    # manager, and return the time elapsed since `start` (or since epoch 0).
    def _take_time(start=None, descr=None):
        t = manager.last = time.time()
        if start is not None and descr is not None:
            manager.times.append((descr, float(start), float(t)))
        return t - (start or 0)
    # Context manager that times one code section, optionally aborting it via
    # a SIGALRM-based timeout.
    class Timer(object):
        class TimeoutError(Exception):
            pass # TimeoutError is not handled in Python 2
        def __init__(self, description=None, message=TO_MSG, timeout=None,
                     fail_on_timeout=False):
            self.fail = fail_on_timeout
            self.id = len(manager.times)
            self.descr = "#" + str(self.id) + \
                (": " + (description or "")).rstrip(": ")
            self.message = message
            self.start = _take_time()
            self.timeout = timeout
        def __enter__(self):
            if manager.enabled:
                if self.timeout is not None:
                    # Arm an alarm that interrupts the block on timeout.
                    signal.signal(signal.SIGALRM, self._handler)
                    signal.alarm(self.timeout)
                if manager._timings and self.descr:
                    l.time(self.descr)
            return self
        def __exit__(self, exc_type, exc_value, exc_traceback):
            if manager.enabled:
                d = _take_time(self.start, self.descr)
                if manager._timings:
                    l.time("> Time elapsed: {} seconds".format(d))
                if self.timeout is not None:
                    if self.fail and exc_type is Timer.TimeoutError:
                        # Swallow the timeout exception when requested.
                        return True
        def _handler(self, signum, frame):
            # SIGALRM handler: convert the alarm into a TimeoutError.
            raise Timer.TimeoutError(self.message)
    glob['Timer'] = Timer
    # NOTE: `start=manager.start` is evaluated once at definition time, so the
    # default always measures from overall execution start.
    def get_time(message=None, start=manager.start):
        if manager._timings:
            l.time("> {}: {} seconds".format(message or "Time elapsed since "
                   "execution start", _take_time(start)))
    glob['get_time'] = get_time
    def get_time_since_last(message=None):
        get_time(message or "Time elapsed since last measure", manager.last)
    glob['get_time_since_last'] = get_time_since_last
|
def function[set_time_items, parameter[glob]]:
constant[
This function prepares the benchmark items for inclusion in main script's
global scope.
:param glob: main script's global scope dictionary reference
]
variable[a] assign[=] call[name[glob]][constant[args]]
variable[l] assign[=] call[name[glob]][constant[logger]]
class class[__TimeManager, parameter[]] begin[:]
def function[__init__, parameter[self]]:
variable[c] assign[=] name[a]._collisions
name[self]._stats assign[=] call[name[getattr], parameter[name[a], <ast.BoolOp object at 0x7da1b195c0a0>, constant[False]]]
name[self]._timings assign[=] call[name[getattr], parameter[name[a], <ast.BoolOp object at 0x7da1b195ee30>, constant[False]]]
name[self].enabled assign[=] <ast.BoolOp object at 0x7da1b195c8b0>
name[self].last assign[=] call[name[time].time, parameter[]]
name[self].times assign[=] list[[]]
def function[stats, parameter[self]]:
variable[end] assign[=] call[name[time].time, parameter[]]
variable[b] assign[=] constant[]
for taget[tuple[[<ast.Name object at 0x7da1b19b6c50>, <ast.Name object at 0x7da1b19b7bb0>, <ast.Name object at 0x7da1b19b65c0>]]] in starred[name[self].times] begin[:]
<ast.AugAssign object at 0x7da1b19b6f80>
call[name[l].time, parameter[call[constant[Total time: {} seconds{}].format, parameter[binary_operation[name[end] - name[self].start], name[b]]]]]
call[name[glob]][constant[time_manager]] assign[=] call[name[__TimeManager], parameter[]]
def function[_take_time, parameter[start, descr]]:
variable[t] assign[=] call[name[time].time, parameter[]]
if <ast.BoolOp object at 0x7da1b19b7190> begin[:]
call[name[manager].times.append, parameter[tuple[[<ast.Name object at 0x7da1b19b5ed0>, <ast.Call object at 0x7da1b19b4580>, <ast.Call object at 0x7da1b19b6a40>]]]]
return[binary_operation[name[t] - <ast.BoolOp object at 0x7da1b19b64a0>]]
class class[Timer, parameter[]] begin[:]
class class[TimeoutError, parameter[]] begin[:]
pass
def function[__init__, parameter[self, description, message, timeout, fail_on_timeout]]:
name[self].fail assign[=] name[fail_on_timeout]
name[self].id assign[=] call[name[len], parameter[name[manager].times]]
name[self].descr assign[=] binary_operation[binary_operation[constant[#] + call[name[str], parameter[name[self].id]]] + call[binary_operation[constant[: ] + <ast.BoolOp object at 0x7da1b19b7eb0>].rstrip, parameter[constant[: ]]]]
name[self].message assign[=] name[message]
name[self].start assign[=] call[name[_take_time], parameter[]]
name[self].timeout assign[=] name[timeout]
def function[__enter__, parameter[self]]:
if name[manager].enabled begin[:]
if compare[name[self].timeout is_not constant[None]] begin[:]
call[name[signal].signal, parameter[name[signal].SIGALRM, name[self]._handler]]
call[name[signal].alarm, parameter[name[self].timeout]]
if <ast.BoolOp object at 0x7da20c76df30> begin[:]
call[name[l].time, parameter[name[self].descr]]
return[name[self]]
def function[__exit__, parameter[self, exc_type, exc_value, exc_traceback]]:
if name[manager].enabled begin[:]
variable[d] assign[=] call[name[_take_time], parameter[name[self].start, name[self].descr]]
if name[manager]._timings begin[:]
call[name[l].time, parameter[call[constant[> Time elapsed: {} seconds].format, parameter[name[d]]]]]
if compare[name[self].timeout is_not constant[None]] begin[:]
if <ast.BoolOp object at 0x7da1b1a4ae30> begin[:]
return[constant[True]]
def function[_handler, parameter[self, signum, frame]]:
<ast.Raise object at 0x7da1b1a4b160>
call[name[glob]][constant[Timer]] assign[=] name[Timer]
def function[get_time, parameter[message, start]]:
if name[manager]._timings begin[:]
call[name[l].time, parameter[call[constant[> {}: {} seconds].format, parameter[<ast.BoolOp object at 0x7da1b1a4b130>, call[name[_take_time], parameter[name[start]]]]]]]
call[name[glob]][constant[get_time]] assign[=] name[get_time]
def function[get_time_since_last, parameter[message]]:
call[name[get_time], parameter[<ast.BoolOp object at 0x7da1b1a49d20>, name[manager].last]]
call[name[glob]][constant[get_time_since_last]] assign[=] name[get_time_since_last]
|
keyword[def] identifier[set_time_items] ( identifier[glob] ):
literal[string]
identifier[a] = identifier[glob] [ literal[string] ]
identifier[l] = identifier[glob] [ literal[string] ]
keyword[class] identifier[__TimeManager] ( identifier[object] ):
keyword[def] identifier[__init__] ( identifier[self] ):
identifier[c] = identifier[a] . identifier[_collisions]
identifier[self] . identifier[_stats] = identifier[getattr] ( identifier[a] , identifier[c] . identifier[get] ( literal[string] ) keyword[or] literal[string] , keyword[False] )
identifier[self] . identifier[_timings] = identifier[getattr] ( identifier[a] , identifier[c] . identifier[get] ( literal[string] ) keyword[or] literal[string] , keyword[False] )
identifier[self] . identifier[enabled] = identifier[self] . identifier[_stats] keyword[or] identifier[self] . identifier[_timings]
identifier[self] . identifier[last] = identifier[self] . identifier[start] = identifier[time] . identifier[time] ()
identifier[self] . identifier[times] =[]
keyword[def] identifier[stats] ( identifier[self] ):
identifier[end] = identifier[time] . identifier[time] ()
identifier[b] = literal[string]
keyword[for] identifier[d] , identifier[s] , identifier[e] keyword[in] identifier[self] . identifier[times] :
identifier[b] += literal[string] . identifier[format] ( identifier[d] , identifier[e] - identifier[s] )
identifier[l] . identifier[time] ( literal[string] . identifier[format] ( identifier[end] - identifier[self] . identifier[start] , identifier[b] ))
identifier[glob] [ literal[string] ]= identifier[manager] = identifier[__TimeManager] ()
keyword[def] identifier[_take_time] ( identifier[start] = keyword[None] , identifier[descr] = keyword[None] ):
identifier[t] = identifier[manager] . identifier[last] = identifier[time] . identifier[time] ()
keyword[if] identifier[start] keyword[is] keyword[not] keyword[None] keyword[and] identifier[descr] keyword[is] keyword[not] keyword[None] :
identifier[manager] . identifier[times] . identifier[append] (( identifier[descr] , identifier[float] ( identifier[start] ), identifier[float] ( identifier[t] )))
keyword[return] identifier[t] -( identifier[start] keyword[or] literal[int] )
keyword[class] identifier[Timer] ( identifier[object] ):
keyword[class] identifier[TimeoutError] ( identifier[Exception] ):
keyword[pass]
keyword[def] identifier[__init__] ( identifier[self] , identifier[description] = keyword[None] , identifier[message] = identifier[TO_MSG] , identifier[timeout] = keyword[None] ,
identifier[fail_on_timeout] = keyword[False] ):
identifier[self] . identifier[fail] = identifier[fail_on_timeout]
identifier[self] . identifier[id] = identifier[len] ( identifier[manager] . identifier[times] )
identifier[self] . identifier[descr] = literal[string] + identifier[str] ( identifier[self] . identifier[id] )+( literal[string] +( identifier[description] keyword[or] literal[string] )). identifier[rstrip] ( literal[string] )
identifier[self] . identifier[message] = identifier[message]
identifier[self] . identifier[start] = identifier[_take_time] ()
identifier[self] . identifier[timeout] = identifier[timeout]
keyword[def] identifier[__enter__] ( identifier[self] ):
keyword[if] identifier[manager] . identifier[enabled] :
keyword[if] identifier[self] . identifier[timeout] keyword[is] keyword[not] keyword[None] :
identifier[signal] . identifier[signal] ( identifier[signal] . identifier[SIGALRM] , identifier[self] . identifier[_handler] )
identifier[signal] . identifier[alarm] ( identifier[self] . identifier[timeout] )
keyword[if] identifier[manager] . identifier[_timings] keyword[and] identifier[self] . identifier[descr] :
identifier[l] . identifier[time] ( identifier[self] . identifier[descr] )
keyword[return] identifier[self]
keyword[def] identifier[__exit__] ( identifier[self] , identifier[exc_type] , identifier[exc_value] , identifier[exc_traceback] ):
keyword[if] identifier[manager] . identifier[enabled] :
identifier[d] = identifier[_take_time] ( identifier[self] . identifier[start] , identifier[self] . identifier[descr] )
keyword[if] identifier[manager] . identifier[_timings] :
identifier[l] . identifier[time] ( literal[string] . identifier[format] ( identifier[d] ))
keyword[if] identifier[self] . identifier[timeout] keyword[is] keyword[not] keyword[None] :
keyword[if] identifier[self] . identifier[fail] keyword[and] identifier[exc_type] keyword[is] identifier[Timer] . identifier[TimeoutError] :
keyword[return] keyword[True]
keyword[def] identifier[_handler] ( identifier[self] , identifier[signum] , identifier[frame] ):
keyword[raise] identifier[Timer] . identifier[TimeoutError] ( identifier[self] . identifier[message] )
identifier[glob] [ literal[string] ]= identifier[Timer]
keyword[def] identifier[get_time] ( identifier[message] = keyword[None] , identifier[start] = identifier[manager] . identifier[start] ):
keyword[if] identifier[manager] . identifier[_timings] :
identifier[l] . identifier[time] ( literal[string] . identifier[format] ( identifier[message] keyword[or] literal[string]
literal[string] , identifier[_take_time] ( identifier[start] )))
identifier[glob] [ literal[string] ]= identifier[get_time]
keyword[def] identifier[get_time_since_last] ( identifier[message] = keyword[None] ):
identifier[get_time] ( identifier[message] keyword[or] literal[string] , identifier[manager] . identifier[last] )
identifier[glob] [ literal[string] ]= identifier[get_time_since_last]
|
def set_time_items(glob):
"""
This function prepares the benchmark items for inclusion in main script's
global scope.
:param glob: main script's global scope dictionary reference
"""
a = glob['args']
l = glob['logger']
class __TimeManager(object):
def __init__(self):
c = a._collisions
self._stats = getattr(a, c.get('stats') or 'stats', False)
self._timings = getattr(a, c.get('timings') or 'timings', False)
self.enabled = self._stats or self._timings
self.last = self.start = time.time()
self.times = []
def stats(self):
end = time.time()
b = ''
for (d, s, e) in self.times:
b += '\n{}\n> {} seconds'.format(d, e - s) # depends on [control=['for'], data=[]]
l.time('Total time: {} seconds{}'.format(end - self.start, b))
glob['time_manager'] = manager = __TimeManager()
def _take_time(start=None, descr=None):
t = manager.last = time.time()
if start is not None and descr is not None:
manager.times.append((descr, float(start), float(t))) # depends on [control=['if'], data=[]]
return t - (start or 0)
class Timer(object):
class TimeoutError(Exception):
pass # TimeoutError is not handled in Python 2
def __init__(self, description=None, message=TO_MSG, timeout=None, fail_on_timeout=False):
self.fail = fail_on_timeout
self.id = len(manager.times)
self.descr = '#' + str(self.id) + (': ' + (description or '')).rstrip(': ')
self.message = message
self.start = _take_time()
self.timeout = timeout
def __enter__(self):
if manager.enabled:
if self.timeout is not None:
signal.signal(signal.SIGALRM, self._handler)
signal.alarm(self.timeout) # depends on [control=['if'], data=[]]
if manager._timings and self.descr:
l.time(self.descr) # depends on [control=['if'], data=[]]
return self # depends on [control=['if'], data=[]]
def __exit__(self, exc_type, exc_value, exc_traceback):
if manager.enabled:
d = _take_time(self.start, self.descr)
if manager._timings:
l.time('> Time elapsed: {} seconds'.format(d)) # depends on [control=['if'], data=[]]
if self.timeout is not None:
if self.fail and exc_type is Timer.TimeoutError:
return True # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
def _handler(self, signum, frame):
raise Timer.TimeoutError(self.message)
glob['Timer'] = Timer
def get_time(message=None, start=manager.start):
if manager._timings:
l.time('> {}: {} seconds'.format(message or 'Time elapsed since execution start', _take_time(start))) # depends on [control=['if'], data=[]]
glob['get_time'] = get_time
def get_time_since_last(message=None):
get_time(message or 'Time elapsed since last measure', manager.last)
glob['get_time_since_last'] = get_time_since_last
|
def visit_NameConstant(self, node: ast.NameConstant) -> Any:
    """Record the literal value carried by *node* and return it."""
    value = node.value
    self.recomputed_values[node] = value
    return value
|
def function[visit_NameConstant, parameter[self, node]]:
constant[Forward the node value as a result.]
call[name[self].recomputed_values][name[node]] assign[=] name[node].value
return[name[node].value]
|
keyword[def] identifier[visit_NameConstant] ( identifier[self] , identifier[node] : identifier[ast] . identifier[NameConstant] )-> identifier[Any] :
literal[string]
identifier[self] . identifier[recomputed_values] [ identifier[node] ]= identifier[node] . identifier[value]
keyword[return] identifier[node] . identifier[value]
|
def visit_NameConstant(self, node: ast.NameConstant) -> Any:
"""Forward the node value as a result."""
self.recomputed_values[node] = node.value
return node.value
|
def find_task_by_id(self, id, session=None):
    """
    Look up and return the task record with primary key *id*.
    """
    with self._session(session) as active:
        return active.query(TaskRecord).get(id)
|
def function[find_task_by_id, parameter[self, id, session]]:
constant[
Find task with the given record ID.
]
with call[name[self]._session, parameter[name[session]]] begin[:]
return[call[call[name[session].query, parameter[name[TaskRecord]]].get, parameter[name[id]]]]
|
keyword[def] identifier[find_task_by_id] ( identifier[self] , identifier[id] , identifier[session] = keyword[None] ):
literal[string]
keyword[with] identifier[self] . identifier[_session] ( identifier[session] ) keyword[as] identifier[session] :
keyword[return] identifier[session] . identifier[query] ( identifier[TaskRecord] ). identifier[get] ( identifier[id] )
|
def find_task_by_id(self, id, session=None):
    """
    Find task with the given record ID.

    :param id: primary-key value of the TaskRecord to fetch.
    :param session: optional pre-existing session; when ``None`` one is
        obtained from ``self._session``.
    """
    with self._session(session) as session:
        return session.query(TaskRecord).get(id) # depends on [control=['with'], data=['session']]
|
def _get_daily_message(self, dt, algo, metrics_tracker):
    """Build the end-of-day performance message for ``dt``.

    The tracker's market-close handler produces the base message, which
    is then augmented with the algorithm's recorded variables.
    """
    message = metrics_tracker.handle_market_close(dt, self.data_portal)
    daily = message['daily_perf']
    daily['recorded_vars'] = algo.recorded_vars
    return message
|
def function[_get_daily_message, parameter[self, dt, algo, metrics_tracker]]:
constant[
Get a perf message for the given datetime.
]
variable[perf_message] assign[=] call[name[metrics_tracker].handle_market_close, parameter[name[dt], name[self].data_portal]]
call[call[name[perf_message]][constant[daily_perf]]][constant[recorded_vars]] assign[=] name[algo].recorded_vars
return[name[perf_message]]
|
keyword[def] identifier[_get_daily_message] ( identifier[self] , identifier[dt] , identifier[algo] , identifier[metrics_tracker] ):
literal[string]
identifier[perf_message] = identifier[metrics_tracker] . identifier[handle_market_close] (
identifier[dt] ,
identifier[self] . identifier[data_portal] ,
)
identifier[perf_message] [ literal[string] ][ literal[string] ]= identifier[algo] . identifier[recorded_vars]
keyword[return] identifier[perf_message]
|
def _get_daily_message(self, dt, algo, metrics_tracker):
    """
    Get a perf message for the given datetime.

    The message comes from the metrics tracker's market-close handler
    and is augmented with the algorithm's recorded variables under
    ``['daily_perf']['recorded_vars']``.
    """
    perf_message = metrics_tracker.handle_market_close(dt, self.data_portal)
    perf_message['daily_perf']['recorded_vars'] = algo.recorded_vars
    return perf_message
|
def read_format_from_metadata(text, ext):
    """Return the file's format name when the metadata declares one."""
    meta = read_metadata(text, ext)
    # Normalize the jupytext metadata in place before looking up the name.
    rearrange_jupytext_metadata(meta)
    return format_name_for_ext(meta, ext, explicit_default=False)
|
def function[read_format_from_metadata, parameter[text, ext]]:
constant[Return the format of the file, when that information is available from the metadata]
variable[metadata] assign[=] call[name[read_metadata], parameter[name[text], name[ext]]]
call[name[rearrange_jupytext_metadata], parameter[name[metadata]]]
return[call[name[format_name_for_ext], parameter[name[metadata], name[ext]]]]
|
keyword[def] identifier[read_format_from_metadata] ( identifier[text] , identifier[ext] ):
literal[string]
identifier[metadata] = identifier[read_metadata] ( identifier[text] , identifier[ext] )
identifier[rearrange_jupytext_metadata] ( identifier[metadata] )
keyword[return] identifier[format_name_for_ext] ( identifier[metadata] , identifier[ext] , identifier[explicit_default] = keyword[False] )
|
def read_format_from_metadata(text, ext):
    """Return the format of the file, when that information is available from the metadata"""
    metadata = read_metadata(text, ext)
    # Normalize the jupytext metadata in place before extracting the name.
    rearrange_jupytext_metadata(metadata)
    return format_name_for_ext(metadata, ext, explicit_default=False)
|
def path_to_reference(path):
    """Resolve a dotted object-path string to the object it names.

    ``path`` may name a builtin (``"len"``), a module-level global, a
    module attribute (``"math.sqrt"``), or a class attribute
    (``"collections.Counter.most_common"``).

    Raises:
        errors.BadObjectPathError: if the path cannot be resolved.
    """
    # By default JSON decodes strings as unicode. The Python __import__ does
    # not like that choice. So we'll just cast all function paths to a string.
    # NOTE: that there is no corresponding unit test for the classmethod
    # version of this problem. It only impacts importing modules.
    path = str(path)
    if '.' not in path:
        builtins_ns = globals()["__builtins__"]
        try:
            # __builtins__ is a dict in imported modules...
            return builtins_ns[path]
        except (KeyError, TypeError):
            # ...and the builtins *module* when run as a script, where
            # subscription raises TypeError (previously uncaught, which
            # made the getattr fallback below unreachable).
            try:
                return getattr(builtins_ns, path)
            except AttributeError:
                pass
        try:
            return globals()[path]
        except KeyError:
            pass
        raise errors.BadObjectPathError(
            'Unable to find function "%s".' % (path,))
    module_path, function_name = path.rsplit('.', 1)
    try:
        module = __import__(name=module_path,
                            fromlist=[function_name])
    except ImportError:
        # The penultimate component may be a class rather than a module:
        # import the parent module, then fetch the class from it.
        module_path, class_name = module_path.rsplit('.', 1)
        module = __import__(name=module_path, fromlist=[class_name])
        module = getattr(module, class_name)
    try:
        return getattr(module, function_name)
    except AttributeError:
        raise errors.BadObjectPathError(
            'Unable to find function "%s".' % (path,))
|
def function[path_to_reference, parameter[path]]:
constant[Convert an object path reference to a reference.]
variable[path] assign[=] call[name[str], parameter[name[path]]]
if compare[constant[.] <ast.NotIn object at 0x7da2590d7190> name[path]] begin[:]
<ast.Try object at 0x7da18f00ef80>
<ast.Try object at 0x7da18f00d000>
<ast.Raise object at 0x7da18f722890>
<ast.Tuple object at 0x7da18f721ff0> assign[=] call[name[path].rsplit, parameter[constant[.], constant[1]]]
<ast.Try object at 0x7da18f720f70>
<ast.Try object at 0x7da18f721b10>
|
keyword[def] identifier[path_to_reference] ( identifier[path] ):
literal[string]
identifier[path] = identifier[str] ( identifier[path] )
keyword[if] literal[string] keyword[not] keyword[in] identifier[path] :
keyword[try] :
keyword[return] identifier[globals] ()[ literal[string] ][ identifier[path] ]
keyword[except] identifier[KeyError] :
keyword[try] :
keyword[return] identifier[getattr] ( identifier[globals] ()[ literal[string] ], identifier[path] )
keyword[except] identifier[AttributeError] :
keyword[pass]
keyword[try] :
keyword[return] identifier[globals] ()[ identifier[path] ]
keyword[except] identifier[KeyError] :
keyword[pass]
keyword[raise] identifier[errors] . identifier[BadObjectPathError] (
literal[string] %( identifier[path] ,))
identifier[module_path] , identifier[function_name] = identifier[path] . identifier[rsplit] ( literal[string] , literal[int] )
keyword[try] :
identifier[module] = identifier[__import__] ( identifier[name] = identifier[module_path] ,
identifier[fromlist] =[ identifier[function_name] ])
keyword[except] identifier[ImportError] :
identifier[module_path] , identifier[class_name] = identifier[module_path] . identifier[rsplit] ( literal[string] , literal[int] )
identifier[module] = identifier[__import__] ( identifier[name] = identifier[module_path] , identifier[fromlist] =[ identifier[class_name] ])
identifier[module] = identifier[getattr] ( identifier[module] , identifier[class_name] )
keyword[try] :
keyword[return] identifier[getattr] ( identifier[module] , identifier[function_name] )
keyword[except] identifier[AttributeError] :
keyword[raise] identifier[errors] . identifier[BadObjectPathError] (
literal[string] %( identifier[path] ,))
|
def path_to_reference(path):
    """Resolve a dotted object-path string (builtin name, module-level
    global, module attribute, or class attribute) to the object it
    names; raises errors.BadObjectPathError when lookup fails."""
    # By default JSON decodes strings as unicode. The Python __import__ does
    # not like that choice. So we'll just cast all function paths to a string.
    # NOTE: that there is no corresponding unit test for the classmethod
    # version of this problem. It only impacts importing modules.
    path = str(path)
    if '.' not in path:
        try:
            return globals()['__builtins__'][path] # depends on [control=['try'], data=[]]
        except KeyError:
            try:
                return getattr(globals()['__builtins__'], path) # depends on [control=['try'], data=[]]
            except AttributeError:
                pass # depends on [control=['except'], data=[]] # depends on [control=['except'], data=[]]
        try:
            return globals()[path] # depends on [control=['try'], data=[]]
        except KeyError:
            pass # depends on [control=['except'], data=[]]
        raise errors.BadObjectPathError('Unable to find function "%s".' % (path,)) # depends on [control=['if'], data=['path']]
    (module_path, function_name) = path.rsplit('.', 1)
    try:
        module = __import__(name=module_path, fromlist=[function_name]) # depends on [control=['try'], data=[]]
    except ImportError:
        # Penultimate component may be a class: import its parent module first.
        (module_path, class_name) = module_path.rsplit('.', 1)
        module = __import__(name=module_path, fromlist=[class_name])
        module = getattr(module, class_name) # depends on [control=['except'], data=[]]
    try:
        return getattr(module, function_name) # depends on [control=['try'], data=[]]
    except AttributeError:
        raise errors.BadObjectPathError('Unable to find function "%s".' % (path,)) # depends on [control=['except'], data=[]]
|
def main():
    """Parse command-line arguments and start a Cheroot WSGI/HTTP server."""
    parser = argparse.ArgumentParser(
        description='Start an instance of the Cheroot WSGI/HTTP server.',
    )
    for name, options in _arg_spec.items():
        parser.add_argument(name, **options)
    args = parser.parse_args()
    # Make sure the current working directory is importable.
    if '' not in sys.path:
        sys.path.insert(0, '')
    # Build a server for the requested WSGI app and run it.
    args._wsgi_app.server(args).safe_start()
|
def function[main, parameter[]]:
constant[Create a new Cheroot instance with arguments from the command line.]
variable[parser] assign[=] call[name[argparse].ArgumentParser, parameter[]]
for taget[tuple[[<ast.Name object at 0x7da20c6e58d0>, <ast.Name object at 0x7da20c6e4850>]]] in starred[call[name[_arg_spec].items, parameter[]]] begin[:]
call[name[parser].add_argument, parameter[name[arg]]]
variable[raw_args] assign[=] call[name[parser].parse_args, parameter[]]
<ast.BoolOp object at 0x7da20c6e5120>
call[call[name[raw_args]._wsgi_app.server, parameter[name[raw_args]]].safe_start, parameter[]]
|
keyword[def] identifier[main] ():
literal[string]
identifier[parser] = identifier[argparse] . identifier[ArgumentParser] (
identifier[description] = literal[string] ,
)
keyword[for] identifier[arg] , identifier[spec] keyword[in] identifier[_arg_spec] . identifier[items] ():
identifier[parser] . identifier[add_argument] ( identifier[arg] ,** identifier[spec] )
identifier[raw_args] = identifier[parser] . identifier[parse_args] ()
literal[string] keyword[in] identifier[sys] . identifier[path] keyword[or] identifier[sys] . identifier[path] . identifier[insert] ( literal[int] , literal[string] )
identifier[raw_args] . identifier[_wsgi_app] . identifier[server] ( identifier[raw_args] ). identifier[safe_start] ()
|
def main():
    """Parse CLI arguments declared in ``_arg_spec`` and start a Cheroot
    WSGI/HTTP server for the selected app."""
    parser = argparse.ArgumentParser(description='Start an instance of the Cheroot WSGI/HTTP server.')
    for (arg, spec) in _arg_spec.items():
        parser.add_argument(arg, **spec) # depends on [control=['for'], data=[]]
    raw_args = parser.parse_args()
    # ensure cwd in sys.path
    '' in sys.path or sys.path.insert(0, '')
    # create a server based on the arguments provided
    raw_args._wsgi_app.server(raw_args).safe_start()
|
def _get_file(db, user_id, api_path, query_fields, decrypt_func):
    """Fetch one file row for ``user_id`` at ``api_path``.

    ``query_fields`` selects which database columns appear in the
    returned data. Raises NoSuchFile when no matching row exists.
    """
    query = _select_file(user_id, api_path, query_fields, limit=1)
    row = db.execute(query).first()
    if row is None:
        raise NoSuchFile(api_path)
    if files.c.content not in query_fields:
        return to_dict_no_content(query_fields, row)
    return to_dict_with_content(query_fields, row, decrypt_func)
|
def function[_get_file, parameter[db, user_id, api_path, query_fields, decrypt_func]]:
constant[
Get file data for the given user_id, path, and query_fields. The
query_fields parameter specifies which database fields should be
included in the returned file data.
]
variable[result] assign[=] call[call[name[db].execute, parameter[call[name[_select_file], parameter[name[user_id], name[api_path], name[query_fields]]]]].first, parameter[]]
if compare[name[result] is constant[None]] begin[:]
<ast.Raise object at 0x7da2041d8c40>
if compare[name[files].c.content in name[query_fields]] begin[:]
return[call[name[to_dict_with_content], parameter[name[query_fields], name[result], name[decrypt_func]]]]
|
keyword[def] identifier[_get_file] ( identifier[db] , identifier[user_id] , identifier[api_path] , identifier[query_fields] , identifier[decrypt_func] ):
literal[string]
identifier[result] = identifier[db] . identifier[execute] (
identifier[_select_file] ( identifier[user_id] , identifier[api_path] , identifier[query_fields] , identifier[limit] = literal[int] ),
). identifier[first] ()
keyword[if] identifier[result] keyword[is] keyword[None] :
keyword[raise] identifier[NoSuchFile] ( identifier[api_path] )
keyword[if] identifier[files] . identifier[c] . identifier[content] keyword[in] identifier[query_fields] :
keyword[return] identifier[to_dict_with_content] ( identifier[query_fields] , identifier[result] , identifier[decrypt_func] )
keyword[else] :
keyword[return] identifier[to_dict_no_content] ( identifier[query_fields] , identifier[result] )
|
def _get_file(db, user_id, api_path, query_fields, decrypt_func):
    """
    Get file data for the given user_id, path, and query_fields. The
    query_fields parameter specifies which database fields should be
    included in the returned file data.

    Raises NoSuchFile if no matching row exists.
    """
    result = db.execute(_select_file(user_id, api_path, query_fields, limit=1)).first()
    if result is None:
        raise NoSuchFile(api_path) # depends on [control=['if'], data=[]]
    if files.c.content in query_fields:
        return to_dict_with_content(query_fields, result, decrypt_func) # depends on [control=['if'], data=['query_fields']]
    else:
        return to_dict_no_content(query_fields, result)
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.