code stringlengths 75 104k | code_sememe stringlengths 47 309k | token_type stringlengths 215 214k | code_dependency stringlengths 75 155k |
|---|---|---|---|
def slugs_mobile_location_send(self, target, latitude, longitude, force_mavlink1=False):
'''
Transmits the last known position of the mobile GS to the UAV. Very
relevant when Track Mobile is enabled
target : The system reporting the action (uint8_t)
latitude : Mobile Latitude (float)
longitude : Mobile Longitude (float)
'''
return self.send(self.slugs_mobile_location_encode(target, latitude, longitude), force_mavlink1=force_mavlink1) | def function[slugs_mobile_location_send, parameter[self, target, latitude, longitude, force_mavlink1]]:
constant[
Transmits the last known position of the mobile GS to the UAV. Very
relevant when Track Mobile is enabled
target : The system reporting the action (uint8_t)
latitude : Mobile Latitude (float)
longitude : Mobile Longitude (float)
]
return[call[name[self].send, parameter[call[name[self].slugs_mobile_location_encode, parameter[name[target], name[latitude], name[longitude]]]]]] | keyword[def] identifier[slugs_mobile_location_send] ( identifier[self] , identifier[target] , identifier[latitude] , identifier[longitude] , identifier[force_mavlink1] = keyword[False] ):
literal[string]
keyword[return] identifier[self] . identifier[send] ( identifier[self] . identifier[slugs_mobile_location_encode] ( identifier[target] , identifier[latitude] , identifier[longitude] ), identifier[force_mavlink1] = identifier[force_mavlink1] ) | def slugs_mobile_location_send(self, target, latitude, longitude, force_mavlink1=False):
"""
Transmits the last known position of the mobile GS to the UAV. Very
relevant when Track Mobile is enabled
target : The system reporting the action (uint8_t)
latitude : Mobile Latitude (float)
longitude : Mobile Longitude (float)
"""
return self.send(self.slugs_mobile_location_encode(target, latitude, longitude), force_mavlink1=force_mavlink1) |
def strip_xml_namespace(root):
"""Strip out namespace data from an ElementTree.
This function is recursive and will traverse all
subnodes to the root element
@param root: the root element
@return: the same root element, minus namespace
"""
try:
root.tag = root.tag.split('}')[1]
except IndexError:
pass
for element in root.getchildren():
strip_xml_namespace(element) | def function[strip_xml_namespace, parameter[root]]:
constant[Strip out namespace data from an ElementTree.
This function is recursive and will traverse all
subnodes to the root element
@param root: the root element
@return: the same root element, minus namespace
]
<ast.Try object at 0x7da2047ebfd0>
for taget[name[element]] in starred[call[name[root].getchildren, parameter[]]] begin[:]
call[name[strip_xml_namespace], parameter[name[element]]] | keyword[def] identifier[strip_xml_namespace] ( identifier[root] ):
literal[string]
keyword[try] :
identifier[root] . identifier[tag] = identifier[root] . identifier[tag] . identifier[split] ( literal[string] )[ literal[int] ]
keyword[except] identifier[IndexError] :
keyword[pass]
keyword[for] identifier[element] keyword[in] identifier[root] . identifier[getchildren] ():
identifier[strip_xml_namespace] ( identifier[element] ) | def strip_xml_namespace(root):
"""Strip out namespace data from an ElementTree.
This function is recursive and will traverse all
subnodes to the root element
@param root: the root element
@return: the same root element, minus namespace
"""
try:
root.tag = root.tag.split('}')[1] # depends on [control=['try'], data=[]]
except IndexError:
pass # depends on [control=['except'], data=[]]
for element in root.getchildren():
strip_xml_namespace(element) # depends on [control=['for'], data=['element']] |
def deserialize_skycoord(d):
"""
Deserializes a JSONified :obj:`astropy.coordinates.SkyCoord`.
Args:
d (:obj:`dict`): A dictionary representation of a :obj:`SkyCoord` object.
Returns:
A :obj:`SkyCoord` object.
"""
if 'distance' in d:
args = (d['lon'], d['lat'], d['distance'])
else:
args = (d['lon'], d['lat'])
return coords.SkyCoord(
*args,
frame=d['frame'],
representation='spherical') | def function[deserialize_skycoord, parameter[d]]:
constant[
Deserializes a JSONified :obj:`astropy.coordinates.SkyCoord`.
Args:
d (:obj:`dict`): A dictionary representation of a :obj:`SkyCoord` object.
Returns:
A :obj:`SkyCoord` object.
]
if compare[constant[distance] in name[d]] begin[:]
variable[args] assign[=] tuple[[<ast.Subscript object at 0x7da1b28d5a50>, <ast.Subscript object at 0x7da1b28d5570>, <ast.Subscript object at 0x7da1b28d4a30>]]
return[call[name[coords].SkyCoord, parameter[<ast.Starred object at 0x7da1b26c9f90>]]] | keyword[def] identifier[deserialize_skycoord] ( identifier[d] ):
literal[string]
keyword[if] literal[string] keyword[in] identifier[d] :
identifier[args] =( identifier[d] [ literal[string] ], identifier[d] [ literal[string] ], identifier[d] [ literal[string] ])
keyword[else] :
identifier[args] =( identifier[d] [ literal[string] ], identifier[d] [ literal[string] ])
keyword[return] identifier[coords] . identifier[SkyCoord] (
* identifier[args] ,
identifier[frame] = identifier[d] [ literal[string] ],
identifier[representation] = literal[string] ) | def deserialize_skycoord(d):
"""
Deserializes a JSONified :obj:`astropy.coordinates.SkyCoord`.
Args:
d (:obj:`dict`): A dictionary representation of a :obj:`SkyCoord` object.
Returns:
A :obj:`SkyCoord` object.
"""
if 'distance' in d:
args = (d['lon'], d['lat'], d['distance']) # depends on [control=['if'], data=['d']]
else:
args = (d['lon'], d['lat'])
return coords.SkyCoord(*args, frame=d['frame'], representation='spherical') |
def by_readings(self, role_names=['', 'Author']):
"""
The Creators who have been most-read, ordered by number of readings.
By default it will only include Creators whose role was left empty,
or is 'Author'.
Each Creator will have a `num_readings` attribute.
"""
if not spectator_apps.is_enabled('reading'):
raise ImproperlyConfigured("To use the CreatorManager.by_readings() method, 'spectator.reading' must by in INSTALLED_APPS.")
qs = self.get_queryset()
qs = qs.filter(publication_roles__role_name__in=role_names) \
.exclude(publications__reading__isnull=True) \
.annotate(num_readings=Count('publications__reading')) \
.order_by('-num_readings', 'name_sort')
return qs | def function[by_readings, parameter[self, role_names]]:
constant[
The Creators who have been most-read, ordered by number of readings.
By default it will only include Creators whose role was left empty,
or is 'Author'.
Each Creator will have a `num_readings` attribute.
]
if <ast.UnaryOp object at 0x7da1b26af5b0> begin[:]
<ast.Raise object at 0x7da1b26af6a0>
variable[qs] assign[=] call[name[self].get_queryset, parameter[]]
variable[qs] assign[=] call[call[call[call[name[qs].filter, parameter[]].exclude, parameter[]].annotate, parameter[]].order_by, parameter[constant[-num_readings], constant[name_sort]]]
return[name[qs]] | keyword[def] identifier[by_readings] ( identifier[self] , identifier[role_names] =[ literal[string] , literal[string] ]):
literal[string]
keyword[if] keyword[not] identifier[spectator_apps] . identifier[is_enabled] ( literal[string] ):
keyword[raise] identifier[ImproperlyConfigured] ( literal[string] )
identifier[qs] = identifier[self] . identifier[get_queryset] ()
identifier[qs] = identifier[qs] . identifier[filter] ( identifier[publication_roles__role_name__in] = identifier[role_names] ). identifier[exclude] ( identifier[publications__reading__isnull] = keyword[True] ). identifier[annotate] ( identifier[num_readings] = identifier[Count] ( literal[string] )). identifier[order_by] ( literal[string] , literal[string] )
keyword[return] identifier[qs] | def by_readings(self, role_names=['', 'Author']):
"""
The Creators who have been most-read, ordered by number of readings.
By default it will only include Creators whose role was left empty,
or is 'Author'.
Each Creator will have a `num_readings` attribute.
"""
if not spectator_apps.is_enabled('reading'):
raise ImproperlyConfigured("To use the CreatorManager.by_readings() method, 'spectator.reading' must by in INSTALLED_APPS.") # depends on [control=['if'], data=[]]
qs = self.get_queryset()
qs = qs.filter(publication_roles__role_name__in=role_names).exclude(publications__reading__isnull=True).annotate(num_readings=Count('publications__reading')).order_by('-num_readings', 'name_sort')
return qs |
def path(self) -> str:
"""
Accessor for (stringified) path to current tails file.
:return: (stringified) path to current tails file.
"""
cfg = json.loads(self._tails_cfg_json)
return join(cfg['base_dir'], cfg['file']) | def function[path, parameter[self]]:
constant[
Accessor for (stringified) path to current tails file.
:return: (stringified) path to current tails file.
]
variable[cfg] assign[=] call[name[json].loads, parameter[name[self]._tails_cfg_json]]
return[call[name[join], parameter[call[name[cfg]][constant[base_dir]], call[name[cfg]][constant[file]]]]] | keyword[def] identifier[path] ( identifier[self] )-> identifier[str] :
literal[string]
identifier[cfg] = identifier[json] . identifier[loads] ( identifier[self] . identifier[_tails_cfg_json] )
keyword[return] identifier[join] ( identifier[cfg] [ literal[string] ], identifier[cfg] [ literal[string] ]) | def path(self) -> str:
"""
Accessor for (stringified) path to current tails file.
:return: (stringified) path to current tails file.
"""
cfg = json.loads(self._tails_cfg_json)
return join(cfg['base_dir'], cfg['file']) |
def Show(self, waitTime: float = OPERATION_WAIT_TIME) -> bool:
"""
Call native `ShowWindow(SW.Show)`.
Return bool, True if succeed otherwise False.
"""
return self.ShowWindow(SW.Show, waitTime) | def function[Show, parameter[self, waitTime]]:
constant[
Call native `ShowWindow(SW.Show)`.
Return bool, True if succeed otherwise False.
]
return[call[name[self].ShowWindow, parameter[name[SW].Show, name[waitTime]]]] | keyword[def] identifier[Show] ( identifier[self] , identifier[waitTime] : identifier[float] = identifier[OPERATION_WAIT_TIME] )-> identifier[bool] :
literal[string]
keyword[return] identifier[self] . identifier[ShowWindow] ( identifier[SW] . identifier[Show] , identifier[waitTime] ) | def Show(self, waitTime: float=OPERATION_WAIT_TIME) -> bool:
"""
Call native `ShowWindow(SW.Show)`.
Return bool, True if succeed otherwise False.
"""
return self.ShowWindow(SW.Show, waitTime) |
def success( self ):
"""Test whether the experiment has been run successfully. This will
be False if the experiment hasn't been run, or if it's been run and
failed (in which case the exception will be stored in the metadata).
:returns: ``True`` if the experiment has been run successfully"""
if self.STATUS in self.metadata().keys():
return (self.metadata())[self.STATUS]
else:
return False | def function[success, parameter[self]]:
constant[Test whether the experiment has been run successfully. This will
be False if the experiment hasn't been run, or if it's been run and
failed (in which case the exception will be stored in the metadata).
:returns: ``True`` if the experiment has been run successfully]
if compare[name[self].STATUS in call[call[name[self].metadata, parameter[]].keys, parameter[]]] begin[:]
return[call[call[name[self].metadata, parameter[]]][name[self].STATUS]] | keyword[def] identifier[success] ( identifier[self] ):
literal[string]
keyword[if] identifier[self] . identifier[STATUS] keyword[in] identifier[self] . identifier[metadata] (). identifier[keys] ():
keyword[return] ( identifier[self] . identifier[metadata] ())[ identifier[self] . identifier[STATUS] ]
keyword[else] :
keyword[return] keyword[False] | def success(self):
"""Test whether the experiment has been run successfully. This will
be False if the experiment hasn't been run, or if it's been run and
failed (in which case the exception will be stored in the metadata).
:returns: ``True`` if the experiment has been run successfully"""
if self.STATUS in self.metadata().keys():
return self.metadata()[self.STATUS] # depends on [control=['if'], data=[]]
else:
return False |
def neural_gpu_body(inputs, hparams, name=None):
"""The core Neural GPU."""
with tf.variable_scope(name, "neural_gpu"):
def step(state, inp): # pylint: disable=missing-docstring
x = tf.nn.dropout(state, 1.0 - hparams.dropout)
for layer in range(hparams.num_hidden_layers):
x = common_layers.conv_gru(
x, (hparams.kernel_height, hparams.kernel_width),
hparams.hidden_size,
name="cgru_%d" % layer)
# Padding input is zeroed-out in the modality, we check this by summing.
padding_inp = tf.less(tf.reduce_sum(tf.abs(inp), axis=[1, 2]), 0.00001)
new_state = tf.where(padding_inp, state, x) # No-op where inp is padding.
return new_state
return tf.foldl(
step,
tf.transpose(inputs, [1, 0, 2, 3]),
initializer=inputs,
parallel_iterations=1,
swap_memory=True) | def function[neural_gpu_body, parameter[inputs, hparams, name]]:
constant[The core Neural GPU.]
with call[name[tf].variable_scope, parameter[name[name], constant[neural_gpu]]] begin[:]
def function[step, parameter[state, inp]]:
variable[x] assign[=] call[name[tf].nn.dropout, parameter[name[state], binary_operation[constant[1.0] - name[hparams].dropout]]]
for taget[name[layer]] in starred[call[name[range], parameter[name[hparams].num_hidden_layers]]] begin[:]
variable[x] assign[=] call[name[common_layers].conv_gru, parameter[name[x], tuple[[<ast.Attribute object at 0x7da18dc071c0>, <ast.Attribute object at 0x7da18dc05e10>]], name[hparams].hidden_size]]
variable[padding_inp] assign[=] call[name[tf].less, parameter[call[name[tf].reduce_sum, parameter[call[name[tf].abs, parameter[name[inp]]]]], constant[1e-05]]]
variable[new_state] assign[=] call[name[tf].where, parameter[name[padding_inp], name[state], name[x]]]
return[name[new_state]]
return[call[name[tf].foldl, parameter[name[step], call[name[tf].transpose, parameter[name[inputs], list[[<ast.Constant object at 0x7da18dc07a60>, <ast.Constant object at 0x7da18dc07c70>, <ast.Constant object at 0x7da18dc06aa0>, <ast.Constant object at 0x7da20c6e5300>]]]]]]] | keyword[def] identifier[neural_gpu_body] ( identifier[inputs] , identifier[hparams] , identifier[name] = keyword[None] ):
literal[string]
keyword[with] identifier[tf] . identifier[variable_scope] ( identifier[name] , literal[string] ):
keyword[def] identifier[step] ( identifier[state] , identifier[inp] ):
identifier[x] = identifier[tf] . identifier[nn] . identifier[dropout] ( identifier[state] , literal[int] - identifier[hparams] . identifier[dropout] )
keyword[for] identifier[layer] keyword[in] identifier[range] ( identifier[hparams] . identifier[num_hidden_layers] ):
identifier[x] = identifier[common_layers] . identifier[conv_gru] (
identifier[x] ,( identifier[hparams] . identifier[kernel_height] , identifier[hparams] . identifier[kernel_width] ),
identifier[hparams] . identifier[hidden_size] ,
identifier[name] = literal[string] % identifier[layer] )
identifier[padding_inp] = identifier[tf] . identifier[less] ( identifier[tf] . identifier[reduce_sum] ( identifier[tf] . identifier[abs] ( identifier[inp] ), identifier[axis] =[ literal[int] , literal[int] ]), literal[int] )
identifier[new_state] = identifier[tf] . identifier[where] ( identifier[padding_inp] , identifier[state] , identifier[x] )
keyword[return] identifier[new_state]
keyword[return] identifier[tf] . identifier[foldl] (
identifier[step] ,
identifier[tf] . identifier[transpose] ( identifier[inputs] ,[ literal[int] , literal[int] , literal[int] , literal[int] ]),
identifier[initializer] = identifier[inputs] ,
identifier[parallel_iterations] = literal[int] ,
identifier[swap_memory] = keyword[True] ) | def neural_gpu_body(inputs, hparams, name=None):
"""The core Neural GPU."""
with tf.variable_scope(name, 'neural_gpu'):
def step(state, inp): # pylint: disable=missing-docstring
x = tf.nn.dropout(state, 1.0 - hparams.dropout)
for layer in range(hparams.num_hidden_layers):
x = common_layers.conv_gru(x, (hparams.kernel_height, hparams.kernel_width), hparams.hidden_size, name='cgru_%d' % layer) # depends on [control=['for'], data=['layer']]
# Padding input is zeroed-out in the modality, we check this by summing.
padding_inp = tf.less(tf.reduce_sum(tf.abs(inp), axis=[1, 2]), 1e-05)
new_state = tf.where(padding_inp, state, x) # No-op where inp is padding.
return new_state
return tf.foldl(step, tf.transpose(inputs, [1, 0, 2, 3]), initializer=inputs, parallel_iterations=1, swap_memory=True) # depends on [control=['with'], data=[]] |
def rgb2bgr(self):
""" Converts data using the cv conversion. """
new_data = cv2.cvtColor(self.raw_data, cv2.COLOR_RGB2BGR)
return ColorImage(new_data, frame=self.frame, encoding='bgr8') | def function[rgb2bgr, parameter[self]]:
constant[ Converts data using the cv conversion. ]
variable[new_data] assign[=] call[name[cv2].cvtColor, parameter[name[self].raw_data, name[cv2].COLOR_RGB2BGR]]
return[call[name[ColorImage], parameter[name[new_data]]]] | keyword[def] identifier[rgb2bgr] ( identifier[self] ):
literal[string]
identifier[new_data] = identifier[cv2] . identifier[cvtColor] ( identifier[self] . identifier[raw_data] , identifier[cv2] . identifier[COLOR_RGB2BGR] )
keyword[return] identifier[ColorImage] ( identifier[new_data] , identifier[frame] = identifier[self] . identifier[frame] , identifier[encoding] = literal[string] ) | def rgb2bgr(self):
""" Converts data using the cv conversion. """
new_data = cv2.cvtColor(self.raw_data, cv2.COLOR_RGB2BGR)
return ColorImage(new_data, frame=self.frame, encoding='bgr8') |
def predictive_probability(self, M_c, X_L, X_D, Y, Q):
"""Calculate probability of cells jointly taking values given a
latent state.
:param Y: A list of constraints to apply when querying. Each constraint
is a triplet of (r, d, v): r is the row index, d is the column
index and v is the value of the constraint
:type Y: list of lists
:param Q: A list of values to query. Each value is triplet of (r, d, v):
r is the row index, d is the column index, and v is the value at
which the density is evaluated.
:type Q: list of lists
:returns: float -- joint log probability of the values specified by Q
"""
return su.predictive_probability(M_c, X_L, X_D, Y, Q) | def function[predictive_probability, parameter[self, M_c, X_L, X_D, Y, Q]]:
constant[Calculate probability of cells jointly taking values given a
latent state.
:param Y: A list of constraints to apply when querying. Each constraint
is a triplet of (r, d, v): r is the row index, d is the column
index and v is the value of the constraint
:type Y: list of lists
:param Q: A list of values to query. Each value is triplet of (r, d, v):
r is the row index, d is the column index, and v is the value at
which the density is evaluated.
:type Q: list of lists
:returns: float -- joint log probability of the values specified by Q
]
return[call[name[su].predictive_probability, parameter[name[M_c], name[X_L], name[X_D], name[Y], name[Q]]]] | keyword[def] identifier[predictive_probability] ( identifier[self] , identifier[M_c] , identifier[X_L] , identifier[X_D] , identifier[Y] , identifier[Q] ):
literal[string]
keyword[return] identifier[su] . identifier[predictive_probability] ( identifier[M_c] , identifier[X_L] , identifier[X_D] , identifier[Y] , identifier[Q] ) | def predictive_probability(self, M_c, X_L, X_D, Y, Q):
"""Calculate probability of cells jointly taking values given a
latent state.
:param Y: A list of constraints to apply when querying. Each constraint
is a triplet of (r, d, v): r is the row index, d is the column
index and v is the value of the constraint
:type Y: list of lists
:param Q: A list of values to query. Each value is triplet of (r, d, v):
r is the row index, d is the column index, and v is the value at
which the density is evaluated.
:type Q: list of lists
:returns: float -- joint log probability of the values specified by Q
"""
return su.predictive_probability(M_c, X_L, X_D, Y, Q) |
def ApplyPluginToTypedCollection(plugin, type_names, fetch_fn):
"""Applies instant output plugin to a collection of results.
Args:
plugin: InstantOutputPlugin instance.
type_names: List of type names (strings) to be processed.
fetch_fn: Function that takes a type name as an argument and returns
available items (FlowResult) corresponding to this type. Items are
returned as a generator
Yields:
Bytes chunks, as generated by the plugin.
"""
for chunk in plugin.Start():
yield chunk
def GetValues(tn):
for v in fetch_fn(tn):
yield v
for type_name in sorted(type_names):
stored_cls = rdfvalue.RDFValue.classes[type_name]
for chunk in plugin.ProcessValues(stored_cls,
functools.partial(GetValues, type_name)):
yield chunk
for chunk in plugin.Finish():
yield chunk | def function[ApplyPluginToTypedCollection, parameter[plugin, type_names, fetch_fn]]:
constant[Applies instant output plugin to a collection of results.
Args:
plugin: InstantOutputPlugin instance.
type_names: List of type names (strings) to be processed.
fetch_fn: Function that takes a type name as an argument and returns
available items (FlowResult) corresponding to this type. Items are
returned as a generator
Yields:
Bytes chunks, as generated by the plugin.
]
for taget[name[chunk]] in starred[call[name[plugin].Start, parameter[]]] begin[:]
<ast.Yield object at 0x7da1b1b6cf40>
def function[GetValues, parameter[tn]]:
for taget[name[v]] in starred[call[name[fetch_fn], parameter[name[tn]]]] begin[:]
<ast.Yield object at 0x7da1b1b6fc70>
for taget[name[type_name]] in starred[call[name[sorted], parameter[name[type_names]]]] begin[:]
variable[stored_cls] assign[=] call[name[rdfvalue].RDFValue.classes][name[type_name]]
for taget[name[chunk]] in starred[call[name[plugin].ProcessValues, parameter[name[stored_cls], call[name[functools].partial, parameter[name[GetValues], name[type_name]]]]]] begin[:]
<ast.Yield object at 0x7da1b1b6ec20>
for taget[name[chunk]] in starred[call[name[plugin].Finish, parameter[]]] begin[:]
<ast.Yield object at 0x7da1b1b6fd60> | keyword[def] identifier[ApplyPluginToTypedCollection] ( identifier[plugin] , identifier[type_names] , identifier[fetch_fn] ):
literal[string]
keyword[for] identifier[chunk] keyword[in] identifier[plugin] . identifier[Start] ():
keyword[yield] identifier[chunk]
keyword[def] identifier[GetValues] ( identifier[tn] ):
keyword[for] identifier[v] keyword[in] identifier[fetch_fn] ( identifier[tn] ):
keyword[yield] identifier[v]
keyword[for] identifier[type_name] keyword[in] identifier[sorted] ( identifier[type_names] ):
identifier[stored_cls] = identifier[rdfvalue] . identifier[RDFValue] . identifier[classes] [ identifier[type_name] ]
keyword[for] identifier[chunk] keyword[in] identifier[plugin] . identifier[ProcessValues] ( identifier[stored_cls] ,
identifier[functools] . identifier[partial] ( identifier[GetValues] , identifier[type_name] )):
keyword[yield] identifier[chunk]
keyword[for] identifier[chunk] keyword[in] identifier[plugin] . identifier[Finish] ():
keyword[yield] identifier[chunk] | def ApplyPluginToTypedCollection(plugin, type_names, fetch_fn):
"""Applies instant output plugin to a collection of results.
Args:
plugin: InstantOutputPlugin instance.
type_names: List of type names (strings) to be processed.
fetch_fn: Function that takes a type name as an argument and returns
available items (FlowResult) corresponding to this type. Items are
returned as a generator
Yields:
Bytes chunks, as generated by the plugin.
"""
for chunk in plugin.Start():
yield chunk # depends on [control=['for'], data=['chunk']]
def GetValues(tn):
for v in fetch_fn(tn):
yield v # depends on [control=['for'], data=['v']]
for type_name in sorted(type_names):
stored_cls = rdfvalue.RDFValue.classes[type_name]
for chunk in plugin.ProcessValues(stored_cls, functools.partial(GetValues, type_name)):
yield chunk # depends on [control=['for'], data=['chunk']] # depends on [control=['for'], data=['type_name']]
for chunk in plugin.Finish():
yield chunk # depends on [control=['for'], data=['chunk']] |
def status(self):
"""
Return the status of the device.
Returns a dictionary with keys 'volume' (int 0-200) , 'power' (bool),
'muted' (bool) and 'source' (str).
"""
nad_reply = self._send(self.POLL_VOLUME +
self.POLL_POWER +
self.POLL_MUTED +
self.POLL_SOURCE, read_reply=True)
if nad_reply is None:
return
# split reply into parts of 10 characters
num_chars = 10
nad_status = [nad_reply[i:i + num_chars]
for i in range(0, len(nad_reply), num_chars)]
return {'volume': int(nad_status[0][-2:], 16),
'power': nad_status[1][-2:] == '01',
'muted': nad_status[2][-2:] == '01',
'source': self.SOURCES_REVERSED[nad_status[3][-2:]]} | def function[status, parameter[self]]:
constant[
Return the status of the device.
Returns a dictionary with keys 'volume' (int 0-200) , 'power' (bool),
'muted' (bool) and 'source' (str).
]
variable[nad_reply] assign[=] call[name[self]._send, parameter[binary_operation[binary_operation[binary_operation[name[self].POLL_VOLUME + name[self].POLL_POWER] + name[self].POLL_MUTED] + name[self].POLL_SOURCE]]]
if compare[name[nad_reply] is constant[None]] begin[:]
return[None]
variable[num_chars] assign[=] constant[10]
variable[nad_status] assign[=] <ast.ListComp object at 0x7da1b0370490>
return[dictionary[[<ast.Constant object at 0x7da1b0370fd0>, <ast.Constant object at 0x7da1b03716c0>, <ast.Constant object at 0x7da1b03727d0>, <ast.Constant object at 0x7da1b0371f30>], [<ast.Call object at 0x7da1b0372f50>, <ast.Compare object at 0x7da1b0371030>, <ast.Compare object at 0x7da1b0370f10>, <ast.Subscript object at 0x7da1b02adb10>]]] | keyword[def] identifier[status] ( identifier[self] ):
literal[string]
identifier[nad_reply] = identifier[self] . identifier[_send] ( identifier[self] . identifier[POLL_VOLUME] +
identifier[self] . identifier[POLL_POWER] +
identifier[self] . identifier[POLL_MUTED] +
identifier[self] . identifier[POLL_SOURCE] , identifier[read_reply] = keyword[True] )
keyword[if] identifier[nad_reply] keyword[is] keyword[None] :
keyword[return]
identifier[num_chars] = literal[int]
identifier[nad_status] =[ identifier[nad_reply] [ identifier[i] : identifier[i] + identifier[num_chars] ]
keyword[for] identifier[i] keyword[in] identifier[range] ( literal[int] , identifier[len] ( identifier[nad_reply] ), identifier[num_chars] )]
keyword[return] { literal[string] : identifier[int] ( identifier[nad_status] [ literal[int] ][- literal[int] :], literal[int] ),
literal[string] : identifier[nad_status] [ literal[int] ][- literal[int] :]== literal[string] ,
literal[string] : identifier[nad_status] [ literal[int] ][- literal[int] :]== literal[string] ,
literal[string] : identifier[self] . identifier[SOURCES_REVERSED] [ identifier[nad_status] [ literal[int] ][- literal[int] :]]} | def status(self):
"""
Return the status of the device.
Returns a dictionary with keys 'volume' (int 0-200) , 'power' (bool),
'muted' (bool) and 'source' (str).
"""
nad_reply = self._send(self.POLL_VOLUME + self.POLL_POWER + self.POLL_MUTED + self.POLL_SOURCE, read_reply=True)
if nad_reply is None:
return # depends on [control=['if'], data=[]]
# split reply into parts of 10 characters
num_chars = 10
nad_status = [nad_reply[i:i + num_chars] for i in range(0, len(nad_reply), num_chars)]
return {'volume': int(nad_status[0][-2:], 16), 'power': nad_status[1][-2:] == '01', 'muted': nad_status[2][-2:] == '01', 'source': self.SOURCES_REVERSED[nad_status[3][-2:]]} |
def show_error_dialog(message: str, details: str=None):
"""
Convenience method for showing an error dialog.
"""
# TODO: i18n
message_box = QMessageBox(
QMessageBox.Critical,
"Error",
message,
QMessageBox.Ok,
None
)
if details:
message_box.setDetailedText(details)
message_box.exec_() | def function[show_error_dialog, parameter[message, details]]:
constant[
Convenience method for showing an error dialog.
]
variable[message_box] assign[=] call[name[QMessageBox], parameter[name[QMessageBox].Critical, constant[Error], name[message], name[QMessageBox].Ok, constant[None]]]
if name[details] begin[:]
call[name[message_box].setDetailedText, parameter[name[details]]]
call[name[message_box].exec_, parameter[]] | keyword[def] identifier[show_error_dialog] ( identifier[message] : identifier[str] , identifier[details] : identifier[str] = keyword[None] ):
literal[string]
identifier[message_box] = identifier[QMessageBox] (
identifier[QMessageBox] . identifier[Critical] ,
literal[string] ,
identifier[message] ,
identifier[QMessageBox] . identifier[Ok] ,
keyword[None]
)
keyword[if] identifier[details] :
identifier[message_box] . identifier[setDetailedText] ( identifier[details] )
identifier[message_box] . identifier[exec_] () | def show_error_dialog(message: str, details: str=None):
"""
Convenience method for showing an error dialog.
"""
# TODO: i18n
message_box = QMessageBox(QMessageBox.Critical, 'Error', message, QMessageBox.Ok, None)
if details:
message_box.setDetailedText(details) # depends on [control=['if'], data=[]]
message_box.exec_() |
def parse(string, timezone='UTC', day_first=False, year_first=True, strict=False):
""""Returns a MayaDT instance for the machine-produced moment specified.
Powered by pendulum.
Accepts most known formats. Useful for working with data.
Keyword Arguments:
string -- string to be parsed
timezone -- timezone referenced from (default: 'UTC')
day_first -- if true, the first value (e.g. 01/05/2016)
is parsed as day.
if year_first is set to True, this distinguishes
between YDM and YMD. (default: False)
year_first -- if true, the first value (e.g. 2016/05/01)
is parsed as year (default: True)
strict -- if False, allow pendulum to fall back on datetime parsing
if pendulum's own parsing fails
"""
options = {}
options['tz'] = timezone
options['day_first'] = day_first
options['year_first'] = year_first
options['strict'] = strict
dt = pendulum.parse(str(string), **options)
return MayaDT.from_datetime(dt) | def function[parse, parameter[string, timezone, day_first, year_first, strict]]:
constant["Returns a MayaDT instance for the machine-produced moment specified.
Powered by pendulum.
Accepts most known formats. Useful for working with data.
Keyword Arguments:
string -- string to be parsed
timezone -- timezone referenced from (default: 'UTC')
day_first -- if true, the first value (e.g. 01/05/2016)
is parsed as day.
if year_first is set to True, this distinguishes
between YDM and YMD. (default: False)
year_first -- if true, the first value (e.g. 2016/05/01)
is parsed as year (default: True)
strict -- if False, allow pendulum to fall back on datetime parsing
if pendulum's own parsing fails
]
variable[options] assign[=] dictionary[[], []]
call[name[options]][constant[tz]] assign[=] name[timezone]
call[name[options]][constant[day_first]] assign[=] name[day_first]
call[name[options]][constant[year_first]] assign[=] name[year_first]
call[name[options]][constant[strict]] assign[=] name[strict]
variable[dt] assign[=] call[name[pendulum].parse, parameter[call[name[str], parameter[name[string]]]]]
return[call[name[MayaDT].from_datetime, parameter[name[dt]]]] | keyword[def] identifier[parse] ( identifier[string] , identifier[timezone] = literal[string] , identifier[day_first] = keyword[False] , identifier[year_first] = keyword[True] , identifier[strict] = keyword[False] ):
literal[string]
identifier[options] ={}
identifier[options] [ literal[string] ]= identifier[timezone]
identifier[options] [ literal[string] ]= identifier[day_first]
identifier[options] [ literal[string] ]= identifier[year_first]
identifier[options] [ literal[string] ]= identifier[strict]
identifier[dt] = identifier[pendulum] . identifier[parse] ( identifier[str] ( identifier[string] ),** identifier[options] )
keyword[return] identifier[MayaDT] . identifier[from_datetime] ( identifier[dt] ) | def parse(string, timezone='UTC', day_first=False, year_first=True, strict=False):
""""Returns a MayaDT instance for the machine-produced moment specified.
Powered by pendulum.
Accepts most known formats. Useful for working with data.
Keyword Arguments:
string -- string to be parsed
timezone -- timezone referenced from (default: 'UTC')
day_first -- if true, the first value (e.g. 01/05/2016)
is parsed as day.
if year_first is set to True, this distinguishes
between YDM and YMD. (default: False)
year_first -- if true, the first value (e.g. 2016/05/01)
is parsed as year (default: True)
strict -- if False, allow pendulum to fall back on datetime parsing
if pendulum's own parsing fails
"""
options = {}
options['tz'] = timezone
options['day_first'] = day_first
options['year_first'] = year_first
options['strict'] = strict
dt = pendulum.parse(str(string), **options)
return MayaDT.from_datetime(dt) |
def extend_trafo_power(extendable_trafos, trafo_params):
"""
Extend power of first trafo in list of extendable trafos
Parameters
----------
extendable_trafos : :any:`list`
Trafos with rated power below maximum size available trafo
trafo_params : :pandas:`pandas.DataFrame<dataframe>`
Transformer parameters
"""
trafo = extendable_trafos[0]
trafo_s_max_a_before = trafo.s_max_a
trafo_nearest_larger = trafo_params.loc[
trafo_params.loc[
trafo_params['S_nom'] > trafo_s_max_a_before
].loc[
:, 'S_nom'
].idxmin(), :
]
trafo.s_max_a = trafo_nearest_larger['S_nom']
trafo.r = trafo_nearest_larger['R']
trafo.x = trafo_nearest_larger['X'] | def function[extend_trafo_power, parameter[extendable_trafos, trafo_params]]:
constant[
Extend power of first trafo in list of extendable trafos
Parameters
----------
extendable_trafos : :any:`list`
Trafos with rated power below maximum size available trafo
trafo_params : :pandas:`pandas.DataFrame<dataframe>`
Transformer parameters
]
variable[trafo] assign[=] call[name[extendable_trafos]][constant[0]]
variable[trafo_s_max_a_before] assign[=] name[trafo].s_max_a
variable[trafo_nearest_larger] assign[=] call[name[trafo_params].loc][tuple[[<ast.Call object at 0x7da2041d8eb0>, <ast.Slice object at 0x7da20c7cb460>]]]
name[trafo].s_max_a assign[=] call[name[trafo_nearest_larger]][constant[S_nom]]
name[trafo].r assign[=] call[name[trafo_nearest_larger]][constant[R]]
name[trafo].x assign[=] call[name[trafo_nearest_larger]][constant[X]] | keyword[def] identifier[extend_trafo_power] ( identifier[extendable_trafos] , identifier[trafo_params] ):
literal[string]
identifier[trafo] = identifier[extendable_trafos] [ literal[int] ]
identifier[trafo_s_max_a_before] = identifier[trafo] . identifier[s_max_a]
identifier[trafo_nearest_larger] = identifier[trafo_params] . identifier[loc] [
identifier[trafo_params] . identifier[loc] [
identifier[trafo_params] [ literal[string] ]> identifier[trafo_s_max_a_before]
]. identifier[loc] [
:, literal[string]
]. identifier[idxmin] (),:
]
identifier[trafo] . identifier[s_max_a] = identifier[trafo_nearest_larger] [ literal[string] ]
identifier[trafo] . identifier[r] = identifier[trafo_nearest_larger] [ literal[string] ]
identifier[trafo] . identifier[x] = identifier[trafo_nearest_larger] [ literal[string] ] | def extend_trafo_power(extendable_trafos, trafo_params):
"""
Extend power of first trafo in list of extendable trafos
Parameters
----------
extendable_trafos : :any:`list`
Trafos with rated power below maximum size available trafo
trafo_params : :pandas:`pandas.DataFrame<dataframe>`
Transformer parameters
"""
trafo = extendable_trafos[0]
trafo_s_max_a_before = trafo.s_max_a
trafo_nearest_larger = trafo_params.loc[trafo_params.loc[trafo_params['S_nom'] > trafo_s_max_a_before].loc[:, 'S_nom'].idxmin(), :]
trafo.s_max_a = trafo_nearest_larger['S_nom']
trafo.r = trafo_nearest_larger['R']
trafo.x = trafo_nearest_larger['X'] |
def pod_absent(name, namespace='default', **kwargs):
'''
Ensures that the named pod is absent from the given namespace.
name
The name of the pod
namespace
The name of the namespace
'''
ret = {'name': name,
'changes': {},
'result': False,
'comment': ''}
pod = __salt__['kubernetes.show_pod'](name, namespace, **kwargs)
if pod is None:
ret['result'] = True if not __opts__['test'] else None
ret['comment'] = 'The pod does not exist'
return ret
if __opts__['test']:
ret['comment'] = 'The pod is going to be deleted'
ret['result'] = None
return ret
res = __salt__['kubernetes.delete_pod'](name, namespace, **kwargs)
if res['code'] == 200 or res['code'] is None:
ret['result'] = True
ret['changes'] = {
'kubernetes.pod': {
'new': 'absent', 'old': 'present'}}
if res['code'] is None:
ret['comment'] = 'In progress'
else:
ret['comment'] = res['message']
else:
ret['comment'] = 'Something went wrong, response: {0}'.format(res)
return ret | def function[pod_absent, parameter[name, namespace]]:
constant[
Ensures that the named pod is absent from the given namespace.
name
The name of the pod
namespace
The name of the namespace
]
variable[ret] assign[=] dictionary[[<ast.Constant object at 0x7da18f58e5f0>, <ast.Constant object at 0x7da18f58f550>, <ast.Constant object at 0x7da18f58d090>, <ast.Constant object at 0x7da18f58fd60>], [<ast.Name object at 0x7da18f58f700>, <ast.Dict object at 0x7da18f58c490>, <ast.Constant object at 0x7da18f58f190>, <ast.Constant object at 0x7da18f58eda0>]]
variable[pod] assign[=] call[call[name[__salt__]][constant[kubernetes.show_pod]], parameter[name[name], name[namespace]]]
if compare[name[pod] is constant[None]] begin[:]
call[name[ret]][constant[result]] assign[=] <ast.IfExp object at 0x7da18f58d180>
call[name[ret]][constant[comment]] assign[=] constant[The pod does not exist]
return[name[ret]]
if call[name[__opts__]][constant[test]] begin[:]
call[name[ret]][constant[comment]] assign[=] constant[The pod is going to be deleted]
call[name[ret]][constant[result]] assign[=] constant[None]
return[name[ret]]
variable[res] assign[=] call[call[name[__salt__]][constant[kubernetes.delete_pod]], parameter[name[name], name[namespace]]]
if <ast.BoolOp object at 0x7da18f58ecb0> begin[:]
call[name[ret]][constant[result]] assign[=] constant[True]
call[name[ret]][constant[changes]] assign[=] dictionary[[<ast.Constant object at 0x7da18f58e860>], [<ast.Dict object at 0x7da18f58c820>]]
if compare[call[name[res]][constant[code]] is constant[None]] begin[:]
call[name[ret]][constant[comment]] assign[=] constant[In progress]
return[name[ret]] | keyword[def] identifier[pod_absent] ( identifier[name] , identifier[namespace] = literal[string] ,** identifier[kwargs] ):
literal[string]
identifier[ret] ={ literal[string] : identifier[name] ,
literal[string] :{},
literal[string] : keyword[False] ,
literal[string] : literal[string] }
identifier[pod] = identifier[__salt__] [ literal[string] ]( identifier[name] , identifier[namespace] ,** identifier[kwargs] )
keyword[if] identifier[pod] keyword[is] keyword[None] :
identifier[ret] [ literal[string] ]= keyword[True] keyword[if] keyword[not] identifier[__opts__] [ literal[string] ] keyword[else] keyword[None]
identifier[ret] [ literal[string] ]= literal[string]
keyword[return] identifier[ret]
keyword[if] identifier[__opts__] [ literal[string] ]:
identifier[ret] [ literal[string] ]= literal[string]
identifier[ret] [ literal[string] ]= keyword[None]
keyword[return] identifier[ret]
identifier[res] = identifier[__salt__] [ literal[string] ]( identifier[name] , identifier[namespace] ,** identifier[kwargs] )
keyword[if] identifier[res] [ literal[string] ]== literal[int] keyword[or] identifier[res] [ literal[string] ] keyword[is] keyword[None] :
identifier[ret] [ literal[string] ]= keyword[True]
identifier[ret] [ literal[string] ]={
literal[string] :{
literal[string] : literal[string] , literal[string] : literal[string] }}
keyword[if] identifier[res] [ literal[string] ] keyword[is] keyword[None] :
identifier[ret] [ literal[string] ]= literal[string]
keyword[else] :
identifier[ret] [ literal[string] ]= identifier[res] [ literal[string] ]
keyword[else] :
identifier[ret] [ literal[string] ]= literal[string] . identifier[format] ( identifier[res] )
keyword[return] identifier[ret] | def pod_absent(name, namespace='default', **kwargs):
"""
Ensures that the named pod is absent from the given namespace.
name
The name of the pod
namespace
The name of the namespace
"""
ret = {'name': name, 'changes': {}, 'result': False, 'comment': ''}
pod = __salt__['kubernetes.show_pod'](name, namespace, **kwargs)
if pod is None:
ret['result'] = True if not __opts__['test'] else None
ret['comment'] = 'The pod does not exist'
return ret # depends on [control=['if'], data=[]]
if __opts__['test']:
ret['comment'] = 'The pod is going to be deleted'
ret['result'] = None
return ret # depends on [control=['if'], data=[]]
res = __salt__['kubernetes.delete_pod'](name, namespace, **kwargs)
if res['code'] == 200 or res['code'] is None:
ret['result'] = True
ret['changes'] = {'kubernetes.pod': {'new': 'absent', 'old': 'present'}}
if res['code'] is None:
ret['comment'] = 'In progress' # depends on [control=['if'], data=[]]
else:
ret['comment'] = res['message'] # depends on [control=['if'], data=[]]
else:
ret['comment'] = 'Something went wrong, response: {0}'.format(res)
return ret |
def unshift(self, chunk):
"""
Pushes a chunk of data back into the internal buffer. This is useful
in certain situations where a stream is being consumed by code that
needs to "un-consume" some amount of data that it has optimistically
pulled out of the source, so that the data can be passed on to some
other party.
"""
if chunk:
self._pos -= len(chunk)
self._unconsumed.append(chunk) | def function[unshift, parameter[self, chunk]]:
constant[
Pushes a chunk of data back into the internal buffer. This is useful
in certain situations where a stream is being consumed by code that
needs to "un-consume" some amount of data that it has optimistically
pulled out of the source, so that the data can be passed on to some
other party.
]
if name[chunk] begin[:]
<ast.AugAssign object at 0x7da1b1193490>
call[name[self]._unconsumed.append, parameter[name[chunk]]] | keyword[def] identifier[unshift] ( identifier[self] , identifier[chunk] ):
literal[string]
keyword[if] identifier[chunk] :
identifier[self] . identifier[_pos] -= identifier[len] ( identifier[chunk] )
identifier[self] . identifier[_unconsumed] . identifier[append] ( identifier[chunk] ) | def unshift(self, chunk):
"""
Pushes a chunk of data back into the internal buffer. This is useful
in certain situations where a stream is being consumed by code that
needs to "un-consume" some amount of data that it has optimistically
pulled out of the source, so that the data can be passed on to some
other party.
"""
if chunk:
self._pos -= len(chunk)
self._unconsumed.append(chunk) # depends on [control=['if'], data=[]] |
def searchone(filename, scan, paramfile, logfile, bdfdir):
""" Searches one scan of filename
filename is name of local sdm ('filename.GN' expected locally).
scan is scan number to search. if none provided, script prints all.
assumes filename is an sdm.
"""
filename = os.path.abspath(filename)
scans = ps.read_scans(filename, bdfdir=bdfdir)
if scan != 0:
d = rt.set_pipeline(filename, scan, paramfile=paramfile,
fileroot=os.path.basename(filename), logfile=logfile)
rt.pipeline(d, range(d['nsegments']))
# clean up and merge files
pc.merge_segments(filename, scan)
pc.merge_scans(os.path.dirname(filename), os.path.basename(filename), scans.keys())
else:
logger.info('Scans, Target names:')
logger.info('%s' % str([(ss, scans[ss]['source']) for ss in scans]))
logger.info('Example pipeline:')
state = rt.set_pipeline(filename, scans.popitem()[0], paramfile=paramfile,
fileroot=os.path.basename(filename), logfile=logfile) | def function[searchone, parameter[filename, scan, paramfile, logfile, bdfdir]]:
constant[ Searches one scan of filename
filename is name of local sdm ('filename.GN' expected locally).
scan is scan number to search. if none provided, script prints all.
assumes filename is an sdm.
]
variable[filename] assign[=] call[name[os].path.abspath, parameter[name[filename]]]
variable[scans] assign[=] call[name[ps].read_scans, parameter[name[filename]]]
if compare[name[scan] not_equal[!=] constant[0]] begin[:]
variable[d] assign[=] call[name[rt].set_pipeline, parameter[name[filename], name[scan]]]
call[name[rt].pipeline, parameter[name[d], call[name[range], parameter[call[name[d]][constant[nsegments]]]]]]
call[name[pc].merge_segments, parameter[name[filename], name[scan]]]
call[name[pc].merge_scans, parameter[call[name[os].path.dirname, parameter[name[filename]]], call[name[os].path.basename, parameter[name[filename]]], call[name[scans].keys, parameter[]]]] | keyword[def] identifier[searchone] ( identifier[filename] , identifier[scan] , identifier[paramfile] , identifier[logfile] , identifier[bdfdir] ):
literal[string]
identifier[filename] = identifier[os] . identifier[path] . identifier[abspath] ( identifier[filename] )
identifier[scans] = identifier[ps] . identifier[read_scans] ( identifier[filename] , identifier[bdfdir] = identifier[bdfdir] )
keyword[if] identifier[scan] != literal[int] :
identifier[d] = identifier[rt] . identifier[set_pipeline] ( identifier[filename] , identifier[scan] , identifier[paramfile] = identifier[paramfile] ,
identifier[fileroot] = identifier[os] . identifier[path] . identifier[basename] ( identifier[filename] ), identifier[logfile] = identifier[logfile] )
identifier[rt] . identifier[pipeline] ( identifier[d] , identifier[range] ( identifier[d] [ literal[string] ]))
identifier[pc] . identifier[merge_segments] ( identifier[filename] , identifier[scan] )
identifier[pc] . identifier[merge_scans] ( identifier[os] . identifier[path] . identifier[dirname] ( identifier[filename] ), identifier[os] . identifier[path] . identifier[basename] ( identifier[filename] ), identifier[scans] . identifier[keys] ())
keyword[else] :
identifier[logger] . identifier[info] ( literal[string] )
identifier[logger] . identifier[info] ( literal[string] % identifier[str] ([( identifier[ss] , identifier[scans] [ identifier[ss] ][ literal[string] ]) keyword[for] identifier[ss] keyword[in] identifier[scans] ]))
identifier[logger] . identifier[info] ( literal[string] )
identifier[state] = identifier[rt] . identifier[set_pipeline] ( identifier[filename] , identifier[scans] . identifier[popitem] ()[ literal[int] ], identifier[paramfile] = identifier[paramfile] ,
identifier[fileroot] = identifier[os] . identifier[path] . identifier[basename] ( identifier[filename] ), identifier[logfile] = identifier[logfile] ) | def searchone(filename, scan, paramfile, logfile, bdfdir):
""" Searches one scan of filename
filename is name of local sdm ('filename.GN' expected locally).
scan is scan number to search. if none provided, script prints all.
assumes filename is an sdm.
"""
filename = os.path.abspath(filename)
scans = ps.read_scans(filename, bdfdir=bdfdir)
if scan != 0:
d = rt.set_pipeline(filename, scan, paramfile=paramfile, fileroot=os.path.basename(filename), logfile=logfile)
rt.pipeline(d, range(d['nsegments']))
# clean up and merge files
pc.merge_segments(filename, scan)
pc.merge_scans(os.path.dirname(filename), os.path.basename(filename), scans.keys()) # depends on [control=['if'], data=['scan']]
else:
logger.info('Scans, Target names:')
logger.info('%s' % str([(ss, scans[ss]['source']) for ss in scans]))
logger.info('Example pipeline:')
state = rt.set_pipeline(filename, scans.popitem()[0], paramfile=paramfile, fileroot=os.path.basename(filename), logfile=logfile) |
def units(self, val):
"""Override the units on the underlying variable."""
if isinstance(val, units.Unit):
self._unit = val
else:
self._unit = units(val) | def function[units, parameter[self, val]]:
constant[Override the units on the underlying variable.]
if call[name[isinstance], parameter[name[val], name[units].Unit]] begin[:]
name[self]._unit assign[=] name[val] | keyword[def] identifier[units] ( identifier[self] , identifier[val] ):
literal[string]
keyword[if] identifier[isinstance] ( identifier[val] , identifier[units] . identifier[Unit] ):
identifier[self] . identifier[_unit] = identifier[val]
keyword[else] :
identifier[self] . identifier[_unit] = identifier[units] ( identifier[val] ) | def units(self, val):
"""Override the units on the underlying variable."""
if isinstance(val, units.Unit):
self._unit = val # depends on [control=['if'], data=[]]
else:
self._unit = units(val) |
def prepare_checkers(self):
"""return checkers needed for activated messages and reports"""
if not self.config.reports:
self.disable_reporters()
# get needed checkers
neededcheckers = [self]
for checker in self.get_checkers()[1:]:
messages = {msg for msg in checker.msgs if self.is_message_enabled(msg)}
if messages or any(self.report_is_enabled(r[0]) for r in checker.reports):
neededcheckers.append(checker)
# Sort checkers by priority
neededcheckers = sorted(
neededcheckers, key=operator.attrgetter("priority"), reverse=True
)
return neededcheckers | def function[prepare_checkers, parameter[self]]:
constant[return checkers needed for activated messages and reports]
if <ast.UnaryOp object at 0x7da1b03143a0> begin[:]
call[name[self].disable_reporters, parameter[]]
variable[neededcheckers] assign[=] list[[<ast.Name object at 0x7da1b0316350>]]
for taget[name[checker]] in starred[call[call[name[self].get_checkers, parameter[]]][<ast.Slice object at 0x7da1b0314a60>]] begin[:]
variable[messages] assign[=] <ast.SetComp object at 0x7da1b03169e0>
if <ast.BoolOp object at 0x7da1b0317610> begin[:]
call[name[neededcheckers].append, parameter[name[checker]]]
variable[neededcheckers] assign[=] call[name[sorted], parameter[name[neededcheckers]]]
return[name[neededcheckers]] | keyword[def] identifier[prepare_checkers] ( identifier[self] ):
literal[string]
keyword[if] keyword[not] identifier[self] . identifier[config] . identifier[reports] :
identifier[self] . identifier[disable_reporters] ()
identifier[neededcheckers] =[ identifier[self] ]
keyword[for] identifier[checker] keyword[in] identifier[self] . identifier[get_checkers] ()[ literal[int] :]:
identifier[messages] ={ identifier[msg] keyword[for] identifier[msg] keyword[in] identifier[checker] . identifier[msgs] keyword[if] identifier[self] . identifier[is_message_enabled] ( identifier[msg] )}
keyword[if] identifier[messages] keyword[or] identifier[any] ( identifier[self] . identifier[report_is_enabled] ( identifier[r] [ literal[int] ]) keyword[for] identifier[r] keyword[in] identifier[checker] . identifier[reports] ):
identifier[neededcheckers] . identifier[append] ( identifier[checker] )
identifier[neededcheckers] = identifier[sorted] (
identifier[neededcheckers] , identifier[key] = identifier[operator] . identifier[attrgetter] ( literal[string] ), identifier[reverse] = keyword[True]
)
keyword[return] identifier[neededcheckers] | def prepare_checkers(self):
"""return checkers needed for activated messages and reports"""
if not self.config.reports:
self.disable_reporters() # depends on [control=['if'], data=[]]
# get needed checkers
neededcheckers = [self]
for checker in self.get_checkers()[1:]:
messages = {msg for msg in checker.msgs if self.is_message_enabled(msg)}
if messages or any((self.report_is_enabled(r[0]) for r in checker.reports)):
neededcheckers.append(checker) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['checker']]
# Sort checkers by priority
neededcheckers = sorted(neededcheckers, key=operator.attrgetter('priority'), reverse=True)
return neededcheckers |
def replace_variables(self, source: str, variables: dict) -> str:
"""Replace {{variable-name}} with stored value."""
try:
replaced = re.sub(
"{{(.*?)}}", lambda m: variables.get(m.group(1), ""), source
)
except TypeError:
replaced = source
return replaced | def function[replace_variables, parameter[self, source, variables]]:
constant[Replace {{variable-name}} with stored value.]
<ast.Try object at 0x7da2047e9e40>
return[name[replaced]] | keyword[def] identifier[replace_variables] ( identifier[self] , identifier[source] : identifier[str] , identifier[variables] : identifier[dict] )-> identifier[str] :
literal[string]
keyword[try] :
identifier[replaced] = identifier[re] . identifier[sub] (
literal[string] , keyword[lambda] identifier[m] : identifier[variables] . identifier[get] ( identifier[m] . identifier[group] ( literal[int] ), literal[string] ), identifier[source]
)
keyword[except] identifier[TypeError] :
identifier[replaced] = identifier[source]
keyword[return] identifier[replaced] | def replace_variables(self, source: str, variables: dict) -> str:
"""Replace {{variable-name}} with stored value."""
try:
replaced = re.sub('{{(.*?)}}', lambda m: variables.get(m.group(1), ''), source) # depends on [control=['try'], data=[]]
except TypeError:
replaced = source # depends on [control=['except'], data=[]]
return replaced |
def insertChild(self, index, item):
"""
Inserts a new item in the given index.
:param index | <int>
item | <XGanttWidgetItem>
"""
super(XGanttWidgetItem, self).insertChild(index, item)
item.sync() | def function[insertChild, parameter[self, index, item]]:
constant[
Inserts a new item in the given index.
:param index | <int>
item | <XGanttWidgetItem>
]
call[call[name[super], parameter[name[XGanttWidgetItem], name[self]]].insertChild, parameter[name[index], name[item]]]
call[name[item].sync, parameter[]] | keyword[def] identifier[insertChild] ( identifier[self] , identifier[index] , identifier[item] ):
literal[string]
identifier[super] ( identifier[XGanttWidgetItem] , identifier[self] ). identifier[insertChild] ( identifier[index] , identifier[item] )
identifier[item] . identifier[sync] () | def insertChild(self, index, item):
"""
Inserts a new item in the given index.
:param index | <int>
item | <XGanttWidgetItem>
"""
super(XGanttWidgetItem, self).insertChild(index, item)
item.sync() |
def antenna1(self, context):
""" antenna1 data source """
lrow, urow = MS.uvw_row_extents(context)
antenna1 = self._manager.ordered_uvw_table.getcol(
MS.ANTENNA1, startrow=lrow, nrow=urow-lrow)
return antenna1.reshape(context.shape).astype(context.dtype) | def function[antenna1, parameter[self, context]]:
constant[ antenna1 data source ]
<ast.Tuple object at 0x7da1b0f99840> assign[=] call[name[MS].uvw_row_extents, parameter[name[context]]]
variable[antenna1] assign[=] call[name[self]._manager.ordered_uvw_table.getcol, parameter[name[MS].ANTENNA1]]
return[call[call[name[antenna1].reshape, parameter[name[context].shape]].astype, parameter[name[context].dtype]]] | keyword[def] identifier[antenna1] ( identifier[self] , identifier[context] ):
literal[string]
identifier[lrow] , identifier[urow] = identifier[MS] . identifier[uvw_row_extents] ( identifier[context] )
identifier[antenna1] = identifier[self] . identifier[_manager] . identifier[ordered_uvw_table] . identifier[getcol] (
identifier[MS] . identifier[ANTENNA1] , identifier[startrow] = identifier[lrow] , identifier[nrow] = identifier[urow] - identifier[lrow] )
keyword[return] identifier[antenna1] . identifier[reshape] ( identifier[context] . identifier[shape] ). identifier[astype] ( identifier[context] . identifier[dtype] ) | def antenna1(self, context):
""" antenna1 data source """
(lrow, urow) = MS.uvw_row_extents(context)
antenna1 = self._manager.ordered_uvw_table.getcol(MS.ANTENNA1, startrow=lrow, nrow=urow - lrow)
return antenna1.reshape(context.shape).astype(context.dtype) |
def qapplication(translate=True, test_time=3):
"""
Return QApplication instance
Creates it if it doesn't already exist
test_time: Time to maintain open the application when testing. It's given
in seconds
"""
if running_in_mac_app():
SpyderApplication = MacApplication
else:
SpyderApplication = QApplication
app = SpyderApplication.instance()
if app is None:
# Set Application name for Gnome 3
# https://groups.google.com/forum/#!topic/pyside/24qxvwfrRDs
app = SpyderApplication(['Spyder'])
# Set application name for KDE (See issue 2207)
app.setApplicationName('Spyder')
if translate:
install_translator(app)
test_ci = os.environ.get('TEST_CI_WIDGETS', None)
if test_ci is not None:
timer_shutdown = QTimer(app)
timer_shutdown.timeout.connect(app.quit)
timer_shutdown.start(test_time*1000)
return app | def function[qapplication, parameter[translate, test_time]]:
constant[
Return QApplication instance
Creates it if it doesn't already exist
test_time: Time to maintain open the application when testing. It's given
in seconds
]
if call[name[running_in_mac_app], parameter[]] begin[:]
variable[SpyderApplication] assign[=] name[MacApplication]
variable[app] assign[=] call[name[SpyderApplication].instance, parameter[]]
if compare[name[app] is constant[None]] begin[:]
variable[app] assign[=] call[name[SpyderApplication], parameter[list[[<ast.Constant object at 0x7da1b2040d30>]]]]
call[name[app].setApplicationName, parameter[constant[Spyder]]]
if name[translate] begin[:]
call[name[install_translator], parameter[name[app]]]
variable[test_ci] assign[=] call[name[os].environ.get, parameter[constant[TEST_CI_WIDGETS], constant[None]]]
if compare[name[test_ci] is_not constant[None]] begin[:]
variable[timer_shutdown] assign[=] call[name[QTimer], parameter[name[app]]]
call[name[timer_shutdown].timeout.connect, parameter[name[app].quit]]
call[name[timer_shutdown].start, parameter[binary_operation[name[test_time] * constant[1000]]]]
return[name[app]] | keyword[def] identifier[qapplication] ( identifier[translate] = keyword[True] , identifier[test_time] = literal[int] ):
literal[string]
keyword[if] identifier[running_in_mac_app] ():
identifier[SpyderApplication] = identifier[MacApplication]
keyword[else] :
identifier[SpyderApplication] = identifier[QApplication]
identifier[app] = identifier[SpyderApplication] . identifier[instance] ()
keyword[if] identifier[app] keyword[is] keyword[None] :
identifier[app] = identifier[SpyderApplication] ([ literal[string] ])
identifier[app] . identifier[setApplicationName] ( literal[string] )
keyword[if] identifier[translate] :
identifier[install_translator] ( identifier[app] )
identifier[test_ci] = identifier[os] . identifier[environ] . identifier[get] ( literal[string] , keyword[None] )
keyword[if] identifier[test_ci] keyword[is] keyword[not] keyword[None] :
identifier[timer_shutdown] = identifier[QTimer] ( identifier[app] )
identifier[timer_shutdown] . identifier[timeout] . identifier[connect] ( identifier[app] . identifier[quit] )
identifier[timer_shutdown] . identifier[start] ( identifier[test_time] * literal[int] )
keyword[return] identifier[app] | def qapplication(translate=True, test_time=3):
"""
Return QApplication instance
Creates it if it doesn't already exist
test_time: Time to maintain open the application when testing. It's given
in seconds
"""
if running_in_mac_app():
SpyderApplication = MacApplication # depends on [control=['if'], data=[]]
else:
SpyderApplication = QApplication
app = SpyderApplication.instance()
if app is None: # Set Application name for Gnome 3
# https://groups.google.com/forum/#!topic/pyside/24qxvwfrRDs
app = SpyderApplication(['Spyder']) # Set application name for KDE (See issue 2207)
app.setApplicationName('Spyder') # depends on [control=['if'], data=['app']]
if translate:
install_translator(app) # depends on [control=['if'], data=[]]
test_ci = os.environ.get('TEST_CI_WIDGETS', None)
if test_ci is not None:
timer_shutdown = QTimer(app)
timer_shutdown.timeout.connect(app.quit)
timer_shutdown.start(test_time * 1000) # depends on [control=['if'], data=[]]
return app |
def isprime(number):
"""
Check if a number is a prime number
:type number: integer
:param number: The number to check
"""
if number == 1:
return False
for i in range(2, int(number**0.5) + 1):
if number % i == 0:
return False
return True | def function[isprime, parameter[number]]:
constant[
Check if a number is a prime number
:type number: integer
:param number: The number to check
]
if compare[name[number] equal[==] constant[1]] begin[:]
return[constant[False]]
for taget[name[i]] in starred[call[name[range], parameter[constant[2], binary_operation[call[name[int], parameter[binary_operation[name[number] ** constant[0.5]]]] + constant[1]]]]] begin[:]
if compare[binary_operation[name[number] <ast.Mod object at 0x7da2590d6920> name[i]] equal[==] constant[0]] begin[:]
return[constant[False]]
return[constant[True]] | keyword[def] identifier[isprime] ( identifier[number] ):
literal[string]
keyword[if] identifier[number] == literal[int] :
keyword[return] keyword[False]
keyword[for] identifier[i] keyword[in] identifier[range] ( literal[int] , identifier[int] ( identifier[number] ** literal[int] )+ literal[int] ):
keyword[if] identifier[number] % identifier[i] == literal[int] :
keyword[return] keyword[False]
keyword[return] keyword[True] | def isprime(number):
"""
Check if a number is a prime number
:type number: integer
:param number: The number to check
"""
if number == 1:
return False # depends on [control=['if'], data=[]]
for i in range(2, int(number ** 0.5) + 1):
if number % i == 0:
return False # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['i']]
return True |
def profile(n):
"""
Usage:
@profile("my_func")
def my_func(): code
"""
def decorator_with_name(func):
def func_wrapper(*args, **kwargs):
with profile_kv(n):
return func(*args, **kwargs)
return func_wrapper
return decorator_with_name | def function[profile, parameter[n]]:
constant[
Usage:
@profile("my_func")
def my_func(): code
]
def function[decorator_with_name, parameter[func]]:
def function[func_wrapper, parameter[]]:
with call[name[profile_kv], parameter[name[n]]] begin[:]
return[call[name[func], parameter[<ast.Starred object at 0x7da18c4cdb70>]]]
return[name[func_wrapper]]
return[name[decorator_with_name]] | keyword[def] identifier[profile] ( identifier[n] ):
literal[string]
keyword[def] identifier[decorator_with_name] ( identifier[func] ):
keyword[def] identifier[func_wrapper] (* identifier[args] ,** identifier[kwargs] ):
keyword[with] identifier[profile_kv] ( identifier[n] ):
keyword[return] identifier[func] (* identifier[args] ,** identifier[kwargs] )
keyword[return] identifier[func_wrapper]
keyword[return] identifier[decorator_with_name] | def profile(n):
"""
Usage:
@profile("my_func")
def my_func(): code
"""
def decorator_with_name(func):
def func_wrapper(*args, **kwargs):
with profile_kv(n):
return func(*args, **kwargs) # depends on [control=['with'], data=[]]
return func_wrapper
return decorator_with_name |
def delete_validation_log(self, **kwargs):
"""Deletes validation log.
Parameters
-----------
kwargs : logging information
Find items to delete, leave it empty to delete all log.
Examples
---------
- see ``save_training_log``.
"""
self._fill_project_info(kwargs)
self.db.ValidLog.delete_many(kwargs)
logging.info("[Database] Delete ValidLog SUCCESS") | def function[delete_validation_log, parameter[self]]:
constant[Deletes validation log.
Parameters
-----------
kwargs : logging information
Find items to delete, leave it empty to delete all log.
Examples
---------
- see ``save_training_log``.
]
call[name[self]._fill_project_info, parameter[name[kwargs]]]
call[name[self].db.ValidLog.delete_many, parameter[name[kwargs]]]
call[name[logging].info, parameter[constant[[Database] Delete ValidLog SUCCESS]]] | keyword[def] identifier[delete_validation_log] ( identifier[self] ,** identifier[kwargs] ):
literal[string]
identifier[self] . identifier[_fill_project_info] ( identifier[kwargs] )
identifier[self] . identifier[db] . identifier[ValidLog] . identifier[delete_many] ( identifier[kwargs] )
identifier[logging] . identifier[info] ( literal[string] ) | def delete_validation_log(self, **kwargs):
"""Deletes validation log.
Parameters
-----------
kwargs : logging information
Find items to delete, leave it empty to delete all log.
Examples
---------
- see ``save_training_log``.
"""
self._fill_project_info(kwargs)
self.db.ValidLog.delete_many(kwargs)
logging.info('[Database] Delete ValidLog SUCCESS') |
def is_number(arg):
'''
is_number(x) yields True if x is a numeric object and False otherwise.
'''
return (is_number(mag(arg)) if is_quantity(arg) else
is_npscalar(arg, 'number') or is_npvalue(arg, 'number')) | def function[is_number, parameter[arg]]:
constant[
is_number(x) yields True if x is a numeric object and False otherwise.
]
return[<ast.IfExp object at 0x7da18bc706d0>] | keyword[def] identifier[is_number] ( identifier[arg] ):
literal[string]
keyword[return] ( identifier[is_number] ( identifier[mag] ( identifier[arg] )) keyword[if] identifier[is_quantity] ( identifier[arg] ) keyword[else]
identifier[is_npscalar] ( identifier[arg] , literal[string] ) keyword[or] identifier[is_npvalue] ( identifier[arg] , literal[string] )) | def is_number(arg):
"""
is_number(x) yields True if x is a numeric object and False otherwise.
"""
return is_number(mag(arg)) if is_quantity(arg) else is_npscalar(arg, 'number') or is_npvalue(arg, 'number') |
def popkey(dct,key,default=NotGiven):
"""Return dct[key] and delete dct[key].
If default is given, return it if dct[key] doesn't exist, otherwise raise
KeyError. """
try:
val = dct[key]
except KeyError:
if default is NotGiven:
raise
else:
return default
else:
del dct[key]
return val | def function[popkey, parameter[dct, key, default]]:
constant[Return dct[key] and delete dct[key].
If default is given, return it if dct[key] doesn't exist, otherwise raise
KeyError. ]
<ast.Try object at 0x7da18f811ea0> | keyword[def] identifier[popkey] ( identifier[dct] , identifier[key] , identifier[default] = identifier[NotGiven] ):
literal[string]
keyword[try] :
identifier[val] = identifier[dct] [ identifier[key] ]
keyword[except] identifier[KeyError] :
keyword[if] identifier[default] keyword[is] identifier[NotGiven] :
keyword[raise]
keyword[else] :
keyword[return] identifier[default]
keyword[else] :
keyword[del] identifier[dct] [ identifier[key] ]
keyword[return] identifier[val] | def popkey(dct, key, default=NotGiven):
"""Return dct[key] and delete dct[key].
If default is given, return it if dct[key] doesn't exist, otherwise raise
KeyError. """
try:
val = dct[key] # depends on [control=['try'], data=[]]
except KeyError:
if default is NotGiven:
raise # depends on [control=['if'], data=[]]
else:
return default # depends on [control=['except'], data=[]]
else:
del dct[key]
return val |
def create_packs(self):
# pylint: disable=too-many-statements,too-many-locals,too-many-branches, unused-argument
"""Create packs of hosts and services (all dependencies are resolved)
It create a graph. All hosts are connected to their
parents, and hosts without parent are connected to host 'root'.
services are linked to their host. Dependencies between hosts/services are managed.
REF: doc/pack-creation.png
:return: None
"""
logger.info("- creating hosts packs for the realms:")
# We create a graph with host in nodes
graph = Graph()
graph.add_nodes(list(self.hosts.items.keys()))
# links will be used for relations between hosts
links = set()
# Now the relations
for host in self.hosts:
# Add parent relations
for parent in getattr(host, 'parents', []):
if parent:
links.add((parent, host.uuid))
# Add the others dependencies
for (dep, _, _, _) in host.act_depend_of:
links.add((dep, host.uuid))
for (dep, _, _, _, _) in host.chk_depend_of:
links.add((dep, host.uuid))
# For services: they are linked with their own host but we need
# to have the hosts of the service dependency in the same pack too
for service in self.services:
for (dep_id, _, _, _) in service.act_depend_of:
if dep_id in self.services:
dep = self.services[dep_id]
else:
dep = self.hosts[dep_id]
# I don't care about dep host: they are just the host
# of the service...
if hasattr(dep, 'host'):
links.add((dep.host, service.host))
# The other type of dep
for (dep_id, _, _, _, _) in service.chk_depend_of:
if dep_id in self.services:
dep = self.services[dep_id]
else:
dep = self.hosts[dep_id]
links.add((dep.host, service.host))
# For host/service that are business based, we need to link them too
for service in [srv for srv in self.services if srv.got_business_rule]:
for elem_uuid in service.business_rule.list_all_elements():
if elem_uuid in self.services:
elem = self.services[elem_uuid]
if elem.host != service.host: # do not link a host with itself
links.add((elem.host, service.host))
else: # it's already a host but only if it is in the known hosts list!
if elem_uuid in self.hosts and elem_uuid != service.host:
links.add((elem_uuid, service.host))
# Same for hosts of course
for host in [hst for hst in self.hosts if hst.got_business_rule]:
for elem_uuid in host.business_rule.list_all_elements():
if elem_uuid in self.services: # if it's a service
elem = self.services[elem_uuid]
if elem.host != host.uuid:
links.add((elem.host, host.uuid))
else: # e is a host
if elem_uuid != host.uuid:
links.add((elem_uuid, host.uuid))
# Now we create links in the graph. With links (set)
# We are sure to call the less add_edge
for (dep, host) in links:
graph.add_edge(dep, host)
graph.add_edge(host, dep)
# Now We find the default realm
default_realm = self.realms.get_default()
# Access_list from a node il all nodes that are connected
# with it: it's a list of ours mini_packs
# Now we look if all elements of all packs have the
# same realm. If not, not good!
for hosts_pack in graph.get_accessibility_packs():
passively_checked_hosts = False
actively_checked_hosts = False
tmp_realms = set()
logger.debug(" - host pack hosts:")
for host_id in hosts_pack:
host = self.hosts[host_id]
logger.debug(" - %s", host.get_name())
passively_checked_hosts = passively_checked_hosts or host.passive_checks_enabled
actively_checked_hosts = actively_checked_hosts or host.active_checks_enabled
if host.realm:
tmp_realms.add(host.realm)
if len(tmp_realms) > 1:
self.add_error("Error: the realm configuration of your hosts is not correct "
"because there is more than one realm in one pack (host relations):")
for host_id in hosts_pack:
host = self.hosts[host_id]
if not host.realm:
self.add_error(' -> the host %s do not have a realm' % host.get_name())
else:
# Do not use get_name for the realm because it is not an object but a
# string containing the not found realm name if the realm is not existing!
# As of it, it may raise an exception
if host.realm not in self.realms:
self.add_error(' -> the host %s is in the realm %s' %
(host.get_name(), host.realm))
else:
host_realm = self.realms[host.realm]
self.add_error(' -> the host %s is in the realm %s' %
(host.get_name(), host_realm.get_name()))
if len(tmp_realms) == 1: # Ok, good
tmp_realm = tmp_realms.pop()
if tmp_realm in self.realms:
realm = self.realms[tmp_realm]
else:
realm = self.realms.find_by_name(tmp_realm)
if not realm:
self.add_error(' -> some hosts are in an unknown realm %s!' % tmp_realm)
else:
# Set the current hosts pack to its realm
logger.debug(" - append pack %s to realm %s", hosts_pack, realm.get_name())
realm.packs.append(hosts_pack)
# Set if the realm only has passively or actively checked hosts...
realm.passively_checked_hosts = passively_checked_hosts
realm.actively_checked_hosts = actively_checked_hosts
elif not tmp_realms: # Hum... no realm value? So default Realm
if default_realm is not None:
# Set the current hosts pack to the default realm
default_realm.packs.append(hosts_pack)
else:
self.add_error("Error: some hosts do not have a realm and you did not "
"defined a default realm!")
for host in hosts_pack:
self.add_error(' Impacted host: %s ' % host.get_name())
# The load balancing is for a loop, so all
# hosts of a realm (in a pack) will be dispatched
# to the schedulers of this realm
# REF: doc/pack-aggregation.png
# Count the numbers of elements in all the realms,
# to compare with the total number of hosts
nb_elements_all_realms = 0
for realm in self.realms:
packs = {}
# create round-robin iterator for id of cfg
# So dispatching is load balanced in a realm
# but add a entry in the round-robin tourniquet for
# every weight point schedulers (so Weight round robin)
weight_list = []
no_spare_schedulers = realm.schedulers
if not no_spare_schedulers:
if realm.potential_schedulers:
no_spare_schedulers = [realm.potential_schedulers[0]]
nb_schedulers = len(no_spare_schedulers)
if nb_schedulers:
logger.info(" %d scheduler(s) for the realm %s", nb_schedulers, realm.get_name())
else:
logger.warning(" no scheduler for the realm %s", realm.get_name())
# Maybe there is no scheduler in the realm, it can be a
# big problem if there are elements in packs
nb_elements = 0
for hosts_pack in realm.packs:
nb_elements += len(hosts_pack)
nb_elements_all_realms += len(hosts_pack)
realm.hosts_count = nb_elements
if nb_elements:
if not nb_schedulers:
self.add_error("The realm %s has %d hosts but no scheduler!"
% (realm.get_name(), nb_elements))
realm.packs = [] # Dumb pack
continue
logger.info(" %d hosts in the realm %s, distributed in %d linked packs",
nb_elements, realm.get_name(), len(realm.packs))
else:
logger.info(" no hosts in the realm %s", realm.get_name())
# Create a relation between a pack and each scheduler in the realm
packindex = 0
packindices = {}
for s_id in no_spare_schedulers:
scheduler = self.schedulers[s_id]
logger.debug(" scheduler: %s", scheduler.instance_id)
packindices[s_id] = packindex
packindex += 1
for i in range(0, scheduler.weight):
weight_list.append(s_id)
logger.debug(" pack indices: %s", packindices)
# packindices is indexed with the scheduler id and contains
# the configuration part number to get used: sched1:0, sched2: 1, ...
round_robin = itertools.cycle(weight_list)
# We must initialize nb_schedulers packs
for i in range(0, nb_schedulers):
packs[i] = []
# Try to load the history association dict so we will try to
# send the hosts in the same "pack"
assoc = {}
# Now we explode the numerous packs into reals packs:
# we 'load balance' them in a round-robin way but with count number of hosts in
# case have some packs with too many hosts and other with few
realm.packs.sort(reverse=True)
pack_higher_hosts = 0
for hosts_pack in realm.packs:
valid_value = False
old_pack = -1
for host_id in hosts_pack:
host = self.hosts[host_id]
old_i = assoc.get(host.get_name(), -1)
# Maybe it's a new, if so, don't count it
if old_i == -1:
continue
# Maybe it is the first we look at, if so, take it's value
if old_pack == -1 and old_i != -1:
old_pack = old_i
valid_value = True
continue
if old_i == old_pack:
valid_value = True
if old_i != old_pack:
valid_value = False
# If it's a valid sub pack and the pack id really exist, use it!
if valid_value and old_pack in packindices:
i = old_pack
else:
if isinstance(i, int):
i = next(round_robin)
elif (len(packs[packindices[i]]) + len(hosts_pack)) >= pack_higher_hosts:
pack_higher_hosts = (len(packs[packindices[i]]) + len(hosts_pack))
i = next(round_robin)
for host_id in hosts_pack:
host = self.hosts[host_id]
packs[packindices[i]].append(host_id)
assoc[host.get_name()] = i
# Now packs is a dictionary indexed with the configuration part
# number and containing the list of hosts
realm.packs = packs
logger.info(" total number of hosts in all realms: %d", nb_elements_all_realms)
if len(self.hosts) != nb_elements_all_realms:
logger.warning("There are %d hosts defined, and %d hosts dispatched in the realms. "
"Some hosts have been ignored", len(self.hosts), nb_elements_all_realms)
self.add_error("There are %d hosts defined, and %d hosts dispatched in the realms. "
"Some hosts have been "
"ignored" % (len(self.hosts), nb_elements_all_realms)) | def function[create_packs, parameter[self]]:
constant[Create packs of hosts and services (all dependencies are resolved)
It create a graph. All hosts are connected to their
parents, and hosts without parent are connected to host 'root'.
services are linked to their host. Dependencies between hosts/services are managed.
REF: doc/pack-creation.png
:return: None
]
call[name[logger].info, parameter[constant[- creating hosts packs for the realms:]]]
variable[graph] assign[=] call[name[Graph], parameter[]]
call[name[graph].add_nodes, parameter[call[name[list], parameter[call[name[self].hosts.items.keys, parameter[]]]]]]
variable[links] assign[=] call[name[set], parameter[]]
for taget[name[host]] in starred[name[self].hosts] begin[:]
for taget[name[parent]] in starred[call[name[getattr], parameter[name[host], constant[parents], list[[]]]]] begin[:]
if name[parent] begin[:]
call[name[links].add, parameter[tuple[[<ast.Name object at 0x7da204344a90>, <ast.Attribute object at 0x7da204345780>]]]]
for taget[tuple[[<ast.Name object at 0x7da204346020>, <ast.Name object at 0x7da204344580>, <ast.Name object at 0x7da2043442e0>, <ast.Name object at 0x7da204345a50>]]] in starred[name[host].act_depend_of] begin[:]
call[name[links].add, parameter[tuple[[<ast.Name object at 0x7da204345c90>, <ast.Attribute object at 0x7da204347640>]]]]
for taget[tuple[[<ast.Name object at 0x7da204346980>, <ast.Name object at 0x7da2043448e0>, <ast.Name object at 0x7da2043448b0>, <ast.Name object at 0x7da204344160>, <ast.Name object at 0x7da204345270>]]] in starred[name[host].chk_depend_of] begin[:]
call[name[links].add, parameter[tuple[[<ast.Name object at 0x7da204344c40>, <ast.Attribute object at 0x7da204347790>]]]]
for taget[name[service]] in starred[name[self].services] begin[:]
for taget[tuple[[<ast.Name object at 0x7da204347df0>, <ast.Name object at 0x7da204347ee0>, <ast.Name object at 0x7da204345030>, <ast.Name object at 0x7da204344190>]]] in starred[name[service].act_depend_of] begin[:]
if compare[name[dep_id] in name[self].services] begin[:]
variable[dep] assign[=] call[name[self].services][name[dep_id]]
if call[name[hasattr], parameter[name[dep], constant[host]]] begin[:]
call[name[links].add, parameter[tuple[[<ast.Attribute object at 0x7da20e9b0dc0>, <ast.Attribute object at 0x7da20e9b2b90>]]]]
for taget[tuple[[<ast.Name object at 0x7da204347940>, <ast.Name object at 0x7da204347160>, <ast.Name object at 0x7da2043461a0>, <ast.Name object at 0x7da2043452a0>, <ast.Name object at 0x7da204345cf0>]]] in starred[name[service].chk_depend_of] begin[:]
if compare[name[dep_id] in name[self].services] begin[:]
variable[dep] assign[=] call[name[self].services][name[dep_id]]
call[name[links].add, parameter[tuple[[<ast.Attribute object at 0x7da2043442b0>, <ast.Attribute object at 0x7da2043471c0>]]]]
for taget[name[service]] in starred[<ast.ListComp object at 0x7da204345570>] begin[:]
for taget[name[elem_uuid]] in starred[call[name[service].business_rule.list_all_elements, parameter[]]] begin[:]
if compare[name[elem_uuid] in name[self].services] begin[:]
variable[elem] assign[=] call[name[self].services][name[elem_uuid]]
if compare[name[elem].host not_equal[!=] name[service].host] begin[:]
call[name[links].add, parameter[tuple[[<ast.Attribute object at 0x7da204346e90>, <ast.Attribute object at 0x7da204346f20>]]]]
for taget[name[host]] in starred[<ast.ListComp object at 0x7da204346500>] begin[:]
for taget[name[elem_uuid]] in starred[call[name[host].business_rule.list_all_elements, parameter[]]] begin[:]
if compare[name[elem_uuid] in name[self].services] begin[:]
variable[elem] assign[=] call[name[self].services][name[elem_uuid]]
if compare[name[elem].host not_equal[!=] name[host].uuid] begin[:]
call[name[links].add, parameter[tuple[[<ast.Attribute object at 0x7da204345ba0>, <ast.Attribute object at 0x7da2043449d0>]]]]
for taget[tuple[[<ast.Name object at 0x7da204961d80>, <ast.Name object at 0x7da204962b00>]]] in starred[name[links]] begin[:]
call[name[graph].add_edge, parameter[name[dep], name[host]]]
call[name[graph].add_edge, parameter[name[host], name[dep]]]
variable[default_realm] assign[=] call[name[self].realms.get_default, parameter[]]
for taget[name[hosts_pack]] in starred[call[name[graph].get_accessibility_packs, parameter[]]] begin[:]
variable[passively_checked_hosts] assign[=] constant[False]
variable[actively_checked_hosts] assign[=] constant[False]
variable[tmp_realms] assign[=] call[name[set], parameter[]]
call[name[logger].debug, parameter[constant[ - host pack hosts:]]]
for taget[name[host_id]] in starred[name[hosts_pack]] begin[:]
variable[host] assign[=] call[name[self].hosts][name[host_id]]
call[name[logger].debug, parameter[constant[ - %s], call[name[host].get_name, parameter[]]]]
variable[passively_checked_hosts] assign[=] <ast.BoolOp object at 0x7da2049638b0>
variable[actively_checked_hosts] assign[=] <ast.BoolOp object at 0x7da204961fc0>
if name[host].realm begin[:]
call[name[tmp_realms].add, parameter[name[host].realm]]
if compare[call[name[len], parameter[name[tmp_realms]]] greater[>] constant[1]] begin[:]
call[name[self].add_error, parameter[constant[Error: the realm configuration of your hosts is not correct because there is more than one realm in one pack (host relations):]]]
for taget[name[host_id]] in starred[name[hosts_pack]] begin[:]
variable[host] assign[=] call[name[self].hosts][name[host_id]]
if <ast.UnaryOp object at 0x7da204963c70> begin[:]
call[name[self].add_error, parameter[binary_operation[constant[ -> the host %s do not have a realm] <ast.Mod object at 0x7da2590d6920> call[name[host].get_name, parameter[]]]]]
if compare[call[name[len], parameter[name[tmp_realms]]] equal[==] constant[1]] begin[:]
variable[tmp_realm] assign[=] call[name[tmp_realms].pop, parameter[]]
if compare[name[tmp_realm] in name[self].realms] begin[:]
variable[realm] assign[=] call[name[self].realms][name[tmp_realm]]
if <ast.UnaryOp object at 0x7da204960640> begin[:]
call[name[self].add_error, parameter[binary_operation[constant[ -> some hosts are in an unknown realm %s!] <ast.Mod object at 0x7da2590d6920> name[tmp_realm]]]]
variable[nb_elements_all_realms] assign[=] constant[0]
for taget[name[realm]] in starred[name[self].realms] begin[:]
variable[packs] assign[=] dictionary[[], []]
variable[weight_list] assign[=] list[[]]
variable[no_spare_schedulers] assign[=] name[realm].schedulers
if <ast.UnaryOp object at 0x7da1b23444f0> begin[:]
if name[realm].potential_schedulers begin[:]
variable[no_spare_schedulers] assign[=] list[[<ast.Subscript object at 0x7da1b23475e0>]]
variable[nb_schedulers] assign[=] call[name[len], parameter[name[no_spare_schedulers]]]
if name[nb_schedulers] begin[:]
call[name[logger].info, parameter[constant[ %d scheduler(s) for the realm %s], name[nb_schedulers], call[name[realm].get_name, parameter[]]]]
variable[nb_elements] assign[=] constant[0]
for taget[name[hosts_pack]] in starred[name[realm].packs] begin[:]
<ast.AugAssign object at 0x7da1b2344f10>
<ast.AugAssign object at 0x7da1b2345c30>
name[realm].hosts_count assign[=] name[nb_elements]
if name[nb_elements] begin[:]
if <ast.UnaryOp object at 0x7da1b2346a70> begin[:]
call[name[self].add_error, parameter[binary_operation[constant[The realm %s has %d hosts but no scheduler!] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Call object at 0x7da1b23450c0>, <ast.Name object at 0x7da1b23464d0>]]]]]
name[realm].packs assign[=] list[[]]
continue
call[name[logger].info, parameter[constant[ %d hosts in the realm %s, distributed in %d linked packs], name[nb_elements], call[name[realm].get_name, parameter[]], call[name[len], parameter[name[realm].packs]]]]
variable[packindex] assign[=] constant[0]
variable[packindices] assign[=] dictionary[[], []]
for taget[name[s_id]] in starred[name[no_spare_schedulers]] begin[:]
variable[scheduler] assign[=] call[name[self].schedulers][name[s_id]]
call[name[logger].debug, parameter[constant[ scheduler: %s], name[scheduler].instance_id]]
call[name[packindices]][name[s_id]] assign[=] name[packindex]
<ast.AugAssign object at 0x7da204565c60>
for taget[name[i]] in starred[call[name[range], parameter[constant[0], name[scheduler].weight]]] begin[:]
call[name[weight_list].append, parameter[name[s_id]]]
call[name[logger].debug, parameter[constant[ pack indices: %s], name[packindices]]]
variable[round_robin] assign[=] call[name[itertools].cycle, parameter[name[weight_list]]]
for taget[name[i]] in starred[call[name[range], parameter[constant[0], name[nb_schedulers]]]] begin[:]
call[name[packs]][name[i]] assign[=] list[[]]
variable[assoc] assign[=] dictionary[[], []]
call[name[realm].packs.sort, parameter[]]
variable[pack_higher_hosts] assign[=] constant[0]
for taget[name[hosts_pack]] in starred[name[realm].packs] begin[:]
variable[valid_value] assign[=] constant[False]
variable[old_pack] assign[=] <ast.UnaryOp object at 0x7da2045652a0>
for taget[name[host_id]] in starred[name[hosts_pack]] begin[:]
variable[host] assign[=] call[name[self].hosts][name[host_id]]
variable[old_i] assign[=] call[name[assoc].get, parameter[call[name[host].get_name, parameter[]], <ast.UnaryOp object at 0x7da204567340>]]
if compare[name[old_i] equal[==] <ast.UnaryOp object at 0x7da204564eb0>] begin[:]
continue
if <ast.BoolOp object at 0x7da2045661d0> begin[:]
variable[old_pack] assign[=] name[old_i]
variable[valid_value] assign[=] constant[True]
continue
if compare[name[old_i] equal[==] name[old_pack]] begin[:]
variable[valid_value] assign[=] constant[True]
if compare[name[old_i] not_equal[!=] name[old_pack]] begin[:]
variable[valid_value] assign[=] constant[False]
if <ast.BoolOp object at 0x7da204566350> begin[:]
variable[i] assign[=] name[old_pack]
for taget[name[host_id]] in starred[name[hosts_pack]] begin[:]
variable[host] assign[=] call[name[self].hosts][name[host_id]]
call[call[name[packs]][call[name[packindices]][name[i]]].append, parameter[name[host_id]]]
call[name[assoc]][call[name[host].get_name, parameter[]]] assign[=] name[i]
name[realm].packs assign[=] name[packs]
call[name[logger].info, parameter[constant[ total number of hosts in all realms: %d], name[nb_elements_all_realms]]]
if compare[call[name[len], parameter[name[self].hosts]] not_equal[!=] name[nb_elements_all_realms]] begin[:]
call[name[logger].warning, parameter[constant[There are %d hosts defined, and %d hosts dispatched in the realms. Some hosts have been ignored], call[name[len], parameter[name[self].hosts]], name[nb_elements_all_realms]]]
call[name[self].add_error, parameter[binary_operation[constant[There are %d hosts defined, and %d hosts dispatched in the realms. Some hosts have been ignored] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Call object at 0x7da204567910>, <ast.Name object at 0x7da204567070>]]]]] | keyword[def] identifier[create_packs] ( identifier[self] ):
literal[string]
identifier[logger] . identifier[info] ( literal[string] )
identifier[graph] = identifier[Graph] ()
identifier[graph] . identifier[add_nodes] ( identifier[list] ( identifier[self] . identifier[hosts] . identifier[items] . identifier[keys] ()))
identifier[links] = identifier[set] ()
keyword[for] identifier[host] keyword[in] identifier[self] . identifier[hosts] :
keyword[for] identifier[parent] keyword[in] identifier[getattr] ( identifier[host] , literal[string] ,[]):
keyword[if] identifier[parent] :
identifier[links] . identifier[add] (( identifier[parent] , identifier[host] . identifier[uuid] ))
keyword[for] ( identifier[dep] , identifier[_] , identifier[_] , identifier[_] ) keyword[in] identifier[host] . identifier[act_depend_of] :
identifier[links] . identifier[add] (( identifier[dep] , identifier[host] . identifier[uuid] ))
keyword[for] ( identifier[dep] , identifier[_] , identifier[_] , identifier[_] , identifier[_] ) keyword[in] identifier[host] . identifier[chk_depend_of] :
identifier[links] . identifier[add] (( identifier[dep] , identifier[host] . identifier[uuid] ))
keyword[for] identifier[service] keyword[in] identifier[self] . identifier[services] :
keyword[for] ( identifier[dep_id] , identifier[_] , identifier[_] , identifier[_] ) keyword[in] identifier[service] . identifier[act_depend_of] :
keyword[if] identifier[dep_id] keyword[in] identifier[self] . identifier[services] :
identifier[dep] = identifier[self] . identifier[services] [ identifier[dep_id] ]
keyword[else] :
identifier[dep] = identifier[self] . identifier[hosts] [ identifier[dep_id] ]
keyword[if] identifier[hasattr] ( identifier[dep] , literal[string] ):
identifier[links] . identifier[add] (( identifier[dep] . identifier[host] , identifier[service] . identifier[host] ))
keyword[for] ( identifier[dep_id] , identifier[_] , identifier[_] , identifier[_] , identifier[_] ) keyword[in] identifier[service] . identifier[chk_depend_of] :
keyword[if] identifier[dep_id] keyword[in] identifier[self] . identifier[services] :
identifier[dep] = identifier[self] . identifier[services] [ identifier[dep_id] ]
keyword[else] :
identifier[dep] = identifier[self] . identifier[hosts] [ identifier[dep_id] ]
identifier[links] . identifier[add] (( identifier[dep] . identifier[host] , identifier[service] . identifier[host] ))
keyword[for] identifier[service] keyword[in] [ identifier[srv] keyword[for] identifier[srv] keyword[in] identifier[self] . identifier[services] keyword[if] identifier[srv] . identifier[got_business_rule] ]:
keyword[for] identifier[elem_uuid] keyword[in] identifier[service] . identifier[business_rule] . identifier[list_all_elements] ():
keyword[if] identifier[elem_uuid] keyword[in] identifier[self] . identifier[services] :
identifier[elem] = identifier[self] . identifier[services] [ identifier[elem_uuid] ]
keyword[if] identifier[elem] . identifier[host] != identifier[service] . identifier[host] :
identifier[links] . identifier[add] (( identifier[elem] . identifier[host] , identifier[service] . identifier[host] ))
keyword[else] :
keyword[if] identifier[elem_uuid] keyword[in] identifier[self] . identifier[hosts] keyword[and] identifier[elem_uuid] != identifier[service] . identifier[host] :
identifier[links] . identifier[add] (( identifier[elem_uuid] , identifier[service] . identifier[host] ))
keyword[for] identifier[host] keyword[in] [ identifier[hst] keyword[for] identifier[hst] keyword[in] identifier[self] . identifier[hosts] keyword[if] identifier[hst] . identifier[got_business_rule] ]:
keyword[for] identifier[elem_uuid] keyword[in] identifier[host] . identifier[business_rule] . identifier[list_all_elements] ():
keyword[if] identifier[elem_uuid] keyword[in] identifier[self] . identifier[services] :
identifier[elem] = identifier[self] . identifier[services] [ identifier[elem_uuid] ]
keyword[if] identifier[elem] . identifier[host] != identifier[host] . identifier[uuid] :
identifier[links] . identifier[add] (( identifier[elem] . identifier[host] , identifier[host] . identifier[uuid] ))
keyword[else] :
keyword[if] identifier[elem_uuid] != identifier[host] . identifier[uuid] :
identifier[links] . identifier[add] (( identifier[elem_uuid] , identifier[host] . identifier[uuid] ))
keyword[for] ( identifier[dep] , identifier[host] ) keyword[in] identifier[links] :
identifier[graph] . identifier[add_edge] ( identifier[dep] , identifier[host] )
identifier[graph] . identifier[add_edge] ( identifier[host] , identifier[dep] )
identifier[default_realm] = identifier[self] . identifier[realms] . identifier[get_default] ()
keyword[for] identifier[hosts_pack] keyword[in] identifier[graph] . identifier[get_accessibility_packs] ():
identifier[passively_checked_hosts] = keyword[False]
identifier[actively_checked_hosts] = keyword[False]
identifier[tmp_realms] = identifier[set] ()
identifier[logger] . identifier[debug] ( literal[string] )
keyword[for] identifier[host_id] keyword[in] identifier[hosts_pack] :
identifier[host] = identifier[self] . identifier[hosts] [ identifier[host_id] ]
identifier[logger] . identifier[debug] ( literal[string] , identifier[host] . identifier[get_name] ())
identifier[passively_checked_hosts] = identifier[passively_checked_hosts] keyword[or] identifier[host] . identifier[passive_checks_enabled]
identifier[actively_checked_hosts] = identifier[actively_checked_hosts] keyword[or] identifier[host] . identifier[active_checks_enabled]
keyword[if] identifier[host] . identifier[realm] :
identifier[tmp_realms] . identifier[add] ( identifier[host] . identifier[realm] )
keyword[if] identifier[len] ( identifier[tmp_realms] )> literal[int] :
identifier[self] . identifier[add_error] ( literal[string]
literal[string] )
keyword[for] identifier[host_id] keyword[in] identifier[hosts_pack] :
identifier[host] = identifier[self] . identifier[hosts] [ identifier[host_id] ]
keyword[if] keyword[not] identifier[host] . identifier[realm] :
identifier[self] . identifier[add_error] ( literal[string] % identifier[host] . identifier[get_name] ())
keyword[else] :
keyword[if] identifier[host] . identifier[realm] keyword[not] keyword[in] identifier[self] . identifier[realms] :
identifier[self] . identifier[add_error] ( literal[string] %
( identifier[host] . identifier[get_name] (), identifier[host] . identifier[realm] ))
keyword[else] :
identifier[host_realm] = identifier[self] . identifier[realms] [ identifier[host] . identifier[realm] ]
identifier[self] . identifier[add_error] ( literal[string] %
( identifier[host] . identifier[get_name] (), identifier[host_realm] . identifier[get_name] ()))
keyword[if] identifier[len] ( identifier[tmp_realms] )== literal[int] :
identifier[tmp_realm] = identifier[tmp_realms] . identifier[pop] ()
keyword[if] identifier[tmp_realm] keyword[in] identifier[self] . identifier[realms] :
identifier[realm] = identifier[self] . identifier[realms] [ identifier[tmp_realm] ]
keyword[else] :
identifier[realm] = identifier[self] . identifier[realms] . identifier[find_by_name] ( identifier[tmp_realm] )
keyword[if] keyword[not] identifier[realm] :
identifier[self] . identifier[add_error] ( literal[string] % identifier[tmp_realm] )
keyword[else] :
identifier[logger] . identifier[debug] ( literal[string] , identifier[hosts_pack] , identifier[realm] . identifier[get_name] ())
identifier[realm] . identifier[packs] . identifier[append] ( identifier[hosts_pack] )
identifier[realm] . identifier[passively_checked_hosts] = identifier[passively_checked_hosts]
identifier[realm] . identifier[actively_checked_hosts] = identifier[actively_checked_hosts]
keyword[elif] keyword[not] identifier[tmp_realms] :
keyword[if] identifier[default_realm] keyword[is] keyword[not] keyword[None] :
identifier[default_realm] . identifier[packs] . identifier[append] ( identifier[hosts_pack] )
keyword[else] :
identifier[self] . identifier[add_error] ( literal[string]
literal[string] )
keyword[for] identifier[host] keyword[in] identifier[hosts_pack] :
identifier[self] . identifier[add_error] ( literal[string] % identifier[host] . identifier[get_name] ())
identifier[nb_elements_all_realms] = literal[int]
keyword[for] identifier[realm] keyword[in] identifier[self] . identifier[realms] :
identifier[packs] ={}
identifier[weight_list] =[]
identifier[no_spare_schedulers] = identifier[realm] . identifier[schedulers]
keyword[if] keyword[not] identifier[no_spare_schedulers] :
keyword[if] identifier[realm] . identifier[potential_schedulers] :
identifier[no_spare_schedulers] =[ identifier[realm] . identifier[potential_schedulers] [ literal[int] ]]
identifier[nb_schedulers] = identifier[len] ( identifier[no_spare_schedulers] )
keyword[if] identifier[nb_schedulers] :
identifier[logger] . identifier[info] ( literal[string] , identifier[nb_schedulers] , identifier[realm] . identifier[get_name] ())
keyword[else] :
identifier[logger] . identifier[warning] ( literal[string] , identifier[realm] . identifier[get_name] ())
identifier[nb_elements] = literal[int]
keyword[for] identifier[hosts_pack] keyword[in] identifier[realm] . identifier[packs] :
identifier[nb_elements] += identifier[len] ( identifier[hosts_pack] )
identifier[nb_elements_all_realms] += identifier[len] ( identifier[hosts_pack] )
identifier[realm] . identifier[hosts_count] = identifier[nb_elements]
keyword[if] identifier[nb_elements] :
keyword[if] keyword[not] identifier[nb_schedulers] :
identifier[self] . identifier[add_error] ( literal[string]
%( identifier[realm] . identifier[get_name] (), identifier[nb_elements] ))
identifier[realm] . identifier[packs] =[]
keyword[continue]
identifier[logger] . identifier[info] ( literal[string] ,
identifier[nb_elements] , identifier[realm] . identifier[get_name] (), identifier[len] ( identifier[realm] . identifier[packs] ))
keyword[else] :
identifier[logger] . identifier[info] ( literal[string] , identifier[realm] . identifier[get_name] ())
identifier[packindex] = literal[int]
identifier[packindices] ={}
keyword[for] identifier[s_id] keyword[in] identifier[no_spare_schedulers] :
identifier[scheduler] = identifier[self] . identifier[schedulers] [ identifier[s_id] ]
identifier[logger] . identifier[debug] ( literal[string] , identifier[scheduler] . identifier[instance_id] )
identifier[packindices] [ identifier[s_id] ]= identifier[packindex]
identifier[packindex] += literal[int]
keyword[for] identifier[i] keyword[in] identifier[range] ( literal[int] , identifier[scheduler] . identifier[weight] ):
identifier[weight_list] . identifier[append] ( identifier[s_id] )
identifier[logger] . identifier[debug] ( literal[string] , identifier[packindices] )
identifier[round_robin] = identifier[itertools] . identifier[cycle] ( identifier[weight_list] )
keyword[for] identifier[i] keyword[in] identifier[range] ( literal[int] , identifier[nb_schedulers] ):
identifier[packs] [ identifier[i] ]=[]
identifier[assoc] ={}
identifier[realm] . identifier[packs] . identifier[sort] ( identifier[reverse] = keyword[True] )
identifier[pack_higher_hosts] = literal[int]
keyword[for] identifier[hosts_pack] keyword[in] identifier[realm] . identifier[packs] :
identifier[valid_value] = keyword[False]
identifier[old_pack] =- literal[int]
keyword[for] identifier[host_id] keyword[in] identifier[hosts_pack] :
identifier[host] = identifier[self] . identifier[hosts] [ identifier[host_id] ]
identifier[old_i] = identifier[assoc] . identifier[get] ( identifier[host] . identifier[get_name] (),- literal[int] )
keyword[if] identifier[old_i] ==- literal[int] :
keyword[continue]
keyword[if] identifier[old_pack] ==- literal[int] keyword[and] identifier[old_i] !=- literal[int] :
identifier[old_pack] = identifier[old_i]
identifier[valid_value] = keyword[True]
keyword[continue]
keyword[if] identifier[old_i] == identifier[old_pack] :
identifier[valid_value] = keyword[True]
keyword[if] identifier[old_i] != identifier[old_pack] :
identifier[valid_value] = keyword[False]
keyword[if] identifier[valid_value] keyword[and] identifier[old_pack] keyword[in] identifier[packindices] :
identifier[i] = identifier[old_pack]
keyword[else] :
keyword[if] identifier[isinstance] ( identifier[i] , identifier[int] ):
identifier[i] = identifier[next] ( identifier[round_robin] )
keyword[elif] ( identifier[len] ( identifier[packs] [ identifier[packindices] [ identifier[i] ]])+ identifier[len] ( identifier[hosts_pack] ))>= identifier[pack_higher_hosts] :
identifier[pack_higher_hosts] =( identifier[len] ( identifier[packs] [ identifier[packindices] [ identifier[i] ]])+ identifier[len] ( identifier[hosts_pack] ))
identifier[i] = identifier[next] ( identifier[round_robin] )
keyword[for] identifier[host_id] keyword[in] identifier[hosts_pack] :
identifier[host] = identifier[self] . identifier[hosts] [ identifier[host_id] ]
identifier[packs] [ identifier[packindices] [ identifier[i] ]]. identifier[append] ( identifier[host_id] )
identifier[assoc] [ identifier[host] . identifier[get_name] ()]= identifier[i]
identifier[realm] . identifier[packs] = identifier[packs]
identifier[logger] . identifier[info] ( literal[string] , identifier[nb_elements_all_realms] )
keyword[if] identifier[len] ( identifier[self] . identifier[hosts] )!= identifier[nb_elements_all_realms] :
identifier[logger] . identifier[warning] ( literal[string]
literal[string] , identifier[len] ( identifier[self] . identifier[hosts] ), identifier[nb_elements_all_realms] )
identifier[self] . identifier[add_error] ( literal[string]
literal[string]
literal[string] %( identifier[len] ( identifier[self] . identifier[hosts] ), identifier[nb_elements_all_realms] )) | def create_packs(self):
# pylint: disable=too-many-statements,too-many-locals,too-many-branches, unused-argument
"Create packs of hosts and services (all dependencies are resolved)\n It create a graph. All hosts are connected to their\n parents, and hosts without parent are connected to host 'root'.\n services are linked to their host. Dependencies between hosts/services are managed.\n REF: doc/pack-creation.png\n\n :return: None\n "
logger.info('- creating hosts packs for the realms:')
# We create a graph with host in nodes
graph = Graph()
graph.add_nodes(list(self.hosts.items.keys()))
# links will be used for relations between hosts
links = set()
# Now the relations
for host in self.hosts:
# Add parent relations
for parent in getattr(host, 'parents', []):
if parent:
links.add((parent, host.uuid)) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['parent']]
# Add the others dependencies
for (dep, _, _, _) in host.act_depend_of:
links.add((dep, host.uuid)) # depends on [control=['for'], data=[]]
for (dep, _, _, _, _) in host.chk_depend_of:
links.add((dep, host.uuid)) # depends on [control=['for'], data=[]] # depends on [control=['for'], data=['host']]
# For services: they are linked with their own host but we need
# to have the hosts of the service dependency in the same pack too
for service in self.services:
for (dep_id, _, _, _) in service.act_depend_of:
if dep_id in self.services:
dep = self.services[dep_id] # depends on [control=['if'], data=['dep_id']]
else:
dep = self.hosts[dep_id]
# I don't care about dep host: they are just the host
# of the service...
if hasattr(dep, 'host'):
links.add((dep.host, service.host)) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]]
# The other type of dep
for (dep_id, _, _, _, _) in service.chk_depend_of:
if dep_id in self.services:
dep = self.services[dep_id] # depends on [control=['if'], data=['dep_id']]
else:
dep = self.hosts[dep_id]
links.add((dep.host, service.host)) # depends on [control=['for'], data=[]] # depends on [control=['for'], data=['service']]
# For host/service that are business based, we need to link them too
for service in [srv for srv in self.services if srv.got_business_rule]:
for elem_uuid in service.business_rule.list_all_elements():
if elem_uuid in self.services:
elem = self.services[elem_uuid]
if elem.host != service.host: # do not link a host with itself
links.add((elem.host, service.host)) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['elem_uuid']] # it's already a host but only if it is in the known hosts list!
elif elem_uuid in self.hosts and elem_uuid != service.host:
links.add((elem_uuid, service.host)) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['elem_uuid']] # depends on [control=['for'], data=['service']]
# Same for hosts of course
for host in [hst for hst in self.hosts if hst.got_business_rule]:
for elem_uuid in host.business_rule.list_all_elements():
if elem_uuid in self.services: # if it's a service
elem = self.services[elem_uuid]
if elem.host != host.uuid:
links.add((elem.host, host.uuid)) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['elem_uuid']] # e is a host
elif elem_uuid != host.uuid:
links.add((elem_uuid, host.uuid)) # depends on [control=['if'], data=['elem_uuid']] # depends on [control=['for'], data=['elem_uuid']] # depends on [control=['for'], data=['host']]
# Now we create links in the graph. With links (set)
# We are sure to call the less add_edge
for (dep, host) in links:
graph.add_edge(dep, host)
graph.add_edge(host, dep) # depends on [control=['for'], data=[]]
# Now We find the default realm
default_realm = self.realms.get_default()
# Access_list from a node il all nodes that are connected
# with it: it's a list of ours mini_packs
# Now we look if all elements of all packs have the
# same realm. If not, not good!
for hosts_pack in graph.get_accessibility_packs():
passively_checked_hosts = False
actively_checked_hosts = False
tmp_realms = set()
logger.debug(' - host pack hosts:')
for host_id in hosts_pack:
host = self.hosts[host_id]
logger.debug(' - %s', host.get_name())
passively_checked_hosts = passively_checked_hosts or host.passive_checks_enabled
actively_checked_hosts = actively_checked_hosts or host.active_checks_enabled
if host.realm:
tmp_realms.add(host.realm) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['host_id']]
if len(tmp_realms) > 1:
self.add_error('Error: the realm configuration of your hosts is not correct because there is more than one realm in one pack (host relations):')
for host_id in hosts_pack:
host = self.hosts[host_id]
if not host.realm:
self.add_error(' -> the host %s do not have a realm' % host.get_name()) # depends on [control=['if'], data=[]]
# Do not use get_name for the realm because it is not an object but a
# string containing the not found realm name if the realm is not existing!
# As of it, it may raise an exception
elif host.realm not in self.realms:
self.add_error(' -> the host %s is in the realm %s' % (host.get_name(), host.realm)) # depends on [control=['if'], data=[]]
else:
host_realm = self.realms[host.realm]
self.add_error(' -> the host %s is in the realm %s' % (host.get_name(), host_realm.get_name())) # depends on [control=['for'], data=['host_id']] # depends on [control=['if'], data=[]]
if len(tmp_realms) == 1: # Ok, good
tmp_realm = tmp_realms.pop()
if tmp_realm in self.realms:
realm = self.realms[tmp_realm] # depends on [control=['if'], data=['tmp_realm']]
else:
realm = self.realms.find_by_name(tmp_realm)
if not realm:
self.add_error(' -> some hosts are in an unknown realm %s!' % tmp_realm) # depends on [control=['if'], data=[]]
else:
# Set the current hosts pack to its realm
logger.debug(' - append pack %s to realm %s', hosts_pack, realm.get_name())
realm.packs.append(hosts_pack)
# Set if the realm only has passively or actively checked hosts...
realm.passively_checked_hosts = passively_checked_hosts
realm.actively_checked_hosts = actively_checked_hosts # depends on [control=['if'], data=[]]
elif not tmp_realms: # Hum... no realm value? So default Realm
if default_realm is not None:
# Set the current hosts pack to the default realm
default_realm.packs.append(hosts_pack) # depends on [control=['if'], data=['default_realm']]
else:
self.add_error('Error: some hosts do not have a realm and you did not defined a default realm!')
for host in hosts_pack:
self.add_error(' Impacted host: %s ' % host.get_name()) # depends on [control=['for'], data=['host']] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['hosts_pack']]
# The load balancing is for a loop, so all
# hosts of a realm (in a pack) will be dispatched
# to the schedulers of this realm
# REF: doc/pack-aggregation.png
# Count the numbers of elements in all the realms,
# to compare with the total number of hosts
nb_elements_all_realms = 0
for realm in self.realms:
packs = {}
# create round-robin iterator for id of cfg
# So dispatching is load balanced in a realm
# but add a entry in the round-robin tourniquet for
# every weight point schedulers (so Weight round robin)
weight_list = []
no_spare_schedulers = realm.schedulers
if not no_spare_schedulers:
if realm.potential_schedulers:
no_spare_schedulers = [realm.potential_schedulers[0]] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
nb_schedulers = len(no_spare_schedulers)
if nb_schedulers:
logger.info(' %d scheduler(s) for the realm %s', nb_schedulers, realm.get_name()) # depends on [control=['if'], data=[]]
else:
logger.warning(' no scheduler for the realm %s', realm.get_name())
# Maybe there is no scheduler in the realm, it can be a
# big problem if there are elements in packs
nb_elements = 0
for hosts_pack in realm.packs:
nb_elements += len(hosts_pack)
nb_elements_all_realms += len(hosts_pack) # depends on [control=['for'], data=['hosts_pack']]
realm.hosts_count = nb_elements
if nb_elements:
if not nb_schedulers:
self.add_error('The realm %s has %d hosts but no scheduler!' % (realm.get_name(), nb_elements))
realm.packs = [] # Dumb pack
continue # depends on [control=['if'], data=[]]
logger.info(' %d hosts in the realm %s, distributed in %d linked packs', nb_elements, realm.get_name(), len(realm.packs)) # depends on [control=['if'], data=[]]
else:
logger.info(' no hosts in the realm %s', realm.get_name())
# Create a relation between a pack and each scheduler in the realm
packindex = 0
packindices = {}
for s_id in no_spare_schedulers:
scheduler = self.schedulers[s_id]
logger.debug(' scheduler: %s', scheduler.instance_id)
packindices[s_id] = packindex
packindex += 1
for i in range(0, scheduler.weight):
weight_list.append(s_id) # depends on [control=['for'], data=[]] # depends on [control=['for'], data=['s_id']]
logger.debug(' pack indices: %s', packindices)
# packindices is indexed with the scheduler id and contains
# the configuration part number to get used: sched1:0, sched2: 1, ...
round_robin = itertools.cycle(weight_list)
# We must initialize nb_schedulers packs
for i in range(0, nb_schedulers):
packs[i] = [] # depends on [control=['for'], data=['i']]
# Try to load the history association dict so we will try to
# send the hosts in the same "pack"
assoc = {}
# Now we explode the numerous packs into reals packs:
# we 'load balance' them in a round-robin way but with count number of hosts in
# case have some packs with too many hosts and other with few
realm.packs.sort(reverse=True)
pack_higher_hosts = 0
for hosts_pack in realm.packs:
valid_value = False
old_pack = -1
for host_id in hosts_pack:
host = self.hosts[host_id]
old_i = assoc.get(host.get_name(), -1)
# Maybe it's a new, if so, don't count it
if old_i == -1:
continue # depends on [control=['if'], data=[]]
# Maybe it is the first we look at, if so, take it's value
if old_pack == -1 and old_i != -1:
old_pack = old_i
valid_value = True
continue # depends on [control=['if'], data=[]]
if old_i == old_pack:
valid_value = True # depends on [control=['if'], data=[]]
if old_i != old_pack:
valid_value = False # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['host_id']]
# If it's a valid sub pack and the pack id really exist, use it!
if valid_value and old_pack in packindices:
i = old_pack # depends on [control=['if'], data=[]]
elif isinstance(i, int):
i = next(round_robin) # depends on [control=['if'], data=[]]
elif len(packs[packindices[i]]) + len(hosts_pack) >= pack_higher_hosts:
pack_higher_hosts = len(packs[packindices[i]]) + len(hosts_pack)
i = next(round_robin) # depends on [control=['if'], data=['pack_higher_hosts']]
for host_id in hosts_pack:
host = self.hosts[host_id]
packs[packindices[i]].append(host_id)
assoc[host.get_name()] = i # depends on [control=['for'], data=['host_id']] # depends on [control=['for'], data=['hosts_pack']]
# Now packs is a dictionary indexed with the configuration part
# number and containing the list of hosts
realm.packs = packs # depends on [control=['for'], data=['realm']]
logger.info(' total number of hosts in all realms: %d', nb_elements_all_realms)
if len(self.hosts) != nb_elements_all_realms:
logger.warning('There are %d hosts defined, and %d hosts dispatched in the realms. Some hosts have been ignored', len(self.hosts), nb_elements_all_realms)
self.add_error('There are %d hosts defined, and %d hosts dispatched in the realms. Some hosts have been ignored' % (len(self.hosts), nb_elements_all_realms)) # depends on [control=['if'], data=['nb_elements_all_realms']] |
def element_coordinates(cls, element):
    """Return the bounding-box coordinates of *element* as a 4-tuple.

    The box is parsed out of the element's ``title`` attribute using
    ``cls.box_pattern``; when the attribute is absent or does not match,
    the default ``(0, 0, 0, 0)`` is returned.
    """
    if 'title' not in element.attrib:
        return (0, 0, 0, 0)
    match = cls.box_pattern.search(element.attrib['title'])
    if not match:
        return (0, 0, 0, 0)
    parts = match.group(1).split()
    return Rect._make(int(parts[idx]) for idx in range(4))
constant[
Returns a tuple containing the coordinates of the bounding box around
an element
]
variable[out] assign[=] tuple[[<ast.Constant object at 0x7da1b1bcb100>, <ast.Constant object at 0x7da1b1bcb460>, <ast.Constant object at 0x7da1b1bcb5e0>, <ast.Constant object at 0x7da1b1bc9a20>]]
if compare[constant[title] in name[element].attrib] begin[:]
variable[matches] assign[=] call[name[cls].box_pattern.search, parameter[call[name[element].attrib][constant[title]]]]
if name[matches] begin[:]
variable[coords] assign[=] call[call[name[matches].group, parameter[constant[1]]].split, parameter[]]
variable[out] assign[=] call[name[Rect]._make, parameter[<ast.GeneratorExp object at 0x7da1b1bc9900>]]
return[name[out]] | keyword[def] identifier[element_coordinates] ( identifier[cls] , identifier[element] ):
literal[string]
identifier[out] =( literal[int] , literal[int] , literal[int] , literal[int] )
keyword[if] literal[string] keyword[in] identifier[element] . identifier[attrib] :
identifier[matches] = identifier[cls] . identifier[box_pattern] . identifier[search] ( identifier[element] . identifier[attrib] [ literal[string] ])
keyword[if] identifier[matches] :
identifier[coords] = identifier[matches] . identifier[group] ( literal[int] ). identifier[split] ()
identifier[out] = identifier[Rect] . identifier[_make] ( identifier[int] ( identifier[coords] [ identifier[n] ]) keyword[for] identifier[n] keyword[in] identifier[range] ( literal[int] ))
keyword[return] identifier[out] | def element_coordinates(cls, element):
"""
Returns a tuple containing the coordinates of the bounding box around
an element
"""
out = (0, 0, 0, 0)
if 'title' in element.attrib:
matches = cls.box_pattern.search(element.attrib['title'])
if matches:
coords = matches.group(1).split()
out = Rect._make((int(coords[n]) for n in range(4))) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
return out |
def _access(self):  # pragma: no cover
    """
    Get the HTTP code status.

    :return: The matched HTTP status code, or None when the request fails.
    :rtype: int|None
    """
    # Arguments shared by both request variants.
    request_args = {
        "timeout": PyFunceble.CONFIGURATION["seconds_before_http_timeout"],
        "headers": self.headers,
    }

    if PyFunceble.INTERN["to_test_type"] == "url":
        # We are globally testing a URL: honor the SSL verification setting.
        request_args["verify"] = PyFunceble.CONFIGURATION["verify_ssl_certificate"]

    try:
        # We get the head of the (constructed) URL and extract its status code.
        response = PyFunceble.requests.head(self.to_get, **request_args)
        return response.status_code
    except (
        PyFunceble.requests.exceptions.InvalidURL,
        PyFunceble.socket.timeout,
        PyFunceble.requests.exceptions.Timeout,
        PyFunceble.requests.ConnectionError,
        urllib3_exceptions.InvalidHeader,
        UnicodeDecodeError,  # The probability that this happens in production is minimal.
    ):
        # Any of the listed exceptions means something went wrong and we
        # were unable to extract a status code: report None.
        return None
constant[
Get the HTTP code status.
:return: The matched HTTP status code.
:rtype: int|None
]
<ast.Try object at 0x7da18c4cd090> | keyword[def] identifier[_access] ( identifier[self] ):
literal[string]
keyword[try] :
keyword[if] identifier[PyFunceble] . identifier[INTERN] [ literal[string] ]== literal[string] :
identifier[req] = identifier[PyFunceble] . identifier[requests] . identifier[head] (
identifier[self] . identifier[to_get] ,
identifier[timeout] = identifier[PyFunceble] . identifier[CONFIGURATION] [ literal[string] ],
identifier[headers] = identifier[self] . identifier[headers] ,
identifier[verify] = identifier[PyFunceble] . identifier[CONFIGURATION] [ literal[string] ],
)
keyword[else] :
identifier[req] = identifier[PyFunceble] . identifier[requests] . identifier[head] (
identifier[self] . identifier[to_get] ,
identifier[timeout] = identifier[PyFunceble] . identifier[CONFIGURATION] [ literal[string] ],
identifier[headers] = identifier[self] . identifier[headers] ,
)
keyword[return] identifier[req] . identifier[status_code]
keyword[except] (
identifier[PyFunceble] . identifier[requests] . identifier[exceptions] . identifier[InvalidURL] ,
identifier[PyFunceble] . identifier[socket] . identifier[timeout] ,
identifier[PyFunceble] . identifier[requests] . identifier[exceptions] . identifier[Timeout] ,
identifier[PyFunceble] . identifier[requests] . identifier[ConnectionError] ,
identifier[urllib3_exceptions] . identifier[InvalidHeader] ,
identifier[UnicodeDecodeError] ,
):
keyword[return] keyword[None] | def _access(self): # pragma: no cover
'\n Get the HTTP code status.\n\n :return: The matched HTTP status code.\n :rtype: int|None\n '
try:
# We try to get the HTTP status code.
if PyFunceble.INTERN['to_test_type'] == 'url':
# We are globally testing a URL.
# We get the head of the URL.
req = PyFunceble.requests.head(self.to_get, timeout=PyFunceble.CONFIGURATION['seconds_before_http_timeout'], headers=self.headers, verify=PyFunceble.CONFIGURATION['verify_ssl_certificate']) # depends on [control=['if'], data=[]]
else:
# We are not globally testing a URL.
# We get the head of the constructed URL.
req = PyFunceble.requests.head(self.to_get, timeout=PyFunceble.CONFIGURATION['seconds_before_http_timeout'], headers=self.headers)
# And we try to get the status code.
return req.status_code # depends on [control=['try'], data=[]]
except (PyFunceble.requests.exceptions.InvalidURL, PyFunceble.socket.timeout, PyFunceble.requests.exceptions.Timeout, PyFunceble.requests.ConnectionError, urllib3_exceptions.InvalidHeader, UnicodeDecodeError): # The probability that this happend in production is minimal.
# If one of the listed exception is matched, that means that something
# went wrong and we were unable to extract the status code.
# We return None.
return None # depends on [control=['except'], data=[]] |
def save_module(self, obj):
    """
    Save a module as an import.

    Dynamic modules (not importable by name) are reduced together with
    their attribute dict so they can be rebuilt at load time.
    """
    self.modules.add(obj)
    if _is_dynamic(obj):
        reducer, args = dynamic_subimport, (obj.__name__, vars(obj))
    else:
        reducer, args = subimport, (obj.__name__,)
    self.save_reduce(reducer, args, obj=obj)
constant[
Save a module as an import
]
call[name[self].modules.add, parameter[name[obj]]]
if call[name[_is_dynamic], parameter[name[obj]]] begin[:]
call[name[self].save_reduce, parameter[name[dynamic_subimport], tuple[[<ast.Attribute object at 0x7da204567460>, <ast.Call object at 0x7da204566350>]]]] | keyword[def] identifier[save_module] ( identifier[self] , identifier[obj] ):
literal[string]
identifier[self] . identifier[modules] . identifier[add] ( identifier[obj] )
keyword[if] identifier[_is_dynamic] ( identifier[obj] ):
identifier[self] . identifier[save_reduce] ( identifier[dynamic_subimport] ,( identifier[obj] . identifier[__name__] , identifier[vars] ( identifier[obj] )),
identifier[obj] = identifier[obj] )
keyword[else] :
identifier[self] . identifier[save_reduce] ( identifier[subimport] ,( identifier[obj] . identifier[__name__] ,), identifier[obj] = identifier[obj] ) | def save_module(self, obj):
"""
Save a module as an import
"""
self.modules.add(obj)
if _is_dynamic(obj):
self.save_reduce(dynamic_subimport, (obj.__name__, vars(obj)), obj=obj) # depends on [control=['if'], data=[]]
else:
self.save_reduce(subimport, (obj.__name__,), obj=obj) |
def remove_unused_grid_rc(self):
    """Deletes grid row/col entries whose index exceeds the current max."""
    layout = self['layout']
    # (section name, highest index still in use)
    for section, limit in (('columns', self.max_col), ('rows', self.max_row)):
        if section not in layout:
            continue
        # Snapshot the keys first: we mutate the dict while scanning it.
        for key in tuple(layout[section]):
            if int(key) > limit:
                del layout[section][key]
constant[Deletes unused grid row/cols]
if compare[constant[columns] in call[name[self]][constant[layout]]] begin[:]
variable[ckeys] assign[=] call[name[tuple], parameter[call[call[call[name[self]][constant[layout]]][constant[columns]].keys, parameter[]]]]
for taget[name[key]] in starred[name[ckeys]] begin[:]
variable[value] assign[=] call[name[int], parameter[name[key]]]
if compare[name[value] greater[>] name[self].max_col] begin[:]
<ast.Delete object at 0x7da1b170fd00>
if compare[constant[rows] in call[name[self]][constant[layout]]] begin[:]
variable[rkeys] assign[=] call[name[tuple], parameter[call[call[call[name[self]][constant[layout]]][constant[rows]].keys, parameter[]]]]
for taget[name[key]] in starred[name[rkeys]] begin[:]
variable[value] assign[=] call[name[int], parameter[name[key]]]
if compare[name[value] greater[>] name[self].max_row] begin[:]
<ast.Delete object at 0x7da1b170e620> | keyword[def] identifier[remove_unused_grid_rc] ( identifier[self] ):
literal[string]
keyword[if] literal[string] keyword[in] identifier[self] [ literal[string] ]:
identifier[ckeys] = identifier[tuple] ( identifier[self] [ literal[string] ][ literal[string] ]. identifier[keys] ())
keyword[for] identifier[key] keyword[in] identifier[ckeys] :
identifier[value] = identifier[int] ( identifier[key] )
keyword[if] identifier[value] > identifier[self] . identifier[max_col] :
keyword[del] identifier[self] [ literal[string] ][ literal[string] ][ identifier[key] ]
keyword[if] literal[string] keyword[in] identifier[self] [ literal[string] ]:
identifier[rkeys] = identifier[tuple] ( identifier[self] [ literal[string] ][ literal[string] ]. identifier[keys] ())
keyword[for] identifier[key] keyword[in] identifier[rkeys] :
identifier[value] = identifier[int] ( identifier[key] )
keyword[if] identifier[value] > identifier[self] . identifier[max_row] :
keyword[del] identifier[self] [ literal[string] ][ literal[string] ][ identifier[key] ] | def remove_unused_grid_rc(self):
"""Deletes unused grid row/cols"""
if 'columns' in self['layout']:
ckeys = tuple(self['layout']['columns'].keys())
for key in ckeys:
value = int(key)
if value > self.max_col:
del self['layout']['columns'][key] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['key']] # depends on [control=['if'], data=[]]
if 'rows' in self['layout']:
rkeys = tuple(self['layout']['rows'].keys())
for key in rkeys:
value = int(key)
if value > self.max_row:
del self['layout']['rows'][key] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['key']] # depends on [control=['if'], data=[]] |
def FloatBetweenZeroAndOne(x):
    """Returns *x* only if *0 <= x <= 1*, otherwise raises error."""
    value = float(x)
    if not 0 <= value <= 1:
        raise ValueError("{0} not a float between 0 and 1.".format(value))
    return value
constant[Returns *x* only if *0 <= x <= 1*, otherwise raises error.]
variable[x] assign[=] call[name[float], parameter[name[x]]]
if compare[constant[0] less_or_equal[<=] name[x]] begin[:]
return[name[x]] | keyword[def] identifier[FloatBetweenZeroAndOne] ( identifier[x] ):
literal[string]
identifier[x] = identifier[float] ( identifier[x] )
keyword[if] literal[int] <= identifier[x] <= literal[int] :
keyword[return] identifier[x]
keyword[else] :
keyword[raise] identifier[ValueError] ( literal[string] . identifier[format] ( identifier[x] )) | def FloatBetweenZeroAndOne(x):
"""Returns *x* only if *0 <= x <= 1*, otherwise raises error."""
x = float(x)
if 0 <= x <= 1:
return x # depends on [control=['if'], data=['x']]
else:
raise ValueError('{0} not a float between 0 and 1.'.format(x)) |
def check_array(array, force_2d=False, n_feats=None, ndim=None,
                min_samples=1, name='Input data', verbose=True):
    """
    tool to perform basic data validation.
    called by check_X and check_y.

    ensures that data:
    - is ndim dimensional
    - contains float-compatible data-types
    - has at least min_samples
    - has n_feats
    - is finite

    Parameters
    ----------
    array : array-like
    force_2d : boolean, default: False
        whether to force a 2d array. Setting to True forces ndim = 2
    n_feats : int, default: None
        represents number of features that the array should have.
        not enforced if n_feats is None.
    ndim : int, default: None
        number of dimensions expected in the array
    min_samples : int, default: 1
        minimum number of samples (rows) the array must contain
    name : str, default: 'Input data'
        name to use when referring to the array in error messages
    verbose : bool, default: True
        whether to print warnings

    Returns
    -------
    array : validated array

    Raises
    ------
    ValueError
        if the data cannot be cast to float, contains Inf/NaN, or fails
        any of the requested dimension/feature/sample-count checks.
    """
    # make array
    if force_2d:
        array = make_2d(array, verbose=verbose)
        ndim = 2
    else:
        array = np.array(array)

    # cast to float
    dtype = array.dtype
    if dtype.kind not in ['i', 'f']:
        try:
            array = array.astype('float')
        except ValueError as exc:
            # chain the original cast failure so the root cause is visible
            raise ValueError('{} must be type int or float, '
                             'but found type: {}\n'
                             'Try transforming data with a LabelEncoder first.'
                             .format(name, dtype.type)) from exc

    # check finite
    if not np.isfinite(array).all():
        raise ValueError('{} must not contain Inf nor NaN'.format(name))

    # check ndim
    if ndim is not None and array.ndim != ndim:
        raise ValueError('{} must have {} dimensions. '
                         'found shape {}'.format(name, ndim, array.shape))

    # check n_feats
    if n_feats is not None:
        m = array.shape[1]
        if m != n_feats:
            raise ValueError('{} must have {} features, '
                             'but found {}'.format(name, n_feats, m))

    # minimum samples
    n = array.shape[0]
    if n < min_samples:
        raise ValueError('{} should have at least {} samples, '
                         'but found {}'.format(name, min_samples, n))

    return array
constant[
tool to perform basic data validation.
called by check_X and check_y.
ensures that data:
- is ndim dimensional
- contains float-compatible data-types
- has at least min_samples
- has n_feats
- is finite
Parameters
----------
array : array-like
force_2d : boolean, default: False
whether to force a 2d array. Setting to True forces ndim = 2
n_feats : int, default: None
represents number of features that the array should have.
not enforced if n_feats is None.
ndim : int default: None
number of dimensions expected in the array
min_samples : int, default: 1
name : str, default: 'Input data'
name to use when referring to the array
verbose : bool, default: True
whether to print warnings
Returns
-------
array : validated array
]
if name[force_2d] begin[:]
variable[array] assign[=] call[name[make_2d], parameter[name[array]]]
variable[ndim] assign[=] constant[2]
variable[dtype] assign[=] name[array].dtype
if compare[name[dtype].kind <ast.NotIn object at 0x7da2590d7190> list[[<ast.Constant object at 0x7da20c6aa170>, <ast.Constant object at 0x7da20c6a9660>]]] begin[:]
<ast.Try object at 0x7da20c6a8a00>
if <ast.UnaryOp object at 0x7da20c6a90c0> begin[:]
<ast.Raise object at 0x7da20c6abe80>
if compare[name[ndim] is_not constant[None]] begin[:]
if compare[name[array].ndim not_equal[!=] name[ndim]] begin[:]
<ast.Raise object at 0x7da20c6a90f0>
if compare[name[n_feats] is_not constant[None]] begin[:]
variable[m] assign[=] call[name[array].shape][constant[1]]
if compare[name[m] not_equal[!=] name[n_feats]] begin[:]
<ast.Raise object at 0x7da1b17b4fd0>
variable[n] assign[=] call[name[array].shape][constant[0]]
if compare[name[n] less[<] name[min_samples]] begin[:]
<ast.Raise object at 0x7da1b17b6440>
return[name[array]] | keyword[def] identifier[check_array] ( identifier[array] , identifier[force_2d] = keyword[False] , identifier[n_feats] = keyword[None] , identifier[ndim] = keyword[None] ,
identifier[min_samples] = literal[int] , identifier[name] = literal[string] , identifier[verbose] = keyword[True] ):
literal[string]
keyword[if] identifier[force_2d] :
identifier[array] = identifier[make_2d] ( identifier[array] , identifier[verbose] = identifier[verbose] )
identifier[ndim] = literal[int]
keyword[else] :
identifier[array] = identifier[np] . identifier[array] ( identifier[array] )
identifier[dtype] = identifier[array] . identifier[dtype]
keyword[if] identifier[dtype] . identifier[kind] keyword[not] keyword[in] [ literal[string] , literal[string] ]:
keyword[try] :
identifier[array] = identifier[array] . identifier[astype] ( literal[string] )
keyword[except] identifier[ValueError] keyword[as] identifier[e] :
keyword[raise] identifier[ValueError] ( literal[string] literal[string] literal[string] . identifier[format] ( identifier[name] , identifier[dtype] . identifier[type] ))
keyword[if] keyword[not] ( identifier[np] . identifier[isfinite] ( identifier[array] ). identifier[all] ()):
keyword[raise] identifier[ValueError] ( literal[string] . identifier[format] ( identifier[name] ))
keyword[if] identifier[ndim] keyword[is] keyword[not] keyword[None] :
keyword[if] identifier[array] . identifier[ndim] != identifier[ndim] :
keyword[raise] identifier[ValueError] ( literal[string] literal[string] . identifier[format] ( identifier[name] , identifier[ndim] , identifier[array] . identifier[shape] ))
keyword[if] identifier[n_feats] keyword[is] keyword[not] keyword[None] :
identifier[m] = identifier[array] . identifier[shape] [ literal[int] ]
keyword[if] identifier[m] != identifier[n_feats] :
keyword[raise] identifier[ValueError] ( literal[string] literal[string] . identifier[format] ( identifier[name] , identifier[n_feats] , identifier[m] ))
identifier[n] = identifier[array] . identifier[shape] [ literal[int] ]
keyword[if] identifier[n] < identifier[min_samples] :
keyword[raise] identifier[ValueError] ( literal[string] literal[string] . identifier[format] ( identifier[name] , identifier[min_samples] , identifier[n] ))
keyword[return] identifier[array] | def check_array(array, force_2d=False, n_feats=None, ndim=None, min_samples=1, name='Input data', verbose=True):
"""
tool to perform basic data validation.
called by check_X and check_y.
ensures that data:
- is ndim dimensional
- contains float-compatible data-types
- has at least min_samples
- has n_feats
- is finite
Parameters
----------
array : array-like
force_2d : boolean, default: False
whether to force a 2d array. Setting to True forces ndim = 2
n_feats : int, default: None
represents number of features that the array should have.
not enforced if n_feats is None.
ndim : int default: None
number of dimensions expected in the array
min_samples : int, default: 1
name : str, default: 'Input data'
name to use when referring to the array
verbose : bool, default: True
whether to print warnings
Returns
-------
array : validated array
"""
# make array
if force_2d:
array = make_2d(array, verbose=verbose)
ndim = 2 # depends on [control=['if'], data=[]]
else:
array = np.array(array)
# cast to float
dtype = array.dtype
if dtype.kind not in ['i', 'f']:
try:
array = array.astype('float') # depends on [control=['try'], data=[]]
except ValueError as e:
raise ValueError('{} must be type int or float, but found type: {}\nTry transforming data with a LabelEncoder first.'.format(name, dtype.type)) # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]]
# check finite
if not np.isfinite(array).all():
raise ValueError('{} must not contain Inf nor NaN'.format(name)) # depends on [control=['if'], data=[]]
# check ndim
if ndim is not None:
if array.ndim != ndim:
raise ValueError('{} must have {} dimensions. found shape {}'.format(name, ndim, array.shape)) # depends on [control=['if'], data=['ndim']] # depends on [control=['if'], data=['ndim']]
# check n_feats
if n_feats is not None:
m = array.shape[1]
if m != n_feats:
raise ValueError('{} must have {} features, but found {}'.format(name, n_feats, m)) # depends on [control=['if'], data=['m', 'n_feats']] # depends on [control=['if'], data=['n_feats']]
# minimum samples
n = array.shape[0]
if n < min_samples:
raise ValueError('{} should have at least {} samples, but found {}'.format(name, min_samples, n)) # depends on [control=['if'], data=['n', 'min_samples']]
return array |
def random_restore(
    rnd: Optional[tcod.random.Random], backup: tcod.random.Random
) -> None:
    """Restore a random number generator from a backed-up copy.

    Args:
        rnd (Optional[Random]): A Random instance, or None to use the default.
        backup (Random): The Random instance which was used as a backup.

    .. deprecated:: 8.4
        You can use the standard library copy and pickle modules to save a
        random state.
    """
    # ``None`` selects libtcod's default generator (ffi.NULL on the C side).
    if rnd:
        target = rnd.random_c
    else:
        target = ffi.NULL
    lib.TCOD_random_restore(target, backup.random_c)
constant[Restore a random number generator from a backed up copy.
Args:
rnd (Optional[Random]): A Random instance, or None to use the default.
backup (Random): The Random instance which was used as a backup.
.. deprecated:: 8.4
You can use the standard library copy and pickle modules to save a
random state.
]
call[name[lib].TCOD_random_restore, parameter[<ast.IfExp object at 0x7da18f810940>, name[backup].random_c]] | keyword[def] identifier[random_restore] (
identifier[rnd] : identifier[Optional] [ identifier[tcod] . identifier[random] . identifier[Random] ], identifier[backup] : identifier[tcod] . identifier[random] . identifier[Random]
)-> keyword[None] :
literal[string]
identifier[lib] . identifier[TCOD_random_restore] ( identifier[rnd] . identifier[random_c] keyword[if] identifier[rnd] keyword[else] identifier[ffi] . identifier[NULL] , identifier[backup] . identifier[random_c] ) | def random_restore(rnd: Optional[tcod.random.Random], backup: tcod.random.Random) -> None:
"""Restore a random number generator from a backed up copy.
Args:
rnd (Optional[Random]): A Random instance, or None to use the default.
backup (Random): The Random instance which was used as a backup.
.. deprecated:: 8.4
You can use the standard library copy and pickle modules to save a
random state.
"""
lib.TCOD_random_restore(rnd.random_c if rnd else ffi.NULL, backup.random_c) |
def _get_format(self, token):
""" Returns a QTextCharFormat for token or None.
"""
if token in self._formats:
return self._formats[token]
if self._style is None:
result = self._get_format_from_document(token, self._document)
else:
result = self._get_format_from_style(token, self._style)
self._formats[token] = result
return result | def function[_get_format, parameter[self, token]]:
constant[ Returns a QTextCharFormat for token or None.
]
if compare[name[token] in name[self]._formats] begin[:]
return[call[name[self]._formats][name[token]]]
if compare[name[self]._style is constant[None]] begin[:]
variable[result] assign[=] call[name[self]._get_format_from_document, parameter[name[token], name[self]._document]]
call[name[self]._formats][name[token]] assign[=] name[result]
return[name[result]] | keyword[def] identifier[_get_format] ( identifier[self] , identifier[token] ):
literal[string]
keyword[if] identifier[token] keyword[in] identifier[self] . identifier[_formats] :
keyword[return] identifier[self] . identifier[_formats] [ identifier[token] ]
keyword[if] identifier[self] . identifier[_style] keyword[is] keyword[None] :
identifier[result] = identifier[self] . identifier[_get_format_from_document] ( identifier[token] , identifier[self] . identifier[_document] )
keyword[else] :
identifier[result] = identifier[self] . identifier[_get_format_from_style] ( identifier[token] , identifier[self] . identifier[_style] )
identifier[self] . identifier[_formats] [ identifier[token] ]= identifier[result]
keyword[return] identifier[result] | def _get_format(self, token):
""" Returns a QTextCharFormat for token or None.
"""
if token in self._formats:
return self._formats[token] # depends on [control=['if'], data=['token']]
if self._style is None:
result = self._get_format_from_document(token, self._document) # depends on [control=['if'], data=[]]
else:
result = self._get_format_from_style(token, self._style)
self._formats[token] = result
return result |
def upload(ctx, browse=False, target=None, release='latest'):
    """Upload a ZIP of built docs (by default to PyPI, else a WebDAV URL)."""
    settings = config.load()
    docs_uploader = DocsUploader(ctx, settings, target)
    docs_cfg = ctx.rituals.docs
    html_dir = os.path.join(docs_cfg.sources, docs_cfg.build)
    if not os.path.isdir(html_dir):
        notify.failure("No HTML docs dir found at '{}'!".format(html_dir))
    upload_url = docs_uploader.upload(html_dir, release)
    notify.info("Uploaded docs to '{url}'!".format(url=upload_url or 'N/A'))
    # Optionally open the freshly published docs in the default browser.
    if upload_url and browse:
        webbrowser.open_new_tab(upload_url)
constant[Upload a ZIP of built docs (by default to PyPI, else a WebDAV URL).]
variable[cfg] assign[=] call[name[config].load, parameter[]]
variable[uploader] assign[=] call[name[DocsUploader], parameter[name[ctx], name[cfg], name[target]]]
variable[html_dir] assign[=] call[name[os].path.join, parameter[name[ctx].rituals.docs.sources, name[ctx].rituals.docs.build]]
if <ast.UnaryOp object at 0x7da18ede5540> begin[:]
call[name[notify].failure, parameter[call[constant[No HTML docs dir found at '{}'!].format, parameter[name[html_dir]]]]]
variable[url] assign[=] call[name[uploader].upload, parameter[name[html_dir], name[release]]]
call[name[notify].info, parameter[call[constant[Uploaded docs to '{url}'!].format, parameter[]]]]
if <ast.BoolOp object at 0x7da18ede7df0> begin[:]
call[name[webbrowser].open_new_tab, parameter[name[url]]] | keyword[def] identifier[upload] ( identifier[ctx] , identifier[browse] = keyword[False] , identifier[target] = keyword[None] , identifier[release] = literal[string] ):
literal[string]
identifier[cfg] = identifier[config] . identifier[load] ()
identifier[uploader] = identifier[DocsUploader] ( identifier[ctx] , identifier[cfg] , identifier[target] )
identifier[html_dir] = identifier[os] . identifier[path] . identifier[join] ( identifier[ctx] . identifier[rituals] . identifier[docs] . identifier[sources] , identifier[ctx] . identifier[rituals] . identifier[docs] . identifier[build] )
keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[isdir] ( identifier[html_dir] ):
identifier[notify] . identifier[failure] ( literal[string] . identifier[format] ( identifier[html_dir] ))
identifier[url] = identifier[uploader] . identifier[upload] ( identifier[html_dir] , identifier[release] )
identifier[notify] . identifier[info] ( literal[string] . identifier[format] ( identifier[url] = identifier[url] keyword[or] literal[string] ))
keyword[if] identifier[url] keyword[and] identifier[browse] :
identifier[webbrowser] . identifier[open_new_tab] ( identifier[url] ) | def upload(ctx, browse=False, target=None, release='latest'):
"""Upload a ZIP of built docs (by default to PyPI, else a WebDAV URL)."""
cfg = config.load()
uploader = DocsUploader(ctx, cfg, target)
html_dir = os.path.join(ctx.rituals.docs.sources, ctx.rituals.docs.build)
if not os.path.isdir(html_dir):
notify.failure("No HTML docs dir found at '{}'!".format(html_dir)) # depends on [control=['if'], data=[]]
url = uploader.upload(html_dir, release)
notify.info("Uploaded docs to '{url}'!".format(url=url or 'N/A'))
if url and browse: # Open in browser?
webbrowser.open_new_tab(url) # depends on [control=['if'], data=[]] |
def _getAlias(self, auth_level_uri):
"""Return the alias for the specified auth level URI.
@raises KeyError: if no alias is defined
"""
for (alias, existing_uri) in self.auth_level_aliases.items():
if auth_level_uri == existing_uri:
return alias
raise KeyError(auth_level_uri) | def function[_getAlias, parameter[self, auth_level_uri]]:
constant[Return the alias for the specified auth level URI.
@raises KeyError: if no alias is defined
]
for taget[tuple[[<ast.Name object at 0x7da1b04f76a0>, <ast.Name object at 0x7da1b04f6e60>]]] in starred[call[name[self].auth_level_aliases.items, parameter[]]] begin[:]
if compare[name[auth_level_uri] equal[==] name[existing_uri]] begin[:]
return[name[alias]]
<ast.Raise object at 0x7da1b04f65c0> | keyword[def] identifier[_getAlias] ( identifier[self] , identifier[auth_level_uri] ):
literal[string]
keyword[for] ( identifier[alias] , identifier[existing_uri] ) keyword[in] identifier[self] . identifier[auth_level_aliases] . identifier[items] ():
keyword[if] identifier[auth_level_uri] == identifier[existing_uri] :
keyword[return] identifier[alias]
keyword[raise] identifier[KeyError] ( identifier[auth_level_uri] ) | def _getAlias(self, auth_level_uri):
"""Return the alias for the specified auth level URI.
@raises KeyError: if no alias is defined
"""
for (alias, existing_uri) in self.auth_level_aliases.items():
if auth_level_uri == existing_uri:
return alias # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]]
raise KeyError(auth_level_uri) |
def check_perms(parser, token):
    """
    Returns a list of permissions (as ``codename`` strings) for a given
    ``user``/``group`` and ``obj`` (Model instance).

    Parses ``check_perms`` tag which should be in format::

        {% check_perms "perm1[, perm2, ...]" for user in slug as "context_var" %}

    or

        {% check_perms "perm1[, perm2, ...]" for user in "slug" as "context_var" %}

    .. note::
       Make sure that you set and use those permissions in same template
       block (``{% block %}``).

    Example of usage (assuming ``page`` objects are available from *context*)::

        {% check_perms "delete_page" for request.user in page.slug as "can_delete" %}
        {% if can_delete %}
            ...
        {% endif %}

    :param parser: the template parser (unused, required by the tag protocol)
    :param token: the tag token whose contents are parsed
    :raises template.TemplateSyntaxError: on any malformed tag usage
    """
    bits = token.split_contents()
    # Renamed from ``format`` to avoid shadowing the builtin.
    tag_format = ('{% check_perms "perm1[, perm2, ...]" '
                  'for user in slug as "context_var" %}')
    # Expect exactly: tag, perms, 'for', user, 'in', slug, 'as', context_var.
    if len(bits) != 8 or bits[2] != 'for' or bits[4] != 'in' or bits[6] != 'as':
        # Bug fix: this error previously named the unrelated ``get_obj_perms``
        # tag; report the correct tag name so the template author isn't misled.
        raise template.TemplateSyntaxError("check_perms tag should be in "
                                           "format: %s" % tag_format)
    perms = bits[1]
    user = bits[3]
    slug = bits[5]
    context_var = bits[7]
    if perms[0] != perms[-1] or perms[0] not in ('"', "'"):
        raise template.TemplateSyntaxError("check_perms tag's perms "
                                           "argument should be in quotes")
    if context_var[0] != context_var[-1] or context_var[0] not in ('"', "'"):
        raise template.TemplateSyntaxError("check_perms tag's context_var "
                                           "argument should be in quotes")
    # Strip the surrounding quotes; the node receives the bare variable name.
    context_var = context_var[1:-1]
    return CheckPermissionsNode(perms, user, slug, context_var)
constant[
Returns a list of permissions (as ``codename`` strings) for a given
``user``/``group`` and ``obj`` (Model instance).
Parses ``check_perms`` tag which should be in format::
{% check_perms "perm1[, perm2, ...]" for user in slug as "context_var" %}
or
{% check_perms "perm1[, perm2, ...]" for user in "slug" as "context_var" %}
.. note::
Make sure that you set and use those permissions in same template
block (``{% block %}``).
Example of usage (assuming ``page` objects are available from *context*)::
{% check_perms "delete_page" for request.user in page.slug as "can_delete" %}
{% if can_delete %}
...
{% endif %}
]
variable[bits] assign[=] call[name[token].split_contents, parameter[]]
variable[format] assign[=] constant[{% check_perms "perm1[, perm2, ...]" for user in slug as "context_var" %}]
if <ast.BoolOp object at 0x7da18ede5c60> begin[:]
<ast.Raise object at 0x7da18ede5ff0>
variable[perms] assign[=] call[name[bits]][constant[1]]
variable[user] assign[=] call[name[bits]][constant[3]]
variable[slug] assign[=] call[name[bits]][constant[5]]
variable[context_var] assign[=] call[name[bits]][constant[7]]
if <ast.BoolOp object at 0x7da18ede7b50> begin[:]
<ast.Raise object at 0x7da18ede5ba0>
if <ast.BoolOp object at 0x7da18ede6620> begin[:]
<ast.Raise object at 0x7da18ede5f90>
variable[context_var] assign[=] call[name[context_var]][<ast.Slice object at 0x7da18ede5e70>]
return[call[name[CheckPermissionsNode], parameter[name[perms], name[user], name[slug], name[context_var]]]] | keyword[def] identifier[check_perms] ( identifier[parser] , identifier[token] ):
literal[string]
identifier[bits] = identifier[token] . identifier[split_contents] ()
identifier[format] = literal[string]
keyword[if] identifier[len] ( identifier[bits] )!= literal[int] keyword[or] identifier[bits] [ literal[int] ]!= literal[string] keyword[or] identifier[bits] [ literal[int] ]!= literal[string] keyword[or] identifier[bits] [ literal[int] ]!= literal[string] :
keyword[raise] identifier[template] . identifier[TemplateSyntaxError] ( literal[string]
literal[string] % identifier[format] )
identifier[perms] = identifier[bits] [ literal[int] ]
identifier[user] = identifier[bits] [ literal[int] ]
identifier[slug] = identifier[bits] [ literal[int] ]
identifier[context_var] = identifier[bits] [ literal[int] ]
keyword[if] identifier[perms] [ literal[int] ]!= identifier[perms] [- literal[int] ] keyword[or] identifier[perms] [ literal[int] ] keyword[not] keyword[in] ( literal[string] , literal[string] ):
keyword[raise] identifier[template] . identifier[TemplateSyntaxError] ( literal[string]
literal[string] )
keyword[if] identifier[context_var] [ literal[int] ]!= identifier[context_var] [- literal[int] ] keyword[or] identifier[context_var] [ literal[int] ] keyword[not] keyword[in] ( literal[string] , literal[string] ):
keyword[raise] identifier[template] . identifier[TemplateSyntaxError] ( literal[string]
literal[string] )
identifier[context_var] = identifier[context_var] [ literal[int] :- literal[int] ]
keyword[return] identifier[CheckPermissionsNode] ( identifier[perms] , identifier[user] , identifier[slug] , identifier[context_var] ) | def check_perms(parser, token):
"""
Returns a list of permissions (as ``codename`` strings) for a given
``user``/``group`` and ``obj`` (Model instance).
Parses ``check_perms`` tag which should be in format::
{% check_perms "perm1[, perm2, ...]" for user in slug as "context_var" %}
or
{% check_perms "perm1[, perm2, ...]" for user in "slug" as "context_var" %}
.. note::
Make sure that you set and use those permissions in same template
block (``{% block %}``).
Example of usage (assuming ``page` objects are available from *context*)::
{% check_perms "delete_page" for request.user in page.slug as "can_delete" %}
{% if can_delete %}
...
{% endif %}
"""
bits = token.split_contents()
format = '{% check_perms "perm1[, perm2, ...]" for user in slug as "context_var" %}'
if len(bits) != 8 or bits[2] != 'for' or bits[4] != 'in' or (bits[6] != 'as'):
raise template.TemplateSyntaxError('get_obj_perms tag should be in format: %s' % format) # depends on [control=['if'], data=[]]
perms = bits[1]
user = bits[3]
slug = bits[5]
context_var = bits[7]
if perms[0] != perms[-1] or perms[0] not in ('"', "'"):
raise template.TemplateSyntaxError("check_perms tag's perms argument should be in quotes") # depends on [control=['if'], data=[]]
if context_var[0] != context_var[-1] or context_var[0] not in ('"', "'"):
raise template.TemplateSyntaxError("check_perms tag's context_var argument should be in quotes") # depends on [control=['if'], data=[]]
context_var = context_var[1:-1]
return CheckPermissionsNode(perms, user, slug, context_var) |
def write_xpm(matrix, version, out, scale=1, border=None, color='#000',
              background='#fff', name='img'):
    """Serialize the matrix as an `XPM <https://en.wikipedia.org/wiki/X_PixMap>`_ image.

    :param matrix: The matrix to serialize.
    :param int version: The (Micro) QR code version.
    :param out: Filename or a file-like object supporting to write binary data.
    :param scale: Size of a single module (default: 1, i.e. 1 x 1 pixel
        per module).
    :param int border: Size of the quiet zone. ``None`` (default) picks the
        recommended size (4 for QR Codes, 2 for Micro QR Codes).
    :param color: Color of the modules (default: black); accepts an
        ``(R, G, B)`` tuple, a web color name (like "red"), or hexadecimal
        notation (``#RGB`` or ``#RRGGBB``).
    :param background: Optional background color (default: white); same
        formats as `color`. ``None`` makes the background transparent.
    :param str name: Name of the image, must be a valid C identifier
        (default: "img").
    """
    rows = matrix_iter(matrix, version, scale, border)
    width, height = get_symbol_size(version, scale=scale, border=border)
    fg_repr = colors.color_to_rgb_hex(color)
    # XPM uses the keyword ``None`` for a transparent color.
    bg_repr = 'None' if background is None else colors.color_to_rgb_hex(background)
    with writable(out, 'wt') as f:
        f.write('/* XPM */\n')
        f.write('static char *{0}[] = {{\n'.format(name))
        f.write('"{0} {1} 2 1",\n'.format(width, height))
        f.write('" c {0}",\n'.format(bg_repr))
        f.write('"X c {0}",\n'.format(fg_repr))
        last_row = height - 1
        for idx, row in enumerate(rows):
            pixels = ''.join('X' if bit else ' ' for bit in row)
            # Every row but the last is followed by a comma.
            f.write('"{0}"{1}\n'.format(pixels, ',' if idx < last_row else ''))
        f.write('};\n')
constant[ Serializes the matrix as `XPM <https://en.wikipedia.org/wiki/X_PixMap>`_ image.
:param matrix: The matrix to serialize.
:param int version: The (Micro) QR code version
:param out: Filename or a file-like object supporting to write binary data.
:param scale: Indicates the size of a single module (default: 1 which
corresponds to 1 x 1 pixel per module).
:param int border: Integer indicating the size of the quiet zone.
If set to ``None`` (default), the recommended border size
will be used (``4`` for QR Codes, ``2`` for a Micro QR Codes).
:param color: Color of the modules (default: black). The
color can be provided as ``(R, G, B)`` tuple, as web color name
(like "red") or in hexadecimal format (``#RGB`` or ``#RRGGBB``).
:param background: Optional background color (default: white).
See `color` for valid values. ``None`` indicates a transparent
background.
:param str name: Name of the image (must be a valid C-identifier).
Default: "img".
]
variable[row_iter] assign[=] call[name[matrix_iter], parameter[name[matrix], name[version], name[scale], name[border]]]
<ast.Tuple object at 0x7da18f721480> assign[=] call[name[get_symbol_size], parameter[name[version]]]
variable[stroke_color] assign[=] call[name[colors].color_to_rgb_hex, parameter[name[color]]]
variable[bg_color] assign[=] <ast.IfExp object at 0x7da18f723f70>
with call[name[writable], parameter[name[out], constant[wt]]] begin[:]
variable[write] assign[=] name[f].write
call[name[write], parameter[call[constant[/* XPM */
static char *{0}[] = {{
"{1} {2} 2 1",
" c {3}",
"X c {4}",
].format, parameter[name[name], name[width], name[height], name[bg_color], name[stroke_color]]]]]
for taget[tuple[[<ast.Name object at 0x7da18f813bb0>, <ast.Name object at 0x7da18f8130a0>]]] in starred[call[name[enumerate], parameter[name[row_iter]]]] begin[:]
call[name[write], parameter[call[constant[].join, parameter[call[name[chain], parameter[list[[<ast.Constant object at 0x7da1b0c51de0>]], <ast.GeneratorExp object at 0x7da1b0c53eb0>, list[[<ast.Call object at 0x7da1b0c52a70>]]]]]]]]
call[name[write], parameter[constant[};
]]] | keyword[def] identifier[write_xpm] ( identifier[matrix] , identifier[version] , identifier[out] , identifier[scale] = literal[int] , identifier[border] = keyword[None] , identifier[color] = literal[string] ,
identifier[background] = literal[string] , identifier[name] = literal[string] ):
literal[string]
identifier[row_iter] = identifier[matrix_iter] ( identifier[matrix] , identifier[version] , identifier[scale] , identifier[border] )
identifier[width] , identifier[height] = identifier[get_symbol_size] ( identifier[version] , identifier[scale] = identifier[scale] , identifier[border] = identifier[border] )
identifier[stroke_color] = identifier[colors] . identifier[color_to_rgb_hex] ( identifier[color] )
identifier[bg_color] = identifier[colors] . identifier[color_to_rgb_hex] ( identifier[background] ) keyword[if] identifier[background] keyword[is] keyword[not] keyword[None] keyword[else] literal[string]
keyword[with] identifier[writable] ( identifier[out] , literal[string] ) keyword[as] identifier[f] :
identifier[write] = identifier[f] . identifier[write]
identifier[write] ( literal[string]
literal[string]
literal[string]
literal[string]
literal[string] . identifier[format] ( identifier[name] , identifier[width] , identifier[height] , identifier[bg_color] , identifier[stroke_color] ))
keyword[for] identifier[i] , identifier[row] keyword[in] identifier[enumerate] ( identifier[row_iter] ):
identifier[write] ( literal[string] . identifier[join] ( identifier[chain] ([ literal[string] ],( literal[string] keyword[if] keyword[not] identifier[b] keyword[else] literal[string] keyword[for] identifier[b] keyword[in] identifier[row] ),
[ literal[string] . identifier[format] ( literal[string] keyword[if] identifier[i] < identifier[height] - literal[int] keyword[else] literal[string] )])))
identifier[write] ( literal[string] ) | def write_xpm(matrix, version, out, scale=1, border=None, color='#000', background='#fff', name='img'):
""" Serializes the matrix as `XPM <https://en.wikipedia.org/wiki/X_PixMap>`_ image.
:param matrix: The matrix to serialize.
:param int version: The (Micro) QR code version
:param out: Filename or a file-like object supporting to write binary data.
:param scale: Indicates the size of a single module (default: 1 which
corresponds to 1 x 1 pixel per module).
:param int border: Integer indicating the size of the quiet zone.
If set to ``None`` (default), the recommended border size
will be used (``4`` for QR Codes, ``2`` for a Micro QR Codes).
:param color: Color of the modules (default: black). The
color can be provided as ``(R, G, B)`` tuple, as web color name
(like "red") or in hexadecimal format (``#RGB`` or ``#RRGGBB``).
:param background: Optional background color (default: white).
See `color` for valid values. ``None`` indicates a transparent
background.
:param str name: Name of the image (must be a valid C-identifier).
Default: "img".
"""
row_iter = matrix_iter(matrix, version, scale, border)
(width, height) = get_symbol_size(version, scale=scale, border=border)
stroke_color = colors.color_to_rgb_hex(color)
bg_color = colors.color_to_rgb_hex(background) if background is not None else 'None'
with writable(out, 'wt') as f:
write = f.write
write('/* XPM */\nstatic char *{0}[] = {{\n"{1} {2} 2 1",\n" c {3}",\n"X c {4}",\n'.format(name, width, height, bg_color, stroke_color))
for (i, row) in enumerate(row_iter):
write(''.join(chain(['"'], (' ' if not b else 'X' for b in row), ['"{0}\n'.format(',' if i < height - 1 else '')]))) # depends on [control=['for'], data=[]]
write('};\n') # depends on [control=['with'], data=['f']] |
def is_probably_a_voice_vote(self):
    '''Guess whether this vote is a "voice vote".'''
    # Scraped data may flag a voice vote in several ways; short-circuit so
    # the 'motion' key is only consulted when the explicit flags are absent.
    return (
        '+voice_vote' in self
        or ('+vote_type' in self and self['+vote_type'] == 'Voice')
        or 'voice vote' in self['motion'].lower()
    )
return False | def function[is_probably_a_voice_vote, parameter[self]]:
constant[Guess whether this vote is a "voice vote".]
if compare[constant[+voice_vote] in name[self]] begin[:]
return[constant[True]]
if compare[constant[+vote_type] in name[self]] begin[:]
if compare[call[name[self]][constant[+vote_type]] equal[==] constant[Voice]] begin[:]
return[constant[True]]
if compare[constant[voice vote] in call[call[name[self]][constant[motion]].lower, parameter[]]] begin[:]
return[constant[True]]
return[constant[False]] | keyword[def] identifier[is_probably_a_voice_vote] ( identifier[self] ):
literal[string]
keyword[if] literal[string] keyword[in] identifier[self] :
keyword[return] keyword[True]
keyword[if] literal[string] keyword[in] identifier[self] :
keyword[if] identifier[self] [ literal[string] ]== literal[string] :
keyword[return] keyword[True]
keyword[if] literal[string] keyword[in] identifier[self] [ literal[string] ]. identifier[lower] ():
keyword[return] keyword[True]
keyword[return] keyword[False] | def is_probably_a_voice_vote(self):
"""Guess whether this vote is a "voice vote"."""
if '+voice_vote' in self:
return True # depends on [control=['if'], data=[]]
if '+vote_type' in self:
if self['+vote_type'] == 'Voice':
return True # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['self']]
if 'voice vote' in self['motion'].lower():
return True # depends on [control=['if'], data=[]]
return False |
def next_turn(self, *args):
    """Advance time by one turn, if it's not blocked.
    Block time by setting ``engine.universal['block'] = True``"""
    # Re-entrancy guard: a turn advance is already in flight.
    if self.tmp_block:
        return
    engine = self.app.engine
    dialog_layout = self.dialoglayout
    if engine.universal.get('block'):
        Logger.info("MainScreen: next_turn blocked, delete universal['block'] to unblock")
        return
    # A pending dialog (idx hasn't reached the end of the todo list) defers the turn.
    if dialog_layout.idx < len(dialog_layout.todo):
        Logger.info("MainScreen: not advancing time while there's a dialog")
        return
    self.tmp_block = True
    # Detach the time-push handlers while the engine advances
    # (presumably re-bound in _update_from_next_turn — confirm).
    self.app.unbind(
        branch=self.app._push_time,
        turn=self.app._push_time,
        tick=self.app._push_time
    )
    engine.next_turn(cb=self._update_from_next_turn)
constant[Advance time by one turn, if it's not blocked.
Block time by setting ``engine.universal['block'] = True``]
if name[self].tmp_block begin[:]
return[None]
variable[eng] assign[=] name[self].app.engine
variable[dial] assign[=] name[self].dialoglayout
if call[name[eng].universal.get, parameter[constant[block]]] begin[:]
call[name[Logger].info, parameter[constant[MainScreen: next_turn blocked, delete universal['block'] to unblock]]]
return[None]
if compare[name[dial].idx less[<] call[name[len], parameter[name[dial].todo]]] begin[:]
call[name[Logger].info, parameter[constant[MainScreen: not advancing time while there's a dialog]]]
return[None]
name[self].tmp_block assign[=] constant[True]
call[name[self].app.unbind, parameter[]]
call[name[eng].next_turn, parameter[]] | keyword[def] identifier[next_turn] ( identifier[self] ,* identifier[args] ):
literal[string]
keyword[if] identifier[self] . identifier[tmp_block] :
keyword[return]
identifier[eng] = identifier[self] . identifier[app] . identifier[engine]
identifier[dial] = identifier[self] . identifier[dialoglayout]
keyword[if] identifier[eng] . identifier[universal] . identifier[get] ( literal[string] ):
identifier[Logger] . identifier[info] ( literal[string] )
keyword[return]
keyword[if] identifier[dial] . identifier[idx] < identifier[len] ( identifier[dial] . identifier[todo] ):
identifier[Logger] . identifier[info] ( literal[string] )
keyword[return]
identifier[self] . identifier[tmp_block] = keyword[True]
identifier[self] . identifier[app] . identifier[unbind] (
identifier[branch] = identifier[self] . identifier[app] . identifier[_push_time] ,
identifier[turn] = identifier[self] . identifier[app] . identifier[_push_time] ,
identifier[tick] = identifier[self] . identifier[app] . identifier[_push_time]
)
identifier[eng] . identifier[next_turn] ( identifier[cb] = identifier[self] . identifier[_update_from_next_turn] ) | def next_turn(self, *args):
"""Advance time by one turn, if it's not blocked.
Block time by setting ``engine.universal['block'] = True``"""
if self.tmp_block:
return # depends on [control=['if'], data=[]]
eng = self.app.engine
dial = self.dialoglayout
if eng.universal.get('block'):
Logger.info("MainScreen: next_turn blocked, delete universal['block'] to unblock")
return # depends on [control=['if'], data=[]]
if dial.idx < len(dial.todo):
Logger.info("MainScreen: not advancing time while there's a dialog")
return # depends on [control=['if'], data=[]]
self.tmp_block = True
self.app.unbind(branch=self.app._push_time, turn=self.app._push_time, tick=self.app._push_time)
eng.next_turn(cb=self._update_from_next_turn) |
def getTreeBuilder(treeType, implementation=None, **kwargs):
    """Get a TreeBuilder class for various types of trees with built-in support.

    :arg treeType: the name of the tree type required (case-insensitive).
        Supported values are:

        * "dom" - A generic builder for DOM implementations, defaulting to a
          xml.dom.minidom based implementation.
        * "etree" - A generic builder for tree implementations exposing an
          ElementTree-like interface, defaulting to xml.etree.cElementTree if
          available and xml.etree.ElementTree if not.
        * "lxml" - A etree-based builder for lxml.etree, handling limitations
          of lxml's implementation.

    :arg implementation: (Currently applies to the "etree" and "dom" tree
        types). A module implementing the tree type e.g. xml.etree.ElementTree
        or xml.etree.cElementTree.

    :arg kwargs: Any additional options to pass to the TreeBuilder when
        creating it.

    Example:

    >>> from html5lib.treebuilders import getTreeBuilder
    >>> builder = getTreeBuilder('etree')
    """
    kind = treeType.lower()
    if kind in treeBuilderCache:
        return treeBuilderCache.get(kind)
    if kind == "dom":
        from . import dom
        # Come up with a sane default (pref. from the stdlib)
        if implementation is None:
            from xml.dom import minidom
            implementation = minidom
        # NEVER cache here, caching is done in the dom submodule
        return dom.getDomModule(implementation, **kwargs).TreeBuilder
    if kind == "etree":
        from . import etree
        if implementation is None:
            implementation = default_etree
        # NEVER cache here, caching is done in the etree submodule
        return etree.getETreeModule(implementation, **kwargs).TreeBuilder
    if kind == "lxml":
        from . import etree_lxml
        treeBuilderCache[kind] = etree_lxml.TreeBuilder
        return treeBuilderCache.get(kind)
    raise ValueError("""Unrecognised treebuilder "%s" """ % kind)
constant[Get a TreeBuilder class for various types of trees with built-in support
:arg treeType: the name of the tree type required (case-insensitive). Supported
values are:
* "dom" - A generic builder for DOM implementations, defaulting to a
xml.dom.minidom based implementation.
* "etree" - A generic builder for tree implementations exposing an
ElementTree-like interface, defaulting to xml.etree.cElementTree if
available and xml.etree.ElementTree if not.
* "lxml" - A etree-based builder for lxml.etree, handling limitations
of lxml's implementation.
:arg implementation: (Currently applies to the "etree" and "dom" tree
types). A module implementing the tree type e.g. xml.etree.ElementTree
or xml.etree.cElementTree.
:arg kwargs: Any additional options to pass to the TreeBuilder when
creating it.
Example:
>>> from html5lib.treebuilders import getTreeBuilder
>>> builder = getTreeBuilder('etree')
]
variable[treeType] assign[=] call[name[treeType].lower, parameter[]]
if compare[name[treeType] <ast.NotIn object at 0x7da2590d7190> name[treeBuilderCache]] begin[:]
if compare[name[treeType] equal[==] constant[dom]] begin[:]
from relative_module[None] import module[dom]
if compare[name[implementation] is constant[None]] begin[:]
from relative_module[xml.dom] import module[minidom]
variable[implementation] assign[=] name[minidom]
return[call[name[dom].getDomModule, parameter[name[implementation]]].TreeBuilder]
return[call[name[treeBuilderCache].get, parameter[name[treeType]]]] | keyword[def] identifier[getTreeBuilder] ( identifier[treeType] , identifier[implementation] = keyword[None] ,** identifier[kwargs] ):
literal[string]
identifier[treeType] = identifier[treeType] . identifier[lower] ()
keyword[if] identifier[treeType] keyword[not] keyword[in] identifier[treeBuilderCache] :
keyword[if] identifier[treeType] == literal[string] :
keyword[from] . keyword[import] identifier[dom]
keyword[if] identifier[implementation] keyword[is] keyword[None] :
keyword[from] identifier[xml] . identifier[dom] keyword[import] identifier[minidom]
identifier[implementation] = identifier[minidom]
keyword[return] identifier[dom] . identifier[getDomModule] ( identifier[implementation] ,** identifier[kwargs] ). identifier[TreeBuilder]
keyword[elif] identifier[treeType] == literal[string] :
keyword[from] . keyword[import] identifier[etree_lxml]
identifier[treeBuilderCache] [ identifier[treeType] ]= identifier[etree_lxml] . identifier[TreeBuilder]
keyword[elif] identifier[treeType] == literal[string] :
keyword[from] . keyword[import] identifier[etree]
keyword[if] identifier[implementation] keyword[is] keyword[None] :
identifier[implementation] = identifier[default_etree]
keyword[return] identifier[etree] . identifier[getETreeModule] ( identifier[implementation] ,** identifier[kwargs] ). identifier[TreeBuilder]
keyword[else] :
keyword[raise] identifier[ValueError] ( literal[string] % identifier[treeType] )
keyword[return] identifier[treeBuilderCache] . identifier[get] ( identifier[treeType] ) | def getTreeBuilder(treeType, implementation=None, **kwargs):
"""Get a TreeBuilder class for various types of trees with built-in support
:arg treeType: the name of the tree type required (case-insensitive). Supported
values are:
* "dom" - A generic builder for DOM implementations, defaulting to a
xml.dom.minidom based implementation.
* "etree" - A generic builder for tree implementations exposing an
ElementTree-like interface, defaulting to xml.etree.cElementTree if
available and xml.etree.ElementTree if not.
* "lxml" - A etree-based builder for lxml.etree, handling limitations
of lxml's implementation.
:arg implementation: (Currently applies to the "etree" and "dom" tree
types). A module implementing the tree type e.g. xml.etree.ElementTree
or xml.etree.cElementTree.
:arg kwargs: Any additional options to pass to the TreeBuilder when
creating it.
Example:
>>> from html5lib.treebuilders import getTreeBuilder
>>> builder = getTreeBuilder('etree')
"""
treeType = treeType.lower()
if treeType not in treeBuilderCache:
if treeType == 'dom':
from . import dom
# Come up with a sane default (pref. from the stdlib)
if implementation is None:
from xml.dom import minidom
implementation = minidom # depends on [control=['if'], data=['implementation']]
# NEVER cache here, caching is done in the dom submodule
return dom.getDomModule(implementation, **kwargs).TreeBuilder # depends on [control=['if'], data=[]]
elif treeType == 'lxml':
from . import etree_lxml
treeBuilderCache[treeType] = etree_lxml.TreeBuilder # depends on [control=['if'], data=['treeType']]
elif treeType == 'etree':
from . import etree
if implementation is None:
implementation = default_etree # depends on [control=['if'], data=['implementation']]
# NEVER cache here, caching is done in the etree submodule
return etree.getETreeModule(implementation, **kwargs).TreeBuilder # depends on [control=['if'], data=[]]
else:
raise ValueError('Unrecognised treebuilder "%s" ' % treeType) # depends on [control=['if'], data=['treeType', 'treeBuilderCache']]
return treeBuilderCache.get(treeType) |
def display_system(sys, style='vdw'):
    '''Display the system *sys* with the default viewer.
    '''
    viewer = QtViewer()
    # An FXAAEffect post-processing pass could be added here as well.
    viewer.add_post_processing(SSAOEffect)
    # Pick the renderer matching the requested drawing style.
    if style == 'vdw':
        rend = viewer.add_renderer(AtomRenderer,
                                   sys.r_array,
                                   sys.type_array,
                                   backend='impostors')
    if style == 'ball-and-stick':
        rend = viewer.add_renderer(BallAndStickRenderer,
                                   sys.r_array,
                                   sys.type_array,
                                   sys.bonds)
    if sys.box_vectors is None:
        # No periodic box: frame the camera on the atoms themselves.
        viewer.widget.camera.autozoom(sys.r_array)
    else:
        viewer.add_renderer(BoxRenderer, sys.box_vectors)
        # Frame the camera on the eight corners of the simulation box.
        a, b, c = sys.box_vectors
        corners = np.array([[0.0, 0.0, 0.0],
                            a, b, c,
                            a + b, a + c, b + c,
                            a + b + c])
        viewer.widget.camera.autozoom(corners)
    viewer.run()
constant[Display the system *sys* with the default viewer.
]
variable[v] assign[=] call[name[QtViewer], parameter[]]
call[name[v].add_post_processing, parameter[name[SSAOEffect]]]
if compare[name[style] equal[==] constant[vdw]] begin[:]
variable[sr] assign[=] call[name[v].add_renderer, parameter[name[AtomRenderer], name[sys].r_array, name[sys].type_array]]
if compare[name[style] equal[==] constant[ball-and-stick]] begin[:]
variable[sr] assign[=] call[name[v].add_renderer, parameter[name[BallAndStickRenderer], name[sys].r_array, name[sys].type_array, name[sys].bonds]]
if compare[name[sys].box_vectors is_not constant[None]] begin[:]
call[name[v].add_renderer, parameter[name[BoxRenderer], name[sys].box_vectors]]
<ast.Tuple object at 0x7da2054a4af0> assign[=] name[sys].box_vectors
variable[box_vertices] assign[=] call[name[np].array, parameter[list[[<ast.List object at 0x7da2054a4040>, <ast.Name object at 0x7da2054a5f30>, <ast.Name object at 0x7da2054a7010>, <ast.Name object at 0x7da2054a5c90>, <ast.BinOp object at 0x7da2054a4970>, <ast.BinOp object at 0x7da2054a5390>, <ast.BinOp object at 0x7da2054a41f0>, <ast.BinOp object at 0x7da2054a53f0>]]]]
call[name[v].widget.camera.autozoom, parameter[name[box_vertices]]]
call[name[v].run, parameter[]] | keyword[def] identifier[display_system] ( identifier[sys] , identifier[style] = literal[string] ):
literal[string]
identifier[v] = identifier[QtViewer] ()
identifier[v] . identifier[add_post_processing] ( identifier[SSAOEffect] )
keyword[if] identifier[style] == literal[string] :
identifier[sr] = identifier[v] . identifier[add_renderer] ( identifier[AtomRenderer] , identifier[sys] . identifier[r_array] , identifier[sys] . identifier[type_array] ,
identifier[backend] = literal[string] )
keyword[if] identifier[style] == literal[string] :
identifier[sr] = identifier[v] . identifier[add_renderer] ( identifier[BallAndStickRenderer] ,
identifier[sys] . identifier[r_array] ,
identifier[sys] . identifier[type_array] ,
identifier[sys] . identifier[bonds] )
keyword[if] identifier[sys] . identifier[box_vectors] keyword[is] keyword[not] keyword[None] :
identifier[v] . identifier[add_renderer] ( identifier[BoxRenderer] , identifier[sys] . identifier[box_vectors] )
identifier[a] , identifier[b] , identifier[c] = identifier[sys] . identifier[box_vectors]
identifier[box_vertices] = identifier[np] . identifier[array] ([[ literal[int] , literal[int] , literal[int] ],
identifier[a] , identifier[b] , identifier[c] ,
identifier[a] + identifier[b] , identifier[a] + identifier[c] , identifier[b] + identifier[c] ,
identifier[a] + identifier[b] + identifier[c] ])
identifier[v] . identifier[widget] . identifier[camera] . identifier[autozoom] ( identifier[box_vertices] )
keyword[else] :
identifier[v] . identifier[widget] . identifier[camera] . identifier[autozoom] ( identifier[sys] . identifier[r_array] )
identifier[v] . identifier[run] () | def display_system(sys, style='vdw'):
"""Display the system *sys* with the default viewer.
"""
v = QtViewer()
#v.add_post_processing(FXAAEffect)
v.add_post_processing(SSAOEffect)
if style == 'vdw':
sr = v.add_renderer(AtomRenderer, sys.r_array, sys.type_array, backend='impostors') # depends on [control=['if'], data=[]]
if style == 'ball-and-stick':
sr = v.add_renderer(BallAndStickRenderer, sys.r_array, sys.type_array, sys.bonds) # depends on [control=['if'], data=[]]
if sys.box_vectors is not None:
v.add_renderer(BoxRenderer, sys.box_vectors)
# We autozoom on the box
(a, b, c) = sys.box_vectors
box_vertices = np.array([[0.0, 0.0, 0.0], a, b, c, a + b, a + c, b + c, a + b + c])
v.widget.camera.autozoom(box_vertices) # depends on [control=['if'], data=[]]
else:
v.widget.camera.autozoom(sys.r_array)
v.run() |
def on_click(self, event):
        """
        Control DPMS with mouse clicks.
        """
        pressed = event["button"]
        if pressed == self.button_toggle:
            # Toggle DPMS: disable it when currently enabled, enable otherwise.
            if "DPMS is Enabled" in self.py3.command_output("xset -q"):
                self.py3.command_run("xset -dpms s off")
            else:
                self.py3.command_run("xset +dpms s on")
        if pressed == self.button_off:
            # Blank the screen immediately.
            self.py3.command_run("xset dpms force off")
constant[
Control DPMS with mouse clicks.
]
if compare[call[name[event]][constant[button]] equal[==] name[self].button_toggle] begin[:]
if compare[constant[DPMS is Enabled] in call[name[self].py3.command_output, parameter[constant[xset -q]]]] begin[:]
call[name[self].py3.command_run, parameter[constant[xset -dpms s off]]]
if compare[call[name[event]][constant[button]] equal[==] name[self].button_off] begin[:]
call[name[self].py3.command_run, parameter[constant[xset dpms force off]]] | keyword[def] identifier[on_click] ( identifier[self] , identifier[event] ):
literal[string]
keyword[if] identifier[event] [ literal[string] ]== identifier[self] . identifier[button_toggle] :
keyword[if] literal[string] keyword[in] identifier[self] . identifier[py3] . identifier[command_output] ( literal[string] ):
identifier[self] . identifier[py3] . identifier[command_run] ( literal[string] )
keyword[else] :
identifier[self] . identifier[py3] . identifier[command_run] ( literal[string] )
keyword[if] identifier[event] [ literal[string] ]== identifier[self] . identifier[button_off] :
identifier[self] . identifier[py3] . identifier[command_run] ( literal[string] ) | def on_click(self, event):
"""
Control DPMS with mouse clicks.
"""
if event['button'] == self.button_toggle:
if 'DPMS is Enabled' in self.py3.command_output('xset -q'):
self.py3.command_run('xset -dpms s off') # depends on [control=['if'], data=[]]
else:
self.py3.command_run('xset +dpms s on') # depends on [control=['if'], data=[]]
if event['button'] == self.button_off:
self.py3.command_run('xset dpms force off') # depends on [control=['if'], data=[]] |
def execute_task(bufs):
    """Deserialize the buffer and execute the task.
    Returns the result or throws exception.
    """
    # locals() returns a fresh dict for this call; it serves as both the
    # globals and locals namespace for the exec() below.
    user_ns = locals()
    user_ns.update({'__builtins__': __builtins__})
    # unpack_apply_message yields the callable plus its positional and
    # keyword arguments (copy=False avoids duplicating the buffers).
    f, args, kwargs = unpack_apply_message(bufs, user_ns, copy=False)
    # We might need to look into callability of the function from itself
    # since we change it's name in the new namespace
    prefix = "parsl_"
    fname = prefix + "f"
    argname = prefix + "args"
    kwargname = prefix + "kwargs"
    resultname = prefix + "result"
    # Bind the function and its arguments under prefixed names so they do
    # not collide with anything else in the namespace; the result slot is
    # pre-filled with a placeholder string that exec() overwrites.
    user_ns.update({fname: f,
                    argname: args,
                    kwargname: kwargs,
                    resultname: resultname})
    # Builds e.g. "parsl_result = parsl_f(*parsl_args, **parsl_kwargs)".
    code = "{0} = {1}(*{2}, **{3})".format(resultname, fname,
                                           argname, kwargname)
    try:
        # logger.debug("[RUNNER] Executing: {0}".format(code))
        exec(code, user_ns, user_ns)
    except Exception as e:
        # Log with traceback, then re-raise so the caller sees the failure.
        logger.warning("Caught exception; will raise it: {}".format(e), exc_info=True)
        raise e
    else:
        # logger.debug("[RUNNER] Result: {0}".format(user_ns.get(resultname)))
        return user_ns.get(resultname)
constant[Deserialize the buffer and execute the task.
Returns the result or throws exception.
]
variable[user_ns] assign[=] call[name[locals], parameter[]]
call[name[user_ns].update, parameter[dictionary[[<ast.Constant object at 0x7da18ede5060>], [<ast.Name object at 0x7da18ede4c10>]]]]
<ast.Tuple object at 0x7da18ede60e0> assign[=] call[name[unpack_apply_message], parameter[name[bufs], name[user_ns]]]
variable[prefix] assign[=] constant[parsl_]
variable[fname] assign[=] binary_operation[name[prefix] + constant[f]]
variable[argname] assign[=] binary_operation[name[prefix] + constant[args]]
variable[kwargname] assign[=] binary_operation[name[prefix] + constant[kwargs]]
variable[resultname] assign[=] binary_operation[name[prefix] + constant[result]]
call[name[user_ns].update, parameter[dictionary[[<ast.Name object at 0x7da18ede7d60>, <ast.Name object at 0x7da18ede7be0>, <ast.Name object at 0x7da18ede4ac0>, <ast.Name object at 0x7da18ede76d0>], [<ast.Name object at 0x7da18ede6590>, <ast.Name object at 0x7da18ede5090>, <ast.Name object at 0x7da18ede5720>, <ast.Name object at 0x7da18ede64d0>]]]]
variable[code] assign[=] call[constant[{0} = {1}(*{2}, **{3})].format, parameter[name[resultname], name[fname], name[argname], name[kwargname]]]
<ast.Try object at 0x7da18ede5600> | keyword[def] identifier[execute_task] ( identifier[bufs] ):
literal[string]
identifier[user_ns] = identifier[locals] ()
identifier[user_ns] . identifier[update] ({ literal[string] : identifier[__builtins__] })
identifier[f] , identifier[args] , identifier[kwargs] = identifier[unpack_apply_message] ( identifier[bufs] , identifier[user_ns] , identifier[copy] = keyword[False] )
identifier[prefix] = literal[string]
identifier[fname] = identifier[prefix] + literal[string]
identifier[argname] = identifier[prefix] + literal[string]
identifier[kwargname] = identifier[prefix] + literal[string]
identifier[resultname] = identifier[prefix] + literal[string]
identifier[user_ns] . identifier[update] ({ identifier[fname] : identifier[f] ,
identifier[argname] : identifier[args] ,
identifier[kwargname] : identifier[kwargs] ,
identifier[resultname] : identifier[resultname] })
identifier[code] = literal[string] . identifier[format] ( identifier[resultname] , identifier[fname] ,
identifier[argname] , identifier[kwargname] )
keyword[try] :
identifier[exec] ( identifier[code] , identifier[user_ns] , identifier[user_ns] )
keyword[except] identifier[Exception] keyword[as] identifier[e] :
identifier[logger] . identifier[warning] ( literal[string] . identifier[format] ( identifier[e] ), identifier[exc_info] = keyword[True] )
keyword[raise] identifier[e]
keyword[else] :
keyword[return] identifier[user_ns] . identifier[get] ( identifier[resultname] ) | def execute_task(bufs):
"""Deserialize the buffer and execute the task.
Returns the result or throws exception.
"""
user_ns = locals()
user_ns.update({'__builtins__': __builtins__})
(f, args, kwargs) = unpack_apply_message(bufs, user_ns, copy=False)
# We might need to look into callability of the function from itself
# since we change it's name in the new namespace
prefix = 'parsl_'
fname = prefix + 'f'
argname = prefix + 'args'
kwargname = prefix + 'kwargs'
resultname = prefix + 'result'
user_ns.update({fname: f, argname: args, kwargname: kwargs, resultname: resultname})
code = '{0} = {1}(*{2}, **{3})'.format(resultname, fname, argname, kwargname)
try:
# logger.debug("[RUNNER] Executing: {0}".format(code))
exec(code, user_ns, user_ns) # depends on [control=['try'], data=[]]
except Exception as e:
logger.warning('Caught exception; will raise it: {}'.format(e), exc_info=True)
raise e # depends on [control=['except'], data=['e']]
else:
# logger.debug("[RUNNER] Result: {0}".format(user_ns.get(resultname)))
return user_ns.get(resultname) |
def get_config_params(properties):
    '''Extract the set of configuration parameters from the properties attached
    to the schedule
    '''
    workflow_def = ''
    config = []
    for line in properties.split('\n'):
        if line.startswith('org.opencastproject.workflow.config'):
            # Keep only the last dotted component of the key.
            full_key, value = line.split('=', 1)
            config.append((full_key.rsplit('.', 1)[-1], value))
        elif line.startswith('org.opencastproject.workflow.definition'):
            workflow_def = line.split('=', 1)[-1]
    return workflow_def, config
constant[Extract the set of configuration parameters from the properties attached
to the schedule
]
variable[param] assign[=] list[[]]
variable[wdef] assign[=] constant[]
for taget[name[prop]] in starred[call[name[properties].split, parameter[constant[
]]]] begin[:]
if call[name[prop].startswith, parameter[constant[org.opencastproject.workflow.config]]] begin[:]
<ast.Tuple object at 0x7da204567a30> assign[=] call[name[prop].split, parameter[constant[=], constant[1]]]
variable[key] assign[=] call[call[name[key].split, parameter[constant[.]]]][<ast.UnaryOp object at 0x7da204564af0>]
call[name[param].append, parameter[tuple[[<ast.Name object at 0x7da204566440>, <ast.Name object at 0x7da204567700>]]]]
return[tuple[[<ast.Name object at 0x7da204566dd0>, <ast.Name object at 0x7da204564730>]]] | keyword[def] identifier[get_config_params] ( identifier[properties] ):
literal[string]
identifier[param] =[]
identifier[wdef] = literal[string]
keyword[for] identifier[prop] keyword[in] identifier[properties] . identifier[split] ( literal[string] ):
keyword[if] identifier[prop] . identifier[startswith] ( literal[string] ):
identifier[key] , identifier[val] = identifier[prop] . identifier[split] ( literal[string] , literal[int] )
identifier[key] = identifier[key] . identifier[split] ( literal[string] )[- literal[int] ]
identifier[param] . identifier[append] (( identifier[key] , identifier[val] ))
keyword[elif] identifier[prop] . identifier[startswith] ( literal[string] ):
identifier[wdef] = identifier[prop] . identifier[split] ( literal[string] , literal[int] )[- literal[int] ]
keyword[return] identifier[wdef] , identifier[param] | def get_config_params(properties):
"""Extract the set of configuration parameters from the properties attached
to the schedule
"""
param = []
wdef = ''
for prop in properties.split('\n'):
if prop.startswith('org.opencastproject.workflow.config'):
(key, val) = prop.split('=', 1)
key = key.split('.')[-1]
param.append((key, val)) # depends on [control=['if'], data=[]]
elif prop.startswith('org.opencastproject.workflow.definition'):
wdef = prop.split('=', 1)[-1] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['prop']]
return (wdef, param) |
def _get_metadap_dap(name, version=''):
    '''Return data for dap of given or latest version.'''
    meta = metadap(name)
    if not meta:
        raise DapiCommError('DAP {dap} not found.'.format(dap=name))
    if version:
        # A specific version was requested; fail loudly if it is missing.
        detail = dap(name, version)
        if not detail:
            raise DapiCommError(
                'DAP {dap} doesn\'t have version {version}.'.format(dap=name, version=version))
    else:
        # Prefer the latest stable release, falling back to the latest one.
        detail = meta['latest_stable'] or meta['latest']
        if detail:
            detail = data(detail)
    return meta, detail
constant[Return data for dap of given or latest version.]
variable[m] assign[=] call[name[metadap], parameter[name[name]]]
if <ast.UnaryOp object at 0x7da1b1026f50> begin[:]
<ast.Raise object at 0x7da1b10254b0>
if <ast.UnaryOp object at 0x7da1b1024df0> begin[:]
variable[d] assign[=] <ast.BoolOp object at 0x7da1b10271c0>
if name[d] begin[:]
variable[d] assign[=] call[name[data], parameter[name[d]]]
return[tuple[[<ast.Name object at 0x7da1b10267d0>, <ast.Name object at 0x7da1b1025fc0>]]] | keyword[def] identifier[_get_metadap_dap] ( identifier[name] , identifier[version] = literal[string] ):
literal[string]
identifier[m] = identifier[metadap] ( identifier[name] )
keyword[if] keyword[not] identifier[m] :
keyword[raise] identifier[DapiCommError] ( literal[string] . identifier[format] ( identifier[dap] = identifier[name] ))
keyword[if] keyword[not] identifier[version] :
identifier[d] = identifier[m] [ literal[string] ] keyword[or] identifier[m] [ literal[string] ]
keyword[if] identifier[d] :
identifier[d] = identifier[data] ( identifier[d] )
keyword[else] :
identifier[d] = identifier[dap] ( identifier[name] , identifier[version] )
keyword[if] keyword[not] identifier[d] :
keyword[raise] identifier[DapiCommError] (
literal[string] . identifier[format] ( identifier[dap] = identifier[name] , identifier[version] = identifier[version] ))
keyword[return] identifier[m] , identifier[d] | def _get_metadap_dap(name, version=''):
"""Return data for dap of given or latest version."""
m = metadap(name)
if not m:
raise DapiCommError('DAP {dap} not found.'.format(dap=name)) # depends on [control=['if'], data=[]]
if not version:
d = m['latest_stable'] or m['latest']
if d:
d = data(d) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
else:
d = dap(name, version)
if not d:
raise DapiCommError("DAP {dap} doesn't have version {version}.".format(dap=name, version=version)) # depends on [control=['if'], data=[]]
return (m, d) |
def listLastFires(self, *args, **kwargs):
        """
        Get information about recent hook fires
        This endpoint will return information about the last few times this hook has been
        fired, including whether the hook was fired successfully or not
        This method gives output: ``v1/list-lastFires-response.json#``
        This method is ``experimental``
        """
        # Delegate to the generic API-call machinery with this endpoint's
        # static metadata.
        endpoint = self.funcinfo["listLastFires"]
        return self._makeApiCall(endpoint, *args, **kwargs)
constant[
Get information about recent hook fires
This endpoint will return information about the the last few times this hook has been
fired, including whether the hook was fired successfully or not
This method gives output: ``v1/list-lastFires-response.json#``
This method is ``experimental``
]
return[call[name[self]._makeApiCall, parameter[call[name[self].funcinfo][constant[listLastFires]], <ast.Starred object at 0x7da20c6aaf50>]]] | keyword[def] identifier[listLastFires] ( identifier[self] ,* identifier[args] ,** identifier[kwargs] ):
literal[string]
keyword[return] identifier[self] . identifier[_makeApiCall] ( identifier[self] . identifier[funcinfo] [ literal[string] ],* identifier[args] ,** identifier[kwargs] ) | def listLastFires(self, *args, **kwargs):
"""
Get information about recent hook fires
This endpoint will return information about the the last few times this hook has been
fired, including whether the hook was fired successfully or not
This method gives output: ``v1/list-lastFires-response.json#``
This method is ``experimental``
"""
return self._makeApiCall(self.funcinfo['listLastFires'], *args, **kwargs) |
def get_events(self, name=None, time=None, chan=None, stage=None,
               qual=None):
        """Get list of events in the file.
        Parameters
        ----------
        name : str, optional
            name of the event of interest
        time : tuple of two float, optional
            start and end time of the period of interest
        chan : tuple of str, optional
            list of channels of interests
        stage : tuple of str, optional
            list of stages of interest
        qual : str, optional
            epoch signal qualifier (Good or Poor)
        Returns
        -------
        list of dict
            where each dict has 'name' (name of the event), 'start' (start
            time), 'end' (end time), 'chan' (channels of interest, can be
            empty), 'stage', 'quality' (signal quality)
        Raises
        ------
        IndexError
            When there is no rater / epochs at all
        """
        # get events inside window
        events = self.rater.find('events')
        # Restrict the XPath pattern to one event type when a name is given;
        # otherwise iterate over every event type.
        if name is not None:
            pattern = "event_type[@type='" + name + "']"
        else:
            pattern = "event_type"
        if chan is not None:
            if isinstance(chan, (tuple, list)):
                # The XML stores channels as one comma-separated string, so
                # normalize the requested channels to that representation.
                if chan[0] is not None:
                    chan = ', '.join(chan)
                else:
                    chan = None
        if stage or qual:
            # Epoch start times are used below to map each event onto the
            # epoch containing it (assumes self.epochs sorted by start —
            # TODO confirm against the epochs property).
            ep_starts = [x['start'] for x in self.epochs]
            if stage:
                ep_stages = [x['stage'] for x in self.epochs]
            if qual:
                ep_quality = [x['quality'] for x in self.epochs]
        ev = []
        for e_type in events.iterfind(pattern):
            event_name = e_type.get('type')
            for e in e_type:
                event_start = float(e.find('event_start').text)
                event_end = float(e.find('event_end').text)
                event_chan = e.find('event_chan').text
                event_qual = e.find('event_qual').text
                if event_chan is None: # xml doesn't store empty string
                    event_chan = ''
                if stage or qual:
                    # Locate the epoch that contains the event: bisect_left
                    # returns the first epoch starting at/after the event;
                    # step back one when the event starts strictly inside
                    # the previous epoch (or past the last epoch start).
                    pos = bisect_left(ep_starts, event_start)
                    if pos == len(ep_starts):
                        pos -= 1
                    elif event_start != ep_starts[pos]:
                        pos -= 1
                if stage is None:
                    stage_cond = True
                else:
                    ev_stage = ep_stages[pos]
                    stage_cond = ev_stage in stage
                if qual is None:
                    qual_cond = True
                else:
                    ev_qual = ep_quality[pos]
                    qual_cond = ev_qual == qual
                if time is None:
                    time_cond = True
                else:
                    # Keep any event that overlaps the window, not only
                    # events fully contained in it.
                    time_cond = time[0] <= event_end and time[1] >= event_start
                if chan is None:
                    chan_cond = True
                else:
                    chan_cond = event_chan == chan
                if time_cond and chan_cond and stage_cond and qual_cond:
                    one_ev = {'name': event_name,
                              'start': event_start,
                              'end': event_end,
                              'chan': event_chan.split(', '), # always a list
                              'stage': '',
                              'quality': event_qual
                              }
                    if stage is not None:
                        one_ev['stage'] = ev_stage
                    ev.append(one_ev)
        return ev
constant[Get list of events in the file.
Parameters
----------
name : str, optional
name of the event of interest
time : tuple of two float, optional
start and end time of the period of interest
chan : tuple of str, optional
list of channels of interests
stage : tuple of str, optional
list of stages of interest
qual : str, optional
epoch signal qualifier (Good or Poor)
Returns
-------
list of dict
where each dict has 'name' (name of the event), 'start' (start
time), 'end' (end time), 'chan' (channels of interest, can be
empty), 'stage', 'quality' (signal quality)
Raises
------
IndexError
When there is no rater / epochs at all
]
variable[events] assign[=] call[name[self].rater.find, parameter[constant[events]]]
if compare[name[name] is_not constant[None]] begin[:]
variable[pattern] assign[=] binary_operation[binary_operation[constant[event_type[@type='] + name[name]] + constant[']]]
if compare[name[chan] is_not constant[None]] begin[:]
if call[name[isinstance], parameter[name[chan], tuple[[<ast.Name object at 0x7da1b0dee590>, <ast.Name object at 0x7da1b0dee3e0>]]]] begin[:]
if compare[call[name[chan]][constant[0]] is_not constant[None]] begin[:]
variable[chan] assign[=] call[constant[, ].join, parameter[name[chan]]]
if <ast.BoolOp object at 0x7da20c6e6fb0> begin[:]
variable[ep_starts] assign[=] <ast.ListComp object at 0x7da20c6e5e40>
if name[stage] begin[:]
variable[ep_stages] assign[=] <ast.ListComp object at 0x7da20c6e4c10>
if name[qual] begin[:]
variable[ep_quality] assign[=] <ast.ListComp object at 0x7da20c6e7f10>
variable[ev] assign[=] list[[]]
for taget[name[e_type]] in starred[call[name[events].iterfind, parameter[name[pattern]]]] begin[:]
variable[event_name] assign[=] call[name[e_type].get, parameter[constant[type]]]
for taget[name[e]] in starred[name[e_type]] begin[:]
variable[event_start] assign[=] call[name[float], parameter[call[name[e].find, parameter[constant[event_start]]].text]]
variable[event_end] assign[=] call[name[float], parameter[call[name[e].find, parameter[constant[event_end]]].text]]
variable[event_chan] assign[=] call[name[e].find, parameter[constant[event_chan]]].text
variable[event_qual] assign[=] call[name[e].find, parameter[constant[event_qual]]].text
if compare[name[event_chan] is constant[None]] begin[:]
variable[event_chan] assign[=] constant[]
if <ast.BoolOp object at 0x7da20c6e72b0> begin[:]
variable[pos] assign[=] call[name[bisect_left], parameter[name[ep_starts], name[event_start]]]
if compare[name[pos] equal[==] call[name[len], parameter[name[ep_starts]]]] begin[:]
<ast.AugAssign object at 0x7da20c6e4af0>
if compare[name[stage] is constant[None]] begin[:]
variable[stage_cond] assign[=] constant[True]
if compare[name[qual] is constant[None]] begin[:]
variable[qual_cond] assign[=] constant[True]
if compare[name[time] is constant[None]] begin[:]
variable[time_cond] assign[=] constant[True]
if compare[name[chan] is constant[None]] begin[:]
variable[chan_cond] assign[=] constant[True]
if <ast.BoolOp object at 0x7da1b0ec37f0> begin[:]
variable[one_ev] assign[=] dictionary[[<ast.Constant object at 0x7da1b0ec3760>, <ast.Constant object at 0x7da1b0ec0430>, <ast.Constant object at 0x7da1b0ec2c80>, <ast.Constant object at 0x7da1b0ec14b0>, <ast.Constant object at 0x7da1b0ec3250>, <ast.Constant object at 0x7da1b0ec2650>], [<ast.Name object at 0x7da1b0ec28f0>, <ast.Name object at 0x7da1b0ec2200>, <ast.Name object at 0x7da1b0ec1ab0>, <ast.Call object at 0x7da1b0ec0460>, <ast.Constant object at 0x7da1b0ec2440>, <ast.Name object at 0x7da1b0ec1000>]]
if compare[name[stage] is_not constant[None]] begin[:]
call[name[one_ev]][constant[stage]] assign[=] name[ev_stage]
call[name[ev].append, parameter[name[one_ev]]]
return[name[ev]] | keyword[def] identifier[get_events] ( identifier[self] , identifier[name] = keyword[None] , identifier[time] = keyword[None] , identifier[chan] = keyword[None] , identifier[stage] = keyword[None] ,
identifier[qual] = keyword[None] ):
literal[string]
identifier[events] = identifier[self] . identifier[rater] . identifier[find] ( literal[string] )
keyword[if] identifier[name] keyword[is] keyword[not] keyword[None] :
identifier[pattern] = literal[string] + identifier[name] + literal[string]
keyword[else] :
identifier[pattern] = literal[string]
keyword[if] identifier[chan] keyword[is] keyword[not] keyword[None] :
keyword[if] identifier[isinstance] ( identifier[chan] ,( identifier[tuple] , identifier[list] )):
keyword[if] identifier[chan] [ literal[int] ] keyword[is] keyword[not] keyword[None] :
identifier[chan] = literal[string] . identifier[join] ( identifier[chan] )
keyword[else] :
identifier[chan] = keyword[None]
keyword[if] identifier[stage] keyword[or] identifier[qual] :
identifier[ep_starts] =[ identifier[x] [ literal[string] ] keyword[for] identifier[x] keyword[in] identifier[self] . identifier[epochs] ]
keyword[if] identifier[stage] :
identifier[ep_stages] =[ identifier[x] [ literal[string] ] keyword[for] identifier[x] keyword[in] identifier[self] . identifier[epochs] ]
keyword[if] identifier[qual] :
identifier[ep_quality] =[ identifier[x] [ literal[string] ] keyword[for] identifier[x] keyword[in] identifier[self] . identifier[epochs] ]
identifier[ev] =[]
keyword[for] identifier[e_type] keyword[in] identifier[events] . identifier[iterfind] ( identifier[pattern] ):
identifier[event_name] = identifier[e_type] . identifier[get] ( literal[string] )
keyword[for] identifier[e] keyword[in] identifier[e_type] :
identifier[event_start] = identifier[float] ( identifier[e] . identifier[find] ( literal[string] ). identifier[text] )
identifier[event_end] = identifier[float] ( identifier[e] . identifier[find] ( literal[string] ). identifier[text] )
identifier[event_chan] = identifier[e] . identifier[find] ( literal[string] ). identifier[text]
identifier[event_qual] = identifier[e] . identifier[find] ( literal[string] ). identifier[text]
keyword[if] identifier[event_chan] keyword[is] keyword[None] :
identifier[event_chan] = literal[string]
keyword[if] identifier[stage] keyword[or] identifier[qual] :
identifier[pos] = identifier[bisect_left] ( identifier[ep_starts] , identifier[event_start] )
keyword[if] identifier[pos] == identifier[len] ( identifier[ep_starts] ):
identifier[pos] -= literal[int]
keyword[elif] identifier[event_start] != identifier[ep_starts] [ identifier[pos] ]:
identifier[pos] -= literal[int]
keyword[if] identifier[stage] keyword[is] keyword[None] :
identifier[stage_cond] = keyword[True]
keyword[else] :
identifier[ev_stage] = identifier[ep_stages] [ identifier[pos] ]
identifier[stage_cond] = identifier[ev_stage] keyword[in] identifier[stage]
keyword[if] identifier[qual] keyword[is] keyword[None] :
identifier[qual_cond] = keyword[True]
keyword[else] :
identifier[ev_qual] = identifier[ep_quality] [ identifier[pos] ]
identifier[qual_cond] = identifier[ev_qual] == identifier[qual]
keyword[if] identifier[time] keyword[is] keyword[None] :
identifier[time_cond] = keyword[True]
keyword[else] :
identifier[time_cond] = identifier[time] [ literal[int] ]<= identifier[event_end] keyword[and] identifier[time] [ literal[int] ]>= identifier[event_start]
keyword[if] identifier[chan] keyword[is] keyword[None] :
identifier[chan_cond] = keyword[True]
keyword[else] :
identifier[chan_cond] = identifier[event_chan] == identifier[chan]
keyword[if] identifier[time_cond] keyword[and] identifier[chan_cond] keyword[and] identifier[stage_cond] keyword[and] identifier[qual_cond] :
identifier[one_ev] ={ literal[string] : identifier[event_name] ,
literal[string] : identifier[event_start] ,
literal[string] : identifier[event_end] ,
literal[string] : identifier[event_chan] . identifier[split] ( literal[string] ),
literal[string] : literal[string] ,
literal[string] : identifier[event_qual]
}
keyword[if] identifier[stage] keyword[is] keyword[not] keyword[None] :
identifier[one_ev] [ literal[string] ]= identifier[ev_stage]
identifier[ev] . identifier[append] ( identifier[one_ev] )
keyword[return] identifier[ev] | def get_events(self, name=None, time=None, chan=None, stage=None, qual=None):
"""Get list of events in the file.
Parameters
----------
name : str, optional
name of the event of interest
time : tuple of two float, optional
start and end time of the period of interest
chan : tuple of str, optional
list of channels of interests
stage : tuple of str, optional
list of stages of interest
qual : str, optional
epoch signal qualifier (Good or Poor)
Returns
-------
list of dict
where each dict has 'name' (name of the event), 'start' (start
time), 'end' (end time), 'chan' (channels of interest, can be
empty), 'stage', 'quality' (signal quality)
Raises
------
IndexError
When there is no rater / epochs at all
"""
# get events inside window
events = self.rater.find('events')
if name is not None:
pattern = "event_type[@type='" + name + "']" # depends on [control=['if'], data=['name']]
else:
pattern = 'event_type'
if chan is not None:
if isinstance(chan, (tuple, list)):
if chan[0] is not None:
chan = ', '.join(chan) # depends on [control=['if'], data=[]]
else:
chan = None # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['chan']]
if stage or qual:
ep_starts = [x['start'] for x in self.epochs]
if stage:
ep_stages = [x['stage'] for x in self.epochs] # depends on [control=['if'], data=[]]
if qual:
ep_quality = [x['quality'] for x in self.epochs] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
ev = []
for e_type in events.iterfind(pattern):
event_name = e_type.get('type')
for e in e_type:
event_start = float(e.find('event_start').text)
event_end = float(e.find('event_end').text)
event_chan = e.find('event_chan').text
event_qual = e.find('event_qual').text
if event_chan is None: # xml doesn't store empty string
event_chan = '' # depends on [control=['if'], data=['event_chan']]
if stage or qual:
pos = bisect_left(ep_starts, event_start)
if pos == len(ep_starts):
pos -= 1 # depends on [control=['if'], data=['pos']]
elif event_start != ep_starts[pos]:
pos -= 1 # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
if stage is None:
stage_cond = True # depends on [control=['if'], data=[]]
else:
ev_stage = ep_stages[pos]
stage_cond = ev_stage in stage
if qual is None:
qual_cond = True # depends on [control=['if'], data=[]]
else:
ev_qual = ep_quality[pos]
qual_cond = ev_qual == qual
if time is None:
time_cond = True # depends on [control=['if'], data=[]]
else:
time_cond = time[0] <= event_end and time[1] >= event_start
if chan is None:
chan_cond = True # depends on [control=['if'], data=[]]
else:
chan_cond = event_chan == chan
if time_cond and chan_cond and stage_cond and qual_cond: # always a list
one_ev = {'name': event_name, 'start': event_start, 'end': event_end, 'chan': event_chan.split(', '), 'stage': '', 'quality': event_qual}
if stage is not None:
one_ev['stage'] = ev_stage # depends on [control=['if'], data=[]]
ev.append(one_ev) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['e']] # depends on [control=['for'], data=['e_type']]
return ev |
def select_seqs(ol, seqs):
    '''
    Select the items of ``ol`` found at the indexes listed in ``seqs``,
    preserving the order of ``seqs``.
    from elist.elist import *
    ol = ['a','b','c','d']
    select_seqs(ol,[1,2])
    #['b', 'c']
    :param ol: source list
    :param seqs: list of integer indexes into ``ol``
    :return: new list with the selected items
    '''
    # Handle the empty selection up front: itemgetter() with zero arguments
    # raises TypeError, so the original order of operations crashed here
    # before its own len()==0 check could run.
    if len(seqs) == 0:
        return []
    picked = itemgetter(*seqs)(ol)
    # itemgetter returns the bare item for a single index and a tuple for
    # several; normalize both shapes to a list.
    if len(seqs) == 1:
        return [picked]
    return list(picked)
constant[
from elist.elist import *
ol = ['a','b','c','d']
select_seqs(ol,[1,2])
]
variable[rslt] assign[=] call[name[copy].deepcopy, parameter[name[ol]]]
variable[rslt] assign[=] call[call[name[itemgetter], parameter[<ast.Starred object at 0x7da20c6aa5c0>]], parameter[name[ol]]]
if compare[call[name[seqs].__len__, parameter[]] equal[==] constant[0]] begin[:]
variable[rslt] assign[=] list[[]]
return[name[rslt]] | keyword[def] identifier[select_seqs] ( identifier[ol] , identifier[seqs] ):
literal[string]
identifier[rslt] = identifier[copy] . identifier[deepcopy] ( identifier[ol] )
identifier[rslt] = identifier[itemgetter] (* identifier[seqs] )( identifier[ol] )
keyword[if] ( identifier[seqs] . identifier[__len__] ()== literal[int] ):
identifier[rslt] =[]
keyword[elif] ( identifier[seqs] . identifier[__len__] ()== literal[int] ):
identifier[rslt] =[ identifier[rslt] ]
keyword[else] :
identifier[rslt] = identifier[list] ( identifier[rslt] )
keyword[return] ( identifier[rslt] ) | def select_seqs(ol, seqs):
"""
from elist.elist import *
ol = ['a','b','c','d']
select_seqs(ol,[1,2])
"""
rslt = copy.deepcopy(ol)
rslt = itemgetter(*seqs)(ol)
if seqs.__len__() == 0:
rslt = [] # depends on [control=['if'], data=[]]
elif seqs.__len__() == 1:
rslt = [rslt] # depends on [control=['if'], data=[]]
else:
rslt = list(rslt)
return rslt |
def write_constraints(self, table):
    """Send DDL to create the specified `table` constraints
    :Parameters:
      - `table`: an instance of a :py:class:`mysql2pgsql.lib.mysql_reader.MysqlReader.Table` object that represents the table to read/write.
    Returns None
    """
    # Let the base writer build the constraint DDL, then run each statement
    # against this connection.
    for statement in super(PostgresDbWriter, self).write_constraints(table):
        self.execute(statement)
constant[Send DDL to create the specified `table` constraints
:Parameters:
- `table`: an instance of a :py:class:`mysql2pgsql.lib.mysql_reader.MysqlReader.Table` object that represents the table to read/write.
Returns None
]
variable[constraint_sql] assign[=] call[call[name[super], parameter[name[PostgresDbWriter], name[self]]].write_constraints, parameter[name[table]]]
for taget[name[sql]] in starred[name[constraint_sql]] begin[:]
call[name[self].execute, parameter[name[sql]]] | keyword[def] identifier[write_constraints] ( identifier[self] , identifier[table] ):
literal[string]
identifier[constraint_sql] = identifier[super] ( identifier[PostgresDbWriter] , identifier[self] ). identifier[write_constraints] ( identifier[table] )
keyword[for] identifier[sql] keyword[in] identifier[constraint_sql] :
identifier[self] . identifier[execute] ( identifier[sql] ) | def write_constraints(self, table):
"""Send DDL to create the specified `table` constraints
:Parameters:
- `table`: an instance of a :py:class:`mysql2pgsql.lib.mysql_reader.MysqlReader.Table` object that represents the table to read/write.
Returns None
"""
constraint_sql = super(PostgresDbWriter, self).write_constraints(table)
for sql in constraint_sql:
self.execute(sql) # depends on [control=['for'], data=['sql']] |
def export_aggregate_by_csv(ekey, dstore):
    """
    :param ekey: export key, i.e. a pair (datastore key, fmt)
    :param dstore: datastore object
    """
    key, fmt = ekey
    # Everything after the first '/' in the key selects what to aggregate.
    _token, what = key.split('/', 1)
    aggregated = extract(dstore, 'aggregate/' + what)
    writer = writers.CsvWriter(fmt=writers.FIVEDIGITS)
    out_path = dstore.export_path('%s.%s' % (sanitize(key), fmt))
    writer.save(aggregated.to_table(), out_path)
    return [out_path]
constant[
:param ekey: export key, i.e. a pair (datastore key, fmt)
:param dstore: datastore object
]
<ast.Tuple object at 0x7da18ede54e0> assign[=] call[call[name[ekey]][constant[0]].split, parameter[constant[/], constant[1]]]
variable[aw] assign[=] call[name[extract], parameter[name[dstore], binary_operation[constant[aggregate/] + name[what]]]]
variable[fnames] assign[=] list[[]]
variable[writer] assign[=] call[name[writers].CsvWriter, parameter[]]
variable[path] assign[=] binary_operation[constant[%s.%s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Call object at 0x7da18ede4b80>, <ast.Subscript object at 0x7da18ede6bc0>]]]
variable[fname] assign[=] call[name[dstore].export_path, parameter[name[path]]]
call[name[writer].save, parameter[call[name[aw].to_table, parameter[]], name[fname]]]
call[name[fnames].append, parameter[name[fname]]]
return[name[fnames]] | keyword[def] identifier[export_aggregate_by_csv] ( identifier[ekey] , identifier[dstore] ):
literal[string]
identifier[token] , identifier[what] = identifier[ekey] [ literal[int] ]. identifier[split] ( literal[string] , literal[int] )
identifier[aw] = identifier[extract] ( identifier[dstore] , literal[string] + identifier[what] )
identifier[fnames] =[]
identifier[writer] = identifier[writers] . identifier[CsvWriter] ( identifier[fmt] = identifier[writers] . identifier[FIVEDIGITS] )
identifier[path] = literal[string] %( identifier[sanitize] ( identifier[ekey] [ literal[int] ]), identifier[ekey] [ literal[int] ])
identifier[fname] = identifier[dstore] . identifier[export_path] ( identifier[path] )
identifier[writer] . identifier[save] ( identifier[aw] . identifier[to_table] (), identifier[fname] )
identifier[fnames] . identifier[append] ( identifier[fname] )
keyword[return] identifier[fnames] | def export_aggregate_by_csv(ekey, dstore):
"""
:param ekey: export key, i.e. a pair (datastore key, fmt)
:param dstore: datastore object
"""
(token, what) = ekey[0].split('/', 1)
aw = extract(dstore, 'aggregate/' + what)
fnames = []
writer = writers.CsvWriter(fmt=writers.FIVEDIGITS)
path = '%s.%s' % (sanitize(ekey[0]), ekey[1])
fname = dstore.export_path(path)
writer.save(aw.to_table(), fname)
fnames.append(fname)
return fnames |
def createCategoryFilter(self, filterName, positiveExamples, negativeExamples=None):
    """Get a classifier filter (fingerprint) for positive and negative text samples
    Args:
        filterName, str: A unique name for the filter. (required)
        positiveExamples, list(str): The list of positive example texts. (required)
        negativeExamples, list(str): The list of negative example texts. (optional)
    Returns:
        CategoryFilter
    Raises:
        CorticalioException: if the request was not successful
    """
    # None sentinel instead of a mutable default argument; passing an
    # explicit [] keeps exactly the previous behavior.
    if negativeExamples is None:
        negativeExamples = []
    samples = {"positiveExamples": [{"text": s} for s in positiveExamples],
               "negativeExamples": [{"text": s} for s in negativeExamples]}
    body = json.dumps(samples)
    return self._classify.createCategoryFilter(self._retina, filterName, body)
constant[Get a classifier filter (fingerprint) for positive and negative text samples
Args:
filterName, str: A unique name for the filter. (required)
positiveExamples, list(str): The list of positive example texts. (required)
negativeExamples, list(str): The list of negative example texts. (optional)
Returns:
CategoryFilter
Raises:
CorticalioException: if the request was not successful
]
variable[samples] assign[=] dictionary[[<ast.Constant object at 0x7da2054a4e50>, <ast.Constant object at 0x7da2054a42b0>], [<ast.ListComp object at 0x7da2054a4bb0>, <ast.ListComp object at 0x7da2054a7e50>]]
variable[body] assign[=] call[name[json].dumps, parameter[name[samples]]]
return[call[name[self]._classify.createCategoryFilter, parameter[name[self]._retina, name[filterName], name[body]]]] | keyword[def] identifier[createCategoryFilter] ( identifier[self] , identifier[filterName] , identifier[positiveExamples] , identifier[negativeExamples] =[]):
literal[string]
identifier[samples] ={ literal[string] :[{ literal[string] : identifier[s] } keyword[for] identifier[s] keyword[in] identifier[positiveExamples] ],
literal[string] :[{ literal[string] : identifier[s] } keyword[for] identifier[s] keyword[in] identifier[negativeExamples] ]}
identifier[body] = identifier[json] . identifier[dumps] ( identifier[samples] )
keyword[return] identifier[self] . identifier[_classify] . identifier[createCategoryFilter] ( identifier[self] . identifier[_retina] , identifier[filterName] , identifier[body] ) | def createCategoryFilter(self, filterName, positiveExamples, negativeExamples=[]):
"""Get a classifier filter (fingerprint) for positive and negative text samples
Args:
filterName, str: A unique name for the filter. (required)
positiveExamples, list(str): The list of positive example texts. (required)
negativeExamples, list(str): The list of negative example texts. (optional)
Returns:
CategoryFilter
Raises:
CorticalioException: if the request was not successful
"""
samples = {'positiveExamples': [{'text': s} for s in positiveExamples], 'negativeExamples': [{'text': s} for s in negativeExamples]}
body = json.dumps(samples)
return self._classify.createCategoryFilter(self._retina, filterName, body) |
def _updateStream(self, xref=0, stream=None, new=0):
    """_updateStream(self, xref=0, stream=None, new=0) -> PyObject *"""
    # A closed or encrypted document must not be modified.
    if not (self.isClosed or self.isEncrypted):
        return _fitz.Document__updateStream(self, xref, stream, new)
    raise ValueError("operation illegal for closed / encrypted doc")
constant[_updateStream(self, xref=0, stream=None, new=0) -> PyObject *]
if <ast.BoolOp object at 0x7da18f810940> begin[:]
<ast.Raise object at 0x7da18f8111e0>
return[call[name[_fitz].Document__updateStream, parameter[name[self], name[xref], name[stream], name[new]]]] | keyword[def] identifier[_updateStream] ( identifier[self] , identifier[xref] = literal[int] , identifier[stream] = keyword[None] , identifier[new] = literal[int] ):
literal[string]
keyword[if] identifier[self] . identifier[isClosed] keyword[or] identifier[self] . identifier[isEncrypted] :
keyword[raise] identifier[ValueError] ( literal[string] )
keyword[return] identifier[_fitz] . identifier[Document__updateStream] ( identifier[self] , identifier[xref] , identifier[stream] , identifier[new] ) | def _updateStream(self, xref=0, stream=None, new=0):
"""_updateStream(self, xref=0, stream=None, new=0) -> PyObject *"""
if self.isClosed or self.isEncrypted:
raise ValueError('operation illegal for closed / encrypted doc') # depends on [control=['if'], data=[]]
return _fitz.Document__updateStream(self, xref, stream, new) |
def get_rest_apis(self, project_name):
    """
    Generator that allows to iterate per every available apis.
    """
    # Single page of up to 500 APIs; yield only the ones whose name
    # matches this project.
    response = self.apigateway_client.get_rest_apis(limit=500)
    for rest_api in response['items']:
        if rest_api['name'] == project_name:
            yield rest_api
constant[
Generator that allows to iterate per every available apis.
]
variable[all_apis] assign[=] call[name[self].apigateway_client.get_rest_apis, parameter[]]
for taget[name[api]] in starred[call[name[all_apis]][constant[items]]] begin[:]
if compare[call[name[api]][constant[name]] not_equal[!=] name[project_name]] begin[:]
continue
<ast.Yield object at 0x7da1b1f701f0> | keyword[def] identifier[get_rest_apis] ( identifier[self] , identifier[project_name] ):
literal[string]
identifier[all_apis] = identifier[self] . identifier[apigateway_client] . identifier[get_rest_apis] (
identifier[limit] = literal[int]
)
keyword[for] identifier[api] keyword[in] identifier[all_apis] [ literal[string] ]:
keyword[if] identifier[api] [ literal[string] ]!= identifier[project_name] :
keyword[continue]
keyword[yield] identifier[api] | def get_rest_apis(self, project_name):
"""
Generator that allows to iterate per every available apis.
"""
all_apis = self.apigateway_client.get_rest_apis(limit=500)
for api in all_apis['items']:
if api['name'] != project_name:
continue # depends on [control=['if'], data=[]]
yield api # depends on [control=['for'], data=['api']] |
def pixy_set_led(self, r, g, b):
    """
    Sends the setLed Pixy command.
    This method sets the RGB LED on front of Pixy.
    :param r: red range between 0 and 255
    :param g: green range between 0 and 255
    :param b: blue range between 0 and 255
    :returns: No return value.
    """
    # Schedule the async core command, then block until it finishes.
    coroutine = self.core.pixy_set_led(r, g, b)
    led_task = asyncio.ensure_future(coroutine)
    self.loop.run_until_complete(led_task)
self.loop.run_until_complete(task) | def function[pixy_set_led, parameter[self, r, g, b]]:
constant[
Sends the setLed Pixy command.
This method sets the RGB LED on front of Pixy.
:param r: red range between 0 and 255
:param g: green range between 0 and 255
:param b: blue range between 0 and 255
:returns: No return value.
]
variable[task] assign[=] call[name[asyncio].ensure_future, parameter[call[name[self].core.pixy_set_led, parameter[name[r], name[g], name[b]]]]]
call[name[self].loop.run_until_complete, parameter[name[task]]] | keyword[def] identifier[pixy_set_led] ( identifier[self] , identifier[r] , identifier[g] , identifier[b] ):
literal[string]
identifier[task] = identifier[asyncio] . identifier[ensure_future] ( identifier[self] . identifier[core] . identifier[pixy_set_led] ( identifier[r] , identifier[g] , identifier[b] ))
identifier[self] . identifier[loop] . identifier[run_until_complete] ( identifier[task] ) | def pixy_set_led(self, r, g, b):
"""
Sends the setLed Pixy command.
This method sets the RGB LED on front of Pixy.
:param r: red range between 0 and 255
:param g: green range between 0 and 255
:param b: blue range between 0 and 255
:returns: No return value.
"""
task = asyncio.ensure_future(self.core.pixy_set_led(r, g, b))
self.loop.run_until_complete(task) |
def change_ssh_pwd(self, pwd=None, comment=None):
    """
    Executes a change SSH password operation on the specified node
    :param str pwd: changed password value
    :param str comment: optional comment for audit log
    :raises NodeCommandFailed: cannot change ssh password
    :return: None
    """
    request_kwargs = {
        'method': 'update',
        'resource': 'change_ssh_pwd',
        'params': {'comment': comment},
        'json': {'value': pwd},
    }
    self.make_request(NodeCommandFailed, **request_kwargs)
json={'value': pwd}) | def function[change_ssh_pwd, parameter[self, pwd, comment]]:
constant[
Executes a change SSH password operation on the specified node
:param str pwd: changed password value
:param str comment: optional comment for audit log
:raises NodeCommandFailed: cannot change ssh password
:return: None
]
call[name[self].make_request, parameter[name[NodeCommandFailed]]] | keyword[def] identifier[change_ssh_pwd] ( identifier[self] , identifier[pwd] = keyword[None] , identifier[comment] = keyword[None] ):
literal[string]
identifier[self] . identifier[make_request] (
identifier[NodeCommandFailed] ,
identifier[method] = literal[string] ,
identifier[resource] = literal[string] ,
identifier[params] ={ literal[string] : identifier[comment] },
identifier[json] ={ literal[string] : identifier[pwd] }) | def change_ssh_pwd(self, pwd=None, comment=None):
"""
Executes a change SSH password operation on the specified node
:param str pwd: changed password value
:param str comment: optional comment for audit log
:raises NodeCommandFailed: cannot change ssh password
:return: None
"""
self.make_request(NodeCommandFailed, method='update', resource='change_ssh_pwd', params={'comment': comment}, json={'value': pwd}) |
def get_dataset(self, dsid, dsinfo):
    """Get dataset function
    Args:
        dsid: Dataset ID
        param2: Dataset Information
    Returns:
        Dask DataArray: Data
    """
    # Explicit 'file_key' in the dataset info wins over the dataset name.
    lookup_key = dsinfo.get('file_key', dsid.name)
    dataset = self[lookup_key]
    dataset.attrs.update(dsinfo)
    dataset.attrs['platform_name'] = self['/attr/satellite_name']
    dataset.attrs['sensor'] = self['/attr/instrument_name']
    return dataset
constant[Get dataset function
Args:
dsid: Dataset ID
param2: Dataset Information
Returns:
Dask DataArray: Data
]
variable[data] assign[=] call[name[self]][call[name[dsinfo].get, parameter[constant[file_key], name[dsid].name]]]
call[name[data].attrs.update, parameter[name[dsinfo]]]
call[name[data].attrs][constant[platform_name]] assign[=] call[name[self]][constant[/attr/satellite_name]]
call[name[data].attrs][constant[sensor]] assign[=] call[name[self]][constant[/attr/instrument_name]]
return[name[data]] | keyword[def] identifier[get_dataset] ( identifier[self] , identifier[dsid] , identifier[dsinfo] ):
literal[string]
identifier[data] = identifier[self] [ identifier[dsinfo] . identifier[get] ( literal[string] , identifier[dsid] . identifier[name] )]
identifier[data] . identifier[attrs] . identifier[update] ( identifier[dsinfo] )
identifier[data] . identifier[attrs] [ literal[string] ]= identifier[self] [ literal[string] ]
identifier[data] . identifier[attrs] [ literal[string] ]= identifier[self] [ literal[string] ]
keyword[return] identifier[data] | def get_dataset(self, dsid, dsinfo):
"""Get dataset function
Args:
dsid: Dataset ID
param2: Dataset Information
Returns:
Dask DataArray: Data
"""
data = self[dsinfo.get('file_key', dsid.name)]
data.attrs.update(dsinfo)
data.attrs['platform_name'] = self['/attr/satellite_name']
data.attrs['sensor'] = self['/attr/instrument_name']
return data |
def _prewarm_versatileimagefield(size_key, versatileimagefieldfile):
    """
    Returns a 2-tuple:
        0: bool signifying whether the image was successfully pre-warmed
        1: The url of the successfully created image OR the path on storage of
           the image that was not able to be successfully created.
    Arguments:
        `size_key`: A VersatileImageField size key. Examples:
            * 'crop__800x450'
            * 'thumbnail__800x800'
        `versatileimagefieldfile`: A VersatileImageFieldFile instance
    """
    versatileimagefieldfile.create_on_demand = True
    try:
        generated_url = get_url_from_image_key(versatileimagefieldfile, size_key)
    except Exception:
        # Creation failed: report the storage path instead of a URL.
        storage_path = versatileimagefieldfile.name
        logger.exception('Thumbnail generation failed',
                         extra={'path': storage_path})
        return (False, storage_path)
    return (True, generated_url)
constant[
Returns a 2-tuple:
0: bool signifying whether the image was successfully pre-warmed
1: The url of the successfully created image OR the path on storage of
the image that was not able to be successfully created.
Arguments:
`size_key_list`: A list of VersatileImageField size keys. Examples:
* 'crop__800x450'
* 'thumbnail__800x800'
`versatileimagefieldfile`: A VersatileImageFieldFile instance
]
name[versatileimagefieldfile].create_on_demand assign[=] constant[True]
<ast.Try object at 0x7da1b26af1c0>
return[tuple[[<ast.Name object at 0x7da1b025d510>, <ast.Name object at 0x7da1b025c6d0>]]] | keyword[def] identifier[_prewarm_versatileimagefield] ( identifier[size_key] , identifier[versatileimagefieldfile] ):
literal[string]
identifier[versatileimagefieldfile] . identifier[create_on_demand] = keyword[True]
keyword[try] :
identifier[url] = identifier[get_url_from_image_key] ( identifier[versatileimagefieldfile] , identifier[size_key] )
keyword[except] identifier[Exception] :
identifier[success] = keyword[False]
identifier[url_or_filepath] = identifier[versatileimagefieldfile] . identifier[name]
identifier[logger] . identifier[exception] ( literal[string] ,
identifier[extra] ={ literal[string] : identifier[url_or_filepath] })
keyword[else] :
identifier[success] = keyword[True]
identifier[url_or_filepath] = identifier[url]
keyword[return] ( identifier[success] , identifier[url_or_filepath] ) | def _prewarm_versatileimagefield(size_key, versatileimagefieldfile):
"""
Returns a 2-tuple:
0: bool signifying whether the image was successfully pre-warmed
1: The url of the successfully created image OR the path on storage of
the image that was not able to be successfully created.
Arguments:
`size_key_list`: A list of VersatileImageField size keys. Examples:
* 'crop__800x450'
* 'thumbnail__800x800'
`versatileimagefieldfile`: A VersatileImageFieldFile instance
"""
versatileimagefieldfile.create_on_demand = True
try:
url = get_url_from_image_key(versatileimagefieldfile, size_key) # depends on [control=['try'], data=[]]
except Exception:
success = False
url_or_filepath = versatileimagefieldfile.name
logger.exception('Thumbnail generation failed', extra={'path': url_or_filepath}) # depends on [control=['except'], data=[]]
else:
success = True
url_or_filepath = url
return (success, url_or_filepath) |
def format_field(self, value, format_spec):
    """Format specifiers are described in :func:`format_field` which is a
    static function.
    """
    if not format_spec:
        spec = None
        arg = None
    else:
        # First character is the specifier; the remainder (if any) is its
        # argument.
        spec = format_spec[0]
        arg = format_spec[1:] or None
    return self._format_field(spec, arg, value, self.numeric_locale)
constant[Format specifiers are described in :func:`format_field` which is a
static function.
]
if name[format_spec] begin[:]
<ast.Tuple object at 0x7da18c4cc670> assign[=] tuple[[<ast.Subscript object at 0x7da18c4cebf0>, <ast.Subscript object at 0x7da18c4ccc40>]]
variable[arg] assign[=] <ast.BoolOp object at 0x7da18c4ccd00>
return[call[name[self]._format_field, parameter[name[spec], name[arg], name[value], name[self].numeric_locale]]] | keyword[def] identifier[format_field] ( identifier[self] , identifier[value] , identifier[format_spec] ):
literal[string]
keyword[if] identifier[format_spec] :
identifier[spec] , identifier[arg] = identifier[format_spec] [ literal[int] ], identifier[format_spec] [ literal[int] :]
identifier[arg] = identifier[arg] keyword[or] keyword[None]
keyword[else] :
identifier[spec] = identifier[arg] = keyword[None]
keyword[return] identifier[self] . identifier[_format_field] ( identifier[spec] , identifier[arg] , identifier[value] , identifier[self] . identifier[numeric_locale] ) | def format_field(self, value, format_spec):
"""Format specifiers are described in :func:`format_field` which is a
static function.
"""
if format_spec:
(spec, arg) = (format_spec[0], format_spec[1:])
arg = arg or None # depends on [control=['if'], data=[]]
else:
spec = arg = None
return self._format_field(spec, arg, value, self.numeric_locale) |
def quantum_successors(self, node):
    """Returns list of the successors of a node that are
    connected by a quantum edge as DAGNodes."""
    if isinstance(node, int):
        # Integer node ids are a deprecated calling convention; translate
        # to the DAGNode before proceeding.
        warnings.warn('Calling quantum_successors() with a node id is deprecated,'
                      ' use a DAGNode instead',
                      DeprecationWarning, 2)
        node = self._id_to_node[node]
    # Keep only successors reached over a quantum (QuantumRegister) wire.
    return [succ for succ in self.successors(node)
            if isinstance(
                self._multi_graph.get_edge_data(node, succ, key=0)['wire'][0],
                QuantumRegister)]
constant[Returns list of the successors of a node that are
connected by a quantum edge as DAGNodes.]
if call[name[isinstance], parameter[name[node], name[int]]] begin[:]
call[name[warnings].warn, parameter[constant[Calling quantum_successors() with a node id is deprecated, use a DAGNode instead], name[DeprecationWarning], constant[2]]]
variable[node] assign[=] call[name[self]._id_to_node][name[node]]
variable[successors] assign[=] list[[]]
for taget[name[successor]] in starred[call[name[self].successors, parameter[name[node]]]] begin[:]
if call[name[isinstance], parameter[call[call[call[name[self]._multi_graph.get_edge_data, parameter[name[node], name[successor]]]][constant[wire]]][constant[0]], name[QuantumRegister]]] begin[:]
call[name[successors].append, parameter[name[successor]]]
return[name[successors]] | keyword[def] identifier[quantum_successors] ( identifier[self] , identifier[node] ):
literal[string]
keyword[if] identifier[isinstance] ( identifier[node] , identifier[int] ):
identifier[warnings] . identifier[warn] ( literal[string]
literal[string] ,
identifier[DeprecationWarning] , literal[int] )
identifier[node] = identifier[self] . identifier[_id_to_node] [ identifier[node] ]
identifier[successors] =[]
keyword[for] identifier[successor] keyword[in] identifier[self] . identifier[successors] ( identifier[node] ):
keyword[if] identifier[isinstance] ( identifier[self] . identifier[_multi_graph] . identifier[get_edge_data] (
identifier[node] , identifier[successor] , identifier[key] = literal[int] )[ literal[string] ][ literal[int] ],
identifier[QuantumRegister] ):
identifier[successors] . identifier[append] ( identifier[successor] )
keyword[return] identifier[successors] | def quantum_successors(self, node):
"""Returns list of the successors of a node that are
connected by a quantum edge as DAGNodes."""
if isinstance(node, int):
warnings.warn('Calling quantum_successors() with a node id is deprecated, use a DAGNode instead', DeprecationWarning, 2)
node = self._id_to_node[node] # depends on [control=['if'], data=[]]
successors = []
for successor in self.successors(node):
if isinstance(self._multi_graph.get_edge_data(node, successor, key=0)['wire'][0], QuantumRegister):
successors.append(successor) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['successor']]
return successors |
def set_style(self, input_feeds):
    """Set target style variables.
    Expected usage:
    style_loss = StyleLoss(style_layers)
    ...
    init_op = tf.global_variables_initializer()
    init_op.run()
    feeds = {... session.run() 'feeds' argument that will make 'style_layers'
             tensors evaluate to activation values of style image...}
    style_loss.set_style(feeds)  # this must be called after 'init_op.run()'
    """
    # Evaluate the Gram matrices of the style image once, then copy each
    # result into its matching target variable.
    session = tf.get_default_session()
    gram_values = session.run(self.input_grams, input_feeds)
    for target_var, gram in zip(self.target_vars, gram_values):
        target_var.load(gram)
constant[Set target style variables.
Expected usage:
style_loss = StyleLoss(style_layers)
...
init_op = tf.global_variables_initializer()
init_op.run()
feeds = {... session.run() 'feeds' argument that will make 'style_layers'
tensors evaluate to activation values of style image...}
style_loss.set_style(feeds) # this must be called after 'init_op.run()'
]
variable[sess] assign[=] call[name[tf].get_default_session, parameter[]]
variable[computed] assign[=] call[name[sess].run, parameter[name[self].input_grams, name[input_feeds]]]
for taget[tuple[[<ast.Name object at 0x7da1b1ea0520>, <ast.Name object at 0x7da1b1ea2320>]]] in starred[call[name[zip], parameter[name[self].target_vars, name[computed]]]] begin[:]
call[name[v].load, parameter[name[g]]] | keyword[def] identifier[set_style] ( identifier[self] , identifier[input_feeds] ):
literal[string]
identifier[sess] = identifier[tf] . identifier[get_default_session] ()
identifier[computed] = identifier[sess] . identifier[run] ( identifier[self] . identifier[input_grams] , identifier[input_feeds] )
keyword[for] identifier[v] , identifier[g] keyword[in] identifier[zip] ( identifier[self] . identifier[target_vars] , identifier[computed] ):
identifier[v] . identifier[load] ( identifier[g] ) | def set_style(self, input_feeds):
"""Set target style variables.
Expected usage:
style_loss = StyleLoss(style_layers)
...
init_op = tf.global_variables_initializer()
init_op.run()
feeds = {... session.run() 'feeds' argument that will make 'style_layers'
tensors evaluate to activation values of style image...}
style_loss.set_style(feeds) # this must be called after 'init_op.run()'
"""
sess = tf.get_default_session()
computed = sess.run(self.input_grams, input_feeds)
for (v, g) in zip(self.target_vars, computed):
v.load(g) # depends on [control=['for'], data=[]] |
def list_mapping(html_cleaned):
    """Map the preprocessed HTML document to a list and sorted index pairs, and extract the fake title.
    Keyword arguments:
    html_cleaned -- preprocessed page source code, as a string
    Return:
    unit_raw -- the text lines of the page
    init_dict -- (index, line length) pairs sorted by line length, descending
    fake_title -- the "fake" title, i.e. the text line from the page's <title>
    """
    unit_raw = html_cleaned.split('\n')
    # The first line the CDM model flags (PTN != 0) is the candidate title.
    # `!= 0` replaces the original `is not 0`, which compared identity to an
    # int literal (works only via CPython interning; SyntaxWarning on 3.8+).
    fake_title = None
    for line in unit_raw:
        if CDM(line).PTN != 0:
            fake_title = line
            break
    # Rank every line index by its line length, longest first.
    length_by_index = {idx: len(line) for idx, line in enumerate(unit_raw)}
    init_dict = sorted(length_by_index.items(), key=lambda item: item[1], reverse=True)
    if fake_title is not None:
        log('debug', '映射成功,提取的虚假标题为:【{}】'.format(fake_title))
    else:
        # No line matched: report the failure and fall back to an empty
        # title (the original handled this via UnboundLocalError).
        fake_title = ''
        log('err', '虚假标题提取失败')
    return unit_raw, init_dict, fake_title
constant[将预处理后的网页文档映射成列表和字典,并提取虚假标题
Keyword arguments:
html_cleaned -- 预处理后的网页源代码,字符串类型
Return:
unit_raw -- 网页文本行
init_dict -- 字典的key是索引,value是网页文本行,并按照网页文本行长度降序排序
fake_title -- 虚假标题,即网页源代码<title>中的文本行
]
variable[unit_raw] assign[=] call[name[html_cleaned].split, parameter[constant[
]]]
for taget[name[i]] in starred[name[unit_raw]] begin[:]
variable[c] assign[=] call[name[CDM], parameter[name[i]]]
if compare[name[c].PTN is_not constant[0]] begin[:]
variable[fake_title] assign[=] name[i]
break
variable[init_list] assign[=] list[[]]
variable[init_dict] assign[=] dictionary[[], []]
for taget[name[i]] in starred[name[unit_raw]] begin[:]
call[name[init_list].append, parameter[call[name[len], parameter[name[i]]]]]
for taget[name[i]] in starred[call[name[range], parameter[constant[0], call[name[len], parameter[name[init_list]]]]]] begin[:]
call[name[init_dict]][name[i]] assign[=] call[name[init_list]][name[i]]
variable[init_dict] assign[=] call[name[sorted], parameter[call[name[init_dict].items, parameter[]]]]
<ast.Try object at 0x7da1b09d04f0>
return[tuple[[<ast.Name object at 0x7da1b09d0df0>, <ast.Name object at 0x7da1b09d2890>, <ast.Name object at 0x7da1b09d3d90>]]] | keyword[def] identifier[list_mapping] ( identifier[html_cleaned] ):
literal[string]
identifier[unit_raw] = identifier[html_cleaned] . identifier[split] ( literal[string] )
keyword[for] identifier[i] keyword[in] identifier[unit_raw] :
identifier[c] = identifier[CDM] ( identifier[i] )
keyword[if] identifier[c] . identifier[PTN] keyword[is] keyword[not] literal[int] :
identifier[fake_title] = identifier[i]
keyword[break]
identifier[init_list] =[]
identifier[init_dict] ={}
keyword[for] identifier[i] keyword[in] identifier[unit_raw] :
identifier[init_list] . identifier[append] ( identifier[len] ( identifier[i] ))
keyword[for] identifier[i] keyword[in] identifier[range] ( literal[int] , identifier[len] ( identifier[init_list] )):
identifier[init_dict] [ identifier[i] ]= identifier[init_list] [ identifier[i] ]
identifier[init_dict] = identifier[sorted] ( identifier[init_dict] . identifier[items] (), identifier[key] = keyword[lambda] identifier[item] : identifier[item] [ literal[int] ], identifier[reverse] = keyword[True] )
keyword[try] :
identifier[log] ( literal[string] , literal[string] . identifier[format] ( identifier[fake_title] ))
keyword[except] identifier[UnboundLocalError] :
identifier[fake_title] = literal[string]
identifier[log] ( literal[string] , literal[string] )
keyword[return] identifier[unit_raw] , identifier[init_dict] , identifier[fake_title] | def list_mapping(html_cleaned):
"""将预处理后的网页文档映射成列表和字典,并提取虚假标题
Keyword arguments:
html_cleaned -- 预处理后的网页源代码,字符串类型
Return:
unit_raw -- 网页文本行
init_dict -- 字典的key是索引,value是网页文本行,并按照网页文本行长度降序排序
fake_title -- 虚假标题,即网页源代码<title>中的文本行
"""
unit_raw = html_cleaned.split('\n')
for i in unit_raw:
c = CDM(i)
if c.PTN is not 0:
fake_title = i
break # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['i']]
init_list = []
init_dict = {}
for i in unit_raw:
init_list.append(len(i)) # depends on [control=['for'], data=['i']]
for i in range(0, len(init_list)):
init_dict[i] = init_list[i] # depends on [control=['for'], data=['i']]
init_dict = sorted(init_dict.items(), key=lambda item: item[1], reverse=True)
try:
log('debug', '映射成功,提取的虚假标题为:【{}】'.format(fake_title)) # depends on [control=['try'], data=[]]
except UnboundLocalError:
fake_title = ''
log('err', '虚假标题提取失败') # depends on [control=['except'], data=[]]
return (unit_raw, init_dict, fake_title) |
def file_hash(content):
    """Generate a base64url-style sha256 hash for a string or file-like object,
    avoiding results that start with "ad" to work around over-aggressive
    ad blockers.
    :param str|FileIO content: The content to hash, either as a bytes string
        or as a file-like object (consumed in 1 MiB chunks).
    :returns: URL-safe base64 digest string with ``=`` padding stripped; never
        starts with "ad"/"aD" (second character substituted if it would).
    """
    h = hashlib.sha256()
    if isinstance(content, bytes_type):
        h.update(content)
    else:
        # File-like object: feed the hash in 1 MiB chunks until read()
        # returns an empty (falsy) chunk. The final h.update(b'') is a no-op.
        data = True
        while data:
            data = content.read(1024 * 1024)
            h.update(data)
    h_digest = h.digest()
    # base64url
    # | char | substitute |
    # | + | - |
    # | / | _ |
    #
    result = base64.b64encode(h_digest, altchars=b'-_')
    # ensure this is a str object in 3.x
    result = result.decode('ascii')
    # '=' padding carries no information for a fixed-length digest.
    result = result.rstrip('=')
    if result[:2].lower() == 'ad':
        # workaround adblockers blocking everything starting with "ad"
        # by replacing the "d" with another character
        if result[1] == 'd':
            result = result[0] + '~' + result[2:]
        else:
            # upper case D
            result = result[0] + '.' + result[2:]
    return result | def function[file_hash, parameter[content]]:
constant[Generate hash for file or string and avoid strings starting with "ad"
to workaround ad blocks being over aggressiv.
The current implementation is based on sha256.
:param str|FileIO content: The content to hash, either as string or as file-like object
]
variable[h] assign[=] call[name[hashlib].sha256, parameter[]]
if call[name[isinstance], parameter[name[content], name[bytes_type]]] begin[:]
call[name[h].update, parameter[name[content]]]
variable[h_digest] assign[=] call[name[h].digest, parameter[]]
variable[result] assign[=] call[name[base64].b64encode, parameter[name[h_digest]]]
variable[result] assign[=] call[name[result].decode, parameter[constant[ascii]]]
variable[result] assign[=] call[name[result].rstrip, parameter[constant[=]]]
if compare[call[call[name[result]][<ast.Slice object at 0x7da1b16e1a80>].lower, parameter[]] equal[==] constant[ad]] begin[:]
if compare[call[name[result]][constant[1]] equal[==] constant[d]] begin[:]
variable[result] assign[=] binary_operation[binary_operation[call[name[result]][constant[0]] + constant[~]] + call[name[result]][<ast.Slice object at 0x7da1b16e1e70>]]
return[name[result]] | keyword[def] identifier[file_hash] ( identifier[content] ):
literal[string]
identifier[h] = identifier[hashlib] . identifier[sha256] ()
keyword[if] identifier[isinstance] ( identifier[content] , identifier[bytes_type] ):
identifier[h] . identifier[update] ( identifier[content] )
keyword[else] :
identifier[data] = keyword[True]
keyword[while] identifier[data] :
identifier[data] = identifier[content] . identifier[read] ( literal[int] * literal[int] )
identifier[h] . identifier[update] ( identifier[data] )
identifier[h_digest] = identifier[h] . identifier[digest] ()
identifier[result] = identifier[base64] . identifier[b64encode] ( identifier[h_digest] , identifier[altchars] = literal[string] )
identifier[result] = identifier[result] . identifier[decode] ( literal[string] )
identifier[result] = identifier[result] . identifier[rstrip] ( literal[string] )
keyword[if] identifier[result] [: literal[int] ]. identifier[lower] ()== literal[string] :
keyword[if] identifier[result] [ literal[int] ]== literal[string] :
identifier[result] = identifier[result] [ literal[int] ]+ literal[string] + identifier[result] [ literal[int] :]
keyword[else] :
identifier[result] = identifier[result] [ literal[int] ]+ literal[string] + identifier[result] [ literal[int] :]
keyword[return] identifier[result] | def file_hash(content):
"""Generate hash for file or string and avoid strings starting with "ad"
to workaround ad blocks being over aggressiv.
The current implementation is based on sha256.
:param str|FileIO content: The content to hash, either as string or as file-like object
"""
h = hashlib.sha256()
if isinstance(content, bytes_type):
h.update(content) # depends on [control=['if'], data=[]]
else:
data = True
while data:
data = content.read(1024 * 1024)
h.update(data) # depends on [control=['while'], data=[]]
h_digest = h.digest()
# base64url
# | char | substitute |
# | + | - |
# | / | _ |
#
result = base64.b64encode(h_digest, altchars=b'-_')
# ensure this is a str object in 3.x
result = result.decode('ascii')
result = result.rstrip('=')
if result[:2].lower() == 'ad':
# workaround adblockers blocking everything starting with "ad"
# by replacing the "d" with another charackter
if result[1] == 'd':
result = result[0] + '~' + result[2:] # depends on [control=['if'], data=[]]
else:
# upper case D
result = result[0] + '.' + result[2:] # depends on [control=['if'], data=[]]
return result |
def get_gene_id(gene_name):
    '''Retrieve systematic yeast gene name from the common name.
    Looks the gene up in the SGD YeastMine web service via intermine.
    :param gene_name: Common name for yeast gene (e.g. ADE2).
    :type gene_name: str
    :returns: Systematic name for yeast gene (e.g. YOR128C).
    :rtype: str
    '''
    # Imported locally so the intermine dependency is only required here.
    from intermine.webservice import Service
    service = Service('http://yeastmine.yeastgenome.org/yeastmine/service')
    # Get a new query on the class (table) you will be querying:
    query = service.new_query('Gene')
    # The view specifies the output columns
    query.add_view('primaryIdentifier', 'secondaryIdentifier', 'symbol',
                   'name', 'sgdAlias', 'crossReferences.identifier',
                   'crossReferences.source.name')
    # Uncomment and edit the line below (the default) to select a custom sort
    # order:
    # query.add_sort_order('Gene.primaryIdentifier', 'ASC')
    # You can edit the constraint values below
    query.add_constraint('organism.shortName', '=', 'S. cerevisiae', code='B')
    query.add_constraint('Gene', 'LOOKUP', gene_name, code='A')
    # Uncomment and edit the code below to specify your own custom logic:
    # query.set_logic('A and B')
    # NOTE(review): the loop keeps only the *last* matching row, and if the
    # query returns no rows `gid` is never bound, raising UnboundLocalError
    # on the return below — confirm whether that is intended.
    for row in query.rows():
        gid = row['secondaryIdentifier']
    return gid | def function[get_gene_id, parameter[gene_name]]:
constant[Retrieve systematic yeast gene name from the common name.
:param gene_name: Common name for yeast gene (e.g. ADE2).
:type gene_name: str
:returns: Systematic name for yeast gene (e.g. YOR128C).
:rtype: str
]
from relative_module[intermine.webservice] import module[Service]
variable[service] assign[=] call[name[Service], parameter[constant[http://yeastmine.yeastgenome.org/yeastmine/service]]]
variable[query] assign[=] call[name[service].new_query, parameter[constant[Gene]]]
call[name[query].add_view, parameter[constant[primaryIdentifier], constant[secondaryIdentifier], constant[symbol], constant[name], constant[sgdAlias], constant[crossReferences.identifier], constant[crossReferences.source.name]]]
call[name[query].add_constraint, parameter[constant[organism.shortName], constant[=], constant[S. cerevisiae]]]
call[name[query].add_constraint, parameter[constant[Gene], constant[LOOKUP], name[gene_name]]]
for taget[name[row]] in starred[call[name[query].rows, parameter[]]] begin[:]
variable[gid] assign[=] call[name[row]][constant[secondaryIdentifier]]
return[name[gid]] | keyword[def] identifier[get_gene_id] ( identifier[gene_name] ):
literal[string]
keyword[from] identifier[intermine] . identifier[webservice] keyword[import] identifier[Service]
identifier[service] = identifier[Service] ( literal[string] )
identifier[query] = identifier[service] . identifier[new_query] ( literal[string] )
identifier[query] . identifier[add_view] ( literal[string] , literal[string] , literal[string] ,
literal[string] , literal[string] , literal[string] ,
literal[string] )
identifier[query] . identifier[add_constraint] ( literal[string] , literal[string] , literal[string] , identifier[code] = literal[string] )
identifier[query] . identifier[add_constraint] ( literal[string] , literal[string] , identifier[gene_name] , identifier[code] = literal[string] )
keyword[for] identifier[row] keyword[in] identifier[query] . identifier[rows] ():
identifier[gid] = identifier[row] [ literal[string] ]
keyword[return] identifier[gid] | def get_gene_id(gene_name):
"""Retrieve systematic yeast gene name from the common name.
:param gene_name: Common name for yeast gene (e.g. ADE2).
:type gene_name: str
:returns: Systematic name for yeast gene (e.g. YOR128C).
:rtype: str
"""
from intermine.webservice import Service
service = Service('http://yeastmine.yeastgenome.org/yeastmine/service')
# Get a new query on the class (table) you will be querying:
query = service.new_query('Gene')
# The view specifies the output columns
query.add_view('primaryIdentifier', 'secondaryIdentifier', 'symbol', 'name', 'sgdAlias', 'crossReferences.identifier', 'crossReferences.source.name')
# Uncomment and edit the line below (the default) to select a custom sort
# order:
# query.add_sort_order('Gene.primaryIdentifier', 'ASC')
# You can edit the constraint values below
query.add_constraint('organism.shortName', '=', 'S. cerevisiae', code='B')
query.add_constraint('Gene', 'LOOKUP', gene_name, code='A')
# Uncomment and edit the code below to specify your own custom logic:
# query.set_logic('A and B')
for row in query.rows():
gid = row['secondaryIdentifier'] # depends on [control=['for'], data=['row']]
return gid |
def absolute_proportions(proportions, count):
    """
    Split a given integer into n parts according to len(proportions) so they sum up to count and
    match the given proportions.
    Args:
        proportions (dict): Dict of proportions, with a identifier as key.
        count (int): The total to be split up.
    Returns:
        dict: Dictionary with absolute proportions and same identifiers as key.
    Example::
        >>> absolute_proportions({'train': 0.5, 'test': 0.5}, 100)
        {'train': 50, 'test': 50}
    """
    # first create absolute values by flooring non-integer portions
    relative_sum = sum(proportions.values())
    absolute_proportions = {idx: int(count / relative_sum * prop_value) for idx, prop_value in
                            proportions.items()}
    # Flooring can leave a shortfall; distribute it one unit at a time,
    # round-robin over the identifiers in sorted order (deterministic,
    # not random).
    absolute_sum = sum(absolute_proportions.values())
    rest_value = count - absolute_sum
    subset_keys = sorted(list(proportions.keys()))
    for i in range(rest_value):
        key = subset_keys[i % len(subset_keys)]
        absolute_proportions[key] += 1
    return absolute_proportions | def function[absolute_proportions, parameter[proportions, count]]:
constant[
Split a given integer into n parts according to len(proportions) so they sum up to count and
match the given proportions.
Args:
proportions (dict): Dict of proportions, with a identifier as key.
Returns:
dict: Dictionary with absolute proportions and same identifiers as key.
Example::
>>> absolute_proportions({'train': 0.5, 'test': 0.5}, 100)
{'train': 50, 'test': 50}
]
variable[relative_sum] assign[=] call[name[sum], parameter[call[name[proportions].values, parameter[]]]]
variable[absolute_proportions] assign[=] <ast.DictComp object at 0x7da1b0b1c9d0>
variable[absolute_sum] assign[=] call[name[sum], parameter[call[name[absolute_proportions].values, parameter[]]]]
variable[rest_value] assign[=] binary_operation[name[count] - name[absolute_sum]]
variable[subset_keys] assign[=] call[name[sorted], parameter[call[name[list], parameter[call[name[proportions].keys, parameter[]]]]]]
for taget[name[i]] in starred[call[name[range], parameter[name[rest_value]]]] begin[:]
variable[key] assign[=] call[name[subset_keys]][binary_operation[name[i] <ast.Mod object at 0x7da2590d6920> call[name[len], parameter[name[subset_keys]]]]]
<ast.AugAssign object at 0x7da1b0b1cee0>
return[name[absolute_proportions]] | keyword[def] identifier[absolute_proportions] ( identifier[proportions] , identifier[count] ):
literal[string]
identifier[relative_sum] = identifier[sum] ( identifier[proportions] . identifier[values] ())
identifier[absolute_proportions] ={ identifier[idx] : identifier[int] ( identifier[count] / identifier[relative_sum] * identifier[prop_value] ) keyword[for] identifier[idx] , identifier[prop_value] keyword[in]
identifier[proportions] . identifier[items] ()}
identifier[absolute_sum] = identifier[sum] ( identifier[absolute_proportions] . identifier[values] ())
identifier[rest_value] = identifier[count] - identifier[absolute_sum]
identifier[subset_keys] = identifier[sorted] ( identifier[list] ( identifier[proportions] . identifier[keys] ()))
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[rest_value] ):
identifier[key] = identifier[subset_keys] [ identifier[i] % identifier[len] ( identifier[subset_keys] )]
identifier[absolute_proportions] [ identifier[key] ]+= literal[int]
keyword[return] identifier[absolute_proportions] | def absolute_proportions(proportions, count):
"""
Split a given integer into n parts according to len(proportions) so they sum up to count and
match the given proportions.
Args:
proportions (dict): Dict of proportions, with a identifier as key.
Returns:
dict: Dictionary with absolute proportions and same identifiers as key.
Example::
>>> absolute_proportions({'train': 0.5, 'test': 0.5}, 100)
{'train': 50, 'test': 50}
"""
# first create absolute values by flooring non-integer portions
relative_sum = sum(proportions.values())
absolute_proportions = {idx: int(count / relative_sum * prop_value) for (idx, prop_value) in proportions.items()}
# Now distribute the rest value randomly over the different parts
absolute_sum = sum(absolute_proportions.values())
rest_value = count - absolute_sum
subset_keys = sorted(list(proportions.keys()))
for i in range(rest_value):
key = subset_keys[i % len(subset_keys)]
absolute_proportions[key] += 1 # depends on [control=['for'], data=['i']]
return absolute_proportions |
def argrange(self, axis=None):
    """ Return the (argmin, argmax) index pair along the specified axis.
    :param axis: Axis to reduce over; ``None`` reduces over the whole array.
    :returns: A ``(argmin, argmax)`` tuple when ``axis`` is None, otherwise
        ``np.stack([argmin, argmax]).T`` — for 1-D index results each row is
        one (argmin, argmax) pair.
    """
    amin = self.argmin(axis=axis)
    amax = self.argmax(axis=axis)
    if axis is None:
        # Whole-array reduction yields scalar indices; just pair them.
        return (amin, amax)
    else:
        return np.stack([amin, amax]).T | def function[argrange, parameter[self, axis]]:
constant[ Return argrange along specified axis ]
variable[amin] assign[=] call[name[self].argmin, parameter[]]
variable[amax] assign[=] call[name[self].argmax, parameter[]]
if compare[name[axis] is constant[None]] begin[:]
return[tuple[[<ast.Name object at 0x7da1b26ac3a0>, <ast.Name object at 0x7da1b26ad060>]]] | keyword[def] identifier[argrange] ( identifier[self] , identifier[axis] = keyword[None] ):
literal[string]
identifier[amin] = identifier[self] . identifier[argmin] ( identifier[axis] = identifier[axis] )
identifier[amax] = identifier[self] . identifier[argmax] ( identifier[axis] = identifier[axis] )
keyword[if] identifier[axis] keyword[is] keyword[None] :
keyword[return] ( identifier[amin] , identifier[amax] )
keyword[else] :
keyword[return] identifier[np] . identifier[stack] ([ identifier[amin] , identifier[amax] ]). identifier[T] | def argrange(self, axis=None):
""" Return argrange along specified axis """
amin = self.argmin(axis=axis)
amax = self.argmax(axis=axis)
if axis is None:
return (amin, amax) # depends on [control=['if'], data=[]]
else:
return np.stack([amin, amax]).T |
def main(ctx, host, password, port, quiet, session_timeout, connect_timeout,
         username):
    """ Manipulate one or more Junos devices.
    Purpose: The main function is the entry point for the jaide tool. Click
           | handles arguments, commands and options. The parameters passed to
           | this function are all potential options (required or not) that
           | must come *before* the command from the group in the command line.
    @param ctx: The click context parameter, for receiving the object
              | dictionary being manipulated by other previous functions.
              | Needed by any function with the @click.pass_context decorator.
    @type ctx: click.Context
    @param host: The IP(s) or hostname(s) of the devices to connect to.
    @type host: str
    @param password: The string password used to connect to the device.
    @type password: str
    @param port: The numerical port to establish the connection to. Defaults
               | to 22.
    @type port: int
    @param quiet: An option that the user can set to suppress all output
                | from jaide.
    @type quiet: bool
    @param session_timeout: Sets the session timeout value. A higher value may
                          | be desired for long running commands, such as
                          | 'request system snapshot slice alternate'
    @type session_timeout: int
    @param connect_timeout: Sets the connection timeout value. This is how
                          | we'll wait when connecting before classifying
                          | the device unreachable.
    @type connect_timeout: int
    @param username: The string username used to connect to the device.
    @type username: str
    @returns: None. Functions part of click relating to the command group
            | 'main' do not return anything. Click handles passing context
            | between the functions and maintaining command order and chaining.
    """
    # build the list of hosts
    ctx.obj['hosts'] = [ip for ip in clean_lines(host)]
    # set the connection parameters
    ctx.obj['conn'] = {
        "username": username,
        "password": password,
        "port": port,
        "session_timeout": session_timeout,
        "connect_timeout": connect_timeout
    }
    if quiet:
        # presumably downstream commands read ctx.obj['out'] to suppress
        # their output — confirm against the subcommand implementations
        ctx.obj['out'] = "quiet" | def function[main, parameter[ctx, host, password, port, quiet, session_timeout, connect_timeout, username]]:
constant[ Manipulate one or more Junos devices.
Purpose: The main function is the entry point for the jaide tool. Click
| handles arguments, commands and options. The parameters passed to
| this function are all potential options (required or not) that
| must come *before* the command from the group in the command line.
@param ctx: The click context paramter, for receiving the object dictionary
| being manipulated by other previous functions. Needed by any
| function with the @click.pass_context decorator.
@type ctx: click.Context
@param host: The IP(s) or hostname(s) of the devices to connect to.
@type host: str
@param password: The string password used to connect to the device.
@type password: str
@param port: The numerical port to establish the connection to. Defauls
| to 22.
@type port: int
@param quiet: An option that the user can set to suppress all output
| from jaide.
@type quiet: bool
@param session_timeout: Sets the session timeout value. A higher value may
| be desired for long running commands, such as
| 'request system snapshot slice alternate'
@type session_timeout: int
@param connect_timeout: Sets the connection timeout value. This is how
| we'll wait when connecting before classifying
| the device unreachable.
@type connect_timeout: int
@param username: The string username used to connect to the device.
@type useranme: str
@returns: None. Functions part of click relating to the command group
| 'main' do not return anything. Click handles passing context
| between the functions and maintaing command order and chaining.
]
call[name[ctx].obj][constant[hosts]] assign[=] <ast.ListComp object at 0x7da1b257d3f0>
call[name[ctx].obj][constant[conn]] assign[=] dictionary[[<ast.Constant object at 0x7da1b257d480>, <ast.Constant object at 0x7da1b257f370>, <ast.Constant object at 0x7da1b257dbd0>, <ast.Constant object at 0x7da1b257c4c0>, <ast.Constant object at 0x7da1b257e4d0>], [<ast.Name object at 0x7da1b257cdc0>, <ast.Name object at 0x7da1b257d930>, <ast.Name object at 0x7da1b257e950>, <ast.Name object at 0x7da1b257f2e0>, <ast.Name object at 0x7da1b257f7f0>]]
if name[quiet] begin[:]
call[name[ctx].obj][constant[out]] assign[=] constant[quiet] | keyword[def] identifier[main] ( identifier[ctx] , identifier[host] , identifier[password] , identifier[port] , identifier[quiet] , identifier[session_timeout] , identifier[connect_timeout] ,
identifier[username] ):
literal[string]
identifier[ctx] . identifier[obj] [ literal[string] ]=[ identifier[ip] keyword[for] identifier[ip] keyword[in] identifier[clean_lines] ( identifier[host] )]
identifier[ctx] . identifier[obj] [ literal[string] ]={
literal[string] : identifier[username] ,
literal[string] : identifier[password] ,
literal[string] : identifier[port] ,
literal[string] : identifier[session_timeout] ,
literal[string] : identifier[connect_timeout]
}
keyword[if] identifier[quiet] :
identifier[ctx] . identifier[obj] [ literal[string] ]= literal[string] | def main(ctx, host, password, port, quiet, session_timeout, connect_timeout, username):
""" Manipulate one or more Junos devices.
Purpose: The main function is the entry point for the jaide tool. Click
| handles arguments, commands and options. The parameters passed to
| this function are all potential options (required or not) that
| must come *before* the command from the group in the command line.
@param ctx: The click context paramter, for receiving the object dictionary
| being manipulated by other previous functions. Needed by any
| function with the @click.pass_context decorator.
@type ctx: click.Context
@param host: The IP(s) or hostname(s) of the devices to connect to.
@type host: str
@param password: The string password used to connect to the device.
@type password: str
@param port: The numerical port to establish the connection to. Defauls
| to 22.
@type port: int
@param quiet: An option that the user can set to suppress all output
| from jaide.
@type quiet: bool
@param session_timeout: Sets the session timeout value. A higher value may
| be desired for long running commands, such as
| 'request system snapshot slice alternate'
@type session_timeout: int
@param connect_timeout: Sets the connection timeout value. This is how
| we'll wait when connecting before classifying
| the device unreachable.
@type connect_timeout: int
@param username: The string username used to connect to the device.
@type useranme: str
@returns: None. Functions part of click relating to the command group
| 'main' do not return anything. Click handles passing context
| between the functions and maintaing command order and chaining.
"""
# build the list of hosts
ctx.obj['hosts'] = [ip for ip in clean_lines(host)]
# set the connection parameters
ctx.obj['conn'] = {'username': username, 'password': password, 'port': port, 'session_timeout': session_timeout, 'connect_timeout': connect_timeout}
if quiet:
ctx.obj['out'] = 'quiet' # depends on [control=['if'], data=[]] |
def write_creds_to_aws_credentials_file(profile_name, credentials, credentials_file = aws_credentials_file):
    """
    Write credentials to AWS config file
    :param profile_name: Name of the profile section ([profile_name]) to create or update.
    :param credentials: Dict of credential values; keys used here are
        'AccessKeyId', 'SecretAccessKey', 'SerialNumber', 'SessionToken'
        and 'Expiration' (each only written when present and truthy).
    :param credentials_file: Path of the credentials file to edit; defaults
        to the module-level aws_credentials_file.
    :return: None
    """
    profile_found = False
    profile_ever_found = False
    session_token_written = False
    # NOTE(review): security_token_written and expiration_written are set
    # below but never read afterwards — presumably leftovers; confirm
    # before removing.
    security_token_written = False
    mfa_serial_written = False
    expiration_written = False
    # Create the .aws folder if needed
    if not os.path.isdir(aws_config_dir):
        os.mkdir(aws_config_dir)
    # Create an empty file if target does not exist
    if not os.path.isfile(credentials_file):
        open(credentials_file, 'a').close()
    # Open and parse/edit file.
    # fileinput with inplace=True redirects stdout into the file, so every
    # print() below writes the (possibly rewritten) line back in place.
    for line in fileinput.input(credentials_file, inplace=True):
        profile_line = re_profile_name.match(line)
        if profile_line:
            # A [section] header: track whether we are inside the target
            # profile until the next header is seen.
            if profile_line.groups()[0] == profile_name:
                profile_found = True
                profile_ever_found = True
            else:
                profile_found = False
            print(line.rstrip())
        elif profile_found:
            # Inside the target profile: replace each known key's line with
            # the new value when one was supplied; otherwise keep the line.
            if re_access_key.match(line) and 'AccessKeyId' in credentials and credentials['AccessKeyId']:
                print('aws_access_key_id = %s' % credentials['AccessKeyId'])
            elif re_secret_key.match(line) and 'SecretAccessKey' in credentials and credentials['SecretAccessKey']:
                print('aws_secret_access_key = %s' % credentials['SecretAccessKey'])
            elif re_mfa_serial.match(line) and 'SerialNumber' in credentials and credentials['SerialNumber']:
                print('aws_mfa_serial = %s' % credentials['SerialNumber'])
                mfa_serial_written = True
            elif re_session_token.match(line) and 'SessionToken' in credentials and credentials['SessionToken']:
                print('aws_session_token = %s' % credentials['SessionToken'])
                session_token_written = True
            elif re_security_token.match(line) and 'SessionToken' in credentials and credentials['SessionToken']:
                print('aws_security_token = %s' % credentials['SessionToken'])
                security_token_written = True
            elif re_expiration.match(line) and 'Expiration' in credentials and credentials['Expiration']:
                print('expiration = %s' % credentials['Expiration'])
                expiration_written = True
            else:
                print(line.rstrip())
        else:
            print(line.rstrip())
    # Complete the profile if needed — presumably complete_profile appends
    # keys (session token / MFA serial) that were not already rewritten
    # above; confirm against complete_profile's contract.
    if profile_found:
        with open(credentials_file, 'a') as f:
            complete_profile(f, credentials, session_token_written, mfa_serial_written)
    # Add new profile if not found
    if not profile_ever_found:
        with open(credentials_file, 'a') as f:
            f.write('[%s]\n' % profile_name)
            f.write('aws_access_key_id = %s\n' % credentials['AccessKeyId'])
            f.write('aws_secret_access_key = %s\n' % credentials['SecretAccessKey'])
            complete_profile(f, credentials, session_token_written, mfa_serial_written) | def function[write_creds_to_aws_credentials_file, parameter[profile_name, credentials, credentials_file]]:
constant[
Write credentials to AWS config file
:param profile_name:
:param credentials:
:param credentials_file:
:return:
]
variable[profile_found] assign[=] constant[False]
variable[profile_ever_found] assign[=] constant[False]
variable[session_token_written] assign[=] constant[False]
variable[security_token_written] assign[=] constant[False]
variable[mfa_serial_written] assign[=] constant[False]
variable[expiration_written] assign[=] constant[False]
if <ast.UnaryOp object at 0x7da1b268ed70> begin[:]
call[name[os].mkdir, parameter[name[aws_config_dir]]]
if <ast.UnaryOp object at 0x7da1b265fca0> begin[:]
call[call[name[open], parameter[name[credentials_file], constant[a]]].close, parameter[]]
for taget[name[line]] in starred[call[name[fileinput].input, parameter[name[credentials_file]]]] begin[:]
variable[profile_line] assign[=] call[name[re_profile_name].match, parameter[name[line]]]
if name[profile_line] begin[:]
if compare[call[call[name[profile_line].groups, parameter[]]][constant[0]] equal[==] name[profile_name]] begin[:]
variable[profile_found] assign[=] constant[True]
variable[profile_ever_found] assign[=] constant[True]
call[name[print], parameter[call[name[line].rstrip, parameter[]]]]
if name[profile_found] begin[:]
with call[name[open], parameter[name[credentials_file], constant[a]]] begin[:]
call[name[complete_profile], parameter[name[f], name[credentials], name[session_token_written], name[mfa_serial_written]]]
if <ast.UnaryOp object at 0x7da1b2648700> begin[:]
with call[name[open], parameter[name[credentials_file], constant[a]]] begin[:]
call[name[f].write, parameter[binary_operation[constant[[%s]
] <ast.Mod object at 0x7da2590d6920> name[profile_name]]]]
call[name[f].write, parameter[binary_operation[constant[aws_access_key_id = %s
] <ast.Mod object at 0x7da2590d6920> call[name[credentials]][constant[AccessKeyId]]]]]
call[name[f].write, parameter[binary_operation[constant[aws_secret_access_key = %s
] <ast.Mod object at 0x7da2590d6920> call[name[credentials]][constant[SecretAccessKey]]]]]
call[name[complete_profile], parameter[name[f], name[credentials], name[session_token_written], name[mfa_serial_written]]] | keyword[def] identifier[write_creds_to_aws_credentials_file] ( identifier[profile_name] , identifier[credentials] , identifier[credentials_file] = identifier[aws_credentials_file] ):
literal[string]
identifier[profile_found] = keyword[False]
identifier[profile_ever_found] = keyword[False]
identifier[session_token_written] = keyword[False]
identifier[security_token_written] = keyword[False]
identifier[mfa_serial_written] = keyword[False]
identifier[expiration_written] = keyword[False]
keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[isdir] ( identifier[aws_config_dir] ):
identifier[os] . identifier[mkdir] ( identifier[aws_config_dir] )
keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[isfile] ( identifier[credentials_file] ):
identifier[open] ( identifier[credentials_file] , literal[string] ). identifier[close] ()
keyword[for] identifier[line] keyword[in] identifier[fileinput] . identifier[input] ( identifier[credentials_file] , identifier[inplace] = keyword[True] ):
identifier[profile_line] = identifier[re_profile_name] . identifier[match] ( identifier[line] )
keyword[if] identifier[profile_line] :
keyword[if] identifier[profile_line] . identifier[groups] ()[ literal[int] ]== identifier[profile_name] :
identifier[profile_found] = keyword[True]
identifier[profile_ever_found] = keyword[True]
keyword[else] :
identifier[profile_found] = keyword[False]
identifier[print] ( identifier[line] . identifier[rstrip] ())
keyword[elif] identifier[profile_found] :
keyword[if] identifier[re_access_key] . identifier[match] ( identifier[line] ) keyword[and] literal[string] keyword[in] identifier[credentials] keyword[and] identifier[credentials] [ literal[string] ]:
identifier[print] ( literal[string] % identifier[credentials] [ literal[string] ])
keyword[elif] identifier[re_secret_key] . identifier[match] ( identifier[line] ) keyword[and] literal[string] keyword[in] identifier[credentials] keyword[and] identifier[credentials] [ literal[string] ]:
identifier[print] ( literal[string] % identifier[credentials] [ literal[string] ])
keyword[elif] identifier[re_mfa_serial] . identifier[match] ( identifier[line] ) keyword[and] literal[string] keyword[in] identifier[credentials] keyword[and] identifier[credentials] [ literal[string] ]:
identifier[print] ( literal[string] % identifier[credentials] [ literal[string] ])
identifier[mfa_serial_written] = keyword[True]
keyword[elif] identifier[re_session_token] . identifier[match] ( identifier[line] ) keyword[and] literal[string] keyword[in] identifier[credentials] keyword[and] identifier[credentials] [ literal[string] ]:
identifier[print] ( literal[string] % identifier[credentials] [ literal[string] ])
identifier[session_token_written] = keyword[True]
keyword[elif] identifier[re_security_token] . identifier[match] ( identifier[line] ) keyword[and] literal[string] keyword[in] identifier[credentials] keyword[and] identifier[credentials] [ literal[string] ]:
identifier[print] ( literal[string] % identifier[credentials] [ literal[string] ])
identifier[security_token_written] = keyword[True]
keyword[elif] identifier[re_expiration] . identifier[match] ( identifier[line] ) keyword[and] literal[string] keyword[in] identifier[credentials] keyword[and] identifier[credentials] [ literal[string] ]:
identifier[print] ( literal[string] % identifier[credentials] [ literal[string] ])
identifier[expiration_written] = keyword[True]
keyword[else] :
identifier[print] ( identifier[line] . identifier[rstrip] ())
keyword[else] :
identifier[print] ( identifier[line] . identifier[rstrip] ())
keyword[if] identifier[profile_found] :
keyword[with] identifier[open] ( identifier[credentials_file] , literal[string] ) keyword[as] identifier[f] :
identifier[complete_profile] ( identifier[f] , identifier[credentials] , identifier[session_token_written] , identifier[mfa_serial_written] )
keyword[if] keyword[not] identifier[profile_ever_found] :
keyword[with] identifier[open] ( identifier[credentials_file] , literal[string] ) keyword[as] identifier[f] :
identifier[f] . identifier[write] ( literal[string] % identifier[profile_name] )
identifier[f] . identifier[write] ( literal[string] % identifier[credentials] [ literal[string] ])
identifier[f] . identifier[write] ( literal[string] % identifier[credentials] [ literal[string] ])
identifier[complete_profile] ( identifier[f] , identifier[credentials] , identifier[session_token_written] , identifier[mfa_serial_written] ) | def write_creds_to_aws_credentials_file(profile_name, credentials, credentials_file=aws_credentials_file):
"""
Write credentials to AWS config file
:param profile_name:
:param credentials:
:param credentials_file:
:return:
"""
profile_found = False
profile_ever_found = False
session_token_written = False
security_token_written = False
mfa_serial_written = False
expiration_written = False
# Create the .aws folder if needed
if not os.path.isdir(aws_config_dir):
os.mkdir(aws_config_dir) # depends on [control=['if'], data=[]]
# Create an empty file if target does not exist
if not os.path.isfile(credentials_file):
open(credentials_file, 'a').close() # depends on [control=['if'], data=[]]
# Open and parse/edit file
for line in fileinput.input(credentials_file, inplace=True):
profile_line = re_profile_name.match(line)
if profile_line:
if profile_line.groups()[0] == profile_name:
profile_found = True
profile_ever_found = True # depends on [control=['if'], data=[]]
else:
profile_found = False
print(line.rstrip()) # depends on [control=['if'], data=[]]
elif profile_found:
if re_access_key.match(line) and 'AccessKeyId' in credentials and credentials['AccessKeyId']:
print('aws_access_key_id = %s' % credentials['AccessKeyId']) # depends on [control=['if'], data=[]]
elif re_secret_key.match(line) and 'SecretAccessKey' in credentials and credentials['SecretAccessKey']:
print('aws_secret_access_key = %s' % credentials['SecretAccessKey']) # depends on [control=['if'], data=[]]
elif re_mfa_serial.match(line) and 'SerialNumber' in credentials and credentials['SerialNumber']:
print('aws_mfa_serial = %s' % credentials['SerialNumber'])
mfa_serial_written = True # depends on [control=['if'], data=[]]
elif re_session_token.match(line) and 'SessionToken' in credentials and credentials['SessionToken']:
print('aws_session_token = %s' % credentials['SessionToken'])
session_token_written = True # depends on [control=['if'], data=[]]
elif re_security_token.match(line) and 'SessionToken' in credentials and credentials['SessionToken']:
print('aws_security_token = %s' % credentials['SessionToken'])
security_token_written = True # depends on [control=['if'], data=[]]
elif re_expiration.match(line) and 'Expiration' in credentials and credentials['Expiration']:
print('expiration = %s' % credentials['Expiration'])
expiration_written = True # depends on [control=['if'], data=[]]
else:
print(line.rstrip()) # depends on [control=['if'], data=[]]
else:
print(line.rstrip()) # depends on [control=['for'], data=['line']]
# Complete the profile if needed
if profile_found:
with open(credentials_file, 'a') as f:
complete_profile(f, credentials, session_token_written, mfa_serial_written) # depends on [control=['with'], data=['f']] # depends on [control=['if'], data=[]]
# Add new profile if not found
if not profile_ever_found:
with open(credentials_file, 'a') as f:
f.write('[%s]\n' % profile_name)
f.write('aws_access_key_id = %s\n' % credentials['AccessKeyId'])
f.write('aws_secret_access_key = %s\n' % credentials['SecretAccessKey'])
complete_profile(f, credentials, session_token_written, mfa_serial_written) # depends on [control=['with'], data=['f']] # depends on [control=['if'], data=[]] |
    def currentFolder(self, value):
        """Gets/sets the current folder (folder id).

        Passing ``None``, ``"/"`` or ``"root"`` (case-insensitive) resets the
        tracked folder to the service root; any other value is matched
        case-insensitively against the folder titles in ``self.folders``.
        """
        # Already on the requested folder -- nothing to do.  The comparison is
        # lowercased-value vs the stored title (the synthetic root title is
        # stored lowercase as 'root').
        if value is not None and value.lower() == self._currentFolder['title']:
            return
        if value is None:
            # Reset to the root location and a synthetic 'root' folder record.
            self._location = self.root
            self._currentFolder = {
                'title': 'root',
                'id': None,
                'created' : None,
                'username' : None
            }
            self.__init()
        elif value == "/" or value.lower() == 'root':
            # NOTE(review): this branch assigns ``self.location`` while the
            # branch above assigns ``self._location`` -- looks inconsistent;
            # confirm whether a ``location`` property setter exists or this
            # is a typo.
            self.location = self.root
            self._currentFolder = {
                'title': 'root',
                'id': None,
                'created' : None,
                'username' : None
            }
            self.__init()
        else:
            # Linear scan of known folders for a case-insensitive title
            # match; the first match wins (loop breaks just below).
            for folder in self.folders:
                if 'title' in folder:
                    if folder['title'].lower() == value.lower():
                        self._location = "%s/%s" % (self.root, folder['id'])
                        self._currentFolder = folder
                        self.__init(folder['title'])
break | def function[currentFolder, parameter[self, value]]:
constant[gets/sets the current folder (folder id)]
if <ast.BoolOp object at 0x7da2041db1c0> begin[:]
return[None]
if compare[name[value] is constant[None]] begin[:]
name[self]._location assign[=] name[self].root
name[self]._currentFolder assign[=] dictionary[[<ast.Constant object at 0x7da1b12970a0>, <ast.Constant object at 0x7da1b1295150>, <ast.Constant object at 0x7da1b1294850>, <ast.Constant object at 0x7da1b1297e80>], [<ast.Constant object at 0x7da1b12971f0>, <ast.Constant object at 0x7da1b1294610>, <ast.Constant object at 0x7da1b1294700>, <ast.Constant object at 0x7da1b1296890>]]
call[name[self].__init, parameter[]] | keyword[def] identifier[currentFolder] ( identifier[self] , identifier[value] ):
literal[string]
keyword[if] identifier[value] keyword[is] keyword[not] keyword[None] keyword[and] identifier[value] . identifier[lower] ()== identifier[self] . identifier[_currentFolder] [ literal[string] ]:
keyword[return]
keyword[if] identifier[value] keyword[is] keyword[None] :
identifier[self] . identifier[_location] = identifier[self] . identifier[root]
identifier[self] . identifier[_currentFolder] ={
literal[string] : literal[string] ,
literal[string] : keyword[None] ,
literal[string] : keyword[None] ,
literal[string] : keyword[None]
}
identifier[self] . identifier[__init] ()
keyword[elif] identifier[value] == literal[string] keyword[or] identifier[value] . identifier[lower] ()== literal[string] :
identifier[self] . identifier[location] = identifier[self] . identifier[root]
identifier[self] . identifier[_currentFolder] ={
literal[string] : literal[string] ,
literal[string] : keyword[None] ,
literal[string] : keyword[None] ,
literal[string] : keyword[None]
}
identifier[self] . identifier[__init] ()
keyword[else] :
keyword[for] identifier[folder] keyword[in] identifier[self] . identifier[folders] :
keyword[if] literal[string] keyword[in] identifier[folder] :
keyword[if] identifier[folder] [ literal[string] ]. identifier[lower] ()== identifier[value] . identifier[lower] ():
identifier[self] . identifier[_location] = literal[string] %( identifier[self] . identifier[root] , identifier[folder] [ literal[string] ])
identifier[self] . identifier[_currentFolder] = identifier[folder]
identifier[self] . identifier[__init] ( identifier[folder] [ literal[string] ])
keyword[break] | def currentFolder(self, value):
"""gets/sets the current folder (folder id)"""
if value is not None and value.lower() == self._currentFolder['title']:
return # depends on [control=['if'], data=[]]
if value is None:
self._location = self.root
self._currentFolder = {'title': 'root', 'id': None, 'created': None, 'username': None}
self.__init() # depends on [control=['if'], data=[]]
elif value == '/' or value.lower() == 'root':
self.location = self.root
self._currentFolder = {'title': 'root', 'id': None, 'created': None, 'username': None}
self.__init() # depends on [control=['if'], data=[]]
else:
for folder in self.folders:
if 'title' in folder:
if folder['title'].lower() == value.lower():
self._location = '%s/%s' % (self.root, folder['id'])
self._currentFolder = folder
self.__init(folder['title'])
break # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['folder']] # depends on [control=['for'], data=['folder']] |
def removeslash(
    method: Callable[..., Optional[Awaitable[None]]]
) -> Callable[..., Optional[Awaitable[None]]]:
    """Decorator that strips a trailing slash from the request path.

    A ``GET``/``HEAD`` request for ``/foo/`` is permanently redirected to
    ``/foo`` (preserving the query string); other HTTP methods on a
    trailing-slash path get a 404.  Map the handler with a pattern such as
    ``r'/foo/*'`` so both forms reach it.
    """
    @functools.wraps(method)
    def wrapper(  # type: ignore
        self: RequestHandler, *args, **kwargs
    ) -> Optional[Awaitable[None]]:
        path = self.request.path
        if path.endswith("/"):
            # Only safe, idempotent methods may be redirected.
            if self.request.method not in ("GET", "HEAD"):
                raise HTTPError(404)
            target = path.rstrip("/")
            # An empty target means the path was just '/'; fall through
            # instead of redirecting '/' to ''.
            if target:
                query = self.request.query
                if query:
                    target = target + "?" + query
                self.redirect(target, permanent=True)
                return None
        return method(self, *args, **kwargs)
    return wrapper
constant[Use this decorator to remove trailing slashes from the request path.
For example, a request to ``/foo/`` would redirect to ``/foo`` with this
decorator. Your request handler mapping should use a regular expression
like ``r'/foo/*'`` in conjunction with using the decorator.
]
def function[wrapper, parameter[self]]:
if call[name[self].request.path.endswith, parameter[constant[/]]] begin[:]
if compare[name[self].request.method in tuple[[<ast.Constant object at 0x7da1b20cbe80>, <ast.Constant object at 0x7da1b20c9420>]]] begin[:]
variable[uri] assign[=] call[name[self].request.path.rstrip, parameter[constant[/]]]
if name[uri] begin[:]
if name[self].request.query begin[:]
<ast.AugAssign object at 0x7da1b20cb550>
call[name[self].redirect, parameter[name[uri]]]
return[constant[None]]
return[call[name[method], parameter[name[self], <ast.Starred object at 0x7da1b20cbc10>]]]
return[name[wrapper]] | keyword[def] identifier[removeslash] (
identifier[method] : identifier[Callable] [..., identifier[Optional] [ identifier[Awaitable] [ keyword[None] ]]]
)-> identifier[Callable] [..., identifier[Optional] [ identifier[Awaitable] [ keyword[None] ]]]:
literal[string]
@ identifier[functools] . identifier[wraps] ( identifier[method] )
keyword[def] identifier[wrapper] (
identifier[self] : identifier[RequestHandler] ,* identifier[args] ,** identifier[kwargs]
)-> identifier[Optional] [ identifier[Awaitable] [ keyword[None] ]]:
keyword[if] identifier[self] . identifier[request] . identifier[path] . identifier[endswith] ( literal[string] ):
keyword[if] identifier[self] . identifier[request] . identifier[method] keyword[in] ( literal[string] , literal[string] ):
identifier[uri] = identifier[self] . identifier[request] . identifier[path] . identifier[rstrip] ( literal[string] )
keyword[if] identifier[uri] :
keyword[if] identifier[self] . identifier[request] . identifier[query] :
identifier[uri] += literal[string] + identifier[self] . identifier[request] . identifier[query]
identifier[self] . identifier[redirect] ( identifier[uri] , identifier[permanent] = keyword[True] )
keyword[return] keyword[None]
keyword[else] :
keyword[raise] identifier[HTTPError] ( literal[int] )
keyword[return] identifier[method] ( identifier[self] ,* identifier[args] ,** identifier[kwargs] )
keyword[return] identifier[wrapper] | def removeslash(method: Callable[..., Optional[Awaitable[None]]]) -> Callable[..., Optional[Awaitable[None]]]:
"""Use this decorator to remove trailing slashes from the request path.
For example, a request to ``/foo/`` would redirect to ``/foo`` with this
decorator. Your request handler mapping should use a regular expression
like ``r'/foo/*'`` in conjunction with using the decorator.
"""
@functools.wraps(method)
def wrapper(self: RequestHandler, *args, **kwargs) -> Optional[Awaitable[None]]: # type: ignore
if self.request.path.endswith('/'):
if self.request.method in ('GET', 'HEAD'):
uri = self.request.path.rstrip('/')
if uri: # don't try to redirect '/' to ''
if self.request.query:
uri += '?' + self.request.query # depends on [control=['if'], data=[]]
self.redirect(uri, permanent=True)
return None # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
else:
raise HTTPError(404) # depends on [control=['if'], data=[]]
return method(self, *args, **kwargs)
return wrapper |
    def refactor_tree(self, tree, name):
        """Refactors a parse tree (modifying the tree in place).

        For compatible patterns the bottom matcher module is
        used. Otherwise the tree is traversed node-to-node for
        matches.

        Args:
            tree: a pytree.Node instance representing the root of the tree
                  to be refactored.
            name: a human-readable name for this tree.

        Returns:
            True if the tree was modified, False otherwise.
        """
        # Let every fixer initialise its per-tree state before matching runs.
        for fixer in chain(self.pre_order, self.post_order):
            fixer.start_tree(tree, name)

        # use traditional (node-by-node) matching for the incompatible fixers
        self.traverse_by(self.bmi_pre_order_heads, tree.pre_order())
        self.traverse_by(self.bmi_post_order_heads, tree.post_order())

        # obtain a set of candidate nodes from the bottom matcher
        match_set = self.BM.run(tree.leaves())

        # Keep applying fixers until no candidate matches remain; transforms
        # below may produce new matches, which are folded back into match_set.
        while any(match_set.values()):
            for fixer in self.BM.fixers:
                if fixer in match_set and match_set[fixer]:
                    # sort by depth; apply fixers from bottom (of the AST) to top
                    match_set[fixer].sort(key=pytree.Base.depth, reverse=True)

                    if fixer.keep_line_order:
                        # some fixers (eg fix_imports) must be applied
                        # with the original file's line order
                        match_set[fixer].sort(key=pytree.Base.get_lineno)

                    # Iterate over a snapshot -- the transform below can append
                    # new entries to match_set[fixer] while we walk it.
                    for node in list(match_set[fixer]):
                        if node in match_set[fixer]:
                            match_set[fixer].remove(node)

                        try:
                            find_root(node)
                        except ValueError:
                            # this node has been cut off from a
                            # previous transformation; skip
                            continue

                        if node.fixers_applied and fixer in node.fixers_applied:
                            # do not apply the same fixer again
                            continue

                        results = fixer.match(node)

                        if results:
                            new = fixer.transform(node, results)
                            if new is not None:
                                node.replace(new)
                                # NOTE: ``node`` is rebound below, shadowing the
                                # loop variable -- presumably intentional, as the
                                # replaced node is no longer needed here.
                                for node in new.post_order():
                                    # do not apply the fixer again to
                                    # this or any subnode
                                    if not node.fixers_applied:
                                        node.fixers_applied = []
                                    node.fixers_applied.append(fixer)

                                # update the original match set for
                                # the added code
                                new_matches = self.BM.run(new.leaves())
                                for fxr in new_matches:
                                    if not fxr in match_set:
                                        match_set[fxr]=[]
                                    match_set[fxr].extend(new_matches[fxr])

        # Give every fixer a chance to finalise, then report whether the
        # tree was actually changed.
        for fixer in chain(self.pre_order, self.post_order):
            fixer.finish_tree(tree, name)
        return tree.was_changed
constant[Refactors a parse tree (modifying the tree in place).
For compatible patterns the bottom matcher module is
used. Otherwise the tree is traversed node-to-node for
matches.
Args:
tree: a pytree.Node instance representing the root of the tree
to be refactored.
name: a human-readable name for this tree.
Returns:
True if the tree was modified, False otherwise.
]
for taget[name[fixer]] in starred[call[name[chain], parameter[name[self].pre_order, name[self].post_order]]] begin[:]
call[name[fixer].start_tree, parameter[name[tree], name[name]]]
call[name[self].traverse_by, parameter[name[self].bmi_pre_order_heads, call[name[tree].pre_order, parameter[]]]]
call[name[self].traverse_by, parameter[name[self].bmi_post_order_heads, call[name[tree].post_order, parameter[]]]]
variable[match_set] assign[=] call[name[self].BM.run, parameter[call[name[tree].leaves, parameter[]]]]
while call[name[any], parameter[call[name[match_set].values, parameter[]]]] begin[:]
for taget[name[fixer]] in starred[name[self].BM.fixers] begin[:]
if <ast.BoolOp object at 0x7da1b0717760> begin[:]
call[call[name[match_set]][name[fixer]].sort, parameter[]]
if name[fixer].keep_line_order begin[:]
call[call[name[match_set]][name[fixer]].sort, parameter[]]
for taget[name[node]] in starred[call[name[list], parameter[call[name[match_set]][name[fixer]]]]] begin[:]
if compare[name[node] in call[name[match_set]][name[fixer]]] begin[:]
call[call[name[match_set]][name[fixer]].remove, parameter[name[node]]]
<ast.Try object at 0x7da20e956d40>
if <ast.BoolOp object at 0x7da20e9570d0> begin[:]
continue
variable[results] assign[=] call[name[fixer].match, parameter[name[node]]]
if name[results] begin[:]
variable[new] assign[=] call[name[fixer].transform, parameter[name[node], name[results]]]
if compare[name[new] is_not constant[None]] begin[:]
call[name[node].replace, parameter[name[new]]]
for taget[name[node]] in starred[call[name[new].post_order, parameter[]]] begin[:]
if <ast.UnaryOp object at 0x7da1b077a230> begin[:]
name[node].fixers_applied assign[=] list[[]]
call[name[node].fixers_applied.append, parameter[name[fixer]]]
variable[new_matches] assign[=] call[name[self].BM.run, parameter[call[name[new].leaves, parameter[]]]]
for taget[name[fxr]] in starred[name[new_matches]] begin[:]
if <ast.UnaryOp object at 0x7da1b0778d00> begin[:]
call[name[match_set]][name[fxr]] assign[=] list[[]]
call[call[name[match_set]][name[fxr]].extend, parameter[call[name[new_matches]][name[fxr]]]]
for taget[name[fixer]] in starred[call[name[chain], parameter[name[self].pre_order, name[self].post_order]]] begin[:]
call[name[fixer].finish_tree, parameter[name[tree], name[name]]]
return[name[tree].was_changed] | keyword[def] identifier[refactor_tree] ( identifier[self] , identifier[tree] , identifier[name] ):
literal[string]
keyword[for] identifier[fixer] keyword[in] identifier[chain] ( identifier[self] . identifier[pre_order] , identifier[self] . identifier[post_order] ):
identifier[fixer] . identifier[start_tree] ( identifier[tree] , identifier[name] )
identifier[self] . identifier[traverse_by] ( identifier[self] . identifier[bmi_pre_order_heads] , identifier[tree] . identifier[pre_order] ())
identifier[self] . identifier[traverse_by] ( identifier[self] . identifier[bmi_post_order_heads] , identifier[tree] . identifier[post_order] ())
identifier[match_set] = identifier[self] . identifier[BM] . identifier[run] ( identifier[tree] . identifier[leaves] ())
keyword[while] identifier[any] ( identifier[match_set] . identifier[values] ()):
keyword[for] identifier[fixer] keyword[in] identifier[self] . identifier[BM] . identifier[fixers] :
keyword[if] identifier[fixer] keyword[in] identifier[match_set] keyword[and] identifier[match_set] [ identifier[fixer] ]:
identifier[match_set] [ identifier[fixer] ]. identifier[sort] ( identifier[key] = identifier[pytree] . identifier[Base] . identifier[depth] , identifier[reverse] = keyword[True] )
keyword[if] identifier[fixer] . identifier[keep_line_order] :
identifier[match_set] [ identifier[fixer] ]. identifier[sort] ( identifier[key] = identifier[pytree] . identifier[Base] . identifier[get_lineno] )
keyword[for] identifier[node] keyword[in] identifier[list] ( identifier[match_set] [ identifier[fixer] ]):
keyword[if] identifier[node] keyword[in] identifier[match_set] [ identifier[fixer] ]:
identifier[match_set] [ identifier[fixer] ]. identifier[remove] ( identifier[node] )
keyword[try] :
identifier[find_root] ( identifier[node] )
keyword[except] identifier[ValueError] :
keyword[continue]
keyword[if] identifier[node] . identifier[fixers_applied] keyword[and] identifier[fixer] keyword[in] identifier[node] . identifier[fixers_applied] :
keyword[continue]
identifier[results] = identifier[fixer] . identifier[match] ( identifier[node] )
keyword[if] identifier[results] :
identifier[new] = identifier[fixer] . identifier[transform] ( identifier[node] , identifier[results] )
keyword[if] identifier[new] keyword[is] keyword[not] keyword[None] :
identifier[node] . identifier[replace] ( identifier[new] )
keyword[for] identifier[node] keyword[in] identifier[new] . identifier[post_order] ():
keyword[if] keyword[not] identifier[node] . identifier[fixers_applied] :
identifier[node] . identifier[fixers_applied] =[]
identifier[node] . identifier[fixers_applied] . identifier[append] ( identifier[fixer] )
identifier[new_matches] = identifier[self] . identifier[BM] . identifier[run] ( identifier[new] . identifier[leaves] ())
keyword[for] identifier[fxr] keyword[in] identifier[new_matches] :
keyword[if] keyword[not] identifier[fxr] keyword[in] identifier[match_set] :
identifier[match_set] [ identifier[fxr] ]=[]
identifier[match_set] [ identifier[fxr] ]. identifier[extend] ( identifier[new_matches] [ identifier[fxr] ])
keyword[for] identifier[fixer] keyword[in] identifier[chain] ( identifier[self] . identifier[pre_order] , identifier[self] . identifier[post_order] ):
identifier[fixer] . identifier[finish_tree] ( identifier[tree] , identifier[name] )
keyword[return] identifier[tree] . identifier[was_changed] | def refactor_tree(self, tree, name):
"""Refactors a parse tree (modifying the tree in place).
For compatible patterns the bottom matcher module is
used. Otherwise the tree is traversed node-to-node for
matches.
Args:
tree: a pytree.Node instance representing the root of the tree
to be refactored.
name: a human-readable name for this tree.
Returns:
True if the tree was modified, False otherwise.
"""
for fixer in chain(self.pre_order, self.post_order):
fixer.start_tree(tree, name) # depends on [control=['for'], data=['fixer']]
#use traditional matching for the incompatible fixers
self.traverse_by(self.bmi_pre_order_heads, tree.pre_order())
self.traverse_by(self.bmi_post_order_heads, tree.post_order())
# obtain a set of candidate nodes
match_set = self.BM.run(tree.leaves())
while any(match_set.values()):
for fixer in self.BM.fixers:
if fixer in match_set and match_set[fixer]:
#sort by depth; apply fixers from bottom(of the AST) to top
match_set[fixer].sort(key=pytree.Base.depth, reverse=True)
if fixer.keep_line_order:
#some fixers(eg fix_imports) must be applied
#with the original file's line order
match_set[fixer].sort(key=pytree.Base.get_lineno) # depends on [control=['if'], data=[]]
for node in list(match_set[fixer]):
if node in match_set[fixer]:
match_set[fixer].remove(node) # depends on [control=['if'], data=['node']]
try:
find_root(node) # depends on [control=['try'], data=[]]
except ValueError:
# this node has been cut off from a
# previous transformation ; skip
continue # depends on [control=['except'], data=[]]
if node.fixers_applied and fixer in node.fixers_applied:
# do not apply the same fixer again
continue # depends on [control=['if'], data=[]]
results = fixer.match(node)
if results:
new = fixer.transform(node, results)
if new is not None:
node.replace(new)
#new.fixers_applied.append(fixer)
for node in new.post_order():
# do not apply the fixer again to
# this or any subnode
if not node.fixers_applied:
node.fixers_applied = [] # depends on [control=['if'], data=[]]
node.fixers_applied.append(fixer) # depends on [control=['for'], data=['node']]
# update the original match set for
# the added code
new_matches = self.BM.run(new.leaves())
for fxr in new_matches:
if not fxr in match_set:
match_set[fxr] = [] # depends on [control=['if'], data=[]]
match_set[fxr].extend(new_matches[fxr]) # depends on [control=['for'], data=['fxr']] # depends on [control=['if'], data=['new']] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['node']] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['fixer']] # depends on [control=['while'], data=[]]
for fixer in chain(self.pre_order, self.post_order):
fixer.finish_tree(tree, name) # depends on [control=['for'], data=['fixer']]
return tree.was_changed |
def unlink(self):
"""
Overrides orm unlink method.
@param self: The object pointer
@return: True/False.
"""
for room in self:
for reserv_line in room.room_reservation_line_ids:
if reserv_line.status == 'confirm':
raise ValidationError(_('User is not able to delete the \
room after the room in %s state \
in reservation')
% (reserv_line.status))
return super(HotelRoom, self).unlink() | def function[unlink, parameter[self]]:
constant[
Overrides orm unlink method.
@param self: The object pointer
@return: True/False.
]
for taget[name[room]] in starred[name[self]] begin[:]
for taget[name[reserv_line]] in starred[name[room].room_reservation_line_ids] begin[:]
if compare[name[reserv_line].status equal[==] constant[confirm]] begin[:]
<ast.Raise object at 0x7da18f00d450>
return[call[call[name[super], parameter[name[HotelRoom], name[self]]].unlink, parameter[]]] | keyword[def] identifier[unlink] ( identifier[self] ):
literal[string]
keyword[for] identifier[room] keyword[in] identifier[self] :
keyword[for] identifier[reserv_line] keyword[in] identifier[room] . identifier[room_reservation_line_ids] :
keyword[if] identifier[reserv_line] . identifier[status] == literal[string] :
keyword[raise] identifier[ValidationError] ( identifier[_] ( literal[string] )
%( identifier[reserv_line] . identifier[status] ))
keyword[return] identifier[super] ( identifier[HotelRoom] , identifier[self] ). identifier[unlink] () | def unlink(self):
"""
Overrides orm unlink method.
@param self: The object pointer
@return: True/False.
"""
for room in self:
for reserv_line in room.room_reservation_line_ids:
if reserv_line.status == 'confirm':
raise ValidationError(_('User is not able to delete the room after the room in %s state in reservation') % reserv_line.status) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['reserv_line']] # depends on [control=['for'], data=['room']]
return super(HotelRoom, self).unlink() |
def _list_of_dictionaries_to_mysql_inserts(
self,
tableName,
createStatement=None):
"""Convert a python list of dictionaries to pretty csv output
**Key Arguments:**
- ``tableName`` -- the name of the table to create the insert statements for
- ``createStatement`` -- add this create statement to the top of the file. Will only be executed if no table of that name exists in database. Default *None*
**Return:**
- ``output`` -- the mysql insert statements (as a string)
"""
self.log.debug(
'completed the ````_list_of_dictionaries_to_mysql_inserts`` function')
if not len(self.listOfDictionaries):
return "NO MATCH"
dataCopy = copy.deepcopy(self.listOfDictionaries)
if createStatement:
output = createStatement + "\n"
else:
output = ""
inserts = []
inserts = []
inserts[:] = [convert_dictionary_to_mysql_table(log=self.log, dictionary=d, dbTableName=tableName, uniqueKeyList=[
], dateModified=False, returnInsertOnly=True, replace=True, batchInserts=False, reDatetime=self.reDatetime) for d in dataCopy]
output += ";\n".join(inserts) + ";"
self.log.debug(
'completed the ``_list_of_dictionaries_to_mysql_inserts`` function')
return output | def function[_list_of_dictionaries_to_mysql_inserts, parameter[self, tableName, createStatement]]:
constant[Convert a python list of dictionaries to pretty csv output
**Key Arguments:**
- ``tableName`` -- the name of the table to create the insert statements for
- ``createStatement`` -- add this create statement to the top of the file. Will only be executed if no table of that name exists in database. Default *None*
**Return:**
- ``output`` -- the mysql insert statements (as a string)
]
call[name[self].log.debug, parameter[constant[completed the ````_list_of_dictionaries_to_mysql_inserts`` function]]]
if <ast.UnaryOp object at 0x7da20cabf820> begin[:]
return[constant[NO MATCH]]
variable[dataCopy] assign[=] call[name[copy].deepcopy, parameter[name[self].listOfDictionaries]]
if name[createStatement] begin[:]
variable[output] assign[=] binary_operation[name[createStatement] + constant[
]]
variable[inserts] assign[=] list[[]]
variable[inserts] assign[=] list[[]]
call[name[inserts]][<ast.Slice object at 0x7da20cabd6f0>] assign[=] <ast.ListComp object at 0x7da20cabf400>
<ast.AugAssign object at 0x7da20c6aa560>
call[name[self].log.debug, parameter[constant[completed the ``_list_of_dictionaries_to_mysql_inserts`` function]]]
return[name[output]] | keyword[def] identifier[_list_of_dictionaries_to_mysql_inserts] (
identifier[self] ,
identifier[tableName] ,
identifier[createStatement] = keyword[None] ):
literal[string]
identifier[self] . identifier[log] . identifier[debug] (
literal[string] )
keyword[if] keyword[not] identifier[len] ( identifier[self] . identifier[listOfDictionaries] ):
keyword[return] literal[string]
identifier[dataCopy] = identifier[copy] . identifier[deepcopy] ( identifier[self] . identifier[listOfDictionaries] )
keyword[if] identifier[createStatement] :
identifier[output] = identifier[createStatement] + literal[string]
keyword[else] :
identifier[output] = literal[string]
identifier[inserts] =[]
identifier[inserts] =[]
identifier[inserts] [:]=[ identifier[convert_dictionary_to_mysql_table] ( identifier[log] = identifier[self] . identifier[log] , identifier[dictionary] = identifier[d] , identifier[dbTableName] = identifier[tableName] , identifier[uniqueKeyList] =[
], identifier[dateModified] = keyword[False] , identifier[returnInsertOnly] = keyword[True] , identifier[replace] = keyword[True] , identifier[batchInserts] = keyword[False] , identifier[reDatetime] = identifier[self] . identifier[reDatetime] ) keyword[for] identifier[d] keyword[in] identifier[dataCopy] ]
identifier[output] += literal[string] . identifier[join] ( identifier[inserts] )+ literal[string]
identifier[self] . identifier[log] . identifier[debug] (
literal[string] )
keyword[return] identifier[output] | def _list_of_dictionaries_to_mysql_inserts(self, tableName, createStatement=None):
"""Convert a python list of dictionaries to pretty csv output
**Key Arguments:**
- ``tableName`` -- the name of the table to create the insert statements for
- ``createStatement`` -- add this create statement to the top of the file. Will only be executed if no table of that name exists in database. Default *None*
**Return:**
- ``output`` -- the mysql insert statements (as a string)
"""
self.log.debug('completed the ````_list_of_dictionaries_to_mysql_inserts`` function')
if not len(self.listOfDictionaries):
return 'NO MATCH' # depends on [control=['if'], data=[]]
dataCopy = copy.deepcopy(self.listOfDictionaries)
if createStatement:
output = createStatement + '\n' # depends on [control=['if'], data=[]]
else:
output = ''
inserts = []
inserts = []
inserts[:] = [convert_dictionary_to_mysql_table(log=self.log, dictionary=d, dbTableName=tableName, uniqueKeyList=[], dateModified=False, returnInsertOnly=True, replace=True, batchInserts=False, reDatetime=self.reDatetime) for d in dataCopy]
output += ';\n'.join(inserts) + ';'
self.log.debug('completed the ``_list_of_dictionaries_to_mysql_inserts`` function')
return output |
def extend_schema(schema, documentAST=None):
"""Produces a new schema given an existing schema and a document which may
contain GraphQL type extensions and definitions. The original schema will
remain unaltered.
Because a schema represents a graph of references, a schema cannot be
extended without effectively making an entire copy. We do not know until it's
too late if subgraphs remain unchanged.
This algorithm copies the provided schema, applying extensions while
producing the copy. The original schema remains unaltered."""
assert isinstance(schema, GraphQLSchema), "Must provide valid GraphQLSchema"
assert documentAST and isinstance(
documentAST, ast.Document
), "Must provide valid Document AST"
# Collect the type definitions and extensions found in the document.
type_definition_map = {}
type_extensions_map = defaultdict(list)
for _def in documentAST.definitions:
if isinstance(
_def,
(
ast.ObjectTypeDefinition,
ast.InterfaceTypeDefinition,
ast.EnumTypeDefinition,
ast.UnionTypeDefinition,
ast.ScalarTypeDefinition,
ast.InputObjectTypeDefinition,
),
):
# Sanity check that none of the defined types conflict with the
# schema's existing types.
type_name = _def.name.value
if schema.get_type(type_name):
raise GraphQLError(
(
'Type "{}" already exists in the schema. It cannot also '
+ "be defined in this type definition."
).format(type_name),
[_def],
)
type_definition_map[type_name] = _def
elif isinstance(_def, ast.TypeExtensionDefinition):
# Sanity check that this type extension exists within the
# schema's existing types.
extended_type_name = _def.definition.name.value
existing_type = schema.get_type(extended_type_name)
if not existing_type:
raise GraphQLError(
(
'Cannot extend type "{}" because it does not '
+ "exist in the existing schema."
).format(extended_type_name),
[_def.definition],
)
if not isinstance(existing_type, GraphQLObjectType):
raise GraphQLError(
'Cannot extend non-object type "{}".'.format(extended_type_name),
[_def.definition],
)
type_extensions_map[extended_type_name].append(_def)
# Below are functions used for producing this schema that have closed over
# this scope and have access to the schema, cache, and newly defined types.
def get_type_from_def(type_def):
type = _get_named_type(type_def.name)
assert type, "Invalid schema"
return type
def get_type_from_AST(astNode):
type = _get_named_type(astNode.name.value)
if not type:
raise GraphQLError(
(
'Unknown type: "{}". Ensure that this type exists '
+ "either in the original schema, or is added in a type definition."
).format(astNode.name.value),
[astNode],
)
return type
# Given a name, returns a type from either the existing schema or an
# added type.
def _get_named_type(typeName):
cached_type_def = type_def_cache.get(typeName)
if cached_type_def:
return cached_type_def
existing_type = schema.get_type(typeName)
if existing_type:
type_def = extend_type(existing_type)
type_def_cache[typeName] = type_def
return type_def
type_ast = type_definition_map.get(typeName)
if type_ast:
type_def = build_type(type_ast)
type_def_cache[typeName] = type_def
return type_def
# Given a type's introspection result, construct the correct
# GraphQLType instance.
def extend_type(type):
if isinstance(type, GraphQLObjectType):
return extend_object_type(type)
if isinstance(type, GraphQLInterfaceType):
return extend_interface_type(type)
if isinstance(type, GraphQLUnionType):
return extend_union_type(type)
return type
def extend_object_type(type):
return GraphQLObjectType(
name=type.name,
description=type.description,
interfaces=lambda: extend_implemented_interfaces(type),
fields=lambda: extend_field_map(type),
)
def extend_interface_type(type):
return GraphQLInterfaceType(
name=type.name,
description=type.description,
fields=lambda: extend_field_map(type),
resolve_type=cannot_execute_client_schema,
)
def extend_union_type(type):
return GraphQLUnionType(
name=type.name,
description=type.description,
types=list(map(get_type_from_def, type.types)),
resolve_type=cannot_execute_client_schema,
)
def extend_implemented_interfaces(type):
interfaces = list(map(get_type_from_def, type.interfaces))
# If there are any extensions to the interfaces, apply those here.
extensions = type_extensions_map[type.name]
for extension in extensions:
for namedType in extension.definition.interfaces:
interface_name = namedType.name.value
if any([_def.name == interface_name for _def in interfaces]):
raise GraphQLError(
(
'Type "{}" already implements "{}". '
+ "It cannot also be implemented in this type extension."
).format(type.name, interface_name),
[namedType],
)
interfaces.append(get_type_from_AST(namedType))
return interfaces
def extend_field_map(type):
new_field_map = OrderedDict()
old_field_map = type.fields
for field_name, field in old_field_map.items():
new_field_map[field_name] = GraphQLField(
extend_field_type(field.type),
description=field.description,
deprecation_reason=field.deprecation_reason,
args=field.args,
resolver=cannot_execute_client_schema,
)
# If there are any extensions to the fields, apply those here.
extensions = type_extensions_map[type.name]
for extension in extensions:
for field in extension.definition.fields:
field_name = field.name.value
if field_name in old_field_map:
raise GraphQLError(
(
'Field "{}.{}" already exists in the '
+ "schema. It cannot also be defined in this type extension."
).format(type.name, field_name),
[field],
)
new_field_map[field_name] = GraphQLField(
build_field_type(field.type),
args=build_input_values(field.arguments),
resolver=cannot_execute_client_schema,
)
return new_field_map
def extend_field_type(type):
if isinstance(type, GraphQLList):
return GraphQLList(extend_field_type(type.of_type))
if isinstance(type, GraphQLNonNull):
return GraphQLNonNull(extend_field_type(type.of_type))
return get_type_from_def(type)
def build_type(type_ast):
    """Dispatch a type-definition AST node to the matching builder.

    Returns None for unrecognized node classes, mirroring the silent
    fall-through of a plain dispatch miss.
    """
    builders = {
        ast.ObjectTypeDefinition: build_object_type,
        ast.InterfaceTypeDefinition: build_interface_type,
        ast.UnionTypeDefinition: build_union_type,
        ast.ScalarTypeDefinition: build_scalar_type,
        ast.EnumTypeDefinition: build_enum_type,
        ast.InputObjectTypeDefinition: build_input_object_type,
    }
    builder = builders.get(type(type_ast))
    return builder(type_ast) if builder else None
def build_object_type(type_ast):
    """Build an object type from its AST; interfaces and fields are supplied
    as thunks so they resolve lazily (allowing forward references)."""
    def make_interfaces():
        return build_implemented_interfaces(type_ast)

    def make_fields():
        return build_field_map(type_ast)

    return GraphQLObjectType(
        type_ast.name.value,
        interfaces=make_interfaces,
        fields=make_fields,
    )
def build_interface_type(type_ast):
    """Build an interface type from its AST; the field map is resolved
    lazily, and resolution is disabled for this client-side schema."""
    def make_fields():
        return build_field_map(type_ast)

    return GraphQLInterfaceType(
        type_ast.name.value,
        fields=make_fields,
        resolve_type=cannot_execute_client_schema,
    )
def build_union_type(type_ast):
    """Build a union type from its AST, resolving each named member type."""
    members = [get_type_from_AST(member) for member in type_ast.types]
    return GraphQLUnionType(
        type_ast.name.value,
        types=members,
        resolve_type=cannot_execute_client_schema,
    )
def build_scalar_type(type_ast):
    """Build an inert client-side scalar: it cannot serialize values."""
    def _serialize(*args, **kwargs):
        return None

    # Note: validation calls the parse functions to determine if a
    # literal value is correct. Returning null would cause use of custom
    # scalars to always fail validation. Returning false causes them to
    # always pass validation.
    def _parse_value(*args, **kwargs):
        return False

    def _parse_literal(*args, **kwargs):
        return False

    return GraphQLScalarType(
        type_ast.name.value,
        serialize=_serialize,
        parse_value=_parse_value,
        parse_literal=_parse_literal,
    )
def build_enum_type(type_ast):
    """Build an enum type from its AST definition.

    Values are collected into an OrderedDict — consistent with the other map
    builders in this module (extend_field_map, build_input_values) — so that
    enum values keep their declaration order on pre-3.7 Pythons, where a
    plain dict comprehension would lose it.
    """
    return GraphQLEnumType(
        type_ast.name.value,
        values=OrderedDict(
            (v.name.value, GraphQLEnumValue()) for v in type_ast.values
        ),
    )
def build_input_object_type(type_ast):
    """Build an input object type from its AST; the field map is computed
    lazily via a thunk."""
    def make_fields():
        return build_input_values(type_ast.fields, GraphQLInputObjectField)

    return GraphQLInputObjectType(type_ast.name.value, fields=make_fields)
def build_implemented_interfaces(type_ast):
    """Resolve each interface named in the AST to its GraphQL type."""
    return [get_type_from_AST(iface) for iface in type_ast.interfaces]
def build_field_map(type_ast):
    """Build the field map for a type-definition AST node.

    Uses an OrderedDict — consistent with extend_field_map and
    build_input_values — so fields keep their declaration order on pre-3.7
    Pythons, where the previous plain dict comprehension would lose it.
    Field resolution is disabled for this client-side schema.
    """
    return OrderedDict(
        (
            field.name.value,
            GraphQLField(
                build_field_type(field.type),
                args=build_input_values(field.arguments),
                resolver=cannot_execute_client_schema,
            ),
        )
        for field in type_ast.fields
    )
def build_input_values(values, input_type=GraphQLArgument):
    """Build an ordered map of input values (arguments, or input-object
    fields when ``input_type`` is GraphQLInputObjectField)."""
    result = OrderedDict()
    for value in values:
        value_type = build_field_type(value.type)
        result[value.name.value] = input_type(
            value_type,
            default_value=value_from_ast(value.default_value, value_type),
        )
    return result
def build_field_type(type_ast):
    """Translate a type AST into a GraphQL type, rebuilding list/non-null
    wrappers recursively around the named type."""
    for ast_cls, wrapper in (
        (ast.ListType, GraphQLList),
        (ast.NonNullType, GraphQLNonNull),
    ):
        if isinstance(type_ast, ast_cls):
            return wrapper(build_field_type(type_ast.type))
    return get_type_from_AST(type_ast)
# If this document contains no new types, then return the same unmodified
# GraphQLSchema instance.
if not type_extensions_map and not type_definition_map:
return schema
# A cache to use to store the actual GraphQLType definition objects by name.
# Initialize to the GraphQL built in scalars and introspection types. All
# functions below are inline so that this type def cache is within the scope
# of the closure.
type_def_cache = {
"String": GraphQLString,
"Int": GraphQLInt,
"Float": GraphQLFloat,
"Boolean": GraphQLBoolean,
"ID": GraphQLID,
"__Schema": __Schema,
"__Directive": __Directive,
"__DirectiveLocation": __DirectiveLocation,
"__Type": __Type,
"__Field": __Field,
"__InputValue": __InputValue,
"__EnumValue": __EnumValue,
"__TypeKind": __TypeKind,
}
# Get the root Query, Mutation, and Subscription types.
query_type = get_type_from_def(schema.get_query_type())
existing_mutation_type = schema.get_mutation_type()
mutationType = (
existing_mutation_type and get_type_from_def(existing_mutation_type) or None
)
existing_subscription_type = schema.get_subscription_type()
subscription_type = (
existing_subscription_type
and get_type_from_def(existing_subscription_type)
or None
)
# Iterate through all types, getting the type definition for each, ensuring
# that any type not directly referenced by a field will get created.
types = [get_type_from_def(_def) for _def in schema.get_type_map().values()]
# Do the same with new types, appending to the list of defined types.
types += [get_type_from_AST(_def) for _def in type_definition_map.values()]
# Then produce and return a Schema with these types.
return GraphQLSchema(
query=query_type,
mutation=mutationType,
subscription=subscription_type,
# Copy directives.
directives=schema.get_directives(),
types=types,
) | def function[extend_schema, parameter[schema, documentAST]]:
constant[Produces a new schema given an existing schema and a document which may
contain GraphQL type extensions and definitions. The original schema will
remain unaltered.
Because a schema represents a graph of references, a schema cannot be
extended without effectively making an entire copy. We do not know until it's
too late if subgraphs remain unchanged.
This algorithm copies the provided schema, applying extensions while
producing the copy. The original schema remains unaltered.]
assert[call[name[isinstance], parameter[name[schema], name[GraphQLSchema]]]]
assert[<ast.BoolOp object at 0x7da1b19b98a0>]
variable[type_definition_map] assign[=] dictionary[[], []]
variable[type_extensions_map] assign[=] call[name[defaultdict], parameter[name[list]]]
for taget[name[_def]] in starred[name[documentAST].definitions] begin[:]
if call[name[isinstance], parameter[name[_def], tuple[[<ast.Attribute object at 0x7da1b19bb5b0>, <ast.Attribute object at 0x7da1b19b8040>, <ast.Attribute object at 0x7da1b19ba860>, <ast.Attribute object at 0x7da1b19b8220>, <ast.Attribute object at 0x7da1b19ba1a0>, <ast.Attribute object at 0x7da1b19ba2c0>]]]] begin[:]
variable[type_name] assign[=] name[_def].name.value
if call[name[schema].get_type, parameter[name[type_name]]] begin[:]
<ast.Raise object at 0x7da1b19bb7f0>
call[name[type_definition_map]][name[type_name]] assign[=] name[_def]
def function[get_type_from_def, parameter[type_def]]:
variable[type] assign[=] call[name[_get_named_type], parameter[name[type_def].name]]
assert[name[type]]
return[name[type]]
def function[get_type_from_AST, parameter[astNode]]:
variable[type] assign[=] call[name[_get_named_type], parameter[name[astNode].name.value]]
if <ast.UnaryOp object at 0x7da20c6e5570> begin[:]
<ast.Raise object at 0x7da20c6e7250>
return[name[type]]
def function[_get_named_type, parameter[typeName]]:
variable[cached_type_def] assign[=] call[name[type_def_cache].get, parameter[name[typeName]]]
if name[cached_type_def] begin[:]
return[name[cached_type_def]]
variable[existing_type] assign[=] call[name[schema].get_type, parameter[name[typeName]]]
if name[existing_type] begin[:]
variable[type_def] assign[=] call[name[extend_type], parameter[name[existing_type]]]
call[name[type_def_cache]][name[typeName]] assign[=] name[type_def]
return[name[type_def]]
variable[type_ast] assign[=] call[name[type_definition_map].get, parameter[name[typeName]]]
if name[type_ast] begin[:]
variable[type_def] assign[=] call[name[build_type], parameter[name[type_ast]]]
call[name[type_def_cache]][name[typeName]] assign[=] name[type_def]
return[name[type_def]]
def function[extend_type, parameter[type]]:
if call[name[isinstance], parameter[name[type], name[GraphQLObjectType]]] begin[:]
return[call[name[extend_object_type], parameter[name[type]]]]
if call[name[isinstance], parameter[name[type], name[GraphQLInterfaceType]]] begin[:]
return[call[name[extend_interface_type], parameter[name[type]]]]
if call[name[isinstance], parameter[name[type], name[GraphQLUnionType]]] begin[:]
return[call[name[extend_union_type], parameter[name[type]]]]
return[name[type]]
def function[extend_object_type, parameter[type]]:
return[call[name[GraphQLObjectType], parameter[]]]
def function[extend_interface_type, parameter[type]]:
return[call[name[GraphQLInterfaceType], parameter[]]]
def function[extend_union_type, parameter[type]]:
return[call[name[GraphQLUnionType], parameter[]]]
def function[extend_implemented_interfaces, parameter[type]]:
variable[interfaces] assign[=] call[name[list], parameter[call[name[map], parameter[name[get_type_from_def], name[type].interfaces]]]]
variable[extensions] assign[=] call[name[type_extensions_map]][name[type].name]
for taget[name[extension]] in starred[name[extensions]] begin[:]
for taget[name[namedType]] in starred[name[extension].definition.interfaces] begin[:]
variable[interface_name] assign[=] name[namedType].name.value
if call[name[any], parameter[<ast.ListComp object at 0x7da20c6e5ba0>]] begin[:]
<ast.Raise object at 0x7da20c6e5750>
call[name[interfaces].append, parameter[call[name[get_type_from_AST], parameter[name[namedType]]]]]
return[name[interfaces]]
def function[extend_field_map, parameter[type]]:
variable[new_field_map] assign[=] call[name[OrderedDict], parameter[]]
variable[old_field_map] assign[=] name[type].fields
for taget[tuple[[<ast.Name object at 0x7da1b26ae350>, <ast.Name object at 0x7da1b26acfa0>]]] in starred[call[name[old_field_map].items, parameter[]]] begin[:]
call[name[new_field_map]][name[field_name]] assign[=] call[name[GraphQLField], parameter[call[name[extend_field_type], parameter[name[field].type]]]]
variable[extensions] assign[=] call[name[type_extensions_map]][name[type].name]
for taget[name[extension]] in starred[name[extensions]] begin[:]
for taget[name[field]] in starred[name[extension].definition.fields] begin[:]
variable[field_name] assign[=] name[field].name.value
if compare[name[field_name] in name[old_field_map]] begin[:]
<ast.Raise object at 0x7da1b26aecb0>
call[name[new_field_map]][name[field_name]] assign[=] call[name[GraphQLField], parameter[call[name[build_field_type], parameter[name[field].type]]]]
return[name[new_field_map]]
def function[extend_field_type, parameter[type]]:
if call[name[isinstance], parameter[name[type], name[GraphQLList]]] begin[:]
return[call[name[GraphQLList], parameter[call[name[extend_field_type], parameter[name[type].of_type]]]]]
if call[name[isinstance], parameter[name[type], name[GraphQLNonNull]]] begin[:]
return[call[name[GraphQLNonNull], parameter[call[name[extend_field_type], parameter[name[type].of_type]]]]]
return[call[name[get_type_from_def], parameter[name[type]]]]
def function[build_type, parameter[type_ast]]:
variable[_type_build] assign[=] dictionary[[<ast.Attribute object at 0x7da1b26ada20>, <ast.Attribute object at 0x7da1b26ae1d0>, <ast.Attribute object at 0x7da1b26ae500>, <ast.Attribute object at 0x7da1b26affd0>, <ast.Attribute object at 0x7da1b26ae170>, <ast.Attribute object at 0x7da1b26ad0f0>], [<ast.Name object at 0x7da1b26ac1c0>, <ast.Name object at 0x7da1b26ae9b0>, <ast.Name object at 0x7da1b26ad030>, <ast.Name object at 0x7da1b26af130>, <ast.Name object at 0x7da1b26ade10>, <ast.Name object at 0x7da1b26af9a0>]]
variable[func] assign[=] call[name[_type_build].get, parameter[call[name[type], parameter[name[type_ast]]]]]
if name[func] begin[:]
return[call[name[func], parameter[name[type_ast]]]]
def function[build_object_type, parameter[type_ast]]:
return[call[name[GraphQLObjectType], parameter[name[type_ast].name.value]]]
def function[build_interface_type, parameter[type_ast]]:
return[call[name[GraphQLInterfaceType], parameter[name[type_ast].name.value]]]
def function[build_union_type, parameter[type_ast]]:
return[call[name[GraphQLUnionType], parameter[name[type_ast].name.value]]]
def function[build_scalar_type, parameter[type_ast]]:
return[call[name[GraphQLScalarType], parameter[name[type_ast].name.value]]]
def function[build_enum_type, parameter[type_ast]]:
return[call[name[GraphQLEnumType], parameter[name[type_ast].name.value]]]
def function[build_input_object_type, parameter[type_ast]]:
return[call[name[GraphQLInputObjectType], parameter[name[type_ast].name.value]]]
def function[build_implemented_interfaces, parameter[type_ast]]:
return[call[name[list], parameter[call[name[map], parameter[name[get_type_from_AST], name[type_ast].interfaces]]]]]
def function[build_field_map, parameter[type_ast]]:
return[<ast.DictComp object at 0x7da18f00d060>]
def function[build_input_values, parameter[values, input_type]]:
variable[input_values] assign[=] call[name[OrderedDict], parameter[]]
for taget[name[value]] in starred[name[values]] begin[:]
variable[type] assign[=] call[name[build_field_type], parameter[name[value].type]]
call[name[input_values]][name[value].name.value] assign[=] call[name[input_type], parameter[name[type]]]
return[name[input_values]]
def function[build_field_type, parameter[type_ast]]:
if call[name[isinstance], parameter[name[type_ast], name[ast].ListType]] begin[:]
return[call[name[GraphQLList], parameter[call[name[build_field_type], parameter[name[type_ast].type]]]]]
if call[name[isinstance], parameter[name[type_ast], name[ast].NonNullType]] begin[:]
return[call[name[GraphQLNonNull], parameter[call[name[build_field_type], parameter[name[type_ast].type]]]]]
return[call[name[get_type_from_AST], parameter[name[type_ast]]]]
if <ast.BoolOp object at 0x7da18f00ce50> begin[:]
return[name[schema]]
variable[type_def_cache] assign[=] dictionary[[<ast.Constant object at 0x7da18f00e200>, <ast.Constant object at 0x7da18f00dd80>, <ast.Constant object at 0x7da18f00e980>, <ast.Constant object at 0x7da18f00d690>, <ast.Constant object at 0x7da18f00c3a0>, <ast.Constant object at 0x7da18f00e7d0>, <ast.Constant object at 0x7da18f00dd20>, <ast.Constant object at 0x7da18f00fee0>, <ast.Constant object at 0x7da18f00e890>, <ast.Constant object at 0x7da18f00edd0>, <ast.Constant object at 0x7da18f00c3d0>, <ast.Constant object at 0x7da18f00e710>, <ast.Constant object at 0x7da18f00cfa0>], [<ast.Name object at 0x7da18f00da80>, <ast.Name object at 0x7da18f00e410>, <ast.Name object at 0x7da18f00cca0>, <ast.Name object at 0x7da18f00eaa0>, <ast.Name object at 0x7da18f00d870>, <ast.Name object at 0x7da18f00e4d0>, <ast.Name object at 0x7da18f00fb20>, <ast.Name object at 0x7da18f00df90>, <ast.Name object at 0x7da18f00e320>, <ast.Name object at 0x7da18f00d150>, <ast.Name object at 0x7da18f00ebf0>, <ast.Name object at 0x7da18f00d5d0>, <ast.Name object at 0x7da18f00c190>]]
variable[query_type] assign[=] call[name[get_type_from_def], parameter[call[name[schema].get_query_type, parameter[]]]]
variable[existing_mutation_type] assign[=] call[name[schema].get_mutation_type, parameter[]]
variable[mutationType] assign[=] <ast.BoolOp object at 0x7da18f00d420>
variable[existing_subscription_type] assign[=] call[name[schema].get_subscription_type, parameter[]]
variable[subscription_type] assign[=] <ast.BoolOp object at 0x7da1b1985270>
variable[types] assign[=] <ast.ListComp object at 0x7da1b19869b0>
<ast.AugAssign object at 0x7da1b1987940>
return[call[name[GraphQLSchema], parameter[]]] | keyword[def] identifier[extend_schema] ( identifier[schema] , identifier[documentAST] = keyword[None] ):
literal[string]
keyword[assert] identifier[isinstance] ( identifier[schema] , identifier[GraphQLSchema] ), literal[string]
keyword[assert] identifier[documentAST] keyword[and] identifier[isinstance] (
identifier[documentAST] , identifier[ast] . identifier[Document]
), literal[string]
identifier[type_definition_map] ={}
identifier[type_extensions_map] = identifier[defaultdict] ( identifier[list] )
keyword[for] identifier[_def] keyword[in] identifier[documentAST] . identifier[definitions] :
keyword[if] identifier[isinstance] (
identifier[_def] ,
(
identifier[ast] . identifier[ObjectTypeDefinition] ,
identifier[ast] . identifier[InterfaceTypeDefinition] ,
identifier[ast] . identifier[EnumTypeDefinition] ,
identifier[ast] . identifier[UnionTypeDefinition] ,
identifier[ast] . identifier[ScalarTypeDefinition] ,
identifier[ast] . identifier[InputObjectTypeDefinition] ,
),
):
identifier[type_name] = identifier[_def] . identifier[name] . identifier[value]
keyword[if] identifier[schema] . identifier[get_type] ( identifier[type_name] ):
keyword[raise] identifier[GraphQLError] (
(
literal[string]
+ literal[string]
). identifier[format] ( identifier[type_name] ),
[ identifier[_def] ],
)
identifier[type_definition_map] [ identifier[type_name] ]= identifier[_def]
keyword[elif] identifier[isinstance] ( identifier[_def] , identifier[ast] . identifier[TypeExtensionDefinition] ):
identifier[extended_type_name] = identifier[_def] . identifier[definition] . identifier[name] . identifier[value]
identifier[existing_type] = identifier[schema] . identifier[get_type] ( identifier[extended_type_name] )
keyword[if] keyword[not] identifier[existing_type] :
keyword[raise] identifier[GraphQLError] (
(
literal[string]
+ literal[string]
). identifier[format] ( identifier[extended_type_name] ),
[ identifier[_def] . identifier[definition] ],
)
keyword[if] keyword[not] identifier[isinstance] ( identifier[existing_type] , identifier[GraphQLObjectType] ):
keyword[raise] identifier[GraphQLError] (
literal[string] . identifier[format] ( identifier[extended_type_name] ),
[ identifier[_def] . identifier[definition] ],
)
identifier[type_extensions_map] [ identifier[extended_type_name] ]. identifier[append] ( identifier[_def] )
keyword[def] identifier[get_type_from_def] ( identifier[type_def] ):
identifier[type] = identifier[_get_named_type] ( identifier[type_def] . identifier[name] )
keyword[assert] identifier[type] , literal[string]
keyword[return] identifier[type]
keyword[def] identifier[get_type_from_AST] ( identifier[astNode] ):
identifier[type] = identifier[_get_named_type] ( identifier[astNode] . identifier[name] . identifier[value] )
keyword[if] keyword[not] identifier[type] :
keyword[raise] identifier[GraphQLError] (
(
literal[string]
+ literal[string]
). identifier[format] ( identifier[astNode] . identifier[name] . identifier[value] ),
[ identifier[astNode] ],
)
keyword[return] identifier[type]
keyword[def] identifier[_get_named_type] ( identifier[typeName] ):
identifier[cached_type_def] = identifier[type_def_cache] . identifier[get] ( identifier[typeName] )
keyword[if] identifier[cached_type_def] :
keyword[return] identifier[cached_type_def]
identifier[existing_type] = identifier[schema] . identifier[get_type] ( identifier[typeName] )
keyword[if] identifier[existing_type] :
identifier[type_def] = identifier[extend_type] ( identifier[existing_type] )
identifier[type_def_cache] [ identifier[typeName] ]= identifier[type_def]
keyword[return] identifier[type_def]
identifier[type_ast] = identifier[type_definition_map] . identifier[get] ( identifier[typeName] )
keyword[if] identifier[type_ast] :
identifier[type_def] = identifier[build_type] ( identifier[type_ast] )
identifier[type_def_cache] [ identifier[typeName] ]= identifier[type_def]
keyword[return] identifier[type_def]
keyword[def] identifier[extend_type] ( identifier[type] ):
keyword[if] identifier[isinstance] ( identifier[type] , identifier[GraphQLObjectType] ):
keyword[return] identifier[extend_object_type] ( identifier[type] )
keyword[if] identifier[isinstance] ( identifier[type] , identifier[GraphQLInterfaceType] ):
keyword[return] identifier[extend_interface_type] ( identifier[type] )
keyword[if] identifier[isinstance] ( identifier[type] , identifier[GraphQLUnionType] ):
keyword[return] identifier[extend_union_type] ( identifier[type] )
keyword[return] identifier[type]
keyword[def] identifier[extend_object_type] ( identifier[type] ):
keyword[return] identifier[GraphQLObjectType] (
identifier[name] = identifier[type] . identifier[name] ,
identifier[description] = identifier[type] . identifier[description] ,
identifier[interfaces] = keyword[lambda] : identifier[extend_implemented_interfaces] ( identifier[type] ),
identifier[fields] = keyword[lambda] : identifier[extend_field_map] ( identifier[type] ),
)
keyword[def] identifier[extend_interface_type] ( identifier[type] ):
keyword[return] identifier[GraphQLInterfaceType] (
identifier[name] = identifier[type] . identifier[name] ,
identifier[description] = identifier[type] . identifier[description] ,
identifier[fields] = keyword[lambda] : identifier[extend_field_map] ( identifier[type] ),
identifier[resolve_type] = identifier[cannot_execute_client_schema] ,
)
keyword[def] identifier[extend_union_type] ( identifier[type] ):
keyword[return] identifier[GraphQLUnionType] (
identifier[name] = identifier[type] . identifier[name] ,
identifier[description] = identifier[type] . identifier[description] ,
identifier[types] = identifier[list] ( identifier[map] ( identifier[get_type_from_def] , identifier[type] . identifier[types] )),
identifier[resolve_type] = identifier[cannot_execute_client_schema] ,
)
keyword[def] identifier[extend_implemented_interfaces] ( identifier[type] ):
identifier[interfaces] = identifier[list] ( identifier[map] ( identifier[get_type_from_def] , identifier[type] . identifier[interfaces] ))
identifier[extensions] = identifier[type_extensions_map] [ identifier[type] . identifier[name] ]
keyword[for] identifier[extension] keyword[in] identifier[extensions] :
keyword[for] identifier[namedType] keyword[in] identifier[extension] . identifier[definition] . identifier[interfaces] :
identifier[interface_name] = identifier[namedType] . identifier[name] . identifier[value]
keyword[if] identifier[any] ([ identifier[_def] . identifier[name] == identifier[interface_name] keyword[for] identifier[_def] keyword[in] identifier[interfaces] ]):
keyword[raise] identifier[GraphQLError] (
(
literal[string]
+ literal[string]
). identifier[format] ( identifier[type] . identifier[name] , identifier[interface_name] ),
[ identifier[namedType] ],
)
identifier[interfaces] . identifier[append] ( identifier[get_type_from_AST] ( identifier[namedType] ))
keyword[return] identifier[interfaces]
keyword[def] identifier[extend_field_map] ( identifier[type] ):
identifier[new_field_map] = identifier[OrderedDict] ()
identifier[old_field_map] = identifier[type] . identifier[fields]
keyword[for] identifier[field_name] , identifier[field] keyword[in] identifier[old_field_map] . identifier[items] ():
identifier[new_field_map] [ identifier[field_name] ]= identifier[GraphQLField] (
identifier[extend_field_type] ( identifier[field] . identifier[type] ),
identifier[description] = identifier[field] . identifier[description] ,
identifier[deprecation_reason] = identifier[field] . identifier[deprecation_reason] ,
identifier[args] = identifier[field] . identifier[args] ,
identifier[resolver] = identifier[cannot_execute_client_schema] ,
)
identifier[extensions] = identifier[type_extensions_map] [ identifier[type] . identifier[name] ]
keyword[for] identifier[extension] keyword[in] identifier[extensions] :
keyword[for] identifier[field] keyword[in] identifier[extension] . identifier[definition] . identifier[fields] :
identifier[field_name] = identifier[field] . identifier[name] . identifier[value]
keyword[if] identifier[field_name] keyword[in] identifier[old_field_map] :
keyword[raise] identifier[GraphQLError] (
(
literal[string]
+ literal[string]
). identifier[format] ( identifier[type] . identifier[name] , identifier[field_name] ),
[ identifier[field] ],
)
identifier[new_field_map] [ identifier[field_name] ]= identifier[GraphQLField] (
identifier[build_field_type] ( identifier[field] . identifier[type] ),
identifier[args] = identifier[build_input_values] ( identifier[field] . identifier[arguments] ),
identifier[resolver] = identifier[cannot_execute_client_schema] ,
)
keyword[return] identifier[new_field_map]
keyword[def] identifier[extend_field_type] ( identifier[type] ):
keyword[if] identifier[isinstance] ( identifier[type] , identifier[GraphQLList] ):
keyword[return] identifier[GraphQLList] ( identifier[extend_field_type] ( identifier[type] . identifier[of_type] ))
keyword[if] identifier[isinstance] ( identifier[type] , identifier[GraphQLNonNull] ):
keyword[return] identifier[GraphQLNonNull] ( identifier[extend_field_type] ( identifier[type] . identifier[of_type] ))
keyword[return] identifier[get_type_from_def] ( identifier[type] )
keyword[def] identifier[build_type] ( identifier[type_ast] ):
identifier[_type_build] ={
identifier[ast] . identifier[ObjectTypeDefinition] : identifier[build_object_type] ,
identifier[ast] . identifier[InterfaceTypeDefinition] : identifier[build_interface_type] ,
identifier[ast] . identifier[UnionTypeDefinition] : identifier[build_union_type] ,
identifier[ast] . identifier[ScalarTypeDefinition] : identifier[build_scalar_type] ,
identifier[ast] . identifier[EnumTypeDefinition] : identifier[build_enum_type] ,
identifier[ast] . identifier[InputObjectTypeDefinition] : identifier[build_input_object_type] ,
}
identifier[func] = identifier[_type_build] . identifier[get] ( identifier[type] ( identifier[type_ast] ))
keyword[if] identifier[func] :
keyword[return] identifier[func] ( identifier[type_ast] )
keyword[def] identifier[build_object_type] ( identifier[type_ast] ):
keyword[return] identifier[GraphQLObjectType] (
identifier[type_ast] . identifier[name] . identifier[value] ,
identifier[interfaces] = keyword[lambda] : identifier[build_implemented_interfaces] ( identifier[type_ast] ),
identifier[fields] = keyword[lambda] : identifier[build_field_map] ( identifier[type_ast] ),
)
keyword[def] identifier[build_interface_type] ( identifier[type_ast] ):
keyword[return] identifier[GraphQLInterfaceType] (
identifier[type_ast] . identifier[name] . identifier[value] ,
identifier[fields] = keyword[lambda] : identifier[build_field_map] ( identifier[type_ast] ),
identifier[resolve_type] = identifier[cannot_execute_client_schema] ,
)
keyword[def] identifier[build_union_type] ( identifier[type_ast] ):
keyword[return] identifier[GraphQLUnionType] (
identifier[type_ast] . identifier[name] . identifier[value] ,
identifier[types] = identifier[list] ( identifier[map] ( identifier[get_type_from_AST] , identifier[type_ast] . identifier[types] )),
identifier[resolve_type] = identifier[cannot_execute_client_schema] ,
)
keyword[def] identifier[build_scalar_type] ( identifier[type_ast] ):
keyword[return] identifier[GraphQLScalarType] (
identifier[type_ast] . identifier[name] . identifier[value] ,
identifier[serialize] = keyword[lambda] * identifier[args] ,** identifier[kwargs] : keyword[None] ,
identifier[parse_value] = keyword[lambda] * identifier[args] ,** identifier[kwargs] : keyword[False] ,
identifier[parse_literal] = keyword[lambda] * identifier[args] ,** identifier[kwargs] : keyword[False] ,
)
keyword[def] identifier[build_enum_type] ( identifier[type_ast] ):
keyword[return] identifier[GraphQLEnumType] (
identifier[type_ast] . identifier[name] . identifier[value] ,
identifier[values] ={ identifier[v] . identifier[name] . identifier[value] : identifier[GraphQLEnumValue] () keyword[for] identifier[v] keyword[in] identifier[type_ast] . identifier[values] },
)
keyword[def] identifier[build_input_object_type] ( identifier[type_ast] ):
keyword[return] identifier[GraphQLInputObjectType] (
identifier[type_ast] . identifier[name] . identifier[value] ,
identifier[fields] = keyword[lambda] : identifier[build_input_values] ( identifier[type_ast] . identifier[fields] , identifier[GraphQLInputObjectField] ),
)
keyword[def] identifier[build_implemented_interfaces] ( identifier[type_ast] ):
keyword[return] identifier[list] ( identifier[map] ( identifier[get_type_from_AST] , identifier[type_ast] . identifier[interfaces] ))
keyword[def] identifier[build_field_map] ( identifier[type_ast] ):
keyword[return] {
identifier[field] . identifier[name] . identifier[value] : identifier[GraphQLField] (
identifier[build_field_type] ( identifier[field] . identifier[type] ),
identifier[args] = identifier[build_input_values] ( identifier[field] . identifier[arguments] ),
identifier[resolver] = identifier[cannot_execute_client_schema] ,
)
keyword[for] identifier[field] keyword[in] identifier[type_ast] . identifier[fields]
}
keyword[def] identifier[build_input_values] ( identifier[values] , identifier[input_type] = identifier[GraphQLArgument] ):
identifier[input_values] = identifier[OrderedDict] ()
keyword[for] identifier[value] keyword[in] identifier[values] :
identifier[type] = identifier[build_field_type] ( identifier[value] . identifier[type] )
identifier[input_values] [ identifier[value] . identifier[name] . identifier[value] ]= identifier[input_type] (
identifier[type] , identifier[default_value] = identifier[value_from_ast] ( identifier[value] . identifier[default_value] , identifier[type] )
)
keyword[return] identifier[input_values]
keyword[def] identifier[build_field_type] ( identifier[type_ast] ):
keyword[if] identifier[isinstance] ( identifier[type_ast] , identifier[ast] . identifier[ListType] ):
keyword[return] identifier[GraphQLList] ( identifier[build_field_type] ( identifier[type_ast] . identifier[type] ))
keyword[if] identifier[isinstance] ( identifier[type_ast] , identifier[ast] . identifier[NonNullType] ):
keyword[return] identifier[GraphQLNonNull] ( identifier[build_field_type] ( identifier[type_ast] . identifier[type] ))
keyword[return] identifier[get_type_from_AST] ( identifier[type_ast] )
keyword[if] keyword[not] identifier[type_extensions_map] keyword[and] keyword[not] identifier[type_definition_map] :
keyword[return] identifier[schema]
identifier[type_def_cache] ={
literal[string] : identifier[GraphQLString] ,
literal[string] : identifier[GraphQLInt] ,
literal[string] : identifier[GraphQLFloat] ,
literal[string] : identifier[GraphQLBoolean] ,
literal[string] : identifier[GraphQLID] ,
literal[string] : identifier[__Schema] ,
literal[string] : identifier[__Directive] ,
literal[string] : identifier[__DirectiveLocation] ,
literal[string] : identifier[__Type] ,
literal[string] : identifier[__Field] ,
literal[string] : identifier[__InputValue] ,
literal[string] : identifier[__EnumValue] ,
literal[string] : identifier[__TypeKind] ,
}
identifier[query_type] = identifier[get_type_from_def] ( identifier[schema] . identifier[get_query_type] ())
identifier[existing_mutation_type] = identifier[schema] . identifier[get_mutation_type] ()
identifier[mutationType] =(
identifier[existing_mutation_type] keyword[and] identifier[get_type_from_def] ( identifier[existing_mutation_type] ) keyword[or] keyword[None]
)
identifier[existing_subscription_type] = identifier[schema] . identifier[get_subscription_type] ()
identifier[subscription_type] =(
identifier[existing_subscription_type]
keyword[and] identifier[get_type_from_def] ( identifier[existing_subscription_type] )
keyword[or] keyword[None]
)
identifier[types] =[ identifier[get_type_from_def] ( identifier[_def] ) keyword[for] identifier[_def] keyword[in] identifier[schema] . identifier[get_type_map] (). identifier[values] ()]
identifier[types] +=[ identifier[get_type_from_AST] ( identifier[_def] ) keyword[for] identifier[_def] keyword[in] identifier[type_definition_map] . identifier[values] ()]
keyword[return] identifier[GraphQLSchema] (
identifier[query] = identifier[query_type] ,
identifier[mutation] = identifier[mutationType] ,
identifier[subscription] = identifier[subscription_type] ,
identifier[directives] = identifier[schema] . identifier[get_directives] (),
identifier[types] = identifier[types] ,
) | def extend_schema(schema, documentAST=None):
"""Produces a new schema given an existing schema and a document which may
contain GraphQL type extensions and definitions. The original schema will
remain unaltered.
Because a schema represents a graph of references, a schema cannot be
extended without effectively making an entire copy. We do not know until it's
too late if subgraphs remain unchanged.
This algorithm copies the provided schema, applying extensions while
producing the copy. The original schema remains unaltered."""
assert isinstance(schema, GraphQLSchema), 'Must provide valid GraphQLSchema'
assert documentAST and isinstance(documentAST, ast.Document), 'Must provide valid Document AST'
# Collect the type definitions and extensions found in the document.
type_definition_map = {}
type_extensions_map = defaultdict(list)
for _def in documentAST.definitions:
if isinstance(_def, (ast.ObjectTypeDefinition, ast.InterfaceTypeDefinition, ast.EnumTypeDefinition, ast.UnionTypeDefinition, ast.ScalarTypeDefinition, ast.InputObjectTypeDefinition)):
# Sanity check that none of the defined types conflict with the
# schema's existing types.
type_name = _def.name.value
if schema.get_type(type_name):
raise GraphQLError(('Type "{}" already exists in the schema. It cannot also ' + 'be defined in this type definition.').format(type_name), [_def]) # depends on [control=['if'], data=[]]
type_definition_map[type_name] = _def # depends on [control=['if'], data=[]]
elif isinstance(_def, ast.TypeExtensionDefinition):
# Sanity check that this type extension exists within the
# schema's existing types.
extended_type_name = _def.definition.name.value
existing_type = schema.get_type(extended_type_name)
if not existing_type:
raise GraphQLError(('Cannot extend type "{}" because it does not ' + 'exist in the existing schema.').format(extended_type_name), [_def.definition]) # depends on [control=['if'], data=[]]
if not isinstance(existing_type, GraphQLObjectType):
raise GraphQLError('Cannot extend non-object type "{}".'.format(extended_type_name), [_def.definition]) # depends on [control=['if'], data=[]]
type_extensions_map[extended_type_name].append(_def) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['_def']]
# Below are functions used for producing this schema that have closed over
# this scope and have access to the schema, cache, and newly defined types.
def get_type_from_def(type_def):
type = _get_named_type(type_def.name)
assert type, 'Invalid schema'
return type
def get_type_from_AST(astNode):
type = _get_named_type(astNode.name.value)
if not type:
raise GraphQLError(('Unknown type: "{}". Ensure that this type exists ' + 'either in the original schema, or is added in a type definition.').format(astNode.name.value), [astNode]) # depends on [control=['if'], data=[]]
return type
# Given a name, returns a type from either the existing schema or an
# added type.
def _get_named_type(typeName):
cached_type_def = type_def_cache.get(typeName)
if cached_type_def:
return cached_type_def # depends on [control=['if'], data=[]]
existing_type = schema.get_type(typeName)
if existing_type:
type_def = extend_type(existing_type)
type_def_cache[typeName] = type_def
return type_def # depends on [control=['if'], data=[]]
type_ast = type_definition_map.get(typeName)
if type_ast:
type_def = build_type(type_ast)
type_def_cache[typeName] = type_def
return type_def # depends on [control=['if'], data=[]]
# Given a type's introspection result, construct the correct
# GraphQLType instance.
def extend_type(type):
if isinstance(type, GraphQLObjectType):
return extend_object_type(type) # depends on [control=['if'], data=[]]
if isinstance(type, GraphQLInterfaceType):
return extend_interface_type(type) # depends on [control=['if'], data=[]]
if isinstance(type, GraphQLUnionType):
return extend_union_type(type) # depends on [control=['if'], data=[]]
return type
def extend_object_type(type):
return GraphQLObjectType(name=type.name, description=type.description, interfaces=lambda : extend_implemented_interfaces(type), fields=lambda : extend_field_map(type))
def extend_interface_type(type):
return GraphQLInterfaceType(name=type.name, description=type.description, fields=lambda : extend_field_map(type), resolve_type=cannot_execute_client_schema)
def extend_union_type(type):
return GraphQLUnionType(name=type.name, description=type.description, types=list(map(get_type_from_def, type.types)), resolve_type=cannot_execute_client_schema)
def extend_implemented_interfaces(type):
interfaces = list(map(get_type_from_def, type.interfaces))
# If there are any extensions to the interfaces, apply those here.
extensions = type_extensions_map[type.name]
for extension in extensions:
for namedType in extension.definition.interfaces:
interface_name = namedType.name.value
if any([_def.name == interface_name for _def in interfaces]):
raise GraphQLError(('Type "{}" already implements "{}". ' + 'It cannot also be implemented in this type extension.').format(type.name, interface_name), [namedType]) # depends on [control=['if'], data=[]]
interfaces.append(get_type_from_AST(namedType)) # depends on [control=['for'], data=['namedType']] # depends on [control=['for'], data=['extension']]
return interfaces
def extend_field_map(type):
new_field_map = OrderedDict()
old_field_map = type.fields
for (field_name, field) in old_field_map.items():
new_field_map[field_name] = GraphQLField(extend_field_type(field.type), description=field.description, deprecation_reason=field.deprecation_reason, args=field.args, resolver=cannot_execute_client_schema) # depends on [control=['for'], data=[]]
# If there are any extensions to the fields, apply those here.
extensions = type_extensions_map[type.name]
for extension in extensions:
for field in extension.definition.fields:
field_name = field.name.value
if field_name in old_field_map:
raise GraphQLError(('Field "{}.{}" already exists in the ' + 'schema. It cannot also be defined in this type extension.').format(type.name, field_name), [field]) # depends on [control=['if'], data=['field_name']]
new_field_map[field_name] = GraphQLField(build_field_type(field.type), args=build_input_values(field.arguments), resolver=cannot_execute_client_schema) # depends on [control=['for'], data=['field']] # depends on [control=['for'], data=['extension']]
return new_field_map
def extend_field_type(type):
if isinstance(type, GraphQLList):
return GraphQLList(extend_field_type(type.of_type)) # depends on [control=['if'], data=[]]
if isinstance(type, GraphQLNonNull):
return GraphQLNonNull(extend_field_type(type.of_type)) # depends on [control=['if'], data=[]]
return get_type_from_def(type)
def build_type(type_ast):
_type_build = {ast.ObjectTypeDefinition: build_object_type, ast.InterfaceTypeDefinition: build_interface_type, ast.UnionTypeDefinition: build_union_type, ast.ScalarTypeDefinition: build_scalar_type, ast.EnumTypeDefinition: build_enum_type, ast.InputObjectTypeDefinition: build_input_object_type}
func = _type_build.get(type(type_ast))
if func:
return func(type_ast) # depends on [control=['if'], data=[]]
def build_object_type(type_ast):
return GraphQLObjectType(type_ast.name.value, interfaces=lambda : build_implemented_interfaces(type_ast), fields=lambda : build_field_map(type_ast))
def build_interface_type(type_ast):
return GraphQLInterfaceType(type_ast.name.value, fields=lambda : build_field_map(type_ast), resolve_type=cannot_execute_client_schema)
def build_union_type(type_ast):
return GraphQLUnionType(type_ast.name.value, types=list(map(get_type_from_AST, type_ast.types)), resolve_type=cannot_execute_client_schema)
def build_scalar_type(type_ast):
# Note: validation calls the parse functions to determine if a
# literal value is correct. Returning null would cause use of custom
# scalars to always fail validation. Returning false causes them to
# always pass validation.
return GraphQLScalarType(type_ast.name.value, serialize=lambda *args, **kwargs: None, parse_value=lambda *args, **kwargs: False, parse_literal=lambda *args, **kwargs: False)
def build_enum_type(type_ast):
return GraphQLEnumType(type_ast.name.value, values={v.name.value: GraphQLEnumValue() for v in type_ast.values})
def build_input_object_type(type_ast):
return GraphQLInputObjectType(type_ast.name.value, fields=lambda : build_input_values(type_ast.fields, GraphQLInputObjectField))
def build_implemented_interfaces(type_ast):
return list(map(get_type_from_AST, type_ast.interfaces))
def build_field_map(type_ast):
return {field.name.value: GraphQLField(build_field_type(field.type), args=build_input_values(field.arguments), resolver=cannot_execute_client_schema) for field in type_ast.fields}
def build_input_values(values, input_type=GraphQLArgument):
input_values = OrderedDict()
for value in values:
type = build_field_type(value.type)
input_values[value.name.value] = input_type(type, default_value=value_from_ast(value.default_value, type)) # depends on [control=['for'], data=['value']]
return input_values
def build_field_type(type_ast):
if isinstance(type_ast, ast.ListType):
return GraphQLList(build_field_type(type_ast.type)) # depends on [control=['if'], data=[]]
if isinstance(type_ast, ast.NonNullType):
return GraphQLNonNull(build_field_type(type_ast.type)) # depends on [control=['if'], data=[]]
return get_type_from_AST(type_ast)
# If this document contains no new types, then return the same unmodified
# GraphQLSchema instance.
if not type_extensions_map and (not type_definition_map):
return schema # depends on [control=['if'], data=[]]
# A cache to use to store the actual GraphQLType definition objects by name.
# Initialize to the GraphQL built in scalars and introspection types. All
# functions below are inline so that this type def cache is within the scope
# of the closure.
type_def_cache = {'String': GraphQLString, 'Int': GraphQLInt, 'Float': GraphQLFloat, 'Boolean': GraphQLBoolean, 'ID': GraphQLID, '__Schema': __Schema, '__Directive': __Directive, '__DirectiveLocation': __DirectiveLocation, '__Type': __Type, '__Field': __Field, '__InputValue': __InputValue, '__EnumValue': __EnumValue, '__TypeKind': __TypeKind}
# Get the root Query, Mutation, and Subscription types.
query_type = get_type_from_def(schema.get_query_type())
existing_mutation_type = schema.get_mutation_type()
mutationType = existing_mutation_type and get_type_from_def(existing_mutation_type) or None
existing_subscription_type = schema.get_subscription_type()
subscription_type = existing_subscription_type and get_type_from_def(existing_subscription_type) or None
# Iterate through all types, getting the type definition for each, ensuring
# that any type not directly referenced by a field will get created.
types = [get_type_from_def(_def) for _def in schema.get_type_map().values()]
# Do the same with new types, appending to the list of defined types.
types += [get_type_from_AST(_def) for _def in type_definition_map.values()]
# Then produce and return a Schema with these types.
# Copy directives.
return GraphQLSchema(query=query_type, mutation=mutationType, subscription=subscription_type, directives=schema.get_directives(), types=types) |
def annToRLE(self, ann):
    """
    Convert an annotation's segmentation to RLE.

    The segmentation may be stored as a list of polygons, as an
    uncompressed RLE dict, or as an already-compressed RLE dict.

    :param ann: annotation dict with 'image_id' and 'segmentation' keys
    :return: run-length encoding (RLE) of the annotation's mask
             (FIX: the original docstring claimed "binary mask
             (numpy 2D array)", but this method returns the RLE,
             not the decoded mask)
    """
    img = self.imgs[ann['image_id']]
    h, w = img['height'], img['width']
    segm = ann['segmentation']
    if isinstance(segm, list):
        # polygon -- a single object might consist of multiple parts;
        # we merge all part RLEs into one RLE code
        rles = maskUtils.frPyObjects(segm, h, w)
        rle = maskUtils.merge(rles)
    elif isinstance(segm['counts'], list):
        # uncompressed RLE: compress it
        rle = maskUtils.frPyObjects(segm, h, w)
    else:
        # already-compressed RLE: return as-is
        rle = segm
    return rle
constant[
Convert annotation which can be polygons, uncompressed RLE to RLE.
:return: binary mask (numpy 2D array)
]
variable[t] assign[=] call[name[self].imgs][call[name[ann]][constant[image_id]]]
<ast.Tuple object at 0x7da18f00cf10> assign[=] tuple[[<ast.Subscript object at 0x7da18f00e1a0>, <ast.Subscript object at 0x7da18f00f760>]]
variable[segm] assign[=] call[name[ann]][constant[segmentation]]
if compare[call[name[type], parameter[name[segm]]] equal[==] name[list]] begin[:]
variable[rles] assign[=] call[name[maskUtils].frPyObjects, parameter[name[segm], name[h], name[w]]]
variable[rle] assign[=] call[name[maskUtils].merge, parameter[name[rles]]]
return[name[rle]] | keyword[def] identifier[annToRLE] ( identifier[self] , identifier[ann] ):
literal[string]
identifier[t] = identifier[self] . identifier[imgs] [ identifier[ann] [ literal[string] ]]
identifier[h] , identifier[w] = identifier[t] [ literal[string] ], identifier[t] [ literal[string] ]
identifier[segm] = identifier[ann] [ literal[string] ]
keyword[if] identifier[type] ( identifier[segm] )== identifier[list] :
identifier[rles] = identifier[maskUtils] . identifier[frPyObjects] ( identifier[segm] , identifier[h] , identifier[w] )
identifier[rle] = identifier[maskUtils] . identifier[merge] ( identifier[rles] )
keyword[elif] identifier[type] ( identifier[segm] [ literal[string] ])== identifier[list] :
identifier[rle] = identifier[maskUtils] . identifier[frPyObjects] ( identifier[segm] , identifier[h] , identifier[w] )
keyword[else] :
identifier[rle] = identifier[ann] [ literal[string] ]
keyword[return] identifier[rle] | def annToRLE(self, ann):
"""
Convert annotation which can be polygons, uncompressed RLE to RLE.
:return: binary mask (numpy 2D array)
"""
t = self.imgs[ann['image_id']]
(h, w) = (t['height'], t['width'])
segm = ann['segmentation']
if type(segm) == list:
# polygon -- a single object might consist of multiple parts
# we merge all parts into one mask rle code
rles = maskUtils.frPyObjects(segm, h, w)
rle = maskUtils.merge(rles) # depends on [control=['if'], data=[]]
elif type(segm['counts']) == list:
# uncompressed RLE
rle = maskUtils.frPyObjects(segm, h, w) # depends on [control=['if'], data=[]]
else:
# rle
rle = ann['segmentation']
return rle |
def get_value(self, obj):
    """
    Return the value of ``obj``'s attribute named by ``self.attribute``.

    The attribute may be a ``__``-separated path, traversed one segment
    at a time. ``None`` is returned when no attribute is configured,
    when any intermediate value is ``None``, or when traversal fails
    with ``ValueError`` / ``ObjectDoesNotExist``.
    """
    if self.attribute is None:
        return None
    current = obj
    for segment in self.attribute.split('__'):
        try:
            current = getattr(current, segment, None)
        except (ValueError, ObjectDoesNotExist):
            # A many-to-many relation cannot be traversed before the
            # instance has a primary key; treat that as "no value".
            return None
        if current is None:
            return None
    # RelatedManager and ManyRelatedManager classes are callable in
    # Django >= 1.7, but we must not invoke them.
    if callable(current) and not isinstance(current, Manager):
        current = current()
    return current
constant[
Returns the value of the object's attribute.
]
if compare[name[self].attribute is constant[None]] begin[:]
return[constant[None]]
variable[attrs] assign[=] call[name[self].attribute.split, parameter[constant[__]]]
variable[value] assign[=] name[obj]
for taget[name[attr]] in starred[name[attrs]] begin[:]
<ast.Try object at 0x7da1b1d8b010>
if compare[name[value] is constant[None]] begin[:]
return[constant[None]]
if <ast.BoolOp object at 0x7da1b1d50550> begin[:]
variable[value] assign[=] call[name[value], parameter[]]
return[name[value]] | keyword[def] identifier[get_value] ( identifier[self] , identifier[obj] ):
literal[string]
keyword[if] identifier[self] . identifier[attribute] keyword[is] keyword[None] :
keyword[return] keyword[None]
identifier[attrs] = identifier[self] . identifier[attribute] . identifier[split] ( literal[string] )
identifier[value] = identifier[obj]
keyword[for] identifier[attr] keyword[in] identifier[attrs] :
keyword[try] :
identifier[value] = identifier[getattr] ( identifier[value] , identifier[attr] , keyword[None] )
keyword[except] ( identifier[ValueError] , identifier[ObjectDoesNotExist] ):
keyword[return] keyword[None]
keyword[if] identifier[value] keyword[is] keyword[None] :
keyword[return] keyword[None]
keyword[if] identifier[callable] ( identifier[value] ) keyword[and] keyword[not] identifier[isinstance] ( identifier[value] , identifier[Manager] ):
identifier[value] = identifier[value] ()
keyword[return] identifier[value] | def get_value(self, obj):
"""
Returns the value of the object's attribute.
"""
if self.attribute is None:
return None # depends on [control=['if'], data=[]]
attrs = self.attribute.split('__')
value = obj
for attr in attrs:
try:
value = getattr(value, attr, None) # depends on [control=['try'], data=[]]
except (ValueError, ObjectDoesNotExist):
# needs to have a primary key value before a many-to-many
# relationship can be used.
return None # depends on [control=['except'], data=[]]
if value is None:
return None # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['attr']]
# RelatedManager and ManyRelatedManager classes are callable in
# Django >= 1.7 but we don't want to call them
if callable(value) and (not isinstance(value, Manager)):
value = value() # depends on [control=['if'], data=[]]
return value |
def randomSizeAndLocation(cls, radius, widthLimits,
                          heightLimits, origin=None):
    '''
    Create a rectangle with a random size and random origin location.

    :param: radius - float
    :param: widthLimits - iterable of floats with length >= 2
    :param: heightLimits - iterable of floats with length >= 2
    :param: origin - optional Point subclass
    :return: Rectangle
    '''
    r = cls(widthLimits, heightLimits, origin)
    r.origin = Point.randomLocation(radius, origin)
    # BUG FIX: the constructed rectangle was previously discarded
    # (the function fell off the end and returned None); return it so
    # the documented contract (:return: Rectangle) actually holds.
    return r
constant[
:param: radius - float
:param: widthLimits - iterable of floats with length >= 2
:param: heightLimits - iterable of floats with length >= 2
:param: origin - optional Point subclass
:return: Rectangle
]
variable[r] assign[=] call[name[cls], parameter[name[widthLimits], name[heightLimits], name[origin]]]
name[r].origin assign[=] call[name[Point].randomLocation, parameter[name[radius], name[origin]]] | keyword[def] identifier[randomSizeAndLocation] ( identifier[cls] , identifier[radius] , identifier[widthLimits] ,
identifier[heightLimits] , identifier[origin] = keyword[None] ):
literal[string]
identifier[r] = identifier[cls] ( identifier[widthLimits] , identifier[heightLimits] , identifier[origin] )
identifier[r] . identifier[origin] = identifier[Point] . identifier[randomLocation] ( identifier[radius] , identifier[origin] ) | def randomSizeAndLocation(cls, radius, widthLimits, heightLimits, origin=None):
"""
:param: radius - float
:param: widthLimits - iterable of floats with length >= 2
:param: heightLimits - iterable of floats with length >= 2
:param: origin - optional Point subclass
:return: Rectangle
"""
r = cls(widthLimits, heightLimits, origin)
r.origin = Point.randomLocation(radius, origin) |
def squarelim():
    """Give every axis the same extent so the plotted space is 'square'."""
    fig = gcf()
    extents = [fig.xlim, fig.ylim, fig.zlim]
    # The widest span across the three axes becomes the common width.
    width = max(abs(hi - lo) for lo, hi in extents)
    centers = [(lo + hi) / 2 for lo, hi in extents]
    # Re-center each axis on its midpoint with the common width.
    for set_limits, center in zip((xlim, ylim, zlim), centers):
        set_limits(center - width / 2, center + width / 2)
constant[Set all axes with equal aspect ratio, such that the space is 'square'.]
variable[fig] assign[=] call[name[gcf], parameter[]]
<ast.Tuple object at 0x7da204564d00> assign[=] name[fig].xlim
<ast.Tuple object at 0x7da204566a10> assign[=] name[fig].ylim
<ast.Tuple object at 0x7da204564700> assign[=] name[fig].zlim
variable[width] assign[=] call[name[max], parameter[list[[<ast.Call object at 0x7da2045670a0>, <ast.Call object at 0x7da2045652d0>, <ast.Call object at 0x7da204566fb0>]]]]
variable[xc] assign[=] binary_operation[binary_operation[name[xmin] + name[xmax]] / constant[2]]
variable[yc] assign[=] binary_operation[binary_operation[name[ymin] + name[ymax]] / constant[2]]
variable[zc] assign[=] binary_operation[binary_operation[name[zmin] + name[zmax]] / constant[2]]
call[name[xlim], parameter[binary_operation[name[xc] - binary_operation[name[width] / constant[2]]], binary_operation[name[xc] + binary_operation[name[width] / constant[2]]]]]
call[name[ylim], parameter[binary_operation[name[yc] - binary_operation[name[width] / constant[2]]], binary_operation[name[yc] + binary_operation[name[width] / constant[2]]]]]
call[name[zlim], parameter[binary_operation[name[zc] - binary_operation[name[width] / constant[2]]], binary_operation[name[zc] + binary_operation[name[width] / constant[2]]]]] | keyword[def] identifier[squarelim] ():
literal[string]
identifier[fig] = identifier[gcf] ()
identifier[xmin] , identifier[xmax] = identifier[fig] . identifier[xlim]
identifier[ymin] , identifier[ymax] = identifier[fig] . identifier[ylim]
identifier[zmin] , identifier[zmax] = identifier[fig] . identifier[zlim]
identifier[width] = identifier[max] ([ identifier[abs] ( identifier[xmax] - identifier[xmin] ), identifier[abs] ( identifier[ymax] - identifier[ymin] ), identifier[abs] ( identifier[zmax] - identifier[zmin] )])
identifier[xc] =( identifier[xmin] + identifier[xmax] )/ literal[int]
identifier[yc] =( identifier[ymin] + identifier[ymax] )/ literal[int]
identifier[zc] =( identifier[zmin] + identifier[zmax] )/ literal[int]
identifier[xlim] ( identifier[xc] - identifier[width] / literal[int] , identifier[xc] + identifier[width] / literal[int] )
identifier[ylim] ( identifier[yc] - identifier[width] / literal[int] , identifier[yc] + identifier[width] / literal[int] )
identifier[zlim] ( identifier[zc] - identifier[width] / literal[int] , identifier[zc] + identifier[width] / literal[int] ) | def squarelim():
"""Set all axes with equal aspect ratio, such that the space is 'square'."""
fig = gcf()
(xmin, xmax) = fig.xlim
(ymin, ymax) = fig.ylim
(zmin, zmax) = fig.zlim
width = max([abs(xmax - xmin), abs(ymax - ymin), abs(zmax - zmin)])
xc = (xmin + xmax) / 2
yc = (ymin + ymax) / 2
zc = (zmin + zmax) / 2
xlim(xc - width / 2, xc + width / 2)
ylim(yc - width / 2, yc + width / 2)
zlim(zc - width / 2, zc + width / 2) |
def api_view(injector):
    """Create DRF class-based API view from injector class."""
    view_class = create_handler(APIView, injector)
    # Wire up the HTTP verb handlers first, then the APIView hooks.
    for apply_methods in (apply_http_methods, apply_api_view_methods):
        apply_methods(view_class, injector)
    return injector.let(as_view=view_class.as_view)
constant[Create DRF class-based API view from injector class.]
variable[handler] assign[=] call[name[create_handler], parameter[name[APIView], name[injector]]]
call[name[apply_http_methods], parameter[name[handler], name[injector]]]
call[name[apply_api_view_methods], parameter[name[handler], name[injector]]]
return[call[name[injector].let, parameter[]]] | keyword[def] identifier[api_view] ( identifier[injector] ):
literal[string]
identifier[handler] = identifier[create_handler] ( identifier[APIView] , identifier[injector] )
identifier[apply_http_methods] ( identifier[handler] , identifier[injector] )
identifier[apply_api_view_methods] ( identifier[handler] , identifier[injector] )
keyword[return] identifier[injector] . identifier[let] ( identifier[as_view] = identifier[handler] . identifier[as_view] ) | def api_view(injector):
"""Create DRF class-based API view from injector class."""
handler = create_handler(APIView, injector)
apply_http_methods(handler, injector)
apply_api_view_methods(handler, injector)
return injector.let(as_view=handler.as_view) |
def remove(self, widget):
    """Remove a widget from the window."""
    # Locate the widget by identity before mutating the list.
    index = None
    for position, (candidate, _) in enumerate(self._widgets):
        if candidate is widget:
            index = position
            break
    if index is None:
        raise ValueError('Widget not in list')
    del self._widgets[index]
    return True
constant[Remove a widget from the window.]
for taget[tuple[[<ast.Name object at 0x7da20c6e53f0>, <ast.Tuple object at 0x7da20c6e79d0>]]] in starred[call[name[enumerate], parameter[name[self]._widgets]]] begin[:]
if compare[name[widget] is name[wid]] begin[:]
<ast.Delete object at 0x7da20c6e7d30>
return[constant[True]]
<ast.Raise object at 0x7da20c6e6350> | keyword[def] identifier[remove] ( identifier[self] , identifier[widget] ):
literal[string]
keyword[for] identifier[i] ,( identifier[wid] , identifier[_] ) keyword[in] identifier[enumerate] ( identifier[self] . identifier[_widgets] ):
keyword[if] identifier[widget] keyword[is] identifier[wid] :
keyword[del] identifier[self] . identifier[_widgets] [ identifier[i] ]
keyword[return] keyword[True]
keyword[raise] identifier[ValueError] ( literal[string] ) | def remove(self, widget):
"""Remove a widget from the window."""
for (i, (wid, _)) in enumerate(self._widgets):
if widget is wid:
del self._widgets[i]
return True # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]]
raise ValueError('Widget not in list') |
def poa_horizontal_ratio(surface_tilt, surface_azimuth,
                         solar_zenith, solar_azimuth):
    """
    Calculates the ratio of the beam components of the plane of array
    irradiance and the horizontal irradiance.

    Input all angles in degrees.

    Parameters
    ----------
    surface_tilt : numeric
        Panel tilt from horizontal.
    surface_azimuth : numeric
        Panel azimuth from north.
    solar_zenith : numeric
        Solar zenith angle.
    solar_azimuth : numeric
        Solar azimuth angle.

    Returns
    -------
    ratio : numeric
        Ratio of the plane of array irradiance to the horizontal plane
        irradiance
    """
    # Cosine of the angle of incidence on the tilted plane ...
    cos_aoi = aoi_projection(surface_tilt, surface_azimuth,
                             solar_zenith, solar_azimuth)
    # ... over the projection onto the horizontal plane.
    ratio = cos_aoi / tools.cosd(solar_zenith)
    try:
        # pandas Series can carry a descriptive name; other numerics
        # (floats, ndarrays) cannot, so ignore the failure.
        ratio.name = 'poa_ratio'
    except AttributeError:
        pass
    return ratio
constant[
Calculates the ratio of the beam components of the plane of array
irradiance and the horizontal irradiance.
Input all angles in degrees.
Parameters
----------
surface_tilt : numeric
Panel tilt from horizontal.
surface_azimuth : numeric
Panel azimuth from north.
solar_zenith : numeric
Solar zenith angle.
solar_azimuth : numeric
Solar azimuth angle.
Returns
-------
ratio : numeric
Ratio of the plane of array irradiance to the horizontal plane
irradiance
]
variable[cos_poa_zen] assign[=] call[name[aoi_projection], parameter[name[surface_tilt], name[surface_azimuth], name[solar_zenith], name[solar_azimuth]]]
variable[cos_solar_zenith] assign[=] call[name[tools].cosd, parameter[name[solar_zenith]]]
variable[ratio] assign[=] binary_operation[name[cos_poa_zen] / name[cos_solar_zenith]]
<ast.Try object at 0x7da1b1bad840>
return[name[ratio]] | keyword[def] identifier[poa_horizontal_ratio] ( identifier[surface_tilt] , identifier[surface_azimuth] ,
identifier[solar_zenith] , identifier[solar_azimuth] ):
literal[string]
identifier[cos_poa_zen] = identifier[aoi_projection] ( identifier[surface_tilt] , identifier[surface_azimuth] ,
identifier[solar_zenith] , identifier[solar_azimuth] )
identifier[cos_solar_zenith] = identifier[tools] . identifier[cosd] ( identifier[solar_zenith] )
identifier[ratio] = identifier[cos_poa_zen] / identifier[cos_solar_zenith]
keyword[try] :
identifier[ratio] . identifier[name] = literal[string]
keyword[except] identifier[AttributeError] :
keyword[pass]
keyword[return] identifier[ratio] | def poa_horizontal_ratio(surface_tilt, surface_azimuth, solar_zenith, solar_azimuth):
"""
Calculates the ratio of the beam components of the plane of array
irradiance and the horizontal irradiance.
Input all angles in degrees.
Parameters
----------
surface_tilt : numeric
Panel tilt from horizontal.
surface_azimuth : numeric
Panel azimuth from north.
solar_zenith : numeric
Solar zenith angle.
solar_azimuth : numeric
Solar azimuth angle.
Returns
-------
ratio : numeric
Ratio of the plane of array irradiance to the horizontal plane
irradiance
"""
cos_poa_zen = aoi_projection(surface_tilt, surface_azimuth, solar_zenith, solar_azimuth)
cos_solar_zenith = tools.cosd(solar_zenith)
# ratio of tilted and horizontal beam irradiance
ratio = cos_poa_zen / cos_solar_zenith
try:
ratio.name = 'poa_ratio' # depends on [control=['try'], data=[]]
except AttributeError:
pass # depends on [control=['except'], data=[]]
return ratio |
def get_reviews(self, user_id):
    """ Get reviews for a particular user
    """
    # Build the per-user reviews endpoint, authenticated with our token.
    endpoint = _REVIEWS_USER.format(
        c_api=_C_API_BEGINNING,
        api=_API_VERSION,
        user_id=user_id,
        at=self.access_token,
    )
    return _get_request(endpoint)
constant[ Get reviews for a particular user
]
variable[url] assign[=] call[name[_REVIEWS_USER].format, parameter[]]
return[call[name[_get_request], parameter[name[url]]]] | keyword[def] identifier[get_reviews] ( identifier[self] , identifier[user_id] ):
literal[string]
identifier[url] = identifier[_REVIEWS_USER] . identifier[format] ( identifier[c_api] = identifier[_C_API_BEGINNING] ,
identifier[api] = identifier[_API_VERSION] ,
identifier[user_id] = identifier[user_id] ,
identifier[at] = identifier[self] . identifier[access_token] )
keyword[return] identifier[_get_request] ( identifier[url] ) | def get_reviews(self, user_id):
""" Get reviews for a particular user
"""
url = _REVIEWS_USER.format(c_api=_C_API_BEGINNING, api=_API_VERSION, user_id=user_id, at=self.access_token)
return _get_request(url) |
def _abortConnection(self):
    """
    Forcibly close the connection, e.g. when an event line is too long
    or we time out waiting for an event.

    Despite what the documentation says for
    :class:`twisted.internet.protocol.Protocol`, the ``transport``
    attribute is not guaranteed to be a
    :class:`twisted.internet.interfaces.ITransport`. When the HTTP
    request was initiated with :class:`twisted.web.client.Agent` (newer
    Twisted), ``transport`` is a
    :class:`twisted.web._newclient.TransportProxyProducer` -- an
    :class:`twisted.internet.interfaces.IPushProducer` wrapping the
    actual transport -- so we unwrap it and call ``abortConnection()``
    on the underlying transport when available.
    """
    transport = self.transport
    if isinstance(transport, TransportProxyProducer):
        # Reach through the Agent's proxy producer to the real transport.
        transport = transport._producer
    if not hasattr(transport, 'abortConnection'):
        self.log.error(
            'Transport {} has no abortConnection method'.format(transport))
        return
    transport.abortConnection()
constant[
We need a way to close the connection when an event line is too long
or if we time out waiting for an event. This is normally done by
calling :meth:`~twisted.internet.interfaces.ITransport.loseConnection``
or :meth:`~twisted.internet.interfaces.ITCPTransport.abortConnection`,
but newer versions of Twisted make this complicated.
Despite what the documentation says for
:class:`twisted.internet.protocol.Protocol`, the ``transport``
attribute is not necessarily a
:class:`twisted.internet.interfaces.ITransport`. Looking at the
documentation for :class:`twisted.internet.interfaces.IProtocol`, the
``transport`` attribute is actually not defined and neither is the
type of the ``transport`` parameter to
:meth:`~twisted.internet.interfaces.IProtocol.makeConnection`.
``SseProtocol`` will most often be used with HTTP requests initiated
with :class:`twisted.web.client.Agent` which, in newer versions of
Twisted, ends up giving us a
:class:`twisted.web._newclient.TransportProxyProducer` for our
``transport``. This is just a
:class:`twisted.internet.interfaces.IPushProducer` that wraps the
actual transport. If our transport is one of these, try call
``abortConnection()`` on the underlying transport.
]
variable[transport] assign[=] name[self].transport
if call[name[isinstance], parameter[name[transport], name[TransportProxyProducer]]] begin[:]
variable[transport] assign[=] name[transport]._producer
if call[name[hasattr], parameter[name[transport], constant[abortConnection]]] begin[:]
call[name[transport].abortConnection, parameter[]] | keyword[def] identifier[_abortConnection] ( identifier[self] ):
literal[string]
identifier[transport] = identifier[self] . identifier[transport]
keyword[if] identifier[isinstance] ( identifier[transport] , identifier[TransportProxyProducer] ):
identifier[transport] = identifier[transport] . identifier[_producer]
keyword[if] identifier[hasattr] ( identifier[transport] , literal[string] ):
identifier[transport] . identifier[abortConnection] ()
keyword[else] :
identifier[self] . identifier[log] . identifier[error] (
literal[string] . identifier[format] ( identifier[transport] )) | def _abortConnection(self):
"""
We need a way to close the connection when an event line is too long
or if we time out waiting for an event. This is normally done by
calling :meth:`~twisted.internet.interfaces.ITransport.loseConnection``
or :meth:`~twisted.internet.interfaces.ITCPTransport.abortConnection`,
but newer versions of Twisted make this complicated.
Despite what the documentation says for
:class:`twisted.internet.protocol.Protocol`, the ``transport``
attribute is not necessarily a
:class:`twisted.internet.interfaces.ITransport`. Looking at the
documentation for :class:`twisted.internet.interfaces.IProtocol`, the
``transport`` attribute is actually not defined and neither is the
type of the ``transport`` parameter to
:meth:`~twisted.internet.interfaces.IProtocol.makeConnection`.
``SseProtocol`` will most often be used with HTTP requests initiated
with :class:`twisted.web.client.Agent` which, in newer versions of
Twisted, ends up giving us a
:class:`twisted.web._newclient.TransportProxyProducer` for our
``transport``. This is just a
:class:`twisted.internet.interfaces.IPushProducer` that wraps the
actual transport. If our transport is one of these, try call
``abortConnection()`` on the underlying transport.
"""
transport = self.transport
if isinstance(transport, TransportProxyProducer):
transport = transport._producer # depends on [control=['if'], data=[]]
if hasattr(transport, 'abortConnection'):
transport.abortConnection() # depends on [control=['if'], data=[]]
else:
self.log.error('Transport {} has no abortConnection method'.format(transport)) |
def check_number(cls, id_number):
    """
    Validate a Chinese national ID number against its check digit.

    :param:
        * id_number: (string) the ID number, e.g. 32012419870101001
    :returns:
        * a (tuple); currently a single element ``flag`` — a second element
          describing the exact validation failure may be added later
        * flag: (bool) True if the check digit matches, False otherwise

    Example::

        from fishbase.fish_data import *

        print('--- fish_data check_number demo ---')

        # id number false
        id1 = '320124198701010012'
        print(id1, IdCard.check_number(id1)[0])

        # id number true
        id2 = '130522198407316471'
        print(id2, IdCard.check_number(id2)[0])

        print('---')

    Output::

        --- fish_data check_number demo ---
        320124198701010012 False
        130522198407316471 True
        ---

    """
    number_text = str(id_number) if isinstance(id_number, int) else id_number

    # Compute the expected check code from the first 17 digits.
    outcome = IdCard.get_checkcode(number_text[0:17])
    ok, expected_code = outcome[0], outcome[1]

    # A False flag means the number itself is malformed; pass it through.
    if not ok:
        return ok,

    # Compare the computed check code against the final character.
    return expected_code == number_text[-1].upper(),
constant[
检查身份证号码是否符合校验规则;
:param:
* id_number: (string) 身份证号,比如 32012419870101001
:returns:
* 返回类型 (tuple),当前有一个值,第一个为 flag,以后第二个值会返回具体校验不通过的详细错误
* flag: (bool) 如果身份证号码校验通过,返回 True;如果身份证校验不通过,返回 False
举例如下::
from fishbase.fish_data import *
print('--- fish_data check_number demo ---')
# id number false
id1 = '320124198701010012'
print(id1, IdCard.check_number(id1)[0])
# id number true
id2 = '130522198407316471'
print(id2, IdCard.check_number(id2)[0])
print('---')
输出结果::
--- fish_data check_number demo ---
320124198701010012 False
130522198407316471 True
---
]
if call[name[isinstance], parameter[name[id_number], name[int]]] begin[:]
variable[id_number] assign[=] call[name[str], parameter[name[id_number]]]
variable[result] assign[=] call[name[IdCard].get_checkcode, parameter[call[name[id_number]][<ast.Slice object at 0x7da1b08e6200>]]]
variable[flag] assign[=] call[name[result]][constant[0]]
variable[checkcode] assign[=] call[name[result]][constant[1]]
if <ast.UnaryOp object at 0x7da1b08e4820> begin[:]
return[tuple[[<ast.Name object at 0x7da1b08e4760>]]]
return[tuple[[<ast.Compare object at 0x7da1b08e67a0>]]] | keyword[def] identifier[check_number] ( identifier[cls] , identifier[id_number] ):
literal[string]
keyword[if] identifier[isinstance] ( identifier[id_number] , identifier[int] ):
identifier[id_number] = identifier[str] ( identifier[id_number] )
identifier[result] = identifier[IdCard] . identifier[get_checkcode] ( identifier[id_number] [ literal[int] : literal[int] ])
identifier[flag] = identifier[result] [ literal[int] ]
identifier[checkcode] = identifier[result] [ literal[int] ]
keyword[if] keyword[not] identifier[flag] :
keyword[return] identifier[flag] ,
keyword[return] identifier[checkcode] == identifier[id_number] [- literal[int] ]. identifier[upper] (), | def check_number(cls, id_number):
"""
检查身份证号码是否符合校验规则;
:param:
* id_number: (string) 身份证号,比如 32012419870101001
:returns:
* 返回类型 (tuple),当前有一个值,第一个为 flag,以后第二个值会返回具体校验不通过的详细错误
* flag: (bool) 如果身份证号码校验通过,返回 True;如果身份证校验不通过,返回 False
举例如下::
from fishbase.fish_data import *
print('--- fish_data check_number demo ---')
# id number false
id1 = '320124198701010012'
print(id1, IdCard.check_number(id1)[0])
# id number true
id2 = '130522198407316471'
print(id2, IdCard.check_number(id2)[0])
print('---')
输出结果::
--- fish_data check_number demo ---
320124198701010012 False
130522198407316471 True
---
"""
if isinstance(id_number, int):
id_number = str(id_number) # depends on [control=['if'], data=[]]
# 调用函数计算身份证前面17位的 checkcode
result = IdCard.get_checkcode(id_number[0:17])
# 返回第一个 flag 是错误的话,表示身份证格式错误,直接透传返回,第二个为获得的校验码
flag = result[0]
checkcode = result[1]
if not flag:
return (flag,) # depends on [control=['if'], data=[]]
# 判断校验码是否正确
return (checkcode == id_number[-1].upper(),) |
def terminate_ex(self, nodes, threads=False, attempts=3):
    """Retry wrapper around the terminate methods.

    :param nodes: Nodes to be destroyed.
    :type nodes: ``list``
    :param threads: Whether to use the threaded approach or not.
    :type threads: ``bool``
    :param attempts: The amount of attempts for retrying to terminate
        failed instances.
    :type attempts: ``int``
    :returns: the nodes that could not be terminated (empty on success)
    """
    remaining = nodes
    while remaining and attempts > 0:
        # Pick the threaded or sequential implementation per call.
        if threads:
            remaining = self.terminate_with_threads(remaining)
        else:
            remaining = self.terminate(remaining)
        if remaining:
            logger.info("Attempt to terminate the remaining instances once more.")
            attempts -= 1
    return remaining
constant[Wrapper method for terminate.
:param nodes: Nodes to be destroyed.
:type nodes: ``list``
:param attempts: The amount of attempts for retrying to terminate failed instances.
:type attempts: ``int``
:param threads: Whether to use the threaded approach or not.
:type threads: ``bool``
]
while <ast.BoolOp object at 0x7da1b13a85e0> begin[:]
if name[threads] begin[:]
variable[nodes] assign[=] call[name[self].terminate_with_threads, parameter[name[nodes]]]
if name[nodes] begin[:]
call[name[logger].info, parameter[constant[Attempt to terminate the remaining instances once more.]]]
<ast.AugAssign object at 0x7da1b13b73a0>
return[name[nodes]] | keyword[def] identifier[terminate_ex] ( identifier[self] , identifier[nodes] , identifier[threads] = keyword[False] , identifier[attempts] = literal[int] ):
literal[string]
keyword[while] identifier[nodes] keyword[and] identifier[attempts] > literal[int] :
keyword[if] identifier[threads] :
identifier[nodes] = identifier[self] . identifier[terminate_with_threads] ( identifier[nodes] )
keyword[else] :
identifier[nodes] = identifier[self] . identifier[terminate] ( identifier[nodes] )
keyword[if] identifier[nodes] :
identifier[logger] . identifier[info] ( literal[string] )
identifier[attempts] -= literal[int]
keyword[return] identifier[nodes] | def terminate_ex(self, nodes, threads=False, attempts=3):
"""Wrapper method for terminate.
:param nodes: Nodes to be destroyed.
:type nodes: ``list``
:param attempts: The amount of attempts for retrying to terminate failed instances.
:type attempts: ``int``
:param threads: Whether to use the threaded approach or not.
:type threads: ``bool``
"""
while nodes and attempts > 0:
if threads:
nodes = self.terminate_with_threads(nodes) # depends on [control=['if'], data=[]]
else:
nodes = self.terminate(nodes)
if nodes:
logger.info('Attempt to terminate the remaining instances once more.')
attempts -= 1 # depends on [control=['if'], data=[]] # depends on [control=['while'], data=[]]
return nodes |
def _add_global_secondary_index(ret, name, index_name, changes_old, changes_new, comments,
                                gsi_config, region, key, keyid, profile):
    '''Create a new GSI on the table; updates ret iff there was a failure or in test mode.'''
    # Test mode: report the pending change without touching AWS.
    if __opts__['test']:
        ret['result'] = None
        ret['comment'] = 'Dynamo table {0} will have a GSI added: {1}'.format(
            name, index_name)
        return

    changes_new.setdefault('global_indexes', {})

    # Translate the SLS index config into the boto index definition, then
    # ask AWS to create it on the table.
    index_definition = __salt__['boto_dynamodb.extract_index'](
        gsi_config[index_name], global_index=True)
    created = __salt__['boto_dynamodb.create_global_secondary_index'](
        name,
        index_definition,
        region=region,
        key=key,
        keyid=keyid,
        profile=profile,
    )

    if not created:
        ret['result'] = False
        ret['comment'] = 'Failed to create GSI {0}'.format(index_name)
    else:
        comments.append('Created GSI {0}'.format(index_name))
        changes_new['global_indexes'][index_name] = gsi_config[index_name]
constant[Updates ret iff there was a failure or in test mode.]
if call[name[__opts__]][constant[test]] begin[:]
call[name[ret]][constant[result]] assign[=] constant[None]
call[name[ret]][constant[comment]] assign[=] call[constant[Dynamo table {0} will have a GSI added: {1}].format, parameter[name[name], name[index_name]]]
return[None]
call[name[changes_new].setdefault, parameter[constant[global_indexes], dictionary[[], []]]]
variable[success] assign[=] call[call[name[__salt__]][constant[boto_dynamodb.create_global_secondary_index]], parameter[name[name], call[call[name[__salt__]][constant[boto_dynamodb.extract_index]], parameter[call[name[gsi_config]][name[index_name]]]]]]
if name[success] begin[:]
call[name[comments].append, parameter[call[constant[Created GSI {0}].format, parameter[name[index_name]]]]]
call[call[name[changes_new]][constant[global_indexes]]][name[index_name]] assign[=] call[name[gsi_config]][name[index_name]] | keyword[def] identifier[_add_global_secondary_index] ( identifier[ret] , identifier[name] , identifier[index_name] , identifier[changes_old] , identifier[changes_new] , identifier[comments] ,
identifier[gsi_config] , identifier[region] , identifier[key] , identifier[keyid] , identifier[profile] ):
literal[string]
keyword[if] identifier[__opts__] [ literal[string] ]:
identifier[ret] [ literal[string] ]= keyword[None]
identifier[ret] [ literal[string] ]= literal[string] . identifier[format] (
identifier[name] , identifier[index_name] )
keyword[return]
identifier[changes_new] . identifier[setdefault] ( literal[string] ,{})
identifier[success] = identifier[__salt__] [ literal[string] ](
identifier[name] ,
identifier[__salt__] [ literal[string] ](
identifier[gsi_config] [ identifier[index_name] ], identifier[global_index] = keyword[True] ),
identifier[region] = identifier[region] ,
identifier[key] = identifier[key] ,
identifier[keyid] = identifier[keyid] ,
identifier[profile] = identifier[profile] ,
)
keyword[if] identifier[success] :
identifier[comments] . identifier[append] ( literal[string] . identifier[format] ( identifier[index_name] ))
identifier[changes_new] [ literal[string] ][ identifier[index_name] ]= identifier[gsi_config] [ identifier[index_name] ]
keyword[else] :
identifier[ret] [ literal[string] ]= keyword[False]
identifier[ret] [ literal[string] ]= literal[string] . identifier[format] ( identifier[index_name] ) | def _add_global_secondary_index(ret, name, index_name, changes_old, changes_new, comments, gsi_config, region, key, keyid, profile):
"""Updates ret iff there was a failure or in test mode."""
if __opts__['test']:
ret['result'] = None
ret['comment'] = 'Dynamo table {0} will have a GSI added: {1}'.format(name, index_name)
return # depends on [control=['if'], data=[]]
changes_new.setdefault('global_indexes', {})
success = __salt__['boto_dynamodb.create_global_secondary_index'](name, __salt__['boto_dynamodb.extract_index'](gsi_config[index_name], global_index=True), region=region, key=key, keyid=keyid, profile=profile)
if success:
comments.append('Created GSI {0}'.format(index_name))
changes_new['global_indexes'][index_name] = gsi_config[index_name] # depends on [control=['if'], data=[]]
else:
ret['result'] = False
ret['comment'] = 'Failed to create GSI {0}'.format(index_name) |
def isinstance_(x, A_tuple):
    """Drop-in ``isinstance`` that also understands ``typing.Union`` types."""
    if is_union(A_tuple):
        # A Union matches when any of its member types matches.
        for member in A_tuple.__args__:
            if isinstance_(x, member):
                return True
        return False
    origin = getattr(A_tuple, '__origin__', None)
    if origin is not None:
        # Parameterized generics (e.g. List[int]) are checked via their origin.
        return isinstance(x, origin)
    return isinstance(x, A_tuple)
constant[ native isinstance_ with the test for typing.Union overridden ]
if call[name[is_union], parameter[name[A_tuple]]] begin[:]
return[call[name[any], parameter[<ast.GeneratorExp object at 0x7da20c990160>]]] | keyword[def] identifier[isinstance_] ( identifier[x] , identifier[A_tuple] ):
literal[string]
keyword[if] identifier[is_union] ( identifier[A_tuple] ):
keyword[return] identifier[any] ( identifier[isinstance_] ( identifier[x] , identifier[t] ) keyword[for] identifier[t] keyword[in] identifier[A_tuple] . identifier[__args__] )
keyword[elif] identifier[getattr] ( identifier[A_tuple] , literal[string] , keyword[None] ) keyword[is] keyword[not] keyword[None] :
keyword[return] identifier[isinstance] ( identifier[x] , identifier[A_tuple] . identifier[__origin__] )
keyword[else] :
keyword[return] identifier[isinstance] ( identifier[x] , identifier[A_tuple] ) | def isinstance_(x, A_tuple):
""" native isinstance_ with the test for typing.Union overridden """
if is_union(A_tuple):
return any((isinstance_(x, t) for t in A_tuple.__args__)) # depends on [control=['if'], data=[]]
elif getattr(A_tuple, '__origin__', None) is not None:
return isinstance(x, A_tuple.__origin__) # depends on [control=['if'], data=[]]
else:
return isinstance(x, A_tuple) |
def InputDNAQuantitySplines(seq_length, n_bases=10, name="DNASmoothPosition", **kwargs):
    """Convenience wrapper around keras.layers.Input:
    `Input((seq_length, n_bases), name=name, **kwargs)`
    """
    input_shape = (seq_length, n_bases)
    return Input(input_shape, name=name, **kwargs)
constant[Convenience wrapper around keras.layers.Input:
`Input((seq_length, n_bases), name=name, **kwargs)`
]
return[call[name[Input], parameter[tuple[[<ast.Name object at 0x7da1b031e440>, <ast.Name object at 0x7da1b031d450>]]]]] | keyword[def] identifier[InputDNAQuantitySplines] ( identifier[seq_length] , identifier[n_bases] = literal[int] , identifier[name] = literal[string] ,** identifier[kwargs] ):
literal[string]
keyword[return] identifier[Input] (( identifier[seq_length] , identifier[n_bases] ), identifier[name] = identifier[name] ,** identifier[kwargs] ) | def InputDNAQuantitySplines(seq_length, n_bases=10, name='DNASmoothPosition', **kwargs):
"""Convenience wrapper around keras.layers.Input:
`Input((seq_length, n_bases), name=name, **kwargs)`
"""
return Input((seq_length, n_bases), name=name, **kwargs) |
def get_permissions_assignments(self, obj=None, permission=None):
    """Return permission assignments, grouped by permission.

    :param obj: restrict the query to assignments attached to this
        :class:`Entity`; ``None`` (or an entity without an id yet) selects
        the global assignments.
    :param permission: return only roles having this permission
    :returns: a dict where keys are `permissions` and values are `roles`
        sets.
    """
    session = None

    if obj is not None:
        assert isinstance(obj, Entity)
        session = object_session(obj)
        # An entity without an id is not persisted yet, so it cannot have
        # assignments of its own; fall back to the global (None) scope.
        if obj.id is None:
            obj = None

    if session is None:
        # No session attached to obj (or no obj given): use the app session.
        session = db.session()

    pa = session.query(
        PermissionAssignment.permission, PermissionAssignment.role
    ).filter(PermissionAssignment.object == obj)

    if permission:
        pa = pa.filter(PermissionAssignment.permission == permission)

    results = {}
    # yield_per streams rows in batches of 1000 to bound memory usage.
    # The loop variable shadows the `permission` argument, which is not
    # needed past this point.
    for permission, role in pa.yield_per(1000):
        results.setdefault(permission, set()).add(role)

    return results
constant[
:param permission: return only roles having this permission
:returns: an dict where keys are `permissions` and values `roles` iterable.
]
variable[session] assign[=] constant[None]
if compare[name[obj] is_not constant[None]] begin[:]
assert[call[name[isinstance], parameter[name[obj], name[Entity]]]]
variable[session] assign[=] call[name[object_session], parameter[name[obj]]]
if compare[name[obj].id is constant[None]] begin[:]
variable[obj] assign[=] constant[None]
if compare[name[session] is constant[None]] begin[:]
variable[session] assign[=] call[name[db].session, parameter[]]
variable[pa] assign[=] call[call[name[session].query, parameter[name[PermissionAssignment].permission, name[PermissionAssignment].role]].filter, parameter[compare[name[PermissionAssignment].object equal[==] name[obj]]]]
if name[permission] begin[:]
variable[pa] assign[=] call[name[pa].filter, parameter[compare[name[PermissionAssignment].permission equal[==] name[permission]]]]
variable[results] assign[=] dictionary[[], []]
for taget[tuple[[<ast.Name object at 0x7da1b1913b80>, <ast.Name object at 0x7da1b1912500>]]] in starred[call[name[pa].yield_per, parameter[constant[1000]]]] begin[:]
call[call[name[results].setdefault, parameter[name[permission], call[name[set], parameter[]]]].add, parameter[name[role]]]
return[name[results]] | keyword[def] identifier[get_permissions_assignments] ( identifier[self] , identifier[obj] = keyword[None] , identifier[permission] = keyword[None] ):
literal[string]
identifier[session] = keyword[None]
keyword[if] identifier[obj] keyword[is] keyword[not] keyword[None] :
keyword[assert] identifier[isinstance] ( identifier[obj] , identifier[Entity] )
identifier[session] = identifier[object_session] ( identifier[obj] )
keyword[if] identifier[obj] . identifier[id] keyword[is] keyword[None] :
identifier[obj] = keyword[None]
keyword[if] identifier[session] keyword[is] keyword[None] :
identifier[session] = identifier[db] . identifier[session] ()
identifier[pa] = identifier[session] . identifier[query] (
identifier[PermissionAssignment] . identifier[permission] , identifier[PermissionAssignment] . identifier[role]
). identifier[filter] ( identifier[PermissionAssignment] . identifier[object] == identifier[obj] )
keyword[if] identifier[permission] :
identifier[pa] = identifier[pa] . identifier[filter] ( identifier[PermissionAssignment] . identifier[permission] == identifier[permission] )
identifier[results] ={}
keyword[for] identifier[permission] , identifier[role] keyword[in] identifier[pa] . identifier[yield_per] ( literal[int] ):
identifier[results] . identifier[setdefault] ( identifier[permission] , identifier[set] ()). identifier[add] ( identifier[role] )
keyword[return] identifier[results] | def get_permissions_assignments(self, obj=None, permission=None):
"""
:param permission: return only roles having this permission
:returns: an dict where keys are `permissions` and values `roles` iterable.
"""
session = None
if obj is not None:
assert isinstance(obj, Entity)
session = object_session(obj)
if obj.id is None:
obj = None # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['obj']]
if session is None:
session = db.session() # depends on [control=['if'], data=['session']]
pa = session.query(PermissionAssignment.permission, PermissionAssignment.role).filter(PermissionAssignment.object == obj)
if permission:
pa = pa.filter(PermissionAssignment.permission == permission) # depends on [control=['if'], data=[]]
results = {}
for (permission, role) in pa.yield_per(1000):
results.setdefault(permission, set()).add(role) # depends on [control=['for'], data=[]]
return results |
def resolve(self, host: str) -> ResolveResult:
    '''Resolve hostname.

    Args:
        host: Hostname.

    Returns:
        Resolved IP addresses.

    Raises:
        DNSNotFound if the hostname could not be resolved or
        NetworkError if there was an error connecting to DNS servers.

    Coroutine.
    '''
    _logger.debug(__('Lookup address {0}.', host))

    # Let a plugin hook rewrite the hostname first; keep the original on a
    # falsy hook result or when no hook is attached.
    try:
        host = self.hook_dispatcher.call(PluginFunctions.resolve_dns, host
            ) or host
    except HookDisconnected:
        pass

    # Cache entries are keyed on (hostname, address-family preference).
    cache_key = (host, self._family)

    if self._cache and cache_key in self._cache:
        resolve_result = self._cache[cache_key]
        _logger.debug(__('Return by cache {0}.', resolve_result))

        if self._rotate:
            # rotate() presumably advances to the next cached address —
            # confirm against ResolveResult.
            resolve_result.rotate()

        return resolve_result

    address_infos = []
    dns_infos = []

    # Decide which DNS query families to try with the python DNS resolver.
    if not self.dns_python_enabled:
        families = ()
    elif self._family == IPFamilyPreference.any:
        families = (socket.AF_INET, socket.AF_INET6)
    elif self._family == IPFamilyPreference.ipv4_only:
        families = (socket.AF_INET, )
    else:
        families = (socket.AF_INET6, )

    # Query each family separately; keep both the parsed addresses and the
    # raw, timestamped DNS answer records.
    for family in families:
        datetime_now = datetime.datetime.utcnow()
        try:
            answer = yield from self._query_dns(host, family)
        except DNSNotFound:
            continue
        else:
            dns_infos.append(DNSInfo(datetime_now, answer.response.answer))
            address_infos.extend(self._convert_dns_answer(answer))

    if not address_infos:
        # Maybe the address is defined in hosts file or mDNS; fall back to
        # the OS resolver (getaddrinfo).
        if self._family == IPFamilyPreference.any:
            family = socket.AF_UNSPEC
        elif self._family == IPFamilyPreference.ipv4_only:
            family = socket.AF_INET
        else:
            family = socket.AF_INET6

        results = yield from self._getaddrinfo(host, family)
        address_infos.extend(self._convert_addrinfo(results))

    _logger.debug(__('Resolved addresses: {0}.', address_infos))

    resolve_result = ResolveResult(address_infos, dns_infos)

    if self._cache:
        self._cache[cache_key] = resolve_result

    # Notify observers (e.g. WARC recorders) of the final resolution.
    self.event_dispatcher.notify(PluginFunctions.resolve_dns_result, host, resolve_result)

    if self._rotate:
        # shuffle() presumably randomizes address order to spread load
        # across records — confirm against ResolveResult.
        resolve_result.shuffle()

    return resolve_result
constant[Resolve hostname.
Args:
host: Hostname.
Returns:
Resolved IP addresses.
Raises:
DNSNotFound if the hostname could not be resolved or
NetworkError if there was an error connecting to DNS servers.
Coroutine.
]
call[name[_logger].debug, parameter[call[name[__], parameter[constant[Lookup address {0}.], name[host]]]]]
<ast.Try object at 0x7da204344d90>
variable[cache_key] assign[=] tuple[[<ast.Name object at 0x7da204346bc0>, <ast.Attribute object at 0x7da204345450>]]
if <ast.BoolOp object at 0x7da204347b20> begin[:]
variable[resolve_result] assign[=] call[name[self]._cache][name[cache_key]]
call[name[_logger].debug, parameter[call[name[__], parameter[constant[Return by cache {0}.], name[resolve_result]]]]]
if name[self]._rotate begin[:]
call[name[resolve_result].rotate, parameter[]]
return[name[resolve_result]]
variable[address_infos] assign[=] list[[]]
variable[dns_infos] assign[=] list[[]]
if <ast.UnaryOp object at 0x7da204345720> begin[:]
variable[families] assign[=] tuple[[]]
for taget[name[family]] in starred[name[families]] begin[:]
variable[datetime_now] assign[=] call[name[datetime].datetime.utcnow, parameter[]]
<ast.Try object at 0x7da204345ae0>
if <ast.UnaryOp object at 0x7da20cabcee0> begin[:]
if compare[name[self]._family equal[==] name[IPFamilyPreference].any] begin[:]
variable[family] assign[=] name[socket].AF_UNSPEC
variable[results] assign[=] <ast.YieldFrom object at 0x7da1b2345840>
call[name[address_infos].extend, parameter[call[name[self]._convert_addrinfo, parameter[name[results]]]]]
call[name[_logger].debug, parameter[call[name[__], parameter[constant[Resolved addresses: {0}.], name[address_infos]]]]]
variable[resolve_result] assign[=] call[name[ResolveResult], parameter[name[address_infos], name[dns_infos]]]
if name[self]._cache begin[:]
call[name[self]._cache][name[cache_key]] assign[=] name[resolve_result]
call[name[self].event_dispatcher.notify, parameter[name[PluginFunctions].resolve_dns_result, name[host], name[resolve_result]]]
if name[self]._rotate begin[:]
call[name[resolve_result].shuffle, parameter[]]
return[name[resolve_result]] | keyword[def] identifier[resolve] ( identifier[self] , identifier[host] : identifier[str] )-> identifier[ResolveResult] :
literal[string]
identifier[_logger] . identifier[debug] ( identifier[__] ( literal[string] , identifier[host] ))
keyword[try] :
identifier[host] = identifier[self] . identifier[hook_dispatcher] . identifier[call] ( identifier[PluginFunctions] . identifier[resolve_dns] , identifier[host]
) keyword[or] identifier[host]
keyword[except] identifier[HookDisconnected] :
keyword[pass]
identifier[cache_key] =( identifier[host] , identifier[self] . identifier[_family] )
keyword[if] identifier[self] . identifier[_cache] keyword[and] identifier[cache_key] keyword[in] identifier[self] . identifier[_cache] :
identifier[resolve_result] = identifier[self] . identifier[_cache] [ identifier[cache_key] ]
identifier[_logger] . identifier[debug] ( identifier[__] ( literal[string] , identifier[resolve_result] ))
keyword[if] identifier[self] . identifier[_rotate] :
identifier[resolve_result] . identifier[rotate] ()
keyword[return] identifier[resolve_result]
identifier[address_infos] =[]
identifier[dns_infos] =[]
keyword[if] keyword[not] identifier[self] . identifier[dns_python_enabled] :
identifier[families] =()
keyword[elif] identifier[self] . identifier[_family] == identifier[IPFamilyPreference] . identifier[any] :
identifier[families] =( identifier[socket] . identifier[AF_INET] , identifier[socket] . identifier[AF_INET6] )
keyword[elif] identifier[self] . identifier[_family] == identifier[IPFamilyPreference] . identifier[ipv4_only] :
identifier[families] =( identifier[socket] . identifier[AF_INET] ,)
keyword[else] :
identifier[families] =( identifier[socket] . identifier[AF_INET6] ,)
keyword[for] identifier[family] keyword[in] identifier[families] :
identifier[datetime_now] = identifier[datetime] . identifier[datetime] . identifier[utcnow] ()
keyword[try] :
identifier[answer] = keyword[yield] keyword[from] identifier[self] . identifier[_query_dns] ( identifier[host] , identifier[family] )
keyword[except] identifier[DNSNotFound] :
keyword[continue]
keyword[else] :
identifier[dns_infos] . identifier[append] ( identifier[DNSInfo] ( identifier[datetime_now] , identifier[answer] . identifier[response] . identifier[answer] ))
identifier[address_infos] . identifier[extend] ( identifier[self] . identifier[_convert_dns_answer] ( identifier[answer] ))
keyword[if] keyword[not] identifier[address_infos] :
keyword[if] identifier[self] . identifier[_family] == identifier[IPFamilyPreference] . identifier[any] :
identifier[family] = identifier[socket] . identifier[AF_UNSPEC]
keyword[elif] identifier[self] . identifier[_family] == identifier[IPFamilyPreference] . identifier[ipv4_only] :
identifier[family] = identifier[socket] . identifier[AF_INET]
keyword[else] :
identifier[family] = identifier[socket] . identifier[AF_INET6]
identifier[results] = keyword[yield] keyword[from] identifier[self] . identifier[_getaddrinfo] ( identifier[host] , identifier[family] )
identifier[address_infos] . identifier[extend] ( identifier[self] . identifier[_convert_addrinfo] ( identifier[results] ))
identifier[_logger] . identifier[debug] ( identifier[__] ( literal[string] , identifier[address_infos] ))
identifier[resolve_result] = identifier[ResolveResult] ( identifier[address_infos] , identifier[dns_infos] )
keyword[if] identifier[self] . identifier[_cache] :
identifier[self] . identifier[_cache] [ identifier[cache_key] ]= identifier[resolve_result]
identifier[self] . identifier[event_dispatcher] . identifier[notify] ( identifier[PluginFunctions] . identifier[resolve_dns_result] , identifier[host] , identifier[resolve_result] )
keyword[if] identifier[self] . identifier[_rotate] :
identifier[resolve_result] . identifier[shuffle] ()
keyword[return] identifier[resolve_result] | def resolve(self, host: str) -> ResolveResult:
"""Resolve hostname.
Args:
host: Hostname.
Returns:
Resolved IP addresses.
Raises:
DNSNotFound if the hostname could not be resolved or
NetworkError if there was an error connecting to DNS servers.
Coroutine.
"""
_logger.debug(__('Lookup address {0}.', host))
try:
host = self.hook_dispatcher.call(PluginFunctions.resolve_dns, host) or host # depends on [control=['try'], data=[]]
except HookDisconnected:
pass # depends on [control=['except'], data=[]]
cache_key = (host, self._family)
if self._cache and cache_key in self._cache:
resolve_result = self._cache[cache_key]
_logger.debug(__('Return by cache {0}.', resolve_result))
if self._rotate:
resolve_result.rotate() # depends on [control=['if'], data=[]]
return resolve_result # depends on [control=['if'], data=[]]
address_infos = []
dns_infos = []
if not self.dns_python_enabled:
families = () # depends on [control=['if'], data=[]]
elif self._family == IPFamilyPreference.any:
families = (socket.AF_INET, socket.AF_INET6) # depends on [control=['if'], data=[]]
elif self._family == IPFamilyPreference.ipv4_only:
families = (socket.AF_INET,) # depends on [control=['if'], data=[]]
else:
families = (socket.AF_INET6,)
for family in families:
datetime_now = datetime.datetime.utcnow()
try:
answer = (yield from self._query_dns(host, family)) # depends on [control=['try'], data=[]]
except DNSNotFound:
continue # depends on [control=['except'], data=[]]
else:
dns_infos.append(DNSInfo(datetime_now, answer.response.answer))
address_infos.extend(self._convert_dns_answer(answer)) # depends on [control=['for'], data=['family']]
if not address_infos:
# Maybe the address is defined in hosts file or mDNS
if self._family == IPFamilyPreference.any:
family = socket.AF_UNSPEC # depends on [control=['if'], data=[]]
elif self._family == IPFamilyPreference.ipv4_only:
family = socket.AF_INET # depends on [control=['if'], data=[]]
else:
family = socket.AF_INET6
results = (yield from self._getaddrinfo(host, family))
address_infos.extend(self._convert_addrinfo(results)) # depends on [control=['if'], data=[]]
_logger.debug(__('Resolved addresses: {0}.', address_infos))
resolve_result = ResolveResult(address_infos, dns_infos)
if self._cache:
self._cache[cache_key] = resolve_result # depends on [control=['if'], data=[]]
self.event_dispatcher.notify(PluginFunctions.resolve_dns_result, host, resolve_result)
if self._rotate:
resolve_result.shuffle() # depends on [control=['if'], data=[]]
return resolve_result |
def imagearm(sdmfile, scan, segment, npix=512, res=50, **kwargs):
    """ Function to do end-to-end 1d, arm-based imaging.

    Images the west, east, and north arms separately: baselines whose two
    antennas sit on the same arm are gridded onto a 1d uv grid and inverse
    Fourier transformed.

    :param sdmfile: path to the SDM data set
    :param scan: scan number to image
    :param segment: data segment to reproduce
    :param npix: number of 1d grid pixels per arm image
    :param res: uv grid resolution (same units as ps.get_uvw_segment output;
        presumably wavelengths -- TODO confirm)
    :returns: list of three (nints, npix) float arrays, one per arm
        (west, east, north)
    """
    import sdmpy

    # Map station ids to antenna numbers, keeping only stations whose names
    # identify an arm ('X' names are excluded).
    sdm = sdmpy.SDM(sdmfile)
    ants = {ant.stationId: ant.name for ant in sdm['Antenna']}
    stations = {st.stationId: st.name for st in sdm['Station'] if 'X' not in str(st.name)}
    west = [int(str(ants[st]).lstrip('ea')) for st in stations if 'W' in str(stations[st])]
    east = [int(str(ants[st]).lstrip('ea')) for st in stations if 'E' in str(stations[st])]
    north = [int(str(ants[st]).lstrip('ea')) for st in stations if 'N' in str(stations[st])]

    # Select baseline indices where both antennas are on the same arm.
    d = set_pipeline(sdmfile, scan, **kwargs)
    blarr = rtlib.calc_blarr(d)
    selwest = [i for i in range(len(blarr)) if all([b in west for b in blarr[i]])]
    seleast = [i for i in range(len(blarr)) if all([b in east for b in blarr[i]])]
    selnorth = [i for i in range(len(blarr)) if all([b in north for b in blarr[i]])]

    u, v, w = ps.get_uvw_segment(d, segment=segment)
    data = pipeline_reproduce(d, segment=segment, product='data')

    # Average over channels (axis 2) and polarizations (axis 3) per arm.
    dataw = data[:, selwest].mean(axis=3).mean(axis=2)
    datae = data[:, seleast].mean(axis=3).mean(axis=2)
    datan = data[:, selnorth].mean(axis=3).mean(axis=2)

    uw, ue, un = u[selwest], u[seleast], u[selnorth]
    vw, ve, vn = v[selwest], v[seleast], v[selnorth]

    grid = n.zeros((len(data), npix), dtype='complex64')
    datalist = []
    for (uu, vv, dd) in [(uw, vw, dataw), (ue, ve, datae), (un, vn, datan)]:
        # NOTE(review): grid is shared across arms without being zeroed, so
        # each arm's image also contains the previous arm's visibilities in
        # bins this arm does not fill -- confirm whether that is intended.
        uu = n.mod(uu / res, npix)
        vv = n.mod(vv / res, npix)
        uv = n.round(n.sqrt(uu ** 2 + vv ** 2)).astype(int)
        for i in range(len(uv)):
            # Bound check was hard-coded to 512; it must track npix so that
            # other grid sizes index the grid correctly.
            if uv[i] < npix:
                grid[:, uv[i]] = dd[:, i]
        datalist.append(n.fft.ifft(grid, axis=1).real)

    return datalist
constant[ Function to do end-to-end 1d, arm-based imaging ]
import module[sdmpy]
variable[sdm] assign[=] call[name[sdmpy].SDM, parameter[name[sdmfile]]]
variable[ants] assign[=] <ast.DictComp object at 0x7da1b248c850>
variable[stations] assign[=] <ast.DictComp object at 0x7da1b248c160>
variable[west] assign[=] <ast.ListComp object at 0x7da1b248f6d0>
variable[east] assign[=] <ast.ListComp object at 0x7da1b248ead0>
variable[north] assign[=] <ast.ListComp object at 0x7da1b248eef0>
variable[d] assign[=] call[name[set_pipeline], parameter[name[sdmfile], name[scan]]]
variable[blarr] assign[=] call[name[rtlib].calc_blarr, parameter[name[d]]]
variable[selwest] assign[=] <ast.ListComp object at 0x7da1b251ada0>
variable[seleast] assign[=] <ast.ListComp object at 0x7da1b2480dc0>
variable[selnorth] assign[=] <ast.ListComp object at 0x7da1b24807c0>
<ast.Tuple object at 0x7da1b2480640> assign[=] call[name[ps].get_uvw_segment, parameter[name[d]]]
variable[data] assign[=] call[name[pipeline_reproduce], parameter[name[d]]]
variable[dataw] assign[=] call[call[call[name[data]][tuple[[<ast.Slice object at 0x7da1b2481690>, <ast.Name object at 0x7da1b2481570>]]].mean, parameter[]].mean, parameter[]]
variable[datae] assign[=] call[call[call[name[data]][tuple[[<ast.Slice object at 0x7da1b24805e0>, <ast.Name object at 0x7da1b2481900>]]].mean, parameter[]].mean, parameter[]]
variable[datan] assign[=] call[call[call[name[data]][tuple[[<ast.Slice object at 0x7da1b24812a0>, <ast.Name object at 0x7da1b2481390>]]].mean, parameter[]].mean, parameter[]]
variable[uw] assign[=] call[name[u]][name[selwest]]
variable[ue] assign[=] call[name[u]][name[seleast]]
variable[un] assign[=] call[name[u]][name[selnorth]]
variable[vw] assign[=] call[name[v]][name[selwest]]
variable[ve] assign[=] call[name[v]][name[seleast]]
variable[vn] assign[=] call[name[v]][name[selnorth]]
variable[grid] assign[=] call[name[n].zeros, parameter[tuple[[<ast.Call object at 0x7da1b25b0d60>, <ast.Name object at 0x7da1b244c4f0>]]]]
variable[grid2] assign[=] call[name[n].zeros, parameter[tuple[[<ast.Call object at 0x7da1b244c430>, <ast.Name object at 0x7da1b244c190>]]]]
variable[datalist] assign[=] list[[]]
for taget[tuple[[<ast.Name object at 0x7da1b244c670>, <ast.Name object at 0x7da1b244c550>, <ast.Name object at 0x7da1b244c3a0>]]] in starred[list[[<ast.Tuple object at 0x7da1b244c070>, <ast.Tuple object at 0x7da1b244c3d0>, <ast.Tuple object at 0x7da1b244c1f0>]]] begin[:]
variable[uu] assign[=] call[name[n].mod, parameter[binary_operation[name[uu] / name[res]], name[npix]]]
variable[vv] assign[=] call[name[n].mod, parameter[binary_operation[name[vv] / name[res]], name[npix]]]
variable[uv] assign[=] call[name[n].sqrt, parameter[binary_operation[binary_operation[name[uu] ** constant[2]] + binary_operation[name[vv] ** constant[2]]]]]
variable[uv] assign[=] call[call[name[n].round, parameter[name[uv]]].astype, parameter[name[int]]]
for taget[name[i]] in starred[call[name[range], parameter[call[name[len], parameter[name[uv]]]]]] begin[:]
if compare[call[name[uv]][name[i]] less[<] constant[512]] begin[:]
call[name[grid]][tuple[[<ast.Slice object at 0x7da1b24ad450>, <ast.Subscript object at 0x7da1b24ad4b0>]]] assign[=] call[name[dd]][tuple[[<ast.Slice object at 0x7da1b24ad360>, <ast.Name object at 0x7da1b24ad390>]]]
variable[grid2] assign[=] call[name[n].fft.ifft, parameter[name[grid]]].real
call[name[datalist].append, parameter[name[grid2]]]
return[name[datalist]] | keyword[def] identifier[imagearm] ( identifier[sdmfile] , identifier[scan] , identifier[segment] , identifier[npix] = literal[int] , identifier[res] = literal[int] ,** identifier[kwargs] ):
literal[string]
keyword[import] identifier[sdmpy]
identifier[sdm] = identifier[sdmpy] . identifier[SDM] ( identifier[sdmfile] )
identifier[ants] ={ identifier[ant] . identifier[stationId] : identifier[ant] . identifier[name] keyword[for] identifier[ant] keyword[in] identifier[sdm] [ literal[string] ]}
identifier[stations] ={ identifier[st] . identifier[stationId] : identifier[st] . identifier[name] keyword[for] identifier[st] keyword[in] identifier[sdm] [ literal[string] ] keyword[if] literal[string] keyword[not] keyword[in] identifier[str] ( identifier[st] . identifier[name] )}
identifier[west] =[ identifier[int] ( identifier[str] ( identifier[ants] [ identifier[st] ]). identifier[lstrip] ( literal[string] )) keyword[for] identifier[st] keyword[in] identifier[stations] keyword[if] literal[string] keyword[in] identifier[str] ( identifier[stations] [ identifier[st] ])]
identifier[east] =[ identifier[int] ( identifier[str] ( identifier[ants] [ identifier[st] ]). identifier[lstrip] ( literal[string] )) keyword[for] identifier[st] keyword[in] identifier[stations] keyword[if] literal[string] keyword[in] identifier[str] ( identifier[stations] [ identifier[st] ])]
identifier[north] =[ identifier[int] ( identifier[str] ( identifier[ants] [ identifier[st] ]). identifier[lstrip] ( literal[string] )) keyword[for] identifier[st] keyword[in] identifier[stations] keyword[if] literal[string] keyword[in] identifier[str] ( identifier[stations] [ identifier[st] ])]
identifier[d] = identifier[set_pipeline] ( identifier[sdmfile] , identifier[scan] ,** identifier[kwargs] )
identifier[blarr] = identifier[rtlib] . identifier[calc_blarr] ( identifier[d] )
identifier[selwest] =[ identifier[i] keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[len] ( identifier[blarr] )) keyword[if] identifier[all] ([ identifier[b] keyword[in] identifier[west] keyword[for] identifier[b] keyword[in] identifier[blarr] [ identifier[i] ]])]
identifier[seleast] =[ identifier[i] keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[len] ( identifier[blarr] )) keyword[if] identifier[all] ([ identifier[b] keyword[in] identifier[east] keyword[for] identifier[b] keyword[in] identifier[blarr] [ identifier[i] ]])]
identifier[selnorth] =[ identifier[i] keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[len] ( identifier[blarr] )) keyword[if] identifier[all] ([ identifier[b] keyword[in] identifier[north] keyword[for] identifier[b] keyword[in] identifier[blarr] [ identifier[i] ]])]
identifier[u] , identifier[v] , identifier[w] = identifier[ps] . identifier[get_uvw_segment] ( identifier[d] , identifier[segment] = identifier[segment] )
identifier[data] = identifier[pipeline_reproduce] ( identifier[d] , identifier[segment] = identifier[segment] , identifier[product] = literal[string] )
identifier[dataw] = identifier[data] [:, identifier[selwest] ]. identifier[mean] ( identifier[axis] = literal[int] ). identifier[mean] ( identifier[axis] = literal[int] )
identifier[datae] = identifier[data] [:, identifier[seleast] ]. identifier[mean] ( identifier[axis] = literal[int] ). identifier[mean] ( identifier[axis] = literal[int] )
identifier[datan] = identifier[data] [:, identifier[selnorth] ]. identifier[mean] ( identifier[axis] = literal[int] ). identifier[mean] ( identifier[axis] = literal[int] )
identifier[uw] = identifier[u] [ identifier[selwest] ]
identifier[ue] = identifier[u] [ identifier[seleast] ]
identifier[un] = identifier[u] [ identifier[selnorth] ]
identifier[vw] = identifier[v] [ identifier[selwest] ]
identifier[ve] = identifier[v] [ identifier[seleast] ]
identifier[vn] = identifier[v] [ identifier[selnorth] ]
identifier[grid] = identifier[n] . identifier[zeros] (( identifier[len] ( identifier[data] ), identifier[npix] ), identifier[dtype] = literal[string] )
identifier[grid2] = identifier[n] . identifier[zeros] (( identifier[len] ( identifier[data] ), identifier[npix] ), identifier[dtype] = literal[string] )
identifier[datalist] =[]
keyword[for] ( identifier[uu] , identifier[vv] , identifier[dd] ) keyword[in] [( identifier[uw] , identifier[vw] , identifier[dataw] ),( identifier[ue] , identifier[ve] , identifier[datae] ),( identifier[un] , identifier[vn] , identifier[datan] )]:
identifier[uu] = identifier[n] . identifier[mod] ( identifier[uu] / identifier[res] , identifier[npix] )
identifier[vv] = identifier[n] . identifier[mod] ( identifier[vv] / identifier[res] , identifier[npix] )
identifier[uv] = identifier[n] . identifier[sqrt] ( identifier[uu] ** literal[int] + identifier[vv] ** literal[int] )
identifier[uv] = identifier[n] . identifier[round] ( identifier[uv] ). identifier[astype] ( identifier[int] )
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[len] ( identifier[uv] )):
keyword[if] identifier[uv] [ identifier[i] ]< literal[int] :
identifier[grid] [:, identifier[uv] [ identifier[i] ]]= identifier[dd] [:, identifier[i] ]
identifier[grid2] = identifier[n] . identifier[fft] . identifier[ifft] ( identifier[grid] , identifier[axis] = literal[int] ). identifier[real]
identifier[datalist] . identifier[append] ( identifier[grid2] )
keyword[return] identifier[datalist] | def imagearm(sdmfile, scan, segment, npix=512, res=50, **kwargs):
""" Function to do end-to-end 1d, arm-based imaging """
import sdmpy
sdm = sdmpy.SDM(sdmfile)
ants = {ant.stationId: ant.name for ant in sdm['Antenna']}
stations = {st.stationId: st.name for st in sdm['Station'] if 'X' not in str(st.name)}
west = [int(str(ants[st]).lstrip('ea')) for st in stations if 'W' in str(stations[st])]
east = [int(str(ants[st]).lstrip('ea')) for st in stations if 'E' in str(stations[st])]
north = [int(str(ants[st]).lstrip('ea')) for st in stations if 'N' in str(stations[st])]
d = set_pipeline(sdmfile, scan, **kwargs)
blarr = rtlib.calc_blarr(d)
selwest = [i for i in range(len(blarr)) if all([b in west for b in blarr[i]])]
seleast = [i for i in range(len(blarr)) if all([b in east for b in blarr[i]])]
selnorth = [i for i in range(len(blarr)) if all([b in north for b in blarr[i]])]
(u, v, w) = ps.get_uvw_segment(d, segment=segment)
data = pipeline_reproduce(d, segment=segment, product='data')
dataw = data[:, selwest].mean(axis=3).mean(axis=2)
datae = data[:, seleast].mean(axis=3).mean(axis=2)
datan = data[:, selnorth].mean(axis=3).mean(axis=2)
uw = u[selwest]
ue = u[seleast]
un = u[selnorth]
vw = v[selwest]
ve = v[seleast]
vn = v[selnorth]
grid = n.zeros((len(data), npix), dtype='complex64')
grid2 = n.zeros((len(data), npix), dtype='float32')
datalist = []
for (uu, vv, dd) in [(uw, vw, dataw), (ue, ve, datae), (un, vn, datan)]:
# uu = n.round(uu/res).astype(int)
# vv = n.round(vv/res).astype(int)
uu = n.mod(uu / res, npix)
vv = n.mod(vv / res, npix)
uv = n.sqrt(uu ** 2 + vv ** 2)
uv = n.round(uv).astype(int)
for i in range(len(uv)):
if uv[i] < 512:
grid[:, uv[i]] = dd[:, i] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['i']]
grid2 = n.fft.ifft(grid, axis=1).real
datalist.append(grid2) # depends on [control=['for'], data=[]]
return datalist |
def get_dc_inventory(pbclient, dc=None):
    '''Collect an inventory of one data center as a list of rows.

    Each returned row is a flat list starting with the data-center columns
    (id, name, location) followed by either server-specific or
    volume-specific columns, so servers and volumes share one table.

    :param pbclient: API client providing list_servers()/list_volumes();
        must not be None
    :param dc: data-center dict with 'id' and 'properties' entries;
        must not be None
    :return: list of inventory rows (lists)
    :raises ValueError: if pbclient or dc is None

    NOTE(review): reads a module-level ``verbose`` flag that is not defined
    in this function -- confirm it is always bound before calling.
    '''
    if pbclient is None:
        raise ValueError("argument 'pbclient' must not be None")
    if dc is None:
        raise ValueError("argument 'dc' must not be None")
    dc_inv = []  # inventory list to return
    dcid = dc['id']
    # dc_data contains dc specific columns, prepended to every row
    dc_data = [dcid, dc['properties']['name'], dc['properties']['location']]
    # first get the servers
    # depth 3 is enough to get into volume/nic level plus details
    servers = pbclient.list_servers(dcid, 3)
    print("found %i servers in data center %s" % (len(servers['items']), dc['properties']['name']))
    if verbose > 2:
        print(str(servers))
    # this hash relates volumes to servers so the volume rows below can
    # report which server each volume is attached to
    bound_vols = dict()  # hash volume-to-server relations
    for server in servers['items']:
        if verbose > 2:
            print("SERVER: %s" % str(server))
        serverid = server['id']
        # server_data contains server specific columns for later output
        server_data = [
            server['type'], serverid, server['properties']['name'],
            server['metadata']['state']
        ]
        # OS is determined by boot device (volume||cdrom), not a server property.
        # Might even be unspecified
        bootOS = "NONE"
        bootdev = server['properties']['bootVolume']
        if bootdev is None:
            # fall back to CDROM boot when no boot volume is configured
            bootdev = server['properties']['bootCdrom']
            print("server %s has boot device %s" % (serverid, "CDROM"))
        if bootdev is None:
            print("server %s has NO boot device" % (serverid))
        else:
            bootOS = bootdev['properties']['licenceType']
        server_data += [bootOS, server['properties']['cores'], server['properties']['ram']]
        server_vols = server['entities']['volumes']['items']
        n_volumes = len(server_vols)
        total_disk = 0
        licence_type = ""
        # NOTE(review): licence_type is overwritten each iteration, so it
        # ends up holding only the LAST volume's licence type.
        for vol in server_vols:
            total_disk += vol['properties']['size']
            licence_type = str(vol['properties']['licenceType'])
            bound_vols[vol['id']] = serverid
            if verbose:
                print("volume %s is connected to %s w/ OS %s" % (
                    vol['id'], bound_vols[vol['id']], licence_type))
        server_nics = server['entities']['nics']['items']
        n_nics = len(server_nics)
        server_data += [
            n_nics, n_volumes, total_disk, "",
            server['metadata']['createdDate'], server['metadata']['lastModifiedDate']
        ]
        dc_inv.append(dc_data + server_data)
    # end for(servers)
    # and now the volumes...
    volumes = pbclient.list_volumes(dcid, 2)  # depth 2 gives max. details
    for volume in volumes['items']:
        if verbose > 2:
            print("VOLUME: %s" % str(volume))
        volid = volume['id']
        # volume rows pad the server-only columns with empty strings so the
        # column layout matches the server rows above
        vol_data = [
            volume['type'], volid, volume['properties']['name'], volume['metadata']['state'],
            volume['properties']['licenceType'], "", "", "", "", volume['properties']['size']
        ]
        connect = 'NONE'
        if volid in bound_vols:
            connect = bound_vols[volid]
        vol_data += [
            connect, volume['metadata']['createdDate'], volume['metadata']['lastModifiedDate']
        ]
        dc_inv.append(dc_data + vol_data)
    # end for(volumes)
return dc_inv | def function[get_dc_inventory, parameter[pbclient, dc]]:
constant[ gets inventory of one data center]
if compare[name[pbclient] is constant[None]] begin[:]
<ast.Raise object at 0x7da1b00f6920>
if compare[name[dc] is constant[None]] begin[:]
<ast.Raise object at 0x7da1b00f6b90>
variable[dc_inv] assign[=] list[[]]
variable[dcid] assign[=] call[name[dc]][constant[id]]
variable[dc_data] assign[=] list[[<ast.Name object at 0x7da1b00f49a0>, <ast.Subscript object at 0x7da1b00f6d10>, <ast.Subscript object at 0x7da1b00f5cc0>]]
variable[servers] assign[=] call[name[pbclient].list_servers, parameter[name[dcid], constant[3]]]
call[name[print], parameter[binary_operation[constant[found %i servers in data center %s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Call object at 0x7da1b00f5270>, <ast.Subscript object at 0x7da1b00f52a0>]]]]]
if compare[name[verbose] greater[>] constant[2]] begin[:]
call[name[print], parameter[call[name[str], parameter[name[servers]]]]]
variable[bound_vols] assign[=] call[name[dict], parameter[]]
for taget[name[server]] in starred[call[name[servers]][constant[items]]] begin[:]
if compare[name[verbose] greater[>] constant[2]] begin[:]
call[name[print], parameter[binary_operation[constant[SERVER: %s] <ast.Mod object at 0x7da2590d6920> call[name[str], parameter[name[server]]]]]]
variable[serverid] assign[=] call[name[server]][constant[id]]
variable[server_data] assign[=] list[[<ast.Subscript object at 0x7da1b00f6800>, <ast.Name object at 0x7da1b00f42e0>, <ast.Subscript object at 0x7da1b00f6c80>, <ast.Subscript object at 0x7da1b00f4b50>]]
variable[bootOS] assign[=] constant[NONE]
variable[bootdev] assign[=] call[call[name[server]][constant[properties]]][constant[bootVolume]]
if compare[name[bootdev] is constant[None]] begin[:]
variable[bootdev] assign[=] call[call[name[server]][constant[properties]]][constant[bootCdrom]]
call[name[print], parameter[binary_operation[constant[server %s has boot device %s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1b00f59f0>, <ast.Constant object at 0x7da1b00f7e50>]]]]]
if compare[name[bootdev] is constant[None]] begin[:]
call[name[print], parameter[binary_operation[constant[server %s has NO boot device] <ast.Mod object at 0x7da2590d6920> name[serverid]]]]
<ast.AugAssign object at 0x7da1b26ae6b0>
variable[server_vols] assign[=] call[call[call[name[server]][constant[entities]]][constant[volumes]]][constant[items]]
variable[n_volumes] assign[=] call[name[len], parameter[name[server_vols]]]
variable[total_disk] assign[=] constant[0]
variable[licence_type] assign[=] constant[]
for taget[name[vol]] in starred[name[server_vols]] begin[:]
<ast.AugAssign object at 0x7da1b26ad750>
variable[licence_type] assign[=] call[name[str], parameter[call[call[name[vol]][constant[properties]]][constant[licenceType]]]]
call[name[bound_vols]][call[name[vol]][constant[id]]] assign[=] name[serverid]
if name[verbose] begin[:]
call[name[print], parameter[binary_operation[constant[volume %s is connected to %s w/ OS %s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Subscript object at 0x7da1b26ae770>, <ast.Subscript object at 0x7da1b26adf60>, <ast.Name object at 0x7da1b26ae920>]]]]]
variable[server_nics] assign[=] call[call[call[name[server]][constant[entities]]][constant[nics]]][constant[items]]
variable[n_nics] assign[=] call[name[len], parameter[name[server_nics]]]
<ast.AugAssign object at 0x7da1b26af5e0>
call[name[dc_inv].append, parameter[binary_operation[name[dc_data] + name[server_data]]]]
variable[volumes] assign[=] call[name[pbclient].list_volumes, parameter[name[dcid], constant[2]]]
for taget[name[volume]] in starred[call[name[volumes]][constant[items]]] begin[:]
if compare[name[verbose] greater[>] constant[2]] begin[:]
call[name[print], parameter[binary_operation[constant[VOLUME: %s] <ast.Mod object at 0x7da2590d6920> call[name[str], parameter[name[volume]]]]]]
variable[volid] assign[=] call[name[volume]][constant[id]]
variable[vol_data] assign[=] list[[<ast.Subscript object at 0x7da1b26aebf0>, <ast.Name object at 0x7da1b26add20>, <ast.Subscript object at 0x7da1b26aee30>, <ast.Subscript object at 0x7da1b26afc70>, <ast.Subscript object at 0x7da1b26ae0e0>, <ast.Constant object at 0x7da1b26af8b0>, <ast.Constant object at 0x7da1b26aec50>, <ast.Constant object at 0x7da1b26ad9c0>, <ast.Constant object at 0x7da1b26ac8e0>, <ast.Subscript object at 0x7da1b26aed40>]]
variable[connect] assign[=] constant[NONE]
if compare[name[volid] in name[bound_vols]] begin[:]
variable[connect] assign[=] call[name[bound_vols]][name[volid]]
<ast.AugAssign object at 0x7da1b26ac430>
call[name[dc_inv].append, parameter[binary_operation[name[dc_data] + name[vol_data]]]]
return[name[dc_inv]] | keyword[def] identifier[get_dc_inventory] ( identifier[pbclient] , identifier[dc] = keyword[None] ):
literal[string]
keyword[if] identifier[pbclient] keyword[is] keyword[None] :
keyword[raise] identifier[ValueError] ( literal[string] )
keyword[if] identifier[dc] keyword[is] keyword[None] :
keyword[raise] identifier[ValueError] ( literal[string] )
identifier[dc_inv] =[]
identifier[dcid] = identifier[dc] [ literal[string] ]
identifier[dc_data] =[ identifier[dcid] , identifier[dc] [ literal[string] ][ literal[string] ], identifier[dc] [ literal[string] ][ literal[string] ]]
identifier[servers] = identifier[pbclient] . identifier[list_servers] ( identifier[dcid] , literal[int] )
identifier[print] ( literal[string] %( identifier[len] ( identifier[servers] [ literal[string] ]), identifier[dc] [ literal[string] ][ literal[string] ]))
keyword[if] identifier[verbose] > literal[int] :
identifier[print] ( identifier[str] ( identifier[servers] ))
identifier[bound_vols] = identifier[dict] ()
keyword[for] identifier[server] keyword[in] identifier[servers] [ literal[string] ]:
keyword[if] identifier[verbose] > literal[int] :
identifier[print] ( literal[string] % identifier[str] ( identifier[server] ))
identifier[serverid] = identifier[server] [ literal[string] ]
identifier[server_data] =[
identifier[server] [ literal[string] ], identifier[serverid] , identifier[server] [ literal[string] ][ literal[string] ],
identifier[server] [ literal[string] ][ literal[string] ]
]
identifier[bootOS] = literal[string]
identifier[bootdev] = identifier[server] [ literal[string] ][ literal[string] ]
keyword[if] identifier[bootdev] keyword[is] keyword[None] :
identifier[bootdev] = identifier[server] [ literal[string] ][ literal[string] ]
identifier[print] ( literal[string] %( identifier[serverid] , literal[string] ))
keyword[if] identifier[bootdev] keyword[is] keyword[None] :
identifier[print] ( literal[string] %( identifier[serverid] ))
keyword[else] :
identifier[bootOS] = identifier[bootdev] [ literal[string] ][ literal[string] ]
identifier[server_data] +=[ identifier[bootOS] , identifier[server] [ literal[string] ][ literal[string] ], identifier[server] [ literal[string] ][ literal[string] ]]
identifier[server_vols] = identifier[server] [ literal[string] ][ literal[string] ][ literal[string] ]
identifier[n_volumes] = identifier[len] ( identifier[server_vols] )
identifier[total_disk] = literal[int]
identifier[licence_type] = literal[string]
keyword[for] identifier[vol] keyword[in] identifier[server_vols] :
identifier[total_disk] += identifier[vol] [ literal[string] ][ literal[string] ]
identifier[licence_type] = identifier[str] ( identifier[vol] [ literal[string] ][ literal[string] ])
identifier[bound_vols] [ identifier[vol] [ literal[string] ]]= identifier[serverid]
keyword[if] identifier[verbose] :
identifier[print] ( literal[string] %(
identifier[vol] [ literal[string] ], identifier[bound_vols] [ identifier[vol] [ literal[string] ]], identifier[licence_type] ))
identifier[server_nics] = identifier[server] [ literal[string] ][ literal[string] ][ literal[string] ]
identifier[n_nics] = identifier[len] ( identifier[server_nics] )
identifier[server_data] +=[
identifier[n_nics] , identifier[n_volumes] , identifier[total_disk] , literal[string] ,
identifier[server] [ literal[string] ][ literal[string] ], identifier[server] [ literal[string] ][ literal[string] ]
]
identifier[dc_inv] . identifier[append] ( identifier[dc_data] + identifier[server_data] )
identifier[volumes] = identifier[pbclient] . identifier[list_volumes] ( identifier[dcid] , literal[int] )
keyword[for] identifier[volume] keyword[in] identifier[volumes] [ literal[string] ]:
keyword[if] identifier[verbose] > literal[int] :
identifier[print] ( literal[string] % identifier[str] ( identifier[volume] ))
identifier[volid] = identifier[volume] [ literal[string] ]
identifier[vol_data] =[
identifier[volume] [ literal[string] ], identifier[volid] , identifier[volume] [ literal[string] ][ literal[string] ], identifier[volume] [ literal[string] ][ literal[string] ],
identifier[volume] [ literal[string] ][ literal[string] ], literal[string] , literal[string] , literal[string] , literal[string] , identifier[volume] [ literal[string] ][ literal[string] ]
]
identifier[connect] = literal[string]
keyword[if] identifier[volid] keyword[in] identifier[bound_vols] :
identifier[connect] = identifier[bound_vols] [ identifier[volid] ]
identifier[vol_data] +=[
identifier[connect] , identifier[volume] [ literal[string] ][ literal[string] ], identifier[volume] [ literal[string] ][ literal[string] ]
]
identifier[dc_inv] . identifier[append] ( identifier[dc_data] + identifier[vol_data] )
keyword[return] identifier[dc_inv] | def get_dc_inventory(pbclient, dc=None):
""" gets inventory of one data center"""
if pbclient is None:
raise ValueError("argument 'pbclient' must not be None") # depends on [control=['if'], data=[]]
if dc is None:
raise ValueError("argument 'dc' must not be None") # depends on [control=['if'], data=[]]
dc_inv = [] # inventory list to return
dcid = dc['id']
# dc_data contains dc specific columns
dc_data = [dcid, dc['properties']['name'], dc['properties']['location']]
# first get the servers
# this will build a hash to relate volumes to servers later
# depth 3 is enough to get into volume/nic level plus details
servers = pbclient.list_servers(dcid, 3)
print('found %i servers in data center %s' % (len(servers['items']), dc['properties']['name']))
if verbose > 2:
print(str(servers)) # depends on [control=['if'], data=[]]
# this will build a hash to relate volumes to servers later
bound_vols = dict() # hash volume-to-server relations
for server in servers['items']:
if verbose > 2:
print('SERVER: %s' % str(server)) # depends on [control=['if'], data=[]]
serverid = server['id']
# server_data contains server specific columns for later output
server_data = [server['type'], serverid, server['properties']['name'], server['metadata']['state']]
# OS is determined by boot device (volume||cdrom), not a server property.
# Might even be unspecified
bootOS = 'NONE'
bootdev = server['properties']['bootVolume']
if bootdev is None:
bootdev = server['properties']['bootCdrom']
print('server %s has boot device %s' % (serverid, 'CDROM')) # depends on [control=['if'], data=['bootdev']]
if bootdev is None:
print('server %s has NO boot device' % serverid) # depends on [control=['if'], data=[]]
else:
bootOS = bootdev['properties']['licenceType']
server_data += [bootOS, server['properties']['cores'], server['properties']['ram']]
server_vols = server['entities']['volumes']['items']
n_volumes = len(server_vols)
total_disk = 0
licence_type = ''
for vol in server_vols:
total_disk += vol['properties']['size']
licence_type = str(vol['properties']['licenceType'])
bound_vols[vol['id']] = serverid
if verbose:
print('volume %s is connected to %s w/ OS %s' % (vol['id'], bound_vols[vol['id']], licence_type)) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['vol']]
server_nics = server['entities']['nics']['items']
n_nics = len(server_nics)
server_data += [n_nics, n_volumes, total_disk, '', server['metadata']['createdDate'], server['metadata']['lastModifiedDate']]
dc_inv.append(dc_data + server_data) # depends on [control=['for'], data=['server']]
# end for(servers)
# and now the volumes...
volumes = pbclient.list_volumes(dcid, 2) # depth 2 gives max. details
for volume in volumes['items']:
if verbose > 2:
print('VOLUME: %s' % str(volume)) # depends on [control=['if'], data=[]]
volid = volume['id']
vol_data = [volume['type'], volid, volume['properties']['name'], volume['metadata']['state'], volume['properties']['licenceType'], '', '', '', '', volume['properties']['size']]
connect = 'NONE'
if volid in bound_vols:
connect = bound_vols[volid] # depends on [control=['if'], data=['volid', 'bound_vols']]
vol_data += [connect, volume['metadata']['createdDate'], volume['metadata']['lastModifiedDate']]
dc_inv.append(dc_data + vol_data) # depends on [control=['for'], data=['volume']]
# end for(volumes)
return dc_inv |
def index(self, symbol='incon.dat', group=False):
    '''
    Fetch index data from a data file under the tdx directory.

    :param symbol: file name of the data file, joined onto self.tdxdir
    :param group: grouping flag passed through to BlockReader.get_df
    :return: pd.DataFrame or None
    '''
    reader = BlockReader()
    symbol = os.path.join(self.tdxdir, symbol)
    # NOTE(review): os.path.join never returns None, so this guard is always
    # true as written; presumably the *input* symbol was meant to be checked.
    if symbol is not None:
        return reader.get_df(symbol, group)
return None | def function[index, parameter[self, symbol, group]]:
constant[
获取指数数据
:param symbol:
:param group:
:return: pd.dataFrame or None
]
variable[reader] assign[=] call[name[BlockReader], parameter[]]
variable[symbol] assign[=] call[name[os].path.join, parameter[name[self].tdxdir, name[symbol]]]
if compare[name[symbol] is_not constant[None]] begin[:]
return[call[name[reader].get_df, parameter[name[symbol], name[group]]]]
return[constant[None]] | keyword[def] identifier[index] ( identifier[self] , identifier[symbol] = literal[string] , identifier[group] = keyword[False] ):
literal[string]
identifier[reader] = identifier[BlockReader] ()
identifier[symbol] = identifier[os] . identifier[path] . identifier[join] ( identifier[self] . identifier[tdxdir] , identifier[symbol] )
keyword[if] identifier[symbol] keyword[is] keyword[not] keyword[None] :
keyword[return] identifier[reader] . identifier[get_df] ( identifier[symbol] , identifier[group] )
keyword[return] keyword[None] | def index(self, symbol='incon.dat', group=False):
"""
获取指数数据
:param symbol:
:param group:
:return: pd.dataFrame or None
"""
reader = BlockReader()
symbol = os.path.join(self.tdxdir, symbol)
if symbol is not None:
return reader.get_df(symbol, group) # depends on [control=['if'], data=['symbol']]
return None |
def base_url(self):
    """Base URL for resolving resource URLs.

    Uses the document's package_url when it is set (truthy); otherwise the
    document's _ref serves as the fallback base.
    """
    if self.doc.package_url:
        return self.doc.package_url
return self.doc._ref | def function[base_url, parameter[self]]:
constant[Base URL for resolving resource URLs]
if name[self].doc.package_url begin[:]
return[name[self].doc.package_url]
return[name[self].doc._ref] | keyword[def] identifier[base_url] ( identifier[self] ):
literal[string]
keyword[if] identifier[self] . identifier[doc] . identifier[package_url] :
keyword[return] identifier[self] . identifier[doc] . identifier[package_url]
keyword[return] identifier[self] . identifier[doc] . identifier[_ref] | def base_url(self):
"""Base URL for resolving resource URLs"""
if self.doc.package_url:
return self.doc.package_url # depends on [control=['if'], data=[]]
return self.doc._ref |
def wrappable_term(self, term):
    """Return the given term's values joined into one space-separated string.

    Looks up *term* under the 'Root' section and joins the stripped values of
    all matching entries, skipping entries that are missing or have no value
    (e.g. combines multiple Root.Description terms into one string).

    :param term: term name passed to self['Root'].find()
    :return: single space-separated string (empty if nothing matches)
    """
return ' '.join(e.value.strip() for e in self['Root'].find(term) if e and e.value) | def function[wrappable_term, parameter[self, term]]:
constant[Return the Root.Description, possibly combining multiple terms.
:return:
]
return[call[constant[ ].join, parameter[<ast.GeneratorExp object at 0x7da1b195e980>]]] | keyword[def] identifier[wrappable_term] ( identifier[self] , identifier[term] ):
literal[string]
keyword[return] literal[string] . identifier[join] ( identifier[e] . identifier[value] . identifier[strip] () keyword[for] identifier[e] keyword[in] identifier[self] [ literal[string] ]. identifier[find] ( identifier[term] ) keyword[if] identifier[e] keyword[and] identifier[e] . identifier[value] ) | def wrappable_term(self, term):
"""Return the Root.Description, possibly combining multiple terms.
:return:
"""
return ' '.join((e.value.strip() for e in self['Root'].find(term) if e and e.value)) |
def redirect_profile(request):
    '''
    The default destination from logging in: redirect to the user's actual
    profile URL when authenticated, otherwise send them to the login view
    with this view as the post-login "next" target.
    '''
    if request.user.is_authenticated:
        # Authenticated users go straight to their own profile page.
        return HttpResponseRedirect(reverse('wafer_user_profile',
                                            args=(request.user.username,)))
    else:
return redirect_to_login(next=reverse(redirect_profile)) | def function[redirect_profile, parameter[request]]:
constant[
The default destination from logging in, redirect to the actual profile URL
]
if name[request].user.is_authenticated begin[:]
return[call[name[HttpResponseRedirect], parameter[call[name[reverse], parameter[constant[wafer_user_profile]]]]]] | keyword[def] identifier[redirect_profile] ( identifier[request] ):
literal[string]
keyword[if] identifier[request] . identifier[user] . identifier[is_authenticated] :
keyword[return] identifier[HttpResponseRedirect] ( identifier[reverse] ( literal[string] ,
identifier[args] =( identifier[request] . identifier[user] . identifier[username] ,)))
keyword[else] :
keyword[return] identifier[redirect_to_login] ( identifier[next] = identifier[reverse] ( identifier[redirect_profile] )) | def redirect_profile(request):
"""
The default destination from logging in, redirect to the actual profile URL
"""
if request.user.is_authenticated:
return HttpResponseRedirect(reverse('wafer_user_profile', args=(request.user.username,))) # depends on [control=['if'], data=[]]
else:
return redirect_to_login(next=reverse(redirect_profile)) |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.