code
stringlengths 75
104k
| code_sememe
stringlengths 47
309k
| token_type
stringlengths 215
214k
| code_dependency
stringlengths 75
155k
|
|---|---|---|---|
def self_sign(self, req, receiver='', aud=None):
"""
Sign the extended request.
:param req: Request, a :py:class:`fedoidcmsg.MetadataStatement' instance
:param receiver: The intended user of this metadata statement
:param aud: The audience, a list of receivers.
:return: An augmented set of request arguments
"""
if self.entity_id:
_iss = self.entity_id
else:
_iss = self.iss
creq = req.copy()
if not 'metadata_statement_uris' in creq and not \
'metadata_statements' in creq:
_copy = creq.copy()
_jws = self.self_signer.sign(_copy, receiver=receiver, iss=_iss,
aud=aud)
sms_spec = {'metadata_statements': {self.iss: _jws}}
else:
for ref in ['metadata_statement_uris', 'metadata_statements']:
try:
del creq[ref]
except KeyError:
pass
sms_spec = {'metadata_statements': Message()}
for ref in ['metadata_statement_uris', 'metadata_statements']:
if ref not in req:
continue
for foid, value in req[ref].items():
_copy = creq.copy()
_copy[ref] = Message()
_copy[ref][foid] = value
_jws = self.self_signer.sign(_copy, receiver=receiver,
iss=_iss, aud=aud)
sms_spec['metadata_statements'][foid] = _jws
creq.update(sms_spec)
return creq
|
def function[self_sign, parameter[self, req, receiver, aud]]:
constant[
Sign the extended request.
:param req: Request, a :py:class:`fedoidcmsg.MetadataStatement' instance
:param receiver: The intended user of this metadata statement
:param aud: The audience, a list of receivers.
:return: An augmented set of request arguments
]
if name[self].entity_id begin[:]
variable[_iss] assign[=] name[self].entity_id
variable[creq] assign[=] call[name[req].copy, parameter[]]
if <ast.BoolOp object at 0x7da20e954b50> begin[:]
variable[_copy] assign[=] call[name[creq].copy, parameter[]]
variable[_jws] assign[=] call[name[self].self_signer.sign, parameter[name[_copy]]]
variable[sms_spec] assign[=] dictionary[[<ast.Constant object at 0x7da20e957c10>], [<ast.Dict object at 0x7da20e955030>]]
call[name[creq].update, parameter[name[sms_spec]]]
return[name[creq]]
|
keyword[def] identifier[self_sign] ( identifier[self] , identifier[req] , identifier[receiver] = literal[string] , identifier[aud] = keyword[None] ):
literal[string]
keyword[if] identifier[self] . identifier[entity_id] :
identifier[_iss] = identifier[self] . identifier[entity_id]
keyword[else] :
identifier[_iss] = identifier[self] . identifier[iss]
identifier[creq] = identifier[req] . identifier[copy] ()
keyword[if] keyword[not] literal[string] keyword[in] identifier[creq] keyword[and] keyword[not] literal[string] keyword[in] identifier[creq] :
identifier[_copy] = identifier[creq] . identifier[copy] ()
identifier[_jws] = identifier[self] . identifier[self_signer] . identifier[sign] ( identifier[_copy] , identifier[receiver] = identifier[receiver] , identifier[iss] = identifier[_iss] ,
identifier[aud] = identifier[aud] )
identifier[sms_spec] ={ literal[string] :{ identifier[self] . identifier[iss] : identifier[_jws] }}
keyword[else] :
keyword[for] identifier[ref] keyword[in] [ literal[string] , literal[string] ]:
keyword[try] :
keyword[del] identifier[creq] [ identifier[ref] ]
keyword[except] identifier[KeyError] :
keyword[pass]
identifier[sms_spec] ={ literal[string] : identifier[Message] ()}
keyword[for] identifier[ref] keyword[in] [ literal[string] , literal[string] ]:
keyword[if] identifier[ref] keyword[not] keyword[in] identifier[req] :
keyword[continue]
keyword[for] identifier[foid] , identifier[value] keyword[in] identifier[req] [ identifier[ref] ]. identifier[items] ():
identifier[_copy] = identifier[creq] . identifier[copy] ()
identifier[_copy] [ identifier[ref] ]= identifier[Message] ()
identifier[_copy] [ identifier[ref] ][ identifier[foid] ]= identifier[value]
identifier[_jws] = identifier[self] . identifier[self_signer] . identifier[sign] ( identifier[_copy] , identifier[receiver] = identifier[receiver] ,
identifier[iss] = identifier[_iss] , identifier[aud] = identifier[aud] )
identifier[sms_spec] [ literal[string] ][ identifier[foid] ]= identifier[_jws]
identifier[creq] . identifier[update] ( identifier[sms_spec] )
keyword[return] identifier[creq]
|
def self_sign(self, req, receiver='', aud=None):
"""
Sign the extended request.
:param req: Request, a :py:class:`fedoidcmsg.MetadataStatement' instance
:param receiver: The intended user of this metadata statement
:param aud: The audience, a list of receivers.
:return: An augmented set of request arguments
"""
if self.entity_id:
_iss = self.entity_id # depends on [control=['if'], data=[]]
else:
_iss = self.iss
creq = req.copy()
if not 'metadata_statement_uris' in creq and (not 'metadata_statements' in creq):
_copy = creq.copy()
_jws = self.self_signer.sign(_copy, receiver=receiver, iss=_iss, aud=aud)
sms_spec = {'metadata_statements': {self.iss: _jws}} # depends on [control=['if'], data=[]]
else:
for ref in ['metadata_statement_uris', 'metadata_statements']:
try:
del creq[ref] # depends on [control=['try'], data=[]]
except KeyError:
pass # depends on [control=['except'], data=[]] # depends on [control=['for'], data=['ref']]
sms_spec = {'metadata_statements': Message()}
for ref in ['metadata_statement_uris', 'metadata_statements']:
if ref not in req:
continue # depends on [control=['if'], data=[]]
for (foid, value) in req[ref].items():
_copy = creq.copy()
_copy[ref] = Message()
_copy[ref][foid] = value
_jws = self.self_signer.sign(_copy, receiver=receiver, iss=_iss, aud=aud)
sms_spec['metadata_statements'][foid] = _jws # depends on [control=['for'], data=[]] # depends on [control=['for'], data=['ref']]
creq.update(sms_spec)
return creq
|
def twostep(script, iterations=3, angle_threshold=60, normal_steps=20, fit_steps=20,
selected=False):
""" Two Step Smoothing, a feature preserving/enhancing fairing filter.
It is based on a Normal Smoothing step where similar normals are averaged
together and a step where the vertexes are fitted on the new normals.
Based on:
A. Belyaev and Y. Ohtake,
"A Comparison of Mesh Smoothing Methods"
Proc. Israel-Korea Bi-National Conf. Geometric Modeling and Computer
Graphics, pp. 83-87, 2003.
Args:
script: the FilterScript object or script filename to write
the filter to.
iterations (int): The number of times that the whole algorithm (normal
smoothing + vertex fitting) is iterated.
angle_threshold (float): Specify a threshold angle (0..90) for features
that you want to be preserved. Features forming angles LARGER than
the specified threshold will be preserved.
0 -> no smoothing
90 -> all faces will be smoothed
normal_steps (int): Number of iterations of normal smoothing step. The
larger the better and (the slower)
fit_steps (int): Number of iterations of the vertex fitting procedure
selected (bool): If selected the filter is performed only on the
selected faces
Layer stack:
No impacts
MeshLab versions:
2016.12
1.3.4BETA
"""
filter_xml = ''.join([
' <filter name="TwoStep Smooth">\n',
' <Param name="stepSmoothNum" ',
'value="{:d}" '.format(iterations),
'description="Smoothing steps" ',
'type="RichInt" ',
'/>\n',
' <Param name="normalThr" ',
'value="{}" '.format(angle_threshold),
'description="Feature Angle Threshold (deg)" ',
'type="RichFloat" ',
'/>\n',
' <Param name="stepNormalNum" ',
'value="{:d}" '.format(normal_steps),
'description="Normal Smoothing steps" ',
'type="RichInt" ',
'/>\n',
' <Param name="stepFitNum" ',
'value="{:d}" '.format(fit_steps),
'description="Vertex Fitting steps" ',
'type="RichInt" ',
'/>\n',
' <Param name="Selected" ',
'value="{}" '.format(str(selected).lower()),
'description="Affect only selected faces" ',
'type="RichBool" ',
'/>\n',
' </filter>\n'])
util.write_filter(script, filter_xml)
return None
|
def function[twostep, parameter[script, iterations, angle_threshold, normal_steps, fit_steps, selected]]:
constant[ Two Step Smoothing, a feature preserving/enhancing fairing filter.
It is based on a Normal Smoothing step where similar normals are averaged
together and a step where the vertexes are fitted on the new normals.
Based on:
A. Belyaev and Y. Ohtake,
"A Comparison of Mesh Smoothing Methods"
Proc. Israel-Korea Bi-National Conf. Geometric Modeling and Computer
Graphics, pp. 83-87, 2003.
Args:
script: the FilterScript object or script filename to write
the filter to.
iterations (int): The number of times that the whole algorithm (normal
smoothing + vertex fitting) is iterated.
angle_threshold (float): Specify a threshold angle (0..90) for features
that you want to be preserved. Features forming angles LARGER than
the specified threshold will be preserved.
0 -> no smoothing
90 -> all faces will be smoothed
normal_steps (int): Number of iterations of normal smoothing step. The
larger the better and (the slower)
fit_steps (int): Number of iterations of the vertex fitting procedure
selected (bool): If selected the filter is performed only on the
selected faces
Layer stack:
No impacts
MeshLab versions:
2016.12
1.3.4BETA
]
variable[filter_xml] assign[=] call[constant[].join, parameter[list[[<ast.Constant object at 0x7da1b02941c0>, <ast.Constant object at 0x7da1b02950c0>, <ast.Call object at 0x7da1b0297e80>, <ast.Constant object at 0x7da1b0294190>, <ast.Constant object at 0x7da1b02947f0>, <ast.Constant object at 0x7da1b0295330>, <ast.Constant object at 0x7da1b0295480>, <ast.Call object at 0x7da1b0297580>, <ast.Constant object at 0x7da1b0297760>, <ast.Constant object at 0x7da1b0297070>, <ast.Constant object at 0x7da1b0297be0>, <ast.Constant object at 0x7da1b0295960>, <ast.Call object at 0x7da1b0295c30>, <ast.Constant object at 0x7da1b0296020>, <ast.Constant object at 0x7da1b0295d80>, <ast.Constant object at 0x7da1b0297fd0>, <ast.Constant object at 0x7da1b0295e70>, <ast.Call object at 0x7da1b0295150>, <ast.Constant object at 0x7da1b0297970>, <ast.Constant object at 0x7da1b0295b10>, <ast.Constant object at 0x7da1b0296fe0>, <ast.Constant object at 0x7da1b0296a70>, <ast.Call object at 0x7da1b0297640>, <ast.Constant object at 0x7da1b02973d0>, <ast.Constant object at 0x7da1b0295510>, <ast.Constant object at 0x7da1b0296920>, <ast.Constant object at 0x7da1b0297d60>]]]]
call[name[util].write_filter, parameter[name[script], name[filter_xml]]]
return[constant[None]]
|
keyword[def] identifier[twostep] ( identifier[script] , identifier[iterations] = literal[int] , identifier[angle_threshold] = literal[int] , identifier[normal_steps] = literal[int] , identifier[fit_steps] = literal[int] ,
identifier[selected] = keyword[False] ):
literal[string]
identifier[filter_xml] = literal[string] . identifier[join] ([
literal[string] ,
literal[string] ,
literal[string] . identifier[format] ( identifier[iterations] ),
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] . identifier[format] ( identifier[angle_threshold] ),
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] . identifier[format] ( identifier[normal_steps] ),
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] . identifier[format] ( identifier[fit_steps] ),
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] . identifier[format] ( identifier[str] ( identifier[selected] ). identifier[lower] ()),
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ])
identifier[util] . identifier[write_filter] ( identifier[script] , identifier[filter_xml] )
keyword[return] keyword[None]
|
def twostep(script, iterations=3, angle_threshold=60, normal_steps=20, fit_steps=20, selected=False):
""" Two Step Smoothing, a feature preserving/enhancing fairing filter.
It is based on a Normal Smoothing step where similar normals are averaged
together and a step where the vertexes are fitted on the new normals.
Based on:
A. Belyaev and Y. Ohtake,
"A Comparison of Mesh Smoothing Methods"
Proc. Israel-Korea Bi-National Conf. Geometric Modeling and Computer
Graphics, pp. 83-87, 2003.
Args:
script: the FilterScript object or script filename to write
the filter to.
iterations (int): The number of times that the whole algorithm (normal
smoothing + vertex fitting) is iterated.
angle_threshold (float): Specify a threshold angle (0..90) for features
that you want to be preserved. Features forming angles LARGER than
the specified threshold will be preserved.
0 -> no smoothing
90 -> all faces will be smoothed
normal_steps (int): Number of iterations of normal smoothing step. The
larger the better and (the slower)
fit_steps (int): Number of iterations of the vertex fitting procedure
selected (bool): If selected the filter is performed only on the
selected faces
Layer stack:
No impacts
MeshLab versions:
2016.12
1.3.4BETA
"""
filter_xml = ''.join([' <filter name="TwoStep Smooth">\n', ' <Param name="stepSmoothNum" ', 'value="{:d}" '.format(iterations), 'description="Smoothing steps" ', 'type="RichInt" ', '/>\n', ' <Param name="normalThr" ', 'value="{}" '.format(angle_threshold), 'description="Feature Angle Threshold (deg)" ', 'type="RichFloat" ', '/>\n', ' <Param name="stepNormalNum" ', 'value="{:d}" '.format(normal_steps), 'description="Normal Smoothing steps" ', 'type="RichInt" ', '/>\n', ' <Param name="stepFitNum" ', 'value="{:d}" '.format(fit_steps), 'description="Vertex Fitting steps" ', 'type="RichInt" ', '/>\n', ' <Param name="Selected" ', 'value="{}" '.format(str(selected).lower()), 'description="Affect only selected faces" ', 'type="RichBool" ', '/>\n', ' </filter>\n'])
util.write_filter(script, filter_xml)
return None
|
async def load_remote(
self, email: str, password: str,
skip_existing: bool = True) -> None:
"""Create a local client."""
auth_resp = await self._request(
'post',
'https://my.rainmachine.com/login/auth',
json={'user': {
'email': email,
'pwd': password,
'remember': 1
}})
access_token = auth_resp['access_token']
sprinklers_resp = await self._request(
'post',
'https://my.rainmachine.com/devices/get-sprinklers',
access_token=access_token,
json={'user': {
'email': email,
'pwd': password,
'remember': 1
}})
for sprinkler in sprinklers_resp['sprinklers']:
if skip_existing and sprinkler['mac'] in self.controllers:
continue
controller = RemoteController(self._request, self._websession)
await controller.login(
access_token, sprinkler['sprinklerId'], password)
version_data = await controller.api.versions()
controller.api_version = version_data['apiVer']
controller.hardware_version = version_data['hwVer']
controller.mac = sprinkler['mac']
controller.name = sprinkler['name']
controller.software_version = version_data['swVer']
self.controllers[sprinkler['mac']] = controller
|
<ast.AsyncFunctionDef object at 0x7da20c6c69e0>
|
keyword[async] keyword[def] identifier[load_remote] (
identifier[self] , identifier[email] : identifier[str] , identifier[password] : identifier[str] ,
identifier[skip_existing] : identifier[bool] = keyword[True] )-> keyword[None] :
literal[string]
identifier[auth_resp] = keyword[await] identifier[self] . identifier[_request] (
literal[string] ,
literal[string] ,
identifier[json] ={ literal[string] :{
literal[string] : identifier[email] ,
literal[string] : identifier[password] ,
literal[string] : literal[int]
}})
identifier[access_token] = identifier[auth_resp] [ literal[string] ]
identifier[sprinklers_resp] = keyword[await] identifier[self] . identifier[_request] (
literal[string] ,
literal[string] ,
identifier[access_token] = identifier[access_token] ,
identifier[json] ={ literal[string] :{
literal[string] : identifier[email] ,
literal[string] : identifier[password] ,
literal[string] : literal[int]
}})
keyword[for] identifier[sprinkler] keyword[in] identifier[sprinklers_resp] [ literal[string] ]:
keyword[if] identifier[skip_existing] keyword[and] identifier[sprinkler] [ literal[string] ] keyword[in] identifier[self] . identifier[controllers] :
keyword[continue]
identifier[controller] = identifier[RemoteController] ( identifier[self] . identifier[_request] , identifier[self] . identifier[_websession] )
keyword[await] identifier[controller] . identifier[login] (
identifier[access_token] , identifier[sprinkler] [ literal[string] ], identifier[password] )
identifier[version_data] = keyword[await] identifier[controller] . identifier[api] . identifier[versions] ()
identifier[controller] . identifier[api_version] = identifier[version_data] [ literal[string] ]
identifier[controller] . identifier[hardware_version] = identifier[version_data] [ literal[string] ]
identifier[controller] . identifier[mac] = identifier[sprinkler] [ literal[string] ]
identifier[controller] . identifier[name] = identifier[sprinkler] [ literal[string] ]
identifier[controller] . identifier[software_version] = identifier[version_data] [ literal[string] ]
identifier[self] . identifier[controllers] [ identifier[sprinkler] [ literal[string] ]]= identifier[controller]
|
async def load_remote(self, email: str, password: str, skip_existing: bool=True) -> None:
"""Create a local client."""
auth_resp = await self._request('post', 'https://my.rainmachine.com/login/auth', json={'user': {'email': email, 'pwd': password, 'remember': 1}})
access_token = auth_resp['access_token']
sprinklers_resp = await self._request('post', 'https://my.rainmachine.com/devices/get-sprinklers', access_token=access_token, json={'user': {'email': email, 'pwd': password, 'remember': 1}})
for sprinkler in sprinklers_resp['sprinklers']:
if skip_existing and sprinkler['mac'] in self.controllers:
continue # depends on [control=['if'], data=[]]
controller = RemoteController(self._request, self._websession)
await controller.login(access_token, sprinkler['sprinklerId'], password)
version_data = await controller.api.versions()
controller.api_version = version_data['apiVer']
controller.hardware_version = version_data['hwVer']
controller.mac = sprinkler['mac']
controller.name = sprinkler['name']
controller.software_version = version_data['swVer']
self.controllers[sprinkler['mac']] = controller # depends on [control=['for'], data=['sprinkler']]
|
def itake_column(list_, colx):
""" iterator version of get_list_column """
if isinstance(colx, list):
# multi select
return ([row[colx_] for colx_ in colx] for row in list_)
else:
return (row[colx] for row in list_)
|
def function[itake_column, parameter[list_, colx]]:
constant[ iterator version of get_list_column ]
if call[name[isinstance], parameter[name[colx], name[list]]] begin[:]
return[<ast.GeneratorExp object at 0x7da1b24eae90>]
|
keyword[def] identifier[itake_column] ( identifier[list_] , identifier[colx] ):
literal[string]
keyword[if] identifier[isinstance] ( identifier[colx] , identifier[list] ):
keyword[return] ([ identifier[row] [ identifier[colx_] ] keyword[for] identifier[colx_] keyword[in] identifier[colx] ] keyword[for] identifier[row] keyword[in] identifier[list_] )
keyword[else] :
keyword[return] ( identifier[row] [ identifier[colx] ] keyword[for] identifier[row] keyword[in] identifier[list_] )
|
def itake_column(list_, colx):
""" iterator version of get_list_column """
if isinstance(colx, list):
# multi select
return ([row[colx_] for colx_ in colx] for row in list_) # depends on [control=['if'], data=[]]
else:
return (row[colx] for row in list_)
|
def p_reserved_word(self, p):
"""reserved_word : BREAK
| CASE
| CATCH
| CONTINUE
| DEBUGGER
| DEFAULT
| DELETE
| DO
| ELSE
| FINALLY
| FOR
| FUNCTION
| IF
| IN
| INSTANCEOF
| NEW
| RETURN
| SWITCH
| THIS
| THROW
| TRY
| TYPEOF
| VAR
| VOID
| WHILE
| WITH
| NULL
| TRUE
| FALSE
| CLASS
| CONST
| ENUM
| EXPORT
| EXTENDS
| IMPORT
| SUPER
"""
p[0] = self.asttypes.Identifier(p[1])
p[0].setpos(p)
|
def function[p_reserved_word, parameter[self, p]]:
constant[reserved_word : BREAK
| CASE
| CATCH
| CONTINUE
| DEBUGGER
| DEFAULT
| DELETE
| DO
| ELSE
| FINALLY
| FOR
| FUNCTION
| IF
| IN
| INSTANCEOF
| NEW
| RETURN
| SWITCH
| THIS
| THROW
| TRY
| TYPEOF
| VAR
| VOID
| WHILE
| WITH
| NULL
| TRUE
| FALSE
| CLASS
| CONST
| ENUM
| EXPORT
| EXTENDS
| IMPORT
| SUPER
]
call[name[p]][constant[0]] assign[=] call[name[self].asttypes.Identifier, parameter[call[name[p]][constant[1]]]]
call[call[name[p]][constant[0]].setpos, parameter[name[p]]]
|
keyword[def] identifier[p_reserved_word] ( identifier[self] , identifier[p] ):
literal[string]
identifier[p] [ literal[int] ]= identifier[self] . identifier[asttypes] . identifier[Identifier] ( identifier[p] [ literal[int] ])
identifier[p] [ literal[int] ]. identifier[setpos] ( identifier[p] )
|
def p_reserved_word(self, p):
"""reserved_word : BREAK
| CASE
| CATCH
| CONTINUE
| DEBUGGER
| DEFAULT
| DELETE
| DO
| ELSE
| FINALLY
| FOR
| FUNCTION
| IF
| IN
| INSTANCEOF
| NEW
| RETURN
| SWITCH
| THIS
| THROW
| TRY
| TYPEOF
| VAR
| VOID
| WHILE
| WITH
| NULL
| TRUE
| FALSE
| CLASS
| CONST
| ENUM
| EXPORT
| EXTENDS
| IMPORT
| SUPER
"""
p[0] = self.asttypes.Identifier(p[1])
p[0].setpos(p)
|
def WatchMetadata(
self, handler, metadata_key='', recursive=True, timeout=None):
"""Watch for changes to the contents of the metadata server.
Args:
handler: callable, a function to call with the updated metadata contents.
metadata_key: string, the metadata key to watch for changes.
recursive: bool, True if we should recursively watch for metadata changes.
timeout: int, timeout in seconds for returning metadata output.
"""
while True:
response = self._HandleMetadataUpdate(
metadata_key=metadata_key, recursive=recursive, wait=True,
timeout=timeout)
try:
handler(response)
except Exception as e:
self.logger.exception('Exception calling the response handler. %s.', e)
|
def function[WatchMetadata, parameter[self, handler, metadata_key, recursive, timeout]]:
constant[Watch for changes to the contents of the metadata server.
Args:
handler: callable, a function to call with the updated metadata contents.
metadata_key: string, the metadata key to watch for changes.
recursive: bool, True if we should recursively watch for metadata changes.
timeout: int, timeout in seconds for returning metadata output.
]
while constant[True] begin[:]
variable[response] assign[=] call[name[self]._HandleMetadataUpdate, parameter[]]
<ast.Try object at 0x7da1b16b2aa0>
|
keyword[def] identifier[WatchMetadata] (
identifier[self] , identifier[handler] , identifier[metadata_key] = literal[string] , identifier[recursive] = keyword[True] , identifier[timeout] = keyword[None] ):
literal[string]
keyword[while] keyword[True] :
identifier[response] = identifier[self] . identifier[_HandleMetadataUpdate] (
identifier[metadata_key] = identifier[metadata_key] , identifier[recursive] = identifier[recursive] , identifier[wait] = keyword[True] ,
identifier[timeout] = identifier[timeout] )
keyword[try] :
identifier[handler] ( identifier[response] )
keyword[except] identifier[Exception] keyword[as] identifier[e] :
identifier[self] . identifier[logger] . identifier[exception] ( literal[string] , identifier[e] )
|
def WatchMetadata(self, handler, metadata_key='', recursive=True, timeout=None):
"""Watch for changes to the contents of the metadata server.
Args:
handler: callable, a function to call with the updated metadata contents.
metadata_key: string, the metadata key to watch for changes.
recursive: bool, True if we should recursively watch for metadata changes.
timeout: int, timeout in seconds for returning metadata output.
"""
while True:
response = self._HandleMetadataUpdate(metadata_key=metadata_key, recursive=recursive, wait=True, timeout=timeout)
try:
handler(response) # depends on [control=['try'], data=[]]
except Exception as e:
self.logger.exception('Exception calling the response handler. %s.', e) # depends on [control=['except'], data=['e']] # depends on [control=['while'], data=[]]
|
def GetFileEntryByPathSpec(self, path_spec):
"""Retrieves a file entry for a path specification.
Args:
path_spec (PathSpec): path specification.
Returns:
NTFSFileEntry: file entry or None if not available.
Raises:
BackEndError: if the file entry cannot be opened.
"""
# Opening a file by MFT entry is faster than opening a file by location.
# However we need the index of the corresponding $FILE_NAME MFT attribute.
fsntfs_file_entry = None
location = getattr(path_spec, 'location', None)
mft_attribute = getattr(path_spec, 'mft_attribute', None)
mft_entry = getattr(path_spec, 'mft_entry', None)
if (location == self.LOCATION_ROOT or
mft_entry == self.MFT_ENTRY_ROOT_DIRECTORY):
fsntfs_file_entry = self._fsntfs_volume.get_root_directory()
return ntfs_file_entry.NTFSFileEntry(
self._resolver_context, self, path_spec,
fsntfs_file_entry=fsntfs_file_entry, is_root=True)
try:
if mft_attribute is not None and mft_entry is not None:
fsntfs_file_entry = self._fsntfs_volume.get_file_entry(mft_entry)
elif location is not None:
fsntfs_file_entry = self._fsntfs_volume.get_file_entry_by_path(location)
except IOError as exception:
raise errors.BackEndError(exception)
if fsntfs_file_entry is None:
return None
return ntfs_file_entry.NTFSFileEntry(
self._resolver_context, self, path_spec,
fsntfs_file_entry=fsntfs_file_entry)
|
def function[GetFileEntryByPathSpec, parameter[self, path_spec]]:
constant[Retrieves a file entry for a path specification.
Args:
path_spec (PathSpec): path specification.
Returns:
NTFSFileEntry: file entry or None if not available.
Raises:
BackEndError: if the file entry cannot be opened.
]
variable[fsntfs_file_entry] assign[=] constant[None]
variable[location] assign[=] call[name[getattr], parameter[name[path_spec], constant[location], constant[None]]]
variable[mft_attribute] assign[=] call[name[getattr], parameter[name[path_spec], constant[mft_attribute], constant[None]]]
variable[mft_entry] assign[=] call[name[getattr], parameter[name[path_spec], constant[mft_entry], constant[None]]]
if <ast.BoolOp object at 0x7da1b07b9720> begin[:]
variable[fsntfs_file_entry] assign[=] call[name[self]._fsntfs_volume.get_root_directory, parameter[]]
return[call[name[ntfs_file_entry].NTFSFileEntry, parameter[name[self]._resolver_context, name[self], name[path_spec]]]]
<ast.Try object at 0x7da1b07bbcd0>
if compare[name[fsntfs_file_entry] is constant[None]] begin[:]
return[constant[None]]
return[call[name[ntfs_file_entry].NTFSFileEntry, parameter[name[self]._resolver_context, name[self], name[path_spec]]]]
|
keyword[def] identifier[GetFileEntryByPathSpec] ( identifier[self] , identifier[path_spec] ):
literal[string]
identifier[fsntfs_file_entry] = keyword[None]
identifier[location] = identifier[getattr] ( identifier[path_spec] , literal[string] , keyword[None] )
identifier[mft_attribute] = identifier[getattr] ( identifier[path_spec] , literal[string] , keyword[None] )
identifier[mft_entry] = identifier[getattr] ( identifier[path_spec] , literal[string] , keyword[None] )
keyword[if] ( identifier[location] == identifier[self] . identifier[LOCATION_ROOT] keyword[or]
identifier[mft_entry] == identifier[self] . identifier[MFT_ENTRY_ROOT_DIRECTORY] ):
identifier[fsntfs_file_entry] = identifier[self] . identifier[_fsntfs_volume] . identifier[get_root_directory] ()
keyword[return] identifier[ntfs_file_entry] . identifier[NTFSFileEntry] (
identifier[self] . identifier[_resolver_context] , identifier[self] , identifier[path_spec] ,
identifier[fsntfs_file_entry] = identifier[fsntfs_file_entry] , identifier[is_root] = keyword[True] )
keyword[try] :
keyword[if] identifier[mft_attribute] keyword[is] keyword[not] keyword[None] keyword[and] identifier[mft_entry] keyword[is] keyword[not] keyword[None] :
identifier[fsntfs_file_entry] = identifier[self] . identifier[_fsntfs_volume] . identifier[get_file_entry] ( identifier[mft_entry] )
keyword[elif] identifier[location] keyword[is] keyword[not] keyword[None] :
identifier[fsntfs_file_entry] = identifier[self] . identifier[_fsntfs_volume] . identifier[get_file_entry_by_path] ( identifier[location] )
keyword[except] identifier[IOError] keyword[as] identifier[exception] :
keyword[raise] identifier[errors] . identifier[BackEndError] ( identifier[exception] )
keyword[if] identifier[fsntfs_file_entry] keyword[is] keyword[None] :
keyword[return] keyword[None]
keyword[return] identifier[ntfs_file_entry] . identifier[NTFSFileEntry] (
identifier[self] . identifier[_resolver_context] , identifier[self] , identifier[path_spec] ,
identifier[fsntfs_file_entry] = identifier[fsntfs_file_entry] )
|
def GetFileEntryByPathSpec(self, path_spec):
"""Retrieves a file entry for a path specification.
Args:
path_spec (PathSpec): path specification.
Returns:
NTFSFileEntry: file entry or None if not available.
Raises:
BackEndError: if the file entry cannot be opened.
"""
# Opening a file by MFT entry is faster than opening a file by location.
# However we need the index of the corresponding $FILE_NAME MFT attribute.
fsntfs_file_entry = None
location = getattr(path_spec, 'location', None)
mft_attribute = getattr(path_spec, 'mft_attribute', None)
mft_entry = getattr(path_spec, 'mft_entry', None)
if location == self.LOCATION_ROOT or mft_entry == self.MFT_ENTRY_ROOT_DIRECTORY:
fsntfs_file_entry = self._fsntfs_volume.get_root_directory()
return ntfs_file_entry.NTFSFileEntry(self._resolver_context, self, path_spec, fsntfs_file_entry=fsntfs_file_entry, is_root=True) # depends on [control=['if'], data=[]]
try:
if mft_attribute is not None and mft_entry is not None:
fsntfs_file_entry = self._fsntfs_volume.get_file_entry(mft_entry) # depends on [control=['if'], data=[]]
elif location is not None:
fsntfs_file_entry = self._fsntfs_volume.get_file_entry_by_path(location) # depends on [control=['if'], data=['location']] # depends on [control=['try'], data=[]]
except IOError as exception:
raise errors.BackEndError(exception) # depends on [control=['except'], data=['exception']]
if fsntfs_file_entry is None:
return None # depends on [control=['if'], data=[]]
return ntfs_file_entry.NTFSFileEntry(self._resolver_context, self, path_spec, fsntfs_file_entry=fsntfs_file_entry)
|
def quickRPCServer(provider, prefix, target,
maxsize=20,
workers=1,
useenv=True, conf=None, isolate=False):
"""Run an RPC server in the current thread
Calls are handled sequentially, and always in the current thread, if workers=1 (the default).
If workers>1 then calls are handled concurrently by a pool of worker threads.
Requires NTURI style argument encoding.
:param str provider: A provider name. Must be unique in this process.
:param str prefix: PV name prefix. Along with method names, must be globally unique.
:param target: The object which is exporting methods. (use the :func:`rpc` decorator)
:param int maxsize: Number of pending RPC calls to be queued.
:param int workers: Number of worker threads (default 1)
:param useenv: Passed to :class:`~p4p.server.Server`
:param conf: Passed to :class:`~p4p.server.Server`
:param isolate: Passed to :class:`~p4p.server.Server`
"""
from p4p.server import Server
import time
queue = ThreadedWorkQueue(maxsize=maxsize, workers=workers)
provider = NTURIDispatcher(queue, target=target, prefix=prefix, name=provider)
threads = []
server = Server(providers=[provider], useenv=useenv, conf=conf, isolate=isolate)
with server, queue:
while True:
time.sleep(10.0)
|
def function[quickRPCServer, parameter[provider, prefix, target, maxsize, workers, useenv, conf, isolate]]:
constant[Run an RPC server in the current thread
Calls are handled sequentially, and always in the current thread, if workers=1 (the default).
If workers>1 then calls are handled concurrently by a pool of worker threads.
Requires NTURI style argument encoding.
:param str provider: A provider name. Must be unique in this process.
:param str prefix: PV name prefix. Along with method names, must be globally unique.
:param target: The object which is exporting methods. (use the :func:`rpc` decorator)
:param int maxsize: Number of pending RPC calls to be queued.
:param int workers: Number of worker threads (default 1)
:param useenv: Passed to :class:`~p4p.server.Server`
:param conf: Passed to :class:`~p4p.server.Server`
:param isolate: Passed to :class:`~p4p.server.Server`
]
from relative_module[p4p.server] import module[Server]
import module[time]
variable[queue] assign[=] call[name[ThreadedWorkQueue], parameter[]]
variable[provider] assign[=] call[name[NTURIDispatcher], parameter[name[queue]]]
variable[threads] assign[=] list[[]]
variable[server] assign[=] call[name[Server], parameter[]]
with name[server] begin[:]
while constant[True] begin[:]
call[name[time].sleep, parameter[constant[10.0]]]
|
keyword[def] identifier[quickRPCServer] ( identifier[provider] , identifier[prefix] , identifier[target] ,
identifier[maxsize] = literal[int] ,
identifier[workers] = literal[int] ,
identifier[useenv] = keyword[True] , identifier[conf] = keyword[None] , identifier[isolate] = keyword[False] ):
literal[string]
keyword[from] identifier[p4p] . identifier[server] keyword[import] identifier[Server]
keyword[import] identifier[time]
identifier[queue] = identifier[ThreadedWorkQueue] ( identifier[maxsize] = identifier[maxsize] , identifier[workers] = identifier[workers] )
identifier[provider] = identifier[NTURIDispatcher] ( identifier[queue] , identifier[target] = identifier[target] , identifier[prefix] = identifier[prefix] , identifier[name] = identifier[provider] )
identifier[threads] =[]
identifier[server] = identifier[Server] ( identifier[providers] =[ identifier[provider] ], identifier[useenv] = identifier[useenv] , identifier[conf] = identifier[conf] , identifier[isolate] = identifier[isolate] )
keyword[with] identifier[server] , identifier[queue] :
keyword[while] keyword[True] :
identifier[time] . identifier[sleep] ( literal[int] )
|
def quickRPCServer(provider, prefix, target, maxsize=20, workers=1, useenv=True, conf=None, isolate=False):
"""Run an RPC server in the current thread
Calls are handled sequentially, and always in the current thread, if workers=1 (the default).
If workers>1 then calls are handled concurrently by a pool of worker threads.
Requires NTURI style argument encoding.
:param str provider: A provider name. Must be unique in this process.
:param str prefix: PV name prefix. Along with method names, must be globally unique.
:param target: The object which is exporting methods. (use the :func:`rpc` decorator)
:param int maxsize: Number of pending RPC calls to be queued.
:param int workers: Number of worker threads (default 1)
:param useenv: Passed to :class:`~p4p.server.Server`
:param conf: Passed to :class:`~p4p.server.Server`
:param isolate: Passed to :class:`~p4p.server.Server`
"""
from p4p.server import Server
import time
queue = ThreadedWorkQueue(maxsize=maxsize, workers=workers)
provider = NTURIDispatcher(queue, target=target, prefix=prefix, name=provider)
threads = []
server = Server(providers=[provider], useenv=useenv, conf=conf, isolate=isolate)
with server, queue:
while True:
time.sleep(10.0) # depends on [control=['while'], data=[]] # depends on [control=['with'], data=[]]
|
def cv(self, t, structure=None):
"""
Constant volume specific heat C_v at temperature T obtained from the integration of the DOS.
Only positive frequencies will be used.
Result in J/(K*mol-c). A mol-c is the abbreviation of a mole-cell, that is, the number
of Avogadro times the atoms in a unit cell. To compare with experimental data the result
should be divided by the number of unit formulas in the cell. If the structure is provided
the division is performed internally and the result is in J/(K*mol)
Args:
t: a temperature in K
structure: the structure of the system. If not None it will be used to determine the numer of
formula units
Returns:
Constant volume specific heat C_v
"""
if t == 0:
return 0
freqs = self._positive_frequencies
dens = self._positive_densities
csch2 = lambda x: 1.0 / (np.sinh(x) ** 2)
wd2kt = freqs / (2 * BOLTZ_THZ_PER_K * t)
cv = np.trapz(wd2kt ** 2 * csch2(wd2kt) * dens, x=freqs)
cv *= const.Boltzmann * const.Avogadro
if structure:
formula_units = structure.composition.num_atoms / structure.composition.reduced_composition.num_atoms
cv /= formula_units
return cv
|
def function[cv, parameter[self, t, structure]]:
constant[
Constant volume specific heat C_v at temperature T obtained from the integration of the DOS.
Only positive frequencies will be used.
Result in J/(K*mol-c). A mol-c is the abbreviation of a mole-cell, that is, the number
of Avogadro times the atoms in a unit cell. To compare with experimental data the result
should be divided by the number of unit formulas in the cell. If the structure is provided
the division is performed internally and the result is in J/(K*mol)
Args:
t: a temperature in K
structure: the structure of the system. If not None it will be used to determine the numer of
formula units
Returns:
Constant volume specific heat C_v
]
if compare[name[t] equal[==] constant[0]] begin[:]
return[constant[0]]
variable[freqs] assign[=] name[self]._positive_frequencies
variable[dens] assign[=] name[self]._positive_densities
variable[csch2] assign[=] <ast.Lambda object at 0x7da18eb543d0>
variable[wd2kt] assign[=] binary_operation[name[freqs] / binary_operation[binary_operation[constant[2] * name[BOLTZ_THZ_PER_K]] * name[t]]]
variable[cv] assign[=] call[name[np].trapz, parameter[binary_operation[binary_operation[binary_operation[name[wd2kt] ** constant[2]] * call[name[csch2], parameter[name[wd2kt]]]] * name[dens]]]]
<ast.AugAssign object at 0x7da18eb56020>
if name[structure] begin[:]
variable[formula_units] assign[=] binary_operation[name[structure].composition.num_atoms / name[structure].composition.reduced_composition.num_atoms]
<ast.AugAssign object at 0x7da18eb56680>
return[name[cv]]
|
keyword[def] identifier[cv] ( identifier[self] , identifier[t] , identifier[structure] = keyword[None] ):
literal[string]
keyword[if] identifier[t] == literal[int] :
keyword[return] literal[int]
identifier[freqs] = identifier[self] . identifier[_positive_frequencies]
identifier[dens] = identifier[self] . identifier[_positive_densities]
identifier[csch2] = keyword[lambda] identifier[x] : literal[int] /( identifier[np] . identifier[sinh] ( identifier[x] )** literal[int] )
identifier[wd2kt] = identifier[freqs] /( literal[int] * identifier[BOLTZ_THZ_PER_K] * identifier[t] )
identifier[cv] = identifier[np] . identifier[trapz] ( identifier[wd2kt] ** literal[int] * identifier[csch2] ( identifier[wd2kt] )* identifier[dens] , identifier[x] = identifier[freqs] )
identifier[cv] *= identifier[const] . identifier[Boltzmann] * identifier[const] . identifier[Avogadro]
keyword[if] identifier[structure] :
identifier[formula_units] = identifier[structure] . identifier[composition] . identifier[num_atoms] / identifier[structure] . identifier[composition] . identifier[reduced_composition] . identifier[num_atoms]
identifier[cv] /= identifier[formula_units]
keyword[return] identifier[cv]
|
def cv(self, t, structure=None):
"""
Constant volume specific heat C_v at temperature T obtained from the integration of the DOS.
Only positive frequencies will be used.
Result in J/(K*mol-c). A mol-c is the abbreviation of a mole-cell, that is, the number
of Avogadro times the atoms in a unit cell. To compare with experimental data the result
should be divided by the number of unit formulas in the cell. If the structure is provided
the division is performed internally and the result is in J/(K*mol)
Args:
t: a temperature in K
structure: the structure of the system. If not None it will be used to determine the numer of
formula units
Returns:
Constant volume specific heat C_v
"""
if t == 0:
return 0 # depends on [control=['if'], data=[]]
freqs = self._positive_frequencies
dens = self._positive_densities
csch2 = lambda x: 1.0 / np.sinh(x) ** 2
wd2kt = freqs / (2 * BOLTZ_THZ_PER_K * t)
cv = np.trapz(wd2kt ** 2 * csch2(wd2kt) * dens, x=freqs)
cv *= const.Boltzmann * const.Avogadro
if structure:
formula_units = structure.composition.num_atoms / structure.composition.reduced_composition.num_atoms
cv /= formula_units # depends on [control=['if'], data=[]]
return cv
|
def update_nodes_published(self):
""" publish or unpublish nodes of current layer """
if self.pk:
self.node_set.all().update(is_published=self.is_published)
|
def function[update_nodes_published, parameter[self]]:
constant[ publish or unpublish nodes of current layer ]
if name[self].pk begin[:]
call[call[name[self].node_set.all, parameter[]].update, parameter[]]
|
keyword[def] identifier[update_nodes_published] ( identifier[self] ):
literal[string]
keyword[if] identifier[self] . identifier[pk] :
identifier[self] . identifier[node_set] . identifier[all] (). identifier[update] ( identifier[is_published] = identifier[self] . identifier[is_published] )
|
def update_nodes_published(self):
""" publish or unpublish nodes of current layer """
if self.pk:
self.node_set.all().update(is_published=self.is_published) # depends on [control=['if'], data=[]]
|
def check_array(array, accept_sparse=None, dtype=None, order=None, copy=False,
force_all_finite=True, ensure_2d=True, allow_nd=False):
"""Input validation on an array, list, sparse matrix or similar.
By default, the input is converted to an at least 2nd numpy array.
Parameters
----------
array : object
Input object to check / convert.
accept_sparse : string, list of string or None (default=None)
String[s] representing allowed sparse matrix formats, such as 'csc',
'csr', etc. None means that sparse matrix input will raise an error.
If the input is sparse but not in the allowed format, it will be
converted to the first listed format.
order : 'F', 'C' or None (default=None)
Whether an array will be forced to be fortran or c-style.
copy : boolean (default=False)
Whether a forced copy will be triggered. If copy=False, a copy might
be triggered by a conversion.
force_all_finite : boolean (default=True)
Whether to raise an error on np.inf and np.nan in X.
ensure_2d : boolean (default=True)
Whether to make X at least 2d.
allow_nd : boolean (default=False)
Whether to allow X.ndim > 2.
Returns
-------
X_converted : object
The converted and validated X.
"""
if isinstance(accept_sparse, str):
accept_sparse = [accept_sparse]
if sp.issparse(array):
array = _ensure_sparse_format(array, accept_sparse, dtype, order,
copy, force_all_finite)
else:
if ensure_2d:
array = np.atleast_2d(array)
array = np.array(array, dtype=dtype, order=order, copy=copy)
if not allow_nd and array.ndim >= 3:
raise ValueError("Found array with dim %d. Expected <= 2" %
array.ndim)
if force_all_finite:
_assert_all_finite(array)
return array
|
def function[check_array, parameter[array, accept_sparse, dtype, order, copy, force_all_finite, ensure_2d, allow_nd]]:
constant[Input validation on an array, list, sparse matrix or similar.
By default, the input is converted to an at least 2nd numpy array.
Parameters
----------
array : object
Input object to check / convert.
accept_sparse : string, list of string or None (default=None)
String[s] representing allowed sparse matrix formats, such as 'csc',
'csr', etc. None means that sparse matrix input will raise an error.
If the input is sparse but not in the allowed format, it will be
converted to the first listed format.
order : 'F', 'C' or None (default=None)
Whether an array will be forced to be fortran or c-style.
copy : boolean (default=False)
Whether a forced copy will be triggered. If copy=False, a copy might
be triggered by a conversion.
force_all_finite : boolean (default=True)
Whether to raise an error on np.inf and np.nan in X.
ensure_2d : boolean (default=True)
Whether to make X at least 2d.
allow_nd : boolean (default=False)
Whether to allow X.ndim > 2.
Returns
-------
X_converted : object
The converted and validated X.
]
if call[name[isinstance], parameter[name[accept_sparse], name[str]]] begin[:]
variable[accept_sparse] assign[=] list[[<ast.Name object at 0x7da1afef8cd0>]]
if call[name[sp].issparse, parameter[name[array]]] begin[:]
variable[array] assign[=] call[name[_ensure_sparse_format], parameter[name[array], name[accept_sparse], name[dtype], name[order], name[copy], name[force_all_finite]]]
return[name[array]]
|
keyword[def] identifier[check_array] ( identifier[array] , identifier[accept_sparse] = keyword[None] , identifier[dtype] = keyword[None] , identifier[order] = keyword[None] , identifier[copy] = keyword[False] ,
identifier[force_all_finite] = keyword[True] , identifier[ensure_2d] = keyword[True] , identifier[allow_nd] = keyword[False] ):
literal[string]
keyword[if] identifier[isinstance] ( identifier[accept_sparse] , identifier[str] ):
identifier[accept_sparse] =[ identifier[accept_sparse] ]
keyword[if] identifier[sp] . identifier[issparse] ( identifier[array] ):
identifier[array] = identifier[_ensure_sparse_format] ( identifier[array] , identifier[accept_sparse] , identifier[dtype] , identifier[order] ,
identifier[copy] , identifier[force_all_finite] )
keyword[else] :
keyword[if] identifier[ensure_2d] :
identifier[array] = identifier[np] . identifier[atleast_2d] ( identifier[array] )
identifier[array] = identifier[np] . identifier[array] ( identifier[array] , identifier[dtype] = identifier[dtype] , identifier[order] = identifier[order] , identifier[copy] = identifier[copy] )
keyword[if] keyword[not] identifier[allow_nd] keyword[and] identifier[array] . identifier[ndim] >= literal[int] :
keyword[raise] identifier[ValueError] ( literal[string] %
identifier[array] . identifier[ndim] )
keyword[if] identifier[force_all_finite] :
identifier[_assert_all_finite] ( identifier[array] )
keyword[return] identifier[array]
|
def check_array(array, accept_sparse=None, dtype=None, order=None, copy=False, force_all_finite=True, ensure_2d=True, allow_nd=False):
"""Input validation on an array, list, sparse matrix or similar.
By default, the input is converted to an at least 2nd numpy array.
Parameters
----------
array : object
Input object to check / convert.
accept_sparse : string, list of string or None (default=None)
String[s] representing allowed sparse matrix formats, such as 'csc',
'csr', etc. None means that sparse matrix input will raise an error.
If the input is sparse but not in the allowed format, it will be
converted to the first listed format.
order : 'F', 'C' or None (default=None)
Whether an array will be forced to be fortran or c-style.
copy : boolean (default=False)
Whether a forced copy will be triggered. If copy=False, a copy might
be triggered by a conversion.
force_all_finite : boolean (default=True)
Whether to raise an error on np.inf and np.nan in X.
ensure_2d : boolean (default=True)
Whether to make X at least 2d.
allow_nd : boolean (default=False)
Whether to allow X.ndim > 2.
Returns
-------
X_converted : object
The converted and validated X.
"""
if isinstance(accept_sparse, str):
accept_sparse = [accept_sparse] # depends on [control=['if'], data=[]]
if sp.issparse(array):
array = _ensure_sparse_format(array, accept_sparse, dtype, order, copy, force_all_finite) # depends on [control=['if'], data=[]]
else:
if ensure_2d:
array = np.atleast_2d(array) # depends on [control=['if'], data=[]]
array = np.array(array, dtype=dtype, order=order, copy=copy)
if not allow_nd and array.ndim >= 3:
raise ValueError('Found array with dim %d. Expected <= 2' % array.ndim) # depends on [control=['if'], data=[]]
if force_all_finite:
_assert_all_finite(array) # depends on [control=['if'], data=[]]
return array
|
def _process_mark_toggle(self, p_todo_id, p_force=None):
"""
Adds p_todo_id to marked_todos attribute and returns True if p_todo_id
is not already marked. Removes p_todo_id from marked_todos and returns
False otherwise.
p_force parameter accepting 'mark' or 'unmark' values, if set, can force
desired action without checking p_todo_id presence in marked_todos.
"""
if p_force in ['mark', 'unmark']:
action = p_force
else:
action = 'mark' if p_todo_id not in self.marked_todos else 'unmark'
if action == 'mark':
self.marked_todos.add(p_todo_id)
return True
else:
self.marked_todos.remove(p_todo_id)
return False
|
def function[_process_mark_toggle, parameter[self, p_todo_id, p_force]]:
constant[
Adds p_todo_id to marked_todos attribute and returns True if p_todo_id
is not already marked. Removes p_todo_id from marked_todos and returns
False otherwise.
p_force parameter accepting 'mark' or 'unmark' values, if set, can force
desired action without checking p_todo_id presence in marked_todos.
]
if compare[name[p_force] in list[[<ast.Constant object at 0x7da1b2345810>, <ast.Constant object at 0x7da1b2347940>]]] begin[:]
variable[action] assign[=] name[p_force]
if compare[name[action] equal[==] constant[mark]] begin[:]
call[name[self].marked_todos.add, parameter[name[p_todo_id]]]
return[constant[True]]
|
keyword[def] identifier[_process_mark_toggle] ( identifier[self] , identifier[p_todo_id] , identifier[p_force] = keyword[None] ):
literal[string]
keyword[if] identifier[p_force] keyword[in] [ literal[string] , literal[string] ]:
identifier[action] = identifier[p_force]
keyword[else] :
identifier[action] = literal[string] keyword[if] identifier[p_todo_id] keyword[not] keyword[in] identifier[self] . identifier[marked_todos] keyword[else] literal[string]
keyword[if] identifier[action] == literal[string] :
identifier[self] . identifier[marked_todos] . identifier[add] ( identifier[p_todo_id] )
keyword[return] keyword[True]
keyword[else] :
identifier[self] . identifier[marked_todos] . identifier[remove] ( identifier[p_todo_id] )
keyword[return] keyword[False]
|
def _process_mark_toggle(self, p_todo_id, p_force=None):
"""
Adds p_todo_id to marked_todos attribute and returns True if p_todo_id
is not already marked. Removes p_todo_id from marked_todos and returns
False otherwise.
p_force parameter accepting 'mark' or 'unmark' values, if set, can force
desired action without checking p_todo_id presence in marked_todos.
"""
if p_force in ['mark', 'unmark']:
action = p_force # depends on [control=['if'], data=['p_force']]
else:
action = 'mark' if p_todo_id not in self.marked_todos else 'unmark'
if action == 'mark':
self.marked_todos.add(p_todo_id)
return True # depends on [control=['if'], data=[]]
else:
self.marked_todos.remove(p_todo_id)
return False
|
def _mark_refunded(self):
''' Marks the invoice as refunded, and updates the attached cart if
necessary. '''
self._release_cart()
self.invoice.status = commerce.Invoice.STATUS_REFUNDED
self.invoice.save()
|
def function[_mark_refunded, parameter[self]]:
constant[ Marks the invoice as refunded, and updates the attached cart if
necessary. ]
call[name[self]._release_cart, parameter[]]
name[self].invoice.status assign[=] name[commerce].Invoice.STATUS_REFUNDED
call[name[self].invoice.save, parameter[]]
|
keyword[def] identifier[_mark_refunded] ( identifier[self] ):
literal[string]
identifier[self] . identifier[_release_cart] ()
identifier[self] . identifier[invoice] . identifier[status] = identifier[commerce] . identifier[Invoice] . identifier[STATUS_REFUNDED]
identifier[self] . identifier[invoice] . identifier[save] ()
|
def _mark_refunded(self):
""" Marks the invoice as refunded, and updates the attached cart if
necessary. """
self._release_cart()
self.invoice.status = commerce.Invoice.STATUS_REFUNDED
self.invoice.save()
|
def get_birthday(code):
"""``get_birthday(code) -> string``
Birthday of the person whose fiscal code is 'code', in the format DD-MM-YY.
Unfortunately it's not possible to guess the four digit birth year, given
that the Italian fiscal code uses only the last two digits (1983 -> 83).
Therefore, this function returns a string and not a datetime object.
eg: birthday('RCCMNL83S18D969H') -> 18-11-83
"""
assert isvalid(code)
day = int(code[9:11])
day = day < 32 and day or day - 40
month = MONTHSCODE.index(code[8]) + 1
year = int(code[6:8])
return "%02d-%02d-%02d" % (day, month, year)
|
def function[get_birthday, parameter[code]]:
constant[``get_birthday(code) -> string``
Birthday of the person whose fiscal code is 'code', in the format DD-MM-YY.
Unfortunately it's not possible to guess the four digit birth year, given
that the Italian fiscal code uses only the last two digits (1983 -> 83).
Therefore, this function returns a string and not a datetime object.
eg: birthday('RCCMNL83S18D969H') -> 18-11-83
]
assert[call[name[isvalid], parameter[name[code]]]]
variable[day] assign[=] call[name[int], parameter[call[name[code]][<ast.Slice object at 0x7da20c6e76d0>]]]
variable[day] assign[=] <ast.BoolOp object at 0x7da20c6e4fa0>
variable[month] assign[=] binary_operation[call[name[MONTHSCODE].index, parameter[call[name[code]][constant[8]]]] + constant[1]]
variable[year] assign[=] call[name[int], parameter[call[name[code]][<ast.Slice object at 0x7da20c6e4bb0>]]]
return[binary_operation[constant[%02d-%02d-%02d] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da20c6e5870>, <ast.Name object at 0x7da20c6e7460>, <ast.Name object at 0x7da20c6e7f70>]]]]
|
keyword[def] identifier[get_birthday] ( identifier[code] ):
literal[string]
keyword[assert] identifier[isvalid] ( identifier[code] )
identifier[day] = identifier[int] ( identifier[code] [ literal[int] : literal[int] ])
identifier[day] = identifier[day] < literal[int] keyword[and] identifier[day] keyword[or] identifier[day] - literal[int]
identifier[month] = identifier[MONTHSCODE] . identifier[index] ( identifier[code] [ literal[int] ])+ literal[int]
identifier[year] = identifier[int] ( identifier[code] [ literal[int] : literal[int] ])
keyword[return] literal[string] %( identifier[day] , identifier[month] , identifier[year] )
|
def get_birthday(code):
"""``get_birthday(code) -> string``
Birthday of the person whose fiscal code is 'code', in the format DD-MM-YY.
Unfortunately it's not possible to guess the four digit birth year, given
that the Italian fiscal code uses only the last two digits (1983 -> 83).
Therefore, this function returns a string and not a datetime object.
eg: birthday('RCCMNL83S18D969H') -> 18-11-83
"""
assert isvalid(code)
day = int(code[9:11])
day = day < 32 and day or day - 40
month = MONTHSCODE.index(code[8]) + 1
year = int(code[6:8])
return '%02d-%02d-%02d' % (day, month, year)
|
def _generate_data_key(self, algorithm, encryption_context=None):
"""Generates data key and returns plaintext and ciphertext of key.
:param algorithm: Algorithm on which to base data key
:type algorithm: aws_encryption_sdk.identifiers.Algorithm
:param dict encryption_context: Encryption context to pass to KMS
:returns: Generated data key
:rtype: aws_encryption_sdk.structures.DataKey
"""
kms_params = {"KeyId": self._key_id, "NumberOfBytes": algorithm.kdf_input_len}
if encryption_context is not None:
kms_params["EncryptionContext"] = encryption_context
if self.config.grant_tokens:
kms_params["GrantTokens"] = self.config.grant_tokens
# Catch any boto3 errors and normalize to expected EncryptKeyError
try:
response = self.config.client.generate_data_key(**kms_params)
plaintext = response["Plaintext"]
ciphertext = response["CiphertextBlob"]
key_id = response["KeyId"]
except (ClientError, KeyError):
error_message = "Master Key {key_id} unable to generate data key".format(key_id=self._key_id)
_LOGGER.exception(error_message)
raise GenerateKeyError(error_message)
return DataKey(
key_provider=MasterKeyInfo(provider_id=self.provider_id, key_info=key_id),
data_key=plaintext,
encrypted_data_key=ciphertext,
)
|
def function[_generate_data_key, parameter[self, algorithm, encryption_context]]:
constant[Generates data key and returns plaintext and ciphertext of key.
:param algorithm: Algorithm on which to base data key
:type algorithm: aws_encryption_sdk.identifiers.Algorithm
:param dict encryption_context: Encryption context to pass to KMS
:returns: Generated data key
:rtype: aws_encryption_sdk.structures.DataKey
]
variable[kms_params] assign[=] dictionary[[<ast.Constant object at 0x7da204620130>, <ast.Constant object at 0x7da204622dd0>], [<ast.Attribute object at 0x7da204620a00>, <ast.Attribute object at 0x7da204622c50>]]
if compare[name[encryption_context] is_not constant[None]] begin[:]
call[name[kms_params]][constant[EncryptionContext]] assign[=] name[encryption_context]
if name[self].config.grant_tokens begin[:]
call[name[kms_params]][constant[GrantTokens]] assign[=] name[self].config.grant_tokens
<ast.Try object at 0x7da2046205e0>
return[call[name[DataKey], parameter[]]]
|
keyword[def] identifier[_generate_data_key] ( identifier[self] , identifier[algorithm] , identifier[encryption_context] = keyword[None] ):
literal[string]
identifier[kms_params] ={ literal[string] : identifier[self] . identifier[_key_id] , literal[string] : identifier[algorithm] . identifier[kdf_input_len] }
keyword[if] identifier[encryption_context] keyword[is] keyword[not] keyword[None] :
identifier[kms_params] [ literal[string] ]= identifier[encryption_context]
keyword[if] identifier[self] . identifier[config] . identifier[grant_tokens] :
identifier[kms_params] [ literal[string] ]= identifier[self] . identifier[config] . identifier[grant_tokens]
keyword[try] :
identifier[response] = identifier[self] . identifier[config] . identifier[client] . identifier[generate_data_key] (** identifier[kms_params] )
identifier[plaintext] = identifier[response] [ literal[string] ]
identifier[ciphertext] = identifier[response] [ literal[string] ]
identifier[key_id] = identifier[response] [ literal[string] ]
keyword[except] ( identifier[ClientError] , identifier[KeyError] ):
identifier[error_message] = literal[string] . identifier[format] ( identifier[key_id] = identifier[self] . identifier[_key_id] )
identifier[_LOGGER] . identifier[exception] ( identifier[error_message] )
keyword[raise] identifier[GenerateKeyError] ( identifier[error_message] )
keyword[return] identifier[DataKey] (
identifier[key_provider] = identifier[MasterKeyInfo] ( identifier[provider_id] = identifier[self] . identifier[provider_id] , identifier[key_info] = identifier[key_id] ),
identifier[data_key] = identifier[plaintext] ,
identifier[encrypted_data_key] = identifier[ciphertext] ,
)
|
def _generate_data_key(self, algorithm, encryption_context=None):
"""Generates data key and returns plaintext and ciphertext of key.
:param algorithm: Algorithm on which to base data key
:type algorithm: aws_encryption_sdk.identifiers.Algorithm
:param dict encryption_context: Encryption context to pass to KMS
:returns: Generated data key
:rtype: aws_encryption_sdk.structures.DataKey
"""
kms_params = {'KeyId': self._key_id, 'NumberOfBytes': algorithm.kdf_input_len}
if encryption_context is not None:
kms_params['EncryptionContext'] = encryption_context # depends on [control=['if'], data=['encryption_context']]
if self.config.grant_tokens:
kms_params['GrantTokens'] = self.config.grant_tokens # depends on [control=['if'], data=[]]
# Catch any boto3 errors and normalize to expected EncryptKeyError
try:
response = self.config.client.generate_data_key(**kms_params)
plaintext = response['Plaintext']
ciphertext = response['CiphertextBlob']
key_id = response['KeyId'] # depends on [control=['try'], data=[]]
except (ClientError, KeyError):
error_message = 'Master Key {key_id} unable to generate data key'.format(key_id=self._key_id)
_LOGGER.exception(error_message)
raise GenerateKeyError(error_message) # depends on [control=['except'], data=[]]
return DataKey(key_provider=MasterKeyInfo(provider_id=self.provider_id, key_info=key_id), data_key=plaintext, encrypted_data_key=ciphertext)
|
def _build_env(venv_dir):
"""Create a new virtual environment in `venv_dir`.
This uses the base prefix of any virtual environment that you may be using
when you call this.
"""
# NB: We had to create the because the venv modules wasn't doing what we
# needed. In particular, if we used it create a venv from an existing venv,
# it *always* created symlinks back to the original venv's python
# executables. Then, when you used those linked executables, you ended up
# interacting with the original venv. I could find no way around this, hence
# this function.
prefix = getattr(sys, 'real_prefix', sys.prefix)
python = Path(prefix) / 'bin' / 'python'
command = '{} -m venv {}'.format(python, venv_dir)
try:
log.info('Creating virtual environment: %s', command)
subprocess.run(command.split(),
stdout=subprocess.PIPE,
stderr=subprocess.STDOUT,
check=True)
except subprocess.CalledProcessError as exc:
log.error("Error creating virtual environment: %s", exc.output)
raise
|
def function[_build_env, parameter[venv_dir]]:
constant[Create a new virtual environment in `venv_dir`.
This uses the base prefix of any virtual environment that you may be using
when you call this.
]
variable[prefix] assign[=] call[name[getattr], parameter[name[sys], constant[real_prefix], name[sys].prefix]]
variable[python] assign[=] binary_operation[binary_operation[call[name[Path], parameter[name[prefix]]] / constant[bin]] / constant[python]]
variable[command] assign[=] call[constant[{} -m venv {}].format, parameter[name[python], name[venv_dir]]]
<ast.Try object at 0x7da204347910>
|
keyword[def] identifier[_build_env] ( identifier[venv_dir] ):
literal[string]
identifier[prefix] = identifier[getattr] ( identifier[sys] , literal[string] , identifier[sys] . identifier[prefix] )
identifier[python] = identifier[Path] ( identifier[prefix] )/ literal[string] / literal[string]
identifier[command] = literal[string] . identifier[format] ( identifier[python] , identifier[venv_dir] )
keyword[try] :
identifier[log] . identifier[info] ( literal[string] , identifier[command] )
identifier[subprocess] . identifier[run] ( identifier[command] . identifier[split] (),
identifier[stdout] = identifier[subprocess] . identifier[PIPE] ,
identifier[stderr] = identifier[subprocess] . identifier[STDOUT] ,
identifier[check] = keyword[True] )
keyword[except] identifier[subprocess] . identifier[CalledProcessError] keyword[as] identifier[exc] :
identifier[log] . identifier[error] ( literal[string] , identifier[exc] . identifier[output] )
keyword[raise]
|
def _build_env(venv_dir):
"""Create a new virtual environment in `venv_dir`.
This uses the base prefix of any virtual environment that you may be using
when you call this.
"""
# NB: We had to create the because the venv modules wasn't doing what we
# needed. In particular, if we used it create a venv from an existing venv,
# it *always* created symlinks back to the original venv's python
# executables. Then, when you used those linked executables, you ended up
# interacting with the original venv. I could find no way around this, hence
# this function.
prefix = getattr(sys, 'real_prefix', sys.prefix)
python = Path(prefix) / 'bin' / 'python'
command = '{} -m venv {}'.format(python, venv_dir)
try:
log.info('Creating virtual environment: %s', command)
subprocess.run(command.split(), stdout=subprocess.PIPE, stderr=subprocess.STDOUT, check=True) # depends on [control=['try'], data=[]]
except subprocess.CalledProcessError as exc:
log.error('Error creating virtual environment: %s', exc.output)
raise # depends on [control=['except'], data=['exc']]
|
def unicode_body(self, ignore_errors=True, fix_special_entities=True):
"""
Return response body as unicode string.
"""
if not self._unicode_body:
self._unicode_body = self.convert_body_to_unicode(
body=self.body,
bom=self.bom,
charset=self.charset,
ignore_errors=ignore_errors,
fix_special_entities=fix_special_entities,
)
return self._unicode_body
|
def function[unicode_body, parameter[self, ignore_errors, fix_special_entities]]:
constant[
Return response body as unicode string.
]
if <ast.UnaryOp object at 0x7da1b184dd50> begin[:]
name[self]._unicode_body assign[=] call[name[self].convert_body_to_unicode, parameter[]]
return[name[self]._unicode_body]
|
keyword[def] identifier[unicode_body] ( identifier[self] , identifier[ignore_errors] = keyword[True] , identifier[fix_special_entities] = keyword[True] ):
literal[string]
keyword[if] keyword[not] identifier[self] . identifier[_unicode_body] :
identifier[self] . identifier[_unicode_body] = identifier[self] . identifier[convert_body_to_unicode] (
identifier[body] = identifier[self] . identifier[body] ,
identifier[bom] = identifier[self] . identifier[bom] ,
identifier[charset] = identifier[self] . identifier[charset] ,
identifier[ignore_errors] = identifier[ignore_errors] ,
identifier[fix_special_entities] = identifier[fix_special_entities] ,
)
keyword[return] identifier[self] . identifier[_unicode_body]
|
def unicode_body(self, ignore_errors=True, fix_special_entities=True):
"""
Return response body as unicode string.
"""
if not self._unicode_body:
self._unicode_body = self.convert_body_to_unicode(body=self.body, bom=self.bom, charset=self.charset, ignore_errors=ignore_errors, fix_special_entities=fix_special_entities) # depends on [control=['if'], data=[]]
return self._unicode_body
|
def _parseDebugDirectory(self, rva, size, magic = consts.PE32):
"""
Parses the C{IMAGE_DEBUG_DIRECTORY} directory.
@see: U{http://msdn.microsoft.com/es-es/library/windows/desktop/ms680307(v=vs.85).aspx}
@type rva: int
@param rva: The RVA where the C{IMAGE_DEBUG_DIRECTORY} directory starts.
@type size: int
@param size: The size of the C{IMAGE_DEBUG_DIRECTORY} directory.
@type magic: int
@param magic: (Optional) The type of PE. This value could be L{consts.PE32} or L{consts.PE64}.
@rtype: L{ImageDebugDirectory}
@return: A new L{ImageDebugDirectory} object.
"""
debugDirData = self.getDataAtRva(rva, size)
numberOfEntries = size / consts.SIZEOF_IMAGE_DEBUG_ENTRY32
rd = utils.ReadData(debugDirData)
return directories.ImageDebugDirectories.parse(rd, numberOfEntries)
|
def function[_parseDebugDirectory, parameter[self, rva, size, magic]]:
constant[
Parses the C{IMAGE_DEBUG_DIRECTORY} directory.
@see: U{http://msdn.microsoft.com/es-es/library/windows/desktop/ms680307(v=vs.85).aspx}
@type rva: int
@param rva: The RVA where the C{IMAGE_DEBUG_DIRECTORY} directory starts.
@type size: int
@param size: The size of the C{IMAGE_DEBUG_DIRECTORY} directory.
@type magic: int
@param magic: (Optional) The type of PE. This value could be L{consts.PE32} or L{consts.PE64}.
@rtype: L{ImageDebugDirectory}
@return: A new L{ImageDebugDirectory} object.
]
variable[debugDirData] assign[=] call[name[self].getDataAtRva, parameter[name[rva], name[size]]]
variable[numberOfEntries] assign[=] binary_operation[name[size] / name[consts].SIZEOF_IMAGE_DEBUG_ENTRY32]
variable[rd] assign[=] call[name[utils].ReadData, parameter[name[debugDirData]]]
return[call[name[directories].ImageDebugDirectories.parse, parameter[name[rd], name[numberOfEntries]]]]
|
keyword[def] identifier[_parseDebugDirectory] ( identifier[self] , identifier[rva] , identifier[size] , identifier[magic] = identifier[consts] . identifier[PE32] ):
literal[string]
identifier[debugDirData] = identifier[self] . identifier[getDataAtRva] ( identifier[rva] , identifier[size] )
identifier[numberOfEntries] = identifier[size] / identifier[consts] . identifier[SIZEOF_IMAGE_DEBUG_ENTRY32]
identifier[rd] = identifier[utils] . identifier[ReadData] ( identifier[debugDirData] )
keyword[return] identifier[directories] . identifier[ImageDebugDirectories] . identifier[parse] ( identifier[rd] , identifier[numberOfEntries] )
|
def _parseDebugDirectory(self, rva, size, magic=consts.PE32):
"""
Parses the C{IMAGE_DEBUG_DIRECTORY} directory.
@see: U{http://msdn.microsoft.com/es-es/library/windows/desktop/ms680307(v=vs.85).aspx}
@type rva: int
@param rva: The RVA where the C{IMAGE_DEBUG_DIRECTORY} directory starts.
@type size: int
@param size: The size of the C{IMAGE_DEBUG_DIRECTORY} directory.
@type magic: int
@param magic: (Optional) The type of PE. This value could be L{consts.PE32} or L{consts.PE64}.
@rtype: L{ImageDebugDirectory}
@return: A new L{ImageDebugDirectory} object.
"""
debugDirData = self.getDataAtRva(rva, size)
numberOfEntries = size / consts.SIZEOF_IMAGE_DEBUG_ENTRY32
rd = utils.ReadData(debugDirData)
return directories.ImageDebugDirectories.parse(rd, numberOfEntries)
|
def itemsize(self):
""" Individual item sizes """
return self._items[:self._count, 1] - self._items[:self._count, 0]
|
def function[itemsize, parameter[self]]:
constant[ Individual item sizes ]
return[binary_operation[call[name[self]._items][tuple[[<ast.Slice object at 0x7da1b0eadcc0>, <ast.Constant object at 0x7da1b0eadf60>]]] - call[name[self]._items][tuple[[<ast.Slice object at 0x7da1b0eadc60>, <ast.Constant object at 0x7da1b0eadf30>]]]]]
|
keyword[def] identifier[itemsize] ( identifier[self] ):
literal[string]
keyword[return] identifier[self] . identifier[_items] [: identifier[self] . identifier[_count] , literal[int] ]- identifier[self] . identifier[_items] [: identifier[self] . identifier[_count] , literal[int] ]
|
def itemsize(self):
""" Individual item sizes """
return self._items[:self._count, 1] - self._items[:self._count, 0]
|
def comb_indices(n, k):
"""``n``-dimensional version of itertools.combinations.
Args:
a (np.ndarray): The array from which to get combinations.
k (int): The desired length of the combinations.
Returns:
np.ndarray: Indices that give the ``k``-combinations of ``n`` elements.
Example:
>>> n, k = 3, 2
>>> data = np.arange(6).reshape(2, 3)
>>> data[:, comb_indices(n, k)]
array([[[0, 1],
[0, 2],
[1, 2]],
<BLANKLINE>
[[3, 4],
[3, 5],
[4, 5]]])
"""
# Count the number of combinations for preallocation
count = comb(n, k, exact=True)
# Get numpy iterable from ``itertools.combinations``
indices = np.fromiter(
chain.from_iterable(combinations(range(n), k)),
int,
count=(count * k))
# Reshape output into the array of combination indicies
return indices.reshape(-1, k)
|
def function[comb_indices, parameter[n, k]]:
constant[``n``-dimensional version of itertools.combinations.
Args:
a (np.ndarray): The array from which to get combinations.
k (int): The desired length of the combinations.
Returns:
np.ndarray: Indices that give the ``k``-combinations of ``n`` elements.
Example:
>>> n, k = 3, 2
>>> data = np.arange(6).reshape(2, 3)
>>> data[:, comb_indices(n, k)]
array([[[0, 1],
[0, 2],
[1, 2]],
<BLANKLINE>
[[3, 4],
[3, 5],
[4, 5]]])
]
variable[count] assign[=] call[name[comb], parameter[name[n], name[k]]]
variable[indices] assign[=] call[name[np].fromiter, parameter[call[name[chain].from_iterable, parameter[call[name[combinations], parameter[call[name[range], parameter[name[n]]], name[k]]]]], name[int]]]
return[call[name[indices].reshape, parameter[<ast.UnaryOp object at 0x7da18ede6aa0>, name[k]]]]
|
keyword[def] identifier[comb_indices] ( identifier[n] , identifier[k] ):
literal[string]
identifier[count] = identifier[comb] ( identifier[n] , identifier[k] , identifier[exact] = keyword[True] )
identifier[indices] = identifier[np] . identifier[fromiter] (
identifier[chain] . identifier[from_iterable] ( identifier[combinations] ( identifier[range] ( identifier[n] ), identifier[k] )),
identifier[int] ,
identifier[count] =( identifier[count] * identifier[k] ))
keyword[return] identifier[indices] . identifier[reshape] (- literal[int] , identifier[k] )
|
def comb_indices(n, k):
"""``n``-dimensional version of itertools.combinations.
Args:
a (np.ndarray): The array from which to get combinations.
k (int): The desired length of the combinations.
Returns:
np.ndarray: Indices that give the ``k``-combinations of ``n`` elements.
Example:
>>> n, k = 3, 2
>>> data = np.arange(6).reshape(2, 3)
>>> data[:, comb_indices(n, k)]
array([[[0, 1],
[0, 2],
[1, 2]],
<BLANKLINE>
[[3, 4],
[3, 5],
[4, 5]]])
"""
# Count the number of combinations for preallocation
count = comb(n, k, exact=True)
# Get numpy iterable from ``itertools.combinations``
indices = np.fromiter(chain.from_iterable(combinations(range(n), k)), int, count=count * k)
# Reshape output into the array of combination indicies
return indices.reshape(-1, k)
|
def ipv6_is_defined(address):
"""
The function for checking if an IPv6 address is defined (does not need to
be resolved).
Args:
address (:obj:`str`): An IPv6 address.
Returns:
namedtuple:
:is_defined (bool): True if given address is defined, otherwise
False
:ietf_name (str): IETF assignment name if given address is
defined, otherwise ''
:ietf_rfc (str): IETF assignment RFC if given address is defined,
otherwise ''
"""
# Initialize the IP address object.
query_ip = IPv6Address(str(address))
# Initialize the results named tuple
results = namedtuple('ipv6_is_defined_results', 'is_defined, ietf_name, '
'ietf_rfc')
# Multicast
if query_ip.is_multicast:
return results(True, 'Multicast', 'RFC 4291, Section 2.7')
# Unspecified
elif query_ip.is_unspecified:
return results(True, 'Unspecified', 'RFC 4291, Section 2.5.2')
# Loopback.
elif query_ip.is_loopback:
return results(True, 'Loopback', 'RFC 4291, Section 2.5.3')
# Reserved
elif query_ip.is_reserved:
return results(True, 'Reserved', 'RFC 4291')
# Link-Local
elif query_ip.is_link_local:
return results(True, 'Link-Local', 'RFC 4291, Section 2.5.6')
# Site-Local
elif query_ip.is_site_local:
return results(True, 'Site-Local', 'RFC 4291, Section 2.5.7')
# Unique Local Unicast
elif query_ip.is_private:
return results(True, 'Unique Local Unicast', 'RFC 4193')
return results(False, '', '')
|
def function[ipv6_is_defined, parameter[address]]:
constant[
The function for checking if an IPv6 address is defined (does not need to
be resolved).
Args:
address (:obj:`str`): An IPv6 address.
Returns:
namedtuple:
:is_defined (bool): True if given address is defined, otherwise
False
:ietf_name (str): IETF assignment name if given address is
defined, otherwise ''
:ietf_rfc (str): IETF assignment RFC if given address is defined,
otherwise ''
]
variable[query_ip] assign[=] call[name[IPv6Address], parameter[call[name[str], parameter[name[address]]]]]
variable[results] assign[=] call[name[namedtuple], parameter[constant[ipv6_is_defined_results], constant[is_defined, ietf_name, ietf_rfc]]]
if name[query_ip].is_multicast begin[:]
return[call[name[results], parameter[constant[True], constant[Multicast], constant[RFC 4291, Section 2.7]]]]
return[call[name[results], parameter[constant[False], constant[], constant[]]]]
|
keyword[def] identifier[ipv6_is_defined] ( identifier[address] ):
literal[string]
identifier[query_ip] = identifier[IPv6Address] ( identifier[str] ( identifier[address] ))
identifier[results] = identifier[namedtuple] ( literal[string] , literal[string]
literal[string] )
keyword[if] identifier[query_ip] . identifier[is_multicast] :
keyword[return] identifier[results] ( keyword[True] , literal[string] , literal[string] )
keyword[elif] identifier[query_ip] . identifier[is_unspecified] :
keyword[return] identifier[results] ( keyword[True] , literal[string] , literal[string] )
keyword[elif] identifier[query_ip] . identifier[is_loopback] :
keyword[return] identifier[results] ( keyword[True] , literal[string] , literal[string] )
keyword[elif] identifier[query_ip] . identifier[is_reserved] :
keyword[return] identifier[results] ( keyword[True] , literal[string] , literal[string] )
keyword[elif] identifier[query_ip] . identifier[is_link_local] :
keyword[return] identifier[results] ( keyword[True] , literal[string] , literal[string] )
keyword[elif] identifier[query_ip] . identifier[is_site_local] :
keyword[return] identifier[results] ( keyword[True] , literal[string] , literal[string] )
keyword[elif] identifier[query_ip] . identifier[is_private] :
keyword[return] identifier[results] ( keyword[True] , literal[string] , literal[string] )
keyword[return] identifier[results] ( keyword[False] , literal[string] , literal[string] )
|
def ipv6_is_defined(address):
"""
The function for checking if an IPv6 address is defined (does not need to
be resolved).
Args:
address (:obj:`str`): An IPv6 address.
Returns:
namedtuple:
:is_defined (bool): True if given address is defined, otherwise
False
:ietf_name (str): IETF assignment name if given address is
defined, otherwise ''
:ietf_rfc (str): IETF assignment RFC if given address is defined,
otherwise ''
"""
# Initialize the IP address object.
query_ip = IPv6Address(str(address))
# Initialize the results named tuple
results = namedtuple('ipv6_is_defined_results', 'is_defined, ietf_name, ietf_rfc')
# Multicast
if query_ip.is_multicast:
return results(True, 'Multicast', 'RFC 4291, Section 2.7') # depends on [control=['if'], data=[]]
# Unspecified
elif query_ip.is_unspecified:
return results(True, 'Unspecified', 'RFC 4291, Section 2.5.2') # depends on [control=['if'], data=[]]
# Loopback.
elif query_ip.is_loopback:
return results(True, 'Loopback', 'RFC 4291, Section 2.5.3') # depends on [control=['if'], data=[]]
# Reserved
elif query_ip.is_reserved:
return results(True, 'Reserved', 'RFC 4291') # depends on [control=['if'], data=[]]
# Link-Local
elif query_ip.is_link_local:
return results(True, 'Link-Local', 'RFC 4291, Section 2.5.6') # depends on [control=['if'], data=[]]
# Site-Local
elif query_ip.is_site_local:
return results(True, 'Site-Local', 'RFC 4291, Section 2.5.7') # depends on [control=['if'], data=[]]
# Unique Local Unicast
elif query_ip.is_private:
return results(True, 'Unique Local Unicast', 'RFC 4193') # depends on [control=['if'], data=[]]
return results(False, '', '')
|
def validate(config):
'''
Validate the beacon configuration
'''
valid = True
messages = []
if not isinstance(config, list):
valid = False
messages.append('[-] Configuration for %s beacon must be a list', config)
else:
_config = {}
list(map(_config.update, config))
try:
sites = _config.get('sites', {})
except AttributeError:
valid = False
messages.append('[-] Sites for %s beacon must be a dict', __virtualname__)
if not sites:
valid = False
messages.append('[-] Configuration does not contain sites')
for site, settings in sites.items():
if required_site_attributes.isdisjoint(set(settings.keys())):
valid = False
messages.append('[-] Sites for {} beacon requires {}'.format(__virtualname__,
required_site_attributes))
log.debug('[+] site: %s', site)
log.debug('[+] settings: %s', settings)
for optional_attrs in itertools.chain(settings.get(attr, []) for attr in optional_site_attributes):
for item in optional_attrs:
cmp = item.get('comp')
if cmp and cmp not in comparisons:
valid = False
messages.append('[-] Invalid comparison operator %s', cmp)
messages.append('[+] Valid beacon configuration')
return valid, messages
|
def function[validate, parameter[config]]:
constant[
Validate the beacon configuration
]
variable[valid] assign[=] constant[True]
variable[messages] assign[=] list[[]]
if <ast.UnaryOp object at 0x7da1b1c19b10> begin[:]
variable[valid] assign[=] constant[False]
call[name[messages].append, parameter[constant[[-] Configuration for %s beacon must be a list], name[config]]]
<ast.Try object at 0x7da1b1c18dc0>
if <ast.UnaryOp object at 0x7da1b1c18d60> begin[:]
variable[valid] assign[=] constant[False]
call[name[messages].append, parameter[constant[[-] Configuration does not contain sites]]]
for taget[tuple[[<ast.Name object at 0x7da1b1c1b2e0>, <ast.Name object at 0x7da1b1c18f40>]]] in starred[call[name[sites].items, parameter[]]] begin[:]
if call[name[required_site_attributes].isdisjoint, parameter[call[name[set], parameter[call[name[settings].keys, parameter[]]]]]] begin[:]
variable[valid] assign[=] constant[False]
call[name[messages].append, parameter[call[constant[[-] Sites for {} beacon requires {}].format, parameter[name[__virtualname__], name[required_site_attributes]]]]]
call[name[log].debug, parameter[constant[[+] site: %s], name[site]]]
call[name[log].debug, parameter[constant[[+] settings: %s], name[settings]]]
for taget[name[optional_attrs]] in starred[call[name[itertools].chain, parameter[<ast.GeneratorExp object at 0x7da1b1c1a830>]]] begin[:]
for taget[name[item]] in starred[name[optional_attrs]] begin[:]
variable[cmp] assign[=] call[name[item].get, parameter[constant[comp]]]
if <ast.BoolOp object at 0x7da1b1c23fd0> begin[:]
variable[valid] assign[=] constant[False]
call[name[messages].append, parameter[constant[[-] Invalid comparison operator %s], name[cmp]]]
call[name[messages].append, parameter[constant[[+] Valid beacon configuration]]]
return[tuple[[<ast.Name object at 0x7da1b1c21ae0>, <ast.Name object at 0x7da1b1c21b70>]]]
|
keyword[def] identifier[validate] ( identifier[config] ):
literal[string]
identifier[valid] = keyword[True]
identifier[messages] =[]
keyword[if] keyword[not] identifier[isinstance] ( identifier[config] , identifier[list] ):
identifier[valid] = keyword[False]
identifier[messages] . identifier[append] ( literal[string] , identifier[config] )
keyword[else] :
identifier[_config] ={}
identifier[list] ( identifier[map] ( identifier[_config] . identifier[update] , identifier[config] ))
keyword[try] :
identifier[sites] = identifier[_config] . identifier[get] ( literal[string] ,{})
keyword[except] identifier[AttributeError] :
identifier[valid] = keyword[False]
identifier[messages] . identifier[append] ( literal[string] , identifier[__virtualname__] )
keyword[if] keyword[not] identifier[sites] :
identifier[valid] = keyword[False]
identifier[messages] . identifier[append] ( literal[string] )
keyword[for] identifier[site] , identifier[settings] keyword[in] identifier[sites] . identifier[items] ():
keyword[if] identifier[required_site_attributes] . identifier[isdisjoint] ( identifier[set] ( identifier[settings] . identifier[keys] ())):
identifier[valid] = keyword[False]
identifier[messages] . identifier[append] ( literal[string] . identifier[format] ( identifier[__virtualname__] ,
identifier[required_site_attributes] ))
identifier[log] . identifier[debug] ( literal[string] , identifier[site] )
identifier[log] . identifier[debug] ( literal[string] , identifier[settings] )
keyword[for] identifier[optional_attrs] keyword[in] identifier[itertools] . identifier[chain] ( identifier[settings] . identifier[get] ( identifier[attr] ,[]) keyword[for] identifier[attr] keyword[in] identifier[optional_site_attributes] ):
keyword[for] identifier[item] keyword[in] identifier[optional_attrs] :
identifier[cmp] = identifier[item] . identifier[get] ( literal[string] )
keyword[if] identifier[cmp] keyword[and] identifier[cmp] keyword[not] keyword[in] identifier[comparisons] :
identifier[valid] = keyword[False]
identifier[messages] . identifier[append] ( literal[string] , identifier[cmp] )
identifier[messages] . identifier[append] ( literal[string] )
keyword[return] identifier[valid] , identifier[messages]
|
def validate(config):
"""
Validate the beacon configuration
"""
valid = True
messages = []
if not isinstance(config, list):
valid = False
messages.append('[-] Configuration for %s beacon must be a list', config) # depends on [control=['if'], data=[]]
else:
_config = {}
list(map(_config.update, config))
try:
sites = _config.get('sites', {}) # depends on [control=['try'], data=[]]
except AttributeError:
valid = False
messages.append('[-] Sites for %s beacon must be a dict', __virtualname__) # depends on [control=['except'], data=[]]
if not sites:
valid = False
messages.append('[-] Configuration does not contain sites') # depends on [control=['if'], data=[]]
for (site, settings) in sites.items():
if required_site_attributes.isdisjoint(set(settings.keys())):
valid = False
messages.append('[-] Sites for {} beacon requires {}'.format(__virtualname__, required_site_attributes)) # depends on [control=['if'], data=[]]
log.debug('[+] site: %s', site)
log.debug('[+] settings: %s', settings)
for optional_attrs in itertools.chain((settings.get(attr, []) for attr in optional_site_attributes)):
for item in optional_attrs:
cmp = item.get('comp')
if cmp and cmp not in comparisons:
valid = False
messages.append('[-] Invalid comparison operator %s', cmp) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['item']] # depends on [control=['for'], data=['optional_attrs']] # depends on [control=['for'], data=[]]
messages.append('[+] Valid beacon configuration')
return (valid, messages)
|
def save(self):
"""
Saves changes made to the locally cached DesignDocument object's data
structures to the remote database. If the design document does not
exist remotely then it is created in the remote database. If the object
does exist remotely then the design document is updated remotely. In
either case the locally cached DesignDocument object is also updated
accordingly based on the successful response of the operation.
"""
if self.views:
if self.get('language', None) != QUERY_LANGUAGE:
for view_name, view in self.iterviews():
if isinstance(view, QueryIndexView):
raise CloudantDesignDocumentException(104, view_name)
else:
for view_name, view in self.iterviews():
if not isinstance(view, QueryIndexView):
raise CloudantDesignDocumentException(105, view_name)
if self.indexes:
if self.get('language', None) != QUERY_LANGUAGE:
for index_name, search in self.iterindexes():
# Check the instance of the javascript search function
if not isinstance(search['index'], STRTYPE):
raise CloudantDesignDocumentException(106, index_name)
else:
for index_name, index in self.iterindexes():
if not isinstance(index['index'], dict):
raise CloudantDesignDocumentException(107, index_name)
for prop in self._nested_object_names:
if not getattr(self, prop):
# Ensure empty dict for each sub-object is not saved remotely.
self.__delitem__(prop)
super(DesignDocument, self).save()
for prop in self._nested_object_names:
# Ensure views, indexes, and lists dict exist in locally cached DesignDocument.
getattr(self, prop, self.setdefault(prop, dict()))
|
def function[save, parameter[self]]:
constant[
Saves changes made to the locally cached DesignDocument object's data
structures to the remote database. If the design document does not
exist remotely then it is created in the remote database. If the object
does exist remotely then the design document is updated remotely. In
either case the locally cached DesignDocument object is also updated
accordingly based on the successful response of the operation.
]
if name[self].views begin[:]
if compare[call[name[self].get, parameter[constant[language], constant[None]]] not_equal[!=] name[QUERY_LANGUAGE]] begin[:]
for taget[tuple[[<ast.Name object at 0x7da18fe92020>, <ast.Name object at 0x7da18fe92710>]]] in starred[call[name[self].iterviews, parameter[]]] begin[:]
if call[name[isinstance], parameter[name[view], name[QueryIndexView]]] begin[:]
<ast.Raise object at 0x7da18fe91b10>
if name[self].indexes begin[:]
if compare[call[name[self].get, parameter[constant[language], constant[None]]] not_equal[!=] name[QUERY_LANGUAGE]] begin[:]
for taget[tuple[[<ast.Name object at 0x7da18fe90cd0>, <ast.Name object at 0x7da18fe910c0>]]] in starred[call[name[self].iterindexes, parameter[]]] begin[:]
if <ast.UnaryOp object at 0x7da18fe900d0> begin[:]
<ast.Raise object at 0x7da20e956bf0>
for taget[name[prop]] in starred[name[self]._nested_object_names] begin[:]
if <ast.UnaryOp object at 0x7da20e957be0> begin[:]
call[name[self].__delitem__, parameter[name[prop]]]
call[call[name[super], parameter[name[DesignDocument], name[self]]].save, parameter[]]
for taget[name[prop]] in starred[name[self]._nested_object_names] begin[:]
call[name[getattr], parameter[name[self], name[prop], call[name[self].setdefault, parameter[name[prop], call[name[dict], parameter[]]]]]]
|
keyword[def] identifier[save] ( identifier[self] ):
literal[string]
keyword[if] identifier[self] . identifier[views] :
keyword[if] identifier[self] . identifier[get] ( literal[string] , keyword[None] )!= identifier[QUERY_LANGUAGE] :
keyword[for] identifier[view_name] , identifier[view] keyword[in] identifier[self] . identifier[iterviews] ():
keyword[if] identifier[isinstance] ( identifier[view] , identifier[QueryIndexView] ):
keyword[raise] identifier[CloudantDesignDocumentException] ( literal[int] , identifier[view_name] )
keyword[else] :
keyword[for] identifier[view_name] , identifier[view] keyword[in] identifier[self] . identifier[iterviews] ():
keyword[if] keyword[not] identifier[isinstance] ( identifier[view] , identifier[QueryIndexView] ):
keyword[raise] identifier[CloudantDesignDocumentException] ( literal[int] , identifier[view_name] )
keyword[if] identifier[self] . identifier[indexes] :
keyword[if] identifier[self] . identifier[get] ( literal[string] , keyword[None] )!= identifier[QUERY_LANGUAGE] :
keyword[for] identifier[index_name] , identifier[search] keyword[in] identifier[self] . identifier[iterindexes] ():
keyword[if] keyword[not] identifier[isinstance] ( identifier[search] [ literal[string] ], identifier[STRTYPE] ):
keyword[raise] identifier[CloudantDesignDocumentException] ( literal[int] , identifier[index_name] )
keyword[else] :
keyword[for] identifier[index_name] , identifier[index] keyword[in] identifier[self] . identifier[iterindexes] ():
keyword[if] keyword[not] identifier[isinstance] ( identifier[index] [ literal[string] ], identifier[dict] ):
keyword[raise] identifier[CloudantDesignDocumentException] ( literal[int] , identifier[index_name] )
keyword[for] identifier[prop] keyword[in] identifier[self] . identifier[_nested_object_names] :
keyword[if] keyword[not] identifier[getattr] ( identifier[self] , identifier[prop] ):
identifier[self] . identifier[__delitem__] ( identifier[prop] )
identifier[super] ( identifier[DesignDocument] , identifier[self] ). identifier[save] ()
keyword[for] identifier[prop] keyword[in] identifier[self] . identifier[_nested_object_names] :
identifier[getattr] ( identifier[self] , identifier[prop] , identifier[self] . identifier[setdefault] ( identifier[prop] , identifier[dict] ()))
|
def save(self):
"""
Saves changes made to the locally cached DesignDocument object's data
structures to the remote database. If the design document does not
exist remotely then it is created in the remote database. If the object
does exist remotely then the design document is updated remotely. In
either case the locally cached DesignDocument object is also updated
accordingly based on the successful response of the operation.
"""
if self.views:
if self.get('language', None) != QUERY_LANGUAGE:
for (view_name, view) in self.iterviews():
if isinstance(view, QueryIndexView):
raise CloudantDesignDocumentException(104, view_name) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]] # depends on [control=['if'], data=[]]
else:
for (view_name, view) in self.iterviews():
if not isinstance(view, QueryIndexView):
raise CloudantDesignDocumentException(105, view_name) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]] # depends on [control=['if'], data=[]]
if self.indexes:
if self.get('language', None) != QUERY_LANGUAGE:
for (index_name, search) in self.iterindexes():
# Check the instance of the javascript search function
if not isinstance(search['index'], STRTYPE):
raise CloudantDesignDocumentException(106, index_name) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]] # depends on [control=['if'], data=[]]
else:
for (index_name, index) in self.iterindexes():
if not isinstance(index['index'], dict):
raise CloudantDesignDocumentException(107, index_name) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]] # depends on [control=['if'], data=[]]
for prop in self._nested_object_names:
if not getattr(self, prop):
# Ensure empty dict for each sub-object is not saved remotely.
self.__delitem__(prop) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['prop']]
super(DesignDocument, self).save()
for prop in self._nested_object_names:
# Ensure views, indexes, and lists dict exist in locally cached DesignDocument.
getattr(self, prop, self.setdefault(prop, dict())) # depends on [control=['for'], data=['prop']]
|
def retrieve(endpoint='incidents',
api_url=None,
page_id=None,
api_key=None,
api_version=None):
'''
Retrieve a specific endpoint from the Statuspage API.
endpoint: incidents
Request a specific endpoint.
page_id
Page ID. Can also be specified in the config file.
api_key
API key. Can also be specified in the config file.
api_version: 1
API version. Can also be specified in the config file.
api_url
Custom API URL in case the user has a StatusPage service running in a custom environment.
CLI Example:
.. code-block:: bash
salt 'minion' statuspage.retrieve components
Example output:
.. code-block:: bash
minion:
----------
comment:
out:
|_
----------
backfilled:
False
created_at:
2015-01-26T20:25:02.702Z
id:
kh2qwjbheqdc36
impact:
major
impact_override:
None
incident_updates:
|_
----------
affected_components:
None
body:
We are currently investigating this issue.
created_at:
2015-01-26T20:25:02.849Z
display_at:
2015-01-26T20:25:02.849Z
id:
zvx7xz2z5skr
incident_id:
kh2qwjbheqdc36
status:
investigating
twitter_updated_at:
None
updated_at:
2015-01-26T20:25:02.849Z
wants_twitter_update:
False
monitoring_at:
None
name:
just testing some stuff
page_id:
ksdhgfyiuhaa
postmortem_body:
None
postmortem_body_last_updated_at:
None
postmortem_ignored:
False
postmortem_notified_subscribers:
False
postmortem_notified_twitter:
False
postmortem_published_at:
None
resolved_at:
None
scheduled_auto_completed:
False
scheduled_auto_in_progress:
False
scheduled_for:
None
scheduled_remind_prior:
False
scheduled_reminded_at:
None
scheduled_until:
None
shortlink:
http://stspg.io/voY
status:
investigating
updated_at:
2015-01-26T20:25:13.379Z
result:
True
'''
params = _get_api_params(api_url=api_url,
page_id=page_id,
api_key=api_key,
api_version=api_version)
if not _validate_api_params(params):
log.error('Invalid API params.')
log.error(params)
return {
'result': False,
'comment': 'Invalid API params. See log for details'
}
headers = _get_headers(params)
retrieve_url = '{base_url}/v{version}/pages/{page_id}/{endpoint}.json'.format(
base_url=params['api_url'],
version=params['api_version'],
page_id=params['api_page_id'],
endpoint=endpoint
)
return _http_request(retrieve_url,
headers=headers)
|
def function[retrieve, parameter[endpoint, api_url, page_id, api_key, api_version]]:
constant[
Retrieve a specific endpoint from the Statuspage API.
endpoint: incidents
Request a specific endpoint.
page_id
Page ID. Can also be specified in the config file.
api_key
API key. Can also be specified in the config file.
api_version: 1
API version. Can also be specified in the config file.
api_url
Custom API URL in case the user has a StatusPage service running in a custom environment.
CLI Example:
.. code-block:: bash
salt 'minion' statuspage.retrieve components
Example output:
.. code-block:: bash
minion:
----------
comment:
out:
|_
----------
backfilled:
False
created_at:
2015-01-26T20:25:02.702Z
id:
kh2qwjbheqdc36
impact:
major
impact_override:
None
incident_updates:
|_
----------
affected_components:
None
body:
We are currently investigating this issue.
created_at:
2015-01-26T20:25:02.849Z
display_at:
2015-01-26T20:25:02.849Z
id:
zvx7xz2z5skr
incident_id:
kh2qwjbheqdc36
status:
investigating
twitter_updated_at:
None
updated_at:
2015-01-26T20:25:02.849Z
wants_twitter_update:
False
monitoring_at:
None
name:
just testing some stuff
page_id:
ksdhgfyiuhaa
postmortem_body:
None
postmortem_body_last_updated_at:
None
postmortem_ignored:
False
postmortem_notified_subscribers:
False
postmortem_notified_twitter:
False
postmortem_published_at:
None
resolved_at:
None
scheduled_auto_completed:
False
scheduled_auto_in_progress:
False
scheduled_for:
None
scheduled_remind_prior:
False
scheduled_reminded_at:
None
scheduled_until:
None
shortlink:
http://stspg.io/voY
status:
investigating
updated_at:
2015-01-26T20:25:13.379Z
result:
True
]
variable[params] assign[=] call[name[_get_api_params], parameter[]]
if <ast.UnaryOp object at 0x7da1b1f24970> begin[:]
call[name[log].error, parameter[constant[Invalid API params.]]]
call[name[log].error, parameter[name[params]]]
return[dictionary[[<ast.Constant object at 0x7da1b1f27190>, <ast.Constant object at 0x7da1b1f27220>], [<ast.Constant object at 0x7da1b1f272b0>, <ast.Constant object at 0x7da1b1f27460>]]]
variable[headers] assign[=] call[name[_get_headers], parameter[name[params]]]
variable[retrieve_url] assign[=] call[constant[{base_url}/v{version}/pages/{page_id}/{endpoint}.json].format, parameter[]]
return[call[name[_http_request], parameter[name[retrieve_url]]]]
|
keyword[def] identifier[retrieve] ( identifier[endpoint] = literal[string] ,
identifier[api_url] = keyword[None] ,
identifier[page_id] = keyword[None] ,
identifier[api_key] = keyword[None] ,
identifier[api_version] = keyword[None] ):
literal[string]
identifier[params] = identifier[_get_api_params] ( identifier[api_url] = identifier[api_url] ,
identifier[page_id] = identifier[page_id] ,
identifier[api_key] = identifier[api_key] ,
identifier[api_version] = identifier[api_version] )
keyword[if] keyword[not] identifier[_validate_api_params] ( identifier[params] ):
identifier[log] . identifier[error] ( literal[string] )
identifier[log] . identifier[error] ( identifier[params] )
keyword[return] {
literal[string] : keyword[False] ,
literal[string] : literal[string]
}
identifier[headers] = identifier[_get_headers] ( identifier[params] )
identifier[retrieve_url] = literal[string] . identifier[format] (
identifier[base_url] = identifier[params] [ literal[string] ],
identifier[version] = identifier[params] [ literal[string] ],
identifier[page_id] = identifier[params] [ literal[string] ],
identifier[endpoint] = identifier[endpoint]
)
keyword[return] identifier[_http_request] ( identifier[retrieve_url] ,
identifier[headers] = identifier[headers] )
|
def retrieve(endpoint='incidents', api_url=None, page_id=None, api_key=None, api_version=None):
"""
Retrieve a specific endpoint from the Statuspage API.
endpoint: incidents
Request a specific endpoint.
page_id
Page ID. Can also be specified in the config file.
api_key
API key. Can also be specified in the config file.
api_version: 1
API version. Can also be specified in the config file.
api_url
Custom API URL in case the user has a StatusPage service running in a custom environment.
CLI Example:
.. code-block:: bash
salt 'minion' statuspage.retrieve components
Example output:
.. code-block:: bash
minion:
----------
comment:
out:
|_
----------
backfilled:
False
created_at:
2015-01-26T20:25:02.702Z
id:
kh2qwjbheqdc36
impact:
major
impact_override:
None
incident_updates:
|_
----------
affected_components:
None
body:
We are currently investigating this issue.
created_at:
2015-01-26T20:25:02.849Z
display_at:
2015-01-26T20:25:02.849Z
id:
zvx7xz2z5skr
incident_id:
kh2qwjbheqdc36
status:
investigating
twitter_updated_at:
None
updated_at:
2015-01-26T20:25:02.849Z
wants_twitter_update:
False
monitoring_at:
None
name:
just testing some stuff
page_id:
ksdhgfyiuhaa
postmortem_body:
None
postmortem_body_last_updated_at:
None
postmortem_ignored:
False
postmortem_notified_subscribers:
False
postmortem_notified_twitter:
False
postmortem_published_at:
None
resolved_at:
None
scheduled_auto_completed:
False
scheduled_auto_in_progress:
False
scheduled_for:
None
scheduled_remind_prior:
False
scheduled_reminded_at:
None
scheduled_until:
None
shortlink:
http://stspg.io/voY
status:
investigating
updated_at:
2015-01-26T20:25:13.379Z
result:
True
"""
params = _get_api_params(api_url=api_url, page_id=page_id, api_key=api_key, api_version=api_version)
if not _validate_api_params(params):
log.error('Invalid API params.')
log.error(params)
return {'result': False, 'comment': 'Invalid API params. See log for details'} # depends on [control=['if'], data=[]]
headers = _get_headers(params)
retrieve_url = '{base_url}/v{version}/pages/{page_id}/{endpoint}.json'.format(base_url=params['api_url'], version=params['api_version'], page_id=params['api_page_id'], endpoint=endpoint)
return _http_request(retrieve_url, headers=headers)
|
def assignment_source(num_pre, num_post, LISTNAME, ITERNAME):
u"""
Accepts num_pre and num_post, which are counts of values
before and after the starg (not including the starg)
Returns a source fit for Assign() from fixer_util
"""
children = []
pre = unicode(num_pre)
post = unicode(num_post)
# This code builds the assignment source from lib2to3 tree primitives.
# It's not very readable, but it seems like the most correct way to do it.
if num_pre > 0:
pre_part = Node(syms.power, [Name(LISTNAME), Node(syms.trailer, [Leaf(token.LSQB, u"["), Node(syms.subscript, [Leaf(token.COLON, u":"), Number(pre)]), Leaf(token.RSQB, u"]")])])
children.append(pre_part)
children.append(Leaf(token.PLUS, u"+", prefix=u" "))
main_part = Node(syms.power, [Leaf(token.LSQB, u"[", prefix=u" "), Name(LISTNAME), Node(syms.trailer, [Leaf(token.LSQB, u"["), Node(syms.subscript, [Number(pre) if num_pre > 0 else Leaf(1, u""), Leaf(token.COLON, u":"), Node(syms.factor, [Leaf(token.MINUS, u"-"), Number(post)]) if num_post > 0 else Leaf(1, u"")]), Leaf(token.RSQB, u"]"), Leaf(token.RSQB, u"]")])])
children.append(main_part)
if num_post > 0:
children.append(Leaf(token.PLUS, u"+", prefix=u" "))
post_part = Node(syms.power, [Name(LISTNAME, prefix=u" "), Node(syms.trailer, [Leaf(token.LSQB, u"["), Node(syms.subscript, [Node(syms.factor, [Leaf(token.MINUS, u"-"), Number(post)]), Leaf(token.COLON, u":")]), Leaf(token.RSQB, u"]")])])
children.append(post_part)
source = Node(syms.arith_expr, children)
return source
|
def function[assignment_source, parameter[num_pre, num_post, LISTNAME, ITERNAME]]:
constant[
Accepts num_pre and num_post, which are counts of values
before and after the starg (not including the starg)
Returns a source fit for Assign() from fixer_util
]
variable[children] assign[=] list[[]]
variable[pre] assign[=] call[name[unicode], parameter[name[num_pre]]]
variable[post] assign[=] call[name[unicode], parameter[name[num_post]]]
if compare[name[num_pre] greater[>] constant[0]] begin[:]
variable[pre_part] assign[=] call[name[Node], parameter[name[syms].power, list[[<ast.Call object at 0x7da20e960a90>, <ast.Call object at 0x7da20e9608e0>]]]]
call[name[children].append, parameter[name[pre_part]]]
call[name[children].append, parameter[call[name[Leaf], parameter[name[token].PLUS, constant[+]]]]]
variable[main_part] assign[=] call[name[Node], parameter[name[syms].power, list[[<ast.Call object at 0x7da20e9618a0>, <ast.Call object at 0x7da20e962e90>, <ast.Call object at 0x7da20e960130>]]]]
call[name[children].append, parameter[name[main_part]]]
if compare[name[num_post] greater[>] constant[0]] begin[:]
call[name[children].append, parameter[call[name[Leaf], parameter[name[token].PLUS, constant[+]]]]]
variable[post_part] assign[=] call[name[Node], parameter[name[syms].power, list[[<ast.Call object at 0x7da18f810370>, <ast.Call object at 0x7da18f813460>]]]]
call[name[children].append, parameter[name[post_part]]]
variable[source] assign[=] call[name[Node], parameter[name[syms].arith_expr, name[children]]]
return[name[source]]
|
keyword[def] identifier[assignment_source] ( identifier[num_pre] , identifier[num_post] , identifier[LISTNAME] , identifier[ITERNAME] ):
literal[string]
identifier[children] =[]
identifier[pre] = identifier[unicode] ( identifier[num_pre] )
identifier[post] = identifier[unicode] ( identifier[num_post] )
keyword[if] identifier[num_pre] > literal[int] :
identifier[pre_part] = identifier[Node] ( identifier[syms] . identifier[power] ,[ identifier[Name] ( identifier[LISTNAME] ), identifier[Node] ( identifier[syms] . identifier[trailer] ,[ identifier[Leaf] ( identifier[token] . identifier[LSQB] , literal[string] ), identifier[Node] ( identifier[syms] . identifier[subscript] ,[ identifier[Leaf] ( identifier[token] . identifier[COLON] , literal[string] ), identifier[Number] ( identifier[pre] )]), identifier[Leaf] ( identifier[token] . identifier[RSQB] , literal[string] )])])
identifier[children] . identifier[append] ( identifier[pre_part] )
identifier[children] . identifier[append] ( identifier[Leaf] ( identifier[token] . identifier[PLUS] , literal[string] , identifier[prefix] = literal[string] ))
identifier[main_part] = identifier[Node] ( identifier[syms] . identifier[power] ,[ identifier[Leaf] ( identifier[token] . identifier[LSQB] , literal[string] , identifier[prefix] = literal[string] ), identifier[Name] ( identifier[LISTNAME] ), identifier[Node] ( identifier[syms] . identifier[trailer] ,[ identifier[Leaf] ( identifier[token] . identifier[LSQB] , literal[string] ), identifier[Node] ( identifier[syms] . identifier[subscript] ,[ identifier[Number] ( identifier[pre] ) keyword[if] identifier[num_pre] > literal[int] keyword[else] identifier[Leaf] ( literal[int] , literal[string] ), identifier[Leaf] ( identifier[token] . identifier[COLON] , literal[string] ), identifier[Node] ( identifier[syms] . identifier[factor] ,[ identifier[Leaf] ( identifier[token] . identifier[MINUS] , literal[string] ), identifier[Number] ( identifier[post] )]) keyword[if] identifier[num_post] > literal[int] keyword[else] identifier[Leaf] ( literal[int] , literal[string] )]), identifier[Leaf] ( identifier[token] . identifier[RSQB] , literal[string] ), identifier[Leaf] ( identifier[token] . identifier[RSQB] , literal[string] )])])
identifier[children] . identifier[append] ( identifier[main_part] )
keyword[if] identifier[num_post] > literal[int] :
identifier[children] . identifier[append] ( identifier[Leaf] ( identifier[token] . identifier[PLUS] , literal[string] , identifier[prefix] = literal[string] ))
identifier[post_part] = identifier[Node] ( identifier[syms] . identifier[power] ,[ identifier[Name] ( identifier[LISTNAME] , identifier[prefix] = literal[string] ), identifier[Node] ( identifier[syms] . identifier[trailer] ,[ identifier[Leaf] ( identifier[token] . identifier[LSQB] , literal[string] ), identifier[Node] ( identifier[syms] . identifier[subscript] ,[ identifier[Node] ( identifier[syms] . identifier[factor] ,[ identifier[Leaf] ( identifier[token] . identifier[MINUS] , literal[string] ), identifier[Number] ( identifier[post] )]), identifier[Leaf] ( identifier[token] . identifier[COLON] , literal[string] )]), identifier[Leaf] ( identifier[token] . identifier[RSQB] , literal[string] )])])
identifier[children] . identifier[append] ( identifier[post_part] )
identifier[source] = identifier[Node] ( identifier[syms] . identifier[arith_expr] , identifier[children] )
keyword[return] identifier[source]
|
def assignment_source(num_pre, num_post, LISTNAME, ITERNAME):
u"""
Accepts num_pre and num_post, which are counts of values
before and after the starg (not including the starg)
Returns a source fit for Assign() from fixer_util
"""
children = []
pre = unicode(num_pre)
post = unicode(num_post)
# This code builds the assignment source from lib2to3 tree primitives.
# It's not very readable, but it seems like the most correct way to do it.
if num_pre > 0:
pre_part = Node(syms.power, [Name(LISTNAME), Node(syms.trailer, [Leaf(token.LSQB, u'['), Node(syms.subscript, [Leaf(token.COLON, u':'), Number(pre)]), Leaf(token.RSQB, u']')])])
children.append(pre_part)
children.append(Leaf(token.PLUS, u'+', prefix=u' ')) # depends on [control=['if'], data=[]]
main_part = Node(syms.power, [Leaf(token.LSQB, u'[', prefix=u' '), Name(LISTNAME), Node(syms.trailer, [Leaf(token.LSQB, u'['), Node(syms.subscript, [Number(pre) if num_pre > 0 else Leaf(1, u''), Leaf(token.COLON, u':'), Node(syms.factor, [Leaf(token.MINUS, u'-'), Number(post)]) if num_post > 0 else Leaf(1, u'')]), Leaf(token.RSQB, u']'), Leaf(token.RSQB, u']')])])
children.append(main_part)
if num_post > 0:
children.append(Leaf(token.PLUS, u'+', prefix=u' '))
post_part = Node(syms.power, [Name(LISTNAME, prefix=u' '), Node(syms.trailer, [Leaf(token.LSQB, u'['), Node(syms.subscript, [Node(syms.factor, [Leaf(token.MINUS, u'-'), Number(post)]), Leaf(token.COLON, u':')]), Leaf(token.RSQB, u']')])])
children.append(post_part) # depends on [control=['if'], data=[]]
source = Node(syms.arith_expr, children)
return source
|
def clear_dcnm_out_part(self, tenant_id, fw_dict, is_fw_virt=False):
"""Clear DCNM out partition information.
Clear the DCNM OUT partition service node IP address and update
the result
"""
res = fw_const.DCNM_OUT_PART_UPDDEL_SUCCESS
self.update_fw_db_result(tenant_id, dcnm_status=res)
LOG.info("Out partition cleared -noop- with service ip addr")
return True
|
def function[clear_dcnm_out_part, parameter[self, tenant_id, fw_dict, is_fw_virt]]:
constant[Clear DCNM out partition information.
Clear the DCNM OUT partition service node IP address and update
the result
]
variable[res] assign[=] name[fw_const].DCNM_OUT_PART_UPDDEL_SUCCESS
call[name[self].update_fw_db_result, parameter[name[tenant_id]]]
call[name[LOG].info, parameter[constant[Out partition cleared -noop- with service ip addr]]]
return[constant[True]]
|
keyword[def] identifier[clear_dcnm_out_part] ( identifier[self] , identifier[tenant_id] , identifier[fw_dict] , identifier[is_fw_virt] = keyword[False] ):
literal[string]
identifier[res] = identifier[fw_const] . identifier[DCNM_OUT_PART_UPDDEL_SUCCESS]
identifier[self] . identifier[update_fw_db_result] ( identifier[tenant_id] , identifier[dcnm_status] = identifier[res] )
identifier[LOG] . identifier[info] ( literal[string] )
keyword[return] keyword[True]
|
def clear_dcnm_out_part(self, tenant_id, fw_dict, is_fw_virt=False):
"""Clear DCNM out partition information.
Clear the DCNM OUT partition service node IP address and update
the result
"""
res = fw_const.DCNM_OUT_PART_UPDDEL_SUCCESS
self.update_fw_db_result(tenant_id, dcnm_status=res)
LOG.info('Out partition cleared -noop- with service ip addr')
return True
|
def wait(self):
"""Waits and returns received messages.
If running in compatibility mode for older uWSGI versions,
it also sends messages that have been queued by send().
A return value of None means that connection was closed.
This must be called repeatedly. For uWSGI < 2.1.x it must
be called from the main greenlet."""
while True:
if self._req_ctx is not None:
try:
msg = uwsgi.websocket_recv(request_context=self._req_ctx)
except IOError: # connection closed
return None
return self._decode_received(msg)
else:
# we wake up at least every 3 seconds to let uWSGI
# do its ping/ponging
event_set = self._event.wait(timeout=3)
if event_set:
self._event.clear()
# maybe there is something to send
msgs = []
while True:
try:
msgs.append(self._send_queue.get(block=False))
except gevent.queue.Empty:
break
for msg in msgs:
self._send(msg)
# maybe there is something to receive, if not, at least
# ensure uWSGI does its ping/ponging
try:
msg = uwsgi.websocket_recv_nb()
except IOError: # connection closed
self._select_greenlet.kill()
return None
if msg: # message available
return self._decode_received(msg)
|
def function[wait, parameter[self]]:
constant[Waits and returns received messages.
If running in compatibility mode for older uWSGI versions,
it also sends messages that have been queued by send().
A return value of None means that connection was closed.
This must be called repeatedly. For uWSGI < 2.1.x it must
be called from the main greenlet.]
while constant[True] begin[:]
if compare[name[self]._req_ctx is_not constant[None]] begin[:]
<ast.Try object at 0x7da1b26ad9f0>
return[call[name[self]._decode_received, parameter[name[msg]]]]
|
keyword[def] identifier[wait] ( identifier[self] ):
literal[string]
keyword[while] keyword[True] :
keyword[if] identifier[self] . identifier[_req_ctx] keyword[is] keyword[not] keyword[None] :
keyword[try] :
identifier[msg] = identifier[uwsgi] . identifier[websocket_recv] ( identifier[request_context] = identifier[self] . identifier[_req_ctx] )
keyword[except] identifier[IOError] :
keyword[return] keyword[None]
keyword[return] identifier[self] . identifier[_decode_received] ( identifier[msg] )
keyword[else] :
identifier[event_set] = identifier[self] . identifier[_event] . identifier[wait] ( identifier[timeout] = literal[int] )
keyword[if] identifier[event_set] :
identifier[self] . identifier[_event] . identifier[clear] ()
identifier[msgs] =[]
keyword[while] keyword[True] :
keyword[try] :
identifier[msgs] . identifier[append] ( identifier[self] . identifier[_send_queue] . identifier[get] ( identifier[block] = keyword[False] ))
keyword[except] identifier[gevent] . identifier[queue] . identifier[Empty] :
keyword[break]
keyword[for] identifier[msg] keyword[in] identifier[msgs] :
identifier[self] . identifier[_send] ( identifier[msg] )
keyword[try] :
identifier[msg] = identifier[uwsgi] . identifier[websocket_recv_nb] ()
keyword[except] identifier[IOError] :
identifier[self] . identifier[_select_greenlet] . identifier[kill] ()
keyword[return] keyword[None]
keyword[if] identifier[msg] :
keyword[return] identifier[self] . identifier[_decode_received] ( identifier[msg] )
|
def wait(self):
"""Waits and returns received messages.
If running in compatibility mode for older uWSGI versions,
it also sends messages that have been queued by send().
A return value of None means that connection was closed.
This must be called repeatedly. For uWSGI < 2.1.x it must
be called from the main greenlet."""
while True:
if self._req_ctx is not None:
try:
msg = uwsgi.websocket_recv(request_context=self._req_ctx) # depends on [control=['try'], data=[]]
except IOError: # connection closed
return None # depends on [control=['except'], data=[]]
return self._decode_received(msg) # depends on [control=['if'], data=[]]
else:
# we wake up at least every 3 seconds to let uWSGI
# do its ping/ponging
event_set = self._event.wait(timeout=3)
if event_set:
self._event.clear()
# maybe there is something to send
msgs = []
while True:
try:
msgs.append(self._send_queue.get(block=False)) # depends on [control=['try'], data=[]]
except gevent.queue.Empty:
break # depends on [control=['except'], data=[]] # depends on [control=['while'], data=[]]
for msg in msgs:
self._send(msg) # depends on [control=['for'], data=['msg']] # depends on [control=['if'], data=[]]
# maybe there is something to receive, if not, at least
# ensure uWSGI does its ping/ponging
try:
msg = uwsgi.websocket_recv_nb() # depends on [control=['try'], data=[]]
except IOError: # connection closed
self._select_greenlet.kill()
return None # depends on [control=['except'], data=[]]
if msg: # message available
return self._decode_received(msg) # depends on [control=['if'], data=[]] # depends on [control=['while'], data=[]]
|
def __setup(self):
"""
Setup
:return:
"""
if not os.path.exists(self.__local_download_dir_warc):
os.makedirs(self.__local_download_dir_warc)
# make loggers quite
configure_logging({"LOG_LEVEL": "ERROR"})
logging.getLogger('requests').setLevel(logging.CRITICAL)
logging.getLogger('readability').setLevel(logging.CRITICAL)
logging.getLogger('PIL').setLevel(logging.CRITICAL)
logging.getLogger('newspaper').setLevel(logging.CRITICAL)
logging.getLogger('newsplease').setLevel(logging.CRITICAL)
logging.getLogger('urllib3').setLevel(logging.CRITICAL)
# set own logger
logging.basicConfig(level=self.__log_level)
self.__logger = logging.getLogger(__name__)
self.__logger.setLevel(self.__log_level)
|
def function[__setup, parameter[self]]:
constant[
Setup
:return:
]
if <ast.UnaryOp object at 0x7da20cabf250> begin[:]
call[name[os].makedirs, parameter[name[self].__local_download_dir_warc]]
call[name[configure_logging], parameter[dictionary[[<ast.Constant object at 0x7da20cabc250>], [<ast.Constant object at 0x7da20cabed70>]]]]
call[call[name[logging].getLogger, parameter[constant[requests]]].setLevel, parameter[name[logging].CRITICAL]]
call[call[name[logging].getLogger, parameter[constant[readability]]].setLevel, parameter[name[logging].CRITICAL]]
call[call[name[logging].getLogger, parameter[constant[PIL]]].setLevel, parameter[name[logging].CRITICAL]]
call[call[name[logging].getLogger, parameter[constant[newspaper]]].setLevel, parameter[name[logging].CRITICAL]]
call[call[name[logging].getLogger, parameter[constant[newsplease]]].setLevel, parameter[name[logging].CRITICAL]]
call[call[name[logging].getLogger, parameter[constant[urllib3]]].setLevel, parameter[name[logging].CRITICAL]]
call[name[logging].basicConfig, parameter[]]
name[self].__logger assign[=] call[name[logging].getLogger, parameter[name[__name__]]]
call[name[self].__logger.setLevel, parameter[name[self].__log_level]]
|
keyword[def] identifier[__setup] ( identifier[self] ):
literal[string]
keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[exists] ( identifier[self] . identifier[__local_download_dir_warc] ):
identifier[os] . identifier[makedirs] ( identifier[self] . identifier[__local_download_dir_warc] )
identifier[configure_logging] ({ literal[string] : literal[string] })
identifier[logging] . identifier[getLogger] ( literal[string] ). identifier[setLevel] ( identifier[logging] . identifier[CRITICAL] )
identifier[logging] . identifier[getLogger] ( literal[string] ). identifier[setLevel] ( identifier[logging] . identifier[CRITICAL] )
identifier[logging] . identifier[getLogger] ( literal[string] ). identifier[setLevel] ( identifier[logging] . identifier[CRITICAL] )
identifier[logging] . identifier[getLogger] ( literal[string] ). identifier[setLevel] ( identifier[logging] . identifier[CRITICAL] )
identifier[logging] . identifier[getLogger] ( literal[string] ). identifier[setLevel] ( identifier[logging] . identifier[CRITICAL] )
identifier[logging] . identifier[getLogger] ( literal[string] ). identifier[setLevel] ( identifier[logging] . identifier[CRITICAL] )
identifier[logging] . identifier[basicConfig] ( identifier[level] = identifier[self] . identifier[__log_level] )
identifier[self] . identifier[__logger] = identifier[logging] . identifier[getLogger] ( identifier[__name__] )
identifier[self] . identifier[__logger] . identifier[setLevel] ( identifier[self] . identifier[__log_level] )
|
def __setup(self):
"""
Setup
:return:
"""
if not os.path.exists(self.__local_download_dir_warc):
os.makedirs(self.__local_download_dir_warc) # depends on [control=['if'], data=[]]
# make loggers quite
configure_logging({'LOG_LEVEL': 'ERROR'})
logging.getLogger('requests').setLevel(logging.CRITICAL)
logging.getLogger('readability').setLevel(logging.CRITICAL)
logging.getLogger('PIL').setLevel(logging.CRITICAL)
logging.getLogger('newspaper').setLevel(logging.CRITICAL)
logging.getLogger('newsplease').setLevel(logging.CRITICAL)
logging.getLogger('urllib3').setLevel(logging.CRITICAL)
# set own logger
logging.basicConfig(level=self.__log_level)
self.__logger = logging.getLogger(__name__)
self.__logger.setLevel(self.__log_level)
|
def setup_neighbors_distances_and_angles(self, indices):
"""
Initializes the angle and distance separations
:param indices: indices of the sites for which the Voronoi is needed
"""
self.neighbors_distances = [None] * len(self.structure)
self.neighbors_normalized_distances = [None] * len(self.structure)
self.neighbors_angles = [None] * len(self.structure)
self.neighbors_normalized_angles = [None] * len(self.structure)
for isite in indices:
results = self.voronoi_list2[isite]
if results is None:
continue
#Initializes neighbors distances and normalized distances groups
self.neighbors_distances[isite] = []
self.neighbors_normalized_distances[isite] = []
normalized_distances = [nb_dict['normalized_distance'] for nb_dict in results]
isorted_distances = np.argsort(normalized_distances)
self.neighbors_normalized_distances[isite].append({'min': normalized_distances[isorted_distances[0]],
'max': normalized_distances[isorted_distances[0]]})
self.neighbors_distances[isite].append({'min': results[isorted_distances[0]]['distance'],
'max': results[isorted_distances[0]]['distance']})
icurrent = 0
nb_indices = {int(isorted_distances[0])}
dnb_indices = {int(isorted_distances[0])}
for idist in iter(isorted_distances):
wd = normalized_distances[idist]
if self.maximum_distance_factor is not None:
if wd > self.maximum_distance_factor:
self.neighbors_normalized_distances[isite][icurrent]['nb_indices'] = list(nb_indices)
self.neighbors_distances[isite][icurrent]['nb_indices'] = list(nb_indices)
self.neighbors_normalized_distances[isite][icurrent]['dnb_indices'] = list(dnb_indices)
self.neighbors_distances[isite][icurrent]['dnb_indices'] = list(dnb_indices)
break
if np.isclose(wd, self.neighbors_normalized_distances[isite][icurrent]['max'],
rtol=0.0, atol=self.normalized_distance_tolerance):
self.neighbors_normalized_distances[isite][icurrent]['max'] = wd
self.neighbors_distances[isite][icurrent]['max'] = results[idist]['distance']
dnb_indices.add(int(idist))
else:
self.neighbors_normalized_distances[isite][icurrent]['nb_indices'] = list(nb_indices)
self.neighbors_distances[isite][icurrent]['nb_indices'] = list(nb_indices)
self.neighbors_normalized_distances[isite][icurrent]['dnb_indices'] = list(dnb_indices)
self.neighbors_distances[isite][icurrent]['dnb_indices'] = list(dnb_indices)
dnb_indices = {int(idist)}
self.neighbors_normalized_distances[isite].append({'min': wd,
'max': wd})
self.neighbors_distances[isite].append({'min': results[idist]['distance'],
'max': results[idist]['distance']})
icurrent += 1
nb_indices.add(int(idist))
else:
self.neighbors_normalized_distances[isite][icurrent]['nb_indices'] = list(nb_indices)
self.neighbors_distances[isite][icurrent]['nb_indices'] = list(nb_indices)
self.neighbors_normalized_distances[isite][icurrent]['dnb_indices'] = list(dnb_indices)
self.neighbors_distances[isite][icurrent]['dnb_indices'] = list(dnb_indices)
for idist in range(len(self.neighbors_distances[isite]) - 1):
dist_dict = self.neighbors_distances[isite][idist]
dist_dict_next = self.neighbors_distances[isite][idist+1]
dist_dict['next'] = dist_dict_next['min']
ndist_dict = self.neighbors_normalized_distances[isite][idist]
ndist_dict_next = self.neighbors_normalized_distances[isite][idist + 1]
ndist_dict['next'] = ndist_dict_next['min']
if self.maximum_distance_factor is not None:
dfact = self.maximum_distance_factor
else:
dfact = self.default_voronoi_cutoff / self.neighbors_distances[isite][0]['min']
self.neighbors_normalized_distances[isite][-1]['next'] = dfact
self.neighbors_distances[isite][-1]['next'] = dfact * self.neighbors_distances[isite][0]['min']
#Initializes neighbors angles and normalized angles groups
self.neighbors_angles[isite] = []
self.neighbors_normalized_angles[isite] = []
normalized_angles = [nb_dict['normalized_angle'] for nb_dict in results]
isorted_angles = np.argsort(normalized_angles)[::-1]
self.neighbors_normalized_angles[isite].append({'max': normalized_angles[isorted_angles[0]],
'min': normalized_angles[isorted_angles[0]]})
self.neighbors_angles[isite].append({'max': results[isorted_angles[0]]['angle'],
'min': results[isorted_angles[0]]['angle']})
icurrent = 0
nb_indices = {int(isorted_angles[0])}
dnb_indices = {int(isorted_angles[0])}
for iang in iter(isorted_angles):
wa = normalized_angles[iang]
if self.minimum_angle_factor is not None:
if wa < self.minimum_angle_factor:
self.neighbors_normalized_angles[isite][icurrent]['nb_indices'] = list(nb_indices)
self.neighbors_angles[isite][icurrent]['nb_indices'] = list(nb_indices)
self.neighbors_normalized_angles[isite][icurrent]['dnb_indices'] = list(dnb_indices)
self.neighbors_angles[isite][icurrent]['dnb_indices'] = list(dnb_indices)
break
if np.isclose(wa, self.neighbors_normalized_angles[isite][icurrent]['min'],
rtol=0.0, atol=self.normalized_angle_tolerance):
self.neighbors_normalized_angles[isite][icurrent]['min'] = wa
self.neighbors_angles[isite][icurrent]['min'] = results[iang]['angle']
dnb_indices.add(int(iang))
else:
self.neighbors_normalized_angles[isite][icurrent]['nb_indices'] = list(nb_indices)
self.neighbors_angles[isite][icurrent]['nb_indices'] = list(nb_indices)
self.neighbors_normalized_angles[isite][icurrent]['dnb_indices'] = list(dnb_indices)
self.neighbors_angles[isite][icurrent]['dnb_indices'] = list(dnb_indices)
dnb_indices = {int(iang)}
self.neighbors_normalized_angles[isite].append({'max': wa,
'min': wa})
self.neighbors_angles[isite].append({'max': results[iang]['angle'],
'min': results[iang]['angle']})
icurrent += 1
nb_indices.add(int(iang))
else:
self.neighbors_normalized_angles[isite][icurrent]['nb_indices'] = list(nb_indices)
self.neighbors_angles[isite][icurrent]['nb_indices'] = list(nb_indices)
self.neighbors_normalized_angles[isite][icurrent]['dnb_indices'] = list(dnb_indices)
self.neighbors_angles[isite][icurrent]['dnb_indices'] = list(dnb_indices)
for iang in range(len(self.neighbors_angles[isite]) - 1):
ang_dict = self.neighbors_angles[isite][iang]
ang_dict_next = self.neighbors_angles[isite][iang + 1]
ang_dict['next'] = ang_dict_next['max']
nang_dict = self.neighbors_normalized_angles[isite][iang]
nang_dict_next = self.neighbors_normalized_angles[isite][iang + 1]
nang_dict['next'] = nang_dict_next['max']
if self.minimum_angle_factor is not None:
afact = self.minimum_angle_factor
else:
afact = 0.0
self.neighbors_normalized_angles[isite][-1]['next'] = afact
self.neighbors_angles[isite][-1]['next'] = afact * self.neighbors_angles[isite][0]['max']
|
def function[setup_neighbors_distances_and_angles, parameter[self, indices]]:
constant[
Initializes the angle and distance separations
:param indices: indices of the sites for which the Voronoi is needed
]
name[self].neighbors_distances assign[=] binary_operation[list[[<ast.Constant object at 0x7da18dc9be50>]] * call[name[len], parameter[name[self].structure]]]
name[self].neighbors_normalized_distances assign[=] binary_operation[list[[<ast.Constant object at 0x7da18dc9b7c0>]] * call[name[len], parameter[name[self].structure]]]
name[self].neighbors_angles assign[=] binary_operation[list[[<ast.Constant object at 0x7da18dc9a9e0>]] * call[name[len], parameter[name[self].structure]]]
name[self].neighbors_normalized_angles assign[=] binary_operation[list[[<ast.Constant object at 0x7da18dc99300>]] * call[name[len], parameter[name[self].structure]]]
for taget[name[isite]] in starred[name[indices]] begin[:]
variable[results] assign[=] call[name[self].voronoi_list2][name[isite]]
if compare[name[results] is constant[None]] begin[:]
continue
call[name[self].neighbors_distances][name[isite]] assign[=] list[[]]
call[name[self].neighbors_normalized_distances][name[isite]] assign[=] list[[]]
variable[normalized_distances] assign[=] <ast.ListComp object at 0x7da18dc9a830>
variable[isorted_distances] assign[=] call[name[np].argsort, parameter[name[normalized_distances]]]
call[call[name[self].neighbors_normalized_distances][name[isite]].append, parameter[dictionary[[<ast.Constant object at 0x7da18dc9b070>, <ast.Constant object at 0x7da18dc9b5e0>], [<ast.Subscript object at 0x7da18dc99ba0>, <ast.Subscript object at 0x7da18dc9a560>]]]]
call[call[name[self].neighbors_distances][name[isite]].append, parameter[dictionary[[<ast.Constant object at 0x7da18dc98bb0>, <ast.Constant object at 0x7da18dc9a1d0>], [<ast.Subscript object at 0x7da18dc9b700>, <ast.Subscript object at 0x7da18dc98f70>]]]]
variable[icurrent] assign[=] constant[0]
variable[nb_indices] assign[=] <ast.Set object at 0x7da18dc9ad70>
variable[dnb_indices] assign[=] <ast.Set object at 0x7da18dc99a50>
for taget[name[idist]] in starred[call[name[iter], parameter[name[isorted_distances]]]] begin[:]
variable[wd] assign[=] call[name[normalized_distances]][name[idist]]
if compare[name[self].maximum_distance_factor is_not constant[None]] begin[:]
if compare[name[wd] greater[>] name[self].maximum_distance_factor] begin[:]
call[call[call[name[self].neighbors_normalized_distances][name[isite]]][name[icurrent]]][constant[nb_indices]] assign[=] call[name[list], parameter[name[nb_indices]]]
call[call[call[name[self].neighbors_distances][name[isite]]][name[icurrent]]][constant[nb_indices]] assign[=] call[name[list], parameter[name[nb_indices]]]
call[call[call[name[self].neighbors_normalized_distances][name[isite]]][name[icurrent]]][constant[dnb_indices]] assign[=] call[name[list], parameter[name[dnb_indices]]]
call[call[call[name[self].neighbors_distances][name[isite]]][name[icurrent]]][constant[dnb_indices]] assign[=] call[name[list], parameter[name[dnb_indices]]]
break
if call[name[np].isclose, parameter[name[wd], call[call[call[name[self].neighbors_normalized_distances][name[isite]]][name[icurrent]]][constant[max]]]] begin[:]
call[call[call[name[self].neighbors_normalized_distances][name[isite]]][name[icurrent]]][constant[max]] assign[=] name[wd]
call[call[call[name[self].neighbors_distances][name[isite]]][name[icurrent]]][constant[max]] assign[=] call[call[name[results]][name[idist]]][constant[distance]]
call[name[dnb_indices].add, parameter[call[name[int], parameter[name[idist]]]]]
call[name[nb_indices].add, parameter[call[name[int], parameter[name[idist]]]]]
for taget[name[idist]] in starred[call[name[range], parameter[binary_operation[call[name[len], parameter[call[name[self].neighbors_distances][name[isite]]]] - constant[1]]]]] begin[:]
variable[dist_dict] assign[=] call[call[name[self].neighbors_distances][name[isite]]][name[idist]]
variable[dist_dict_next] assign[=] call[call[name[self].neighbors_distances][name[isite]]][binary_operation[name[idist] + constant[1]]]
call[name[dist_dict]][constant[next]] assign[=] call[name[dist_dict_next]][constant[min]]
variable[ndist_dict] assign[=] call[call[name[self].neighbors_normalized_distances][name[isite]]][name[idist]]
variable[ndist_dict_next] assign[=] call[call[name[self].neighbors_normalized_distances][name[isite]]][binary_operation[name[idist] + constant[1]]]
call[name[ndist_dict]][constant[next]] assign[=] call[name[ndist_dict_next]][constant[min]]
if compare[name[self].maximum_distance_factor is_not constant[None]] begin[:]
variable[dfact] assign[=] name[self].maximum_distance_factor
call[call[call[name[self].neighbors_normalized_distances][name[isite]]][<ast.UnaryOp object at 0x7da2041d9900>]][constant[next]] assign[=] name[dfact]
call[call[call[name[self].neighbors_distances][name[isite]]][<ast.UnaryOp object at 0x7da2041dafb0>]][constant[next]] assign[=] binary_operation[name[dfact] * call[call[call[name[self].neighbors_distances][name[isite]]][constant[0]]][constant[min]]]
call[name[self].neighbors_angles][name[isite]] assign[=] list[[]]
call[name[self].neighbors_normalized_angles][name[isite]] assign[=] list[[]]
variable[normalized_angles] assign[=] <ast.ListComp object at 0x7da18f8134c0>
variable[isorted_angles] assign[=] call[call[name[np].argsort, parameter[name[normalized_angles]]]][<ast.Slice object at 0x7da18f813310>]
call[call[name[self].neighbors_normalized_angles][name[isite]].append, parameter[dictionary[[<ast.Constant object at 0x7da18f8105e0>, <ast.Constant object at 0x7da18f811810>], [<ast.Subscript object at 0x7da18f811000>, <ast.Subscript object at 0x7da18f812110>]]]]
call[call[name[self].neighbors_angles][name[isite]].append, parameter[dictionary[[<ast.Constant object at 0x7da18f811570>, <ast.Constant object at 0x7da18f811390>], [<ast.Subscript object at 0x7da18f812800>, <ast.Subscript object at 0x7da18f810490>]]]]
variable[icurrent] assign[=] constant[0]
variable[nb_indices] assign[=] <ast.Set object at 0x7da18f811ed0>
variable[dnb_indices] assign[=] <ast.Set object at 0x7da18f813b50>
for taget[name[iang]] in starred[call[name[iter], parameter[name[isorted_angles]]]] begin[:]
variable[wa] assign[=] call[name[normalized_angles]][name[iang]]
if compare[name[self].minimum_angle_factor is_not constant[None]] begin[:]
if compare[name[wa] less[<] name[self].minimum_angle_factor] begin[:]
call[call[call[name[self].neighbors_normalized_angles][name[isite]]][name[icurrent]]][constant[nb_indices]] assign[=] call[name[list], parameter[name[nb_indices]]]
call[call[call[name[self].neighbors_angles][name[isite]]][name[icurrent]]][constant[nb_indices]] assign[=] call[name[list], parameter[name[nb_indices]]]
call[call[call[name[self].neighbors_normalized_angles][name[isite]]][name[icurrent]]][constant[dnb_indices]] assign[=] call[name[list], parameter[name[dnb_indices]]]
call[call[call[name[self].neighbors_angles][name[isite]]][name[icurrent]]][constant[dnb_indices]] assign[=] call[name[list], parameter[name[dnb_indices]]]
break
if call[name[np].isclose, parameter[name[wa], call[call[call[name[self].neighbors_normalized_angles][name[isite]]][name[icurrent]]][constant[min]]]] begin[:]
call[call[call[name[self].neighbors_normalized_angles][name[isite]]][name[icurrent]]][constant[min]] assign[=] name[wa]
call[call[call[name[self].neighbors_angles][name[isite]]][name[icurrent]]][constant[min]] assign[=] call[call[name[results]][name[iang]]][constant[angle]]
call[name[dnb_indices].add, parameter[call[name[int], parameter[name[iang]]]]]
call[name[nb_indices].add, parameter[call[name[int], parameter[name[iang]]]]]
for taget[name[iang]] in starred[call[name[range], parameter[binary_operation[call[name[len], parameter[call[name[self].neighbors_angles][name[isite]]]] - constant[1]]]]] begin[:]
variable[ang_dict] assign[=] call[call[name[self].neighbors_angles][name[isite]]][name[iang]]
variable[ang_dict_next] assign[=] call[call[name[self].neighbors_angles][name[isite]]][binary_operation[name[iang] + constant[1]]]
call[name[ang_dict]][constant[next]] assign[=] call[name[ang_dict_next]][constant[max]]
variable[nang_dict] assign[=] call[call[name[self].neighbors_normalized_angles][name[isite]]][name[iang]]
variable[nang_dict_next] assign[=] call[call[name[self].neighbors_normalized_angles][name[isite]]][binary_operation[name[iang] + constant[1]]]
call[name[nang_dict]][constant[next]] assign[=] call[name[nang_dict_next]][constant[max]]
if compare[name[self].minimum_angle_factor is_not constant[None]] begin[:]
variable[afact] assign[=] name[self].minimum_angle_factor
call[call[call[name[self].neighbors_normalized_angles][name[isite]]][<ast.UnaryOp object at 0x7da1b26aebc0>]][constant[next]] assign[=] name[afact]
call[call[call[name[self].neighbors_angles][name[isite]]][<ast.UnaryOp object at 0x7da1b26ad1e0>]][constant[next]] assign[=] binary_operation[name[afact] * call[call[call[name[self].neighbors_angles][name[isite]]][constant[0]]][constant[max]]]
|
keyword[def] identifier[setup_neighbors_distances_and_angles] ( identifier[self] , identifier[indices] ):
literal[string]
identifier[self] . identifier[neighbors_distances] =[ keyword[None] ]* identifier[len] ( identifier[self] . identifier[structure] )
identifier[self] . identifier[neighbors_normalized_distances] =[ keyword[None] ]* identifier[len] ( identifier[self] . identifier[structure] )
identifier[self] . identifier[neighbors_angles] =[ keyword[None] ]* identifier[len] ( identifier[self] . identifier[structure] )
identifier[self] . identifier[neighbors_normalized_angles] =[ keyword[None] ]* identifier[len] ( identifier[self] . identifier[structure] )
keyword[for] identifier[isite] keyword[in] identifier[indices] :
identifier[results] = identifier[self] . identifier[voronoi_list2] [ identifier[isite] ]
keyword[if] identifier[results] keyword[is] keyword[None] :
keyword[continue]
identifier[self] . identifier[neighbors_distances] [ identifier[isite] ]=[]
identifier[self] . identifier[neighbors_normalized_distances] [ identifier[isite] ]=[]
identifier[normalized_distances] =[ identifier[nb_dict] [ literal[string] ] keyword[for] identifier[nb_dict] keyword[in] identifier[results] ]
identifier[isorted_distances] = identifier[np] . identifier[argsort] ( identifier[normalized_distances] )
identifier[self] . identifier[neighbors_normalized_distances] [ identifier[isite] ]. identifier[append] ({ literal[string] : identifier[normalized_distances] [ identifier[isorted_distances] [ literal[int] ]],
literal[string] : identifier[normalized_distances] [ identifier[isorted_distances] [ literal[int] ]]})
identifier[self] . identifier[neighbors_distances] [ identifier[isite] ]. identifier[append] ({ literal[string] : identifier[results] [ identifier[isorted_distances] [ literal[int] ]][ literal[string] ],
literal[string] : identifier[results] [ identifier[isorted_distances] [ literal[int] ]][ literal[string] ]})
identifier[icurrent] = literal[int]
identifier[nb_indices] ={ identifier[int] ( identifier[isorted_distances] [ literal[int] ])}
identifier[dnb_indices] ={ identifier[int] ( identifier[isorted_distances] [ literal[int] ])}
keyword[for] identifier[idist] keyword[in] identifier[iter] ( identifier[isorted_distances] ):
identifier[wd] = identifier[normalized_distances] [ identifier[idist] ]
keyword[if] identifier[self] . identifier[maximum_distance_factor] keyword[is] keyword[not] keyword[None] :
keyword[if] identifier[wd] > identifier[self] . identifier[maximum_distance_factor] :
identifier[self] . identifier[neighbors_normalized_distances] [ identifier[isite] ][ identifier[icurrent] ][ literal[string] ]= identifier[list] ( identifier[nb_indices] )
identifier[self] . identifier[neighbors_distances] [ identifier[isite] ][ identifier[icurrent] ][ literal[string] ]= identifier[list] ( identifier[nb_indices] )
identifier[self] . identifier[neighbors_normalized_distances] [ identifier[isite] ][ identifier[icurrent] ][ literal[string] ]= identifier[list] ( identifier[dnb_indices] )
identifier[self] . identifier[neighbors_distances] [ identifier[isite] ][ identifier[icurrent] ][ literal[string] ]= identifier[list] ( identifier[dnb_indices] )
keyword[break]
keyword[if] identifier[np] . identifier[isclose] ( identifier[wd] , identifier[self] . identifier[neighbors_normalized_distances] [ identifier[isite] ][ identifier[icurrent] ][ literal[string] ],
identifier[rtol] = literal[int] , identifier[atol] = identifier[self] . identifier[normalized_distance_tolerance] ):
identifier[self] . identifier[neighbors_normalized_distances] [ identifier[isite] ][ identifier[icurrent] ][ literal[string] ]= identifier[wd]
identifier[self] . identifier[neighbors_distances] [ identifier[isite] ][ identifier[icurrent] ][ literal[string] ]= identifier[results] [ identifier[idist] ][ literal[string] ]
identifier[dnb_indices] . identifier[add] ( identifier[int] ( identifier[idist] ))
keyword[else] :
identifier[self] . identifier[neighbors_normalized_distances] [ identifier[isite] ][ identifier[icurrent] ][ literal[string] ]= identifier[list] ( identifier[nb_indices] )
identifier[self] . identifier[neighbors_distances] [ identifier[isite] ][ identifier[icurrent] ][ literal[string] ]= identifier[list] ( identifier[nb_indices] )
identifier[self] . identifier[neighbors_normalized_distances] [ identifier[isite] ][ identifier[icurrent] ][ literal[string] ]= identifier[list] ( identifier[dnb_indices] )
identifier[self] . identifier[neighbors_distances] [ identifier[isite] ][ identifier[icurrent] ][ literal[string] ]= identifier[list] ( identifier[dnb_indices] )
identifier[dnb_indices] ={ identifier[int] ( identifier[idist] )}
identifier[self] . identifier[neighbors_normalized_distances] [ identifier[isite] ]. identifier[append] ({ literal[string] : identifier[wd] ,
literal[string] : identifier[wd] })
identifier[self] . identifier[neighbors_distances] [ identifier[isite] ]. identifier[append] ({ literal[string] : identifier[results] [ identifier[idist] ][ literal[string] ],
literal[string] : identifier[results] [ identifier[idist] ][ literal[string] ]})
identifier[icurrent] += literal[int]
identifier[nb_indices] . identifier[add] ( identifier[int] ( identifier[idist] ))
keyword[else] :
identifier[self] . identifier[neighbors_normalized_distances] [ identifier[isite] ][ identifier[icurrent] ][ literal[string] ]= identifier[list] ( identifier[nb_indices] )
identifier[self] . identifier[neighbors_distances] [ identifier[isite] ][ identifier[icurrent] ][ literal[string] ]= identifier[list] ( identifier[nb_indices] )
identifier[self] . identifier[neighbors_normalized_distances] [ identifier[isite] ][ identifier[icurrent] ][ literal[string] ]= identifier[list] ( identifier[dnb_indices] )
identifier[self] . identifier[neighbors_distances] [ identifier[isite] ][ identifier[icurrent] ][ literal[string] ]= identifier[list] ( identifier[dnb_indices] )
keyword[for] identifier[idist] keyword[in] identifier[range] ( identifier[len] ( identifier[self] . identifier[neighbors_distances] [ identifier[isite] ])- literal[int] ):
identifier[dist_dict] = identifier[self] . identifier[neighbors_distances] [ identifier[isite] ][ identifier[idist] ]
identifier[dist_dict_next] = identifier[self] . identifier[neighbors_distances] [ identifier[isite] ][ identifier[idist] + literal[int] ]
identifier[dist_dict] [ literal[string] ]= identifier[dist_dict_next] [ literal[string] ]
identifier[ndist_dict] = identifier[self] . identifier[neighbors_normalized_distances] [ identifier[isite] ][ identifier[idist] ]
identifier[ndist_dict_next] = identifier[self] . identifier[neighbors_normalized_distances] [ identifier[isite] ][ identifier[idist] + literal[int] ]
identifier[ndist_dict] [ literal[string] ]= identifier[ndist_dict_next] [ literal[string] ]
keyword[if] identifier[self] . identifier[maximum_distance_factor] keyword[is] keyword[not] keyword[None] :
identifier[dfact] = identifier[self] . identifier[maximum_distance_factor]
keyword[else] :
identifier[dfact] = identifier[self] . identifier[default_voronoi_cutoff] / identifier[self] . identifier[neighbors_distances] [ identifier[isite] ][ literal[int] ][ literal[string] ]
identifier[self] . identifier[neighbors_normalized_distances] [ identifier[isite] ][- literal[int] ][ literal[string] ]= identifier[dfact]
identifier[self] . identifier[neighbors_distances] [ identifier[isite] ][- literal[int] ][ literal[string] ]= identifier[dfact] * identifier[self] . identifier[neighbors_distances] [ identifier[isite] ][ literal[int] ][ literal[string] ]
identifier[self] . identifier[neighbors_angles] [ identifier[isite] ]=[]
identifier[self] . identifier[neighbors_normalized_angles] [ identifier[isite] ]=[]
identifier[normalized_angles] =[ identifier[nb_dict] [ literal[string] ] keyword[for] identifier[nb_dict] keyword[in] identifier[results] ]
identifier[isorted_angles] = identifier[np] . identifier[argsort] ( identifier[normalized_angles] )[::- literal[int] ]
identifier[self] . identifier[neighbors_normalized_angles] [ identifier[isite] ]. identifier[append] ({ literal[string] : identifier[normalized_angles] [ identifier[isorted_angles] [ literal[int] ]],
literal[string] : identifier[normalized_angles] [ identifier[isorted_angles] [ literal[int] ]]})
identifier[self] . identifier[neighbors_angles] [ identifier[isite] ]. identifier[append] ({ literal[string] : identifier[results] [ identifier[isorted_angles] [ literal[int] ]][ literal[string] ],
literal[string] : identifier[results] [ identifier[isorted_angles] [ literal[int] ]][ literal[string] ]})
identifier[icurrent] = literal[int]
identifier[nb_indices] ={ identifier[int] ( identifier[isorted_angles] [ literal[int] ])}
identifier[dnb_indices] ={ identifier[int] ( identifier[isorted_angles] [ literal[int] ])}
keyword[for] identifier[iang] keyword[in] identifier[iter] ( identifier[isorted_angles] ):
identifier[wa] = identifier[normalized_angles] [ identifier[iang] ]
keyword[if] identifier[self] . identifier[minimum_angle_factor] keyword[is] keyword[not] keyword[None] :
keyword[if] identifier[wa] < identifier[self] . identifier[minimum_angle_factor] :
identifier[self] . identifier[neighbors_normalized_angles] [ identifier[isite] ][ identifier[icurrent] ][ literal[string] ]= identifier[list] ( identifier[nb_indices] )
identifier[self] . identifier[neighbors_angles] [ identifier[isite] ][ identifier[icurrent] ][ literal[string] ]= identifier[list] ( identifier[nb_indices] )
identifier[self] . identifier[neighbors_normalized_angles] [ identifier[isite] ][ identifier[icurrent] ][ literal[string] ]= identifier[list] ( identifier[dnb_indices] )
identifier[self] . identifier[neighbors_angles] [ identifier[isite] ][ identifier[icurrent] ][ literal[string] ]= identifier[list] ( identifier[dnb_indices] )
keyword[break]
keyword[if] identifier[np] . identifier[isclose] ( identifier[wa] , identifier[self] . identifier[neighbors_normalized_angles] [ identifier[isite] ][ identifier[icurrent] ][ literal[string] ],
identifier[rtol] = literal[int] , identifier[atol] = identifier[self] . identifier[normalized_angle_tolerance] ):
identifier[self] . identifier[neighbors_normalized_angles] [ identifier[isite] ][ identifier[icurrent] ][ literal[string] ]= identifier[wa]
identifier[self] . identifier[neighbors_angles] [ identifier[isite] ][ identifier[icurrent] ][ literal[string] ]= identifier[results] [ identifier[iang] ][ literal[string] ]
identifier[dnb_indices] . identifier[add] ( identifier[int] ( identifier[iang] ))
keyword[else] :
identifier[self] . identifier[neighbors_normalized_angles] [ identifier[isite] ][ identifier[icurrent] ][ literal[string] ]= identifier[list] ( identifier[nb_indices] )
identifier[self] . identifier[neighbors_angles] [ identifier[isite] ][ identifier[icurrent] ][ literal[string] ]= identifier[list] ( identifier[nb_indices] )
identifier[self] . identifier[neighbors_normalized_angles] [ identifier[isite] ][ identifier[icurrent] ][ literal[string] ]= identifier[list] ( identifier[dnb_indices] )
identifier[self] . identifier[neighbors_angles] [ identifier[isite] ][ identifier[icurrent] ][ literal[string] ]= identifier[list] ( identifier[dnb_indices] )
identifier[dnb_indices] ={ identifier[int] ( identifier[iang] )}
identifier[self] . identifier[neighbors_normalized_angles] [ identifier[isite] ]. identifier[append] ({ literal[string] : identifier[wa] ,
literal[string] : identifier[wa] })
identifier[self] . identifier[neighbors_angles] [ identifier[isite] ]. identifier[append] ({ literal[string] : identifier[results] [ identifier[iang] ][ literal[string] ],
literal[string] : identifier[results] [ identifier[iang] ][ literal[string] ]})
identifier[icurrent] += literal[int]
identifier[nb_indices] . identifier[add] ( identifier[int] ( identifier[iang] ))
keyword[else] :
identifier[self] . identifier[neighbors_normalized_angles] [ identifier[isite] ][ identifier[icurrent] ][ literal[string] ]= identifier[list] ( identifier[nb_indices] )
identifier[self] . identifier[neighbors_angles] [ identifier[isite] ][ identifier[icurrent] ][ literal[string] ]= identifier[list] ( identifier[nb_indices] )
identifier[self] . identifier[neighbors_normalized_angles] [ identifier[isite] ][ identifier[icurrent] ][ literal[string] ]= identifier[list] ( identifier[dnb_indices] )
identifier[self] . identifier[neighbors_angles] [ identifier[isite] ][ identifier[icurrent] ][ literal[string] ]= identifier[list] ( identifier[dnb_indices] )
keyword[for] identifier[iang] keyword[in] identifier[range] ( identifier[len] ( identifier[self] . identifier[neighbors_angles] [ identifier[isite] ])- literal[int] ):
identifier[ang_dict] = identifier[self] . identifier[neighbors_angles] [ identifier[isite] ][ identifier[iang] ]
identifier[ang_dict_next] = identifier[self] . identifier[neighbors_angles] [ identifier[isite] ][ identifier[iang] + literal[int] ]
identifier[ang_dict] [ literal[string] ]= identifier[ang_dict_next] [ literal[string] ]
identifier[nang_dict] = identifier[self] . identifier[neighbors_normalized_angles] [ identifier[isite] ][ identifier[iang] ]
identifier[nang_dict_next] = identifier[self] . identifier[neighbors_normalized_angles] [ identifier[isite] ][ identifier[iang] + literal[int] ]
identifier[nang_dict] [ literal[string] ]= identifier[nang_dict_next] [ literal[string] ]
keyword[if] identifier[self] . identifier[minimum_angle_factor] keyword[is] keyword[not] keyword[None] :
identifier[afact] = identifier[self] . identifier[minimum_angle_factor]
keyword[else] :
identifier[afact] = literal[int]
identifier[self] . identifier[neighbors_normalized_angles] [ identifier[isite] ][- literal[int] ][ literal[string] ]= identifier[afact]
identifier[self] . identifier[neighbors_angles] [ identifier[isite] ][- literal[int] ][ literal[string] ]= identifier[afact] * identifier[self] . identifier[neighbors_angles] [ identifier[isite] ][ literal[int] ][ literal[string] ]
|
def setup_neighbors_distances_and_angles(self, indices):
"""
Initializes the angle and distance separations
:param indices: indices of the sites for which the Voronoi is needed
"""
self.neighbors_distances = [None] * len(self.structure)
self.neighbors_normalized_distances = [None] * len(self.structure)
self.neighbors_angles = [None] * len(self.structure)
self.neighbors_normalized_angles = [None] * len(self.structure)
for isite in indices:
results = self.voronoi_list2[isite]
if results is None:
continue # depends on [control=['if'], data=[]]
#Initializes neighbors distances and normalized distances groups
self.neighbors_distances[isite] = []
self.neighbors_normalized_distances[isite] = []
normalized_distances = [nb_dict['normalized_distance'] for nb_dict in results]
isorted_distances = np.argsort(normalized_distances)
self.neighbors_normalized_distances[isite].append({'min': normalized_distances[isorted_distances[0]], 'max': normalized_distances[isorted_distances[0]]})
self.neighbors_distances[isite].append({'min': results[isorted_distances[0]]['distance'], 'max': results[isorted_distances[0]]['distance']})
icurrent = 0
nb_indices = {int(isorted_distances[0])}
dnb_indices = {int(isorted_distances[0])}
for idist in iter(isorted_distances):
wd = normalized_distances[idist]
if self.maximum_distance_factor is not None:
if wd > self.maximum_distance_factor:
self.neighbors_normalized_distances[isite][icurrent]['nb_indices'] = list(nb_indices)
self.neighbors_distances[isite][icurrent]['nb_indices'] = list(nb_indices)
self.neighbors_normalized_distances[isite][icurrent]['dnb_indices'] = list(dnb_indices)
self.neighbors_distances[isite][icurrent]['dnb_indices'] = list(dnb_indices)
break # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
if np.isclose(wd, self.neighbors_normalized_distances[isite][icurrent]['max'], rtol=0.0, atol=self.normalized_distance_tolerance):
self.neighbors_normalized_distances[isite][icurrent]['max'] = wd
self.neighbors_distances[isite][icurrent]['max'] = results[idist]['distance']
dnb_indices.add(int(idist)) # depends on [control=['if'], data=[]]
else:
self.neighbors_normalized_distances[isite][icurrent]['nb_indices'] = list(nb_indices)
self.neighbors_distances[isite][icurrent]['nb_indices'] = list(nb_indices)
self.neighbors_normalized_distances[isite][icurrent]['dnb_indices'] = list(dnb_indices)
self.neighbors_distances[isite][icurrent]['dnb_indices'] = list(dnb_indices)
dnb_indices = {int(idist)}
self.neighbors_normalized_distances[isite].append({'min': wd, 'max': wd})
self.neighbors_distances[isite].append({'min': results[idist]['distance'], 'max': results[idist]['distance']})
icurrent += 1
nb_indices.add(int(idist)) # depends on [control=['for'], data=['idist']]
else:
self.neighbors_normalized_distances[isite][icurrent]['nb_indices'] = list(nb_indices)
self.neighbors_distances[isite][icurrent]['nb_indices'] = list(nb_indices)
self.neighbors_normalized_distances[isite][icurrent]['dnb_indices'] = list(dnb_indices)
self.neighbors_distances[isite][icurrent]['dnb_indices'] = list(dnb_indices)
for idist in range(len(self.neighbors_distances[isite]) - 1):
dist_dict = self.neighbors_distances[isite][idist]
dist_dict_next = self.neighbors_distances[isite][idist + 1]
dist_dict['next'] = dist_dict_next['min']
ndist_dict = self.neighbors_normalized_distances[isite][idist]
ndist_dict_next = self.neighbors_normalized_distances[isite][idist + 1]
ndist_dict['next'] = ndist_dict_next['min'] # depends on [control=['for'], data=['idist']]
if self.maximum_distance_factor is not None:
dfact = self.maximum_distance_factor # depends on [control=['if'], data=[]]
else:
dfact = self.default_voronoi_cutoff / self.neighbors_distances[isite][0]['min']
self.neighbors_normalized_distances[isite][-1]['next'] = dfact
self.neighbors_distances[isite][-1]['next'] = dfact * self.neighbors_distances[isite][0]['min']
#Initializes neighbors angles and normalized angles groups
self.neighbors_angles[isite] = []
self.neighbors_normalized_angles[isite] = []
normalized_angles = [nb_dict['normalized_angle'] for nb_dict in results]
isorted_angles = np.argsort(normalized_angles)[::-1]
self.neighbors_normalized_angles[isite].append({'max': normalized_angles[isorted_angles[0]], 'min': normalized_angles[isorted_angles[0]]})
self.neighbors_angles[isite].append({'max': results[isorted_angles[0]]['angle'], 'min': results[isorted_angles[0]]['angle']})
icurrent = 0
nb_indices = {int(isorted_angles[0])}
dnb_indices = {int(isorted_angles[0])}
for iang in iter(isorted_angles):
wa = normalized_angles[iang]
if self.minimum_angle_factor is not None:
if wa < self.minimum_angle_factor:
self.neighbors_normalized_angles[isite][icurrent]['nb_indices'] = list(nb_indices)
self.neighbors_angles[isite][icurrent]['nb_indices'] = list(nb_indices)
self.neighbors_normalized_angles[isite][icurrent]['dnb_indices'] = list(dnb_indices)
self.neighbors_angles[isite][icurrent]['dnb_indices'] = list(dnb_indices)
break # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
if np.isclose(wa, self.neighbors_normalized_angles[isite][icurrent]['min'], rtol=0.0, atol=self.normalized_angle_tolerance):
self.neighbors_normalized_angles[isite][icurrent]['min'] = wa
self.neighbors_angles[isite][icurrent]['min'] = results[iang]['angle']
dnb_indices.add(int(iang)) # depends on [control=['if'], data=[]]
else:
self.neighbors_normalized_angles[isite][icurrent]['nb_indices'] = list(nb_indices)
self.neighbors_angles[isite][icurrent]['nb_indices'] = list(nb_indices)
self.neighbors_normalized_angles[isite][icurrent]['dnb_indices'] = list(dnb_indices)
self.neighbors_angles[isite][icurrent]['dnb_indices'] = list(dnb_indices)
dnb_indices = {int(iang)}
self.neighbors_normalized_angles[isite].append({'max': wa, 'min': wa})
self.neighbors_angles[isite].append({'max': results[iang]['angle'], 'min': results[iang]['angle']})
icurrent += 1
nb_indices.add(int(iang)) # depends on [control=['for'], data=['iang']]
else:
self.neighbors_normalized_angles[isite][icurrent]['nb_indices'] = list(nb_indices)
self.neighbors_angles[isite][icurrent]['nb_indices'] = list(nb_indices)
self.neighbors_normalized_angles[isite][icurrent]['dnb_indices'] = list(dnb_indices)
self.neighbors_angles[isite][icurrent]['dnb_indices'] = list(dnb_indices)
for iang in range(len(self.neighbors_angles[isite]) - 1):
ang_dict = self.neighbors_angles[isite][iang]
ang_dict_next = self.neighbors_angles[isite][iang + 1]
ang_dict['next'] = ang_dict_next['max']
nang_dict = self.neighbors_normalized_angles[isite][iang]
nang_dict_next = self.neighbors_normalized_angles[isite][iang + 1]
nang_dict['next'] = nang_dict_next['max'] # depends on [control=['for'], data=['iang']]
if self.minimum_angle_factor is not None:
afact = self.minimum_angle_factor # depends on [control=['if'], data=[]]
else:
afact = 0.0
self.neighbors_normalized_angles[isite][-1]['next'] = afact
self.neighbors_angles[isite][-1]['next'] = afact * self.neighbors_angles[isite][0]['max'] # depends on [control=['for'], data=['isite']]
|
def _get_assistants_snippets(path, name):
'''Get Assistants and Snippets for a given DAP name on a given path'''
result = []
subdirs = {'assistants': 2, 'snippets': 1} # Values used for stripping leading path tokens
for loc in subdirs:
for root, dirs, files in os.walk(os.path.join(path, loc)):
for filename in [utils.strip_prefix(os.path.join(root, f), path) for f in files]:
stripped = os.path.sep.join(filename.split(os.path.sep)[subdirs[loc]:])
if stripped.startswith(os.path.join(name, '')) or stripped == name + '.yaml':
result.append(os.path.join('fakeroot', filename))
return result
|
def function[_get_assistants_snippets, parameter[path, name]]:
constant[Get Assistants and Snippets for a given DAP name on a given path]
variable[result] assign[=] list[[]]
variable[subdirs] assign[=] dictionary[[<ast.Constant object at 0x7da1b1151570>, <ast.Constant object at 0x7da1b1152b30>], [<ast.Constant object at 0x7da1b1151930>, <ast.Constant object at 0x7da1b1152dd0>]]
for taget[name[loc]] in starred[name[subdirs]] begin[:]
for taget[tuple[[<ast.Name object at 0x7da1b1041690>, <ast.Name object at 0x7da1b10407c0>, <ast.Name object at 0x7da1b10412a0>]]] in starred[call[name[os].walk, parameter[call[name[os].path.join, parameter[name[path], name[loc]]]]]] begin[:]
for taget[name[filename]] in starred[<ast.ListComp object at 0x7da1b1041de0>] begin[:]
variable[stripped] assign[=] call[name[os].path.sep.join, parameter[call[call[name[filename].split, parameter[name[os].path.sep]]][<ast.Slice object at 0x7da1b1042ce0>]]]
if <ast.BoolOp object at 0x7da1b10416c0> begin[:]
call[name[result].append, parameter[call[name[os].path.join, parameter[constant[fakeroot], name[filename]]]]]
return[name[result]]
|
keyword[def] identifier[_get_assistants_snippets] ( identifier[path] , identifier[name] ):
literal[string]
identifier[result] =[]
identifier[subdirs] ={ literal[string] : literal[int] , literal[string] : literal[int] }
keyword[for] identifier[loc] keyword[in] identifier[subdirs] :
keyword[for] identifier[root] , identifier[dirs] , identifier[files] keyword[in] identifier[os] . identifier[walk] ( identifier[os] . identifier[path] . identifier[join] ( identifier[path] , identifier[loc] )):
keyword[for] identifier[filename] keyword[in] [ identifier[utils] . identifier[strip_prefix] ( identifier[os] . identifier[path] . identifier[join] ( identifier[root] , identifier[f] ), identifier[path] ) keyword[for] identifier[f] keyword[in] identifier[files] ]:
identifier[stripped] = identifier[os] . identifier[path] . identifier[sep] . identifier[join] ( identifier[filename] . identifier[split] ( identifier[os] . identifier[path] . identifier[sep] )[ identifier[subdirs] [ identifier[loc] ]:])
keyword[if] identifier[stripped] . identifier[startswith] ( identifier[os] . identifier[path] . identifier[join] ( identifier[name] , literal[string] )) keyword[or] identifier[stripped] == identifier[name] + literal[string] :
identifier[result] . identifier[append] ( identifier[os] . identifier[path] . identifier[join] ( literal[string] , identifier[filename] ))
keyword[return] identifier[result]
|
def _get_assistants_snippets(path, name):
"""Get Assistants and Snippets for a given DAP name on a given path"""
result = []
subdirs = {'assistants': 2, 'snippets': 1} # Values used for stripping leading path tokens
for loc in subdirs:
for (root, dirs, files) in os.walk(os.path.join(path, loc)):
for filename in [utils.strip_prefix(os.path.join(root, f), path) for f in files]:
stripped = os.path.sep.join(filename.split(os.path.sep)[subdirs[loc]:])
if stripped.startswith(os.path.join(name, '')) or stripped == name + '.yaml':
result.append(os.path.join('fakeroot', filename)) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['filename']] # depends on [control=['for'], data=[]] # depends on [control=['for'], data=['loc']]
return result
|
def psdcompletion(A, reordered = True, **kwargs):
"""
Maximum determinant positive semidefinite matrix completion. The
routine takes a cspmatrix :math:`A` and returns the maximum determinant
positive semidefinite matrix completion :math:`X` as a dense matrix, i.e.,
.. math::
P( X ) = A
:param A: :py:class:`cspmatrix`
:param reordered: boolean
"""
assert isinstance(A, cspmatrix) and A.is_factor is False, "A must be a cspmatrix"
tol = kwargs.get('tol',1e-15)
X = matrix(A.spmatrix(reordered = True, symmetric = True))
symb = A.symb
n = symb.n
snptr = symb.snptr
sncolptr = symb.sncolptr
snrowidx = symb.snrowidx
# visit supernodes in reverse (descending) order
for k in range(symb.Nsn-1,-1,-1):
nn = snptr[k+1]-snptr[k]
beta = snrowidx[sncolptr[k]:sncolptr[k+1]]
nj = len(beta)
if nj-nn == 0: continue
alpha = beta[nn:]
nu = beta[:nn]
eta = matrix([matrix(range(beta[kk]+1,beta[kk+1])) for kk in range(nj-1)] + [matrix(range(beta[-1]+1,n))])
try:
# Try Cholesky factorization first
Xaa = X[alpha,alpha]
lapack.potrf(Xaa)
Xan = X[alpha,nu]
lapack.trtrs(Xaa, Xan, trans = 'N')
XeaT = X[eta,alpha].T
lapack.trtrs(Xaa, XeaT, trans = 'N')
# Compute update
tmp = XeaT.T*Xan
except:
# If Cholesky fact. fails, switch to EVD: Xaa = Z*diag(w)*Z.T
Xaa = X[alpha,alpha]
w = matrix(0.0,(Xaa.size[0],1))
Z = matrix(0.0,Xaa.size)
lapack.syevr(Xaa, w, jobz='V', range='A', uplo='L', Z=Z)
# Pseudo-inverse: Xp = pinv(Xaa)
lambda_max = max(w)
Xp = Z*spmatrix([1.0/wi if wi > lambda_max*tol else 0.0 for wi in w],range(len(w)),range(len(w)))*Z.T
# Compute update
tmp = X[eta,alpha]*Xp*X[alpha,nu]
X[eta,nu] = tmp
X[nu,eta] = tmp.T
if reordered:
return X
else:
return X[symb.ip,symb.ip]
|
def function[psdcompletion, parameter[A, reordered]]:
constant[
Maximum determinant positive semidefinite matrix completion. The
routine takes a cspmatrix :math:`A` and returns the maximum determinant
positive semidefinite matrix completion :math:`X` as a dense matrix, i.e.,
.. math::
P( X ) = A
:param A: :py:class:`cspmatrix`
:param reordered: boolean
]
assert[<ast.BoolOp object at 0x7da1b2590fd0>]
variable[tol] assign[=] call[name[kwargs].get, parameter[constant[tol], constant[1e-15]]]
variable[X] assign[=] call[name[matrix], parameter[call[name[A].spmatrix, parameter[]]]]
variable[symb] assign[=] name[A].symb
variable[n] assign[=] name[symb].n
variable[snptr] assign[=] name[symb].snptr
variable[sncolptr] assign[=] name[symb].sncolptr
variable[snrowidx] assign[=] name[symb].snrowidx
for taget[name[k]] in starred[call[name[range], parameter[binary_operation[name[symb].Nsn - constant[1]], <ast.UnaryOp object at 0x7da1b25a0220>, <ast.UnaryOp object at 0x7da1b25a01c0>]]] begin[:]
variable[nn] assign[=] binary_operation[call[name[snptr]][binary_operation[name[k] + constant[1]]] - call[name[snptr]][name[k]]]
variable[beta] assign[=] call[name[snrowidx]][<ast.Slice object at 0x7da1b257bb80>]
variable[nj] assign[=] call[name[len], parameter[name[beta]]]
if compare[binary_operation[name[nj] - name[nn]] equal[==] constant[0]] begin[:]
continue
variable[alpha] assign[=] call[name[beta]][<ast.Slice object at 0x7da1b257b6a0>]
variable[nu] assign[=] call[name[beta]][<ast.Slice object at 0x7da1b257b580>]
variable[eta] assign[=] call[name[matrix], parameter[binary_operation[<ast.ListComp object at 0x7da1b257b430> + list[[<ast.Call object at 0x7da1b257afb0>]]]]]
<ast.Try object at 0x7da1b257bf40>
call[name[X]][tuple[[<ast.Name object at 0x7da1b25b9c90>, <ast.Name object at 0x7da1b25b9cc0>]]] assign[=] name[tmp]
call[name[X]][tuple[[<ast.Name object at 0x7da1b25b9de0>, <ast.Name object at 0x7da1b25b9e10>]]] assign[=] name[tmp].T
if name[reordered] begin[:]
return[name[X]]
|
keyword[def] identifier[psdcompletion] ( identifier[A] , identifier[reordered] = keyword[True] ,** identifier[kwargs] ):
literal[string]
keyword[assert] identifier[isinstance] ( identifier[A] , identifier[cspmatrix] ) keyword[and] identifier[A] . identifier[is_factor] keyword[is] keyword[False] , literal[string]
identifier[tol] = identifier[kwargs] . identifier[get] ( literal[string] , literal[int] )
identifier[X] = identifier[matrix] ( identifier[A] . identifier[spmatrix] ( identifier[reordered] = keyword[True] , identifier[symmetric] = keyword[True] ))
identifier[symb] = identifier[A] . identifier[symb]
identifier[n] = identifier[symb] . identifier[n]
identifier[snptr] = identifier[symb] . identifier[snptr]
identifier[sncolptr] = identifier[symb] . identifier[sncolptr]
identifier[snrowidx] = identifier[symb] . identifier[snrowidx]
keyword[for] identifier[k] keyword[in] identifier[range] ( identifier[symb] . identifier[Nsn] - literal[int] ,- literal[int] ,- literal[int] ):
identifier[nn] = identifier[snptr] [ identifier[k] + literal[int] ]- identifier[snptr] [ identifier[k] ]
identifier[beta] = identifier[snrowidx] [ identifier[sncolptr] [ identifier[k] ]: identifier[sncolptr] [ identifier[k] + literal[int] ]]
identifier[nj] = identifier[len] ( identifier[beta] )
keyword[if] identifier[nj] - identifier[nn] == literal[int] : keyword[continue]
identifier[alpha] = identifier[beta] [ identifier[nn] :]
identifier[nu] = identifier[beta] [: identifier[nn] ]
identifier[eta] = identifier[matrix] ([ identifier[matrix] ( identifier[range] ( identifier[beta] [ identifier[kk] ]+ literal[int] , identifier[beta] [ identifier[kk] + literal[int] ])) keyword[for] identifier[kk] keyword[in] identifier[range] ( identifier[nj] - literal[int] )]+[ identifier[matrix] ( identifier[range] ( identifier[beta] [- literal[int] ]+ literal[int] , identifier[n] ))])
keyword[try] :
identifier[Xaa] = identifier[X] [ identifier[alpha] , identifier[alpha] ]
identifier[lapack] . identifier[potrf] ( identifier[Xaa] )
identifier[Xan] = identifier[X] [ identifier[alpha] , identifier[nu] ]
identifier[lapack] . identifier[trtrs] ( identifier[Xaa] , identifier[Xan] , identifier[trans] = literal[string] )
identifier[XeaT] = identifier[X] [ identifier[eta] , identifier[alpha] ]. identifier[T]
identifier[lapack] . identifier[trtrs] ( identifier[Xaa] , identifier[XeaT] , identifier[trans] = literal[string] )
identifier[tmp] = identifier[XeaT] . identifier[T] * identifier[Xan]
keyword[except] :
identifier[Xaa] = identifier[X] [ identifier[alpha] , identifier[alpha] ]
identifier[w] = identifier[matrix] ( literal[int] ,( identifier[Xaa] . identifier[size] [ literal[int] ], literal[int] ))
identifier[Z] = identifier[matrix] ( literal[int] , identifier[Xaa] . identifier[size] )
identifier[lapack] . identifier[syevr] ( identifier[Xaa] , identifier[w] , identifier[jobz] = literal[string] , identifier[range] = literal[string] , identifier[uplo] = literal[string] , identifier[Z] = identifier[Z] )
identifier[lambda_max] = identifier[max] ( identifier[w] )
identifier[Xp] = identifier[Z] * identifier[spmatrix] ([ literal[int] / identifier[wi] keyword[if] identifier[wi] > identifier[lambda_max] * identifier[tol] keyword[else] literal[int] keyword[for] identifier[wi] keyword[in] identifier[w] ], identifier[range] ( identifier[len] ( identifier[w] )), identifier[range] ( identifier[len] ( identifier[w] )))* identifier[Z] . identifier[T]
identifier[tmp] = identifier[X] [ identifier[eta] , identifier[alpha] ]* identifier[Xp] * identifier[X] [ identifier[alpha] , identifier[nu] ]
identifier[X] [ identifier[eta] , identifier[nu] ]= identifier[tmp]
identifier[X] [ identifier[nu] , identifier[eta] ]= identifier[tmp] . identifier[T]
keyword[if] identifier[reordered] :
keyword[return] identifier[X]
keyword[else] :
keyword[return] identifier[X] [ identifier[symb] . identifier[ip] , identifier[symb] . identifier[ip] ]
|
def psdcompletion(A, reordered=True, **kwargs):
"""
Maximum determinant positive semidefinite matrix completion. The
routine takes a cspmatrix :math:`A` and returns the maximum determinant
positive semidefinite matrix completion :math:`X` as a dense matrix, i.e.,
.. math::
P( X ) = A
:param A: :py:class:`cspmatrix`
:param reordered: boolean
"""
assert isinstance(A, cspmatrix) and A.is_factor is False, 'A must be a cspmatrix'
tol = kwargs.get('tol', 1e-15)
X = matrix(A.spmatrix(reordered=True, symmetric=True))
symb = A.symb
n = symb.n
snptr = symb.snptr
sncolptr = symb.sncolptr
snrowidx = symb.snrowidx
# visit supernodes in reverse (descending) order
for k in range(symb.Nsn - 1, -1, -1):
nn = snptr[k + 1] - snptr[k]
beta = snrowidx[sncolptr[k]:sncolptr[k + 1]]
nj = len(beta)
if nj - nn == 0:
continue # depends on [control=['if'], data=[]]
alpha = beta[nn:]
nu = beta[:nn]
eta = matrix([matrix(range(beta[kk] + 1, beta[kk + 1])) for kk in range(nj - 1)] + [matrix(range(beta[-1] + 1, n))])
try:
# Try Cholesky factorization first
Xaa = X[alpha, alpha]
lapack.potrf(Xaa)
Xan = X[alpha, nu]
lapack.trtrs(Xaa, Xan, trans='N')
XeaT = X[eta, alpha].T
lapack.trtrs(Xaa, XeaT, trans='N')
# Compute update
tmp = XeaT.T * Xan # depends on [control=['try'], data=[]]
except:
# If Cholesky fact. fails, switch to EVD: Xaa = Z*diag(w)*Z.T
Xaa = X[alpha, alpha]
w = matrix(0.0, (Xaa.size[0], 1))
Z = matrix(0.0, Xaa.size)
lapack.syevr(Xaa, w, jobz='V', range='A', uplo='L', Z=Z)
# Pseudo-inverse: Xp = pinv(Xaa)
lambda_max = max(w)
Xp = Z * spmatrix([1.0 / wi if wi > lambda_max * tol else 0.0 for wi in w], range(len(w)), range(len(w))) * Z.T
# Compute update
tmp = X[eta, alpha] * Xp * X[alpha, nu] # depends on [control=['except'], data=[]]
X[eta, nu] = tmp
X[nu, eta] = tmp.T # depends on [control=['for'], data=['k']]
if reordered:
return X # depends on [control=['if'], data=[]]
else:
return X[symb.ip, symb.ip]
|
def _create_model(self, view_getter=None, source_getters=None,
model_factory=None, officer=None):
"""
Creates a model from the model factory after retrieving
the source and the view. The officer is the IOfficer
FOR THE MODEL TO BE CREATED and NO OFFICER CHECKS ARE PERFORMED.
"""
if view_getter is not None:
d = defer.succeed(view_getter)
d.addCallback(self._retrieve_view)
d.addCallback(self._check_view)
else:
# views are inherited
d = defer.succeed(self.model.view)
d.addCallback(self._retrieve_model, source_getters,
model_factory, officer)
d.addErrback(self._filter_errors)
return d
|
def function[_create_model, parameter[self, view_getter, source_getters, model_factory, officer]]:
constant[
Creates a model from the model factory after retrieving
the source and the view. The officer is the IOfficer
FOR THE MODEL TO BE CREATED and NO OFFICER CHECKS ARE PERFORMED.
]
if compare[name[view_getter] is_not constant[None]] begin[:]
variable[d] assign[=] call[name[defer].succeed, parameter[name[view_getter]]]
call[name[d].addCallback, parameter[name[self]._retrieve_view]]
call[name[d].addCallback, parameter[name[self]._check_view]]
call[name[d].addCallback, parameter[name[self]._retrieve_model, name[source_getters], name[model_factory], name[officer]]]
call[name[d].addErrback, parameter[name[self]._filter_errors]]
return[name[d]]
|
keyword[def] identifier[_create_model] ( identifier[self] , identifier[view_getter] = keyword[None] , identifier[source_getters] = keyword[None] ,
identifier[model_factory] = keyword[None] , identifier[officer] = keyword[None] ):
literal[string]
keyword[if] identifier[view_getter] keyword[is] keyword[not] keyword[None] :
identifier[d] = identifier[defer] . identifier[succeed] ( identifier[view_getter] )
identifier[d] . identifier[addCallback] ( identifier[self] . identifier[_retrieve_view] )
identifier[d] . identifier[addCallback] ( identifier[self] . identifier[_check_view] )
keyword[else] :
identifier[d] = identifier[defer] . identifier[succeed] ( identifier[self] . identifier[model] . identifier[view] )
identifier[d] . identifier[addCallback] ( identifier[self] . identifier[_retrieve_model] , identifier[source_getters] ,
identifier[model_factory] , identifier[officer] )
identifier[d] . identifier[addErrback] ( identifier[self] . identifier[_filter_errors] )
keyword[return] identifier[d]
|
def _create_model(self, view_getter=None, source_getters=None, model_factory=None, officer=None):
"""
Creates a model from the model factory after retrieving
the source and the view. The officer is the IOfficer
FOR THE MODEL TO BE CREATED and NO OFFICER CHECKS ARE PERFORMED.
"""
if view_getter is not None:
d = defer.succeed(view_getter)
d.addCallback(self._retrieve_view)
d.addCallback(self._check_view) # depends on [control=['if'], data=['view_getter']]
else:
# views are inherited
d = defer.succeed(self.model.view)
d.addCallback(self._retrieve_model, source_getters, model_factory, officer)
d.addErrback(self._filter_errors)
return d
|
def MI_enumInstanceNames(self,
env,
objPath):
# pylint: disable=invalid-name
"""Return instance names of a given CIM class
Implements the WBEM operation EnumerateInstanceNames in terms
of the enum_instances method. A derived class will not normally
override this method.
"""
logger = env.get_logger()
logger.log_debug('CIMProvider2 MI_enumInstanceNames called...')
model = pywbem.CIMInstance(classname=objPath.classname,
path=objPath)
gen = self.enum_instances(env=env,
model=model,
keys_only=True)
try:
iter(gen)
except TypeError:
logger.log_debug('CIMProvider2 MI_enumInstanceNames returning')
return
for inst in gen:
yield inst.path
logger.log_debug('CIMProvider2 MI_enumInstanceNames returning')
|
def function[MI_enumInstanceNames, parameter[self, env, objPath]]:
constant[Return instance names of a given CIM class
Implements the WBEM operation EnumerateInstanceNames in terms
of the enum_instances method. A derived class will not normally
override this method.
]
variable[logger] assign[=] call[name[env].get_logger, parameter[]]
call[name[logger].log_debug, parameter[constant[CIMProvider2 MI_enumInstanceNames called...]]]
variable[model] assign[=] call[name[pywbem].CIMInstance, parameter[]]
variable[gen] assign[=] call[name[self].enum_instances, parameter[]]
<ast.Try object at 0x7da1b0e9c2e0>
for taget[name[inst]] in starred[name[gen]] begin[:]
<ast.Yield object at 0x7da1b26ace50>
call[name[logger].log_debug, parameter[constant[CIMProvider2 MI_enumInstanceNames returning]]]
|
keyword[def] identifier[MI_enumInstanceNames] ( identifier[self] ,
identifier[env] ,
identifier[objPath] ):
literal[string]
identifier[logger] = identifier[env] . identifier[get_logger] ()
identifier[logger] . identifier[log_debug] ( literal[string] )
identifier[model] = identifier[pywbem] . identifier[CIMInstance] ( identifier[classname] = identifier[objPath] . identifier[classname] ,
identifier[path] = identifier[objPath] )
identifier[gen] = identifier[self] . identifier[enum_instances] ( identifier[env] = identifier[env] ,
identifier[model] = identifier[model] ,
identifier[keys_only] = keyword[True] )
keyword[try] :
identifier[iter] ( identifier[gen] )
keyword[except] identifier[TypeError] :
identifier[logger] . identifier[log_debug] ( literal[string] )
keyword[return]
keyword[for] identifier[inst] keyword[in] identifier[gen] :
keyword[yield] identifier[inst] . identifier[path]
identifier[logger] . identifier[log_debug] ( literal[string] )
|
def MI_enumInstanceNames(self, env, objPath):
# pylint: disable=invalid-name
'Return instance names of a given CIM class\n\n Implements the WBEM operation EnumerateInstanceNames in terms\n of the enum_instances method. A derived class will not normally\n override this method.\n\n '
logger = env.get_logger()
logger.log_debug('CIMProvider2 MI_enumInstanceNames called...')
model = pywbem.CIMInstance(classname=objPath.classname, path=objPath)
gen = self.enum_instances(env=env, model=model, keys_only=True)
try:
iter(gen) # depends on [control=['try'], data=[]]
except TypeError:
logger.log_debug('CIMProvider2 MI_enumInstanceNames returning')
return # depends on [control=['except'], data=[]]
for inst in gen:
yield inst.path # depends on [control=['for'], data=['inst']]
logger.log_debug('CIMProvider2 MI_enumInstanceNames returning')
|
def insert(self, docs, *args, **kwargs):
"""Backwards compatibility with insert"""
if isinstance(docs, list):
return self.insert_many(docs, *args, **kwargs)
else:
return self.insert_one(docs, *args, **kwargs)
|
def function[insert, parameter[self, docs]]:
constant[Backwards compatibility with insert]
if call[name[isinstance], parameter[name[docs], name[list]]] begin[:]
return[call[name[self].insert_many, parameter[name[docs], <ast.Starred object at 0x7da18f58d3c0>]]]
|
keyword[def] identifier[insert] ( identifier[self] , identifier[docs] ,* identifier[args] ,** identifier[kwargs] ):
literal[string]
keyword[if] identifier[isinstance] ( identifier[docs] , identifier[list] ):
keyword[return] identifier[self] . identifier[insert_many] ( identifier[docs] ,* identifier[args] ,** identifier[kwargs] )
keyword[else] :
keyword[return] identifier[self] . identifier[insert_one] ( identifier[docs] ,* identifier[args] ,** identifier[kwargs] )
|
def insert(self, docs, *args, **kwargs):
"""Backwards compatibility with insert"""
if isinstance(docs, list):
return self.insert_many(docs, *args, **kwargs) # depends on [control=['if'], data=[]]
else:
return self.insert_one(docs, *args, **kwargs)
|
def iterate(self, delay=None):
"""See twisted.internet.interfaces.IReactorCore.iterate.
"""
self.runUntilCurrent()
self.doEvents()
self.doIteration(delay)
|
def function[iterate, parameter[self, delay]]:
constant[See twisted.internet.interfaces.IReactorCore.iterate.
]
call[name[self].runUntilCurrent, parameter[]]
call[name[self].doEvents, parameter[]]
call[name[self].doIteration, parameter[name[delay]]]
|
keyword[def] identifier[iterate] ( identifier[self] , identifier[delay] = keyword[None] ):
literal[string]
identifier[self] . identifier[runUntilCurrent] ()
identifier[self] . identifier[doEvents] ()
identifier[self] . identifier[doIteration] ( identifier[delay] )
|
def iterate(self, delay=None):
"""See twisted.internet.interfaces.IReactorCore.iterate.
"""
self.runUntilCurrent()
self.doEvents()
self.doIteration(delay)
|
def starargs(self):
"""The positional arguments that unpack something.
:type: list(Starred)
"""
args = self.args or []
return [arg for arg in args if isinstance(arg, Starred)]
|
def function[starargs, parameter[self]]:
constant[The positional arguments that unpack something.
:type: list(Starred)
]
variable[args] assign[=] <ast.BoolOp object at 0x7da1b1e78d30>
return[<ast.ListComp object at 0x7da1b1e7b5e0>]
|
keyword[def] identifier[starargs] ( identifier[self] ):
literal[string]
identifier[args] = identifier[self] . identifier[args] keyword[or] []
keyword[return] [ identifier[arg] keyword[for] identifier[arg] keyword[in] identifier[args] keyword[if] identifier[isinstance] ( identifier[arg] , identifier[Starred] )]
|
def starargs(self):
"""The positional arguments that unpack something.
:type: list(Starred)
"""
args = self.args or []
return [arg for arg in args if isinstance(arg, Starred)]
|
def initdb(self):
"""Create tables and indices as needed."""
if hasattr(self, 'alchemist'):
self.alchemist.meta.create_all(self.engine)
if 'branch' not in self.globl:
self.globl['branch'] = 'trunk'
if 'rev' not in self.globl:
self.globl['rev'] = 0
return
from sqlite3 import OperationalError
cursor = self.connection.cursor()
try:
cursor.execute('SELECT * FROM global;')
except OperationalError:
cursor.execute(self.strings['create_global'])
if 'branch' not in self.globl:
self.globl['branch'] = 'trunk'
if 'turn' not in self.globl:
self.globl['turn'] = 0
if 'tick' not in self.globl:
self.globl['tick'] = 0
for table in (
'branches',
'turns',
'graphs',
'graph_val',
'nodes',
'node_val',
'edges',
'edge_val',
'plans',
'plan_ticks'
):
try:
cursor.execute('SELECT * FROM ' + table + ';')
except OperationalError:
cursor.execute(self.strings['create_' + table])
|
def function[initdb, parameter[self]]:
constant[Create tables and indices as needed.]
if call[name[hasattr], parameter[name[self], constant[alchemist]]] begin[:]
call[name[self].alchemist.meta.create_all, parameter[name[self].engine]]
if compare[constant[branch] <ast.NotIn object at 0x7da2590d7190> name[self].globl] begin[:]
call[name[self].globl][constant[branch]] assign[=] constant[trunk]
if compare[constant[rev] <ast.NotIn object at 0x7da2590d7190> name[self].globl] begin[:]
call[name[self].globl][constant[rev]] assign[=] constant[0]
return[None]
from relative_module[sqlite3] import module[OperationalError]
variable[cursor] assign[=] call[name[self].connection.cursor, parameter[]]
<ast.Try object at 0x7da20c795ff0>
if compare[constant[branch] <ast.NotIn object at 0x7da2590d7190> name[self].globl] begin[:]
call[name[self].globl][constant[branch]] assign[=] constant[trunk]
if compare[constant[turn] <ast.NotIn object at 0x7da2590d7190> name[self].globl] begin[:]
call[name[self].globl][constant[turn]] assign[=] constant[0]
if compare[constant[tick] <ast.NotIn object at 0x7da2590d7190> name[self].globl] begin[:]
call[name[self].globl][constant[tick]] assign[=] constant[0]
for taget[name[table]] in starred[tuple[[<ast.Constant object at 0x7da20c796050>, <ast.Constant object at 0x7da20c796410>, <ast.Constant object at 0x7da20c794dc0>, <ast.Constant object at 0x7da20c796290>, <ast.Constant object at 0x7da20c7951b0>, <ast.Constant object at 0x7da20c796530>, <ast.Constant object at 0x7da20c7948e0>, <ast.Constant object at 0x7da20c794e80>, <ast.Constant object at 0x7da20c7944c0>, <ast.Constant object at 0x7da20c796320>]]] begin[:]
<ast.Try object at 0x7da20c794490>
|
keyword[def] identifier[initdb] ( identifier[self] ):
literal[string]
keyword[if] identifier[hasattr] ( identifier[self] , literal[string] ):
identifier[self] . identifier[alchemist] . identifier[meta] . identifier[create_all] ( identifier[self] . identifier[engine] )
keyword[if] literal[string] keyword[not] keyword[in] identifier[self] . identifier[globl] :
identifier[self] . identifier[globl] [ literal[string] ]= literal[string]
keyword[if] literal[string] keyword[not] keyword[in] identifier[self] . identifier[globl] :
identifier[self] . identifier[globl] [ literal[string] ]= literal[int]
keyword[return]
keyword[from] identifier[sqlite3] keyword[import] identifier[OperationalError]
identifier[cursor] = identifier[self] . identifier[connection] . identifier[cursor] ()
keyword[try] :
identifier[cursor] . identifier[execute] ( literal[string] )
keyword[except] identifier[OperationalError] :
identifier[cursor] . identifier[execute] ( identifier[self] . identifier[strings] [ literal[string] ])
keyword[if] literal[string] keyword[not] keyword[in] identifier[self] . identifier[globl] :
identifier[self] . identifier[globl] [ literal[string] ]= literal[string]
keyword[if] literal[string] keyword[not] keyword[in] identifier[self] . identifier[globl] :
identifier[self] . identifier[globl] [ literal[string] ]= literal[int]
keyword[if] literal[string] keyword[not] keyword[in] identifier[self] . identifier[globl] :
identifier[self] . identifier[globl] [ literal[string] ]= literal[int]
keyword[for] identifier[table] keyword[in] (
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ,
literal[string]
):
keyword[try] :
identifier[cursor] . identifier[execute] ( literal[string] + identifier[table] + literal[string] )
keyword[except] identifier[OperationalError] :
identifier[cursor] . identifier[execute] ( identifier[self] . identifier[strings] [ literal[string] + identifier[table] ])
|
def initdb(self):
"""Create tables and indices as needed."""
if hasattr(self, 'alchemist'):
self.alchemist.meta.create_all(self.engine)
if 'branch' not in self.globl:
self.globl['branch'] = 'trunk' # depends on [control=['if'], data=[]]
if 'rev' not in self.globl:
self.globl['rev'] = 0 # depends on [control=['if'], data=[]]
return # depends on [control=['if'], data=[]]
from sqlite3 import OperationalError
cursor = self.connection.cursor()
try:
cursor.execute('SELECT * FROM global;') # depends on [control=['try'], data=[]]
except OperationalError:
cursor.execute(self.strings['create_global']) # depends on [control=['except'], data=[]]
if 'branch' not in self.globl:
self.globl['branch'] = 'trunk' # depends on [control=['if'], data=[]]
if 'turn' not in self.globl:
self.globl['turn'] = 0 # depends on [control=['if'], data=[]]
if 'tick' not in self.globl:
self.globl['tick'] = 0 # depends on [control=['if'], data=[]]
for table in ('branches', 'turns', 'graphs', 'graph_val', 'nodes', 'node_val', 'edges', 'edge_val', 'plans', 'plan_ticks'):
try:
cursor.execute('SELECT * FROM ' + table + ';') # depends on [control=['try'], data=[]]
except OperationalError:
cursor.execute(self.strings['create_' + table]) # depends on [control=['except'], data=[]] # depends on [control=['for'], data=['table']]
|
def stop_consumer(self):
"""Stop the consumer object and allow it to do a clean shutdown if it
has the ability to do so.
"""
try:
LOGGER.info('Shutting down the consumer')
self.consumer.shutdown()
except AttributeError:
LOGGER.debug('Consumer does not have a shutdown method')
|
def function[stop_consumer, parameter[self]]:
constant[Stop the consumer object and allow it to do a clean shutdown if it
has the ability to do so.
]
<ast.Try object at 0x7da18dc05f30>
|
keyword[def] identifier[stop_consumer] ( identifier[self] ):
literal[string]
keyword[try] :
identifier[LOGGER] . identifier[info] ( literal[string] )
identifier[self] . identifier[consumer] . identifier[shutdown] ()
keyword[except] identifier[AttributeError] :
identifier[LOGGER] . identifier[debug] ( literal[string] )
|
def stop_consumer(self):
"""Stop the consumer object and allow it to do a clean shutdown if it
has the ability to do so.
"""
try:
LOGGER.info('Shutting down the consumer')
self.consumer.shutdown() # depends on [control=['try'], data=[]]
except AttributeError:
LOGGER.debug('Consumer does not have a shutdown method') # depends on [control=['except'], data=[]]
|
def get_all_user_groups(self, **kwargs): # noqa: E501
"""Get all user groups for a customer # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_all_user_groups(async_req=True)
>>> result = thread.get()
:param async_req bool
:param int offset:
:param int limit:
:return: ResponseContainerPagedUserGroup
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_all_user_groups_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.get_all_user_groups_with_http_info(**kwargs) # noqa: E501
return data
|
def function[get_all_user_groups, parameter[self]]:
constant[Get all user groups for a customer # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_all_user_groups(async_req=True)
>>> result = thread.get()
:param async_req bool
:param int offset:
:param int limit:
:return: ResponseContainerPagedUserGroup
If the method is called asynchronously,
returns the request thread.
]
call[name[kwargs]][constant[_return_http_data_only]] assign[=] constant[True]
if call[name[kwargs].get, parameter[constant[async_req]]] begin[:]
return[call[name[self].get_all_user_groups_with_http_info, parameter[]]]
|
keyword[def] identifier[get_all_user_groups] ( identifier[self] ,** identifier[kwargs] ):
literal[string]
identifier[kwargs] [ literal[string] ]= keyword[True]
keyword[if] identifier[kwargs] . identifier[get] ( literal[string] ):
keyword[return] identifier[self] . identifier[get_all_user_groups_with_http_info] (** identifier[kwargs] )
keyword[else] :
( identifier[data] )= identifier[self] . identifier[get_all_user_groups_with_http_info] (** identifier[kwargs] )
keyword[return] identifier[data]
|
def get_all_user_groups(self, **kwargs): # noqa: E501
'Get all user groups for a customer # noqa: E501\n\n # noqa: E501\n This method makes a synchronous HTTP request by default. To make an\n asynchronous HTTP request, please pass async_req=True\n >>> thread = api.get_all_user_groups(async_req=True)\n >>> result = thread.get()\n\n :param async_req bool\n :param int offset:\n :param int limit:\n :return: ResponseContainerPagedUserGroup\n If the method is called asynchronously,\n returns the request thread.\n '
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_all_user_groups_with_http_info(**kwargs) # noqa: E501 # depends on [control=['if'], data=[]]
else:
data = self.get_all_user_groups_with_http_info(**kwargs) # noqa: E501
return data
|
def get_routes(self, viewset):
"""
DREST routes injection, overrides DRF's get_routes() method, which
gets called for each registered viewset.
"""
routes = super(DynamicRouter, self).get_routes(viewset)
routes += self.get_relation_routes(viewset)
return routes
|
def function[get_routes, parameter[self, viewset]]:
constant[
DREST routes injection, overrides DRF's get_routes() method, which
gets called for each registered viewset.
]
variable[routes] assign[=] call[call[name[super], parameter[name[DynamicRouter], name[self]]].get_routes, parameter[name[viewset]]]
<ast.AugAssign object at 0x7da18eb57d60>
return[name[routes]]
|
keyword[def] identifier[get_routes] ( identifier[self] , identifier[viewset] ):
literal[string]
identifier[routes] = identifier[super] ( identifier[DynamicRouter] , identifier[self] ). identifier[get_routes] ( identifier[viewset] )
identifier[routes] += identifier[self] . identifier[get_relation_routes] ( identifier[viewset] )
keyword[return] identifier[routes]
|
def get_routes(self, viewset):
"""
DREST routes injection, overrides DRF's get_routes() method, which
gets called for each registered viewset.
"""
routes = super(DynamicRouter, self).get_routes(viewset)
routes += self.get_relation_routes(viewset)
return routes
|
def ts_stats_significance_bootstrap(ts, stats_ts, stats_func, B=1000, b=3):
""" Compute the statistical significance of a test statistic at each point
of the time series by using timeseries boootstrap.
"""
pvals = []
for tp in np.arange(0, len(stats_ts)):
pf = partial(stats_func, t=tp)
bs = bootstrap_ts(ts, pf, B=B, b=b)
ci = get_ci(bs, blockratio=b / len(stats_ts))
pval = abs(get_pvalue(stats_ts[tp], ci))
pvals.append(pval)
return pvals
|
def function[ts_stats_significance_bootstrap, parameter[ts, stats_ts, stats_func, B, b]]:
constant[ Compute the statistical significance of a test statistic at each point
of the time series by using timeseries boootstrap.
]
variable[pvals] assign[=] list[[]]
for taget[name[tp]] in starred[call[name[np].arange, parameter[constant[0], call[name[len], parameter[name[stats_ts]]]]]] begin[:]
variable[pf] assign[=] call[name[partial], parameter[name[stats_func]]]
variable[bs] assign[=] call[name[bootstrap_ts], parameter[name[ts], name[pf]]]
variable[ci] assign[=] call[name[get_ci], parameter[name[bs]]]
variable[pval] assign[=] call[name[abs], parameter[call[name[get_pvalue], parameter[call[name[stats_ts]][name[tp]], name[ci]]]]]
call[name[pvals].append, parameter[name[pval]]]
return[name[pvals]]
|
keyword[def] identifier[ts_stats_significance_bootstrap] ( identifier[ts] , identifier[stats_ts] , identifier[stats_func] , identifier[B] = literal[int] , identifier[b] = literal[int] ):
literal[string]
identifier[pvals] =[]
keyword[for] identifier[tp] keyword[in] identifier[np] . identifier[arange] ( literal[int] , identifier[len] ( identifier[stats_ts] )):
identifier[pf] = identifier[partial] ( identifier[stats_func] , identifier[t] = identifier[tp] )
identifier[bs] = identifier[bootstrap_ts] ( identifier[ts] , identifier[pf] , identifier[B] = identifier[B] , identifier[b] = identifier[b] )
identifier[ci] = identifier[get_ci] ( identifier[bs] , identifier[blockratio] = identifier[b] / identifier[len] ( identifier[stats_ts] ))
identifier[pval] = identifier[abs] ( identifier[get_pvalue] ( identifier[stats_ts] [ identifier[tp] ], identifier[ci] ))
identifier[pvals] . identifier[append] ( identifier[pval] )
keyword[return] identifier[pvals]
|
def ts_stats_significance_bootstrap(ts, stats_ts, stats_func, B=1000, b=3):
""" Compute the statistical significance of a test statistic at each point
of the time series by using timeseries boootstrap.
"""
pvals = []
for tp in np.arange(0, len(stats_ts)):
pf = partial(stats_func, t=tp)
bs = bootstrap_ts(ts, pf, B=B, b=b)
ci = get_ci(bs, blockratio=b / len(stats_ts))
pval = abs(get_pvalue(stats_ts[tp], ci))
pvals.append(pval) # depends on [control=['for'], data=['tp']]
return pvals
|
def _get_prepare_env(self, script, job_descriptor, inputs, outputs, mounts):
"""Return a dict with variables for the 'prepare' action."""
# Add the _SCRIPT_REPR with the repr(script) contents
# Add the _META_YAML_REPR with the repr(meta) contents
# Add variables for directories that need to be created, for example:
# DIR_COUNT: 2
# DIR_0: /mnt/data/input/gs/bucket/path1/
# DIR_1: /mnt/data/output/gs/bucket/path2
# List the directories in sorted order so that they are created in that
# order. This is primarily to ensure that permissions are set as we create
# each directory.
# For example:
# mkdir -m 777 -p /root/first/second
# mkdir -m 777 -p /root/first
# *may* not actually set 777 on /root/first
docker_paths = sorted([
var.docker_path if var.recursive else os.path.dirname(var.docker_path)
for var in inputs | outputs | mounts
if var.value
])
env = {
_SCRIPT_VARNAME: repr(script.value),
_META_YAML_VARNAME: repr(job_descriptor.to_yaml()),
'DIR_COUNT': str(len(docker_paths))
}
for idx, path in enumerate(docker_paths):
env['DIR_{}'.format(idx)] = os.path.join(providers_util.DATA_MOUNT_POINT,
path)
return env
|
def function[_get_prepare_env, parameter[self, script, job_descriptor, inputs, outputs, mounts]]:
constant[Return a dict with variables for the 'prepare' action.]
variable[docker_paths] assign[=] call[name[sorted], parameter[<ast.ListComp object at 0x7da1b012ea70>]]
variable[env] assign[=] dictionary[[<ast.Name object at 0x7da1b0137760>, <ast.Name object at 0x7da1b01379a0>, <ast.Constant object at 0x7da1b0136e60>], [<ast.Call object at 0x7da1b01370d0>, <ast.Call object at 0x7da1b0137160>, <ast.Call object at 0x7da1b0134b20>]]
for taget[tuple[[<ast.Name object at 0x7da1b01376d0>, <ast.Name object at 0x7da1b0135ea0>]]] in starred[call[name[enumerate], parameter[name[docker_paths]]]] begin[:]
call[name[env]][call[constant[DIR_{}].format, parameter[name[idx]]]] assign[=] call[name[os].path.join, parameter[name[providers_util].DATA_MOUNT_POINT, name[path]]]
return[name[env]]
|
keyword[def] identifier[_get_prepare_env] ( identifier[self] , identifier[script] , identifier[job_descriptor] , identifier[inputs] , identifier[outputs] , identifier[mounts] ):
literal[string]
identifier[docker_paths] = identifier[sorted] ([
identifier[var] . identifier[docker_path] keyword[if] identifier[var] . identifier[recursive] keyword[else] identifier[os] . identifier[path] . identifier[dirname] ( identifier[var] . identifier[docker_path] )
keyword[for] identifier[var] keyword[in] identifier[inputs] | identifier[outputs] | identifier[mounts]
keyword[if] identifier[var] . identifier[value]
])
identifier[env] ={
identifier[_SCRIPT_VARNAME] : identifier[repr] ( identifier[script] . identifier[value] ),
identifier[_META_YAML_VARNAME] : identifier[repr] ( identifier[job_descriptor] . identifier[to_yaml] ()),
literal[string] : identifier[str] ( identifier[len] ( identifier[docker_paths] ))
}
keyword[for] identifier[idx] , identifier[path] keyword[in] identifier[enumerate] ( identifier[docker_paths] ):
identifier[env] [ literal[string] . identifier[format] ( identifier[idx] )]= identifier[os] . identifier[path] . identifier[join] ( identifier[providers_util] . identifier[DATA_MOUNT_POINT] ,
identifier[path] )
keyword[return] identifier[env]
|
def _get_prepare_env(self, script, job_descriptor, inputs, outputs, mounts):
"""Return a dict with variables for the 'prepare' action."""
# Add the _SCRIPT_REPR with the repr(script) contents
# Add the _META_YAML_REPR with the repr(meta) contents
# Add variables for directories that need to be created, for example:
# DIR_COUNT: 2
# DIR_0: /mnt/data/input/gs/bucket/path1/
# DIR_1: /mnt/data/output/gs/bucket/path2
# List the directories in sorted order so that they are created in that
# order. This is primarily to ensure that permissions are set as we create
# each directory.
# For example:
# mkdir -m 777 -p /root/first/second
# mkdir -m 777 -p /root/first
# *may* not actually set 777 on /root/first
docker_paths = sorted([var.docker_path if var.recursive else os.path.dirname(var.docker_path) for var in inputs | outputs | mounts if var.value])
env = {_SCRIPT_VARNAME: repr(script.value), _META_YAML_VARNAME: repr(job_descriptor.to_yaml()), 'DIR_COUNT': str(len(docker_paths))}
for (idx, path) in enumerate(docker_paths):
env['DIR_{}'.format(idx)] = os.path.join(providers_util.DATA_MOUNT_POINT, path) # depends on [control=['for'], data=[]]
return env
|
def assign_funds_to_account_pending_invoices(account_id: str) -> Sequence[str]:
"""
Tries to pay pending account invoices (starting from the oldest) with available funds.
:param account_id: the account on which to perform the operation
:return: The ids of the invoices that were paid (possibly empty list).
"""
logger.info('assign-funds-to-pending-invoices', account_id=str(account_id))
paid_invoice_ids = []
for invoice in Invoice.objects.filter(status=Invoice.PENDING, account_id=account_id).order_by('due_date'):
invoice_was_paid = assign_funds_to_invoice(invoice.pk)
if invoice_was_paid:
paid_invoice_ids.append(invoice.id)
else:
break # Bail even though there may be funds in another currency to pay more recent invoices.
logger.info('assign-funds-to-pending-invoices.end', account_id=str(account_id),
paid_invoice_count=len(paid_invoice_ids))
return paid_invoice_ids
|
def function[assign_funds_to_account_pending_invoices, parameter[account_id]]:
constant[
Tries to pay pending account invoices (starting from the oldest) with available funds.
:param account_id: the account on which to perform the operation
:return: The ids of the invoices that were paid (possibly empty list).
]
call[name[logger].info, parameter[constant[assign-funds-to-pending-invoices]]]
variable[paid_invoice_ids] assign[=] list[[]]
for taget[name[invoice]] in starred[call[call[name[Invoice].objects.filter, parameter[]].order_by, parameter[constant[due_date]]]] begin[:]
variable[invoice_was_paid] assign[=] call[name[assign_funds_to_invoice], parameter[name[invoice].pk]]
if name[invoice_was_paid] begin[:]
call[name[paid_invoice_ids].append, parameter[name[invoice].id]]
call[name[logger].info, parameter[constant[assign-funds-to-pending-invoices.end]]]
return[name[paid_invoice_ids]]
|
keyword[def] identifier[assign_funds_to_account_pending_invoices] ( identifier[account_id] : identifier[str] )-> identifier[Sequence] [ identifier[str] ]:
literal[string]
identifier[logger] . identifier[info] ( literal[string] , identifier[account_id] = identifier[str] ( identifier[account_id] ))
identifier[paid_invoice_ids] =[]
keyword[for] identifier[invoice] keyword[in] identifier[Invoice] . identifier[objects] . identifier[filter] ( identifier[status] = identifier[Invoice] . identifier[PENDING] , identifier[account_id] = identifier[account_id] ). identifier[order_by] ( literal[string] ):
identifier[invoice_was_paid] = identifier[assign_funds_to_invoice] ( identifier[invoice] . identifier[pk] )
keyword[if] identifier[invoice_was_paid] :
identifier[paid_invoice_ids] . identifier[append] ( identifier[invoice] . identifier[id] )
keyword[else] :
keyword[break]
identifier[logger] . identifier[info] ( literal[string] , identifier[account_id] = identifier[str] ( identifier[account_id] ),
identifier[paid_invoice_count] = identifier[len] ( identifier[paid_invoice_ids] ))
keyword[return] identifier[paid_invoice_ids]
|
def assign_funds_to_account_pending_invoices(account_id: str) -> Sequence[str]:
"""
Tries to pay pending account invoices (starting from the oldest) with available funds.
:param account_id: the account on which to perform the operation
:return: The ids of the invoices that were paid (possibly empty list).
"""
logger.info('assign-funds-to-pending-invoices', account_id=str(account_id))
paid_invoice_ids = []
for invoice in Invoice.objects.filter(status=Invoice.PENDING, account_id=account_id).order_by('due_date'):
invoice_was_paid = assign_funds_to_invoice(invoice.pk)
if invoice_was_paid:
paid_invoice_ids.append(invoice.id) # depends on [control=['if'], data=[]]
else:
break # Bail even though there may be funds in another currency to pay more recent invoices. # depends on [control=['for'], data=['invoice']]
logger.info('assign-funds-to-pending-invoices.end', account_id=str(account_id), paid_invoice_count=len(paid_invoice_ids))
return paid_invoice_ids
|
def api_user(request, userPk, key=None, hproPk=None):
"""Return information about an user"""
if not check_api_key(request, key, hproPk):
return HttpResponseForbidden
if settings.PIAPI_STANDALONE:
if not settings.PIAPI_REALUSERS:
user = generate_user(pk=userPk)
if user is None:
return HttpResponseNotFound()
else:
user = get_object_or_404(DUser, pk=userPk)
hproject = None
else:
from users.models import TechUser
user = get_object_or_404(TechUser, pk=userPk)
(_, _, hproject) = getPlugItObject(hproPk)
user.ebuio_member = hproject.isMemberRead(user)
user.ebuio_admin = hproject.isMemberWrite(user)
user.subscription_labels = _get_subscription_labels(user, hproject)
retour = {}
# Append properties for the user data
for prop in settings.PIAPI_USERDATA:
if hasattr(user, prop):
retour[prop] = getattr(user, prop)
retour['id'] = str(retour['pk'])
# Append the users organisation and access levels
orgas = {}
if user:
limitedOrgas = []
if hproject and hproject.plugItLimitOrgaJoinable:
# Get List of Plugit Available Orgas first
projectOrgaIds = hproject.plugItOrgaJoinable.order_by('name').values_list('pk', flat=True)
for (orga, isAdmin) in user.getOrgas(distinct=True):
if orga.pk in projectOrgaIds:
limitedOrgas.append((orga, isAdmin))
elif hasattr(user, 'getOrgas'):
limitedOrgas = user.getOrgas(distinct=True)
# Create List
orgas = [{'id': orga.pk, 'name': orga.name, 'codops': orga.ebu_codops, 'is_admin': isAdmin} for (orga, isAdmin)
in limitedOrgas]
retour['orgas'] = orgas
return HttpResponse(json.dumps(retour), content_type="application/json")
|
def function[api_user, parameter[request, userPk, key, hproPk]]:
constant[Return information about an user]
if <ast.UnaryOp object at 0x7da2041da710> begin[:]
return[name[HttpResponseForbidden]]
if name[settings].PIAPI_STANDALONE begin[:]
if <ast.UnaryOp object at 0x7da2041dab30> begin[:]
variable[user] assign[=] call[name[generate_user], parameter[]]
if compare[name[user] is constant[None]] begin[:]
return[call[name[HttpResponseNotFound], parameter[]]]
variable[hproject] assign[=] constant[None]
variable[retour] assign[=] dictionary[[], []]
for taget[name[prop]] in starred[name[settings].PIAPI_USERDATA] begin[:]
if call[name[hasattr], parameter[name[user], name[prop]]] begin[:]
call[name[retour]][name[prop]] assign[=] call[name[getattr], parameter[name[user], name[prop]]]
call[name[retour]][constant[id]] assign[=] call[name[str], parameter[call[name[retour]][constant[pk]]]]
variable[orgas] assign[=] dictionary[[], []]
if name[user] begin[:]
variable[limitedOrgas] assign[=] list[[]]
if <ast.BoolOp object at 0x7da2041d8d30> begin[:]
variable[projectOrgaIds] assign[=] call[call[name[hproject].plugItOrgaJoinable.order_by, parameter[constant[name]]].values_list, parameter[constant[pk]]]
for taget[tuple[[<ast.Name object at 0x7da2041d96c0>, <ast.Name object at 0x7da2041d90c0>]]] in starred[call[name[user].getOrgas, parameter[]]] begin[:]
if compare[name[orga].pk in name[projectOrgaIds]] begin[:]
call[name[limitedOrgas].append, parameter[tuple[[<ast.Name object at 0x7da2041d9a80>, <ast.Name object at 0x7da2041d8be0>]]]]
variable[orgas] assign[=] <ast.ListComp object at 0x7da2041db4f0>
call[name[retour]][constant[orgas]] assign[=] name[orgas]
return[call[name[HttpResponse], parameter[call[name[json].dumps, parameter[name[retour]]]]]]
|
keyword[def] identifier[api_user] ( identifier[request] , identifier[userPk] , identifier[key] = keyword[None] , identifier[hproPk] = keyword[None] ):
literal[string]
keyword[if] keyword[not] identifier[check_api_key] ( identifier[request] , identifier[key] , identifier[hproPk] ):
keyword[return] identifier[HttpResponseForbidden]
keyword[if] identifier[settings] . identifier[PIAPI_STANDALONE] :
keyword[if] keyword[not] identifier[settings] . identifier[PIAPI_REALUSERS] :
identifier[user] = identifier[generate_user] ( identifier[pk] = identifier[userPk] )
keyword[if] identifier[user] keyword[is] keyword[None] :
keyword[return] identifier[HttpResponseNotFound] ()
keyword[else] :
identifier[user] = identifier[get_object_or_404] ( identifier[DUser] , identifier[pk] = identifier[userPk] )
identifier[hproject] = keyword[None]
keyword[else] :
keyword[from] identifier[users] . identifier[models] keyword[import] identifier[TechUser]
identifier[user] = identifier[get_object_or_404] ( identifier[TechUser] , identifier[pk] = identifier[userPk] )
( identifier[_] , identifier[_] , identifier[hproject] )= identifier[getPlugItObject] ( identifier[hproPk] )
identifier[user] . identifier[ebuio_member] = identifier[hproject] . identifier[isMemberRead] ( identifier[user] )
identifier[user] . identifier[ebuio_admin] = identifier[hproject] . identifier[isMemberWrite] ( identifier[user] )
identifier[user] . identifier[subscription_labels] = identifier[_get_subscription_labels] ( identifier[user] , identifier[hproject] )
identifier[retour] ={}
keyword[for] identifier[prop] keyword[in] identifier[settings] . identifier[PIAPI_USERDATA] :
keyword[if] identifier[hasattr] ( identifier[user] , identifier[prop] ):
identifier[retour] [ identifier[prop] ]= identifier[getattr] ( identifier[user] , identifier[prop] )
identifier[retour] [ literal[string] ]= identifier[str] ( identifier[retour] [ literal[string] ])
identifier[orgas] ={}
keyword[if] identifier[user] :
identifier[limitedOrgas] =[]
keyword[if] identifier[hproject] keyword[and] identifier[hproject] . identifier[plugItLimitOrgaJoinable] :
identifier[projectOrgaIds] = identifier[hproject] . identifier[plugItOrgaJoinable] . identifier[order_by] ( literal[string] ). identifier[values_list] ( literal[string] , identifier[flat] = keyword[True] )
keyword[for] ( identifier[orga] , identifier[isAdmin] ) keyword[in] identifier[user] . identifier[getOrgas] ( identifier[distinct] = keyword[True] ):
keyword[if] identifier[orga] . identifier[pk] keyword[in] identifier[projectOrgaIds] :
identifier[limitedOrgas] . identifier[append] (( identifier[orga] , identifier[isAdmin] ))
keyword[elif] identifier[hasattr] ( identifier[user] , literal[string] ):
identifier[limitedOrgas] = identifier[user] . identifier[getOrgas] ( identifier[distinct] = keyword[True] )
identifier[orgas] =[{ literal[string] : identifier[orga] . identifier[pk] , literal[string] : identifier[orga] . identifier[name] , literal[string] : identifier[orga] . identifier[ebu_codops] , literal[string] : identifier[isAdmin] } keyword[for] ( identifier[orga] , identifier[isAdmin] )
keyword[in] identifier[limitedOrgas] ]
identifier[retour] [ literal[string] ]= identifier[orgas]
keyword[return] identifier[HttpResponse] ( identifier[json] . identifier[dumps] ( identifier[retour] ), identifier[content_type] = literal[string] )
|
def api_user(request, userPk, key=None, hproPk=None):
"""Return information about an user"""
if not check_api_key(request, key, hproPk):
return HttpResponseForbidden # depends on [control=['if'], data=[]]
if settings.PIAPI_STANDALONE:
if not settings.PIAPI_REALUSERS:
user = generate_user(pk=userPk)
if user is None:
return HttpResponseNotFound() # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
else:
user = get_object_or_404(DUser, pk=userPk)
hproject = None # depends on [control=['if'], data=[]]
else:
from users.models import TechUser
user = get_object_or_404(TechUser, pk=userPk)
(_, _, hproject) = getPlugItObject(hproPk)
user.ebuio_member = hproject.isMemberRead(user)
user.ebuio_admin = hproject.isMemberWrite(user)
user.subscription_labels = _get_subscription_labels(user, hproject)
retour = {}
# Append properties for the user data
for prop in settings.PIAPI_USERDATA:
if hasattr(user, prop):
retour[prop] = getattr(user, prop) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['prop']]
retour['id'] = str(retour['pk'])
# Append the users organisation and access levels
orgas = {}
if user:
limitedOrgas = []
if hproject and hproject.plugItLimitOrgaJoinable:
# Get List of Plugit Available Orgas first
projectOrgaIds = hproject.plugItOrgaJoinable.order_by('name').values_list('pk', flat=True)
for (orga, isAdmin) in user.getOrgas(distinct=True):
if orga.pk in projectOrgaIds:
limitedOrgas.append((orga, isAdmin)) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]] # depends on [control=['if'], data=[]]
elif hasattr(user, 'getOrgas'):
limitedOrgas = user.getOrgas(distinct=True) # depends on [control=['if'], data=[]]
# Create List
orgas = [{'id': orga.pk, 'name': orga.name, 'codops': orga.ebu_codops, 'is_admin': isAdmin} for (orga, isAdmin) in limitedOrgas] # depends on [control=['if'], data=[]]
retour['orgas'] = orgas
return HttpResponse(json.dumps(retour), content_type='application/json')
|
def deploy_password(username, password, host=None, admin_username=None,
admin_password=None, module=None):
'''
Change the QuickDeploy password, used for switches as well
CLI Example:
.. code-block:: bash
salt dell dracr.deploy_password [USERNAME] [PASSWORD]
host=<remote DRAC> admin_username=<DRAC user>
admin_password=<DRAC PW>
salt dell dracr.change_password diana secret
Note that if only a username is specified then this module will look up
details for all 16 possible DRAC users. This is time consuming, but might
be necessary if one is not sure which user slot contains the one you want.
Many late-model Dell chassis have 'root' as UID 1, so if you can depend
on that then setting the password is much quicker.
'''
return __execute_cmd('deploy -u {0} -p {1}'.format(
username, password), host=host, admin_username=admin_username,
admin_password=admin_password, module=module
)
|
def function[deploy_password, parameter[username, password, host, admin_username, admin_password, module]]:
constant[
Change the QuickDeploy password, used for switches as well
CLI Example:
.. code-block:: bash
salt dell dracr.deploy_password [USERNAME] [PASSWORD]
host=<remote DRAC> admin_username=<DRAC user>
admin_password=<DRAC PW>
salt dell dracr.change_password diana secret
Note that if only a username is specified then this module will look up
details for all 16 possible DRAC users. This is time consuming, but might
be necessary if one is not sure which user slot contains the one you want.
Many late-model Dell chassis have 'root' as UID 1, so if you can depend
on that then setting the password is much quicker.
]
return[call[name[__execute_cmd], parameter[call[constant[deploy -u {0} -p {1}].format, parameter[name[username], name[password]]]]]]
|
keyword[def] identifier[deploy_password] ( identifier[username] , identifier[password] , identifier[host] = keyword[None] , identifier[admin_username] = keyword[None] ,
identifier[admin_password] = keyword[None] , identifier[module] = keyword[None] ):
literal[string]
keyword[return] identifier[__execute_cmd] ( literal[string] . identifier[format] (
identifier[username] , identifier[password] ), identifier[host] = identifier[host] , identifier[admin_username] = identifier[admin_username] ,
identifier[admin_password] = identifier[admin_password] , identifier[module] = identifier[module]
)
|
def deploy_password(username, password, host=None, admin_username=None, admin_password=None, module=None):
"""
Change the QuickDeploy password, used for switches as well
CLI Example:
.. code-block:: bash
salt dell dracr.deploy_password [USERNAME] [PASSWORD]
host=<remote DRAC> admin_username=<DRAC user>
admin_password=<DRAC PW>
salt dell dracr.change_password diana secret
Note that if only a username is specified then this module will look up
details for all 16 possible DRAC users. This is time consuming, but might
be necessary if one is not sure which user slot contains the one you want.
Many late-model Dell chassis have 'root' as UID 1, so if you can depend
on that then setting the password is much quicker.
"""
return __execute_cmd('deploy -u {0} -p {1}'.format(username, password), host=host, admin_username=admin_username, admin_password=admin_password, module=module)
|
def derive_fernet_key(input_key):
"""Derive a 32-bit b64-encoded Fernet key from arbitrary input key."""
hkdf = HKDF(
algorithm=hashes.SHA256(),
length=32,
salt=salt,
info=info,
backend=backend,
)
return base64.urlsafe_b64encode(hkdf.derive(force_bytes(input_key)))
|
def function[derive_fernet_key, parameter[input_key]]:
constant[Derive a 32-bit b64-encoded Fernet key from arbitrary input key.]
variable[hkdf] assign[=] call[name[HKDF], parameter[]]
return[call[name[base64].urlsafe_b64encode, parameter[call[name[hkdf].derive, parameter[call[name[force_bytes], parameter[name[input_key]]]]]]]]
|
keyword[def] identifier[derive_fernet_key] ( identifier[input_key] ):
literal[string]
identifier[hkdf] = identifier[HKDF] (
identifier[algorithm] = identifier[hashes] . identifier[SHA256] (),
identifier[length] = literal[int] ,
identifier[salt] = identifier[salt] ,
identifier[info] = identifier[info] ,
identifier[backend] = identifier[backend] ,
)
keyword[return] identifier[base64] . identifier[urlsafe_b64encode] ( identifier[hkdf] . identifier[derive] ( identifier[force_bytes] ( identifier[input_key] )))
|
def derive_fernet_key(input_key):
"""Derive a 32-bit b64-encoded Fernet key from arbitrary input key."""
hkdf = HKDF(algorithm=hashes.SHA256(), length=32, salt=salt, info=info, backend=backend)
return base64.urlsafe_b64encode(hkdf.derive(force_bytes(input_key)))
|
def name(self, value):
"""Name associated with this email.
:param value: Name associated with this email.
:type value: string
"""
if not (value is None or isinstance(value, str)):
raise TypeError('name must be of type string.')
# Escape common CSV delimiters as workaround for
# https://github.com/sendgrid/sendgrid-python/issues/578
if value is not None and (',' in value or ';' in value):
value = html_entity_decode(value)
value = '"' + value + '"'
self._name = value
|
def function[name, parameter[self, value]]:
constant[Name associated with this email.
:param value: Name associated with this email.
:type value: string
]
if <ast.UnaryOp object at 0x7da18f09d720> begin[:]
<ast.Raise object at 0x7da18f09d270>
if <ast.BoolOp object at 0x7da2047e8700> begin[:]
variable[value] assign[=] call[name[html_entity_decode], parameter[name[value]]]
variable[value] assign[=] binary_operation[binary_operation[constant["] + name[value]] + constant["]]
name[self]._name assign[=] name[value]
|
keyword[def] identifier[name] ( identifier[self] , identifier[value] ):
literal[string]
keyword[if] keyword[not] ( identifier[value] keyword[is] keyword[None] keyword[or] identifier[isinstance] ( identifier[value] , identifier[str] )):
keyword[raise] identifier[TypeError] ( literal[string] )
keyword[if] identifier[value] keyword[is] keyword[not] keyword[None] keyword[and] ( literal[string] keyword[in] identifier[value] keyword[or] literal[string] keyword[in] identifier[value] ):
identifier[value] = identifier[html_entity_decode] ( identifier[value] )
identifier[value] = literal[string] + identifier[value] + literal[string]
identifier[self] . identifier[_name] = identifier[value]
|
def name(self, value):
"""Name associated with this email.
:param value: Name associated with this email.
:type value: string
"""
if not (value is None or isinstance(value, str)):
raise TypeError('name must be of type string.') # depends on [control=['if'], data=[]]
# Escape common CSV delimiters as workaround for
# https://github.com/sendgrid/sendgrid-python/issues/578
if value is not None and (',' in value or ';' in value):
value = html_entity_decode(value)
value = '"' + value + '"' # depends on [control=['if'], data=[]]
self._name = value
|
def encrypt(self, msg, alg='aes_128_cbc', padding='PKCS#7', b64enc=True,
block_size=AES_BLOCK_SIZE):
"""
:param key: The encryption key
:param msg: Message to be encrypted
:param padding: Which padding that should be used
:param b64enc: Whether the result should be base64encoded
:param block_size: If PKCS#7 padding which block size to use
:return: The encrypted message
"""
self.__class__._deprecation_notice()
if padding == 'PKCS#7':
_block_size = block_size
elif padding == 'PKCS#5':
_block_size = 8
else:
_block_size = 0
if _block_size:
plen = _block_size - (len(msg) % _block_size)
c = chr(plen).encode()
msg += c * plen
cipher, iv = self.build_cipher(alg)
encryptor = cipher.encryptor()
cmsg = iv + encryptor.update(msg) + encryptor.finalize()
if b64enc:
enc_msg = _base64.b64encode(cmsg)
else:
enc_msg = cmsg
return enc_msg
|
def function[encrypt, parameter[self, msg, alg, padding, b64enc, block_size]]:
constant[
:param key: The encryption key
:param msg: Message to be encrypted
:param padding: Which padding that should be used
:param b64enc: Whether the result should be base64encoded
:param block_size: If PKCS#7 padding which block size to use
:return: The encrypted message
]
call[name[self].__class__._deprecation_notice, parameter[]]
if compare[name[padding] equal[==] constant[PKCS#7]] begin[:]
variable[_block_size] assign[=] name[block_size]
if name[_block_size] begin[:]
variable[plen] assign[=] binary_operation[name[_block_size] - binary_operation[call[name[len], parameter[name[msg]]] <ast.Mod object at 0x7da2590d6920> name[_block_size]]]
variable[c] assign[=] call[call[name[chr], parameter[name[plen]]].encode, parameter[]]
<ast.AugAssign object at 0x7da2054a7a60>
<ast.Tuple object at 0x7da2054a47f0> assign[=] call[name[self].build_cipher, parameter[name[alg]]]
variable[encryptor] assign[=] call[name[cipher].encryptor, parameter[]]
variable[cmsg] assign[=] binary_operation[binary_operation[name[iv] + call[name[encryptor].update, parameter[name[msg]]]] + call[name[encryptor].finalize, parameter[]]]
if name[b64enc] begin[:]
variable[enc_msg] assign[=] call[name[_base64].b64encode, parameter[name[cmsg]]]
return[name[enc_msg]]
|
keyword[def] identifier[encrypt] ( identifier[self] , identifier[msg] , identifier[alg] = literal[string] , identifier[padding] = literal[string] , identifier[b64enc] = keyword[True] ,
identifier[block_size] = identifier[AES_BLOCK_SIZE] ):
literal[string]
identifier[self] . identifier[__class__] . identifier[_deprecation_notice] ()
keyword[if] identifier[padding] == literal[string] :
identifier[_block_size] = identifier[block_size]
keyword[elif] identifier[padding] == literal[string] :
identifier[_block_size] = literal[int]
keyword[else] :
identifier[_block_size] = literal[int]
keyword[if] identifier[_block_size] :
identifier[plen] = identifier[_block_size] -( identifier[len] ( identifier[msg] )% identifier[_block_size] )
identifier[c] = identifier[chr] ( identifier[plen] ). identifier[encode] ()
identifier[msg] += identifier[c] * identifier[plen]
identifier[cipher] , identifier[iv] = identifier[self] . identifier[build_cipher] ( identifier[alg] )
identifier[encryptor] = identifier[cipher] . identifier[encryptor] ()
identifier[cmsg] = identifier[iv] + identifier[encryptor] . identifier[update] ( identifier[msg] )+ identifier[encryptor] . identifier[finalize] ()
keyword[if] identifier[b64enc] :
identifier[enc_msg] = identifier[_base64] . identifier[b64encode] ( identifier[cmsg] )
keyword[else] :
identifier[enc_msg] = identifier[cmsg]
keyword[return] identifier[enc_msg]
|
def encrypt(self, msg, alg='aes_128_cbc', padding='PKCS#7', b64enc=True, block_size=AES_BLOCK_SIZE):
"""
:param key: The encryption key
:param msg: Message to be encrypted
:param padding: Which padding that should be used
:param b64enc: Whether the result should be base64encoded
:param block_size: If PKCS#7 padding which block size to use
:return: The encrypted message
"""
self.__class__._deprecation_notice()
if padding == 'PKCS#7':
_block_size = block_size # depends on [control=['if'], data=[]]
elif padding == 'PKCS#5':
_block_size = 8 # depends on [control=['if'], data=[]]
else:
_block_size = 0
if _block_size:
plen = _block_size - len(msg) % _block_size
c = chr(plen).encode()
msg += c * plen # depends on [control=['if'], data=[]]
(cipher, iv) = self.build_cipher(alg)
encryptor = cipher.encryptor()
cmsg = iv + encryptor.update(msg) + encryptor.finalize()
if b64enc:
enc_msg = _base64.b64encode(cmsg) # depends on [control=['if'], data=[]]
else:
enc_msg = cmsg
return enc_msg
|
def ls_dir(base_dir):
"""List files recursively."""
return [
os.path.join(dirpath.replace(base_dir, '', 1), f)
for (dirpath, dirnames, files) in os.walk(base_dir)
for f in files
]
|
def function[ls_dir, parameter[base_dir]]:
constant[List files recursively.]
return[<ast.ListComp object at 0x7da18ede7970>]
|
keyword[def] identifier[ls_dir] ( identifier[base_dir] ):
literal[string]
keyword[return] [
identifier[os] . identifier[path] . identifier[join] ( identifier[dirpath] . identifier[replace] ( identifier[base_dir] , literal[string] , literal[int] ), identifier[f] )
keyword[for] ( identifier[dirpath] , identifier[dirnames] , identifier[files] ) keyword[in] identifier[os] . identifier[walk] ( identifier[base_dir] )
keyword[for] identifier[f] keyword[in] identifier[files]
]
|
def ls_dir(base_dir):
"""List files recursively."""
return [os.path.join(dirpath.replace(base_dir, '', 1), f) for (dirpath, dirnames, files) in os.walk(base_dir) for f in files]
|
def updated(name=None, cyg_arch='x86_64', mirrors=None):
'''
Make sure all packages are up to date.
name : None
No affect, salt fails poorly without the arg available
cyg_arch : x86_64
The cygwin architecture to update.
Current options are x86 and x86_64
mirrors : None
List of mirrors to check.
None will use a default mirror (kernel.org)
CLI Example:
.. code-block:: yaml
rsync:
cyg.updated:
- mirrors:
- http://mirror/without/public/key: ""
- http://mirror/with/public/key: http://url/of/public/key
'''
ret = {'name': 'cyg.updated', 'result': None, 'comment': '', 'changes': {}}
if cyg_arch not in ['x86', 'x86_64']:
ret['result'] = False
ret['comment'] = 'The \'cyg_arch\' argument must\
be one of \'x86\' or \'x86_64\''
return ret
if __opts__['test']:
ret['comment'] = 'All packages would have been updated'
return ret
if not mirrors:
LOG.warning('No mirror given, using the default.')
before = __salt__['cyg.list'](cyg_arch=cyg_arch)
if __salt__['cyg.update'](cyg_arch, mirrors=mirrors):
after = __salt__['cyg.list'](cyg_arch=cyg_arch)
differ = DictDiffer(after, before)
ret['result'] = True
if differ.same():
ret['comment'] = 'Nothing to update.'
else:
ret['changes']['added'] = list(differ.added())
ret['changes']['removed'] = list(differ.removed())
ret['changes']['changed'] = list(differ.changed())
ret['comment'] = 'All packages successfully updated.'
else:
ret['result'] = False
ret['comment'] = 'Could not update packages.'
return ret
|
def function[updated, parameter[name, cyg_arch, mirrors]]:
constant[
Make sure all packages are up to date.
name : None
No affect, salt fails poorly without the arg available
cyg_arch : x86_64
The cygwin architecture to update.
Current options are x86 and x86_64
mirrors : None
List of mirrors to check.
None will use a default mirror (kernel.org)
CLI Example:
.. code-block:: yaml
rsync:
cyg.updated:
- mirrors:
- http://mirror/without/public/key: ""
- http://mirror/with/public/key: http://url/of/public/key
]
variable[ret] assign[=] dictionary[[<ast.Constant object at 0x7da18bcca0b0>, <ast.Constant object at 0x7da18bcc9960>, <ast.Constant object at 0x7da18bcca3b0>, <ast.Constant object at 0x7da18bccb400>], [<ast.Constant object at 0x7da18bccbfd0>, <ast.Constant object at 0x7da18bccbd30>, <ast.Constant object at 0x7da18bccbeb0>, <ast.Dict object at 0x7da18bcca710>]]
if compare[name[cyg_arch] <ast.NotIn object at 0x7da2590d7190> list[[<ast.Constant object at 0x7da18bccab30>, <ast.Constant object at 0x7da18bcc9ea0>]]] begin[:]
call[name[ret]][constant[result]] assign[=] constant[False]
call[name[ret]][constant[comment]] assign[=] constant[The 'cyg_arch' argument must be one of 'x86' or 'x86_64']
return[name[ret]]
if call[name[__opts__]][constant[test]] begin[:]
call[name[ret]][constant[comment]] assign[=] constant[All packages would have been updated]
return[name[ret]]
if <ast.UnaryOp object at 0x7da18bccb4f0> begin[:]
call[name[LOG].warning, parameter[constant[No mirror given, using the default.]]]
variable[before] assign[=] call[call[name[__salt__]][constant[cyg.list]], parameter[]]
if call[call[name[__salt__]][constant[cyg.update]], parameter[name[cyg_arch]]] begin[:]
variable[after] assign[=] call[call[name[__salt__]][constant[cyg.list]], parameter[]]
variable[differ] assign[=] call[name[DictDiffer], parameter[name[after], name[before]]]
call[name[ret]][constant[result]] assign[=] constant[True]
if call[name[differ].same, parameter[]] begin[:]
call[name[ret]][constant[comment]] assign[=] constant[Nothing to update.]
return[name[ret]]
|
keyword[def] identifier[updated] ( identifier[name] = keyword[None] , identifier[cyg_arch] = literal[string] , identifier[mirrors] = keyword[None] ):
literal[string]
identifier[ret] ={ literal[string] : literal[string] , literal[string] : keyword[None] , literal[string] : literal[string] , literal[string] :{}}
keyword[if] identifier[cyg_arch] keyword[not] keyword[in] [ literal[string] , literal[string] ]:
identifier[ret] [ literal[string] ]= keyword[False]
identifier[ret] [ literal[string] ]= literal[string]
keyword[return] identifier[ret]
keyword[if] identifier[__opts__] [ literal[string] ]:
identifier[ret] [ literal[string] ]= literal[string]
keyword[return] identifier[ret]
keyword[if] keyword[not] identifier[mirrors] :
identifier[LOG] . identifier[warning] ( literal[string] )
identifier[before] = identifier[__salt__] [ literal[string] ]( identifier[cyg_arch] = identifier[cyg_arch] )
keyword[if] identifier[__salt__] [ literal[string] ]( identifier[cyg_arch] , identifier[mirrors] = identifier[mirrors] ):
identifier[after] = identifier[__salt__] [ literal[string] ]( identifier[cyg_arch] = identifier[cyg_arch] )
identifier[differ] = identifier[DictDiffer] ( identifier[after] , identifier[before] )
identifier[ret] [ literal[string] ]= keyword[True]
keyword[if] identifier[differ] . identifier[same] ():
identifier[ret] [ literal[string] ]= literal[string]
keyword[else] :
identifier[ret] [ literal[string] ][ literal[string] ]= identifier[list] ( identifier[differ] . identifier[added] ())
identifier[ret] [ literal[string] ][ literal[string] ]= identifier[list] ( identifier[differ] . identifier[removed] ())
identifier[ret] [ literal[string] ][ literal[string] ]= identifier[list] ( identifier[differ] . identifier[changed] ())
identifier[ret] [ literal[string] ]= literal[string]
keyword[else] :
identifier[ret] [ literal[string] ]= keyword[False]
identifier[ret] [ literal[string] ]= literal[string]
keyword[return] identifier[ret]
|
def updated(name=None, cyg_arch='x86_64', mirrors=None):
"""
Make sure all packages are up to date.
name : None
No affect, salt fails poorly without the arg available
cyg_arch : x86_64
The cygwin architecture to update.
Current options are x86 and x86_64
mirrors : None
List of mirrors to check.
None will use a default mirror (kernel.org)
CLI Example:
.. code-block:: yaml
rsync:
cyg.updated:
- mirrors:
- http://mirror/without/public/key: ""
- http://mirror/with/public/key: http://url/of/public/key
"""
ret = {'name': 'cyg.updated', 'result': None, 'comment': '', 'changes': {}}
if cyg_arch not in ['x86', 'x86_64']:
ret['result'] = False
ret['comment'] = "The 'cyg_arch' argument must be one of 'x86' or 'x86_64'"
return ret # depends on [control=['if'], data=[]]
if __opts__['test']:
ret['comment'] = 'All packages would have been updated'
return ret # depends on [control=['if'], data=[]]
if not mirrors:
LOG.warning('No mirror given, using the default.') # depends on [control=['if'], data=[]]
before = __salt__['cyg.list'](cyg_arch=cyg_arch)
if __salt__['cyg.update'](cyg_arch, mirrors=mirrors):
after = __salt__['cyg.list'](cyg_arch=cyg_arch)
differ = DictDiffer(after, before)
ret['result'] = True
if differ.same():
ret['comment'] = 'Nothing to update.' # depends on [control=['if'], data=[]]
else:
ret['changes']['added'] = list(differ.added())
ret['changes']['removed'] = list(differ.removed())
ret['changes']['changed'] = list(differ.changed())
ret['comment'] = 'All packages successfully updated.' # depends on [control=['if'], data=[]]
else:
ret['result'] = False
ret['comment'] = 'Could not update packages.'
return ret
|
def _set_minimum_links(self, v, load=False):
"""
Setter method for minimum_links, mapped from YANG variable /interface/port_channel/minimum_links (uint32)
If this variable is read-only (config: false) in the
source YANG file, then _set_minimum_links is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_minimum_links() directly.
YANG Description: The least number of operationally 'UP' links to
indicate port-channel being UP.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=RestrictedClassType(base_type=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), restriction_dict={'range': [u'1 .. 64']}), default=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32)(1), is_leaf=True, yang_name="minimum-links", rest_name="minimum-links", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'callpoint': u'interface_po', u'info': u'Least number of operationally UP links to declare \nport-channel UP', u'display-when': u'not(../insight/insight-enable)'}}, namespace='urn:brocade.com:mgmt:brocade-interface', defining_module='brocade-interface', yang_type='uint32', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """minimum_links must be of a type compatible with uint32""",
'defined-type': "uint32",
'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), restriction_dict={'range': [u'1 .. 64']}), default=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32)(1), is_leaf=True, yang_name="minimum-links", rest_name="minimum-links", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'callpoint': u'interface_po', u'info': u'Least number of operationally UP links to declare \nport-channel UP', u'display-when': u'not(../insight/insight-enable)'}}, namespace='urn:brocade.com:mgmt:brocade-interface', defining_module='brocade-interface', yang_type='uint32', is_config=True)""",
})
self.__minimum_links = t
if hasattr(self, '_set'):
self._set()
|
def function[_set_minimum_links, parameter[self, v, load]]:
constant[
Setter method for minimum_links, mapped from YANG variable /interface/port_channel/minimum_links (uint32)
If this variable is read-only (config: false) in the
source YANG file, then _set_minimum_links is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_minimum_links() directly.
YANG Description: The least number of operationally 'UP' links to
indicate port-channel being UP.
]
if call[name[hasattr], parameter[name[v], constant[_utype]]] begin[:]
variable[v] assign[=] call[name[v]._utype, parameter[name[v]]]
<ast.Try object at 0x7da18f721030>
name[self].__minimum_links assign[=] name[t]
if call[name[hasattr], parameter[name[self], constant[_set]]] begin[:]
call[name[self]._set, parameter[]]
|
keyword[def] identifier[_set_minimum_links] ( identifier[self] , identifier[v] , identifier[load] = keyword[False] ):
literal[string]
keyword[if] identifier[hasattr] ( identifier[v] , literal[string] ):
identifier[v] = identifier[v] . identifier[_utype] ( identifier[v] )
keyword[try] :
identifier[t] = identifier[YANGDynClass] ( identifier[v] , identifier[base] = identifier[RestrictedClassType] ( identifier[base_type] = identifier[RestrictedClassType] ( identifier[base_type] = identifier[long] , identifier[restriction_dict] ={ literal[string] :[ literal[string] ]}, identifier[int_size] = literal[int] ), identifier[restriction_dict] ={ literal[string] :[ literal[string] ]}), identifier[default] = identifier[RestrictedClassType] ( identifier[base_type] = identifier[long] , identifier[restriction_dict] ={ literal[string] :[ literal[string] ]}, identifier[int_size] = literal[int] )( literal[int] ), identifier[is_leaf] = keyword[True] , identifier[yang_name] = literal[string] , identifier[rest_name] = literal[string] , identifier[parent] = identifier[self] , identifier[path_helper] = identifier[self] . identifier[_path_helper] , identifier[extmethods] = identifier[self] . identifier[_extmethods] , identifier[register_paths] = keyword[True] , identifier[extensions] ={ literal[string] :{ literal[string] : literal[string] , literal[string] : literal[string] , literal[string] : literal[string] }}, identifier[namespace] = literal[string] , identifier[defining_module] = literal[string] , identifier[yang_type] = literal[string] , identifier[is_config] = keyword[True] )
keyword[except] ( identifier[TypeError] , identifier[ValueError] ):
keyword[raise] identifier[ValueError] ({
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[string] ,
})
identifier[self] . identifier[__minimum_links] = identifier[t]
keyword[if] identifier[hasattr] ( identifier[self] , literal[string] ):
identifier[self] . identifier[_set] ()
|
def _set_minimum_links(self, v, load=False):
"""
Setter method for minimum_links, mapped from YANG variable /interface/port_channel/minimum_links (uint32)
If this variable is read-only (config: false) in the
source YANG file, then _set_minimum_links is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_minimum_links() directly.
YANG Description: The least number of operationally 'UP' links to
indicate port-channel being UP.
"""
if hasattr(v, '_utype'):
v = v._utype(v) # depends on [control=['if'], data=[]]
try:
t = YANGDynClass(v, base=RestrictedClassType(base_type=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), restriction_dict={'range': [u'1 .. 64']}), default=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32)(1), is_leaf=True, yang_name='minimum-links', rest_name='minimum-links', parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'callpoint': u'interface_po', u'info': u'Least number of operationally UP links to declare \nport-channel UP', u'display-when': u'not(../insight/insight-enable)'}}, namespace='urn:brocade.com:mgmt:brocade-interface', defining_module='brocade-interface', yang_type='uint32', is_config=True) # depends on [control=['try'], data=[]]
except (TypeError, ValueError):
raise ValueError({'error-string': 'minimum_links must be of a type compatible with uint32', 'defined-type': 'uint32', 'generated-type': 'YANGDynClass(base=RestrictedClassType(base_type=RestrictedClassType(base_type=long, restriction_dict={\'range\': [\'0..4294967295\']}, int_size=32), restriction_dict={\'range\': [u\'1 .. 64\']}), default=RestrictedClassType(base_type=long, restriction_dict={\'range\': [\'0..4294967295\']}, int_size=32)(1), is_leaf=True, yang_name="minimum-links", rest_name="minimum-links", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u\'tailf-common\': {u\'callpoint\': u\'interface_po\', u\'info\': u\'Least number of operationally UP links to declare \nport-channel UP\', u\'display-when\': u\'not(../insight/insight-enable)\'}}, namespace=\'urn:brocade.com:mgmt:brocade-interface\', defining_module=\'brocade-interface\', yang_type=\'uint32\', is_config=True)'}) # depends on [control=['except'], data=[]]
self.__minimum_links = t
if hasattr(self, '_set'):
self._set() # depends on [control=['if'], data=[]]
|
def do_imports(self):
"""
Import all importable options
"""
self.do_import('worker_class', Worker)
self.do_import('queue_model', self.options.worker_class.queue_model)
self.do_import('error_model', self.options.worker_class.error_model)
self.do_import('callback', self.options.worker_class.callback)
|
def function[do_imports, parameter[self]]:
constant[
Import all importable options
]
call[name[self].do_import, parameter[constant[worker_class], name[Worker]]]
call[name[self].do_import, parameter[constant[queue_model], name[self].options.worker_class.queue_model]]
call[name[self].do_import, parameter[constant[error_model], name[self].options.worker_class.error_model]]
call[name[self].do_import, parameter[constant[callback], name[self].options.worker_class.callback]]
|
keyword[def] identifier[do_imports] ( identifier[self] ):
literal[string]
identifier[self] . identifier[do_import] ( literal[string] , identifier[Worker] )
identifier[self] . identifier[do_import] ( literal[string] , identifier[self] . identifier[options] . identifier[worker_class] . identifier[queue_model] )
identifier[self] . identifier[do_import] ( literal[string] , identifier[self] . identifier[options] . identifier[worker_class] . identifier[error_model] )
identifier[self] . identifier[do_import] ( literal[string] , identifier[self] . identifier[options] . identifier[worker_class] . identifier[callback] )
|
def do_imports(self):
"""
Import all importable options
"""
self.do_import('worker_class', Worker)
self.do_import('queue_model', self.options.worker_class.queue_model)
self.do_import('error_model', self.options.worker_class.error_model)
self.do_import('callback', self.options.worker_class.callback)
|
def merge_envs(*args):
"""Union of one or more dictionaries.
In case of duplicate keys, the values in the right-most arguments will
squash (overwrite) the value provided by any dict preceding it.
:param args: Sequence of ``dict`` objects that should be merged.
:return: A ``dict`` containing the union of keys in all input dicts.
"""
env = {}
for arg in args:
if not arg:
continue
env.update(arg)
return env
|
def function[merge_envs, parameter[]]:
constant[Union of one or more dictionaries.
In case of duplicate keys, the values in the right-most arguments will
squash (overwrite) the value provided by any dict preceding it.
:param args: Sequence of ``dict`` objects that should be merged.
:return: A ``dict`` containing the union of keys in all input dicts.
]
variable[env] assign[=] dictionary[[], []]
for taget[name[arg]] in starred[name[args]] begin[:]
if <ast.UnaryOp object at 0x7da1b146cc40> begin[:]
continue
call[name[env].update, parameter[name[arg]]]
return[name[env]]
|
keyword[def] identifier[merge_envs] (* identifier[args] ):
literal[string]
identifier[env] ={}
keyword[for] identifier[arg] keyword[in] identifier[args] :
keyword[if] keyword[not] identifier[arg] :
keyword[continue]
identifier[env] . identifier[update] ( identifier[arg] )
keyword[return] identifier[env]
|
def merge_envs(*args):
"""Union of one or more dictionaries.
In case of duplicate keys, the values in the right-most arguments will
squash (overwrite) the value provided by any dict preceding it.
:param args: Sequence of ``dict`` objects that should be merged.
:return: A ``dict`` containing the union of keys in all input dicts.
"""
env = {}
for arg in args:
if not arg:
continue # depends on [control=['if'], data=[]]
env.update(arg) # depends on [control=['for'], data=['arg']]
return env
|
def g(self, id):
"""
If the given id is known, the numerical representation is returned,
otherwise a new running number is assigned to the id and returned"""
if id not in self._m:
if self.orig_ids:
self._m[id] = id
if self.warn:
try:
int(id)
except:
sys.stderr.write(
'Warning: ID "%s" is not an integer.\n' % id)
self.warn = False
else:
self._m[id] = self.index
self.index += 1
return self._m[id]
|
def function[g, parameter[self, id]]:
constant[
If the given id is known, the numerical representation is returned,
otherwise a new running number is assigned to the id and returned]
if compare[name[id] <ast.NotIn object at 0x7da2590d7190> name[self]._m] begin[:]
if name[self].orig_ids begin[:]
call[name[self]._m][name[id]] assign[=] name[id]
if name[self].warn begin[:]
<ast.Try object at 0x7da1b09ea950>
return[call[name[self]._m][name[id]]]
|
keyword[def] identifier[g] ( identifier[self] , identifier[id] ):
literal[string]
keyword[if] identifier[id] keyword[not] keyword[in] identifier[self] . identifier[_m] :
keyword[if] identifier[self] . identifier[orig_ids] :
identifier[self] . identifier[_m] [ identifier[id] ]= identifier[id]
keyword[if] identifier[self] . identifier[warn] :
keyword[try] :
identifier[int] ( identifier[id] )
keyword[except] :
identifier[sys] . identifier[stderr] . identifier[write] (
literal[string] % identifier[id] )
identifier[self] . identifier[warn] = keyword[False]
keyword[else] :
identifier[self] . identifier[_m] [ identifier[id] ]= identifier[self] . identifier[index]
identifier[self] . identifier[index] += literal[int]
keyword[return] identifier[self] . identifier[_m] [ identifier[id] ]
|
def g(self, id):
"""
If the given id is known, the numerical representation is returned,
otherwise a new running number is assigned to the id and returned"""
if id not in self._m:
if self.orig_ids:
self._m[id] = id
if self.warn:
try:
int(id) # depends on [control=['try'], data=[]]
except:
sys.stderr.write('Warning: ID "%s" is not an integer.\n' % id)
self.warn = False # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
else:
self._m[id] = self.index
self.index += 1 # depends on [control=['if'], data=['id']]
return self._m[id]
|
def build_response(self, request, response, from_cache=False):
"""
Build a response by making a request or using the cache.
This will end up calling send and returning a potentially
cached response
"""
if not from_cache and request.method == 'GET':
# apply any expiration heuristics
if response.status == 304:
# We must have sent an ETag request. This could mean
# that we've been expired already or that we simply
# have an etag. In either case, we want to try and
# update the cache if that is the case.
cached_response = self.controller.update_cached_response(
request, response
)
if cached_response is not response:
from_cache = True
# We are done with the server response, read a
# possible response body (compliant servers will
# not return one, but we cannot be 100% sure) and
# release the connection back to the pool.
response.read(decode_content=False)
response.release_conn()
response = cached_response
# We always cache the 301 responses
elif response.status == 301:
self.controller.cache_response(request, response)
else:
# Check for any heuristics that might update headers
# before trying to cache.
if self.heuristic:
response = self.heuristic.apply(response)
# Wrap the response file with a wrapper that will cache the
# response when the stream has been consumed.
response._fp = CallbackFileWrapper(
response._fp,
functools.partial(
self.controller.cache_response,
request,
response,
)
)
resp = super(CacheControlAdapter, self).build_response(
request, response
)
# See if we should invalidate the cache.
if request.method in self.invalidating_methods and resp.ok:
cache_url = self.controller.cache_url(request.url)
self.cache.delete(cache_url)
# Give the request a from_cache attr to let people use it
resp.from_cache = from_cache
return resp
|
def function[build_response, parameter[self, request, response, from_cache]]:
constant[
Build a response by making a request or using the cache.
This will end up calling send and returning a potentially
cached response
]
if <ast.BoolOp object at 0x7da20e961b40> begin[:]
if compare[name[response].status equal[==] constant[304]] begin[:]
variable[cached_response] assign[=] call[name[self].controller.update_cached_response, parameter[name[request], name[response]]]
if compare[name[cached_response] is_not name[response]] begin[:]
variable[from_cache] assign[=] constant[True]
call[name[response].read, parameter[]]
call[name[response].release_conn, parameter[]]
variable[response] assign[=] name[cached_response]
variable[resp] assign[=] call[call[name[super], parameter[name[CacheControlAdapter], name[self]]].build_response, parameter[name[request], name[response]]]
if <ast.BoolOp object at 0x7da1b26afa90> begin[:]
variable[cache_url] assign[=] call[name[self].controller.cache_url, parameter[name[request].url]]
call[name[self].cache.delete, parameter[name[cache_url]]]
name[resp].from_cache assign[=] name[from_cache]
return[name[resp]]
|
keyword[def] identifier[build_response] ( identifier[self] , identifier[request] , identifier[response] , identifier[from_cache] = keyword[False] ):
literal[string]
keyword[if] keyword[not] identifier[from_cache] keyword[and] identifier[request] . identifier[method] == literal[string] :
keyword[if] identifier[response] . identifier[status] == literal[int] :
identifier[cached_response] = identifier[self] . identifier[controller] . identifier[update_cached_response] (
identifier[request] , identifier[response]
)
keyword[if] identifier[cached_response] keyword[is] keyword[not] identifier[response] :
identifier[from_cache] = keyword[True]
identifier[response] . identifier[read] ( identifier[decode_content] = keyword[False] )
identifier[response] . identifier[release_conn] ()
identifier[response] = identifier[cached_response]
keyword[elif] identifier[response] . identifier[status] == literal[int] :
identifier[self] . identifier[controller] . identifier[cache_response] ( identifier[request] , identifier[response] )
keyword[else] :
keyword[if] identifier[self] . identifier[heuristic] :
identifier[response] = identifier[self] . identifier[heuristic] . identifier[apply] ( identifier[response] )
identifier[response] . identifier[_fp] = identifier[CallbackFileWrapper] (
identifier[response] . identifier[_fp] ,
identifier[functools] . identifier[partial] (
identifier[self] . identifier[controller] . identifier[cache_response] ,
identifier[request] ,
identifier[response] ,
)
)
identifier[resp] = identifier[super] ( identifier[CacheControlAdapter] , identifier[self] ). identifier[build_response] (
identifier[request] , identifier[response]
)
keyword[if] identifier[request] . identifier[method] keyword[in] identifier[self] . identifier[invalidating_methods] keyword[and] identifier[resp] . identifier[ok] :
identifier[cache_url] = identifier[self] . identifier[controller] . identifier[cache_url] ( identifier[request] . identifier[url] )
identifier[self] . identifier[cache] . identifier[delete] ( identifier[cache_url] )
identifier[resp] . identifier[from_cache] = identifier[from_cache]
keyword[return] identifier[resp]
|
def build_response(self, request, response, from_cache=False):
"""
Build a response by making a request or using the cache.
This will end up calling send and returning a potentially
cached response
"""
if not from_cache and request.method == 'GET':
# apply any expiration heuristics
if response.status == 304:
# We must have sent an ETag request. This could mean
# that we've been expired already or that we simply
# have an etag. In either case, we want to try and
# update the cache if that is the case.
cached_response = self.controller.update_cached_response(request, response)
if cached_response is not response:
from_cache = True # depends on [control=['if'], data=[]]
# We are done with the server response, read a
# possible response body (compliant servers will
# not return one, but we cannot be 100% sure) and
# release the connection back to the pool.
response.read(decode_content=False)
response.release_conn()
response = cached_response # depends on [control=['if'], data=[]]
# We always cache the 301 responses
elif response.status == 301:
self.controller.cache_response(request, response) # depends on [control=['if'], data=[]]
else:
# Check for any heuristics that might update headers
# before trying to cache.
if self.heuristic:
response = self.heuristic.apply(response) # depends on [control=['if'], data=[]]
# Wrap the response file with a wrapper that will cache the
# response when the stream has been consumed.
response._fp = CallbackFileWrapper(response._fp, functools.partial(self.controller.cache_response, request, response)) # depends on [control=['if'], data=[]]
resp = super(CacheControlAdapter, self).build_response(request, response)
# See if we should invalidate the cache.
if request.method in self.invalidating_methods and resp.ok:
cache_url = self.controller.cache_url(request.url)
self.cache.delete(cache_url) # depends on [control=['if'], data=[]]
# Give the request a from_cache attr to let people use it
resp.from_cache = from_cache
return resp
|
def get_average_color(colors):
"""Calculate the average color from the list of colors, where each color
is a 3-tuple of (r, g, b) values.
"""
c = reduce(color_reducer, colors)
total = len(colors)
return tuple(v / total for v in c)
|
def function[get_average_color, parameter[colors]]:
constant[Calculate the average color from the list of colors, where each color
is a 3-tuple of (r, g, b) values.
]
variable[c] assign[=] call[name[reduce], parameter[name[color_reducer], name[colors]]]
variable[total] assign[=] call[name[len], parameter[name[colors]]]
return[call[name[tuple], parameter[<ast.GeneratorExp object at 0x7da18bcc9ab0>]]]
|
keyword[def] identifier[get_average_color] ( identifier[colors] ):
literal[string]
identifier[c] = identifier[reduce] ( identifier[color_reducer] , identifier[colors] )
identifier[total] = identifier[len] ( identifier[colors] )
keyword[return] identifier[tuple] ( identifier[v] / identifier[total] keyword[for] identifier[v] keyword[in] identifier[c] )
|
def get_average_color(colors):
"""Calculate the average color from the list of colors, where each color
is a 3-tuple of (r, g, b) values.
"""
c = reduce(color_reducer, colors)
total = len(colors)
return tuple((v / total for v in c))
|
def _save_ack_callback(self, msgid, callback):
"""Keep a reference of the callback on this socket."""
if msgid in self.ack_callbacks:
return False
self.ack_callbacks[msgid] = callback
|
def function[_save_ack_callback, parameter[self, msgid, callback]]:
constant[Keep a reference of the callback on this socket.]
if compare[name[msgid] in name[self].ack_callbacks] begin[:]
return[constant[False]]
call[name[self].ack_callbacks][name[msgid]] assign[=] name[callback]
|
keyword[def] identifier[_save_ack_callback] ( identifier[self] , identifier[msgid] , identifier[callback] ):
literal[string]
keyword[if] identifier[msgid] keyword[in] identifier[self] . identifier[ack_callbacks] :
keyword[return] keyword[False]
identifier[self] . identifier[ack_callbacks] [ identifier[msgid] ]= identifier[callback]
|
def _save_ack_callback(self, msgid, callback):
"""Keep a reference of the callback on this socket."""
if msgid in self.ack_callbacks:
return False # depends on [control=['if'], data=[]]
self.ack_callbacks[msgid] = callback
|
def register(self, resource, event, trigger, **kwargs):
"""Called in trunk plugin's AFTER_INIT"""
super(AristaTrunkDriver, self).register(resource, event,
trigger, kwargs)
registry.subscribe(self.subport_create,
resources.SUBPORTS, events.AFTER_CREATE)
registry.subscribe(self.subport_delete,
resources.SUBPORTS, events.AFTER_DELETE)
registry.subscribe(self.trunk_create,
resources.TRUNK, events.AFTER_CREATE)
registry.subscribe(self.trunk_update,
resources.TRUNK, events.AFTER_UPDATE)
registry.subscribe(self.trunk_delete,
resources.TRUNK, events.AFTER_DELETE)
self.core_plugin = directory.get_plugin()
LOG.debug("Arista trunk driver initialized.")
|
def function[register, parameter[self, resource, event, trigger]]:
constant[Called in trunk plugin's AFTER_INIT]
call[call[name[super], parameter[name[AristaTrunkDriver], name[self]]].register, parameter[name[resource], name[event], name[trigger], name[kwargs]]]
call[name[registry].subscribe, parameter[name[self].subport_create, name[resources].SUBPORTS, name[events].AFTER_CREATE]]
call[name[registry].subscribe, parameter[name[self].subport_delete, name[resources].SUBPORTS, name[events].AFTER_DELETE]]
call[name[registry].subscribe, parameter[name[self].trunk_create, name[resources].TRUNK, name[events].AFTER_CREATE]]
call[name[registry].subscribe, parameter[name[self].trunk_update, name[resources].TRUNK, name[events].AFTER_UPDATE]]
call[name[registry].subscribe, parameter[name[self].trunk_delete, name[resources].TRUNK, name[events].AFTER_DELETE]]
name[self].core_plugin assign[=] call[name[directory].get_plugin, parameter[]]
call[name[LOG].debug, parameter[constant[Arista trunk driver initialized.]]]
|
keyword[def] identifier[register] ( identifier[self] , identifier[resource] , identifier[event] , identifier[trigger] ,** identifier[kwargs] ):
literal[string]
identifier[super] ( identifier[AristaTrunkDriver] , identifier[self] ). identifier[register] ( identifier[resource] , identifier[event] ,
identifier[trigger] , identifier[kwargs] )
identifier[registry] . identifier[subscribe] ( identifier[self] . identifier[subport_create] ,
identifier[resources] . identifier[SUBPORTS] , identifier[events] . identifier[AFTER_CREATE] )
identifier[registry] . identifier[subscribe] ( identifier[self] . identifier[subport_delete] ,
identifier[resources] . identifier[SUBPORTS] , identifier[events] . identifier[AFTER_DELETE] )
identifier[registry] . identifier[subscribe] ( identifier[self] . identifier[trunk_create] ,
identifier[resources] . identifier[TRUNK] , identifier[events] . identifier[AFTER_CREATE] )
identifier[registry] . identifier[subscribe] ( identifier[self] . identifier[trunk_update] ,
identifier[resources] . identifier[TRUNK] , identifier[events] . identifier[AFTER_UPDATE] )
identifier[registry] . identifier[subscribe] ( identifier[self] . identifier[trunk_delete] ,
identifier[resources] . identifier[TRUNK] , identifier[events] . identifier[AFTER_DELETE] )
identifier[self] . identifier[core_plugin] = identifier[directory] . identifier[get_plugin] ()
identifier[LOG] . identifier[debug] ( literal[string] )
|
def register(self, resource, event, trigger, **kwargs):
"""Called in trunk plugin's AFTER_INIT"""
super(AristaTrunkDriver, self).register(resource, event, trigger, kwargs)
registry.subscribe(self.subport_create, resources.SUBPORTS, events.AFTER_CREATE)
registry.subscribe(self.subport_delete, resources.SUBPORTS, events.AFTER_DELETE)
registry.subscribe(self.trunk_create, resources.TRUNK, events.AFTER_CREATE)
registry.subscribe(self.trunk_update, resources.TRUNK, events.AFTER_UPDATE)
registry.subscribe(self.trunk_delete, resources.TRUNK, events.AFTER_DELETE)
self.core_plugin = directory.get_plugin()
LOG.debug('Arista trunk driver initialized.')
|
def delete_items(item_list, reason, login, mediawiki_api_url='https://www.wikidata.org/w/api.php',
user_agent=config['USER_AGENT_DEFAULT']):
"""
Takes a list of items and posts them for deletion by Wikidata moderators, appends at the end of the deletion
request page.
:param item_list: a list of QIDs which should be deleted
:type item_list: list
:param reason: short text about the reason for the deletion request
:type reason: str
:param login: A WDI login object which contains username and password the edit should be performed with.
:type login: wdi_login.WDLogin
"""
url = mediawiki_api_url
bulk_deletion_string = '\n==Bulk deletion request==\n'
bulk_deletion_string += '{{{{subst:Rfd group | {0} | reason = {1} }}}}'.format(' | '.join(item_list), reason)
# get page text
params = {
'action': 'query',
'titles': 'Wikidata:Requests_for_deletions',
'prop': 'revisions',
'rvprop': 'content',
'format': 'json'
}
headers = {
'User-Agent': user_agent
}
page_text = [x['revisions'][0]['*']
for x in requests.get(url=url, params=params, headers=headers).json()['query']['pages'].values()][
0]
if not login:
print(page_text)
print(bulk_deletion_string)
else:
# Append new deletion request to existing list of deletions being processed
params = {
'action': 'edit',
'title': 'Portal:Gene_Wiki/Quick_Links',
'section': '0',
'text': page_text + bulk_deletion_string,
'token': login.get_edit_token(),
'format': 'json'
}
r = requests.post(url=url, data=params, cookies=login.get_edit_cookie(), headers=headers)
print(r.json())
|
def function[delete_items, parameter[item_list, reason, login, mediawiki_api_url, user_agent]]:
constant[
Takes a list of items and posts them for deletion by Wikidata moderators, appends at the end of the deletion
request page.
:param item_list: a list of QIDs which should be deleted
:type item_list: list
:param reason: short text about the reason for the deletion request
:type reason: str
:param login: A WDI login object which contains username and password the edit should be performed with.
:type login: wdi_login.WDLogin
]
variable[url] assign[=] name[mediawiki_api_url]
variable[bulk_deletion_string] assign[=] constant[
==Bulk deletion request==
]
<ast.AugAssign object at 0x7da1b0d76890>
variable[params] assign[=] dictionary[[<ast.Constant object at 0x7da1b0d77e80>, <ast.Constant object at 0x7da1b0d76d40>, <ast.Constant object at 0x7da1b0d77940>, <ast.Constant object at 0x7da1b0d77430>, <ast.Constant object at 0x7da1b0d75d20>], [<ast.Constant object at 0x7da207f03190>, <ast.Constant object at 0x7da207f00460>, <ast.Constant object at 0x7da207f038b0>, <ast.Constant object at 0x7da207f01180>, <ast.Constant object at 0x7da207f006a0>]]
variable[headers] assign[=] dictionary[[<ast.Constant object at 0x7da207f00490>], [<ast.Name object at 0x7da207f00040>]]
variable[page_text] assign[=] call[<ast.ListComp object at 0x7da207f02410>][constant[0]]
if <ast.UnaryOp object at 0x7da1b0d77b80> begin[:]
call[name[print], parameter[name[page_text]]]
call[name[print], parameter[name[bulk_deletion_string]]]
|
keyword[def] identifier[delete_items] ( identifier[item_list] , identifier[reason] , identifier[login] , identifier[mediawiki_api_url] = literal[string] ,
identifier[user_agent] = identifier[config] [ literal[string] ]):
literal[string]
identifier[url] = identifier[mediawiki_api_url]
identifier[bulk_deletion_string] = literal[string]
identifier[bulk_deletion_string] += literal[string] . identifier[format] ( literal[string] . identifier[join] ( identifier[item_list] ), identifier[reason] )
identifier[params] ={
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[string]
}
identifier[headers] ={
literal[string] : identifier[user_agent]
}
identifier[page_text] =[ identifier[x] [ literal[string] ][ literal[int] ][ literal[string] ]
keyword[for] identifier[x] keyword[in] identifier[requests] . identifier[get] ( identifier[url] = identifier[url] , identifier[params] = identifier[params] , identifier[headers] = identifier[headers] ). identifier[json] ()[ literal[string] ][ literal[string] ]. identifier[values] ()][
literal[int] ]
keyword[if] keyword[not] identifier[login] :
identifier[print] ( identifier[page_text] )
identifier[print] ( identifier[bulk_deletion_string] )
keyword[else] :
identifier[params] ={
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : identifier[page_text] + identifier[bulk_deletion_string] ,
literal[string] : identifier[login] . identifier[get_edit_token] (),
literal[string] : literal[string]
}
identifier[r] = identifier[requests] . identifier[post] ( identifier[url] = identifier[url] , identifier[data] = identifier[params] , identifier[cookies] = identifier[login] . identifier[get_edit_cookie] (), identifier[headers] = identifier[headers] )
identifier[print] ( identifier[r] . identifier[json] ())
|
def delete_items(item_list, reason, login, mediawiki_api_url='https://www.wikidata.org/w/api.php', user_agent=config['USER_AGENT_DEFAULT']):
"""
Takes a list of items and posts them for deletion by Wikidata moderators, appends at the end of the deletion
request page.
:param item_list: a list of QIDs which should be deleted
:type item_list: list
:param reason: short text about the reason for the deletion request
:type reason: str
:param login: A WDI login object which contains username and password the edit should be performed with.
:type login: wdi_login.WDLogin
"""
url = mediawiki_api_url
bulk_deletion_string = '\n==Bulk deletion request==\n'
bulk_deletion_string += '{{{{subst:Rfd group | {0} | reason = {1} }}}}'.format(' | '.join(item_list), reason)
# get page text
params = {'action': 'query', 'titles': 'Wikidata:Requests_for_deletions', 'prop': 'revisions', 'rvprop': 'content', 'format': 'json'}
headers = {'User-Agent': user_agent}
page_text = [x['revisions'][0]['*'] for x in requests.get(url=url, params=params, headers=headers).json()['query']['pages'].values()][0]
if not login:
print(page_text)
print(bulk_deletion_string) # depends on [control=['if'], data=[]]
else:
# Append new deletion request to existing list of deletions being processed
params = {'action': 'edit', 'title': 'Portal:Gene_Wiki/Quick_Links', 'section': '0', 'text': page_text + bulk_deletion_string, 'token': login.get_edit_token(), 'format': 'json'}
r = requests.post(url=url, data=params, cookies=login.get_edit_cookie(), headers=headers)
print(r.json())
|
def _list_result_paths(target_path, log_file_name='log'):
"""list_result_paths."""
result_list = []
for root, _dirs, _files in os.walk(os.path.abspath(target_path)):
for name in _files:
if name == log_file_name:
result_list.append(root)
return result_list
|
def function[_list_result_paths, parameter[target_path, log_file_name]]:
constant[list_result_paths.]
variable[result_list] assign[=] list[[]]
for taget[tuple[[<ast.Name object at 0x7da1b0dc0550>, <ast.Name object at 0x7da1b0dc0c10>, <ast.Name object at 0x7da1b0dc0fa0>]]] in starred[call[name[os].walk, parameter[call[name[os].path.abspath, parameter[name[target_path]]]]]] begin[:]
for taget[name[name]] in starred[name[_files]] begin[:]
if compare[name[name] equal[==] name[log_file_name]] begin[:]
call[name[result_list].append, parameter[name[root]]]
return[name[result_list]]
|
keyword[def] identifier[_list_result_paths] ( identifier[target_path] , identifier[log_file_name] = literal[string] ):
literal[string]
identifier[result_list] =[]
keyword[for] identifier[root] , identifier[_dirs] , identifier[_files] keyword[in] identifier[os] . identifier[walk] ( identifier[os] . identifier[path] . identifier[abspath] ( identifier[target_path] )):
keyword[for] identifier[name] keyword[in] identifier[_files] :
keyword[if] identifier[name] == identifier[log_file_name] :
identifier[result_list] . identifier[append] ( identifier[root] )
keyword[return] identifier[result_list]
|
def _list_result_paths(target_path, log_file_name='log'):
"""list_result_paths."""
result_list = []
for (root, _dirs, _files) in os.walk(os.path.abspath(target_path)):
for name in _files:
if name == log_file_name:
result_list.append(root) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['name']] # depends on [control=['for'], data=[]]
return result_list
|
def server_receives_without_validation(self, *parameters):
"""Receive a message with template defined using `New Message`.
Message template has to be defined with `New Message` before calling
this.
Optional parameters:
- `name` the client name (default is the latest used) example: `name=Client 1`
- `connection` alias. example: `connection=connection 1`
- `timeout` for receiving message. example: `timeout=0.1`
- `latest` if set to True, get latest message from buffer instead first. Default is False. Example: `latest=True`
Examples:
| ${msg} = | Server receives without validation |
| ${msg} = | Server receives without validation | name=Server1 | alias=my_connection | timeout=5 |
"""
with self._receive(self._servers, *parameters) as (msg, _, _):
return msg
|
def function[server_receives_without_validation, parameter[self]]:
constant[Receive a message with template defined using `New Message`.
Message template has to be defined with `New Message` before calling
this.
Optional parameters:
- `name` the client name (default is the latest used) example: `name=Client 1`
- `connection` alias. example: `connection=connection 1`
- `timeout` for receiving message. example: `timeout=0.1`
- `latest` if set to True, get latest message from buffer instead first. Default is False. Example: `latest=True`
Examples:
| ${msg} = | Server receives without validation |
| ${msg} = | Server receives without validation | name=Server1 | alias=my_connection | timeout=5 |
]
with call[name[self]._receive, parameter[name[self]._servers, <ast.Starred object at 0x7da18ede7df0>]] begin[:]
return[name[msg]]
|
keyword[def] identifier[server_receives_without_validation] ( identifier[self] ,* identifier[parameters] ):
literal[string]
keyword[with] identifier[self] . identifier[_receive] ( identifier[self] . identifier[_servers] ,* identifier[parameters] ) keyword[as] ( identifier[msg] , identifier[_] , identifier[_] ):
keyword[return] identifier[msg]
|
def server_receives_without_validation(self, *parameters):
"""Receive a message with template defined using `New Message`.
Message template has to be defined with `New Message` before calling
this.
Optional parameters:
- `name` the client name (default is the latest used) example: `name=Client 1`
- `connection` alias. example: `connection=connection 1`
- `timeout` for receiving message. example: `timeout=0.1`
- `latest` if set to True, get latest message from buffer instead first. Default is False. Example: `latest=True`
Examples:
| ${msg} = | Server receives without validation |
| ${msg} = | Server receives without validation | name=Server1 | alias=my_connection | timeout=5 |
"""
with self._receive(self._servers, *parameters) as (msg, _, _):
return msg # depends on [control=['with'], data=[]]
|
def find_stacks(node, strict=False):
"""Find pushes and pops to the stack and annotate them as such.
Args:
node: An AST node that might contain stack pushes and pops.
strict: A boolean indicating whether to stringently test whether each
push and pop are matched. This is not always possible when taking
higher-order derivatives of code generated in split-motion.
Returns:
node: The node passed in, but with pushes and pops annotated in AST nodes.
"""
# First, find all stack operation IDs.
fso = FindStackOps()
fso.visit(node)
# Using those IDs, make annotations onto the push and pop nodes.
AnnotateStacks(fso.push_pop_pairs, strict).visit(node)
return node
|
def function[find_stacks, parameter[node, strict]]:
constant[Find pushes and pops to the stack and annotate them as such.
Args:
node: An AST node that might contain stack pushes and pops.
strict: A boolean indicating whether to stringently test whether each
push and pop are matched. This is not always possible when taking
higher-order derivatives of code generated in split-motion.
Returns:
node: The node passed in, but with pushes and pops annotated in AST nodes.
]
variable[fso] assign[=] call[name[FindStackOps], parameter[]]
call[name[fso].visit, parameter[name[node]]]
call[call[name[AnnotateStacks], parameter[name[fso].push_pop_pairs, name[strict]]].visit, parameter[name[node]]]
return[name[node]]
|
keyword[def] identifier[find_stacks] ( identifier[node] , identifier[strict] = keyword[False] ):
literal[string]
identifier[fso] = identifier[FindStackOps] ()
identifier[fso] . identifier[visit] ( identifier[node] )
identifier[AnnotateStacks] ( identifier[fso] . identifier[push_pop_pairs] , identifier[strict] ). identifier[visit] ( identifier[node] )
keyword[return] identifier[node]
|
def find_stacks(node, strict=False):
"""Find pushes and pops to the stack and annotate them as such.
Args:
node: An AST node that might contain stack pushes and pops.
strict: A boolean indicating whether to stringently test whether each
push and pop are matched. This is not always possible when taking
higher-order derivatives of code generated in split-motion.
Returns:
node: The node passed in, but with pushes and pops annotated in AST nodes.
"""
# First, find all stack operation IDs.
fso = FindStackOps()
fso.visit(node)
# Using those IDs, make annotations onto the push and pop nodes.
AnnotateStacks(fso.push_pop_pairs, strict).visit(node)
return node
|
def action_attr(attr_name):
"""
Creates a getter that will drop the current value
and retrieve the action's attribute with specified name.
@param attr_name: the name of an attribute belonging to the action.
@type attr_name: str
"""
def action_attr(_value, context, **_params):
value = getattr(context["action"], attr_name)
return _attr(value)
return action_attr
|
def function[action_attr, parameter[attr_name]]:
constant[
Creates a getter that will drop the current value
and retrieve the action's attribute with specified name.
@param attr_name: the name of an attribute belonging to the action.
@type attr_name: str
]
def function[action_attr, parameter[_value, context]]:
variable[value] assign[=] call[name[getattr], parameter[call[name[context]][constant[action]], name[attr_name]]]
return[call[name[_attr], parameter[name[value]]]]
return[name[action_attr]]
|
keyword[def] identifier[action_attr] ( identifier[attr_name] ):
literal[string]
keyword[def] identifier[action_attr] ( identifier[_value] , identifier[context] ,** identifier[_params] ):
identifier[value] = identifier[getattr] ( identifier[context] [ literal[string] ], identifier[attr_name] )
keyword[return] identifier[_attr] ( identifier[value] )
keyword[return] identifier[action_attr]
|
def action_attr(attr_name):
"""
Creates a getter that will drop the current value
and retrieve the action's attribute with specified name.
@param attr_name: the name of an attribute belonging to the action.
@type attr_name: str
"""
def action_attr(_value, context, **_params):
value = getattr(context['action'], attr_name)
return _attr(value)
return action_attr
|
def seek(self, offset, whence=SEEK_SET):
"""Seek pointer in lob data buffer to requested position.
Might trigger further loading of data from the database if the pointer is beyond currently read data.
"""
# A nice trick is to (ab)use BytesIO.seek() to go to the desired position for easier calculation.
# This will not add any data to the buffer however - very convenient!
self.data.seek(offset, whence)
new_pos = self.data.tell()
missing_bytes_to_read = new_pos - self._current_lob_length
if missing_bytes_to_read > 0:
# Trying to seek beyond currently available LOB data, so need to load some more first.
# We are smart here: (at least trying...):
# If a user sets a certain file position s/he probably wants to read data from
# there. So already read some extra data to avoid yet another immediate
# reading step. Try with EXTRA_NUM_ITEMS_TO_READ_AFTER_SEEK additional items (bytes/chars).
# jump to the end of the current buffer and read the new data:
self.data.seek(0, SEEK_END)
self.read(missing_bytes_to_read + self.EXTRA_NUM_ITEMS_TO_READ_AFTER_SEEK)
# reposition file pointer a originally desired position:
self.data.seek(new_pos)
return new_pos
|
def function[seek, parameter[self, offset, whence]]:
constant[Seek pointer in lob data buffer to requested position.
Might trigger further loading of data from the database if the pointer is beyond currently read data.
]
call[name[self].data.seek, parameter[name[offset], name[whence]]]
variable[new_pos] assign[=] call[name[self].data.tell, parameter[]]
variable[missing_bytes_to_read] assign[=] binary_operation[name[new_pos] - name[self]._current_lob_length]
if compare[name[missing_bytes_to_read] greater[>] constant[0]] begin[:]
call[name[self].data.seek, parameter[constant[0], name[SEEK_END]]]
call[name[self].read, parameter[binary_operation[name[missing_bytes_to_read] + name[self].EXTRA_NUM_ITEMS_TO_READ_AFTER_SEEK]]]
call[name[self].data.seek, parameter[name[new_pos]]]
return[name[new_pos]]
|
keyword[def] identifier[seek] ( identifier[self] , identifier[offset] , identifier[whence] = identifier[SEEK_SET] ):
literal[string]
identifier[self] . identifier[data] . identifier[seek] ( identifier[offset] , identifier[whence] )
identifier[new_pos] = identifier[self] . identifier[data] . identifier[tell] ()
identifier[missing_bytes_to_read] = identifier[new_pos] - identifier[self] . identifier[_current_lob_length]
keyword[if] identifier[missing_bytes_to_read] > literal[int] :
identifier[self] . identifier[data] . identifier[seek] ( literal[int] , identifier[SEEK_END] )
identifier[self] . identifier[read] ( identifier[missing_bytes_to_read] + identifier[self] . identifier[EXTRA_NUM_ITEMS_TO_READ_AFTER_SEEK] )
identifier[self] . identifier[data] . identifier[seek] ( identifier[new_pos] )
keyword[return] identifier[new_pos]
|
def seek(self, offset, whence=SEEK_SET):
"""Seek pointer in lob data buffer to requested position.
Might trigger further loading of data from the database if the pointer is beyond currently read data.
"""
# A nice trick is to (ab)use BytesIO.seek() to go to the desired position for easier calculation.
# This will not add any data to the buffer however - very convenient!
self.data.seek(offset, whence)
new_pos = self.data.tell()
missing_bytes_to_read = new_pos - self._current_lob_length
if missing_bytes_to_read > 0:
# Trying to seek beyond currently available LOB data, so need to load some more first.
# We are smart here: (at least trying...):
# If a user sets a certain file position s/he probably wants to read data from
# there. So already read some extra data to avoid yet another immediate
# reading step. Try with EXTRA_NUM_ITEMS_TO_READ_AFTER_SEEK additional items (bytes/chars).
# jump to the end of the current buffer and read the new data:
self.data.seek(0, SEEK_END)
self.read(missing_bytes_to_read + self.EXTRA_NUM_ITEMS_TO_READ_AFTER_SEEK)
# reposition file pointer a originally desired position:
self.data.seek(new_pos) # depends on [control=['if'], data=['missing_bytes_to_read']]
return new_pos
|
def get_opts(opts):
"""
Validate options and apply defaults for options not supplied.
:param opts: dictionary mapping str->str.
:return: dictionary mapping str->Opt. All possible keys are present.
"""
defaults = {
'board': None,
'terrain': Opt.random,
'numbers': Opt.preset,
'ports': Opt.preset,
'pieces': Opt.preset,
'players': Opt.preset,
}
_opts = defaults.copy()
if opts is None:
opts = dict()
try:
for key, val in opts.copy().items():
if key == 'board':
# board is a string, not a regular opt, and gets special handling
# in _read_tiles_from_string
continue
opts[key] = Opt(val)
_opts.update(opts)
except Exception:
raise ValueError('Invalid options={}'.format(opts))
logging.debug('used defaults=\n{}\n on opts=\n{}\nreturned total opts=\n{}'.format(
pprint.pformat(defaults),
pprint.pformat(opts),
pprint.pformat(_opts)))
return _opts
|
def function[get_opts, parameter[opts]]:
constant[
Validate options and apply defaults for options not supplied.
:param opts: dictionary mapping str->str.
:return: dictionary mapping str->Opt. All possible keys are present.
]
variable[defaults] assign[=] dictionary[[<ast.Constant object at 0x7da1b2358f10>, <ast.Constant object at 0x7da1b2359450>, <ast.Constant object at 0x7da1b23599f0>, <ast.Constant object at 0x7da1b2359ab0>, <ast.Constant object at 0x7da1b235bdc0>, <ast.Constant object at 0x7da1b235b370>], [<ast.Constant object at 0x7da1b2358af0>, <ast.Attribute object at 0x7da1b2358d00>, <ast.Attribute object at 0x7da1b235beb0>, <ast.Attribute object at 0x7da1b235bfa0>, <ast.Attribute object at 0x7da1b235be80>, <ast.Attribute object at 0x7da1b235bd90>]]
variable[_opts] assign[=] call[name[defaults].copy, parameter[]]
if compare[name[opts] is constant[None]] begin[:]
variable[opts] assign[=] call[name[dict], parameter[]]
<ast.Try object at 0x7da1b25056f0>
call[name[logging].debug, parameter[call[constant[used defaults=
{}
on opts=
{}
returned total opts=
{}].format, parameter[call[name[pprint].pformat, parameter[name[defaults]]], call[name[pprint].pformat, parameter[name[opts]]], call[name[pprint].pformat, parameter[name[_opts]]]]]]]
return[name[_opts]]
|
keyword[def] identifier[get_opts] ( identifier[opts] ):
literal[string]
identifier[defaults] ={
literal[string] : keyword[None] ,
literal[string] : identifier[Opt] . identifier[random] ,
literal[string] : identifier[Opt] . identifier[preset] ,
literal[string] : identifier[Opt] . identifier[preset] ,
literal[string] : identifier[Opt] . identifier[preset] ,
literal[string] : identifier[Opt] . identifier[preset] ,
}
identifier[_opts] = identifier[defaults] . identifier[copy] ()
keyword[if] identifier[opts] keyword[is] keyword[None] :
identifier[opts] = identifier[dict] ()
keyword[try] :
keyword[for] identifier[key] , identifier[val] keyword[in] identifier[opts] . identifier[copy] (). identifier[items] ():
keyword[if] identifier[key] == literal[string] :
keyword[continue]
identifier[opts] [ identifier[key] ]= identifier[Opt] ( identifier[val] )
identifier[_opts] . identifier[update] ( identifier[opts] )
keyword[except] identifier[Exception] :
keyword[raise] identifier[ValueError] ( literal[string] . identifier[format] ( identifier[opts] ))
identifier[logging] . identifier[debug] ( literal[string] . identifier[format] (
identifier[pprint] . identifier[pformat] ( identifier[defaults] ),
identifier[pprint] . identifier[pformat] ( identifier[opts] ),
identifier[pprint] . identifier[pformat] ( identifier[_opts] )))
keyword[return] identifier[_opts]
|
def get_opts(opts):
"""
Validate options and apply defaults for options not supplied.
:param opts: dictionary mapping str->str.
:return: dictionary mapping str->Opt. All possible keys are present.
"""
defaults = {'board': None, 'terrain': Opt.random, 'numbers': Opt.preset, 'ports': Opt.preset, 'pieces': Opt.preset, 'players': Opt.preset}
_opts = defaults.copy()
if opts is None:
opts = dict() # depends on [control=['if'], data=['opts']]
try:
for (key, val) in opts.copy().items():
if key == 'board':
# board is a string, not a regular opt, and gets special handling
# in _read_tiles_from_string
continue # depends on [control=['if'], data=[]]
opts[key] = Opt(val) # depends on [control=['for'], data=[]]
_opts.update(opts) # depends on [control=['try'], data=[]]
except Exception:
raise ValueError('Invalid options={}'.format(opts)) # depends on [control=['except'], data=[]]
logging.debug('used defaults=\n{}\n on opts=\n{}\nreturned total opts=\n{}'.format(pprint.pformat(defaults), pprint.pformat(opts), pprint.pformat(_opts)))
return _opts
|
def uninstall(pkg,
user=None,
env=None):
'''
Uninstall a cabal package.
pkg
The package to uninstall
user
The user to run ghc-pkg unregister with
env
Environment variables to set when invoking cabal. Uses the
same ``env`` format as the :py:func:`cmd.run
<salt.modules.cmdmod.run>` execution function
CLI Example:
.. code-block:: bash
salt '*' cabal.uninstall ShellCheck
'''
cmd = ['ghc-pkg unregister']
cmd.append('"{0}"'.format(pkg))
result = __salt__['cmd.run_all'](' '.join(cmd), runas=user, env=env)
if result['retcode'] != 0:
raise CommandExecutionError(result['stderr'])
return result
|
def function[uninstall, parameter[pkg, user, env]]:
constant[
Uninstall a cabal package.
pkg
The package to uninstall
user
The user to run ghc-pkg unregister with
env
Environment variables to set when invoking cabal. Uses the
same ``env`` format as the :py:func:`cmd.run
<salt.modules.cmdmod.run>` execution function
CLI Example:
.. code-block:: bash
salt '*' cabal.uninstall ShellCheck
]
variable[cmd] assign[=] list[[<ast.Constant object at 0x7da207f03700>]]
call[name[cmd].append, parameter[call[constant["{0}"].format, parameter[name[pkg]]]]]
variable[result] assign[=] call[call[name[__salt__]][constant[cmd.run_all]], parameter[call[constant[ ].join, parameter[name[cmd]]]]]
if compare[call[name[result]][constant[retcode]] not_equal[!=] constant[0]] begin[:]
<ast.Raise object at 0x7da207f016c0>
return[name[result]]
|
keyword[def] identifier[uninstall] ( identifier[pkg] ,
identifier[user] = keyword[None] ,
identifier[env] = keyword[None] ):
literal[string]
identifier[cmd] =[ literal[string] ]
identifier[cmd] . identifier[append] ( literal[string] . identifier[format] ( identifier[pkg] ))
identifier[result] = identifier[__salt__] [ literal[string] ]( literal[string] . identifier[join] ( identifier[cmd] ), identifier[runas] = identifier[user] , identifier[env] = identifier[env] )
keyword[if] identifier[result] [ literal[string] ]!= literal[int] :
keyword[raise] identifier[CommandExecutionError] ( identifier[result] [ literal[string] ])
keyword[return] identifier[result]
|
def uninstall(pkg, user=None, env=None):
"""
Uninstall a cabal package.
pkg
The package to uninstall
user
The user to run ghc-pkg unregister with
env
Environment variables to set when invoking cabal. Uses the
same ``env`` format as the :py:func:`cmd.run
<salt.modules.cmdmod.run>` execution function
CLI Example:
.. code-block:: bash
salt '*' cabal.uninstall ShellCheck
"""
cmd = ['ghc-pkg unregister']
cmd.append('"{0}"'.format(pkg))
result = __salt__['cmd.run_all'](' '.join(cmd), runas=user, env=env)
if result['retcode'] != 0:
raise CommandExecutionError(result['stderr']) # depends on [control=['if'], data=[]]
return result
|
def output(data, **kwargs): # pylint: disable=unused-argument
'''
Print the output data in JSON
'''
try:
dump_opts = {'indent': 4, 'default': repr}
if 'output_indent' in __opts__:
indent = __opts__.get('output_indent')
sort_keys = False
if indent == 'pretty':
indent = 4
sort_keys = True
elif isinstance(indent, six.integer_types):
if indent >= 0:
indent = indent
else:
indent = None
dump_opts['indent'] = indent
dump_opts['sort_keys'] = sort_keys
return dson.dumps(data, **dump_opts)
except UnicodeDecodeError as exc:
log.error('Unable to serialize output to dson')
return dson.dumps(
{'error': 'Unable to serialize output to DSON',
'message': six.text_type(exc)}
)
except TypeError:
log.debug('An error occurred while outputting DSON', exc_info=True)
# Return valid JSON for unserializable objects
return dson.dumps({})
|
def function[output, parameter[data]]:
constant[
Print the output data in JSON
]
<ast.Try object at 0x7da20e9570a0>
return[call[name[dson].dumps, parameter[dictionary[[], []]]]]
|
keyword[def] identifier[output] ( identifier[data] ,** identifier[kwargs] ):
literal[string]
keyword[try] :
identifier[dump_opts] ={ literal[string] : literal[int] , literal[string] : identifier[repr] }
keyword[if] literal[string] keyword[in] identifier[__opts__] :
identifier[indent] = identifier[__opts__] . identifier[get] ( literal[string] )
identifier[sort_keys] = keyword[False]
keyword[if] identifier[indent] == literal[string] :
identifier[indent] = literal[int]
identifier[sort_keys] = keyword[True]
keyword[elif] identifier[isinstance] ( identifier[indent] , identifier[six] . identifier[integer_types] ):
keyword[if] identifier[indent] >= literal[int] :
identifier[indent] = identifier[indent]
keyword[else] :
identifier[indent] = keyword[None]
identifier[dump_opts] [ literal[string] ]= identifier[indent]
identifier[dump_opts] [ literal[string] ]= identifier[sort_keys]
keyword[return] identifier[dson] . identifier[dumps] ( identifier[data] ,** identifier[dump_opts] )
keyword[except] identifier[UnicodeDecodeError] keyword[as] identifier[exc] :
identifier[log] . identifier[error] ( literal[string] )
keyword[return] identifier[dson] . identifier[dumps] (
{ literal[string] : literal[string] ,
literal[string] : identifier[six] . identifier[text_type] ( identifier[exc] )}
)
keyword[except] identifier[TypeError] :
identifier[log] . identifier[debug] ( literal[string] , identifier[exc_info] = keyword[True] )
keyword[return] identifier[dson] . identifier[dumps] ({})
|
def output(data, **kwargs): # pylint: disable=unused-argument
'\n Print the output data in JSON\n '
try:
dump_opts = {'indent': 4, 'default': repr}
if 'output_indent' in __opts__:
indent = __opts__.get('output_indent')
sort_keys = False
if indent == 'pretty':
indent = 4
sort_keys = True # depends on [control=['if'], data=['indent']]
elif isinstance(indent, six.integer_types):
if indent >= 0:
indent = indent # depends on [control=['if'], data=['indent']]
else:
indent = None # depends on [control=['if'], data=[]]
dump_opts['indent'] = indent
dump_opts['sort_keys'] = sort_keys # depends on [control=['if'], data=['__opts__']]
return dson.dumps(data, **dump_opts) # depends on [control=['try'], data=[]]
except UnicodeDecodeError as exc:
log.error('Unable to serialize output to dson')
return dson.dumps({'error': 'Unable to serialize output to DSON', 'message': six.text_type(exc)}) # depends on [control=['except'], data=['exc']]
except TypeError:
log.debug('An error occurred while outputting DSON', exc_info=True) # depends on [control=['except'], data=[]]
# Return valid JSON for unserializable objects
return dson.dumps({})
|
def free(self):
"""Release the results and connection lock from the TornadoSession
object. This **must** be called after you finish processing the results
from :py:meth:`TornadoSession.query <queries.TornadoSession.query>` or
:py:meth:`TornadoSession.callproc <queries.TornadoSession.callproc>`
or the connection will not be able to be reused by other asynchronous
requests.
"""
self._freed = True
self._cleanup(self.cursor, self._fd)
|
def function[free, parameter[self]]:
constant[Release the results and connection lock from the TornadoSession
object. This **must** be called after you finish processing the results
from :py:meth:`TornadoSession.query <queries.TornadoSession.query>` or
:py:meth:`TornadoSession.callproc <queries.TornadoSession.callproc>`
or the connection will not be able to be reused by other asynchronous
requests.
]
name[self]._freed assign[=] constant[True]
call[name[self]._cleanup, parameter[name[self].cursor, name[self]._fd]]
|
keyword[def] identifier[free] ( identifier[self] ):
literal[string]
identifier[self] . identifier[_freed] = keyword[True]
identifier[self] . identifier[_cleanup] ( identifier[self] . identifier[cursor] , identifier[self] . identifier[_fd] )
|
def free(self):
"""Release the results and connection lock from the TornadoSession
object. This **must** be called after you finish processing the results
from :py:meth:`TornadoSession.query <queries.TornadoSession.query>` or
:py:meth:`TornadoSession.callproc <queries.TornadoSession.callproc>`
or the connection will not be able to be reused by other asynchronous
requests.
"""
self._freed = True
self._cleanup(self.cursor, self._fd)
|
def match(self, models, results, relation):
"""
Match the eagerly loaded results to their parents.
:type models: list
:type results: Collection
:type relation: str
"""
foreign = self._foreign_key
other = self._other_key
dictionary = {}
for result in results:
dictionary[result.get_attribute(other)] = result
for model in models:
value = model.get_attribute(foreign)
if value in dictionary:
model.set_relation(relation, dictionary[value])
return models
|
def function[match, parameter[self, models, results, relation]]:
constant[
Match the eagerly loaded results to their parents.
:type models: list
:type results: Collection
:type relation: str
]
variable[foreign] assign[=] name[self]._foreign_key
variable[other] assign[=] name[self]._other_key
variable[dictionary] assign[=] dictionary[[], []]
for taget[name[result]] in starred[name[results]] begin[:]
call[name[dictionary]][call[name[result].get_attribute, parameter[name[other]]]] assign[=] name[result]
for taget[name[model]] in starred[name[models]] begin[:]
variable[value] assign[=] call[name[model].get_attribute, parameter[name[foreign]]]
if compare[name[value] in name[dictionary]] begin[:]
call[name[model].set_relation, parameter[name[relation], call[name[dictionary]][name[value]]]]
return[name[models]]
|
keyword[def] identifier[match] ( identifier[self] , identifier[models] , identifier[results] , identifier[relation] ):
literal[string]
identifier[foreign] = identifier[self] . identifier[_foreign_key]
identifier[other] = identifier[self] . identifier[_other_key]
identifier[dictionary] ={}
keyword[for] identifier[result] keyword[in] identifier[results] :
identifier[dictionary] [ identifier[result] . identifier[get_attribute] ( identifier[other] )]= identifier[result]
keyword[for] identifier[model] keyword[in] identifier[models] :
identifier[value] = identifier[model] . identifier[get_attribute] ( identifier[foreign] )
keyword[if] identifier[value] keyword[in] identifier[dictionary] :
identifier[model] . identifier[set_relation] ( identifier[relation] , identifier[dictionary] [ identifier[value] ])
keyword[return] identifier[models]
|
def match(self, models, results, relation):
"""
Match the eagerly loaded results to their parents.
:type models: list
:type results: Collection
:type relation: str
"""
foreign = self._foreign_key
other = self._other_key
dictionary = {}
for result in results:
dictionary[result.get_attribute(other)] = result # depends on [control=['for'], data=['result']]
for model in models:
value = model.get_attribute(foreign)
if value in dictionary:
model.set_relation(relation, dictionary[value]) # depends on [control=['if'], data=['value', 'dictionary']] # depends on [control=['for'], data=['model']]
return models
|
def render(self, **kwargs):
""" Make breadcrumbs for a route
:param kwargs: dictionary of named arguments used to construct the view
:type kwargs: dict
:return: List of dict items the view can use to construct the link.
:rtype: {str: list({ "link": str, "title", str, "args", dict})}
"""
breadcrumbs = []
# this is the list of items we want to accumulate in the breadcrumb trail.
# item[0] is the key into the kwargs["url"] object and item[1] is the name of the route
# setting a route name to None means that it's needed to construct the route of the next item in the list
# but shouldn't be included in the list itself (this is currently the case for work --
# at some point we probably should include work in the navigation)
breadcrumbs = []
if "collections" in kwargs:
breadcrumbs = [{
"title": "Text Collections",
"link": ".r_collections",
"args": {}
}]
if "parents" in kwargs["collections"]:
breadcrumbs += [
{
"title": parent["label"],
"link": ".r_collection_semantic",
"args": {
"objectId": parent["id"],
"semantic": f_slugify(parent["label"]),
},
}
for parent in kwargs["collections"]["parents"]
][::-1]
if "current" in kwargs["collections"]:
breadcrumbs.append({
"title": kwargs["collections"]["current"]["label"],
"link": None,
"args": {}
})
# don't link the last item in the trail
if len(breadcrumbs) > 0:
breadcrumbs[-1]["link"] = None
return {"breadcrumbs": breadcrumbs}
|
def function[render, parameter[self]]:
constant[ Make breadcrumbs for a route
:param kwargs: dictionary of named arguments used to construct the view
:type kwargs: dict
:return: List of dict items the view can use to construct the link.
:rtype: {str: list({ "link": str, "title", str, "args", dict})}
]
variable[breadcrumbs] assign[=] list[[]]
variable[breadcrumbs] assign[=] list[[]]
if compare[constant[collections] in name[kwargs]] begin[:]
variable[breadcrumbs] assign[=] list[[<ast.Dict object at 0x7da1affbdb10>]]
if compare[constant[parents] in call[name[kwargs]][constant[collections]]] begin[:]
<ast.AugAssign object at 0x7da1affbdde0>
if compare[constant[current] in call[name[kwargs]][constant[collections]]] begin[:]
call[name[breadcrumbs].append, parameter[dictionary[[<ast.Constant object at 0x7da1affbd300>, <ast.Constant object at 0x7da1affbd360>, <ast.Constant object at 0x7da1affbd2d0>], [<ast.Subscript object at 0x7da1affbd270>, <ast.Constant object at 0x7da1b0022ce0>, <ast.Dict object at 0x7da1b0023100>]]]]
if compare[call[name[len], parameter[name[breadcrumbs]]] greater[>] constant[0]] begin[:]
call[call[name[breadcrumbs]][<ast.UnaryOp object at 0x7da1b0023940>]][constant[link]] assign[=] constant[None]
return[dictionary[[<ast.Constant object at 0x7da1b0021c00>], [<ast.Name object at 0x7da1b0022a10>]]]
|
keyword[def] identifier[render] ( identifier[self] ,** identifier[kwargs] ):
literal[string]
identifier[breadcrumbs] =[]
identifier[breadcrumbs] =[]
keyword[if] literal[string] keyword[in] identifier[kwargs] :
identifier[breadcrumbs] =[{
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] :{}
}]
keyword[if] literal[string] keyword[in] identifier[kwargs] [ literal[string] ]:
identifier[breadcrumbs] +=[
{
literal[string] : identifier[parent] [ literal[string] ],
literal[string] : literal[string] ,
literal[string] :{
literal[string] : identifier[parent] [ literal[string] ],
literal[string] : identifier[f_slugify] ( identifier[parent] [ literal[string] ]),
},
}
keyword[for] identifier[parent] keyword[in] identifier[kwargs] [ literal[string] ][ literal[string] ]
][::- literal[int] ]
keyword[if] literal[string] keyword[in] identifier[kwargs] [ literal[string] ]:
identifier[breadcrumbs] . identifier[append] ({
literal[string] : identifier[kwargs] [ literal[string] ][ literal[string] ][ literal[string] ],
literal[string] : keyword[None] ,
literal[string] :{}
})
keyword[if] identifier[len] ( identifier[breadcrumbs] )> literal[int] :
identifier[breadcrumbs] [- literal[int] ][ literal[string] ]= keyword[None]
keyword[return] { literal[string] : identifier[breadcrumbs] }
|
def render(self, **kwargs):
""" Make breadcrumbs for a route
:param kwargs: dictionary of named arguments used to construct the view
:type kwargs: dict
:return: List of dict items the view can use to construct the link.
:rtype: {str: list({ "link": str, "title", str, "args", dict})}
"""
breadcrumbs = []
# this is the list of items we want to accumulate in the breadcrumb trail.
# item[0] is the key into the kwargs["url"] object and item[1] is the name of the route
# setting a route name to None means that it's needed to construct the route of the next item in the list
# but shouldn't be included in the list itself (this is currently the case for work --
# at some point we probably should include work in the navigation)
breadcrumbs = []
if 'collections' in kwargs:
breadcrumbs = [{'title': 'Text Collections', 'link': '.r_collections', 'args': {}}]
if 'parents' in kwargs['collections']:
breadcrumbs += [{'title': parent['label'], 'link': '.r_collection_semantic', 'args': {'objectId': parent['id'], 'semantic': f_slugify(parent['label'])}} for parent in kwargs['collections']['parents']][::-1] # depends on [control=['if'], data=[]]
if 'current' in kwargs['collections']:
breadcrumbs.append({'title': kwargs['collections']['current']['label'], 'link': None, 'args': {}}) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['kwargs']]
# don't link the last item in the trail
if len(breadcrumbs) > 0:
breadcrumbs[-1]['link'] = None # depends on [control=['if'], data=[]]
return {'breadcrumbs': breadcrumbs}
|
def filter_inactive_ports(query):
"""Filter ports that aren't in active status """
port_model = models_v2.Port
query = (query
.filter(port_model.status == n_const.PORT_STATUS_ACTIVE))
return query
|
def function[filter_inactive_ports, parameter[query]]:
constant[Filter ports that aren't in active status ]
variable[port_model] assign[=] name[models_v2].Port
variable[query] assign[=] call[name[query].filter, parameter[compare[name[port_model].status equal[==] name[n_const].PORT_STATUS_ACTIVE]]]
return[name[query]]
|
keyword[def] identifier[filter_inactive_ports] ( identifier[query] ):
literal[string]
identifier[port_model] = identifier[models_v2] . identifier[Port]
identifier[query] =( identifier[query]
. identifier[filter] ( identifier[port_model] . identifier[status] == identifier[n_const] . identifier[PORT_STATUS_ACTIVE] ))
keyword[return] identifier[query]
|
def filter_inactive_ports(query):
"""Filter ports that aren't in active status """
port_model = models_v2.Port
query = query.filter(port_model.status == n_const.PORT_STATUS_ACTIVE)
return query
|
def _shift_wavelengths(model1, model2):
"""One of the models is either ``RedshiftScaleFactor`` or ``Scale``.
Possible combos::
RedshiftScaleFactor | Model
Scale | Model
Model | Scale
"""
if isinstance(model1, _models.RedshiftScaleFactor):
val = _get_sampleset(model2)
if val is None:
w = val
else:
w = model1.inverse(val)
elif isinstance(model1, _models.Scale):
w = _get_sampleset(model2)
else:
w = _get_sampleset(model1)
return w
|
def function[_shift_wavelengths, parameter[model1, model2]]:
constant[One of the models is either ``RedshiftScaleFactor`` or ``Scale``.
Possible combos::
RedshiftScaleFactor | Model
Scale | Model
Model | Scale
]
if call[name[isinstance], parameter[name[model1], name[_models].RedshiftScaleFactor]] begin[:]
variable[val] assign[=] call[name[_get_sampleset], parameter[name[model2]]]
if compare[name[val] is constant[None]] begin[:]
variable[w] assign[=] name[val]
return[name[w]]
|
keyword[def] identifier[_shift_wavelengths] ( identifier[model1] , identifier[model2] ):
literal[string]
keyword[if] identifier[isinstance] ( identifier[model1] , identifier[_models] . identifier[RedshiftScaleFactor] ):
identifier[val] = identifier[_get_sampleset] ( identifier[model2] )
keyword[if] identifier[val] keyword[is] keyword[None] :
identifier[w] = identifier[val]
keyword[else] :
identifier[w] = identifier[model1] . identifier[inverse] ( identifier[val] )
keyword[elif] identifier[isinstance] ( identifier[model1] , identifier[_models] . identifier[Scale] ):
identifier[w] = identifier[_get_sampleset] ( identifier[model2] )
keyword[else] :
identifier[w] = identifier[_get_sampleset] ( identifier[model1] )
keyword[return] identifier[w]
|
def _shift_wavelengths(model1, model2):
"""One of the models is either ``RedshiftScaleFactor`` or ``Scale``.
Possible combos::
RedshiftScaleFactor | Model
Scale | Model
Model | Scale
"""
if isinstance(model1, _models.RedshiftScaleFactor):
val = _get_sampleset(model2)
if val is None:
w = val # depends on [control=['if'], data=['val']]
else:
w = model1.inverse(val) # depends on [control=['if'], data=[]]
elif isinstance(model1, _models.Scale):
w = _get_sampleset(model2) # depends on [control=['if'], data=[]]
else:
w = _get_sampleset(model1)
return w
|
def add_term(set_id, term, access_token):
"""Add the given term to the given set.
:param term: Instance of Term.
"""
api_call('post', 'sets/{}/terms'.format(set_id), term.to_dict(), access_token=access_token)
|
def function[add_term, parameter[set_id, term, access_token]]:
constant[Add the given term to the given set.
:param term: Instance of Term.
]
call[name[api_call], parameter[constant[post], call[constant[sets/{}/terms].format, parameter[name[set_id]]], call[name[term].to_dict, parameter[]]]]
|
keyword[def] identifier[add_term] ( identifier[set_id] , identifier[term] , identifier[access_token] ):
literal[string]
identifier[api_call] ( literal[string] , literal[string] . identifier[format] ( identifier[set_id] ), identifier[term] . identifier[to_dict] (), identifier[access_token] = identifier[access_token] )
|
def add_term(set_id, term, access_token):
"""Add the given term to the given set.
:param term: Instance of Term.
"""
api_call('post', 'sets/{}/terms'.format(set_id), term.to_dict(), access_token=access_token)
|
def __ensure_provisioning_reads(table_name, key_name, num_consec_read_checks):
""" Ensure that provisioning is correct
:type table_name: str
:param table_name: Name of the DynamoDB table
:type key_name: str
:param key_name: Configuration option key name
:type num_consec_read_checks: int
:param num_consec_read_checks: How many consecutive checks have we had
:returns: (bool, int, int)
update_needed, updated_read_units, num_consec_read_checks
"""
if not get_table_option(key_name, 'enable_reads_autoscaling'):
logger.info(
'{0} - Autoscaling of reads has been disabled'.format(table_name))
return False, dynamodb.get_provisioned_table_read_units(table_name), 0
update_needed = False
try:
lookback_window_start = get_table_option(
key_name, 'lookback_window_start')
lookback_period = get_table_option(key_name, 'lookback_period')
current_read_units = dynamodb.get_provisioned_table_read_units(
table_name)
consumed_read_units_percent = \
table_stats.get_consumed_read_units_percent(
table_name, lookback_window_start, lookback_period)
throttled_read_count = \
table_stats.get_throttled_read_event_count(
table_name, lookback_window_start, lookback_period)
throttled_by_provisioned_read_percent = \
table_stats.get_throttled_by_provisioned_read_event_percent(
table_name, lookback_window_start, lookback_period)
throttled_by_consumed_read_percent = \
table_stats.get_throttled_by_consumed_read_percent(
table_name, lookback_window_start, lookback_period)
reads_upper_threshold = \
get_table_option(key_name, 'reads_upper_threshold')
reads_lower_threshold = \
get_table_option(key_name, 'reads_lower_threshold')
throttled_reads_upper_threshold = \
get_table_option(key_name, 'throttled_reads_upper_threshold')
increase_reads_with = \
get_table_option(key_name, 'increase_reads_with')
increase_reads_unit = \
get_table_option(key_name, 'increase_reads_unit')
decrease_reads_with = \
get_table_option(key_name, 'decrease_reads_with')
decrease_reads_unit = \
get_table_option(key_name, 'decrease_reads_unit')
min_provisioned_reads = \
get_table_option(key_name, 'min_provisioned_reads')
max_provisioned_reads = \
get_table_option(key_name, 'max_provisioned_reads')
num_read_checks_before_scale_down = \
get_table_option(key_name, 'num_read_checks_before_scale_down')
num_read_checks_reset_percent = \
get_table_option(key_name, 'num_read_checks_reset_percent')
increase_throttled_by_provisioned_reads_unit = \
get_table_option(
key_name, 'increase_throttled_by_provisioned_reads_unit')
increase_throttled_by_provisioned_reads_scale = \
get_table_option(
key_name, 'increase_throttled_by_provisioned_reads_scale')
increase_throttled_by_consumed_reads_unit = \
get_table_option(
key_name, 'increase_throttled_by_consumed_reads_unit')
increase_throttled_by_consumed_reads_scale = \
get_table_option(
key_name, 'increase_throttled_by_consumed_reads_scale')
increase_consumed_reads_unit = \
get_table_option(key_name, 'increase_consumed_reads_unit')
increase_consumed_reads_with = \
get_table_option(key_name, 'increase_consumed_reads_with')
increase_consumed_reads_scale = \
get_table_option(key_name, 'increase_consumed_reads_scale')
decrease_consumed_reads_unit = \
get_table_option(key_name, 'decrease_consumed_reads_unit')
decrease_consumed_reads_with = \
get_table_option(key_name, 'decrease_consumed_reads_with')
decrease_consumed_reads_scale = \
get_table_option(key_name, 'decrease_consumed_reads_scale')
except JSONResponseError:
raise
except BotoServerError:
raise
# Set the updated units to the current read unit value
updated_read_units = current_read_units
# Reset consecutive reads if num_read_checks_reset_percent is reached
if num_read_checks_reset_percent:
if consumed_read_units_percent >= num_read_checks_reset_percent:
logger.info(
'{0} - Resetting the number of consecutive '
'read checks. Reason: Consumed percent {1} is '
'greater than reset percent: {2}'.format(
table_name,
consumed_read_units_percent,
num_read_checks_reset_percent))
num_consec_read_checks = 0
# Exit if up scaling has been disabled
if not get_table_option(key_name, 'enable_reads_up_scaling'):
logger.debug(
'{0} - Up scaling event detected. No action taken as scaling '
'up reads has been disabled in the configuration'.format(
table_name))
else:
# If local/granular values not specified use global values
increase_consumed_reads_unit = \
increase_consumed_reads_unit or increase_reads_unit
increase_throttled_by_provisioned_reads_unit = \
increase_throttled_by_provisioned_reads_unit or increase_reads_unit
increase_throttled_by_consumed_reads_unit = \
increase_throttled_by_consumed_reads_unit or increase_reads_unit
increase_consumed_reads_with = \
increase_consumed_reads_with or increase_reads_with
# Initialise variables to store calculated provisioning
throttled_by_provisioned_calculated_provisioning = scale_reader(
increase_throttled_by_provisioned_reads_scale,
throttled_by_provisioned_read_percent)
throttled_by_consumed_calculated_provisioning = scale_reader(
increase_throttled_by_consumed_reads_scale,
throttled_by_consumed_read_percent)
consumed_calculated_provisioning = scale_reader(
increase_consumed_reads_scale,
consumed_read_units_percent)
throttled_count_calculated_provisioning = 0
calculated_provisioning = 0
# Increase needed due to high throttled to provisioned ratio
if throttled_by_provisioned_calculated_provisioning:
if increase_throttled_by_provisioned_reads_unit == 'percent':
throttled_by_provisioned_calculated_provisioning = \
calculators.increase_reads_in_percent(
current_read_units,
throttled_by_provisioned_calculated_provisioning,
get_table_option(key_name, 'max_provisioned_reads'),
consumed_read_units_percent,
table_name)
else:
throttled_by_provisioned_calculated_provisioning = \
calculators.increase_reads_in_units(
current_read_units,
throttled_by_provisioned_calculated_provisioning,
get_table_option(key_name, 'max_provisioned_reads'),
consumed_read_units_percent,
table_name)
# Increase needed due to high throttled to consumed ratio
if throttled_by_consumed_calculated_provisioning:
if increase_throttled_by_consumed_reads_unit == 'percent':
throttled_by_consumed_calculated_provisioning = \
calculators.increase_reads_in_percent(
current_read_units,
throttled_by_consumed_calculated_provisioning,
get_table_option(key_name, 'max_provisioned_reads'),
consumed_read_units_percent,
table_name)
else:
throttled_by_consumed_calculated_provisioning = \
calculators.increase_reads_in_units(
current_read_units,
throttled_by_consumed_calculated_provisioning,
get_table_option(key_name, 'max_provisioned_reads'),
consumed_read_units_percent,
table_name)
# Increase needed due to high CU consumption
if consumed_calculated_provisioning:
if increase_consumed_reads_unit == 'percent':
consumed_calculated_provisioning = \
calculators.increase_reads_in_percent(
current_read_units,
consumed_calculated_provisioning,
get_table_option(key_name, 'max_provisioned_reads'),
consumed_read_units_percent,
table_name)
else:
consumed_calculated_provisioning = \
calculators.increase_reads_in_units(
current_read_units,
consumed_calculated_provisioning,
get_table_option(key_name, 'max_provisioned_reads'),
consumed_read_units_percent,
table_name)
elif (reads_upper_threshold
and consumed_read_units_percent > reads_upper_threshold
and not increase_consumed_reads_scale):
if increase_consumed_reads_unit == 'percent':
consumed_calculated_provisioning = \
calculators.increase_reads_in_percent(
current_read_units,
increase_consumed_reads_with,
get_table_option(key_name, 'max_provisioned_reads'),
consumed_read_units_percent,
table_name)
else:
consumed_calculated_provisioning = \
calculators.increase_reads_in_units(
current_read_units,
increase_consumed_reads_with,
get_table_option(key_name, 'max_provisioned_reads'),
consumed_read_units_percent,
table_name)
# Increase needed due to high throttling
if (throttled_reads_upper_threshold
and throttled_read_count > throttled_reads_upper_threshold):
if increase_reads_unit == 'percent':
throttled_count_calculated_provisioning = \
calculators.increase_reads_in_percent(
updated_read_units,
increase_consumed_reads_with,
get_table_option(key_name, 'max_provisioned_reads'),
consumed_read_units_percent,
table_name)
else:
throttled_count_calculated_provisioning = \
calculators.increase_reads_in_units(
updated_read_units,
increase_reads_with,
get_table_option(key_name, 'max_provisioned_reads'),
consumed_read_units_percent,
table_name)
# Determine which metric requires the most scaling
if (throttled_by_provisioned_calculated_provisioning
> calculated_provisioning):
calculated_provisioning = \
throttled_by_provisioned_calculated_provisioning
scale_reason = (
"due to throttled events by provisioned "
"units threshold being exceeded")
if (throttled_by_consumed_calculated_provisioning
> calculated_provisioning):
calculated_provisioning = \
throttled_by_consumed_calculated_provisioning
scale_reason = (
"due to throttled events by consumed "
"units threshold being exceeded")
if consumed_calculated_provisioning > calculated_provisioning:
calculated_provisioning = consumed_calculated_provisioning
scale_reason = "due to consumed threshold being exceeded"
if throttled_count_calculated_provisioning > calculated_provisioning:
calculated_provisioning = throttled_count_calculated_provisioning
scale_reason = "due to throttled events threshold being exceeded"
if calculated_provisioning > current_read_units:
logger.info(
'{0} - Resetting the number of consecutive '
'read checks. Reason: scale up {1}'.format(
table_name, scale_reason))
num_consec_read_checks = 0
update_needed = True
updated_read_units = calculated_provisioning
# Decrease needed due to low CU consumption
if not update_needed:
# If local/granular values not specified use global values
decrease_consumed_reads_unit = \
decrease_consumed_reads_unit or decrease_reads_unit
decrease_consumed_reads_with = \
decrease_consumed_reads_with or decrease_reads_with
# Initialise variables to store calculated provisioning
consumed_calculated_provisioning = scale_reader_decrease(
decrease_consumed_reads_scale,
consumed_read_units_percent)
calculated_provisioning = None
# Exit if down scaling has been disabled
if not get_table_option(key_name, 'enable_reads_down_scaling'):
logger.debug(
'{0} - Down scaling event detected. No action taken as scaling'
' down reads has been disabled in the configuration'.format(
table_name))
# Exit if reads == 0% and downscaling has been disabled at 0%
elif (consumed_read_units_percent == 0 and not
get_table_option(
key_name, 'allow_scaling_down_reads_on_0_percent')):
logger.info(
'{0} - Down scaling event detected. No action taken as scaling'
' down reads is not done when usage is at 0%'.format(
table_name))
else:
if consumed_calculated_provisioning:
if decrease_consumed_reads_unit == 'percent':
calculated_provisioning = \
calculators.decrease_reads_in_percent(
updated_read_units,
consumed_calculated_provisioning,
get_table_option(
key_name, 'min_provisioned_reads'),
table_name)
else:
calculated_provisioning = \
calculators.decrease_reads_in_units(
updated_read_units,
consumed_calculated_provisioning,
get_table_option(
key_name, 'min_provisioned_reads'),
table_name)
elif (reads_lower_threshold
and consumed_read_units_percent < reads_lower_threshold
and not decrease_consumed_reads_scale):
if decrease_consumed_reads_unit == 'percent':
calculated_provisioning = \
calculators.decrease_reads_in_percent(
updated_read_units,
decrease_consumed_reads_with,
get_table_option(
key_name, 'min_provisioned_reads'),
table_name)
else:
calculated_provisioning = \
calculators.decrease_reads_in_units(
updated_read_units,
decrease_consumed_reads_with,
get_table_option(
key_name, 'min_provisioned_reads'),
table_name)
if (calculated_provisioning
and current_read_units != calculated_provisioning):
num_consec_read_checks += 1
if num_consec_read_checks >= num_read_checks_before_scale_down:
update_needed = True
updated_read_units = calculated_provisioning
# Never go over the configured max provisioning
if max_provisioned_reads:
if int(updated_read_units) > int(max_provisioned_reads):
update_needed = True
updated_read_units = int(max_provisioned_reads)
logger.info(
'Will not increase writes over max-provisioned-reads '
'limit ({0} writes)'.format(updated_read_units))
# Ensure that we have met the min-provisioning
if min_provisioned_reads:
if int(min_provisioned_reads) > int(updated_read_units):
update_needed = True
updated_read_units = int(min_provisioned_reads)
logger.info(
'{0} - Increasing reads to meet min-provisioned-reads '
'limit ({1} reads)'.format(table_name, updated_read_units))
if calculators.is_consumed_over_proposed(
current_read_units,
updated_read_units,
consumed_read_units_percent):
update_needed = False
updated_read_units = current_read_units
logger.info(
'{0} - Consumed is over proposed read units. Will leave table at '
'current setting.'.format(table_name))
logger.info('{0} - Consecutive read checks {1}/{2}'.format(
table_name,
num_consec_read_checks,
num_read_checks_before_scale_down))
return update_needed, updated_read_units, num_consec_read_checks
|
def function[__ensure_provisioning_reads, parameter[table_name, key_name, num_consec_read_checks]]:
constant[ Ensure that provisioning is correct
:type table_name: str
:param table_name: Name of the DynamoDB table
:type key_name: str
:param key_name: Configuration option key name
:type num_consec_read_checks: int
:param num_consec_read_checks: How many consecutive checks have we had
:returns: (bool, int, int)
update_needed, updated_read_units, num_consec_read_checks
]
if <ast.UnaryOp object at 0x7da18f58de40> begin[:]
call[name[logger].info, parameter[call[constant[{0} - Autoscaling of reads has been disabled].format, parameter[name[table_name]]]]]
return[tuple[[<ast.Constant object at 0x7da18f58f970>, <ast.Call object at 0x7da18f58f8e0>, <ast.Constant object at 0x7da18f58c940>]]]
variable[update_needed] assign[=] constant[False]
<ast.Try object at 0x7da18f58d3f0>
variable[updated_read_units] assign[=] name[current_read_units]
if name[num_read_checks_reset_percent] begin[:]
if compare[name[consumed_read_units_percent] greater_or_equal[>=] name[num_read_checks_reset_percent]] begin[:]
call[name[logger].info, parameter[call[constant[{0} - Resetting the number of consecutive read checks. Reason: Consumed percent {1} is greater than reset percent: {2}].format, parameter[name[table_name], name[consumed_read_units_percent], name[num_read_checks_reset_percent]]]]]
variable[num_consec_read_checks] assign[=] constant[0]
if <ast.UnaryOp object at 0x7da204960850> begin[:]
call[name[logger].debug, parameter[call[constant[{0} - Up scaling event detected. No action taken as scaling up reads has been disabled in the configuration].format, parameter[name[table_name]]]]]
if <ast.UnaryOp object at 0x7da1b2344310> begin[:]
variable[decrease_consumed_reads_unit] assign[=] <ast.BoolOp object at 0x7da1b2344130>
variable[decrease_consumed_reads_with] assign[=] <ast.BoolOp object at 0x7da1b23440a0>
variable[consumed_calculated_provisioning] assign[=] call[name[scale_reader_decrease], parameter[name[decrease_consumed_reads_scale], name[consumed_read_units_percent]]]
variable[calculated_provisioning] assign[=] constant[None]
if <ast.UnaryOp object at 0x7da1b2344100> begin[:]
call[name[logger].debug, parameter[call[constant[{0} - Down scaling event detected. No action taken as scaling down reads has been disabled in the configuration].format, parameter[name[table_name]]]]]
if name[max_provisioned_reads] begin[:]
if compare[call[name[int], parameter[name[updated_read_units]]] greater[>] call[name[int], parameter[name[max_provisioned_reads]]]] begin[:]
variable[update_needed] assign[=] constant[True]
variable[updated_read_units] assign[=] call[name[int], parameter[name[max_provisioned_reads]]]
call[name[logger].info, parameter[call[constant[Will not increase writes over max-provisioned-reads limit ({0} writes)].format, parameter[name[updated_read_units]]]]]
if name[min_provisioned_reads] begin[:]
if compare[call[name[int], parameter[name[min_provisioned_reads]]] greater[>] call[name[int], parameter[name[updated_read_units]]]] begin[:]
variable[update_needed] assign[=] constant[True]
variable[updated_read_units] assign[=] call[name[int], parameter[name[min_provisioned_reads]]]
call[name[logger].info, parameter[call[constant[{0} - Increasing reads to meet min-provisioned-reads limit ({1} reads)].format, parameter[name[table_name], name[updated_read_units]]]]]
if call[name[calculators].is_consumed_over_proposed, parameter[name[current_read_units], name[updated_read_units], name[consumed_read_units_percent]]] begin[:]
variable[update_needed] assign[=] constant[False]
variable[updated_read_units] assign[=] name[current_read_units]
call[name[logger].info, parameter[call[constant[{0} - Consumed is over proposed read units. Will leave table at current setting.].format, parameter[name[table_name]]]]]
call[name[logger].info, parameter[call[constant[{0} - Consecutive read checks {1}/{2}].format, parameter[name[table_name], name[num_consec_read_checks], name[num_read_checks_before_scale_down]]]]]
return[tuple[[<ast.Name object at 0x7da20e9577f0>, <ast.Name object at 0x7da20e956410>, <ast.Name object at 0x7da20e956bf0>]]]
|
keyword[def] identifier[__ensure_provisioning_reads] ( identifier[table_name] , identifier[key_name] , identifier[num_consec_read_checks] ):
literal[string]
keyword[if] keyword[not] identifier[get_table_option] ( identifier[key_name] , literal[string] ):
identifier[logger] . identifier[info] (
literal[string] . identifier[format] ( identifier[table_name] ))
keyword[return] keyword[False] , identifier[dynamodb] . identifier[get_provisioned_table_read_units] ( identifier[table_name] ), literal[int]
identifier[update_needed] = keyword[False]
keyword[try] :
identifier[lookback_window_start] = identifier[get_table_option] (
identifier[key_name] , literal[string] )
identifier[lookback_period] = identifier[get_table_option] ( identifier[key_name] , literal[string] )
identifier[current_read_units] = identifier[dynamodb] . identifier[get_provisioned_table_read_units] (
identifier[table_name] )
identifier[consumed_read_units_percent] = identifier[table_stats] . identifier[get_consumed_read_units_percent] (
identifier[table_name] , identifier[lookback_window_start] , identifier[lookback_period] )
identifier[throttled_read_count] = identifier[table_stats] . identifier[get_throttled_read_event_count] (
identifier[table_name] , identifier[lookback_window_start] , identifier[lookback_period] )
identifier[throttled_by_provisioned_read_percent] = identifier[table_stats] . identifier[get_throttled_by_provisioned_read_event_percent] (
identifier[table_name] , identifier[lookback_window_start] , identifier[lookback_period] )
identifier[throttled_by_consumed_read_percent] = identifier[table_stats] . identifier[get_throttled_by_consumed_read_percent] (
identifier[table_name] , identifier[lookback_window_start] , identifier[lookback_period] )
identifier[reads_upper_threshold] = identifier[get_table_option] ( identifier[key_name] , literal[string] )
identifier[reads_lower_threshold] = identifier[get_table_option] ( identifier[key_name] , literal[string] )
identifier[throttled_reads_upper_threshold] = identifier[get_table_option] ( identifier[key_name] , literal[string] )
identifier[increase_reads_with] = identifier[get_table_option] ( identifier[key_name] , literal[string] )
identifier[increase_reads_unit] = identifier[get_table_option] ( identifier[key_name] , literal[string] )
identifier[decrease_reads_with] = identifier[get_table_option] ( identifier[key_name] , literal[string] )
identifier[decrease_reads_unit] = identifier[get_table_option] ( identifier[key_name] , literal[string] )
identifier[min_provisioned_reads] = identifier[get_table_option] ( identifier[key_name] , literal[string] )
identifier[max_provisioned_reads] = identifier[get_table_option] ( identifier[key_name] , literal[string] )
identifier[num_read_checks_before_scale_down] = identifier[get_table_option] ( identifier[key_name] , literal[string] )
identifier[num_read_checks_reset_percent] = identifier[get_table_option] ( identifier[key_name] , literal[string] )
identifier[increase_throttled_by_provisioned_reads_unit] = identifier[get_table_option] (
identifier[key_name] , literal[string] )
identifier[increase_throttled_by_provisioned_reads_scale] = identifier[get_table_option] (
identifier[key_name] , literal[string] )
identifier[increase_throttled_by_consumed_reads_unit] = identifier[get_table_option] (
identifier[key_name] , literal[string] )
identifier[increase_throttled_by_consumed_reads_scale] = identifier[get_table_option] (
identifier[key_name] , literal[string] )
identifier[increase_consumed_reads_unit] = identifier[get_table_option] ( identifier[key_name] , literal[string] )
identifier[increase_consumed_reads_with] = identifier[get_table_option] ( identifier[key_name] , literal[string] )
identifier[increase_consumed_reads_scale] = identifier[get_table_option] ( identifier[key_name] , literal[string] )
identifier[decrease_consumed_reads_unit] = identifier[get_table_option] ( identifier[key_name] , literal[string] )
identifier[decrease_consumed_reads_with] = identifier[get_table_option] ( identifier[key_name] , literal[string] )
identifier[decrease_consumed_reads_scale] = identifier[get_table_option] ( identifier[key_name] , literal[string] )
keyword[except] identifier[JSONResponseError] :
keyword[raise]
keyword[except] identifier[BotoServerError] :
keyword[raise]
identifier[updated_read_units] = identifier[current_read_units]
keyword[if] identifier[num_read_checks_reset_percent] :
keyword[if] identifier[consumed_read_units_percent] >= identifier[num_read_checks_reset_percent] :
identifier[logger] . identifier[info] (
literal[string]
literal[string]
literal[string] . identifier[format] (
identifier[table_name] ,
identifier[consumed_read_units_percent] ,
identifier[num_read_checks_reset_percent] ))
identifier[num_consec_read_checks] = literal[int]
keyword[if] keyword[not] identifier[get_table_option] ( identifier[key_name] , literal[string] ):
identifier[logger] . identifier[debug] (
literal[string]
literal[string] . identifier[format] (
identifier[table_name] ))
keyword[else] :
identifier[increase_consumed_reads_unit] = identifier[increase_consumed_reads_unit] keyword[or] identifier[increase_reads_unit]
identifier[increase_throttled_by_provisioned_reads_unit] = identifier[increase_throttled_by_provisioned_reads_unit] keyword[or] identifier[increase_reads_unit]
identifier[increase_throttled_by_consumed_reads_unit] = identifier[increase_throttled_by_consumed_reads_unit] keyword[or] identifier[increase_reads_unit]
identifier[increase_consumed_reads_with] = identifier[increase_consumed_reads_with] keyword[or] identifier[increase_reads_with]
identifier[throttled_by_provisioned_calculated_provisioning] = identifier[scale_reader] (
identifier[increase_throttled_by_provisioned_reads_scale] ,
identifier[throttled_by_provisioned_read_percent] )
identifier[throttled_by_consumed_calculated_provisioning] = identifier[scale_reader] (
identifier[increase_throttled_by_consumed_reads_scale] ,
identifier[throttled_by_consumed_read_percent] )
identifier[consumed_calculated_provisioning] = identifier[scale_reader] (
identifier[increase_consumed_reads_scale] ,
identifier[consumed_read_units_percent] )
identifier[throttled_count_calculated_provisioning] = literal[int]
identifier[calculated_provisioning] = literal[int]
keyword[if] identifier[throttled_by_provisioned_calculated_provisioning] :
keyword[if] identifier[increase_throttled_by_provisioned_reads_unit] == literal[string] :
identifier[throttled_by_provisioned_calculated_provisioning] = identifier[calculators] . identifier[increase_reads_in_percent] (
identifier[current_read_units] ,
identifier[throttled_by_provisioned_calculated_provisioning] ,
identifier[get_table_option] ( identifier[key_name] , literal[string] ),
identifier[consumed_read_units_percent] ,
identifier[table_name] )
keyword[else] :
identifier[throttled_by_provisioned_calculated_provisioning] = identifier[calculators] . identifier[increase_reads_in_units] (
identifier[current_read_units] ,
identifier[throttled_by_provisioned_calculated_provisioning] ,
identifier[get_table_option] ( identifier[key_name] , literal[string] ),
identifier[consumed_read_units_percent] ,
identifier[table_name] )
keyword[if] identifier[throttled_by_consumed_calculated_provisioning] :
keyword[if] identifier[increase_throttled_by_consumed_reads_unit] == literal[string] :
identifier[throttled_by_consumed_calculated_provisioning] = identifier[calculators] . identifier[increase_reads_in_percent] (
identifier[current_read_units] ,
identifier[throttled_by_consumed_calculated_provisioning] ,
identifier[get_table_option] ( identifier[key_name] , literal[string] ),
identifier[consumed_read_units_percent] ,
identifier[table_name] )
keyword[else] :
identifier[throttled_by_consumed_calculated_provisioning] = identifier[calculators] . identifier[increase_reads_in_units] (
identifier[current_read_units] ,
identifier[throttled_by_consumed_calculated_provisioning] ,
identifier[get_table_option] ( identifier[key_name] , literal[string] ),
identifier[consumed_read_units_percent] ,
identifier[table_name] )
keyword[if] identifier[consumed_calculated_provisioning] :
keyword[if] identifier[increase_consumed_reads_unit] == literal[string] :
identifier[consumed_calculated_provisioning] = identifier[calculators] . identifier[increase_reads_in_percent] (
identifier[current_read_units] ,
identifier[consumed_calculated_provisioning] ,
identifier[get_table_option] ( identifier[key_name] , literal[string] ),
identifier[consumed_read_units_percent] ,
identifier[table_name] )
keyword[else] :
identifier[consumed_calculated_provisioning] = identifier[calculators] . identifier[increase_reads_in_units] (
identifier[current_read_units] ,
identifier[consumed_calculated_provisioning] ,
identifier[get_table_option] ( identifier[key_name] , literal[string] ),
identifier[consumed_read_units_percent] ,
identifier[table_name] )
keyword[elif] ( identifier[reads_upper_threshold]
keyword[and] identifier[consumed_read_units_percent] > identifier[reads_upper_threshold]
keyword[and] keyword[not] identifier[increase_consumed_reads_scale] ):
keyword[if] identifier[increase_consumed_reads_unit] == literal[string] :
identifier[consumed_calculated_provisioning] = identifier[calculators] . identifier[increase_reads_in_percent] (
identifier[current_read_units] ,
identifier[increase_consumed_reads_with] ,
identifier[get_table_option] ( identifier[key_name] , literal[string] ),
identifier[consumed_read_units_percent] ,
identifier[table_name] )
keyword[else] :
identifier[consumed_calculated_provisioning] = identifier[calculators] . identifier[increase_reads_in_units] (
identifier[current_read_units] ,
identifier[increase_consumed_reads_with] ,
identifier[get_table_option] ( identifier[key_name] , literal[string] ),
identifier[consumed_read_units_percent] ,
identifier[table_name] )
keyword[if] ( identifier[throttled_reads_upper_threshold]
keyword[and] identifier[throttled_read_count] > identifier[throttled_reads_upper_threshold] ):
keyword[if] identifier[increase_reads_unit] == literal[string] :
identifier[throttled_count_calculated_provisioning] = identifier[calculators] . identifier[increase_reads_in_percent] (
identifier[updated_read_units] ,
identifier[increase_consumed_reads_with] ,
identifier[get_table_option] ( identifier[key_name] , literal[string] ),
identifier[consumed_read_units_percent] ,
identifier[table_name] )
keyword[else] :
identifier[throttled_count_calculated_provisioning] = identifier[calculators] . identifier[increase_reads_in_units] (
identifier[updated_read_units] ,
identifier[increase_reads_with] ,
identifier[get_table_option] ( identifier[key_name] , literal[string] ),
identifier[consumed_read_units_percent] ,
identifier[table_name] )
keyword[if] ( identifier[throttled_by_provisioned_calculated_provisioning]
> identifier[calculated_provisioning] ):
identifier[calculated_provisioning] = identifier[throttled_by_provisioned_calculated_provisioning]
identifier[scale_reason] =(
literal[string]
literal[string] )
keyword[if] ( identifier[throttled_by_consumed_calculated_provisioning]
> identifier[calculated_provisioning] ):
identifier[calculated_provisioning] = identifier[throttled_by_consumed_calculated_provisioning]
identifier[scale_reason] =(
literal[string]
literal[string] )
keyword[if] identifier[consumed_calculated_provisioning] > identifier[calculated_provisioning] :
identifier[calculated_provisioning] = identifier[consumed_calculated_provisioning]
identifier[scale_reason] = literal[string]
keyword[if] identifier[throttled_count_calculated_provisioning] > identifier[calculated_provisioning] :
identifier[calculated_provisioning] = identifier[throttled_count_calculated_provisioning]
identifier[scale_reason] = literal[string]
keyword[if] identifier[calculated_provisioning] > identifier[current_read_units] :
identifier[logger] . identifier[info] (
literal[string]
literal[string] . identifier[format] (
identifier[table_name] , identifier[scale_reason] ))
identifier[num_consec_read_checks] = literal[int]
identifier[update_needed] = keyword[True]
identifier[updated_read_units] = identifier[calculated_provisioning]
keyword[if] keyword[not] identifier[update_needed] :
identifier[decrease_consumed_reads_unit] = identifier[decrease_consumed_reads_unit] keyword[or] identifier[decrease_reads_unit]
identifier[decrease_consumed_reads_with] = identifier[decrease_consumed_reads_with] keyword[or] identifier[decrease_reads_with]
identifier[consumed_calculated_provisioning] = identifier[scale_reader_decrease] (
identifier[decrease_consumed_reads_scale] ,
identifier[consumed_read_units_percent] )
identifier[calculated_provisioning] = keyword[None]
keyword[if] keyword[not] identifier[get_table_option] ( identifier[key_name] , literal[string] ):
identifier[logger] . identifier[debug] (
literal[string]
literal[string] . identifier[format] (
identifier[table_name] ))
keyword[elif] ( identifier[consumed_read_units_percent] == literal[int] keyword[and] keyword[not]
identifier[get_table_option] (
identifier[key_name] , literal[string] )):
identifier[logger] . identifier[info] (
literal[string]
literal[string] . identifier[format] (
identifier[table_name] ))
keyword[else] :
keyword[if] identifier[consumed_calculated_provisioning] :
keyword[if] identifier[decrease_consumed_reads_unit] == literal[string] :
identifier[calculated_provisioning] = identifier[calculators] . identifier[decrease_reads_in_percent] (
identifier[updated_read_units] ,
identifier[consumed_calculated_provisioning] ,
identifier[get_table_option] (
identifier[key_name] , literal[string] ),
identifier[table_name] )
keyword[else] :
identifier[calculated_provisioning] = identifier[calculators] . identifier[decrease_reads_in_units] (
identifier[updated_read_units] ,
identifier[consumed_calculated_provisioning] ,
identifier[get_table_option] (
identifier[key_name] , literal[string] ),
identifier[table_name] )
keyword[elif] ( identifier[reads_lower_threshold]
keyword[and] identifier[consumed_read_units_percent] < identifier[reads_lower_threshold]
keyword[and] keyword[not] identifier[decrease_consumed_reads_scale] ):
keyword[if] identifier[decrease_consumed_reads_unit] == literal[string] :
identifier[calculated_provisioning] = identifier[calculators] . identifier[decrease_reads_in_percent] (
identifier[updated_read_units] ,
identifier[decrease_consumed_reads_with] ,
identifier[get_table_option] (
identifier[key_name] , literal[string] ),
identifier[table_name] )
keyword[else] :
identifier[calculated_provisioning] = identifier[calculators] . identifier[decrease_reads_in_units] (
identifier[updated_read_units] ,
identifier[decrease_consumed_reads_with] ,
identifier[get_table_option] (
identifier[key_name] , literal[string] ),
identifier[table_name] )
keyword[if] ( identifier[calculated_provisioning]
keyword[and] identifier[current_read_units] != identifier[calculated_provisioning] ):
identifier[num_consec_read_checks] += literal[int]
keyword[if] identifier[num_consec_read_checks] >= identifier[num_read_checks_before_scale_down] :
identifier[update_needed] = keyword[True]
identifier[updated_read_units] = identifier[calculated_provisioning]
keyword[if] identifier[max_provisioned_reads] :
keyword[if] identifier[int] ( identifier[updated_read_units] )> identifier[int] ( identifier[max_provisioned_reads] ):
identifier[update_needed] = keyword[True]
identifier[updated_read_units] = identifier[int] ( identifier[max_provisioned_reads] )
identifier[logger] . identifier[info] (
literal[string]
literal[string] . identifier[format] ( identifier[updated_read_units] ))
keyword[if] identifier[min_provisioned_reads] :
keyword[if] identifier[int] ( identifier[min_provisioned_reads] )> identifier[int] ( identifier[updated_read_units] ):
identifier[update_needed] = keyword[True]
identifier[updated_read_units] = identifier[int] ( identifier[min_provisioned_reads] )
identifier[logger] . identifier[info] (
literal[string]
literal[string] . identifier[format] ( identifier[table_name] , identifier[updated_read_units] ))
keyword[if] identifier[calculators] . identifier[is_consumed_over_proposed] (
identifier[current_read_units] ,
identifier[updated_read_units] ,
identifier[consumed_read_units_percent] ):
identifier[update_needed] = keyword[False]
identifier[updated_read_units] = identifier[current_read_units]
identifier[logger] . identifier[info] (
literal[string]
literal[string] . identifier[format] ( identifier[table_name] ))
identifier[logger] . identifier[info] ( literal[string] . identifier[format] (
identifier[table_name] ,
identifier[num_consec_read_checks] ,
identifier[num_read_checks_before_scale_down] ))
keyword[return] identifier[update_needed] , identifier[updated_read_units] , identifier[num_consec_read_checks]
|
def __ensure_provisioning_reads(table_name, key_name, num_consec_read_checks):
""" Ensure that provisioning is correct
:type table_name: str
:param table_name: Name of the DynamoDB table
:type key_name: str
:param key_name: Configuration option key name
:type num_consec_read_checks: int
:param num_consec_read_checks: How many consecutive checks have we had
:returns: (bool, int, int)
update_needed, updated_read_units, num_consec_read_checks
"""
if not get_table_option(key_name, 'enable_reads_autoscaling'):
logger.info('{0} - Autoscaling of reads has been disabled'.format(table_name))
return (False, dynamodb.get_provisioned_table_read_units(table_name), 0) # depends on [control=['if'], data=[]]
update_needed = False
try:
lookback_window_start = get_table_option(key_name, 'lookback_window_start')
lookback_period = get_table_option(key_name, 'lookback_period')
current_read_units = dynamodb.get_provisioned_table_read_units(table_name)
consumed_read_units_percent = table_stats.get_consumed_read_units_percent(table_name, lookback_window_start, lookback_period)
throttled_read_count = table_stats.get_throttled_read_event_count(table_name, lookback_window_start, lookback_period)
throttled_by_provisioned_read_percent = table_stats.get_throttled_by_provisioned_read_event_percent(table_name, lookback_window_start, lookback_period)
throttled_by_consumed_read_percent = table_stats.get_throttled_by_consumed_read_percent(table_name, lookback_window_start, lookback_period)
reads_upper_threshold = get_table_option(key_name, 'reads_upper_threshold')
reads_lower_threshold = get_table_option(key_name, 'reads_lower_threshold')
throttled_reads_upper_threshold = get_table_option(key_name, 'throttled_reads_upper_threshold')
increase_reads_with = get_table_option(key_name, 'increase_reads_with')
increase_reads_unit = get_table_option(key_name, 'increase_reads_unit')
decrease_reads_with = get_table_option(key_name, 'decrease_reads_with')
decrease_reads_unit = get_table_option(key_name, 'decrease_reads_unit')
min_provisioned_reads = get_table_option(key_name, 'min_provisioned_reads')
max_provisioned_reads = get_table_option(key_name, 'max_provisioned_reads')
num_read_checks_before_scale_down = get_table_option(key_name, 'num_read_checks_before_scale_down')
num_read_checks_reset_percent = get_table_option(key_name, 'num_read_checks_reset_percent')
increase_throttled_by_provisioned_reads_unit = get_table_option(key_name, 'increase_throttled_by_provisioned_reads_unit')
increase_throttled_by_provisioned_reads_scale = get_table_option(key_name, 'increase_throttled_by_provisioned_reads_scale')
increase_throttled_by_consumed_reads_unit = get_table_option(key_name, 'increase_throttled_by_consumed_reads_unit')
increase_throttled_by_consumed_reads_scale = get_table_option(key_name, 'increase_throttled_by_consumed_reads_scale')
increase_consumed_reads_unit = get_table_option(key_name, 'increase_consumed_reads_unit')
increase_consumed_reads_with = get_table_option(key_name, 'increase_consumed_reads_with')
increase_consumed_reads_scale = get_table_option(key_name, 'increase_consumed_reads_scale')
decrease_consumed_reads_unit = get_table_option(key_name, 'decrease_consumed_reads_unit')
decrease_consumed_reads_with = get_table_option(key_name, 'decrease_consumed_reads_with')
decrease_consumed_reads_scale = get_table_option(key_name, 'decrease_consumed_reads_scale') # depends on [control=['try'], data=[]]
except JSONResponseError:
raise # depends on [control=['except'], data=[]]
except BotoServerError:
raise # depends on [control=['except'], data=[]]
# Set the updated units to the current read unit value
updated_read_units = current_read_units
# Reset consecutive reads if num_read_checks_reset_percent is reached
if num_read_checks_reset_percent:
if consumed_read_units_percent >= num_read_checks_reset_percent:
logger.info('{0} - Resetting the number of consecutive read checks. Reason: Consumed percent {1} is greater than reset percent: {2}'.format(table_name, consumed_read_units_percent, num_read_checks_reset_percent))
num_consec_read_checks = 0 # depends on [control=['if'], data=['consumed_read_units_percent', 'num_read_checks_reset_percent']] # depends on [control=['if'], data=[]]
# Exit if up scaling has been disabled
if not get_table_option(key_name, 'enable_reads_up_scaling'):
logger.debug('{0} - Up scaling event detected. No action taken as scaling up reads has been disabled in the configuration'.format(table_name)) # depends on [control=['if'], data=[]]
else:
# If local/granular values not specified use global values
increase_consumed_reads_unit = increase_consumed_reads_unit or increase_reads_unit
increase_throttled_by_provisioned_reads_unit = increase_throttled_by_provisioned_reads_unit or increase_reads_unit
increase_throttled_by_consumed_reads_unit = increase_throttled_by_consumed_reads_unit or increase_reads_unit
increase_consumed_reads_with = increase_consumed_reads_with or increase_reads_with
# Initialise variables to store calculated provisioning
throttled_by_provisioned_calculated_provisioning = scale_reader(increase_throttled_by_provisioned_reads_scale, throttled_by_provisioned_read_percent)
throttled_by_consumed_calculated_provisioning = scale_reader(increase_throttled_by_consumed_reads_scale, throttled_by_consumed_read_percent)
consumed_calculated_provisioning = scale_reader(increase_consumed_reads_scale, consumed_read_units_percent)
throttled_count_calculated_provisioning = 0
calculated_provisioning = 0
# Increase needed due to high throttled to provisioned ratio
if throttled_by_provisioned_calculated_provisioning:
if increase_throttled_by_provisioned_reads_unit == 'percent':
throttled_by_provisioned_calculated_provisioning = calculators.increase_reads_in_percent(current_read_units, throttled_by_provisioned_calculated_provisioning, get_table_option(key_name, 'max_provisioned_reads'), consumed_read_units_percent, table_name) # depends on [control=['if'], data=[]]
else:
throttled_by_provisioned_calculated_provisioning = calculators.increase_reads_in_units(current_read_units, throttled_by_provisioned_calculated_provisioning, get_table_option(key_name, 'max_provisioned_reads'), consumed_read_units_percent, table_name) # depends on [control=['if'], data=[]]
# Increase needed due to high throttled to consumed ratio
if throttled_by_consumed_calculated_provisioning:
if increase_throttled_by_consumed_reads_unit == 'percent':
throttled_by_consumed_calculated_provisioning = calculators.increase_reads_in_percent(current_read_units, throttled_by_consumed_calculated_provisioning, get_table_option(key_name, 'max_provisioned_reads'), consumed_read_units_percent, table_name) # depends on [control=['if'], data=[]]
else:
throttled_by_consumed_calculated_provisioning = calculators.increase_reads_in_units(current_read_units, throttled_by_consumed_calculated_provisioning, get_table_option(key_name, 'max_provisioned_reads'), consumed_read_units_percent, table_name) # depends on [control=['if'], data=[]]
# Increase needed due to high CU consumption
if consumed_calculated_provisioning:
if increase_consumed_reads_unit == 'percent':
consumed_calculated_provisioning = calculators.increase_reads_in_percent(current_read_units, consumed_calculated_provisioning, get_table_option(key_name, 'max_provisioned_reads'), consumed_read_units_percent, table_name) # depends on [control=['if'], data=[]]
else:
consumed_calculated_provisioning = calculators.increase_reads_in_units(current_read_units, consumed_calculated_provisioning, get_table_option(key_name, 'max_provisioned_reads'), consumed_read_units_percent, table_name) # depends on [control=['if'], data=[]]
elif reads_upper_threshold and consumed_read_units_percent > reads_upper_threshold and (not increase_consumed_reads_scale):
if increase_consumed_reads_unit == 'percent':
consumed_calculated_provisioning = calculators.increase_reads_in_percent(current_read_units, increase_consumed_reads_with, get_table_option(key_name, 'max_provisioned_reads'), consumed_read_units_percent, table_name) # depends on [control=['if'], data=[]]
else:
consumed_calculated_provisioning = calculators.increase_reads_in_units(current_read_units, increase_consumed_reads_with, get_table_option(key_name, 'max_provisioned_reads'), consumed_read_units_percent, table_name) # depends on [control=['if'], data=[]]
# Increase needed due to high throttling
if throttled_reads_upper_threshold and throttled_read_count > throttled_reads_upper_threshold:
if increase_reads_unit == 'percent':
throttled_count_calculated_provisioning = calculators.increase_reads_in_percent(updated_read_units, increase_consumed_reads_with, get_table_option(key_name, 'max_provisioned_reads'), consumed_read_units_percent, table_name) # depends on [control=['if'], data=[]]
else:
throttled_count_calculated_provisioning = calculators.increase_reads_in_units(updated_read_units, increase_reads_with, get_table_option(key_name, 'max_provisioned_reads'), consumed_read_units_percent, table_name) # depends on [control=['if'], data=[]]
# Determine which metric requires the most scaling
if throttled_by_provisioned_calculated_provisioning > calculated_provisioning:
calculated_provisioning = throttled_by_provisioned_calculated_provisioning
scale_reason = 'due to throttled events by provisioned units threshold being exceeded' # depends on [control=['if'], data=['throttled_by_provisioned_calculated_provisioning', 'calculated_provisioning']]
if throttled_by_consumed_calculated_provisioning > calculated_provisioning:
calculated_provisioning = throttled_by_consumed_calculated_provisioning
scale_reason = 'due to throttled events by consumed units threshold being exceeded' # depends on [control=['if'], data=['throttled_by_consumed_calculated_provisioning', 'calculated_provisioning']]
if consumed_calculated_provisioning > calculated_provisioning:
calculated_provisioning = consumed_calculated_provisioning
scale_reason = 'due to consumed threshold being exceeded' # depends on [control=['if'], data=['consumed_calculated_provisioning', 'calculated_provisioning']]
if throttled_count_calculated_provisioning > calculated_provisioning:
calculated_provisioning = throttled_count_calculated_provisioning
scale_reason = 'due to throttled events threshold being exceeded' # depends on [control=['if'], data=['throttled_count_calculated_provisioning', 'calculated_provisioning']]
if calculated_provisioning > current_read_units:
logger.info('{0} - Resetting the number of consecutive read checks. Reason: scale up {1}'.format(table_name, scale_reason))
num_consec_read_checks = 0
update_needed = True
updated_read_units = calculated_provisioning # depends on [control=['if'], data=['calculated_provisioning']]
# Decrease needed due to low CU consumption
if not update_needed:
# If local/granular values not specified use global values
decrease_consumed_reads_unit = decrease_consumed_reads_unit or decrease_reads_unit
decrease_consumed_reads_with = decrease_consumed_reads_with or decrease_reads_with
# Initialise variables to store calculated provisioning
consumed_calculated_provisioning = scale_reader_decrease(decrease_consumed_reads_scale, consumed_read_units_percent)
calculated_provisioning = None
# Exit if down scaling has been disabled
if not get_table_option(key_name, 'enable_reads_down_scaling'):
logger.debug('{0} - Down scaling event detected. No action taken as scaling down reads has been disabled in the configuration'.format(table_name)) # depends on [control=['if'], data=[]]
# Exit if reads == 0% and downscaling has been disabled at 0%
elif consumed_read_units_percent == 0 and (not get_table_option(key_name, 'allow_scaling_down_reads_on_0_percent')):
logger.info('{0} - Down scaling event detected. No action taken as scaling down reads is not done when usage is at 0%'.format(table_name)) # depends on [control=['if'], data=[]]
else:
if consumed_calculated_provisioning:
if decrease_consumed_reads_unit == 'percent':
calculated_provisioning = calculators.decrease_reads_in_percent(updated_read_units, consumed_calculated_provisioning, get_table_option(key_name, 'min_provisioned_reads'), table_name) # depends on [control=['if'], data=[]]
else:
calculated_provisioning = calculators.decrease_reads_in_units(updated_read_units, consumed_calculated_provisioning, get_table_option(key_name, 'min_provisioned_reads'), table_name) # depends on [control=['if'], data=[]]
elif reads_lower_threshold and consumed_read_units_percent < reads_lower_threshold and (not decrease_consumed_reads_scale):
if decrease_consumed_reads_unit == 'percent':
calculated_provisioning = calculators.decrease_reads_in_percent(updated_read_units, decrease_consumed_reads_with, get_table_option(key_name, 'min_provisioned_reads'), table_name) # depends on [control=['if'], data=[]]
else:
calculated_provisioning = calculators.decrease_reads_in_units(updated_read_units, decrease_consumed_reads_with, get_table_option(key_name, 'min_provisioned_reads'), table_name) # depends on [control=['if'], data=[]]
if calculated_provisioning and current_read_units != calculated_provisioning:
num_consec_read_checks += 1
if num_consec_read_checks >= num_read_checks_before_scale_down:
update_needed = True
updated_read_units = calculated_provisioning # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
# Never go over the configured max provisioning
if max_provisioned_reads:
if int(updated_read_units) > int(max_provisioned_reads):
update_needed = True
updated_read_units = int(max_provisioned_reads)
logger.info('Will not increase writes over max-provisioned-reads limit ({0} writes)'.format(updated_read_units)) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
# Ensure that we have met the min-provisioning
if min_provisioned_reads:
if int(min_provisioned_reads) > int(updated_read_units):
update_needed = True
updated_read_units = int(min_provisioned_reads)
logger.info('{0} - Increasing reads to meet min-provisioned-reads limit ({1} reads)'.format(table_name, updated_read_units)) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
if calculators.is_consumed_over_proposed(current_read_units, updated_read_units, consumed_read_units_percent):
update_needed = False
updated_read_units = current_read_units
logger.info('{0} - Consumed is over proposed read units. Will leave table at current setting.'.format(table_name)) # depends on [control=['if'], data=[]]
logger.info('{0} - Consecutive read checks {1}/{2}'.format(table_name, num_consec_read_checks, num_read_checks_before_scale_down))
return (update_needed, updated_read_units, num_consec_read_checks)
|
def get_average_cross_validation_fitness(self): # pragma: no cover
"""Get the per-generation average cross_validation fitness."""
avg_cross_validation_fitness = []
for stats in self.generation_cross_validation_statistics:
scores = []
for fitness in stats.values():
scores.extend(fitness)
avg_cross_validation_fitness.append(mean(scores))
return avg_cross_validation_fitness
|
def function[get_average_cross_validation_fitness, parameter[self]]:
constant[Get the per-generation average cross_validation fitness.]
variable[avg_cross_validation_fitness] assign[=] list[[]]
for taget[name[stats]] in starred[name[self].generation_cross_validation_statistics] begin[:]
variable[scores] assign[=] list[[]]
for taget[name[fitness]] in starred[call[name[stats].values, parameter[]]] begin[:]
call[name[scores].extend, parameter[name[fitness]]]
call[name[avg_cross_validation_fitness].append, parameter[call[name[mean], parameter[name[scores]]]]]
return[name[avg_cross_validation_fitness]]
|
keyword[def] identifier[get_average_cross_validation_fitness] ( identifier[self] ):
literal[string]
identifier[avg_cross_validation_fitness] =[]
keyword[for] identifier[stats] keyword[in] identifier[self] . identifier[generation_cross_validation_statistics] :
identifier[scores] =[]
keyword[for] identifier[fitness] keyword[in] identifier[stats] . identifier[values] ():
identifier[scores] . identifier[extend] ( identifier[fitness] )
identifier[avg_cross_validation_fitness] . identifier[append] ( identifier[mean] ( identifier[scores] ))
keyword[return] identifier[avg_cross_validation_fitness]
|
def get_average_cross_validation_fitness(self): # pragma: no cover
'Get the per-generation average cross_validation fitness.'
avg_cross_validation_fitness = []
for stats in self.generation_cross_validation_statistics:
scores = []
for fitness in stats.values():
scores.extend(fitness) # depends on [control=['for'], data=['fitness']]
avg_cross_validation_fitness.append(mean(scores)) # depends on [control=['for'], data=['stats']]
return avg_cross_validation_fitness
|
def send_message(client, message):
"""Send message to client and close the connection."""
print(message)
client.send("HTTP/1.1 200 OK\r\n\r\n{}".format(message).encode("utf-8"))
client.close()
|
def function[send_message, parameter[client, message]]:
constant[Send message to client and close the connection.]
call[name[print], parameter[name[message]]]
call[name[client].send, parameter[call[call[constant[HTTP/1.1 200 OK
{}].format, parameter[name[message]]].encode, parameter[constant[utf-8]]]]]
call[name[client].close, parameter[]]
|
keyword[def] identifier[send_message] ( identifier[client] , identifier[message] ):
literal[string]
identifier[print] ( identifier[message] )
identifier[client] . identifier[send] ( literal[string] . identifier[format] ( identifier[message] ). identifier[encode] ( literal[string] ))
identifier[client] . identifier[close] ()
|
def send_message(client, message):
"""Send message to client and close the connection."""
print(message)
client.send('HTTP/1.1 200 OK\r\n\r\n{}'.format(message).encode('utf-8'))
client.close()
|
def _insert(self, element, new_element, before):
"""
Insert a element before or after other element.
:param element: The reference element.
:type element: hatemile.util.html.htmldomelement.HTMLDOMElement
:param new_element: The element that be inserted.
:type new_element: hatemile.util.html.htmldomelement.HTMLDOMElement
:param before: To insert the element before the other element.
:type before: bool
"""
tag_name = element.get_tag_name()
append_tags = [
'BODY',
'A',
'FIGCAPTION',
'LI',
'DT',
'DD',
'LABEL',
'OPTION',
'TD',
'TH'
]
controls = ['INPUT', 'SELECT', 'TEXTAREA']
if tag_name == 'HTML':
body = self.parser.find('body').first_result()
if body is not None:
self._insert(body, new_element, before)
elif tag_name in append_tags:
if before:
element.prepend_element(new_element)
else:
element.append_element(new_element)
elif tag_name in controls:
labels = []
if element.has_attribute('id'):
labels = self.parser.find(
'label[for="'
+ element.get_attribute('id')
+ '"]'
).list_results()
if not labels:
labels = self.parser.find(element).find_ancestors(
'label'
).list_results()
for label in labels:
self._insert(label, new_element, before)
elif before:
element.insert_before(new_element)
else:
element.insert_after(new_element)
|
def function[_insert, parameter[self, element, new_element, before]]:
constant[
Insert a element before or after other element.
:param element: The reference element.
:type element: hatemile.util.html.htmldomelement.HTMLDOMElement
:param new_element: The element that be inserted.
:type new_element: hatemile.util.html.htmldomelement.HTMLDOMElement
:param before: To insert the element before the other element.
:type before: bool
]
variable[tag_name] assign[=] call[name[element].get_tag_name, parameter[]]
variable[append_tags] assign[=] list[[<ast.Constant object at 0x7da18dc98fa0>, <ast.Constant object at 0x7da18dc9a290>, <ast.Constant object at 0x7da18dc98d60>, <ast.Constant object at 0x7da18dc99300>, <ast.Constant object at 0x7da18dc98610>, <ast.Constant object at 0x7da18dc98790>, <ast.Constant object at 0x7da18dc99120>, <ast.Constant object at 0x7da18dc9b3a0>, <ast.Constant object at 0x7da18dc9a890>, <ast.Constant object at 0x7da18dc9b130>]]
variable[controls] assign[=] list[[<ast.Constant object at 0x7da18dc98700>, <ast.Constant object at 0x7da18dc9af80>, <ast.Constant object at 0x7da18dc991b0>]]
if compare[name[tag_name] equal[==] constant[HTML]] begin[:]
variable[body] assign[=] call[call[name[self].parser.find, parameter[constant[body]]].first_result, parameter[]]
if compare[name[body] is_not constant[None]] begin[:]
call[name[self]._insert, parameter[name[body], name[new_element], name[before]]]
|
keyword[def] identifier[_insert] ( identifier[self] , identifier[element] , identifier[new_element] , identifier[before] ):
literal[string]
identifier[tag_name] = identifier[element] . identifier[get_tag_name] ()
identifier[append_tags] =[
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ,
literal[string]
]
identifier[controls] =[ literal[string] , literal[string] , literal[string] ]
keyword[if] identifier[tag_name] == literal[string] :
identifier[body] = identifier[self] . identifier[parser] . identifier[find] ( literal[string] ). identifier[first_result] ()
keyword[if] identifier[body] keyword[is] keyword[not] keyword[None] :
identifier[self] . identifier[_insert] ( identifier[body] , identifier[new_element] , identifier[before] )
keyword[elif] identifier[tag_name] keyword[in] identifier[append_tags] :
keyword[if] identifier[before] :
identifier[element] . identifier[prepend_element] ( identifier[new_element] )
keyword[else] :
identifier[element] . identifier[append_element] ( identifier[new_element] )
keyword[elif] identifier[tag_name] keyword[in] identifier[controls] :
identifier[labels] =[]
keyword[if] identifier[element] . identifier[has_attribute] ( literal[string] ):
identifier[labels] = identifier[self] . identifier[parser] . identifier[find] (
literal[string]
+ identifier[element] . identifier[get_attribute] ( literal[string] )
+ literal[string]
). identifier[list_results] ()
keyword[if] keyword[not] identifier[labels] :
identifier[labels] = identifier[self] . identifier[parser] . identifier[find] ( identifier[element] ). identifier[find_ancestors] (
literal[string]
). identifier[list_results] ()
keyword[for] identifier[label] keyword[in] identifier[labels] :
identifier[self] . identifier[_insert] ( identifier[label] , identifier[new_element] , identifier[before] )
keyword[elif] identifier[before] :
identifier[element] . identifier[insert_before] ( identifier[new_element] )
keyword[else] :
identifier[element] . identifier[insert_after] ( identifier[new_element] )
|
def _insert(self, element, new_element, before):
"""
Insert a element before or after other element.
:param element: The reference element.
:type element: hatemile.util.html.htmldomelement.HTMLDOMElement
:param new_element: The element that be inserted.
:type new_element: hatemile.util.html.htmldomelement.HTMLDOMElement
:param before: To insert the element before the other element.
:type before: bool
"""
tag_name = element.get_tag_name()
append_tags = ['BODY', 'A', 'FIGCAPTION', 'LI', 'DT', 'DD', 'LABEL', 'OPTION', 'TD', 'TH']
controls = ['INPUT', 'SELECT', 'TEXTAREA']
if tag_name == 'HTML':
body = self.parser.find('body').first_result()
if body is not None:
self._insert(body, new_element, before) # depends on [control=['if'], data=['body']] # depends on [control=['if'], data=[]]
elif tag_name in append_tags:
if before:
element.prepend_element(new_element) # depends on [control=['if'], data=[]]
else:
element.append_element(new_element) # depends on [control=['if'], data=[]]
elif tag_name in controls:
labels = []
if element.has_attribute('id'):
labels = self.parser.find('label[for="' + element.get_attribute('id') + '"]').list_results() # depends on [control=['if'], data=[]]
if not labels:
labels = self.parser.find(element).find_ancestors('label').list_results() # depends on [control=['if'], data=[]]
for label in labels:
self._insert(label, new_element, before) # depends on [control=['for'], data=['label']] # depends on [control=['if'], data=[]]
elif before:
element.insert_before(new_element) # depends on [control=['if'], data=[]]
else:
element.insert_after(new_element)
|
def downstream_index(dir_value, i, j, alg='taudem'):
"""find downslope coordinate for D8 direction."""
assert alg.lower() in FlowModelConst.d8_deltas
delta = FlowModelConst.d8_deltas.get(alg.lower())
drow, dcol = delta[int(dir_value)]
return i + drow, j + dcol
|
def function[downstream_index, parameter[dir_value, i, j, alg]]:
constant[find downslope coordinate for D8 direction.]
assert[compare[call[name[alg].lower, parameter[]] in name[FlowModelConst].d8_deltas]]
variable[delta] assign[=] call[name[FlowModelConst].d8_deltas.get, parameter[call[name[alg].lower, parameter[]]]]
<ast.Tuple object at 0x7da2054a6ef0> assign[=] call[name[delta]][call[name[int], parameter[name[dir_value]]]]
return[tuple[[<ast.BinOp object at 0x7da2054a4df0>, <ast.BinOp object at 0x7da2054a5b10>]]]
|
keyword[def] identifier[downstream_index] ( identifier[dir_value] , identifier[i] , identifier[j] , identifier[alg] = literal[string] ):
literal[string]
keyword[assert] identifier[alg] . identifier[lower] () keyword[in] identifier[FlowModelConst] . identifier[d8_deltas]
identifier[delta] = identifier[FlowModelConst] . identifier[d8_deltas] . identifier[get] ( identifier[alg] . identifier[lower] ())
identifier[drow] , identifier[dcol] = identifier[delta] [ identifier[int] ( identifier[dir_value] )]
keyword[return] identifier[i] + identifier[drow] , identifier[j] + identifier[dcol]
|
def downstream_index(dir_value, i, j, alg='taudem'):
"""find downslope coordinate for D8 direction."""
assert alg.lower() in FlowModelConst.d8_deltas
delta = FlowModelConst.d8_deltas.get(alg.lower())
(drow, dcol) = delta[int(dir_value)]
return (i + drow, j + dcol)
|
def _reset_django(settings):
"""
Hackish way to reset the django instance settings and AppConfig
:param settings: django settings module
"""
if settings._wrapped != empty:
clear_url_caches()
from django.apps import apps
apps.clear_cache()
settings._wrapped = empty
clear_url_caches()
|
def function[_reset_django, parameter[settings]]:
constant[
Hackish way to reset the django instance settings and AppConfig
:param settings: django settings module
]
if compare[name[settings]._wrapped not_equal[!=] name[empty]] begin[:]
call[name[clear_url_caches], parameter[]]
from relative_module[django.apps] import module[apps]
call[name[apps].clear_cache, parameter[]]
name[settings]._wrapped assign[=] name[empty]
call[name[clear_url_caches], parameter[]]
|
keyword[def] identifier[_reset_django] ( identifier[settings] ):
literal[string]
keyword[if] identifier[settings] . identifier[_wrapped] != identifier[empty] :
identifier[clear_url_caches] ()
keyword[from] identifier[django] . identifier[apps] keyword[import] identifier[apps]
identifier[apps] . identifier[clear_cache] ()
identifier[settings] . identifier[_wrapped] = identifier[empty]
identifier[clear_url_caches] ()
|
def _reset_django(settings):
"""
Hackish way to reset the django instance settings and AppConfig
:param settings: django settings module
"""
if settings._wrapped != empty:
clear_url_caches()
from django.apps import apps
apps.clear_cache()
settings._wrapped = empty
clear_url_caches() # depends on [control=['if'], data=['empty']]
|
def remove_binaries(package_dir=False):
"""Remove all binaries for the current platform
Parameters
----------
package_dir: bool
If True, remove all binaries from the `resources`
directory of the qpsphere package. If False,
remove all binaries from the user's cache directory.
"""
paths = []
if package_dir:
pdir = RESCR_PATH
else:
pdir = CACHE_PATH
for pp in pdir.iterdir():
if pp.name != "shipped_resources_go_here":
paths.append(pp)
for pp in paths:
pp.unlink()
|
def function[remove_binaries, parameter[package_dir]]:
constant[Remove all binaries for the current platform
Parameters
----------
package_dir: bool
If True, remove all binaries from the `resources`
directory of the qpsphere package. If False,
remove all binaries from the user's cache directory.
]
variable[paths] assign[=] list[[]]
if name[package_dir] begin[:]
variable[pdir] assign[=] name[RESCR_PATH]
for taget[name[pp]] in starred[call[name[pdir].iterdir, parameter[]]] begin[:]
if compare[name[pp].name not_equal[!=] constant[shipped_resources_go_here]] begin[:]
call[name[paths].append, parameter[name[pp]]]
for taget[name[pp]] in starred[name[paths]] begin[:]
call[name[pp].unlink, parameter[]]
|
keyword[def] identifier[remove_binaries] ( identifier[package_dir] = keyword[False] ):
literal[string]
identifier[paths] =[]
keyword[if] identifier[package_dir] :
identifier[pdir] = identifier[RESCR_PATH]
keyword[else] :
identifier[pdir] = identifier[CACHE_PATH]
keyword[for] identifier[pp] keyword[in] identifier[pdir] . identifier[iterdir] ():
keyword[if] identifier[pp] . identifier[name] != literal[string] :
identifier[paths] . identifier[append] ( identifier[pp] )
keyword[for] identifier[pp] keyword[in] identifier[paths] :
identifier[pp] . identifier[unlink] ()
|
def remove_binaries(package_dir=False):
"""Remove all binaries for the current platform
Parameters
----------
package_dir: bool
If True, remove all binaries from the `resources`
directory of the qpsphere package. If False,
remove all binaries from the user's cache directory.
"""
paths = []
if package_dir:
pdir = RESCR_PATH # depends on [control=['if'], data=[]]
else:
pdir = CACHE_PATH
for pp in pdir.iterdir():
if pp.name != 'shipped_resources_go_here':
paths.append(pp) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['pp']]
for pp in paths:
pp.unlink() # depends on [control=['for'], data=['pp']]
|
def get_provisioning_configuration(
self, provisioning_configuration_id, custom_headers=None, raw=False, **operation_config):
"""GetContinuousDeploymentOperation.
:param provisioning_configuration_id:
:type provisioning_configuration_id: str
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:rtype: :class:`ContinuousDeploymentOperation
<vsts_info_provider.models.ContinuousDeploymentOperation>`
:rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
if raw=true
:raises:
:class:`HttpOperationError<msrest.exceptions.HttpOperationError>`
"""
# Construct URL
url = '/_apis/continuousdelivery/provisioningconfigurations/{provisioningConfigurationId}'
path_format_arguments = {
'provisioningConfigurationId': self._serialize.url("provisioning_configuration_id", provisioning_configuration_id, 'str')
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if custom_headers:
header_parameters.update(custom_headers)
# Construct and send request
request = self._client.get(url, query_parameters)
response = self._client.send(request, header_parameters, **operation_config)
if response.status_code not in [200]:
print("GET", request.url, file=stderr)
print("response:", response.status_code, file=stderr)
print(response.text, file=stderr)
raise HttpOperationError(self._deserialize, response)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('ProvisioningConfiguration', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
|
def function[get_provisioning_configuration, parameter[self, provisioning_configuration_id, custom_headers, raw]]:
constant[GetContinuousDeploymentOperation.
:param provisioning_configuration_id:
:type provisioning_configuration_id: str
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:rtype: :class:`ContinuousDeploymentOperation
<vsts_info_provider.models.ContinuousDeploymentOperation>`
:rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
if raw=true
:raises:
:class:`HttpOperationError<msrest.exceptions.HttpOperationError>`
]
variable[url] assign[=] constant[/_apis/continuousdelivery/provisioningconfigurations/{provisioningConfigurationId}]
variable[path_format_arguments] assign[=] dictionary[[<ast.Constant object at 0x7da1b19af6a0>], [<ast.Call object at 0x7da1b19ad750>]]
variable[url] assign[=] call[name[self]._client.format_url, parameter[name[url]]]
variable[query_parameters] assign[=] dictionary[[], []]
variable[header_parameters] assign[=] dictionary[[], []]
call[name[header_parameters]][constant[Content-Type]] assign[=] constant[application/json; charset=utf-8]
if name[custom_headers] begin[:]
call[name[header_parameters].update, parameter[name[custom_headers]]]
variable[request] assign[=] call[name[self]._client.get, parameter[name[url], name[query_parameters]]]
variable[response] assign[=] call[name[self]._client.send, parameter[name[request], name[header_parameters]]]
if compare[name[response].status_code <ast.NotIn object at 0x7da2590d7190> list[[<ast.Constant object at 0x7da1b195fac0>]]] begin[:]
call[name[print], parameter[constant[GET], name[request].url]]
call[name[print], parameter[constant[response:], name[response].status_code]]
call[name[print], parameter[name[response].text]]
<ast.Raise object at 0x7da1b195d4e0>
variable[deserialized] assign[=] constant[None]
if compare[name[response].status_code equal[==] constant[200]] begin[:]
variable[deserialized] assign[=] call[name[self]._deserialize, parameter[constant[ProvisioningConfiguration], name[response]]]
if name[raw] begin[:]
variable[client_raw_response] assign[=] call[name[ClientRawResponse], parameter[name[deserialized], name[response]]]
return[name[client_raw_response]]
return[name[deserialized]]
|
keyword[def] identifier[get_provisioning_configuration] (
identifier[self] , identifier[provisioning_configuration_id] , identifier[custom_headers] = keyword[None] , identifier[raw] = keyword[False] ,** identifier[operation_config] ):
literal[string]
identifier[url] = literal[string]
identifier[path_format_arguments] ={
literal[string] : identifier[self] . identifier[_serialize] . identifier[url] ( literal[string] , identifier[provisioning_configuration_id] , literal[string] )
}
identifier[url] = identifier[self] . identifier[_client] . identifier[format_url] ( identifier[url] ,** identifier[path_format_arguments] )
identifier[query_parameters] ={}
identifier[header_parameters] ={}
identifier[header_parameters] [ literal[string] ]= literal[string]
keyword[if] identifier[custom_headers] :
identifier[header_parameters] . identifier[update] ( identifier[custom_headers] )
identifier[request] = identifier[self] . identifier[_client] . identifier[get] ( identifier[url] , identifier[query_parameters] )
identifier[response] = identifier[self] . identifier[_client] . identifier[send] ( identifier[request] , identifier[header_parameters] ,** identifier[operation_config] )
keyword[if] identifier[response] . identifier[status_code] keyword[not] keyword[in] [ literal[int] ]:
identifier[print] ( literal[string] , identifier[request] . identifier[url] , identifier[file] = identifier[stderr] )
identifier[print] ( literal[string] , identifier[response] . identifier[status_code] , identifier[file] = identifier[stderr] )
identifier[print] ( identifier[response] . identifier[text] , identifier[file] = identifier[stderr] )
keyword[raise] identifier[HttpOperationError] ( identifier[self] . identifier[_deserialize] , identifier[response] )
identifier[deserialized] = keyword[None]
keyword[if] identifier[response] . identifier[status_code] == literal[int] :
identifier[deserialized] = identifier[self] . identifier[_deserialize] ( literal[string] , identifier[response] )
keyword[if] identifier[raw] :
identifier[client_raw_response] = identifier[ClientRawResponse] ( identifier[deserialized] , identifier[response] )
keyword[return] identifier[client_raw_response]
keyword[return] identifier[deserialized]
|
def get_provisioning_configuration(self, provisioning_configuration_id, custom_headers=None, raw=False, **operation_config):
"""GetContinuousDeploymentOperation.
:param provisioning_configuration_id:
:type provisioning_configuration_id: str
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:rtype: :class:`ContinuousDeploymentOperation
<vsts_info_provider.models.ContinuousDeploymentOperation>`
:rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
if raw=true
:raises:
:class:`HttpOperationError<msrest.exceptions.HttpOperationError>`
"""
# Construct URL
url = '/_apis/continuousdelivery/provisioningconfigurations/{provisioningConfigurationId}'
path_format_arguments = {'provisioningConfigurationId': self._serialize.url('provisioning_configuration_id', provisioning_configuration_id, 'str')}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if custom_headers:
header_parameters.update(custom_headers) # depends on [control=['if'], data=[]]
# Construct and send request
request = self._client.get(url, query_parameters)
response = self._client.send(request, header_parameters, **operation_config)
if response.status_code not in [200]:
print('GET', request.url, file=stderr)
print('response:', response.status_code, file=stderr)
print(response.text, file=stderr)
raise HttpOperationError(self._deserialize, response) # depends on [control=['if'], data=[]]
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('ProvisioningConfiguration', response) # depends on [control=['if'], data=[]]
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response # depends on [control=['if'], data=[]]
return deserialized
|
def set_slug(apps, schema_editor):
"""
Create a slug for each Event already in the DB.
"""
Event = apps.get_model('spectator_events', 'Event')
for e in Event.objects.all():
e.slug = generate_slug(e.pk)
e.save(update_fields=['slug'])
|
def function[set_slug, parameter[apps, schema_editor]]:
constant[
Create a slug for each Event already in the DB.
]
variable[Event] assign[=] call[name[apps].get_model, parameter[constant[spectator_events], constant[Event]]]
for taget[name[e]] in starred[call[name[Event].objects.all, parameter[]]] begin[:]
name[e].slug assign[=] call[name[generate_slug], parameter[name[e].pk]]
call[name[e].save, parameter[]]
|
keyword[def] identifier[set_slug] ( identifier[apps] , identifier[schema_editor] ):
literal[string]
identifier[Event] = identifier[apps] . identifier[get_model] ( literal[string] , literal[string] )
keyword[for] identifier[e] keyword[in] identifier[Event] . identifier[objects] . identifier[all] ():
identifier[e] . identifier[slug] = identifier[generate_slug] ( identifier[e] . identifier[pk] )
identifier[e] . identifier[save] ( identifier[update_fields] =[ literal[string] ])
|
def set_slug(apps, schema_editor):
"""
Create a slug for each Event already in the DB.
"""
Event = apps.get_model('spectator_events', 'Event')
for e in Event.objects.all():
e.slug = generate_slug(e.pk)
e.save(update_fields=['slug']) # depends on [control=['for'], data=['e']]
|
def execute(self):
"""Output environment name."""
# Disable other runway logging so the only response is the env name
logging.getLogger('runway').setLevel(logging.ERROR)
# This may be invoked from a module directory in an environment;
# account for that here if necessary
if not os.path.isfile('runway.yml'):
self.env_root = os.path.dirname(os.getcwd())
self.runway_config_path = os.path.join(self.env_root, 'runway.yml')
print(get_env(
self.env_root,
self.runway_config.get('ignore_git_branch', False)
))
|
def function[execute, parameter[self]]:
constant[Output environment name.]
call[call[name[logging].getLogger, parameter[constant[runway]]].setLevel, parameter[name[logging].ERROR]]
if <ast.UnaryOp object at 0x7da1b07ac6a0> begin[:]
name[self].env_root assign[=] call[name[os].path.dirname, parameter[call[name[os].getcwd, parameter[]]]]
name[self].runway_config_path assign[=] call[name[os].path.join, parameter[name[self].env_root, constant[runway.yml]]]
call[name[print], parameter[call[name[get_env], parameter[name[self].env_root, call[name[self].runway_config.get, parameter[constant[ignore_git_branch], constant[False]]]]]]]
|
keyword[def] identifier[execute] ( identifier[self] ):
literal[string]
identifier[logging] . identifier[getLogger] ( literal[string] ). identifier[setLevel] ( identifier[logging] . identifier[ERROR] )
keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[isfile] ( literal[string] ):
identifier[self] . identifier[env_root] = identifier[os] . identifier[path] . identifier[dirname] ( identifier[os] . identifier[getcwd] ())
identifier[self] . identifier[runway_config_path] = identifier[os] . identifier[path] . identifier[join] ( identifier[self] . identifier[env_root] , literal[string] )
identifier[print] ( identifier[get_env] (
identifier[self] . identifier[env_root] ,
identifier[self] . identifier[runway_config] . identifier[get] ( literal[string] , keyword[False] )
))
|
def execute(self):
"""Output environment name."""
# Disable other runway logging so the only response is the env name
logging.getLogger('runway').setLevel(logging.ERROR)
# This may be invoked from a module directory in an environment;
# account for that here if necessary
if not os.path.isfile('runway.yml'):
self.env_root = os.path.dirname(os.getcwd())
self.runway_config_path = os.path.join(self.env_root, 'runway.yml') # depends on [control=['if'], data=[]]
print(get_env(self.env_root, self.runway_config.get('ignore_git_branch', False)))
|
def step_impl09(context):
"""Create application list.
:param context: test context.
"""
assert context.table, "ENSURE: table is provided."
context.app_list = [row['application'] for row in context.table.rows]
|
def function[step_impl09, parameter[context]]:
constant[Create application list.
:param context: test context.
]
assert[name[context].table]
name[context].app_list assign[=] <ast.ListComp object at 0x7da20eb64c70>
|
keyword[def] identifier[step_impl09] ( identifier[context] ):
literal[string]
keyword[assert] identifier[context] . identifier[table] , literal[string]
identifier[context] . identifier[app_list] =[ identifier[row] [ literal[string] ] keyword[for] identifier[row] keyword[in] identifier[context] . identifier[table] . identifier[rows] ]
|
def step_impl09(context):
"""Create application list.
:param context: test context.
"""
assert context.table, 'ENSURE: table is provided.'
context.app_list = [row['application'] for row in context.table.rows]
|
def _proxy(self):
"""
Generate an instance context for the instance, the context is capable of
performing various actions. All instance actions are proxied to the context
:returns: ConferenceContext for this ConferenceInstance
:rtype: twilio.rest.api.v2010.account.conference.ConferenceContext
"""
if self._context is None:
self._context = ConferenceContext(
self._version,
account_sid=self._solution['account_sid'],
sid=self._solution['sid'],
)
return self._context
|
def function[_proxy, parameter[self]]:
constant[
Generate an instance context for the instance, the context is capable of
performing various actions. All instance actions are proxied to the context
:returns: ConferenceContext for this ConferenceInstance
:rtype: twilio.rest.api.v2010.account.conference.ConferenceContext
]
if compare[name[self]._context is constant[None]] begin[:]
name[self]._context assign[=] call[name[ConferenceContext], parameter[name[self]._version]]
return[name[self]._context]
|
keyword[def] identifier[_proxy] ( identifier[self] ):
literal[string]
keyword[if] identifier[self] . identifier[_context] keyword[is] keyword[None] :
identifier[self] . identifier[_context] = identifier[ConferenceContext] (
identifier[self] . identifier[_version] ,
identifier[account_sid] = identifier[self] . identifier[_solution] [ literal[string] ],
identifier[sid] = identifier[self] . identifier[_solution] [ literal[string] ],
)
keyword[return] identifier[self] . identifier[_context]
|
def _proxy(self):
"""
Generate an instance context for the instance, the context is capable of
performing various actions. All instance actions are proxied to the context
:returns: ConferenceContext for this ConferenceInstance
:rtype: twilio.rest.api.v2010.account.conference.ConferenceContext
"""
if self._context is None:
self._context = ConferenceContext(self._version, account_sid=self._solution['account_sid'], sid=self._solution['sid']) # depends on [control=['if'], data=[]]
return self._context
|
def download_file_insecure(url, target):
'''
Use Python to download the file, even though it cannot authenticate the
connection.
'''
try:
from urllib.request import urlopen
except ImportError:
from urllib2 import urlopen
src = dst = None
try:
src = urlopen(url)
# Read/write all in one block, so we don't create a corrupt file
# if the download is interrupted.
data = src.read()
dst = open(target, 'wb')
dst.write(data)
finally:
if src:
src.close()
if dst:
dst.close()
|
def function[download_file_insecure, parameter[url, target]]:
constant[
Use Python to download the file, even though it cannot authenticate the
connection.
]
<ast.Try object at 0x7da1b11e04f0>
variable[src] assign[=] constant[None]
<ast.Try object at 0x7da1b11e2e00>
|
keyword[def] identifier[download_file_insecure] ( identifier[url] , identifier[target] ):
literal[string]
keyword[try] :
keyword[from] identifier[urllib] . identifier[request] keyword[import] identifier[urlopen]
keyword[except] identifier[ImportError] :
keyword[from] identifier[urllib2] keyword[import] identifier[urlopen]
identifier[src] = identifier[dst] = keyword[None]
keyword[try] :
identifier[src] = identifier[urlopen] ( identifier[url] )
identifier[data] = identifier[src] . identifier[read] ()
identifier[dst] = identifier[open] ( identifier[target] , literal[string] )
identifier[dst] . identifier[write] ( identifier[data] )
keyword[finally] :
keyword[if] identifier[src] :
identifier[src] . identifier[close] ()
keyword[if] identifier[dst] :
identifier[dst] . identifier[close] ()
|
def download_file_insecure(url, target):
"""
Use Python to download the file, even though it cannot authenticate the
connection.
"""
try:
from urllib.request import urlopen # depends on [control=['try'], data=[]]
except ImportError:
from urllib2 import urlopen # depends on [control=['except'], data=[]]
src = dst = None
try:
src = urlopen(url)
# Read/write all in one block, so we don't create a corrupt file
# if the download is interrupted.
data = src.read()
dst = open(target, 'wb')
dst.write(data) # depends on [control=['try'], data=[]]
finally:
if src:
src.close() # depends on [control=['if'], data=[]]
if dst:
dst.close() # depends on [control=['if'], data=[]]
|
def get_attrs_by_path(self, field_path, stop_first=False):
"""
It returns list of values looked up by field path.
Field path is dot-formatted string path: ``parent_field.child_field``.
:param field_path: field path. It allows ``*`` as wildcard.
:type field_path: list or None.
:param stop_first: Stop iteration on first value looked up. Default: False.
:type stop_first: bool
:return: A list of values or None it was a invalid path.
:rtype: :class:`list` or :class:`None`
"""
fields, next_field = self._get_fields_by_path(field_path)
values = []
for field in fields:
if next_field:
try:
res = self.get_field_value(field).get_attrs_by_path(next_field, stop_first=stop_first)
if res is None:
continue
values.extend(res)
if stop_first and len(values):
break
except AttributeError:
pass
else:
value = self.get_field_value(field)
if value is None:
continue
if stop_first:
return [value, ]
values.append(value)
return values if len(values) else None
|
def function[get_attrs_by_path, parameter[self, field_path, stop_first]]:
constant[
It returns list of values looked up by field path.
Field path is dot-formatted string path: ``parent_field.child_field``.
:param field_path: field path. It allows ``*`` as wildcard.
:type field_path: list or None.
:param stop_first: Stop iteration on first value looked up. Default: False.
:type stop_first: bool
:return: A list of values or None it was a invalid path.
:rtype: :class:`list` or :class:`None`
]
<ast.Tuple object at 0x7da1b0a3f460> assign[=] call[name[self]._get_fields_by_path, parameter[name[field_path]]]
variable[values] assign[=] list[[]]
for taget[name[field]] in starred[name[fields]] begin[:]
if name[next_field] begin[:]
<ast.Try object at 0x7da1b0a3c220>
return[<ast.IfExp object at 0x7da1b0aa5750>]
|
keyword[def] identifier[get_attrs_by_path] ( identifier[self] , identifier[field_path] , identifier[stop_first] = keyword[False] ):
literal[string]
identifier[fields] , identifier[next_field] = identifier[self] . identifier[_get_fields_by_path] ( identifier[field_path] )
identifier[values] =[]
keyword[for] identifier[field] keyword[in] identifier[fields] :
keyword[if] identifier[next_field] :
keyword[try] :
identifier[res] = identifier[self] . identifier[get_field_value] ( identifier[field] ). identifier[get_attrs_by_path] ( identifier[next_field] , identifier[stop_first] = identifier[stop_first] )
keyword[if] identifier[res] keyword[is] keyword[None] :
keyword[continue]
identifier[values] . identifier[extend] ( identifier[res] )
keyword[if] identifier[stop_first] keyword[and] identifier[len] ( identifier[values] ):
keyword[break]
keyword[except] identifier[AttributeError] :
keyword[pass]
keyword[else] :
identifier[value] = identifier[self] . identifier[get_field_value] ( identifier[field] )
keyword[if] identifier[value] keyword[is] keyword[None] :
keyword[continue]
keyword[if] identifier[stop_first] :
keyword[return] [ identifier[value] ,]
identifier[values] . identifier[append] ( identifier[value] )
keyword[return] identifier[values] keyword[if] identifier[len] ( identifier[values] ) keyword[else] keyword[None]
|
def get_attrs_by_path(self, field_path, stop_first=False):
"""
It returns list of values looked up by field path.
Field path is dot-formatted string path: ``parent_field.child_field``.
:param field_path: field path. It allows ``*`` as wildcard.
:type field_path: list or None.
:param stop_first: Stop iteration on first value looked up. Default: False.
:type stop_first: bool
:return: A list of values or None it was a invalid path.
:rtype: :class:`list` or :class:`None`
"""
(fields, next_field) = self._get_fields_by_path(field_path)
values = []
for field in fields:
if next_field:
try:
res = self.get_field_value(field).get_attrs_by_path(next_field, stop_first=stop_first)
if res is None:
continue # depends on [control=['if'], data=[]]
values.extend(res)
if stop_first and len(values):
break # depends on [control=['if'], data=[]] # depends on [control=['try'], data=[]]
except AttributeError:
pass # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]]
else:
value = self.get_field_value(field)
if value is None:
continue # depends on [control=['if'], data=[]]
if stop_first:
return [value] # depends on [control=['if'], data=[]]
values.append(value) # depends on [control=['for'], data=['field']]
return values if len(values) else None
|
def find_this(search, filename=MODULE_PATH):
"""Take a string and a filename path string and return the found value."""
if not search:
return
for line in open(str(filename)).readlines():
if search.lower() in line.lower():
line = line.split("=")[1].strip()
if "'" in line or '"' in line or '"""' in line:
line = line.replace("'", "").replace('"', '').replace('"""', '')
return line
|
def function[find_this, parameter[search, filename]]:
constant[Take a string and a filename path string and return the found value.]
if <ast.UnaryOp object at 0x7da1b0c0d360> begin[:]
return[None]
for taget[name[line]] in starred[call[call[name[open], parameter[call[name[str], parameter[name[filename]]]]].readlines, parameter[]]] begin[:]
if compare[call[name[search].lower, parameter[]] in call[name[line].lower, parameter[]]] begin[:]
variable[line] assign[=] call[call[call[name[line].split, parameter[constant[=]]]][constant[1]].strip, parameter[]]
if <ast.BoolOp object at 0x7da1b0c0f880> begin[:]
variable[line] assign[=] call[call[call[name[line].replace, parameter[constant['], constant[]]].replace, parameter[constant["], constant[]]].replace, parameter[constant["""], constant[]]]
return[name[line]]
|
keyword[def] identifier[find_this] ( identifier[search] , identifier[filename] = identifier[MODULE_PATH] ):
literal[string]
keyword[if] keyword[not] identifier[search] :
keyword[return]
keyword[for] identifier[line] keyword[in] identifier[open] ( identifier[str] ( identifier[filename] )). identifier[readlines] ():
keyword[if] identifier[search] . identifier[lower] () keyword[in] identifier[line] . identifier[lower] ():
identifier[line] = identifier[line] . identifier[split] ( literal[string] )[ literal[int] ]. identifier[strip] ()
keyword[if] literal[string] keyword[in] identifier[line] keyword[or] literal[string] keyword[in] identifier[line] keyword[or] literal[string] keyword[in] identifier[line] :
identifier[line] = identifier[line] . identifier[replace] ( literal[string] , literal[string] ). identifier[replace] ( literal[string] , literal[string] ). identifier[replace] ( literal[string] , literal[string] )
keyword[return] identifier[line]
|
def find_this(search, filename=MODULE_PATH):
"""Take a string and a filename path string and return the found value."""
if not search:
return # depends on [control=['if'], data=[]]
for line in open(str(filename)).readlines():
if search.lower() in line.lower():
line = line.split('=')[1].strip()
if "'" in line or '"' in line or '"""' in line:
line = line.replace("'", '').replace('"', '').replace('"""', '') # depends on [control=['if'], data=[]]
return line # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['line']]
|
def get_upcoming_events(num, days, featured=False):
"""
Get upcoming events.
Allows slicing to a given number,
picking the number of days to hold them after they've started
and whether they should be featured or not.
Usage:
{% get_upcoming_events 5 14 featured as events %}
Would return no more than 5 Featured events,
holding them for 14 days past their start date.
"""
from happenings.models import Event
start_date = today - datetime.timedelta(days=days)
events = Event.objects.filter(start_date__gt=start_date).order_by('start_date')
if featured:
events = events.filter(featured=True)
events = events[:num]
return events
|
def function[get_upcoming_events, parameter[num, days, featured]]:
constant[
Get upcoming events.
Allows slicing to a given number,
picking the number of days to hold them after they've started
and whether they should be featured or not.
Usage:
{% get_upcoming_events 5 14 featured as events %}
Would return no more than 5 Featured events,
holding them for 14 days past their start date.
]
from relative_module[happenings.models] import module[Event]
variable[start_date] assign[=] binary_operation[name[today] - call[name[datetime].timedelta, parameter[]]]
variable[events] assign[=] call[call[name[Event].objects.filter, parameter[]].order_by, parameter[constant[start_date]]]
if name[featured] begin[:]
variable[events] assign[=] call[name[events].filter, parameter[]]
variable[events] assign[=] call[name[events]][<ast.Slice object at 0x7da1affd65c0>]
return[name[events]]
|
keyword[def] identifier[get_upcoming_events] ( identifier[num] , identifier[days] , identifier[featured] = keyword[False] ):
literal[string]
keyword[from] identifier[happenings] . identifier[models] keyword[import] identifier[Event]
identifier[start_date] = identifier[today] - identifier[datetime] . identifier[timedelta] ( identifier[days] = identifier[days] )
identifier[events] = identifier[Event] . identifier[objects] . identifier[filter] ( identifier[start_date__gt] = identifier[start_date] ). identifier[order_by] ( literal[string] )
keyword[if] identifier[featured] :
identifier[events] = identifier[events] . identifier[filter] ( identifier[featured] = keyword[True] )
identifier[events] = identifier[events] [: identifier[num] ]
keyword[return] identifier[events]
|
def get_upcoming_events(num, days, featured=False):
"""
Get upcoming events.
Allows slicing to a given number,
picking the number of days to hold them after they've started
and whether they should be featured or not.
Usage:
{% get_upcoming_events 5 14 featured as events %}
Would return no more than 5 Featured events,
holding them for 14 days past their start date.
"""
from happenings.models import Event
start_date = today - datetime.timedelta(days=days)
events = Event.objects.filter(start_date__gt=start_date).order_by('start_date')
if featured:
events = events.filter(featured=True) # depends on [control=['if'], data=[]]
events = events[:num]
return events
|
def many_nodes(
lexer: Lexer,
open_kind: TokenKind,
parse_fn: Callable[[Lexer], Node],
close_kind: TokenKind,
) -> List[Node]:
"""Fetch matching nodes, at least one.
Returns a non-empty list of parse nodes, determined by the `parse_fn`.
This list begins with a lex token of `open_kind` and ends with a lex token of
`close_kind`. Advances the parser to the next lex token after the closing token.
"""
expect_token(lexer, open_kind)
nodes = [parse_fn(lexer)]
append = nodes.append
while not expect_optional_token(lexer, close_kind):
append(parse_fn(lexer))
return nodes
|
def function[many_nodes, parameter[lexer, open_kind, parse_fn, close_kind]]:
constant[Fetch matching nodes, at least one.
Returns a non-empty list of parse nodes, determined by the `parse_fn`.
This list begins with a lex token of `open_kind` and ends with a lex token of
`close_kind`. Advances the parser to the next lex token after the closing token.
]
call[name[expect_token], parameter[name[lexer], name[open_kind]]]
variable[nodes] assign[=] list[[<ast.Call object at 0x7da2054a6080>]]
variable[append] assign[=] name[nodes].append
while <ast.UnaryOp object at 0x7da1b1eedf00> begin[:]
call[name[append], parameter[call[name[parse_fn], parameter[name[lexer]]]]]
return[name[nodes]]
|
keyword[def] identifier[many_nodes] (
identifier[lexer] : identifier[Lexer] ,
identifier[open_kind] : identifier[TokenKind] ,
identifier[parse_fn] : identifier[Callable] [[ identifier[Lexer] ], identifier[Node] ],
identifier[close_kind] : identifier[TokenKind] ,
)-> identifier[List] [ identifier[Node] ]:
literal[string]
identifier[expect_token] ( identifier[lexer] , identifier[open_kind] )
identifier[nodes] =[ identifier[parse_fn] ( identifier[lexer] )]
identifier[append] = identifier[nodes] . identifier[append]
keyword[while] keyword[not] identifier[expect_optional_token] ( identifier[lexer] , identifier[close_kind] ):
identifier[append] ( identifier[parse_fn] ( identifier[lexer] ))
keyword[return] identifier[nodes]
|
def many_nodes(lexer: Lexer, open_kind: TokenKind, parse_fn: Callable[[Lexer], Node], close_kind: TokenKind) -> List[Node]:
"""Fetch matching nodes, at least one.
Returns a non-empty list of parse nodes, determined by the `parse_fn`.
This list begins with a lex token of `open_kind` and ends with a lex token of
`close_kind`. Advances the parser to the next lex token after the closing token.
"""
expect_token(lexer, open_kind)
nodes = [parse_fn(lexer)]
append = nodes.append
while not expect_optional_token(lexer, close_kind):
append(parse_fn(lexer)) # depends on [control=['while'], data=[]]
return nodes
|
def diff(ctx, branch):
"""
Determine which tests intersect a git diff.
"""
diff = GitDiffReporter(branch)
regions = diff.changed_intervals()
_report_from_regions(regions, ctx.obj, file_factory=diff.old_file)
|
def function[diff, parameter[ctx, branch]]:
constant[
Determine which tests intersect a git diff.
]
variable[diff] assign[=] call[name[GitDiffReporter], parameter[name[branch]]]
variable[regions] assign[=] call[name[diff].changed_intervals, parameter[]]
call[name[_report_from_regions], parameter[name[regions], name[ctx].obj]]
|
keyword[def] identifier[diff] ( identifier[ctx] , identifier[branch] ):
literal[string]
identifier[diff] = identifier[GitDiffReporter] ( identifier[branch] )
identifier[regions] = identifier[diff] . identifier[changed_intervals] ()
identifier[_report_from_regions] ( identifier[regions] , identifier[ctx] . identifier[obj] , identifier[file_factory] = identifier[diff] . identifier[old_file] )
|
def diff(ctx, branch):
"""
Determine which tests intersect a git diff.
"""
diff = GitDiffReporter(branch)
regions = diff.changed_intervals()
_report_from_regions(regions, ctx.obj, file_factory=diff.old_file)
|
def build(self):
"""Builds Discord embed GUI
Returns:
discord.Embed: Built GUI
"""
if self.colour:
embed = discord.Embed(
title=self.title,
type='rich',
description=self.description,
colour=self.colour)
else:
embed = discord.Embed(
title=self.title,
type='rich',
description=self.description)
if self.thumbnail:
embed.set_thumbnail(url=self.thumbnail)
if self.image:
embed.set_image(url=self.image)
embed.set_author(
name="Modis",
url="https://musicbyango.com/modis/",
icon_url="http://musicbyango.com/modis/dp/modis64t.png")
for pack in self.datapacks:
embed.add_field(
name=pack[0],
value=pack[1],
inline=pack[2]
)
return embed
|
def function[build, parameter[self]]:
constant[Builds Discord embed GUI
Returns:
discord.Embed: Built GUI
]
if name[self].colour begin[:]
variable[embed] assign[=] call[name[discord].Embed, parameter[]]
if name[self].thumbnail begin[:]
call[name[embed].set_thumbnail, parameter[]]
if name[self].image begin[:]
call[name[embed].set_image, parameter[]]
call[name[embed].set_author, parameter[]]
for taget[name[pack]] in starred[name[self].datapacks] begin[:]
call[name[embed].add_field, parameter[]]
return[name[embed]]
|
keyword[def] identifier[build] ( identifier[self] ):
literal[string]
keyword[if] identifier[self] . identifier[colour] :
identifier[embed] = identifier[discord] . identifier[Embed] (
identifier[title] = identifier[self] . identifier[title] ,
identifier[type] = literal[string] ,
identifier[description] = identifier[self] . identifier[description] ,
identifier[colour] = identifier[self] . identifier[colour] )
keyword[else] :
identifier[embed] = identifier[discord] . identifier[Embed] (
identifier[title] = identifier[self] . identifier[title] ,
identifier[type] = literal[string] ,
identifier[description] = identifier[self] . identifier[description] )
keyword[if] identifier[self] . identifier[thumbnail] :
identifier[embed] . identifier[set_thumbnail] ( identifier[url] = identifier[self] . identifier[thumbnail] )
keyword[if] identifier[self] . identifier[image] :
identifier[embed] . identifier[set_image] ( identifier[url] = identifier[self] . identifier[image] )
identifier[embed] . identifier[set_author] (
identifier[name] = literal[string] ,
identifier[url] = literal[string] ,
identifier[icon_url] = literal[string] )
keyword[for] identifier[pack] keyword[in] identifier[self] . identifier[datapacks] :
identifier[embed] . identifier[add_field] (
identifier[name] = identifier[pack] [ literal[int] ],
identifier[value] = identifier[pack] [ literal[int] ],
identifier[inline] = identifier[pack] [ literal[int] ]
)
keyword[return] identifier[embed]
|
def build(self):
"""Builds Discord embed GUI
Returns:
discord.Embed: Built GUI
"""
if self.colour:
embed = discord.Embed(title=self.title, type='rich', description=self.description, colour=self.colour) # depends on [control=['if'], data=[]]
else:
embed = discord.Embed(title=self.title, type='rich', description=self.description)
if self.thumbnail:
embed.set_thumbnail(url=self.thumbnail) # depends on [control=['if'], data=[]]
if self.image:
embed.set_image(url=self.image) # depends on [control=['if'], data=[]]
embed.set_author(name='Modis', url='https://musicbyango.com/modis/', icon_url='http://musicbyango.com/modis/dp/modis64t.png')
for pack in self.datapacks:
embed.add_field(name=pack[0], value=pack[1], inline=pack[2]) # depends on [control=['for'], data=['pack']]
return embed
|
def forward(ctx, x, dutyCycles, k, boostStrength):
"""
Use the boost strength to compute a boost factor for each unit represented
in x. These factors are used to increase the impact of each unit to improve
their chances of being chosen. This encourages participation of more columns
in the learning process. See :meth:`k_winners.forward` for more details.
:param ctx:
Place where we can store information we will need to compute the gradients
for the backward pass.
:param x:
Current activity of each unit.
:param dutyCycles:
The averaged duty cycle of each unit.
:param k:
The activity of the top k units will be allowed to remain, the rest are
set to zero.
:param boostStrength:
A boost strength of 0.0 has no effect on x.
:return:
A tensor representing the activity of x after k-winner take all.
"""
batchSize = x.shape[0]
if boostStrength > 0.0:
targetDensity = float(k) / (x.shape[1] * x.shape[2] * x.shape[3])
boostFactors = torch.exp((targetDensity - dutyCycles) * boostStrength)
boosted = x.detach() * boostFactors
else:
boosted = x.detach()
# Take the boosted version of the input x, find the top k winners.
# Compute an output that only contains the values of x corresponding to the top k
# boosted values. The rest of the elements in the output should be 0.
boosted = boosted.reshape((batchSize, -1))
xr = x.reshape((batchSize, -1))
res = torch.zeros_like(boosted)
topk, indices = boosted.topk(k, dim=1, sorted=False)
res.scatter_(1, indices, xr.gather(1, indices))
res = res.reshape(x.shape)
ctx.save_for_backward(indices)
return res
|
def function[forward, parameter[ctx, x, dutyCycles, k, boostStrength]]:
constant[
Use the boost strength to compute a boost factor for each unit represented
in x. These factors are used to increase the impact of each unit to improve
their chances of being chosen. This encourages participation of more columns
in the learning process. See :meth:`k_winners.forward` for more details.
:param ctx:
Place where we can store information we will need to compute the gradients
for the backward pass.
:param x:
Current activity of each unit.
:param dutyCycles:
The averaged duty cycle of each unit.
:param k:
The activity of the top k units will be allowed to remain, the rest are
set to zero.
:param boostStrength:
A boost strength of 0.0 has no effect on x.
:return:
A tensor representing the activity of x after k-winner take all.
]
variable[batchSize] assign[=] call[name[x].shape][constant[0]]
if compare[name[boostStrength] greater[>] constant[0.0]] begin[:]
variable[targetDensity] assign[=] binary_operation[call[name[float], parameter[name[k]]] / binary_operation[binary_operation[call[name[x].shape][constant[1]] * call[name[x].shape][constant[2]]] * call[name[x].shape][constant[3]]]]
variable[boostFactors] assign[=] call[name[torch].exp, parameter[binary_operation[binary_operation[name[targetDensity] - name[dutyCycles]] * name[boostStrength]]]]
variable[boosted] assign[=] binary_operation[call[name[x].detach, parameter[]] * name[boostFactors]]
variable[boosted] assign[=] call[name[boosted].reshape, parameter[tuple[[<ast.Name object at 0x7da1b086e410>, <ast.UnaryOp object at 0x7da1b086d3c0>]]]]
variable[xr] assign[=] call[name[x].reshape, parameter[tuple[[<ast.Name object at 0x7da1b086ded0>, <ast.UnaryOp object at 0x7da1b086fd90>]]]]
variable[res] assign[=] call[name[torch].zeros_like, parameter[name[boosted]]]
<ast.Tuple object at 0x7da1b09244c0> assign[=] call[name[boosted].topk, parameter[name[k]]]
call[name[res].scatter_, parameter[constant[1], name[indices], call[name[xr].gather, parameter[constant[1], name[indices]]]]]
variable[res] assign[=] call[name[res].reshape, parameter[name[x].shape]]
call[name[ctx].save_for_backward, parameter[name[indices]]]
return[name[res]]
|
keyword[def] identifier[forward] ( identifier[ctx] , identifier[x] , identifier[dutyCycles] , identifier[k] , identifier[boostStrength] ):
literal[string]
identifier[batchSize] = identifier[x] . identifier[shape] [ literal[int] ]
keyword[if] identifier[boostStrength] > literal[int] :
identifier[targetDensity] = identifier[float] ( identifier[k] )/( identifier[x] . identifier[shape] [ literal[int] ]* identifier[x] . identifier[shape] [ literal[int] ]* identifier[x] . identifier[shape] [ literal[int] ])
identifier[boostFactors] = identifier[torch] . identifier[exp] (( identifier[targetDensity] - identifier[dutyCycles] )* identifier[boostStrength] )
identifier[boosted] = identifier[x] . identifier[detach] ()* identifier[boostFactors]
keyword[else] :
identifier[boosted] = identifier[x] . identifier[detach] ()
identifier[boosted] = identifier[boosted] . identifier[reshape] (( identifier[batchSize] ,- literal[int] ))
identifier[xr] = identifier[x] . identifier[reshape] (( identifier[batchSize] ,- literal[int] ))
identifier[res] = identifier[torch] . identifier[zeros_like] ( identifier[boosted] )
identifier[topk] , identifier[indices] = identifier[boosted] . identifier[topk] ( identifier[k] , identifier[dim] = literal[int] , identifier[sorted] = keyword[False] )
identifier[res] . identifier[scatter_] ( literal[int] , identifier[indices] , identifier[xr] . identifier[gather] ( literal[int] , identifier[indices] ))
identifier[res] = identifier[res] . identifier[reshape] ( identifier[x] . identifier[shape] )
identifier[ctx] . identifier[save_for_backward] ( identifier[indices] )
keyword[return] identifier[res]
|
def forward(ctx, x, dutyCycles, k, boostStrength):
"""
Use the boost strength to compute a boost factor for each unit represented
in x. These factors are used to increase the impact of each unit to improve
their chances of being chosen. This encourages participation of more columns
in the learning process. See :meth:`k_winners.forward` for more details.
:param ctx:
Place where we can store information we will need to compute the gradients
for the backward pass.
:param x:
Current activity of each unit.
:param dutyCycles:
The averaged duty cycle of each unit.
:param k:
The activity of the top k units will be allowed to remain, the rest are
set to zero.
:param boostStrength:
A boost strength of 0.0 has no effect on x.
:return:
A tensor representing the activity of x after k-winner take all.
"""
batchSize = x.shape[0]
if boostStrength > 0.0:
targetDensity = float(k) / (x.shape[1] * x.shape[2] * x.shape[3])
boostFactors = torch.exp((targetDensity - dutyCycles) * boostStrength)
boosted = x.detach() * boostFactors # depends on [control=['if'], data=['boostStrength']]
else:
boosted = x.detach()
# Take the boosted version of the input x, find the top k winners.
# Compute an output that only contains the values of x corresponding to the top k
# boosted values. The rest of the elements in the output should be 0.
boosted = boosted.reshape((batchSize, -1))
xr = x.reshape((batchSize, -1))
res = torch.zeros_like(boosted)
(topk, indices) = boosted.topk(k, dim=1, sorted=False)
res.scatter_(1, indices, xr.gather(1, indices))
res = res.reshape(x.shape)
ctx.save_for_backward(indices)
return res
|
def _start_instance(self):
"""
Start the instance.
"""
try:
vm_start = self.compute.virtual_machines.start(
self.running_instance_id, self.running_instance_id
)
except Exception as error:
raise AzureCloudException(
'Unable to start instance: {0}.'.format(error)
)
vm_start.wait()
|
def function[_start_instance, parameter[self]]:
constant[
Start the instance.
]
<ast.Try object at 0x7da1b1a22da0>
call[name[vm_start].wait, parameter[]]
|
keyword[def] identifier[_start_instance] ( identifier[self] ):
literal[string]
keyword[try] :
identifier[vm_start] = identifier[self] . identifier[compute] . identifier[virtual_machines] . identifier[start] (
identifier[self] . identifier[running_instance_id] , identifier[self] . identifier[running_instance_id]
)
keyword[except] identifier[Exception] keyword[as] identifier[error] :
keyword[raise] identifier[AzureCloudException] (
literal[string] . identifier[format] ( identifier[error] )
)
identifier[vm_start] . identifier[wait] ()
|
def _start_instance(self):
"""
Start the instance.
"""
try:
vm_start = self.compute.virtual_machines.start(self.running_instance_id, self.running_instance_id) # depends on [control=['try'], data=[]]
except Exception as error:
raise AzureCloudException('Unable to start instance: {0}.'.format(error)) # depends on [control=['except'], data=['error']]
vm_start.wait()
|
def barracks_in_middle(self) -> Point2:
""" Barracks position in the middle of the 2 depots """
if len(self.upper2_for_ramp_wall) == 2:
points = self.upper2_for_ramp_wall
p1 = points.pop().offset((self.x_offset, self.y_offset))
p2 = points.pop().offset((self.x_offset, self.y_offset))
# Offset from top point to barracks center is (2, 1)
intersects = p1.circle_intersection(p2, 5 ** 0.5)
anyLowerPoint = next(iter(self.lower))
return max(intersects, key=lambda p: p.distance_to(anyLowerPoint))
raise Exception("Not implemented. Trying to access a ramp that has a wrong amount of upper points.")
|
def function[barracks_in_middle, parameter[self]]:
constant[ Barracks position in the middle of the 2 depots ]
if compare[call[name[len], parameter[name[self].upper2_for_ramp_wall]] equal[==] constant[2]] begin[:]
variable[points] assign[=] name[self].upper2_for_ramp_wall
variable[p1] assign[=] call[call[name[points].pop, parameter[]].offset, parameter[tuple[[<ast.Attribute object at 0x7da18bc739d0>, <ast.Attribute object at 0x7da18bc70640>]]]]
variable[p2] assign[=] call[call[name[points].pop, parameter[]].offset, parameter[tuple[[<ast.Attribute object at 0x7da18bc71900>, <ast.Attribute object at 0x7da18bc70580>]]]]
variable[intersects] assign[=] call[name[p1].circle_intersection, parameter[name[p2], binary_operation[constant[5] ** constant[0.5]]]]
variable[anyLowerPoint] assign[=] call[name[next], parameter[call[name[iter], parameter[name[self].lower]]]]
return[call[name[max], parameter[name[intersects]]]]
<ast.Raise object at 0x7da18bc71000>
|
keyword[def] identifier[barracks_in_middle] ( identifier[self] )-> identifier[Point2] :
literal[string]
keyword[if] identifier[len] ( identifier[self] . identifier[upper2_for_ramp_wall] )== literal[int] :
identifier[points] = identifier[self] . identifier[upper2_for_ramp_wall]
identifier[p1] = identifier[points] . identifier[pop] (). identifier[offset] (( identifier[self] . identifier[x_offset] , identifier[self] . identifier[y_offset] ))
identifier[p2] = identifier[points] . identifier[pop] (). identifier[offset] (( identifier[self] . identifier[x_offset] , identifier[self] . identifier[y_offset] ))
identifier[intersects] = identifier[p1] . identifier[circle_intersection] ( identifier[p2] , literal[int] ** literal[int] )
identifier[anyLowerPoint] = identifier[next] ( identifier[iter] ( identifier[self] . identifier[lower] ))
keyword[return] identifier[max] ( identifier[intersects] , identifier[key] = keyword[lambda] identifier[p] : identifier[p] . identifier[distance_to] ( identifier[anyLowerPoint] ))
keyword[raise] identifier[Exception] ( literal[string] )
|
def barracks_in_middle(self) -> Point2:
""" Barracks position in the middle of the 2 depots """
if len(self.upper2_for_ramp_wall) == 2:
points = self.upper2_for_ramp_wall
p1 = points.pop().offset((self.x_offset, self.y_offset))
p2 = points.pop().offset((self.x_offset, self.y_offset))
# Offset from top point to barracks center is (2, 1)
intersects = p1.circle_intersection(p2, 5 ** 0.5)
anyLowerPoint = next(iter(self.lower))
return max(intersects, key=lambda p: p.distance_to(anyLowerPoint)) # depends on [control=['if'], data=[]]
raise Exception('Not implemented. Trying to access a ramp that has a wrong amount of upper points.')
|
def get_ref(self, reftype, index):
"""Retrieve a reference."""
try:
got_reftype, data = self.refs[int(index)]
except (IndexError, ValueError):
raise CoconutInternalException("no reference at invalid index", index)
internal_assert(got_reftype == reftype, "wanted " + reftype + " reference; got " + got_reftype + " reference")
return data
|
def function[get_ref, parameter[self, reftype, index]]:
constant[Retrieve a reference.]
<ast.Try object at 0x7da1b0912830>
call[name[internal_assert], parameter[compare[name[got_reftype] equal[==] name[reftype]], binary_operation[binary_operation[binary_operation[binary_operation[constant[wanted ] + name[reftype]] + constant[ reference; got ]] + name[got_reftype]] + constant[ reference]]]]
return[name[data]]
|
keyword[def] identifier[get_ref] ( identifier[self] , identifier[reftype] , identifier[index] ):
literal[string]
keyword[try] :
identifier[got_reftype] , identifier[data] = identifier[self] . identifier[refs] [ identifier[int] ( identifier[index] )]
keyword[except] ( identifier[IndexError] , identifier[ValueError] ):
keyword[raise] identifier[CoconutInternalException] ( literal[string] , identifier[index] )
identifier[internal_assert] ( identifier[got_reftype] == identifier[reftype] , literal[string] + identifier[reftype] + literal[string] + identifier[got_reftype] + literal[string] )
keyword[return] identifier[data]
|
def get_ref(self, reftype, index):
"""Retrieve a reference."""
try:
(got_reftype, data) = self.refs[int(index)] # depends on [control=['try'], data=[]]
except (IndexError, ValueError):
raise CoconutInternalException('no reference at invalid index', index) # depends on [control=['except'], data=[]]
internal_assert(got_reftype == reftype, 'wanted ' + reftype + ' reference; got ' + got_reftype + ' reference')
return data
|
def _pnpoly(x, y, coords):
"""
the algorithm to judge whether the point is located in polygon
reference: https://www.ecse.rpi.edu/~wrf/Research/Short_Notes/pnpoly.html#Explanation
"""
vert = [[0, 0]]
for coord in coords:
for node in coord:
vert.append(node)
vert.append(coord[0])
vert.append([0, 0])
inside = False
i = 0
j = len(vert) - 1
while i < len(vert):
if ((vert[i][0] > y) != (vert[j][0] > y)) and (x < (vert[j][1] - vert[i][1])
* (y - vert[i][0]) / (vert[j][0] - vert[i][0]) + vert[i][1]):
inside = not inside
j = i
i += 1
return inside
|
def function[_pnpoly, parameter[x, y, coords]]:
constant[
the algorithm to judge whether the point is located in polygon
reference: https://www.ecse.rpi.edu/~wrf/Research/Short_Notes/pnpoly.html#Explanation
]
variable[vert] assign[=] list[[<ast.List object at 0x7da1b1019c90>]]
for taget[name[coord]] in starred[name[coords]] begin[:]
for taget[name[node]] in starred[name[coord]] begin[:]
call[name[vert].append, parameter[name[node]]]
call[name[vert].append, parameter[call[name[coord]][constant[0]]]]
call[name[vert].append, parameter[list[[<ast.Constant object at 0x7da1b1019a50>, <ast.Constant object at 0x7da1b101a1a0>]]]]
variable[inside] assign[=] constant[False]
variable[i] assign[=] constant[0]
variable[j] assign[=] binary_operation[call[name[len], parameter[name[vert]]] - constant[1]]
while compare[name[i] less[<] call[name[len], parameter[name[vert]]]] begin[:]
if <ast.BoolOp object at 0x7da1b1006950> begin[:]
variable[inside] assign[=] <ast.UnaryOp object at 0x7da1b1005810>
variable[j] assign[=] name[i]
<ast.AugAssign object at 0x7da1b10068f0>
return[name[inside]]
|
keyword[def] identifier[_pnpoly] ( identifier[x] , identifier[y] , identifier[coords] ):
literal[string]
identifier[vert] =[[ literal[int] , literal[int] ]]
keyword[for] identifier[coord] keyword[in] identifier[coords] :
keyword[for] identifier[node] keyword[in] identifier[coord] :
identifier[vert] . identifier[append] ( identifier[node] )
identifier[vert] . identifier[append] ( identifier[coord] [ literal[int] ])
identifier[vert] . identifier[append] ([ literal[int] , literal[int] ])
identifier[inside] = keyword[False]
identifier[i] = literal[int]
identifier[j] = identifier[len] ( identifier[vert] )- literal[int]
keyword[while] identifier[i] < identifier[len] ( identifier[vert] ):
keyword[if] (( identifier[vert] [ identifier[i] ][ literal[int] ]> identifier[y] )!=( identifier[vert] [ identifier[j] ][ literal[int] ]> identifier[y] )) keyword[and] ( identifier[x] <( identifier[vert] [ identifier[j] ][ literal[int] ]- identifier[vert] [ identifier[i] ][ literal[int] ])
*( identifier[y] - identifier[vert] [ identifier[i] ][ literal[int] ])/( identifier[vert] [ identifier[j] ][ literal[int] ]- identifier[vert] [ identifier[i] ][ literal[int] ])+ identifier[vert] [ identifier[i] ][ literal[int] ]):
identifier[inside] = keyword[not] identifier[inside]
identifier[j] = identifier[i]
identifier[i] += literal[int]
keyword[return] identifier[inside]
|
def _pnpoly(x, y, coords):
"""
the algorithm to judge whether the point is located in polygon
reference: https://www.ecse.rpi.edu/~wrf/Research/Short_Notes/pnpoly.html#Explanation
"""
vert = [[0, 0]]
for coord in coords:
for node in coord:
vert.append(node) # depends on [control=['for'], data=['node']]
vert.append(coord[0])
vert.append([0, 0]) # depends on [control=['for'], data=['coord']]
inside = False
i = 0
j = len(vert) - 1
while i < len(vert):
if (vert[i][0] > y) != (vert[j][0] > y) and x < (vert[j][1] - vert[i][1]) * (y - vert[i][0]) / (vert[j][0] - vert[i][0]) + vert[i][1]:
inside = not inside # depends on [control=['if'], data=[]]
j = i
i += 1 # depends on [control=['while'], data=['i']]
return inside
|
def subdivide(self, points_per_edge):
"""
Adds ``N`` interpolated points with uniform spacing to each edge.
For each edge between points ``A`` and ``B`` this adds points
at ``A + (i/(1+N)) * (B - A)``, where ``i`` is the index of the added
point and ``N`` is the number of points to add per edge.
Calling this method two times will split each edge at its center
and then again split each newly created edge at their center.
It is equivalent to calling `subdivide(3)`.
Parameters
----------
points_per_edge : int
Number of points to interpolate on each edge.
Returns
-------
LineString
Line string with subdivided edges.
"""
if len(self.coords) <= 1 or points_per_edge < 1:
return self.deepcopy()
coords = interpolate_points(self.coords, nb_steps=points_per_edge,
closed=False)
return self.deepcopy(coords=coords)
|
def function[subdivide, parameter[self, points_per_edge]]:
constant[
Adds ``N`` interpolated points with uniform spacing to each edge.
For each edge between points ``A`` and ``B`` this adds points
at ``A + (i/(1+N)) * (B - A)``, where ``i`` is the index of the added
point and ``N`` is the number of points to add per edge.
Calling this method two times will split each edge at its center
and then again split each newly created edge at their center.
It is equivalent to calling `subdivide(3)`.
Parameters
----------
points_per_edge : int
Number of points to interpolate on each edge.
Returns
-------
LineString
Line string with subdivided edges.
]
if <ast.BoolOp object at 0x7da1b0159ba0> begin[:]
return[call[name[self].deepcopy, parameter[]]]
variable[coords] assign[=] call[name[interpolate_points], parameter[name[self].coords]]
return[call[name[self].deepcopy, parameter[]]]
|
keyword[def] identifier[subdivide] ( identifier[self] , identifier[points_per_edge] ):
literal[string]
keyword[if] identifier[len] ( identifier[self] . identifier[coords] )<= literal[int] keyword[or] identifier[points_per_edge] < literal[int] :
keyword[return] identifier[self] . identifier[deepcopy] ()
identifier[coords] = identifier[interpolate_points] ( identifier[self] . identifier[coords] , identifier[nb_steps] = identifier[points_per_edge] ,
identifier[closed] = keyword[False] )
keyword[return] identifier[self] . identifier[deepcopy] ( identifier[coords] = identifier[coords] )
|
def subdivide(self, points_per_edge):
"""
Adds ``N`` interpolated points with uniform spacing to each edge.
For each edge between points ``A`` and ``B`` this adds points
at ``A + (i/(1+N)) * (B - A)``, where ``i`` is the index of the added
point and ``N`` is the number of points to add per edge.
Calling this method two times will split each edge at its center
and then again split each newly created edge at their center.
It is equivalent to calling `subdivide(3)`.
Parameters
----------
points_per_edge : int
Number of points to interpolate on each edge.
Returns
-------
LineString
Line string with subdivided edges.
"""
if len(self.coords) <= 1 or points_per_edge < 1:
return self.deepcopy() # depends on [control=['if'], data=[]]
coords = interpolate_points(self.coords, nb_steps=points_per_edge, closed=False)
return self.deepcopy(coords=coords)
|
def disambiguate_ip_address(ip, location=None):
"""turn multi-ip interfaces '0.0.0.0' and '*' into connectable
ones, based on the location (default interpretation of location is localhost)."""
if ip in ('0.0.0.0', '*'):
try:
external_ips = socket.gethostbyname_ex(socket.gethostname())[2]
except (socket.gaierror, IndexError):
# couldn't identify this machine, assume localhost
external_ips = []
if location is None or location in external_ips or not external_ips:
# If location is unspecified or cannot be determined, assume local
ip='127.0.0.1'
elif location:
return location
return ip
|
def function[disambiguate_ip_address, parameter[ip, location]]:
constant[turn multi-ip interfaces '0.0.0.0' and '*' into connectable
ones, based on the location (default interpretation of location is localhost).]
if compare[name[ip] in tuple[[<ast.Constant object at 0x7da18fe91540>, <ast.Constant object at 0x7da18fe93a90>]]] begin[:]
<ast.Try object at 0x7da18fe924d0>
if <ast.BoolOp object at 0x7da18fe92530> begin[:]
variable[ip] assign[=] constant[127.0.0.1]
return[name[ip]]
|
keyword[def] identifier[disambiguate_ip_address] ( identifier[ip] , identifier[location] = keyword[None] ):
literal[string]
keyword[if] identifier[ip] keyword[in] ( literal[string] , literal[string] ):
keyword[try] :
identifier[external_ips] = identifier[socket] . identifier[gethostbyname_ex] ( identifier[socket] . identifier[gethostname] ())[ literal[int] ]
keyword[except] ( identifier[socket] . identifier[gaierror] , identifier[IndexError] ):
identifier[external_ips] =[]
keyword[if] identifier[location] keyword[is] keyword[None] keyword[or] identifier[location] keyword[in] identifier[external_ips] keyword[or] keyword[not] identifier[external_ips] :
identifier[ip] = literal[string]
keyword[elif] identifier[location] :
keyword[return] identifier[location]
keyword[return] identifier[ip]
|
def disambiguate_ip_address(ip, location=None):
"""turn multi-ip interfaces '0.0.0.0' and '*' into connectable
ones, based on the location (default interpretation of location is localhost)."""
if ip in ('0.0.0.0', '*'):
try:
external_ips = socket.gethostbyname_ex(socket.gethostname())[2] # depends on [control=['try'], data=[]]
except (socket.gaierror, IndexError):
# couldn't identify this machine, assume localhost
external_ips = [] # depends on [control=['except'], data=[]]
if location is None or location in external_ips or (not external_ips):
# If location is unspecified or cannot be determined, assume local
ip = '127.0.0.1' # depends on [control=['if'], data=[]]
elif location:
return location # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['ip']]
return ip
|
def remove_pod(self, pod, array, **kwargs):
"""Remove arrays from a pod.
:param pod: Name of the pod.
:type pod: str
:param array: Array to remove from pod.
:type array: str
:param \*\*kwargs: See the REST API Guide on your array for the
documentation on the request:
**DELETE pod/:pod**/array/:array**
:type \*\*kwargs: optional
:returns: A dictionary mapping "name" to pod and "array" to the pod's
new array list.
:rtype: ResponseDict
.. note::
Requires use of REST API 1.13 or later.
"""
return self._request("DELETE", "pod/{0}/array/{1}".format(pod, array), kwargs)
|
def function[remove_pod, parameter[self, pod, array]]:
constant[Remove arrays from a pod.
:param pod: Name of the pod.
:type pod: str
:param array: Array to remove from pod.
:type array: str
:param \*\*kwargs: See the REST API Guide on your array for the
documentation on the request:
**DELETE pod/:pod**/array/:array**
:type \*\*kwargs: optional
:returns: A dictionary mapping "name" to pod and "array" to the pod's
new array list.
:rtype: ResponseDict
.. note::
Requires use of REST API 1.13 or later.
]
return[call[name[self]._request, parameter[constant[DELETE], call[constant[pod/{0}/array/{1}].format, parameter[name[pod], name[array]]], name[kwargs]]]]
|
keyword[def] identifier[remove_pod] ( identifier[self] , identifier[pod] , identifier[array] ,** identifier[kwargs] ):
literal[string]
keyword[return] identifier[self] . identifier[_request] ( literal[string] , literal[string] . identifier[format] ( identifier[pod] , identifier[array] ), identifier[kwargs] )
|
def remove_pod(self, pod, array, **kwargs):
"""Remove arrays from a pod.
:param pod: Name of the pod.
:type pod: str
:param array: Array to remove from pod.
:type array: str
:param \\*\\*kwargs: See the REST API Guide on your array for the
documentation on the request:
**DELETE pod/:pod**/array/:array**
:type \\*\\*kwargs: optional
:returns: A dictionary mapping "name" to pod and "array" to the pod's
new array list.
:rtype: ResponseDict
.. note::
Requires use of REST API 1.13 or later.
"""
return self._request('DELETE', 'pod/{0}/array/{1}'.format(pod, array), kwargs)
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.