code stringlengths 75 104k | code_sememe stringlengths 47 309k | token_type stringlengths 215 214k | code_dependency stringlengths 75 155k |
|---|---|---|---|
def parse_table_column_properties(doc, cell, prop):
"Parse table column properties."
if not cell:
return
grid = prop.find(_name('{{{w}}}gridSpan'))
if grid is not None:
cell.grid_span = int(grid.attrib[_name('{{{w}}}val')])
vmerge = prop.find(_name('{{{w}}}vMerge'))
if vmerge is not None:
if _name('{{{w}}}val') in vmerge.attrib:
cell.vmerge = vmerge.attrib[_name('{{{w}}}val')]
else:
cell.vmerge = "" | def function[parse_table_column_properties, parameter[doc, cell, prop]]:
constant[Parse table column properties.]
if <ast.UnaryOp object at 0x7da18f811090> begin[:]
return[None]
variable[grid] assign[=] call[name[prop].find, parameter[call[name[_name], parameter[constant[{{{w}}}gridSpan]]]]]
if compare[name[grid] is_not constant[None]] begin[:]
name[cell].grid_span assign[=] call[name[int], parameter[call[name[grid].attrib][call[name[_name], parameter[constant[{{{w}}}val]]]]]]
variable[vmerge] assign[=] call[name[prop].find, parameter[call[name[_name], parameter[constant[{{{w}}}vMerge]]]]]
if compare[name[vmerge] is_not constant[None]] begin[:]
if compare[call[name[_name], parameter[constant[{{{w}}}val]]] in name[vmerge].attrib] begin[:]
name[cell].vmerge assign[=] call[name[vmerge].attrib][call[name[_name], parameter[constant[{{{w}}}val]]]] | keyword[def] identifier[parse_table_column_properties] ( identifier[doc] , identifier[cell] , identifier[prop] ):
literal[string]
keyword[if] keyword[not] identifier[cell] :
keyword[return]
identifier[grid] = identifier[prop] . identifier[find] ( identifier[_name] ( literal[string] ))
keyword[if] identifier[grid] keyword[is] keyword[not] keyword[None] :
identifier[cell] . identifier[grid_span] = identifier[int] ( identifier[grid] . identifier[attrib] [ identifier[_name] ( literal[string] )])
identifier[vmerge] = identifier[prop] . identifier[find] ( identifier[_name] ( literal[string] ))
keyword[if] identifier[vmerge] keyword[is] keyword[not] keyword[None] :
keyword[if] identifier[_name] ( literal[string] ) keyword[in] identifier[vmerge] . identifier[attrib] :
identifier[cell] . identifier[vmerge] = identifier[vmerge] . identifier[attrib] [ identifier[_name] ( literal[string] )]
keyword[else] :
identifier[cell] . identifier[vmerge] = literal[string] | def parse_table_column_properties(doc, cell, prop):
"""Parse table column properties."""
if not cell:
return # depends on [control=['if'], data=[]]
grid = prop.find(_name('{{{w}}}gridSpan'))
if grid is not None:
cell.grid_span = int(grid.attrib[_name('{{{w}}}val')]) # depends on [control=['if'], data=['grid']]
vmerge = prop.find(_name('{{{w}}}vMerge'))
if vmerge is not None:
if _name('{{{w}}}val') in vmerge.attrib:
cell.vmerge = vmerge.attrib[_name('{{{w}}}val')] # depends on [control=['if'], data=[]]
else:
cell.vmerge = '' # depends on [control=['if'], data=['vmerge']] |
def createRoles(self, configFiles, dateTimeFormat=None):
"""Parses a JSON configuration file to create roles.
Args:
configFiles (list): A list of JSON files on disk containing
configuration data for creating roles.
dateTimeFormat (str): A valid date formatting directive, as understood
by :py:meth:`datetime.datetime.strftime`. Defaults to ``None``, i.e.,
``'%Y-%m-%d %H:%M'``.
"""
if dateTimeFormat is None:
dateTimeFormat = '%Y-%m-%d %H:%M'
scriptStartTime = datetime.datetime.now()
try:
print ("********************Create Roles********************")
print ("Script started at %s" % scriptStartTime.strftime(dateTimeFormat))
if self.securityhandler.valid == False:
print ("Login required")
else:
orgTools = orgtools.orgtools(securityinfo=self)
if orgTools is None:
print ("Error creating orgtools")
else:
for configFile in configFiles:
config = common.init_config_json(config_file=configFile)
if config is not None:
startTime = datetime.datetime.now()
print ("Processing config %s, starting at: %s" % (configFile,startTime.strftime(dateTimeFormat)))
roleInfos = config['Roles']
for roleInfo in roleInfos:
createRoleResults = orgTools.createRole(roleInfo['Name'],roleInfo['Description'],roleInfo['Privileges'])
else:
print ("Config %s not found" % configFile)
except(TypeError,ValueError,AttributeError) as e:
print (e)
except (common.ArcRestHelperError) as e:
print ("error in function: %s" % e[0]['function'])
print ("error on line: %s" % e[0]['line'])
print ("error in file name: %s" % e[0]['filename'])
print ("with error message: %s" % e[0]['synerror'])
if 'arcpyError' in e[0]:
print ("with arcpy message: %s" % e[0]['arcpyError'])
except Exception as e:
if (reportToolsInstalled):
if isinstance(e,(ReportTools.ReportToolsError,DataPrep.DataPrepError)):
print ("error in function: %s" % e[0]['function'])
print ("error on line: %s" % e[0]['line'])
print ("error in file name: %s" % e[0]['filename'])
print ("with error message: %s" % e[0]['synerror'])
if 'arcpyError' in e[0]:
print ("with arcpy message: %s" % e[0]['arcpyError'])
else:
line, filename, synerror = trace()
print ("error on line: %s" % line)
print ("error in file name: %s" % filename)
print ("with error message: %s" % synerror)
else:
line, filename, synerror = trace()
print ("error on line: %s" % line)
print ("error in file name: %s" % filename)
print ("with error message: %s" % synerror)
finally:
print ("Script complete, time to complete: %s" % str(datetime.datetime.now() - scriptStartTime))
print ("###############Create Groups Completed#################")
print ("")
#if orgTools is not None:
#orgTools.dispose()
groupInfo = None
groupFile = None
iconPath = None
startTime = None
thumbnail = None
result = None
config = None
sciptPath = None
orgTools = None
del groupInfo
del groupFile
del iconPath
del startTime
del thumbnail
del result
del config
del sciptPath
del orgTools
gc.collect() | def function[createRoles, parameter[self, configFiles, dateTimeFormat]]:
constant[Parses a JSON configuration file to create roles.
Args:
configFiles (list): A list of JSON files on disk containing
configuration data for creating roles.
dateTimeFormat (str): A valid date formatting directive, as understood
by :py:meth:`datetime.datetime.strftime`. Defaults to ``None``, i.e.,
``'%Y-%m-%d %H:%M'``.
]
if compare[name[dateTimeFormat] is constant[None]] begin[:]
variable[dateTimeFormat] assign[=] constant[%Y-%m-%d %H:%M]
variable[scriptStartTime] assign[=] call[name[datetime].datetime.now, parameter[]]
<ast.Try object at 0x7da1b12891b0> | keyword[def] identifier[createRoles] ( identifier[self] , identifier[configFiles] , identifier[dateTimeFormat] = keyword[None] ):
literal[string]
keyword[if] identifier[dateTimeFormat] keyword[is] keyword[None] :
identifier[dateTimeFormat] = literal[string]
identifier[scriptStartTime] = identifier[datetime] . identifier[datetime] . identifier[now] ()
keyword[try] :
identifier[print] ( literal[string] )
identifier[print] ( literal[string] % identifier[scriptStartTime] . identifier[strftime] ( identifier[dateTimeFormat] ))
keyword[if] identifier[self] . identifier[securityhandler] . identifier[valid] == keyword[False] :
identifier[print] ( literal[string] )
keyword[else] :
identifier[orgTools] = identifier[orgtools] . identifier[orgtools] ( identifier[securityinfo] = identifier[self] )
keyword[if] identifier[orgTools] keyword[is] keyword[None] :
identifier[print] ( literal[string] )
keyword[else] :
keyword[for] identifier[configFile] keyword[in] identifier[configFiles] :
identifier[config] = identifier[common] . identifier[init_config_json] ( identifier[config_file] = identifier[configFile] )
keyword[if] identifier[config] keyword[is] keyword[not] keyword[None] :
identifier[startTime] = identifier[datetime] . identifier[datetime] . identifier[now] ()
identifier[print] ( literal[string] %( identifier[configFile] , identifier[startTime] . identifier[strftime] ( identifier[dateTimeFormat] )))
identifier[roleInfos] = identifier[config] [ literal[string] ]
keyword[for] identifier[roleInfo] keyword[in] identifier[roleInfos] :
identifier[createRoleResults] = identifier[orgTools] . identifier[createRole] ( identifier[roleInfo] [ literal[string] ], identifier[roleInfo] [ literal[string] ], identifier[roleInfo] [ literal[string] ])
keyword[else] :
identifier[print] ( literal[string] % identifier[configFile] )
keyword[except] ( identifier[TypeError] , identifier[ValueError] , identifier[AttributeError] ) keyword[as] identifier[e] :
identifier[print] ( identifier[e] )
keyword[except] ( identifier[common] . identifier[ArcRestHelperError] ) keyword[as] identifier[e] :
identifier[print] ( literal[string] % identifier[e] [ literal[int] ][ literal[string] ])
identifier[print] ( literal[string] % identifier[e] [ literal[int] ][ literal[string] ])
identifier[print] ( literal[string] % identifier[e] [ literal[int] ][ literal[string] ])
identifier[print] ( literal[string] % identifier[e] [ literal[int] ][ literal[string] ])
keyword[if] literal[string] keyword[in] identifier[e] [ literal[int] ]:
identifier[print] ( literal[string] % identifier[e] [ literal[int] ][ literal[string] ])
keyword[except] identifier[Exception] keyword[as] identifier[e] :
keyword[if] ( identifier[reportToolsInstalled] ):
keyword[if] identifier[isinstance] ( identifier[e] ,( identifier[ReportTools] . identifier[ReportToolsError] , identifier[DataPrep] . identifier[DataPrepError] )):
identifier[print] ( literal[string] % identifier[e] [ literal[int] ][ literal[string] ])
identifier[print] ( literal[string] % identifier[e] [ literal[int] ][ literal[string] ])
identifier[print] ( literal[string] % identifier[e] [ literal[int] ][ literal[string] ])
identifier[print] ( literal[string] % identifier[e] [ literal[int] ][ literal[string] ])
keyword[if] literal[string] keyword[in] identifier[e] [ literal[int] ]:
identifier[print] ( literal[string] % identifier[e] [ literal[int] ][ literal[string] ])
keyword[else] :
identifier[line] , identifier[filename] , identifier[synerror] = identifier[trace] ()
identifier[print] ( literal[string] % identifier[line] )
identifier[print] ( literal[string] % identifier[filename] )
identifier[print] ( literal[string] % identifier[synerror] )
keyword[else] :
identifier[line] , identifier[filename] , identifier[synerror] = identifier[trace] ()
identifier[print] ( literal[string] % identifier[line] )
identifier[print] ( literal[string] % identifier[filename] )
identifier[print] ( literal[string] % identifier[synerror] )
keyword[finally] :
identifier[print] ( literal[string] % identifier[str] ( identifier[datetime] . identifier[datetime] . identifier[now] ()- identifier[scriptStartTime] ))
identifier[print] ( literal[string] )
identifier[print] ( literal[string] )
identifier[groupInfo] = keyword[None]
identifier[groupFile] = keyword[None]
identifier[iconPath] = keyword[None]
identifier[startTime] = keyword[None]
identifier[thumbnail] = keyword[None]
identifier[result] = keyword[None]
identifier[config] = keyword[None]
identifier[sciptPath] = keyword[None]
identifier[orgTools] = keyword[None]
keyword[del] identifier[groupInfo]
keyword[del] identifier[groupFile]
keyword[del] identifier[iconPath]
keyword[del] identifier[startTime]
keyword[del] identifier[thumbnail]
keyword[del] identifier[result]
keyword[del] identifier[config]
keyword[del] identifier[sciptPath]
keyword[del] identifier[orgTools]
identifier[gc] . identifier[collect] () | def createRoles(self, configFiles, dateTimeFormat=None):
"""Parses a JSON configuration file to create roles.
Args:
configFiles (list): A list of JSON files on disk containing
configuration data for creating roles.
dateTimeFormat (str): A valid date formatting directive, as understood
by :py:meth:`datetime.datetime.strftime`. Defaults to ``None``, i.e.,
``'%Y-%m-%d %H:%M'``.
"""
if dateTimeFormat is None:
dateTimeFormat = '%Y-%m-%d %H:%M' # depends on [control=['if'], data=['dateTimeFormat']]
scriptStartTime = datetime.datetime.now()
try:
print('********************Create Roles********************')
print('Script started at %s' % scriptStartTime.strftime(dateTimeFormat))
if self.securityhandler.valid == False:
print('Login required') # depends on [control=['if'], data=[]]
else:
orgTools = orgtools.orgtools(securityinfo=self)
if orgTools is None:
print('Error creating orgtools') # depends on [control=['if'], data=[]]
else:
for configFile in configFiles:
config = common.init_config_json(config_file=configFile)
if config is not None:
startTime = datetime.datetime.now()
print('Processing config %s, starting at: %s' % (configFile, startTime.strftime(dateTimeFormat)))
roleInfos = config['Roles']
for roleInfo in roleInfos:
createRoleResults = orgTools.createRole(roleInfo['Name'], roleInfo['Description'], roleInfo['Privileges']) # depends on [control=['for'], data=['roleInfo']] # depends on [control=['if'], data=['config']]
else:
print('Config %s not found' % configFile) # depends on [control=['for'], data=['configFile']] # depends on [control=['try'], data=[]]
except (TypeError, ValueError, AttributeError) as e:
print(e) # depends on [control=['except'], data=['e']]
except common.ArcRestHelperError as e:
print('error in function: %s' % e[0]['function'])
print('error on line: %s' % e[0]['line'])
print('error in file name: %s' % e[0]['filename'])
print('with error message: %s' % e[0]['synerror'])
if 'arcpyError' in e[0]:
print('with arcpy message: %s' % e[0]['arcpyError']) # depends on [control=['if'], data=[]] # depends on [control=['except'], data=['e']]
except Exception as e:
if reportToolsInstalled:
if isinstance(e, (ReportTools.ReportToolsError, DataPrep.DataPrepError)):
print('error in function: %s' % e[0]['function'])
print('error on line: %s' % e[0]['line'])
print('error in file name: %s' % e[0]['filename'])
print('with error message: %s' % e[0]['synerror'])
if 'arcpyError' in e[0]:
print('with arcpy message: %s' % e[0]['arcpyError']) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
else:
(line, filename, synerror) = trace()
print('error on line: %s' % line)
print('error in file name: %s' % filename)
print('with error message: %s' % synerror) # depends on [control=['if'], data=[]]
else:
(line, filename, synerror) = trace()
print('error on line: %s' % line)
print('error in file name: %s' % filename)
print('with error message: %s' % synerror) # depends on [control=['except'], data=['e']]
finally:
print('Script complete, time to complete: %s' % str(datetime.datetime.now() - scriptStartTime))
print('###############Create Groups Completed#################')
print('')
#if orgTools is not None:
#orgTools.dispose()
groupInfo = None
groupFile = None
iconPath = None
startTime = None
thumbnail = None
result = None
config = None
sciptPath = None
orgTools = None
del groupInfo
del groupFile
del iconPath
del startTime
del thumbnail
del result
del config
del sciptPath
del orgTools
gc.collect() |
def _get_tunnel_context_mask(address_translations=False,
internal_subnets=False,
remote_subnets=False,
static_subnets=False,
service_subnets=False):
"""Yields a mask object for a tunnel context.
All exposed properties on the tunnel context service are included in
the constructed mask. Additional joins may be requested.
:param bool address_translations: Whether to join the context's address
translation entries.
:param bool internal_subnets: Whether to join the context's internal
subnet associations.
:param bool remote_subnets: Whether to join the context's remote subnet
associations.
:param bool static_subnets: Whether to join the context's statically
routed subnet associations.
:param bool service_subnets: Whether to join the SoftLayer service
network subnets.
:return string: Encoding for the requested mask object.
"""
entries = ['id',
'accountId',
'advancedConfigurationFlag',
'createDate',
'customerPeerIpAddress',
'modifyDate',
'name',
'friendlyName',
'internalPeerIpAddress',
'phaseOneAuthentication',
'phaseOneDiffieHellmanGroup',
'phaseOneEncryption',
'phaseOneKeylife',
'phaseTwoAuthentication',
'phaseTwoDiffieHellmanGroup',
'phaseTwoEncryption',
'phaseTwoKeylife',
'phaseTwoPerfectForwardSecrecy',
'presharedKey']
if address_translations:
entries.append('addressTranslations[internalIpAddressRecord[ipAddress],'
'customerIpAddressRecord[ipAddress]]')
if internal_subnets:
entries.append('internalSubnets')
if remote_subnets:
entries.append('customerSubnets')
if static_subnets:
entries.append('staticRouteSubnets')
if service_subnets:
entries.append('serviceSubnets')
return '[mask[{}]]'.format(','.join(entries)) | def function[_get_tunnel_context_mask, parameter[address_translations, internal_subnets, remote_subnets, static_subnets, service_subnets]]:
constant[Yields a mask object for a tunnel context.
All exposed properties on the tunnel context service are included in
the constructed mask. Additional joins may be requested.
:param bool address_translations: Whether to join the context's address
translation entries.
:param bool internal_subnets: Whether to join the context's internal
subnet associations.
:param bool remote_subnets: Whether to join the context's remote subnet
associations.
:param bool static_subnets: Whether to join the context's statically
routed subnet associations.
:param bool service_subnets: Whether to join the SoftLayer service
network subnets.
:return string: Encoding for the requested mask object.
]
variable[entries] assign[=] list[[<ast.Constant object at 0x7da18f721780>, <ast.Constant object at 0x7da18f723fd0>, <ast.Constant object at 0x7da18f720e80>, <ast.Constant object at 0x7da18f721de0>, <ast.Constant object at 0x7da18f7218d0>, <ast.Constant object at 0x7da18f723640>, <ast.Constant object at 0x7da18f721690>, <ast.Constant object at 0x7da18f7228c0>, <ast.Constant object at 0x7da18f722f50>, <ast.Constant object at 0x7da18f721750>, <ast.Constant object at 0x7da18f723850>, <ast.Constant object at 0x7da18f722170>, <ast.Constant object at 0x7da18f723e50>, <ast.Constant object at 0x7da18f7200d0>, <ast.Constant object at 0x7da18f721660>, <ast.Constant object at 0x7da18f722770>, <ast.Constant object at 0x7da18f722fe0>, <ast.Constant object at 0x7da18f7211e0>, <ast.Constant object at 0x7da18f7236d0>]]
if name[address_translations] begin[:]
call[name[entries].append, parameter[constant[addressTranslations[internalIpAddressRecord[ipAddress],customerIpAddressRecord[ipAddress]]]]]
if name[internal_subnets] begin[:]
call[name[entries].append, parameter[constant[internalSubnets]]]
if name[remote_subnets] begin[:]
call[name[entries].append, parameter[constant[customerSubnets]]]
if name[static_subnets] begin[:]
call[name[entries].append, parameter[constant[staticRouteSubnets]]]
if name[service_subnets] begin[:]
call[name[entries].append, parameter[constant[serviceSubnets]]]
return[call[constant[[mask[{}]]].format, parameter[call[constant[,].join, parameter[name[entries]]]]]] | keyword[def] identifier[_get_tunnel_context_mask] ( identifier[address_translations] = keyword[False] ,
identifier[internal_subnets] = keyword[False] ,
identifier[remote_subnets] = keyword[False] ,
identifier[static_subnets] = keyword[False] ,
identifier[service_subnets] = keyword[False] ):
literal[string]
identifier[entries] =[ literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ]
keyword[if] identifier[address_translations] :
identifier[entries] . identifier[append] ( literal[string]
literal[string] )
keyword[if] identifier[internal_subnets] :
identifier[entries] . identifier[append] ( literal[string] )
keyword[if] identifier[remote_subnets] :
identifier[entries] . identifier[append] ( literal[string] )
keyword[if] identifier[static_subnets] :
identifier[entries] . identifier[append] ( literal[string] )
keyword[if] identifier[service_subnets] :
identifier[entries] . identifier[append] ( literal[string] )
keyword[return] literal[string] . identifier[format] ( literal[string] . identifier[join] ( identifier[entries] )) | def _get_tunnel_context_mask(address_translations=False, internal_subnets=False, remote_subnets=False, static_subnets=False, service_subnets=False):
"""Yields a mask object for a tunnel context.
All exposed properties on the tunnel context service are included in
the constructed mask. Additional joins may be requested.
:param bool address_translations: Whether to join the context's address
translation entries.
:param bool internal_subnets: Whether to join the context's internal
subnet associations.
:param bool remote_subnets: Whether to join the context's remote subnet
associations.
:param bool static_subnets: Whether to join the context's statically
routed subnet associations.
:param bool service_subnets: Whether to join the SoftLayer service
network subnets.
:return string: Encoding for the requested mask object.
"""
entries = ['id', 'accountId', 'advancedConfigurationFlag', 'createDate', 'customerPeerIpAddress', 'modifyDate', 'name', 'friendlyName', 'internalPeerIpAddress', 'phaseOneAuthentication', 'phaseOneDiffieHellmanGroup', 'phaseOneEncryption', 'phaseOneKeylife', 'phaseTwoAuthentication', 'phaseTwoDiffieHellmanGroup', 'phaseTwoEncryption', 'phaseTwoKeylife', 'phaseTwoPerfectForwardSecrecy', 'presharedKey']
if address_translations:
entries.append('addressTranslations[internalIpAddressRecord[ipAddress],customerIpAddressRecord[ipAddress]]') # depends on [control=['if'], data=[]]
if internal_subnets:
entries.append('internalSubnets') # depends on [control=['if'], data=[]]
if remote_subnets:
entries.append('customerSubnets') # depends on [control=['if'], data=[]]
if static_subnets:
entries.append('staticRouteSubnets') # depends on [control=['if'], data=[]]
if service_subnets:
entries.append('serviceSubnets') # depends on [control=['if'], data=[]]
return '[mask[{}]]'.format(','.join(entries)) |
def execute_sql(server_context, schema_name, sql, container_path=None,
max_rows=None,
sort=None,
offset=None,
container_filter=None,
save_in_session=None,
parameters=None,
required_version=None,
timeout=_default_timeout):
"""
Execute sql query against a LabKey server.
:param server_context: A LabKey server context. See utils.create_server_context.
:param schema_name: schema of table
:param sql: String of labkey sql to execute
:param container_path: labkey container path if not already set in context
:param max_rows: max number of rows to return
:param sort: comma separated list of column names to sort by
:param offset: number of rows to offset results by
:param container_filter: enumeration of the various container filters available. See:
https://www.labkey.org/download/clientapi_docs/javascript-api/symbols/LABKEY.Query.html#.containerFilter
:param save_in_session: save query result as a named view to the session
:param parameters: parameter values to pass through to a parameterized query
:param required_version: Api version of response
:param timeout: timeout of request in seconds (defaults to 30s)
:return:
"""
url = server_context.build_url('query', 'executeSql.api', container_path=container_path)
payload = {
'schemaName': schema_name,
'sql': sql
}
if container_filter is not None:
payload['containerFilter'] = container_filter
if max_rows is not None:
payload['maxRows'] = max_rows
if offset is not None:
payload['offset'] = offset
if sort is not None:
payload['query.sort'] = sort
if save_in_session is not None:
payload['saveInSession'] = save_in_session
if parameters is not None:
for key, value in parameters.items():
payload['query.param.' + key] = value
if required_version is not None:
payload['apiVersion'] = required_version
return server_context.make_request(url, payload, timeout=timeout) | def function[execute_sql, parameter[server_context, schema_name, sql, container_path, max_rows, sort, offset, container_filter, save_in_session, parameters, required_version, timeout]]:
constant[
Execute sql query against a LabKey server.
:param server_context: A LabKey server context. See utils.create_server_context.
:param schema_name: schema of table
:param sql: String of labkey sql to execute
:param container_path: labkey container path if not already set in context
:param max_rows: max number of rows to return
:param sort: comma separated list of column names to sort by
:param offset: number of rows to offset results by
:param container_filter: enumeration of the various container filters available. See:
https://www.labkey.org/download/clientapi_docs/javascript-api/symbols/LABKEY.Query.html#.containerFilter
:param save_in_session: save query result as a named view to the session
:param parameters: parameter values to pass through to a parameterized query
:param required_version: Api version of response
:param timeout: timeout of request in seconds (defaults to 30s)
:return:
]
variable[url] assign[=] call[name[server_context].build_url, parameter[constant[query], constant[executeSql.api]]]
variable[payload] assign[=] dictionary[[<ast.Constant object at 0x7da1b0e881f0>, <ast.Constant object at 0x7da1b0e88fa0>], [<ast.Name object at 0x7da1b0e8afb0>, <ast.Name object at 0x7da1b0e88ee0>]]
if compare[name[container_filter] is_not constant[None]] begin[:]
call[name[payload]][constant[containerFilter]] assign[=] name[container_filter]
if compare[name[max_rows] is_not constant[None]] begin[:]
call[name[payload]][constant[maxRows]] assign[=] name[max_rows]
if compare[name[offset] is_not constant[None]] begin[:]
call[name[payload]][constant[offset]] assign[=] name[offset]
if compare[name[sort] is_not constant[None]] begin[:]
call[name[payload]][constant[query.sort]] assign[=] name[sort]
if compare[name[save_in_session] is_not constant[None]] begin[:]
call[name[payload]][constant[saveInSession]] assign[=] name[save_in_session]
if compare[name[parameters] is_not constant[None]] begin[:]
for taget[tuple[[<ast.Name object at 0x7da1b0cc0b20>, <ast.Name object at 0x7da1b0cc02b0>]]] in starred[call[name[parameters].items, parameter[]]] begin[:]
call[name[payload]][binary_operation[constant[query.param.] + name[key]]] assign[=] name[value]
if compare[name[required_version] is_not constant[None]] begin[:]
call[name[payload]][constant[apiVersion]] assign[=] name[required_version]
return[call[name[server_context].make_request, parameter[name[url], name[payload]]]] | keyword[def] identifier[execute_sql] ( identifier[server_context] , identifier[schema_name] , identifier[sql] , identifier[container_path] = keyword[None] ,
identifier[max_rows] = keyword[None] ,
identifier[sort] = keyword[None] ,
identifier[offset] = keyword[None] ,
identifier[container_filter] = keyword[None] ,
identifier[save_in_session] = keyword[None] ,
identifier[parameters] = keyword[None] ,
identifier[required_version] = keyword[None] ,
identifier[timeout] = identifier[_default_timeout] ):
literal[string]
identifier[url] = identifier[server_context] . identifier[build_url] ( literal[string] , literal[string] , identifier[container_path] = identifier[container_path] )
identifier[payload] ={
literal[string] : identifier[schema_name] ,
literal[string] : identifier[sql]
}
keyword[if] identifier[container_filter] keyword[is] keyword[not] keyword[None] :
identifier[payload] [ literal[string] ]= identifier[container_filter]
keyword[if] identifier[max_rows] keyword[is] keyword[not] keyword[None] :
identifier[payload] [ literal[string] ]= identifier[max_rows]
keyword[if] identifier[offset] keyword[is] keyword[not] keyword[None] :
identifier[payload] [ literal[string] ]= identifier[offset]
keyword[if] identifier[sort] keyword[is] keyword[not] keyword[None] :
identifier[payload] [ literal[string] ]= identifier[sort]
keyword[if] identifier[save_in_session] keyword[is] keyword[not] keyword[None] :
identifier[payload] [ literal[string] ]= identifier[save_in_session]
keyword[if] identifier[parameters] keyword[is] keyword[not] keyword[None] :
keyword[for] identifier[key] , identifier[value] keyword[in] identifier[parameters] . identifier[items] ():
identifier[payload] [ literal[string] + identifier[key] ]= identifier[value]
keyword[if] identifier[required_version] keyword[is] keyword[not] keyword[None] :
identifier[payload] [ literal[string] ]= identifier[required_version]
keyword[return] identifier[server_context] . identifier[make_request] ( identifier[url] , identifier[payload] , identifier[timeout] = identifier[timeout] ) | def execute_sql(server_context, schema_name, sql, container_path=None, max_rows=None, sort=None, offset=None, container_filter=None, save_in_session=None, parameters=None, required_version=None, timeout=_default_timeout):
"""
Execute sql query against a LabKey server.
:param server_context: A LabKey server context. See utils.create_server_context.
:param schema_name: schema of table
:param sql: String of labkey sql to execute
:param container_path: labkey container path if not already set in context
:param max_rows: max number of rows to return
:param sort: comma separated list of column names to sort by
:param offset: number of rows to offset results by
:param container_filter: enumeration of the various container filters available. See:
https://www.labkey.org/download/clientapi_docs/javascript-api/symbols/LABKEY.Query.html#.containerFilter
:param save_in_session: save query result as a named view to the session
:param parameters: parameter values to pass through to a parameterized query
:param required_version: Api version of response
:param timeout: timeout of request in seconds (defaults to 30s)
:return:
"""
url = server_context.build_url('query', 'executeSql.api', container_path=container_path)
payload = {'schemaName': schema_name, 'sql': sql}
if container_filter is not None:
payload['containerFilter'] = container_filter # depends on [control=['if'], data=['container_filter']]
if max_rows is not None:
payload['maxRows'] = max_rows # depends on [control=['if'], data=['max_rows']]
if offset is not None:
payload['offset'] = offset # depends on [control=['if'], data=['offset']]
if sort is not None:
payload['query.sort'] = sort # depends on [control=['if'], data=['sort']]
if save_in_session is not None:
payload['saveInSession'] = save_in_session # depends on [control=['if'], data=['save_in_session']]
if parameters is not None:
for (key, value) in parameters.items():
payload['query.param.' + key] = value # depends on [control=['for'], data=[]] # depends on [control=['if'], data=['parameters']]
if required_version is not None:
payload['apiVersion'] = required_version # depends on [control=['if'], data=['required_version']]
return server_context.make_request(url, payload, timeout=timeout) |
def create_snapshot(self, name, *args, **kwargs):
"""
Thin method that just uses the provider
"""
return self.provider.create_snapshot(name, *args, **kwargs) | def function[create_snapshot, parameter[self, name]]:
constant[
Thin method that just uses the provider
]
return[call[name[self].provider.create_snapshot, parameter[name[name], <ast.Starred object at 0x7da2041d8d90>]]] | keyword[def] identifier[create_snapshot] ( identifier[self] , identifier[name] ,* identifier[args] ,** identifier[kwargs] ):
literal[string]
keyword[return] identifier[self] . identifier[provider] . identifier[create_snapshot] ( identifier[name] ,* identifier[args] ,** identifier[kwargs] ) | def create_snapshot(self, name, *args, **kwargs):
"""
Thin method that just uses the provider
"""
return self.provider.create_snapshot(name, *args, **kwargs) |
def compute_kv(self, memory_antecedent):
    """Compute the shared key/value Tensor kv.

    Args:
      memory_antecedent: a Tensor with dimensions
        {memory_input_dim} + other_dims
    Returns:
      a Tensor with dimensions
        memory_heads_dims + {key_dim} + other_dims
    Raises:
      ValueError: if this layer was not built with shared_kv.
    """
    # Only valid when keys and values share a single projection (self.wkv).
    if not self.shared_kv:
        raise ValueError("compute_kv can only be called with shared_kv")
    kv = mtf.einsum(
        [memory_antecedent, self.wkv], reduced_dims=[self.memory_input_dim])
    if self.combine_dims:
        # Split the single combined output dimension back into the k dims.
        kv = mtf.replace_dimensions(kv, kv.shape.dims[-1], self.k_dims)
    return kv
constant[Compute key/value Tensor kv.
Args:
memory_antecedent: a Tensor with dimensions
{memory_input_dim} + other_dims
Returns:
a Tensor with dimensions
memory_heads_dims + {key_dim} + other_dims
]
if <ast.UnaryOp object at 0x7da20e9b0640> begin[:]
<ast.Raise object at 0x7da20e9b0fa0>
variable[ret] assign[=] call[name[mtf].einsum, parameter[list[[<ast.Name object at 0x7da20e9b29e0>, <ast.Attribute object at 0x7da20e9b12a0>]]]]
if name[self].combine_dims begin[:]
variable[ret] assign[=] call[name[mtf].replace_dimensions, parameter[name[ret], call[name[ret].shape.dims][<ast.UnaryOp object at 0x7da20c9933d0>], name[self].k_dims]]
return[name[ret]] | keyword[def] identifier[compute_kv] ( identifier[self] , identifier[memory_antecedent] ):
literal[string]
keyword[if] keyword[not] identifier[self] . identifier[shared_kv] :
keyword[raise] identifier[ValueError] ( literal[string] )
identifier[ret] = identifier[mtf] . identifier[einsum] (
[ identifier[memory_antecedent] , identifier[self] . identifier[wkv] ], identifier[reduced_dims] =[ identifier[self] . identifier[memory_input_dim] ])
keyword[if] identifier[self] . identifier[combine_dims] :
identifier[ret] = identifier[mtf] . identifier[replace_dimensions] ( identifier[ret] , identifier[ret] . identifier[shape] . identifier[dims] [- literal[int] ], identifier[self] . identifier[k_dims] )
keyword[return] identifier[ret] | def compute_kv(self, memory_antecedent):
"""Compute key/value Tensor kv.
Args:
memory_antecedent: a Tensor with dimensions
{memory_input_dim} + other_dims
Returns:
a Tensor with dimensions
memory_heads_dims + {key_dim} + other_dims
"""
if not self.shared_kv:
raise ValueError('compute_kv can only be called with shared_kv') # depends on [control=['if'], data=[]]
ret = mtf.einsum([memory_antecedent, self.wkv], reduced_dims=[self.memory_input_dim])
if self.combine_dims:
ret = mtf.replace_dimensions(ret, ret.shape.dims[-1], self.k_dims) # depends on [control=['if'], data=[]]
return ret |
def _optional_list(value):
'''Convert a value that may be a scalar (str) or list into a tuple. This
produces uniform output for fields that may supply a single value or list
of values, like the `imports` field.'''
if isinstance(value, str):
return (value, )
elif isinstance(value, list):
return tuple(value)
return None | def function[_optional_list, parameter[value]]:
constant[Convert a value that may be a scalar (str) or list into a tuple. This
produces uniform output for fields that may supply a single value or list
of values, like the `imports` field.]
if call[name[isinstance], parameter[name[value], name[str]]] begin[:]
return[tuple[[<ast.Name object at 0x7da1b26af160>]]]
return[constant[None]] | keyword[def] identifier[_optional_list] ( identifier[value] ):
literal[string]
keyword[if] identifier[isinstance] ( identifier[value] , identifier[str] ):
keyword[return] ( identifier[value] ,)
keyword[elif] identifier[isinstance] ( identifier[value] , identifier[list] ):
keyword[return] identifier[tuple] ( identifier[value] )
keyword[return] keyword[None] | def _optional_list(value):
"""Convert a value that may be a scalar (str) or list into a tuple. This
produces uniform output for fields that may supply a single value or list
of values, like the `imports` field."""
if isinstance(value, str):
return (value,) # depends on [control=['if'], data=[]]
elif isinstance(value, list):
return tuple(value) # depends on [control=['if'], data=[]]
return None |
def push_primary_analyses_for_removal(self, analysis_request, analyses):
    """Stores the analyses to be removed after partitions creation.

    Entries are deduplicated per analysis request; repeated pushes for the
    same request merge into a single list.
    """
    pending = set(self.analyses_to_remove.get(analysis_request, []))
    pending.update(analyses)
    self.analyses_to_remove[analysis_request] = list(pending)
constant[Stores the analyses to be removed after partitions creation
]
variable[to_remove] assign[=] call[name[self].analyses_to_remove.get, parameter[name[analysis_request], list[[]]]]
call[name[to_remove].extend, parameter[name[analyses]]]
call[name[self].analyses_to_remove][name[analysis_request]] assign[=] call[name[list], parameter[call[name[set], parameter[name[to_remove]]]]] | keyword[def] identifier[push_primary_analyses_for_removal] ( identifier[self] , identifier[analysis_request] , identifier[analyses] ):
literal[string]
identifier[to_remove] = identifier[self] . identifier[analyses_to_remove] . identifier[get] ( identifier[analysis_request] ,[])
identifier[to_remove] . identifier[extend] ( identifier[analyses] )
identifier[self] . identifier[analyses_to_remove] [ identifier[analysis_request] ]= identifier[list] ( identifier[set] ( identifier[to_remove] )) | def push_primary_analyses_for_removal(self, analysis_request, analyses):
"""Stores the analyses to be removed after partitions creation
"""
to_remove = self.analyses_to_remove.get(analysis_request, [])
to_remove.extend(analyses)
self.analyses_to_remove[analysis_request] = list(set(to_remove)) |
def include_class_in_title(result):
    """Makes sure that test class is included in "title".
    e.g. "TestServiceRESTAPI.test_power_parent_service"
    >>> result = {"title": "test_foo", "classname": "foo.bar.baz.TestFoo",
    ... "file": "foo/bar/baz.py"}
    >>> include_class_in_title(result)
    >>> str(result.get("title"))
    'TestFoo.test_foo'
    >>> result.get("classname")
    """
    classname = result.get("classname", "")
    if not classname:
        return
    filepath = result.get("file", "")
    title = result.get("title")
    if title and "/" in filepath and "." in classname:
        module_name = filepath.rsplit("/", 1)[-1].replace(".py", "")
        short_class = classname.rsplit(".", 1)[-1]
        # prefix only when the class name is not just the file name repeated
        if module_name != short_class and short_class not in title:
            result["title"] = "{}.{}".format(short_class, title)
    # classname is no longer needed once the title has been adjusted
    del result["classname"]
del result["classname"] | def function[include_class_in_title, parameter[result]]:
constant[Makes sure that test class is included in "title".
e.g. "TestServiceRESTAPI.test_power_parent_service"
>>> result = {"title": "test_foo", "classname": "foo.bar.baz.TestFoo",
... "file": "foo/bar/baz.py"}
>>> include_class_in_title(result)
>>> str(result.get("title"))
'TestFoo.test_foo'
>>> result.get("classname")
]
variable[classname] assign[=] call[name[result].get, parameter[constant[classname], constant[]]]
if name[classname] begin[:]
variable[filepath] assign[=] call[name[result].get, parameter[constant[file], constant[]]]
variable[title] assign[=] call[name[result].get, parameter[constant[title]]]
if <ast.BoolOp object at 0x7da2044c06a0> begin[:]
variable[fname] assign[=] call[call[call[name[filepath].split, parameter[constant[/]]]][<ast.UnaryOp object at 0x7da18bc736d0>].replace, parameter[constant[.py], constant[]]]
variable[last_classname] assign[=] call[call[name[classname].split, parameter[constant[.]]]][<ast.UnaryOp object at 0x7da18bc72530>]
if <ast.BoolOp object at 0x7da18bc70af0> begin[:]
call[name[result]][constant[title]] assign[=] call[constant[{}.{}].format, parameter[name[last_classname], name[title]]]
<ast.Delete object at 0x7da18bc705b0> | keyword[def] identifier[include_class_in_title] ( identifier[result] ):
literal[string]
identifier[classname] = identifier[result] . identifier[get] ( literal[string] , literal[string] )
keyword[if] identifier[classname] :
identifier[filepath] = identifier[result] . identifier[get] ( literal[string] , literal[string] )
identifier[title] = identifier[result] . identifier[get] ( literal[string] )
keyword[if] identifier[title] keyword[and] literal[string] keyword[in] identifier[filepath] keyword[and] literal[string] keyword[in] identifier[classname] :
identifier[fname] = identifier[filepath] . identifier[split] ( literal[string] )[- literal[int] ]. identifier[replace] ( literal[string] , literal[string] )
identifier[last_classname] = identifier[classname] . identifier[split] ( literal[string] )[- literal[int] ]
keyword[if] identifier[fname] != identifier[last_classname] keyword[and] identifier[last_classname] keyword[not] keyword[in] identifier[title] :
identifier[result] [ literal[string] ]= literal[string] . identifier[format] ( identifier[last_classname] , identifier[title] )
keyword[del] identifier[result] [ literal[string] ] | def include_class_in_title(result):
"""Makes sure that test class is included in "title".
e.g. "TestServiceRESTAPI.test_power_parent_service"
>>> result = {"title": "test_foo", "classname": "foo.bar.baz.TestFoo",
... "file": "foo/bar/baz.py"}
>>> include_class_in_title(result)
>>> str(result.get("title"))
'TestFoo.test_foo'
>>> result.get("classname")
"""
classname = result.get('classname', '')
if classname:
filepath = result.get('file', '')
title = result.get('title')
if title and '/' in filepath and ('.' in classname):
fname = filepath.split('/')[-1].replace('.py', '')
last_classname = classname.split('.')[-1]
# last part of classname is not file name
if fname != last_classname and last_classname not in title:
result['title'] = '{}.{}'.format(last_classname, title) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
# we don't need to pass classnames?
del result['classname'] # depends on [control=['if'], data=[]] |
def cycle_app_reverse_cache(*args, **kwargs):
    """Invalidate app-reverse lookups without emptying the cache.

    A fresh random token is mixed into cache key generation, so every
    newly generated key is guaranteed to miss the existing cache entries.
    Returns the new generation token."""
    rng = SystemRandom()
    token = '%07x' % rng.randint(0, 0x10000000)
    cache.set(APP_REVERSE_CACHE_GENERATION_KEY, token)
    return token
constant[Does not really empty the cache; instead it adds a random element to the
cache key generation which guarantees that the cache does not yet contain
values for all newly generated keys]
variable[value] assign[=] binary_operation[constant[%07x] <ast.Mod object at 0x7da2590d6920> call[call[name[SystemRandom], parameter[]].randint, parameter[constant[0], constant[268435456]]]]
call[name[cache].set, parameter[name[APP_REVERSE_CACHE_GENERATION_KEY], name[value]]]
return[name[value]] | keyword[def] identifier[cycle_app_reverse_cache] (* identifier[args] ,** identifier[kwargs] ):
literal[string]
identifier[value] = literal[string] %( identifier[SystemRandom] (). identifier[randint] ( literal[int] , literal[int] ))
identifier[cache] . identifier[set] ( identifier[APP_REVERSE_CACHE_GENERATION_KEY] , identifier[value] )
keyword[return] identifier[value] | def cycle_app_reverse_cache(*args, **kwargs):
"""Does not really empty the cache; instead it adds a random element to the
cache key generation which guarantees that the cache does not yet contain
values for all newly generated keys"""
value = '%07x' % SystemRandom().randint(0, 268435456)
cache.set(APP_REVERSE_CACHE_GENERATION_KEY, value)
return value |
def post(self, request, *args, **kwargs):
    # type: (HttpRequest, object, object) -> HttpResponse
    """Handle the HTTP POST request sent to the view.

    Alexa invokes the skill through a POST request. The registered
    verifiers check the request first; on success the request is
    dispatched to the skill handlers and the skill's response is
    returned as a :py:class:`django.http.JsonResponse`.

    :param request: The input request sent by Alexa to the skill
    :type request: django.http.HttpRequest
    :return: The response from the skill to Alexa
    :rtype: django.http.JsonResponse
    :raises: :py:class:`django.http.HttpResponseBadRequest` if the
        request verification fails.
        :py:class:`django.http.HttpResponseServerError` for any
        internal exception.
    """
    try:
        body_text = request.body.decode(
            verifier_constants.CHARACTER_ENCODING)
        skill_response = self._webservice_handler.verify_request_and_dispatch(
            http_request_headers=request.META, http_request_body=body_text)
        return JsonResponse(data=skill_response, safe=False)
    except VerificationException:
        # Signature / timestamp verification failed: reject as a bad request.
        logger.exception(msg="Request verification failed")
        return HttpResponseBadRequest(
            content="Incoming request failed verification")
    except AskSdkException:
        # Any SDK-level dispatch failure surfaces as a server error.
        logger.exception(msg="Skill dispatch exception")
        return HttpResponseServerError(
            content="Exception occurred during skill dispatch")
constant[The method that handles HTTP POST request on the view.
This method is called when the view receives a HTTP POST
request, which is generally the request sent from Alexa during
skill invocation. The request is verified through the
registered list of verifiers, before invoking the request
handlers. The method returns a
:py:class:`django.http.JsonResponse` in case of successful
skill invocation.
:param request: The input request sent by Alexa to the skill
:type request: django.http.HttpRequest
:return: The response from the skill to Alexa
:rtype: django.http.JsonResponse
:raises: :py:class:`django.http.HttpResponseBadRequest` if the
request verification fails.
:py:class:`django.http.HttpResponseServerError` for any
internal exception.
]
<ast.Try object at 0x7da1b188d630> | keyword[def] identifier[post] ( identifier[self] , identifier[request] ,* identifier[args] ,** identifier[kwargs] ):
literal[string]
keyword[try] :
identifier[content] = identifier[request] . identifier[body] . identifier[decode] (
identifier[verifier_constants] . identifier[CHARACTER_ENCODING] )
identifier[response] = identifier[self] . identifier[_webservice_handler] . identifier[verify_request_and_dispatch] (
identifier[http_request_headers] = identifier[request] . identifier[META] , identifier[http_request_body] = identifier[content] )
keyword[return] identifier[JsonResponse] (
identifier[data] = identifier[response] , identifier[safe] = keyword[False] )
keyword[except] identifier[VerificationException] :
identifier[logger] . identifier[exception] ( identifier[msg] = literal[string] )
keyword[return] identifier[HttpResponseBadRequest] (
identifier[content] = literal[string] )
keyword[except] identifier[AskSdkException] :
identifier[logger] . identifier[exception] ( identifier[msg] = literal[string] )
keyword[return] identifier[HttpResponseServerError] (
identifier[content] = literal[string] ) | def post(self, request, *args, **kwargs):
# type: (HttpRequest, object, object) -> HttpResponse
'The method that handles HTTP POST request on the view.\n\n This method is called when the view receives a HTTP POST\n request, which is generally the request sent from Alexa during\n skill invocation. The request is verified through the\n registered list of verifiers, before invoking the request\n handlers. The method returns a\n :py:class:`django.http.JsonResponse` in case of successful\n skill invocation.\n\n :param request: The input request sent by Alexa to the skill\n :type request: django.http.HttpRequest\n :return: The response from the skill to Alexa\n :rtype: django.http.JsonResponse\n :raises: :py:class:`django.http.HttpResponseBadRequest` if the\n request verification fails.\n :py:class:`django.http.HttpResponseServerError` for any\n internal exception.\n '
try:
content = request.body.decode(verifier_constants.CHARACTER_ENCODING)
response = self._webservice_handler.verify_request_and_dispatch(http_request_headers=request.META, http_request_body=content)
return JsonResponse(data=response, safe=False) # depends on [control=['try'], data=[]]
except VerificationException:
logger.exception(msg='Request verification failed')
return HttpResponseBadRequest(content='Incoming request failed verification') # depends on [control=['except'], data=[]]
except AskSdkException:
logger.exception(msg='Skill dispatch exception')
return HttpResponseServerError(content='Exception occurred during skill dispatch') # depends on [control=['except'], data=[]] |
def GetDefaultServicePeriod(self):
    """Return the default ServicePeriod. If no default ServicePeriod has been
    set select the default depending on how many ServicePeriod objects are in
    the Schedule. If there are 0 make a new ServicePeriod the default, if there
    is 1 it becomes the default, if there is more than 1 then return None.
    """
    if self._default_service_period:
        return self._default_service_period
    period_count = len(self.service_periods)
    if period_count == 0:
        # NewDefaultServicePeriod sets self._default_service_period itself.
        self.NewDefaultServicePeriod()
    elif period_count == 1:
        self._default_service_period = self.service_periods.values()[0]
    return self._default_service_period
constant[Return the default ServicePeriod. If no default ServicePeriod has been
set select the default depending on how many ServicePeriod objects are in
the Schedule. If there are 0 make a new ServicePeriod the default, if there
is 1 it becomes the default, if there is more than 1 then return None.
]
if <ast.UnaryOp object at 0x7da1b1783160> begin[:]
if compare[call[name[len], parameter[name[self].service_periods]] equal[==] constant[0]] begin[:]
call[name[self].NewDefaultServicePeriod, parameter[]]
return[name[self]._default_service_period] | keyword[def] identifier[GetDefaultServicePeriod] ( identifier[self] ):
literal[string]
keyword[if] keyword[not] identifier[self] . identifier[_default_service_period] :
keyword[if] identifier[len] ( identifier[self] . identifier[service_periods] )== literal[int] :
identifier[self] . identifier[NewDefaultServicePeriod] ()
keyword[elif] identifier[len] ( identifier[self] . identifier[service_periods] )== literal[int] :
identifier[self] . identifier[_default_service_period] = identifier[self] . identifier[service_periods] . identifier[values] ()[ literal[int] ]
keyword[return] identifier[self] . identifier[_default_service_period] | def GetDefaultServicePeriod(self):
"""Return the default ServicePeriod. If no default ServicePeriod has been
set select the default depending on how many ServicePeriod objects are in
the Schedule. If there are 0 make a new ServicePeriod the default, if there
is 1 it becomes the default, if there is more than 1 then return None.
"""
if not self._default_service_period:
if len(self.service_periods) == 0:
self.NewDefaultServicePeriod() # depends on [control=['if'], data=[]]
elif len(self.service_periods) == 1:
self._default_service_period = self.service_periods.values()[0] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
return self._default_service_period |
def pre_process_method_headers(method, headers):
    '''
    Lower-case the HTTP method and normalize header names.

    Header names have '-' swapped for '_', gain an HTTP_ prefix unless they
    are one of the standard WSGI-supported headers, and are upper-cased.
    Returns the (method, headers) pair.
    '''
    # Standard WSGI supported headers: these never get the http_ prefix.
    standard_wsgi = {"content_length", "content_type", "query_string",
                     "remote_addr", "remote_host", "remote_user",
                     "request_method", "server_name", "server_port"}
    normalized = {}
    for name, value in headers.items():
        name = name.replace("-", "_")
        if name.lower() not in standard_wsgi:
            name = "http_{header}".format(header=name)
        normalized[name.upper()] = value
    return method.lower(), normalized
constant[
Returns the lowered method.
Capitalize headers, prepend HTTP_ and change - to _.
]
variable[method] assign[=] call[name[method].lower, parameter[]]
variable[_wsgi_headers] assign[=] list[[<ast.Constant object at 0x7da1b0ea2500>, <ast.Constant object at 0x7da1b0ea1f00>, <ast.Constant object at 0x7da1b0ea2770>, <ast.Constant object at 0x7da1b0ea20b0>, <ast.Constant object at 0x7da1b0ea07c0>, <ast.Constant object at 0x7da1b0ea0fa0>, <ast.Constant object at 0x7da1b0ea05e0>, <ast.Constant object at 0x7da1b0e318a0>, <ast.Constant object at 0x7da1b0e32230>]]
variable[_transformed_headers] assign[=] dictionary[[], []]
for taget[tuple[[<ast.Name object at 0x7da1b0e31390>, <ast.Name object at 0x7da1b0ef74f0>]]] in starred[call[name[headers].items, parameter[]]] begin[:]
variable[header] assign[=] call[name[header].replace, parameter[constant[-], constant[_]]]
variable[header] assign[=] <ast.IfExp object at 0x7da1b0ef4490>
call[name[_transformed_headers].update, parameter[dictionary[[<ast.Call object at 0x7da1b0ef5cc0>], [<ast.Name object at 0x7da1b0ef4c40>]]]]
return[tuple[[<ast.Name object at 0x7da1b0ef4340>, <ast.Name object at 0x7da1b0ef55d0>]]] | keyword[def] identifier[pre_process_method_headers] ( identifier[method] , identifier[headers] ):
literal[string]
identifier[method] = identifier[method] . identifier[lower] ()
identifier[_wsgi_headers] =[ literal[string] , literal[string] , literal[string] ,
literal[string] , literal[string] , literal[string] ,
literal[string] , literal[string] , literal[string] ]
identifier[_transformed_headers] ={}
keyword[for] identifier[header] , identifier[value] keyword[in] identifier[headers] . identifier[items] ():
identifier[header] = identifier[header] . identifier[replace] ( literal[string] , literal[string] )
identifier[header] = literal[string] . identifier[format] (
identifier[header] = identifier[header] ) keyword[if] identifier[header] . identifier[lower] () keyword[not] keyword[in] identifier[_wsgi_headers] keyword[else] identifier[header]
identifier[_transformed_headers] . identifier[update] ({ identifier[header] . identifier[upper] (): identifier[value] })
keyword[return] identifier[method] , identifier[_transformed_headers] | def pre_process_method_headers(method, headers):
"""
Returns the lowered method.
Capitalize headers, prepend HTTP_ and change - to _.
"""
method = method.lower()
# Standard WSGI supported headers
_wsgi_headers = ['content_length', 'content_type', 'query_string', 'remote_addr', 'remote_host', 'remote_user', 'request_method', 'server_name', 'server_port']
_transformed_headers = {}
# For every header, replace - to _, prepend http_ if necessary and convert
# to upper case.
for (header, value) in headers.items():
header = header.replace('-', '_')
header = 'http_{header}'.format(header=header) if header.lower() not in _wsgi_headers else header
_transformed_headers.update({header.upper(): value}) # depends on [control=['for'], data=[]]
return (method, _transformed_headers) |
def get_repo_keys():
    '''
    .. versionadded:: 2017.7.0
    List known repo key details.
    :return: A dictionary containing the repo keys, keyed by key id.
    :rtype: dict
    CLI Examples:
    .. code-block:: bash
        salt '*' pkg.get_repo_keys
    '''
    ret = dict()  # final mapping: keyid -> key properties
    repo_keys = list()  # parsed key records, in apt-key output order
    # The double usage of '--with-fingerprint' is necessary in order to
    # retrieve the fingerprint of the subkey.
    cmd = ['apt-key', 'adv', '--batch', '--list-public-keys', '--with-fingerprint',
           '--with-fingerprint', '--with-colons', '--fixed-list-mode']
    cmd_ret = _call_apt(cmd, scope=False)
    if cmd_ret['retcode'] != 0:
        # apt-key failed: log its stderr and return the (empty) mapping.
        log.error(cmd_ret['stderr'])
        return ret
    lines = [line for line in cmd_ret['stdout'].splitlines() if line.strip()]
    # Reference for the meaning of each item in the colon-separated
    # record can be found here: https://goo.gl/KIZbvp
    for line in lines:
        # Split each colon-separated record; blank fields become None and
        # numeric-looking fields are converted to int by _convert_if_int.
        items = [_convert_if_int(item.strip()) if item.strip() else None for item in line.split(':')]
        key_props = dict()
        if len(items) < 2:
            # Not a well-formed record; ignore it.
            log.debug('Skipping line: %s', line)
            continue
        if items[0] in ('pub', 'sub'):
            # Primary key ('pub') or subkey ('sub') record: pull the common
            # attributes out of their fixed colon positions.
            key_props.update({
                'algorithm': items[3],
                'bits': items[2],
                'capability': items[11],
                'date_creation': items[5],
                'date_expiration': items[6],
                'keyid': items[4],
                'validity': items[1]
            })
            if items[0] == 'pub':
                repo_keys.append(key_props)
            else:
                # A 'sub' record always follows its 'pub' record, so attach
                # it to the most recently appended primary key.
                repo_keys[-1]['subkey'] = key_props
        elif items[0] == 'fpr':
            # Fingerprint record applies to the immediately preceding key:
            # the subkey if one has been attached, the primary otherwise.
            if repo_keys[-1].get('subkey', False):
                repo_keys[-1]['subkey'].update({'fingerprint': items[9]})
            else:
                repo_keys[-1].update({'fingerprint': items[9]})
        elif items[0] == 'uid':
            # User-ID record for the current primary key.
            repo_keys[-1].update({
                'uid': items[9],
                'uid_hash': items[7]
            })
    for repo_key in repo_keys:
        ret[repo_key['keyid']] = repo_key
    return ret
constant[
.. versionadded:: 2017.7.0
List known repo key details.
:return: A dictionary containing the repo keys.
:rtype: dict
CLI Examples:
.. code-block:: bash
salt '*' pkg.get_repo_keys
]
variable[ret] assign[=] call[name[dict], parameter[]]
variable[repo_keys] assign[=] call[name[list], parameter[]]
variable[cmd] assign[=] list[[<ast.Constant object at 0x7da1b2013ee0>, <ast.Constant object at 0x7da1b2013eb0>, <ast.Constant object at 0x7da1b20115a0>, <ast.Constant object at 0x7da1b20115d0>, <ast.Constant object at 0x7da1b2011030>, <ast.Constant object at 0x7da1b20112a0>, <ast.Constant object at 0x7da1b2010ca0>, <ast.Constant object at 0x7da1b2012230>]]
variable[cmd_ret] assign[=] call[name[_call_apt], parameter[name[cmd]]]
if compare[call[name[cmd_ret]][constant[retcode]] not_equal[!=] constant[0]] begin[:]
call[name[log].error, parameter[call[name[cmd_ret]][constant[stderr]]]]
return[name[ret]]
variable[lines] assign[=] <ast.ListComp object at 0x7da1b2012440>
for taget[name[line]] in starred[name[lines]] begin[:]
variable[items] assign[=] <ast.ListComp object at 0x7da1b2012ad0>
variable[key_props] assign[=] call[name[dict], parameter[]]
if compare[call[name[len], parameter[name[items]]] less[<] constant[2]] begin[:]
call[name[log].debug, parameter[constant[Skipping line: %s], name[line]]]
continue
if compare[call[name[items]][constant[0]] in tuple[[<ast.Constant object at 0x7da1b2011570>, <ast.Constant object at 0x7da1b2011630>]]] begin[:]
call[name[key_props].update, parameter[dictionary[[<ast.Constant object at 0x7da1b2011420>, <ast.Constant object at 0x7da1b20111b0>, <ast.Constant object at 0x7da1b2011150>, <ast.Constant object at 0x7da1b2011b40>, <ast.Constant object at 0x7da1b2011900>, <ast.Constant object at 0x7da1b2011930>, <ast.Constant object at 0x7da1b20122f0>], [<ast.Subscript object at 0x7da1b2012290>, <ast.Subscript object at 0x7da1b2013880>, <ast.Subscript object at 0x7da1b2012bf0>, <ast.Subscript object at 0x7da1b20101f0>, <ast.Subscript object at 0x7da1b2010070>, <ast.Subscript object at 0x7da1b2010130>, <ast.Subscript object at 0x7da1b20106a0>]]]]
if compare[call[name[items]][constant[0]] equal[==] constant[pub]] begin[:]
call[name[repo_keys].append, parameter[name[key_props]]]
for taget[name[repo_key]] in starred[name[repo_keys]] begin[:]
call[name[ret]][call[name[repo_key]][constant[keyid]]] assign[=] name[repo_key]
return[name[ret]] | keyword[def] identifier[get_repo_keys] ():
literal[string]
identifier[ret] = identifier[dict] ()
identifier[repo_keys] = identifier[list] ()
identifier[cmd] =[ literal[string] , literal[string] , literal[string] , literal[string] , literal[string] ,
literal[string] , literal[string] , literal[string] ]
identifier[cmd_ret] = identifier[_call_apt] ( identifier[cmd] , identifier[scope] = keyword[False] )
keyword[if] identifier[cmd_ret] [ literal[string] ]!= literal[int] :
identifier[log] . identifier[error] ( identifier[cmd_ret] [ literal[string] ])
keyword[return] identifier[ret]
identifier[lines] =[ identifier[line] keyword[for] identifier[line] keyword[in] identifier[cmd_ret] [ literal[string] ]. identifier[splitlines] () keyword[if] identifier[line] . identifier[strip] ()]
keyword[for] identifier[line] keyword[in] identifier[lines] :
identifier[items] =[ identifier[_convert_if_int] ( identifier[item] . identifier[strip] ()) keyword[if] identifier[item] . identifier[strip] () keyword[else] keyword[None] keyword[for] identifier[item] keyword[in] identifier[line] . identifier[split] ( literal[string] )]
identifier[key_props] = identifier[dict] ()
keyword[if] identifier[len] ( identifier[items] )< literal[int] :
identifier[log] . identifier[debug] ( literal[string] , identifier[line] )
keyword[continue]
keyword[if] identifier[items] [ literal[int] ] keyword[in] ( literal[string] , literal[string] ):
identifier[key_props] . identifier[update] ({
literal[string] : identifier[items] [ literal[int] ],
literal[string] : identifier[items] [ literal[int] ],
literal[string] : identifier[items] [ literal[int] ],
literal[string] : identifier[items] [ literal[int] ],
literal[string] : identifier[items] [ literal[int] ],
literal[string] : identifier[items] [ literal[int] ],
literal[string] : identifier[items] [ literal[int] ]
})
keyword[if] identifier[items] [ literal[int] ]== literal[string] :
identifier[repo_keys] . identifier[append] ( identifier[key_props] )
keyword[else] :
identifier[repo_keys] [- literal[int] ][ literal[string] ]= identifier[key_props]
keyword[elif] identifier[items] [ literal[int] ]== literal[string] :
keyword[if] identifier[repo_keys] [- literal[int] ]. identifier[get] ( literal[string] , keyword[False] ):
identifier[repo_keys] [- literal[int] ][ literal[string] ]. identifier[update] ({ literal[string] : identifier[items] [ literal[int] ]})
keyword[else] :
identifier[repo_keys] [- literal[int] ]. identifier[update] ({ literal[string] : identifier[items] [ literal[int] ]})
keyword[elif] identifier[items] [ literal[int] ]== literal[string] :
identifier[repo_keys] [- literal[int] ]. identifier[update] ({
literal[string] : identifier[items] [ literal[int] ],
literal[string] : identifier[items] [ literal[int] ]
})
keyword[for] identifier[repo_key] keyword[in] identifier[repo_keys] :
identifier[ret] [ identifier[repo_key] [ literal[string] ]]= identifier[repo_key]
keyword[return] identifier[ret] | def get_repo_keys():
"""
.. versionadded:: 2017.7.0
List known repo key details.
:return: A dictionary containing the repo keys.
:rtype: dict
CLI Examples:
.. code-block:: bash
salt '*' pkg.get_repo_keys
"""
ret = dict()
repo_keys = list()
# The double usage of '--with-fingerprint' is necessary in order to
# retrieve the fingerprint of the subkey.
cmd = ['apt-key', 'adv', '--batch', '--list-public-keys', '--with-fingerprint', '--with-fingerprint', '--with-colons', '--fixed-list-mode']
cmd_ret = _call_apt(cmd, scope=False)
if cmd_ret['retcode'] != 0:
log.error(cmd_ret['stderr'])
return ret # depends on [control=['if'], data=[]]
lines = [line for line in cmd_ret['stdout'].splitlines() if line.strip()]
# Reference for the meaning of each item in the colon-separated
# record can be found here: https://goo.gl/KIZbvp
for line in lines:
items = [_convert_if_int(item.strip()) if item.strip() else None for item in line.split(':')]
key_props = dict()
if len(items) < 2:
log.debug('Skipping line: %s', line)
continue # depends on [control=['if'], data=[]]
if items[0] in ('pub', 'sub'):
key_props.update({'algorithm': items[3], 'bits': items[2], 'capability': items[11], 'date_creation': items[5], 'date_expiration': items[6], 'keyid': items[4], 'validity': items[1]})
if items[0] == 'pub':
repo_keys.append(key_props) # depends on [control=['if'], data=[]]
else:
repo_keys[-1]['subkey'] = key_props # depends on [control=['if'], data=[]]
elif items[0] == 'fpr':
if repo_keys[-1].get('subkey', False):
repo_keys[-1]['subkey'].update({'fingerprint': items[9]}) # depends on [control=['if'], data=[]]
else:
repo_keys[-1].update({'fingerprint': items[9]}) # depends on [control=['if'], data=[]]
elif items[0] == 'uid':
repo_keys[-1].update({'uid': items[9], 'uid_hash': items[7]}) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['line']]
for repo_key in repo_keys:
ret[repo_key['keyid']] = repo_key # depends on [control=['for'], data=['repo_key']]
return ret |
def date_range(start, end, boo):
    """
    Return a list of dates within a specified range, inclusive of both ends.
    Args:
        start: earliest date to include, String ("2015-11-25")
        end: latest date to include, String ("2015-12-01")
        boo: if true, output list contains Numbers (20151230); if false, list contains Strings ("2015-12-30")
    Returns:
        list of either Numbers (ints) or Strings, in chronological order
    """
    # "YYYY-MM-DD" -> "YYYY MM DD" so the strings match the strptime format below.
    earliest = datetime.strptime(start.replace('-', ' '), '%Y %m %d')
    latest = datetime.strptime(end.replace('-', ' '), '%Y %m %d')
    # +1 makes the range inclusive of both endpoints.
    num_days = (latest - earliest).days + 1
    # Build the dates by counting back from `latest`, then reverse so the
    # result runs from earliest to latest.
    all_days = [latest - timedelta(days=x) for x in range(num_days)]
    all_days.reverse()
    output = []
    if boo:
        # Return as Integer, yyyymmdd: strip dashes from the ISO string and
        # keep the first 8 digits (the date part).
        for d in all_days:
            output.append(int(str(d).replace('-', '')[:8]))
    else:
        # Return as String, yyyy-mm-dd: first 10 chars of the ISO datetime string.
        for d in all_days:
            output.append(str(d)[:10])
return output | def function[date_range, parameter[start, end, boo]]:
constant[
Return list of dates within a specified range, inclusive.
Args:
start: earliest date to include, String ("2015-11-25")
end: latest date to include, String ("2015-12-01")
boo: if true, output list contains Numbers (20151230); if false, list contains Strings ("2015-12-30")
Returns:
list of either Numbers or Strings
]
variable[earliest] assign[=] call[name[datetime].strptime, parameter[call[name[start].replace, parameter[constant[-], constant[ ]]], constant[%Y %m %d]]]
variable[latest] assign[=] call[name[datetime].strptime, parameter[call[name[end].replace, parameter[constant[-], constant[ ]]], constant[%Y %m %d]]]
variable[num_days] assign[=] binary_operation[binary_operation[name[latest] - name[earliest]].days + constant[1]]
variable[all_days] assign[=] <ast.ListComp object at 0x7da204344c10>
call[name[all_days].reverse, parameter[]]
variable[output] assign[=] list[[]]
if name[boo] begin[:]
for taget[name[d]] in starred[name[all_days]] begin[:]
call[name[output].append, parameter[call[name[int], parameter[call[call[call[name[str], parameter[name[d]]].replace, parameter[constant[-], constant[]]]][<ast.Slice object at 0x7da1b0b58220>]]]]]
return[name[output]] | keyword[def] identifier[date_range] ( identifier[start] , identifier[end] , identifier[boo] ):
literal[string]
identifier[earliest] = identifier[datetime] . identifier[strptime] ( identifier[start] . identifier[replace] ( literal[string] , literal[string] ), literal[string] )
identifier[latest] = identifier[datetime] . identifier[strptime] ( identifier[end] . identifier[replace] ( literal[string] , literal[string] ), literal[string] )
identifier[num_days] =( identifier[latest] - identifier[earliest] ). identifier[days] + literal[int]
identifier[all_days] =[ identifier[latest] - identifier[timedelta] ( identifier[days] = identifier[x] ) keyword[for] identifier[x] keyword[in] identifier[range] ( identifier[num_days] )]
identifier[all_days] . identifier[reverse] ()
identifier[output] =[]
keyword[if] identifier[boo] :
keyword[for] identifier[d] keyword[in] identifier[all_days] :
identifier[output] . identifier[append] ( identifier[int] ( identifier[str] ( identifier[d] ). identifier[replace] ( literal[string] , literal[string] )[: literal[int] ]))
keyword[else] :
keyword[for] identifier[d] keyword[in] identifier[all_days] :
identifier[output] . identifier[append] ( identifier[str] ( identifier[d] )[: literal[int] ])
keyword[return] identifier[output] | def date_range(start, end, boo):
"""
Return list of dates within a specified range, inclusive.
Args:
start: earliest date to include, String ("2015-11-25")
end: latest date to include, String ("2015-12-01")
boo: if true, output list contains Numbers (20151230); if false, list contains Strings ("2015-12-30")
Returns:
list of either Numbers or Strings
"""
earliest = datetime.strptime(start.replace('-', ' '), '%Y %m %d')
latest = datetime.strptime(end.replace('-', ' '), '%Y %m %d')
num_days = (latest - earliest).days + 1
all_days = [latest - timedelta(days=x) for x in range(num_days)]
all_days.reverse()
output = []
if boo:
# Return as Integer, yyyymmdd
for d in all_days:
output.append(int(str(d).replace('-', '')[:8])) # depends on [control=['for'], data=['d']] # depends on [control=['if'], data=[]]
else:
# Return as String, yyyy-mm-dd
for d in all_days:
output.append(str(d)[:10]) # depends on [control=['for'], data=['d']]
return output |
def unregister_model(self, storagemodel:object, modeldefinition = None, delete_blob=False):
    """ Remove a registered StorageQueueModel from this service instance.
        Will delete the whole backing storage if the delete_blob flag is True!
        required Parameter is:
        - storagemodel: StorageQueueModel(Object)
        Optional Parameter is:
        - modeldefinition: dict for the registered model, matched by its 'modelname'
        - delete_blob: bool, also delete the backing storage when True
    """
    """ remove from modeldefinitions """
    # Drop the first definition whose modelname matches; the list is mutated
    # in place, so stop iterating immediately after the deletion.
    for i in range(len(self._modeldefinitions)):
        if self._modeldefinitions[i]['modelname'] == modeldefinition['modelname']:
            del self._modeldefinitions[i]
            break
    """ delete queue from storage if delete_queue == True """
    if delete_blob:
        self.__delete__(modeldefinition)
    log.info('model {} unregistered successfully. Models are {!s}'.format(modeldefinition['modelname'], [model['modelname'] for model in self._modeldefinitions]))
pass | def function[unregister_model, parameter[self, storagemodel, modeldefinition, delete_blob]]:
constant[ clear up an Queueservice for an StorageQueueModel in your Azure Storage Account
Will delete the hole Queue if delete_queue Flag is True!
required Parameter is:
- storagemodel: StorageQueueModel(Object)
Optional Parameter is:
- delete_queue: bool
]
constant[ remove from modeldefinitions ]
for taget[name[i]] in starred[call[name[range], parameter[call[name[len], parameter[name[self]._modeldefinitions]]]]] begin[:]
if compare[call[call[name[self]._modeldefinitions][name[i]]][constant[modelname]] equal[==] call[name[modeldefinition]][constant[modelname]]] begin[:]
<ast.Delete object at 0x7da1b0a4f610>
break
constant[ delete queue from storage if delete_queue == True ]
if name[delete_blob] begin[:]
call[name[self].__delete__, parameter[name[modeldefinition]]]
call[name[log].info, parameter[call[constant[model {} unregistered successfully. Models are {!s}].format, parameter[call[name[modeldefinition]][constant[modelname]], <ast.ListComp object at 0x7da18bccacb0>]]]]
pass | keyword[def] identifier[unregister_model] ( identifier[self] , identifier[storagemodel] : identifier[object] , identifier[modeldefinition] = keyword[None] , identifier[delete_blob] = keyword[False] ):
literal[string]
literal[string]
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[len] ( identifier[self] . identifier[_modeldefinitions] )):
keyword[if] identifier[self] . identifier[_modeldefinitions] [ identifier[i] ][ literal[string] ]== identifier[modeldefinition] [ literal[string] ]:
keyword[del] identifier[self] . identifier[_modeldefinitions] [ identifier[i] ]
keyword[break]
literal[string]
keyword[if] identifier[delete_blob] :
identifier[self] . identifier[__delete__] ( identifier[modeldefinition] )
identifier[log] . identifier[info] ( literal[string] . identifier[format] ( identifier[modeldefinition] [ literal[string] ],[ identifier[model] [ literal[string] ] keyword[for] identifier[model] keyword[in] identifier[self] . identifier[_modeldefinitions] ]))
keyword[pass] | def unregister_model(self, storagemodel: object, modeldefinition=None, delete_blob=False):
""" clear up an Queueservice for an StorageQueueModel in your Azure Storage Account
Will delete the hole Queue if delete_queue Flag is True!
required Parameter is:
- storagemodel: StorageQueueModel(Object)
Optional Parameter is:
- delete_queue: bool
"""
' remove from modeldefinitions '
for i in range(len(self._modeldefinitions)):
if self._modeldefinitions[i]['modelname'] == modeldefinition['modelname']:
del self._modeldefinitions[i]
break # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['i']]
' delete queue from storage if delete_queue == True '
if delete_blob:
self.__delete__(modeldefinition) # depends on [control=['if'], data=[]]
log.info('model {} unregistered successfully. Models are {!s}'.format(modeldefinition['modelname'], [model['modelname'] for model in self._modeldefinitions]))
pass |
def create(self, obj, ref=None):
    """ Convert *obj* to a new ShaderObject. If the output is a Variable
    with no name, then set its name using *ref*.
    """
    # Resolve *ref* to a plain name string first.
    if isinstance(ref, Variable):
        ref = ref.name
    elif isinstance(ref, string_types) and ref.startswith('gl_'):
        # gl_ names not allowed for variables; strip the prefix and lowercase.
        ref = ref[3:].lower()
    # Allow any type of object to be converted to ShaderObject if it
    # provides a magic method:
    if hasattr(obj, '_shader_object'):
        obj = obj._shader_object()
    if isinstance(obj, ShaderObject):
        # Already a ShaderObject; only fill in a missing Variable name.
        if isinstance(obj, Variable) and obj.name is None:
            obj.name = ref
    elif isinstance(obj, string_types):
        # Plain strings become inline text expressions.
        obj = TextExpression(obj)
    else:
        # Any other value is wrapped in a Variable holding it.
        obj = Variable(ref, obj)
        # Try prepending the name to indicate attribute, uniform, varying
        if obj.vtype and obj.vtype[0] in 'auv':
            obj.name = obj.vtype[0] + '_' + obj.name
return obj | def function[create, parameter[self, obj, ref]]:
constant[ Convert *obj* to a new ShaderObject. If the output is a Variable
with no name, then set its name using *ref*.
]
if call[name[isinstance], parameter[name[ref], name[Variable]]] begin[:]
variable[ref] assign[=] name[ref].name
if call[name[hasattr], parameter[name[obj], constant[_shader_object]]] begin[:]
variable[obj] assign[=] call[name[obj]._shader_object, parameter[]]
if call[name[isinstance], parameter[name[obj], name[ShaderObject]]] begin[:]
if <ast.BoolOp object at 0x7da1b0e43b80> begin[:]
name[obj].name assign[=] name[ref]
return[name[obj]] | keyword[def] identifier[create] ( identifier[self] , identifier[obj] , identifier[ref] = keyword[None] ):
literal[string]
keyword[if] identifier[isinstance] ( identifier[ref] , identifier[Variable] ):
identifier[ref] = identifier[ref] . identifier[name]
keyword[elif] identifier[isinstance] ( identifier[ref] , identifier[string_types] ) keyword[and] identifier[ref] . identifier[startswith] ( literal[string] ):
identifier[ref] = identifier[ref] [ literal[int] :]. identifier[lower] ()
keyword[if] identifier[hasattr] ( identifier[obj] , literal[string] ):
identifier[obj] = identifier[obj] . identifier[_shader_object] ()
keyword[if] identifier[isinstance] ( identifier[obj] , identifier[ShaderObject] ):
keyword[if] identifier[isinstance] ( identifier[obj] , identifier[Variable] ) keyword[and] identifier[obj] . identifier[name] keyword[is] keyword[None] :
identifier[obj] . identifier[name] = identifier[ref]
keyword[elif] identifier[isinstance] ( identifier[obj] , identifier[string_types] ):
identifier[obj] = identifier[TextExpression] ( identifier[obj] )
keyword[else] :
identifier[obj] = identifier[Variable] ( identifier[ref] , identifier[obj] )
keyword[if] identifier[obj] . identifier[vtype] keyword[and] identifier[obj] . identifier[vtype] [ literal[int] ] keyword[in] literal[string] :
identifier[obj] . identifier[name] = identifier[obj] . identifier[vtype] [ literal[int] ]+ literal[string] + identifier[obj] . identifier[name]
keyword[return] identifier[obj] | def create(self, obj, ref=None):
""" Convert *obj* to a new ShaderObject. If the output is a Variable
with no name, then set its name using *ref*.
"""
if isinstance(ref, Variable):
ref = ref.name # depends on [control=['if'], data=[]]
elif isinstance(ref, string_types) and ref.startswith('gl_'):
# gl_ names not allowed for variables
ref = ref[3:].lower() # depends on [control=['if'], data=[]]
# Allow any type of object to be converted to ShaderObject if it
# provides a magic method:
if hasattr(obj, '_shader_object'):
obj = obj._shader_object() # depends on [control=['if'], data=[]]
if isinstance(obj, ShaderObject):
if isinstance(obj, Variable) and obj.name is None:
obj.name = ref # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
elif isinstance(obj, string_types):
obj = TextExpression(obj) # depends on [control=['if'], data=[]]
else:
obj = Variable(ref, obj)
# Try prepending the name to indicate attribute, uniform, varying
if obj.vtype and obj.vtype[0] in 'auv':
obj.name = obj.vtype[0] + '_' + obj.name # depends on [control=['if'], data=[]]
return obj |
def is_multi_timeseries_incomplete(nc, variable):
    '''
    Returns true if the variable is an incomplete multidimensional array
    representation of time series. For more information on what this means see
    CF 1.6 §H.2.2
    http://cfconventions.org/cf-conventions/v1.6.0/cf-conventions.html#_incomplete_multidimensional_array_representation_of_time_series
    :param netCDF4.Dataset nc: An open netCDF dataset
    :param str variable: name of the variable to check
    '''
    # x(i), y(i), z(i), t(i, o)
    # X(i, o)
    dims = nc.variables[variable].dimensions
    cmatrix = coordinate_dimension_matrix(nc)
    # x, y and t coordinates must all be present.
    for req in ('x', 'y', 't'):
        if req not in cmatrix:
            return False
    # x must have exactly one dimension (the instance dimension i) ...
    if len(cmatrix['x']) != 1:
        return False
    # ... shared with y.
    if cmatrix['x'] != cmatrix['y']:
        return False
    # t must be two-dimensional: (instance, observation).
    if len(cmatrix['t']) != 2:
        return False
    # t's first dimension must be the same instance dimension as x/y.
    if cmatrix['x'][0] != cmatrix['t'][0]:
        return False
    i = cmatrix['x'][0]
    o = cmatrix['t'][1]
    # Finally, the data variable itself must be dimensioned (i, o).
    if dims == (i, o):
        return True
return False | def function[is_multi_timeseries_incomplete, parameter[nc, variable]]:
constant[
Returns true if the variable is an incomplete multidimensional array
representation of time series. For more information on what this means see
CF 1.6 §H.2.2
http://cfconventions.org/cf-conventions/v1.6.0/cf-conventions.html#_incomplete_multidimensional_array_representation_of_time_series
:param netCDF4.Dataset nc: An open netCDF dataset
:param str variable: name of the variable to check
]
variable[dims] assign[=] call[name[nc].variables][name[variable]].dimensions
variable[cmatrix] assign[=] call[name[coordinate_dimension_matrix], parameter[name[nc]]]
for taget[name[req]] in starred[tuple[[<ast.Constant object at 0x7da18bc73850>, <ast.Constant object at 0x7da18bc70190>, <ast.Constant object at 0x7da18bc70cd0>]]] begin[:]
if compare[name[req] <ast.NotIn object at 0x7da2590d7190> name[cmatrix]] begin[:]
return[constant[False]]
if compare[call[name[len], parameter[call[name[cmatrix]][constant[x]]]] not_equal[!=] constant[1]] begin[:]
return[constant[False]]
if compare[call[name[cmatrix]][constant[x]] not_equal[!=] call[name[cmatrix]][constant[y]]] begin[:]
return[constant[False]]
if compare[call[name[len], parameter[call[name[cmatrix]][constant[t]]]] not_equal[!=] constant[2]] begin[:]
return[constant[False]]
if compare[call[call[name[cmatrix]][constant[x]]][constant[0]] not_equal[!=] call[call[name[cmatrix]][constant[t]]][constant[0]]] begin[:]
return[constant[False]]
variable[i] assign[=] call[call[name[cmatrix]][constant[x]]][constant[0]]
variable[o] assign[=] call[call[name[cmatrix]][constant[t]]][constant[1]]
if compare[name[dims] equal[==] tuple[[<ast.Name object at 0x7da18bc71510>, <ast.Name object at 0x7da18bc70ca0>]]] begin[:]
return[constant[True]]
return[constant[False]] | keyword[def] identifier[is_multi_timeseries_incomplete] ( identifier[nc] , identifier[variable] ):
literal[string]
identifier[dims] = identifier[nc] . identifier[variables] [ identifier[variable] ]. identifier[dimensions]
identifier[cmatrix] = identifier[coordinate_dimension_matrix] ( identifier[nc] )
keyword[for] identifier[req] keyword[in] ( literal[string] , literal[string] , literal[string] ):
keyword[if] identifier[req] keyword[not] keyword[in] identifier[cmatrix] :
keyword[return] keyword[False]
keyword[if] identifier[len] ( identifier[cmatrix] [ literal[string] ])!= literal[int] :
keyword[return] keyword[False]
keyword[if] identifier[cmatrix] [ literal[string] ]!= identifier[cmatrix] [ literal[string] ]:
keyword[return] keyword[False]
keyword[if] identifier[len] ( identifier[cmatrix] [ literal[string] ])!= literal[int] :
keyword[return] keyword[False]
keyword[if] identifier[cmatrix] [ literal[string] ][ literal[int] ]!= identifier[cmatrix] [ literal[string] ][ literal[int] ]:
keyword[return] keyword[False]
identifier[i] = identifier[cmatrix] [ literal[string] ][ literal[int] ]
identifier[o] = identifier[cmatrix] [ literal[string] ][ literal[int] ]
keyword[if] identifier[dims] ==( identifier[i] , identifier[o] ):
keyword[return] keyword[True]
keyword[return] keyword[False] | def is_multi_timeseries_incomplete(nc, variable):
"""
Returns true if the variable is an incomplete multidimensional array
representation of time series. For more information on what this means see
CF 1.6 §H.2.2
http://cfconventions.org/cf-conventions/v1.6.0/cf-conventions.html#_incomplete_multidimensional_array_representation_of_time_series
:param netCDF4.Dataset nc: An open netCDF dataset
:param str variable: name of the variable to check
"""
# x(i), y(i), z(i), t(i, o)
# X(i, o)
dims = nc.variables[variable].dimensions
cmatrix = coordinate_dimension_matrix(nc)
for req in ('x', 'y', 't'):
if req not in cmatrix:
return False # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['req']]
if len(cmatrix['x']) != 1:
return False # depends on [control=['if'], data=[]]
if cmatrix['x'] != cmatrix['y']:
return False # depends on [control=['if'], data=[]]
if len(cmatrix['t']) != 2:
return False # depends on [control=['if'], data=[]]
if cmatrix['x'][0] != cmatrix['t'][0]:
return False # depends on [control=['if'], data=[]]
i = cmatrix['x'][0]
o = cmatrix['t'][1]
if dims == (i, o):
return True # depends on [control=['if'], data=[]]
return False |
def convert_result(converter):
    """Decorator factory: wrap a function so its return value is passed
    through *converter* before being returned to the caller."""
    def decorate(fn):
        # wraps copies fn's metadata (name, docstring, ...) onto the wrapper.
        @inspection.wraps(fn)
        def new_fn(*args, **kwargs):
            # Call the wrapped function, then convert its result.
            return converter(fn(*args, **kwargs))
        return new_fn
return decorate | def function[convert_result, parameter[converter]]:
constant[Decorator that can convert the result of a function call.]
def function[decorate, parameter[fn]]:
def function[new_fn, parameter[]]:
return[call[name[converter], parameter[call[name[fn], parameter[<ast.Starred object at 0x7da1b0d3eb60>]]]]]
return[name[new_fn]]
return[name[decorate]] | keyword[def] identifier[convert_result] ( identifier[converter] ):
literal[string]
keyword[def] identifier[decorate] ( identifier[fn] ):
@ identifier[inspection] . identifier[wraps] ( identifier[fn] )
keyword[def] identifier[new_fn] (* identifier[args] ,** identifier[kwargs] ):
keyword[return] identifier[converter] ( identifier[fn] (* identifier[args] ,** identifier[kwargs] ))
keyword[return] identifier[new_fn]
keyword[return] identifier[decorate] | def convert_result(converter):
"""Decorator that can convert the result of a function call."""
def decorate(fn):
@inspection.wraps(fn)
def new_fn(*args, **kwargs):
return converter(fn(*args, **kwargs))
return new_fn
return decorate |
def plot_fiber_slice(self, plane=None, index=None, fig=None):
    r"""
    Plot one slice from the fiber image
    Parameters
    ----------
    plane : array_like
        List of 3 values, [x,y,z], 2 must be zero and the other must be between
        zero and one representing the fraction of the domain to slice along
        the non-zero axis
    index : array_like
        similar to plane but instead of the fraction an index of the image is
        used
    fig : optional
        Passed through and returned unchanged; presumably a matplotlib
        figure to draw into — a new figure is created when None.
        (NOTE(review): confirm expected type against callers.)
    """
    # A fiber image must have been generated before a slice can be plotted.
    if hasattr(self, '_fiber_image') is False:
        logger.warning('This method only works when a fiber image exists')
        return
    slice_image = self._get_fiber_slice(plane, index)
    if slice_image is not None:
        if fig is None:
            plt.figure()
        # Transpose so the first image axis runs horizontally;
        # origin='lower' puts (0, 0) at the bottom-left corner.
        plt.imshow(slice_image.T, cmap='Greys', origin='lower',
                   interpolation='nearest')
return fig | def function[plot_fiber_slice, parameter[self, plane, index, fig]]:
constant[
Plot one slice from the fiber image
Parameters
----------
plane : array_like
List of 3 values, [x,y,z], 2 must be zero and the other must be between
zero and one representing the fraction of the domain to slice along
the non-zero axis
index : array_like
similar to plane but instead of the fraction an index of the image is
used
]
if compare[call[name[hasattr], parameter[name[self], constant[_fiber_image]]] is constant[False]] begin[:]
call[name[logger].warning, parameter[constant[This method only works when a fiber image exists]]]
return[None]
variable[slice_image] assign[=] call[name[self]._get_fiber_slice, parameter[name[plane], name[index]]]
if compare[name[slice_image] is_not constant[None]] begin[:]
if compare[name[fig] is constant[None]] begin[:]
call[name[plt].figure, parameter[]]
call[name[plt].imshow, parameter[name[slice_image].T]]
return[name[fig]] | keyword[def] identifier[plot_fiber_slice] ( identifier[self] , identifier[plane] = keyword[None] , identifier[index] = keyword[None] , identifier[fig] = keyword[None] ):
literal[string]
keyword[if] identifier[hasattr] ( identifier[self] , literal[string] ) keyword[is] keyword[False] :
identifier[logger] . identifier[warning] ( literal[string] )
keyword[return]
identifier[slice_image] = identifier[self] . identifier[_get_fiber_slice] ( identifier[plane] , identifier[index] )
keyword[if] identifier[slice_image] keyword[is] keyword[not] keyword[None] :
keyword[if] identifier[fig] keyword[is] keyword[None] :
identifier[plt] . identifier[figure] ()
identifier[plt] . identifier[imshow] ( identifier[slice_image] . identifier[T] , identifier[cmap] = literal[string] , identifier[origin] = literal[string] ,
identifier[interpolation] = literal[string] )
keyword[return] identifier[fig] | def plot_fiber_slice(self, plane=None, index=None, fig=None):
"""
Plot one slice from the fiber image
Parameters
----------
plane : array_like
List of 3 values, [x,y,z], 2 must be zero and the other must be between
zero and one representing the fraction of the domain to slice along
the non-zero axis
index : array_like
similar to plane but instead of the fraction an index of the image is
used
"""
if hasattr(self, '_fiber_image') is False:
logger.warning('This method only works when a fiber image exists')
return # depends on [control=['if'], data=[]]
slice_image = self._get_fiber_slice(plane, index)
if slice_image is not None:
if fig is None:
plt.figure() # depends on [control=['if'], data=[]]
plt.imshow(slice_image.T, cmap='Greys', origin='lower', interpolation='nearest') # depends on [control=['if'], data=['slice_image']]
return fig |
def image_get(fingerprint,
              remote_addr=None,
              cert=None,
              key=None,
              verify_cert=True,
              _raw=False):
    ''' Get an image by its fingerprint
    fingerprint :
        The fingerprint of the image to retrieve
    remote_addr :
        A URL to a remote server; you also have to give cert and key if
        you provide remote_addr and it is a TCP address!
        Examples:
            https://myserver.lan:8443
            /var/lib/mysocket.sock
    cert :
        PEM Formatted SSL Certificate.
        Examples:
            ~/.config/lxc/client.crt
    key :
        PEM Formatted SSL Key.
        Examples:
            ~/.config/lxc/client.key
    verify_cert : True
        Whether to verify the cert; this is True by default,
        but in most cases you want to set it off, as LXD
        normally uses self-signed certificates.
    _raw : False
        Return the raw pylxd object or a dict of it?
    CLI Examples:
    .. code-block:: bash
        $ salt '*' lxd.image_get <fingerprint>
    '''
    client = pylxd_client_get(remote_addr, cert, key, verify_cert)
    image = None
    try:
        image = client.images.get(fingerprint)
    except pylxd.exceptions.LXDAPIException:
        # Surface a Salt-friendly error instead of the raw pylxd exception.
        raise SaltInvocationError(
            'Image with fingerprint \'{0}\' not found'.format(fingerprint)
        )
    # Either hand back the pylxd Image object itself ...
    if _raw:
        return image
return _pylxd_model_to_dict(image) | def function[image_get, parameter[fingerprint, remote_addr, cert, key, verify_cert, _raw]]:
constant[ Get an image by its fingerprint
fingerprint :
The fingerprint of the image to retrieve
remote_addr :
An URL to a remote Server, you also have to give cert and key if
you provide remote_addr and its a TCP Address!
Examples:
https://myserver.lan:8443
/var/lib/mysocket.sock
cert :
PEM Formatted SSL Certificate.
Examples:
~/.config/lxc/client.crt
key :
PEM Formatted SSL Key.
Examples:
~/.config/lxc/client.key
verify_cert : True
Wherever to verify the cert, this is by default True
but in the most cases you want to set it off as LXD
normaly uses self-signed certificates.
_raw : False
Return the raw pylxd object or a dict of it?
CLI Examples:
..code-block:: bash
$ salt '*' lxd.image_get <fingerprint>
]
variable[client] assign[=] call[name[pylxd_client_get], parameter[name[remote_addr], name[cert], name[key], name[verify_cert]]]
variable[image] assign[=] constant[None]
<ast.Try object at 0x7da207f01900>
if name[_raw] begin[:]
return[name[image]]
return[call[name[_pylxd_model_to_dict], parameter[name[image]]]] | keyword[def] identifier[image_get] ( identifier[fingerprint] ,
identifier[remote_addr] = keyword[None] ,
identifier[cert] = keyword[None] ,
identifier[key] = keyword[None] ,
identifier[verify_cert] = keyword[True] ,
identifier[_raw] = keyword[False] ):
literal[string]
identifier[client] = identifier[pylxd_client_get] ( identifier[remote_addr] , identifier[cert] , identifier[key] , identifier[verify_cert] )
identifier[image] = keyword[None]
keyword[try] :
identifier[image] = identifier[client] . identifier[images] . identifier[get] ( identifier[fingerprint] )
keyword[except] identifier[pylxd] . identifier[exceptions] . identifier[LXDAPIException] :
keyword[raise] identifier[SaltInvocationError] (
literal[string] . identifier[format] ( identifier[fingerprint] )
)
keyword[if] identifier[_raw] :
keyword[return] identifier[image]
keyword[return] identifier[_pylxd_model_to_dict] ( identifier[image] ) | def image_get(fingerprint, remote_addr=None, cert=None, key=None, verify_cert=True, _raw=False):
""" Get an image by its fingerprint
fingerprint :
The fingerprint of the image to retrieve
remote_addr :
An URL to a remote Server, you also have to give cert and key if
you provide remote_addr and its a TCP Address!
Examples:
https://myserver.lan:8443
/var/lib/mysocket.sock
cert :
PEM Formatted SSL Certificate.
Examples:
~/.config/lxc/client.crt
key :
PEM Formatted SSL Key.
Examples:
~/.config/lxc/client.key
verify_cert : True
Wherever to verify the cert, this is by default True
but in the most cases you want to set it off as LXD
normaly uses self-signed certificates.
_raw : False
Return the raw pylxd object or a dict of it?
CLI Examples:
..code-block:: bash
$ salt '*' lxd.image_get <fingerprint>
"""
client = pylxd_client_get(remote_addr, cert, key, verify_cert)
image = None
try:
image = client.images.get(fingerprint) # depends on [control=['try'], data=[]]
except pylxd.exceptions.LXDAPIException:
raise SaltInvocationError("Image with fingerprint '{0}' not found".format(fingerprint)) # depends on [control=['except'], data=[]]
if _raw:
return image # depends on [control=['if'], data=[]]
return _pylxd_model_to_dict(image) |
def handle_onchain_secretreveal(
        mediator_state: MediatorTransferState,
        onchain_secret_reveal: ContractReceiveSecretReveal,
        channelidentifiers_to_channels: ChannelMap,
        pseudo_random_generator: random.Random,
        block_number: BlockNumber,
) -> TransitionResult[MediatorTransferState]:
    """ The secret was revealed on-chain, set the state of all transfers to
    secret known.
    Returns a TransitionResult with the (possibly updated) mediator state and
    any events produced; no events when the reveal does not match this
    transfer's secrethash.
    """
    secrethash = onchain_secret_reveal.secrethash
    # Only act on reveals whose secret actually matches this transfer.
    is_valid_reveal = is_valid_secret_reveal(
        state_change=onchain_secret_reveal,
        transfer_secrethash=mediator_state.secrethash,
        secret=onchain_secret_reveal.secret,
    )
    if is_valid_reveal:
        secret = onchain_secret_reveal.secret
        # Compare against the block number at which the event was emitted.
        block_number = onchain_secret_reveal.block_number
        # Mark the secret as known on-chain across all affected channels.
        secret_reveal = set_onchain_secret(
            state=mediator_state,
            channelidentifiers_to_channels=channelidentifiers_to_channels,
            secret=secret,
            secrethash=secrethash,
            block_number=block_number,
        )
        # Produce the balance-proof events to unlock the mediated transfers.
        balance_proof = events_for_balanceproof(
            channelidentifiers_to_channels=channelidentifiers_to_channels,
            transfers_pair=mediator_state.transfers_pair,
            pseudo_random_generator=pseudo_random_generator,
            block_number=block_number,
            secret=secret,
            secrethash=secrethash,
        )
        iteration = TransitionResult(mediator_state, secret_reveal + balance_proof)
    else:
        # Reveal does not concern this transfer: state unchanged, no events.
        iteration = TransitionResult(mediator_state, list())
return iteration | def function[handle_onchain_secretreveal, parameter[mediator_state, onchain_secret_reveal, channelidentifiers_to_channels, pseudo_random_generator, block_number]]:
constant[ The secret was revealed on-chain, set the state of all transfers to
secret known.
]
variable[secrethash] assign[=] name[onchain_secret_reveal].secrethash
variable[is_valid_reveal] assign[=] call[name[is_valid_secret_reveal], parameter[]]
if name[is_valid_reveal] begin[:]
variable[secret] assign[=] name[onchain_secret_reveal].secret
variable[block_number] assign[=] name[onchain_secret_reveal].block_number
variable[secret_reveal] assign[=] call[name[set_onchain_secret], parameter[]]
variable[balance_proof] assign[=] call[name[events_for_balanceproof], parameter[]]
variable[iteration] assign[=] call[name[TransitionResult], parameter[name[mediator_state], binary_operation[name[secret_reveal] + name[balance_proof]]]]
return[name[iteration]] | keyword[def] identifier[handle_onchain_secretreveal] (
identifier[mediator_state] : identifier[MediatorTransferState] ,
identifier[onchain_secret_reveal] : identifier[ContractReceiveSecretReveal] ,
identifier[channelidentifiers_to_channels] : identifier[ChannelMap] ,
identifier[pseudo_random_generator] : identifier[random] . identifier[Random] ,
identifier[block_number] : identifier[BlockNumber] ,
)-> identifier[TransitionResult] [ identifier[MediatorTransferState] ]:
literal[string]
identifier[secrethash] = identifier[onchain_secret_reveal] . identifier[secrethash]
identifier[is_valid_reveal] = identifier[is_valid_secret_reveal] (
identifier[state_change] = identifier[onchain_secret_reveal] ,
identifier[transfer_secrethash] = identifier[mediator_state] . identifier[secrethash] ,
identifier[secret] = identifier[onchain_secret_reveal] . identifier[secret] ,
)
keyword[if] identifier[is_valid_reveal] :
identifier[secret] = identifier[onchain_secret_reveal] . identifier[secret]
identifier[block_number] = identifier[onchain_secret_reveal] . identifier[block_number]
identifier[secret_reveal] = identifier[set_onchain_secret] (
identifier[state] = identifier[mediator_state] ,
identifier[channelidentifiers_to_channels] = identifier[channelidentifiers_to_channels] ,
identifier[secret] = identifier[secret] ,
identifier[secrethash] = identifier[secrethash] ,
identifier[block_number] = identifier[block_number] ,
)
identifier[balance_proof] = identifier[events_for_balanceproof] (
identifier[channelidentifiers_to_channels] = identifier[channelidentifiers_to_channels] ,
identifier[transfers_pair] = identifier[mediator_state] . identifier[transfers_pair] ,
identifier[pseudo_random_generator] = identifier[pseudo_random_generator] ,
identifier[block_number] = identifier[block_number] ,
identifier[secret] = identifier[secret] ,
identifier[secrethash] = identifier[secrethash] ,
)
identifier[iteration] = identifier[TransitionResult] ( identifier[mediator_state] , identifier[secret_reveal] + identifier[balance_proof] )
keyword[else] :
identifier[iteration] = identifier[TransitionResult] ( identifier[mediator_state] , identifier[list] ())
keyword[return] identifier[iteration] | def handle_onchain_secretreveal(mediator_state: MediatorTransferState, onchain_secret_reveal: ContractReceiveSecretReveal, channelidentifiers_to_channels: ChannelMap, pseudo_random_generator: random.Random, block_number: BlockNumber) -> TransitionResult[MediatorTransferState]:
""" The secret was revealed on-chain, set the state of all transfers to
secret known.
"""
secrethash = onchain_secret_reveal.secrethash
is_valid_reveal = is_valid_secret_reveal(state_change=onchain_secret_reveal, transfer_secrethash=mediator_state.secrethash, secret=onchain_secret_reveal.secret)
if is_valid_reveal:
secret = onchain_secret_reveal.secret
# Compare against the block number at which the event was emitted.
block_number = onchain_secret_reveal.block_number
secret_reveal = set_onchain_secret(state=mediator_state, channelidentifiers_to_channels=channelidentifiers_to_channels, secret=secret, secrethash=secrethash, block_number=block_number)
balance_proof = events_for_balanceproof(channelidentifiers_to_channels=channelidentifiers_to_channels, transfers_pair=mediator_state.transfers_pair, pseudo_random_generator=pseudo_random_generator, block_number=block_number, secret=secret, secrethash=secrethash)
iteration = TransitionResult(mediator_state, secret_reveal + balance_proof) # depends on [control=['if'], data=[]]
else:
iteration = TransitionResult(mediator_state, list())
return iteration |
def isready(self, obj) -> bool:
    """Report whether the protected property has been marked ready for
    the given object.  Objects with no recorded state yield |False|."""
    state = vars(obj)
    return state.get(self.name, False)
constant[Return |True| or |False| to indicate if the protected
property is ready for the given object. If the object is
unknow, |ProtectedProperty| returns |False|.]
return[call[call[name[vars], parameter[name[obj]]].get, parameter[name[self].name, constant[False]]]] | keyword[def] identifier[isready] ( identifier[self] , identifier[obj] )-> identifier[bool] :
literal[string]
keyword[return] identifier[vars] ( identifier[obj] ). identifier[get] ( identifier[self] . identifier[name] , keyword[False] ) | def isready(self, obj) -> bool:
"""Return |True| or |False| to indicate if the protected
property is ready for the given object. If the object is
unknow, |ProtectedProperty| returns |False|."""
return vars(obj).get(self.name, False) |
def create_team(self, name, repo_names=None, permission=''):
    """Assuming the authenticated user owns this organization,
    create and return a new team.
    :param str name: (required), name to be given to the team
    :param list repo_names: (optional) repositories, e.g.
        ['github/dotfiles']
    :param str permission: (optional), options:
        - ``pull`` -- (default) members can not push or administer
            repositories accessible by this team
        - ``push`` -- members can push and pull but not administer
            repositories accessible by this team
        - ``admin`` -- members can push, pull and administer
            repositories accessible by this team
    :returns: :class:`Team <Team>`
    """
    # NOTE: the default was previously a mutable list literal ([]),
    # which is shared between calls; use None as the sentinel and
    # normalize to a fresh empty list here instead.
    data = {'name': name, 'repo_names': repo_names or [],
            'permission': permission}
    url = self._build_url('teams', base_url=self._api)
    json = self._json(self._post(url, data), 201)
    # A 201 with a body means the team was created; otherwise report None.
    return Team(json, self._session) if json else None
constant[Assuming the authenticated user owns this organization,
create and return a new team.
:param str name: (required), name to be given to the team
:param list repo_names: (optional) repositories, e.g.
['github/dotfiles']
:param str permission: (optional), options:
- ``pull`` -- (default) members can not push or administer
repositories accessible by this team
- ``push`` -- members can push and pull but not administer
repositories accessible by this team
- ``admin`` -- members can push, pull and administer
repositories accessible by this team
:returns: :class:`Team <Team>`
]
variable[data] assign[=] dictionary[[<ast.Constant object at 0x7da1b0e0f4f0>, <ast.Constant object at 0x7da1b0e0e1a0>, <ast.Constant object at 0x7da1b0e0dcc0>], [<ast.Name object at 0x7da1b0e0d1b0>, <ast.Name object at 0x7da1b0e0cd90>, <ast.Name object at 0x7da1b0e0cd30>]]
variable[url] assign[=] call[name[self]._build_url, parameter[constant[teams]]]
variable[json] assign[=] call[name[self]._json, parameter[call[name[self]._post, parameter[name[url], name[data]]], constant[201]]]
return[<ast.IfExp object at 0x7da1b0e0d750>] | keyword[def] identifier[create_team] ( identifier[self] , identifier[name] , identifier[repo_names] =[], identifier[permission] = literal[string] ):
literal[string]
identifier[data] ={ literal[string] : identifier[name] , literal[string] : identifier[repo_names] ,
literal[string] : identifier[permission] }
identifier[url] = identifier[self] . identifier[_build_url] ( literal[string] , identifier[base_url] = identifier[self] . identifier[_api] )
identifier[json] = identifier[self] . identifier[_json] ( identifier[self] . identifier[_post] ( identifier[url] , identifier[data] ), literal[int] )
keyword[return] identifier[Team] ( identifier[json] , identifier[self] . identifier[_session] ) keyword[if] identifier[json] keyword[else] keyword[None] | def create_team(self, name, repo_names=[], permission=''):
"""Assuming the authenticated user owns this organization,
create and return a new team.
:param str name: (required), name to be given to the team
:param list repo_names: (optional) repositories, e.g.
['github/dotfiles']
:param str permission: (optional), options:
- ``pull`` -- (default) members can not push or administer
repositories accessible by this team
- ``push`` -- members can push and pull but not administer
repositories accessible by this team
- ``admin`` -- members can push, pull and administer
repositories accessible by this team
:returns: :class:`Team <Team>`
"""
data = {'name': name, 'repo_names': repo_names, 'permission': permission}
url = self._build_url('teams', base_url=self._api)
json = self._json(self._post(url, data), 201)
return Team(json, self._session) if json else None |
def match(self, string):
    """Returns True if the pattern matches the string"""
    if self.casesensitive:
        subject, pattern = string, self.pattern
    else:
        # Case-insensitive mode: normalize both sides before matching.
        subject, pattern = string.lower(), self.pattern.lower()
    return fnmatch.fnmatch(subject, pattern)
constant[Returns True if the pattern matches the string]
if name[self].casesensitive begin[:]
return[call[name[fnmatch].fnmatch, parameter[name[string], name[self].pattern]]] | keyword[def] identifier[match] ( identifier[self] , identifier[string] ):
literal[string]
keyword[if] identifier[self] . identifier[casesensitive] :
keyword[return] identifier[fnmatch] . identifier[fnmatch] ( identifier[string] , identifier[self] . identifier[pattern] )
keyword[else] :
keyword[return] identifier[fnmatch] . identifier[fnmatch] ( identifier[string] . identifier[lower] (), identifier[self] . identifier[pattern] . identifier[lower] ()) | def match(self, string):
"""Returns True if the pattern matches the string"""
if self.casesensitive:
return fnmatch.fnmatch(string, self.pattern) # depends on [control=['if'], data=[]]
else:
return fnmatch.fnmatch(string.lower(), self.pattern.lower()) |
def _apply_default_values(catalog, default_values):
    """Apply default values to the fields of a catalog.

    A default is only written when the field is empty; fields that
    already hold a value are left untouched.  Defaults are supported
    for the following classes only:
        catalog
        dataset
        distribution
        field

    Args:
        catalog (dict): A catalog.
        default_values (dict): Default values for some catalog fields.
            {
                "dataset_issued": "2017-06-22",
                "distribution_issued": "2017-06-22"
            }
    """
    for prop, default in iteritems(default_values):
        # Keys look like "<class>_<json>_<path>": the first token names
        # the metadata class, the rest is the path within that class.
        parts = prop.split("_")
        target_class = parts[0]
        json_path = parts[1:]

        # catalog-level defaults
        if target_class == "catalog":
            _set_default_value(catalog, json_path, default)

        # dataset-level defaults
        elif target_class == "dataset":
            for dataset in catalog["dataset"]:
                _set_default_value(dataset, json_path, default)

        # distribution-level defaults
        elif target_class == "distribution":
            for dataset in catalog["dataset"]:
                for distribution in dataset["distribution"]:
                    _set_default_value(distribution, json_path, default)

        # field-level defaults
        elif target_class == "field":
            for dataset in catalog["dataset"]:
                for distribution in dataset["distribution"]:
                    # "field" is optional on a distribution.
                    for field_meta in distribution.get("field") or []:
                        _set_default_value(field_meta, json_path, default)
constant[Aplica valores default a los campos de un catálogo.
Si el campo está vacío, aplica el default. Si tiene un valor, deja el valor
que estaba. Sólo soporta defaults para las siguientes clases:
catalog
dataset
distribution
field
Args:
catalog (dict): Un catálogo.
default_values (dict): Valores default para algunos de los campos del
catálogo.
{
"dataset_issued": "2017-06-22",
"distribution_issued": "2017-06-22"
}
]
for taget[tuple[[<ast.Name object at 0x7da1b04fd0f0>, <ast.Name object at 0x7da1b04fc040>]]] in starred[call[name[iteritems], parameter[name[default_values]]]] begin[:]
variable[class_metadata] assign[=] call[call[name[field].split, parameter[constant[_]]]][constant[0]]
variable[field_json_path] assign[=] call[call[name[field].split, parameter[constant[_]]]][<ast.Slice object at 0x7da1b04fd300>]
if compare[name[class_metadata] equal[==] constant[catalog]] begin[:]
call[name[_set_default_value], parameter[name[catalog], name[field_json_path], name[default_value]]] | keyword[def] identifier[_apply_default_values] ( identifier[catalog] , identifier[default_values] ):
literal[string]
keyword[for] identifier[field] , identifier[default_value] keyword[in] identifier[iteritems] ( identifier[default_values] ):
identifier[class_metadata] = identifier[field] . identifier[split] ( literal[string] )[ literal[int] ]
identifier[field_json_path] = identifier[field] . identifier[split] ( literal[string] )[ literal[int] :]
keyword[if] identifier[class_metadata] == literal[string] :
identifier[_set_default_value] ( identifier[catalog] , identifier[field_json_path] , identifier[default_value] )
keyword[elif] identifier[class_metadata] == literal[string] :
keyword[for] identifier[dataset] keyword[in] identifier[catalog] [ literal[string] ]:
identifier[_set_default_value] ( identifier[dataset] , identifier[field_json_path] , identifier[default_value] )
keyword[elif] identifier[class_metadata] == literal[string] :
keyword[for] identifier[dataset] keyword[in] identifier[catalog] [ literal[string] ]:
keyword[for] identifier[distribution] keyword[in] identifier[dataset] [ literal[string] ]:
identifier[_set_default_value] (
identifier[distribution] , identifier[field_json_path] , identifier[default_value] )
keyword[elif] identifier[class_metadata] == literal[string] :
keyword[for] identifier[dataset] keyword[in] identifier[catalog] [ literal[string] ]:
keyword[for] identifier[distribution] keyword[in] identifier[dataset] [ literal[string] ]:
keyword[if] identifier[distribution] . identifier[get] ( literal[string] ):
keyword[for] identifier[field] keyword[in] identifier[distribution] [ literal[string] ]:
identifier[_set_default_value] (
identifier[field] , identifier[field_json_path] , identifier[default_value] ) | def _apply_default_values(catalog, default_values):
"""Aplica valores default a los campos de un catálogo.
Si el campo está vacío, aplica el default. Si tiene un valor, deja el valor
que estaba. Sólo soporta defaults para las siguientes clases:
catalog
dataset
distribution
field
Args:
catalog (dict): Un catálogo.
default_values (dict): Valores default para algunos de los campos del
catálogo.
{
"dataset_issued": "2017-06-22",
"distribution_issued": "2017-06-22"
}
"""
for (field, default_value) in iteritems(default_values):
class_metadata = field.split('_')[0]
field_json_path = field.split('_')[1:]
# valores default de catálogo
if class_metadata == 'catalog':
_set_default_value(catalog, field_json_path, default_value) # depends on [control=['if'], data=[]]
# valores default de dataset
elif class_metadata == 'dataset':
for dataset in catalog['dataset']:
_set_default_value(dataset, field_json_path, default_value) # depends on [control=['for'], data=['dataset']] # depends on [control=['if'], data=[]]
# valores default de distribución
elif class_metadata == 'distribution':
for dataset in catalog['dataset']:
for distribution in dataset['distribution']:
_set_default_value(distribution, field_json_path, default_value) # depends on [control=['for'], data=['distribution']] # depends on [control=['for'], data=['dataset']] # depends on [control=['if'], data=[]]
# valores default de field
elif class_metadata == 'field':
for dataset in catalog['dataset']:
for distribution in dataset['distribution']:
# campo "field" en una "distribution" no es obligatorio
if distribution.get('field'):
for field in distribution['field']:
_set_default_value(field, field_json_path, default_value) # depends on [control=['for'], data=['field']] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['distribution']] # depends on [control=['for'], data=['dataset']] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]] |
def _syspath(dev):
    '''
    Full SysFS path of a device
    '''
    base = _devbase(dev)
    # Numbered names such as sda2 or vda1 are nested under their parent
    # device directory (sda -> sda/sda2) before being joined to /sys/block.
    base = re.sub(r'^([vhs][a-z]+)([0-9]+)', r'\1/\1\2', base)
    return os.path.join('/sys/block/', base)
constant[
Full SysFS path of a device
]
variable[dev] assign[=] call[name[_devbase], parameter[name[dev]]]
variable[dev] assign[=] call[name[re].sub, parameter[constant[^([vhs][a-z]+)([0-9]+)], constant[\1/\1\2], name[dev]]]
return[call[name[os].path.join, parameter[constant[/sys/block/], name[dev]]]] | keyword[def] identifier[_syspath] ( identifier[dev] ):
literal[string]
identifier[dev] = identifier[_devbase] ( identifier[dev] )
identifier[dev] = identifier[re] . identifier[sub] ( literal[string] , literal[string] , identifier[dev] )
keyword[return] identifier[os] . identifier[path] . identifier[join] ( literal[string] , identifier[dev] ) | def _syspath(dev):
"""
Full SysFS path of a device
"""
dev = _devbase(dev)
dev = re.sub('^([vhs][a-z]+)([0-9]+)', '\\1/\\1\\2', dev)
# name = re.sub(r'^([a-z]+)(?<!(bcache|md|dm))([0-9]+)', r'\1/\1\2', name)
return os.path.join('/sys/block/', dev) |
def _get_columns(self, blueprint):
"""
Get the blueprint's columns definitions.
:param blueprint: The blueprint
:type blueprint: Blueprint
:rtype: list
"""
columns = []
for column in blueprint.get_added_columns():
sql = self.wrap(column) + ' ' + self._get_type(column)
columns.append(self._add_modifiers(sql, blueprint, column))
return columns | def function[_get_columns, parameter[self, blueprint]]:
constant[
Get the blueprint's columns definitions.
:param blueprint: The blueprint
:type blueprint: Blueprint
:rtype: list
]
variable[columns] assign[=] list[[]]
for taget[name[column]] in starred[call[name[blueprint].get_added_columns, parameter[]]] begin[:]
variable[sql] assign[=] binary_operation[binary_operation[call[name[self].wrap, parameter[name[column]]] + constant[ ]] + call[name[self]._get_type, parameter[name[column]]]]
call[name[columns].append, parameter[call[name[self]._add_modifiers, parameter[name[sql], name[blueprint], name[column]]]]]
return[name[columns]] | keyword[def] identifier[_get_columns] ( identifier[self] , identifier[blueprint] ):
literal[string]
identifier[columns] =[]
keyword[for] identifier[column] keyword[in] identifier[blueprint] . identifier[get_added_columns] ():
identifier[sql] = identifier[self] . identifier[wrap] ( identifier[column] )+ literal[string] + identifier[self] . identifier[_get_type] ( identifier[column] )
identifier[columns] . identifier[append] ( identifier[self] . identifier[_add_modifiers] ( identifier[sql] , identifier[blueprint] , identifier[column] ))
keyword[return] identifier[columns] | def _get_columns(self, blueprint):
"""
Get the blueprint's columns definitions.
:param blueprint: The blueprint
:type blueprint: Blueprint
:rtype: list
"""
columns = []
for column in blueprint.get_added_columns():
sql = self.wrap(column) + ' ' + self._get_type(column)
columns.append(self._add_modifiers(sql, blueprint, column)) # depends on [control=['for'], data=['column']]
return columns |
def _exception_raise(self):
    """
    Raises a pending exception that was recorded while getting a
    Task ready for execution.

    The recorded exc_info may be a 3-tuple (type, value, traceback)
    or a 2-tuple (type, value); both shapes are handled.  The actual
    raise statements are wrapped in exec() because the Python 2
    three-argument raise is a SyntaxError under Python 3 and could
    not otherwise appear in the same module.
    """
    # Copy the recorded exc_info tuple before unpacking it.
    exc = self.exc_info()[:]
    try:
        exc_type, exc_value, exc_traceback = exc
    except ValueError:
        # Only (type, value) was recorded; there is no traceback.
        exc_type, exc_value = exc
        exc_traceback = None
    # raise exc_type(exc_value).with_traceback(exc_traceback)
    if sys.version_info[0] == 2:
        # Python 2 three-argument raise, hidden in exec() so this file
        # still parses under Python 3.
        exec("raise exc_type, exc_value, exc_traceback")
    else: # sys.version_info[0] == 3:
        if isinstance(exc_value, Exception): #hasattr(exc_value, 'with_traceback'):
            # If exc_value is an exception, then just reraise
            exec("raise exc_value.with_traceback(exc_traceback)")
        else:
            # else we'll create an exception using the value and raise that
            exec("raise exc_type(exc_value).with_traceback(exc_traceback)")
constant[
Raises a pending exception that was recorded while getting a
Task ready for execution.
]
variable[exc] assign[=] call[call[name[self].exc_info, parameter[]]][<ast.Slice object at 0x7da20c76e050>]
<ast.Try object at 0x7da20c76f7c0>
if compare[call[name[sys].version_info][constant[0]] equal[==] constant[2]] begin[:]
call[name[exec], parameter[constant[raise exc_type, exc_value, exc_traceback]]] | keyword[def] identifier[_exception_raise] ( identifier[self] ):
literal[string]
identifier[exc] = identifier[self] . identifier[exc_info] ()[:]
keyword[try] :
identifier[exc_type] , identifier[exc_value] , identifier[exc_traceback] = identifier[exc]
keyword[except] identifier[ValueError] :
identifier[exc_type] , identifier[exc_value] = identifier[exc]
identifier[exc_traceback] = keyword[None]
keyword[if] identifier[sys] . identifier[version_info] [ literal[int] ]== literal[int] :
identifier[exec] ( literal[string] )
keyword[else] :
keyword[if] identifier[isinstance] ( identifier[exc_value] , identifier[Exception] ):
identifier[exec] ( literal[string] )
keyword[else] :
identifier[exec] ( literal[string] ) | def _exception_raise(self):
"""
Raises a pending exception that was recorded while getting a
Task ready for execution.
"""
exc = self.exc_info()[:]
try:
(exc_type, exc_value, exc_traceback) = exc # depends on [control=['try'], data=[]]
except ValueError:
(exc_type, exc_value) = exc
exc_traceback = None # depends on [control=['except'], data=[]]
# raise exc_type(exc_value).with_traceback(exc_traceback)
if sys.version_info[0] == 2:
exec('raise exc_type, exc_value, exc_traceback') # depends on [control=['if'], data=[]] # sys.version_info[0] == 3:
elif isinstance(exc_value, Exception): #hasattr(exc_value, 'with_traceback'):
# If exc_value is an exception, then just reraise
exec('raise exc_value.with_traceback(exc_traceback)') # depends on [control=['if'], data=[]]
else:
# else we'll create an exception using the value and raise that
exec('raise exc_type(exc_value).with_traceback(exc_traceback)') |
def mod_list(only_persist=False):
    '''
    Return a list of the loaded module names

    only_persist
        Only return the list of loaded persistent modules

    CLI Example:

    .. code-block:: bash

        salt '*' kmod.mod_list
    '''
    found = set()
    if not only_persist:
        # All currently loaded modules, as reported by lsmod.
        for entry in lsmod():
            found.add(entry['module'])
        return sorted(found)

    # Persistent modules are read from the modules configuration file.
    conf = _get_modules_conf()
    if os.path.exists(conf):
        try:
            with salt.utils.files.fopen(conf, 'r') as conf_file:
                for raw_line in conf_file:
                    raw_line = raw_line.strip()
                    name = _strip_module_name(raw_line)
                    # Skip comment lines and lines with no module name.
                    if name and not raw_line.startswith('#'):
                        found.add(name)
        except IOError:
            log.error('kmod module could not open modules file at %s', conf)
    return sorted(found)
constant[
Return a list of the loaded module names
only_persist
Only return the list of loaded persistent modules
CLI Example:
.. code-block:: bash
salt '*' kmod.mod_list
]
variable[mods] assign[=] call[name[set], parameter[]]
if name[only_persist] begin[:]
variable[conf] assign[=] call[name[_get_modules_conf], parameter[]]
if call[name[os].path.exists, parameter[name[conf]]] begin[:]
<ast.Try object at 0x7da20c6a8790>
return[call[name[sorted], parameter[call[name[list], parameter[name[mods]]]]]] | keyword[def] identifier[mod_list] ( identifier[only_persist] = keyword[False] ):
literal[string]
identifier[mods] = identifier[set] ()
keyword[if] identifier[only_persist] :
identifier[conf] = identifier[_get_modules_conf] ()
keyword[if] identifier[os] . identifier[path] . identifier[exists] ( identifier[conf] ):
keyword[try] :
keyword[with] identifier[salt] . identifier[utils] . identifier[files] . identifier[fopen] ( identifier[conf] , literal[string] ) keyword[as] identifier[modules_file] :
keyword[for] identifier[line] keyword[in] identifier[modules_file] :
identifier[line] = identifier[line] . identifier[strip] ()
identifier[mod_name] = identifier[_strip_module_name] ( identifier[line] )
keyword[if] keyword[not] identifier[line] . identifier[startswith] ( literal[string] ) keyword[and] identifier[mod_name] :
identifier[mods] . identifier[add] ( identifier[mod_name] )
keyword[except] identifier[IOError] :
identifier[log] . identifier[error] ( literal[string] , identifier[conf] )
keyword[else] :
keyword[for] identifier[mod] keyword[in] identifier[lsmod] ():
identifier[mods] . identifier[add] ( identifier[mod] [ literal[string] ])
keyword[return] identifier[sorted] ( identifier[list] ( identifier[mods] )) | def mod_list(only_persist=False):
"""
Return a list of the loaded module names
only_persist
Only return the list of loaded persistent modules
CLI Example:
.. code-block:: bash
salt '*' kmod.mod_list
"""
mods = set()
if only_persist:
conf = _get_modules_conf()
if os.path.exists(conf):
try:
with salt.utils.files.fopen(conf, 'r') as modules_file:
for line in modules_file:
line = line.strip()
mod_name = _strip_module_name(line)
if not line.startswith('#') and mod_name:
mods.add(mod_name) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['line']] # depends on [control=['with'], data=['modules_file']] # depends on [control=['try'], data=[]]
except IOError:
log.error('kmod module could not open modules file at %s', conf) # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
else:
for mod in lsmod():
mods.add(mod['module']) # depends on [control=['for'], data=['mod']]
return sorted(list(mods)) |
def create(vm_):
    '''
    Create a single VM from a data dict
    '''
    try:
        # Check for required profile parameters before sending any API calls.
        if vm_['profile'] and config.is_profile_configured(__opts__,
                                                           __active_provider_name__ or 'aliyun',
                                                           vm_['profile'],
                                                           vm_=vm_) is False:
            return False
    except AttributeError:
        pass

    __utils__['cloud.fire_event'](
        'event',
        'starting create',
        'salt/cloud/{0}/creating'.format(vm_['name']),
        args=__utils__['cloud.filter_event']('creating', vm_, ['name', 'profile', 'provider', 'driver']),
        sock_dir=__opts__['sock_dir'],
        transport=__opts__['transport']
    )

    log.info('Creating Cloud VM %s', vm_['name'])
    kwargs = {
        'name': vm_['name'],
        'size_id': get_size(vm_),
        'image_id': get_image(vm_),
        'region_id': __get_location(vm_),
        'securitygroup_id': get_securitygroup(vm_),
    }
    # Optional profile keys are forwarded to the ECS API only when present.
    if 'vswitch_id' in vm_:
        kwargs['VSwitchId'] = vm_['vswitch_id']
    if 'internet_chargetype' in vm_:
        kwargs['InternetChargeType'] = vm_['internet_chargetype']
    if 'internet_maxbandwidthin' in vm_:
        kwargs['InternetMaxBandwidthIn'] = six.text_type(vm_['internet_maxbandwidthin'])
    if 'internet_maxbandwidthout' in vm_:
        # BUGFIX: read the value with the same lowercase key tested above;
        # vm_['internet_maxbandwidthOut'] raised KeyError whenever the
        # option was configured.
        kwargs['InternetMaxBandwidthOut'] = six.text_type(vm_['internet_maxbandwidthout'])
    if 'hostname' in vm_:
        kwargs['HostName'] = vm_['hostname']
    if 'password' in vm_:
        kwargs['Password'] = vm_['password']
    if 'instance_name' in vm_:
        kwargs['InstanceName'] = vm_['instance_name']
    if 'systemdisk_category' in vm_:
        kwargs['SystemDisk.Category'] = vm_['systemdisk_category']

    __utils__['cloud.fire_event'](
        'event',
        'requesting instance',
        'salt/cloud/{0}/requesting'.format(vm_['name']),
        args=__utils__['cloud.filter_event']('requesting', kwargs, list(kwargs)),
        sock_dir=__opts__['sock_dir'],
        transport=__opts__['transport']
    )

    try:
        ret = create_node(kwargs)
    except Exception as exc:
        log.error(
            'Error creating %s on Aliyun ECS\n\n'
            'The following exception was thrown when trying to '
            'run the initial deployment: %s',
            vm_['name'], six.text_type(exc),
            # Show the traceback if the debug logging level is enabled
            exc_info_on_loglevel=logging.DEBUG
        )
        return False

    # repair ip address error and start vm
    time.sleep(8)
    params = {'Action': 'StartInstance',
              'InstanceId': ret}
    query(params)

    def __query_node_data(vm_name):
        # Polling helper for wait_for_ip: returns the instance data once a
        # public IP is assigned, False to signal an error, or None to keep
        # waiting.
        data = show_instance(vm_name, call='action')
        if not data:
            # Trigger an error in the wait_for_ip function
            return False
        if data.get('PublicIpAddress', None) is not None:
            return data

    try:
        data = salt.utils.cloud.wait_for_ip(
            __query_node_data,
            update_args=(vm_['name'],),
            timeout=config.get_cloud_config_value(
                'wait_for_ip_timeout', vm_, __opts__, default=10 * 60),
            interval=config.get_cloud_config_value(
                'wait_for_ip_interval', vm_, __opts__, default=10),
        )
    except (SaltCloudExecutionTimeout, SaltCloudExecutionFailure) as exc:
        try:
            # It might be already up, let's destroy it!
            destroy(vm_['name'])
        except SaltCloudSystemExit:
            pass
        finally:
            raise SaltCloudSystemExit(six.text_type(exc))

    # Prefer the public IP for bootstrapping; fall back to a private one.
    if data['public_ips']:
        ssh_ip = data['public_ips'][0]
    elif data['private_ips']:
        ssh_ip = data['private_ips'][0]
    else:
        log.info('No available ip:cant connect to salt')
        return False
    log.debug('VM %s is now running', ssh_ip)
    vm_['ssh_host'] = ssh_ip

    # The instance is booted and accessible, let's Salt it!
    ret = __utils__['cloud.bootstrap'](vm_, __opts__)
    ret.update(data)

    log.info('Created Cloud VM \'%s\'', vm_['name'])
    log.debug(
        '\'%s\' VM creation details:\n%s',
        vm_['name'], pprint.pformat(data)
    )

    __utils__['cloud.fire_event'](
        'event',
        'created instance',
        'salt/cloud/{0}/created'.format(vm_['name']),
        args=__utils__['cloud.filter_event']('created', vm_, ['name', 'profile', 'provider', 'driver']),
        sock_dir=__opts__['sock_dir'],
        transport=__opts__['transport']
    )

    return ret
constant[
Create a single VM from a data dict
]
<ast.Try object at 0x7da1b1f6fb50>
call[call[name[__utils__]][constant[cloud.fire_event]], parameter[constant[event], constant[starting create], call[constant[salt/cloud/{0}/creating].format, parameter[call[name[vm_]][constant[name]]]]]]
call[name[log].info, parameter[constant[Creating Cloud VM %s], call[name[vm_]][constant[name]]]]
variable[kwargs] assign[=] dictionary[[<ast.Constant object at 0x7da1b1f6c250>, <ast.Constant object at 0x7da1b1f6df00>, <ast.Constant object at 0x7da1b1f6fd30>, <ast.Constant object at 0x7da1b1f6ff70>, <ast.Constant object at 0x7da1b1f6f9d0>], [<ast.Subscript object at 0x7da1b1f6d7e0>, <ast.Call object at 0x7da1b1f6f2e0>, <ast.Call object at 0x7da1b1f6e620>, <ast.Call object at 0x7da1b1f6f6d0>, <ast.Call object at 0x7da1b1f6f970>]]
if compare[constant[vswitch_id] in name[vm_]] begin[:]
call[name[kwargs]][constant[VSwitchId]] assign[=] call[name[vm_]][constant[vswitch_id]]
if compare[constant[internet_chargetype] in name[vm_]] begin[:]
call[name[kwargs]][constant[InternetChargeType]] assign[=] call[name[vm_]][constant[internet_chargetype]]
if compare[constant[internet_maxbandwidthin] in name[vm_]] begin[:]
call[name[kwargs]][constant[InternetMaxBandwidthIn]] assign[=] call[name[six].text_type, parameter[call[name[vm_]][constant[internet_maxbandwidthin]]]]
if compare[constant[internet_maxbandwidthout] in name[vm_]] begin[:]
call[name[kwargs]][constant[InternetMaxBandwidthOut]] assign[=] call[name[six].text_type, parameter[call[name[vm_]][constant[internet_maxbandwidthOut]]]]
if compare[constant[hostname] in name[vm_]] begin[:]
call[name[kwargs]][constant[HostName]] assign[=] call[name[vm_]][constant[hostname]]
if compare[constant[password] in name[vm_]] begin[:]
call[name[kwargs]][constant[Password]] assign[=] call[name[vm_]][constant[password]]
if compare[constant[instance_name] in name[vm_]] begin[:]
call[name[kwargs]][constant[InstanceName]] assign[=] call[name[vm_]][constant[instance_name]]
if compare[constant[systemdisk_category] in name[vm_]] begin[:]
call[name[kwargs]][constant[SystemDisk.Category]] assign[=] call[name[vm_]][constant[systemdisk_category]]
call[call[name[__utils__]][constant[cloud.fire_event]], parameter[constant[event], constant[requesting instance], call[constant[salt/cloud/{0}/requesting].format, parameter[call[name[vm_]][constant[name]]]]]]
<ast.Try object at 0x7da1b1f6ebf0>
call[name[time].sleep, parameter[constant[8]]]
variable[params] assign[=] dictionary[[<ast.Constant object at 0x7da1b1f6ed40>, <ast.Constant object at 0x7da1b1f6c610>], [<ast.Constant object at 0x7da1b1f6d630>, <ast.Name object at 0x7da1b1f6d4b0>]]
call[name[query], parameter[name[params]]]
def function[__query_node_data, parameter[vm_name]]:
variable[data] assign[=] call[name[show_instance], parameter[name[vm_name]]]
if <ast.UnaryOp object at 0x7da1b1f6f850> begin[:]
return[constant[False]]
if compare[call[name[data].get, parameter[constant[PublicIpAddress], constant[None]]] is_not constant[None]] begin[:]
return[name[data]]
<ast.Try object at 0x7da1b1f6efe0>
if call[name[data]][constant[public_ips]] begin[:]
variable[ssh_ip] assign[=] call[call[name[data]][constant[public_ips]]][constant[0]]
call[name[log].debug, parameter[constant[VM %s is now running], name[ssh_ip]]]
call[name[vm_]][constant[ssh_host]] assign[=] name[ssh_ip]
variable[ret] assign[=] call[call[name[__utils__]][constant[cloud.bootstrap]], parameter[name[vm_], name[__opts__]]]
call[name[ret].update, parameter[name[data]]]
call[name[log].info, parameter[constant[Created Cloud VM '%s'], call[name[vm_]][constant[name]]]]
call[name[log].debug, parameter[constant['%s' VM creation details:
%s], call[name[vm_]][constant[name]], call[name[pprint].pformat, parameter[name[data]]]]]
call[call[name[__utils__]][constant[cloud.fire_event]], parameter[constant[event], constant[created instance], call[constant[salt/cloud/{0}/created].format, parameter[call[name[vm_]][constant[name]]]]]]
return[name[ret]] | keyword[def] identifier[create] ( identifier[vm_] ):
literal[string]
keyword[try] :
keyword[if] identifier[vm_] [ literal[string] ] keyword[and] identifier[config] . identifier[is_profile_configured] ( identifier[__opts__] ,
identifier[__active_provider_name__] keyword[or] literal[string] ,
identifier[vm_] [ literal[string] ],
identifier[vm_] = identifier[vm_] ) keyword[is] keyword[False] :
keyword[return] keyword[False]
keyword[except] identifier[AttributeError] :
keyword[pass]
identifier[__utils__] [ literal[string] ](
literal[string] ,
literal[string] ,
literal[string] . identifier[format] ( identifier[vm_] [ literal[string] ]),
identifier[args] = identifier[__utils__] [ literal[string] ]( literal[string] , identifier[vm_] ,[ literal[string] , literal[string] , literal[string] , literal[string] ]),
identifier[sock_dir] = identifier[__opts__] [ literal[string] ],
identifier[transport] = identifier[__opts__] [ literal[string] ]
)
identifier[log] . identifier[info] ( literal[string] , identifier[vm_] [ literal[string] ])
identifier[kwargs] ={
literal[string] : identifier[vm_] [ literal[string] ],
literal[string] : identifier[get_size] ( identifier[vm_] ),
literal[string] : identifier[get_image] ( identifier[vm_] ),
literal[string] : identifier[__get_location] ( identifier[vm_] ),
literal[string] : identifier[get_securitygroup] ( identifier[vm_] ),
}
keyword[if] literal[string] keyword[in] identifier[vm_] :
identifier[kwargs] [ literal[string] ]= identifier[vm_] [ literal[string] ]
keyword[if] literal[string] keyword[in] identifier[vm_] :
identifier[kwargs] [ literal[string] ]= identifier[vm_] [ literal[string] ]
keyword[if] literal[string] keyword[in] identifier[vm_] :
identifier[kwargs] [ literal[string] ]= identifier[six] . identifier[text_type] ( identifier[vm_] [ literal[string] ])
keyword[if] literal[string] keyword[in] identifier[vm_] :
identifier[kwargs] [ literal[string] ]= identifier[six] . identifier[text_type] ( identifier[vm_] [ literal[string] ])
keyword[if] literal[string] keyword[in] identifier[vm_] :
identifier[kwargs] [ literal[string] ]= identifier[vm_] [ literal[string] ]
keyword[if] literal[string] keyword[in] identifier[vm_] :
identifier[kwargs] [ literal[string] ]= identifier[vm_] [ literal[string] ]
keyword[if] literal[string] keyword[in] identifier[vm_] :
identifier[kwargs] [ literal[string] ]= identifier[vm_] [ literal[string] ]
keyword[if] literal[string] keyword[in] identifier[vm_] :
identifier[kwargs] [ literal[string] ]= identifier[vm_] [ literal[string] ]
identifier[__utils__] [ literal[string] ](
literal[string] ,
literal[string] ,
literal[string] . identifier[format] ( identifier[vm_] [ literal[string] ]),
identifier[args] = identifier[__utils__] [ literal[string] ]( literal[string] , identifier[kwargs] , identifier[list] ( identifier[kwargs] )),
identifier[sock_dir] = identifier[__opts__] [ literal[string] ],
identifier[transport] = identifier[__opts__] [ literal[string] ]
)
keyword[try] :
identifier[ret] = identifier[create_node] ( identifier[kwargs] )
keyword[except] identifier[Exception] keyword[as] identifier[exc] :
identifier[log] . identifier[error] (
literal[string]
literal[string]
literal[string] ,
identifier[vm_] [ literal[string] ], identifier[six] . identifier[text_type] ( identifier[exc] ),
identifier[exc_info_on_loglevel] = identifier[logging] . identifier[DEBUG]
)
keyword[return] keyword[False]
identifier[time] . identifier[sleep] ( literal[int] )
identifier[params] ={ literal[string] : literal[string] ,
literal[string] : identifier[ret] }
identifier[query] ( identifier[params] )
keyword[def] identifier[__query_node_data] ( identifier[vm_name] ):
identifier[data] = identifier[show_instance] ( identifier[vm_name] , identifier[call] = literal[string] )
keyword[if] keyword[not] identifier[data] :
keyword[return] keyword[False]
keyword[if] identifier[data] . identifier[get] ( literal[string] , keyword[None] ) keyword[is] keyword[not] keyword[None] :
keyword[return] identifier[data]
keyword[try] :
identifier[data] = identifier[salt] . identifier[utils] . identifier[cloud] . identifier[wait_for_ip] (
identifier[__query_node_data] ,
identifier[update_args] =( identifier[vm_] [ literal[string] ],),
identifier[timeout] = identifier[config] . identifier[get_cloud_config_value] (
literal[string] , identifier[vm_] , identifier[__opts__] , identifier[default] = literal[int] * literal[int] ),
identifier[interval] = identifier[config] . identifier[get_cloud_config_value] (
literal[string] , identifier[vm_] , identifier[__opts__] , identifier[default] = literal[int] ),
)
keyword[except] ( identifier[SaltCloudExecutionTimeout] , identifier[SaltCloudExecutionFailure] ) keyword[as] identifier[exc] :
keyword[try] :
identifier[destroy] ( identifier[vm_] [ literal[string] ])
keyword[except] identifier[SaltCloudSystemExit] :
keyword[pass]
keyword[finally] :
keyword[raise] identifier[SaltCloudSystemExit] ( identifier[six] . identifier[text_type] ( identifier[exc] ))
keyword[if] identifier[data] [ literal[string] ]:
identifier[ssh_ip] = identifier[data] [ literal[string] ][ literal[int] ]
keyword[elif] identifier[data] [ literal[string] ]:
identifier[ssh_ip] = identifier[data] [ literal[string] ][ literal[int] ]
keyword[else] :
identifier[log] . identifier[info] ( literal[string] )
keyword[return] keyword[False]
identifier[log] . identifier[debug] ( literal[string] , identifier[ssh_ip] )
identifier[vm_] [ literal[string] ]= identifier[ssh_ip]
identifier[ret] = identifier[__utils__] [ literal[string] ]( identifier[vm_] , identifier[__opts__] )
identifier[ret] . identifier[update] ( identifier[data] )
identifier[log] . identifier[info] ( literal[string] , identifier[vm_] [ literal[string] ])
identifier[log] . identifier[debug] (
literal[string] ,
identifier[vm_] [ literal[string] ], identifier[pprint] . identifier[pformat] ( identifier[data] )
)
identifier[__utils__] [ literal[string] ](
literal[string] ,
literal[string] ,
literal[string] . identifier[format] ( identifier[vm_] [ literal[string] ]),
identifier[args] = identifier[__utils__] [ literal[string] ]( literal[string] , identifier[vm_] ,[ literal[string] , literal[string] , literal[string] , literal[string] ]),
identifier[sock_dir] = identifier[__opts__] [ literal[string] ],
identifier[transport] = identifier[__opts__] [ literal[string] ]
)
keyword[return] identifier[ret] | def create(vm_):
"""
Create a single VM from a data dict
"""
try:
# Check for required profile parameters before sending any API calls.
if vm_['profile'] and config.is_profile_configured(__opts__, __active_provider_name__ or 'aliyun', vm_['profile'], vm_=vm_) is False:
return False # depends on [control=['if'], data=[]] # depends on [control=['try'], data=[]]
except AttributeError:
pass # depends on [control=['except'], data=[]]
__utils__['cloud.fire_event']('event', 'starting create', 'salt/cloud/{0}/creating'.format(vm_['name']), args=__utils__['cloud.filter_event']('creating', vm_, ['name', 'profile', 'provider', 'driver']), sock_dir=__opts__['sock_dir'], transport=__opts__['transport'])
log.info('Creating Cloud VM %s', vm_['name'])
kwargs = {'name': vm_['name'], 'size_id': get_size(vm_), 'image_id': get_image(vm_), 'region_id': __get_location(vm_), 'securitygroup_id': get_securitygroup(vm_)}
if 'vswitch_id' in vm_:
kwargs['VSwitchId'] = vm_['vswitch_id'] # depends on [control=['if'], data=['vm_']]
if 'internet_chargetype' in vm_:
kwargs['InternetChargeType'] = vm_['internet_chargetype'] # depends on [control=['if'], data=['vm_']]
if 'internet_maxbandwidthin' in vm_:
kwargs['InternetMaxBandwidthIn'] = six.text_type(vm_['internet_maxbandwidthin']) # depends on [control=['if'], data=['vm_']]
if 'internet_maxbandwidthout' in vm_:
kwargs['InternetMaxBandwidthOut'] = six.text_type(vm_['internet_maxbandwidthOut']) # depends on [control=['if'], data=['vm_']]
if 'hostname' in vm_:
kwargs['HostName'] = vm_['hostname'] # depends on [control=['if'], data=['vm_']]
if 'password' in vm_:
kwargs['Password'] = vm_['password'] # depends on [control=['if'], data=['vm_']]
if 'instance_name' in vm_:
kwargs['InstanceName'] = vm_['instance_name'] # depends on [control=['if'], data=['vm_']]
if 'systemdisk_category' in vm_:
kwargs['SystemDisk.Category'] = vm_['systemdisk_category'] # depends on [control=['if'], data=['vm_']]
__utils__['cloud.fire_event']('event', 'requesting instance', 'salt/cloud/{0}/requesting'.format(vm_['name']), args=__utils__['cloud.filter_event']('requesting', kwargs, list(kwargs)), sock_dir=__opts__['sock_dir'], transport=__opts__['transport'])
try:
ret = create_node(kwargs) # depends on [control=['try'], data=[]]
except Exception as exc:
# Show the traceback if the debug logging level is enabled
log.error('Error creating %s on Aliyun ECS\n\nThe following exception was thrown when trying to run the initial deployment: %s', vm_['name'], six.text_type(exc), exc_info_on_loglevel=logging.DEBUG)
return False # depends on [control=['except'], data=['exc']]
# repair ip address error and start vm
time.sleep(8)
params = {'Action': 'StartInstance', 'InstanceId': ret}
query(params)
def __query_node_data(vm_name):
data = show_instance(vm_name, call='action')
if not data:
# Trigger an error in the wait_for_ip function
return False # depends on [control=['if'], data=[]]
if data.get('PublicIpAddress', None) is not None:
return data # depends on [control=['if'], data=[]]
try:
data = salt.utils.cloud.wait_for_ip(__query_node_data, update_args=(vm_['name'],), timeout=config.get_cloud_config_value('wait_for_ip_timeout', vm_, __opts__, default=10 * 60), interval=config.get_cloud_config_value('wait_for_ip_interval', vm_, __opts__, default=10)) # depends on [control=['try'], data=[]]
except (SaltCloudExecutionTimeout, SaltCloudExecutionFailure) as exc:
try:
# It might be already up, let's destroy it!
destroy(vm_['name']) # depends on [control=['try'], data=[]]
except SaltCloudSystemExit:
pass # depends on [control=['except'], data=[]]
finally:
raise SaltCloudSystemExit(six.text_type(exc)) # depends on [control=['except'], data=['exc']]
if data['public_ips']:
ssh_ip = data['public_ips'][0] # depends on [control=['if'], data=[]]
elif data['private_ips']:
ssh_ip = data['private_ips'][0] # depends on [control=['if'], data=[]]
else:
log.info('No available ip:cant connect to salt')
return False
log.debug('VM %s is now running', ssh_ip)
vm_['ssh_host'] = ssh_ip
# The instance is booted and accessible, let's Salt it!
ret = __utils__['cloud.bootstrap'](vm_, __opts__)
ret.update(data)
log.info("Created Cloud VM '%s'", vm_['name'])
log.debug("'%s' VM creation details:\n%s", vm_['name'], pprint.pformat(data))
__utils__['cloud.fire_event']('event', 'created instance', 'salt/cloud/{0}/created'.format(vm_['name']), args=__utils__['cloud.filter_event']('created', vm_, ['name', 'profile', 'provider', 'driver']), sock_dir=__opts__['sock_dir'], transport=__opts__['transport'])
return ret |
def next(self):
"""Returns the next batch of data."""
if self.curr_idx == len(self.idx):
raise StopIteration
i, j = self.idx[self.curr_idx]
self.curr_idx += 1
if self.major_axis == 1:
data = self.nddata[i][j:j+self.batch_size].T
label = self.ndlabel[i][j:j+self.batch_size].T
else:
data = self.nddata[i][j:j+self.batch_size]
label = self.ndlabel[i][j:j+self.batch_size]
return DataBatch([data], [label], pad=0,
bucket_key=self.buckets[i],
provide_data=[DataDesc(
name=self.data_name, shape=data.shape,
layout=self.layout)],
provide_label=[DataDesc(
name=self.label_name, shape=label.shape,
layout=self.layout)]) | def function[next, parameter[self]]:
constant[Returns the next batch of data.]
if compare[name[self].curr_idx equal[==] call[name[len], parameter[name[self].idx]]] begin[:]
<ast.Raise object at 0x7da2054a54b0>
<ast.Tuple object at 0x7da2054a4eb0> assign[=] call[name[self].idx][name[self].curr_idx]
<ast.AugAssign object at 0x7da2054a58d0>
if compare[name[self].major_axis equal[==] constant[1]] begin[:]
variable[data] assign[=] call[call[name[self].nddata][name[i]]][<ast.Slice object at 0x7da2054a4910>].T
variable[label] assign[=] call[call[name[self].ndlabel][name[i]]][<ast.Slice object at 0x7da2054a4fd0>].T
return[call[name[DataBatch], parameter[list[[<ast.Name object at 0x7da1b200d0c0>]], list[[<ast.Name object at 0x7da1b200c430>]]]]] | keyword[def] identifier[next] ( identifier[self] ):
literal[string]
keyword[if] identifier[self] . identifier[curr_idx] == identifier[len] ( identifier[self] . identifier[idx] ):
keyword[raise] identifier[StopIteration]
identifier[i] , identifier[j] = identifier[self] . identifier[idx] [ identifier[self] . identifier[curr_idx] ]
identifier[self] . identifier[curr_idx] += literal[int]
keyword[if] identifier[self] . identifier[major_axis] == literal[int] :
identifier[data] = identifier[self] . identifier[nddata] [ identifier[i] ][ identifier[j] : identifier[j] + identifier[self] . identifier[batch_size] ]. identifier[T]
identifier[label] = identifier[self] . identifier[ndlabel] [ identifier[i] ][ identifier[j] : identifier[j] + identifier[self] . identifier[batch_size] ]. identifier[T]
keyword[else] :
identifier[data] = identifier[self] . identifier[nddata] [ identifier[i] ][ identifier[j] : identifier[j] + identifier[self] . identifier[batch_size] ]
identifier[label] = identifier[self] . identifier[ndlabel] [ identifier[i] ][ identifier[j] : identifier[j] + identifier[self] . identifier[batch_size] ]
keyword[return] identifier[DataBatch] ([ identifier[data] ],[ identifier[label] ], identifier[pad] = literal[int] ,
identifier[bucket_key] = identifier[self] . identifier[buckets] [ identifier[i] ],
identifier[provide_data] =[ identifier[DataDesc] (
identifier[name] = identifier[self] . identifier[data_name] , identifier[shape] = identifier[data] . identifier[shape] ,
identifier[layout] = identifier[self] . identifier[layout] )],
identifier[provide_label] =[ identifier[DataDesc] (
identifier[name] = identifier[self] . identifier[label_name] , identifier[shape] = identifier[label] . identifier[shape] ,
identifier[layout] = identifier[self] . identifier[layout] )]) | def next(self):
"""Returns the next batch of data."""
if self.curr_idx == len(self.idx):
raise StopIteration # depends on [control=['if'], data=[]]
(i, j) = self.idx[self.curr_idx]
self.curr_idx += 1
if self.major_axis == 1:
data = self.nddata[i][j:j + self.batch_size].T
label = self.ndlabel[i][j:j + self.batch_size].T # depends on [control=['if'], data=[]]
else:
data = self.nddata[i][j:j + self.batch_size]
label = self.ndlabel[i][j:j + self.batch_size]
return DataBatch([data], [label], pad=0, bucket_key=self.buckets[i], provide_data=[DataDesc(name=self.data_name, shape=data.shape, layout=self.layout)], provide_label=[DataDesc(name=self.label_name, shape=label.shape, layout=self.layout)]) |
def _generate_sas_token(uri, policy, key, expiry=None):
"""Create a shared access signiture token as a string literal.
:returns: SAS token as string literal.
:rtype: str
"""
from base64 import b64encode, b64decode
from hashlib import sha256
from hmac import HMAC
if not expiry:
expiry = time.time() + 3600 # Default to 1 hour.
encoded_uri = quote_plus(uri)
ttl = int(expiry)
sign_key = '%s\n%d' % (encoded_uri, ttl)
signature = b64encode(HMAC(b64decode(key), sign_key.encode('utf-8'), sha256).digest())
result = {
'sr': uri,
'sig': signature,
'se': str(ttl)}
if policy:
result['skn'] = policy
return 'SharedAccessSignature ' + urlencode(result) | def function[_generate_sas_token, parameter[uri, policy, key, expiry]]:
constant[Create a shared access signiture token as a string literal.
:returns: SAS token as string literal.
:rtype: str
]
from relative_module[base64] import module[b64encode], module[b64decode]
from relative_module[hashlib] import module[sha256]
from relative_module[hmac] import module[HMAC]
if <ast.UnaryOp object at 0x7da1b2344520> begin[:]
variable[expiry] assign[=] binary_operation[call[name[time].time, parameter[]] + constant[3600]]
variable[encoded_uri] assign[=] call[name[quote_plus], parameter[name[uri]]]
variable[ttl] assign[=] call[name[int], parameter[name[expiry]]]
variable[sign_key] assign[=] binary_operation[constant[%s
%d] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da204962d40>, <ast.Name object at 0x7da2049635b0>]]]
variable[signature] assign[=] call[name[b64encode], parameter[call[call[name[HMAC], parameter[call[name[b64decode], parameter[name[key]]], call[name[sign_key].encode, parameter[constant[utf-8]]], name[sha256]]].digest, parameter[]]]]
variable[result] assign[=] dictionary[[<ast.Constant object at 0x7da20c991600>, <ast.Constant object at 0x7da20c992290>, <ast.Constant object at 0x7da20c993fa0>], [<ast.Name object at 0x7da20c990910>, <ast.Name object at 0x7da20c990c70>, <ast.Call object at 0x7da20c991480>]]
if name[policy] begin[:]
call[name[result]][constant[skn]] assign[=] name[policy]
return[binary_operation[constant[SharedAccessSignature ] + call[name[urlencode], parameter[name[result]]]]] | keyword[def] identifier[_generate_sas_token] ( identifier[uri] , identifier[policy] , identifier[key] , identifier[expiry] = keyword[None] ):
literal[string]
keyword[from] identifier[base64] keyword[import] identifier[b64encode] , identifier[b64decode]
keyword[from] identifier[hashlib] keyword[import] identifier[sha256]
keyword[from] identifier[hmac] keyword[import] identifier[HMAC]
keyword[if] keyword[not] identifier[expiry] :
identifier[expiry] = identifier[time] . identifier[time] ()+ literal[int]
identifier[encoded_uri] = identifier[quote_plus] ( identifier[uri] )
identifier[ttl] = identifier[int] ( identifier[expiry] )
identifier[sign_key] = literal[string] %( identifier[encoded_uri] , identifier[ttl] )
identifier[signature] = identifier[b64encode] ( identifier[HMAC] ( identifier[b64decode] ( identifier[key] ), identifier[sign_key] . identifier[encode] ( literal[string] ), identifier[sha256] ). identifier[digest] ())
identifier[result] ={
literal[string] : identifier[uri] ,
literal[string] : identifier[signature] ,
literal[string] : identifier[str] ( identifier[ttl] )}
keyword[if] identifier[policy] :
identifier[result] [ literal[string] ]= identifier[policy]
keyword[return] literal[string] + identifier[urlencode] ( identifier[result] ) | def _generate_sas_token(uri, policy, key, expiry=None):
"""Create a shared access signiture token as a string literal.
:returns: SAS token as string literal.
:rtype: str
"""
from base64 import b64encode, b64decode
from hashlib import sha256
from hmac import HMAC
if not expiry:
expiry = time.time() + 3600 # Default to 1 hour. # depends on [control=['if'], data=[]]
encoded_uri = quote_plus(uri)
ttl = int(expiry)
sign_key = '%s\n%d' % (encoded_uri, ttl)
signature = b64encode(HMAC(b64decode(key), sign_key.encode('utf-8'), sha256).digest())
result = {'sr': uri, 'sig': signature, 'se': str(ttl)}
if policy:
result['skn'] = policy # depends on [control=['if'], data=[]]
return 'SharedAccessSignature ' + urlencode(result) |
def load_label(self, idx, label_type=None):
"""
Load label image as 1 x height x width integer array of label indices.
The leading singleton dimension is required by the loss.
"""
if label_type == 'semantic':
label = scipy.io.loadmat('{}/SemanticLabels/spatial_envelope_256x256_static_8outdoorcategories/{}.mat'.format(self.siftflow_dir, idx))['S']
elif label_type == 'geometric':
label = scipy.io.loadmat('{}/GeoLabels/spatial_envelope_256x256_static_8outdoorcategories/{}.mat'.format(self.siftflow_dir, idx))['S']
label[label == -1] = 0
else:
raise Exception("Unknown label type: {}. Pick semantic or geometric.".format(label_type))
label = label.astype(np.uint8)
label -= 1 # rotate labels so classes start at 0, void is 255
label = label[np.newaxis, ...]
return label.copy() | def function[load_label, parameter[self, idx, label_type]]:
constant[
Load label image as 1 x height x width integer array of label indices.
The leading singleton dimension is required by the loss.
]
if compare[name[label_type] equal[==] constant[semantic]] begin[:]
variable[label] assign[=] call[call[name[scipy].io.loadmat, parameter[call[constant[{}/SemanticLabels/spatial_envelope_256x256_static_8outdoorcategories/{}.mat].format, parameter[name[self].siftflow_dir, name[idx]]]]]][constant[S]]
variable[label] assign[=] call[name[label].astype, parameter[name[np].uint8]]
<ast.AugAssign object at 0x7da18ede5d80>
variable[label] assign[=] call[name[label]][tuple[[<ast.Attribute object at 0x7da20c7ca6b0>, <ast.Constant object at 0x7da20c7c8c10>]]]
return[call[name[label].copy, parameter[]]] | keyword[def] identifier[load_label] ( identifier[self] , identifier[idx] , identifier[label_type] = keyword[None] ):
literal[string]
keyword[if] identifier[label_type] == literal[string] :
identifier[label] = identifier[scipy] . identifier[io] . identifier[loadmat] ( literal[string] . identifier[format] ( identifier[self] . identifier[siftflow_dir] , identifier[idx] ))[ literal[string] ]
keyword[elif] identifier[label_type] == literal[string] :
identifier[label] = identifier[scipy] . identifier[io] . identifier[loadmat] ( literal[string] . identifier[format] ( identifier[self] . identifier[siftflow_dir] , identifier[idx] ))[ literal[string] ]
identifier[label] [ identifier[label] ==- literal[int] ]= literal[int]
keyword[else] :
keyword[raise] identifier[Exception] ( literal[string] . identifier[format] ( identifier[label_type] ))
identifier[label] = identifier[label] . identifier[astype] ( identifier[np] . identifier[uint8] )
identifier[label] -= literal[int]
identifier[label] = identifier[label] [ identifier[np] . identifier[newaxis] ,...]
keyword[return] identifier[label] . identifier[copy] () | def load_label(self, idx, label_type=None):
"""
Load label image as 1 x height x width integer array of label indices.
The leading singleton dimension is required by the loss.
"""
if label_type == 'semantic':
label = scipy.io.loadmat('{}/SemanticLabels/spatial_envelope_256x256_static_8outdoorcategories/{}.mat'.format(self.siftflow_dir, idx))['S'] # depends on [control=['if'], data=[]]
elif label_type == 'geometric':
label = scipy.io.loadmat('{}/GeoLabels/spatial_envelope_256x256_static_8outdoorcategories/{}.mat'.format(self.siftflow_dir, idx))['S']
label[label == -1] = 0 # depends on [control=['if'], data=[]]
else:
raise Exception('Unknown label type: {}. Pick semantic or geometric.'.format(label_type))
label = label.astype(np.uint8)
label -= 1 # rotate labels so classes start at 0, void is 255
label = label[np.newaxis, ...]
return label.copy() |
def command_update(prog_name, prof_mgr, prof_name, prog_args):
"""
Update components.
"""
# Retrieve arguments
parser = argparse.ArgumentParser(
prog=prog_name
)
parser.add_argument(
"components",
metavar="comps",
nargs=argparse.REMAINDER,
help="system components"
)
args = parser.parse_args(prog_args)
# Profile load
prof_stub = prof_mgr.load(prof_name)
# Collect component stubs
comp_stubs = []
if len(args.components) == 0:
raise Exception("Empty component list")
for comp_name in args.components:
comp_stub = prof_stub.component(comp_name)
component_exists(prof_stub, comp_stub)
comp_stubs.append(comp_stub)
context = prof_stub.context()
# Create delete plan
plan = []
for comp_stub in comp_stubs:
comp_stub.delete(context, plan)
# Execute delete plan
for op in plan:
operation_execute(op, context)
# Update component stub list
for op in plan:
comp_stub = prof_stub.component(op.name())
if comp_stub not in comp_stubs:
comp_stubs.append(comp_stub)
# Create insert plan
plan = []
for comp_stub in comp_stubs:
comp_stub.insert(context, plan)
# Execute insert plan
for op in plan:
operation_execute(op, context) | def function[command_update, parameter[prog_name, prof_mgr, prof_name, prog_args]]:
constant[
Update components.
]
variable[parser] assign[=] call[name[argparse].ArgumentParser, parameter[]]
call[name[parser].add_argument, parameter[constant[components]]]
variable[args] assign[=] call[name[parser].parse_args, parameter[name[prog_args]]]
variable[prof_stub] assign[=] call[name[prof_mgr].load, parameter[name[prof_name]]]
variable[comp_stubs] assign[=] list[[]]
if compare[call[name[len], parameter[name[args].components]] equal[==] constant[0]] begin[:]
<ast.Raise object at 0x7da204346290>
for taget[name[comp_name]] in starred[name[args].components] begin[:]
variable[comp_stub] assign[=] call[name[prof_stub].component, parameter[name[comp_name]]]
call[name[component_exists], parameter[name[prof_stub], name[comp_stub]]]
call[name[comp_stubs].append, parameter[name[comp_stub]]]
variable[context] assign[=] call[name[prof_stub].context, parameter[]]
variable[plan] assign[=] list[[]]
for taget[name[comp_stub]] in starred[name[comp_stubs]] begin[:]
call[name[comp_stub].delete, parameter[name[context], name[plan]]]
for taget[name[op]] in starred[name[plan]] begin[:]
call[name[operation_execute], parameter[name[op], name[context]]]
for taget[name[op]] in starred[name[plan]] begin[:]
variable[comp_stub] assign[=] call[name[prof_stub].component, parameter[call[name[op].name, parameter[]]]]
if compare[name[comp_stub] <ast.NotIn object at 0x7da2590d7190> name[comp_stubs]] begin[:]
call[name[comp_stubs].append, parameter[name[comp_stub]]]
variable[plan] assign[=] list[[]]
for taget[name[comp_stub]] in starred[name[comp_stubs]] begin[:]
call[name[comp_stub].insert, parameter[name[context], name[plan]]]
for taget[name[op]] in starred[name[plan]] begin[:]
call[name[operation_execute], parameter[name[op], name[context]]] | keyword[def] identifier[command_update] ( identifier[prog_name] , identifier[prof_mgr] , identifier[prof_name] , identifier[prog_args] ):
literal[string]
identifier[parser] = identifier[argparse] . identifier[ArgumentParser] (
identifier[prog] = identifier[prog_name]
)
identifier[parser] . identifier[add_argument] (
literal[string] ,
identifier[metavar] = literal[string] ,
identifier[nargs] = identifier[argparse] . identifier[REMAINDER] ,
identifier[help] = literal[string]
)
identifier[args] = identifier[parser] . identifier[parse_args] ( identifier[prog_args] )
identifier[prof_stub] = identifier[prof_mgr] . identifier[load] ( identifier[prof_name] )
identifier[comp_stubs] =[]
keyword[if] identifier[len] ( identifier[args] . identifier[components] )== literal[int] :
keyword[raise] identifier[Exception] ( literal[string] )
keyword[for] identifier[comp_name] keyword[in] identifier[args] . identifier[components] :
identifier[comp_stub] = identifier[prof_stub] . identifier[component] ( identifier[comp_name] )
identifier[component_exists] ( identifier[prof_stub] , identifier[comp_stub] )
identifier[comp_stubs] . identifier[append] ( identifier[comp_stub] )
identifier[context] = identifier[prof_stub] . identifier[context] ()
identifier[plan] =[]
keyword[for] identifier[comp_stub] keyword[in] identifier[comp_stubs] :
identifier[comp_stub] . identifier[delete] ( identifier[context] , identifier[plan] )
keyword[for] identifier[op] keyword[in] identifier[plan] :
identifier[operation_execute] ( identifier[op] , identifier[context] )
keyword[for] identifier[op] keyword[in] identifier[plan] :
identifier[comp_stub] = identifier[prof_stub] . identifier[component] ( identifier[op] . identifier[name] ())
keyword[if] identifier[comp_stub] keyword[not] keyword[in] identifier[comp_stubs] :
identifier[comp_stubs] . identifier[append] ( identifier[comp_stub] )
identifier[plan] =[]
keyword[for] identifier[comp_stub] keyword[in] identifier[comp_stubs] :
identifier[comp_stub] . identifier[insert] ( identifier[context] , identifier[plan] )
keyword[for] identifier[op] keyword[in] identifier[plan] :
identifier[operation_execute] ( identifier[op] , identifier[context] ) | def command_update(prog_name, prof_mgr, prof_name, prog_args):
"""
Update components.
""" # Retrieve arguments
parser = argparse.ArgumentParser(prog=prog_name)
parser.add_argument('components', metavar='comps', nargs=argparse.REMAINDER, help='system components')
args = parser.parse_args(prog_args) # Profile load
prof_stub = prof_mgr.load(prof_name) # Collect component stubs
comp_stubs = []
if len(args.components) == 0:
raise Exception('Empty component list') # depends on [control=['if'], data=[]]
for comp_name in args.components:
comp_stub = prof_stub.component(comp_name)
component_exists(prof_stub, comp_stub)
comp_stubs.append(comp_stub) # depends on [control=['for'], data=['comp_name']]
context = prof_stub.context() # Create delete plan
plan = []
for comp_stub in comp_stubs:
comp_stub.delete(context, plan) # depends on [control=['for'], data=['comp_stub']] # Execute delete plan
for op in plan:
operation_execute(op, context) # depends on [control=['for'], data=['op']] # Update component stub list
for op in plan:
comp_stub = prof_stub.component(op.name())
if comp_stub not in comp_stubs:
comp_stubs.append(comp_stub) # depends on [control=['if'], data=['comp_stub', 'comp_stubs']] # depends on [control=['for'], data=['op']] # Create insert plan
plan = []
for comp_stub in comp_stubs:
comp_stub.insert(context, plan) # depends on [control=['for'], data=['comp_stub']] # Execute insert plan
for op in plan:
operation_execute(op, context) # depends on [control=['for'], data=['op']] |
def get_url_for_schedule(self, schedule):
"""
Returns a link to verba. The link varies by campus and schedule.
Multiple calls to this with the same schedule may result in
different urls.
"""
url = self._get_url(schedule)
if url is None:
return None
response = DAO.getURL(url, {"Accept": "application/json"})
if response.status != 200:
raise DataFailureException(url, response.status, response.data)
data = json.loads(response.data)
if "ubsLink" in data:
return data["ubsLink"][0]["search"] | def function[get_url_for_schedule, parameter[self, schedule]]:
constant[
Returns a link to verba. The link varies by campus and schedule.
Multiple calls to this with the same schedule may result in
different urls.
]
variable[url] assign[=] call[name[self]._get_url, parameter[name[schedule]]]
if compare[name[url] is constant[None]] begin[:]
return[constant[None]]
variable[response] assign[=] call[name[DAO].getURL, parameter[name[url], dictionary[[<ast.Constant object at 0x7da1b15e7130>], [<ast.Constant object at 0x7da1b15e6650>]]]]
if compare[name[response].status not_equal[!=] constant[200]] begin[:]
<ast.Raise object at 0x7da1b15e4d00>
variable[data] assign[=] call[name[json].loads, parameter[name[response].data]]
if compare[constant[ubsLink] in name[data]] begin[:]
return[call[call[call[name[data]][constant[ubsLink]]][constant[0]]][constant[search]]] | keyword[def] identifier[get_url_for_schedule] ( identifier[self] , identifier[schedule] ):
literal[string]
identifier[url] = identifier[self] . identifier[_get_url] ( identifier[schedule] )
keyword[if] identifier[url] keyword[is] keyword[None] :
keyword[return] keyword[None]
identifier[response] = identifier[DAO] . identifier[getURL] ( identifier[url] ,{ literal[string] : literal[string] })
keyword[if] identifier[response] . identifier[status] != literal[int] :
keyword[raise] identifier[DataFailureException] ( identifier[url] , identifier[response] . identifier[status] , identifier[response] . identifier[data] )
identifier[data] = identifier[json] . identifier[loads] ( identifier[response] . identifier[data] )
keyword[if] literal[string] keyword[in] identifier[data] :
keyword[return] identifier[data] [ literal[string] ][ literal[int] ][ literal[string] ] | def get_url_for_schedule(self, schedule):
"""
Returns a link to verba. The link varies by campus and schedule.
Multiple calls to this with the same schedule may result in
different urls.
"""
url = self._get_url(schedule)
if url is None:
return None # depends on [control=['if'], data=[]]
response = DAO.getURL(url, {'Accept': 'application/json'})
if response.status != 200:
raise DataFailureException(url, response.status, response.data) # depends on [control=['if'], data=[]]
data = json.loads(response.data)
if 'ubsLink' in data:
return data['ubsLink'][0]['search'] # depends on [control=['if'], data=['data']] |
def get_sample_data(sample_file):
"""Read and returns sample data to fill form with default sample sequence. """
sequence_sample_in_fasta = None
with open(sample_file) as handle:
sequence_sample_in_fasta = handle.read()
return sequence_sample_in_fasta | def function[get_sample_data, parameter[sample_file]]:
constant[Read and returns sample data to fill form with default sample sequence. ]
variable[sequence_sample_in_fasta] assign[=] constant[None]
with call[name[open], parameter[name[sample_file]]] begin[:]
variable[sequence_sample_in_fasta] assign[=] call[name[handle].read, parameter[]]
return[name[sequence_sample_in_fasta]] | keyword[def] identifier[get_sample_data] ( identifier[sample_file] ):
literal[string]
identifier[sequence_sample_in_fasta] = keyword[None]
keyword[with] identifier[open] ( identifier[sample_file] ) keyword[as] identifier[handle] :
identifier[sequence_sample_in_fasta] = identifier[handle] . identifier[read] ()
keyword[return] identifier[sequence_sample_in_fasta] | def get_sample_data(sample_file):
"""Read and returns sample data to fill form with default sample sequence. """
sequence_sample_in_fasta = None
with open(sample_file) as handle:
sequence_sample_in_fasta = handle.read() # depends on [control=['with'], data=['handle']]
return sequence_sample_in_fasta |
def solve(self):
"""Start (or re-start) optimisation. This method implements the
framework for the alternation between `X` and `D` updates in a
dictionary learning algorithm.
If option ``Verbose`` is ``True``, the progress of the
optimisation is displayed at every iteration. At termination
of this method, attribute :attr:`itstat` is a list of tuples
representing statistics of each iteration.
Attribute :attr:`timer` is an instance of :class:`.util.Timer`
that provides the following labelled timers:
``init``: Time taken for object initialisation by
:meth:`__init__`
``solve``: Total time taken by call(s) to :meth:`solve`
``solve_wo_func``: Total time taken by call(s) to
:meth:`solve`, excluding time taken to compute functional
value and related iteration statistics
"""
# Construct tuple of status display column titles and set status
# display strings
hdrtxt = ['Itn', 'Fnc', 'DFid', u('Regℓ1')]
hdrstr, fmtstr, nsep = common.solve_status_str(
hdrtxt, fwdth0=type(self).fwiter, fprec=type(self).fpothr)
# Print header and separator strings
if self.opt['Verbose']:
if self.opt['StatusHeader']:
print(hdrstr)
print("-" * nsep)
# Reset timer
self.timer.start(['solve', 'solve_wo_eval'])
# Create process pool
if self.nproc > 0:
self.pool = mp.Pool(processes=self.nproc)
for self.j in range(self.j, self.j + self.opt['MaxMainIter']):
# Perform a set of update steps
self.step()
# Evaluate functional
self.timer.stop('solve_wo_eval')
fnev = self.evaluate()
self.timer.start('solve_wo_eval')
# Record iteration stats
tk = self.timer.elapsed('solve')
itst = self.IterationStats(*((self.j,) + fnev + (tk,)))
self.itstat.append(itst)
# Display iteration stats if Verbose option enabled
if self.opt['Verbose']:
print(fmtstr % itst[:-1])
# Call callback function if defined
if self.opt['Callback'] is not None:
if self.opt['Callback'](self):
break
# Clean up process pool
if self.nproc > 0:
self.pool.close()
self.pool.join()
# Increment iteration count
self.j += 1
# Record solve time
self.timer.stop(['solve', 'solve_wo_eval'])
# Print final separator string if Verbose option enabled
if self.opt['Verbose'] and self.opt['StatusHeader']:
print("-" * nsep)
# Return final dictionary
return self.getdict() | def function[solve, parameter[self]]:
constant[Start (or re-start) optimisation. This method implements the
framework for the alternation between `X` and `D` updates in a
dictionary learning algorithm.
If option ``Verbose`` is ``True``, the progress of the
optimisation is displayed at every iteration. At termination
of this method, attribute :attr:`itstat` is a list of tuples
representing statistics of each iteration.
Attribute :attr:`timer` is an instance of :class:`.util.Timer`
that provides the following labelled timers:
``init``: Time taken for object initialisation by
:meth:`__init__`
``solve``: Total time taken by call(s) to :meth:`solve`
``solve_wo_func``: Total time taken by call(s) to
:meth:`solve`, excluding time taken to compute functional
value and related iteration statistics
]
variable[hdrtxt] assign[=] list[[<ast.Constant object at 0x7da1b07fbd60>, <ast.Constant object at 0x7da1b07f9de0>, <ast.Constant object at 0x7da1b07fb7f0>, <ast.Call object at 0x7da1b07fa860>]]
<ast.Tuple object at 0x7da1b07f84c0> assign[=] call[name[common].solve_status_str, parameter[name[hdrtxt]]]
if call[name[self].opt][constant[Verbose]] begin[:]
if call[name[self].opt][constant[StatusHeader]] begin[:]
call[name[print], parameter[name[hdrstr]]]
call[name[print], parameter[binary_operation[constant[-] * name[nsep]]]]
call[name[self].timer.start, parameter[list[[<ast.Constant object at 0x7da1b074dd20>, <ast.Constant object at 0x7da1b074ee60>]]]]
if compare[name[self].nproc greater[>] constant[0]] begin[:]
name[self].pool assign[=] call[name[mp].Pool, parameter[]]
for taget[name[self].j] in starred[call[name[range], parameter[name[self].j, binary_operation[name[self].j + call[name[self].opt][constant[MaxMainIter]]]]]] begin[:]
call[name[self].step, parameter[]]
call[name[self].timer.stop, parameter[constant[solve_wo_eval]]]
variable[fnev] assign[=] call[name[self].evaluate, parameter[]]
call[name[self].timer.start, parameter[constant[solve_wo_eval]]]
variable[tk] assign[=] call[name[self].timer.elapsed, parameter[constant[solve]]]
variable[itst] assign[=] call[name[self].IterationStats, parameter[<ast.Starred object at 0x7da1b074cac0>]]
call[name[self].itstat.append, parameter[name[itst]]]
if call[name[self].opt][constant[Verbose]] begin[:]
call[name[print], parameter[binary_operation[name[fmtstr] <ast.Mod object at 0x7da2590d6920> call[name[itst]][<ast.Slice object at 0x7da1b07fa320>]]]]
if compare[call[name[self].opt][constant[Callback]] is_not constant[None]] begin[:]
if call[call[name[self].opt][constant[Callback]], parameter[name[self]]] begin[:]
break
if compare[name[self].nproc greater[>] constant[0]] begin[:]
call[name[self].pool.close, parameter[]]
call[name[self].pool.join, parameter[]]
<ast.AugAssign object at 0x7da1b07f8b50>
call[name[self].timer.stop, parameter[list[[<ast.Constant object at 0x7da1b07f8070>, <ast.Constant object at 0x7da1b07f9570>]]]]
if <ast.BoolOp object at 0x7da1b07f91b0> begin[:]
call[name[print], parameter[binary_operation[constant[-] * name[nsep]]]]
return[call[name[self].getdict, parameter[]]] | keyword[def] identifier[solve] ( identifier[self] ):
literal[string]
identifier[hdrtxt] =[ literal[string] , literal[string] , literal[string] , identifier[u] ( literal[string] )]
identifier[hdrstr] , identifier[fmtstr] , identifier[nsep] = identifier[common] . identifier[solve_status_str] (
identifier[hdrtxt] , identifier[fwdth0] = identifier[type] ( identifier[self] ). identifier[fwiter] , identifier[fprec] = identifier[type] ( identifier[self] ). identifier[fpothr] )
keyword[if] identifier[self] . identifier[opt] [ literal[string] ]:
keyword[if] identifier[self] . identifier[opt] [ literal[string] ]:
identifier[print] ( identifier[hdrstr] )
identifier[print] ( literal[string] * identifier[nsep] )
identifier[self] . identifier[timer] . identifier[start] ([ literal[string] , literal[string] ])
keyword[if] identifier[self] . identifier[nproc] > literal[int] :
identifier[self] . identifier[pool] = identifier[mp] . identifier[Pool] ( identifier[processes] = identifier[self] . identifier[nproc] )
keyword[for] identifier[self] . identifier[j] keyword[in] identifier[range] ( identifier[self] . identifier[j] , identifier[self] . identifier[j] + identifier[self] . identifier[opt] [ literal[string] ]):
identifier[self] . identifier[step] ()
identifier[self] . identifier[timer] . identifier[stop] ( literal[string] )
identifier[fnev] = identifier[self] . identifier[evaluate] ()
identifier[self] . identifier[timer] . identifier[start] ( literal[string] )
identifier[tk] = identifier[self] . identifier[timer] . identifier[elapsed] ( literal[string] )
identifier[itst] = identifier[self] . identifier[IterationStats] (*(( identifier[self] . identifier[j] ,)+ identifier[fnev] +( identifier[tk] ,)))
identifier[self] . identifier[itstat] . identifier[append] ( identifier[itst] )
keyword[if] identifier[self] . identifier[opt] [ literal[string] ]:
identifier[print] ( identifier[fmtstr] % identifier[itst] [:- literal[int] ])
keyword[if] identifier[self] . identifier[opt] [ literal[string] ] keyword[is] keyword[not] keyword[None] :
keyword[if] identifier[self] . identifier[opt] [ literal[string] ]( identifier[self] ):
keyword[break]
keyword[if] identifier[self] . identifier[nproc] > literal[int] :
identifier[self] . identifier[pool] . identifier[close] ()
identifier[self] . identifier[pool] . identifier[join] ()
identifier[self] . identifier[j] += literal[int]
identifier[self] . identifier[timer] . identifier[stop] ([ literal[string] , literal[string] ])
keyword[if] identifier[self] . identifier[opt] [ literal[string] ] keyword[and] identifier[self] . identifier[opt] [ literal[string] ]:
identifier[print] ( literal[string] * identifier[nsep] )
keyword[return] identifier[self] . identifier[getdict] () | def solve(self):
"""Start (or re-start) optimisation. This method implements the
framework for the alternation between `X` and `D` updates in a
dictionary learning algorithm.
If option ``Verbose`` is ``True``, the progress of the
optimisation is displayed at every iteration. At termination
of this method, attribute :attr:`itstat` is a list of tuples
representing statistics of each iteration.
Attribute :attr:`timer` is an instance of :class:`.util.Timer`
that provides the following labelled timers:
``init``: Time taken for object initialisation by
:meth:`__init__`
``solve``: Total time taken by call(s) to :meth:`solve`
``solve_wo_func``: Total time taken by call(s) to
:meth:`solve`, excluding time taken to compute functional
value and related iteration statistics
"""
# Construct tuple of status display column titles and set status
# display strings
hdrtxt = ['Itn', 'Fnc', 'DFid', u('Regℓ1')]
(hdrstr, fmtstr, nsep) = common.solve_status_str(hdrtxt, fwdth0=type(self).fwiter, fprec=type(self).fpothr)
# Print header and separator strings
if self.opt['Verbose']:
if self.opt['StatusHeader']:
print(hdrstr)
print('-' * nsep) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
# Reset timer
self.timer.start(['solve', 'solve_wo_eval'])
# Create process pool
if self.nproc > 0:
self.pool = mp.Pool(processes=self.nproc) # depends on [control=['if'], data=[]]
for self.j in range(self.j, self.j + self.opt['MaxMainIter']):
# Perform a set of update steps
self.step()
# Evaluate functional
self.timer.stop('solve_wo_eval')
fnev = self.evaluate()
self.timer.start('solve_wo_eval')
# Record iteration stats
tk = self.timer.elapsed('solve')
itst = self.IterationStats(*(self.j,) + fnev + (tk,))
self.itstat.append(itst)
# Display iteration stats if Verbose option enabled
if self.opt['Verbose']:
print(fmtstr % itst[:-1]) # depends on [control=['if'], data=[]]
# Call callback function if defined
if self.opt['Callback'] is not None:
if self.opt['Callback'](self):
break # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]]
# Clean up process pool
if self.nproc > 0:
self.pool.close()
self.pool.join() # depends on [control=['if'], data=[]]
# Increment iteration count
self.j += 1
# Record solve time
self.timer.stop(['solve', 'solve_wo_eval'])
# Print final separator string if Verbose option enabled
if self.opt['Verbose'] and self.opt['StatusHeader']:
print('-' * nsep) # depends on [control=['if'], data=[]]
# Return final dictionary
return self.getdict() |
def iter_subscriptions(self, number=-1, etag=None):
"""Iterate over repositories subscribed to by this user.
:param int number: (optional), number of subscriptions to return.
Default: -1, returns all available
:param str etag: (optional), ETag from a previous request to the same
endpoint
:returns: generator of :class:`Repository <github3.repos.Repository>`
"""
from .repos import Repository
url = self._build_url('subscriptions', base_url=self._api)
return self._iter(int(number), url, Repository, etag=etag) | def function[iter_subscriptions, parameter[self, number, etag]]:
constant[Iterate over repositories subscribed to by this user.
:param int number: (optional), number of subscriptions to return.
Default: -1, returns all available
:param str etag: (optional), ETag from a previous request to the same
endpoint
:returns: generator of :class:`Repository <github3.repos.Repository>`
]
from relative_module[repos] import module[Repository]
variable[url] assign[=] call[name[self]._build_url, parameter[constant[subscriptions]]]
return[call[name[self]._iter, parameter[call[name[int], parameter[name[number]]], name[url], name[Repository]]]] | keyword[def] identifier[iter_subscriptions] ( identifier[self] , identifier[number] =- literal[int] , identifier[etag] = keyword[None] ):
literal[string]
keyword[from] . identifier[repos] keyword[import] identifier[Repository]
identifier[url] = identifier[self] . identifier[_build_url] ( literal[string] , identifier[base_url] = identifier[self] . identifier[_api] )
keyword[return] identifier[self] . identifier[_iter] ( identifier[int] ( identifier[number] ), identifier[url] , identifier[Repository] , identifier[etag] = identifier[etag] ) | def iter_subscriptions(self, number=-1, etag=None):
"""Iterate over repositories subscribed to by this user.
:param int number: (optional), number of subscriptions to return.
Default: -1, returns all available
:param str etag: (optional), ETag from a previous request to the same
endpoint
:returns: generator of :class:`Repository <github3.repos.Repository>`
"""
from .repos import Repository
url = self._build_url('subscriptions', base_url=self._api)
return self._iter(int(number), url, Repository, etag=etag) |
def behavior_script(url, template_parameters=None, behaviors_dir=None):
'''
Returns the javascript behavior string populated with template_parameters.
'''
import re, logging, json
for behavior in behaviors(behaviors_dir=behaviors_dir):
if re.match(behavior['url_regex'], url):
parameters = dict()
if 'default_parameters' in behavior:
parameters.update(behavior['default_parameters'])
if template_parameters:
parameters.update(template_parameters)
template = jinja2_environment(behaviors_dir).get_template(
behavior['behavior_js_template'])
script = template.render(parameters)
logging.info(
'using template=%r populated with parameters=%r for %r',
behavior['behavior_js_template'], json.dumps(parameters), url)
return script
return None | def function[behavior_script, parameter[url, template_parameters, behaviors_dir]]:
constant[
Returns the javascript behavior string populated with template_parameters.
]
import module[re], module[logging], module[json]
for taget[name[behavior]] in starred[call[name[behaviors], parameter[]]] begin[:]
if call[name[re].match, parameter[call[name[behavior]][constant[url_regex]], name[url]]] begin[:]
variable[parameters] assign[=] call[name[dict], parameter[]]
if compare[constant[default_parameters] in name[behavior]] begin[:]
call[name[parameters].update, parameter[call[name[behavior]][constant[default_parameters]]]]
if name[template_parameters] begin[:]
call[name[parameters].update, parameter[name[template_parameters]]]
variable[template] assign[=] call[call[name[jinja2_environment], parameter[name[behaviors_dir]]].get_template, parameter[call[name[behavior]][constant[behavior_js_template]]]]
variable[script] assign[=] call[name[template].render, parameter[name[parameters]]]
call[name[logging].info, parameter[constant[using template=%r populated with parameters=%r for %r], call[name[behavior]][constant[behavior_js_template]], call[name[json].dumps, parameter[name[parameters]]], name[url]]]
return[name[script]]
return[constant[None]] | keyword[def] identifier[behavior_script] ( identifier[url] , identifier[template_parameters] = keyword[None] , identifier[behaviors_dir] = keyword[None] ):
literal[string]
keyword[import] identifier[re] , identifier[logging] , identifier[json]
keyword[for] identifier[behavior] keyword[in] identifier[behaviors] ( identifier[behaviors_dir] = identifier[behaviors_dir] ):
keyword[if] identifier[re] . identifier[match] ( identifier[behavior] [ literal[string] ], identifier[url] ):
identifier[parameters] = identifier[dict] ()
keyword[if] literal[string] keyword[in] identifier[behavior] :
identifier[parameters] . identifier[update] ( identifier[behavior] [ literal[string] ])
keyword[if] identifier[template_parameters] :
identifier[parameters] . identifier[update] ( identifier[template_parameters] )
identifier[template] = identifier[jinja2_environment] ( identifier[behaviors_dir] ). identifier[get_template] (
identifier[behavior] [ literal[string] ])
identifier[script] = identifier[template] . identifier[render] ( identifier[parameters] )
identifier[logging] . identifier[info] (
literal[string] ,
identifier[behavior] [ literal[string] ], identifier[json] . identifier[dumps] ( identifier[parameters] ), identifier[url] )
keyword[return] identifier[script]
keyword[return] keyword[None] | def behavior_script(url, template_parameters=None, behaviors_dir=None):
"""
Returns the javascript behavior string populated with template_parameters.
"""
import re, logging, json
for behavior in behaviors(behaviors_dir=behaviors_dir):
if re.match(behavior['url_regex'], url):
parameters = dict()
if 'default_parameters' in behavior:
parameters.update(behavior['default_parameters']) # depends on [control=['if'], data=['behavior']]
if template_parameters:
parameters.update(template_parameters) # depends on [control=['if'], data=[]]
template = jinja2_environment(behaviors_dir).get_template(behavior['behavior_js_template'])
script = template.render(parameters)
logging.info('using template=%r populated with parameters=%r for %r', behavior['behavior_js_template'], json.dumps(parameters), url)
return script # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['behavior']]
return None |
def compile_mpim_users(self):
"""
Gets the info for the members within the multiple person instant message
Returns a list of all dms with the members that have ever existed
:rtype: [object]
{
name: <name>
users: [<user_id>]
}
"""
mpim_data = self._read_from_json("mpims.json")
mpims = [c for c in mpim_data.values()]
all_mpim_users = []
for mpim in mpims:
mpim_members = {"name": mpim["name"], "users": [self.__USER_DATA[m] for m in mpim["members"]]}
all_mpim_users.append(mpim_members)
return all_mpim_users | def function[compile_mpim_users, parameter[self]]:
constant[
Gets the info for the members within the multiple person instant message
Returns a list of all dms with the members that have ever existed
:rtype: [object]
{
name: <name>
users: [<user_id>]
}
]
variable[mpim_data] assign[=] call[name[self]._read_from_json, parameter[constant[mpims.json]]]
variable[mpims] assign[=] <ast.ListComp object at 0x7da1b07aa6e0>
variable[all_mpim_users] assign[=] list[[]]
for taget[name[mpim]] in starred[name[mpims]] begin[:]
variable[mpim_members] assign[=] dictionary[[<ast.Constant object at 0x7da1b07aaa10>, <ast.Constant object at 0x7da1b07ab100>], [<ast.Subscript object at 0x7da1b07a9210>, <ast.ListComp object at 0x7da1b07ab9a0>]]
call[name[all_mpim_users].append, parameter[name[mpim_members]]]
return[name[all_mpim_users]] | keyword[def] identifier[compile_mpim_users] ( identifier[self] ):
literal[string]
identifier[mpim_data] = identifier[self] . identifier[_read_from_json] ( literal[string] )
identifier[mpims] =[ identifier[c] keyword[for] identifier[c] keyword[in] identifier[mpim_data] . identifier[values] ()]
identifier[all_mpim_users] =[]
keyword[for] identifier[mpim] keyword[in] identifier[mpims] :
identifier[mpim_members] ={ literal[string] : identifier[mpim] [ literal[string] ], literal[string] :[ identifier[self] . identifier[__USER_DATA] [ identifier[m] ] keyword[for] identifier[m] keyword[in] identifier[mpim] [ literal[string] ]]}
identifier[all_mpim_users] . identifier[append] ( identifier[mpim_members] )
keyword[return] identifier[all_mpim_users] | def compile_mpim_users(self):
"""
Gets the info for the members within the multiple person instant message
Returns a list of all dms with the members that have ever existed
:rtype: [object]
{
name: <name>
users: [<user_id>]
}
"""
mpim_data = self._read_from_json('mpims.json')
mpims = [c for c in mpim_data.values()]
all_mpim_users = []
for mpim in mpims:
mpim_members = {'name': mpim['name'], 'users': [self.__USER_DATA[m] for m in mpim['members']]}
all_mpim_users.append(mpim_members) # depends on [control=['for'], data=['mpim']]
return all_mpim_users |
def _clone_reverses(self, old_reverses):
"""
Clones all the objects that were previously gathered.
"""
for ctype, reverses in old_reverses.items():
for parts in reverses.values():
sub_objs = parts[1]
field_name = parts[0]
attrs = {}
for sub_obj in sub_objs:
if ctype != 'm2m' and not attrs:
field = sub_obj._meta.get_field(field_name)
attrs = {
field.column: getattr(self, field.rel.field_name)
}
sub_obj._clone(**attrs)
if ctype == 'm2m':
setattr(self, field_name, sub_objs) | def function[_clone_reverses, parameter[self, old_reverses]]:
constant[
Clones all the objects that were previously gathered.
]
for taget[tuple[[<ast.Name object at 0x7da20c794580>, <ast.Name object at 0x7da20c7951e0>]]] in starred[call[name[old_reverses].items, parameter[]]] begin[:]
for taget[name[parts]] in starred[call[name[reverses].values, parameter[]]] begin[:]
variable[sub_objs] assign[=] call[name[parts]][constant[1]]
variable[field_name] assign[=] call[name[parts]][constant[0]]
variable[attrs] assign[=] dictionary[[], []]
for taget[name[sub_obj]] in starred[name[sub_objs]] begin[:]
if <ast.BoolOp object at 0x7da20c7940a0> begin[:]
variable[field] assign[=] call[name[sub_obj]._meta.get_field, parameter[name[field_name]]]
variable[attrs] assign[=] dictionary[[<ast.Attribute object at 0x7da20c7955a0>], [<ast.Call object at 0x7da20c7962c0>]]
call[name[sub_obj]._clone, parameter[]]
if compare[name[ctype] equal[==] constant[m2m]] begin[:]
call[name[setattr], parameter[name[self], name[field_name], name[sub_objs]]] | keyword[def] identifier[_clone_reverses] ( identifier[self] , identifier[old_reverses] ):
literal[string]
keyword[for] identifier[ctype] , identifier[reverses] keyword[in] identifier[old_reverses] . identifier[items] ():
keyword[for] identifier[parts] keyword[in] identifier[reverses] . identifier[values] ():
identifier[sub_objs] = identifier[parts] [ literal[int] ]
identifier[field_name] = identifier[parts] [ literal[int] ]
identifier[attrs] ={}
keyword[for] identifier[sub_obj] keyword[in] identifier[sub_objs] :
keyword[if] identifier[ctype] != literal[string] keyword[and] keyword[not] identifier[attrs] :
identifier[field] = identifier[sub_obj] . identifier[_meta] . identifier[get_field] ( identifier[field_name] )
identifier[attrs] ={
identifier[field] . identifier[column] : identifier[getattr] ( identifier[self] , identifier[field] . identifier[rel] . identifier[field_name] )
}
identifier[sub_obj] . identifier[_clone] (** identifier[attrs] )
keyword[if] identifier[ctype] == literal[string] :
identifier[setattr] ( identifier[self] , identifier[field_name] , identifier[sub_objs] ) | def _clone_reverses(self, old_reverses):
"""
Clones all the objects that were previously gathered.
"""
for (ctype, reverses) in old_reverses.items():
for parts in reverses.values():
sub_objs = parts[1]
field_name = parts[0]
attrs = {}
for sub_obj in sub_objs:
if ctype != 'm2m' and (not attrs):
field = sub_obj._meta.get_field(field_name)
attrs = {field.column: getattr(self, field.rel.field_name)} # depends on [control=['if'], data=[]]
sub_obj._clone(**attrs) # depends on [control=['for'], data=['sub_obj']]
if ctype == 'm2m':
setattr(self, field_name, sub_objs) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['parts']] # depends on [control=['for'], data=[]] |
def best_motif_in_cluster(single_pwm, clus_pwm, clusters, fg_fa, background, stats=None, metrics=("roc_auc", "recall_at_fdr")):
"""Return the best motif per cluster for a clustering results.
The motif can be either the average motif or one of the clustered motifs.
Parameters
----------
single_pwm : str
Filename of motifs.
clus_pwm : str
Filename of motifs.
clusters :
Motif clustering result.
fg_fa : str
Filename of FASTA file.
background : dict
Dictionary for background file names.
stats : dict, optional
If statistics are not supplied they will be computed.
metrics : sequence, optional
Metrics to use for motif evaluation. Default are "roc_auc" and
"recall_at_fdr".
Returns
-------
motifs : list
List of Motif instances.
"""
# combine original and clustered motifs
motifs = read_motifs(single_pwm) + read_motifs(clus_pwm)
motifs = dict([(str(m), m) for m in motifs])
# get the statistics for those motifs that were not yet checked
clustered_motifs = []
for clus,singles in clusters:
for motif in set([clus] + singles):
if str(motif) not in stats:
clustered_motifs.append(motifs[str(motif)])
new_stats = {}
for bg, bg_fa in background.items():
for m,s in calc_stats(clustered_motifs, fg_fa, bg_fa).items():
if m not in new_stats:
new_stats[m] = {}
new_stats[m][bg] = s
stats.update(new_stats)
rank = rank_motifs(stats, metrics)
# rank the motifs
best_motifs = []
for clus, singles in clusters:
if len(singles) > 1:
eval_motifs = singles
if clus not in motifs:
eval_motifs.append(clus)
eval_motifs = [motifs[str(e)] for e in eval_motifs]
best_motif = sorted(eval_motifs, key=lambda x: rank[str(x)])[-1]
best_motifs.append(best_motif)
else:
best_motifs.append(clus)
for bg in background:
stats[str(best_motifs[-1])][bg]["num_cluster"] = len(singles)
best_motifs = sorted(best_motifs, key=lambda x: rank[str(x)], reverse=True)
return best_motifs | def function[best_motif_in_cluster, parameter[single_pwm, clus_pwm, clusters, fg_fa, background, stats, metrics]]:
constant[Return the best motif per cluster for a clustering results.
The motif can be either the average motif or one of the clustered motifs.
Parameters
----------
single_pwm : str
Filename of motifs.
clus_pwm : str
Filename of motifs.
clusters :
Motif clustering result.
fg_fa : str
Filename of FASTA file.
background : dict
Dictionary for background file names.
stats : dict, optional
If statistics are not supplied they will be computed.
metrics : sequence, optional
Metrics to use for motif evaluation. Default are "roc_auc" and
"recall_at_fdr".
Returns
-------
motifs : list
List of Motif instances.
]
variable[motifs] assign[=] binary_operation[call[name[read_motifs], parameter[name[single_pwm]]] + call[name[read_motifs], parameter[name[clus_pwm]]]]
variable[motifs] assign[=] call[name[dict], parameter[<ast.ListComp object at 0x7da2054a7010>]]
variable[clustered_motifs] assign[=] list[[]]
for taget[tuple[[<ast.Name object at 0x7da2054a5420>, <ast.Name object at 0x7da2054a7ca0>]]] in starred[name[clusters]] begin[:]
for taget[name[motif]] in starred[call[name[set], parameter[binary_operation[list[[<ast.Name object at 0x7da2054a5e70>]] + name[singles]]]]] begin[:]
if compare[call[name[str], parameter[name[motif]]] <ast.NotIn object at 0x7da2590d7190> name[stats]] begin[:]
call[name[clustered_motifs].append, parameter[call[name[motifs]][call[name[str], parameter[name[motif]]]]]]
variable[new_stats] assign[=] dictionary[[], []]
for taget[tuple[[<ast.Name object at 0x7da2054a5bd0>, <ast.Name object at 0x7da2054a4100>]]] in starred[call[name[background].items, parameter[]]] begin[:]
for taget[tuple[[<ast.Name object at 0x7da2054a4a90>, <ast.Name object at 0x7da2054a5c30>]]] in starred[call[call[name[calc_stats], parameter[name[clustered_motifs], name[fg_fa], name[bg_fa]]].items, parameter[]]] begin[:]
if compare[name[m] <ast.NotIn object at 0x7da2590d7190> name[new_stats]] begin[:]
call[name[new_stats]][name[m]] assign[=] dictionary[[], []]
call[call[name[new_stats]][name[m]]][name[bg]] assign[=] name[s]
call[name[stats].update, parameter[name[new_stats]]]
variable[rank] assign[=] call[name[rank_motifs], parameter[name[stats], name[metrics]]]
variable[best_motifs] assign[=] list[[]]
for taget[tuple[[<ast.Name object at 0x7da18f00d930>, <ast.Name object at 0x7da18f00e950>]]] in starred[name[clusters]] begin[:]
if compare[call[name[len], parameter[name[singles]]] greater[>] constant[1]] begin[:]
variable[eval_motifs] assign[=] name[singles]
if compare[name[clus] <ast.NotIn object at 0x7da2590d7190> name[motifs]] begin[:]
call[name[eval_motifs].append, parameter[name[clus]]]
variable[eval_motifs] assign[=] <ast.ListComp object at 0x7da18bcc8df0>
variable[best_motif] assign[=] call[call[name[sorted], parameter[name[eval_motifs]]]][<ast.UnaryOp object at 0x7da18bcc80d0>]
call[name[best_motifs].append, parameter[name[best_motif]]]
for taget[name[bg]] in starred[name[background]] begin[:]
call[call[call[name[stats]][call[name[str], parameter[call[name[best_motifs]][<ast.UnaryOp object at 0x7da18bccad10>]]]]][name[bg]]][constant[num_cluster]] assign[=] call[name[len], parameter[name[singles]]]
variable[best_motifs] assign[=] call[name[sorted], parameter[name[best_motifs]]]
return[name[best_motifs]] | keyword[def] identifier[best_motif_in_cluster] ( identifier[single_pwm] , identifier[clus_pwm] , identifier[clusters] , identifier[fg_fa] , identifier[background] , identifier[stats] = keyword[None] , identifier[metrics] =( literal[string] , literal[string] )):
literal[string]
identifier[motifs] = identifier[read_motifs] ( identifier[single_pwm] )+ identifier[read_motifs] ( identifier[clus_pwm] )
identifier[motifs] = identifier[dict] ([( identifier[str] ( identifier[m] ), identifier[m] ) keyword[for] identifier[m] keyword[in] identifier[motifs] ])
identifier[clustered_motifs] =[]
keyword[for] identifier[clus] , identifier[singles] keyword[in] identifier[clusters] :
keyword[for] identifier[motif] keyword[in] identifier[set] ([ identifier[clus] ]+ identifier[singles] ):
keyword[if] identifier[str] ( identifier[motif] ) keyword[not] keyword[in] identifier[stats] :
identifier[clustered_motifs] . identifier[append] ( identifier[motifs] [ identifier[str] ( identifier[motif] )])
identifier[new_stats] ={}
keyword[for] identifier[bg] , identifier[bg_fa] keyword[in] identifier[background] . identifier[items] ():
keyword[for] identifier[m] , identifier[s] keyword[in] identifier[calc_stats] ( identifier[clustered_motifs] , identifier[fg_fa] , identifier[bg_fa] ). identifier[items] ():
keyword[if] identifier[m] keyword[not] keyword[in] identifier[new_stats] :
identifier[new_stats] [ identifier[m] ]={}
identifier[new_stats] [ identifier[m] ][ identifier[bg] ]= identifier[s]
identifier[stats] . identifier[update] ( identifier[new_stats] )
identifier[rank] = identifier[rank_motifs] ( identifier[stats] , identifier[metrics] )
identifier[best_motifs] =[]
keyword[for] identifier[clus] , identifier[singles] keyword[in] identifier[clusters] :
keyword[if] identifier[len] ( identifier[singles] )> literal[int] :
identifier[eval_motifs] = identifier[singles]
keyword[if] identifier[clus] keyword[not] keyword[in] identifier[motifs] :
identifier[eval_motifs] . identifier[append] ( identifier[clus] )
identifier[eval_motifs] =[ identifier[motifs] [ identifier[str] ( identifier[e] )] keyword[for] identifier[e] keyword[in] identifier[eval_motifs] ]
identifier[best_motif] = identifier[sorted] ( identifier[eval_motifs] , identifier[key] = keyword[lambda] identifier[x] : identifier[rank] [ identifier[str] ( identifier[x] )])[- literal[int] ]
identifier[best_motifs] . identifier[append] ( identifier[best_motif] )
keyword[else] :
identifier[best_motifs] . identifier[append] ( identifier[clus] )
keyword[for] identifier[bg] keyword[in] identifier[background] :
identifier[stats] [ identifier[str] ( identifier[best_motifs] [- literal[int] ])][ identifier[bg] ][ literal[string] ]= identifier[len] ( identifier[singles] )
identifier[best_motifs] = identifier[sorted] ( identifier[best_motifs] , identifier[key] = keyword[lambda] identifier[x] : identifier[rank] [ identifier[str] ( identifier[x] )], identifier[reverse] = keyword[True] )
keyword[return] identifier[best_motifs] | def best_motif_in_cluster(single_pwm, clus_pwm, clusters, fg_fa, background, stats=None, metrics=('roc_auc', 'recall_at_fdr')):
"""Return the best motif per cluster for a clustering results.
The motif can be either the average motif or one of the clustered motifs.
Parameters
----------
single_pwm : str
Filename of motifs.
clus_pwm : str
Filename of motifs.
clusters :
Motif clustering result.
fg_fa : str
Filename of FASTA file.
background : dict
Dictionary for background file names.
stats : dict, optional
If statistics are not supplied they will be computed.
metrics : sequence, optional
Metrics to use for motif evaluation. Default are "roc_auc" and
"recall_at_fdr".
Returns
-------
motifs : list
List of Motif instances.
"""
# combine original and clustered motifs
motifs = read_motifs(single_pwm) + read_motifs(clus_pwm)
motifs = dict([(str(m), m) for m in motifs])
# get the statistics for those motifs that were not yet checked
clustered_motifs = []
for (clus, singles) in clusters:
for motif in set([clus] + singles):
if str(motif) not in stats:
clustered_motifs.append(motifs[str(motif)]) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['motif']] # depends on [control=['for'], data=[]]
new_stats = {}
for (bg, bg_fa) in background.items():
for (m, s) in calc_stats(clustered_motifs, fg_fa, bg_fa).items():
if m not in new_stats:
new_stats[m] = {} # depends on [control=['if'], data=['m', 'new_stats']]
new_stats[m][bg] = s # depends on [control=['for'], data=[]] # depends on [control=['for'], data=[]]
stats.update(new_stats)
rank = rank_motifs(stats, metrics)
# rank the motifs
best_motifs = []
for (clus, singles) in clusters:
if len(singles) > 1:
eval_motifs = singles
if clus not in motifs:
eval_motifs.append(clus) # depends on [control=['if'], data=['clus']]
eval_motifs = [motifs[str(e)] for e in eval_motifs]
best_motif = sorted(eval_motifs, key=lambda x: rank[str(x)])[-1]
best_motifs.append(best_motif) # depends on [control=['if'], data=[]]
else:
best_motifs.append(clus)
for bg in background:
stats[str(best_motifs[-1])][bg]['num_cluster'] = len(singles) # depends on [control=['for'], data=['bg']] # depends on [control=['for'], data=[]]
best_motifs = sorted(best_motifs, key=lambda x: rank[str(x)], reverse=True)
return best_motifs |
def delete(self):
    """Delete this project from the owner's Todoist account.

    >>> from pytodoist import todoist
    >>> user = todoist.login('john.doe@gmail.com', 'password')
    >>> project = user.get_project('PyTodoist')
    >>> project.delete()
    """
    # Ask the API to remove the project, then drop the local cache entry.
    _perform_command(self.owner, 'project_delete', {'ids': [self.id]})
    del self.owner.projects[self.id]
constant[Delete the project.
>>> from pytodoist import todoist
>>> user = todoist.login('john.doe@gmail.com', 'password')
>>> project = user.get_project('PyTodoist')
>>> project.delete()
]
variable[args] assign[=] dictionary[[<ast.Constant object at 0x7da1b0f397b0>], [<ast.List object at 0x7da1b0f384c0>]]
call[name[_perform_command], parameter[name[self].owner, constant[project_delete], name[args]]]
<ast.Delete object at 0x7da1b0f39d50> | keyword[def] identifier[delete] ( identifier[self] ):
literal[string]
identifier[args] ={ literal[string] :[ identifier[self] . identifier[id] ]}
identifier[_perform_command] ( identifier[self] . identifier[owner] , literal[string] , identifier[args] )
keyword[del] identifier[self] . identifier[owner] . identifier[projects] [ identifier[self] . identifier[id] ] | def delete(self):
"""Delete the project.
>>> from pytodoist import todoist
>>> user = todoist.login('john.doe@gmail.com', 'password')
>>> project = user.get_project('PyTodoist')
>>> project.delete()
"""
args = {'ids': [self.id]}
_perform_command(self.owner, 'project_delete', args)
del self.owner.projects[self.id] |
def array_sha256(a):
    """Return the SHA-256 hex digest of a NumPy array.

    The digest covers the dtype name, the shape and the raw bytes, so
    two arrays hash equal only when all three match.
    """
    digest = hashlib.sha256()
    digest.update(str(a.dtype).encode())
    digest.update(numpy.array(a.shape))
    digest.update(a.tobytes())
    return digest.hexdigest()
constant[Create a SHA256 hash from a Numpy array.]
variable[dtype] assign[=] call[call[name[str], parameter[name[a].dtype]].encode, parameter[]]
variable[shape] assign[=] call[name[numpy].array, parameter[name[a].shape]]
variable[sha] assign[=] call[name[hashlib].sha256, parameter[]]
call[name[sha].update, parameter[name[dtype]]]
call[name[sha].update, parameter[name[shape]]]
call[name[sha].update, parameter[call[name[a].tobytes, parameter[]]]]
return[call[name[sha].hexdigest, parameter[]]] | keyword[def] identifier[array_sha256] ( identifier[a] ):
literal[string]
identifier[dtype] = identifier[str] ( identifier[a] . identifier[dtype] ). identifier[encode] ()
identifier[shape] = identifier[numpy] . identifier[array] ( identifier[a] . identifier[shape] )
identifier[sha] = identifier[hashlib] . identifier[sha256] ()
identifier[sha] . identifier[update] ( identifier[dtype] )
identifier[sha] . identifier[update] ( identifier[shape] )
identifier[sha] . identifier[update] ( identifier[a] . identifier[tobytes] ())
keyword[return] identifier[sha] . identifier[hexdigest] () | def array_sha256(a):
"""Create a SHA256 hash from a Numpy array."""
dtype = str(a.dtype).encode()
shape = numpy.array(a.shape)
sha = hashlib.sha256()
sha.update(dtype)
sha.update(shape)
sha.update(a.tobytes())
return sha.hexdigest() |
def console_wait_for_keypress(flush: bool) -> Key:
    """Block until a key is pressed and return it as a new :any:`Key`.

    Args:
        flush bool: When True the event queue is cleared before waiting
            for the next event.

    Returns:
        Key: The key event that ended the wait.

    .. deprecated:: 9.3
        Use the :any:`tcod.event.wait` function to wait for events.
    """
    pressed = Key()
    lib.TCOD_console_wait_for_keypress_wrapper(pressed.key_p, flush)
    return pressed
constant[Block until the user presses a key, then returns a new Key.
Args:
flush bool: If True then the event queue is cleared before waiting
for the next event.
Returns:
Key: A new Key instance.
.. deprecated:: 9.3
Use the :any:`tcod.event.wait` function to wait for events.
]
variable[key] assign[=] call[name[Key], parameter[]]
call[name[lib].TCOD_console_wait_for_keypress_wrapper, parameter[name[key].key_p, name[flush]]]
return[name[key]] | keyword[def] identifier[console_wait_for_keypress] ( identifier[flush] : identifier[bool] )-> identifier[Key] :
literal[string]
identifier[key] = identifier[Key] ()
identifier[lib] . identifier[TCOD_console_wait_for_keypress_wrapper] ( identifier[key] . identifier[key_p] , identifier[flush] )
keyword[return] identifier[key] | def console_wait_for_keypress(flush: bool) -> Key:
"""Block until the user presses a key, then returns a new Key.
Args:
flush bool: If True then the event queue is cleared before waiting
for the next event.
Returns:
Key: A new Key instance.
.. deprecated:: 9.3
Use the :any:`tcod.event.wait` function to wait for events.
"""
key = Key()
lib.TCOD_console_wait_for_keypress_wrapper(key.key_p, flush)
return key |
def resize_to_contents(self):
    """Resize all columns so every cell's contents fit.

    Shows a busy (wait) cursor for the duration, since fetching the
    remaining columns from the model can be slow.
    """
    QApplication.setOverrideCursor(QCursor(Qt.WaitCursor))
    try:
        self.resizeColumnsToContents()
        # Load any not-yet-fetched columns, then resize again so the new
        # columns are sized too.
        self.model().fetch_more(columns=True)
        self.resizeColumnsToContents()
    finally:
        # Fix: restore the cursor even if fetching/resizing raises, so the
        # UI is never left stuck with a wait cursor.
        QApplication.restoreOverrideCursor()
constant[Resize cells to contents]
call[name[QApplication].setOverrideCursor, parameter[call[name[QCursor], parameter[name[Qt].WaitCursor]]]]
call[name[self].resizeColumnsToContents, parameter[]]
call[call[name[self].model, parameter[]].fetch_more, parameter[]]
call[name[self].resizeColumnsToContents, parameter[]]
call[name[QApplication].restoreOverrideCursor, parameter[]] | keyword[def] identifier[resize_to_contents] ( identifier[self] ):
literal[string]
identifier[QApplication] . identifier[setOverrideCursor] ( identifier[QCursor] ( identifier[Qt] . identifier[WaitCursor] ))
identifier[self] . identifier[resizeColumnsToContents] ()
identifier[self] . identifier[model] (). identifier[fetch_more] ( identifier[columns] = keyword[True] )
identifier[self] . identifier[resizeColumnsToContents] ()
identifier[QApplication] . identifier[restoreOverrideCursor] () | def resize_to_contents(self):
"""Resize cells to contents"""
QApplication.setOverrideCursor(QCursor(Qt.WaitCursor))
self.resizeColumnsToContents()
self.model().fetch_more(columns=True)
self.resizeColumnsToContents()
QApplication.restoreOverrideCursor() |
def build_response_card(title, subtitle, options):
    """
    Build a responseCard with a title, subtitle, and an optional set of
    options which should be displayed as buttons.

    Args:
        title: Card title text.
        subtitle: Card subtitle text.
        options: Optional sequence of button payloads; only the first five
            are kept (the Lex generic-attachment button limit). ``None``
            produces a card with no buttons.

    Returns:
        dict: A Lex ``responseCard`` payload.
    """
    # Idiom fix: slice replaces the manual index loop; Lex allows at most
    # 5 buttons per generic attachment.
    buttons = None if options is None else list(options[:5])
    return {
        'contentType': 'application/vnd.amazonaws.card.generic',
        'version': 1,
        'genericAttachments': [{
            'title': title,
            'subTitle': subtitle,
            'buttons': buttons,
        }],
    }
constant[
Build a responseCard with a title, subtitle, and an optional set of options which should be displayed as buttons.
]
variable[buttons] assign[=] constant[None]
if compare[name[options] is_not constant[None]] begin[:]
variable[buttons] assign[=] list[[]]
for taget[name[i]] in starred[call[name[range], parameter[call[name[min], parameter[constant[5], call[name[len], parameter[name[options]]]]]]]] begin[:]
call[name[buttons].append, parameter[call[name[options]][name[i]]]]
return[dictionary[[<ast.Constant object at 0x7da1b1f95e10>, <ast.Constant object at 0x7da1b1f969b0>, <ast.Constant object at 0x7da1b1f97f70>], [<ast.Constant object at 0x7da1b1f96f80>, <ast.Constant object at 0x7da1b1f95ae0>, <ast.List object at 0x7da1b1f97520>]]] | keyword[def] identifier[build_response_card] ( identifier[title] , identifier[subtitle] , identifier[options] ):
literal[string]
identifier[buttons] = keyword[None]
keyword[if] identifier[options] keyword[is] keyword[not] keyword[None] :
identifier[buttons] =[]
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[min] ( literal[int] , identifier[len] ( identifier[options] ))):
identifier[buttons] . identifier[append] ( identifier[options] [ identifier[i] ])
keyword[return] {
literal[string] : literal[string] ,
literal[string] : literal[int] ,
literal[string] :[{
literal[string] : identifier[title] ,
literal[string] : identifier[subtitle] ,
literal[string] : identifier[buttons]
}]
} | def build_response_card(title, subtitle, options):
"""
Build a responseCard with a title, subtitle, and an optional set of options which should be displayed as buttons.
"""
buttons = None
if options is not None:
buttons = []
for i in range(min(5, len(options))):
buttons.append(options[i]) # depends on [control=['for'], data=['i']] # depends on [control=['if'], data=['options']]
return {'contentType': 'application/vnd.amazonaws.card.generic', 'version': 1, 'genericAttachments': [{'title': title, 'subTitle': subtitle, 'buttons': buttons}]} |
def delete(self, resource, timeout=-1):
    """
    Delete all the labels for a resource.

    Args:
        resource (dict): Object to delete.
        timeout:
            Timeout in seconds. Task completion is awaited by default; on
            timeout we merely stop waiting -- the operation itself is not
            aborted in OneView.
    """
    # Delegate straight to the underlying REST client.
    self._client.delete(resource=resource, timeout=timeout)
constant[
Delete all the labels for a resource.
Args:
resource (dict): Object to delete.
timeout:
Timeout in seconds. Wait for task completion by default. The timeout does not abort the operation
in OneView, just stop waiting for its completion.
]
call[name[self]._client.delete, parameter[]] | keyword[def] identifier[delete] ( identifier[self] , identifier[resource] , identifier[timeout] =- literal[int] ):
literal[string]
identifier[self] . identifier[_client] . identifier[delete] ( identifier[resource] = identifier[resource] , identifier[timeout] = identifier[timeout] ) | def delete(self, resource, timeout=-1):
"""
Delete all the labels for a resource.
Args:
resource (dict): Object to delete.
timeout:
Timeout in seconds. Wait for task completion by default. The timeout does not abort the operation
in OneView, just stop waiting for its completion.
"""
self._client.delete(resource=resource, timeout=timeout) |
def topfnfile(self, fileobj):
    """
    Write this cache to ``fileobj`` as a plain-text PFN file, one entry
    path per line, then close ``fileobj``.
    """
    # py3 fix: the original used the Python 2 ``print >>fileobj`` statement,
    # which is a SyntaxError under Python 3. write() with an explicit
    # newline produces identical output.
    for entry in self:
        fileobj.write(entry.path + "\n")
    # NOTE: closing a caller-supplied file object is surprising, but it is
    # the documented historical behaviour, so it is kept.
    fileobj.close()
constant[
write a cache object to filename as a plain text pfn file
]
for taget[name[entry]] in starred[name[self]] begin[:]
tuple[[<ast.BinOp object at 0x7da18f721d50>, <ast.Attribute object at 0x7da18f722620>]]
call[name[fileobj].close, parameter[]] | keyword[def] identifier[topfnfile] ( identifier[self] , identifier[fileobj] ):
literal[string]
keyword[for] identifier[entry] keyword[in] identifier[self] :
identifier[print] >> identifier[fileobj] , identifier[entry] . identifier[path]
identifier[fileobj] . identifier[close] () | def topfnfile(self, fileobj):
"""
write a cache object to filename as a plain text pfn file
"""
for entry in self:
(print >> fileobj, entry.path) # depends on [control=['for'], data=['entry']]
fileobj.close() |
def get_string_camel_patterns(cls, name, min_length=0):
    """ Finds all permutations of possible camel casing of the given name
    :param name: str, the name we need to get all possible permutations and abbreviations for
    :param min_length: int, minimum length we want for abbreviations
    :return: list(list(str)), list casing permutations of list of abbreviations
    """
    # Have to check for longest first and remove duplicates
    patterns = []
    # NOTE(review): assumes _get_abbreviations yields shortened forms of
    # ``name`` respecting ``output_length`` -- confirm against that helper.
    abbreviations = list(set(cls._get_abbreviations(name, output_length=min_length)))
    abbreviations.sort(key=len, reverse=True)
    for abbr in abbreviations:
        # We won't check for abbreviations that are stupid eg something with apparent camel casing within
        # the word itself like LeF, sorting from:
        # http://stackoverflow.com/questions/13954841/python-sort-upper-case-and-lower-case
        casing_permutations = list(set(cls._get_casing_permutations(abbr)))
        # Sort key: case-insensitive alphabetical first, then variants whose
        # first letter is uppercase before lowercase, then shorter first.
        casing_permutations.sort(key=lambda v: (v.upper(), v[0].islower(), len(v)))
        # Keep valid camel-case candidates; strings of <= 2 characters are
        # always kept since camel-case validity is meaningless that short.
        permutations = [permutation for permutation in casing_permutations if
                        cls.is_valid_camel(permutation) or len(permutation) <= 2]
        if permutations:
            patterns.append(permutations)
    return patterns
constant[ Finds all permutations of possible camel casing of the given name
:param name: str, the name we need to get all possible permutations and abbreviations for
:param min_length: int, minimum length we want for abbreviations
:return: list(list(str)), list casing permutations of list of abbreviations
]
variable[patterns] assign[=] list[[]]
variable[abbreviations] assign[=] call[name[list], parameter[call[name[set], parameter[call[name[cls]._get_abbreviations, parameter[name[name]]]]]]]
call[name[abbreviations].sort, parameter[]]
for taget[name[abbr]] in starred[name[abbreviations]] begin[:]
variable[casing_permutations] assign[=] call[name[list], parameter[call[name[set], parameter[call[name[cls]._get_casing_permutations, parameter[name[abbr]]]]]]]
call[name[casing_permutations].sort, parameter[]]
variable[permutations] assign[=] <ast.ListComp object at 0x7da18f811150>
if name[permutations] begin[:]
call[name[patterns].append, parameter[name[permutations]]]
return[name[patterns]] | keyword[def] identifier[get_string_camel_patterns] ( identifier[cls] , identifier[name] , identifier[min_length] = literal[int] ):
literal[string]
identifier[patterns] =[]
identifier[abbreviations] = identifier[list] ( identifier[set] ( identifier[cls] . identifier[_get_abbreviations] ( identifier[name] , identifier[output_length] = identifier[min_length] )))
identifier[abbreviations] . identifier[sort] ( identifier[key] = identifier[len] , identifier[reverse] = keyword[True] )
keyword[for] identifier[abbr] keyword[in] identifier[abbreviations] :
identifier[casing_permutations] = identifier[list] ( identifier[set] ( identifier[cls] . identifier[_get_casing_permutations] ( identifier[abbr] )))
identifier[casing_permutations] . identifier[sort] ( identifier[key] = keyword[lambda] identifier[v] :( identifier[v] . identifier[upper] (), identifier[v] [ literal[int] ]. identifier[islower] (), identifier[len] ( identifier[v] )))
identifier[permutations] =[ identifier[permutation] keyword[for] identifier[permutation] keyword[in] identifier[casing_permutations] keyword[if]
identifier[cls] . identifier[is_valid_camel] ( identifier[permutation] ) keyword[or] identifier[len] ( identifier[permutation] )<= literal[int] ]
keyword[if] identifier[permutations] :
identifier[patterns] . identifier[append] ( identifier[permutations] )
keyword[return] identifier[patterns] | def get_string_camel_patterns(cls, name, min_length=0):
""" Finds all permutations of possible camel casing of the given name
:param name: str, the name we need to get all possible permutations and abbreviations for
:param min_length: int, minimum length we want for abbreviations
:return: list(list(str)), list casing permutations of list of abbreviations
"""
# Have to check for longest first and remove duplicates
patterns = []
abbreviations = list(set(cls._get_abbreviations(name, output_length=min_length)))
abbreviations.sort(key=len, reverse=True)
for abbr in abbreviations:
# We won't check for abbreviations that are stupid eg something with apparent camel casing within
# the word itself like LeF, sorting from:
# http://stackoverflow.com/questions/13954841/python-sort-upper-case-and-lower-case
casing_permutations = list(set(cls._get_casing_permutations(abbr)))
casing_permutations.sort(key=lambda v: (v.upper(), v[0].islower(), len(v)))
permutations = [permutation for permutation in casing_permutations if cls.is_valid_camel(permutation) or len(permutation) <= 2]
if permutations:
patterns.append(permutations) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['abbr']]
return patterns |
async def auto_detect_at_address(cls, address, auth):
    '''Probe ``address`` for a SOCKS proxy using the given authentication
    method (or None), trying SOCKS5, then SOCKS4a, then SOCKS4.

    Returns a SOCKSProxy for the first protocol that responds like a
    proxy, otherwise None. A returned proxy is only *detected*, not
    guaranteed functional -- it may, for example, lack network
    connectivity.
    '''
    for candidate_protocol in (SOCKS5, SOCKS4a, SOCKS4):
        candidate = cls(address, candidate_protocol, auth)
        if await candidate._detect_proxy():
            return candidate
    return None
literal[string]
keyword[for] identifier[protocol] keyword[in] ( identifier[SOCKS5] , identifier[SOCKS4a] , identifier[SOCKS4] ):
identifier[proxy] = identifier[cls] ( identifier[address] , identifier[protocol] , identifier[auth] )
keyword[if] keyword[await] identifier[proxy] . identifier[_detect_proxy] ():
keyword[return] identifier[proxy]
keyword[return] keyword[None] | async def auto_detect_at_address(cls, address, auth):
"""Try to detect a SOCKS proxy at address using the authentication method (or None).
SOCKS5, SOCKS4a and SOCKS are tried in order. If a SOCKS proxy is detected a
SOCKSProxy object is returned.
Returning a SOCKSProxy does not mean it is functioning - for example, it may have
no network connectivity.
If no proxy is detected return None.
"""
for protocol in (SOCKS5, SOCKS4a, SOCKS4):
proxy = cls(address, protocol, auth)
if await proxy._detect_proxy():
return proxy # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['protocol']]
return None |
def login_required(f, redirect_field_name=REDIRECT_FIELD_NAME, login_url=None):
    """
    Decorator that wraps django.contrib.auth.decorators.login_required, but supports extracting Shopify's authentication
    query parameters (`shop`, `timestamp`, `signature` and `hmac`) and passing them on to the login URL (instead of just
    wrapping them up and encoding them in to the `next` parameter).
    This is useful for ensuring that users are automatically logged on when they first access a page through the Shopify
    Admin, which passes these parameters with every page request to an embedded app.
    """
    @wraps(f)
    def wrapper(request, *args, **kwargs):
        # Authenticated users go straight through to the wrapped view.
        if is_authenticated(request.user):
            return f(request, *args, **kwargs)
        # Extract the Shopify-specific authentication parameters from the current request.
        shopify_params = {
            k: request.GET[k]
            for k in ['shop', 'timestamp', 'signature', 'hmac']
            if k in request.GET
        }
        # Get the login URL.
        resolved_login_url = force_str(resolve_url(login_url or settings.LOGIN_URL))
        # Add the Shopify authentication parameters to the login URL.
        updated_login_url = add_query_parameters_to_url(resolved_login_url, shopify_params)
        # Delegate the redirect handling to Django's stock decorator, now
        # pointed at the parameter-augmented login URL. It is rebuilt per
        # request because the URL depends on this request's query string.
        django_login_required_decorator = django_login_required(redirect_field_name=redirect_field_name,
                                                                login_url=updated_login_url)
        return django_login_required_decorator(f)(request, *args, **kwargs)
    return wrapper
constant[
Decorator that wraps django.contrib.auth.decorators.login_required, but supports extracting Shopify's authentication
query parameters (`shop`, `timestamp`, `signature` and `hmac`) and passing them on to the login URL (instead of just
wrapping them up and encoding them in to the `next` parameter).
This is useful for ensuring that users are automatically logged on when they first access a page through the Shopify
Admin, which passes these parameters with every page request to an embedded app.
]
def function[wrapper, parameter[request]]:
if call[name[is_authenticated], parameter[name[request].user]] begin[:]
return[call[name[f], parameter[name[request], <ast.Starred object at 0x7da1b101b2b0>]]]
variable[shopify_params] assign[=] <ast.DictComp object at 0x7da1b1019570>
variable[resolved_login_url] assign[=] call[name[force_str], parameter[call[name[resolve_url], parameter[<ast.BoolOp object at 0x7da1b101bdc0>]]]]
variable[updated_login_url] assign[=] call[name[add_query_parameters_to_url], parameter[name[resolved_login_url], name[shopify_params]]]
variable[django_login_required_decorator] assign[=] call[name[django_login_required], parameter[]]
return[call[call[name[django_login_required_decorator], parameter[name[f]]], parameter[name[request], <ast.Starred object at 0x7da1b10187f0>]]]
return[name[wrapper]] | keyword[def] identifier[login_required] ( identifier[f] , identifier[redirect_field_name] = identifier[REDIRECT_FIELD_NAME] , identifier[login_url] = keyword[None] ):
literal[string]
@ identifier[wraps] ( identifier[f] )
keyword[def] identifier[wrapper] ( identifier[request] ,* identifier[args] ,** identifier[kwargs] ):
keyword[if] identifier[is_authenticated] ( identifier[request] . identifier[user] ):
keyword[return] identifier[f] ( identifier[request] ,* identifier[args] ,** identifier[kwargs] )
identifier[shopify_params] ={
identifier[k] : identifier[request] . identifier[GET] [ identifier[k] ]
keyword[for] identifier[k] keyword[in] [ literal[string] , literal[string] , literal[string] , literal[string] ]
keyword[if] identifier[k] keyword[in] identifier[request] . identifier[GET]
}
identifier[resolved_login_url] = identifier[force_str] ( identifier[resolve_url] ( identifier[login_url] keyword[or] identifier[settings] . identifier[LOGIN_URL] ))
identifier[updated_login_url] = identifier[add_query_parameters_to_url] ( identifier[resolved_login_url] , identifier[shopify_params] )
identifier[django_login_required_decorator] = identifier[django_login_required] ( identifier[redirect_field_name] = identifier[redirect_field_name] ,
identifier[login_url] = identifier[updated_login_url] )
keyword[return] identifier[django_login_required_decorator] ( identifier[f] )( identifier[request] ,* identifier[args] ,** identifier[kwargs] )
keyword[return] identifier[wrapper] | def login_required(f, redirect_field_name=REDIRECT_FIELD_NAME, login_url=None):
"""
Decorator that wraps django.contrib.auth.decorators.login_required, but supports extracting Shopify's authentication
query parameters (`shop`, `timestamp`, `signature` and `hmac`) and passing them on to the login URL (instead of just
wrapping them up and encoding them in to the `next` parameter).
This is useful for ensuring that users are automatically logged on when they first access a page through the Shopify
Admin, which passes these parameters with every page request to an embedded app.
"""
@wraps(f)
def wrapper(request, *args, **kwargs):
if is_authenticated(request.user):
return f(request, *args, **kwargs) # depends on [control=['if'], data=[]]
# Extract the Shopify-specific authentication parameters from the current request.
shopify_params = {k: request.GET[k] for k in ['shop', 'timestamp', 'signature', 'hmac'] if k in request.GET}
# Get the login URL.
resolved_login_url = force_str(resolve_url(login_url or settings.LOGIN_URL))
# Add the Shopify authentication parameters to the login URL.
updated_login_url = add_query_parameters_to_url(resolved_login_url, shopify_params)
django_login_required_decorator = django_login_required(redirect_field_name=redirect_field_name, login_url=updated_login_url)
return django_login_required_decorator(f)(request, *args, **kwargs)
return wrapper |
async def release(self, task_id, *, delay=None):
    """
    Return a task to the queue, optionally after a delay.

    :param task_id: Task id
    :param delay: Seconds before the task becomes ready again; omitted
        from the request when None
    :return: Task instance
    """
    options = {'delay': delay} if delay is not None else {}
    response = await self.conn.call(self.__funcs['release'], (task_id, options))
    return self._create_task(response.body)
literal[string]
identifier[opts] ={}
keyword[if] identifier[delay] keyword[is] keyword[not] keyword[None] :
identifier[opts] [ literal[string] ]= identifier[delay]
identifier[args] =( identifier[task_id] , identifier[opts] )
identifier[res] = keyword[await] identifier[self] . identifier[conn] . identifier[call] ( identifier[self] . identifier[__funcs] [ literal[string] ], identifier[args] )
keyword[return] identifier[self] . identifier[_create_task] ( identifier[res] . identifier[body] ) | async def release(self, task_id, *, delay=None):
"""
Release task (return to queue) with delay if specified
:param task_id: Task id
:param delay: Time in seconds before task will become ready again
:return: Task instance
"""
opts = {}
if delay is not None:
opts['delay'] = delay # depends on [control=['if'], data=['delay']]
args = (task_id, opts)
res = await self.conn.call(self.__funcs['release'], args)
return self._create_task(res.body) |
def adjust_column_width(worksheet):
    """Fit each column's width to its longest cell value (plus padding).

    Args:
        worksheet: worksheet whose column dimensions are updated in place
    """
    PADDING = 1
    widest = {}
    # Track, per column, the length of the longest stringified cell value;
    # cells with falsy values are ignored.
    for row in worksheet.rows:
        for cell in row:
            if cell.value:
                widest[cell.column] = max(widest.get(cell.column, 0),
                                          len(str(cell.value)))
    for column, width in widest.items():
        worksheet.column_dimensions[column].width = width + PADDING
constant[Adjust column width in worksheet.
Args:
worksheet: worksheet to be adjusted
]
variable[dims] assign[=] dictionary[[], []]
variable[padding] assign[=] constant[1]
for taget[name[row]] in starred[name[worksheet].rows] begin[:]
for taget[name[cell]] in starred[name[row]] begin[:]
if <ast.UnaryOp object at 0x7da1b0ff0910> begin[:]
continue
call[name[dims]][name[cell].column] assign[=] call[name[max], parameter[call[name[dims].get, parameter[name[cell].column, constant[0]]], call[name[len], parameter[call[name[str], parameter[name[cell].value]]]]]]
for taget[tuple[[<ast.Name object at 0x7da1b0ff13f0>, <ast.Name object at 0x7da1b0ff3d30>]]] in starred[call[name[list], parameter[call[name[dims].items, parameter[]]]]] begin[:]
call[name[worksheet].column_dimensions][name[col]].width assign[=] binary_operation[name[value] + name[padding]] | keyword[def] identifier[adjust_column_width] ( identifier[worksheet] ):
literal[string]
identifier[dims] ={}
identifier[padding] = literal[int]
keyword[for] identifier[row] keyword[in] identifier[worksheet] . identifier[rows] :
keyword[for] identifier[cell] keyword[in] identifier[row] :
keyword[if] keyword[not] identifier[cell] . identifier[value] :
keyword[continue]
identifier[dims] [ identifier[cell] . identifier[column] ]= identifier[max] (
identifier[dims] . identifier[get] ( identifier[cell] . identifier[column] , literal[int] ),
identifier[len] ( identifier[str] ( identifier[cell] . identifier[value] ))
)
keyword[for] identifier[col] , identifier[value] keyword[in] identifier[list] ( identifier[dims] . identifier[items] ()):
identifier[worksheet] . identifier[column_dimensions] [ identifier[col] ]. identifier[width] = identifier[value] + identifier[padding] | def adjust_column_width(worksheet):
"""Adjust column width in worksheet.
Args:
worksheet: worksheet to be adjusted
"""
dims = {}
padding = 1
for row in worksheet.rows:
for cell in row:
if not cell.value:
continue # depends on [control=['if'], data=[]]
dims[cell.column] = max(dims.get(cell.column, 0), len(str(cell.value))) # depends on [control=['for'], data=['cell']] # depends on [control=['for'], data=['row']]
for (col, value) in list(dims.items()):
worksheet.column_dimensions[col].width = value + padding # depends on [control=['for'], data=[]] |
def __call_api(self, path, params=None, api_url=FORECAST_URL):
    """
    Call the datapoint api using the requests module

    :param path: resource path appended to ``api_url``.
    :param params: optional dict of extra query parameters, merged on top
        of the API key. NOTE(review): any falsy value is replaced by an
        empty dict, so ``params={}`` and ``params=None`` behave the same.
    :param api_url: base URL of the endpoint (forecast URL by default).
    :return: the decoded JSON response; also cached on ``self.call_response``.
    :raises APIException: if the response body is not valid JSON.
    :raises Exception: if the HTTP status is not 200, with the message
        taken from the response payload.
    """
    if not params:
        params = dict()
    payload = {'key': self.api_key}
    payload.update(params)
    url = "%s/%s" % (api_url, path)
    # Add a timeout to the request.
    # The value of 1 second is based on attempting 100 connections to
    # datapoint and taking ten times the mean connection time (rounded up).
    # Could expose to users in the functions which need to call the api.
    #req = requests.get(url, params=payload, timeout=1)
    # The wrapper function __retry_session returns a requests.Session
    # object. This has a .get() function like requests.get(), so the use
    # doesn't change here.
    sess = self.__retry_session()
    req = sess.get(url, params=payload, timeout=1)
    try:
        data = req.json()
    except ValueError:
        raise APIException("DataPoint has not returned any data, this could be due to an incorrect API key")
    self.call_response = data
    if req.status_code != 200:
        # The error payload may carry its message under any of these keys;
        # take the first one present.
        msg = [data[m] for m in ("message", "error_message", "status") \
            if m in data][0]
        raise Exception(msg)
    return data
constant[
Call the datapoint api using the requests module
]
if <ast.UnaryOp object at 0x7da1b0e077f0> begin[:]
variable[params] assign[=] call[name[dict], parameter[]]
variable[payload] assign[=] dictionary[[<ast.Constant object at 0x7da1b0e04df0>], [<ast.Attribute object at 0x7da1b0e069e0>]]
call[name[payload].update, parameter[name[params]]]
variable[url] assign[=] binary_operation[constant[%s/%s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1b0e07e80>, <ast.Name object at 0x7da1b0e06980>]]]
variable[sess] assign[=] call[name[self].__retry_session, parameter[]]
variable[req] assign[=] call[name[sess].get, parameter[name[url]]]
<ast.Try object at 0x7da1b0e04070>
name[self].call_response assign[=] name[data]
if compare[name[req].status_code not_equal[!=] constant[200]] begin[:]
variable[msg] assign[=] call[<ast.ListComp object at 0x7da1b0e05900>][constant[0]]
<ast.Raise object at 0x7da1b0e06470>
return[name[data]] | keyword[def] identifier[__call_api] ( identifier[self] , identifier[path] , identifier[params] = keyword[None] , identifier[api_url] = identifier[FORECAST_URL] ):
literal[string]
keyword[if] keyword[not] identifier[params] :
identifier[params] = identifier[dict] ()
identifier[payload] ={ literal[string] : identifier[self] . identifier[api_key] }
identifier[payload] . identifier[update] ( identifier[params] )
identifier[url] = literal[string] %( identifier[api_url] , identifier[path] )
identifier[sess] = identifier[self] . identifier[__retry_session] ()
identifier[req] = identifier[sess] . identifier[get] ( identifier[url] , identifier[params] = identifier[payload] , identifier[timeout] = literal[int] )
keyword[try] :
identifier[data] = identifier[req] . identifier[json] ()
keyword[except] identifier[ValueError] :
keyword[raise] identifier[APIException] ( literal[string] )
identifier[self] . identifier[call_response] = identifier[data]
keyword[if] identifier[req] . identifier[status_code] != literal[int] :
identifier[msg] =[ identifier[data] [ identifier[m] ] keyword[for] identifier[m] keyword[in] ( literal[string] , literal[string] , literal[string] ) keyword[if] identifier[m] keyword[in] identifier[data] ][ literal[int] ]
keyword[raise] identifier[Exception] ( identifier[msg] )
keyword[return] identifier[data] | def __call_api(self, path, params=None, api_url=FORECAST_URL):
"""
Call the datapoint api using the requests module
"""
if not params:
params = dict() # depends on [control=['if'], data=[]]
payload = {'key': self.api_key}
payload.update(params)
url = '%s/%s' % (api_url, path)
# Add a timeout to the request.
# The value of 1 second is based on attempting 100 connections to
# datapoint and taking ten times the mean connection time (rounded up).
# Could expose to users in the functions which need to call the api.
#req = requests.get(url, params=payload, timeout=1)
# The wrapper function __retry_session returns a requests.Session
# object. This has a .get() function like requests.get(), so the use
# doesn't change here.
sess = self.__retry_session()
req = sess.get(url, params=payload, timeout=1)
try:
data = req.json() # depends on [control=['try'], data=[]]
except ValueError:
raise APIException('DataPoint has not returned any data, this could be due to an incorrect API key') # depends on [control=['except'], data=[]]
self.call_response = data
if req.status_code != 200:
msg = [data[m] for m in ('message', 'error_message', 'status') if m in data][0]
raise Exception(msg) # depends on [control=['if'], data=[]]
return data |
def inputtemplates(self):
"""Return all input templates as a list (of InputTemplate instances)"""
l = []
for profile in self.profiles:
l += profile.input
return l | def function[inputtemplates, parameter[self]]:
constant[Return all input templates as a list (of InputTemplate instances)]
variable[l] assign[=] list[[]]
for taget[name[profile]] in starred[name[self].profiles] begin[:]
<ast.AugAssign object at 0x7da20c795660>
return[name[l]] | keyword[def] identifier[inputtemplates] ( identifier[self] ):
literal[string]
identifier[l] =[]
keyword[for] identifier[profile] keyword[in] identifier[self] . identifier[profiles] :
identifier[l] += identifier[profile] . identifier[input]
keyword[return] identifier[l] | def inputtemplates(self):
"""Return all input templates as a list (of InputTemplate instances)"""
l = []
for profile in self.profiles:
l += profile.input # depends on [control=['for'], data=['profile']]
return l |
def _parse_italics_and_bold(self):
"""Parse wiki-style italics and bold together (i.e., five ticks)."""
reset = self._head
try:
stack = self._parse(contexts.STYLE_BOLD)
except BadRoute:
self._head = reset
try:
stack = self._parse(contexts.STYLE_ITALICS)
except BadRoute:
self._head = reset
self._emit_text("'''''")
else:
reset = self._head
try:
stack2 = self._parse(contexts.STYLE_BOLD)
except BadRoute:
self._head = reset
self._emit_text("'''")
self._emit_style_tag("i", "''", stack)
else:
self._push()
self._emit_style_tag("i", "''", stack)
self._emit_all(stack2)
self._emit_style_tag("b", "'''", self._pop())
else:
reset = self._head
try:
stack2 = self._parse(contexts.STYLE_ITALICS)
except BadRoute:
self._head = reset
self._emit_text("''")
self._emit_style_tag("b", "'''", stack)
else:
self._push()
self._emit_style_tag("b", "'''", stack)
self._emit_all(stack2)
self._emit_style_tag("i", "''", self._pop()) | def function[_parse_italics_and_bold, parameter[self]]:
constant[Parse wiki-style italics and bold together (i.e., five ticks).]
variable[reset] assign[=] name[self]._head
<ast.Try object at 0x7da18f723f70> | keyword[def] identifier[_parse_italics_and_bold] ( identifier[self] ):
literal[string]
identifier[reset] = identifier[self] . identifier[_head]
keyword[try] :
identifier[stack] = identifier[self] . identifier[_parse] ( identifier[contexts] . identifier[STYLE_BOLD] )
keyword[except] identifier[BadRoute] :
identifier[self] . identifier[_head] = identifier[reset]
keyword[try] :
identifier[stack] = identifier[self] . identifier[_parse] ( identifier[contexts] . identifier[STYLE_ITALICS] )
keyword[except] identifier[BadRoute] :
identifier[self] . identifier[_head] = identifier[reset]
identifier[self] . identifier[_emit_text] ( literal[string] )
keyword[else] :
identifier[reset] = identifier[self] . identifier[_head]
keyword[try] :
identifier[stack2] = identifier[self] . identifier[_parse] ( identifier[contexts] . identifier[STYLE_BOLD] )
keyword[except] identifier[BadRoute] :
identifier[self] . identifier[_head] = identifier[reset]
identifier[self] . identifier[_emit_text] ( literal[string] )
identifier[self] . identifier[_emit_style_tag] ( literal[string] , literal[string] , identifier[stack] )
keyword[else] :
identifier[self] . identifier[_push] ()
identifier[self] . identifier[_emit_style_tag] ( literal[string] , literal[string] , identifier[stack] )
identifier[self] . identifier[_emit_all] ( identifier[stack2] )
identifier[self] . identifier[_emit_style_tag] ( literal[string] , literal[string] , identifier[self] . identifier[_pop] ())
keyword[else] :
identifier[reset] = identifier[self] . identifier[_head]
keyword[try] :
identifier[stack2] = identifier[self] . identifier[_parse] ( identifier[contexts] . identifier[STYLE_ITALICS] )
keyword[except] identifier[BadRoute] :
identifier[self] . identifier[_head] = identifier[reset]
identifier[self] . identifier[_emit_text] ( literal[string] )
identifier[self] . identifier[_emit_style_tag] ( literal[string] , literal[string] , identifier[stack] )
keyword[else] :
identifier[self] . identifier[_push] ()
identifier[self] . identifier[_emit_style_tag] ( literal[string] , literal[string] , identifier[stack] )
identifier[self] . identifier[_emit_all] ( identifier[stack2] )
identifier[self] . identifier[_emit_style_tag] ( literal[string] , literal[string] , identifier[self] . identifier[_pop] ()) | def _parse_italics_and_bold(self):
"""Parse wiki-style italics and bold together (i.e., five ticks)."""
reset = self._head
try:
stack = self._parse(contexts.STYLE_BOLD) # depends on [control=['try'], data=[]]
except BadRoute:
self._head = reset
try:
stack = self._parse(contexts.STYLE_ITALICS) # depends on [control=['try'], data=[]]
except BadRoute:
self._head = reset
self._emit_text("'''''") # depends on [control=['except'], data=[]]
else:
reset = self._head
try:
stack2 = self._parse(contexts.STYLE_BOLD) # depends on [control=['try'], data=[]]
except BadRoute:
self._head = reset
self._emit_text("'''")
self._emit_style_tag('i', "''", stack) # depends on [control=['except'], data=[]]
else:
self._push()
self._emit_style_tag('i', "''", stack)
self._emit_all(stack2)
self._emit_style_tag('b', "'''", self._pop()) # depends on [control=['except'], data=[]]
else:
reset = self._head
try:
stack2 = self._parse(contexts.STYLE_ITALICS) # depends on [control=['try'], data=[]]
except BadRoute:
self._head = reset
self._emit_text("''")
self._emit_style_tag('b', "'''", stack) # depends on [control=['except'], data=[]]
else:
self._push()
self._emit_style_tag('b', "'''", stack)
self._emit_all(stack2)
self._emit_style_tag('i', "''", self._pop()) |
def getAsKmlPng(self, session, path=None, documentName=None, colorRamp=ColorRampEnum.COLOR_RAMP_HUE, alpha=1.0,
noDataValue=None, drawOrder=0, cellSize=None, resampleMethod='NearestNeighbour'):
"""
Retrieve the raster as a PNG image ground overlay KML format. Coarse grid resolutions must be resampled to
smaller cell/pixel sizes to avoid a "fuzzy" look. Cells with the no data value are excluded.
Args:
session (:mod:`sqlalchemy.orm.session.Session`): SQLAlchemy session object bound to PostGIS enabled database.
path (str, optional): Path to file where KML file will be written. Defaults to None.
documentName (str, optional): Name of the KML document. This will be the name that appears in the legend.
Defaults to 'Stream Network'.
colorRamp (:mod:`mapkit.ColorRampGenerator.ColorRampEnum` or dict, optional): Use ColorRampEnum to select a
default color ramp or a dictionary with keys 'colors' and 'interpolatedPoints' to specify a custom color
ramp. The 'colors' key must be a list of RGB integer tuples (e.g.: (255, 0, 0)) and the
'interpolatedPoints' must be an integer representing the number of points to interpolate between each
color given in the colors list.
alpha (float, optional): Set transparency of visualization. Value between 0.0 and 1.0 where 1.0 is 100%
opaque and 0.0 is 100% transparent. Defaults to 1.0.
noDataValue (float, optional): The value to treat as no data when generating visualizations of rasters.
Defaults to 0.0.
drawOrder (int, optional): Set the draw order of the images. Defaults to 0.
cellSize (float, optional): Define the cell size in the units of the project projection at which to resample
the raster to generate the PNG. Defaults to None which will cause the PNG to be generated with the
original raster cell size. It is generally better to set this to a size smaller than the original cell
size to obtain a higher resolution image. However, computation time increases exponentially as the cell
size is decreased.
resampleMethod (str, optional): If cellSize is set, this method will be used to resample the raster. Valid
values include: NearestNeighbour, Bilinear, Cubic, CubicSpline, and Lanczos. Defaults to
NearestNeighbour.
Returns:
(str, list): Returns a KML string and a list of binary strings that are the PNG images.
"""
if type(self.raster) != type(None):
# Set Document Name
if documentName is None:
try:
documentName = self.filename
except AttributeError:
documentName = 'default'
# Set no data value to default
if noDataValue is None:
noDataValue = self.defaultNoDataValue
# Make sure the raster field is valid
converter = RasterConverter(sqlAlchemyEngineOrSession=session)
# Configure color ramp
if isinstance(colorRamp, dict):
converter.setCustomColorRamp(colorRamp['colors'], colorRamp['interpolatedPoints'])
else:
converter.setDefaultColorRamp(colorRamp)
kmlString, binaryPngString = converter.getAsKmlPng(tableName=self.tableName,
rasterId=self.id,
rasterIdFieldName='id',
rasterFieldName=self.rasterColumnName,
documentName=documentName,
alpha=alpha,
drawOrder=drawOrder,
noDataValue=noDataValue,
cellSize=cellSize,
resampleMethod=resampleMethod,
discreet=self.discreet)
if path:
directory = os.path.dirname(path)
archiveName = (os.path.split(path)[1]).split('.')[0]
kmzPath = os.path.join(directory, (archiveName + '.kmz'))
with ZipFile(kmzPath, 'w') as kmz:
kmz.writestr(archiveName + '.kml', kmlString)
kmz.writestr('raster.png', binaryPngString)
return kmlString, binaryPngString | def function[getAsKmlPng, parameter[self, session, path, documentName, colorRamp, alpha, noDataValue, drawOrder, cellSize, resampleMethod]]:
constant[
Retrieve the raster as a PNG image ground overlay KML format. Coarse grid resolutions must be resampled to
smaller cell/pixel sizes to avoid a "fuzzy" look. Cells with the no data value are excluded.
Args:
session (:mod:`sqlalchemy.orm.session.Session`): SQLAlchemy session object bound to PostGIS enabled database.
path (str, optional): Path to file where KML file will be written. Defaults to None.
documentName (str, optional): Name of the KML document. This will be the name that appears in the legend.
Defaults to 'Stream Network'.
colorRamp (:mod:`mapkit.ColorRampGenerator.ColorRampEnum` or dict, optional): Use ColorRampEnum to select a
default color ramp or a dictionary with keys 'colors' and 'interpolatedPoints' to specify a custom color
ramp. The 'colors' key must be a list of RGB integer tuples (e.g.: (255, 0, 0)) and the
'interpolatedPoints' must be an integer representing the number of points to interpolate between each
color given in the colors list.
alpha (float, optional): Set transparency of visualization. Value between 0.0 and 1.0 where 1.0 is 100%
opaque and 0.0 is 100% transparent. Defaults to 1.0.
noDataValue (float, optional): The value to treat as no data when generating visualizations of rasters.
Defaults to 0.0.
drawOrder (int, optional): Set the draw order of the images. Defaults to 0.
cellSize (float, optional): Define the cell size in the units of the project projection at which to resample
the raster to generate the PNG. Defaults to None which will cause the PNG to be generated with the
original raster cell size. It is generally better to set this to a size smaller than the original cell
size to obtain a higher resolution image. However, computation time increases exponentially as the cell
size is decreased.
resampleMethod (str, optional): If cellSize is set, this method will be used to resample the raster. Valid
values include: NearestNeighbour, Bilinear, Cubic, CubicSpline, and Lanczos. Defaults to
NearestNeighbour.
Returns:
(str, list): Returns a KML string and a list of binary strings that are the PNG images.
]
if compare[call[name[type], parameter[name[self].raster]] not_equal[!=] call[name[type], parameter[constant[None]]]] begin[:]
if compare[name[documentName] is constant[None]] begin[:]
<ast.Try object at 0x7da20c990250>
if compare[name[noDataValue] is constant[None]] begin[:]
variable[noDataValue] assign[=] name[self].defaultNoDataValue
variable[converter] assign[=] call[name[RasterConverter], parameter[]]
if call[name[isinstance], parameter[name[colorRamp], name[dict]]] begin[:]
call[name[converter].setCustomColorRamp, parameter[call[name[colorRamp]][constant[colors]], call[name[colorRamp]][constant[interpolatedPoints]]]]
<ast.Tuple object at 0x7da20c9920e0> assign[=] call[name[converter].getAsKmlPng, parameter[]]
if name[path] begin[:]
variable[directory] assign[=] call[name[os].path.dirname, parameter[name[path]]]
variable[archiveName] assign[=] call[call[call[call[name[os].path.split, parameter[name[path]]]][constant[1]].split, parameter[constant[.]]]][constant[0]]
variable[kmzPath] assign[=] call[name[os].path.join, parameter[name[directory], binary_operation[name[archiveName] + constant[.kmz]]]]
with call[name[ZipFile], parameter[name[kmzPath], constant[w]]] begin[:]
call[name[kmz].writestr, parameter[binary_operation[name[archiveName] + constant[.kml]], name[kmlString]]]
call[name[kmz].writestr, parameter[constant[raster.png], name[binaryPngString]]]
return[tuple[[<ast.Name object at 0x7da20c991750>, <ast.Name object at 0x7da20c991690>]]] | keyword[def] identifier[getAsKmlPng] ( identifier[self] , identifier[session] , identifier[path] = keyword[None] , identifier[documentName] = keyword[None] , identifier[colorRamp] = identifier[ColorRampEnum] . identifier[COLOR_RAMP_HUE] , identifier[alpha] = literal[int] ,
identifier[noDataValue] = keyword[None] , identifier[drawOrder] = literal[int] , identifier[cellSize] = keyword[None] , identifier[resampleMethod] = literal[string] ):
literal[string]
keyword[if] identifier[type] ( identifier[self] . identifier[raster] )!= identifier[type] ( keyword[None] ):
keyword[if] identifier[documentName] keyword[is] keyword[None] :
keyword[try] :
identifier[documentName] = identifier[self] . identifier[filename]
keyword[except] identifier[AttributeError] :
identifier[documentName] = literal[string]
keyword[if] identifier[noDataValue] keyword[is] keyword[None] :
identifier[noDataValue] = identifier[self] . identifier[defaultNoDataValue]
identifier[converter] = identifier[RasterConverter] ( identifier[sqlAlchemyEngineOrSession] = identifier[session] )
keyword[if] identifier[isinstance] ( identifier[colorRamp] , identifier[dict] ):
identifier[converter] . identifier[setCustomColorRamp] ( identifier[colorRamp] [ literal[string] ], identifier[colorRamp] [ literal[string] ])
keyword[else] :
identifier[converter] . identifier[setDefaultColorRamp] ( identifier[colorRamp] )
identifier[kmlString] , identifier[binaryPngString] = identifier[converter] . identifier[getAsKmlPng] ( identifier[tableName] = identifier[self] . identifier[tableName] ,
identifier[rasterId] = identifier[self] . identifier[id] ,
identifier[rasterIdFieldName] = literal[string] ,
identifier[rasterFieldName] = identifier[self] . identifier[rasterColumnName] ,
identifier[documentName] = identifier[documentName] ,
identifier[alpha] = identifier[alpha] ,
identifier[drawOrder] = identifier[drawOrder] ,
identifier[noDataValue] = identifier[noDataValue] ,
identifier[cellSize] = identifier[cellSize] ,
identifier[resampleMethod] = identifier[resampleMethod] ,
identifier[discreet] = identifier[self] . identifier[discreet] )
keyword[if] identifier[path] :
identifier[directory] = identifier[os] . identifier[path] . identifier[dirname] ( identifier[path] )
identifier[archiveName] =( identifier[os] . identifier[path] . identifier[split] ( identifier[path] )[ literal[int] ]). identifier[split] ( literal[string] )[ literal[int] ]
identifier[kmzPath] = identifier[os] . identifier[path] . identifier[join] ( identifier[directory] ,( identifier[archiveName] + literal[string] ))
keyword[with] identifier[ZipFile] ( identifier[kmzPath] , literal[string] ) keyword[as] identifier[kmz] :
identifier[kmz] . identifier[writestr] ( identifier[archiveName] + literal[string] , identifier[kmlString] )
identifier[kmz] . identifier[writestr] ( literal[string] , identifier[binaryPngString] )
keyword[return] identifier[kmlString] , identifier[binaryPngString] | def getAsKmlPng(self, session, path=None, documentName=None, colorRamp=ColorRampEnum.COLOR_RAMP_HUE, alpha=1.0, noDataValue=None, drawOrder=0, cellSize=None, resampleMethod='NearestNeighbour'):
"""
Retrieve the raster as a PNG image ground overlay KML format. Coarse grid resolutions must be resampled to
smaller cell/pixel sizes to avoid a "fuzzy" look. Cells with the no data value are excluded.
Args:
session (:mod:`sqlalchemy.orm.session.Session`): SQLAlchemy session object bound to PostGIS enabled database.
path (str, optional): Path to file where KML file will be written. Defaults to None.
documentName (str, optional): Name of the KML document. This will be the name that appears in the legend.
Defaults to 'Stream Network'.
colorRamp (:mod:`mapkit.ColorRampGenerator.ColorRampEnum` or dict, optional): Use ColorRampEnum to select a
default color ramp or a dictionary with keys 'colors' and 'interpolatedPoints' to specify a custom color
ramp. The 'colors' key must be a list of RGB integer tuples (e.g.: (255, 0, 0)) and the
'interpolatedPoints' must be an integer representing the number of points to interpolate between each
color given in the colors list.
alpha (float, optional): Set transparency of visualization. Value between 0.0 and 1.0 where 1.0 is 100%
opaque and 0.0 is 100% transparent. Defaults to 1.0.
noDataValue (float, optional): The value to treat as no data when generating visualizations of rasters.
Defaults to 0.0.
drawOrder (int, optional): Set the draw order of the images. Defaults to 0.
cellSize (float, optional): Define the cell size in the units of the project projection at which to resample
the raster to generate the PNG. Defaults to None which will cause the PNG to be generated with the
original raster cell size. It is generally better to set this to a size smaller than the original cell
size to obtain a higher resolution image. However, computation time increases exponentially as the cell
size is decreased.
resampleMethod (str, optional): If cellSize is set, this method will be used to resample the raster. Valid
values include: NearestNeighbour, Bilinear, Cubic, CubicSpline, and Lanczos. Defaults to
NearestNeighbour.
Returns:
(str, list): Returns a KML string and a list of binary strings that are the PNG images.
"""
if type(self.raster) != type(None):
# Set Document Name
if documentName is None:
try:
documentName = self.filename # depends on [control=['try'], data=[]]
except AttributeError:
documentName = 'default' # depends on [control=['except'], data=[]] # depends on [control=['if'], data=['documentName']]
# Set no data value to default
if noDataValue is None:
noDataValue = self.defaultNoDataValue # depends on [control=['if'], data=['noDataValue']]
# Make sure the raster field is valid
converter = RasterConverter(sqlAlchemyEngineOrSession=session)
# Configure color ramp
if isinstance(colorRamp, dict):
converter.setCustomColorRamp(colorRamp['colors'], colorRamp['interpolatedPoints']) # depends on [control=['if'], data=[]]
else:
converter.setDefaultColorRamp(colorRamp)
(kmlString, binaryPngString) = converter.getAsKmlPng(tableName=self.tableName, rasterId=self.id, rasterIdFieldName='id', rasterFieldName=self.rasterColumnName, documentName=documentName, alpha=alpha, drawOrder=drawOrder, noDataValue=noDataValue, cellSize=cellSize, resampleMethod=resampleMethod, discreet=self.discreet)
if path:
directory = os.path.dirname(path)
archiveName = os.path.split(path)[1].split('.')[0]
kmzPath = os.path.join(directory, archiveName + '.kmz')
with ZipFile(kmzPath, 'w') as kmz:
kmz.writestr(archiveName + '.kml', kmlString)
kmz.writestr('raster.png', binaryPngString) # depends on [control=['with'], data=['kmz']] # depends on [control=['if'], data=[]]
return (kmlString, binaryPngString) # depends on [control=['if'], data=[]] |
def format_value(value, sf=3):
"""
convert a parameter value into a formatted string with certain significant figures
:param value: the value to be formatted
:param sf: number of significant figures
:return: str
"""
if isinstance(value, str):
return value
elif isinstance(value, list) or isinstance(value, np.ndarray):
value = list(value)
for i in range(len(value)):
vv = format_value(value[i])
value[i] = vv
return "[" + ", ".join(value) + "]"
elif value is None:
return "N/A"
else:
fmt_str = "{0:.%ig}" % sf
return fmt_str.format(value) | def function[format_value, parameter[value, sf]]:
constant[
convert a parameter value into a formatted string with certain significant figures
:param value: the value to be formatted
:param sf: number of significant figures
:return: str
]
if call[name[isinstance], parameter[name[value], name[str]]] begin[:]
return[name[value]] | keyword[def] identifier[format_value] ( identifier[value] , identifier[sf] = literal[int] ):
literal[string]
keyword[if] identifier[isinstance] ( identifier[value] , identifier[str] ):
keyword[return] identifier[value]
keyword[elif] identifier[isinstance] ( identifier[value] , identifier[list] ) keyword[or] identifier[isinstance] ( identifier[value] , identifier[np] . identifier[ndarray] ):
identifier[value] = identifier[list] ( identifier[value] )
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[len] ( identifier[value] )):
identifier[vv] = identifier[format_value] ( identifier[value] [ identifier[i] ])
identifier[value] [ identifier[i] ]= identifier[vv]
keyword[return] literal[string] + literal[string] . identifier[join] ( identifier[value] )+ literal[string]
keyword[elif] identifier[value] keyword[is] keyword[None] :
keyword[return] literal[string]
keyword[else] :
identifier[fmt_str] = literal[string] % identifier[sf]
keyword[return] identifier[fmt_str] . identifier[format] ( identifier[value] ) | def format_value(value, sf=3):
"""
convert a parameter value into a formatted string with certain significant figures
:param value: the value to be formatted
:param sf: number of significant figures
:return: str
"""
if isinstance(value, str):
return value # depends on [control=['if'], data=[]]
elif isinstance(value, list) or isinstance(value, np.ndarray):
value = list(value)
for i in range(len(value)):
vv = format_value(value[i])
value[i] = vv # depends on [control=['for'], data=['i']]
return '[' + ', '.join(value) + ']' # depends on [control=['if'], data=[]]
elif value is None:
return 'N/A' # depends on [control=['if'], data=[]]
else:
fmt_str = '{0:.%ig}' % sf
return fmt_str.format(value) |
def get_user_permissions(uid, **kwargs):
"""
Get the roles for a user.
@param user_id
"""
try:
_get_user(uid)
user_perms = db.DBSession.query(Perm).filter(Perm.id==RolePerm.perm_id,
RolePerm.role_id==Role.id,
Role.id==RoleUser.role_id,
RoleUser.user_id==uid).all()
return user_perms
except:
raise HydraError("Permissions not found for user (user_id={})".format(uid)) | def function[get_user_permissions, parameter[uid]]:
constant[
Get the roles for a user.
@param user_id
]
<ast.Try object at 0x7da20e961150> | keyword[def] identifier[get_user_permissions] ( identifier[uid] ,** identifier[kwargs] ):
literal[string]
keyword[try] :
identifier[_get_user] ( identifier[uid] )
identifier[user_perms] = identifier[db] . identifier[DBSession] . identifier[query] ( identifier[Perm] ). identifier[filter] ( identifier[Perm] . identifier[id] == identifier[RolePerm] . identifier[perm_id] ,
identifier[RolePerm] . identifier[role_id] == identifier[Role] . identifier[id] ,
identifier[Role] . identifier[id] == identifier[RoleUser] . identifier[role_id] ,
identifier[RoleUser] . identifier[user_id] == identifier[uid] ). identifier[all] ()
keyword[return] identifier[user_perms]
keyword[except] :
keyword[raise] identifier[HydraError] ( literal[string] . identifier[format] ( identifier[uid] )) | def get_user_permissions(uid, **kwargs):
"""
Get the roles for a user.
@param user_id
"""
try:
_get_user(uid)
user_perms = db.DBSession.query(Perm).filter(Perm.id == RolePerm.perm_id, RolePerm.role_id == Role.id, Role.id == RoleUser.role_id, RoleUser.user_id == uid).all()
return user_perms # depends on [control=['try'], data=[]]
except:
raise HydraError('Permissions not found for user (user_id={})'.format(uid)) # depends on [control=['except'], data=[]] |
def get(self, key=None, view=None):
"""Register a new model (models)"""
self.set_header("Access-Control-Allow-Origin", "*")
self.set_header("Content-Type", "application/json")
if key is not None:
value = {}
value.update(self.database[key])
if view is not None:
# generate a context with the relevant variables
context = {}
context["value"] = value
context["ctx"] = self.ctx
result = json.dumps(getattr(views, view)(context))
else:
result = json.dumps(value)
else:
result = json.dumps(self.database.values())
self.write(result) | def function[get, parameter[self, key, view]]:
constant[Register a new model (models)]
call[name[self].set_header, parameter[constant[Access-Control-Allow-Origin], constant[*]]]
call[name[self].set_header, parameter[constant[Content-Type], constant[application/json]]]
if compare[name[key] is_not constant[None]] begin[:]
variable[value] assign[=] dictionary[[], []]
call[name[value].update, parameter[call[name[self].database][name[key]]]]
if compare[name[view] is_not constant[None]] begin[:]
variable[context] assign[=] dictionary[[], []]
call[name[context]][constant[value]] assign[=] name[value]
call[name[context]][constant[ctx]] assign[=] name[self].ctx
variable[result] assign[=] call[name[json].dumps, parameter[call[call[name[getattr], parameter[name[views], name[view]]], parameter[name[context]]]]]
call[name[self].write, parameter[name[result]]] | keyword[def] identifier[get] ( identifier[self] , identifier[key] = keyword[None] , identifier[view] = keyword[None] ):
literal[string]
identifier[self] . identifier[set_header] ( literal[string] , literal[string] )
identifier[self] . identifier[set_header] ( literal[string] , literal[string] )
keyword[if] identifier[key] keyword[is] keyword[not] keyword[None] :
identifier[value] ={}
identifier[value] . identifier[update] ( identifier[self] . identifier[database] [ identifier[key] ])
keyword[if] identifier[view] keyword[is] keyword[not] keyword[None] :
identifier[context] ={}
identifier[context] [ literal[string] ]= identifier[value]
identifier[context] [ literal[string] ]= identifier[self] . identifier[ctx]
identifier[result] = identifier[json] . identifier[dumps] ( identifier[getattr] ( identifier[views] , identifier[view] )( identifier[context] ))
keyword[else] :
identifier[result] = identifier[json] . identifier[dumps] ( identifier[value] )
keyword[else] :
identifier[result] = identifier[json] . identifier[dumps] ( identifier[self] . identifier[database] . identifier[values] ())
identifier[self] . identifier[write] ( identifier[result] ) | def get(self, key=None, view=None):
"""Register a new model (models)"""
self.set_header('Access-Control-Allow-Origin', '*')
self.set_header('Content-Type', 'application/json')
if key is not None:
value = {}
value.update(self.database[key])
if view is not None:
# generate a context with the relevant variables
context = {}
context['value'] = value
context['ctx'] = self.ctx
result = json.dumps(getattr(views, view)(context)) # depends on [control=['if'], data=['view']]
else:
result = json.dumps(value) # depends on [control=['if'], data=['key']]
else:
result = json.dumps(self.database.values())
self.write(result) |
def ambigcutters(seq):
"""
Returns both resolutions of a cut site that has an ambiguous base in
it, else the single cut site
"""
resos = []
if any([i in list("RKSYWM") for i in seq]):
for base in list("RKSYWM"):
if base in seq:
resos.append(seq.replace(base, AMBIGS[base][0]))
resos.append(seq.replace(base, AMBIGS[base][1]))
return resos
else:
return [seq, ""] | def function[ambigcutters, parameter[seq]]:
constant[
Returns both resolutions of a cut site that has an ambiguous base in
it, else the single cut site
]
variable[resos] assign[=] list[[]]
if call[name[any], parameter[<ast.ListComp object at 0x7da18ede4070>]] begin[:]
for taget[name[base]] in starred[call[name[list], parameter[constant[RKSYWM]]]] begin[:]
if compare[name[base] in name[seq]] begin[:]
call[name[resos].append, parameter[call[name[seq].replace, parameter[name[base], call[call[name[AMBIGS]][name[base]]][constant[0]]]]]]
call[name[resos].append, parameter[call[name[seq].replace, parameter[name[base], call[call[name[AMBIGS]][name[base]]][constant[1]]]]]]
return[name[resos]] | keyword[def] identifier[ambigcutters] ( identifier[seq] ):
literal[string]
identifier[resos] =[]
keyword[if] identifier[any] ([ identifier[i] keyword[in] identifier[list] ( literal[string] ) keyword[for] identifier[i] keyword[in] identifier[seq] ]):
keyword[for] identifier[base] keyword[in] identifier[list] ( literal[string] ):
keyword[if] identifier[base] keyword[in] identifier[seq] :
identifier[resos] . identifier[append] ( identifier[seq] . identifier[replace] ( identifier[base] , identifier[AMBIGS] [ identifier[base] ][ literal[int] ]))
identifier[resos] . identifier[append] ( identifier[seq] . identifier[replace] ( identifier[base] , identifier[AMBIGS] [ identifier[base] ][ literal[int] ]))
keyword[return] identifier[resos]
keyword[else] :
keyword[return] [ identifier[seq] , literal[string] ] | def ambigcutters(seq):
"""
Returns both resolutions of a cut site that has an ambiguous base in
it, else the single cut site
"""
resos = []
if any([i in list('RKSYWM') for i in seq]):
for base in list('RKSYWM'):
if base in seq:
resos.append(seq.replace(base, AMBIGS[base][0]))
resos.append(seq.replace(base, AMBIGS[base][1])) # depends on [control=['if'], data=['base', 'seq']] # depends on [control=['for'], data=['base']]
return resos # depends on [control=['if'], data=[]]
else:
return [seq, ''] |
def pause(self):
"""Pause the music"""
self.pause_time = self.get_time()
self.paused = True
self.player.pause() | def function[pause, parameter[self]]:
constant[Pause the music]
name[self].pause_time assign[=] call[name[self].get_time, parameter[]]
name[self].paused assign[=] constant[True]
call[name[self].player.pause, parameter[]] | keyword[def] identifier[pause] ( identifier[self] ):
literal[string]
identifier[self] . identifier[pause_time] = identifier[self] . identifier[get_time] ()
identifier[self] . identifier[paused] = keyword[True]
identifier[self] . identifier[player] . identifier[pause] () | def pause(self):
"""Pause the music"""
self.pause_time = self.get_time()
self.paused = True
self.player.pause() |
def usage_plan_association_present(name, plan_name, api_stages, region=None, key=None, keyid=None, profile=None):
'''
Ensures usage plan identified by name is added to provided api_stages
.. versionadded:: 2017.7.0
name
name of the state
plan_name
name of the plan to use
api_stages
list of dictionaries, where each dictionary consists of the following keys:
apiId
apiId of the api to attach usage plan to
stage
stage name of the api to attach usage plan to
.. code-block:: yaml
UsagePlanAssociationPresent:
boto_apigateway.usage_plan_association_present:
- plan_name: my_plan
- api_stages:
- apiId: 9kb0404ec0
stage: my_stage
- apiId: l9v7o2aj90
stage: my_stage
- profile: my_profile
'''
ret = {'name': name,
'result': True,
'comment': '',
'changes': {}
}
try:
common_args = dict([('region', region),
('key', key),
('keyid', keyid),
('profile', profile)])
existing = __salt__['boto_apigateway.describe_usage_plans'](name=plan_name, **common_args)
if 'error' in existing:
ret['result'] = False
ret['comment'] = 'Failed to describe existing usage plans'
return ret
if not existing['plans']:
ret['comment'] = 'Usage plan {0} does not exist'.format(plan_name)
ret['result'] = False
return ret
if len(existing['plans']) != 1:
ret['comment'] = 'There are multiple usage plans with the same name - it is not supported'
ret['result'] = False
return ret
plan = existing['plans'][0]
plan_id = plan['id']
plan_stages = plan.get('apiStages', [])
stages_to_add = []
for api in api_stages:
if api not in plan_stages:
stages_to_add.append(api)
if not stages_to_add:
ret['comment'] = 'Usage plan is already asssociated to all api stages'
return ret
result = __salt__['boto_apigateway.attach_usage_plan_to_apis'](plan_id, stages_to_add, **common_args)
if 'error' in result:
ret['comment'] = 'Failed to associate a usage plan {0} to the apis {1}, {2}'.format(plan_name,
stages_to_add,
result['error'])
ret['result'] = False
return ret
ret['comment'] = 'successfully associated usage plan to apis'
ret['changes']['old'] = plan_stages
ret['changes']['new'] = result.get('result', {}).get('apiStages', [])
except (ValueError, IOError) as e:
ret['result'] = False
ret['comment'] = '{0}'.format(e.args)
return ret | def function[usage_plan_association_present, parameter[name, plan_name, api_stages, region, key, keyid, profile]]:
constant[
Ensures usage plan identified by name is added to provided api_stages
.. versionadded:: 2017.7.0
name
name of the state
plan_name
name of the plan to use
api_stages
list of dictionaries, where each dictionary consists of the following keys:
apiId
apiId of the api to attach usage plan to
stage
stage name of the api to attach usage plan to
.. code-block:: yaml
UsagePlanAssociationPresent:
boto_apigateway.usage_plan_association_present:
- plan_name: my_plan
- api_stages:
- apiId: 9kb0404ec0
stage: my_stage
- apiId: l9v7o2aj90
stage: my_stage
- profile: my_profile
]
variable[ret] assign[=] dictionary[[<ast.Constant object at 0x7da1b21f27a0>, <ast.Constant object at 0x7da1b21f3280>, <ast.Constant object at 0x7da1b21f3760>, <ast.Constant object at 0x7da1b21f2290>], [<ast.Name object at 0x7da1b21f3520>, <ast.Constant object at 0x7da1b21f3160>, <ast.Constant object at 0x7da1b21f0790>, <ast.Dict object at 0x7da1b21f0a30>]]
<ast.Try object at 0x7da1b21f2770>
return[name[ret]] | keyword[def] identifier[usage_plan_association_present] ( identifier[name] , identifier[plan_name] , identifier[api_stages] , identifier[region] = keyword[None] , identifier[key] = keyword[None] , identifier[keyid] = keyword[None] , identifier[profile] = keyword[None] ):
literal[string]
identifier[ret] ={ literal[string] : identifier[name] ,
literal[string] : keyword[True] ,
literal[string] : literal[string] ,
literal[string] :{}
}
keyword[try] :
identifier[common_args] = identifier[dict] ([( literal[string] , identifier[region] ),
( literal[string] , identifier[key] ),
( literal[string] , identifier[keyid] ),
( literal[string] , identifier[profile] )])
identifier[existing] = identifier[__salt__] [ literal[string] ]( identifier[name] = identifier[plan_name] ,** identifier[common_args] )
keyword[if] literal[string] keyword[in] identifier[existing] :
identifier[ret] [ literal[string] ]= keyword[False]
identifier[ret] [ literal[string] ]= literal[string]
keyword[return] identifier[ret]
keyword[if] keyword[not] identifier[existing] [ literal[string] ]:
identifier[ret] [ literal[string] ]= literal[string] . identifier[format] ( identifier[plan_name] )
identifier[ret] [ literal[string] ]= keyword[False]
keyword[return] identifier[ret]
keyword[if] identifier[len] ( identifier[existing] [ literal[string] ])!= literal[int] :
identifier[ret] [ literal[string] ]= literal[string]
identifier[ret] [ literal[string] ]= keyword[False]
keyword[return] identifier[ret]
identifier[plan] = identifier[existing] [ literal[string] ][ literal[int] ]
identifier[plan_id] = identifier[plan] [ literal[string] ]
identifier[plan_stages] = identifier[plan] . identifier[get] ( literal[string] ,[])
identifier[stages_to_add] =[]
keyword[for] identifier[api] keyword[in] identifier[api_stages] :
keyword[if] identifier[api] keyword[not] keyword[in] identifier[plan_stages] :
identifier[stages_to_add] . identifier[append] ( identifier[api] )
keyword[if] keyword[not] identifier[stages_to_add] :
identifier[ret] [ literal[string] ]= literal[string]
keyword[return] identifier[ret]
identifier[result] = identifier[__salt__] [ literal[string] ]( identifier[plan_id] , identifier[stages_to_add] ,** identifier[common_args] )
keyword[if] literal[string] keyword[in] identifier[result] :
identifier[ret] [ literal[string] ]= literal[string] . identifier[format] ( identifier[plan_name] ,
identifier[stages_to_add] ,
identifier[result] [ literal[string] ])
identifier[ret] [ literal[string] ]= keyword[False]
keyword[return] identifier[ret]
identifier[ret] [ literal[string] ]= literal[string]
identifier[ret] [ literal[string] ][ literal[string] ]= identifier[plan_stages]
identifier[ret] [ literal[string] ][ literal[string] ]= identifier[result] . identifier[get] ( literal[string] ,{}). identifier[get] ( literal[string] ,[])
keyword[except] ( identifier[ValueError] , identifier[IOError] ) keyword[as] identifier[e] :
identifier[ret] [ literal[string] ]= keyword[False]
identifier[ret] [ literal[string] ]= literal[string] . identifier[format] ( identifier[e] . identifier[args] )
keyword[return] identifier[ret] | def usage_plan_association_present(name, plan_name, api_stages, region=None, key=None, keyid=None, profile=None):
"""
Ensures usage plan identified by name is added to provided api_stages
.. versionadded:: 2017.7.0
name
name of the state
plan_name
name of the plan to use
api_stages
list of dictionaries, where each dictionary consists of the following keys:
apiId
apiId of the api to attach usage plan to
stage
stage name of the api to attach usage plan to
.. code-block:: yaml
UsagePlanAssociationPresent:
boto_apigateway.usage_plan_association_present:
- plan_name: my_plan
- api_stages:
- apiId: 9kb0404ec0
stage: my_stage
- apiId: l9v7o2aj90
stage: my_stage
- profile: my_profile
"""
ret = {'name': name, 'result': True, 'comment': '', 'changes': {}}
try:
common_args = dict([('region', region), ('key', key), ('keyid', keyid), ('profile', profile)])
existing = __salt__['boto_apigateway.describe_usage_plans'](name=plan_name, **common_args)
if 'error' in existing:
ret['result'] = False
ret['comment'] = 'Failed to describe existing usage plans'
return ret # depends on [control=['if'], data=[]]
if not existing['plans']:
ret['comment'] = 'Usage plan {0} does not exist'.format(plan_name)
ret['result'] = False
return ret # depends on [control=['if'], data=[]]
if len(existing['plans']) != 1:
ret['comment'] = 'There are multiple usage plans with the same name - it is not supported'
ret['result'] = False
return ret # depends on [control=['if'], data=[]]
plan = existing['plans'][0]
plan_id = plan['id']
plan_stages = plan.get('apiStages', [])
stages_to_add = []
for api in api_stages:
if api not in plan_stages:
stages_to_add.append(api) # depends on [control=['if'], data=['api']] # depends on [control=['for'], data=['api']]
if not stages_to_add:
ret['comment'] = 'Usage plan is already asssociated to all api stages'
return ret # depends on [control=['if'], data=[]]
result = __salt__['boto_apigateway.attach_usage_plan_to_apis'](plan_id, stages_to_add, **common_args)
if 'error' in result:
ret['comment'] = 'Failed to associate a usage plan {0} to the apis {1}, {2}'.format(plan_name, stages_to_add, result['error'])
ret['result'] = False
return ret # depends on [control=['if'], data=['result']]
ret['comment'] = 'successfully associated usage plan to apis'
ret['changes']['old'] = plan_stages
ret['changes']['new'] = result.get('result', {}).get('apiStages', []) # depends on [control=['try'], data=[]]
except (ValueError, IOError) as e:
ret['result'] = False
ret['comment'] = '{0}'.format(e.args) # depends on [control=['except'], data=['e']]
return ret |
def suspendMember(self, clusterId, memberId):
"""
Parameters:
- clusterId
- memberId
"""
self.send_suspendMember(clusterId, memberId)
return self.recv_suspendMember() | def function[suspendMember, parameter[self, clusterId, memberId]]:
constant[
Parameters:
- clusterId
- memberId
]
call[name[self].send_suspendMember, parameter[name[clusterId], name[memberId]]]
return[call[name[self].recv_suspendMember, parameter[]]] | keyword[def] identifier[suspendMember] ( identifier[self] , identifier[clusterId] , identifier[memberId] ):
literal[string]
identifier[self] . identifier[send_suspendMember] ( identifier[clusterId] , identifier[memberId] )
keyword[return] identifier[self] . identifier[recv_suspendMember] () | def suspendMember(self, clusterId, memberId):
"""
Parameters:
- clusterId
- memberId
"""
self.send_suspendMember(clusterId, memberId)
return self.recv_suspendMember() |
def tlg_plaintext_cleanup(text, rm_punctuation=False, rm_periods=False):
"""Remove and substitute post-processing for Greek TLG text.
TODO: Surely more junk to pull out. Please submit bugs!
TODO: {.+?}|\(.+?\) working?
TODO: This is a rather slow now, help in speeding up welcome.
"""
remove_comp = regex.compile(r'-\n|«|»|<|>|\.\.\.|‘|’|_|{.+?}|\(.+?\)|[a-zA-Z0-9]', flags=regex.VERSION1)
text = remove_comp.sub('', text)
new_text = None
if rm_punctuation:
new_text = ''
punctuation = [',', '·', ':', '"', "'", '?', '-', '!', '*', '[', ']', '{', '}']
if rm_periods:
punctuation += ['.', ';']
for char in text:
# second try at rming some punctuation; merge with above regex
if char in punctuation:
pass
else:
new_text += char
if new_text:
text = new_text
# replace line breaks w/ space
replace_comp = regex.compile(r'\n')
text = replace_comp.sub(' ', text)
comp_space = regex.compile(r'\s+')
text = comp_space.sub(' ', text)
return text | def function[tlg_plaintext_cleanup, parameter[text, rm_punctuation, rm_periods]]:
constant[Remove and substitute post-processing for Greek TLG text.
TODO: Surely more junk to pull out. Please submit bugs!
TODO: {.+?}|\(.+?\) working?
TODO: This is a rather slow now, help in speeding up welcome.
]
variable[remove_comp] assign[=] call[name[regex].compile, parameter[constant[-\n|«|»|<|>|\.\.\.|‘|’|_|{.+?}|\(.+?\)|[a-zA-Z0-9]]]]
variable[text] assign[=] call[name[remove_comp].sub, parameter[constant[], name[text]]]
variable[new_text] assign[=] constant[None]
if name[rm_punctuation] begin[:]
variable[new_text] assign[=] constant[]
variable[punctuation] assign[=] list[[<ast.Constant object at 0x7da18eb57b20>, <ast.Constant object at 0x7da18eb54ee0>, <ast.Constant object at 0x7da18eb54a90>, <ast.Constant object at 0x7da18eb57400>, <ast.Constant object at 0x7da18eb578b0>, <ast.Constant object at 0x7da18eb57bb0>, <ast.Constant object at 0x7da18eb54ca0>, <ast.Constant object at 0x7da18eb55c00>, <ast.Constant object at 0x7da18eb55f00>, <ast.Constant object at 0x7da18eb56110>, <ast.Constant object at 0x7da18eb55690>, <ast.Constant object at 0x7da18eb54df0>, <ast.Constant object at 0x7da18eb57f40>]]
if name[rm_periods] begin[:]
<ast.AugAssign object at 0x7da18eb551b0>
for taget[name[char]] in starred[name[text]] begin[:]
if compare[name[char] in name[punctuation]] begin[:]
pass
if name[new_text] begin[:]
variable[text] assign[=] name[new_text]
variable[replace_comp] assign[=] call[name[regex].compile, parameter[constant[\n]]]
variable[text] assign[=] call[name[replace_comp].sub, parameter[constant[ ], name[text]]]
variable[comp_space] assign[=] call[name[regex].compile, parameter[constant[\s+]]]
variable[text] assign[=] call[name[comp_space].sub, parameter[constant[ ], name[text]]]
return[name[text]] | keyword[def] identifier[tlg_plaintext_cleanup] ( identifier[text] , identifier[rm_punctuation] = keyword[False] , identifier[rm_periods] = keyword[False] ):
literal[string]
identifier[remove_comp] = identifier[regex] . identifier[compile] ( literal[string] , identifier[flags] = identifier[regex] . identifier[VERSION1] )
identifier[text] = identifier[remove_comp] . identifier[sub] ( literal[string] , identifier[text] )
identifier[new_text] = keyword[None]
keyword[if] identifier[rm_punctuation] :
identifier[new_text] = literal[string]
identifier[punctuation] =[ literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] ]
keyword[if] identifier[rm_periods] :
identifier[punctuation] +=[ literal[string] , literal[string] ]
keyword[for] identifier[char] keyword[in] identifier[text] :
keyword[if] identifier[char] keyword[in] identifier[punctuation] :
keyword[pass]
keyword[else] :
identifier[new_text] += identifier[char]
keyword[if] identifier[new_text] :
identifier[text] = identifier[new_text]
identifier[replace_comp] = identifier[regex] . identifier[compile] ( literal[string] )
identifier[text] = identifier[replace_comp] . identifier[sub] ( literal[string] , identifier[text] )
identifier[comp_space] = identifier[regex] . identifier[compile] ( literal[string] )
identifier[text] = identifier[comp_space] . identifier[sub] ( literal[string] , identifier[text] )
keyword[return] identifier[text] | def tlg_plaintext_cleanup(text, rm_punctuation=False, rm_periods=False):
"""Remove and substitute post-processing for Greek TLG text.
TODO: Surely more junk to pull out. Please submit bugs!
TODO: {.+?}|\\(.+?\\) working?
TODO: This is a rather slow now, help in speeding up welcome.
"""
remove_comp = regex.compile('-\\n|«|»|<|>|\\.\\.\\.|‘|’|_|{.+?}|\\(.+?\\)|[a-zA-Z0-9]', flags=regex.VERSION1)
text = remove_comp.sub('', text)
new_text = None
if rm_punctuation:
new_text = ''
punctuation = [',', '·', ':', '"', "'", '?', '-', '!', '*', '[', ']', '{', '}']
if rm_periods:
punctuation += ['.', ';'] # depends on [control=['if'], data=[]]
for char in text:
# second try at rming some punctuation; merge with above regex
if char in punctuation:
pass # depends on [control=['if'], data=[]]
else:
new_text += char # depends on [control=['for'], data=['char']] # depends on [control=['if'], data=[]]
if new_text:
text = new_text # depends on [control=['if'], data=[]]
# replace line breaks w/ space
replace_comp = regex.compile('\\n')
text = replace_comp.sub(' ', text)
comp_space = regex.compile('\\s+')
text = comp_space.sub(' ', text)
return text |
def translate(self, value):
"""Translate value to enum instance.
If value is already enum instance, check if this value belongs to base
enum.
"""
if self._check_if_already_proper(value):
return value
try:
return self.search_table[value]
except KeyError:
raise ValueError("Value {value} doesn't match any state.".format(
value=value
)) | def function[translate, parameter[self, value]]:
constant[Translate value to enum instance.
If value is already enum instance, check if this value belongs to base
enum.
]
if call[name[self]._check_if_already_proper, parameter[name[value]]] begin[:]
return[name[value]]
<ast.Try object at 0x7da1b2344f10> | keyword[def] identifier[translate] ( identifier[self] , identifier[value] ):
literal[string]
keyword[if] identifier[self] . identifier[_check_if_already_proper] ( identifier[value] ):
keyword[return] identifier[value]
keyword[try] :
keyword[return] identifier[self] . identifier[search_table] [ identifier[value] ]
keyword[except] identifier[KeyError] :
keyword[raise] identifier[ValueError] ( literal[string] . identifier[format] (
identifier[value] = identifier[value]
)) | def translate(self, value):
"""Translate value to enum instance.
If value is already enum instance, check if this value belongs to base
enum.
"""
if self._check_if_already_proper(value):
return value # depends on [control=['if'], data=[]]
try:
return self.search_table[value] # depends on [control=['try'], data=[]]
except KeyError:
raise ValueError("Value {value} doesn't match any state.".format(value=value)) # depends on [control=['except'], data=[]] |
def _set_token_expiration_time(self, expires_in):
"""
Saves the token expiration time by adding the 'expires in' parameter
to the current datetime (in utc).
Parameters
----------
expires_in : int
number of seconds from the time of the request until expiration
Returns
-------
nothing
saves expiration time in self.token_expiration_time as
datetime.datetime
"""
self.token_expiration_time = dt.datetime.utcnow() + \
dt.timedelta(0, expires_in) | def function[_set_token_expiration_time, parameter[self, expires_in]]:
constant[
Saves the token expiration time by adding the 'expires in' parameter
to the current datetime (in utc).
Parameters
----------
expires_in : int
number of seconds from the time of the request until expiration
Returns
-------
nothing
saves expiration time in self.token_expiration_time as
datetime.datetime
]
name[self].token_expiration_time assign[=] binary_operation[call[name[dt].datetime.utcnow, parameter[]] + call[name[dt].timedelta, parameter[constant[0], name[expires_in]]]] | keyword[def] identifier[_set_token_expiration_time] ( identifier[self] , identifier[expires_in] ):
literal[string]
identifier[self] . identifier[token_expiration_time] = identifier[dt] . identifier[datetime] . identifier[utcnow] ()+ identifier[dt] . identifier[timedelta] ( literal[int] , identifier[expires_in] ) | def _set_token_expiration_time(self, expires_in):
"""
Saves the token expiration time by adding the 'expires in' parameter
to the current datetime (in utc).
Parameters
----------
expires_in : int
number of seconds from the time of the request until expiration
Returns
-------
nothing
saves expiration time in self.token_expiration_time as
datetime.datetime
"""
self.token_expiration_time = dt.datetime.utcnow() + dt.timedelta(0, expires_in) |
def walk(path, name):
"""
This function is a 2-time recursive func,
that findin file in dirs
:parameters:
- `path` (str) - Directory path
- `name` (str) - Name of file, that we lookin for
:returns:
Path to the swipl so, path to the resource file
:returns type:
(str)
"""
back_path = path[:]
path = os.path.join(path, name)
if os.path.exists(path):
return path
else:
for dir_ in os.listdir(back_path):
path = os.path.join(back_path, dir_)
if os.path.isdir(path):
res_path = walk(path, name)
if res_path is not None:
return (res_path, back_path)
return None | def function[walk, parameter[path, name]]:
constant[
This function is a 2-time recursive func,
that findin file in dirs
:parameters:
- `path` (str) - Directory path
- `name` (str) - Name of file, that we lookin for
:returns:
Path to the swipl so, path to the resource file
:returns type:
(str)
]
variable[back_path] assign[=] call[name[path]][<ast.Slice object at 0x7da1b162b5b0>]
variable[path] assign[=] call[name[os].path.join, parameter[name[path], name[name]]]
if call[name[os].path.exists, parameter[name[path]]] begin[:]
return[name[path]]
return[constant[None]] | keyword[def] identifier[walk] ( identifier[path] , identifier[name] ):
literal[string]
identifier[back_path] = identifier[path] [:]
identifier[path] = identifier[os] . identifier[path] . identifier[join] ( identifier[path] , identifier[name] )
keyword[if] identifier[os] . identifier[path] . identifier[exists] ( identifier[path] ):
keyword[return] identifier[path]
keyword[else] :
keyword[for] identifier[dir_] keyword[in] identifier[os] . identifier[listdir] ( identifier[back_path] ):
identifier[path] = identifier[os] . identifier[path] . identifier[join] ( identifier[back_path] , identifier[dir_] )
keyword[if] identifier[os] . identifier[path] . identifier[isdir] ( identifier[path] ):
identifier[res_path] = identifier[walk] ( identifier[path] , identifier[name] )
keyword[if] identifier[res_path] keyword[is] keyword[not] keyword[None] :
keyword[return] ( identifier[res_path] , identifier[back_path] )
keyword[return] keyword[None] | def walk(path, name):
"""
This function is a 2-time recursive func,
that findin file in dirs
:parameters:
- `path` (str) - Directory path
- `name` (str) - Name of file, that we lookin for
:returns:
Path to the swipl so, path to the resource file
:returns type:
(str)
"""
back_path = path[:]
path = os.path.join(path, name)
if os.path.exists(path):
return path # depends on [control=['if'], data=[]]
else:
for dir_ in os.listdir(back_path):
path = os.path.join(back_path, dir_)
if os.path.isdir(path):
res_path = walk(path, name)
if res_path is not None:
return (res_path, back_path) # depends on [control=['if'], data=['res_path']] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['dir_']]
return None |
def launch_vm_process(self, session, name, environment):
"""Spawns a new process that will execute the virtual machine and obtains a shared
lock on the machine for the calling session.
If launching the VM succeeds, the new VM process will create its own session
and write-lock the machine for it, preventing conflicting changes from other
processes. If the machine is already locked (because it is already running or
because another session has a write lock), launching the VM process will therefore
fail. Reversely, future attempts to obtain a write lock will also fail while the
machine is running.
The caller's session object remains separate from the session opened by the new
VM process. It receives its own :py:class:`IConsole` object which can be used
to control machine execution, but it cannot be used to change all VM settings
which would be available after a :py:func:`lock_machine` call.
The caller must eventually release the session's shared lock by calling
:py:func:`ISession.unlock_machine` on the local session object once this call
has returned. However, the session's state (see :py:func:`ISession.state` )
will not return to "Unlocked" until the remote session has also unlocked
the machine (i.e. the machine has stopped running).
Launching a VM process can take some time (a new VM is started in a new process,
for which memory and other resources need to be set up). Because of this,
an :py:class:`IProgress` object is returned to allow the caller to wait
for this asynchronous operation to be completed. Until then, the caller's
session object remains in the "Unlocked" state, and its :py:func:`ISession.machine`
and :py:func:`ISession.console` attributes cannot be accessed.
It is recommended to use :py:func:`IProgress.wait_for_completion` or
similar calls to wait for completion. Completion is signalled when the VM
is powered on. If launching the VM fails, error messages can be queried
via the progress object, if available.
The progress object will have at least 2 sub-operations. The first
operation covers the period up to the new VM process calls powerUp.
The subsequent operations mirror the :py:func:`IConsole.power_up`
progress object. Because :py:func:`IConsole.power_up` may require
some extra sub-operations, the :py:func:`IProgress.operation_count`
may change at the completion of operation.
For details on the teleportation progress operation, see
:py:func:`IConsole.power_up` .
<!-- TODO/r=bird: What about making @a environment into a smart array? Guess
this predates our safe array support by a year or so... Dmitry wrote the text here, right?
Just rename it to @a environmentChanges and shorten the documentation to say the string
are applied onto the server environment putenv style, i.e. "VAR=VALUE" for setting/replacing
and "VAR" for unsetting. -->
The @a environment argument is a string containing definitions of
environment variables in the following format:
::
NAME[=VALUE]\n
NAME[=VALUE]\n
...
where \\n is the new line character. These environment
variables will be appended to the environment of the VirtualBox server
process. If an environment variable exists both in the server process
and in this list, the value from this list takes precedence over the
server's variable. If the value of the environment variable is
omitted, this variable will be removed from the resulting environment.
If the environment string is @c null or empty, the server environment
is inherited by the started process as is.
in session of type :class:`ISession`
Client session object to which the VM process will be connected (this
must be in "Unlocked" state).
in name of type str
Front-end to use for the new VM process. The following are currently supported:
"gui": VirtualBox Qt GUI front-end
"headless": VBoxHeadless (VRDE Server) front-end
"sdl": VirtualBox SDL front-end
"emergencystop": reserved value, used for aborting
the currently running VM or session owner. In this case the
@a session parameter may be @c null (if it is non-null it isn't
used in any way), and the @a progress return value will be always
@c null. The operation completes immediately.
"": use the per-VM default frontend if set, otherwise
the global default defined in the system properties. If neither
are set, the API will launch a "gui" session, which may
fail if there is no windowing environment available. See
:py:func:`IMachine.default_frontend` and
:py:func:`ISystemProperties.default_frontend` .
in environment of type str
Environment to pass to the VM process.
return progress of type :class:`IProgress`
Progress object to track the operation completion.
raises :class:`OleErrorUnexpected`
Virtual machine not registered.
raises :class:`OleErrorInvalidarg`
Invalid session type @a type.
raises :class:`VBoxErrorObjectNotFound`
No machine matching @a machineId found.
raises :class:`VBoxErrorInvalidObjectState`
Session already open or being opened.
raises :class:`VBoxErrorIprtError`
Launching process for machine failed.
raises :class:`VBoxErrorVmError`
Failed to assign machine to session.
"""
if not isinstance(session, ISession):
raise TypeError("session can only be an instance of type ISession")
if not isinstance(name, basestring):
raise TypeError("name can only be an instance of type basestring")
if not isinstance(environment, basestring):
raise TypeError("environment can only be an instance of type basestring")
progress = self._call("launchVMProcess",
in_p=[session, name, environment])
progress = IProgress(progress)
return progress | def function[launch_vm_process, parameter[self, session, name, environment]]:
constant[Spawns a new process that will execute the virtual machine and obtains a shared
lock on the machine for the calling session.
If launching the VM succeeds, the new VM process will create its own session
and write-lock the machine for it, preventing conflicting changes from other
processes. If the machine is already locked (because it is already running or
because another session has a write lock), launching the VM process will therefore
fail. Reversely, future attempts to obtain a write lock will also fail while the
machine is running.
The caller's session object remains separate from the session opened by the new
VM process. It receives its own :py:class:`IConsole` object which can be used
to control machine execution, but it cannot be used to change all VM settings
which would be available after a :py:func:`lock_machine` call.
The caller must eventually release the session's shared lock by calling
:py:func:`ISession.unlock_machine` on the local session object once this call
has returned. However, the session's state (see :py:func:`ISession.state` )
will not return to "Unlocked" until the remote session has also unlocked
the machine (i.e. the machine has stopped running).
Launching a VM process can take some time (a new VM is started in a new process,
for which memory and other resources need to be set up). Because of this,
an :py:class:`IProgress` object is returned to allow the caller to wait
for this asynchronous operation to be completed. Until then, the caller's
session object remains in the "Unlocked" state, and its :py:func:`ISession.machine`
and :py:func:`ISession.console` attributes cannot be accessed.
It is recommended to use :py:func:`IProgress.wait_for_completion` or
similar calls to wait for completion. Completion is signalled when the VM
is powered on. If launching the VM fails, error messages can be queried
via the progress object, if available.
The progress object will have at least 2 sub-operations. The first
operation covers the period up to the new VM process calls powerUp.
The subsequent operations mirror the :py:func:`IConsole.power_up`
progress object. Because :py:func:`IConsole.power_up` may require
some extra sub-operations, the :py:func:`IProgress.operation_count`
may change at the completion of operation.
For details on the teleportation progress operation, see
:py:func:`IConsole.power_up` .
<!-- TODO/r=bird: What about making @a environment into a smart array? Guess
this predates our safe array support by a year or so... Dmitry wrote the text here, right?
Just rename it to @a environmentChanges and shorten the documentation to say the string
are applied onto the server environment putenv style, i.e. "VAR=VALUE" for setting/replacing
and "VAR" for unsetting. -->
The @a environment argument is a string containing definitions of
environment variables in the following format:
::
NAME[=VALUE]
NAME[=VALUE]
...
where \n is the new line character. These environment
variables will be appended to the environment of the VirtualBox server
process. If an environment variable exists both in the server process
and in this list, the value from this list takes precedence over the
server's variable. If the value of the environment variable is
omitted, this variable will be removed from the resulting environment.
If the environment string is @c null or empty, the server environment
is inherited by the started process as is.
in session of type :class:`ISession`
Client session object to which the VM process will be connected (this
must be in "Unlocked" state).
in name of type str
Front-end to use for the new VM process. The following are currently supported:
"gui": VirtualBox Qt GUI front-end
"headless": VBoxHeadless (VRDE Server) front-end
"sdl": VirtualBox SDL front-end
"emergencystop": reserved value, used for aborting
the currently running VM or session owner. In this case the
@a session parameter may be @c null (if it is non-null it isn't
used in any way), and the @a progress return value will be always
@c null. The operation completes immediately.
"": use the per-VM default frontend if set, otherwise
the global default defined in the system properties. If neither
are set, the API will launch a "gui" session, which may
fail if there is no windowing environment available. See
:py:func:`IMachine.default_frontend` and
:py:func:`ISystemProperties.default_frontend` .
in environment of type str
Environment to pass to the VM process.
return progress of type :class:`IProgress`
Progress object to track the operation completion.
raises :class:`OleErrorUnexpected`
Virtual machine not registered.
raises :class:`OleErrorInvalidarg`
Invalid session type @a type.
raises :class:`VBoxErrorObjectNotFound`
No machine matching @a machineId found.
raises :class:`VBoxErrorInvalidObjectState`
Session already open or being opened.
raises :class:`VBoxErrorIprtError`
Launching process for machine failed.
raises :class:`VBoxErrorVmError`
Failed to assign machine to session.
]
if <ast.UnaryOp object at 0x7da18eb54970> begin[:]
<ast.Raise object at 0x7da18eb55ea0>
if <ast.UnaryOp object at 0x7da18eb56c20> begin[:]
<ast.Raise object at 0x7da18eb57910>
if <ast.UnaryOp object at 0x7da18eb548e0> begin[:]
<ast.Raise object at 0x7da18eb56b30>
variable[progress] assign[=] call[name[self]._call, parameter[constant[launchVMProcess]]]
variable[progress] assign[=] call[name[IProgress], parameter[name[progress]]]
return[name[progress]] | keyword[def] identifier[launch_vm_process] ( identifier[self] , identifier[session] , identifier[name] , identifier[environment] ):
literal[string]
keyword[if] keyword[not] identifier[isinstance] ( identifier[session] , identifier[ISession] ):
keyword[raise] identifier[TypeError] ( literal[string] )
keyword[if] keyword[not] identifier[isinstance] ( identifier[name] , identifier[basestring] ):
keyword[raise] identifier[TypeError] ( literal[string] )
keyword[if] keyword[not] identifier[isinstance] ( identifier[environment] , identifier[basestring] ):
keyword[raise] identifier[TypeError] ( literal[string] )
identifier[progress] = identifier[self] . identifier[_call] ( literal[string] ,
identifier[in_p] =[ identifier[session] , identifier[name] , identifier[environment] ])
identifier[progress] = identifier[IProgress] ( identifier[progress] )
keyword[return] identifier[progress] | def launch_vm_process(self, session, name, environment):
"""Spawns a new process that will execute the virtual machine and obtains a shared
lock on the machine for the calling session.
If launching the VM succeeds, the new VM process will create its own session
and write-lock the machine for it, preventing conflicting changes from other
processes. If the machine is already locked (because it is already running or
because another session has a write lock), launching the VM process will therefore
fail. Reversely, future attempts to obtain a write lock will also fail while the
machine is running.
The caller's session object remains separate from the session opened by the new
VM process. It receives its own :py:class:`IConsole` object which can be used
to control machine execution, but it cannot be used to change all VM settings
which would be available after a :py:func:`lock_machine` call.
The caller must eventually release the session's shared lock by calling
:py:func:`ISession.unlock_machine` on the local session object once this call
has returned. However, the session's state (see :py:func:`ISession.state` )
will not return to "Unlocked" until the remote session has also unlocked
the machine (i.e. the machine has stopped running).
Launching a VM process can take some time (a new VM is started in a new process,
for which memory and other resources need to be set up). Because of this,
an :py:class:`IProgress` object is returned to allow the caller to wait
for this asynchronous operation to be completed. Until then, the caller's
session object remains in the "Unlocked" state, and its :py:func:`ISession.machine`
and :py:func:`ISession.console` attributes cannot be accessed.
It is recommended to use :py:func:`IProgress.wait_for_completion` or
similar calls to wait for completion. Completion is signalled when the VM
is powered on. If launching the VM fails, error messages can be queried
via the progress object, if available.
The progress object will have at least 2 sub-operations. The first
operation covers the period up to the new VM process calls powerUp.
The subsequent operations mirror the :py:func:`IConsole.power_up`
progress object. Because :py:func:`IConsole.power_up` may require
some extra sub-operations, the :py:func:`IProgress.operation_count`
may change at the completion of operation.
For details on the teleportation progress operation, see
:py:func:`IConsole.power_up` .
<!-- TODO/r=bird: What about making @a environment into a smart array? Guess
this predates our safe array support by a year or so... Dmitry wrote the text here, right?
Just rename it to @a environmentChanges and shorten the documentation to say the string
are applied onto the server environment putenv style, i.e. "VAR=VALUE" for setting/replacing
and "VAR" for unsetting. -->
The @a environment argument is a string containing definitions of
environment variables in the following format:
::
NAME[=VALUE]
NAME[=VALUE]
...
where \\n is the new line character. These environment
variables will be appended to the environment of the VirtualBox server
process. If an environment variable exists both in the server process
and in this list, the value from this list takes precedence over the
server's variable. If the value of the environment variable is
omitted, this variable will be removed from the resulting environment.
If the environment string is @c null or empty, the server environment
is inherited by the started process as is.
in session of type :class:`ISession`
Client session object to which the VM process will be connected (this
must be in "Unlocked" state).
in name of type str
Front-end to use for the new VM process. The following are currently supported:
"gui": VirtualBox Qt GUI front-end
"headless": VBoxHeadless (VRDE Server) front-end
"sdl": VirtualBox SDL front-end
"emergencystop": reserved value, used for aborting
the currently running VM or session owner. In this case the
@a session parameter may be @c null (if it is non-null it isn't
used in any way), and the @a progress return value will be always
@c null. The operation completes immediately.
"": use the per-VM default frontend if set, otherwise
the global default defined in the system properties. If neither
are set, the API will launch a "gui" session, which may
fail if there is no windowing environment available. See
:py:func:`IMachine.default_frontend` and
:py:func:`ISystemProperties.default_frontend` .
in environment of type str
Environment to pass to the VM process.
return progress of type :class:`IProgress`
Progress object to track the operation completion.
raises :class:`OleErrorUnexpected`
Virtual machine not registered.
raises :class:`OleErrorInvalidarg`
Invalid session type @a type.
raises :class:`VBoxErrorObjectNotFound`
No machine matching @a machineId found.
raises :class:`VBoxErrorInvalidObjectState`
Session already open or being opened.
raises :class:`VBoxErrorIprtError`
Launching process for machine failed.
raises :class:`VBoxErrorVmError`
Failed to assign machine to session.
"""
if not isinstance(session, ISession):
raise TypeError('session can only be an instance of type ISession') # depends on [control=['if'], data=[]]
if not isinstance(name, basestring):
raise TypeError('name can only be an instance of type basestring') # depends on [control=['if'], data=[]]
if not isinstance(environment, basestring):
raise TypeError('environment can only be an instance of type basestring') # depends on [control=['if'], data=[]]
progress = self._call('launchVMProcess', in_p=[session, name, environment])
progress = IProgress(progress)
return progress |
def show_tip(self, tip=""):
    """Display *tip* in a tooltip anchored at this widget's position."""
    anchor = self.mapToGlobal(self.pos())
    QToolTip.showText(anchor, tip, self)
constant[Show tip]
call[name[QToolTip].showText, parameter[call[name[self].mapToGlobal, parameter[call[name[self].pos, parameter[]]]], name[tip], name[self]]] | keyword[def] identifier[show_tip] ( identifier[self] , identifier[tip] = literal[string] ):
literal[string]
identifier[QToolTip] . identifier[showText] ( identifier[self] . identifier[mapToGlobal] ( identifier[self] . identifier[pos] ()), identifier[tip] , identifier[self] ) | def show_tip(self, tip=''):
"""Show tip"""
QToolTip.showText(self.mapToGlobal(self.pos()), tip, self) |
def _prepare_init_params_from_job_description(cls, job_details, model_channel_name=None):
    """Convert the job description to init params that can be handled by the class constructor

    Args:
        job_details: the returned job details from a describe_training_job API call.

    Returns:
        dictionary: The transformed init_params
    """
    init_params = super(TensorFlow, cls)._prepare_init_params_from_job_description(
        job_details, model_channel_name)

    # Promote the TensorFlow-specific entries from the hyperparameters
    # dict to top-level init params.
    hyperparameters = init_params['hyperparameters']
    for argument in ('checkpoint_path', 'training_steps', 'evaluation_steps', 'model_dir'):
        value = hyperparameters.pop(argument, None)
        if value is not None:
            init_params[argument] = value

    image_name = init_params.pop('image')
    framework, py_version, tag, script_mode = fw.framework_name_from_image(image_name)
    if not framework:
        # The image is not one of the officially supported ones, so just
        # carry the raw image reference through in the init params.
        init_params['image_name'] = image_name
        return init_params

    if script_mode:
        init_params['script_mode'] = True
    init_params['py_version'] = py_version
    # Image tagging moved from plain versions ('1.0') to expressive tags
    # encoding framework version, device and python version
    # (e.g. '1.5-gpu-py2').  The deprecated '1.0' tag maps to framework
    # version '1.4'; otherwise the version is extracted from the tag.
    if tag == '1.0':
        init_params['framework_version'] = '1.4'
    else:
        init_params['framework_version'] = fw.framework_version_from_tag(tag)

    if framework != cls.__framework_name__:
        raise ValueError("Training job: {} didn't use image for requested framework".format(
            init_params['base_job_name']))
    return init_params
constant[Convert the job description to init params that can be handled by the class constructor
Args:
job_details: the returned job details from a describe_training_job API call.
Returns:
dictionary: The transformed init_params
]
variable[init_params] assign[=] call[call[name[super], parameter[name[TensorFlow], name[cls]]]._prepare_init_params_from_job_description, parameter[name[job_details], name[model_channel_name]]]
for taget[name[argument]] in starred[tuple[[<ast.Constant object at 0x7da1b21c4400>, <ast.Constant object at 0x7da1b21c6920>, <ast.Constant object at 0x7da1b21c4490>, <ast.Constant object at 0x7da1b21c4dc0>]]] begin[:]
variable[value] assign[=] call[call[name[init_params]][constant[hyperparameters]].pop, parameter[name[argument], constant[None]]]
if compare[name[value] is_not constant[None]] begin[:]
call[name[init_params]][name[argument]] assign[=] name[value]
variable[image_name] assign[=] call[name[init_params].pop, parameter[constant[image]]]
<ast.Tuple object at 0x7da1b21c5ae0> assign[=] call[name[fw].framework_name_from_image, parameter[name[image_name]]]
if <ast.UnaryOp object at 0x7da1b21c7c40> begin[:]
call[name[init_params]][constant[image_name]] assign[=] name[image_name]
return[name[init_params]]
if name[script_mode] begin[:]
call[name[init_params]][constant[script_mode]] assign[=] constant[True]
call[name[init_params]][constant[py_version]] assign[=] name[py_version]
call[name[init_params]][constant[framework_version]] assign[=] <ast.IfExp object at 0x7da1b21c4610>
variable[training_job_name] assign[=] call[name[init_params]][constant[base_job_name]]
if compare[name[framework] not_equal[!=] name[cls].__framework_name__] begin[:]
<ast.Raise object at 0x7da1b21c7a30>
return[name[init_params]] | keyword[def] identifier[_prepare_init_params_from_job_description] ( identifier[cls] , identifier[job_details] , identifier[model_channel_name] = keyword[None] ):
literal[string]
identifier[init_params] = identifier[super] ( identifier[TensorFlow] , identifier[cls] ). identifier[_prepare_init_params_from_job_description] ( identifier[job_details] ,
identifier[model_channel_name] )
keyword[for] identifier[argument] keyword[in] ( literal[string] , literal[string] , literal[string] , literal[string] ):
identifier[value] = identifier[init_params] [ literal[string] ]. identifier[pop] ( identifier[argument] , keyword[None] )
keyword[if] identifier[value] keyword[is] keyword[not] keyword[None] :
identifier[init_params] [ identifier[argument] ]= identifier[value]
identifier[image_name] = identifier[init_params] . identifier[pop] ( literal[string] )
identifier[framework] , identifier[py_version] , identifier[tag] , identifier[script_mode] = identifier[fw] . identifier[framework_name_from_image] ( identifier[image_name] )
keyword[if] keyword[not] identifier[framework] :
identifier[init_params] [ literal[string] ]= identifier[image_name]
keyword[return] identifier[init_params]
keyword[if] identifier[script_mode] :
identifier[init_params] [ literal[string] ]= keyword[True]
identifier[init_params] [ literal[string] ]= identifier[py_version]
identifier[init_params] [ literal[string] ]= literal[string] keyword[if] identifier[tag] == literal[string] keyword[else] identifier[fw] . identifier[framework_version_from_tag] (
identifier[tag] )
identifier[training_job_name] = identifier[init_params] [ literal[string] ]
keyword[if] identifier[framework] != identifier[cls] . identifier[__framework_name__] :
keyword[raise] identifier[ValueError] ( literal[string] . identifier[format] (
identifier[training_job_name] ))
keyword[return] identifier[init_params] | def _prepare_init_params_from_job_description(cls, job_details, model_channel_name=None):
"""Convert the job description to init params that can be handled by the class constructor
Args:
job_details: the returned job details from a describe_training_job API call.
Returns:
dictionary: The transformed init_params
"""
init_params = super(TensorFlow, cls)._prepare_init_params_from_job_description(job_details, model_channel_name)
# Move some of the tensorflow specific init params from hyperparameters into the main init params.
for argument in ('checkpoint_path', 'training_steps', 'evaluation_steps', 'model_dir'):
value = init_params['hyperparameters'].pop(argument, None)
if value is not None:
init_params[argument] = value # depends on [control=['if'], data=['value']] # depends on [control=['for'], data=['argument']]
image_name = init_params.pop('image')
(framework, py_version, tag, script_mode) = fw.framework_name_from_image(image_name)
if not framework:
# If we were unable to parse the framework name from the image it is not one of our
# officially supported images, in this case just add the image to the init params.
init_params['image_name'] = image_name
return init_params # depends on [control=['if'], data=[]]
if script_mode:
init_params['script_mode'] = True # depends on [control=['if'], data=[]]
init_params['py_version'] = py_version
# We switched image tagging scheme from regular image version (e.g. '1.0') to more expressive
# containing framework version, device type and python version (e.g. '1.5-gpu-py2').
# For backward compatibility map deprecated image tag '1.0' to a '1.4' framework version
# otherwise extract framework version from the tag itself.
init_params['framework_version'] = '1.4' if tag == '1.0' else fw.framework_version_from_tag(tag)
training_job_name = init_params['base_job_name']
if framework != cls.__framework_name__:
raise ValueError("Training job: {} didn't use image for requested framework".format(training_job_name)) # depends on [control=['if'], data=[]]
return init_params |
def root(self):
    """Return the top-level serializer for this field.

    Walks up the ``parent`` chain until it reaches a node whose parent
    is ``None``.
    """
    node = self
    while node.parent is not None:
        node = node.parent
    return node
constant[
Returns the top-level serializer for this field.
]
variable[root] assign[=] name[self]
while compare[name[root].parent is_not constant[None]] begin[:]
variable[root] assign[=] name[root].parent
return[name[root]] | keyword[def] identifier[root] ( identifier[self] ):
literal[string]
identifier[root] = identifier[self]
keyword[while] identifier[root] . identifier[parent] keyword[is] keyword[not] keyword[None] :
identifier[root] = identifier[root] . identifier[parent]
keyword[return] identifier[root] | def root(self):
"""
Returns the top-level serializer for this field.
"""
root = self
while root.parent is not None:
root = root.parent # depends on [control=['while'], data=[]]
return root |
def validate_python_version():
    """Validate python interpreter version. Only 3.3+ allowed.

    Prints an error message and exits the process with status 1 when the
    running interpreter is older than Python 3.3; returns None otherwise.
    """
    # Compare the interpreter's version tuple directly rather than parsing
    # version strings with distutils' LooseVersion: distutils is deprecated
    # (PEP 632) and removed in Python 3.12, and the tuple comparison is
    # both simpler and exact.
    if sys.version_info < (3, 3):
        print("Sorry, Python 3.3+ is required")
        sys.exit(1)
constant[Validate python interpreter version. Only 3.3+ allowed.]
variable[python_version] assign[=] call[name[LooseVersion], parameter[call[name[platform].python_version, parameter[]]]]
variable[minimal_version] assign[=] call[name[LooseVersion], parameter[constant[3.3.0]]]
if compare[name[python_version] less[<] name[minimal_version]] begin[:]
call[name[print], parameter[constant[Sorry, Python 3.3+ is required]]]
call[name[sys].exit, parameter[constant[1]]] | keyword[def] identifier[validate_python_version] ():
literal[string]
identifier[python_version] = identifier[LooseVersion] ( identifier[platform] . identifier[python_version] ())
identifier[minimal_version] = identifier[LooseVersion] ( literal[string] )
keyword[if] identifier[python_version] < identifier[minimal_version] :
identifier[print] ( literal[string] )
identifier[sys] . identifier[exit] ( literal[int] ) | def validate_python_version():
"""Validate python interpreter version. Only 3.3+ allowed."""
python_version = LooseVersion(platform.python_version())
minimal_version = LooseVersion('3.3.0')
if python_version < minimal_version:
print('Sorry, Python 3.3+ is required')
sys.exit(1) # depends on [control=['if'], data=[]] |
def find_biomass_precursors(model, reaction):
    """
    Return a list of all biomass precursors excluding ATP and H2O.

    Parameters
    ----------
    reaction : cobra.core.reaction.Reaction
        The biomass reaction of the model under investigation.
    model : cobra.Model
        The metabolic model under investigation.

    Returns
    -------
    list
        Metabolite objects that are reactants of the biomass reaction excluding
        ATP and H2O.
    """
    main_compartment = helpers.find_compartment_id_in_model(model, 'c')
    # Collect the ATP (MNXM3) and H2O (MNXM2) metabolites so they can be
    # excluded from the precursor set; either one may be absent from the
    # model, in which case the lookup raises RuntimeError and is skipped.
    gam_mets = set()
    for mnx_id in ("MNXM3", "MNXM2"):
        try:
            gam_mets.add(
                helpers.find_met_in_model(model, mnx_id, main_compartment)[0])
        except RuntimeError:
            pass
    return list(set(reaction.reactants) - gam_mets)
constant[
Return a list of all biomass precursors excluding ATP and H2O.
Parameters
----------
reaction : cobra.core.reaction.Reaction
The biomass reaction of the model under investigation.
model : cobra.Model
The metabolic model under investigation.
Returns
-------
list
Metabolite objects that are reactants of the biomass reaction excluding
ATP and H2O.
]
variable[id_of_main_compartment] assign[=] call[name[helpers].find_compartment_id_in_model, parameter[name[model], constant[c]]]
variable[gam_reactants] assign[=] call[name[set], parameter[]]
<ast.Try object at 0x7da1b0666f50>
<ast.Try object at 0x7da1b06dd390>
variable[biomass_precursors] assign[=] binary_operation[call[name[set], parameter[name[reaction].reactants]] - name[gam_reactants]]
return[call[name[list], parameter[name[biomass_precursors]]]] | keyword[def] identifier[find_biomass_precursors] ( identifier[model] , identifier[reaction] ):
literal[string]
identifier[id_of_main_compartment] = identifier[helpers] . identifier[find_compartment_id_in_model] ( identifier[model] , literal[string] )
identifier[gam_reactants] = identifier[set] ()
keyword[try] :
identifier[gam_reactants] . identifier[update] ([
identifier[helpers] . identifier[find_met_in_model] (
identifier[model] , literal[string] , identifier[id_of_main_compartment] )[ literal[int] ]])
keyword[except] identifier[RuntimeError] :
keyword[pass]
keyword[try] :
identifier[gam_reactants] . identifier[update] ([
identifier[helpers] . identifier[find_met_in_model] (
identifier[model] , literal[string] , identifier[id_of_main_compartment] )[ literal[int] ]])
keyword[except] identifier[RuntimeError] :
keyword[pass]
identifier[biomass_precursors] = identifier[set] ( identifier[reaction] . identifier[reactants] )- identifier[gam_reactants]
keyword[return] identifier[list] ( identifier[biomass_precursors] ) | def find_biomass_precursors(model, reaction):
"""
Return a list of all biomass precursors excluding ATP and H2O.
Parameters
----------
reaction : cobra.core.reaction.Reaction
The biomass reaction of the model under investigation.
model : cobra.Model
The metabolic model under investigation.
Returns
-------
list
Metabolite objects that are reactants of the biomass reaction excluding
ATP and H2O.
"""
id_of_main_compartment = helpers.find_compartment_id_in_model(model, 'c')
gam_reactants = set()
try:
gam_reactants.update([helpers.find_met_in_model(model, 'MNXM3', id_of_main_compartment)[0]]) # depends on [control=['try'], data=[]]
except RuntimeError:
pass # depends on [control=['except'], data=[]]
try:
gam_reactants.update([helpers.find_met_in_model(model, 'MNXM2', id_of_main_compartment)[0]]) # depends on [control=['try'], data=[]]
except RuntimeError:
pass # depends on [control=['except'], data=[]]
biomass_precursors = set(reaction.reactants) - gam_reactants
return list(biomass_precursors) |
def _apply_tracing(self, handler, attributes):
    """
    Shared tracing logic for both the middleware and the decorator paths.

    Starts a new active span for *handler*'s request — continuing the
    trace extracted from the inbound headers when valid propagation
    headers are present — tags it with the standard HTTP metadata plus
    any *attributes* found on the request, and returns the opened scope.
    """
    operation_name = self._get_operation_name(handler)
    request = handler.request

    # Continue the inbound trace if the carrier headers are usable,
    # otherwise start a brand-new trace.
    try:
        parent_ctx = self._tracer.extract(
            opentracing.Format.HTTP_HEADERS, request.headers)
        scope = self._tracer.start_active_span(
            operation_name, child_of=parent_ctx)
    except (opentracing.InvalidCarrierException,
            opentracing.SpanContextCorruptedException):
        scope = self._tracer.start_active_span(operation_name)

    # Stash the scope on the request so it can be retrieved and closed
    # when the request finishes.
    setattr(request, SCOPE_ATTR, scope)

    span = scope.span
    span.set_tag(tags.COMPONENT, 'tornado')
    span.set_tag(tags.SPAN_KIND, tags.SPAN_KIND_RPC_SERVER)
    span.set_tag(tags.HTTP_METHOD, request.method)
    span.set_tag(tags.HTTP_URL, request.uri)

    # Record any requested request attributes that are present and
    # stringify to something non-empty.
    for attr in attributes:
        if hasattr(request, attr):
            payload = str(getattr(request, attr))
            if payload:
                span.set_tag(attr, payload)

    # Invoke the start-span callback, if one was configured.
    self._call_start_span_cb(span, request)
    return scope
constant[
Helper function to avoid rewriting for middleware and decorator.
Returns a new span from the request with logged attributes and
correct operation name from the func.
]
variable[operation_name] assign[=] call[name[self]._get_operation_name, parameter[name[handler]]]
variable[headers] assign[=] name[handler].request.headers
variable[request] assign[=] name[handler].request
<ast.Try object at 0x7da1b04d5c30>
call[name[setattr], parameter[name[request], name[SCOPE_ATTR], name[scope]]]
call[name[scope].span.set_tag, parameter[name[tags].COMPONENT, constant[tornado]]]
call[name[scope].span.set_tag, parameter[name[tags].SPAN_KIND, name[tags].SPAN_KIND_RPC_SERVER]]
call[name[scope].span.set_tag, parameter[name[tags].HTTP_METHOD, name[request].method]]
call[name[scope].span.set_tag, parameter[name[tags].HTTP_URL, name[request].uri]]
for taget[name[attr]] in starred[name[attributes]] begin[:]
if call[name[hasattr], parameter[name[request], name[attr]]] begin[:]
variable[payload] assign[=] call[name[str], parameter[call[name[getattr], parameter[name[request], name[attr]]]]]
if name[payload] begin[:]
call[name[scope].span.set_tag, parameter[name[attr], name[payload]]]
call[name[self]._call_start_span_cb, parameter[name[scope].span, name[request]]]
return[name[scope]] | keyword[def] identifier[_apply_tracing] ( identifier[self] , identifier[handler] , identifier[attributes] ):
literal[string]
identifier[operation_name] = identifier[self] . identifier[_get_operation_name] ( identifier[handler] )
identifier[headers] = identifier[handler] . identifier[request] . identifier[headers]
identifier[request] = identifier[handler] . identifier[request]
keyword[try] :
identifier[span_ctx] = identifier[self] . identifier[_tracer] . identifier[extract] ( identifier[opentracing] . identifier[Format] . identifier[HTTP_HEADERS] ,
identifier[headers] )
identifier[scope] = identifier[self] . identifier[_tracer] . identifier[start_active_span] ( identifier[operation_name] ,
identifier[child_of] = identifier[span_ctx] )
keyword[except] ( identifier[opentracing] . identifier[InvalidCarrierException] ,
identifier[opentracing] . identifier[SpanContextCorruptedException] ):
identifier[scope] = identifier[self] . identifier[_tracer] . identifier[start_active_span] ( identifier[operation_name] )
identifier[setattr] ( identifier[request] , identifier[SCOPE_ATTR] , identifier[scope] )
identifier[scope] . identifier[span] . identifier[set_tag] ( identifier[tags] . identifier[COMPONENT] , literal[string] )
identifier[scope] . identifier[span] . identifier[set_tag] ( identifier[tags] . identifier[SPAN_KIND] , identifier[tags] . identifier[SPAN_KIND_RPC_SERVER] )
identifier[scope] . identifier[span] . identifier[set_tag] ( identifier[tags] . identifier[HTTP_METHOD] , identifier[request] . identifier[method] )
identifier[scope] . identifier[span] . identifier[set_tag] ( identifier[tags] . identifier[HTTP_URL] , identifier[request] . identifier[uri] )
keyword[for] identifier[attr] keyword[in] identifier[attributes] :
keyword[if] identifier[hasattr] ( identifier[request] , identifier[attr] ):
identifier[payload] = identifier[str] ( identifier[getattr] ( identifier[request] , identifier[attr] ))
keyword[if] identifier[payload] :
identifier[scope] . identifier[span] . identifier[set_tag] ( identifier[attr] , identifier[payload] )
identifier[self] . identifier[_call_start_span_cb] ( identifier[scope] . identifier[span] , identifier[request] )
keyword[return] identifier[scope] | def _apply_tracing(self, handler, attributes):
"""
Helper function to avoid rewriting for middleware and decorator.
Returns a new span from the request with logged attributes and
correct operation name from the func.
"""
operation_name = self._get_operation_name(handler)
headers = handler.request.headers
request = handler.request
# start new span from trace info
try:
span_ctx = self._tracer.extract(opentracing.Format.HTTP_HEADERS, headers)
scope = self._tracer.start_active_span(operation_name, child_of=span_ctx) # depends on [control=['try'], data=[]]
except (opentracing.InvalidCarrierException, opentracing.SpanContextCorruptedException):
scope = self._tracer.start_active_span(operation_name) # depends on [control=['except'], data=[]]
# add span to current spans
setattr(request, SCOPE_ATTR, scope)
# log any traced attributes
scope.span.set_tag(tags.COMPONENT, 'tornado')
scope.span.set_tag(tags.SPAN_KIND, tags.SPAN_KIND_RPC_SERVER)
scope.span.set_tag(tags.HTTP_METHOD, request.method)
scope.span.set_tag(tags.HTTP_URL, request.uri)
for attr in attributes:
if hasattr(request, attr):
payload = str(getattr(request, attr))
if payload:
scope.span.set_tag(attr, payload) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['attr']]
# invoke the start span callback, if any
self._call_start_span_cb(scope.span, request)
return scope |
def eq(self, other):
    """
    Construct a Filter returning True for asset/date pairs where the output
    of ``self`` matches ``other``.
    """
    # Comparing against the missing value is rejected outright: missing
    # values have NaN semantics, so the comparison would always be False,
    # which is almost certainly not what the user intended.
    if other == self.missing_value:
        msg = (
            "Comparison against self.missing_value ({value!r}) in"
            " {typename}.eq().\n"
            "Missing values have NaN semantics, so the "
            "requested comparison would always produce False.\n"
            "Use the isnull() method to check for missing values."
        ).format(value=other, typename=type(self).__name__)
        raise ValueError(msg)

    # A numeric comparand is only valid for int64 classifiers, and a
    # non-numeric one only for non-int64 classifiers.
    is_numeric = isinstance(other, Number)
    if is_numeric != (self.dtype == int64_dtype):
        raise InvalidClassifierComparison(self, other)

    if is_numeric:
        return NumExprFilter.create(
            "x_0 == {other}".format(other=int(other)),
            binds=(self,),
        )
    return ArrayPredicate(
        term=self,
        op=operator.eq,
        opargs=(other,),
    )
constant[
Construct a Filter returning True for asset/date pairs where the output
of ``self`` matches ``other``.
]
if compare[name[other] equal[==] name[self].missing_value] begin[:]
<ast.Raise object at 0x7da1b2046bc0>
if compare[call[name[isinstance], parameter[name[other], name[Number]]] not_equal[!=] compare[name[self].dtype equal[==] name[int64_dtype]]] begin[:]
<ast.Raise object at 0x7da1b1e8f730>
if call[name[isinstance], parameter[name[other], name[Number]]] begin[:]
return[call[name[NumExprFilter].create, parameter[call[constant[x_0 == {other}].format, parameter[]]]]] | keyword[def] identifier[eq] ( identifier[self] , identifier[other] ):
literal[string]
keyword[if] identifier[other] == identifier[self] . identifier[missing_value] :
keyword[raise] identifier[ValueError] (
literal[string]
literal[string]
literal[string]
literal[string]
literal[string] . identifier[format] (
identifier[value] = identifier[other] ,
identifier[typename] =( identifier[type] ( identifier[self] ). identifier[__name__] ),
)
)
keyword[if] identifier[isinstance] ( identifier[other] , identifier[Number] )!=( identifier[self] . identifier[dtype] == identifier[int64_dtype] ):
keyword[raise] identifier[InvalidClassifierComparison] ( identifier[self] , identifier[other] )
keyword[if] identifier[isinstance] ( identifier[other] , identifier[Number] ):
keyword[return] identifier[NumExprFilter] . identifier[create] (
literal[string] . identifier[format] ( identifier[other] = identifier[int] ( identifier[other] )),
identifier[binds] =( identifier[self] ,),
)
keyword[else] :
keyword[return] identifier[ArrayPredicate] (
identifier[term] = identifier[self] ,
identifier[op] = identifier[operator] . identifier[eq] ,
identifier[opargs] =( identifier[other] ,),
) | def eq(self, other):
"""
Construct a Filter returning True for asset/date pairs where the output
of ``self`` matches ``other``.
"""
# We treat this as an error because missing_values have NaN semantics,
# which means this would return an array of all False, which is almost
# certainly not what the user wants.
if other == self.missing_value:
raise ValueError('Comparison against self.missing_value ({value!r}) in {typename}.eq().\nMissing values have NaN semantics, so the requested comparison would always produce False.\nUse the isnull() method to check for missing values.'.format(value=other, typename=type(self).__name__)) # depends on [control=['if'], data=['other']]
if isinstance(other, Number) != (self.dtype == int64_dtype):
raise InvalidClassifierComparison(self, other) # depends on [control=['if'], data=[]]
if isinstance(other, Number):
return NumExprFilter.create('x_0 == {other}'.format(other=int(other)), binds=(self,)) # depends on [control=['if'], data=[]]
else:
return ArrayPredicate(term=self, op=operator.eq, opargs=(other,)) |
def load(self, callables_fname):
    r"""
    Load traced modules information from a `JSON <http://www.json.org/>`_ file.
    The loaded module information is merged with any existing module information
    :param callables_fname: File name
    :type callables_fname: :ref:`FileNameExists`
    :raises:
     * OSError (File *[fname]* could not be found)
     * RuntimeError (Argument \\`callables_fname\\` is not valid)
    """
    # Validate file name
    _validate_fname(callables_fname)
    if not os.path.exists(callables_fname):
        raise OSError("File {0} could not be found".format(callables_fname))
    with open(callables_fname, "r") as fobj:
        fdict = json.load(fobj)
    # Python 2 only: convert json's unicode strings back to str
    if sys.hexversion < 0x03000000: # pragma: no cover
        fdict = _unicode_to_ascii(fdict)
    # Merge (not replace) the persisted callables database
    self._callables_db.update(fdict["_callables_db"])
    # Reverse the tuple-to-string conversion that the save method
    # does due to the fact that JSON keys need to be strings and the
    # keys of the reverse callable dictionary are tuples where the first
    # item is a file name and the second item is the starting line of the
    # callable within that file (dictionary value)
    rdict = {}
    for key, value in fdict["_reverse_callables_db"].items():
        # key is the repr of a 2-tuple, e.g. "('fname', lineno)"; strip
        # the parentheses and split into the two components.
        # NOTE(review): a comma inside the file name would break this
        # split -- confirm file names are comma-free at save time
        tokens = key[1:-1].split(",")
        # Drop the quotes surrounding the file-name component
        key = tokens[0].strip()[1:-1]
        if platform.system().lower() == "windows": # pragma: no cover
            # Collapse doubled (JSON-escaped) backslashes repeatedly
            # until the path representation is stable
            while True:
                tmp = key
                key = key.replace("\\\\", "\\")
                if tmp == key:
                    break
        rdict[(key, int(tokens[1]))] = value
    self._reverse_callables_db.update(rdict)
    self._modules_dict.update(fdict["_modules_dict"])
    self._fnames.update(fdict["_fnames"])
    self._module_names.extend(fdict["_module_names"])
    self._class_names.extend(fdict["_class_names"])
    # De-duplicate and sort the merged name lists
    self._module_names = sorted(list(set(self._module_names)))
self._class_names = sorted(list(set(self._class_names))) | def function[load, parameter[self, callables_fname]]:
constant[
Load traced modules information from a `JSON <http://www.json.org/>`_ file.
The loaded module information is merged with any existing module information
:param callables_fname: File name
:type callables_fname: :ref:`FileNameExists`
:raises:
* OSError (File *[fname]* could not be found)
* RuntimeError (Argument \\`callables_fname\\` is not valid)
]
call[name[_validate_fname], parameter[name[callables_fname]]]
if <ast.UnaryOp object at 0x7da18bc70640> begin[:]
<ast.Raise object at 0x7da18bc736a0>
with call[name[open], parameter[name[callables_fname], constant[r]]] begin[:]
variable[fdict] assign[=] call[name[json].load, parameter[name[fobj]]]
if compare[name[sys].hexversion less[<] constant[50331648]] begin[:]
variable[fdict] assign[=] call[name[_unicode_to_ascii], parameter[name[fdict]]]
call[name[self]._callables_db.update, parameter[call[name[fdict]][constant[_callables_db]]]]
variable[rdict] assign[=] dictionary[[], []]
for taget[tuple[[<ast.Name object at 0x7da18bc726e0>, <ast.Name object at 0x7da18bc71c00>]]] in starred[call[call[name[fdict]][constant[_reverse_callables_db]].items, parameter[]]] begin[:]
variable[tokens] assign[=] call[call[name[key]][<ast.Slice object at 0x7da20c9905e0>].split, parameter[constant[,]]]
variable[key] assign[=] call[call[call[name[tokens]][constant[0]].strip, parameter[]]][<ast.Slice object at 0x7da20c991ed0>]
if compare[call[call[name[platform].system, parameter[]].lower, parameter[]] equal[==] constant[windows]] begin[:]
while constant[True] begin[:]
variable[tmp] assign[=] name[key]
variable[key] assign[=] call[name[key].replace, parameter[constant[\\], constant[\]]]
if compare[name[tmp] equal[==] name[key]] begin[:]
break
call[name[rdict]][tuple[[<ast.Name object at 0x7da20c993b20>, <ast.Call object at 0x7da20c993bb0>]]] assign[=] name[value]
call[name[self]._reverse_callables_db.update, parameter[name[rdict]]]
call[name[self]._modules_dict.update, parameter[call[name[fdict]][constant[_modules_dict]]]]
call[name[self]._fnames.update, parameter[call[name[fdict]][constant[_fnames]]]]
call[name[self]._module_names.extend, parameter[call[name[fdict]][constant[_module_names]]]]
call[name[self]._class_names.extend, parameter[call[name[fdict]][constant[_class_names]]]]
name[self]._module_names assign[=] call[name[sorted], parameter[call[name[list], parameter[call[name[set], parameter[name[self]._module_names]]]]]]
name[self]._class_names assign[=] call[name[sorted], parameter[call[name[list], parameter[call[name[set], parameter[name[self]._class_names]]]]]] | keyword[def] identifier[load] ( identifier[self] , identifier[callables_fname] ):
literal[string]
identifier[_validate_fname] ( identifier[callables_fname] )
keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[exists] ( identifier[callables_fname] ):
keyword[raise] identifier[OSError] ( literal[string] . identifier[format] ( identifier[callables_fname] ))
keyword[with] identifier[open] ( identifier[callables_fname] , literal[string] ) keyword[as] identifier[fobj] :
identifier[fdict] = identifier[json] . identifier[load] ( identifier[fobj] )
keyword[if] identifier[sys] . identifier[hexversion] < literal[int] :
identifier[fdict] = identifier[_unicode_to_ascii] ( identifier[fdict] )
identifier[self] . identifier[_callables_db] . identifier[update] ( identifier[fdict] [ literal[string] ])
identifier[rdict] ={}
keyword[for] identifier[key] , identifier[value] keyword[in] identifier[fdict] [ literal[string] ]. identifier[items] ():
identifier[tokens] = identifier[key] [ literal[int] :- literal[int] ]. identifier[split] ( literal[string] )
identifier[key] = identifier[tokens] [ literal[int] ]. identifier[strip] ()[ literal[int] :- literal[int] ]
keyword[if] identifier[platform] . identifier[system] (). identifier[lower] ()== literal[string] :
keyword[while] keyword[True] :
identifier[tmp] = identifier[key]
identifier[key] = identifier[key] . identifier[replace] ( literal[string] , literal[string] )
keyword[if] identifier[tmp] == identifier[key] :
keyword[break]
identifier[rdict] [( identifier[key] , identifier[int] ( identifier[tokens] [ literal[int] ]))]= identifier[value]
identifier[self] . identifier[_reverse_callables_db] . identifier[update] ( identifier[rdict] )
identifier[self] . identifier[_modules_dict] . identifier[update] ( identifier[fdict] [ literal[string] ])
identifier[self] . identifier[_fnames] . identifier[update] ( identifier[fdict] [ literal[string] ])
identifier[self] . identifier[_module_names] . identifier[extend] ( identifier[fdict] [ literal[string] ])
identifier[self] . identifier[_class_names] . identifier[extend] ( identifier[fdict] [ literal[string] ])
identifier[self] . identifier[_module_names] = identifier[sorted] ( identifier[list] ( identifier[set] ( identifier[self] . identifier[_module_names] )))
identifier[self] . identifier[_class_names] = identifier[sorted] ( identifier[list] ( identifier[set] ( identifier[self] . identifier[_class_names] ))) | def load(self, callables_fname):
"""
Load traced modules information from a `JSON <http://www.json.org/>`_ file.
The loaded module information is merged with any existing module information
:param callables_fname: File name
:type callables_fname: :ref:`FileNameExists`
:raises:
* OSError (File *[fname]* could not be found)
* RuntimeError (Argument \\\\`callables_fname\\\\` is not valid)
"""
# Validate file name
_validate_fname(callables_fname)
if not os.path.exists(callables_fname):
raise OSError('File {0} could not be found'.format(callables_fname)) # depends on [control=['if'], data=[]]
with open(callables_fname, 'r') as fobj:
fdict = json.load(fobj) # depends on [control=['with'], data=['fobj']]
if sys.hexversion < 50331648: # pragma: no cover
fdict = _unicode_to_ascii(fdict) # depends on [control=['if'], data=[]]
self._callables_db.update(fdict['_callables_db'])
# Reverse the tuple-to-string conversion that the save method
# does due to the fact that JSON keys need to be strings and the
# keys of the reverse callable dictionary are tuples where the first
# item is a file name and the second item is the starting line of the
# callable within that file (dictionary value)
rdict = {}
for (key, value) in fdict['_reverse_callables_db'].items():
tokens = key[1:-1].split(',')
key = tokens[0].strip()[1:-1]
if platform.system().lower() == 'windows': # pragma: no cover
while True:
tmp = key
key = key.replace('\\\\', '\\')
if tmp == key:
break # depends on [control=['if'], data=[]] # depends on [control=['while'], data=[]] # depends on [control=['if'], data=[]]
rdict[key, int(tokens[1])] = value # depends on [control=['for'], data=[]]
self._reverse_callables_db.update(rdict)
self._modules_dict.update(fdict['_modules_dict'])
self._fnames.update(fdict['_fnames'])
self._module_names.extend(fdict['_module_names'])
self._class_names.extend(fdict['_class_names'])
self._module_names = sorted(list(set(self._module_names)))
self._class_names = sorted(list(set(self._class_names))) |
def from_traceback(cls, tb):
    """Initializes a StackTrace from a python traceback instance"""
    stack_trace = cls(
        stack_trace_hash_id=generate_hash_id_from_traceback(tb)
    )
    # use the add_stack_frame so that json formatting is applied
    for tb_frame_info in traceback.extract_tb(tb):
        # Each entry unpacks as (filename, lineno, name, line); the
        # source text (last element) is not needed here
        filename, line_num, fn_name, _ = tb_frame_info
        stack_trace.add_stack_frame(
            StackFrame(
                func_name=fn_name,
                original_func_name=fn_name,
                file_name=filename,
                line_num=line_num,
                col_num=0,  # I don't think this is available in python
                load_module=filename,  # NOTE(review): file name used as module id -- confirm
                build_id=BUILD_ID,
                source_version=SOURCE_VERSION
            )
        )
return stack_trace | def function[from_traceback, parameter[cls, tb]]:
constant[Initializes a StackTrace from a python traceback instance]
variable[stack_trace] assign[=] call[name[cls], parameter[]]
for taget[name[tb_frame_info]] in starred[call[name[traceback].extract_tb, parameter[name[tb]]]] begin[:]
<ast.Tuple object at 0x7da204566410> assign[=] name[tb_frame_info]
call[name[stack_trace].add_stack_frame, parameter[call[name[StackFrame], parameter[]]]]
return[name[stack_trace]] | keyword[def] identifier[from_traceback] ( identifier[cls] , identifier[tb] ):
literal[string]
identifier[stack_trace] = identifier[cls] (
identifier[stack_trace_hash_id] = identifier[generate_hash_id_from_traceback] ( identifier[tb] )
)
keyword[for] identifier[tb_frame_info] keyword[in] identifier[traceback] . identifier[extract_tb] ( identifier[tb] ):
identifier[filename] , identifier[line_num] , identifier[fn_name] , identifier[_] = identifier[tb_frame_info]
identifier[stack_trace] . identifier[add_stack_frame] (
identifier[StackFrame] (
identifier[func_name] = identifier[fn_name] ,
identifier[original_func_name] = identifier[fn_name] ,
identifier[file_name] = identifier[filename] ,
identifier[line_num] = identifier[line_num] ,
identifier[col_num] = literal[int] ,
identifier[load_module] = identifier[filename] ,
identifier[build_id] = identifier[BUILD_ID] ,
identifier[source_version] = identifier[SOURCE_VERSION]
)
)
keyword[return] identifier[stack_trace] | def from_traceback(cls, tb):
"""Initializes a StackTrace from a python traceback instance"""
stack_trace = cls(stack_trace_hash_id=generate_hash_id_from_traceback(tb))
# use the add_stack_frame so that json formatting is applied
for tb_frame_info in traceback.extract_tb(tb):
(filename, line_num, fn_name, _) = tb_frame_info # I don't think this is available in python
stack_trace.add_stack_frame(StackFrame(func_name=fn_name, original_func_name=fn_name, file_name=filename, line_num=line_num, col_num=0, load_module=filename, build_id=BUILD_ID, source_version=SOURCE_VERSION)) # depends on [control=['for'], data=['tb_frame_info']]
return stack_trace |
def _get_or_create_stack(name):
    """Returns a thread local stack uniquified by the given name."""
    # _LOCAL_STACKS is presumably a threading.local(), giving each thread
    # its own per-name stack -- TODO confirm at the definition site
    stack = getattr(_LOCAL_STACKS, name, None)
    if stack is None:
        # First request for this name on the current thread:
        # create the stack and cache it as an attribute
        stack = []
        setattr(_LOCAL_STACKS, name, stack)
return stack | def function[_get_or_create_stack, parameter[name]]:
constant[Returns a thread local stack uniquified by the given name.]
variable[stack] assign[=] call[name[getattr], parameter[name[_LOCAL_STACKS], name[name], constant[None]]]
if compare[name[stack] is constant[None]] begin[:]
variable[stack] assign[=] list[[]]
call[name[setattr], parameter[name[_LOCAL_STACKS], name[name], name[stack]]]
return[name[stack]] | keyword[def] identifier[_get_or_create_stack] ( identifier[name] ):
literal[string]
identifier[stack] = identifier[getattr] ( identifier[_LOCAL_STACKS] , identifier[name] , keyword[None] )
keyword[if] identifier[stack] keyword[is] keyword[None] :
identifier[stack] =[]
identifier[setattr] ( identifier[_LOCAL_STACKS] , identifier[name] , identifier[stack] )
keyword[return] identifier[stack] | def _get_or_create_stack(name):
"""Returns a thread local stack uniquified by the given name."""
stack = getattr(_LOCAL_STACKS, name, None)
if stack is None:
stack = []
setattr(_LOCAL_STACKS, name, stack) # depends on [control=['if'], data=['stack']]
return stack |
def get_datanode_fp_meta(fp):
    """Processes a datanode style file path.
    Section 3.2 of the `Data Reference Syntax`_ details:
    It is recommended that ESGF data nodes should layout datasets
    on disk mapping DRS components to directories as:
    <activity>/<product>/<institute>/<model>/<experiment>/
    <frequency>/<modeling_realm>/<mip_table>/<ensemble_member>/
    <version_number>/<variable_name>/<CMOR filename>.nc
    Arguments:
        fp (str): A file path conforming to DRS spec.
    Returns:
        dict: Metadata as extracted from the file path.
    .. _Data Reference Syntax:
        http://cmip-pcmdi.llnl.gov/cmip5/docs/cmip5_data_reference_syntax.pdf
    """
    # Copy metadata list then reverse to start at end of path
    directory_meta = list(CMIP5_DATANODE_FP_ATTS)
    # Prefer meta extracted from filename
    # (the update() below lets filename-derived values override any
    # path-derived values with the same key)
    meta = get_dir_meta(fp, directory_meta)
    meta.update(get_cmor_fname_meta(fp))
return meta | def function[get_datanode_fp_meta, parameter[fp]]:
constant[Processes a datanode style file path.
Section 3.2 of the `Data Reference Syntax`_ details:
It is recommended that ESGF data nodes should layout datasets
on disk mapping DRS components to directories as:
<activity>/<product>/<institute>/<model>/<experiment>/
<frequency>/<modeling_realm>/<mip_table>/<ensemble_member>/
<version_number>/<variable_name>/<CMOR filename>.nc
Arguments:
fp (str): A file path conforming to DRS spec.
Returns:
dict: Metadata as extracted from the file path.
.. _Data Reference Syntax:
http://cmip-pcmdi.llnl.gov/cmip5/docs/cmip5_data_reference_syntax.pdf
]
variable[directory_meta] assign[=] call[name[list], parameter[name[CMIP5_DATANODE_FP_ATTS]]]
variable[meta] assign[=] call[name[get_dir_meta], parameter[name[fp], name[directory_meta]]]
call[name[meta].update, parameter[call[name[get_cmor_fname_meta], parameter[name[fp]]]]]
return[name[meta]] | keyword[def] identifier[get_datanode_fp_meta] ( identifier[fp] ):
literal[string]
identifier[directory_meta] = identifier[list] ( identifier[CMIP5_DATANODE_FP_ATTS] )
identifier[meta] = identifier[get_dir_meta] ( identifier[fp] , identifier[directory_meta] )
identifier[meta] . identifier[update] ( identifier[get_cmor_fname_meta] ( identifier[fp] ))
keyword[return] identifier[meta] | def get_datanode_fp_meta(fp):
"""Processes a datanode style file path.
Section 3.2 of the `Data Reference Syntax`_ details:
It is recommended that ESGF data nodes should layout datasets
on disk mapping DRS components to directories as:
<activity>/<product>/<institute>/<model>/<experiment>/
<frequency>/<modeling_realm>/<mip_table>/<ensemble_member>/
<version_number>/<variable_name>/<CMOR filename>.nc
Arguments:
fp (str): A file path conforming to DRS spec.
Returns:
dict: Metadata as extracted from the file path.
.. _Data Reference Syntax:
http://cmip-pcmdi.llnl.gov/cmip5/docs/cmip5_data_reference_syntax.pdf
"""
# Copy metadata list then reverse to start at end of path
directory_meta = list(CMIP5_DATANODE_FP_ATTS)
# Prefer meta extracted from filename
meta = get_dir_meta(fp, directory_meta)
meta.update(get_cmor_fname_meta(fp))
return meta |
async def add_claims(self, payload, user, *args, **kwargs):
    """
    Injects standard claims into the payload for: exp, iss, iat, nbf, aud.
    And, custom claims, if they exist
    """
    # Expiration is "now" plus the configured lifetime in seconds
    delta = timedelta(seconds=self.config.expiration_delta())
    exp = datetime.utcnow() + delta
    additional = {"exp": exp}
    # The optional registered claims are emitted only when the matching
    # "claim_<name>" config entry exists and is truthy; each value is
    # produced by the corresponding utils.build_claim_<name> helper
    for option in ["iss", "iat", "nbf", "aud"]:
        setting = "claim_{}".format(option.lower())
        if setting in self.config:  # noqa
            attr = self.config.get(setting)
            if attr:
                self.claims.append(option)
                method_name = "build_claim_{}".format(option)
                method = getattr(utils, method_name)
                additional.update({option: method(attr, self.config)})
    payload.update(additional)
    if self._custom_claims:
        custom_claims = {}
        # Custom claim setup hooks are resolved through utils.call, which
        # presumably handles sync and async hooks uniformly -- confirm.
        # Hooks see the payload with the standard claims already merged.
        for claim in self._custom_claims:
            custom_claims[claim.get_key()] = await utils.call(
                claim.setup, payload, user
            )
        payload.update(custom_claims)
return payload | <ast.AsyncFunctionDef object at 0x7da1b08ad330> | keyword[async] keyword[def] identifier[add_claims] ( identifier[self] , identifier[payload] , identifier[user] ,* identifier[args] ,** identifier[kwargs] ):
literal[string]
identifier[delta] = identifier[timedelta] ( identifier[seconds] = identifier[self] . identifier[config] . identifier[expiration_delta] ())
identifier[exp] = identifier[datetime] . identifier[utcnow] ()+ identifier[delta]
identifier[additional] ={ literal[string] : identifier[exp] }
keyword[for] identifier[option] keyword[in] [ literal[string] , literal[string] , literal[string] , literal[string] ]:
identifier[setting] = literal[string] . identifier[format] ( identifier[option] . identifier[lower] ())
keyword[if] identifier[setting] keyword[in] identifier[self] . identifier[config] :
identifier[attr] = identifier[self] . identifier[config] . identifier[get] ( identifier[setting] )
keyword[if] identifier[attr] :
identifier[self] . identifier[claims] . identifier[append] ( identifier[option] )
identifier[method_name] = literal[string] . identifier[format] ( identifier[option] )
identifier[method] = identifier[getattr] ( identifier[utils] , identifier[method_name] )
identifier[additional] . identifier[update] ({ identifier[option] : identifier[method] ( identifier[attr] , identifier[self] . identifier[config] )})
identifier[payload] . identifier[update] ( identifier[additional] )
keyword[if] identifier[self] . identifier[_custom_claims] :
identifier[custom_claims] ={}
keyword[for] identifier[claim] keyword[in] identifier[self] . identifier[_custom_claims] :
identifier[custom_claims] [ identifier[claim] . identifier[get_key] ()]= keyword[await] identifier[utils] . identifier[call] (
identifier[claim] . identifier[setup] , identifier[payload] , identifier[user]
)
identifier[payload] . identifier[update] ( identifier[custom_claims] )
keyword[return] identifier[payload] | async def add_claims(self, payload, user, *args, **kwargs):
"""
Injects standard claims into the payload for: exp, iss, iat, nbf, aud.
And, custom claims, if they exist
"""
delta = timedelta(seconds=self.config.expiration_delta())
exp = datetime.utcnow() + delta
additional = {'exp': exp}
for option in ['iss', 'iat', 'nbf', 'aud']:
setting = 'claim_{}'.format(option.lower())
if setting in self.config: # noqa
attr = self.config.get(setting)
if attr:
self.claims.append(option)
method_name = 'build_claim_{}'.format(option)
method = getattr(utils, method_name)
additional.update({option: method(attr, self.config)}) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['setting']] # depends on [control=['for'], data=['option']]
payload.update(additional)
if self._custom_claims:
custom_claims = {}
for claim in self._custom_claims:
custom_claims[claim.get_key()] = await utils.call(claim.setup, payload, user) # depends on [control=['for'], data=['claim']]
payload.update(custom_claims) # depends on [control=['if'], data=[]]
return payload |
def find_first_existing_executable(exe_list):
    """
    Accepts list of [('executable_file_path', 'options')],
    Returns first working executable_file_path
    (falls through and implicitly returns None when no candidate
    can be launched)
    """
    for filepath, opts in exe_list:
        try:
            # Launching the candidate is the existence/executability
            # test; its output is captured and discarded
            proc = subprocess.Popen([filepath, opts],
                                    stdout=subprocess.PIPE,
                                    stderr=subprocess.PIPE)
            proc.communicate()
        except OSError:
            # Not found / not executable: try the next candidate
            pass
        else:
return filepath | def function[find_first_existing_executable, parameter[exe_list]]:
constant[
Accepts list of [('executable_file_path', 'options')],
Returns first working executable_file_path
]
for taget[tuple[[<ast.Name object at 0x7da1b003e110>, <ast.Name object at 0x7da1b003e140>]]] in starred[name[exe_list]] begin[:]
<ast.Try object at 0x7da1b003e3b0> | keyword[def] identifier[find_first_existing_executable] ( identifier[exe_list] ):
literal[string]
keyword[for] identifier[filepath] , identifier[opts] keyword[in] identifier[exe_list] :
keyword[try] :
identifier[proc] = identifier[subprocess] . identifier[Popen] ([ identifier[filepath] , identifier[opts] ],
identifier[stdout] = identifier[subprocess] . identifier[PIPE] ,
identifier[stderr] = identifier[subprocess] . identifier[PIPE] )
identifier[proc] . identifier[communicate] ()
keyword[except] identifier[OSError] :
keyword[pass]
keyword[else] :
keyword[return] identifier[filepath] | def find_first_existing_executable(exe_list):
"""
Accepts list of [('executable_file_path', 'options')],
Returns first working executable_file_path
"""
for (filepath, opts) in exe_list:
try:
proc = subprocess.Popen([filepath, opts], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
proc.communicate() # depends on [control=['try'], data=[]]
except OSError:
pass # depends on [control=['except'], data=[]]
else:
return filepath # depends on [control=['for'], data=[]] |
def get_evolution_stone(self, slug):
    """
    Returns a Evolution Stone object containing the details about the
    evolution stone.
    """
    # Request /evolution-stone/<slug> relative to the API base URL
    endpoint = '/evolution-stone/' + slug
return self.make_request(self.BASE_URL + endpoint) | def function[get_evolution_stone, parameter[self, slug]]:
constant[
Returns a Evolution Stone object containing the details about the
evolution stone.
]
variable[endpoint] assign[=] binary_operation[constant[/evolution-stone/] + name[slug]]
return[call[name[self].make_request, parameter[binary_operation[name[self].BASE_URL + name[endpoint]]]]] | keyword[def] identifier[get_evolution_stone] ( identifier[self] , identifier[slug] ):
literal[string]
identifier[endpoint] = literal[string] + identifier[slug]
keyword[return] identifier[self] . identifier[make_request] ( identifier[self] . identifier[BASE_URL] + identifier[endpoint] ) | def get_evolution_stone(self, slug):
"""
Returns a Evolution Stone object containing the details about the
evolution stone.
"""
endpoint = '/evolution-stone/' + slug
return self.make_request(self.BASE_URL + endpoint) |
def register_classes(self, classes):
    """
    Register classes as plugins that are not subclassed from
    IPlugin.
    `classes` may be a single object or an iterable.
    """
    # Normalize the single-object case into a list
    classes = util.return_list(classes)
    # Register each class as a virtual subclass of the IPlugin ABC
    for klass in classes:
IPlugin.register(klass) | def function[register_classes, parameter[self, classes]]:
constant[
Register classes as plugins that are not subclassed from
IPlugin.
`classes` may be a single object or an iterable.
]
variable[classes] assign[=] call[name[util].return_list, parameter[name[classes]]]
for taget[name[klass]] in starred[name[classes]] begin[:]
call[name[IPlugin].register, parameter[name[klass]]] | keyword[def] identifier[register_classes] ( identifier[self] , identifier[classes] ):
literal[string]
identifier[classes] = identifier[util] . identifier[return_list] ( identifier[classes] )
keyword[for] identifier[klass] keyword[in] identifier[classes] :
identifier[IPlugin] . identifier[register] ( identifier[klass] ) | def register_classes(self, classes):
"""
Register classes as plugins that are not subclassed from
IPlugin.
`classes` may be a single object or an iterable.
"""
classes = util.return_list(classes)
for klass in classes:
IPlugin.register(klass) # depends on [control=['for'], data=['klass']] |
def UTF8Strsub(utf, start, len):
    """Create a substring from a given UTF-8 string Note:
    positions are given in units of UTF-8 chars """
    # Thin wrapper over libxml2's xmlUTF8Strsub; `len` shadows the
    # builtin but the name is kept for keyword-caller compatibility
    ret = libxml2mod.xmlUTF8Strsub(utf, start, len)
return ret | def function[UTF8Strsub, parameter[utf, start, len]]:
constant[Create a substring from a given UTF-8 string Note:
positions are given in units of UTF-8 chars ]
variable[ret] assign[=] call[name[libxml2mod].xmlUTF8Strsub, parameter[name[utf], name[start], name[len]]]
return[name[ret]] | keyword[def] identifier[UTF8Strsub] ( identifier[utf] , identifier[start] , identifier[len] ):
literal[string]
identifier[ret] = identifier[libxml2mod] . identifier[xmlUTF8Strsub] ( identifier[utf] , identifier[start] , identifier[len] )
keyword[return] identifier[ret] | def UTF8Strsub(utf, start, len):
"""Create a substring from a given UTF-8 string Note:
positions are given in units of UTF-8 chars """
ret = libxml2mod.xmlUTF8Strsub(utf, start, len)
return ret |
def plot_cov_ellipse(cov, pos, nstd=2, **kwargs):
    """
    Plots an `nstd` sigma error ellipse based on the specified covariance
    matrix (`cov`). Additional keyword arguments are passed on to the
    ellipse patch artist.
    Parameters
    ----------
        cov : The 2x2 covariance matrix to base the ellipse on
        pos : The location of the center of the ellipse. Expects a 2-element
            sequence of [x0, y0].
        nstd : The radius of the ellipse in numbers of standard deviations.
            Defaults to 2 standard deviations.
    Additional keyword arguments are pass on to the ellipse patch.
    Returns
    -------
        A matplotlib ellipse artist
    """
    def eigsorted(cov):
        # eigh is appropriate for a symmetric (covariance) matrix;
        # reorder the eigenpairs into descending eigenvalue order
        vals, vecs = np.linalg.eigh(cov)
        order = vals.argsort()[::-1]
        return vals[order], vecs[:, order]
    vals, vecs = eigsorted(cov)
    # Orientation of the principal (largest-variance) eigenvector:
    # arctan2(y, x) of its components, converted to degrees
    theta = np.degrees(np.arctan2(*vecs[:, 0][::-1]))
    # Width and height are "full" widths, not radius
    width, height = 2 * nstd * np.sqrt(vals)
    ellip = Ellipse(xy=pos, width=width, height=height, angle=theta, fill=False, **kwargs)
return ellip | def function[plot_cov_ellipse, parameter[cov, pos, nstd]]:
constant[
Plots an `nstd` sigma error ellipse based on the specified covariance
matrix (`cov`). Additional keyword arguments are passed on to the
ellipse patch artist.
Parameters
----------
cov : The 2x2 covariance matrix to base the ellipse on
pos : The location of the center of the ellipse. Expects a 2-element
sequence of [x0, y0].
nstd : The radius of the ellipse in numbers of standard deviations.
Defaults to 2 standard deviations.
Additional keyword arguments are pass on to the ellipse patch.
Returns
-------
A matplotlib ellipse artist
]
def function[eigsorted, parameter[cov]]:
<ast.Tuple object at 0x7da20c9914b0> assign[=] call[name[np].linalg.eigh, parameter[name[cov]]]
variable[order] assign[=] call[call[name[vals].argsort, parameter[]]][<ast.Slice object at 0x7da20c9915d0>]
return[tuple[[<ast.Subscript object at 0x7da20c9936d0>, <ast.Subscript object at 0x7da20c992710>]]]
<ast.Tuple object at 0x7da20c9904f0> assign[=] call[name[eigsorted], parameter[name[cov]]]
variable[theta] assign[=] call[name[np].degrees, parameter[call[name[np].arctan2, parameter[<ast.Starred object at 0x7da1b13a97e0>]]]]
<ast.Tuple object at 0x7da1b13a8430> assign[=] binary_operation[binary_operation[constant[2] * name[nstd]] * call[name[np].sqrt, parameter[name[vals]]]]
variable[ellip] assign[=] call[name[Ellipse], parameter[]]
return[name[ellip]] | keyword[def] identifier[plot_cov_ellipse] ( identifier[cov] , identifier[pos] , identifier[nstd] = literal[int] ,** identifier[kwargs] ):
literal[string]
keyword[def] identifier[eigsorted] ( identifier[cov] ):
identifier[vals] , identifier[vecs] = identifier[np] . identifier[linalg] . identifier[eigh] ( identifier[cov] )
identifier[order] = identifier[vals] . identifier[argsort] ()[::- literal[int] ]
keyword[return] identifier[vals] [ identifier[order] ], identifier[vecs] [:, identifier[order] ]
identifier[vals] , identifier[vecs] = identifier[eigsorted] ( identifier[cov] )
identifier[theta] = identifier[np] . identifier[degrees] ( identifier[np] . identifier[arctan2] (* identifier[vecs] [:, literal[int] ][::- literal[int] ]))
identifier[width] , identifier[height] = literal[int] * identifier[nstd] * identifier[np] . identifier[sqrt] ( identifier[vals] )
identifier[ellip] = identifier[Ellipse] ( identifier[xy] = identifier[pos] , identifier[width] = identifier[width] , identifier[height] = identifier[height] , identifier[angle] = identifier[theta] , identifier[fill] = keyword[False] ,** identifier[kwargs] )
keyword[return] identifier[ellip] | def plot_cov_ellipse(cov, pos, nstd=2, **kwargs):
"""
Plots an `nstd` sigma error ellipse based on the specified covariance
matrix (`cov`). Additional keyword arguments are passed on to the
ellipse patch artist.
Parameters
----------
cov : The 2x2 covariance matrix to base the ellipse on
pos : The location of the center of the ellipse. Expects a 2-element
sequence of [x0, y0].
nstd : The radius of the ellipse in numbers of standard deviations.
Defaults to 2 standard deviations.
Additional keyword arguments are pass on to the ellipse patch.
Returns
-------
A matplotlib ellipse artist
"""
def eigsorted(cov):
(vals, vecs) = np.linalg.eigh(cov)
order = vals.argsort()[::-1]
return (vals[order], vecs[:, order])
(vals, vecs) = eigsorted(cov)
theta = np.degrees(np.arctan2(*vecs[:, 0][::-1]))
# Width and height are "full" widths, not radius
(width, height) = 2 * nstd * np.sqrt(vals)
ellip = Ellipse(xy=pos, width=width, height=height, angle=theta, fill=False, **kwargs)
return ellip |
def _update_menus(self,change):
    """ When pages change, update the menus"""
    menus = {}
    #: Get all links
    links = [p.link for p in self.pages if p.link] + self.links
    #: Put all links in the correct menu
    for link in links:
        for menu in link.menus:
            if menu not in menus:
                menus[menu] = []
            menus[menu].append(link)
    #: Update the menus
    # Only "<name>_menu" attributes that already exist on self are
    # assigned; unknown menu names are silently ignored
    for name,menu in menus.items():
        k = '{}_menu'.format(name)
        if hasattr(self,k):
setattr(self,k,menu) | def function[_update_menus, parameter[self, change]]:
constant[ When pages change, update the menus]
variable[menus] assign[=] dictionary[[], []]
variable[links] assign[=] binary_operation[<ast.ListComp object at 0x7da1b0351b70> + name[self].links]
for taget[name[link]] in starred[name[links]] begin[:]
for taget[name[menu]] in starred[name[link].menus] begin[:]
if compare[name[menu] <ast.NotIn object at 0x7da2590d7190> name[menus]] begin[:]
call[name[menus]][name[menu]] assign[=] list[[]]
call[call[name[menus]][name[menu]].append, parameter[name[link]]]
for taget[tuple[[<ast.Name object at 0x7da1b0351d50>, <ast.Name object at 0x7da1b03516c0>]]] in starred[call[name[menus].items, parameter[]]] begin[:]
variable[k] assign[=] call[constant[{}_menu].format, parameter[name[name]]]
if call[name[hasattr], parameter[name[self], name[k]]] begin[:]
call[name[setattr], parameter[name[self], name[k], name[menu]]] | keyword[def] identifier[_update_menus] ( identifier[self] , identifier[change] ):
literal[string]
identifier[menus] ={}
identifier[links] =[ identifier[p] . identifier[link] keyword[for] identifier[p] keyword[in] identifier[self] . identifier[pages] keyword[if] identifier[p] . identifier[link] ]+ identifier[self] . identifier[links]
keyword[for] identifier[link] keyword[in] identifier[links] :
keyword[for] identifier[menu] keyword[in] identifier[link] . identifier[menus] :
keyword[if] identifier[menu] keyword[not] keyword[in] identifier[menus] :
identifier[menus] [ identifier[menu] ]=[]
identifier[menus] [ identifier[menu] ]. identifier[append] ( identifier[link] )
keyword[for] identifier[name] , identifier[menu] keyword[in] identifier[menus] . identifier[items] ():
identifier[k] = literal[string] . identifier[format] ( identifier[name] )
keyword[if] identifier[hasattr] ( identifier[self] , identifier[k] ):
identifier[setattr] ( identifier[self] , identifier[k] , identifier[menu] ) | def _update_menus(self, change):
""" When pages change, update the menus"""
menus = {}
#: Get all links
links = [p.link for p in self.pages if p.link] + self.links
#: Put all links in the correct menu
for link in links:
for menu in link.menus:
if menu not in menus:
menus[menu] = [] # depends on [control=['if'], data=['menu', 'menus']]
menus[menu].append(link) # depends on [control=['for'], data=['menu']] # depends on [control=['for'], data=['link']]
#: Update the menus
for (name, menu) in menus.items():
k = '{}_menu'.format(name)
if hasattr(self, k):
setattr(self, k, menu) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]] |
def downloadThumbnail(self, outPath):
"""downloads the items's thumbnail"""
url = self._url + "/info/thumbnail"
params = {
}
return self._get(url=url,
out_folder=outPath,
file_name=None,
param_dict=params,
securityHandler=self._securityHandler,
proxy_url=self._proxy_url,
proxy_port=self._proxy_port) | def function[downloadThumbnail, parameter[self, outPath]]:
constant[downloads the items's thumbnail]
variable[url] assign[=] binary_operation[name[self]._url + constant[/info/thumbnail]]
variable[params] assign[=] dictionary[[], []]
return[call[name[self]._get, parameter[]]] | keyword[def] identifier[downloadThumbnail] ( identifier[self] , identifier[outPath] ):
literal[string]
identifier[url] = identifier[self] . identifier[_url] + literal[string]
identifier[params] ={
}
keyword[return] identifier[self] . identifier[_get] ( identifier[url] = identifier[url] ,
identifier[out_folder] = identifier[outPath] ,
identifier[file_name] = keyword[None] ,
identifier[param_dict] = identifier[params] ,
identifier[securityHandler] = identifier[self] . identifier[_securityHandler] ,
identifier[proxy_url] = identifier[self] . identifier[_proxy_url] ,
identifier[proxy_port] = identifier[self] . identifier[_proxy_port] ) | def downloadThumbnail(self, outPath):
"""downloads the items's thumbnail"""
url = self._url + '/info/thumbnail'
params = {}
return self._get(url=url, out_folder=outPath, file_name=None, param_dict=params, securityHandler=self._securityHandler, proxy_url=self._proxy_url, proxy_port=self._proxy_port) |
def fatal( callingClass, astr_key, astr_extraMsg="" ):
'''
Convenience dispatcher to the error_exit() method.
Will raise "fatal" error, i.e. terminate script.
'''
b_exitToOS = True
report( callingClass, astr_key, b_exitToOS, astr_extraMsg ) | def function[fatal, parameter[callingClass, astr_key, astr_extraMsg]]:
constant[
Convenience dispatcher to the error_exit() method.
Will raise "fatal" error, i.e. terminate script.
]
variable[b_exitToOS] assign[=] constant[True]
call[name[report], parameter[name[callingClass], name[astr_key], name[b_exitToOS], name[astr_extraMsg]]] | keyword[def] identifier[fatal] ( identifier[callingClass] , identifier[astr_key] , identifier[astr_extraMsg] = literal[string] ):
literal[string]
identifier[b_exitToOS] = keyword[True]
identifier[report] ( identifier[callingClass] , identifier[astr_key] , identifier[b_exitToOS] , identifier[astr_extraMsg] ) | def fatal(callingClass, astr_key, astr_extraMsg=''):
"""
Convenience dispatcher to the error_exit() method.
Will raise "fatal" error, i.e. terminate script.
"""
b_exitToOS = True
report(callingClass, astr_key, b_exitToOS, astr_extraMsg) |
def activate_user(activation_key):
"""
Validate an activation key and activate the corresponding
``User`` if valid.
If the key is valid and has not expired, return the ``User``
after activating.
If the key is not valid or has expired, return ``False``.
If the key is valid but the ``User`` is already active,
return ``False``.
To prevent reactivation of an account which has been
deactivated by site administrators, the activation key is
reset to the string constant ``RegistrationProfile.ACTIVATED``
after successful activation.
"""
# Make sure the key we're trying conforms to the pattern of a
# SHA1 hash; if it doesn't, no point trying to look it up in
# the database.
if SHA1_RE.search(activation_key):
try:
profile = RegistrationProfile.objects.get(
activation_key=activation_key)
except RegistrationProfile.DoesNotExist:
return False
if not profile.activation_key_expired():
user = profile.user
user.is_active = True
user.save()
profile.activation_key = RegistrationProfile.ACTIVATED
profile.save()
return user
return False | def function[activate_user, parameter[activation_key]]:
constant[
Validate an activation key and activate the corresponding
``User`` if valid.
If the key is valid and has not expired, return the ``User``
after activating.
If the key is not valid or has expired, return ``False``.
If the key is valid but the ``User`` is already active,
return ``False``.
To prevent reactivation of an account which has been
deactivated by site administrators, the activation key is
reset to the string constant ``RegistrationProfile.ACTIVATED``
after successful activation.
]
if call[name[SHA1_RE].search, parameter[name[activation_key]]] begin[:]
<ast.Try object at 0x7da18f00c2e0>
if <ast.UnaryOp object at 0x7da18f00f2e0> begin[:]
variable[user] assign[=] name[profile].user
name[user].is_active assign[=] constant[True]
call[name[user].save, parameter[]]
name[profile].activation_key assign[=] name[RegistrationProfile].ACTIVATED
call[name[profile].save, parameter[]]
return[name[user]]
return[constant[False]] | keyword[def] identifier[activate_user] ( identifier[activation_key] ):
literal[string]
keyword[if] identifier[SHA1_RE] . identifier[search] ( identifier[activation_key] ):
keyword[try] :
identifier[profile] = identifier[RegistrationProfile] . identifier[objects] . identifier[get] (
identifier[activation_key] = identifier[activation_key] )
keyword[except] identifier[RegistrationProfile] . identifier[DoesNotExist] :
keyword[return] keyword[False]
keyword[if] keyword[not] identifier[profile] . identifier[activation_key_expired] ():
identifier[user] = identifier[profile] . identifier[user]
identifier[user] . identifier[is_active] = keyword[True]
identifier[user] . identifier[save] ()
identifier[profile] . identifier[activation_key] = identifier[RegistrationProfile] . identifier[ACTIVATED]
identifier[profile] . identifier[save] ()
keyword[return] identifier[user]
keyword[return] keyword[False] | def activate_user(activation_key):
"""
Validate an activation key and activate the corresponding
``User`` if valid.
If the key is valid and has not expired, return the ``User``
after activating.
If the key is not valid or has expired, return ``False``.
If the key is valid but the ``User`` is already active,
return ``False``.
To prevent reactivation of an account which has been
deactivated by site administrators, the activation key is
reset to the string constant ``RegistrationProfile.ACTIVATED``
after successful activation.
"""
# Make sure the key we're trying conforms to the pattern of a
# SHA1 hash; if it doesn't, no point trying to look it up in
# the database.
if SHA1_RE.search(activation_key):
try:
profile = RegistrationProfile.objects.get(activation_key=activation_key) # depends on [control=['try'], data=[]]
except RegistrationProfile.DoesNotExist:
return False # depends on [control=['except'], data=[]]
if not profile.activation_key_expired():
user = profile.user
user.is_active = True
user.save()
profile.activation_key = RegistrationProfile.ACTIVATED
profile.save()
return user # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
return False |
def confirm_phone_number(self, sms_code):
"""Confirm phone number with the recieved SMS code
:param sms_code: sms code
:type sms_code: :class:`str`
:return: success (returns ``False`` on request fail/timeout)
:rtype: :class:`bool`
"""
sess = self._get_web_session()
try:
resp = sess.post('https://steamcommunity.com/steamguard/phoneajax',
data={
'op': 'check_sms_code',
'arg': sms_code,
'checkfortos': 1,
'skipvoip': 1,
'sessionid': sess.cookies.get('sessionid', domain='steamcommunity.com'),
},
timeout=15).json()
except:
return False
return (resp or {}).get('success', False) | def function[confirm_phone_number, parameter[self, sms_code]]:
constant[Confirm phone number with the recieved SMS code
:param sms_code: sms code
:type sms_code: :class:`str`
:return: success (returns ``False`` on request fail/timeout)
:rtype: :class:`bool`
]
variable[sess] assign[=] call[name[self]._get_web_session, parameter[]]
<ast.Try object at 0x7da1b2362a70>
return[call[<ast.BoolOp object at 0x7da1b2362350>.get, parameter[constant[success], constant[False]]]] | keyword[def] identifier[confirm_phone_number] ( identifier[self] , identifier[sms_code] ):
literal[string]
identifier[sess] = identifier[self] . identifier[_get_web_session] ()
keyword[try] :
identifier[resp] = identifier[sess] . identifier[post] ( literal[string] ,
identifier[data] ={
literal[string] : literal[string] ,
literal[string] : identifier[sms_code] ,
literal[string] : literal[int] ,
literal[string] : literal[int] ,
literal[string] : identifier[sess] . identifier[cookies] . identifier[get] ( literal[string] , identifier[domain] = literal[string] ),
},
identifier[timeout] = literal[int] ). identifier[json] ()
keyword[except] :
keyword[return] keyword[False]
keyword[return] ( identifier[resp] keyword[or] {}). identifier[get] ( literal[string] , keyword[False] ) | def confirm_phone_number(self, sms_code):
"""Confirm phone number with the recieved SMS code
:param sms_code: sms code
:type sms_code: :class:`str`
:return: success (returns ``False`` on request fail/timeout)
:rtype: :class:`bool`
"""
sess = self._get_web_session()
try:
resp = sess.post('https://steamcommunity.com/steamguard/phoneajax', data={'op': 'check_sms_code', 'arg': sms_code, 'checkfortos': 1, 'skipvoip': 1, 'sessionid': sess.cookies.get('sessionid', domain='steamcommunity.com')}, timeout=15).json() # depends on [control=['try'], data=[]]
except:
return False # depends on [control=['except'], data=[]]
return (resp or {}).get('success', False) |
def create(cls, name, frame_type='eth2', value1=None, comment=None):
"""
Create an ethernet service
:param str name: name of service
:param str frame_type: ethernet frame type, eth2
:param str value1: hex code representing ethertype field
:param str comment: optional comment
:raises CreateElementFailed: failure creating element with reason
:return: instance with meta
:rtype: EthernetService
"""
json = {'frame_type': frame_type,
'name': name,
'value1': int(value1, 16),
'comment': comment}
return ElementCreator(cls, json) | def function[create, parameter[cls, name, frame_type, value1, comment]]:
constant[
Create an ethernet service
:param str name: name of service
:param str frame_type: ethernet frame type, eth2
:param str value1: hex code representing ethertype field
:param str comment: optional comment
:raises CreateElementFailed: failure creating element with reason
:return: instance with meta
:rtype: EthernetService
]
variable[json] assign[=] dictionary[[<ast.Constant object at 0x7da1b1bc0f40>, <ast.Constant object at 0x7da1b1bc27a0>, <ast.Constant object at 0x7da1b1bc2bc0>, <ast.Constant object at 0x7da1b1bc0c40>], [<ast.Name object at 0x7da1b1bc3880>, <ast.Name object at 0x7da1b1bc3310>, <ast.Call object at 0x7da1b1bc1240>, <ast.Name object at 0x7da1b1bc2d70>]]
return[call[name[ElementCreator], parameter[name[cls], name[json]]]] | keyword[def] identifier[create] ( identifier[cls] , identifier[name] , identifier[frame_type] = literal[string] , identifier[value1] = keyword[None] , identifier[comment] = keyword[None] ):
literal[string]
identifier[json] ={ literal[string] : identifier[frame_type] ,
literal[string] : identifier[name] ,
literal[string] : identifier[int] ( identifier[value1] , literal[int] ),
literal[string] : identifier[comment] }
keyword[return] identifier[ElementCreator] ( identifier[cls] , identifier[json] ) | def create(cls, name, frame_type='eth2', value1=None, comment=None):
"""
Create an ethernet service
:param str name: name of service
:param str frame_type: ethernet frame type, eth2
:param str value1: hex code representing ethertype field
:param str comment: optional comment
:raises CreateElementFailed: failure creating element with reason
:return: instance with meta
:rtype: EthernetService
"""
json = {'frame_type': frame_type, 'name': name, 'value1': int(value1, 16), 'comment': comment}
return ElementCreator(cls, json) |
def get_documentation(self, element, namespace=None, schema_str=None):
"""**Helper method:** should return an schema specific documentation
given an element parsing or getting the `Clark's Notation`_
`{url:schema}Element` from the message error on validate method.
:param str element: Element string following the Clark's Notation
:param dict namespace: Element string following the Clark's Notation
:returns: The documentation text if exists
:rtype: unicode
.. _`Clark's Notation`: http://effbot.org/zone/element-namespaces.htm
"""
if namespace is None:
namespace = {'xs': 'http://www.w3.org/2001/XMLSchema'}
schema_root = etree.parse(StringIO(self.schema))
document = schema_root.xpath(self.get_element_from_clark(element),
namespaces=namespace)
return document and document[0].text or '' | def function[get_documentation, parameter[self, element, namespace, schema_str]]:
constant[**Helper method:** should return an schema specific documentation
given an element parsing or getting the `Clark's Notation`_
`{url:schema}Element` from the message error on validate method.
:param str element: Element string following the Clark's Notation
:param dict namespace: Element string following the Clark's Notation
:returns: The documentation text if exists
:rtype: unicode
.. _`Clark's Notation`: http://effbot.org/zone/element-namespaces.htm
]
if compare[name[namespace] is constant[None]] begin[:]
variable[namespace] assign[=] dictionary[[<ast.Constant object at 0x7da18bcc8f70>], [<ast.Constant object at 0x7da18bcc9660>]]
variable[schema_root] assign[=] call[name[etree].parse, parameter[call[name[StringIO], parameter[name[self].schema]]]]
variable[document] assign[=] call[name[schema_root].xpath, parameter[call[name[self].get_element_from_clark, parameter[name[element]]]]]
return[<ast.BoolOp object at 0x7da18bcc8970>] | keyword[def] identifier[get_documentation] ( identifier[self] , identifier[element] , identifier[namespace] = keyword[None] , identifier[schema_str] = keyword[None] ):
literal[string]
keyword[if] identifier[namespace] keyword[is] keyword[None] :
identifier[namespace] ={ literal[string] : literal[string] }
identifier[schema_root] = identifier[etree] . identifier[parse] ( identifier[StringIO] ( identifier[self] . identifier[schema] ))
identifier[document] = identifier[schema_root] . identifier[xpath] ( identifier[self] . identifier[get_element_from_clark] ( identifier[element] ),
identifier[namespaces] = identifier[namespace] )
keyword[return] identifier[document] keyword[and] identifier[document] [ literal[int] ]. identifier[text] keyword[or] literal[string] | def get_documentation(self, element, namespace=None, schema_str=None):
"""**Helper method:** should return an schema specific documentation
given an element parsing or getting the `Clark's Notation`_
`{url:schema}Element` from the message error on validate method.
:param str element: Element string following the Clark's Notation
:param dict namespace: Element string following the Clark's Notation
:returns: The documentation text if exists
:rtype: unicode
.. _`Clark's Notation`: http://effbot.org/zone/element-namespaces.htm
"""
if namespace is None:
namespace = {'xs': 'http://www.w3.org/2001/XMLSchema'} # depends on [control=['if'], data=['namespace']]
schema_root = etree.parse(StringIO(self.schema))
document = schema_root.xpath(self.get_element_from_clark(element), namespaces=namespace)
return document and document[0].text or '' |
def get_site_by_id(self, id):
"""
Looks up a site by ID and returns a TSquareSite representing that
object, or throws an exception if no such site is found.
@param id - The entityID of the site to look up
@returns A TSquareSite object
"""
response = self._session.get(BASE_URL_TSQUARE + '/site/{}.json'.format(id))
response.raise_for_status()
site_data = response.json()
return TSquareSite(**site_data) | def function[get_site_by_id, parameter[self, id]]:
constant[
Looks up a site by ID and returns a TSquareSite representing that
object, or throws an exception if no such site is found.
@param id - The entityID of the site to look up
@returns A TSquareSite object
]
variable[response] assign[=] call[name[self]._session.get, parameter[binary_operation[name[BASE_URL_TSQUARE] + call[constant[/site/{}.json].format, parameter[name[id]]]]]]
call[name[response].raise_for_status, parameter[]]
variable[site_data] assign[=] call[name[response].json, parameter[]]
return[call[name[TSquareSite], parameter[]]] | keyword[def] identifier[get_site_by_id] ( identifier[self] , identifier[id] ):
literal[string]
identifier[response] = identifier[self] . identifier[_session] . identifier[get] ( identifier[BASE_URL_TSQUARE] + literal[string] . identifier[format] ( identifier[id] ))
identifier[response] . identifier[raise_for_status] ()
identifier[site_data] = identifier[response] . identifier[json] ()
keyword[return] identifier[TSquareSite] (** identifier[site_data] ) | def get_site_by_id(self, id):
"""
Looks up a site by ID and returns a TSquareSite representing that
object, or throws an exception if no such site is found.
@param id - The entityID of the site to look up
@returns A TSquareSite object
"""
response = self._session.get(BASE_URL_TSQUARE + '/site/{}.json'.format(id))
response.raise_for_status()
site_data = response.json()
return TSquareSite(**site_data) |
def _find_supported_challenge(authzr, responders):
"""
Find a challenge combination that consists of a single challenge that the
responder can satisfy.
:param ~acme.messages.AuthorizationResource auth: The authorization to
examine.
:type responder: List[`~txacme.interfaces.IResponder`]
:param responder: The possible responders to use.
:raises NoSupportedChallenges: When a suitable challenge combination is not
found.
:rtype: Tuple[`~txacme.interfaces.IResponder`,
`~acme.messages.ChallengeBody`]
:return: The responder and challenge that were found.
"""
matches = [
(responder, challbs[0])
for challbs in authzr.body.resolved_combinations
for responder in responders
if [challb.typ for challb in challbs] == [responder.challenge_type]]
if len(matches) == 0:
raise NoSupportedChallenges(authzr)
else:
return matches[0] | def function[_find_supported_challenge, parameter[authzr, responders]]:
constant[
Find a challenge combination that consists of a single challenge that the
responder can satisfy.
:param ~acme.messages.AuthorizationResource auth: The authorization to
examine.
:type responder: List[`~txacme.interfaces.IResponder`]
:param responder: The possible responders to use.
:raises NoSupportedChallenges: When a suitable challenge combination is not
found.
:rtype: Tuple[`~txacme.interfaces.IResponder`,
`~acme.messages.ChallengeBody`]
:return: The responder and challenge that were found.
]
variable[matches] assign[=] <ast.ListComp object at 0x7da20c76d030>
if compare[call[name[len], parameter[name[matches]]] equal[==] constant[0]] begin[:]
<ast.Raise object at 0x7da1b0fe7c40> | keyword[def] identifier[_find_supported_challenge] ( identifier[authzr] , identifier[responders] ):
literal[string]
identifier[matches] =[
( identifier[responder] , identifier[challbs] [ literal[int] ])
keyword[for] identifier[challbs] keyword[in] identifier[authzr] . identifier[body] . identifier[resolved_combinations]
keyword[for] identifier[responder] keyword[in] identifier[responders]
keyword[if] [ identifier[challb] . identifier[typ] keyword[for] identifier[challb] keyword[in] identifier[challbs] ]==[ identifier[responder] . identifier[challenge_type] ]]
keyword[if] identifier[len] ( identifier[matches] )== literal[int] :
keyword[raise] identifier[NoSupportedChallenges] ( identifier[authzr] )
keyword[else] :
keyword[return] identifier[matches] [ literal[int] ] | def _find_supported_challenge(authzr, responders):
"""
Find a challenge combination that consists of a single challenge that the
responder can satisfy.
:param ~acme.messages.AuthorizationResource auth: The authorization to
examine.
:type responder: List[`~txacme.interfaces.IResponder`]
:param responder: The possible responders to use.
:raises NoSupportedChallenges: When a suitable challenge combination is not
found.
:rtype: Tuple[`~txacme.interfaces.IResponder`,
`~acme.messages.ChallengeBody`]
:return: The responder and challenge that were found.
"""
matches = [(responder, challbs[0]) for challbs in authzr.body.resolved_combinations for responder in responders if [challb.typ for challb in challbs] == [responder.challenge_type]]
if len(matches) == 0:
raise NoSupportedChallenges(authzr) # depends on [control=['if'], data=[]]
else:
return matches[0] |
def while_do(self, classical_reg, q_program):
"""
While a classical register at index classical_reg is 1, loop q_program
Equivalent to the following construction:
.. code::
WHILE [c]:
instr...
=>
LABEL @START
JUMP-UNLESS @END [c]
instr...
JUMP @START
LABEL @END
:param int classical_reg: The classical register to check
:param Program q_program: The Quil program to loop.
:return: The Quil Program with the loop instructions added.
:rtype: Program
"""
label_start = LabelPlaceholder("START")
label_end = LabelPlaceholder("END")
self.inst(JumpTarget(label_start))
self.inst(JumpUnless(target=label_end, condition=classical_reg))
self.inst(q_program)
self.inst(Jump(label_start))
self.inst(JumpTarget(label_end))
return self | def function[while_do, parameter[self, classical_reg, q_program]]:
constant[
While a classical register at index classical_reg is 1, loop q_program
Equivalent to the following construction:
.. code::
WHILE [c]:
instr...
=>
LABEL @START
JUMP-UNLESS @END [c]
instr...
JUMP @START
LABEL @END
:param int classical_reg: The classical register to check
:param Program q_program: The Quil program to loop.
:return: The Quil Program with the loop instructions added.
:rtype: Program
]
variable[label_start] assign[=] call[name[LabelPlaceholder], parameter[constant[START]]]
variable[label_end] assign[=] call[name[LabelPlaceholder], parameter[constant[END]]]
call[name[self].inst, parameter[call[name[JumpTarget], parameter[name[label_start]]]]]
call[name[self].inst, parameter[call[name[JumpUnless], parameter[]]]]
call[name[self].inst, parameter[name[q_program]]]
call[name[self].inst, parameter[call[name[Jump], parameter[name[label_start]]]]]
call[name[self].inst, parameter[call[name[JumpTarget], parameter[name[label_end]]]]]
return[name[self]] | keyword[def] identifier[while_do] ( identifier[self] , identifier[classical_reg] , identifier[q_program] ):
literal[string]
identifier[label_start] = identifier[LabelPlaceholder] ( literal[string] )
identifier[label_end] = identifier[LabelPlaceholder] ( literal[string] )
identifier[self] . identifier[inst] ( identifier[JumpTarget] ( identifier[label_start] ))
identifier[self] . identifier[inst] ( identifier[JumpUnless] ( identifier[target] = identifier[label_end] , identifier[condition] = identifier[classical_reg] ))
identifier[self] . identifier[inst] ( identifier[q_program] )
identifier[self] . identifier[inst] ( identifier[Jump] ( identifier[label_start] ))
identifier[self] . identifier[inst] ( identifier[JumpTarget] ( identifier[label_end] ))
keyword[return] identifier[self] | def while_do(self, classical_reg, q_program):
"""
While a classical register at index classical_reg is 1, loop q_program
Equivalent to the following construction:
.. code::
WHILE [c]:
instr...
=>
LABEL @START
JUMP-UNLESS @END [c]
instr...
JUMP @START
LABEL @END
:param int classical_reg: The classical register to check
:param Program q_program: The Quil program to loop.
:return: The Quil Program with the loop instructions added.
:rtype: Program
"""
label_start = LabelPlaceholder('START')
label_end = LabelPlaceholder('END')
self.inst(JumpTarget(label_start))
self.inst(JumpUnless(target=label_end, condition=classical_reg))
self.inst(q_program)
self.inst(Jump(label_start))
self.inst(JumpTarget(label_end))
return self |
def host_context(func):
"Sets the context of the setting to the current host"
@wraps(func)
def decorator(*args, **kwargs):
hosts = get_hosts_settings()
with settings(**hosts[env.host]):
return func(*args, **kwargs)
return decorator | def function[host_context, parameter[func]]:
constant[Sets the context of the setting to the current host]
def function[decorator, parameter[]]:
variable[hosts] assign[=] call[name[get_hosts_settings], parameter[]]
with call[name[settings], parameter[]] begin[:]
return[call[name[func], parameter[<ast.Starred object at 0x7da2041d8760>]]]
return[name[decorator]] | keyword[def] identifier[host_context] ( identifier[func] ):
literal[string]
@ identifier[wraps] ( identifier[func] )
keyword[def] identifier[decorator] (* identifier[args] ,** identifier[kwargs] ):
identifier[hosts] = identifier[get_hosts_settings] ()
keyword[with] identifier[settings] (** identifier[hosts] [ identifier[env] . identifier[host] ]):
keyword[return] identifier[func] (* identifier[args] ,** identifier[kwargs] )
keyword[return] identifier[decorator] | def host_context(func):
"""Sets the context of the setting to the current host"""
@wraps(func)
def decorator(*args, **kwargs):
hosts = get_hosts_settings()
with settings(**hosts[env.host]):
return func(*args, **kwargs) # depends on [control=['with'], data=[]]
return decorator |
def handle_shell_config(shell, vexrc, environ):
"""Carry out the logic of the --shell-config option.
"""
from vex import shell_config
data = shell_config.shell_config_for(shell, vexrc, environ)
if not data:
raise exceptions.OtherShell("unknown shell: {0!r}".format(shell))
if hasattr(sys.stdout, 'buffer'):
sys.stdout.buffer.write(data)
else:
sys.stdout.write(data)
return 0 | def function[handle_shell_config, parameter[shell, vexrc, environ]]:
constant[Carry out the logic of the --shell-config option.
]
from relative_module[vex] import module[shell_config]
variable[data] assign[=] call[name[shell_config].shell_config_for, parameter[name[shell], name[vexrc], name[environ]]]
if <ast.UnaryOp object at 0x7da1b0f58a00> begin[:]
<ast.Raise object at 0x7da1b0f58d30>
if call[name[hasattr], parameter[name[sys].stdout, constant[buffer]]] begin[:]
call[name[sys].stdout.buffer.write, parameter[name[data]]]
return[constant[0]] | keyword[def] identifier[handle_shell_config] ( identifier[shell] , identifier[vexrc] , identifier[environ] ):
literal[string]
keyword[from] identifier[vex] keyword[import] identifier[shell_config]
identifier[data] = identifier[shell_config] . identifier[shell_config_for] ( identifier[shell] , identifier[vexrc] , identifier[environ] )
keyword[if] keyword[not] identifier[data] :
keyword[raise] identifier[exceptions] . identifier[OtherShell] ( literal[string] . identifier[format] ( identifier[shell] ))
keyword[if] identifier[hasattr] ( identifier[sys] . identifier[stdout] , literal[string] ):
identifier[sys] . identifier[stdout] . identifier[buffer] . identifier[write] ( identifier[data] )
keyword[else] :
identifier[sys] . identifier[stdout] . identifier[write] ( identifier[data] )
keyword[return] literal[int] | def handle_shell_config(shell, vexrc, environ):
"""Carry out the logic of the --shell-config option.
"""
from vex import shell_config
data = shell_config.shell_config_for(shell, vexrc, environ)
if not data:
raise exceptions.OtherShell('unknown shell: {0!r}'.format(shell)) # depends on [control=['if'], data=[]]
if hasattr(sys.stdout, 'buffer'):
sys.stdout.buffer.write(data) # depends on [control=['if'], data=[]]
else:
sys.stdout.write(data)
return 0 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.