code stringlengths 75 104k | code_sememe stringlengths 47 309k | token_type stringlengths 215 214k | code_dependency stringlengths 75 155k |
|---|---|---|---|
def transform(function):
"""Return a processor for a style's "transform" function.
"""
def transform_fn(_, result):
if isinstance(result, Nothing):
return result
lgr.debug("Transforming %r with %r", result, function)
try:
return function(result)
except:
exctype, value, tb = sys.exc_info()
try:
new_exc = StyleFunctionError(function, exctype, value)
# Remove the "During handling ..." since we're
# reraising with the traceback.
new_exc.__cause__ = None
six.reraise(StyleFunctionError, new_exc, tb)
finally:
# Remove circular reference.
# https://docs.python.org/2/library/sys.html#sys.exc_info
del tb
return transform_fn | def function[transform, parameter[function]]:
constant[Return a processor for a style's "transform" function.
]
def function[transform_fn, parameter[_, result]]:
if call[name[isinstance], parameter[name[result], name[Nothing]]] begin[:]
return[name[result]]
call[name[lgr].debug, parameter[constant[Transforming %r with %r], name[result], name[function]]]
<ast.Try object at 0x7da1b1046860>
return[name[transform_fn]] | keyword[def] identifier[transform] ( identifier[function] ):
literal[string]
keyword[def] identifier[transform_fn] ( identifier[_] , identifier[result] ):
keyword[if] identifier[isinstance] ( identifier[result] , identifier[Nothing] ):
keyword[return] identifier[result]
identifier[lgr] . identifier[debug] ( literal[string] , identifier[result] , identifier[function] )
keyword[try] :
keyword[return] identifier[function] ( identifier[result] )
keyword[except] :
identifier[exctype] , identifier[value] , identifier[tb] = identifier[sys] . identifier[exc_info] ()
keyword[try] :
identifier[new_exc] = identifier[StyleFunctionError] ( identifier[function] , identifier[exctype] , identifier[value] )
identifier[new_exc] . identifier[__cause__] = keyword[None]
identifier[six] . identifier[reraise] ( identifier[StyleFunctionError] , identifier[new_exc] , identifier[tb] )
keyword[finally] :
keyword[del] identifier[tb]
keyword[return] identifier[transform_fn] | def transform(function):
"""Return a processor for a style's "transform" function.
"""
def transform_fn(_, result):
if isinstance(result, Nothing):
return result # depends on [control=['if'], data=[]]
lgr.debug('Transforming %r with %r', result, function)
try:
return function(result) # depends on [control=['try'], data=[]]
except:
(exctype, value, tb) = sys.exc_info()
try:
new_exc = StyleFunctionError(function, exctype, value)
# Remove the "During handling ..." since we're
# reraising with the traceback.
new_exc.__cause__ = None
six.reraise(StyleFunctionError, new_exc, tb) # depends on [control=['try'], data=[]]
finally:
# Remove circular reference.
# https://docs.python.org/2/library/sys.html#sys.exc_info
del tb # depends on [control=['except'], data=[]]
return transform_fn |
def by_name(self, region, summoner_name):
"""
Get a summoner by summoner name
:param string region: The region to execute this request on
:param string summoner_name: Summoner Name
:returns: SummonerDTO: represents a summoner
"""
url, query = SummonerApiV4Urls.by_name(
region=region, summoner_name=summoner_name
)
return self._raw_request(self.by_name.__name__, region, url, query) | def function[by_name, parameter[self, region, summoner_name]]:
constant[
Get a summoner by summoner name
:param string region: The region to execute this request on
:param string summoner_name: Summoner Name
:returns: SummonerDTO: represents a summoner
]
<ast.Tuple object at 0x7da1b1d4f250> assign[=] call[name[SummonerApiV4Urls].by_name, parameter[]]
return[call[name[self]._raw_request, parameter[name[self].by_name.__name__, name[region], name[url], name[query]]]] | keyword[def] identifier[by_name] ( identifier[self] , identifier[region] , identifier[summoner_name] ):
literal[string]
identifier[url] , identifier[query] = identifier[SummonerApiV4Urls] . identifier[by_name] (
identifier[region] = identifier[region] , identifier[summoner_name] = identifier[summoner_name]
)
keyword[return] identifier[self] . identifier[_raw_request] ( identifier[self] . identifier[by_name] . identifier[__name__] , identifier[region] , identifier[url] , identifier[query] ) | def by_name(self, region, summoner_name):
"""
Get a summoner by summoner name
:param string region: The region to execute this request on
:param string summoner_name: Summoner Name
:returns: SummonerDTO: represents a summoner
"""
(url, query) = SummonerApiV4Urls.by_name(region=region, summoner_name=summoner_name)
return self._raw_request(self.by_name.__name__, region, url, query) |
def update_user(userid, profile='grafana', orgid=None, **kwargs):
'''
Update an existing user.
userid
Id of the user.
login
Optional - Login of the user.
email
Optional - Email of the user.
name
Optional - Full name of the user.
orgid
Optional - Default Organization of the user.
profile
Configuration profile used to connect to the Grafana instance.
Default is 'grafana'.
CLI Example:
.. code-block:: bash
salt '*' grafana4.update_user <user_id> login=<login> email=<email>
'''
if isinstance(profile, string_types):
profile = __salt__['config.option'](profile)
response = requests.put(
'{0}/api/users/{1}'.format(profile['grafana_url'], userid),
json=kwargs,
auth=_get_auth(profile),
headers=_get_headers(profile),
timeout=profile.get('grafana_timeout', 3),
)
if response.status_code >= 400:
response.raise_for_status()
if orgid:
response2 = requests.post(
'{0}/api/users/{1}/using/{2}'.format(profile['grafana_url'], userid, orgid),
auth=_get_auth(profile),
headers=_get_headers(profile),
timeout=profile.get('grafana_timeout', 3),
)
if response2.status_code >= 400:
response2.raise_for_status()
return response.json() | def function[update_user, parameter[userid, profile, orgid]]:
constant[
Update an existing user.
userid
Id of the user.
login
Optional - Login of the user.
email
Optional - Email of the user.
name
Optional - Full name of the user.
orgid
Optional - Default Organization of the user.
profile
Configuration profile used to connect to the Grafana instance.
Default is 'grafana'.
CLI Example:
.. code-block:: bash
salt '*' grafana4.update_user <user_id> login=<login> email=<email>
]
if call[name[isinstance], parameter[name[profile], name[string_types]]] begin[:]
variable[profile] assign[=] call[call[name[__salt__]][constant[config.option]], parameter[name[profile]]]
variable[response] assign[=] call[name[requests].put, parameter[call[constant[{0}/api/users/{1}].format, parameter[call[name[profile]][constant[grafana_url]], name[userid]]]]]
if compare[name[response].status_code greater_or_equal[>=] constant[400]] begin[:]
call[name[response].raise_for_status, parameter[]]
if name[orgid] begin[:]
variable[response2] assign[=] call[name[requests].post, parameter[call[constant[{0}/api/users/{1}/using/{2}].format, parameter[call[name[profile]][constant[grafana_url]], name[userid], name[orgid]]]]]
if compare[name[response2].status_code greater_or_equal[>=] constant[400]] begin[:]
call[name[response2].raise_for_status, parameter[]]
return[call[name[response].json, parameter[]]] | keyword[def] identifier[update_user] ( identifier[userid] , identifier[profile] = literal[string] , identifier[orgid] = keyword[None] ,** identifier[kwargs] ):
literal[string]
keyword[if] identifier[isinstance] ( identifier[profile] , identifier[string_types] ):
identifier[profile] = identifier[__salt__] [ literal[string] ]( identifier[profile] )
identifier[response] = identifier[requests] . identifier[put] (
literal[string] . identifier[format] ( identifier[profile] [ literal[string] ], identifier[userid] ),
identifier[json] = identifier[kwargs] ,
identifier[auth] = identifier[_get_auth] ( identifier[profile] ),
identifier[headers] = identifier[_get_headers] ( identifier[profile] ),
identifier[timeout] = identifier[profile] . identifier[get] ( literal[string] , literal[int] ),
)
keyword[if] identifier[response] . identifier[status_code] >= literal[int] :
identifier[response] . identifier[raise_for_status] ()
keyword[if] identifier[orgid] :
identifier[response2] = identifier[requests] . identifier[post] (
literal[string] . identifier[format] ( identifier[profile] [ literal[string] ], identifier[userid] , identifier[orgid] ),
identifier[auth] = identifier[_get_auth] ( identifier[profile] ),
identifier[headers] = identifier[_get_headers] ( identifier[profile] ),
identifier[timeout] = identifier[profile] . identifier[get] ( literal[string] , literal[int] ),
)
keyword[if] identifier[response2] . identifier[status_code] >= literal[int] :
identifier[response2] . identifier[raise_for_status] ()
keyword[return] identifier[response] . identifier[json] () | def update_user(userid, profile='grafana', orgid=None, **kwargs):
"""
Update an existing user.
userid
Id of the user.
login
Optional - Login of the user.
email
Optional - Email of the user.
name
Optional - Full name of the user.
orgid
Optional - Default Organization of the user.
profile
Configuration profile used to connect to the Grafana instance.
Default is 'grafana'.
CLI Example:
.. code-block:: bash
salt '*' grafana4.update_user <user_id> login=<login> email=<email>
"""
if isinstance(profile, string_types):
profile = __salt__['config.option'](profile) # depends on [control=['if'], data=[]]
response = requests.put('{0}/api/users/{1}'.format(profile['grafana_url'], userid), json=kwargs, auth=_get_auth(profile), headers=_get_headers(profile), timeout=profile.get('grafana_timeout', 3))
if response.status_code >= 400:
response.raise_for_status() # depends on [control=['if'], data=[]]
if orgid:
response2 = requests.post('{0}/api/users/{1}/using/{2}'.format(profile['grafana_url'], userid, orgid), auth=_get_auth(profile), headers=_get_headers(profile), timeout=profile.get('grafana_timeout', 3))
if response2.status_code >= 400:
response2.raise_for_status() # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
return response.json() |
def assign_vertex_attrib_location(self, vbo, location):
"""Load data into a vbo"""
with vbo:
if self.n_verts:
assert vbo.data.shape[0] == self.n_verts
else:
self.n_verts = vbo.data.shape[0]
# vbo.buffer_data()
gl.glVertexAttribPointer(location, vbo.data.shape[1], gl.GL_FLOAT, gl.GL_FALSE, 0, 0)
gl.glEnableVertexAttribArray(location) | def function[assign_vertex_attrib_location, parameter[self, vbo, location]]:
constant[Load data into a vbo]
with name[vbo] begin[:]
if name[self].n_verts begin[:]
assert[compare[call[name[vbo].data.shape][constant[0]] equal[==] name[self].n_verts]]
call[name[gl].glVertexAttribPointer, parameter[name[location], call[name[vbo].data.shape][constant[1]], name[gl].GL_FLOAT, name[gl].GL_FALSE, constant[0], constant[0]]]
call[name[gl].glEnableVertexAttribArray, parameter[name[location]]] | keyword[def] identifier[assign_vertex_attrib_location] ( identifier[self] , identifier[vbo] , identifier[location] ):
literal[string]
keyword[with] identifier[vbo] :
keyword[if] identifier[self] . identifier[n_verts] :
keyword[assert] identifier[vbo] . identifier[data] . identifier[shape] [ literal[int] ]== identifier[self] . identifier[n_verts]
keyword[else] :
identifier[self] . identifier[n_verts] = identifier[vbo] . identifier[data] . identifier[shape] [ literal[int] ]
identifier[gl] . identifier[glVertexAttribPointer] ( identifier[location] , identifier[vbo] . identifier[data] . identifier[shape] [ literal[int] ], identifier[gl] . identifier[GL_FLOAT] , identifier[gl] . identifier[GL_FALSE] , literal[int] , literal[int] )
identifier[gl] . identifier[glEnableVertexAttribArray] ( identifier[location] ) | def assign_vertex_attrib_location(self, vbo, location):
"""Load data into a vbo"""
with vbo:
if self.n_verts:
assert vbo.data.shape[0] == self.n_verts # depends on [control=['if'], data=[]]
else:
self.n_verts = vbo.data.shape[0]
# vbo.buffer_data()
gl.glVertexAttribPointer(location, vbo.data.shape[1], gl.GL_FLOAT, gl.GL_FALSE, 0, 0)
gl.glEnableVertexAttribArray(location) # depends on [control=['with'], data=[]] |
def connected(func):
""" check connected, fails otherwise """
@wraps(func)
def wrapper(*args, **kwargs):
self = args[0]
if not self.connected:
self.show_output("Not connected.")
else:
try:
return func(*args, **kwargs)
except APIError:
self.show_output("ZooKeeper internal error.")
except AuthFailedError:
self.show_output("Authentication failed.")
except NoAuthError:
self.show_output("Not authenticated.")
except BadVersionError:
self.show_output("Bad version.")
except ConnectionLoss:
self.show_output("Connection loss.")
except NotReadOnlyCallError:
self.show_output("Not a read-only operation.")
except BadArgumentsError:
self.show_output("Bad arguments.")
except SessionExpiredError:
self.show_output("Session expired.")
except UnimplementedError as ex:
self.show_output("Not implemented by the server: %s." % str(ex))
except ZookeeperError as ex:
self.show_output("Unknown ZooKeeper error: %s" % str(ex))
return wrapper | def function[connected, parameter[func]]:
constant[ check connected, fails otherwise ]
def function[wrapper, parameter[]]:
variable[self] assign[=] call[name[args]][constant[0]]
if <ast.UnaryOp object at 0x7da18f00ec20> begin[:]
call[name[self].show_output, parameter[constant[Not connected.]]]
return[name[wrapper]] | keyword[def] identifier[connected] ( identifier[func] ):
literal[string]
@ identifier[wraps] ( identifier[func] )
keyword[def] identifier[wrapper] (* identifier[args] ,** identifier[kwargs] ):
identifier[self] = identifier[args] [ literal[int] ]
keyword[if] keyword[not] identifier[self] . identifier[connected] :
identifier[self] . identifier[show_output] ( literal[string] )
keyword[else] :
keyword[try] :
keyword[return] identifier[func] (* identifier[args] ,** identifier[kwargs] )
keyword[except] identifier[APIError] :
identifier[self] . identifier[show_output] ( literal[string] )
keyword[except] identifier[AuthFailedError] :
identifier[self] . identifier[show_output] ( literal[string] )
keyword[except] identifier[NoAuthError] :
identifier[self] . identifier[show_output] ( literal[string] )
keyword[except] identifier[BadVersionError] :
identifier[self] . identifier[show_output] ( literal[string] )
keyword[except] identifier[ConnectionLoss] :
identifier[self] . identifier[show_output] ( literal[string] )
keyword[except] identifier[NotReadOnlyCallError] :
identifier[self] . identifier[show_output] ( literal[string] )
keyword[except] identifier[BadArgumentsError] :
identifier[self] . identifier[show_output] ( literal[string] )
keyword[except] identifier[SessionExpiredError] :
identifier[self] . identifier[show_output] ( literal[string] )
keyword[except] identifier[UnimplementedError] keyword[as] identifier[ex] :
identifier[self] . identifier[show_output] ( literal[string] % identifier[str] ( identifier[ex] ))
keyword[except] identifier[ZookeeperError] keyword[as] identifier[ex] :
identifier[self] . identifier[show_output] ( literal[string] % identifier[str] ( identifier[ex] ))
keyword[return] identifier[wrapper] | def connected(func):
""" check connected, fails otherwise """
@wraps(func)
def wrapper(*args, **kwargs):
self = args[0]
if not self.connected:
self.show_output('Not connected.') # depends on [control=['if'], data=[]]
else:
try:
return func(*args, **kwargs) # depends on [control=['try'], data=[]]
except APIError:
self.show_output('ZooKeeper internal error.') # depends on [control=['except'], data=[]]
except AuthFailedError:
self.show_output('Authentication failed.') # depends on [control=['except'], data=[]]
except NoAuthError:
self.show_output('Not authenticated.') # depends on [control=['except'], data=[]]
except BadVersionError:
self.show_output('Bad version.') # depends on [control=['except'], data=[]]
except ConnectionLoss:
self.show_output('Connection loss.') # depends on [control=['except'], data=[]]
except NotReadOnlyCallError:
self.show_output('Not a read-only operation.') # depends on [control=['except'], data=[]]
except BadArgumentsError:
self.show_output('Bad arguments.') # depends on [control=['except'], data=[]]
except SessionExpiredError:
self.show_output('Session expired.') # depends on [control=['except'], data=[]]
except UnimplementedError as ex:
self.show_output('Not implemented by the server: %s.' % str(ex)) # depends on [control=['except'], data=['ex']]
except ZookeeperError as ex:
self.show_output('Unknown ZooKeeper error: %s' % str(ex)) # depends on [control=['except'], data=['ex']]
return wrapper |
def min(self):
"""Minimum value."""
if self._prop.fmin is None:
return -_INF
return self._prop.fmin(self._obj) | def function[min, parameter[self]]:
constant[Minimum value.]
if compare[name[self]._prop.fmin is constant[None]] begin[:]
return[<ast.UnaryOp object at 0x7da18f8103a0>]
return[call[name[self]._prop.fmin, parameter[name[self]._obj]]] | keyword[def] identifier[min] ( identifier[self] ):
literal[string]
keyword[if] identifier[self] . identifier[_prop] . identifier[fmin] keyword[is] keyword[None] :
keyword[return] - identifier[_INF]
keyword[return] identifier[self] . identifier[_prop] . identifier[fmin] ( identifier[self] . identifier[_obj] ) | def min(self):
"""Minimum value."""
if self._prop.fmin is None:
return -_INF # depends on [control=['if'], data=[]]
return self._prop.fmin(self._obj) |
def _setTypes(self, encoderSpec):
"""Set up the dataTypes and initialize encoders"""
if self.encoderType is None:
if self.dataType in ['int','float']:
self.encoderType='adaptiveScalar'
elif self.dataType=='string':
self.encoderType='category'
elif self.dataType in ['date', 'datetime']:
self.encoderType='date'
if self.dataType is None:
if self.encoderType in ['scalar','adaptiveScalar']:
self.dataType='float'
elif self.encoderType in ['category', 'enumeration']:
self.dataType='string'
elif self.encoderType in ['date', 'datetime']:
self.dataType='datetime' | def function[_setTypes, parameter[self, encoderSpec]]:
constant[Set up the dataTypes and initialize encoders]
if compare[name[self].encoderType is constant[None]] begin[:]
if compare[name[self].dataType in list[[<ast.Constant object at 0x7da20c6aa8c0>, <ast.Constant object at 0x7da20c6aa0e0>]]] begin[:]
name[self].encoderType assign[=] constant[adaptiveScalar]
if compare[name[self].dataType is constant[None]] begin[:]
if compare[name[self].encoderType in list[[<ast.Constant object at 0x7da20c6aa680>, <ast.Constant object at 0x7da20c6a9ae0>]]] begin[:]
name[self].dataType assign[=] constant[float] | keyword[def] identifier[_setTypes] ( identifier[self] , identifier[encoderSpec] ):
literal[string]
keyword[if] identifier[self] . identifier[encoderType] keyword[is] keyword[None] :
keyword[if] identifier[self] . identifier[dataType] keyword[in] [ literal[string] , literal[string] ]:
identifier[self] . identifier[encoderType] = literal[string]
keyword[elif] identifier[self] . identifier[dataType] == literal[string] :
identifier[self] . identifier[encoderType] = literal[string]
keyword[elif] identifier[self] . identifier[dataType] keyword[in] [ literal[string] , literal[string] ]:
identifier[self] . identifier[encoderType] = literal[string]
keyword[if] identifier[self] . identifier[dataType] keyword[is] keyword[None] :
keyword[if] identifier[self] . identifier[encoderType] keyword[in] [ literal[string] , literal[string] ]:
identifier[self] . identifier[dataType] = literal[string]
keyword[elif] identifier[self] . identifier[encoderType] keyword[in] [ literal[string] , literal[string] ]:
identifier[self] . identifier[dataType] = literal[string]
keyword[elif] identifier[self] . identifier[encoderType] keyword[in] [ literal[string] , literal[string] ]:
identifier[self] . identifier[dataType] = literal[string] | def _setTypes(self, encoderSpec):
"""Set up the dataTypes and initialize encoders"""
if self.encoderType is None:
if self.dataType in ['int', 'float']:
self.encoderType = 'adaptiveScalar' # depends on [control=['if'], data=[]]
elif self.dataType == 'string':
self.encoderType = 'category' # depends on [control=['if'], data=[]]
elif self.dataType in ['date', 'datetime']:
self.encoderType = 'date' # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
if self.dataType is None:
if self.encoderType in ['scalar', 'adaptiveScalar']:
self.dataType = 'float' # depends on [control=['if'], data=[]]
elif self.encoderType in ['category', 'enumeration']:
self.dataType = 'string' # depends on [control=['if'], data=[]]
elif self.encoderType in ['date', 'datetime']:
self.dataType = 'datetime' # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] |
def _get_directives_and_roles_from_sphinx():
"""Return a tuple of Sphinx directive and roles."""
if SPHINX_INSTALLED:
sphinx_directives = list(sphinx.domains.std.StandardDomain.directives)
sphinx_roles = list(sphinx.domains.std.StandardDomain.roles)
for domain in [sphinx.domains.c.CDomain,
sphinx.domains.cpp.CPPDomain,
sphinx.domains.javascript.JavaScriptDomain,
sphinx.domains.python.PythonDomain]:
sphinx_directives += list(domain.directives) + [
'{}:{}'.format(domain.name, item)
for item in list(domain.directives)]
sphinx_roles += list(domain.roles) + [
'{}:{}'.format(domain.name, item)
for item in list(domain.roles)]
else:
sphinx_roles = [
'abbr',
'command',
'dfn',
'doc',
'download',
'envvar',
'file',
'guilabel',
'kbd',
'keyword',
'mailheader',
'makevar',
'manpage',
'menuselection',
'mimetype',
'newsgroup',
'option',
'program',
'py:func',
'ref',
'regexp',
'samp',
'term',
'token']
sphinx_directives = [
'autosummary',
'currentmodule',
'centered',
'c:function',
'c:type',
'include',
'deprecated',
'envvar',
'glossary',
'index',
'no-code-block',
'literalinclude',
'hlist',
'option',
'productionlist',
'py:function',
'seealso',
'toctree',
'todo',
'versionadded',
'versionchanged']
return (sphinx_directives, sphinx_roles) | def function[_get_directives_and_roles_from_sphinx, parameter[]]:
constant[Return a tuple of Sphinx directive and roles.]
if name[SPHINX_INSTALLED] begin[:]
variable[sphinx_directives] assign[=] call[name[list], parameter[name[sphinx].domains.std.StandardDomain.directives]]
variable[sphinx_roles] assign[=] call[name[list], parameter[name[sphinx].domains.std.StandardDomain.roles]]
for taget[name[domain]] in starred[list[[<ast.Attribute object at 0x7da1b0720970>, <ast.Attribute object at 0x7da1b0720b80>, <ast.Attribute object at 0x7da1b0722140>, <ast.Attribute object at 0x7da1b0720ee0>]]] begin[:]
<ast.AugAssign object at 0x7da1b0722aa0>
<ast.AugAssign object at 0x7da1b0723490>
return[tuple[[<ast.Name object at 0x7da1b08fa4d0>, <ast.Name object at 0x7da1b08fbdf0>]]] | keyword[def] identifier[_get_directives_and_roles_from_sphinx] ():
literal[string]
keyword[if] identifier[SPHINX_INSTALLED] :
identifier[sphinx_directives] = identifier[list] ( identifier[sphinx] . identifier[domains] . identifier[std] . identifier[StandardDomain] . identifier[directives] )
identifier[sphinx_roles] = identifier[list] ( identifier[sphinx] . identifier[domains] . identifier[std] . identifier[StandardDomain] . identifier[roles] )
keyword[for] identifier[domain] keyword[in] [ identifier[sphinx] . identifier[domains] . identifier[c] . identifier[CDomain] ,
identifier[sphinx] . identifier[domains] . identifier[cpp] . identifier[CPPDomain] ,
identifier[sphinx] . identifier[domains] . identifier[javascript] . identifier[JavaScriptDomain] ,
identifier[sphinx] . identifier[domains] . identifier[python] . identifier[PythonDomain] ]:
identifier[sphinx_directives] += identifier[list] ( identifier[domain] . identifier[directives] )+[
literal[string] . identifier[format] ( identifier[domain] . identifier[name] , identifier[item] )
keyword[for] identifier[item] keyword[in] identifier[list] ( identifier[domain] . identifier[directives] )]
identifier[sphinx_roles] += identifier[list] ( identifier[domain] . identifier[roles] )+[
literal[string] . identifier[format] ( identifier[domain] . identifier[name] , identifier[item] )
keyword[for] identifier[item] keyword[in] identifier[list] ( identifier[domain] . identifier[roles] )]
keyword[else] :
identifier[sphinx_roles] =[
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ]
identifier[sphinx_directives] =[
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ]
keyword[return] ( identifier[sphinx_directives] , identifier[sphinx_roles] ) | def _get_directives_and_roles_from_sphinx():
"""Return a tuple of Sphinx directive and roles."""
if SPHINX_INSTALLED:
sphinx_directives = list(sphinx.domains.std.StandardDomain.directives)
sphinx_roles = list(sphinx.domains.std.StandardDomain.roles)
for domain in [sphinx.domains.c.CDomain, sphinx.domains.cpp.CPPDomain, sphinx.domains.javascript.JavaScriptDomain, sphinx.domains.python.PythonDomain]:
sphinx_directives += list(domain.directives) + ['{}:{}'.format(domain.name, item) for item in list(domain.directives)]
sphinx_roles += list(domain.roles) + ['{}:{}'.format(domain.name, item) for item in list(domain.roles)] # depends on [control=['for'], data=['domain']] # depends on [control=['if'], data=[]]
else:
sphinx_roles = ['abbr', 'command', 'dfn', 'doc', 'download', 'envvar', 'file', 'guilabel', 'kbd', 'keyword', 'mailheader', 'makevar', 'manpage', 'menuselection', 'mimetype', 'newsgroup', 'option', 'program', 'py:func', 'ref', 'regexp', 'samp', 'term', 'token']
sphinx_directives = ['autosummary', 'currentmodule', 'centered', 'c:function', 'c:type', 'include', 'deprecated', 'envvar', 'glossary', 'index', 'no-code-block', 'literalinclude', 'hlist', 'option', 'productionlist', 'py:function', 'seealso', 'toctree', 'todo', 'versionadded', 'versionchanged']
return (sphinx_directives, sphinx_roles) |
def cancel_observing(self, response, send_rst): # pragma: no cover
"""
Delete observing on the remote server.
:param response: the last received response
:param send_rst: if explicitly send RST message
:type send_rst: bool
"""
if send_rst:
message = Message()
message.destination = self.server
message.code = defines.Codes.EMPTY.number
message.type = defines.Types["RST"]
message.token = response.token
message.mid = response.mid
self.protocol.send_message(message)
self.stop() | def function[cancel_observing, parameter[self, response, send_rst]]:
constant[
Delete observing on the remote server.
:param response: the last received response
:param send_rst: if explicitly send RST message
:type send_rst: bool
]
if name[send_rst] begin[:]
variable[message] assign[=] call[name[Message], parameter[]]
name[message].destination assign[=] name[self].server
name[message].code assign[=] name[defines].Codes.EMPTY.number
name[message].type assign[=] call[name[defines].Types][constant[RST]]
name[message].token assign[=] name[response].token
name[message].mid assign[=] name[response].mid
call[name[self].protocol.send_message, parameter[name[message]]]
call[name[self].stop, parameter[]] | keyword[def] identifier[cancel_observing] ( identifier[self] , identifier[response] , identifier[send_rst] ):
literal[string]
keyword[if] identifier[send_rst] :
identifier[message] = identifier[Message] ()
identifier[message] . identifier[destination] = identifier[self] . identifier[server]
identifier[message] . identifier[code] = identifier[defines] . identifier[Codes] . identifier[EMPTY] . identifier[number]
identifier[message] . identifier[type] = identifier[defines] . identifier[Types] [ literal[string] ]
identifier[message] . identifier[token] = identifier[response] . identifier[token]
identifier[message] . identifier[mid] = identifier[response] . identifier[mid]
identifier[self] . identifier[protocol] . identifier[send_message] ( identifier[message] )
identifier[self] . identifier[stop] () | def cancel_observing(self, response, send_rst): # pragma: no cover
'\n Delete observing on the remote server.\n\n :param response: the last received response\n :param send_rst: if explicitly send RST message\n :type send_rst: bool\n '
if send_rst:
message = Message()
message.destination = self.server
message.code = defines.Codes.EMPTY.number
message.type = defines.Types['RST']
message.token = response.token
message.mid = response.mid
self.protocol.send_message(message) # depends on [control=['if'], data=[]]
self.stop() |
def get_containers(self, scope=None, artifact_uris=None):
"""GetContainers.
[Preview API] Gets containers filtered by a comma separated list of artifact uris within the same scope, if not specified returns all containers
:param str scope: A guid representing the scope of the container. This is often the project id.
:param str artifact_uris:
:rtype: [FileContainer]
"""
query_parameters = {}
if scope is not None:
query_parameters['scope'] = self._serialize.query('scope', scope, 'str')
if artifact_uris is not None:
query_parameters['artifactUris'] = self._serialize.query('artifact_uris', artifact_uris, 'str')
response = self._send(http_method='GET',
location_id='e4f5c81e-e250-447b-9fef-bd48471bea5e',
version='5.0-preview.4',
query_parameters=query_parameters)
return self._deserialize('[FileContainer]', self._unwrap_collection(response)) | def function[get_containers, parameter[self, scope, artifact_uris]]:
constant[GetContainers.
[Preview API] Gets containers filtered by a comma separated list of artifact uris within the same scope, if not specified returns all containers
:param str scope: A guid representing the scope of the container. This is often the project id.
:param str artifact_uris:
:rtype: [FileContainer]
]
variable[query_parameters] assign[=] dictionary[[], []]
if compare[name[scope] is_not constant[None]] begin[:]
call[name[query_parameters]][constant[scope]] assign[=] call[name[self]._serialize.query, parameter[constant[scope], name[scope], constant[str]]]
if compare[name[artifact_uris] is_not constant[None]] begin[:]
call[name[query_parameters]][constant[artifactUris]] assign[=] call[name[self]._serialize.query, parameter[constant[artifact_uris], name[artifact_uris], constant[str]]]
variable[response] assign[=] call[name[self]._send, parameter[]]
return[call[name[self]._deserialize, parameter[constant[[FileContainer]], call[name[self]._unwrap_collection, parameter[name[response]]]]]] | keyword[def] identifier[get_containers] ( identifier[self] , identifier[scope] = keyword[None] , identifier[artifact_uris] = keyword[None] ):
literal[string]
identifier[query_parameters] ={}
keyword[if] identifier[scope] keyword[is] keyword[not] keyword[None] :
identifier[query_parameters] [ literal[string] ]= identifier[self] . identifier[_serialize] . identifier[query] ( literal[string] , identifier[scope] , literal[string] )
keyword[if] identifier[artifact_uris] keyword[is] keyword[not] keyword[None] :
identifier[query_parameters] [ literal[string] ]= identifier[self] . identifier[_serialize] . identifier[query] ( literal[string] , identifier[artifact_uris] , literal[string] )
identifier[response] = identifier[self] . identifier[_send] ( identifier[http_method] = literal[string] ,
identifier[location_id] = literal[string] ,
identifier[version] = literal[string] ,
identifier[query_parameters] = identifier[query_parameters] )
keyword[return] identifier[self] . identifier[_deserialize] ( literal[string] , identifier[self] . identifier[_unwrap_collection] ( identifier[response] )) | def get_containers(self, scope=None, artifact_uris=None):
"""GetContainers.
[Preview API] Gets containers filtered by a comma separated list of artifact uris within the same scope, if not specified returns all containers
:param str scope: A guid representing the scope of the container. This is often the project id.
:param str artifact_uris:
:rtype: [FileContainer]
"""
query_parameters = {}
if scope is not None:
query_parameters['scope'] = self._serialize.query('scope', scope, 'str') # depends on [control=['if'], data=['scope']]
if artifact_uris is not None:
query_parameters['artifactUris'] = self._serialize.query('artifact_uris', artifact_uris, 'str') # depends on [control=['if'], data=['artifact_uris']]
response = self._send(http_method='GET', location_id='e4f5c81e-e250-447b-9fef-bd48471bea5e', version='5.0-preview.4', query_parameters=query_parameters)
return self._deserialize('[FileContainer]', self._unwrap_collection(response)) |
def connect(self, callback, weak=False):
"""
Connects a new callback to this signal.
:param callback: The callback to connect.
:param weak: If `True`, only holds a weak reference to the specified
callback.
`callback` will be called whenever `emit` gets called on the `Signal`
instance.
If a weak reference is kept, when the callback gets destroyed, it will
be unregistered from the signal automatically. This can help avoiding
circular references in user-code.
.. warning::
Beware of bound methods ! Those are generally short-lived and don't
play nicely with weak reference.
.. note::
Connecting the same callback twice or more will cause the callback
to be called several times per `emit` call.
You will have to call `disconnect` as many times as the `connect`
call was called to unregister a callback completely.
"""
if weak:
callback = ref(callback, self._callbacks.remove)
self._callbacks.append(callback) | def function[connect, parameter[self, callback, weak]]:
constant[
Connects a new callback to this signal.
:param callback: The callback to connect.
:param weak: If `True`, only holds a weak reference to the specified
callback.
`callback` will be called whenever `emit` gets called on the `Signal`
instance.
If a weak reference is kept, when the callback gets destroyed, it will
be unregistered from the signal automatically. This can help avoiding
circular references in user-code.
.. warning::
Beware of bound methods ! Those are generally short-lived and don't
play nicely with weak reference.
.. note::
Connecting the same callback twice or more will cause the callback
to be called several times per `emit` call.
You will have to call `disconnect` as many times as the `connect`
call was called to unregister a callback completely.
]
if name[weak] begin[:]
variable[callback] assign[=] call[name[ref], parameter[name[callback], name[self]._callbacks.remove]]
call[name[self]._callbacks.append, parameter[name[callback]]] | keyword[def] identifier[connect] ( identifier[self] , identifier[callback] , identifier[weak] = keyword[False] ):
literal[string]
keyword[if] identifier[weak] :
identifier[callback] = identifier[ref] ( identifier[callback] , identifier[self] . identifier[_callbacks] . identifier[remove] )
identifier[self] . identifier[_callbacks] . identifier[append] ( identifier[callback] ) | def connect(self, callback, weak=False):
"""
Connects a new callback to this signal.
:param callback: The callback to connect.
:param weak: If `True`, only holds a weak reference to the specified
callback.
`callback` will be called whenever `emit` gets called on the `Signal`
instance.
If a weak reference is kept, when the callback gets destroyed, it will
be unregistered from the signal automatically. This can help avoiding
circular references in user-code.
.. warning::
Beware of bound methods ! Those are generally short-lived and don't
play nicely with weak reference.
.. note::
Connecting the same callback twice or more will cause the callback
to be called several times per `emit` call.
You will have to call `disconnect` as many times as the `connect`
call was called to unregister a callback completely.
"""
if weak:
callback = ref(callback, self._callbacks.remove) # depends on [control=['if'], data=[]]
self._callbacks.append(callback) |
def _validate_filenames(self):
"""Checks if passed filenames are valid.
Specifically, f_* parameter should not be passed in
conjunction with dirname.
"""
if not self.dirname:
return
def _is_truthy_and_not_str(f):
return f and not isinstance(f, str)
if (
_is_truthy_and_not_str(self.f_optimizer) or
_is_truthy_and_not_str(self.f_params) or
_is_truthy_and_not_str(self.f_history) or
_is_truthy_and_not_str(self.f_pickle)
):
raise SkorchException(
'dirname can only be used when f_* are strings') | def function[_validate_filenames, parameter[self]]:
constant[Checks if passed filenames are valid.
Specifically, f_* parameter should not be passed in
conjunction with dirname.
]
if <ast.UnaryOp object at 0x7da18eb54b50> begin[:]
return[None]
def function[_is_truthy_and_not_str, parameter[f]]:
return[<ast.BoolOp object at 0x7da18eb56890>]
if <ast.BoolOp object at 0x7da20c6c69b0> begin[:]
<ast.Raise object at 0x7da20c6c7820> | keyword[def] identifier[_validate_filenames] ( identifier[self] ):
literal[string]
keyword[if] keyword[not] identifier[self] . identifier[dirname] :
keyword[return]
keyword[def] identifier[_is_truthy_and_not_str] ( identifier[f] ):
keyword[return] identifier[f] keyword[and] keyword[not] identifier[isinstance] ( identifier[f] , identifier[str] )
keyword[if] (
identifier[_is_truthy_and_not_str] ( identifier[self] . identifier[f_optimizer] ) keyword[or]
identifier[_is_truthy_and_not_str] ( identifier[self] . identifier[f_params] ) keyword[or]
identifier[_is_truthy_and_not_str] ( identifier[self] . identifier[f_history] ) keyword[or]
identifier[_is_truthy_and_not_str] ( identifier[self] . identifier[f_pickle] )
):
keyword[raise] identifier[SkorchException] (
literal[string] ) | def _validate_filenames(self):
"""Checks if passed filenames are valid.
Specifically, f_* parameter should not be passed in
conjunction with dirname.
"""
if not self.dirname:
return # depends on [control=['if'], data=[]]
def _is_truthy_and_not_str(f):
return f and (not isinstance(f, str))
if _is_truthy_and_not_str(self.f_optimizer) or _is_truthy_and_not_str(self.f_params) or _is_truthy_and_not_str(self.f_history) or _is_truthy_and_not_str(self.f_pickle):
raise SkorchException('dirname can only be used when f_* are strings') # depends on [control=['if'], data=[]] |
def traceroute_batch(input_list, results={}, method="udp", cmd_arguments=None,
delay_time=0.1, max_threads=100):
"""
This is a parallel version of the traceroute primitive.
:param input_list: the input is a list of domain names
:param method: the packet type used for traceroute, UDP by default
:param cmd_arguments: the list of arguments that need to be passed
to traceroute.
:param delay_time: delay before starting each thread
:param max_threads: maximum number of concurrent threads
:return:
"""
threads = []
thread_error = False
thread_wait_timeout = 200
ind = 1
total_item_count = len(input_list)
for domain in input_list:
wait_time = 0
while threading.active_count() > max_threads:
time.sleep(1)
wait_time += 1
if wait_time > thread_wait_timeout:
thread_error = True
break
if thread_error:
results["error"] = "Threads took too long to finish."
break
# add just a little bit of delay before starting the thread
# to avoid overwhelming the connection.
time.sleep(delay_time)
log_prefix = "%d/%d: " % (ind, total_item_count)
thread = threading.Thread(target=traceroute,
args=(domain, method, cmd_arguments,
results, log_prefix))
ind += 1
thread.setDaemon(1)
thread_open_success = False
retries = 0
while not thread_open_success and retries < MAX_THREAD_START_RETRY:
try:
thread.start()
threads.append(thread)
thread_open_success = True
except:
retries += 1
time.sleep(THREAD_START_DELAY)
logging.error("%sThread start failed for %s, retrying... (%d/%d)" % (log_prefix, domain, retries, MAX_THREAD_START_RETRY))
if retries == MAX_THREAD_START_RETRY:
logging.error("%sCan't start a new thread for %s after %d retries." % (log_prefix, domain, retries))
for thread in threads:
thread.join(thread_wait_timeout)
return results | def function[traceroute_batch, parameter[input_list, results, method, cmd_arguments, delay_time, max_threads]]:
constant[
This is a parallel version of the traceroute primitive.
:param input_list: the input is a list of domain names
:param method: the packet type used for traceroute, UDP by default
:param cmd_arguments: the list of arguments that need to be passed
to traceroute.
:param delay_time: delay before starting each thread
:param max_threads: maximum number of concurrent threads
:return:
]
variable[threads] assign[=] list[[]]
variable[thread_error] assign[=] constant[False]
variable[thread_wait_timeout] assign[=] constant[200]
variable[ind] assign[=] constant[1]
variable[total_item_count] assign[=] call[name[len], parameter[name[input_list]]]
for taget[name[domain]] in starred[name[input_list]] begin[:]
variable[wait_time] assign[=] constant[0]
while compare[call[name[threading].active_count, parameter[]] greater[>] name[max_threads]] begin[:]
call[name[time].sleep, parameter[constant[1]]]
<ast.AugAssign object at 0x7da1b28191e0>
if compare[name[wait_time] greater[>] name[thread_wait_timeout]] begin[:]
variable[thread_error] assign[=] constant[True]
break
if name[thread_error] begin[:]
call[name[results]][constant[error]] assign[=] constant[Threads took too long to finish.]
break
call[name[time].sleep, parameter[name[delay_time]]]
variable[log_prefix] assign[=] binary_operation[constant[%d/%d: ] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1b2818280>, <ast.Name object at 0x7da1b2818310>]]]
variable[thread] assign[=] call[name[threading].Thread, parameter[]]
<ast.AugAssign object at 0x7da1b281bd30>
call[name[thread].setDaemon, parameter[constant[1]]]
variable[thread_open_success] assign[=] constant[False]
variable[retries] assign[=] constant[0]
while <ast.BoolOp object at 0x7da1b2818550> begin[:]
<ast.Try object at 0x7da1b281ab00>
if compare[name[retries] equal[==] name[MAX_THREAD_START_RETRY]] begin[:]
call[name[logging].error, parameter[binary_operation[constant[%sCan't start a new thread for %s after %d retries.] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1b2844910>, <ast.Name object at 0x7da1b2847220>, <ast.Name object at 0x7da1b2844370>]]]]]
for taget[name[thread]] in starred[name[threads]] begin[:]
call[name[thread].join, parameter[name[thread_wait_timeout]]]
return[name[results]] | keyword[def] identifier[traceroute_batch] ( identifier[input_list] , identifier[results] ={}, identifier[method] = literal[string] , identifier[cmd_arguments] = keyword[None] ,
identifier[delay_time] = literal[int] , identifier[max_threads] = literal[int] ):
literal[string]
identifier[threads] =[]
identifier[thread_error] = keyword[False]
identifier[thread_wait_timeout] = literal[int]
identifier[ind] = literal[int]
identifier[total_item_count] = identifier[len] ( identifier[input_list] )
keyword[for] identifier[domain] keyword[in] identifier[input_list] :
identifier[wait_time] = literal[int]
keyword[while] identifier[threading] . identifier[active_count] ()> identifier[max_threads] :
identifier[time] . identifier[sleep] ( literal[int] )
identifier[wait_time] += literal[int]
keyword[if] identifier[wait_time] > identifier[thread_wait_timeout] :
identifier[thread_error] = keyword[True]
keyword[break]
keyword[if] identifier[thread_error] :
identifier[results] [ literal[string] ]= literal[string]
keyword[break]
identifier[time] . identifier[sleep] ( identifier[delay_time] )
identifier[log_prefix] = literal[string] %( identifier[ind] , identifier[total_item_count] )
identifier[thread] = identifier[threading] . identifier[Thread] ( identifier[target] = identifier[traceroute] ,
identifier[args] =( identifier[domain] , identifier[method] , identifier[cmd_arguments] ,
identifier[results] , identifier[log_prefix] ))
identifier[ind] += literal[int]
identifier[thread] . identifier[setDaemon] ( literal[int] )
identifier[thread_open_success] = keyword[False]
identifier[retries] = literal[int]
keyword[while] keyword[not] identifier[thread_open_success] keyword[and] identifier[retries] < identifier[MAX_THREAD_START_RETRY] :
keyword[try] :
identifier[thread] . identifier[start] ()
identifier[threads] . identifier[append] ( identifier[thread] )
identifier[thread_open_success] = keyword[True]
keyword[except] :
identifier[retries] += literal[int]
identifier[time] . identifier[sleep] ( identifier[THREAD_START_DELAY] )
identifier[logging] . identifier[error] ( literal[string] %( identifier[log_prefix] , identifier[domain] , identifier[retries] , identifier[MAX_THREAD_START_RETRY] ))
keyword[if] identifier[retries] == identifier[MAX_THREAD_START_RETRY] :
identifier[logging] . identifier[error] ( literal[string] %( identifier[log_prefix] , identifier[domain] , identifier[retries] ))
keyword[for] identifier[thread] keyword[in] identifier[threads] :
identifier[thread] . identifier[join] ( identifier[thread_wait_timeout] )
keyword[return] identifier[results] | def traceroute_batch(input_list, results={}, method='udp', cmd_arguments=None, delay_time=0.1, max_threads=100):
"""
This is a parallel version of the traceroute primitive.
:param input_list: the input is a list of domain names
:param method: the packet type used for traceroute, UDP by default
:param cmd_arguments: the list of arguments that need to be passed
to traceroute.
:param delay_time: delay before starting each thread
:param max_threads: maximum number of concurrent threads
:return:
"""
threads = []
thread_error = False
thread_wait_timeout = 200
ind = 1
total_item_count = len(input_list)
for domain in input_list:
wait_time = 0
while threading.active_count() > max_threads:
time.sleep(1)
wait_time += 1
if wait_time > thread_wait_timeout:
thread_error = True
break # depends on [control=['if'], data=[]] # depends on [control=['while'], data=[]]
if thread_error:
results['error'] = 'Threads took too long to finish.'
break # depends on [control=['if'], data=[]]
# add just a little bit of delay before starting the thread
# to avoid overwhelming the connection.
time.sleep(delay_time)
log_prefix = '%d/%d: ' % (ind, total_item_count)
thread = threading.Thread(target=traceroute, args=(domain, method, cmd_arguments, results, log_prefix))
ind += 1
thread.setDaemon(1)
thread_open_success = False
retries = 0
while not thread_open_success and retries < MAX_THREAD_START_RETRY:
try:
thread.start()
threads.append(thread)
thread_open_success = True # depends on [control=['try'], data=[]]
except:
retries += 1
time.sleep(THREAD_START_DELAY)
logging.error('%sThread start failed for %s, retrying... (%d/%d)' % (log_prefix, domain, retries, MAX_THREAD_START_RETRY)) # depends on [control=['except'], data=[]] # depends on [control=['while'], data=[]]
if retries == MAX_THREAD_START_RETRY:
logging.error("%sCan't start a new thread for %s after %d retries." % (log_prefix, domain, retries)) # depends on [control=['if'], data=['retries']] # depends on [control=['for'], data=['domain']]
for thread in threads:
thread.join(thread_wait_timeout) # depends on [control=['for'], data=['thread']]
return results |
def resolve(self, dispatcher, node):
"""
For the given node, resolve it into the scope it was declared
at, and if one was found, return its value.
"""
scope = self.identifiers.get(node)
if not scope:
return node.value
return scope.resolve(node.value) | def function[resolve, parameter[self, dispatcher, node]]:
constant[
For the given node, resolve it into the scope it was declared
at, and if one was found, return its value.
]
variable[scope] assign[=] call[name[self].identifiers.get, parameter[name[node]]]
if <ast.UnaryOp object at 0x7da20e9b3910> begin[:]
return[name[node].value]
return[call[name[scope].resolve, parameter[name[node].value]]] | keyword[def] identifier[resolve] ( identifier[self] , identifier[dispatcher] , identifier[node] ):
literal[string]
identifier[scope] = identifier[self] . identifier[identifiers] . identifier[get] ( identifier[node] )
keyword[if] keyword[not] identifier[scope] :
keyword[return] identifier[node] . identifier[value]
keyword[return] identifier[scope] . identifier[resolve] ( identifier[node] . identifier[value] ) | def resolve(self, dispatcher, node):
"""
For the given node, resolve it into the scope it was declared
at, and if one was found, return its value.
"""
scope = self.identifiers.get(node)
if not scope:
return node.value # depends on [control=['if'], data=[]]
return scope.resolve(node.value) |
def get_variant_label(v_conf):
"""
Generates name for variant images based settings (by variants sizes).
"""
if v_conf['MAX_SIZE'][0] is None:
return 'h{}'.format(v_conf['MAX_SIZE'][1])
if v_conf['MAX_SIZE'][1] is None:
return 'w{}'.format(v_conf['MAX_SIZE'][0])
return '{}x{}'.format(*v_conf['MAX_SIZE']) | def function[get_variant_label, parameter[v_conf]]:
constant[
Generates name for variant images based settings (by variants sizes).
]
if compare[call[call[name[v_conf]][constant[MAX_SIZE]]][constant[0]] is constant[None]] begin[:]
return[call[constant[h{}].format, parameter[call[call[name[v_conf]][constant[MAX_SIZE]]][constant[1]]]]]
if compare[call[call[name[v_conf]][constant[MAX_SIZE]]][constant[1]] is constant[None]] begin[:]
return[call[constant[w{}].format, parameter[call[call[name[v_conf]][constant[MAX_SIZE]]][constant[0]]]]]
return[call[constant[{}x{}].format, parameter[<ast.Starred object at 0x7da1b28ae710>]]] | keyword[def] identifier[get_variant_label] ( identifier[v_conf] ):
literal[string]
keyword[if] identifier[v_conf] [ literal[string] ][ literal[int] ] keyword[is] keyword[None] :
keyword[return] literal[string] . identifier[format] ( identifier[v_conf] [ literal[string] ][ literal[int] ])
keyword[if] identifier[v_conf] [ literal[string] ][ literal[int] ] keyword[is] keyword[None] :
keyword[return] literal[string] . identifier[format] ( identifier[v_conf] [ literal[string] ][ literal[int] ])
keyword[return] literal[string] . identifier[format] (* identifier[v_conf] [ literal[string] ]) | def get_variant_label(v_conf):
"""
Generates name for variant images based settings (by variants sizes).
"""
if v_conf['MAX_SIZE'][0] is None:
return 'h{}'.format(v_conf['MAX_SIZE'][1]) # depends on [control=['if'], data=[]]
if v_conf['MAX_SIZE'][1] is None:
return 'w{}'.format(v_conf['MAX_SIZE'][0]) # depends on [control=['if'], data=[]]
return '{}x{}'.format(*v_conf['MAX_SIZE']) |
def wrap_in_ndarray(value):
"""Wraps the argument in a numpy.ndarray.
If value is a scalar, it is converted in a list first.
If value is array-like, the shape is conserved.
"""
if hasattr(value, "__len__"):
return np.array(value)
else:
return np.array([value]) | def function[wrap_in_ndarray, parameter[value]]:
constant[Wraps the argument in a numpy.ndarray.
If value is a scalar, it is converted in a list first.
If value is array-like, the shape is conserved.
]
if call[name[hasattr], parameter[name[value], constant[__len__]]] begin[:]
return[call[name[np].array, parameter[name[value]]]] | keyword[def] identifier[wrap_in_ndarray] ( identifier[value] ):
literal[string]
keyword[if] identifier[hasattr] ( identifier[value] , literal[string] ):
keyword[return] identifier[np] . identifier[array] ( identifier[value] )
keyword[else] :
keyword[return] identifier[np] . identifier[array] ([ identifier[value] ]) | def wrap_in_ndarray(value):
"""Wraps the argument in a numpy.ndarray.
If value is a scalar, it is converted in a list first.
If value is array-like, the shape is conserved.
"""
if hasattr(value, '__len__'):
return np.array(value) # depends on [control=['if'], data=[]]
else:
return np.array([value]) |
def unsubscribe(self, request, *args, **kwargs):
""" Performs the unsubscribe action. """
self.object = self.get_object()
self.object.subscribers.remove(request.user)
messages.success(self.request, self.success_message)
return HttpResponseRedirect(self.get_success_url()) | def function[unsubscribe, parameter[self, request]]:
constant[ Performs the unsubscribe action. ]
name[self].object assign[=] call[name[self].get_object, parameter[]]
call[name[self].object.subscribers.remove, parameter[name[request].user]]
call[name[messages].success, parameter[name[self].request, name[self].success_message]]
return[call[name[HttpResponseRedirect], parameter[call[name[self].get_success_url, parameter[]]]]] | keyword[def] identifier[unsubscribe] ( identifier[self] , identifier[request] ,* identifier[args] ,** identifier[kwargs] ):
literal[string]
identifier[self] . identifier[object] = identifier[self] . identifier[get_object] ()
identifier[self] . identifier[object] . identifier[subscribers] . identifier[remove] ( identifier[request] . identifier[user] )
identifier[messages] . identifier[success] ( identifier[self] . identifier[request] , identifier[self] . identifier[success_message] )
keyword[return] identifier[HttpResponseRedirect] ( identifier[self] . identifier[get_success_url] ()) | def unsubscribe(self, request, *args, **kwargs):
""" Performs the unsubscribe action. """
self.object = self.get_object()
self.object.subscribers.remove(request.user)
messages.success(self.request, self.success_message)
return HttpResponseRedirect(self.get_success_url()) |
def get_development_container_name(self):
"""
Returns the development container name
"""
if self.__prefix:
return "{0}:{1}-{2}-dev".format(
self.__repository,
self.__prefix,
self.__branch)
else:
return "{0}:{1}-dev".format(
self.__repository,
self.__branch) | def function[get_development_container_name, parameter[self]]:
constant[
Returns the development container name
]
if name[self].__prefix begin[:]
return[call[constant[{0}:{1}-{2}-dev].format, parameter[name[self].__repository, name[self].__prefix, name[self].__branch]]] | keyword[def] identifier[get_development_container_name] ( identifier[self] ):
literal[string]
keyword[if] identifier[self] . identifier[__prefix] :
keyword[return] literal[string] . identifier[format] (
identifier[self] . identifier[__repository] ,
identifier[self] . identifier[__prefix] ,
identifier[self] . identifier[__branch] )
keyword[else] :
keyword[return] literal[string] . identifier[format] (
identifier[self] . identifier[__repository] ,
identifier[self] . identifier[__branch] ) | def get_development_container_name(self):
"""
Returns the development container name
"""
if self.__prefix:
return '{0}:{1}-{2}-dev'.format(self.__repository, self.__prefix, self.__branch) # depends on [control=['if'], data=[]]
else:
return '{0}:{1}-dev'.format(self.__repository, self.__branch) |
def dump(self, content, filepath, indent=4):
"""
Dump settings content to filepath.
Args:
content (str): Settings content.
filepath (str): Settings file location.
"""
with open(filepath, 'w') as fp:
json.dump(content, fp, indent=indent) | def function[dump, parameter[self, content, filepath, indent]]:
constant[
Dump settings content to filepath.
Args:
content (str): Settings content.
filepath (str): Settings file location.
]
with call[name[open], parameter[name[filepath], constant[w]]] begin[:]
call[name[json].dump, parameter[name[content], name[fp]]] | keyword[def] identifier[dump] ( identifier[self] , identifier[content] , identifier[filepath] , identifier[indent] = literal[int] ):
literal[string]
keyword[with] identifier[open] ( identifier[filepath] , literal[string] ) keyword[as] identifier[fp] :
identifier[json] . identifier[dump] ( identifier[content] , identifier[fp] , identifier[indent] = identifier[indent] ) | def dump(self, content, filepath, indent=4):
"""
Dump settings content to filepath.
Args:
content (str): Settings content.
filepath (str): Settings file location.
"""
with open(filepath, 'w') as fp:
json.dump(content, fp, indent=indent) # depends on [control=['with'], data=['fp']] |
def generate_token(self, *args, **kwargs):
""" generates a pseudo random code using os.urandom and binascii.hexlify """
# determine the length based on min_length and max_length
length = random.randint(self.min_length, self.max_length)
# generate the token using os.urandom and hexlify
return binascii.hexlify(
os.urandom(self.max_length)
).decode()[0:length] | def function[generate_token, parameter[self]]:
constant[ generates a pseudo random code using os.urandom and binascii.hexlify ]
variable[length] assign[=] call[name[random].randint, parameter[name[self].min_length, name[self].max_length]]
return[call[call[call[name[binascii].hexlify, parameter[call[name[os].urandom, parameter[name[self].max_length]]]].decode, parameter[]]][<ast.Slice object at 0x7da1b1502a70>]] | keyword[def] identifier[generate_token] ( identifier[self] ,* identifier[args] ,** identifier[kwargs] ):
literal[string]
identifier[length] = identifier[random] . identifier[randint] ( identifier[self] . identifier[min_length] , identifier[self] . identifier[max_length] )
keyword[return] identifier[binascii] . identifier[hexlify] (
identifier[os] . identifier[urandom] ( identifier[self] . identifier[max_length] )
). identifier[decode] ()[ literal[int] : identifier[length] ] | def generate_token(self, *args, **kwargs):
""" generates a pseudo random code using os.urandom and binascii.hexlify """
# determine the length based on min_length and max_length
length = random.randint(self.min_length, self.max_length)
# generate the token using os.urandom and hexlify
return binascii.hexlify(os.urandom(self.max_length)).decode()[0:length] |
def main():
"""
main
"""
arguments = IArguments(__doc__)
content = open(arguments.filepath).read()
open(arguments.filepath + ".bak", "w").write(content)
try:
newcontent = transliterate(content)
write_newcontent(arguments.filepath, newcontent)
except UnicodeEncodeError as ex:
console(str(ex), color="red")
newcontent = forceascii(content)
write_newcontent(arguments.filepath, newcontent) | def function[main, parameter[]]:
constant[
main
]
variable[arguments] assign[=] call[name[IArguments], parameter[name[__doc__]]]
variable[content] assign[=] call[call[name[open], parameter[name[arguments].filepath]].read, parameter[]]
call[call[name[open], parameter[binary_operation[name[arguments].filepath + constant[.bak]], constant[w]]].write, parameter[name[content]]]
<ast.Try object at 0x7da2054a4be0> | keyword[def] identifier[main] ():
literal[string]
identifier[arguments] = identifier[IArguments] ( identifier[__doc__] )
identifier[content] = identifier[open] ( identifier[arguments] . identifier[filepath] ). identifier[read] ()
identifier[open] ( identifier[arguments] . identifier[filepath] + literal[string] , literal[string] ). identifier[write] ( identifier[content] )
keyword[try] :
identifier[newcontent] = identifier[transliterate] ( identifier[content] )
identifier[write_newcontent] ( identifier[arguments] . identifier[filepath] , identifier[newcontent] )
keyword[except] identifier[UnicodeEncodeError] keyword[as] identifier[ex] :
identifier[console] ( identifier[str] ( identifier[ex] ), identifier[color] = literal[string] )
identifier[newcontent] = identifier[forceascii] ( identifier[content] )
identifier[write_newcontent] ( identifier[arguments] . identifier[filepath] , identifier[newcontent] ) | def main():
"""
main
"""
arguments = IArguments(__doc__)
content = open(arguments.filepath).read()
open(arguments.filepath + '.bak', 'w').write(content)
try:
newcontent = transliterate(content)
write_newcontent(arguments.filepath, newcontent) # depends on [control=['try'], data=[]]
except UnicodeEncodeError as ex:
console(str(ex), color='red')
newcontent = forceascii(content)
write_newcontent(arguments.filepath, newcontent) # depends on [control=['except'], data=['ex']] |
def range_monthly(start=None, stop=None, timezone='UTC', count=None):
"""
This an alternative way to generating sets of Delorean objects with
MONTHLY stops
"""
return stops(start=start, stop=stop, freq=MONTHLY, timezone=timezone, count=count) | def function[range_monthly, parameter[start, stop, timezone, count]]:
constant[
This an alternative way to generating sets of Delorean objects with
MONTHLY stops
]
return[call[name[stops], parameter[]]] | keyword[def] identifier[range_monthly] ( identifier[start] = keyword[None] , identifier[stop] = keyword[None] , identifier[timezone] = literal[string] , identifier[count] = keyword[None] ):
literal[string]
keyword[return] identifier[stops] ( identifier[start] = identifier[start] , identifier[stop] = identifier[stop] , identifier[freq] = identifier[MONTHLY] , identifier[timezone] = identifier[timezone] , identifier[count] = identifier[count] ) | def range_monthly(start=None, stop=None, timezone='UTC', count=None):
"""
This an alternative way to generating sets of Delorean objects with
MONTHLY stops
"""
return stops(start=start, stop=stop, freq=MONTHLY, timezone=timezone, count=count) |
def compose(self, other, qargs=None, front=False):
"""Return the composition channel self∘other.
Args:
other (QuantumChannel): a quantum channel.
qargs (list): a list of subsystem positions to compose other on.
front (bool): If False compose in standard order other(self(input))
otherwise compose in reverse order self(other(input))
[default: False]
Returns:
Choi: The composition channel as a Choi object.
Raises:
QiskitError: if other cannot be converted to a channel or
has incompatible dimensions.
"""
if qargs is not None:
return Choi(
SuperOp(self).compose(other, qargs=qargs, front=front))
# Convert to Choi matrix
if not isinstance(other, Choi):
other = Choi(other)
# Check dimensions match up
if front and self._input_dim != other._output_dim:
raise QiskitError(
'input_dim of self must match output_dim of other')
if not front and self._output_dim != other._input_dim:
raise QiskitError(
'input_dim of other must match output_dim of self')
if front:
first = np.reshape(other._data, other._bipartite_shape)
second = np.reshape(self._data, self._bipartite_shape)
input_dim = other._input_dim
input_dims = other.input_dims()
output_dim = self._output_dim
output_dims = self.output_dims()
else:
first = np.reshape(self._data, self._bipartite_shape)
second = np.reshape(other._data, other._bipartite_shape)
input_dim = self._input_dim
input_dims = self.input_dims()
output_dim = other._output_dim
output_dims = other.output_dims()
# Contract Choi matrices for composition
data = np.reshape(
np.einsum('iAjB,AkBl->ikjl', first, second),
(input_dim * output_dim, input_dim * output_dim))
return Choi(data, input_dims, output_dims) | def function[compose, parameter[self, other, qargs, front]]:
constant[Return the composition channel self∘other.
Args:
other (QuantumChannel): a quantum channel.
qargs (list): a list of subsystem positions to compose other on.
front (bool): If False compose in standard order other(self(input))
otherwise compose in reverse order self(other(input))
[default: False]
Returns:
Choi: The composition channel as a Choi object.
Raises:
QiskitError: if other cannot be converted to a channel or
has incompatible dimensions.
]
if compare[name[qargs] is_not constant[None]] begin[:]
return[call[name[Choi], parameter[call[call[name[SuperOp], parameter[name[self]]].compose, parameter[name[other]]]]]]
if <ast.UnaryOp object at 0x7da1b03828c0> begin[:]
variable[other] assign[=] call[name[Choi], parameter[name[other]]]
if <ast.BoolOp object at 0x7da1b059d720> begin[:]
<ast.Raise object at 0x7da1b059e8f0>
if <ast.BoolOp object at 0x7da1b059cac0> begin[:]
<ast.Raise object at 0x7da1b059da20>
if name[front] begin[:]
variable[first] assign[=] call[name[np].reshape, parameter[name[other]._data, name[other]._bipartite_shape]]
variable[second] assign[=] call[name[np].reshape, parameter[name[self]._data, name[self]._bipartite_shape]]
variable[input_dim] assign[=] name[other]._input_dim
variable[input_dims] assign[=] call[name[other].input_dims, parameter[]]
variable[output_dim] assign[=] name[self]._output_dim
variable[output_dims] assign[=] call[name[self].output_dims, parameter[]]
variable[data] assign[=] call[name[np].reshape, parameter[call[name[np].einsum, parameter[constant[iAjB,AkBl->ikjl], name[first], name[second]]], tuple[[<ast.BinOp object at 0x7da1b0509e10>, <ast.BinOp object at 0x7da1b05091e0>]]]]
return[call[name[Choi], parameter[name[data], name[input_dims], name[output_dims]]]] | keyword[def] identifier[compose] ( identifier[self] , identifier[other] , identifier[qargs] = keyword[None] , identifier[front] = keyword[False] ):
literal[string]
keyword[if] identifier[qargs] keyword[is] keyword[not] keyword[None] :
keyword[return] identifier[Choi] (
identifier[SuperOp] ( identifier[self] ). identifier[compose] ( identifier[other] , identifier[qargs] = identifier[qargs] , identifier[front] = identifier[front] ))
keyword[if] keyword[not] identifier[isinstance] ( identifier[other] , identifier[Choi] ):
identifier[other] = identifier[Choi] ( identifier[other] )
keyword[if] identifier[front] keyword[and] identifier[self] . identifier[_input_dim] != identifier[other] . identifier[_output_dim] :
keyword[raise] identifier[QiskitError] (
literal[string] )
keyword[if] keyword[not] identifier[front] keyword[and] identifier[self] . identifier[_output_dim] != identifier[other] . identifier[_input_dim] :
keyword[raise] identifier[QiskitError] (
literal[string] )
keyword[if] identifier[front] :
identifier[first] = identifier[np] . identifier[reshape] ( identifier[other] . identifier[_data] , identifier[other] . identifier[_bipartite_shape] )
identifier[second] = identifier[np] . identifier[reshape] ( identifier[self] . identifier[_data] , identifier[self] . identifier[_bipartite_shape] )
identifier[input_dim] = identifier[other] . identifier[_input_dim]
identifier[input_dims] = identifier[other] . identifier[input_dims] ()
identifier[output_dim] = identifier[self] . identifier[_output_dim]
identifier[output_dims] = identifier[self] . identifier[output_dims] ()
keyword[else] :
identifier[first] = identifier[np] . identifier[reshape] ( identifier[self] . identifier[_data] , identifier[self] . identifier[_bipartite_shape] )
identifier[second] = identifier[np] . identifier[reshape] ( identifier[other] . identifier[_data] , identifier[other] . identifier[_bipartite_shape] )
identifier[input_dim] = identifier[self] . identifier[_input_dim]
identifier[input_dims] = identifier[self] . identifier[input_dims] ()
identifier[output_dim] = identifier[other] . identifier[_output_dim]
identifier[output_dims] = identifier[other] . identifier[output_dims] ()
identifier[data] = identifier[np] . identifier[reshape] (
identifier[np] . identifier[einsum] ( literal[string] , identifier[first] , identifier[second] ),
( identifier[input_dim] * identifier[output_dim] , identifier[input_dim] * identifier[output_dim] ))
keyword[return] identifier[Choi] ( identifier[data] , identifier[input_dims] , identifier[output_dims] ) | def compose(self, other, qargs=None, front=False):
"""Return the composition channel self∘other.
Args:
other (QuantumChannel): a quantum channel.
qargs (list): a list of subsystem positions to compose other on.
front (bool): If False compose in standard order other(self(input))
otherwise compose in reverse order self(other(input))
[default: False]
Returns:
Choi: The composition channel as a Choi object.
Raises:
QiskitError: if other cannot be converted to a channel or
has incompatible dimensions.
"""
if qargs is not None:
return Choi(SuperOp(self).compose(other, qargs=qargs, front=front)) # depends on [control=['if'], data=['qargs']]
# Convert to Choi matrix
if not isinstance(other, Choi):
other = Choi(other) # depends on [control=['if'], data=[]]
# Check dimensions match up
if front and self._input_dim != other._output_dim:
raise QiskitError('input_dim of self must match output_dim of other') # depends on [control=['if'], data=[]]
if not front and self._output_dim != other._input_dim:
raise QiskitError('input_dim of other must match output_dim of self') # depends on [control=['if'], data=[]]
if front:
first = np.reshape(other._data, other._bipartite_shape)
second = np.reshape(self._data, self._bipartite_shape)
input_dim = other._input_dim
input_dims = other.input_dims()
output_dim = self._output_dim
output_dims = self.output_dims() # depends on [control=['if'], data=[]]
else:
first = np.reshape(self._data, self._bipartite_shape)
second = np.reshape(other._data, other._bipartite_shape)
input_dim = self._input_dim
input_dims = self.input_dims()
output_dim = other._output_dim
output_dims = other.output_dims()
# Contract Choi matrices for composition
data = np.reshape(np.einsum('iAjB,AkBl->ikjl', first, second), (input_dim * output_dim, input_dim * output_dim))
return Choi(data, input_dims, output_dims) |
def shell_context_processor(self, fn):
"""
Registers a shell context processor function.
"""
self._defer(lambda app: app.shell_context_processor(fn))
return fn | def function[shell_context_processor, parameter[self, fn]]:
constant[
Registers a shell context processor function.
]
call[name[self]._defer, parameter[<ast.Lambda object at 0x7da20c6c7e20>]]
return[name[fn]] | keyword[def] identifier[shell_context_processor] ( identifier[self] , identifier[fn] ):
literal[string]
identifier[self] . identifier[_defer] ( keyword[lambda] identifier[app] : identifier[app] . identifier[shell_context_processor] ( identifier[fn] ))
keyword[return] identifier[fn] | def shell_context_processor(self, fn):
"""
Registers a shell context processor function.
"""
self._defer(lambda app: app.shell_context_processor(fn))
return fn |
def prep_message(msg):
"""
Add the size header
"""
if six.PY3:
msg_out = msg.as_string().encode("utf-8")
else:
msg_out = msg.as_string()
our_len = len(msg_out) + 4
size = struct.pack('>L', our_len)
# why the hell is this "bytes" on python3?
return size + msg_out | def function[prep_message, parameter[msg]]:
constant[
Add the size header
]
if name[six].PY3 begin[:]
variable[msg_out] assign[=] call[call[name[msg].as_string, parameter[]].encode, parameter[constant[utf-8]]]
variable[our_len] assign[=] binary_operation[call[name[len], parameter[name[msg_out]]] + constant[4]]
variable[size] assign[=] call[name[struct].pack, parameter[constant[>L], name[our_len]]]
return[binary_operation[name[size] + name[msg_out]]] | keyword[def] identifier[prep_message] ( identifier[msg] ):
literal[string]
keyword[if] identifier[six] . identifier[PY3] :
identifier[msg_out] = identifier[msg] . identifier[as_string] (). identifier[encode] ( literal[string] )
keyword[else] :
identifier[msg_out] = identifier[msg] . identifier[as_string] ()
identifier[our_len] = identifier[len] ( identifier[msg_out] )+ literal[int]
identifier[size] = identifier[struct] . identifier[pack] ( literal[string] , identifier[our_len] )
keyword[return] identifier[size] + identifier[msg_out] | def prep_message(msg):
"""
Add the size header
"""
if six.PY3:
msg_out = msg.as_string().encode('utf-8') # depends on [control=['if'], data=[]]
else:
msg_out = msg.as_string()
our_len = len(msg_out) + 4
size = struct.pack('>L', our_len)
# why the hell is this "bytes" on python3?
return size + msg_out |
def project_has_listeners(self, project):
"""
:param project_id: Project object
:returns: True if client listen this project
"""
return project.id in self._listeners and len(self._listeners[project.id]) > 0 | def function[project_has_listeners, parameter[self, project]]:
constant[
:param project_id: Project object
:returns: True if client listen this project
]
return[<ast.BoolOp object at 0x7da20c6c60e0>] | keyword[def] identifier[project_has_listeners] ( identifier[self] , identifier[project] ):
literal[string]
keyword[return] identifier[project] . identifier[id] keyword[in] identifier[self] . identifier[_listeners] keyword[and] identifier[len] ( identifier[self] . identifier[_listeners] [ identifier[project] . identifier[id] ])> literal[int] | def project_has_listeners(self, project):
"""
:param project_id: Project object
:returns: True if client listen this project
"""
return project.id in self._listeners and len(self._listeners[project.id]) > 0 |
def EMAIL_REQUIRED(self):
"""
The user is required to hand over an e-mail address when signing up
"""
from allauth.account import app_settings as account_settings
return self._setting("EMAIL_REQUIRED", account_settings.EMAIL_REQUIRED) | def function[EMAIL_REQUIRED, parameter[self]]:
constant[
The user is required to hand over an e-mail address when signing up
]
from relative_module[allauth.account] import module[app_settings]
return[call[name[self]._setting, parameter[constant[EMAIL_REQUIRED], name[account_settings].EMAIL_REQUIRED]]] | keyword[def] identifier[EMAIL_REQUIRED] ( identifier[self] ):
literal[string]
keyword[from] identifier[allauth] . identifier[account] keyword[import] identifier[app_settings] keyword[as] identifier[account_settings]
keyword[return] identifier[self] . identifier[_setting] ( literal[string] , identifier[account_settings] . identifier[EMAIL_REQUIRED] ) | def EMAIL_REQUIRED(self):
"""
The user is required to hand over an e-mail address when signing up
"""
from allauth.account import app_settings as account_settings
return self._setting('EMAIL_REQUIRED', account_settings.EMAIL_REQUIRED) |
def complete_file(self, text, line, *_):
""" Autocomplete DQL file lookup """
leading = line[len("file ") :]
curpath = os.path.join(os.path.curdir, leading)
def isdql(parent, filename):
""" Check if a file is .dql or a dir """
return not filename.startswith(".") and (
os.path.isdir(os.path.join(parent, filename))
or filename.lower().endswith(".dql")
)
def addslash(path):
""" Append a slash if a file is a directory """
if path.lower().endswith(".dql"):
return path + " "
else:
return path + "/"
if not os.path.exists(curpath) or not os.path.isdir(curpath):
curpath = os.path.dirname(curpath)
return [
addslash(f)
for f in os.listdir(curpath)
if f.startswith(text) and isdql(curpath, f)
] | def function[complete_file, parameter[self, text, line]]:
constant[ Autocomplete DQL file lookup ]
variable[leading] assign[=] call[name[line]][<ast.Slice object at 0x7da1b0e2db70>]
variable[curpath] assign[=] call[name[os].path.join, parameter[name[os].path.curdir, name[leading]]]
def function[isdql, parameter[parent, filename]]:
constant[ Check if a file is .dql or a dir ]
return[<ast.BoolOp object at 0x7da1b0e2d690>]
def function[addslash, parameter[path]]:
constant[ Append a slash if a file is a directory ]
if call[call[name[path].lower, parameter[]].endswith, parameter[constant[.dql]]] begin[:]
return[binary_operation[name[path] + constant[ ]]]
if <ast.BoolOp object at 0x7da1b0ebec20> begin[:]
variable[curpath] assign[=] call[name[os].path.dirname, parameter[name[curpath]]]
return[<ast.ListComp object at 0x7da1b0ebfaf0>] | keyword[def] identifier[complete_file] ( identifier[self] , identifier[text] , identifier[line] ,* identifier[_] ):
literal[string]
identifier[leading] = identifier[line] [ identifier[len] ( literal[string] ):]
identifier[curpath] = identifier[os] . identifier[path] . identifier[join] ( identifier[os] . identifier[path] . identifier[curdir] , identifier[leading] )
keyword[def] identifier[isdql] ( identifier[parent] , identifier[filename] ):
literal[string]
keyword[return] keyword[not] identifier[filename] . identifier[startswith] ( literal[string] ) keyword[and] (
identifier[os] . identifier[path] . identifier[isdir] ( identifier[os] . identifier[path] . identifier[join] ( identifier[parent] , identifier[filename] ))
keyword[or] identifier[filename] . identifier[lower] (). identifier[endswith] ( literal[string] )
)
keyword[def] identifier[addslash] ( identifier[path] ):
literal[string]
keyword[if] identifier[path] . identifier[lower] (). identifier[endswith] ( literal[string] ):
keyword[return] identifier[path] + literal[string]
keyword[else] :
keyword[return] identifier[path] + literal[string]
keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[exists] ( identifier[curpath] ) keyword[or] keyword[not] identifier[os] . identifier[path] . identifier[isdir] ( identifier[curpath] ):
identifier[curpath] = identifier[os] . identifier[path] . identifier[dirname] ( identifier[curpath] )
keyword[return] [
identifier[addslash] ( identifier[f] )
keyword[for] identifier[f] keyword[in] identifier[os] . identifier[listdir] ( identifier[curpath] )
keyword[if] identifier[f] . identifier[startswith] ( identifier[text] ) keyword[and] identifier[isdql] ( identifier[curpath] , identifier[f] )
] | def complete_file(self, text, line, *_):
""" Autocomplete DQL file lookup """
leading = line[len('file '):]
curpath = os.path.join(os.path.curdir, leading)
def isdql(parent, filename):
""" Check if a file is .dql or a dir """
return not filename.startswith('.') and (os.path.isdir(os.path.join(parent, filename)) or filename.lower().endswith('.dql'))
def addslash(path):
""" Append a slash if a file is a directory """
if path.lower().endswith('.dql'):
return path + ' ' # depends on [control=['if'], data=[]]
else:
return path + '/'
if not os.path.exists(curpath) or not os.path.isdir(curpath):
curpath = os.path.dirname(curpath) # depends on [control=['if'], data=[]]
return [addslash(f) for f in os.listdir(curpath) if f.startswith(text) and isdql(curpath, f)] |
def add_z(xy: np.ndarray, z: float) -> np.ndarray:
"""
Turn a 2-D transform matrix into a 3-D transform matrix (scale/shift only,
no rotation).
:param xy: A two-dimensional transform matrix (a 3x3 numpy ndarray) in the
following form:
[ 1 0 x ]
[ 0 1 y ]
[ 0 0 1 ]
:param z: a float for the z component
:return: a three-dimensional transformation matrix (a 4x4 numpy ndarray)
with x, y, and z from the function parameters, in the following form:
[ 1 0 0 x ]
[ 0 1 0 y ]
[ 0 0 1 z ]
[ 0 0 0 1 ]
"""
# First, insert a column of zeros as into the input matrix
interm = insert(xy, 2, [0, 0, 0], axis=1)
# Result:
# [ 1 0 0 x ]
# [ 0 1 0 y ]
# [ 0 0 0 1 ]
# Then, insert the z row to create a properly formed 3-D transform matrix:
xyz = insert(
interm,
2,
[0, 0, 1, z],
axis=0)
# Result:
# [ 1 0 0 x ]
# [ 0 1 0 y ]
# [ 0 0 1 z ]
# [ 0 0 0 1 ]
return xyz.round(11) | def function[add_z, parameter[xy, z]]:
constant[
Turn a 2-D transform matrix into a 3-D transform matrix (scale/shift only,
no rotation).
:param xy: A two-dimensional transform matrix (a 3x3 numpy ndarray) in the
following form:
[ 1 0 x ]
[ 0 1 y ]
[ 0 0 1 ]
:param z: a float for the z component
:return: a three-dimensional transformation matrix (a 4x4 numpy ndarray)
with x, y, and z from the function parameters, in the following form:
[ 1 0 0 x ]
[ 0 1 0 y ]
[ 0 0 1 z ]
[ 0 0 0 1 ]
]
variable[interm] assign[=] call[name[insert], parameter[name[xy], constant[2], list[[<ast.Constant object at 0x7da20e9b0490>, <ast.Constant object at 0x7da20e9b3a00>, <ast.Constant object at 0x7da20e9b28c0>]]]]
variable[xyz] assign[=] call[name[insert], parameter[name[interm], constant[2], list[[<ast.Constant object at 0x7da20e9b1300>, <ast.Constant object at 0x7da20e9b2230>, <ast.Constant object at 0x7da20e9b0a30>, <ast.Name object at 0x7da20e9b2ad0>]]]]
return[call[name[xyz].round, parameter[constant[11]]]] | keyword[def] identifier[add_z] ( identifier[xy] : identifier[np] . identifier[ndarray] , identifier[z] : identifier[float] )-> identifier[np] . identifier[ndarray] :
literal[string]
identifier[interm] = identifier[insert] ( identifier[xy] , literal[int] ,[ literal[int] , literal[int] , literal[int] ], identifier[axis] = literal[int] )
identifier[xyz] = identifier[insert] (
identifier[interm] ,
literal[int] ,
[ literal[int] , literal[int] , literal[int] , identifier[z] ],
identifier[axis] = literal[int] )
keyword[return] identifier[xyz] . identifier[round] ( literal[int] ) | def add_z(xy: np.ndarray, z: float) -> np.ndarray:
"""
Turn a 2-D transform matrix into a 3-D transform matrix (scale/shift only,
no rotation).
:param xy: A two-dimensional transform matrix (a 3x3 numpy ndarray) in the
following form:
[ 1 0 x ]
[ 0 1 y ]
[ 0 0 1 ]
:param z: a float for the z component
:return: a three-dimensional transformation matrix (a 4x4 numpy ndarray)
with x, y, and z from the function parameters, in the following form:
[ 1 0 0 x ]
[ 0 1 0 y ]
[ 0 0 1 z ]
[ 0 0 0 1 ]
"""
# First, insert a column of zeros as into the input matrix
interm = insert(xy, 2, [0, 0, 0], axis=1)
# Result:
# [ 1 0 0 x ]
# [ 0 1 0 y ]
# [ 0 0 0 1 ]
# Then, insert the z row to create a properly formed 3-D transform matrix:
xyz = insert(interm, 2, [0, 0, 1, z], axis=0)
# Result:
# [ 1 0 0 x ]
# [ 0 1 0 y ]
# [ 0 0 1 z ]
# [ 0 0 0 1 ]
return xyz.round(11) |
def prepare(cls, value, length=0, position=0, is_last_data=True):
"""Prepare Lob header.
Note that the actual lob data is NOT written here but appended after the parameter block for each row!
"""
hstruct = WriteLobHeader.header_struct
lob_option_dataincluded = WriteLobHeader.LOB_OPTION_DATAINCLUDED if length > 0 else 0
lob_option_lastdata = WriteLobHeader.LOB_OPTION_LASTDATA if is_last_data else 0
options = lob_option_dataincluded | lob_option_lastdata
pfield = hstruct.pack(cls.type_code, options, length, position)
return pfield | def function[prepare, parameter[cls, value, length, position, is_last_data]]:
constant[Prepare Lob header.
Note that the actual lob data is NOT written here but appended after the parameter block for each row!
]
variable[hstruct] assign[=] name[WriteLobHeader].header_struct
variable[lob_option_dataincluded] assign[=] <ast.IfExp object at 0x7da18f00dc00>
variable[lob_option_lastdata] assign[=] <ast.IfExp object at 0x7da18f00d480>
variable[options] assign[=] binary_operation[name[lob_option_dataincluded] <ast.BitOr object at 0x7da2590d6aa0> name[lob_option_lastdata]]
variable[pfield] assign[=] call[name[hstruct].pack, parameter[name[cls].type_code, name[options], name[length], name[position]]]
return[name[pfield]] | keyword[def] identifier[prepare] ( identifier[cls] , identifier[value] , identifier[length] = literal[int] , identifier[position] = literal[int] , identifier[is_last_data] = keyword[True] ):
literal[string]
identifier[hstruct] = identifier[WriteLobHeader] . identifier[header_struct]
identifier[lob_option_dataincluded] = identifier[WriteLobHeader] . identifier[LOB_OPTION_DATAINCLUDED] keyword[if] identifier[length] > literal[int] keyword[else] literal[int]
identifier[lob_option_lastdata] = identifier[WriteLobHeader] . identifier[LOB_OPTION_LASTDATA] keyword[if] identifier[is_last_data] keyword[else] literal[int]
identifier[options] = identifier[lob_option_dataincluded] | identifier[lob_option_lastdata]
identifier[pfield] = identifier[hstruct] . identifier[pack] ( identifier[cls] . identifier[type_code] , identifier[options] , identifier[length] , identifier[position] )
keyword[return] identifier[pfield] | def prepare(cls, value, length=0, position=0, is_last_data=True):
"""Prepare Lob header.
Note that the actual lob data is NOT written here but appended after the parameter block for each row!
"""
hstruct = WriteLobHeader.header_struct
lob_option_dataincluded = WriteLobHeader.LOB_OPTION_DATAINCLUDED if length > 0 else 0
lob_option_lastdata = WriteLobHeader.LOB_OPTION_LASTDATA if is_last_data else 0
options = lob_option_dataincluded | lob_option_lastdata
pfield = hstruct.pack(cls.type_code, options, length, position)
return pfield |
def _bipartite_tensor(mat1, mat2, shape1=None, shape2=None):
"""Tensor product (A ⊗ B) to bipartite matrices and reravel indicies.
This is used for tensor product of superoperators and Choi matrices.
Args:
mat1 (matrix_like): a bipartite matrix A
mat2 (matrix_like): a bipartite matrix B
shape1 (tuple): bipartite-shape for matrix A (a0, a1, a2, a3)
shape2 (tuple): bipartite-shape for matrix B (b0, b1, b2, b3)
Returns:
np.array: a bipartite matrix for reravel(A ⊗ B).
Raises:
QiskitError: if input matrices are wrong shape.
"""
# Convert inputs to numpy arrays
mat1 = np.array(mat1)
mat2 = np.array(mat2)
# Determine bipartite dimensions if not provided
dim_a0, dim_a1 = mat1.shape
dim_b0, dim_b1 = mat2.shape
if shape1 is None:
sdim_a0 = int(np.sqrt(dim_a0))
sdim_a1 = int(np.sqrt(dim_a1))
shape1 = (sdim_a0, sdim_a0, sdim_a1, sdim_a1)
if shape2 is None:
sdim_b0 = int(np.sqrt(dim_b0))
sdim_b1 = int(np.sqrt(dim_b1))
shape2 = (sdim_b0, sdim_b0, sdim_b1, sdim_b1)
# Check dimensions
if len(shape1) != 4 or shape1[0] * shape1[1] != dim_a0 or \
shape1[2] * shape1[3] != dim_a1:
raise QiskitError("Invalid shape_a")
if len(shape2) != 4 or shape2[0] * shape2[1] != dim_b0 or \
shape2[2] * shape2[3] != dim_b1:
raise QiskitError("Invalid shape_b")
return _reravel(mat1, mat2, shape1, shape2) | def function[_bipartite_tensor, parameter[mat1, mat2, shape1, shape2]]:
constant[Tensor product (A ⊗ B) to bipartite matrices and reravel indicies.
This is used for tensor product of superoperators and Choi matrices.
Args:
mat1 (matrix_like): a bipartite matrix A
mat2 (matrix_like): a bipartite matrix B
shape1 (tuple): bipartite-shape for matrix A (a0, a1, a2, a3)
shape2 (tuple): bipartite-shape for matrix B (b0, b1, b2, b3)
Returns:
np.array: a bipartite matrix for reravel(A ⊗ B).
Raises:
QiskitError: if input matrices are wrong shape.
]
variable[mat1] assign[=] call[name[np].array, parameter[name[mat1]]]
variable[mat2] assign[=] call[name[np].array, parameter[name[mat2]]]
<ast.Tuple object at 0x7da20c6e7730> assign[=] name[mat1].shape
<ast.Tuple object at 0x7da20c6e5ae0> assign[=] name[mat2].shape
if compare[name[shape1] is constant[None]] begin[:]
variable[sdim_a0] assign[=] call[name[int], parameter[call[name[np].sqrt, parameter[name[dim_a0]]]]]
variable[sdim_a1] assign[=] call[name[int], parameter[call[name[np].sqrt, parameter[name[dim_a1]]]]]
variable[shape1] assign[=] tuple[[<ast.Name object at 0x7da20c6c7970>, <ast.Name object at 0x7da20c6c6d70>, <ast.Name object at 0x7da20c6c6a40>, <ast.Name object at 0x7da20c6c5f00>]]
if compare[name[shape2] is constant[None]] begin[:]
variable[sdim_b0] assign[=] call[name[int], parameter[call[name[np].sqrt, parameter[name[dim_b0]]]]]
variable[sdim_b1] assign[=] call[name[int], parameter[call[name[np].sqrt, parameter[name[dim_b1]]]]]
variable[shape2] assign[=] tuple[[<ast.Name object at 0x7da20c6c59c0>, <ast.Name object at 0x7da18f00e080>, <ast.Name object at 0x7da18f00f0d0>, <ast.Name object at 0x7da18f00d300>]]
if <ast.BoolOp object at 0x7da18f00cee0> begin[:]
<ast.Raise object at 0x7da2047eac50>
if <ast.BoolOp object at 0x7da2047eb700> begin[:]
<ast.Raise object at 0x7da1b05b0f70>
return[call[name[_reravel], parameter[name[mat1], name[mat2], name[shape1], name[shape2]]]] | keyword[def] identifier[_bipartite_tensor] ( identifier[mat1] , identifier[mat2] , identifier[shape1] = keyword[None] , identifier[shape2] = keyword[None] ):
literal[string]
identifier[mat1] = identifier[np] . identifier[array] ( identifier[mat1] )
identifier[mat2] = identifier[np] . identifier[array] ( identifier[mat2] )
identifier[dim_a0] , identifier[dim_a1] = identifier[mat1] . identifier[shape]
identifier[dim_b0] , identifier[dim_b1] = identifier[mat2] . identifier[shape]
keyword[if] identifier[shape1] keyword[is] keyword[None] :
identifier[sdim_a0] = identifier[int] ( identifier[np] . identifier[sqrt] ( identifier[dim_a0] ))
identifier[sdim_a1] = identifier[int] ( identifier[np] . identifier[sqrt] ( identifier[dim_a1] ))
identifier[shape1] =( identifier[sdim_a0] , identifier[sdim_a0] , identifier[sdim_a1] , identifier[sdim_a1] )
keyword[if] identifier[shape2] keyword[is] keyword[None] :
identifier[sdim_b0] = identifier[int] ( identifier[np] . identifier[sqrt] ( identifier[dim_b0] ))
identifier[sdim_b1] = identifier[int] ( identifier[np] . identifier[sqrt] ( identifier[dim_b1] ))
identifier[shape2] =( identifier[sdim_b0] , identifier[sdim_b0] , identifier[sdim_b1] , identifier[sdim_b1] )
keyword[if] identifier[len] ( identifier[shape1] )!= literal[int] keyword[or] identifier[shape1] [ literal[int] ]* identifier[shape1] [ literal[int] ]!= identifier[dim_a0] keyword[or] identifier[shape1] [ literal[int] ]* identifier[shape1] [ literal[int] ]!= identifier[dim_a1] :
keyword[raise] identifier[QiskitError] ( literal[string] )
keyword[if] identifier[len] ( identifier[shape2] )!= literal[int] keyword[or] identifier[shape2] [ literal[int] ]* identifier[shape2] [ literal[int] ]!= identifier[dim_b0] keyword[or] identifier[shape2] [ literal[int] ]* identifier[shape2] [ literal[int] ]!= identifier[dim_b1] :
keyword[raise] identifier[QiskitError] ( literal[string] )
keyword[return] identifier[_reravel] ( identifier[mat1] , identifier[mat2] , identifier[shape1] , identifier[shape2] ) | def _bipartite_tensor(mat1, mat2, shape1=None, shape2=None):
"""Tensor product (A ⊗ B) to bipartite matrices and reravel indicies.
This is used for tensor product of superoperators and Choi matrices.
Args:
mat1 (matrix_like): a bipartite matrix A
mat2 (matrix_like): a bipartite matrix B
shape1 (tuple): bipartite-shape for matrix A (a0, a1, a2, a3)
shape2 (tuple): bipartite-shape for matrix B (b0, b1, b2, b3)
Returns:
np.array: a bipartite matrix for reravel(A ⊗ B).
Raises:
QiskitError: if input matrices are wrong shape.
"""
# Convert inputs to numpy arrays
mat1 = np.array(mat1)
mat2 = np.array(mat2)
# Determine bipartite dimensions if not provided
(dim_a0, dim_a1) = mat1.shape
(dim_b0, dim_b1) = mat2.shape
if shape1 is None:
sdim_a0 = int(np.sqrt(dim_a0))
sdim_a1 = int(np.sqrt(dim_a1))
shape1 = (sdim_a0, sdim_a0, sdim_a1, sdim_a1) # depends on [control=['if'], data=['shape1']]
if shape2 is None:
sdim_b0 = int(np.sqrt(dim_b0))
sdim_b1 = int(np.sqrt(dim_b1))
shape2 = (sdim_b0, sdim_b0, sdim_b1, sdim_b1) # depends on [control=['if'], data=['shape2']]
# Check dimensions
if len(shape1) != 4 or shape1[0] * shape1[1] != dim_a0 or shape1[2] * shape1[3] != dim_a1:
raise QiskitError('Invalid shape_a') # depends on [control=['if'], data=[]]
if len(shape2) != 4 or shape2[0] * shape2[1] != dim_b0 or shape2[2] * shape2[3] != dim_b1:
raise QiskitError('Invalid shape_b') # depends on [control=['if'], data=[]]
return _reravel(mat1, mat2, shape1, shape2) |
def patch_instance_group_manager(self, zone, resource_id,
body, request_id=None, project_id=None):
"""
Patches Instance Group Manager with the specified body.
Must be called with keyword arguments rather than positional.
:param zone: Google Cloud Platform zone where the Instance Group Manager exists
:type zone: str
:param resource_id: Name of the Instance Group Manager
:type resource_id: str
:param body: Instance Group Manager representation as json-merge-patch object
according to
https://cloud.google.com/compute/docs/reference/rest/beta/instanceTemplates/patch
:type body: dict
:param request_id: Optional, unique request_id that you might add to achieve
full idempotence (for example when client call times out repeating the request
with the same request id will not create a new instance template again).
It should be in UUID format as defined in RFC 4122
:type request_id: str
:param project_id: Optional, Google Cloud Platform project ID where the
Compute Engine Instance exists. If set to None or missing,
the default project_id from the GCP connection is used.
:type project_id: str
:return: None
"""
response = self.get_conn().instanceGroupManagers().patch(
project=project_id,
zone=zone,
instanceGroupManager=resource_id,
body=body,
requestId=request_id
).execute(num_retries=self.num_retries)
try:
operation_name = response["name"]
except KeyError:
raise AirflowException(
"Wrong response '{}' returned - it should contain "
"'name' field".format(response))
self._wait_for_operation_to_complete(project_id=project_id,
operation_name=operation_name,
zone=zone) | def function[patch_instance_group_manager, parameter[self, zone, resource_id, body, request_id, project_id]]:
constant[
Patches Instance Group Manager with the specified body.
Must be called with keyword arguments rather than positional.
:param zone: Google Cloud Platform zone where the Instance Group Manager exists
:type zone: str
:param resource_id: Name of the Instance Group Manager
:type resource_id: str
:param body: Instance Group Manager representation as json-merge-patch object
according to
https://cloud.google.com/compute/docs/reference/rest/beta/instanceTemplates/patch
:type body: dict
:param request_id: Optional, unique request_id that you might add to achieve
full idempotence (for example when client call times out repeating the request
with the same request id will not create a new instance template again).
It should be in UUID format as defined in RFC 4122
:type request_id: str
:param project_id: Optional, Google Cloud Platform project ID where the
Compute Engine Instance exists. If set to None or missing,
the default project_id from the GCP connection is used.
:type project_id: str
:return: None
]
variable[response] assign[=] call[call[call[call[name[self].get_conn, parameter[]].instanceGroupManagers, parameter[]].patch, parameter[]].execute, parameter[]]
<ast.Try object at 0x7da18bcc9150>
call[name[self]._wait_for_operation_to_complete, parameter[]] | keyword[def] identifier[patch_instance_group_manager] ( identifier[self] , identifier[zone] , identifier[resource_id] ,
identifier[body] , identifier[request_id] = keyword[None] , identifier[project_id] = keyword[None] ):
literal[string]
identifier[response] = identifier[self] . identifier[get_conn] (). identifier[instanceGroupManagers] (). identifier[patch] (
identifier[project] = identifier[project_id] ,
identifier[zone] = identifier[zone] ,
identifier[instanceGroupManager] = identifier[resource_id] ,
identifier[body] = identifier[body] ,
identifier[requestId] = identifier[request_id]
). identifier[execute] ( identifier[num_retries] = identifier[self] . identifier[num_retries] )
keyword[try] :
identifier[operation_name] = identifier[response] [ literal[string] ]
keyword[except] identifier[KeyError] :
keyword[raise] identifier[AirflowException] (
literal[string]
literal[string] . identifier[format] ( identifier[response] ))
identifier[self] . identifier[_wait_for_operation_to_complete] ( identifier[project_id] = identifier[project_id] ,
identifier[operation_name] = identifier[operation_name] ,
identifier[zone] = identifier[zone] ) | def patch_instance_group_manager(self, zone, resource_id, body, request_id=None, project_id=None):
"""
Patches Instance Group Manager with the specified body.
Must be called with keyword arguments rather than positional.
:param zone: Google Cloud Platform zone where the Instance Group Manager exists
:type zone: str
:param resource_id: Name of the Instance Group Manager
:type resource_id: str
:param body: Instance Group Manager representation as json-merge-patch object
according to
https://cloud.google.com/compute/docs/reference/rest/beta/instanceTemplates/patch
:type body: dict
:param request_id: Optional, unique request_id that you might add to achieve
full idempotence (for example when client call times out repeating the request
with the same request id will not create a new instance template again).
It should be in UUID format as defined in RFC 4122
:type request_id: str
:param project_id: Optional, Google Cloud Platform project ID where the
Compute Engine Instance exists. If set to None or missing,
the default project_id from the GCP connection is used.
:type project_id: str
:return: None
"""
response = self.get_conn().instanceGroupManagers().patch(project=project_id, zone=zone, instanceGroupManager=resource_id, body=body, requestId=request_id).execute(num_retries=self.num_retries)
try:
operation_name = response['name'] # depends on [control=['try'], data=[]]
except KeyError:
raise AirflowException("Wrong response '{}' returned - it should contain 'name' field".format(response)) # depends on [control=['except'], data=[]]
self._wait_for_operation_to_complete(project_id=project_id, operation_name=operation_name, zone=zone) |
def sortarai(self, datablock, s, Zdiff):
"""
sorts data block in to first_Z, first_I, etc.
"""
first_Z, first_I, zptrm_check, ptrm_check, ptrm_tail = [], [], [], [], []
field, phi, theta = "", "", ""
starthere = 0
Treat_I, Treat_Z, Treat_PZ, Treat_PI, Treat_M, Treat_AC = [], [], [], [], [], []
ISteps, ZSteps, PISteps, PZSteps, MSteps, ACSteps = [], [], [], [], [], []
GammaChecks = [] # comparison of pTRM direction acquired and lab field
Mkeys = ['measurement_magn_moment', 'measurement_magn_volume',
'measurement_magn_mass', 'measurement_magnitude']
rec = datablock[0]
for key in Mkeys:
if key in list(rec.keys()) and rec[key] != "":
momkey = key
break
# first find all the steps
for k in range(len(datablock)):
rec = datablock[k]
if "treatment_temp" in list(rec.keys()) and rec["treatment_temp"] != "":
temp = float(rec["treatment_temp"])
THERMAL = True
MICROWAVE = False
elif "treatment_mw_power" in list(rec.keys()) and rec["treatment_mw_power"] != "":
THERMAL = False
MICROWAVE = True
if "measurement_description" in list(rec.keys()):
MW_step = rec["measurement_description"].strip(
'\n').split(":")
for STEP in MW_step:
if "Number" in STEP:
temp = float(STEP.split("-")[-1])
methcodes = []
tmp = rec["magic_method_codes"].split(":")
for meth in tmp:
methcodes.append(meth.strip())
# for thellier-thellier
if 'LT-T-I' in methcodes and 'LP-PI-TRM' in methcodes and 'LP-TRM' not in methcodes:
Treat_I.append(temp)
ISteps.append(k)
if field == "":
field = float(rec["treatment_dc_field"])
if phi == "":
phi = float(rec['treatment_dc_field_phi'])
theta = float(rec['treatment_dc_field_theta'])
# for Microwave
if 'LT-M-I' in methcodes and 'LP-PI-M' in methcodes:
Treat_I.append(temp)
ISteps.append(k)
if field == "":
field = float(rec["treatment_dc_field"])
if phi == "":
phi = float(rec['treatment_dc_field_phi'])
theta = float(rec['treatment_dc_field_theta'])
# stick first zero field stuff into first_Z
if 'LT-NO' in methcodes:
Treat_Z.append(temp)
ZSteps.append(k)
if "LT-AF-Z" in methcodes and 'treatment_ac_field' in list(rec.keys()):
if rec['treatment_ac_field'] != "":
AFD_after_NRM = True
# consider AFD before T-T experiment ONLY if it comes before
# the experiment
for i in range(len(first_I)):
# check if there was an infield step before the AFD
if float(first_I[i][3]) != 0:
AFD_after_NRM = False
if AFD_after_NRM:
AF_field = 0
if 'treatment_ac_field' in rec:
try:
AF_field = float(rec['treatment_ac_field']) * 1000
except ValueError:
pass
dec = float(rec["measurement_dec"])
inc = float(rec["measurement_inc"])
intensity = float(rec[momkey])
first_I.append([273. - AF_field, 0., 0., 0., 1])
first_Z.append(
[273. - AF_field, dec, inc, intensity, 1]) # NRM step
if 'LT-T-Z' in methcodes or 'LT-M-Z' in methcodes:
Treat_Z.append(temp)
ZSteps.append(k)
if 'LT-PTRM-Z':
Treat_PZ.append(temp)
PZSteps.append(k)
if 'LT-PTRM-I' in methcodes or 'LT-PMRM-I' in methcodes:
Treat_PI.append(temp)
PISteps.append(k)
if 'LT-PTRM-MD' in methcodes or 'LT-PMRM-MD' in methcodes:
Treat_M.append(temp)
MSteps.append(k)
if 'LT-PTRM-AC' in methcodes or 'LT-PMRM-AC' in methcodes:
Treat_AC.append(temp)
ACSteps.append(k)
if 'LT-NO' in methcodes:
dec = float(rec["measurement_dec"])
inc = float(rec["measurement_inc"])
moment = float(rec["measurement_magn_moment"])
if 'LP-PI-M' not in methcodes:
first_I.append([273, 0., 0., 0., 1])
first_Z.append([273, dec, inc, moment, 1]) # NRM step
else:
first_I.append([0, 0., 0., 0., 1])
first_Z.append([0, dec, inc, moment, 1]) # NRM step
#---------------------
# find IZ and ZI
#---------------------
for temp in Treat_I: # look through infield steps and find matching Z step
if temp in Treat_Z: # found a match
istep = ISteps[Treat_I.index(temp)]
irec = datablock[istep]
methcodes = []
tmp = irec["magic_method_codes"].split(":")
for meth in tmp:
methcodes.append(meth.strip())
# take last record as baseline to subtract
brec = datablock[istep - 1]
zstep = ZSteps[Treat_Z.index(temp)]
zrec = datablock[zstep]
# sort out first_Z records
# check if ZI/IZ in in method codes:
ZI = ""
if "LP-PI-TRM-IZ" in methcodes or "LP-PI-M-IZ" in methcodes or "LP-PI-IZ" in methcodes:
ZI = 0
elif "LP-PI-TRM-ZI" in methcodes or "LP-PI-M-ZI" in methcodes or "LP-PI-ZI" in methcodes:
ZI = 1
elif "LP-PI-BT-IZZI" in methcodes:
ZI == ""
i_intex, z_intex = 0, 0
foundit = False
for i in range(len(datablock)):
if THERMAL:
if ('treatment_temp' in list(datablock[i].keys()) and float(temp) == float(datablock[i]['treatment_temp'])):
foundit = True
if MICROWAVE:
if ('measurement_description' in list(datablock[i].keys())):
MW_step = datablock[i]["measurement_description"].strip(
'\n').split(":")
for STEP in MW_step:
if "Number" in STEP:
ThisStep = float(STEP.split("-")[-1])
if ThisStep == float(temp):
foundit = True
if foundit:
if "LT-T-Z" in datablock[i]['magic_method_codes'].split(":") or "LT-M-Z" in datablock[i]['magic_method_codes'].split(":"):
z_intex = i
if "LT-T-I" in datablock[i]['magic_method_codes'].split(":") or "LT-M-I" in datablock[i]['magic_method_codes'].split(":"):
i_intex = i
foundit = False
if z_intex < i_intex:
ZI = 1
else:
ZI = 0
dec = float(zrec["measurement_dec"])
inc = float(zrec["measurement_inc"])
str = float(zrec[momkey])
first_Z.append([temp, dec, inc, str, ZI])
# sort out first_I records
idec = float(irec["measurement_dec"])
iinc = float(irec["measurement_inc"])
istr = float(irec[momkey])
X = pmag.dir2cart([idec, iinc, istr])
BL = pmag.dir2cart([dec, inc, str])
I = []
for c in range(3):
I.append((X[c] - BL[c]))
if I[2] != 0:
iDir = pmag.cart2dir(I)
if Zdiff == 0:
first_I.append([temp, iDir[0], iDir[1], iDir[2], ZI])
else:
first_I.append([temp, 0., 0., I[2], ZI])
# gamma=angle([iDir[0],iDir[1]],[phi,theta])
else:
first_I.append([temp, 0., 0., 0., ZI])
# gamma=0.0
# put in Gamma check (infield trm versus lab field)
# if 180.-gamma<gamma:
# gamma=180.-gamma
# GammaChecks.append([temp-273.,gamma])
#---------------------
# find Thellier Thellier protocol
#---------------------
if 'LP-PI-II'in methcodes or 'LP-PI-T-II' in methcodes or 'LP-PI-M-II' in methcodes:
# look through infield steps and find matching Z step
for i in range(1, len(Treat_I)):
if Treat_I[i] == Treat_I[i - 1]:
# ignore, if there are more than
temp = Treat_I[i]
irec1 = datablock[ISteps[i - 1]]
dec1 = float(irec1["measurement_dec"])
inc1 = float(irec1["measurement_inc"])
moment1 = float(irec1["measurement_magn_moment"])
if len(first_I) < 2:
dec_initial = dec1
inc_initial = inc1
cart1 = np.array(pmag.dir2cart([dec1, inc1, moment1]))
irec2 = datablock[ISteps[i]]
dec2 = float(irec2["measurement_dec"])
inc2 = float(irec2["measurement_inc"])
moment2 = float(irec2["measurement_magn_moment"])
cart2 = np.array(pmag.dir2cart([dec2, inc2, moment2]))
# check if its in the same treatment
if Treat_I[i] == Treat_I[i - 2] and dec2 != dec_initial and inc2 != inc_initial:
continue
if dec1 != dec2 and inc1 != inc2:
zerofield = (cart2 + cart1) / 2
infield = (cart2 - cart1) / 2
DIR_zerofield = pmag.cart2dir(zerofield)
DIR_infield = pmag.cart2dir(infield)
first_Z.append(
[temp, DIR_zerofield[0], DIR_zerofield[1], DIR_zerofield[2], 0])
first_I.append(
[temp, DIR_infield[0], DIR_infield[1], DIR_infield[2], 0])
#---------------------
# find pTRM checks
#---------------------
for i in range(len(Treat_PI)): # look through infield steps and find matching Z step
temp = Treat_PI[i]
k = PISteps[i]
rec = datablock[k]
dec = float(rec["measurement_dec"])
inc = float(rec["measurement_inc"])
moment = float(rec["measurement_magn_moment"])
phi = float(rec["treatment_dc_field_phi"])
theta = float(rec["treatment_dc_field_theta"])
M = np.array(pmag.dir2cart([dec, inc, moment]))
foundit = False
if 'LP-PI-II' not in methcodes:
# Important: suport several pTRM checks in a row, but
# does not support pTRM checks after infield step
for j in range(k, 1, -1):
if "LT-M-I" in datablock[j]['magic_method_codes'] or "LT-T-I" in datablock[j]['magic_method_codes']:
after_zerofield = 0.
foundit = True
prev_rec = datablock[j]
zerofield_index = j
break
if float(datablock[j]['treatment_dc_field']) == 0:
after_zerofield = 1.
foundit = True
prev_rec = datablock[j]
zerofield_index = j
break
else: # Thellier-Thellier protocol
foundit = True
prev_rec = datablock[k - 1]
zerofield_index = k - 1
if foundit:
prev_dec = float(prev_rec["measurement_dec"])
prev_inc = float(prev_rec["measurement_inc"])
prev_moment = float(prev_rec["measurement_magn_moment"])
prev_phi = float(prev_rec["treatment_dc_field_phi"])
prev_theta = float(prev_rec["treatment_dc_field_theta"])
prev_M = np.array(pmag.dir2cart(
[prev_dec, prev_inc, prev_moment]))
if 'LP-PI-II' not in methcodes:
diff_cart = M - prev_M
diff_dir = pmag.cart2dir(diff_cart)
if after_zerofield == 0:
ptrm_check.append(
[temp, diff_dir[0], diff_dir[1], diff_dir[2], zerofield_index, after_zerofield])
else:
ptrm_check.append(
[temp, diff_dir[0], diff_dir[1], diff_dir[2], zerofield_index, after_zerofield])
else:
# health check for T-T protocol:
if theta != prev_theta:
diff = (M - prev_M) / 2
diff_dir = pmag.cart2dir(diff)
ptrm_check.append(
[temp, diff_dir[0], diff_dir[1], diff_dir[2], zerofield_index, ""])
else:
print(
"-W- WARNING: specimen. pTRM check not in place in Thellier Thellier protocol. step please check")
#---------------------
# find Tail checks
#---------------------
for temp in Treat_M:
# print temp
step = MSteps[Treat_M.index(temp)]
rec = datablock[step]
dec = float(rec["measurement_dec"])
inc = float(rec["measurement_inc"])
moment = float(rec["measurement_magn_moment"])
foundit = False
for i in range(1, len(datablock)):
if 'LT-T-Z' in datablock[i]['magic_method_codes'] or 'LT-M-Z' in datablock[i]['magic_method_codes']:
if (THERMAL and "treatment_temp" in list(datablock[i].keys()) and float(datablock[i]["treatment_temp"]) == float(temp))\
or (MICROWAVE and "measurement_description" in list(datablock[i].keys()) and "Step Number-%.0f" % float(temp) in datablock[i]["measurement_description"]):
prev_rec = datablock[i]
prev_dec = float(prev_rec["measurement_dec"])
prev_inc = float(prev_rec["measurement_inc"])
prev_moment = float(
prev_rec["measurement_magn_moment"])
foundit = True
break
if foundit:
ptrm_tail.append([temp, 0, 0, moment - prev_moment])
#
# final check
#
if len(first_Z) != len(first_I):
print(len(first_Z), len(first_I))
print(" Something wrong with this specimen! Better fix it or delete it ")
input(" press return to acknowledge message")
#---------------------
# find Additivity (patch by rshaar)
#---------------------
additivity_check = []
for i in range(len(Treat_AC)):
step_0 = ACSteps[i]
temp = Treat_AC[i]
dec0 = float(datablock[step_0]["measurement_dec"])
inc0 = float(datablock[step_0]["measurement_inc"])
moment0 = float(datablock[step_0]['measurement_magn_moment'])
V0 = pmag.dir2cart([dec0, inc0, moment0])
# find the infield step that comes before the additivity check
foundit = False
for j in range(step_0, 1, -1):
if "LT-T-I" in datablock[j]['magic_method_codes']:
foundit = True
break
if foundit:
dec1 = float(datablock[j]["measurement_dec"])
inc1 = float(datablock[j]["measurement_inc"])
moment1 = float(datablock[j]['measurement_magn_moment'])
V1 = pmag.dir2cart([dec1, inc1, moment1])
# print "additivity check: ",s
# print j
# print "ACC=V1-V0:"
# print "V1=",[dec1,inc1,moment1],pmag.dir2cart([dec1,inc1,moment1])/float(datablock[0]["measurement_magn_moment"])
# print "V1=",pmag.dir2cart([dec1,inc1,moment1])/float(datablock[0]["measurement_magn_moment"])
# print "V0=",[dec0,inc0,moment0],pmag.dir2cart([dec0,inc0,moment0])/float(datablock[0]["measurement_magn_moment"])
# print "NRM=",float(datablock[0]["measurement_magn_moment"])
# print "-------"
I = []
for c in range(3):
I.append(V1[c] - V0[c])
dir1 = pmag.cart2dir(I)
additivity_check.append([temp, dir1[0], dir1[1], dir1[2]])
# print
# "I",np.array(I)/float(datablock[0]["measurement_magn_moment"]),dir1,"(dir1
# unnormalized)"
X = np.array(I) / \
float(datablock[0]["measurement_magn_moment"])
# print "I",np.sqrt(sum(X**2))
araiblock = (first_Z, first_I, ptrm_check, ptrm_tail,
zptrm_check, GammaChecks, additivity_check)
return araiblock, field | def function[sortarai, parameter[self, datablock, s, Zdiff]]:
constant[
sorts data block in to first_Z, first_I, etc.
]
<ast.Tuple object at 0x7da1b02c1ab0> assign[=] tuple[[<ast.List object at 0x7da1b02c1900>, <ast.List object at 0x7da1b02c18d0>, <ast.List object at 0x7da1b02c18a0>, <ast.List object at 0x7da1b02c1870>, <ast.List object at 0x7da1b02c1840>]]
<ast.Tuple object at 0x7da1b02c17e0> assign[=] tuple[[<ast.Constant object at 0x7da1b02c16f0>, <ast.Constant object at 0x7da1b02c16c0>, <ast.Constant object at 0x7da1b02c1690>]]
variable[starthere] assign[=] constant[0]
<ast.Tuple object at 0x7da1b02c15a0> assign[=] tuple[[<ast.List object at 0x7da1b02c13c0>, <ast.List object at 0x7da1b02c1390>, <ast.List object at 0x7da1b02c1360>, <ast.List object at 0x7da1b02c1330>, <ast.List object at 0x7da1b02c1300>, <ast.List object at 0x7da1b02c12d0>]]
<ast.Tuple object at 0x7da1b02c1270> assign[=] tuple[[<ast.List object at 0x7da1b02c1090>, <ast.List object at 0x7da1b02c1060>, <ast.List object at 0x7da1b02c1030>, <ast.List object at 0x7da1b02c1000>, <ast.List object at 0x7da1b02c0fd0>, <ast.List object at 0x7da1b02c0fa0>]]
variable[GammaChecks] assign[=] list[[]]
variable[Mkeys] assign[=] list[[<ast.Constant object at 0x7da1b02c0e50>, <ast.Constant object at 0x7da1b02c0e20>, <ast.Constant object at 0x7da1b02c0df0>, <ast.Constant object at 0x7da1b02c0dc0>]]
variable[rec] assign[=] call[name[datablock]][constant[0]]
for taget[name[key]] in starred[name[Mkeys]] begin[:]
if <ast.BoolOp object at 0x7da1b02c0be0> begin[:]
variable[momkey] assign[=] name[key]
break
for taget[name[k]] in starred[call[name[range], parameter[call[name[len], parameter[name[datablock]]]]]] begin[:]
variable[rec] assign[=] call[name[datablock]][name[k]]
if <ast.BoolOp object at 0x7da1b02c0610> begin[:]
variable[temp] assign[=] call[name[float], parameter[call[name[rec]][constant[treatment_temp]]]]
variable[THERMAL] assign[=] constant[True]
variable[MICROWAVE] assign[=] constant[False]
variable[methcodes] assign[=] list[[]]
variable[tmp] assign[=] call[call[name[rec]][constant[magic_method_codes]].split, parameter[constant[:]]]
for taget[name[meth]] in starred[name[tmp]] begin[:]
call[name[methcodes].append, parameter[call[name[meth].strip, parameter[]]]]
if <ast.BoolOp object at 0x7da1b022f520> begin[:]
call[name[Treat_I].append, parameter[name[temp]]]
call[name[ISteps].append, parameter[name[k]]]
if compare[name[field] equal[==] constant[]] begin[:]
variable[field] assign[=] call[name[float], parameter[call[name[rec]][constant[treatment_dc_field]]]]
if compare[name[phi] equal[==] constant[]] begin[:]
variable[phi] assign[=] call[name[float], parameter[call[name[rec]][constant[treatment_dc_field_phi]]]]
variable[theta] assign[=] call[name[float], parameter[call[name[rec]][constant[treatment_dc_field_theta]]]]
if <ast.BoolOp object at 0x7da1b022fbb0> begin[:]
call[name[Treat_I].append, parameter[name[temp]]]
call[name[ISteps].append, parameter[name[k]]]
if compare[name[field] equal[==] constant[]] begin[:]
variable[field] assign[=] call[name[float], parameter[call[name[rec]][constant[treatment_dc_field]]]]
if compare[name[phi] equal[==] constant[]] begin[:]
variable[phi] assign[=] call[name[float], parameter[call[name[rec]][constant[treatment_dc_field_phi]]]]
variable[theta] assign[=] call[name[float], parameter[call[name[rec]][constant[treatment_dc_field_theta]]]]
if compare[constant[LT-NO] in name[methcodes]] begin[:]
call[name[Treat_Z].append, parameter[name[temp]]]
call[name[ZSteps].append, parameter[name[k]]]
if <ast.BoolOp object at 0x7da1b022e980> begin[:]
if compare[call[name[rec]][constant[treatment_ac_field]] not_equal[!=] constant[]] begin[:]
variable[AFD_after_NRM] assign[=] constant[True]
for taget[name[i]] in starred[call[name[range], parameter[call[name[len], parameter[name[first_I]]]]]] begin[:]
if compare[call[name[float], parameter[call[call[name[first_I]][name[i]]][constant[3]]]] not_equal[!=] constant[0]] begin[:]
variable[AFD_after_NRM] assign[=] constant[False]
if name[AFD_after_NRM] begin[:]
variable[AF_field] assign[=] constant[0]
if compare[constant[treatment_ac_field] in name[rec]] begin[:]
<ast.Try object at 0x7da1b02e8ac0>
variable[dec] assign[=] call[name[float], parameter[call[name[rec]][constant[measurement_dec]]]]
variable[inc] assign[=] call[name[float], parameter[call[name[rec]][constant[measurement_inc]]]]
variable[intensity] assign[=] call[name[float], parameter[call[name[rec]][name[momkey]]]]
call[name[first_I].append, parameter[list[[<ast.BinOp object at 0x7da1b02021a0>, <ast.Constant object at 0x7da1b0202320>, <ast.Constant object at 0x7da1b02022c0>, <ast.Constant object at 0x7da1b02022f0>, <ast.Constant object at 0x7da1b0202290>]]]]
call[name[first_Z].append, parameter[list[[<ast.BinOp object at 0x7da1b0202020>, <ast.Name object at 0x7da1b0202140>, <ast.Name object at 0x7da1b0201fc0>, <ast.Name object at 0x7da1b0201e40>, <ast.Constant object at 0x7da1b0201de0>]]]]
if <ast.BoolOp object at 0x7da1b0201e10> begin[:]
call[name[Treat_Z].append, parameter[name[temp]]]
call[name[ZSteps].append, parameter[name[k]]]
if constant[LT-PTRM-Z] begin[:]
call[name[Treat_PZ].append, parameter[name[temp]]]
call[name[PZSteps].append, parameter[name[k]]]
if <ast.BoolOp object at 0x7da1b0201420> begin[:]
call[name[Treat_PI].append, parameter[name[temp]]]
call[name[PISteps].append, parameter[name[k]]]
if <ast.BoolOp object at 0x7da1b0201270> begin[:]
call[name[Treat_M].append, parameter[name[temp]]]
call[name[MSteps].append, parameter[name[k]]]
if <ast.BoolOp object at 0x7da1b0200a30> begin[:]
call[name[Treat_AC].append, parameter[name[temp]]]
call[name[ACSteps].append, parameter[name[k]]]
if compare[constant[LT-NO] in name[methcodes]] begin[:]
variable[dec] assign[=] call[name[float], parameter[call[name[rec]][constant[measurement_dec]]]]
variable[inc] assign[=] call[name[float], parameter[call[name[rec]][constant[measurement_inc]]]]
variable[moment] assign[=] call[name[float], parameter[call[name[rec]][constant[measurement_magn_moment]]]]
if compare[constant[LP-PI-M] <ast.NotIn object at 0x7da2590d7190> name[methcodes]] begin[:]
call[name[first_I].append, parameter[list[[<ast.Constant object at 0x7da1b0200be0>, <ast.Constant object at 0x7da1b0200cd0>, <ast.Constant object at 0x7da1b0200ca0>, <ast.Constant object at 0x7da1b0201180>, <ast.Constant object at 0x7da1b0201150>]]]]
call[name[first_Z].append, parameter[list[[<ast.Constant object at 0x7da1b0200fa0>, <ast.Name object at 0x7da1b0200f40>, <ast.Name object at 0x7da1b0200f10>, <ast.Name object at 0x7da1b0200ee0>, <ast.Constant object at 0x7da1b0200f70>]]]]
for taget[name[temp]] in starred[name[Treat_I]] begin[:]
if compare[name[temp] in name[Treat_Z]] begin[:]
variable[istep] assign[=] call[name[ISteps]][call[name[Treat_I].index, parameter[name[temp]]]]
variable[irec] assign[=] call[name[datablock]][name[istep]]
variable[methcodes] assign[=] list[[]]
variable[tmp] assign[=] call[call[name[irec]][constant[magic_method_codes]].split, parameter[constant[:]]]
for taget[name[meth]] in starred[name[tmp]] begin[:]
call[name[methcodes].append, parameter[call[name[meth].strip, parameter[]]]]
variable[brec] assign[=] call[name[datablock]][binary_operation[name[istep] - constant[1]]]
variable[zstep] assign[=] call[name[ZSteps]][call[name[Treat_Z].index, parameter[name[temp]]]]
variable[zrec] assign[=] call[name[datablock]][name[zstep]]
variable[ZI] assign[=] constant[]
if <ast.BoolOp object at 0x7da1b02270d0> begin[:]
variable[ZI] assign[=] constant[0]
variable[dec] assign[=] call[name[float], parameter[call[name[zrec]][constant[measurement_dec]]]]
variable[inc] assign[=] call[name[float], parameter[call[name[zrec]][constant[measurement_inc]]]]
variable[str] assign[=] call[name[float], parameter[call[name[zrec]][name[momkey]]]]
call[name[first_Z].append, parameter[list[[<ast.Name object at 0x7da1b0451510>, <ast.Name object at 0x7da1b04514e0>, <ast.Name object at 0x7da1b0451480>, <ast.Name object at 0x7da1b04514b0>, <ast.Name object at 0x7da1b0451750>]]]]
variable[idec] assign[=] call[name[float], parameter[call[name[irec]][constant[measurement_dec]]]]
variable[iinc] assign[=] call[name[float], parameter[call[name[irec]][constant[measurement_inc]]]]
variable[istr] assign[=] call[name[float], parameter[call[name[irec]][name[momkey]]]]
variable[X] assign[=] call[name[pmag].dir2cart, parameter[list[[<ast.Name object at 0x7da1b0450fd0>, <ast.Name object at 0x7da1b0451030>, <ast.Name object at 0x7da1b0451000>]]]]
variable[BL] assign[=] call[name[pmag].dir2cart, parameter[list[[<ast.Name object at 0x7da1b0451180>, <ast.Name object at 0x7da1b04511e0>, <ast.Name object at 0x7da1b04511b0>]]]]
variable[I] assign[=] list[[]]
for taget[name[c]] in starred[call[name[range], parameter[constant[3]]]] begin[:]
call[name[I].append, parameter[binary_operation[call[name[X]][name[c]] - call[name[BL]][name[c]]]]]
if compare[call[name[I]][constant[2]] not_equal[!=] constant[0]] begin[:]
variable[iDir] assign[=] call[name[pmag].cart2dir, parameter[name[I]]]
if compare[name[Zdiff] equal[==] constant[0]] begin[:]
call[name[first_I].append, parameter[list[[<ast.Name object at 0x7da1b0450550>, <ast.Subscript object at 0x7da1b0450430>, <ast.Subscript object at 0x7da1b0450520>, <ast.Subscript object at 0x7da1b0450490>, <ast.Name object at 0x7da1b04505b0>]]]]
if <ast.BoolOp object at 0x7da1b04d7100> begin[:]
for taget[name[i]] in starred[call[name[range], parameter[constant[1], call[name[len], parameter[name[Treat_I]]]]]] begin[:]
if compare[call[name[Treat_I]][name[i]] equal[==] call[name[Treat_I]][binary_operation[name[i] - constant[1]]]] begin[:]
variable[temp] assign[=] call[name[Treat_I]][name[i]]
variable[irec1] assign[=] call[name[datablock]][call[name[ISteps]][binary_operation[name[i] - constant[1]]]]
variable[dec1] assign[=] call[name[float], parameter[call[name[irec1]][constant[measurement_dec]]]]
variable[inc1] assign[=] call[name[float], parameter[call[name[irec1]][constant[measurement_inc]]]]
variable[moment1] assign[=] call[name[float], parameter[call[name[irec1]][constant[measurement_magn_moment]]]]
if compare[call[name[len], parameter[name[first_I]]] less[<] constant[2]] begin[:]
variable[dec_initial] assign[=] name[dec1]
variable[inc_initial] assign[=] name[inc1]
variable[cart1] assign[=] call[name[np].array, parameter[call[name[pmag].dir2cart, parameter[list[[<ast.Name object at 0x7da1b04d5c60>, <ast.Name object at 0x7da1b04d5db0>, <ast.Name object at 0x7da1b04d5c90>]]]]]]
variable[irec2] assign[=] call[name[datablock]][call[name[ISteps]][name[i]]]
variable[dec2] assign[=] call[name[float], parameter[call[name[irec2]][constant[measurement_dec]]]]
variable[inc2] assign[=] call[name[float], parameter[call[name[irec2]][constant[measurement_inc]]]]
variable[moment2] assign[=] call[name[float], parameter[call[name[irec2]][constant[measurement_magn_moment]]]]
variable[cart2] assign[=] call[name[np].array, parameter[call[name[pmag].dir2cart, parameter[list[[<ast.Name object at 0x7da1b04d5510>, <ast.Name object at 0x7da1b04d5540>, <ast.Name object at 0x7da1b04d5570>]]]]]]
if <ast.BoolOp object at 0x7da1b04d55d0> begin[:]
continue
if <ast.BoolOp object at 0x7da1b04d61a0> begin[:]
variable[zerofield] assign[=] binary_operation[binary_operation[name[cart2] + name[cart1]] / constant[2]]
variable[infield] assign[=] binary_operation[binary_operation[name[cart2] - name[cart1]] / constant[2]]
variable[DIR_zerofield] assign[=] call[name[pmag].cart2dir, parameter[name[zerofield]]]
variable[DIR_infield] assign[=] call[name[pmag].cart2dir, parameter[name[infield]]]
call[name[first_Z].append, parameter[list[[<ast.Name object at 0x7da1b04d7ac0>, <ast.Subscript object at 0x7da1b04d7b20>, <ast.Subscript object at 0x7da1b04d7670>, <ast.Subscript object at 0x7da1b04d7640>, <ast.Constant object at 0x7da1b04d75b0>]]]]
call[name[first_I].append, parameter[list[[<ast.Name object at 0x7da1b04d77c0>, <ast.Subscript object at 0x7da1b04d78b0>, <ast.Subscript object at 0x7da1b04d7820>, <ast.Subscript object at 0x7da1b04d7a30>, <ast.Constant object at 0x7da1b04d7940>]]]]
for taget[name[i]] in starred[call[name[range], parameter[call[name[len], parameter[name[Treat_PI]]]]]] begin[:]
variable[temp] assign[=] call[name[Treat_PI]][name[i]]
variable[k] assign[=] call[name[PISteps]][name[i]]
variable[rec] assign[=] call[name[datablock]][name[k]]
variable[dec] assign[=] call[name[float], parameter[call[name[rec]][constant[measurement_dec]]]]
variable[inc] assign[=] call[name[float], parameter[call[name[rec]][constant[measurement_inc]]]]
variable[moment] assign[=] call[name[float], parameter[call[name[rec]][constant[measurement_magn_moment]]]]
variable[phi] assign[=] call[name[float], parameter[call[name[rec]][constant[treatment_dc_field_phi]]]]
variable[theta] assign[=] call[name[float], parameter[call[name[rec]][constant[treatment_dc_field_theta]]]]
variable[M] assign[=] call[name[np].array, parameter[call[name[pmag].dir2cart, parameter[list[[<ast.Name object at 0x7da1b04d4c70>, <ast.Name object at 0x7da1b04d4b20>, <ast.Name object at 0x7da1b04d4b80>]]]]]]
variable[foundit] assign[=] constant[False]
if compare[constant[LP-PI-II] <ast.NotIn object at 0x7da2590d7190> name[methcodes]] begin[:]
for taget[name[j]] in starred[call[name[range], parameter[name[k], constant[1], <ast.UnaryOp object at 0x7da1b04d51e0>]]] begin[:]
if <ast.BoolOp object at 0x7da1b04d5240> begin[:]
variable[after_zerofield] assign[=] constant[0.0]
variable[foundit] assign[=] constant[True]
variable[prev_rec] assign[=] call[name[datablock]][name[j]]
variable[zerofield_index] assign[=] name[j]
break
if compare[call[name[float], parameter[call[call[name[datablock]][name[j]]][constant[treatment_dc_field]]]] equal[==] constant[0]] begin[:]
variable[after_zerofield] assign[=] constant[1.0]
variable[foundit] assign[=] constant[True]
variable[prev_rec] assign[=] call[name[datablock]][name[j]]
variable[zerofield_index] assign[=] name[j]
break
if name[foundit] begin[:]
variable[prev_dec] assign[=] call[name[float], parameter[call[name[prev_rec]][constant[measurement_dec]]]]
variable[prev_inc] assign[=] call[name[float], parameter[call[name[prev_rec]][constant[measurement_inc]]]]
variable[prev_moment] assign[=] call[name[float], parameter[call[name[prev_rec]][constant[measurement_magn_moment]]]]
variable[prev_phi] assign[=] call[name[float], parameter[call[name[prev_rec]][constant[treatment_dc_field_phi]]]]
variable[prev_theta] assign[=] call[name[float], parameter[call[name[prev_rec]][constant[treatment_dc_field_theta]]]]
variable[prev_M] assign[=] call[name[np].array, parameter[call[name[pmag].dir2cart, parameter[list[[<ast.Name object at 0x7da1b02e85b0>, <ast.Name object at 0x7da1b02eb700>, <ast.Name object at 0x7da1b02eb760>]]]]]]
if compare[constant[LP-PI-II] <ast.NotIn object at 0x7da2590d7190> name[methcodes]] begin[:]
variable[diff_cart] assign[=] binary_operation[name[M] - name[prev_M]]
variable[diff_dir] assign[=] call[name[pmag].cart2dir, parameter[name[diff_cart]]]
if compare[name[after_zerofield] equal[==] constant[0]] begin[:]
call[name[ptrm_check].append, parameter[list[[<ast.Name object at 0x7da1b02e8f10>, <ast.Subscript object at 0x7da1b02eba30>, <ast.Subscript object at 0x7da1b02eba90>, <ast.Subscript object at 0x7da1b02e9210>, <ast.Name object at 0x7da1b02e8910>, <ast.Name object at 0x7da1b02e90f0>]]]]
for taget[name[temp]] in starred[name[Treat_M]] begin[:]
variable[step] assign[=] call[name[MSteps]][call[name[Treat_M].index, parameter[name[temp]]]]
variable[rec] assign[=] call[name[datablock]][name[step]]
variable[dec] assign[=] call[name[float], parameter[call[name[rec]][constant[measurement_dec]]]]
variable[inc] assign[=] call[name[float], parameter[call[name[rec]][constant[measurement_inc]]]]
variable[moment] assign[=] call[name[float], parameter[call[name[rec]][constant[measurement_magn_moment]]]]
variable[foundit] assign[=] constant[False]
for taget[name[i]] in starred[call[name[range], parameter[constant[1], call[name[len], parameter[name[datablock]]]]]] begin[:]
if <ast.BoolOp object at 0x7da1b0212fe0> begin[:]
if <ast.BoolOp object at 0x7da1b0212ce0> begin[:]
variable[prev_rec] assign[=] call[name[datablock]][name[i]]
variable[prev_dec] assign[=] call[name[float], parameter[call[name[prev_rec]][constant[measurement_dec]]]]
variable[prev_inc] assign[=] call[name[float], parameter[call[name[prev_rec]][constant[measurement_inc]]]]
variable[prev_moment] assign[=] call[name[float], parameter[call[name[prev_rec]][constant[measurement_magn_moment]]]]
variable[foundit] assign[=] constant[True]
break
if name[foundit] begin[:]
call[name[ptrm_tail].append, parameter[list[[<ast.Name object at 0x7da1b0211d20>, <ast.Constant object at 0x7da1b0211cf0>, <ast.Constant object at 0x7da1b0211cc0>, <ast.BinOp object at 0x7da1b0211c90>]]]]
if compare[call[name[len], parameter[name[first_Z]]] not_equal[!=] call[name[len], parameter[name[first_I]]]] begin[:]
call[name[print], parameter[call[name[len], parameter[name[first_Z]]], call[name[len], parameter[name[first_I]]]]]
call[name[print], parameter[constant[ Something wrong with this specimen! Better fix it or delete it ]]]
call[name[input], parameter[constant[ press return to acknowledge message]]]
variable[additivity_check] assign[=] list[[]]
for taget[name[i]] in starred[call[name[range], parameter[call[name[len], parameter[name[Treat_AC]]]]]] begin[:]
variable[step_0] assign[=] call[name[ACSteps]][name[i]]
variable[temp] assign[=] call[name[Treat_AC]][name[i]]
variable[dec0] assign[=] call[name[float], parameter[call[call[name[datablock]][name[step_0]]][constant[measurement_dec]]]]
variable[inc0] assign[=] call[name[float], parameter[call[call[name[datablock]][name[step_0]]][constant[measurement_inc]]]]
variable[moment0] assign[=] call[name[float], parameter[call[call[name[datablock]][name[step_0]]][constant[measurement_magn_moment]]]]
variable[V0] assign[=] call[name[pmag].dir2cart, parameter[list[[<ast.Name object at 0x7da1b02b2620>, <ast.Name object at 0x7da1b02b25f0>, <ast.Name object at 0x7da1b02b25c0>]]]]
variable[foundit] assign[=] constant[False]
for taget[name[j]] in starred[call[name[range], parameter[name[step_0], constant[1], <ast.UnaryOp object at 0x7da1b02b23e0>]]] begin[:]
if compare[constant[LT-T-I] in call[call[name[datablock]][name[j]]][constant[magic_method_codes]]] begin[:]
variable[foundit] assign[=] constant[True]
break
if name[foundit] begin[:]
variable[dec1] assign[=] call[name[float], parameter[call[call[name[datablock]][name[j]]][constant[measurement_dec]]]]
variable[inc1] assign[=] call[name[float], parameter[call[call[name[datablock]][name[j]]][constant[measurement_inc]]]]
variable[moment1] assign[=] call[name[float], parameter[call[call[name[datablock]][name[j]]][constant[measurement_magn_moment]]]]
variable[V1] assign[=] call[name[pmag].dir2cart, parameter[list[[<ast.Name object at 0x7da1b02b1a80>, <ast.Name object at 0x7da1b02b1a50>, <ast.Name object at 0x7da1b02b1a20>]]]]
variable[I] assign[=] list[[]]
for taget[name[c]] in starred[call[name[range], parameter[constant[3]]]] begin[:]
call[name[I].append, parameter[binary_operation[call[name[V1]][name[c]] - call[name[V0]][name[c]]]]]
variable[dir1] assign[=] call[name[pmag].cart2dir, parameter[name[I]]]
call[name[additivity_check].append, parameter[list[[<ast.Name object at 0x7da1b02b1420>, <ast.Subscript object at 0x7da1b02b13f0>, <ast.Subscript object at 0x7da1b02b1360>, <ast.Subscript object at 0x7da1b02b12d0>]]]]
variable[X] assign[=] binary_operation[call[name[np].array, parameter[name[I]]] / call[name[float], parameter[call[call[name[datablock]][constant[0]]][constant[measurement_magn_moment]]]]]
variable[araiblock] assign[=] tuple[[<ast.Name object at 0x7da1b02b0ee0>, <ast.Name object at 0x7da1b02b0eb0>, <ast.Name object at 0x7da1b02b0e80>, <ast.Name object at 0x7da1b02b0e50>, <ast.Name object at 0x7da1b02b0e20>, <ast.Name object at 0x7da1b02b0df0>, <ast.Name object at 0x7da1b02b0dc0>]]
return[tuple[[<ast.Name object at 0x7da1b02b0d30>, <ast.Name object at 0x7da1b02b0d00>]]] | keyword[def] identifier[sortarai] ( identifier[self] , identifier[datablock] , identifier[s] , identifier[Zdiff] ):
literal[string]
identifier[first_Z] , identifier[first_I] , identifier[zptrm_check] , identifier[ptrm_check] , identifier[ptrm_tail] =[],[],[],[],[]
identifier[field] , identifier[phi] , identifier[theta] = literal[string] , literal[string] , literal[string]
identifier[starthere] = literal[int]
identifier[Treat_I] , identifier[Treat_Z] , identifier[Treat_PZ] , identifier[Treat_PI] , identifier[Treat_M] , identifier[Treat_AC] =[],[],[],[],[],[]
identifier[ISteps] , identifier[ZSteps] , identifier[PISteps] , identifier[PZSteps] , identifier[MSteps] , identifier[ACSteps] =[],[],[],[],[],[]
identifier[GammaChecks] =[]
identifier[Mkeys] =[ literal[string] , literal[string] ,
literal[string] , literal[string] ]
identifier[rec] = identifier[datablock] [ literal[int] ]
keyword[for] identifier[key] keyword[in] identifier[Mkeys] :
keyword[if] identifier[key] keyword[in] identifier[list] ( identifier[rec] . identifier[keys] ()) keyword[and] identifier[rec] [ identifier[key] ]!= literal[string] :
identifier[momkey] = identifier[key]
keyword[break]
keyword[for] identifier[k] keyword[in] identifier[range] ( identifier[len] ( identifier[datablock] )):
identifier[rec] = identifier[datablock] [ identifier[k] ]
keyword[if] literal[string] keyword[in] identifier[list] ( identifier[rec] . identifier[keys] ()) keyword[and] identifier[rec] [ literal[string] ]!= literal[string] :
identifier[temp] = identifier[float] ( identifier[rec] [ literal[string] ])
identifier[THERMAL] = keyword[True]
identifier[MICROWAVE] = keyword[False]
keyword[elif] literal[string] keyword[in] identifier[list] ( identifier[rec] . identifier[keys] ()) keyword[and] identifier[rec] [ literal[string] ]!= literal[string] :
identifier[THERMAL] = keyword[False]
identifier[MICROWAVE] = keyword[True]
keyword[if] literal[string] keyword[in] identifier[list] ( identifier[rec] . identifier[keys] ()):
identifier[MW_step] = identifier[rec] [ literal[string] ]. identifier[strip] (
literal[string] ). identifier[split] ( literal[string] )
keyword[for] identifier[STEP] keyword[in] identifier[MW_step] :
keyword[if] literal[string] keyword[in] identifier[STEP] :
identifier[temp] = identifier[float] ( identifier[STEP] . identifier[split] ( literal[string] )[- literal[int] ])
identifier[methcodes] =[]
identifier[tmp] = identifier[rec] [ literal[string] ]. identifier[split] ( literal[string] )
keyword[for] identifier[meth] keyword[in] identifier[tmp] :
identifier[methcodes] . identifier[append] ( identifier[meth] . identifier[strip] ())
keyword[if] literal[string] keyword[in] identifier[methcodes] keyword[and] literal[string] keyword[in] identifier[methcodes] keyword[and] literal[string] keyword[not] keyword[in] identifier[methcodes] :
identifier[Treat_I] . identifier[append] ( identifier[temp] )
identifier[ISteps] . identifier[append] ( identifier[k] )
keyword[if] identifier[field] == literal[string] :
identifier[field] = identifier[float] ( identifier[rec] [ literal[string] ])
keyword[if] identifier[phi] == literal[string] :
identifier[phi] = identifier[float] ( identifier[rec] [ literal[string] ])
identifier[theta] = identifier[float] ( identifier[rec] [ literal[string] ])
keyword[if] literal[string] keyword[in] identifier[methcodes] keyword[and] literal[string] keyword[in] identifier[methcodes] :
identifier[Treat_I] . identifier[append] ( identifier[temp] )
identifier[ISteps] . identifier[append] ( identifier[k] )
keyword[if] identifier[field] == literal[string] :
identifier[field] = identifier[float] ( identifier[rec] [ literal[string] ])
keyword[if] identifier[phi] == literal[string] :
identifier[phi] = identifier[float] ( identifier[rec] [ literal[string] ])
identifier[theta] = identifier[float] ( identifier[rec] [ literal[string] ])
keyword[if] literal[string] keyword[in] identifier[methcodes] :
identifier[Treat_Z] . identifier[append] ( identifier[temp] )
identifier[ZSteps] . identifier[append] ( identifier[k] )
keyword[if] literal[string] keyword[in] identifier[methcodes] keyword[and] literal[string] keyword[in] identifier[list] ( identifier[rec] . identifier[keys] ()):
keyword[if] identifier[rec] [ literal[string] ]!= literal[string] :
identifier[AFD_after_NRM] = keyword[True]
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[len] ( identifier[first_I] )):
keyword[if] identifier[float] ( identifier[first_I] [ identifier[i] ][ literal[int] ])!= literal[int] :
identifier[AFD_after_NRM] = keyword[False]
keyword[if] identifier[AFD_after_NRM] :
identifier[AF_field] = literal[int]
keyword[if] literal[string] keyword[in] identifier[rec] :
keyword[try] :
identifier[AF_field] = identifier[float] ( identifier[rec] [ literal[string] ])* literal[int]
keyword[except] identifier[ValueError] :
keyword[pass]
identifier[dec] = identifier[float] ( identifier[rec] [ literal[string] ])
identifier[inc] = identifier[float] ( identifier[rec] [ literal[string] ])
identifier[intensity] = identifier[float] ( identifier[rec] [ identifier[momkey] ])
identifier[first_I] . identifier[append] ([ literal[int] - identifier[AF_field] , literal[int] , literal[int] , literal[int] , literal[int] ])
identifier[first_Z] . identifier[append] (
[ literal[int] - identifier[AF_field] , identifier[dec] , identifier[inc] , identifier[intensity] , literal[int] ])
keyword[if] literal[string] keyword[in] identifier[methcodes] keyword[or] literal[string] keyword[in] identifier[methcodes] :
identifier[Treat_Z] . identifier[append] ( identifier[temp] )
identifier[ZSteps] . identifier[append] ( identifier[k] )
keyword[if] literal[string] :
identifier[Treat_PZ] . identifier[append] ( identifier[temp] )
identifier[PZSteps] . identifier[append] ( identifier[k] )
keyword[if] literal[string] keyword[in] identifier[methcodes] keyword[or] literal[string] keyword[in] identifier[methcodes] :
identifier[Treat_PI] . identifier[append] ( identifier[temp] )
identifier[PISteps] . identifier[append] ( identifier[k] )
keyword[if] literal[string] keyword[in] identifier[methcodes] keyword[or] literal[string] keyword[in] identifier[methcodes] :
identifier[Treat_M] . identifier[append] ( identifier[temp] )
identifier[MSteps] . identifier[append] ( identifier[k] )
keyword[if] literal[string] keyword[in] identifier[methcodes] keyword[or] literal[string] keyword[in] identifier[methcodes] :
identifier[Treat_AC] . identifier[append] ( identifier[temp] )
identifier[ACSteps] . identifier[append] ( identifier[k] )
keyword[if] literal[string] keyword[in] identifier[methcodes] :
identifier[dec] = identifier[float] ( identifier[rec] [ literal[string] ])
identifier[inc] = identifier[float] ( identifier[rec] [ literal[string] ])
identifier[moment] = identifier[float] ( identifier[rec] [ literal[string] ])
keyword[if] literal[string] keyword[not] keyword[in] identifier[methcodes] :
identifier[first_I] . identifier[append] ([ literal[int] , literal[int] , literal[int] , literal[int] , literal[int] ])
identifier[first_Z] . identifier[append] ([ literal[int] , identifier[dec] , identifier[inc] , identifier[moment] , literal[int] ])
keyword[else] :
identifier[first_I] . identifier[append] ([ literal[int] , literal[int] , literal[int] , literal[int] , literal[int] ])
identifier[first_Z] . identifier[append] ([ literal[int] , identifier[dec] , identifier[inc] , identifier[moment] , literal[int] ])
keyword[for] identifier[temp] keyword[in] identifier[Treat_I] :
keyword[if] identifier[temp] keyword[in] identifier[Treat_Z] :
identifier[istep] = identifier[ISteps] [ identifier[Treat_I] . identifier[index] ( identifier[temp] )]
identifier[irec] = identifier[datablock] [ identifier[istep] ]
identifier[methcodes] =[]
identifier[tmp] = identifier[irec] [ literal[string] ]. identifier[split] ( literal[string] )
keyword[for] identifier[meth] keyword[in] identifier[tmp] :
identifier[methcodes] . identifier[append] ( identifier[meth] . identifier[strip] ())
identifier[brec] = identifier[datablock] [ identifier[istep] - literal[int] ]
identifier[zstep] = identifier[ZSteps] [ identifier[Treat_Z] . identifier[index] ( identifier[temp] )]
identifier[zrec] = identifier[datablock] [ identifier[zstep] ]
identifier[ZI] = literal[string]
keyword[if] literal[string] keyword[in] identifier[methcodes] keyword[or] literal[string] keyword[in] identifier[methcodes] keyword[or] literal[string] keyword[in] identifier[methcodes] :
identifier[ZI] = literal[int]
keyword[elif] literal[string] keyword[in] identifier[methcodes] keyword[or] literal[string] keyword[in] identifier[methcodes] keyword[or] literal[string] keyword[in] identifier[methcodes] :
identifier[ZI] = literal[int]
keyword[elif] literal[string] keyword[in] identifier[methcodes] :
identifier[ZI] == literal[string]
identifier[i_intex] , identifier[z_intex] = literal[int] , literal[int]
identifier[foundit] = keyword[False]
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[len] ( identifier[datablock] )):
keyword[if] identifier[THERMAL] :
keyword[if] ( literal[string] keyword[in] identifier[list] ( identifier[datablock] [ identifier[i] ]. identifier[keys] ()) keyword[and] identifier[float] ( identifier[temp] )== identifier[float] ( identifier[datablock] [ identifier[i] ][ literal[string] ])):
identifier[foundit] = keyword[True]
keyword[if] identifier[MICROWAVE] :
keyword[if] ( literal[string] keyword[in] identifier[list] ( identifier[datablock] [ identifier[i] ]. identifier[keys] ())):
identifier[MW_step] = identifier[datablock] [ identifier[i] ][ literal[string] ]. identifier[strip] (
literal[string] ). identifier[split] ( literal[string] )
keyword[for] identifier[STEP] keyword[in] identifier[MW_step] :
keyword[if] literal[string] keyword[in] identifier[STEP] :
identifier[ThisStep] = identifier[float] ( identifier[STEP] . identifier[split] ( literal[string] )[- literal[int] ])
keyword[if] identifier[ThisStep] == identifier[float] ( identifier[temp] ):
identifier[foundit] = keyword[True]
keyword[if] identifier[foundit] :
keyword[if] literal[string] keyword[in] identifier[datablock] [ identifier[i] ][ literal[string] ]. identifier[split] ( literal[string] ) keyword[or] literal[string] keyword[in] identifier[datablock] [ identifier[i] ][ literal[string] ]. identifier[split] ( literal[string] ):
identifier[z_intex] = identifier[i]
keyword[if] literal[string] keyword[in] identifier[datablock] [ identifier[i] ][ literal[string] ]. identifier[split] ( literal[string] ) keyword[or] literal[string] keyword[in] identifier[datablock] [ identifier[i] ][ literal[string] ]. identifier[split] ( literal[string] ):
identifier[i_intex] = identifier[i]
identifier[foundit] = keyword[False]
keyword[if] identifier[z_intex] < identifier[i_intex] :
identifier[ZI] = literal[int]
keyword[else] :
identifier[ZI] = literal[int]
identifier[dec] = identifier[float] ( identifier[zrec] [ literal[string] ])
identifier[inc] = identifier[float] ( identifier[zrec] [ literal[string] ])
identifier[str] = identifier[float] ( identifier[zrec] [ identifier[momkey] ])
identifier[first_Z] . identifier[append] ([ identifier[temp] , identifier[dec] , identifier[inc] , identifier[str] , identifier[ZI] ])
identifier[idec] = identifier[float] ( identifier[irec] [ literal[string] ])
identifier[iinc] = identifier[float] ( identifier[irec] [ literal[string] ])
identifier[istr] = identifier[float] ( identifier[irec] [ identifier[momkey] ])
identifier[X] = identifier[pmag] . identifier[dir2cart] ([ identifier[idec] , identifier[iinc] , identifier[istr] ])
identifier[BL] = identifier[pmag] . identifier[dir2cart] ([ identifier[dec] , identifier[inc] , identifier[str] ])
identifier[I] =[]
keyword[for] identifier[c] keyword[in] identifier[range] ( literal[int] ):
identifier[I] . identifier[append] (( identifier[X] [ identifier[c] ]- identifier[BL] [ identifier[c] ]))
keyword[if] identifier[I] [ literal[int] ]!= literal[int] :
identifier[iDir] = identifier[pmag] . identifier[cart2dir] ( identifier[I] )
keyword[if] identifier[Zdiff] == literal[int] :
identifier[first_I] . identifier[append] ([ identifier[temp] , identifier[iDir] [ literal[int] ], identifier[iDir] [ literal[int] ], identifier[iDir] [ literal[int] ], identifier[ZI] ])
keyword[else] :
identifier[first_I] . identifier[append] ([ identifier[temp] , literal[int] , literal[int] , identifier[I] [ literal[int] ], identifier[ZI] ])
keyword[else] :
identifier[first_I] . identifier[append] ([ identifier[temp] , literal[int] , literal[int] , literal[int] , identifier[ZI] ])
keyword[if] literal[string] keyword[in] identifier[methcodes] keyword[or] literal[string] keyword[in] identifier[methcodes] keyword[or] literal[string] keyword[in] identifier[methcodes] :
keyword[for] identifier[i] keyword[in] identifier[range] ( literal[int] , identifier[len] ( identifier[Treat_I] )):
keyword[if] identifier[Treat_I] [ identifier[i] ]== identifier[Treat_I] [ identifier[i] - literal[int] ]:
identifier[temp] = identifier[Treat_I] [ identifier[i] ]
identifier[irec1] = identifier[datablock] [ identifier[ISteps] [ identifier[i] - literal[int] ]]
identifier[dec1] = identifier[float] ( identifier[irec1] [ literal[string] ])
identifier[inc1] = identifier[float] ( identifier[irec1] [ literal[string] ])
identifier[moment1] = identifier[float] ( identifier[irec1] [ literal[string] ])
keyword[if] identifier[len] ( identifier[first_I] )< literal[int] :
identifier[dec_initial] = identifier[dec1]
identifier[inc_initial] = identifier[inc1]
identifier[cart1] = identifier[np] . identifier[array] ( identifier[pmag] . identifier[dir2cart] ([ identifier[dec1] , identifier[inc1] , identifier[moment1] ]))
identifier[irec2] = identifier[datablock] [ identifier[ISteps] [ identifier[i] ]]
identifier[dec2] = identifier[float] ( identifier[irec2] [ literal[string] ])
identifier[inc2] = identifier[float] ( identifier[irec2] [ literal[string] ])
identifier[moment2] = identifier[float] ( identifier[irec2] [ literal[string] ])
identifier[cart2] = identifier[np] . identifier[array] ( identifier[pmag] . identifier[dir2cart] ([ identifier[dec2] , identifier[inc2] , identifier[moment2] ]))
keyword[if] identifier[Treat_I] [ identifier[i] ]== identifier[Treat_I] [ identifier[i] - literal[int] ] keyword[and] identifier[dec2] != identifier[dec_initial] keyword[and] identifier[inc2] != identifier[inc_initial] :
keyword[continue]
keyword[if] identifier[dec1] != identifier[dec2] keyword[and] identifier[inc1] != identifier[inc2] :
identifier[zerofield] =( identifier[cart2] + identifier[cart1] )/ literal[int]
identifier[infield] =( identifier[cart2] - identifier[cart1] )/ literal[int]
identifier[DIR_zerofield] = identifier[pmag] . identifier[cart2dir] ( identifier[zerofield] )
identifier[DIR_infield] = identifier[pmag] . identifier[cart2dir] ( identifier[infield] )
identifier[first_Z] . identifier[append] (
[ identifier[temp] , identifier[DIR_zerofield] [ literal[int] ], identifier[DIR_zerofield] [ literal[int] ], identifier[DIR_zerofield] [ literal[int] ], literal[int] ])
identifier[first_I] . identifier[append] (
[ identifier[temp] , identifier[DIR_infield] [ literal[int] ], identifier[DIR_infield] [ literal[int] ], identifier[DIR_infield] [ literal[int] ], literal[int] ])
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[len] ( identifier[Treat_PI] )):
identifier[temp] = identifier[Treat_PI] [ identifier[i] ]
identifier[k] = identifier[PISteps] [ identifier[i] ]
identifier[rec] = identifier[datablock] [ identifier[k] ]
identifier[dec] = identifier[float] ( identifier[rec] [ literal[string] ])
identifier[inc] = identifier[float] ( identifier[rec] [ literal[string] ])
identifier[moment] = identifier[float] ( identifier[rec] [ literal[string] ])
identifier[phi] = identifier[float] ( identifier[rec] [ literal[string] ])
identifier[theta] = identifier[float] ( identifier[rec] [ literal[string] ])
identifier[M] = identifier[np] . identifier[array] ( identifier[pmag] . identifier[dir2cart] ([ identifier[dec] , identifier[inc] , identifier[moment] ]))
identifier[foundit] = keyword[False]
keyword[if] literal[string] keyword[not] keyword[in] identifier[methcodes] :
keyword[for] identifier[j] keyword[in] identifier[range] ( identifier[k] , literal[int] ,- literal[int] ):
keyword[if] literal[string] keyword[in] identifier[datablock] [ identifier[j] ][ literal[string] ] keyword[or] literal[string] keyword[in] identifier[datablock] [ identifier[j] ][ literal[string] ]:
identifier[after_zerofield] = literal[int]
identifier[foundit] = keyword[True]
identifier[prev_rec] = identifier[datablock] [ identifier[j] ]
identifier[zerofield_index] = identifier[j]
keyword[break]
keyword[if] identifier[float] ( identifier[datablock] [ identifier[j] ][ literal[string] ])== literal[int] :
identifier[after_zerofield] = literal[int]
identifier[foundit] = keyword[True]
identifier[prev_rec] = identifier[datablock] [ identifier[j] ]
identifier[zerofield_index] = identifier[j]
keyword[break]
keyword[else] :
identifier[foundit] = keyword[True]
identifier[prev_rec] = identifier[datablock] [ identifier[k] - literal[int] ]
identifier[zerofield_index] = identifier[k] - literal[int]
keyword[if] identifier[foundit] :
identifier[prev_dec] = identifier[float] ( identifier[prev_rec] [ literal[string] ])
identifier[prev_inc] = identifier[float] ( identifier[prev_rec] [ literal[string] ])
identifier[prev_moment] = identifier[float] ( identifier[prev_rec] [ literal[string] ])
identifier[prev_phi] = identifier[float] ( identifier[prev_rec] [ literal[string] ])
identifier[prev_theta] = identifier[float] ( identifier[prev_rec] [ literal[string] ])
identifier[prev_M] = identifier[np] . identifier[array] ( identifier[pmag] . identifier[dir2cart] (
[ identifier[prev_dec] , identifier[prev_inc] , identifier[prev_moment] ]))
keyword[if] literal[string] keyword[not] keyword[in] identifier[methcodes] :
identifier[diff_cart] = identifier[M] - identifier[prev_M]
identifier[diff_dir] = identifier[pmag] . identifier[cart2dir] ( identifier[diff_cart] )
keyword[if] identifier[after_zerofield] == literal[int] :
identifier[ptrm_check] . identifier[append] (
[ identifier[temp] , identifier[diff_dir] [ literal[int] ], identifier[diff_dir] [ literal[int] ], identifier[diff_dir] [ literal[int] ], identifier[zerofield_index] , identifier[after_zerofield] ])
keyword[else] :
identifier[ptrm_check] . identifier[append] (
[ identifier[temp] , identifier[diff_dir] [ literal[int] ], identifier[diff_dir] [ literal[int] ], identifier[diff_dir] [ literal[int] ], identifier[zerofield_index] , identifier[after_zerofield] ])
keyword[else] :
keyword[if] identifier[theta] != identifier[prev_theta] :
identifier[diff] =( identifier[M] - identifier[prev_M] )/ literal[int]
identifier[diff_dir] = identifier[pmag] . identifier[cart2dir] ( identifier[diff] )
identifier[ptrm_check] . identifier[append] (
[ identifier[temp] , identifier[diff_dir] [ literal[int] ], identifier[diff_dir] [ literal[int] ], identifier[diff_dir] [ literal[int] ], identifier[zerofield_index] , literal[string] ])
keyword[else] :
identifier[print] (
literal[string] )
keyword[for] identifier[temp] keyword[in] identifier[Treat_M] :
identifier[step] = identifier[MSteps] [ identifier[Treat_M] . identifier[index] ( identifier[temp] )]
identifier[rec] = identifier[datablock] [ identifier[step] ]
identifier[dec] = identifier[float] ( identifier[rec] [ literal[string] ])
identifier[inc] = identifier[float] ( identifier[rec] [ literal[string] ])
identifier[moment] = identifier[float] ( identifier[rec] [ literal[string] ])
identifier[foundit] = keyword[False]
keyword[for] identifier[i] keyword[in] identifier[range] ( literal[int] , identifier[len] ( identifier[datablock] )):
keyword[if] literal[string] keyword[in] identifier[datablock] [ identifier[i] ][ literal[string] ] keyword[or] literal[string] keyword[in] identifier[datablock] [ identifier[i] ][ literal[string] ]:
keyword[if] ( identifier[THERMAL] keyword[and] literal[string] keyword[in] identifier[list] ( identifier[datablock] [ identifier[i] ]. identifier[keys] ()) keyword[and] identifier[float] ( identifier[datablock] [ identifier[i] ][ literal[string] ])== identifier[float] ( identifier[temp] )) keyword[or] ( identifier[MICROWAVE] keyword[and] literal[string] keyword[in] identifier[list] ( identifier[datablock] [ identifier[i] ]. identifier[keys] ()) keyword[and] literal[string] % identifier[float] ( identifier[temp] ) keyword[in] identifier[datablock] [ identifier[i] ][ literal[string] ]):
identifier[prev_rec] = identifier[datablock] [ identifier[i] ]
identifier[prev_dec] = identifier[float] ( identifier[prev_rec] [ literal[string] ])
identifier[prev_inc] = identifier[float] ( identifier[prev_rec] [ literal[string] ])
identifier[prev_moment] = identifier[float] (
identifier[prev_rec] [ literal[string] ])
identifier[foundit] = keyword[True]
keyword[break]
keyword[if] identifier[foundit] :
identifier[ptrm_tail] . identifier[append] ([ identifier[temp] , literal[int] , literal[int] , identifier[moment] - identifier[prev_moment] ])
keyword[if] identifier[len] ( identifier[first_Z] )!= identifier[len] ( identifier[first_I] ):
identifier[print] ( identifier[len] ( identifier[first_Z] ), identifier[len] ( identifier[first_I] ))
identifier[print] ( literal[string] )
identifier[input] ( literal[string] )
identifier[additivity_check] =[]
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[len] ( identifier[Treat_AC] )):
identifier[step_0] = identifier[ACSteps] [ identifier[i] ]
identifier[temp] = identifier[Treat_AC] [ identifier[i] ]
identifier[dec0] = identifier[float] ( identifier[datablock] [ identifier[step_0] ][ literal[string] ])
identifier[inc0] = identifier[float] ( identifier[datablock] [ identifier[step_0] ][ literal[string] ])
identifier[moment0] = identifier[float] ( identifier[datablock] [ identifier[step_0] ][ literal[string] ])
identifier[V0] = identifier[pmag] . identifier[dir2cart] ([ identifier[dec0] , identifier[inc0] , identifier[moment0] ])
identifier[foundit] = keyword[False]
keyword[for] identifier[j] keyword[in] identifier[range] ( identifier[step_0] , literal[int] ,- literal[int] ):
keyword[if] literal[string] keyword[in] identifier[datablock] [ identifier[j] ][ literal[string] ]:
identifier[foundit] = keyword[True]
keyword[break]
keyword[if] identifier[foundit] :
identifier[dec1] = identifier[float] ( identifier[datablock] [ identifier[j] ][ literal[string] ])
identifier[inc1] = identifier[float] ( identifier[datablock] [ identifier[j] ][ literal[string] ])
identifier[moment1] = identifier[float] ( identifier[datablock] [ identifier[j] ][ literal[string] ])
identifier[V1] = identifier[pmag] . identifier[dir2cart] ([ identifier[dec1] , identifier[inc1] , identifier[moment1] ])
identifier[I] =[]
keyword[for] identifier[c] keyword[in] identifier[range] ( literal[int] ):
identifier[I] . identifier[append] ( identifier[V1] [ identifier[c] ]- identifier[V0] [ identifier[c] ])
identifier[dir1] = identifier[pmag] . identifier[cart2dir] ( identifier[I] )
identifier[additivity_check] . identifier[append] ([ identifier[temp] , identifier[dir1] [ literal[int] ], identifier[dir1] [ literal[int] ], identifier[dir1] [ literal[int] ]])
identifier[X] = identifier[np] . identifier[array] ( identifier[I] )/ identifier[float] ( identifier[datablock] [ literal[int] ][ literal[string] ])
identifier[araiblock] =( identifier[first_Z] , identifier[first_I] , identifier[ptrm_check] , identifier[ptrm_tail] ,
identifier[zptrm_check] , identifier[GammaChecks] , identifier[additivity_check] )
keyword[return] identifier[araiblock] , identifier[field] | def sortarai(self, datablock, s, Zdiff):
"""
sorts data block in to first_Z, first_I, etc.
"""
(first_Z, first_I, zptrm_check, ptrm_check, ptrm_tail) = ([], [], [], [], [])
(field, phi, theta) = ('', '', '')
starthere = 0
(Treat_I, Treat_Z, Treat_PZ, Treat_PI, Treat_M, Treat_AC) = ([], [], [], [], [], [])
(ISteps, ZSteps, PISteps, PZSteps, MSteps, ACSteps) = ([], [], [], [], [], [])
GammaChecks = [] # comparison of pTRM direction acquired and lab field
Mkeys = ['measurement_magn_moment', 'measurement_magn_volume', 'measurement_magn_mass', 'measurement_magnitude']
rec = datablock[0]
for key in Mkeys:
if key in list(rec.keys()) and rec[key] != '':
momkey = key
break # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['key']]
# first find all the steps
for k in range(len(datablock)):
rec = datablock[k]
if 'treatment_temp' in list(rec.keys()) and rec['treatment_temp'] != '':
temp = float(rec['treatment_temp'])
THERMAL = True
MICROWAVE = False # depends on [control=['if'], data=[]]
elif 'treatment_mw_power' in list(rec.keys()) and rec['treatment_mw_power'] != '':
THERMAL = False
MICROWAVE = True
if 'measurement_description' in list(rec.keys()):
MW_step = rec['measurement_description'].strip('\n').split(':')
for STEP in MW_step:
if 'Number' in STEP:
temp = float(STEP.split('-')[-1]) # depends on [control=['if'], data=['STEP']] # depends on [control=['for'], data=['STEP']] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
methcodes = []
tmp = rec['magic_method_codes'].split(':')
for meth in tmp:
methcodes.append(meth.strip()) # depends on [control=['for'], data=['meth']]
# for thellier-thellier
if 'LT-T-I' in methcodes and 'LP-PI-TRM' in methcodes and ('LP-TRM' not in methcodes):
Treat_I.append(temp)
ISteps.append(k)
if field == '':
field = float(rec['treatment_dc_field']) # depends on [control=['if'], data=['field']]
if phi == '':
phi = float(rec['treatment_dc_field_phi'])
theta = float(rec['treatment_dc_field_theta']) # depends on [control=['if'], data=['phi']] # depends on [control=['if'], data=[]]
# for Microwave
if 'LT-M-I' in methcodes and 'LP-PI-M' in methcodes:
Treat_I.append(temp)
ISteps.append(k)
if field == '':
field = float(rec['treatment_dc_field']) # depends on [control=['if'], data=['field']]
if phi == '':
phi = float(rec['treatment_dc_field_phi'])
theta = float(rec['treatment_dc_field_theta']) # depends on [control=['if'], data=['phi']] # depends on [control=['if'], data=[]]
# stick first zero field stuff into first_Z
if 'LT-NO' in methcodes:
Treat_Z.append(temp)
ZSteps.append(k) # depends on [control=['if'], data=[]]
if 'LT-AF-Z' in methcodes and 'treatment_ac_field' in list(rec.keys()):
if rec['treatment_ac_field'] != '':
AFD_after_NRM = True
# consider AFD before T-T experiment ONLY if it comes before
# the experiment
for i in range(len(first_I)):
# check if there was an infield step before the AFD
if float(first_I[i][3]) != 0:
AFD_after_NRM = False # depends on [control=['if'], data=[]]
if AFD_after_NRM:
AF_field = 0
if 'treatment_ac_field' in rec:
try:
AF_field = float(rec['treatment_ac_field']) * 1000 # depends on [control=['try'], data=[]]
except ValueError:
pass # depends on [control=['except'], data=[]] # depends on [control=['if'], data=['rec']]
dec = float(rec['measurement_dec'])
inc = float(rec['measurement_inc'])
intensity = float(rec[momkey])
first_I.append([273.0 - AF_field, 0.0, 0.0, 0.0, 1])
first_Z.append([273.0 - AF_field, dec, inc, intensity, 1]) # NRM step # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['i']] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
if 'LT-T-Z' in methcodes or 'LT-M-Z' in methcodes:
Treat_Z.append(temp)
ZSteps.append(k) # depends on [control=['if'], data=[]]
if 'LT-PTRM-Z':
Treat_PZ.append(temp)
PZSteps.append(k) # depends on [control=['if'], data=[]]
if 'LT-PTRM-I' in methcodes or 'LT-PMRM-I' in methcodes:
Treat_PI.append(temp)
PISteps.append(k) # depends on [control=['if'], data=[]]
if 'LT-PTRM-MD' in methcodes or 'LT-PMRM-MD' in methcodes:
Treat_M.append(temp)
MSteps.append(k) # depends on [control=['if'], data=[]]
if 'LT-PTRM-AC' in methcodes or 'LT-PMRM-AC' in methcodes:
Treat_AC.append(temp)
ACSteps.append(k) # depends on [control=['if'], data=[]]
if 'LT-NO' in methcodes:
dec = float(rec['measurement_dec'])
inc = float(rec['measurement_inc'])
moment = float(rec['measurement_magn_moment'])
if 'LP-PI-M' not in methcodes:
first_I.append([273, 0.0, 0.0, 0.0, 1])
first_Z.append([273, dec, inc, moment, 1]) # NRM step # depends on [control=['if'], data=[]]
else:
first_I.append([0, 0.0, 0.0, 0.0, 1])
first_Z.append([0, dec, inc, moment, 1]) # NRM step # depends on [control=['if'], data=['methcodes']] # depends on [control=['for'], data=['k']]
#---------------------
# find IZ and ZI
#---------------------
for temp in Treat_I: # look through infield steps and find matching Z step
if temp in Treat_Z: # found a match
istep = ISteps[Treat_I.index(temp)]
irec = datablock[istep]
methcodes = []
tmp = irec['magic_method_codes'].split(':')
for meth in tmp:
methcodes.append(meth.strip()) # depends on [control=['for'], data=['meth']]
# take last record as baseline to subtract
brec = datablock[istep - 1]
zstep = ZSteps[Treat_Z.index(temp)]
zrec = datablock[zstep]
# sort out first_Z records
# check if ZI/IZ in in method codes:
ZI = ''
if 'LP-PI-TRM-IZ' in methcodes or 'LP-PI-M-IZ' in methcodes or 'LP-PI-IZ' in methcodes:
ZI = 0 # depends on [control=['if'], data=[]]
elif 'LP-PI-TRM-ZI' in methcodes or 'LP-PI-M-ZI' in methcodes or 'LP-PI-ZI' in methcodes:
ZI = 1 # depends on [control=['if'], data=[]]
elif 'LP-PI-BT-IZZI' in methcodes:
ZI == ''
(i_intex, z_intex) = (0, 0)
foundit = False
for i in range(len(datablock)):
if THERMAL:
if 'treatment_temp' in list(datablock[i].keys()) and float(temp) == float(datablock[i]['treatment_temp']):
foundit = True # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
if MICROWAVE:
if 'measurement_description' in list(datablock[i].keys()):
MW_step = datablock[i]['measurement_description'].strip('\n').split(':')
for STEP in MW_step:
if 'Number' in STEP:
ThisStep = float(STEP.split('-')[-1])
if ThisStep == float(temp):
foundit = True # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['STEP']] # depends on [control=['for'], data=['STEP']] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
if foundit:
if 'LT-T-Z' in datablock[i]['magic_method_codes'].split(':') or 'LT-M-Z' in datablock[i]['magic_method_codes'].split(':'):
z_intex = i # depends on [control=['if'], data=[]]
if 'LT-T-I' in datablock[i]['magic_method_codes'].split(':') or 'LT-M-I' in datablock[i]['magic_method_codes'].split(':'):
i_intex = i # depends on [control=['if'], data=[]]
foundit = False # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['i']]
if z_intex < i_intex:
ZI = 1 # depends on [control=['if'], data=[]]
else:
ZI = 0 # depends on [control=['if'], data=[]]
dec = float(zrec['measurement_dec'])
inc = float(zrec['measurement_inc'])
str = float(zrec[momkey])
first_Z.append([temp, dec, inc, str, ZI])
# sort out first_I records
idec = float(irec['measurement_dec'])
iinc = float(irec['measurement_inc'])
istr = float(irec[momkey])
X = pmag.dir2cart([idec, iinc, istr])
BL = pmag.dir2cart([dec, inc, str])
I = []
for c in range(3):
I.append(X[c] - BL[c]) # depends on [control=['for'], data=['c']]
if I[2] != 0:
iDir = pmag.cart2dir(I)
if Zdiff == 0:
first_I.append([temp, iDir[0], iDir[1], iDir[2], ZI]) # depends on [control=['if'], data=[]]
else:
first_I.append([temp, 0.0, 0.0, I[2], ZI]) # depends on [control=['if'], data=[]]
else:
# gamma=angle([iDir[0],iDir[1]],[phi,theta])
first_I.append([temp, 0.0, 0.0, 0.0, ZI]) # depends on [control=['if'], data=['temp', 'Treat_Z']] # depends on [control=['for'], data=['temp']]
# gamma=0.0
# put in Gamma check (infield trm versus lab field)
# if 180.-gamma<gamma:
# gamma=180.-gamma
# GammaChecks.append([temp-273.,gamma])
#---------------------
# find Thellier Thellier protocol
#---------------------
if 'LP-PI-II' in methcodes or 'LP-PI-T-II' in methcodes or 'LP-PI-M-II' in methcodes:
# look through infield steps and find matching Z step
for i in range(1, len(Treat_I)):
if Treat_I[i] == Treat_I[i - 1]:
# ignore, if there are more than
temp = Treat_I[i]
irec1 = datablock[ISteps[i - 1]]
dec1 = float(irec1['measurement_dec'])
inc1 = float(irec1['measurement_inc'])
moment1 = float(irec1['measurement_magn_moment'])
if len(first_I) < 2:
dec_initial = dec1
inc_initial = inc1 # depends on [control=['if'], data=[]]
cart1 = np.array(pmag.dir2cart([dec1, inc1, moment1]))
irec2 = datablock[ISteps[i]]
dec2 = float(irec2['measurement_dec'])
inc2 = float(irec2['measurement_inc'])
moment2 = float(irec2['measurement_magn_moment'])
cart2 = np.array(pmag.dir2cart([dec2, inc2, moment2]))
# check if its in the same treatment
if Treat_I[i] == Treat_I[i - 2] and dec2 != dec_initial and (inc2 != inc_initial):
continue # depends on [control=['if'], data=[]]
if dec1 != dec2 and inc1 != inc2:
zerofield = (cart2 + cart1) / 2
infield = (cart2 - cart1) / 2
DIR_zerofield = pmag.cart2dir(zerofield)
DIR_infield = pmag.cart2dir(infield)
first_Z.append([temp, DIR_zerofield[0], DIR_zerofield[1], DIR_zerofield[2], 0])
first_I.append([temp, DIR_infield[0], DIR_infield[1], DIR_infield[2], 0]) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['i']] # depends on [control=['if'], data=[]]
#---------------------
# find pTRM checks
#---------------------
for i in range(len(Treat_PI)): # look through infield steps and find matching Z step
temp = Treat_PI[i]
k = PISteps[i]
rec = datablock[k]
dec = float(rec['measurement_dec'])
inc = float(rec['measurement_inc'])
moment = float(rec['measurement_magn_moment'])
phi = float(rec['treatment_dc_field_phi'])
theta = float(rec['treatment_dc_field_theta'])
M = np.array(pmag.dir2cart([dec, inc, moment]))
foundit = False
if 'LP-PI-II' not in methcodes:
# Important: suport several pTRM checks in a row, but
# does not support pTRM checks after infield step
for j in range(k, 1, -1):
if 'LT-M-I' in datablock[j]['magic_method_codes'] or 'LT-T-I' in datablock[j]['magic_method_codes']:
after_zerofield = 0.0
foundit = True
prev_rec = datablock[j]
zerofield_index = j
break # depends on [control=['if'], data=[]]
if float(datablock[j]['treatment_dc_field']) == 0:
after_zerofield = 1.0
foundit = True
prev_rec = datablock[j]
zerofield_index = j
break # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['j']] # depends on [control=['if'], data=[]]
else: # Thellier-Thellier protocol
foundit = True
prev_rec = datablock[k - 1]
zerofield_index = k - 1
if foundit:
prev_dec = float(prev_rec['measurement_dec'])
prev_inc = float(prev_rec['measurement_inc'])
prev_moment = float(prev_rec['measurement_magn_moment'])
prev_phi = float(prev_rec['treatment_dc_field_phi'])
prev_theta = float(prev_rec['treatment_dc_field_theta'])
prev_M = np.array(pmag.dir2cart([prev_dec, prev_inc, prev_moment]))
if 'LP-PI-II' not in methcodes:
diff_cart = M - prev_M
diff_dir = pmag.cart2dir(diff_cart)
if after_zerofield == 0:
ptrm_check.append([temp, diff_dir[0], diff_dir[1], diff_dir[2], zerofield_index, after_zerofield]) # depends on [control=['if'], data=['after_zerofield']]
else:
ptrm_check.append([temp, diff_dir[0], diff_dir[1], diff_dir[2], zerofield_index, after_zerofield]) # depends on [control=['if'], data=[]]
# health check for T-T protocol:
elif theta != prev_theta:
diff = (M - prev_M) / 2
diff_dir = pmag.cart2dir(diff)
ptrm_check.append([temp, diff_dir[0], diff_dir[1], diff_dir[2], zerofield_index, '']) # depends on [control=['if'], data=[]]
else:
print('-W- WARNING: specimen. pTRM check not in place in Thellier Thellier protocol. step please check') # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['i']]
#---------------------
# find Tail checks
#---------------------
for temp in Treat_M:
# print temp
step = MSteps[Treat_M.index(temp)]
rec = datablock[step]
dec = float(rec['measurement_dec'])
inc = float(rec['measurement_inc'])
moment = float(rec['measurement_magn_moment'])
foundit = False
for i in range(1, len(datablock)):
if 'LT-T-Z' in datablock[i]['magic_method_codes'] or 'LT-M-Z' in datablock[i]['magic_method_codes']:
if THERMAL and 'treatment_temp' in list(datablock[i].keys()) and (float(datablock[i]['treatment_temp']) == float(temp)) or (MICROWAVE and 'measurement_description' in list(datablock[i].keys()) and ('Step Number-%.0f' % float(temp) in datablock[i]['measurement_description'])):
prev_rec = datablock[i]
prev_dec = float(prev_rec['measurement_dec'])
prev_inc = float(prev_rec['measurement_inc'])
prev_moment = float(prev_rec['measurement_magn_moment'])
foundit = True
break # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['i']]
if foundit:
ptrm_tail.append([temp, 0, 0, moment - prev_moment]) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['temp']]
#
# final check
#
if len(first_Z) != len(first_I):
print(len(first_Z), len(first_I))
print(' Something wrong with this specimen! Better fix it or delete it ')
input(' press return to acknowledge message') # depends on [control=['if'], data=[]]
#---------------------
# find Additivity (patch by rshaar)
#---------------------
additivity_check = []
for i in range(len(Treat_AC)):
step_0 = ACSteps[i]
temp = Treat_AC[i]
dec0 = float(datablock[step_0]['measurement_dec'])
inc0 = float(datablock[step_0]['measurement_inc'])
moment0 = float(datablock[step_0]['measurement_magn_moment'])
V0 = pmag.dir2cart([dec0, inc0, moment0])
# find the infield step that comes before the additivity check
foundit = False
for j in range(step_0, 1, -1):
if 'LT-T-I' in datablock[j]['magic_method_codes']:
foundit = True
break # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['j']]
if foundit:
dec1 = float(datablock[j]['measurement_dec'])
inc1 = float(datablock[j]['measurement_inc'])
moment1 = float(datablock[j]['measurement_magn_moment'])
V1 = pmag.dir2cart([dec1, inc1, moment1])
# print "additivity check: ",s
# print j
# print "ACC=V1-V0:"
# print "V1=",[dec1,inc1,moment1],pmag.dir2cart([dec1,inc1,moment1])/float(datablock[0]["measurement_magn_moment"])
# print "V1=",pmag.dir2cart([dec1,inc1,moment1])/float(datablock[0]["measurement_magn_moment"])
# print "V0=",[dec0,inc0,moment0],pmag.dir2cart([dec0,inc0,moment0])/float(datablock[0]["measurement_magn_moment"])
# print "NRM=",float(datablock[0]["measurement_magn_moment"])
# print "-------"
I = []
for c in range(3):
I.append(V1[c] - V0[c]) # depends on [control=['for'], data=['c']]
dir1 = pmag.cart2dir(I)
additivity_check.append([temp, dir1[0], dir1[1], dir1[2]])
# print
# "I",np.array(I)/float(datablock[0]["measurement_magn_moment"]),dir1,"(dir1
# unnormalized)"
X = np.array(I) / float(datablock[0]['measurement_magn_moment']) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['i']]
# print "I",np.sqrt(sum(X**2))
araiblock = (first_Z, first_I, ptrm_check, ptrm_tail, zptrm_check, GammaChecks, additivity_check)
return (araiblock, field) |
def makeMarkovApproxToNormalByMonteCarlo(x_grid,mu,sigma,N_draws = 10000):
'''
Creates an approximation to a normal distribution with mean mu and standard
deviation sigma, by Monte Carlo.
Returns a stochastic vector called p_vec, corresponding
to values in x_grid. If a RV is distributed x~N(mu,sigma), then the expectation
of a continuous function f() is E[f(x)] = numpy.dot(p_vec,f(x_grid)).
Parameters
----------
x_grid: numpy.array
A sorted 1D array of floats representing discrete values that a normally
distributed RV could take on.
mu: float
Mean of the normal distribution to be approximated.
sigma: float
Standard deviation of the normal distribution to be approximated.
N_draws: int
Number of draws to use in Monte Carlo.
Returns
-------
p_vec: numpy.array
A stochastic vector with probability weights for each x in x_grid.
'''
# Take random draws from the desired normal distribution
random_draws = np.random.normal(loc = mu, scale = sigma, size = N_draws)
# Compute the distance between the draws and points in x_grid
distance = np.abs(x_grid[:,np.newaxis] - random_draws[np.newaxis,:])
# Find the indices of the points in x_grid that are closest to the draws
distance_minimizing_index = np.argmin(distance,axis=0)
# For each point in x_grid, the approximate probability of that point is the number
# of Monte Carlo draws that are closest to that point
p_vec = np.zeros_like(x_grid)
for p_index,p in enumerate(p_vec):
p_vec[p_index] = np.sum(distance_minimizing_index==p_index) / N_draws
# Check for obvious errors, and return p_vec
assert (np.all(p_vec>=0.)) and (np.all(p_vec<=1.)) and (np.isclose(np.sum(p_vec)),1.)
return p_vec | def function[makeMarkovApproxToNormalByMonteCarlo, parameter[x_grid, mu, sigma, N_draws]]:
constant[
Creates an approximation to a normal distribution with mean mu and standard
deviation sigma, by Monte Carlo.
Returns a stochastic vector called p_vec, corresponding
to values in x_grid. If a RV is distributed x~N(mu,sigma), then the expectation
of a continuous function f() is E[f(x)] = numpy.dot(p_vec,f(x_grid)).
Parameters
----------
x_grid: numpy.array
A sorted 1D array of floats representing discrete values that a normally
distributed RV could take on.
mu: float
Mean of the normal distribution to be approximated.
sigma: float
Standard deviation of the normal distribution to be approximated.
N_draws: int
Number of draws to use in Monte Carlo.
Returns
-------
p_vec: numpy.array
A stochastic vector with probability weights for each x in x_grid.
]
variable[random_draws] assign[=] call[name[np].random.normal, parameter[]]
variable[distance] assign[=] call[name[np].abs, parameter[binary_operation[call[name[x_grid]][tuple[[<ast.Slice object at 0x7da18bc708e0>, <ast.Attribute object at 0x7da18bc73640>]]] - call[name[random_draws]][tuple[[<ast.Attribute object at 0x7da18bc71bd0>, <ast.Slice object at 0x7da18bc73d90>]]]]]]
variable[distance_minimizing_index] assign[=] call[name[np].argmin, parameter[name[distance]]]
variable[p_vec] assign[=] call[name[np].zeros_like, parameter[name[x_grid]]]
for taget[tuple[[<ast.Name object at 0x7da207f9ae90>, <ast.Name object at 0x7da207f9ba60>]]] in starred[call[name[enumerate], parameter[name[p_vec]]]] begin[:]
call[name[p_vec]][name[p_index]] assign[=] binary_operation[call[name[np].sum, parameter[compare[name[distance_minimizing_index] equal[==] name[p_index]]]] / name[N_draws]]
assert[<ast.BoolOp object at 0x7da207f983a0>]
return[name[p_vec]] | keyword[def] identifier[makeMarkovApproxToNormalByMonteCarlo] ( identifier[x_grid] , identifier[mu] , identifier[sigma] , identifier[N_draws] = literal[int] ):
literal[string]
identifier[random_draws] = identifier[np] . identifier[random] . identifier[normal] ( identifier[loc] = identifier[mu] , identifier[scale] = identifier[sigma] , identifier[size] = identifier[N_draws] )
identifier[distance] = identifier[np] . identifier[abs] ( identifier[x_grid] [:, identifier[np] . identifier[newaxis] ]- identifier[random_draws] [ identifier[np] . identifier[newaxis] ,:])
identifier[distance_minimizing_index] = identifier[np] . identifier[argmin] ( identifier[distance] , identifier[axis] = literal[int] )
identifier[p_vec] = identifier[np] . identifier[zeros_like] ( identifier[x_grid] )
keyword[for] identifier[p_index] , identifier[p] keyword[in] identifier[enumerate] ( identifier[p_vec] ):
identifier[p_vec] [ identifier[p_index] ]= identifier[np] . identifier[sum] ( identifier[distance_minimizing_index] == identifier[p_index] )/ identifier[N_draws]
keyword[assert] ( identifier[np] . identifier[all] ( identifier[p_vec] >= literal[int] )) keyword[and] ( identifier[np] . identifier[all] ( identifier[p_vec] <= literal[int] )) keyword[and] ( identifier[np] . identifier[isclose] ( identifier[np] . identifier[sum] ( identifier[p_vec] )), literal[int] )
keyword[return] identifier[p_vec] | def makeMarkovApproxToNormalByMonteCarlo(x_grid, mu, sigma, N_draws=10000):
"""
Creates an approximation to a normal distribution with mean mu and standard
deviation sigma, by Monte Carlo.
Returns a stochastic vector called p_vec, corresponding
to values in x_grid. If a RV is distributed x~N(mu,sigma), then the expectation
of a continuous function f() is E[f(x)] = numpy.dot(p_vec,f(x_grid)).
Parameters
----------
x_grid: numpy.array
A sorted 1D array of floats representing discrete values that a normally
distributed RV could take on.
mu: float
Mean of the normal distribution to be approximated.
sigma: float
Standard deviation of the normal distribution to be approximated.
N_draws: int
Number of draws to use in Monte Carlo.
Returns
-------
p_vec: numpy.array
A stochastic vector with probability weights for each x in x_grid.
"""
# Take random draws from the desired normal distribution
random_draws = np.random.normal(loc=mu, scale=sigma, size=N_draws)
# Compute the distance between the draws and points in x_grid
distance = np.abs(x_grid[:, np.newaxis] - random_draws[np.newaxis, :])
# Find the indices of the points in x_grid that are closest to the draws
distance_minimizing_index = np.argmin(distance, axis=0)
# For each point in x_grid, the approximate probability of that point is the number
# of Monte Carlo draws that are closest to that point
p_vec = np.zeros_like(x_grid)
for (p_index, p) in enumerate(p_vec):
p_vec[p_index] = np.sum(distance_minimizing_index == p_index) / N_draws # depends on [control=['for'], data=[]]
# Check for obvious errors, and return p_vec
assert np.all(p_vec >= 0.0) and np.all(p_vec <= 1.0) and (np.isclose(np.sum(p_vec)), 1.0)
return p_vec |
def lola_image(self, save=False, name='BaseLola.png'):
''' Draw the topography of the region of interest
Args:
save (Optional[bool]): Weither or not to save the image.
Defaults to False.
name (Optional[str]): Absolut path to save the resulting
image. Default to 'BaseLola.png' in the working
directory.
Returns:
An image correponding to the region tography. Realized
from the data taken by the LOLA instrument on board of LRO.
Note:
Nice to use in a jupyter notebook with ``%matplotib inline``
activated.
Feel free to modify this method to plot exactly what you need.
'''
fig = plt.figure(figsize=(10, 8))
ax1 = fig.add_subplot(111)
lon_m, lon_M, lat_m, lat_M = self.lambert_window(
self.size_window, self.lat0, self.lon0)
m = Basemap(llcrnrlon=lon_m, llcrnrlat=lat_m, urcrnrlon=lon_M, urcrnrlat=lat_M,
resolution='i', projection='laea', rsphere=1734400, lat_0=self.lat0, lon_0=self.lon0)
Xl, Yl, Zl = self.get_arrays('Lola')
Xl, Yl = m(Xl, Yl)
CS = m.pcolormesh(Xl, Yl, Zl, cmap='gist_earth',
alpha=.5, ax=ax1, zorder=1)
# m.contour(Xl,Yl,Zl,20, colors = 'black', alpha = 1.0 , zorder=2)
xc, yc = m(self.lon0, self.lat0)
ax1.scatter(xc, yc, s=200, marker='v', zorder=2)
self._add_scale(m, ax1)
self._add_colorbar(m, CS, ax1, 'Topography')
if save == True:
fig.savefig(name, rasterized=True, dpi=50,
bbox_inches='tight', pad_inches=0.1) | def function[lola_image, parameter[self, save, name]]:
constant[ Draw the topography of the region of interest
Args:
save (Optional[bool]): Weither or not to save the image.
Defaults to False.
name (Optional[str]): Absolut path to save the resulting
image. Default to 'BaseLola.png' in the working
directory.
Returns:
An image correponding to the region tography. Realized
from the data taken by the LOLA instrument on board of LRO.
Note:
Nice to use in a jupyter notebook with ``%matplotib inline``
activated.
Feel free to modify this method to plot exactly what you need.
]
variable[fig] assign[=] call[name[plt].figure, parameter[]]
variable[ax1] assign[=] call[name[fig].add_subplot, parameter[constant[111]]]
<ast.Tuple object at 0x7da18f00c730> assign[=] call[name[self].lambert_window, parameter[name[self].size_window, name[self].lat0, name[self].lon0]]
variable[m] assign[=] call[name[Basemap], parameter[]]
<ast.Tuple object at 0x7da18f00f280> assign[=] call[name[self].get_arrays, parameter[constant[Lola]]]
<ast.Tuple object at 0x7da18f00f670> assign[=] call[name[m], parameter[name[Xl], name[Yl]]]
variable[CS] assign[=] call[name[m].pcolormesh, parameter[name[Xl], name[Yl], name[Zl]]]
<ast.Tuple object at 0x7da18f00ebf0> assign[=] call[name[m], parameter[name[self].lon0, name[self].lat0]]
call[name[ax1].scatter, parameter[name[xc], name[yc]]]
call[name[self]._add_scale, parameter[name[m], name[ax1]]]
call[name[self]._add_colorbar, parameter[name[m], name[CS], name[ax1], constant[Topography]]]
if compare[name[save] equal[==] constant[True]] begin[:]
call[name[fig].savefig, parameter[name[name]]] | keyword[def] identifier[lola_image] ( identifier[self] , identifier[save] = keyword[False] , identifier[name] = literal[string] ):
literal[string]
identifier[fig] = identifier[plt] . identifier[figure] ( identifier[figsize] =( literal[int] , literal[int] ))
identifier[ax1] = identifier[fig] . identifier[add_subplot] ( literal[int] )
identifier[lon_m] , identifier[lon_M] , identifier[lat_m] , identifier[lat_M] = identifier[self] . identifier[lambert_window] (
identifier[self] . identifier[size_window] , identifier[self] . identifier[lat0] , identifier[self] . identifier[lon0] )
identifier[m] = identifier[Basemap] ( identifier[llcrnrlon] = identifier[lon_m] , identifier[llcrnrlat] = identifier[lat_m] , identifier[urcrnrlon] = identifier[lon_M] , identifier[urcrnrlat] = identifier[lat_M] ,
identifier[resolution] = literal[string] , identifier[projection] = literal[string] , identifier[rsphere] = literal[int] , identifier[lat_0] = identifier[self] . identifier[lat0] , identifier[lon_0] = identifier[self] . identifier[lon0] )
identifier[Xl] , identifier[Yl] , identifier[Zl] = identifier[self] . identifier[get_arrays] ( literal[string] )
identifier[Xl] , identifier[Yl] = identifier[m] ( identifier[Xl] , identifier[Yl] )
identifier[CS] = identifier[m] . identifier[pcolormesh] ( identifier[Xl] , identifier[Yl] , identifier[Zl] , identifier[cmap] = literal[string] ,
identifier[alpha] = literal[int] , identifier[ax] = identifier[ax1] , identifier[zorder] = literal[int] )
identifier[xc] , identifier[yc] = identifier[m] ( identifier[self] . identifier[lon0] , identifier[self] . identifier[lat0] )
identifier[ax1] . identifier[scatter] ( identifier[xc] , identifier[yc] , identifier[s] = literal[int] , identifier[marker] = literal[string] , identifier[zorder] = literal[int] )
identifier[self] . identifier[_add_scale] ( identifier[m] , identifier[ax1] )
identifier[self] . identifier[_add_colorbar] ( identifier[m] , identifier[CS] , identifier[ax1] , literal[string] )
keyword[if] identifier[save] == keyword[True] :
identifier[fig] . identifier[savefig] ( identifier[name] , identifier[rasterized] = keyword[True] , identifier[dpi] = literal[int] ,
identifier[bbox_inches] = literal[string] , identifier[pad_inches] = literal[int] ) | def lola_image(self, save=False, name='BaseLola.png'):
""" Draw the topography of the region of interest
Args:
save (Optional[bool]): Weither or not to save the image.
Defaults to False.
name (Optional[str]): Absolut path to save the resulting
image. Default to 'BaseLola.png' in the working
directory.
Returns:
An image correponding to the region tography. Realized
from the data taken by the LOLA instrument on board of LRO.
Note:
Nice to use in a jupyter notebook with ``%matplotib inline``
activated.
Feel free to modify this method to plot exactly what you need.
"""
fig = plt.figure(figsize=(10, 8))
ax1 = fig.add_subplot(111)
(lon_m, lon_M, lat_m, lat_M) = self.lambert_window(self.size_window, self.lat0, self.lon0)
m = Basemap(llcrnrlon=lon_m, llcrnrlat=lat_m, urcrnrlon=lon_M, urcrnrlat=lat_M, resolution='i', projection='laea', rsphere=1734400, lat_0=self.lat0, lon_0=self.lon0)
(Xl, Yl, Zl) = self.get_arrays('Lola')
(Xl, Yl) = m(Xl, Yl)
CS = m.pcolormesh(Xl, Yl, Zl, cmap='gist_earth', alpha=0.5, ax=ax1, zorder=1)
# m.contour(Xl,Yl,Zl,20, colors = 'black', alpha = 1.0 , zorder=2)
(xc, yc) = m(self.lon0, self.lat0)
ax1.scatter(xc, yc, s=200, marker='v', zorder=2)
self._add_scale(m, ax1)
self._add_colorbar(m, CS, ax1, 'Topography')
if save == True:
fig.savefig(name, rasterized=True, dpi=50, bbox_inches='tight', pad_inches=0.1) # depends on [control=['if'], data=[]] |
def _root_amplitude_brentq(counts, bkg, model, root_fn=_f_cash_root):
"""Fit amplitude by finding roots using Brent algorithm.
See Appendix A Stewart (2009).
Parameters
----------
counts : `~numpy.ndarray`
Slice of count map.
bkg : `~numpy.ndarray`
Slice of background map.
model : `~numpy.ndarray`
Model template to fit.
Returns
-------
amplitude : float
Fitted flux amplitude.
niter : int
Number of function evaluations needed for the fit.
"""
# Compute amplitude bounds and assert counts > 0
amplitude_min, amplitude_max = _amplitude_bounds(counts, bkg, model)
if not np.sum(counts) > 0:
return amplitude_min, 0
args = (counts, bkg, model)
if root_fn(0.0, *args) < 0:
return 0.0, 1
with warnings.catch_warnings():
warnings.simplefilter("ignore")
try:
result = brentq(root_fn, amplitude_min, amplitude_max, args=args,
maxiter=MAX_NITER, full_output=True, rtol=1E-4)
return result[0], result[1].iterations
except (RuntimeError, ValueError):
# Where the root finding fails NaN is set as amplitude
return np.nan, MAX_NITER | def function[_root_amplitude_brentq, parameter[counts, bkg, model, root_fn]]:
constant[Fit amplitude by finding roots using Brent algorithm.
See Appendix A Stewart (2009).
Parameters
----------
counts : `~numpy.ndarray`
Slice of count map.
bkg : `~numpy.ndarray`
Slice of background map.
model : `~numpy.ndarray`
Model template to fit.
Returns
-------
amplitude : float
Fitted flux amplitude.
niter : int
Number of function evaluations needed for the fit.
]
<ast.Tuple object at 0x7da20c6c4760> assign[=] call[name[_amplitude_bounds], parameter[name[counts], name[bkg], name[model]]]
if <ast.UnaryOp object at 0x7da20c6c4610> begin[:]
return[tuple[[<ast.Name object at 0x7da20c6c72b0>, <ast.Constant object at 0x7da20c6c7850>]]]
variable[args] assign[=] tuple[[<ast.Name object at 0x7da20c6c7f40>, <ast.Name object at 0x7da20c6c71c0>, <ast.Name object at 0x7da20c6c7a60>]]
if compare[call[name[root_fn], parameter[constant[0.0], <ast.Starred object at 0x7da20c6c4a00>]] less[<] constant[0]] begin[:]
return[tuple[[<ast.Constant object at 0x7da20c6c64a0>, <ast.Constant object at 0x7da20c6c6560>]]]
with call[name[warnings].catch_warnings, parameter[]] begin[:]
call[name[warnings].simplefilter, parameter[constant[ignore]]]
<ast.Try object at 0x7da20c6c7d90> | keyword[def] identifier[_root_amplitude_brentq] ( identifier[counts] , identifier[bkg] , identifier[model] , identifier[root_fn] = identifier[_f_cash_root] ):
literal[string]
identifier[amplitude_min] , identifier[amplitude_max] = identifier[_amplitude_bounds] ( identifier[counts] , identifier[bkg] , identifier[model] )
keyword[if] keyword[not] identifier[np] . identifier[sum] ( identifier[counts] )> literal[int] :
keyword[return] identifier[amplitude_min] , literal[int]
identifier[args] =( identifier[counts] , identifier[bkg] , identifier[model] )
keyword[if] identifier[root_fn] ( literal[int] ,* identifier[args] )< literal[int] :
keyword[return] literal[int] , literal[int]
keyword[with] identifier[warnings] . identifier[catch_warnings] ():
identifier[warnings] . identifier[simplefilter] ( literal[string] )
keyword[try] :
identifier[result] = identifier[brentq] ( identifier[root_fn] , identifier[amplitude_min] , identifier[amplitude_max] , identifier[args] = identifier[args] ,
identifier[maxiter] = identifier[MAX_NITER] , identifier[full_output] = keyword[True] , identifier[rtol] = literal[int] )
keyword[return] identifier[result] [ literal[int] ], identifier[result] [ literal[int] ]. identifier[iterations]
keyword[except] ( identifier[RuntimeError] , identifier[ValueError] ):
keyword[return] identifier[np] . identifier[nan] , identifier[MAX_NITER] | def _root_amplitude_brentq(counts, bkg, model, root_fn=_f_cash_root):
"""Fit amplitude by finding roots using Brent algorithm.
See Appendix A Stewart (2009).
Parameters
----------
counts : `~numpy.ndarray`
Slice of count map.
bkg : `~numpy.ndarray`
Slice of background map.
model : `~numpy.ndarray`
Model template to fit.
Returns
-------
amplitude : float
Fitted flux amplitude.
niter : int
Number of function evaluations needed for the fit.
"""
# Compute amplitude bounds and assert counts > 0
(amplitude_min, amplitude_max) = _amplitude_bounds(counts, bkg, model)
if not np.sum(counts) > 0:
return (amplitude_min, 0) # depends on [control=['if'], data=[]]
args = (counts, bkg, model)
if root_fn(0.0, *args) < 0:
return (0.0, 1) # depends on [control=['if'], data=[]]
with warnings.catch_warnings():
warnings.simplefilter('ignore')
try:
result = brentq(root_fn, amplitude_min, amplitude_max, args=args, maxiter=MAX_NITER, full_output=True, rtol=0.0001)
return (result[0], result[1].iterations) # depends on [control=['try'], data=[]]
except (RuntimeError, ValueError):
# Where the root finding fails NaN is set as amplitude
return (np.nan, MAX_NITER) # depends on [control=['except'], data=[]] # depends on [control=['with'], data=[]] |
def get(self, **kwargs):
"""Get suggestions."""
completions = []
size = request.values.get('size', type=int)
for k in self.suggesters.keys():
val = request.values.get(k)
if val:
# Get completion suggestions
opts = copy.deepcopy(self.suggesters[k])
if 'context' in opts.get('completion', {}):
ctx_field = opts['completion']['context']
ctx_val = request.values.get(ctx_field)
if not ctx_val:
raise SuggestMissingContextRESTError
opts['completion']['context'] = {
ctx_field: ctx_val
}
if size:
opts['completion']['size'] = size
completions.append((k, val, opts))
if not completions:
raise SuggestNoCompletionsRESTError(
', '.join(sorted(self.suggesters.keys())))
# Add completions
s = self.search_class()
for field, val, opts in completions:
source = opts.pop('_source', None)
if source is not None and ES_VERSION[0] >= 5:
s = s.source(source).suggest(field, val, **opts)
else:
s = s.suggest(field, val, **opts)
if ES_VERSION[0] == 2:
# Execute search
response = s.execute_suggest().to_dict()
for field, _, _ in completions:
for resp in response[field]:
for op in resp['options']:
if 'payload' in op:
op['_source'] = copy.deepcopy(op['payload'])
elif ES_VERSION[0] >= 5:
response = s.execute().to_dict()['suggest']
result = dict()
for field, val, opts in completions:
result[field] = response[field]
return make_response(jsonify(result)) | def function[get, parameter[self]]:
constant[Get suggestions.]
variable[completions] assign[=] list[[]]
variable[size] assign[=] call[name[request].values.get, parameter[constant[size]]]
for taget[name[k]] in starred[call[name[self].suggesters.keys, parameter[]]] begin[:]
variable[val] assign[=] call[name[request].values.get, parameter[name[k]]]
if name[val] begin[:]
variable[opts] assign[=] call[name[copy].deepcopy, parameter[call[name[self].suggesters][name[k]]]]
if compare[constant[context] in call[name[opts].get, parameter[constant[completion], dictionary[[], []]]]] begin[:]
variable[ctx_field] assign[=] call[call[name[opts]][constant[completion]]][constant[context]]
variable[ctx_val] assign[=] call[name[request].values.get, parameter[name[ctx_field]]]
if <ast.UnaryOp object at 0x7da1b0382950> begin[:]
<ast.Raise object at 0x7da1b0380b80>
call[call[name[opts]][constant[completion]]][constant[context]] assign[=] dictionary[[<ast.Name object at 0x7da1b0382f80>], [<ast.Name object at 0x7da1b0381720>]]
if name[size] begin[:]
call[call[name[opts]][constant[completion]]][constant[size]] assign[=] name[size]
call[name[completions].append, parameter[tuple[[<ast.Name object at 0x7da1b0382fb0>, <ast.Name object at 0x7da1b0383370>, <ast.Name object at 0x7da1b0380bb0>]]]]
if <ast.UnaryOp object at 0x7da1b0382ce0> begin[:]
<ast.Raise object at 0x7da1b03829b0>
variable[s] assign[=] call[name[self].search_class, parameter[]]
for taget[tuple[[<ast.Name object at 0x7da1b031fca0>, <ast.Name object at 0x7da1b031ee60>, <ast.Name object at 0x7da1b031e8c0>]]] in starred[name[completions]] begin[:]
variable[source] assign[=] call[name[opts].pop, parameter[constant[_source], constant[None]]]
if <ast.BoolOp object at 0x7da1b031f130> begin[:]
variable[s] assign[=] call[call[name[s].source, parameter[name[source]]].suggest, parameter[name[field], name[val]]]
if compare[call[name[ES_VERSION]][constant[0]] equal[==] constant[2]] begin[:]
variable[response] assign[=] call[call[name[s].execute_suggest, parameter[]].to_dict, parameter[]]
for taget[tuple[[<ast.Name object at 0x7da1b031ed70>, <ast.Name object at 0x7da1b031eaa0>, <ast.Name object at 0x7da1b031f4c0>]]] in starred[name[completions]] begin[:]
for taget[name[resp]] in starred[call[name[response]][name[field]]] begin[:]
for taget[name[op]] in starred[call[name[resp]][constant[options]]] begin[:]
if compare[constant[payload] in name[op]] begin[:]
call[name[op]][constant[_source]] assign[=] call[name[copy].deepcopy, parameter[call[name[op]][constant[payload]]]]
variable[result] assign[=] call[name[dict], parameter[]]
for taget[tuple[[<ast.Name object at 0x7da1b031ff70>, <ast.Name object at 0x7da1b031f0a0>, <ast.Name object at 0x7da1b031fbe0>]]] in starred[name[completions]] begin[:]
call[name[result]][name[field]] assign[=] call[name[response]][name[field]]
return[call[name[make_response], parameter[call[name[jsonify], parameter[name[result]]]]]] | keyword[def] identifier[get] ( identifier[self] ,** identifier[kwargs] ):
literal[string]
identifier[completions] =[]
identifier[size] = identifier[request] . identifier[values] . identifier[get] ( literal[string] , identifier[type] = identifier[int] )
keyword[for] identifier[k] keyword[in] identifier[self] . identifier[suggesters] . identifier[keys] ():
identifier[val] = identifier[request] . identifier[values] . identifier[get] ( identifier[k] )
keyword[if] identifier[val] :
identifier[opts] = identifier[copy] . identifier[deepcopy] ( identifier[self] . identifier[suggesters] [ identifier[k] ])
keyword[if] literal[string] keyword[in] identifier[opts] . identifier[get] ( literal[string] ,{}):
identifier[ctx_field] = identifier[opts] [ literal[string] ][ literal[string] ]
identifier[ctx_val] = identifier[request] . identifier[values] . identifier[get] ( identifier[ctx_field] )
keyword[if] keyword[not] identifier[ctx_val] :
keyword[raise] identifier[SuggestMissingContextRESTError]
identifier[opts] [ literal[string] ][ literal[string] ]={
identifier[ctx_field] : identifier[ctx_val]
}
keyword[if] identifier[size] :
identifier[opts] [ literal[string] ][ literal[string] ]= identifier[size]
identifier[completions] . identifier[append] (( identifier[k] , identifier[val] , identifier[opts] ))
keyword[if] keyword[not] identifier[completions] :
keyword[raise] identifier[SuggestNoCompletionsRESTError] (
literal[string] . identifier[join] ( identifier[sorted] ( identifier[self] . identifier[suggesters] . identifier[keys] ())))
identifier[s] = identifier[self] . identifier[search_class] ()
keyword[for] identifier[field] , identifier[val] , identifier[opts] keyword[in] identifier[completions] :
identifier[source] = identifier[opts] . identifier[pop] ( literal[string] , keyword[None] )
keyword[if] identifier[source] keyword[is] keyword[not] keyword[None] keyword[and] identifier[ES_VERSION] [ literal[int] ]>= literal[int] :
identifier[s] = identifier[s] . identifier[source] ( identifier[source] ). identifier[suggest] ( identifier[field] , identifier[val] ,** identifier[opts] )
keyword[else] :
identifier[s] = identifier[s] . identifier[suggest] ( identifier[field] , identifier[val] ,** identifier[opts] )
keyword[if] identifier[ES_VERSION] [ literal[int] ]== literal[int] :
identifier[response] = identifier[s] . identifier[execute_suggest] (). identifier[to_dict] ()
keyword[for] identifier[field] , identifier[_] , identifier[_] keyword[in] identifier[completions] :
keyword[for] identifier[resp] keyword[in] identifier[response] [ identifier[field] ]:
keyword[for] identifier[op] keyword[in] identifier[resp] [ literal[string] ]:
keyword[if] literal[string] keyword[in] identifier[op] :
identifier[op] [ literal[string] ]= identifier[copy] . identifier[deepcopy] ( identifier[op] [ literal[string] ])
keyword[elif] identifier[ES_VERSION] [ literal[int] ]>= literal[int] :
identifier[response] = identifier[s] . identifier[execute] (). identifier[to_dict] ()[ literal[string] ]
identifier[result] = identifier[dict] ()
keyword[for] identifier[field] , identifier[val] , identifier[opts] keyword[in] identifier[completions] :
identifier[result] [ identifier[field] ]= identifier[response] [ identifier[field] ]
keyword[return] identifier[make_response] ( identifier[jsonify] ( identifier[result] )) | def get(self, **kwargs):
"""Get suggestions."""
completions = []
size = request.values.get('size', type=int)
for k in self.suggesters.keys():
val = request.values.get(k)
if val:
# Get completion suggestions
opts = copy.deepcopy(self.suggesters[k])
if 'context' in opts.get('completion', {}):
ctx_field = opts['completion']['context']
ctx_val = request.values.get(ctx_field)
if not ctx_val:
raise SuggestMissingContextRESTError # depends on [control=['if'], data=[]]
opts['completion']['context'] = {ctx_field: ctx_val} # depends on [control=['if'], data=[]]
if size:
opts['completion']['size'] = size # depends on [control=['if'], data=[]]
completions.append((k, val, opts)) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['k']]
if not completions:
raise SuggestNoCompletionsRESTError(', '.join(sorted(self.suggesters.keys()))) # depends on [control=['if'], data=[]]
# Add completions
s = self.search_class()
for (field, val, opts) in completions:
source = opts.pop('_source', None)
if source is not None and ES_VERSION[0] >= 5:
s = s.source(source).suggest(field, val, **opts) # depends on [control=['if'], data=[]]
else:
s = s.suggest(field, val, **opts) # depends on [control=['for'], data=[]]
if ES_VERSION[0] == 2:
# Execute search
response = s.execute_suggest().to_dict()
for (field, _, _) in completions:
for resp in response[field]:
for op in resp['options']:
if 'payload' in op:
op['_source'] = copy.deepcopy(op['payload']) # depends on [control=['if'], data=['op']] # depends on [control=['for'], data=['op']] # depends on [control=['for'], data=['resp']] # depends on [control=['for'], data=[]] # depends on [control=['if'], data=[]]
elif ES_VERSION[0] >= 5:
response = s.execute().to_dict()['suggest'] # depends on [control=['if'], data=[]]
result = dict()
for (field, val, opts) in completions:
result[field] = response[field] # depends on [control=['for'], data=[]]
return make_response(jsonify(result)) |
def to_requests(self, method='get'):
"""Export to Requests format.
:param str method: Request method
:return: Dict of keyword arguments formatted for `requests.request`
"""
out = {}
data_key = 'params' if method.lower() == 'get' else 'data'
out[data_key] = self.data
out.update(self.options)
return dict([
(key, list(value.items(multi=True)))
for key, value in iteritems(out)
]) | def function[to_requests, parameter[self, method]]:
constant[Export to Requests format.
:param str method: Request method
:return: Dict of keyword arguments formatted for `requests.request`
]
variable[out] assign[=] dictionary[[], []]
variable[data_key] assign[=] <ast.IfExp object at 0x7da1b12338e0>
call[name[out]][name[data_key]] assign[=] name[self].data
call[name[out].update, parameter[name[self].options]]
return[call[name[dict], parameter[<ast.ListComp object at 0x7da1b1390a60>]]] | keyword[def] identifier[to_requests] ( identifier[self] , identifier[method] = literal[string] ):
literal[string]
identifier[out] ={}
identifier[data_key] = literal[string] keyword[if] identifier[method] . identifier[lower] ()== literal[string] keyword[else] literal[string]
identifier[out] [ identifier[data_key] ]= identifier[self] . identifier[data]
identifier[out] . identifier[update] ( identifier[self] . identifier[options] )
keyword[return] identifier[dict] ([
( identifier[key] , identifier[list] ( identifier[value] . identifier[items] ( identifier[multi] = keyword[True] )))
keyword[for] identifier[key] , identifier[value] keyword[in] identifier[iteritems] ( identifier[out] )
]) | def to_requests(self, method='get'):
"""Export to Requests format.
:param str method: Request method
:return: Dict of keyword arguments formatted for `requests.request`
"""
out = {}
data_key = 'params' if method.lower() == 'get' else 'data'
out[data_key] = self.data
out.update(self.options)
return dict([(key, list(value.items(multi=True))) for (key, value) in iteritems(out)]) |
def cli(debug, cache, incremental):
"""Crawler framework for documents and structured scrapers."""
settings.HTTP_CACHE = cache
settings.INCREMENTAL = incremental
settings.DEBUG = debug
if settings.DEBUG:
logging.basicConfig(level=logging.DEBUG)
else:
logging.basicConfig(level=logging.INFO)
init_memorious() | def function[cli, parameter[debug, cache, incremental]]:
constant[Crawler framework for documents and structured scrapers.]
name[settings].HTTP_CACHE assign[=] name[cache]
name[settings].INCREMENTAL assign[=] name[incremental]
name[settings].DEBUG assign[=] name[debug]
if name[settings].DEBUG begin[:]
call[name[logging].basicConfig, parameter[]]
call[name[init_memorious], parameter[]] | keyword[def] identifier[cli] ( identifier[debug] , identifier[cache] , identifier[incremental] ):
literal[string]
identifier[settings] . identifier[HTTP_CACHE] = identifier[cache]
identifier[settings] . identifier[INCREMENTAL] = identifier[incremental]
identifier[settings] . identifier[DEBUG] = identifier[debug]
keyword[if] identifier[settings] . identifier[DEBUG] :
identifier[logging] . identifier[basicConfig] ( identifier[level] = identifier[logging] . identifier[DEBUG] )
keyword[else] :
identifier[logging] . identifier[basicConfig] ( identifier[level] = identifier[logging] . identifier[INFO] )
identifier[init_memorious] () | def cli(debug, cache, incremental):
"""Crawler framework for documents and structured scrapers."""
settings.HTTP_CACHE = cache
settings.INCREMENTAL = incremental
settings.DEBUG = debug
if settings.DEBUG:
logging.basicConfig(level=logging.DEBUG) # depends on [control=['if'], data=[]]
else:
logging.basicConfig(level=logging.INFO)
init_memorious() |
def key_info(self, **kwargs):
"""
Retrieve info about the permissions associated with the
key associated to the given Zotero instance
"""
query_string = "/keys/{k}".format(k=self.api_key)
return self._build_query(query_string) | def function[key_info, parameter[self]]:
constant[
Retrieve info about the permissions associated with the
key associated to the given Zotero instance
]
variable[query_string] assign[=] call[constant[/keys/{k}].format, parameter[]]
return[call[name[self]._build_query, parameter[name[query_string]]]] | keyword[def] identifier[key_info] ( identifier[self] ,** identifier[kwargs] ):
literal[string]
identifier[query_string] = literal[string] . identifier[format] ( identifier[k] = identifier[self] . identifier[api_key] )
keyword[return] identifier[self] . identifier[_build_query] ( identifier[query_string] ) | def key_info(self, **kwargs):
"""
Retrieve info about the permissions associated with the
key associated to the given Zotero instance
"""
query_string = '/keys/{k}'.format(k=self.api_key)
return self._build_query(query_string) |
def convert_geojson_to_shapefile(geojson_path):
"""Convert geojson file to shapefile.
It will create a necessary file next to the geojson file. It will not
affect another files (e.g. .xml, .qml, etc).
:param geojson_path: The path to geojson file.
:type geojson_path: basestring
:returns: True if shapefile layer created, False otherwise.
:rtype: bool
"""
layer = QgsVectorLayer(geojson_path, 'vector layer', 'ogr')
if not layer.isValid():
return False
# Construct shapefile path
shapefile_path = os.path.splitext(geojson_path)[0] + '.shp'
QgsVectorFileWriter.writeAsVectorFormat(
layer,
shapefile_path,
'utf-8',
layer.crs(),
'ESRI Shapefile')
if os.path.exists(shapefile_path):
return True
return False | def function[convert_geojson_to_shapefile, parameter[geojson_path]]:
constant[Convert geojson file to shapefile.
It will create a necessary file next to the geojson file. It will not
affect another files (e.g. .xml, .qml, etc).
:param geojson_path: The path to geojson file.
:type geojson_path: basestring
:returns: True if shapefile layer created, False otherwise.
:rtype: bool
]
variable[layer] assign[=] call[name[QgsVectorLayer], parameter[name[geojson_path], constant[vector layer], constant[ogr]]]
if <ast.UnaryOp object at 0x7da204567550> begin[:]
return[constant[False]]
variable[shapefile_path] assign[=] binary_operation[call[call[name[os].path.splitext, parameter[name[geojson_path]]]][constant[0]] + constant[.shp]]
call[name[QgsVectorFileWriter].writeAsVectorFormat, parameter[name[layer], name[shapefile_path], constant[utf-8], call[name[layer].crs, parameter[]], constant[ESRI Shapefile]]]
if call[name[os].path.exists, parameter[name[shapefile_path]]] begin[:]
return[constant[True]]
return[constant[False]] | keyword[def] identifier[convert_geojson_to_shapefile] ( identifier[geojson_path] ):
literal[string]
identifier[layer] = identifier[QgsVectorLayer] ( identifier[geojson_path] , literal[string] , literal[string] )
keyword[if] keyword[not] identifier[layer] . identifier[isValid] ():
keyword[return] keyword[False]
identifier[shapefile_path] = identifier[os] . identifier[path] . identifier[splitext] ( identifier[geojson_path] )[ literal[int] ]+ literal[string]
identifier[QgsVectorFileWriter] . identifier[writeAsVectorFormat] (
identifier[layer] ,
identifier[shapefile_path] ,
literal[string] ,
identifier[layer] . identifier[crs] (),
literal[string] )
keyword[if] identifier[os] . identifier[path] . identifier[exists] ( identifier[shapefile_path] ):
keyword[return] keyword[True]
keyword[return] keyword[False] | def convert_geojson_to_shapefile(geojson_path):
"""Convert geojson file to shapefile.
It will create a necessary file next to the geojson file. It will not
affect another files (e.g. .xml, .qml, etc).
:param geojson_path: The path to geojson file.
:type geojson_path: basestring
:returns: True if shapefile layer created, False otherwise.
:rtype: bool
"""
layer = QgsVectorLayer(geojson_path, 'vector layer', 'ogr')
if not layer.isValid():
return False # depends on [control=['if'], data=[]]
# Construct shapefile path
shapefile_path = os.path.splitext(geojson_path)[0] + '.shp'
QgsVectorFileWriter.writeAsVectorFormat(layer, shapefile_path, 'utf-8', layer.crs(), 'ESRI Shapefile')
if os.path.exists(shapefile_path):
return True # depends on [control=['if'], data=[]]
return False |
def dispatch(self, request):
"""
View to handle final steps of OAuth based authentication where the user
gets redirected back to from the service provider
"""
login_done_url = reverse(self.adapter.provider_id + "_callback")
client = self._get_client(request, login_done_url)
if not client.is_valid():
if 'denied' in request.GET:
error = AuthError.CANCELLED
else:
error = AuthError.UNKNOWN
extra_context = dict(oauth_client=client)
return render_authentication_error(
request,
self.adapter.provider_id,
error=error,
extra_context=extra_context)
app = self.adapter.get_provider().get_app(request)
try:
access_token = client.get_access_token()
token = SocialToken(
app=app,
token=access_token['oauth_token'],
# .get() -- e.g. Evernote does not feature a secret
token_secret=access_token.get('oauth_token_secret', ''))
login = self.adapter.complete_login(request,
app,
token,
response=access_token)
login.token = token
login.state = SocialLogin.unstash_state(request)
return complete_social_login(request, login)
except OAuthError as e:
return render_authentication_error(
request,
self.adapter.provider_id,
exception=e) | def function[dispatch, parameter[self, request]]:
constant[
View to handle final steps of OAuth based authentication where the user
gets redirected back to from the service provider
]
variable[login_done_url] assign[=] call[name[reverse], parameter[binary_operation[name[self].adapter.provider_id + constant[_callback]]]]
variable[client] assign[=] call[name[self]._get_client, parameter[name[request], name[login_done_url]]]
if <ast.UnaryOp object at 0x7da204623970> begin[:]
if compare[constant[denied] in name[request].GET] begin[:]
variable[error] assign[=] name[AuthError].CANCELLED
variable[extra_context] assign[=] call[name[dict], parameter[]]
return[call[name[render_authentication_error], parameter[name[request], name[self].adapter.provider_id]]]
variable[app] assign[=] call[call[name[self].adapter.get_provider, parameter[]].get_app, parameter[name[request]]]
<ast.Try object at 0x7da204620ee0> | keyword[def] identifier[dispatch] ( identifier[self] , identifier[request] ):
literal[string]
identifier[login_done_url] = identifier[reverse] ( identifier[self] . identifier[adapter] . identifier[provider_id] + literal[string] )
identifier[client] = identifier[self] . identifier[_get_client] ( identifier[request] , identifier[login_done_url] )
keyword[if] keyword[not] identifier[client] . identifier[is_valid] ():
keyword[if] literal[string] keyword[in] identifier[request] . identifier[GET] :
identifier[error] = identifier[AuthError] . identifier[CANCELLED]
keyword[else] :
identifier[error] = identifier[AuthError] . identifier[UNKNOWN]
identifier[extra_context] = identifier[dict] ( identifier[oauth_client] = identifier[client] )
keyword[return] identifier[render_authentication_error] (
identifier[request] ,
identifier[self] . identifier[adapter] . identifier[provider_id] ,
identifier[error] = identifier[error] ,
identifier[extra_context] = identifier[extra_context] )
identifier[app] = identifier[self] . identifier[adapter] . identifier[get_provider] (). identifier[get_app] ( identifier[request] )
keyword[try] :
identifier[access_token] = identifier[client] . identifier[get_access_token] ()
identifier[token] = identifier[SocialToken] (
identifier[app] = identifier[app] ,
identifier[token] = identifier[access_token] [ literal[string] ],
identifier[token_secret] = identifier[access_token] . identifier[get] ( literal[string] , literal[string] ))
identifier[login] = identifier[self] . identifier[adapter] . identifier[complete_login] ( identifier[request] ,
identifier[app] ,
identifier[token] ,
identifier[response] = identifier[access_token] )
identifier[login] . identifier[token] = identifier[token]
identifier[login] . identifier[state] = identifier[SocialLogin] . identifier[unstash_state] ( identifier[request] )
keyword[return] identifier[complete_social_login] ( identifier[request] , identifier[login] )
keyword[except] identifier[OAuthError] keyword[as] identifier[e] :
keyword[return] identifier[render_authentication_error] (
identifier[request] ,
identifier[self] . identifier[adapter] . identifier[provider_id] ,
identifier[exception] = identifier[e] ) | def dispatch(self, request):
"""
View to handle final steps of OAuth based authentication where the user
gets redirected back to from the service provider
"""
login_done_url = reverse(self.adapter.provider_id + '_callback')
client = self._get_client(request, login_done_url)
if not client.is_valid():
if 'denied' in request.GET:
error = AuthError.CANCELLED # depends on [control=['if'], data=[]]
else:
error = AuthError.UNKNOWN
extra_context = dict(oauth_client=client)
return render_authentication_error(request, self.adapter.provider_id, error=error, extra_context=extra_context) # depends on [control=['if'], data=[]]
app = self.adapter.get_provider().get_app(request)
try:
access_token = client.get_access_token()
# .get() -- e.g. Evernote does not feature a secret
token = SocialToken(app=app, token=access_token['oauth_token'], token_secret=access_token.get('oauth_token_secret', ''))
login = self.adapter.complete_login(request, app, token, response=access_token)
login.token = token
login.state = SocialLogin.unstash_state(request)
return complete_social_login(request, login) # depends on [control=['try'], data=[]]
except OAuthError as e:
return render_authentication_error(request, self.adapter.provider_id, exception=e) # depends on [control=['except'], data=['e']] |
def extract_names(source):
"""Extract names from a function definition
Looks for a function definition in the source.
Only the first function definition is examined.
Returns:
a list names(identifiers) used in the body of the function
excluding function parameters.
"""
if source is None:
return None
source = dedent(source)
funcdef = find_funcdef(source)
params = extract_params(source)
names = []
if isinstance(funcdef, ast.FunctionDef):
stmts = funcdef.body
elif isinstance(funcdef, ast.Lambda):
stmts = [funcdef.body]
else:
raise ValueError("must not happen")
for stmt in stmts:
for node in ast.walk(stmt):
if isinstance(node, ast.Name):
if node.id not in names and node.id not in params:
names.append(node.id)
return names | def function[extract_names, parameter[source]]:
constant[Extract names from a function definition
Looks for a function definition in the source.
Only the first function definition is examined.
Returns:
a list names(identifiers) used in the body of the function
excluding function parameters.
]
if compare[name[source] is constant[None]] begin[:]
return[constant[None]]
variable[source] assign[=] call[name[dedent], parameter[name[source]]]
variable[funcdef] assign[=] call[name[find_funcdef], parameter[name[source]]]
variable[params] assign[=] call[name[extract_params], parameter[name[source]]]
variable[names] assign[=] list[[]]
if call[name[isinstance], parameter[name[funcdef], name[ast].FunctionDef]] begin[:]
variable[stmts] assign[=] name[funcdef].body
for taget[name[stmt]] in starred[name[stmts]] begin[:]
for taget[name[node]] in starred[call[name[ast].walk, parameter[name[stmt]]]] begin[:]
if call[name[isinstance], parameter[name[node], name[ast].Name]] begin[:]
if <ast.BoolOp object at 0x7da1afe6e890> begin[:]
call[name[names].append, parameter[name[node].id]]
return[name[names]] | keyword[def] identifier[extract_names] ( identifier[source] ):
literal[string]
keyword[if] identifier[source] keyword[is] keyword[None] :
keyword[return] keyword[None]
identifier[source] = identifier[dedent] ( identifier[source] )
identifier[funcdef] = identifier[find_funcdef] ( identifier[source] )
identifier[params] = identifier[extract_params] ( identifier[source] )
identifier[names] =[]
keyword[if] identifier[isinstance] ( identifier[funcdef] , identifier[ast] . identifier[FunctionDef] ):
identifier[stmts] = identifier[funcdef] . identifier[body]
keyword[elif] identifier[isinstance] ( identifier[funcdef] , identifier[ast] . identifier[Lambda] ):
identifier[stmts] =[ identifier[funcdef] . identifier[body] ]
keyword[else] :
keyword[raise] identifier[ValueError] ( literal[string] )
keyword[for] identifier[stmt] keyword[in] identifier[stmts] :
keyword[for] identifier[node] keyword[in] identifier[ast] . identifier[walk] ( identifier[stmt] ):
keyword[if] identifier[isinstance] ( identifier[node] , identifier[ast] . identifier[Name] ):
keyword[if] identifier[node] . identifier[id] keyword[not] keyword[in] identifier[names] keyword[and] identifier[node] . identifier[id] keyword[not] keyword[in] identifier[params] :
identifier[names] . identifier[append] ( identifier[node] . identifier[id] )
keyword[return] identifier[names] | def extract_names(source):
"""Extract names from a function definition
Looks for a function definition in the source.
Only the first function definition is examined.
Returns:
a list names(identifiers) used in the body of the function
excluding function parameters.
"""
if source is None:
return None # depends on [control=['if'], data=[]]
source = dedent(source)
funcdef = find_funcdef(source)
params = extract_params(source)
names = []
if isinstance(funcdef, ast.FunctionDef):
stmts = funcdef.body # depends on [control=['if'], data=[]]
elif isinstance(funcdef, ast.Lambda):
stmts = [funcdef.body] # depends on [control=['if'], data=[]]
else:
raise ValueError('must not happen')
for stmt in stmts:
for node in ast.walk(stmt):
if isinstance(node, ast.Name):
if node.id not in names and node.id not in params:
names.append(node.id) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['node']] # depends on [control=['for'], data=['stmt']]
return names |
def comment(self, text, comment_prefix='#'):
"""Creates a comment block
Args:
text (str): content of comment without #
comment_prefix (str): character indicating start of comment
Returns:
self for chaining
"""
comment = Comment(self._container)
if not text.startswith(comment_prefix):
text = "{} {}".format(comment_prefix, text)
if not text.endswith('\n'):
text = "{}{}".format(text, '\n')
comment.add_line(text)
self._container.structure.insert(self._idx, comment)
self._idx += 1
return self | def function[comment, parameter[self, text, comment_prefix]]:
constant[Creates a comment block
Args:
text (str): content of comment without #
comment_prefix (str): character indicating start of comment
Returns:
self for chaining
]
variable[comment] assign[=] call[name[Comment], parameter[name[self]._container]]
if <ast.UnaryOp object at 0x7da2054a7970> begin[:]
variable[text] assign[=] call[constant[{} {}].format, parameter[name[comment_prefix], name[text]]]
if <ast.UnaryOp object at 0x7da2054a5270> begin[:]
variable[text] assign[=] call[constant[{}{}].format, parameter[name[text], constant[
]]]
call[name[comment].add_line, parameter[name[text]]]
call[name[self]._container.structure.insert, parameter[name[self]._idx, name[comment]]]
<ast.AugAssign object at 0x7da2054a7670>
return[name[self]] | keyword[def] identifier[comment] ( identifier[self] , identifier[text] , identifier[comment_prefix] = literal[string] ):
literal[string]
identifier[comment] = identifier[Comment] ( identifier[self] . identifier[_container] )
keyword[if] keyword[not] identifier[text] . identifier[startswith] ( identifier[comment_prefix] ):
identifier[text] = literal[string] . identifier[format] ( identifier[comment_prefix] , identifier[text] )
keyword[if] keyword[not] identifier[text] . identifier[endswith] ( literal[string] ):
identifier[text] = literal[string] . identifier[format] ( identifier[text] , literal[string] )
identifier[comment] . identifier[add_line] ( identifier[text] )
identifier[self] . identifier[_container] . identifier[structure] . identifier[insert] ( identifier[self] . identifier[_idx] , identifier[comment] )
identifier[self] . identifier[_idx] += literal[int]
keyword[return] identifier[self] | def comment(self, text, comment_prefix='#'):
"""Creates a comment block
Args:
text (str): content of comment without #
comment_prefix (str): character indicating start of comment
Returns:
self for chaining
"""
comment = Comment(self._container)
if not text.startswith(comment_prefix):
text = '{} {}'.format(comment_prefix, text) # depends on [control=['if'], data=[]]
if not text.endswith('\n'):
text = '{}{}'.format(text, '\n') # depends on [control=['if'], data=[]]
comment.add_line(text)
self._container.structure.insert(self._idx, comment)
self._idx += 1
return self |
def open(self, name, *mode):
"""
Return an open file object for a file in the reference package.
"""
return self.file_factory(self.file_path(name), *mode) | def function[open, parameter[self, name]]:
constant[
Return an open file object for a file in the reference package.
]
return[call[name[self].file_factory, parameter[call[name[self].file_path, parameter[name[name]]], <ast.Starred object at 0x7da1b1b9eaa0>]]] | keyword[def] identifier[open] ( identifier[self] , identifier[name] ,* identifier[mode] ):
literal[string]
keyword[return] identifier[self] . identifier[file_factory] ( identifier[self] . identifier[file_path] ( identifier[name] ),* identifier[mode] ) | def open(self, name, *mode):
"""
Return an open file object for a file in the reference package.
"""
return self.file_factory(self.file_path(name), *mode) |
def _to_choi(rep, data, input_dim, output_dim):
"""Transform a QuantumChannel to the Choi representation."""
if rep == 'Choi':
return data
if rep == 'Operator':
return _from_operator('Choi', data, input_dim, output_dim)
if rep == 'SuperOp':
return _superop_to_choi(data, input_dim, output_dim)
if rep == 'Kraus':
return _kraus_to_choi(data, input_dim, output_dim)
if rep == 'Chi':
return _chi_to_choi(data, input_dim, output_dim)
if rep == 'PTM':
data = _ptm_to_superop(data, input_dim, output_dim)
return _superop_to_choi(data, input_dim, output_dim)
if rep == 'Stinespring':
return _stinespring_to_choi(data, input_dim, output_dim)
raise QiskitError('Invalid QuantumChannel {}'.format(rep)) | def function[_to_choi, parameter[rep, data, input_dim, output_dim]]:
constant[Transform a QuantumChannel to the Choi representation.]
if compare[name[rep] equal[==] constant[Choi]] begin[:]
return[name[data]]
if compare[name[rep] equal[==] constant[Operator]] begin[:]
return[call[name[_from_operator], parameter[constant[Choi], name[data], name[input_dim], name[output_dim]]]]
if compare[name[rep] equal[==] constant[SuperOp]] begin[:]
return[call[name[_superop_to_choi], parameter[name[data], name[input_dim], name[output_dim]]]]
if compare[name[rep] equal[==] constant[Kraus]] begin[:]
return[call[name[_kraus_to_choi], parameter[name[data], name[input_dim], name[output_dim]]]]
if compare[name[rep] equal[==] constant[Chi]] begin[:]
return[call[name[_chi_to_choi], parameter[name[data], name[input_dim], name[output_dim]]]]
if compare[name[rep] equal[==] constant[PTM]] begin[:]
variable[data] assign[=] call[name[_ptm_to_superop], parameter[name[data], name[input_dim], name[output_dim]]]
return[call[name[_superop_to_choi], parameter[name[data], name[input_dim], name[output_dim]]]]
if compare[name[rep] equal[==] constant[Stinespring]] begin[:]
return[call[name[_stinespring_to_choi], parameter[name[data], name[input_dim], name[output_dim]]]]
<ast.Raise object at 0x7da1b03e0d00> | keyword[def] identifier[_to_choi] ( identifier[rep] , identifier[data] , identifier[input_dim] , identifier[output_dim] ):
literal[string]
keyword[if] identifier[rep] == literal[string] :
keyword[return] identifier[data]
keyword[if] identifier[rep] == literal[string] :
keyword[return] identifier[_from_operator] ( literal[string] , identifier[data] , identifier[input_dim] , identifier[output_dim] )
keyword[if] identifier[rep] == literal[string] :
keyword[return] identifier[_superop_to_choi] ( identifier[data] , identifier[input_dim] , identifier[output_dim] )
keyword[if] identifier[rep] == literal[string] :
keyword[return] identifier[_kraus_to_choi] ( identifier[data] , identifier[input_dim] , identifier[output_dim] )
keyword[if] identifier[rep] == literal[string] :
keyword[return] identifier[_chi_to_choi] ( identifier[data] , identifier[input_dim] , identifier[output_dim] )
keyword[if] identifier[rep] == literal[string] :
identifier[data] = identifier[_ptm_to_superop] ( identifier[data] , identifier[input_dim] , identifier[output_dim] )
keyword[return] identifier[_superop_to_choi] ( identifier[data] , identifier[input_dim] , identifier[output_dim] )
keyword[if] identifier[rep] == literal[string] :
keyword[return] identifier[_stinespring_to_choi] ( identifier[data] , identifier[input_dim] , identifier[output_dim] )
keyword[raise] identifier[QiskitError] ( literal[string] . identifier[format] ( identifier[rep] )) | def _to_choi(rep, data, input_dim, output_dim):
"""Transform a QuantumChannel to the Choi representation."""
if rep == 'Choi':
return data # depends on [control=['if'], data=[]]
if rep == 'Operator':
return _from_operator('Choi', data, input_dim, output_dim) # depends on [control=['if'], data=[]]
if rep == 'SuperOp':
return _superop_to_choi(data, input_dim, output_dim) # depends on [control=['if'], data=[]]
if rep == 'Kraus':
return _kraus_to_choi(data, input_dim, output_dim) # depends on [control=['if'], data=[]]
if rep == 'Chi':
return _chi_to_choi(data, input_dim, output_dim) # depends on [control=['if'], data=[]]
if rep == 'PTM':
data = _ptm_to_superop(data, input_dim, output_dim)
return _superop_to_choi(data, input_dim, output_dim) # depends on [control=['if'], data=[]]
if rep == 'Stinespring':
return _stinespring_to_choi(data, input_dim, output_dim) # depends on [control=['if'], data=[]]
raise QiskitError('Invalid QuantumChannel {}'.format(rep)) |
def cancel_all(self, product_id=None):
""" With best effort, cancel all open orders.
Args:
product_id (Optional[str]): Only cancel orders for this
product_id
Returns:
list: A list of ids of the canceled orders. Example::
[
"144c6f8e-713f-4682-8435-5280fbe8b2b4",
"debe4907-95dc-442f-af3b-cec12f42ebda",
"cf7aceee-7b08-4227-a76c-3858144323ab",
"dfc5ae27-cadb-4c0c-beef-8994936fde8a",
"34fecfbf-de33-4273-b2c6-baf8e8948be4"
]
"""
if product_id is not None:
params = {'product_id': product_id}
else:
params = None
return self._send_message('delete', '/orders', params=params) | def function[cancel_all, parameter[self, product_id]]:
constant[ With best effort, cancel all open orders.
Args:
product_id (Optional[str]): Only cancel orders for this
product_id
Returns:
list: A list of ids of the canceled orders. Example::
[
"144c6f8e-713f-4682-8435-5280fbe8b2b4",
"debe4907-95dc-442f-af3b-cec12f42ebda",
"cf7aceee-7b08-4227-a76c-3858144323ab",
"dfc5ae27-cadb-4c0c-beef-8994936fde8a",
"34fecfbf-de33-4273-b2c6-baf8e8948be4"
]
]
if compare[name[product_id] is_not constant[None]] begin[:]
variable[params] assign[=] dictionary[[<ast.Constant object at 0x7da1b1914e80>], [<ast.Name object at 0x7da1b1916470>]]
return[call[name[self]._send_message, parameter[constant[delete], constant[/orders]]]] | keyword[def] identifier[cancel_all] ( identifier[self] , identifier[product_id] = keyword[None] ):
literal[string]
keyword[if] identifier[product_id] keyword[is] keyword[not] keyword[None] :
identifier[params] ={ literal[string] : identifier[product_id] }
keyword[else] :
identifier[params] = keyword[None]
keyword[return] identifier[self] . identifier[_send_message] ( literal[string] , literal[string] , identifier[params] = identifier[params] ) | def cancel_all(self, product_id=None):
""" With best effort, cancel all open orders.
Args:
product_id (Optional[str]): Only cancel orders for this
product_id
Returns:
list: A list of ids of the canceled orders. Example::
[
"144c6f8e-713f-4682-8435-5280fbe8b2b4",
"debe4907-95dc-442f-af3b-cec12f42ebda",
"cf7aceee-7b08-4227-a76c-3858144323ab",
"dfc5ae27-cadb-4c0c-beef-8994936fde8a",
"34fecfbf-de33-4273-b2c6-baf8e8948be4"
]
"""
if product_id is not None:
params = {'product_id': product_id} # depends on [control=['if'], data=['product_id']]
else:
params = None
return self._send_message('delete', '/orders', params=params) |
def edit( parent, template, actions = None ):
"""
Prompts the user to edit the menu template with the given actions. \
If no actions are supplied, then the actions from the parent will \
be used.
:param parent | <QWidget>
template | <str>
actions | {<str> name: <QAction>, .. } || None
:return (<str> template, <bool> accepted)
"""
# collect the potential actions from the widget
if ( actions is None ):
actions = {}
for action in parent.actions():
key = nativestring(action.objectName())
if ( not key ):
key = nativestring(action.text()).replace('&', '')
if ( key ):
actions[key] = action
if ( not actions ):
return ('', False)
dlg = QDialog(parent)
dlg.setWindowTitle('Edit Menu')
widget = XMenuTemplateWidget(dlg)
widget.setActions(actions)
widget.setMenuTemplate(template)
widget.layout().setContentsMargins(0, 0, 0, 0)
opts = QDialogButtonBox.Save | QDialogButtonBox.Cancel
btns = QDialogButtonBox(opts, Qt.Horizontal, dlg)
btns.accepted.connect( dlg.accept )
btns.rejected.connect( dlg.reject )
layout = QVBoxLayout()
layout.addWidget(widget)
layout.addWidget(btns)
dlg.setLayout(layout)
dlg.adjustSize()
dlg.resize(650, 400)
if ( dlg.exec_() ):
return (widget.menuTemplate(), True)
return ('', False) | def function[edit, parameter[parent, template, actions]]:
constant[
Prompts the user to edit the menu template with the given actions. If no actions are supplied, then the actions from the parent will be used.
:param parent | <QWidget>
template | <str>
actions | {<str> name: <QAction>, .. } || None
:return (<str> template, <bool> accepted)
]
if compare[name[actions] is constant[None]] begin[:]
variable[actions] assign[=] dictionary[[], []]
for taget[name[action]] in starred[call[name[parent].actions, parameter[]]] begin[:]
variable[key] assign[=] call[name[nativestring], parameter[call[name[action].objectName, parameter[]]]]
if <ast.UnaryOp object at 0x7da20cabe560> begin[:]
variable[key] assign[=] call[call[name[nativestring], parameter[call[name[action].text, parameter[]]]].replace, parameter[constant[&], constant[]]]
if name[key] begin[:]
call[name[actions]][name[key]] assign[=] name[action]
if <ast.UnaryOp object at 0x7da20cabf190> begin[:]
return[tuple[[<ast.Constant object at 0x7da20cabd8a0>, <ast.Constant object at 0x7da20cabcb50>]]]
variable[dlg] assign[=] call[name[QDialog], parameter[name[parent]]]
call[name[dlg].setWindowTitle, parameter[constant[Edit Menu]]]
variable[widget] assign[=] call[name[XMenuTemplateWidget], parameter[name[dlg]]]
call[name[widget].setActions, parameter[name[actions]]]
call[name[widget].setMenuTemplate, parameter[name[template]]]
call[call[name[widget].layout, parameter[]].setContentsMargins, parameter[constant[0], constant[0], constant[0], constant[0]]]
variable[opts] assign[=] binary_operation[name[QDialogButtonBox].Save <ast.BitOr object at 0x7da2590d6aa0> name[QDialogButtonBox].Cancel]
variable[btns] assign[=] call[name[QDialogButtonBox], parameter[name[opts], name[Qt].Horizontal, name[dlg]]]
call[name[btns].accepted.connect, parameter[name[dlg].accept]]
call[name[btns].rejected.connect, parameter[name[dlg].reject]]
variable[layout] assign[=] call[name[QVBoxLayout], parameter[]]
call[name[layout].addWidget, parameter[name[widget]]]
call[name[layout].addWidget, parameter[name[btns]]]
call[name[dlg].setLayout, parameter[name[layout]]]
call[name[dlg].adjustSize, parameter[]]
call[name[dlg].resize, parameter[constant[650], constant[400]]]
if call[name[dlg].exec_, parameter[]] begin[:]
return[tuple[[<ast.Call object at 0x7da20c795090>, <ast.Constant object at 0x7da20c796770>]]]
return[tuple[[<ast.Constant object at 0x7da20c794850>, <ast.Constant object at 0x7da20c796170>]]] | keyword[def] identifier[edit] ( identifier[parent] , identifier[template] , identifier[actions] = keyword[None] ):
literal[string]
keyword[if] ( identifier[actions] keyword[is] keyword[None] ):
identifier[actions] ={}
keyword[for] identifier[action] keyword[in] identifier[parent] . identifier[actions] ():
identifier[key] = identifier[nativestring] ( identifier[action] . identifier[objectName] ())
keyword[if] ( keyword[not] identifier[key] ):
identifier[key] = identifier[nativestring] ( identifier[action] . identifier[text] ()). identifier[replace] ( literal[string] , literal[string] )
keyword[if] ( identifier[key] ):
identifier[actions] [ identifier[key] ]= identifier[action]
keyword[if] ( keyword[not] identifier[actions] ):
keyword[return] ( literal[string] , keyword[False] )
identifier[dlg] = identifier[QDialog] ( identifier[parent] )
identifier[dlg] . identifier[setWindowTitle] ( literal[string] )
identifier[widget] = identifier[XMenuTemplateWidget] ( identifier[dlg] )
identifier[widget] . identifier[setActions] ( identifier[actions] )
identifier[widget] . identifier[setMenuTemplate] ( identifier[template] )
identifier[widget] . identifier[layout] (). identifier[setContentsMargins] ( literal[int] , literal[int] , literal[int] , literal[int] )
identifier[opts] = identifier[QDialogButtonBox] . identifier[Save] | identifier[QDialogButtonBox] . identifier[Cancel]
identifier[btns] = identifier[QDialogButtonBox] ( identifier[opts] , identifier[Qt] . identifier[Horizontal] , identifier[dlg] )
identifier[btns] . identifier[accepted] . identifier[connect] ( identifier[dlg] . identifier[accept] )
identifier[btns] . identifier[rejected] . identifier[connect] ( identifier[dlg] . identifier[reject] )
identifier[layout] = identifier[QVBoxLayout] ()
identifier[layout] . identifier[addWidget] ( identifier[widget] )
identifier[layout] . identifier[addWidget] ( identifier[btns] )
identifier[dlg] . identifier[setLayout] ( identifier[layout] )
identifier[dlg] . identifier[adjustSize] ()
identifier[dlg] . identifier[resize] ( literal[int] , literal[int] )
keyword[if] ( identifier[dlg] . identifier[exec_] ()):
keyword[return] ( identifier[widget] . identifier[menuTemplate] (), keyword[True] )
keyword[return] ( literal[string] , keyword[False] ) | def edit(parent, template, actions=None):
"""
Prompts the user to edit the menu template with the given actions. If no actions are supplied, then the actions from the parent will be used.
:param parent | <QWidget>
template | <str>
actions | {<str> name: <QAction>, .. } || None
:return (<str> template, <bool> accepted)
"""
# collect the potential actions from the widget
if actions is None:
actions = {}
for action in parent.actions():
key = nativestring(action.objectName())
if not key:
key = nativestring(action.text()).replace('&', '') # depends on [control=['if'], data=[]]
if key:
actions[key] = action # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['action']] # depends on [control=['if'], data=['actions']]
if not actions:
return ('', False) # depends on [control=['if'], data=[]]
dlg = QDialog(parent)
dlg.setWindowTitle('Edit Menu')
widget = XMenuTemplateWidget(dlg)
widget.setActions(actions)
widget.setMenuTemplate(template)
widget.layout().setContentsMargins(0, 0, 0, 0)
opts = QDialogButtonBox.Save | QDialogButtonBox.Cancel
btns = QDialogButtonBox(opts, Qt.Horizontal, dlg)
btns.accepted.connect(dlg.accept)
btns.rejected.connect(dlg.reject)
layout = QVBoxLayout()
layout.addWidget(widget)
layout.addWidget(btns)
dlg.setLayout(layout)
dlg.adjustSize()
dlg.resize(650, 400)
if dlg.exec_():
return (widget.menuTemplate(), True) # depends on [control=['if'], data=[]]
return ('', False) |
def fixed_point_quantized_affine(inp, n_outmaps,
base_axis=1,
w_init=None, b_init=None,
fix_parameters=False, rng=None, with_bias=True,
quantize_w=True, sign_w=True, n_w=8, delta_w=2**-4, ste_fine_grained_w=True,
quantize_b=True, sign_b=True, n_b=8, delta_b=2**-4, ste_fine_grained_b=True):
"""Fixed-Point Quantized Affine.
Fixed-Point Quantized Affine is the affine function,
except the definition of the inner product is modified.
The input-output relation of this function is as follows:
.. math::
y_j = \sum_{i} Q(w_{ji}) x_i,
where :math:`Q(w_{ji})` is the fixed-point quantization function.
.. note::
1) if you would like to share weights between some layers, please
make sure to share the standard, floating value weights (`weight`)
and not the quantized weights (`quantized weight`)
2) The weights and the quantized weights become synced only after :func:`~nnabla._variable.Variable.forward` is called,
and not after a call to :func:`~nnabla._variable.Variable.backward`.
To access the parameters of the network, remember to call :func:`~nnabla._variable.Variable.forward` once before doing so, otherwise the
float weights and the quantized weights will not be in sync.
3) CPU and GPU implementations now use float value for `quantized weight`,
since this function is only for simulation purposes.
Args:
inp (~nnabla.Variable): Input N-D array with shape (:math:`M_0 \\times \ldots \\times M_{B-1} \\times D_B \\times \ldots \\times D_N`). Dimensions before and after base_axis are flattened as if it is a matrix.
n_outmaps (:obj:`int` or :obj:`tuple` of :obj:`int`): Number of output neurons per data.
base_axis (int): Dimensions up to `base_axis` are treated as the sample dimensions.
w_init (:obj:`nnabla.initializer.BaseInitializer` or :obj:`numpy.ndarray`): Initializer for weight. By default, it is initialized with :obj:`nnabla.initializer.UniformInitializer` within the range determined by :obj:`nnabla.initializer.calc_uniform_lim_glorot`.
b_init (:obj:`nnabla.initializer.BaseInitializer` or :obj:`numpy.ndarray`): Initializer for bias. By default, it is initialized with zeros if `with_bias` is `True`.
fix_parameters (bool): When set to `True`, the weights and biases will not be updated.
rng (numpy.random.RandomState): Random generator for Initializer.
with_bias (bool): Specify whether to include the bias term.
quantize_w (bool): Quantize weights if `True`.
sign_w (bool): Use signed quantization if `True`.
n_w (int): Bit width used for weight.
delta_w (float): Step size for weight.
ste_fine_grained_w (bool): STE is fine-grained if `True`.
quantize_b (bool): Quantize bias if `True`.
n_b (int): Bit width used for bias.
delta_w (float): Step size for bias.
ste_fine_grained_b (bool): STE is fine-grained if `True`.
Returns:
:class:`~nnabla.Variable`: :math:`(B + 1)`-D array. (:math:`M_0 \\times \ldots \\times M_{B-1} \\times L`)
"""
if not hasattr(n_outmaps, '__iter__'):
n_outmaps = [n_outmaps]
n_outmaps = list(n_outmaps)
n_outmap = int(np.prod(n_outmaps))
if w_init is None:
inmaps = np.prod(inp.shape[base_axis:])
w_init = UniformInitializer(
calc_uniform_lim_glorot(inmaps, n_outmap), rng=rng)
if with_bias and b_init is None:
b_init = ConstantInitializer()
# Floating Weight
w = get_parameter_or_create(
"W", [int(np.prod(inp.shape[base_axis:]))] + n_outmaps,
w_init, True, not fix_parameters)
# Quantized Weight
if quantize_w:
w_q = get_parameter_or_create(
"W_q", [int(np.prod(inp.shape[base_axis:]))] + n_outmaps,
w_init, False)
# Link computation graph
real_w_q = F.fixed_point_quantize(w, quantize=quantize_w,
sign=sign_w, n=n_w, delta=delta_w,
ste_fine_grained=ste_fine_grained_w,
outputs=[w_q.data])
real_w_q.persistent = True
else:
real_w_q = w
# Bias
# Floating
b = None
b_q = None
real_b_q = None
if with_bias:
b = get_parameter_or_create(
"b", n_outmaps, b_init, True, not fix_parameters)
if quantize_b:
b_q = get_parameter_or_create(
"b_q", n_outmaps, b_init, False)
# Link computation graph
real_b_q = F.fixed_point_quantize(b, quantize=quantize_b,
sign=sign_b, n=n_b, delta=delta_b,
ste_fine_grained=ste_fine_grained_b,
outputs=[b_q.data])
real_b_q.persistent = True
else:
real_b_q = b
return F.affine(inp, real_w_q, real_b_q, base_axis) | def function[fixed_point_quantized_affine, parameter[inp, n_outmaps, base_axis, w_init, b_init, fix_parameters, rng, with_bias, quantize_w, sign_w, n_w, delta_w, ste_fine_grained_w, quantize_b, sign_b, n_b, delta_b, ste_fine_grained_b]]:
constant[Fixed-Point Quantized Affine.
Fixed-Point Quantized Affine is the affine function,
except the definition of the inner product is modified.
The input-output relation of this function is as follows:
.. math::
y_j = \sum_{i} Q(w_{ji}) x_i,
where :math:`Q(w_{ji})` is the fixed-point quantization function.
.. note::
1) if you would like to share weights between some layers, please
make sure to share the standard, floating value weights (`weight`)
and not the quantized weights (`quantized weight`)
2) The weights and the quantized weights become synced only after :func:`~nnabla._variable.Variable.forward` is called,
and not after a call to :func:`~nnabla._variable.Variable.backward`.
To access the parameters of the network, remember to call :func:`~nnabla._variable.Variable.forward` once before doing so, otherwise the
float weights and the quantized weights will not be in sync.
3) CPU and GPU implementations now use float value for `quantized weight`,
since this function is only for simulation purposes.
Args:
inp (~nnabla.Variable): Input N-D array with shape (:math:`M_0 \times \ldots \times M_{B-1} \times D_B \times \ldots \times D_N`). Dimensions before and after base_axis are flattened as if it is a matrix.
n_outmaps (:obj:`int` or :obj:`tuple` of :obj:`int`): Number of output neurons per data.
base_axis (int): Dimensions up to `base_axis` are treated as the sample dimensions.
w_init (:obj:`nnabla.initializer.BaseInitializer` or :obj:`numpy.ndarray`): Initializer for weight. By default, it is initialized with :obj:`nnabla.initializer.UniformInitializer` within the range determined by :obj:`nnabla.initializer.calc_uniform_lim_glorot`.
b_init (:obj:`nnabla.initializer.BaseInitializer` or :obj:`numpy.ndarray`): Initializer for bias. By default, it is initialized with zeros if `with_bias` is `True`.
fix_parameters (bool): When set to `True`, the weights and biases will not be updated.
rng (numpy.random.RandomState): Random generator for Initializer.
with_bias (bool): Specify whether to include the bias term.
quantize_w (bool): Quantize weights if `True`.
sign_w (bool): Use signed quantization if `True`.
n_w (int): Bit width used for weight.
delta_w (float): Step size for weight.
ste_fine_grained_w (bool): STE is fine-grained if `True`.
quantize_b (bool): Quantize bias if `True`.
n_b (int): Bit width used for bias.
delta_w (float): Step size for bias.
ste_fine_grained_b (bool): STE is fine-grained if `True`.
Returns:
:class:`~nnabla.Variable`: :math:`(B + 1)`-D array. (:math:`M_0 \times \ldots \times M_{B-1} \times L`)
]
if <ast.UnaryOp object at 0x7da1b18dcaf0> begin[:]
variable[n_outmaps] assign[=] list[[<ast.Name object at 0x7da1b18dd840>]]
variable[n_outmaps] assign[=] call[name[list], parameter[name[n_outmaps]]]
variable[n_outmap] assign[=] call[name[int], parameter[call[name[np].prod, parameter[name[n_outmaps]]]]]
if compare[name[w_init] is constant[None]] begin[:]
variable[inmaps] assign[=] call[name[np].prod, parameter[call[name[inp].shape][<ast.Slice object at 0x7da2054a4f40>]]]
variable[w_init] assign[=] call[name[UniformInitializer], parameter[call[name[calc_uniform_lim_glorot], parameter[name[inmaps], name[n_outmap]]]]]
if <ast.BoolOp object at 0x7da2054a6fb0> begin[:]
variable[b_init] assign[=] call[name[ConstantInitializer], parameter[]]
variable[w] assign[=] call[name[get_parameter_or_create], parameter[constant[W], binary_operation[list[[<ast.Call object at 0x7da1b18ddcc0>]] + name[n_outmaps]], name[w_init], constant[True], <ast.UnaryOp object at 0x7da1b18df5b0>]]
if name[quantize_w] begin[:]
variable[w_q] assign[=] call[name[get_parameter_or_create], parameter[constant[W_q], binary_operation[list[[<ast.Call object at 0x7da1b18ddc30>]] + name[n_outmaps]], name[w_init], constant[False]]]
variable[real_w_q] assign[=] call[name[F].fixed_point_quantize, parameter[name[w]]]
name[real_w_q].persistent assign[=] constant[True]
variable[b] assign[=] constant[None]
variable[b_q] assign[=] constant[None]
variable[real_b_q] assign[=] constant[None]
if name[with_bias] begin[:]
variable[b] assign[=] call[name[get_parameter_or_create], parameter[constant[b], name[n_outmaps], name[b_init], constant[True], <ast.UnaryOp object at 0x7da1b1675630>]]
if name[quantize_b] begin[:]
variable[b_q] assign[=] call[name[get_parameter_or_create], parameter[constant[b_q], name[n_outmaps], name[b_init], constant[False]]]
variable[real_b_q] assign[=] call[name[F].fixed_point_quantize, parameter[name[b]]]
name[real_b_q].persistent assign[=] constant[True]
return[call[name[F].affine, parameter[name[inp], name[real_w_q], name[real_b_q], name[base_axis]]]] | keyword[def] identifier[fixed_point_quantized_affine] ( identifier[inp] , identifier[n_outmaps] ,
identifier[base_axis] = literal[int] ,
identifier[w_init] = keyword[None] , identifier[b_init] = keyword[None] ,
identifier[fix_parameters] = keyword[False] , identifier[rng] = keyword[None] , identifier[with_bias] = keyword[True] ,
identifier[quantize_w] = keyword[True] , identifier[sign_w] = keyword[True] , identifier[n_w] = literal[int] , identifier[delta_w] = literal[int] **- literal[int] , identifier[ste_fine_grained_w] = keyword[True] ,
identifier[quantize_b] = keyword[True] , identifier[sign_b] = keyword[True] , identifier[n_b] = literal[int] , identifier[delta_b] = literal[int] **- literal[int] , identifier[ste_fine_grained_b] = keyword[True] ):
literal[string]
keyword[if] keyword[not] identifier[hasattr] ( identifier[n_outmaps] , literal[string] ):
identifier[n_outmaps] =[ identifier[n_outmaps] ]
identifier[n_outmaps] = identifier[list] ( identifier[n_outmaps] )
identifier[n_outmap] = identifier[int] ( identifier[np] . identifier[prod] ( identifier[n_outmaps] ))
keyword[if] identifier[w_init] keyword[is] keyword[None] :
identifier[inmaps] = identifier[np] . identifier[prod] ( identifier[inp] . identifier[shape] [ identifier[base_axis] :])
identifier[w_init] = identifier[UniformInitializer] (
identifier[calc_uniform_lim_glorot] ( identifier[inmaps] , identifier[n_outmap] ), identifier[rng] = identifier[rng] )
keyword[if] identifier[with_bias] keyword[and] identifier[b_init] keyword[is] keyword[None] :
identifier[b_init] = identifier[ConstantInitializer] ()
identifier[w] = identifier[get_parameter_or_create] (
literal[string] ,[ identifier[int] ( identifier[np] . identifier[prod] ( identifier[inp] . identifier[shape] [ identifier[base_axis] :]))]+ identifier[n_outmaps] ,
identifier[w_init] , keyword[True] , keyword[not] identifier[fix_parameters] )
keyword[if] identifier[quantize_w] :
identifier[w_q] = identifier[get_parameter_or_create] (
literal[string] ,[ identifier[int] ( identifier[np] . identifier[prod] ( identifier[inp] . identifier[shape] [ identifier[base_axis] :]))]+ identifier[n_outmaps] ,
identifier[w_init] , keyword[False] )
identifier[real_w_q] = identifier[F] . identifier[fixed_point_quantize] ( identifier[w] , identifier[quantize] = identifier[quantize_w] ,
identifier[sign] = identifier[sign_w] , identifier[n] = identifier[n_w] , identifier[delta] = identifier[delta_w] ,
identifier[ste_fine_grained] = identifier[ste_fine_grained_w] ,
identifier[outputs] =[ identifier[w_q] . identifier[data] ])
identifier[real_w_q] . identifier[persistent] = keyword[True]
keyword[else] :
identifier[real_w_q] = identifier[w]
identifier[b] = keyword[None]
identifier[b_q] = keyword[None]
identifier[real_b_q] = keyword[None]
keyword[if] identifier[with_bias] :
identifier[b] = identifier[get_parameter_or_create] (
literal[string] , identifier[n_outmaps] , identifier[b_init] , keyword[True] , keyword[not] identifier[fix_parameters] )
keyword[if] identifier[quantize_b] :
identifier[b_q] = identifier[get_parameter_or_create] (
literal[string] , identifier[n_outmaps] , identifier[b_init] , keyword[False] )
identifier[real_b_q] = identifier[F] . identifier[fixed_point_quantize] ( identifier[b] , identifier[quantize] = identifier[quantize_b] ,
identifier[sign] = identifier[sign_b] , identifier[n] = identifier[n_b] , identifier[delta] = identifier[delta_b] ,
identifier[ste_fine_grained] = identifier[ste_fine_grained_b] ,
identifier[outputs] =[ identifier[b_q] . identifier[data] ])
identifier[real_b_q] . identifier[persistent] = keyword[True]
keyword[else] :
identifier[real_b_q] = identifier[b]
keyword[return] identifier[F] . identifier[affine] ( identifier[inp] , identifier[real_w_q] , identifier[real_b_q] , identifier[base_axis] ) | def fixed_point_quantized_affine(inp, n_outmaps, base_axis=1, w_init=None, b_init=None, fix_parameters=False, rng=None, with_bias=True, quantize_w=True, sign_w=True, n_w=8, delta_w=2 ** (-4), ste_fine_grained_w=True, quantize_b=True, sign_b=True, n_b=8, delta_b=2 ** (-4), ste_fine_grained_b=True):
"""Fixed-Point Quantized Affine.
Fixed-Point Quantized Affine is the affine function,
except the definition of the inner product is modified.
The input-output relation of this function is as follows:
.. math::
y_j = \\sum_{i} Q(w_{ji}) x_i,
where :math:`Q(w_{ji})` is the fixed-point quantization function.
.. note::
1) if you would like to share weights between some layers, please
make sure to share the standard, floating value weights (`weight`)
and not the quantized weights (`quantized weight`)
2) The weights and the quantized weights become synced only after :func:`~nnabla._variable.Variable.forward` is called,
and not after a call to :func:`~nnabla._variable.Variable.backward`.
To access the parameters of the network, remember to call :func:`~nnabla._variable.Variable.forward` once before doing so, otherwise the
float weights and the quantized weights will not be in sync.
3) CPU and GPU implementations now use float value for `quantized weight`,
since this function is only for simulation purposes.
Args:
inp (~nnabla.Variable): Input N-D array with shape (:math:`M_0 \\times \\ldots \\times M_{B-1} \\times D_B \\times \\ldots \\times D_N`). Dimensions before and after base_axis are flattened as if it is a matrix.
n_outmaps (:obj:`int` or :obj:`tuple` of :obj:`int`): Number of output neurons per data.
base_axis (int): Dimensions up to `base_axis` are treated as the sample dimensions.
w_init (:obj:`nnabla.initializer.BaseInitializer` or :obj:`numpy.ndarray`): Initializer for weight. By default, it is initialized with :obj:`nnabla.initializer.UniformInitializer` within the range determined by :obj:`nnabla.initializer.calc_uniform_lim_glorot`.
b_init (:obj:`nnabla.initializer.BaseInitializer` or :obj:`numpy.ndarray`): Initializer for bias. By default, it is initialized with zeros if `with_bias` is `True`.
fix_parameters (bool): When set to `True`, the weights and biases will not be updated.
rng (numpy.random.RandomState): Random generator for Initializer.
with_bias (bool): Specify whether to include the bias term.
quantize_w (bool): Quantize weights if `True`.
sign_w (bool): Use signed quantization if `True`.
n_w (int): Bit width used for weight.
delta_w (float): Step size for weight.
ste_fine_grained_w (bool): STE is fine-grained if `True`.
quantize_b (bool): Quantize bias if `True`.
n_b (int): Bit width used for bias.
delta_w (float): Step size for bias.
ste_fine_grained_b (bool): STE is fine-grained if `True`.
Returns:
:class:`~nnabla.Variable`: :math:`(B + 1)`-D array. (:math:`M_0 \\times \\ldots \\times M_{B-1} \\times L`)
"""
if not hasattr(n_outmaps, '__iter__'):
n_outmaps = [n_outmaps] # depends on [control=['if'], data=[]]
n_outmaps = list(n_outmaps)
n_outmap = int(np.prod(n_outmaps))
if w_init is None:
inmaps = np.prod(inp.shape[base_axis:])
w_init = UniformInitializer(calc_uniform_lim_glorot(inmaps, n_outmap), rng=rng) # depends on [control=['if'], data=['w_init']]
if with_bias and b_init is None:
b_init = ConstantInitializer() # depends on [control=['if'], data=[]]
# Floating Weight
w = get_parameter_or_create('W', [int(np.prod(inp.shape[base_axis:]))] + n_outmaps, w_init, True, not fix_parameters)
# Quantized Weight
if quantize_w:
w_q = get_parameter_or_create('W_q', [int(np.prod(inp.shape[base_axis:]))] + n_outmaps, w_init, False)
# Link computation graph
real_w_q = F.fixed_point_quantize(w, quantize=quantize_w, sign=sign_w, n=n_w, delta=delta_w, ste_fine_grained=ste_fine_grained_w, outputs=[w_q.data])
real_w_q.persistent = True # depends on [control=['if'], data=[]]
else:
real_w_q = w
# Bias
# Floating
b = None
b_q = None
real_b_q = None
if with_bias:
b = get_parameter_or_create('b', n_outmaps, b_init, True, not fix_parameters)
if quantize_b:
b_q = get_parameter_or_create('b_q', n_outmaps, b_init, False)
# Link computation graph
real_b_q = F.fixed_point_quantize(b, quantize=quantize_b, sign=sign_b, n=n_b, delta=delta_b, ste_fine_grained=ste_fine_grained_b, outputs=[b_q.data])
real_b_q.persistent = True # depends on [control=['if'], data=[]]
else:
real_b_q = b # depends on [control=['if'], data=[]]
return F.affine(inp, real_w_q, real_b_q, base_axis) |
def add_size(self, n):
"""
Add an integer to the stream.
:param int n: integer to add
"""
self.packet.write(struct.pack('>I', n))
return self | def function[add_size, parameter[self, n]]:
constant[
Add an integer to the stream.
:param int n: integer to add
]
call[name[self].packet.write, parameter[call[name[struct].pack, parameter[constant[>I], name[n]]]]]
return[name[self]] | keyword[def] identifier[add_size] ( identifier[self] , identifier[n] ):
literal[string]
identifier[self] . identifier[packet] . identifier[write] ( identifier[struct] . identifier[pack] ( literal[string] , identifier[n] ))
keyword[return] identifier[self] | def add_size(self, n):
"""
Add an integer to the stream.
:param int n: integer to add
"""
self.packet.write(struct.pack('>I', n))
return self |
def _get_revision(self):
"""Validate and return the revision to use for current command
"""
assert self._revisions, "no migration revision exist"
revision = self._rev or self._revisions[-1]
# revision count must be less or equal since revisions are ordered
assert revision in self._revisions, "invalid revision specified"
return revision | def function[_get_revision, parameter[self]]:
constant[Validate and return the revision to use for current command
]
assert[name[self]._revisions]
variable[revision] assign[=] <ast.BoolOp object at 0x7da20c795bd0>
assert[compare[name[revision] in name[self]._revisions]]
return[name[revision]] | keyword[def] identifier[_get_revision] ( identifier[self] ):
literal[string]
keyword[assert] identifier[self] . identifier[_revisions] , literal[string]
identifier[revision] = identifier[self] . identifier[_rev] keyword[or] identifier[self] . identifier[_revisions] [- literal[int] ]
keyword[assert] identifier[revision] keyword[in] identifier[self] . identifier[_revisions] , literal[string]
keyword[return] identifier[revision] | def _get_revision(self):
"""Validate and return the revision to use for current command
"""
assert self._revisions, 'no migration revision exist'
revision = self._rev or self._revisions[-1]
# revision count must be less or equal since revisions are ordered
assert revision in self._revisions, 'invalid revision specified'
return revision |
def snr(test, ref, mask=None):
"""Signal-to-Noise Ratio (SNR)
Calculate the SNR between a test image and a reference image.
Parameters
----------
ref: np.ndarray
the reference image
test: np.ndarray
the tested image
mask: np.ndarray, optional
the mask for the ROI
Notes
-----
Compute the metric only on magnetude.
Returns
-------
snr: float, the snr
"""
test, ref, mask = _preprocess_input(test, ref, mask)
if mask is not None:
test = mask * test
num = np.mean(np.square(test))
deno = mse(test, ref)
return 10.0 * np.log10(num / deno) | def function[snr, parameter[test, ref, mask]]:
constant[Signal-to-Noise Ratio (SNR)
Calculate the SNR between a test image and a reference image.
Parameters
----------
ref: np.ndarray
the reference image
test: np.ndarray
the tested image
mask: np.ndarray, optional
the mask for the ROI
Notes
-----
Compute the metric only on magnetude.
Returns
-------
snr: float, the snr
]
<ast.Tuple object at 0x7da1b0ebda20> assign[=] call[name[_preprocess_input], parameter[name[test], name[ref], name[mask]]]
if compare[name[mask] is_not constant[None]] begin[:]
variable[test] assign[=] binary_operation[name[mask] * name[test]]
variable[num] assign[=] call[name[np].mean, parameter[call[name[np].square, parameter[name[test]]]]]
variable[deno] assign[=] call[name[mse], parameter[name[test], name[ref]]]
return[binary_operation[constant[10.0] * call[name[np].log10, parameter[binary_operation[name[num] / name[deno]]]]]] | keyword[def] identifier[snr] ( identifier[test] , identifier[ref] , identifier[mask] = keyword[None] ):
literal[string]
identifier[test] , identifier[ref] , identifier[mask] = identifier[_preprocess_input] ( identifier[test] , identifier[ref] , identifier[mask] )
keyword[if] identifier[mask] keyword[is] keyword[not] keyword[None] :
identifier[test] = identifier[mask] * identifier[test]
identifier[num] = identifier[np] . identifier[mean] ( identifier[np] . identifier[square] ( identifier[test] ))
identifier[deno] = identifier[mse] ( identifier[test] , identifier[ref] )
keyword[return] literal[int] * identifier[np] . identifier[log10] ( identifier[num] / identifier[deno] ) | def snr(test, ref, mask=None):
"""Signal-to-Noise Ratio (SNR)
Calculate the SNR between a test image and a reference image.
Parameters
----------
ref: np.ndarray
the reference image
test: np.ndarray
the tested image
mask: np.ndarray, optional
the mask for the ROI
Notes
-----
Compute the metric only on magnetude.
Returns
-------
snr: float, the snr
"""
(test, ref, mask) = _preprocess_input(test, ref, mask)
if mask is not None:
test = mask * test # depends on [control=['if'], data=['mask']]
num = np.mean(np.square(test))
deno = mse(test, ref)
return 10.0 * np.log10(num / deno) |
def casperjs_command():
"""
Determine which capture engine is specified. Possible options:
- casperjs
- phantomjs
Based on this value, locate the binary of the capture engine.
If setting <engine>_CMD is not defined, then
look up for ``<engine>`` in shell PATH and
build the whole capture command.
"""
method = app_settings['CAPTURE_METHOD']
cmd = app_settings['%s_CMD' % method.upper()]
sys_path = os.getenv('PATH', '').split(':')
if cmd is None:
for binpath in sys_path:
cmd = os.path.join(binpath, method)
if os.path.exists(cmd):
break
cmd = [cmd]
if app_settings['TEST_CAPTURE_SCRIPT']:
try:
proc = subprocess.Popen(cmd + ['--version'], **casperjs_command_kwargs())
proc.communicate()
status = proc.returncode
assert status == 0
except OSError:
msg = "%s binary cannot be found in PATH (%s)" % (method, sys_path)
raise ImproperlyConfigured(msg)
except AssertionError:
msg = "%s returned status code %s" % (method, status)
raise ImproperlyConfigured(msg)
# Add extra CLI arguments
cmd += app_settings['CLI_ARGS']
# Concatenate with capture script
app_path = os.path.dirname(__file__)
capture = app_settings['CAPTURE_SCRIPT']
if capture.startswith('./'):
capture = os.path.join(app_path, 'scripts', capture)
assert os.path.exists(capture), 'Cannot find %s' % capture
return cmd + [capture] | def function[casperjs_command, parameter[]]:
constant[
Determine which capture engine is specified. Possible options:
- casperjs
- phantomjs
Based on this value, locate the binary of the capture engine.
If setting <engine>_CMD is not defined, then
look up for ``<engine>`` in shell PATH and
build the whole capture command.
]
variable[method] assign[=] call[name[app_settings]][constant[CAPTURE_METHOD]]
variable[cmd] assign[=] call[name[app_settings]][binary_operation[constant[%s_CMD] <ast.Mod object at 0x7da2590d6920> call[name[method].upper, parameter[]]]]
variable[sys_path] assign[=] call[call[name[os].getenv, parameter[constant[PATH], constant[]]].split, parameter[constant[:]]]
if compare[name[cmd] is constant[None]] begin[:]
for taget[name[binpath]] in starred[name[sys_path]] begin[:]
variable[cmd] assign[=] call[name[os].path.join, parameter[name[binpath], name[method]]]
if call[name[os].path.exists, parameter[name[cmd]]] begin[:]
break
variable[cmd] assign[=] list[[<ast.Name object at 0x7da1b27ed540>]]
if call[name[app_settings]][constant[TEST_CAPTURE_SCRIPT]] begin[:]
<ast.Try object at 0x7da1b27ed150>
<ast.AugAssign object at 0x7da1b27eece0>
variable[app_path] assign[=] call[name[os].path.dirname, parameter[name[__file__]]]
variable[capture] assign[=] call[name[app_settings]][constant[CAPTURE_SCRIPT]]
if call[name[capture].startswith, parameter[constant[./]]] begin[:]
variable[capture] assign[=] call[name[os].path.join, parameter[name[app_path], constant[scripts], name[capture]]]
assert[call[name[os].path.exists, parameter[name[capture]]]]
return[binary_operation[name[cmd] + list[[<ast.Name object at 0x7da1b27ed690>]]]] | keyword[def] identifier[casperjs_command] ():
literal[string]
identifier[method] = identifier[app_settings] [ literal[string] ]
identifier[cmd] = identifier[app_settings] [ literal[string] % identifier[method] . identifier[upper] ()]
identifier[sys_path] = identifier[os] . identifier[getenv] ( literal[string] , literal[string] ). identifier[split] ( literal[string] )
keyword[if] identifier[cmd] keyword[is] keyword[None] :
keyword[for] identifier[binpath] keyword[in] identifier[sys_path] :
identifier[cmd] = identifier[os] . identifier[path] . identifier[join] ( identifier[binpath] , identifier[method] )
keyword[if] identifier[os] . identifier[path] . identifier[exists] ( identifier[cmd] ):
keyword[break]
identifier[cmd] =[ identifier[cmd] ]
keyword[if] identifier[app_settings] [ literal[string] ]:
keyword[try] :
identifier[proc] = identifier[subprocess] . identifier[Popen] ( identifier[cmd] +[ literal[string] ],** identifier[casperjs_command_kwargs] ())
identifier[proc] . identifier[communicate] ()
identifier[status] = identifier[proc] . identifier[returncode]
keyword[assert] identifier[status] == literal[int]
keyword[except] identifier[OSError] :
identifier[msg] = literal[string] %( identifier[method] , identifier[sys_path] )
keyword[raise] identifier[ImproperlyConfigured] ( identifier[msg] )
keyword[except] identifier[AssertionError] :
identifier[msg] = literal[string] %( identifier[method] , identifier[status] )
keyword[raise] identifier[ImproperlyConfigured] ( identifier[msg] )
identifier[cmd] += identifier[app_settings] [ literal[string] ]
identifier[app_path] = identifier[os] . identifier[path] . identifier[dirname] ( identifier[__file__] )
identifier[capture] = identifier[app_settings] [ literal[string] ]
keyword[if] identifier[capture] . identifier[startswith] ( literal[string] ):
identifier[capture] = identifier[os] . identifier[path] . identifier[join] ( identifier[app_path] , literal[string] , identifier[capture] )
keyword[assert] identifier[os] . identifier[path] . identifier[exists] ( identifier[capture] ), literal[string] % identifier[capture]
keyword[return] identifier[cmd] +[ identifier[capture] ] | def casperjs_command():
"""
Determine which capture engine is specified. Possible options:
- casperjs
- phantomjs
Based on this value, locate the binary of the capture engine.
If setting <engine>_CMD is not defined, then
look up for ``<engine>`` in shell PATH and
build the whole capture command.
"""
method = app_settings['CAPTURE_METHOD']
cmd = app_settings['%s_CMD' % method.upper()]
sys_path = os.getenv('PATH', '').split(':')
if cmd is None:
for binpath in sys_path:
cmd = os.path.join(binpath, method)
if os.path.exists(cmd):
break # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['binpath']] # depends on [control=['if'], data=['cmd']]
cmd = [cmd]
if app_settings['TEST_CAPTURE_SCRIPT']:
try:
proc = subprocess.Popen(cmd + ['--version'], **casperjs_command_kwargs())
proc.communicate()
status = proc.returncode
assert status == 0 # depends on [control=['try'], data=[]]
except OSError:
msg = '%s binary cannot be found in PATH (%s)' % (method, sys_path)
raise ImproperlyConfigured(msg) # depends on [control=['except'], data=[]]
except AssertionError:
msg = '%s returned status code %s' % (method, status)
raise ImproperlyConfigured(msg) # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]]
# Add extra CLI arguments
cmd += app_settings['CLI_ARGS']
# Concatenate with capture script
app_path = os.path.dirname(__file__)
capture = app_settings['CAPTURE_SCRIPT']
if capture.startswith('./'):
capture = os.path.join(app_path, 'scripts', capture) # depends on [control=['if'], data=[]]
assert os.path.exists(capture), 'Cannot find %s' % capture
return cmd + [capture] |
def JSONObjectWriter(registry, fo, host=None):
"""Sink; writes object as JSON to a file.
:param registry: serialisation registry.
:param fo: output file.
:param host: name of the host that encodes the JSON. This is relevant if
the encoded data refers to external files for mass storage.
In normal use, it may occur that the pipe to which we write is broken,
for instance when the remote process shuts down. In that case, this
coroutine exits.
"""
while True:
obj = yield
try:
print(registry.to_json(obj, host=host), file=fo, flush=True)
except BrokenPipeError:
return | def function[JSONObjectWriter, parameter[registry, fo, host]]:
constant[Sink; writes object as JSON to a file.
:param registry: serialisation registry.
:param fo: output file.
:param host: name of the host that encodes the JSON. This is relevant if
the encoded data refers to external files for mass storage.
In normal use, it may occur that the pipe to which we write is broken,
for instance when the remote process shuts down. In that case, this
coroutine exits.
]
while constant[True] begin[:]
variable[obj] assign[=] <ast.Yield object at 0x7da2043466e0>
<ast.Try object at 0x7da2043452a0> | keyword[def] identifier[JSONObjectWriter] ( identifier[registry] , identifier[fo] , identifier[host] = keyword[None] ):
literal[string]
keyword[while] keyword[True] :
identifier[obj] = keyword[yield]
keyword[try] :
identifier[print] ( identifier[registry] . identifier[to_json] ( identifier[obj] , identifier[host] = identifier[host] ), identifier[file] = identifier[fo] , identifier[flush] = keyword[True] )
keyword[except] identifier[BrokenPipeError] :
keyword[return] | def JSONObjectWriter(registry, fo, host=None):
"""Sink; writes object as JSON to a file.
:param registry: serialisation registry.
:param fo: output file.
:param host: name of the host that encodes the JSON. This is relevant if
the encoded data refers to external files for mass storage.
In normal use, it may occur that the pipe to which we write is broken,
for instance when the remote process shuts down. In that case, this
coroutine exits.
"""
while True:
obj = (yield)
try:
print(registry.to_json(obj, host=host), file=fo, flush=True) # depends on [control=['try'], data=[]]
except BrokenPipeError:
return # depends on [control=['except'], data=[]] # depends on [control=['while'], data=[]] |
def graph_route(self, request):
"""Given a single run, return the graph definition in protobuf format."""
run = request.args.get('run')
tag = request.args.get('tag', '')
conceptual_arg = request.args.get('conceptual', False)
is_conceptual = True if conceptual_arg == 'true' else False
if run is None:
return http_util.Respond(
request, 'query parameter "run" is required', 'text/plain', 400)
limit_attr_size = request.args.get('limit_attr_size', None)
if limit_attr_size is not None:
try:
limit_attr_size = int(limit_attr_size)
except ValueError:
return http_util.Respond(
request, 'query parameter `limit_attr_size` must be an integer',
'text/plain', 400)
large_attrs_key = request.args.get('large_attrs_key', None)
try:
result = self.graph_impl(run, tag, is_conceptual, limit_attr_size, large_attrs_key)
except ValueError as e:
return http_util.Respond(request, e.message, 'text/plain', code=400)
else:
if result is not None:
(body, mime_type) = result # pylint: disable=unpacking-non-sequence
return http_util.Respond(request, body, mime_type)
else:
return http_util.Respond(request, '404 Not Found', 'text/plain',
code=404) | def function[graph_route, parameter[self, request]]:
constant[Given a single run, return the graph definition in protobuf format.]
variable[run] assign[=] call[name[request].args.get, parameter[constant[run]]]
variable[tag] assign[=] call[name[request].args.get, parameter[constant[tag], constant[]]]
variable[conceptual_arg] assign[=] call[name[request].args.get, parameter[constant[conceptual], constant[False]]]
variable[is_conceptual] assign[=] <ast.IfExp object at 0x7da18dc06230>
if compare[name[run] is constant[None]] begin[:]
return[call[name[http_util].Respond, parameter[name[request], constant[query parameter "run" is required], constant[text/plain], constant[400]]]]
variable[limit_attr_size] assign[=] call[name[request].args.get, parameter[constant[limit_attr_size], constant[None]]]
if compare[name[limit_attr_size] is_not constant[None]] begin[:]
<ast.Try object at 0x7da18dc04c70>
variable[large_attrs_key] assign[=] call[name[request].args.get, parameter[constant[large_attrs_key], constant[None]]]
<ast.Try object at 0x7da18dc07790> | keyword[def] identifier[graph_route] ( identifier[self] , identifier[request] ):
literal[string]
identifier[run] = identifier[request] . identifier[args] . identifier[get] ( literal[string] )
identifier[tag] = identifier[request] . identifier[args] . identifier[get] ( literal[string] , literal[string] )
identifier[conceptual_arg] = identifier[request] . identifier[args] . identifier[get] ( literal[string] , keyword[False] )
identifier[is_conceptual] = keyword[True] keyword[if] identifier[conceptual_arg] == literal[string] keyword[else] keyword[False]
keyword[if] identifier[run] keyword[is] keyword[None] :
keyword[return] identifier[http_util] . identifier[Respond] (
identifier[request] , literal[string] , literal[string] , literal[int] )
identifier[limit_attr_size] = identifier[request] . identifier[args] . identifier[get] ( literal[string] , keyword[None] )
keyword[if] identifier[limit_attr_size] keyword[is] keyword[not] keyword[None] :
keyword[try] :
identifier[limit_attr_size] = identifier[int] ( identifier[limit_attr_size] )
keyword[except] identifier[ValueError] :
keyword[return] identifier[http_util] . identifier[Respond] (
identifier[request] , literal[string] ,
literal[string] , literal[int] )
identifier[large_attrs_key] = identifier[request] . identifier[args] . identifier[get] ( literal[string] , keyword[None] )
keyword[try] :
identifier[result] = identifier[self] . identifier[graph_impl] ( identifier[run] , identifier[tag] , identifier[is_conceptual] , identifier[limit_attr_size] , identifier[large_attrs_key] )
keyword[except] identifier[ValueError] keyword[as] identifier[e] :
keyword[return] identifier[http_util] . identifier[Respond] ( identifier[request] , identifier[e] . identifier[message] , literal[string] , identifier[code] = literal[int] )
keyword[else] :
keyword[if] identifier[result] keyword[is] keyword[not] keyword[None] :
( identifier[body] , identifier[mime_type] )= identifier[result]
keyword[return] identifier[http_util] . identifier[Respond] ( identifier[request] , identifier[body] , identifier[mime_type] )
keyword[else] :
keyword[return] identifier[http_util] . identifier[Respond] ( identifier[request] , literal[string] , literal[string] ,
identifier[code] = literal[int] ) | def graph_route(self, request):
"""Given a single run, return the graph definition in protobuf format."""
run = request.args.get('run')
tag = request.args.get('tag', '')
conceptual_arg = request.args.get('conceptual', False)
is_conceptual = True if conceptual_arg == 'true' else False
if run is None:
return http_util.Respond(request, 'query parameter "run" is required', 'text/plain', 400) # depends on [control=['if'], data=[]]
limit_attr_size = request.args.get('limit_attr_size', None)
if limit_attr_size is not None:
try:
limit_attr_size = int(limit_attr_size) # depends on [control=['try'], data=[]]
except ValueError:
return http_util.Respond(request, 'query parameter `limit_attr_size` must be an integer', 'text/plain', 400) # depends on [control=['except'], data=[]] # depends on [control=['if'], data=['limit_attr_size']]
large_attrs_key = request.args.get('large_attrs_key', None)
try:
result = self.graph_impl(run, tag, is_conceptual, limit_attr_size, large_attrs_key) # depends on [control=['try'], data=[]]
except ValueError as e:
return http_util.Respond(request, e.message, 'text/plain', code=400) # depends on [control=['except'], data=['e']]
else:
if result is not None:
(body, mime_type) = result # pylint: disable=unpacking-non-sequence
return http_util.Respond(request, body, mime_type) # depends on [control=['if'], data=['result']]
else:
return http_util.Respond(request, '404 Not Found', 'text/plain', code=404) |
def _set_policy_map(self, v, load=False):
"""
Setter method for policy_map, mapped from YANG variable /policy_map (list)
If this variable is read-only (config: false) in the
source YANG file, then _set_policy_map is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_policy_map() directly.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=YANGListType("po_name",policy_map.policy_map, yang_name="policy-map", rest_name="policy-map", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='po-name', extensions={u'tailf-common': {u'info': u'Policy Map Configuration', u'callpoint': u'policer-policy-map', u'sort-priority': u'69', u'cli-suppress-list-no': None, u'cli-full-command': None, u'cli-full-no': None, u'cli-mode-name': u'config-policymap'}}), is_container='list', yang_name="policy-map", rest_name="policy-map", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Policy Map Configuration', u'callpoint': u'policer-policy-map', u'sort-priority': u'69', u'cli-suppress-list-no': None, u'cli-full-command': None, u'cli-full-no': None, u'cli-mode-name': u'config-policymap'}}, namespace='urn:brocade.com:mgmt:brocade-policer', defining_module='brocade-policer', yang_type='list', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """policy_map must be of a type compatible with list""",
'defined-type': "list",
'generated-type': """YANGDynClass(base=YANGListType("po_name",policy_map.policy_map, yang_name="policy-map", rest_name="policy-map", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='po-name', extensions={u'tailf-common': {u'info': u'Policy Map Configuration', u'callpoint': u'policer-policy-map', u'sort-priority': u'69', u'cli-suppress-list-no': None, u'cli-full-command': None, u'cli-full-no': None, u'cli-mode-name': u'config-policymap'}}), is_container='list', yang_name="policy-map", rest_name="policy-map", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Policy Map Configuration', u'callpoint': u'policer-policy-map', u'sort-priority': u'69', u'cli-suppress-list-no': None, u'cli-full-command': None, u'cli-full-no': None, u'cli-mode-name': u'config-policymap'}}, namespace='urn:brocade.com:mgmt:brocade-policer', defining_module='brocade-policer', yang_type='list', is_config=True)""",
})
self.__policy_map = t
if hasattr(self, '_set'):
self._set() | def function[_set_policy_map, parameter[self, v, load]]:
constant[
Setter method for policy_map, mapped from YANG variable /policy_map (list)
If this variable is read-only (config: false) in the
source YANG file, then _set_policy_map is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_policy_map() directly.
]
if call[name[hasattr], parameter[name[v], constant[_utype]]] begin[:]
variable[v] assign[=] call[name[v]._utype, parameter[name[v]]]
<ast.Try object at 0x7da20c76dea0>
name[self].__policy_map assign[=] name[t]
if call[name[hasattr], parameter[name[self], constant[_set]]] begin[:]
call[name[self]._set, parameter[]] | keyword[def] identifier[_set_policy_map] ( identifier[self] , identifier[v] , identifier[load] = keyword[False] ):
literal[string]
keyword[if] identifier[hasattr] ( identifier[v] , literal[string] ):
identifier[v] = identifier[v] . identifier[_utype] ( identifier[v] )
keyword[try] :
identifier[t] = identifier[YANGDynClass] ( identifier[v] , identifier[base] = identifier[YANGListType] ( literal[string] , identifier[policy_map] . identifier[policy_map] , identifier[yang_name] = literal[string] , identifier[rest_name] = literal[string] , identifier[parent] = identifier[self] , identifier[is_container] = literal[string] , identifier[user_ordered] = keyword[False] , identifier[path_helper] = identifier[self] . identifier[_path_helper] , identifier[yang_keys] = literal[string] , identifier[extensions] ={ literal[string] :{ literal[string] : literal[string] , literal[string] : literal[string] , literal[string] : literal[string] , literal[string] : keyword[None] , literal[string] : keyword[None] , literal[string] : keyword[None] , literal[string] : literal[string] }}), identifier[is_container] = literal[string] , identifier[yang_name] = literal[string] , identifier[rest_name] = literal[string] , identifier[parent] = identifier[self] , identifier[path_helper] = identifier[self] . identifier[_path_helper] , identifier[extmethods] = identifier[self] . identifier[_extmethods] , identifier[register_paths] = keyword[True] , identifier[extensions] ={ literal[string] :{ literal[string] : literal[string] , literal[string] : literal[string] , literal[string] : literal[string] , literal[string] : keyword[None] , literal[string] : keyword[None] , literal[string] : keyword[None] , literal[string] : literal[string] }}, identifier[namespace] = literal[string] , identifier[defining_module] = literal[string] , identifier[yang_type] = literal[string] , identifier[is_config] = keyword[True] )
keyword[except] ( identifier[TypeError] , identifier[ValueError] ):
keyword[raise] identifier[ValueError] ({
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[string] ,
})
identifier[self] . identifier[__policy_map] = identifier[t]
keyword[if] identifier[hasattr] ( identifier[self] , literal[string] ):
identifier[self] . identifier[_set] () | def _set_policy_map(self, v, load=False):
"""
Setter method for policy_map, mapped from YANG variable /policy_map (list)
If this variable is read-only (config: false) in the
source YANG file, then _set_policy_map is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_policy_map() directly.
"""
if hasattr(v, '_utype'):
v = v._utype(v) # depends on [control=['if'], data=[]]
try:
t = YANGDynClass(v, base=YANGListType('po_name', policy_map.policy_map, yang_name='policy-map', rest_name='policy-map', parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='po-name', extensions={u'tailf-common': {u'info': u'Policy Map Configuration', u'callpoint': u'policer-policy-map', u'sort-priority': u'69', u'cli-suppress-list-no': None, u'cli-full-command': None, u'cli-full-no': None, u'cli-mode-name': u'config-policymap'}}), is_container='list', yang_name='policy-map', rest_name='policy-map', parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Policy Map Configuration', u'callpoint': u'policer-policy-map', u'sort-priority': u'69', u'cli-suppress-list-no': None, u'cli-full-command': None, u'cli-full-no': None, u'cli-mode-name': u'config-policymap'}}, namespace='urn:brocade.com:mgmt:brocade-policer', defining_module='brocade-policer', yang_type='list', is_config=True) # depends on [control=['try'], data=[]]
except (TypeError, ValueError):
raise ValueError({'error-string': 'policy_map must be of a type compatible with list', 'defined-type': 'list', 'generated-type': 'YANGDynClass(base=YANGListType("po_name",policy_map.policy_map, yang_name="policy-map", rest_name="policy-map", parent=self, is_container=\'list\', user_ordered=False, path_helper=self._path_helper, yang_keys=\'po-name\', extensions={u\'tailf-common\': {u\'info\': u\'Policy Map Configuration\', u\'callpoint\': u\'policer-policy-map\', u\'sort-priority\': u\'69\', u\'cli-suppress-list-no\': None, u\'cli-full-command\': None, u\'cli-full-no\': None, u\'cli-mode-name\': u\'config-policymap\'}}), is_container=\'list\', yang_name="policy-map", rest_name="policy-map", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u\'tailf-common\': {u\'info\': u\'Policy Map Configuration\', u\'callpoint\': u\'policer-policy-map\', u\'sort-priority\': u\'69\', u\'cli-suppress-list-no\': None, u\'cli-full-command\': None, u\'cli-full-no\': None, u\'cli-mode-name\': u\'config-policymap\'}}, namespace=\'urn:brocade.com:mgmt:brocade-policer\', defining_module=\'brocade-policer\', yang_type=\'list\', is_config=True)'}) # depends on [control=['except'], data=[]]
self.__policy_map = t
if hasattr(self, '_set'):
self._set() # depends on [control=['if'], data=[]] |
def template(self, key):
"""
Returns the template associated with this scaffold.
:param key | <str>
:return <projex.scaffold.Template> || None
"""
try:
return self._templates[key]
except KeyError:
return Template.Plugins[key] | def function[template, parameter[self, key]]:
constant[
Returns the template associated with this scaffold.
:param key | <str>
:return <projex.scaffold.Template> || None
]
<ast.Try object at 0x7da1b2777d60> | keyword[def] identifier[template] ( identifier[self] , identifier[key] ):
literal[string]
keyword[try] :
keyword[return] identifier[self] . identifier[_templates] [ identifier[key] ]
keyword[except] identifier[KeyError] :
keyword[return] identifier[Template] . identifier[Plugins] [ identifier[key] ] | def template(self, key):
"""
Returns the template associated with this scaffold.
:param key | <str>
:return <projex.scaffold.Template> || None
"""
try:
return self._templates[key] # depends on [control=['try'], data=[]]
except KeyError:
return Template.Plugins[key] # depends on [control=['except'], data=[]] |
def get_groups(self, gs=None, processed=[], initial=True):
'''
<--------------------------------------- 12 columns ------------------------------------>
<--- 6 columns ---> <--- 6 columns --->
------------------------------------------ ------------------------------------------
| Info | | Personal |
|==========================================| |==========================================|
| ----------------- ------------------ | | |
| | Passport | | Name | | | Phone Zipcode |
| |=================| | [.....] [.....] | | | [...........................] [.......] |
| | CID Country | | <- 6 -> <- 6 -> | | | <--- 8 columns ---> <-4 col-> |
| | [.....] [.....] | | | | | |
| | <- 6 -> <- 6 -> | ----------------- | | Address |
| ----------------- | | [.....................................] |
------------------------------------------ | <--- 12 columns ---> |
| [..] number |
| <--- 12 columns ---> |
| |
------------------------------------------
group = [
(_('Info'),(6,'#8a6d3b','#fcf8e3','center'),
(_('Identification'),6,
["cid",6],
["country",6],
),
(None,6,
["name",None,6],
["surname",None,6,False],
),
),
(_('Personal'),6,
["phone",None,8],
["zipcode",None,4],
["address",None,12],
["number",None,12, True],
),
]
Group: it is defined as tuple with 3 or more elements:
Grammar: (<Name>, <Attributes>, <Element1>, <Element2>, ..., <ElementN>)
If <Name> is None: no name will be given to the group and no panel decoration will be shown
If <Size in columns> is None: default of 6 will be used
<Attributes>:
it can be an integer that represent the size in columns
it can be a tuple with several attributes where each element represents:
(<Size in columns>,'#<Font color>','#<Background color>','<Alignment>')
<Element>:
it can be a Group
it can be a Field
Examples:
('Info', 6, ["name",6], ["surname",6]) -> Info panel using 6 columns with 2 boxes 6 columns for each with name and surname inputs
('Info', (6,None,'#fcf8e3','center'), ["name",6], ["surname",6]) -> Info panel using 6 columns with a yellow brackground in centered title, 2 boxes, 6 columns for each with name and surname inputs
('Info', 12, ('Name', 6, ["name",12]), ('Surname',6, ["surname",12])) -> Info panel using 12 columns with 2 panels inside
of 6 columns each named "Name" and "Surname" and inside each of them an input "name" and "surname" where it belongs.
Field: must be a list with at least 1 element in it:
Grammar: [<Name of field>, <Size in columns>, <Label>]
<Name of field>:
This must be filled always
It is the input's name inside the form
Must exists as a form element or as a grouped form element
<Size in columns>:
Size of the input in columns
If it is not defined or if it is defined as None: default of 6 will be used
<Label>:
It it is defined as False: the label for this field will not be shown
If it is not defined or if it is defined as None: default of True will be used (default input's label will be shown)
If it is a string: this string will be shown as a label
Examples:
['age'] Input 'age' will be shown with 6 columns and its default label
['age',8] Input 'age' will be shown with 8 columns and its default label
['age', None, False] Input 'age' will be shown with 6 columns and NO LABEL
['age',8,False] Input 'age' will be shown with 8 columns and NO LABEL
['age',8,_("Age in days")] Input 'age' will be shown with 8 columns and translated label text "Age in days" to user's language
['age',8,_("Age in days"), True] Input 'age' will be shown with 8 columns and translated label text "Age in days" to user's language, and input inline with label
['age',6, None, None, None, None, None, ["ng-click=functionjs('param1')", "ng-change=functionjs2()"]] Input 'age' with extras functions
['age',None,None,None,None, 'filter'] Input 'age' with extras filter ONLY DETAILS
['age',6, {'color': 'red'} Input 'age' will be shown with red title
'''
# Check if language is set
if not self.__language:
raise IOError("ERROR: No language suplied!")
# Initialize the list
if initial:
processed = []
# Where to look for fields
if 'list_fields' in dir(self):
list_fields = self.list_fields
check_system = "html_name"
else:
list_fields = self
check_system = "name"
# Default attributes for fields
attributes = [
('columns', 6),
('color', None),
('bgcolor', None),
('textalign', None),
('inline', False), # input in line with label
('label', True),
('extra', None),
('extra_div', None),
('foreign_info', {}),
]
labels = [x[0] for x in attributes]
# Get groups if none was given
if gs is None:
gs = self.__groups__()
# Prepare the answer
groups = []
# Prepare focus control
focus_first = None
focus_must = None
# html helper for groups and fields
html_helper = self.html_helper()
# Start processing
for g in gs:
token = {}
token['name'] = g[0]
if token['name'] in html_helper:
if 'pre' in html_helper[token['name']]:
token["html_helper_pre"] = html_helper[token['name']]['pre']
if 'post' in html_helper[token['name']]:
token["html_helper_post"] = html_helper[token['name']]['post']
styles = g[1]
if type(styles) is tuple:
if len(styles) >= 1:
token['columns'] = g[1][0]
if len(styles) >= 2:
token['color'] = g[1][1]
if len(styles) >= 3:
token['bgcolor'] = g[1][2]
if len(styles) >= 4:
token['textalign'] = g[1][3]
if len(styles) >= 5:
token['inline'] = g[1][4]
if len(styles) >= 7:
token['extra'] = g[1][5]
if len(styles) >= 8:
token['extra_div'] = g[1][6]
else:
token['columns'] = g[1]
fs = g[2:]
fields = []
for f in fs:
# Field
atr = {}
# Decide weather this is a Group or not
if type(f) == tuple:
# Recursive
fields += self.get_groups([list(f)], processed, False)
else:
try:
list_type = [str, unicode, ]
except NameError:
list_type = [str, ]
# Check if it is a list
if type(f) == list:
# This is a field with attributes, get the name
field = f[0]
if html_helper and token['name'] in html_helper and 'items' in html_helper[token['name']] and field in html_helper[token['name']]['items']:
if 'pre' in html_helper[token['name']]['items'][field]:
atr["html_helper_pre"] = html_helper[token['name']]['items'][field]['pre']
if 'post' in html_helper[token['name']]['items'][field]:
atr["html_helper_post"] = html_helper[token['name']]['items'][field]['post']
# Process each attribute (if any)
dictionary = False
for idx, element in enumerate(f[1:]):
if type(element) == dict:
dictionary = True
for key in element.keys():
if key in labels:
atr[key] = element[key]
else:
raise IOError("Unknown attribute '{0}' as field '{1}' in list of fields".format(key, field))
else:
if not dictionary:
if element is not None:
atr[attributes[idx][0]] = element
else:
raise IOError("We already processed a dicionary element in this list of fields, you can not add anoother type of elements to it, you must keep going with dictionaries")
elif type(f) in list_type:
field = f
else:
raise IOError("Uknown element type '{0}' inside group '{1}'".format(type(f), token['name']))
# Get the Django Field object
found = None
for infield in list_fields:
if infield.__dict__[check_system] == field:
found = infield
break
if found:
# Get attributes (required and original attributes)
wrequired = found.field.widget.is_required
wattrs = found.field.widget.attrs
# Fill base attributes
atr['name'] = found.html_name
atr['input'] = found
atr['focus'] = False
# Set focus
if focus_must is None:
if focus_first is None:
focus_first = atr
if wrequired:
focus_must = atr
# Autocomplete
if 'autofill' in dir(self.Meta):
autofill = self.Meta.autofill.get(found.html_name, None)
atr['autofill'] = autofill
if autofill:
# Check format of the request
autokind = autofill[0]
if type(autokind) == str:
# Using new format
if autokind == 'select':
# If autofill is True for this field set the DynamicSelect widget
found.field.widget = DynamicSelect(wattrs)
elif autokind == 'multiselect':
# If autofill is True for this field set the DynamicSelect widget
found.field.widget = MultiDynamicSelect(wattrs)
elif autokind == 'input':
# If autofill is True for this field set the DynamicSelect widget
found.field.widget = DynamicInput(wattrs)
else:
raise IOError("Autofill filled using new format but autokind is '{}' and I only know 'input' or 'select'".format(autokind))
# Configure the field
found.field.widget.is_required = wrequired
found.field.widget.form_name = self.form_name
found.field.widget.field_name = infield.html_name
found.field.widget.autofill_deepness = autofill[1]
found.field.widget.autofill_url = autofill[2]
found.field.widget.autofill = autofill[3:]
else:
# Get old information [COMPATIBILITY WITH OLD VERSION]
# If autofill is True for this field set the DynamicSelect widget
found.field.widget = DynamicSelect(wattrs)
found.field.widget.is_required = wrequired
found.field.widget.form_name = self.form_name
found.field.widget.field_name = infield.html_name
found.field.widget.autofill_deepness = autofill[0]
found.field.widget.autofill_url = autofill[1]
found.field.widget.autofill = autofill[2:]
else:
# Set we don't have autofill for this field
atr['autofill'] = None
# Check if we have to replace the widget with a newer one
if isinstance(found.field.widget, Select) and not isinstance(found.field.widget, DynamicSelect):
if not isinstance(found.field.widget, MultiStaticSelect):
found.field.widget = StaticSelect(wattrs)
found.field.widget.choices = found.field.choices
found.field.widget.is_required = wrequired
found.field.widget.form_name = self.form_name
found.field.widget.field_name = infield.html_name
# Fill all attributes
for (attribute, default) in attributes:
if attribute not in atr.keys():
atr[attribute] = default
# Fill label
if atr['label'] is True:
atr['label'] = found.label
# Set language
flang = getattr(found.field, "set_language", None)
if flang:
flang(self.__language)
flang = getattr(found.field.widget, "set_language", None)
if flang:
flang(self.__language)
# Attach the element
fields.append(atr)
# Remember we have processed it
processed.append(found.__dict__[check_system])
else:
raise IOError("Unknown field '{0}' specified in group '{1}'".format(f, token['name']))
token['fields'] = fields
groups.append(token)
# Add the rest of attributes we didn't use yet
if initial:
fields = []
for infield in list_fields:
if infield.__dict__[check_system] not in processed:
# Get attributes (required and original attributes)
wattrs = infield.field.widget.attrs
wrequired = infield.field.widget.is_required
# Prepare attr
atr = {}
# Fill base attributes
atr['name'] = infield.html_name
atr['input'] = infield
atr['focus'] = False
# Set focus
if focus_must is None:
if focus_first is None:
focus_first = atr
if wrequired:
focus_must = atr
# Autocomplete
if 'autofill' in dir(self.Meta):
autofill = self.Meta.autofill.get(infield.html_name, None)
atr['autofill'] = autofill
if autofill:
# Check format of the request
autokind = autofill[0]
if type(autokind) == str:
# Get old information
# Using new format
if autokind == 'select':
# If autofill is True for this field set the DynamicSelect widget
infield.field.widget = DynamicSelect(wattrs)
elif autokind == 'multiselect':
# If autofill is True for this field set the DynamicSelect widget
infield.field.widget = MultiDynamicSelect(wattrs)
elif autokind == 'input':
# If autofill is True for this field set the DynamicSelect widget
infield.field.widget = DynamicInput(wattrs)
else:
raise IOError("Autofill filled using new format but autokind is '{}' and I only know 'input' or 'select'".format(autokind))
# Configure the field
infield.field.widget.is_required = wrequired
infield.field.widget.form_name = self.form_name
infield.field.widget.field_name = infield.html_name
infield.field.widget.autofill_deepness = autofill[1]
infield.field.widget.autofill_url = autofill[2]
infield.field.widget.autofill = autofill[3:]
else:
# Get old information [COMPATIBILITY WITH OLD VERSION]
# If autofill is True for this field set the DynamicSelect widget
infield.field.widget = DynamicSelect(wattrs)
infield.field.widget.is_required = wrequired
infield.field.widget.form_name = self.form_name
infield.field.widget.field_name = infield.html_name
infield.field.widget.autofill_deepness = autofill[0]
infield.field.widget.autofill_url = autofill[1]
infield.field.widget.autofill = autofill[2:]
else:
# Set we don't have autofill for this field
atr['autofill'] = None
# Check if we have to replace the widget with a newer one
if isinstance(infield.field.widget, Select) and not isinstance(infield.field.widget, DynamicSelect):
if isinstance(infield.field, NullBooleanField):
infield.field.widget = CheckboxInput(wattrs)
elif not isinstance(infield.field.widget, MultiStaticSelect):
infield.field.widget = StaticSelect(wattrs)
if hasattr(infield.field.widget, 'choices') and hasattr(infield.field, 'choices'):
infield.field.widget.choices = infield.field.choices
infield.field.widget.is_required = wrequired
infield.field.widget.form_name = self.form_name
infield.field.widget.field_name = infield.html_name
# Fill all attributes
for (attribute, default) in attributes:
if attribute not in atr.keys():
atr[attribute] = default
# Fill label
if atr['label'] is True:
atr['label'] = infield.label
# Set language
flang = getattr(infield.field, "set_language", None)
if flang:
flang(self.__language)
flang = getattr(infield.field.widget, "set_language", None)
if flang:
flang(self.__language)
# Attach the attribute
fields.append(atr)
# Save the new elements
if fields:
groups.append({'name': None, 'columns': 12, 'fields': fields})
# Set focus
if focus_must:
focus_must['focus'] = True
elif focus_first is not None:
focus_first['focus'] = True
# Return the resulting groups
return groups | def function[get_groups, parameter[self, gs, processed, initial]]:
constant[
<--------------------------------------- 12 columns ------------------------------------>
<--- 6 columns ---> <--- 6 columns --->
------------------------------------------ ------------------------------------------
| Info | | Personal |
|==========================================| |==========================================|
| ----------------- ------------------ | | |
| | Passport | | Name | | | Phone Zipcode |
| |=================| | [.....] [.....] | | | [...........................] [.......] |
| | CID Country | | <- 6 -> <- 6 -> | | | <--- 8 columns ---> <-4 col-> |
| | [.....] [.....] | | | | | |
| | <- 6 -> <- 6 -> | ----------------- | | Address |
| ----------------- | | [.....................................] |
------------------------------------------ | <--- 12 columns ---> |
| [..] number |
| <--- 12 columns ---> |
| |
------------------------------------------
group = [
(_('Info'),(6,'#8a6d3b','#fcf8e3','center'),
(_('Identification'),6,
["cid",6],
["country",6],
),
(None,6,
["name",None,6],
["surname",None,6,False],
),
),
(_('Personal'),6,
["phone",None,8],
["zipcode",None,4],
["address",None,12],
["number",None,12, True],
),
]
Group: it is defined as tuple with 3 or more elements:
Grammar: (<Name>, <Attributes>, <Element1>, <Element2>, ..., <ElementN>)
If <Name> is None: no name will be given to the group and no panel decoration will be shown
If <Size in columns> is None: default of 6 will be used
<Attributes>:
it can be an integer that represent the size in columns
it can be a tuple with several attributes where each element represents:
(<Size in columns>,'#<Font color>','#<Background color>','<Alignment>')
<Element>:
it can be a Group
it can be a Field
Examples:
('Info', 6, ["name",6], ["surname",6]) -> Info panel using 6 columns with 2 boxes 6 columns for each with name and surname inputs
('Info', (6,None,'#fcf8e3','center'), ["name",6], ["surname",6]) -> Info panel using 6 columns with a yellow brackground in centered title, 2 boxes, 6 columns for each with name and surname inputs
('Info', 12, ('Name', 6, ["name",12]), ('Surname',6, ["surname",12])) -> Info panel using 12 columns with 2 panels inside
of 6 columns each named "Name" and "Surname" and inside each of them an input "name" and "surname" where it belongs.
Field: must be a list with at least 1 element in it:
Grammar: [<Name of field>, <Size in columns>, <Label>]
<Name of field>:
This must be filled always
It is the input's name inside the form
Must exists as a form element or as a grouped form element
<Size in columns>:
Size of the input in columns
If it is not defined or if it is defined as None: default of 6 will be used
<Label>:
It it is defined as False: the label for this field will not be shown
If it is not defined or if it is defined as None: default of True will be used (default input's label will be shown)
If it is a string: this string will be shown as a label
Examples:
['age'] Input 'age' will be shown with 6 columns and its default label
['age',8] Input 'age' will be shown with 8 columns and its default label
['age', None, False] Input 'age' will be shown with 6 columns and NO LABEL
['age',8,False] Input 'age' will be shown with 8 columns and NO LABEL
['age',8,_("Age in days")] Input 'age' will be shown with 8 columns and translated label text "Age in days" to user's language
['age',8,_("Age in days"), True] Input 'age' will be shown with 8 columns and translated label text "Age in days" to user's language, and input inline with label
['age',6, None, None, None, None, None, ["ng-click=functionjs('param1')", "ng-change=functionjs2()"]] Input 'age' with extras functions
['age',None,None,None,None, 'filter'] Input 'age' with extras filter ONLY DETAILS
['age',6, {'color': 'red'} Input 'age' will be shown with red title
]
if <ast.UnaryOp object at 0x7da1b0d27d30> begin[:]
<ast.Raise object at 0x7da1b0d27ca0>
if name[initial] begin[:]
variable[processed] assign[=] list[[]]
if compare[constant[list_fields] in call[name[dir], parameter[name[self]]]] begin[:]
variable[list_fields] assign[=] name[self].list_fields
variable[check_system] assign[=] constant[html_name]
variable[attributes] assign[=] list[[<ast.Tuple object at 0x7da1b0d276d0>, <ast.Tuple object at 0x7da1b0d27640>, <ast.Tuple object at 0x7da1b0d275b0>, <ast.Tuple object at 0x7da1b0d27520>, <ast.Tuple object at 0x7da1b0d27490>, <ast.Tuple object at 0x7da1b0d27400>, <ast.Tuple object at 0x7da1b0d27370>, <ast.Tuple object at 0x7da1b0d272e0>, <ast.Tuple object at 0x7da1b0d27250>]]
variable[labels] assign[=] <ast.ListComp object at 0x7da1b0d27160>
if compare[name[gs] is constant[None]] begin[:]
variable[gs] assign[=] call[name[self].__groups__, parameter[]]
variable[groups] assign[=] list[[]]
variable[focus_first] assign[=] constant[None]
variable[focus_must] assign[=] constant[None]
variable[html_helper] assign[=] call[name[self].html_helper, parameter[]]
for taget[name[g]] in starred[name[gs]] begin[:]
variable[token] assign[=] dictionary[[], []]
call[name[token]][constant[name]] assign[=] call[name[g]][constant[0]]
if compare[call[name[token]][constant[name]] in name[html_helper]] begin[:]
if compare[constant[pre] in call[name[html_helper]][call[name[token]][constant[name]]]] begin[:]
call[name[token]][constant[html_helper_pre]] assign[=] call[call[name[html_helper]][call[name[token]][constant[name]]]][constant[pre]]
if compare[constant[post] in call[name[html_helper]][call[name[token]][constant[name]]]] begin[:]
call[name[token]][constant[html_helper_post]] assign[=] call[call[name[html_helper]][call[name[token]][constant[name]]]][constant[post]]
variable[styles] assign[=] call[name[g]][constant[1]]
if compare[call[name[type], parameter[name[styles]]] is name[tuple]] begin[:]
if compare[call[name[len], parameter[name[styles]]] greater_or_equal[>=] constant[1]] begin[:]
call[name[token]][constant[columns]] assign[=] call[call[name[g]][constant[1]]][constant[0]]
if compare[call[name[len], parameter[name[styles]]] greater_or_equal[>=] constant[2]] begin[:]
call[name[token]][constant[color]] assign[=] call[call[name[g]][constant[1]]][constant[1]]
if compare[call[name[len], parameter[name[styles]]] greater_or_equal[>=] constant[3]] begin[:]
call[name[token]][constant[bgcolor]] assign[=] call[call[name[g]][constant[1]]][constant[2]]
if compare[call[name[len], parameter[name[styles]]] greater_or_equal[>=] constant[4]] begin[:]
call[name[token]][constant[textalign]] assign[=] call[call[name[g]][constant[1]]][constant[3]]
if compare[call[name[len], parameter[name[styles]]] greater_or_equal[>=] constant[5]] begin[:]
call[name[token]][constant[inline]] assign[=] call[call[name[g]][constant[1]]][constant[4]]
if compare[call[name[len], parameter[name[styles]]] greater_or_equal[>=] constant[7]] begin[:]
call[name[token]][constant[extra]] assign[=] call[call[name[g]][constant[1]]][constant[5]]
if compare[call[name[len], parameter[name[styles]]] greater_or_equal[>=] constant[8]] begin[:]
call[name[token]][constant[extra_div]] assign[=] call[call[name[g]][constant[1]]][constant[6]]
variable[fs] assign[=] call[name[g]][<ast.Slice object at 0x7da1b0d17a90>]
variable[fields] assign[=] list[[]]
for taget[name[f]] in starred[name[fs]] begin[:]
variable[atr] assign[=] dictionary[[], []]
if compare[call[name[type], parameter[name[f]]] equal[==] name[tuple]] begin[:]
<ast.AugAssign object at 0x7da1b0d17760>
call[name[token]][constant[fields]] assign[=] name[fields]
call[name[groups].append, parameter[name[token]]]
if name[initial] begin[:]
variable[fields] assign[=] list[[]]
for taget[name[infield]] in starred[name[list_fields]] begin[:]
if compare[call[name[infield].__dict__][name[check_system]] <ast.NotIn object at 0x7da2590d7190> name[processed]] begin[:]
variable[wattrs] assign[=] name[infield].field.widget.attrs
variable[wrequired] assign[=] name[infield].field.widget.is_required
variable[atr] assign[=] dictionary[[], []]
call[name[atr]][constant[name]] assign[=] name[infield].html_name
call[name[atr]][constant[input]] assign[=] name[infield]
call[name[atr]][constant[focus]] assign[=] constant[False]
if compare[name[focus_must] is constant[None]] begin[:]
if compare[name[focus_first] is constant[None]] begin[:]
variable[focus_first] assign[=] name[atr]
if name[wrequired] begin[:]
variable[focus_must] assign[=] name[atr]
if compare[constant[autofill] in call[name[dir], parameter[name[self].Meta]]] begin[:]
variable[autofill] assign[=] call[name[self].Meta.autofill.get, parameter[name[infield].html_name, constant[None]]]
call[name[atr]][constant[autofill]] assign[=] name[autofill]
if name[autofill] begin[:]
variable[autokind] assign[=] call[name[autofill]][constant[0]]
if compare[call[name[type], parameter[name[autokind]]] equal[==] name[str]] begin[:]
if compare[name[autokind] equal[==] constant[select]] begin[:]
name[infield].field.widget assign[=] call[name[DynamicSelect], parameter[name[wattrs]]]
name[infield].field.widget.is_required assign[=] name[wrequired]
name[infield].field.widget.form_name assign[=] name[self].form_name
name[infield].field.widget.field_name assign[=] name[infield].html_name
name[infield].field.widget.autofill_deepness assign[=] call[name[autofill]][constant[1]]
name[infield].field.widget.autofill_url assign[=] call[name[autofill]][constant[2]]
name[infield].field.widget.autofill assign[=] call[name[autofill]][<ast.Slice object at 0x7da1b0e4cf70>]
if <ast.BoolOp object at 0x7da1b0e4e0e0> begin[:]
if call[name[isinstance], parameter[name[infield].field, name[NullBooleanField]]] begin[:]
name[infield].field.widget assign[=] call[name[CheckboxInput], parameter[name[wattrs]]]
if <ast.BoolOp object at 0x7da1b0ebe800> begin[:]
name[infield].field.widget.choices assign[=] name[infield].field.choices
name[infield].field.widget.is_required assign[=] name[wrequired]
name[infield].field.widget.form_name assign[=] name[self].form_name
name[infield].field.widget.field_name assign[=] name[infield].html_name
for taget[tuple[[<ast.Name object at 0x7da1b0ebc280>, <ast.Name object at 0x7da1b0ebf7c0>]]] in starred[name[attributes]] begin[:]
if compare[name[attribute] <ast.NotIn object at 0x7da2590d7190> call[name[atr].keys, parameter[]]] begin[:]
call[name[atr]][name[attribute]] assign[=] name[default]
if compare[call[name[atr]][constant[label]] is constant[True]] begin[:]
call[name[atr]][constant[label]] assign[=] name[infield].label
variable[flang] assign[=] call[name[getattr], parameter[name[infield].field, constant[set_language], constant[None]]]
if name[flang] begin[:]
call[name[flang], parameter[name[self].__language]]
variable[flang] assign[=] call[name[getattr], parameter[name[infield].field.widget, constant[set_language], constant[None]]]
if name[flang] begin[:]
call[name[flang], parameter[name[self].__language]]
call[name[fields].append, parameter[name[atr]]]
if name[fields] begin[:]
call[name[groups].append, parameter[dictionary[[<ast.Constant object at 0x7da1b0ebe530>, <ast.Constant object at 0x7da1b0ebe350>, <ast.Constant object at 0x7da1b0ebd9c0>], [<ast.Constant object at 0x7da1b0ebf460>, <ast.Constant object at 0x7da1b0ef55d0>, <ast.Name object at 0x7da1b0ef4e50>]]]]
if name[focus_must] begin[:]
call[name[focus_must]][constant[focus]] assign[=] constant[True]
return[name[groups]] | keyword[def] identifier[get_groups] ( identifier[self] , identifier[gs] = keyword[None] , identifier[processed] =[], identifier[initial] = keyword[True] ):
literal[string]
keyword[if] keyword[not] identifier[self] . identifier[__language] :
keyword[raise] identifier[IOError] ( literal[string] )
keyword[if] identifier[initial] :
identifier[processed] =[]
keyword[if] literal[string] keyword[in] identifier[dir] ( identifier[self] ):
identifier[list_fields] = identifier[self] . identifier[list_fields]
identifier[check_system] = literal[string]
keyword[else] :
identifier[list_fields] = identifier[self]
identifier[check_system] = literal[string]
identifier[attributes] =[
( literal[string] , literal[int] ),
( literal[string] , keyword[None] ),
( literal[string] , keyword[None] ),
( literal[string] , keyword[None] ),
( literal[string] , keyword[False] ),
( literal[string] , keyword[True] ),
( literal[string] , keyword[None] ),
( literal[string] , keyword[None] ),
( literal[string] ,{}),
]
identifier[labels] =[ identifier[x] [ literal[int] ] keyword[for] identifier[x] keyword[in] identifier[attributes] ]
keyword[if] identifier[gs] keyword[is] keyword[None] :
identifier[gs] = identifier[self] . identifier[__groups__] ()
identifier[groups] =[]
identifier[focus_first] = keyword[None]
identifier[focus_must] = keyword[None]
identifier[html_helper] = identifier[self] . identifier[html_helper] ()
keyword[for] identifier[g] keyword[in] identifier[gs] :
identifier[token] ={}
identifier[token] [ literal[string] ]= identifier[g] [ literal[int] ]
keyword[if] identifier[token] [ literal[string] ] keyword[in] identifier[html_helper] :
keyword[if] literal[string] keyword[in] identifier[html_helper] [ identifier[token] [ literal[string] ]]:
identifier[token] [ literal[string] ]= identifier[html_helper] [ identifier[token] [ literal[string] ]][ literal[string] ]
keyword[if] literal[string] keyword[in] identifier[html_helper] [ identifier[token] [ literal[string] ]]:
identifier[token] [ literal[string] ]= identifier[html_helper] [ identifier[token] [ literal[string] ]][ literal[string] ]
identifier[styles] = identifier[g] [ literal[int] ]
keyword[if] identifier[type] ( identifier[styles] ) keyword[is] identifier[tuple] :
keyword[if] identifier[len] ( identifier[styles] )>= literal[int] :
identifier[token] [ literal[string] ]= identifier[g] [ literal[int] ][ literal[int] ]
keyword[if] identifier[len] ( identifier[styles] )>= literal[int] :
identifier[token] [ literal[string] ]= identifier[g] [ literal[int] ][ literal[int] ]
keyword[if] identifier[len] ( identifier[styles] )>= literal[int] :
identifier[token] [ literal[string] ]= identifier[g] [ literal[int] ][ literal[int] ]
keyword[if] identifier[len] ( identifier[styles] )>= literal[int] :
identifier[token] [ literal[string] ]= identifier[g] [ literal[int] ][ literal[int] ]
keyword[if] identifier[len] ( identifier[styles] )>= literal[int] :
identifier[token] [ literal[string] ]= identifier[g] [ literal[int] ][ literal[int] ]
keyword[if] identifier[len] ( identifier[styles] )>= literal[int] :
identifier[token] [ literal[string] ]= identifier[g] [ literal[int] ][ literal[int] ]
keyword[if] identifier[len] ( identifier[styles] )>= literal[int] :
identifier[token] [ literal[string] ]= identifier[g] [ literal[int] ][ literal[int] ]
keyword[else] :
identifier[token] [ literal[string] ]= identifier[g] [ literal[int] ]
identifier[fs] = identifier[g] [ literal[int] :]
identifier[fields] =[]
keyword[for] identifier[f] keyword[in] identifier[fs] :
identifier[atr] ={}
keyword[if] identifier[type] ( identifier[f] )== identifier[tuple] :
identifier[fields] += identifier[self] . identifier[get_groups] ([ identifier[list] ( identifier[f] )], identifier[processed] , keyword[False] )
keyword[else] :
keyword[try] :
identifier[list_type] =[ identifier[str] , identifier[unicode] ,]
keyword[except] identifier[NameError] :
identifier[list_type] =[ identifier[str] ,]
keyword[if] identifier[type] ( identifier[f] )== identifier[list] :
identifier[field] = identifier[f] [ literal[int] ]
keyword[if] identifier[html_helper] keyword[and] identifier[token] [ literal[string] ] keyword[in] identifier[html_helper] keyword[and] literal[string] keyword[in] identifier[html_helper] [ identifier[token] [ literal[string] ]] keyword[and] identifier[field] keyword[in] identifier[html_helper] [ identifier[token] [ literal[string] ]][ literal[string] ]:
keyword[if] literal[string] keyword[in] identifier[html_helper] [ identifier[token] [ literal[string] ]][ literal[string] ][ identifier[field] ]:
identifier[atr] [ literal[string] ]= identifier[html_helper] [ identifier[token] [ literal[string] ]][ literal[string] ][ identifier[field] ][ literal[string] ]
keyword[if] literal[string] keyword[in] identifier[html_helper] [ identifier[token] [ literal[string] ]][ literal[string] ][ identifier[field] ]:
identifier[atr] [ literal[string] ]= identifier[html_helper] [ identifier[token] [ literal[string] ]][ literal[string] ][ identifier[field] ][ literal[string] ]
identifier[dictionary] = keyword[False]
keyword[for] identifier[idx] , identifier[element] keyword[in] identifier[enumerate] ( identifier[f] [ literal[int] :]):
keyword[if] identifier[type] ( identifier[element] )== identifier[dict] :
identifier[dictionary] = keyword[True]
keyword[for] identifier[key] keyword[in] identifier[element] . identifier[keys] ():
keyword[if] identifier[key] keyword[in] identifier[labels] :
identifier[atr] [ identifier[key] ]= identifier[element] [ identifier[key] ]
keyword[else] :
keyword[raise] identifier[IOError] ( literal[string] . identifier[format] ( identifier[key] , identifier[field] ))
keyword[else] :
keyword[if] keyword[not] identifier[dictionary] :
keyword[if] identifier[element] keyword[is] keyword[not] keyword[None] :
identifier[atr] [ identifier[attributes] [ identifier[idx] ][ literal[int] ]]= identifier[element]
keyword[else] :
keyword[raise] identifier[IOError] ( literal[string] )
keyword[elif] identifier[type] ( identifier[f] ) keyword[in] identifier[list_type] :
identifier[field] = identifier[f]
keyword[else] :
keyword[raise] identifier[IOError] ( literal[string] . identifier[format] ( identifier[type] ( identifier[f] ), identifier[token] [ literal[string] ]))
identifier[found] = keyword[None]
keyword[for] identifier[infield] keyword[in] identifier[list_fields] :
keyword[if] identifier[infield] . identifier[__dict__] [ identifier[check_system] ]== identifier[field] :
identifier[found] = identifier[infield]
keyword[break]
keyword[if] identifier[found] :
identifier[wrequired] = identifier[found] . identifier[field] . identifier[widget] . identifier[is_required]
identifier[wattrs] = identifier[found] . identifier[field] . identifier[widget] . identifier[attrs]
identifier[atr] [ literal[string] ]= identifier[found] . identifier[html_name]
identifier[atr] [ literal[string] ]= identifier[found]
identifier[atr] [ literal[string] ]= keyword[False]
keyword[if] identifier[focus_must] keyword[is] keyword[None] :
keyword[if] identifier[focus_first] keyword[is] keyword[None] :
identifier[focus_first] = identifier[atr]
keyword[if] identifier[wrequired] :
identifier[focus_must] = identifier[atr]
keyword[if] literal[string] keyword[in] identifier[dir] ( identifier[self] . identifier[Meta] ):
identifier[autofill] = identifier[self] . identifier[Meta] . identifier[autofill] . identifier[get] ( identifier[found] . identifier[html_name] , keyword[None] )
identifier[atr] [ literal[string] ]= identifier[autofill]
keyword[if] identifier[autofill] :
identifier[autokind] = identifier[autofill] [ literal[int] ]
keyword[if] identifier[type] ( identifier[autokind] )== identifier[str] :
keyword[if] identifier[autokind] == literal[string] :
identifier[found] . identifier[field] . identifier[widget] = identifier[DynamicSelect] ( identifier[wattrs] )
keyword[elif] identifier[autokind] == literal[string] :
identifier[found] . identifier[field] . identifier[widget] = identifier[MultiDynamicSelect] ( identifier[wattrs] )
keyword[elif] identifier[autokind] == literal[string] :
identifier[found] . identifier[field] . identifier[widget] = identifier[DynamicInput] ( identifier[wattrs] )
keyword[else] :
keyword[raise] identifier[IOError] ( literal[string] . identifier[format] ( identifier[autokind] ))
identifier[found] . identifier[field] . identifier[widget] . identifier[is_required] = identifier[wrequired]
identifier[found] . identifier[field] . identifier[widget] . identifier[form_name] = identifier[self] . identifier[form_name]
identifier[found] . identifier[field] . identifier[widget] . identifier[field_name] = identifier[infield] . identifier[html_name]
identifier[found] . identifier[field] . identifier[widget] . identifier[autofill_deepness] = identifier[autofill] [ literal[int] ]
identifier[found] . identifier[field] . identifier[widget] . identifier[autofill_url] = identifier[autofill] [ literal[int] ]
identifier[found] . identifier[field] . identifier[widget] . identifier[autofill] = identifier[autofill] [ literal[int] :]
keyword[else] :
identifier[found] . identifier[field] . identifier[widget] = identifier[DynamicSelect] ( identifier[wattrs] )
identifier[found] . identifier[field] . identifier[widget] . identifier[is_required] = identifier[wrequired]
identifier[found] . identifier[field] . identifier[widget] . identifier[form_name] = identifier[self] . identifier[form_name]
identifier[found] . identifier[field] . identifier[widget] . identifier[field_name] = identifier[infield] . identifier[html_name]
identifier[found] . identifier[field] . identifier[widget] . identifier[autofill_deepness] = identifier[autofill] [ literal[int] ]
identifier[found] . identifier[field] . identifier[widget] . identifier[autofill_url] = identifier[autofill] [ literal[int] ]
identifier[found] . identifier[field] . identifier[widget] . identifier[autofill] = identifier[autofill] [ literal[int] :]
keyword[else] :
identifier[atr] [ literal[string] ]= keyword[None]
keyword[if] identifier[isinstance] ( identifier[found] . identifier[field] . identifier[widget] , identifier[Select] ) keyword[and] keyword[not] identifier[isinstance] ( identifier[found] . identifier[field] . identifier[widget] , identifier[DynamicSelect] ):
keyword[if] keyword[not] identifier[isinstance] ( identifier[found] . identifier[field] . identifier[widget] , identifier[MultiStaticSelect] ):
identifier[found] . identifier[field] . identifier[widget] = identifier[StaticSelect] ( identifier[wattrs] )
identifier[found] . identifier[field] . identifier[widget] . identifier[choices] = identifier[found] . identifier[field] . identifier[choices]
identifier[found] . identifier[field] . identifier[widget] . identifier[is_required] = identifier[wrequired]
identifier[found] . identifier[field] . identifier[widget] . identifier[form_name] = identifier[self] . identifier[form_name]
identifier[found] . identifier[field] . identifier[widget] . identifier[field_name] = identifier[infield] . identifier[html_name]
keyword[for] ( identifier[attribute] , identifier[default] ) keyword[in] identifier[attributes] :
keyword[if] identifier[attribute] keyword[not] keyword[in] identifier[atr] . identifier[keys] ():
identifier[atr] [ identifier[attribute] ]= identifier[default]
keyword[if] identifier[atr] [ literal[string] ] keyword[is] keyword[True] :
identifier[atr] [ literal[string] ]= identifier[found] . identifier[label]
identifier[flang] = identifier[getattr] ( identifier[found] . identifier[field] , literal[string] , keyword[None] )
keyword[if] identifier[flang] :
identifier[flang] ( identifier[self] . identifier[__language] )
identifier[flang] = identifier[getattr] ( identifier[found] . identifier[field] . identifier[widget] , literal[string] , keyword[None] )
keyword[if] identifier[flang] :
identifier[flang] ( identifier[self] . identifier[__language] )
identifier[fields] . identifier[append] ( identifier[atr] )
identifier[processed] . identifier[append] ( identifier[found] . identifier[__dict__] [ identifier[check_system] ])
keyword[else] :
keyword[raise] identifier[IOError] ( literal[string] . identifier[format] ( identifier[f] , identifier[token] [ literal[string] ]))
identifier[token] [ literal[string] ]= identifier[fields]
identifier[groups] . identifier[append] ( identifier[token] )
keyword[if] identifier[initial] :
identifier[fields] =[]
keyword[for] identifier[infield] keyword[in] identifier[list_fields] :
keyword[if] identifier[infield] . identifier[__dict__] [ identifier[check_system] ] keyword[not] keyword[in] identifier[processed] :
identifier[wattrs] = identifier[infield] . identifier[field] . identifier[widget] . identifier[attrs]
identifier[wrequired] = identifier[infield] . identifier[field] . identifier[widget] . identifier[is_required]
identifier[atr] ={}
identifier[atr] [ literal[string] ]= identifier[infield] . identifier[html_name]
identifier[atr] [ literal[string] ]= identifier[infield]
identifier[atr] [ literal[string] ]= keyword[False]
keyword[if] identifier[focus_must] keyword[is] keyword[None] :
keyword[if] identifier[focus_first] keyword[is] keyword[None] :
identifier[focus_first] = identifier[atr]
keyword[if] identifier[wrequired] :
identifier[focus_must] = identifier[atr]
keyword[if] literal[string] keyword[in] identifier[dir] ( identifier[self] . identifier[Meta] ):
identifier[autofill] = identifier[self] . identifier[Meta] . identifier[autofill] . identifier[get] ( identifier[infield] . identifier[html_name] , keyword[None] )
identifier[atr] [ literal[string] ]= identifier[autofill]
keyword[if] identifier[autofill] :
identifier[autokind] = identifier[autofill] [ literal[int] ]
keyword[if] identifier[type] ( identifier[autokind] )== identifier[str] :
keyword[if] identifier[autokind] == literal[string] :
identifier[infield] . identifier[field] . identifier[widget] = identifier[DynamicSelect] ( identifier[wattrs] )
keyword[elif] identifier[autokind] == literal[string] :
identifier[infield] . identifier[field] . identifier[widget] = identifier[MultiDynamicSelect] ( identifier[wattrs] )
keyword[elif] identifier[autokind] == literal[string] :
identifier[infield] . identifier[field] . identifier[widget] = identifier[DynamicInput] ( identifier[wattrs] )
keyword[else] :
keyword[raise] identifier[IOError] ( literal[string] . identifier[format] ( identifier[autokind] ))
identifier[infield] . identifier[field] . identifier[widget] . identifier[is_required] = identifier[wrequired]
identifier[infield] . identifier[field] . identifier[widget] . identifier[form_name] = identifier[self] . identifier[form_name]
identifier[infield] . identifier[field] . identifier[widget] . identifier[field_name] = identifier[infield] . identifier[html_name]
identifier[infield] . identifier[field] . identifier[widget] . identifier[autofill_deepness] = identifier[autofill] [ literal[int] ]
identifier[infield] . identifier[field] . identifier[widget] . identifier[autofill_url] = identifier[autofill] [ literal[int] ]
identifier[infield] . identifier[field] . identifier[widget] . identifier[autofill] = identifier[autofill] [ literal[int] :]
keyword[else] :
identifier[infield] . identifier[field] . identifier[widget] = identifier[DynamicSelect] ( identifier[wattrs] )
identifier[infield] . identifier[field] . identifier[widget] . identifier[is_required] = identifier[wrequired]
identifier[infield] . identifier[field] . identifier[widget] . identifier[form_name] = identifier[self] . identifier[form_name]
identifier[infield] . identifier[field] . identifier[widget] . identifier[field_name] = identifier[infield] . identifier[html_name]
identifier[infield] . identifier[field] . identifier[widget] . identifier[autofill_deepness] = identifier[autofill] [ literal[int] ]
identifier[infield] . identifier[field] . identifier[widget] . identifier[autofill_url] = identifier[autofill] [ literal[int] ]
identifier[infield] . identifier[field] . identifier[widget] . identifier[autofill] = identifier[autofill] [ literal[int] :]
keyword[else] :
identifier[atr] [ literal[string] ]= keyword[None]
keyword[if] identifier[isinstance] ( identifier[infield] . identifier[field] . identifier[widget] , identifier[Select] ) keyword[and] keyword[not] identifier[isinstance] ( identifier[infield] . identifier[field] . identifier[widget] , identifier[DynamicSelect] ):
keyword[if] identifier[isinstance] ( identifier[infield] . identifier[field] , identifier[NullBooleanField] ):
identifier[infield] . identifier[field] . identifier[widget] = identifier[CheckboxInput] ( identifier[wattrs] )
keyword[elif] keyword[not] identifier[isinstance] ( identifier[infield] . identifier[field] . identifier[widget] , identifier[MultiStaticSelect] ):
identifier[infield] . identifier[field] . identifier[widget] = identifier[StaticSelect] ( identifier[wattrs] )
keyword[if] identifier[hasattr] ( identifier[infield] . identifier[field] . identifier[widget] , literal[string] ) keyword[and] identifier[hasattr] ( identifier[infield] . identifier[field] , literal[string] ):
identifier[infield] . identifier[field] . identifier[widget] . identifier[choices] = identifier[infield] . identifier[field] . identifier[choices]
identifier[infield] . identifier[field] . identifier[widget] . identifier[is_required] = identifier[wrequired]
identifier[infield] . identifier[field] . identifier[widget] . identifier[form_name] = identifier[self] . identifier[form_name]
identifier[infield] . identifier[field] . identifier[widget] . identifier[field_name] = identifier[infield] . identifier[html_name]
keyword[for] ( identifier[attribute] , identifier[default] ) keyword[in] identifier[attributes] :
keyword[if] identifier[attribute] keyword[not] keyword[in] identifier[atr] . identifier[keys] ():
identifier[atr] [ identifier[attribute] ]= identifier[default]
keyword[if] identifier[atr] [ literal[string] ] keyword[is] keyword[True] :
identifier[atr] [ literal[string] ]= identifier[infield] . identifier[label]
identifier[flang] = identifier[getattr] ( identifier[infield] . identifier[field] , literal[string] , keyword[None] )
keyword[if] identifier[flang] :
identifier[flang] ( identifier[self] . identifier[__language] )
identifier[flang] = identifier[getattr] ( identifier[infield] . identifier[field] . identifier[widget] , literal[string] , keyword[None] )
keyword[if] identifier[flang] :
identifier[flang] ( identifier[self] . identifier[__language] )
identifier[fields] . identifier[append] ( identifier[atr] )
keyword[if] identifier[fields] :
identifier[groups] . identifier[append] ({ literal[string] : keyword[None] , literal[string] : literal[int] , literal[string] : identifier[fields] })
keyword[if] identifier[focus_must] :
identifier[focus_must] [ literal[string] ]= keyword[True]
keyword[elif] identifier[focus_first] keyword[is] keyword[not] keyword[None] :
identifier[focus_first] [ literal[string] ]= keyword[True]
keyword[return] identifier[groups] | def get_groups(self, gs=None, processed=[], initial=True):
"""
<--------------------------------------- 12 columns ------------------------------------>
<--- 6 columns ---> <--- 6 columns --->
------------------------------------------ ------------------------------------------
| Info | | Personal |
|==========================================| |==========================================|
| ----------------- ------------------ | | |
| | Passport | | Name | | | Phone Zipcode |
| |=================| | [.....] [.....] | | | [...........................] [.......] |
| | CID Country | | <- 6 -> <- 6 -> | | | <--- 8 columns ---> <-4 col-> |
| | [.....] [.....] | | | | | |
| | <- 6 -> <- 6 -> | ----------------- | | Address |
| ----------------- | | [.....................................] |
------------------------------------------ | <--- 12 columns ---> |
| [..] number |
| <--- 12 columns ---> |
| |
------------------------------------------
group = [
(_('Info'),(6,'#8a6d3b','#fcf8e3','center'),
(_('Identification'),6,
["cid",6],
["country",6],
),
(None,6,
["name",None,6],
["surname",None,6,False],
),
),
(_('Personal'),6,
["phone",None,8],
["zipcode",None,4],
["address",None,12],
["number",None,12, True],
),
]
Group: it is defined as tuple with 3 or more elements:
Grammar: (<Name>, <Attributes>, <Element1>, <Element2>, ..., <ElementN>)
If <Name> is None: no name will be given to the group and no panel decoration will be shown
If <Size in columns> is None: default of 6 will be used
<Attributes>:
it can be an integer that represent the size in columns
it can be a tuple with several attributes where each element represents:
(<Size in columns>,'#<Font color>','#<Background color>','<Alignment>')
<Element>:
it can be a Group
it can be a Field
Examples:
('Info', 6, ["name",6], ["surname",6]) -> Info panel using 6 columns with 2 boxes 6 columns for each with name and surname inputs
('Info', (6,None,'#fcf8e3','center'), ["name",6], ["surname",6]) -> Info panel using 6 columns with a yellow brackground in centered title, 2 boxes, 6 columns for each with name and surname inputs
('Info', 12, ('Name', 6, ["name",12]), ('Surname',6, ["surname",12])) -> Info panel using 12 columns with 2 panels inside
of 6 columns each named "Name" and "Surname" and inside each of them an input "name" and "surname" where it belongs.
Field: must be a list with at least 1 element in it:
Grammar: [<Name of field>, <Size in columns>, <Label>]
<Name of field>:
This must be filled always
It is the input's name inside the form
Must exists as a form element or as a grouped form element
<Size in columns>:
Size of the input in columns
If it is not defined or if it is defined as None: default of 6 will be used
<Label>:
It it is defined as False: the label for this field will not be shown
If it is not defined or if it is defined as None: default of True will be used (default input's label will be shown)
If it is a string: this string will be shown as a label
Examples:
['age'] Input 'age' will be shown with 6 columns and its default label
['age',8] Input 'age' will be shown with 8 columns and its default label
['age', None, False] Input 'age' will be shown with 6 columns and NO LABEL
['age',8,False] Input 'age' will be shown with 8 columns and NO LABEL
['age',8,_("Age in days")] Input 'age' will be shown with 8 columns and translated label text "Age in days" to user's language
['age',8,_("Age in days"), True] Input 'age' will be shown with 8 columns and translated label text "Age in days" to user's language, and input inline with label
['age',6, None, None, None, None, None, ["ng-click=functionjs('param1')", "ng-change=functionjs2()"]] Input 'age' with extras functions
['age',None,None,None,None, 'filter'] Input 'age' with extras filter ONLY DETAILS
['age',6, {'color': 'red'} Input 'age' will be shown with red title
"""
# Check if language is set
if not self.__language:
raise IOError('ERROR: No language suplied!') # depends on [control=['if'], data=[]]
# Initialize the list
if initial:
processed = [] # depends on [control=['if'], data=[]]
# Where to look for fields
if 'list_fields' in dir(self):
list_fields = self.list_fields
check_system = 'html_name' # depends on [control=['if'], data=[]]
else:
list_fields = self
check_system = 'name'
# Default attributes for fields
# input in line with label
attributes = [('columns', 6), ('color', None), ('bgcolor', None), ('textalign', None), ('inline', False), ('label', True), ('extra', None), ('extra_div', None), ('foreign_info', {})]
labels = [x[0] for x in attributes]
# Get groups if none was given
if gs is None:
gs = self.__groups__() # depends on [control=['if'], data=['gs']]
# Prepare the answer
groups = []
# Prepare focus control
focus_first = None
focus_must = None
# html helper for groups and fields
html_helper = self.html_helper()
# Start processing
for g in gs:
token = {}
token['name'] = g[0]
if token['name'] in html_helper:
if 'pre' in html_helper[token['name']]:
token['html_helper_pre'] = html_helper[token['name']]['pre'] # depends on [control=['if'], data=[]]
if 'post' in html_helper[token['name']]:
token['html_helper_post'] = html_helper[token['name']]['post'] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['html_helper']]
styles = g[1]
if type(styles) is tuple:
if len(styles) >= 1:
token['columns'] = g[1][0] # depends on [control=['if'], data=[]]
if len(styles) >= 2:
token['color'] = g[1][1] # depends on [control=['if'], data=[]]
if len(styles) >= 3:
token['bgcolor'] = g[1][2] # depends on [control=['if'], data=[]]
if len(styles) >= 4:
token['textalign'] = g[1][3] # depends on [control=['if'], data=[]]
if len(styles) >= 5:
token['inline'] = g[1][4] # depends on [control=['if'], data=[]]
if len(styles) >= 7:
token['extra'] = g[1][5] # depends on [control=['if'], data=[]]
if len(styles) >= 8:
token['extra_div'] = g[1][6] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
else:
token['columns'] = g[1]
fs = g[2:]
fields = []
for f in fs:
# Field
atr = {}
# Decide weather this is a Group or not
if type(f) == tuple:
# Recursive
fields += self.get_groups([list(f)], processed, False) # depends on [control=['if'], data=[]]
else:
try:
list_type = [str, unicode] # depends on [control=['try'], data=[]]
except NameError:
list_type = [str] # depends on [control=['except'], data=[]]
# Check if it is a list
if type(f) == list:
# This is a field with attributes, get the name
field = f[0]
if html_helper and token['name'] in html_helper and ('items' in html_helper[token['name']]) and (field in html_helper[token['name']]['items']):
if 'pre' in html_helper[token['name']]['items'][field]:
atr['html_helper_pre'] = html_helper[token['name']]['items'][field]['pre'] # depends on [control=['if'], data=[]]
if 'post' in html_helper[token['name']]['items'][field]:
atr['html_helper_post'] = html_helper[token['name']]['items'][field]['post'] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
# Process each attribute (if any)
dictionary = False
for (idx, element) in enumerate(f[1:]):
if type(element) == dict:
dictionary = True
for key in element.keys():
if key in labels:
atr[key] = element[key] # depends on [control=['if'], data=['key']]
else:
raise IOError("Unknown attribute '{0}' as field '{1}' in list of fields".format(key, field)) # depends on [control=['for'], data=['key']] # depends on [control=['if'], data=[]]
elif not dictionary:
if element is not None:
atr[attributes[idx][0]] = element # depends on [control=['if'], data=['element']] # depends on [control=['if'], data=[]]
else:
raise IOError('We already processed a dicionary element in this list of fields, you can not add anoother type of elements to it, you must keep going with dictionaries') # depends on [control=['for'], data=[]] # depends on [control=['if'], data=[]]
elif type(f) in list_type:
field = f # depends on [control=['if'], data=[]]
else:
raise IOError("Uknown element type '{0}' inside group '{1}'".format(type(f), token['name']))
# Get the Django Field object
found = None
for infield in list_fields:
if infield.__dict__[check_system] == field:
found = infield
break # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['infield']]
if found:
# Get attributes (required and original attributes)
wrequired = found.field.widget.is_required
wattrs = found.field.widget.attrs
# Fill base attributes
atr['name'] = found.html_name
atr['input'] = found
atr['focus'] = False
# Set focus
if focus_must is None:
if focus_first is None:
focus_first = atr # depends on [control=['if'], data=['focus_first']]
if wrequired:
focus_must = atr # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['focus_must']]
# Autocomplete
if 'autofill' in dir(self.Meta):
autofill = self.Meta.autofill.get(found.html_name, None)
atr['autofill'] = autofill
if autofill:
# Check format of the request
autokind = autofill[0]
if type(autokind) == str:
# Using new format
if autokind == 'select':
# If autofill is True for this field set the DynamicSelect widget
found.field.widget = DynamicSelect(wattrs) # depends on [control=['if'], data=[]]
elif autokind == 'multiselect':
# If autofill is True for this field set the DynamicSelect widget
found.field.widget = MultiDynamicSelect(wattrs) # depends on [control=['if'], data=[]]
elif autokind == 'input':
# If autofill is True for this field set the DynamicSelect widget
found.field.widget = DynamicInput(wattrs) # depends on [control=['if'], data=[]]
else:
raise IOError("Autofill filled using new format but autokind is '{}' and I only know 'input' or 'select'".format(autokind))
# Configure the field
found.field.widget.is_required = wrequired
found.field.widget.form_name = self.form_name
found.field.widget.field_name = infield.html_name
found.field.widget.autofill_deepness = autofill[1]
found.field.widget.autofill_url = autofill[2]
found.field.widget.autofill = autofill[3:] # depends on [control=['if'], data=[]]
else:
# Get old information [COMPATIBILITY WITH OLD VERSION]
# If autofill is True for this field set the DynamicSelect widget
found.field.widget = DynamicSelect(wattrs)
found.field.widget.is_required = wrequired
found.field.widget.form_name = self.form_name
found.field.widget.field_name = infield.html_name
found.field.widget.autofill_deepness = autofill[0]
found.field.widget.autofill_url = autofill[1]
found.field.widget.autofill = autofill[2:] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
else:
# Set we don't have autofill for this field
atr['autofill'] = None
# Check if we have to replace the widget with a newer one
if isinstance(found.field.widget, Select) and (not isinstance(found.field.widget, DynamicSelect)):
if not isinstance(found.field.widget, MultiStaticSelect):
found.field.widget = StaticSelect(wattrs) # depends on [control=['if'], data=[]]
found.field.widget.choices = found.field.choices
found.field.widget.is_required = wrequired
found.field.widget.form_name = self.form_name
found.field.widget.field_name = infield.html_name # depends on [control=['if'], data=[]]
# Fill all attributes
for (attribute, default) in attributes:
if attribute not in atr.keys():
atr[attribute] = default # depends on [control=['if'], data=['attribute']] # depends on [control=['for'], data=[]]
# Fill label
if atr['label'] is True:
atr['label'] = found.label # depends on [control=['if'], data=[]]
# Set language
flang = getattr(found.field, 'set_language', None)
if flang:
flang(self.__language) # depends on [control=['if'], data=[]]
flang = getattr(found.field.widget, 'set_language', None)
if flang:
flang(self.__language) # depends on [control=['if'], data=[]]
# Attach the element
fields.append(atr)
# Remember we have processed it
processed.append(found.__dict__[check_system]) # depends on [control=['if'], data=[]]
else:
raise IOError("Unknown field '{0}' specified in group '{1}'".format(f, token['name'])) # depends on [control=['for'], data=['f']]
token['fields'] = fields
groups.append(token) # depends on [control=['for'], data=['g']]
# Add the rest of attributes we didn't use yet
if initial:
fields = []
for infield in list_fields:
if infield.__dict__[check_system] not in processed:
# Get attributes (required and original attributes)
wattrs = infield.field.widget.attrs
wrequired = infield.field.widget.is_required
# Prepare attr
atr = {}
# Fill base attributes
atr['name'] = infield.html_name
atr['input'] = infield
atr['focus'] = False
# Set focus
if focus_must is None:
if focus_first is None:
focus_first = atr # depends on [control=['if'], data=['focus_first']]
if wrequired:
focus_must = atr # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['focus_must']]
# Autocomplete
if 'autofill' in dir(self.Meta):
autofill = self.Meta.autofill.get(infield.html_name, None)
atr['autofill'] = autofill
if autofill:
# Check format of the request
autokind = autofill[0]
if type(autokind) == str:
# Get old information
# Using new format
if autokind == 'select':
# If autofill is True for this field set the DynamicSelect widget
infield.field.widget = DynamicSelect(wattrs) # depends on [control=['if'], data=[]]
elif autokind == 'multiselect':
# If autofill is True for this field set the DynamicSelect widget
infield.field.widget = MultiDynamicSelect(wattrs) # depends on [control=['if'], data=[]]
elif autokind == 'input':
# If autofill is True for this field set the DynamicSelect widget
infield.field.widget = DynamicInput(wattrs) # depends on [control=['if'], data=[]]
else:
raise IOError("Autofill filled using new format but autokind is '{}' and I only know 'input' or 'select'".format(autokind))
# Configure the field
infield.field.widget.is_required = wrequired
infield.field.widget.form_name = self.form_name
infield.field.widget.field_name = infield.html_name
infield.field.widget.autofill_deepness = autofill[1]
infield.field.widget.autofill_url = autofill[2]
infield.field.widget.autofill = autofill[3:] # depends on [control=['if'], data=[]]
else:
# Get old information [COMPATIBILITY WITH OLD VERSION]
# If autofill is True for this field set the DynamicSelect widget
infield.field.widget = DynamicSelect(wattrs)
infield.field.widget.is_required = wrequired
infield.field.widget.form_name = self.form_name
infield.field.widget.field_name = infield.html_name
infield.field.widget.autofill_deepness = autofill[0]
infield.field.widget.autofill_url = autofill[1]
infield.field.widget.autofill = autofill[2:] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
else:
# Set we don't have autofill for this field
atr['autofill'] = None
# Check if we have to replace the widget with a newer one
if isinstance(infield.field.widget, Select) and (not isinstance(infield.field.widget, DynamicSelect)):
if isinstance(infield.field, NullBooleanField):
infield.field.widget = CheckboxInput(wattrs) # depends on [control=['if'], data=[]]
elif not isinstance(infield.field.widget, MultiStaticSelect):
infield.field.widget = StaticSelect(wattrs) # depends on [control=['if'], data=[]]
if hasattr(infield.field.widget, 'choices') and hasattr(infield.field, 'choices'):
infield.field.widget.choices = infield.field.choices # depends on [control=['if'], data=[]]
infield.field.widget.is_required = wrequired
infield.field.widget.form_name = self.form_name
infield.field.widget.field_name = infield.html_name # depends on [control=['if'], data=[]]
# Fill all attributes
for (attribute, default) in attributes:
if attribute not in atr.keys():
atr[attribute] = default # depends on [control=['if'], data=['attribute']] # depends on [control=['for'], data=[]]
# Fill label
if atr['label'] is True:
atr['label'] = infield.label # depends on [control=['if'], data=[]]
# Set language
flang = getattr(infield.field, 'set_language', None)
if flang:
flang(self.__language) # depends on [control=['if'], data=[]]
flang = getattr(infield.field.widget, 'set_language', None)
if flang:
flang(self.__language) # depends on [control=['if'], data=[]]
# Attach the attribute
fields.append(atr) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['infield']]
# Save the new elements
if fields:
groups.append({'name': None, 'columns': 12, 'fields': fields}) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
# Set focus
if focus_must:
focus_must['focus'] = True # depends on [control=['if'], data=[]]
elif focus_first is not None:
focus_first['focus'] = True # depends on [control=['if'], data=['focus_first']]
# Return the resulting groups
return groups |
def _allocateSpatialFDR(self, rfInput):
"""Allocate the spatial pooler instance."""
if self._sfdr:
return
# Retrieve the necessary extra arguments that were handled automatically
autoArgs = dict((name, getattr(self, name))
for name in self._spatialArgNames)
# Instantiate the spatial pooler class.
if ( (self.SpatialClass == CPPSpatialPooler) or
(self.SpatialClass == PYSpatialPooler) ):
autoArgs['columnDimensions'] = [self.columnCount]
autoArgs['inputDimensions'] = [self.inputWidth]
autoArgs['potentialRadius'] = self.inputWidth
self._sfdr = self.SpatialClass(
**autoArgs
) | def function[_allocateSpatialFDR, parameter[self, rfInput]]:
constant[Allocate the spatial pooler instance.]
if name[self]._sfdr begin[:]
return[None]
variable[autoArgs] assign[=] call[name[dict], parameter[<ast.GeneratorExp object at 0x7da20c6c6800>]]
if <ast.BoolOp object at 0x7da20c6c50f0> begin[:]
call[name[autoArgs]][constant[columnDimensions]] assign[=] list[[<ast.Attribute object at 0x7da20c6c60e0>]]
call[name[autoArgs]][constant[inputDimensions]] assign[=] list[[<ast.Attribute object at 0x7da20c6c4580>]]
call[name[autoArgs]][constant[potentialRadius]] assign[=] name[self].inputWidth
name[self]._sfdr assign[=] call[name[self].SpatialClass, parameter[]] | keyword[def] identifier[_allocateSpatialFDR] ( identifier[self] , identifier[rfInput] ):
literal[string]
keyword[if] identifier[self] . identifier[_sfdr] :
keyword[return]
identifier[autoArgs] = identifier[dict] (( identifier[name] , identifier[getattr] ( identifier[self] , identifier[name] ))
keyword[for] identifier[name] keyword[in] identifier[self] . identifier[_spatialArgNames] )
keyword[if] (( identifier[self] . identifier[SpatialClass] == identifier[CPPSpatialPooler] ) keyword[or]
( identifier[self] . identifier[SpatialClass] == identifier[PYSpatialPooler] )):
identifier[autoArgs] [ literal[string] ]=[ identifier[self] . identifier[columnCount] ]
identifier[autoArgs] [ literal[string] ]=[ identifier[self] . identifier[inputWidth] ]
identifier[autoArgs] [ literal[string] ]= identifier[self] . identifier[inputWidth]
identifier[self] . identifier[_sfdr] = identifier[self] . identifier[SpatialClass] (
** identifier[autoArgs]
) | def _allocateSpatialFDR(self, rfInput):
"""Allocate the spatial pooler instance."""
if self._sfdr:
return # depends on [control=['if'], data=[]]
# Retrieve the necessary extra arguments that were handled automatically
autoArgs = dict(((name, getattr(self, name)) for name in self._spatialArgNames))
# Instantiate the spatial pooler class.
if self.SpatialClass == CPPSpatialPooler or self.SpatialClass == PYSpatialPooler:
autoArgs['columnDimensions'] = [self.columnCount]
autoArgs['inputDimensions'] = [self.inputWidth]
autoArgs['potentialRadius'] = self.inputWidth
self._sfdr = self.SpatialClass(**autoArgs) # depends on [control=['if'], data=[]] |
def _build(
self, inputs, targets, input_sequence_length, output_sequence_length):
"""Dynamic unroll across input objects.
Args:
inputs: tensor (input_sequence_length x batch x feature_size). Encoder
sequence.
targets: tensor (output_sequence_length x batch x feature_size). Decoder
sequence.
input_sequence_length: tensor (batch). Size of each batched input
sequence.
output_sequence_length: tensor (batch). Size of each batched target
sequence.
Returns:
Tensor (batch x num_objects); logits indicating the reference objects.
"""
# Connect decoding steps.
batch_size = inputs.get_shape()[1]
initial_state = self._core.initial_state(batch_size, trainable=False)
_, state = tf.nn.dynamic_rnn(
cell=self._core,
inputs=inputs,
sequence_length=input_sequence_length,
time_major=True,
initial_state=initial_state
)
# Connect decoding steps.
zero_input = tf.zeros(shape=targets.get_shape())
output_sequence, _ = tf.nn.dynamic_rnn(
cell=self._core,
inputs=zero_input, # Non-autoregressive model. Zeroed input.
sequence_length=output_sequence_length,
initial_state=state,
time_major=True)
outputs = snt.BatchApply(self._final_mlp)(output_sequence)
logits = snt.BatchApply(snt.Linear(self._target_size))(outputs)
tf.logging.info("Connected seq2seq model.")
return logits | def function[_build, parameter[self, inputs, targets, input_sequence_length, output_sequence_length]]:
constant[Dynamic unroll across input objects.
Args:
inputs: tensor (input_sequence_length x batch x feature_size). Encoder
sequence.
targets: tensor (output_sequence_length x batch x feature_size). Decoder
sequence.
input_sequence_length: tensor (batch). Size of each batched input
sequence.
output_sequence_length: tensor (batch). Size of each batched target
sequence.
Returns:
Tensor (batch x num_objects); logits indicating the reference objects.
]
variable[batch_size] assign[=] call[call[name[inputs].get_shape, parameter[]]][constant[1]]
variable[initial_state] assign[=] call[name[self]._core.initial_state, parameter[name[batch_size]]]
<ast.Tuple object at 0x7da1b1cad030> assign[=] call[name[tf].nn.dynamic_rnn, parameter[]]
variable[zero_input] assign[=] call[name[tf].zeros, parameter[]]
<ast.Tuple object at 0x7da1b1caf4c0> assign[=] call[name[tf].nn.dynamic_rnn, parameter[]]
variable[outputs] assign[=] call[call[name[snt].BatchApply, parameter[name[self]._final_mlp]], parameter[name[output_sequence]]]
variable[logits] assign[=] call[call[name[snt].BatchApply, parameter[call[name[snt].Linear, parameter[name[self]._target_size]]]], parameter[name[outputs]]]
call[name[tf].logging.info, parameter[constant[Connected seq2seq model.]]]
return[name[logits]] | keyword[def] identifier[_build] (
identifier[self] , identifier[inputs] , identifier[targets] , identifier[input_sequence_length] , identifier[output_sequence_length] ):
literal[string]
identifier[batch_size] = identifier[inputs] . identifier[get_shape] ()[ literal[int] ]
identifier[initial_state] = identifier[self] . identifier[_core] . identifier[initial_state] ( identifier[batch_size] , identifier[trainable] = keyword[False] )
identifier[_] , identifier[state] = identifier[tf] . identifier[nn] . identifier[dynamic_rnn] (
identifier[cell] = identifier[self] . identifier[_core] ,
identifier[inputs] = identifier[inputs] ,
identifier[sequence_length] = identifier[input_sequence_length] ,
identifier[time_major] = keyword[True] ,
identifier[initial_state] = identifier[initial_state]
)
identifier[zero_input] = identifier[tf] . identifier[zeros] ( identifier[shape] = identifier[targets] . identifier[get_shape] ())
identifier[output_sequence] , identifier[_] = identifier[tf] . identifier[nn] . identifier[dynamic_rnn] (
identifier[cell] = identifier[self] . identifier[_core] ,
identifier[inputs] = identifier[zero_input] ,
identifier[sequence_length] = identifier[output_sequence_length] ,
identifier[initial_state] = identifier[state] ,
identifier[time_major] = keyword[True] )
identifier[outputs] = identifier[snt] . identifier[BatchApply] ( identifier[self] . identifier[_final_mlp] )( identifier[output_sequence] )
identifier[logits] = identifier[snt] . identifier[BatchApply] ( identifier[snt] . identifier[Linear] ( identifier[self] . identifier[_target_size] ))( identifier[outputs] )
identifier[tf] . identifier[logging] . identifier[info] ( literal[string] )
keyword[return] identifier[logits] | def _build(self, inputs, targets, input_sequence_length, output_sequence_length):
"""Dynamic unroll across input objects.
Args:
inputs: tensor (input_sequence_length x batch x feature_size). Encoder
sequence.
targets: tensor (output_sequence_length x batch x feature_size). Decoder
sequence.
input_sequence_length: tensor (batch). Size of each batched input
sequence.
output_sequence_length: tensor (batch). Size of each batched target
sequence.
Returns:
Tensor (batch x num_objects); logits indicating the reference objects.
"""
# Connect decoding steps.
batch_size = inputs.get_shape()[1]
initial_state = self._core.initial_state(batch_size, trainable=False)
(_, state) = tf.nn.dynamic_rnn(cell=self._core, inputs=inputs, sequence_length=input_sequence_length, time_major=True, initial_state=initial_state)
# Connect decoding steps.
zero_input = tf.zeros(shape=targets.get_shape()) # Non-autoregressive model. Zeroed input.
(output_sequence, _) = tf.nn.dynamic_rnn(cell=self._core, inputs=zero_input, sequence_length=output_sequence_length, initial_state=state, time_major=True)
outputs = snt.BatchApply(self._final_mlp)(output_sequence)
logits = snt.BatchApply(snt.Linear(self._target_size))(outputs)
tf.logging.info('Connected seq2seq model.')
return logits |
def default_if_none(default=NOTHING, factory=None):
"""
A converter that allows to replace ``None`` values by *default* or the
result of *factory*.
:param default: Value to be used if ``None`` is passed. Passing an instance
of :class:`attr.Factory` is supported, however the ``takes_self`` option
is *not*.
:param callable factory: A callable that takes not parameters whose result
is used if ``None`` is passed.
:raises TypeError: If **neither** *default* or *factory* is passed.
:raises TypeError: If **both** *default* and *factory* are passed.
:raises ValueError: If an instance of :class:`attr.Factory` is passed with
``takes_self=True``.
.. versionadded:: 18.2.0
"""
if default is NOTHING and factory is None:
raise TypeError("Must pass either `default` or `factory`.")
if default is not NOTHING and factory is not None:
raise TypeError(
"Must pass either `default` or `factory` but not both."
)
if factory is not None:
default = Factory(factory)
if isinstance(default, Factory):
if default.takes_self:
raise ValueError(
"`takes_self` is not supported by default_if_none."
)
def default_if_none_converter(val):
if val is not None:
return val
return default.factory()
else:
def default_if_none_converter(val):
if val is not None:
return val
return default
return default_if_none_converter | def function[default_if_none, parameter[default, factory]]:
constant[
A converter that allows to replace ``None`` values by *default* or the
result of *factory*.
:param default: Value to be used if ``None`` is passed. Passing an instance
of :class:`attr.Factory` is supported, however the ``takes_self`` option
is *not*.
:param callable factory: A callable that takes not parameters whose result
is used if ``None`` is passed.
:raises TypeError: If **neither** *default* or *factory* is passed.
:raises TypeError: If **both** *default* and *factory* are passed.
:raises ValueError: If an instance of :class:`attr.Factory` is passed with
``takes_self=True``.
.. versionadded:: 18.2.0
]
if <ast.BoolOp object at 0x7da2041d97b0> begin[:]
<ast.Raise object at 0x7da2041d8d60>
if <ast.BoolOp object at 0x7da2041dbfd0> begin[:]
<ast.Raise object at 0x7da2041dab90>
if compare[name[factory] is_not constant[None]] begin[:]
variable[default] assign[=] call[name[Factory], parameter[name[factory]]]
if call[name[isinstance], parameter[name[default], name[Factory]]] begin[:]
if name[default].takes_self begin[:]
<ast.Raise object at 0x7da2041d8af0>
def function[default_if_none_converter, parameter[val]]:
if compare[name[val] is_not constant[None]] begin[:]
return[name[val]]
return[call[name[default].factory, parameter[]]]
return[name[default_if_none_converter]] | keyword[def] identifier[default_if_none] ( identifier[default] = identifier[NOTHING] , identifier[factory] = keyword[None] ):
literal[string]
keyword[if] identifier[default] keyword[is] identifier[NOTHING] keyword[and] identifier[factory] keyword[is] keyword[None] :
keyword[raise] identifier[TypeError] ( literal[string] )
keyword[if] identifier[default] keyword[is] keyword[not] identifier[NOTHING] keyword[and] identifier[factory] keyword[is] keyword[not] keyword[None] :
keyword[raise] identifier[TypeError] (
literal[string]
)
keyword[if] identifier[factory] keyword[is] keyword[not] keyword[None] :
identifier[default] = identifier[Factory] ( identifier[factory] )
keyword[if] identifier[isinstance] ( identifier[default] , identifier[Factory] ):
keyword[if] identifier[default] . identifier[takes_self] :
keyword[raise] identifier[ValueError] (
literal[string]
)
keyword[def] identifier[default_if_none_converter] ( identifier[val] ):
keyword[if] identifier[val] keyword[is] keyword[not] keyword[None] :
keyword[return] identifier[val]
keyword[return] identifier[default] . identifier[factory] ()
keyword[else] :
keyword[def] identifier[default_if_none_converter] ( identifier[val] ):
keyword[if] identifier[val] keyword[is] keyword[not] keyword[None] :
keyword[return] identifier[val]
keyword[return] identifier[default]
keyword[return] identifier[default_if_none_converter] | def default_if_none(default=NOTHING, factory=None):
"""
A converter that allows to replace ``None`` values by *default* or the
result of *factory*.
:param default: Value to be used if ``None`` is passed. Passing an instance
of :class:`attr.Factory` is supported, however the ``takes_self`` option
is *not*.
:param callable factory: A callable that takes not parameters whose result
is used if ``None`` is passed.
:raises TypeError: If **neither** *default* or *factory* is passed.
:raises TypeError: If **both** *default* and *factory* are passed.
:raises ValueError: If an instance of :class:`attr.Factory` is passed with
``takes_self=True``.
.. versionadded:: 18.2.0
"""
if default is NOTHING and factory is None:
raise TypeError('Must pass either `default` or `factory`.') # depends on [control=['if'], data=[]]
if default is not NOTHING and factory is not None:
raise TypeError('Must pass either `default` or `factory` but not both.') # depends on [control=['if'], data=[]]
if factory is not None:
default = Factory(factory) # depends on [control=['if'], data=['factory']]
if isinstance(default, Factory):
if default.takes_self:
raise ValueError('`takes_self` is not supported by default_if_none.') # depends on [control=['if'], data=[]]
def default_if_none_converter(val):
if val is not None:
return val # depends on [control=['if'], data=['val']]
return default.factory() # depends on [control=['if'], data=[]]
else:
def default_if_none_converter(val):
if val is not None:
return val # depends on [control=['if'], data=['val']]
return default
return default_if_none_converter |
def send_email(name, ctx_dict, send_to=None, subject=u'Subject', **kwargs):
"""
Shortcut function for EmailFromTemplate class
@return: None
"""
eft = EmailFromTemplate(name=name)
eft.subject = subject
eft.context = ctx_dict
eft.get_object()
eft.render_message()
eft.send_email(send_to=send_to, **kwargs) | def function[send_email, parameter[name, ctx_dict, send_to, subject]]:
constant[
Shortcut function for EmailFromTemplate class
@return: None
]
variable[eft] assign[=] call[name[EmailFromTemplate], parameter[]]
name[eft].subject assign[=] name[subject]
name[eft].context assign[=] name[ctx_dict]
call[name[eft].get_object, parameter[]]
call[name[eft].render_message, parameter[]]
call[name[eft].send_email, parameter[]] | keyword[def] identifier[send_email] ( identifier[name] , identifier[ctx_dict] , identifier[send_to] = keyword[None] , identifier[subject] = literal[string] ,** identifier[kwargs] ):
literal[string]
identifier[eft] = identifier[EmailFromTemplate] ( identifier[name] = identifier[name] )
identifier[eft] . identifier[subject] = identifier[subject]
identifier[eft] . identifier[context] = identifier[ctx_dict]
identifier[eft] . identifier[get_object] ()
identifier[eft] . identifier[render_message] ()
identifier[eft] . identifier[send_email] ( identifier[send_to] = identifier[send_to] ,** identifier[kwargs] ) | def send_email(name, ctx_dict, send_to=None, subject=u'Subject', **kwargs):
"""
Shortcut function for EmailFromTemplate class
@return: None
"""
eft = EmailFromTemplate(name=name)
eft.subject = subject
eft.context = ctx_dict
eft.get_object()
eft.render_message()
eft.send_email(send_to=send_to, **kwargs) |
def touch_object(self, objects: Set[Object]) -> Set[Object]:
"""
Returns all objects that touch the given set of objects.
"""
objects_per_box = self._separate_objects_by_boxes(objects)
return_set = set()
for box, box_objects in objects_per_box.items():
candidate_objects = box.objects
for object_ in box_objects:
for candidate_object in candidate_objects:
if self._objects_touch_each_other(object_, candidate_object):
return_set.add(candidate_object)
return return_set | def function[touch_object, parameter[self, objects]]:
constant[
Returns all objects that touch the given set of objects.
]
variable[objects_per_box] assign[=] call[name[self]._separate_objects_by_boxes, parameter[name[objects]]]
variable[return_set] assign[=] call[name[set], parameter[]]
for taget[tuple[[<ast.Name object at 0x7da2044c0ee0>, <ast.Name object at 0x7da2044c2d70>]]] in starred[call[name[objects_per_box].items, parameter[]]] begin[:]
variable[candidate_objects] assign[=] name[box].objects
for taget[name[object_]] in starred[name[box_objects]] begin[:]
for taget[name[candidate_object]] in starred[name[candidate_objects]] begin[:]
if call[name[self]._objects_touch_each_other, parameter[name[object_], name[candidate_object]]] begin[:]
call[name[return_set].add, parameter[name[candidate_object]]]
return[name[return_set]] | keyword[def] identifier[touch_object] ( identifier[self] , identifier[objects] : identifier[Set] [ identifier[Object] ])-> identifier[Set] [ identifier[Object] ]:
literal[string]
identifier[objects_per_box] = identifier[self] . identifier[_separate_objects_by_boxes] ( identifier[objects] )
identifier[return_set] = identifier[set] ()
keyword[for] identifier[box] , identifier[box_objects] keyword[in] identifier[objects_per_box] . identifier[items] ():
identifier[candidate_objects] = identifier[box] . identifier[objects]
keyword[for] identifier[object_] keyword[in] identifier[box_objects] :
keyword[for] identifier[candidate_object] keyword[in] identifier[candidate_objects] :
keyword[if] identifier[self] . identifier[_objects_touch_each_other] ( identifier[object_] , identifier[candidate_object] ):
identifier[return_set] . identifier[add] ( identifier[candidate_object] )
keyword[return] identifier[return_set] | def touch_object(self, objects: Set[Object]) -> Set[Object]:
"""
Returns all objects that touch the given set of objects.
"""
objects_per_box = self._separate_objects_by_boxes(objects)
return_set = set()
for (box, box_objects) in objects_per_box.items():
candidate_objects = box.objects
for object_ in box_objects:
for candidate_object in candidate_objects:
if self._objects_touch_each_other(object_, candidate_object):
return_set.add(candidate_object) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['candidate_object']] # depends on [control=['for'], data=['object_']] # depends on [control=['for'], data=[]]
return return_set |
async def upload_file(self, data: bytes, mime_type: Optional[str] = None) -> str:
"""
Upload a file to the content repository. See also: `API reference`_
Args:
data: The data to upload.
mime_type: The MIME type to send with the upload request.
Returns:
The MXC URI to the uploaded file.
Raises:
MatrixResponseError: If the response does not contain a ``content_uri`` field.
.. _API reference:
https://matrix.org/docs/spec/client_server/r0.3.0.html#post-matrix-media-r0-upload
"""
await self.ensure_registered()
if magic:
mime_type = mime_type or magic.from_buffer(data, mime=True)
resp = await self.client.request("POST", "", content=data,
headers={"Content-Type": mime_type},
api_path="/_matrix/media/r0/upload")
try:
return resp["content_uri"]
except KeyError:
raise MatrixResponseError("Media repo upload response did not contain content_uri.") | <ast.AsyncFunctionDef object at 0x7da18dc06bc0> | keyword[async] keyword[def] identifier[upload_file] ( identifier[self] , identifier[data] : identifier[bytes] , identifier[mime_type] : identifier[Optional] [ identifier[str] ]= keyword[None] )-> identifier[str] :
literal[string]
keyword[await] identifier[self] . identifier[ensure_registered] ()
keyword[if] identifier[magic] :
identifier[mime_type] = identifier[mime_type] keyword[or] identifier[magic] . identifier[from_buffer] ( identifier[data] , identifier[mime] = keyword[True] )
identifier[resp] = keyword[await] identifier[self] . identifier[client] . identifier[request] ( literal[string] , literal[string] , identifier[content] = identifier[data] ,
identifier[headers] ={ literal[string] : identifier[mime_type] },
identifier[api_path] = literal[string] )
keyword[try] :
keyword[return] identifier[resp] [ literal[string] ]
keyword[except] identifier[KeyError] :
keyword[raise] identifier[MatrixResponseError] ( literal[string] ) | async def upload_file(self, data: bytes, mime_type: Optional[str]=None) -> str:
"""
Upload a file to the content repository. See also: `API reference`_
Args:
data: The data to upload.
mime_type: The MIME type to send with the upload request.
Returns:
The MXC URI to the uploaded file.
Raises:
MatrixResponseError: If the response does not contain a ``content_uri`` field.
.. _API reference:
https://matrix.org/docs/spec/client_server/r0.3.0.html#post-matrix-media-r0-upload
"""
await self.ensure_registered()
if magic:
mime_type = mime_type or magic.from_buffer(data, mime=True) # depends on [control=['if'], data=[]]
resp = await self.client.request('POST', '', content=data, headers={'Content-Type': mime_type}, api_path='/_matrix/media/r0/upload')
try:
return resp['content_uri'] # depends on [control=['try'], data=[]]
except KeyError:
raise MatrixResponseError('Media repo upload response did not contain content_uri.') # depends on [control=['except'], data=[]] |
def write(filename, samples, write_params=None, static_args=None):
"""Writes the injection samples to the given xml.
Parameters
----------
filename : str
The name of the file to write to.
samples : io.FieldArray
FieldArray of parameters.
write_params : list, optional
Only write the given parameter names. All given names must be keys
in ``samples``. Default is to write all parameters in ``samples``.
static_args : dict, optional
Dictionary mapping static parameter names to values. These are
written to the ``attrs``.
"""
xmldoc = ligolw.Document()
xmldoc.appendChild(ligolw.LIGO_LW())
simtable = lsctables.New(lsctables.SimInspiralTable)
xmldoc.childNodes[0].appendChild(simtable)
if static_args is None:
static_args = {}
if write_params is None:
write_params = samples.fieldnames
for ii in range(samples.size):
sim = lsctables.SimInspiral()
# initialize all elements to None
for col in sim.__slots__:
setattr(sim, col, None)
for field in write_params:
data = samples[ii][field]
set_sim_data(sim, field, data)
# set any static args
for (field, value) in static_args.items():
set_sim_data(sim, field, value)
simtable.append(sim)
ligolw_utils.write_filename(xmldoc, filename,
gz=filename.endswith('gz')) | def function[write, parameter[filename, samples, write_params, static_args]]:
constant[Writes the injection samples to the given xml.
Parameters
----------
filename : str
The name of the file to write to.
samples : io.FieldArray
FieldArray of parameters.
write_params : list, optional
Only write the given parameter names. All given names must be keys
in ``samples``. Default is to write all parameters in ``samples``.
static_args : dict, optional
Dictionary mapping static parameter names to values. These are
written to the ``attrs``.
]
variable[xmldoc] assign[=] call[name[ligolw].Document, parameter[]]
call[name[xmldoc].appendChild, parameter[call[name[ligolw].LIGO_LW, parameter[]]]]
variable[simtable] assign[=] call[name[lsctables].New, parameter[name[lsctables].SimInspiralTable]]
call[call[name[xmldoc].childNodes][constant[0]].appendChild, parameter[name[simtable]]]
if compare[name[static_args] is constant[None]] begin[:]
variable[static_args] assign[=] dictionary[[], []]
if compare[name[write_params] is constant[None]] begin[:]
variable[write_params] assign[=] name[samples].fieldnames
for taget[name[ii]] in starred[call[name[range], parameter[name[samples].size]]] begin[:]
variable[sim] assign[=] call[name[lsctables].SimInspiral, parameter[]]
for taget[name[col]] in starred[name[sim].__slots__] begin[:]
call[name[setattr], parameter[name[sim], name[col], constant[None]]]
for taget[name[field]] in starred[name[write_params]] begin[:]
variable[data] assign[=] call[call[name[samples]][name[ii]]][name[field]]
call[name[set_sim_data], parameter[name[sim], name[field], name[data]]]
for taget[tuple[[<ast.Name object at 0x7da20cabd6c0>, <ast.Name object at 0x7da20cabc820>]]] in starred[call[name[static_args].items, parameter[]]] begin[:]
call[name[set_sim_data], parameter[name[sim], name[field], name[value]]]
call[name[simtable].append, parameter[name[sim]]]
call[name[ligolw_utils].write_filename, parameter[name[xmldoc], name[filename]]] | keyword[def] identifier[write] ( identifier[filename] , identifier[samples] , identifier[write_params] = keyword[None] , identifier[static_args] = keyword[None] ):
literal[string]
identifier[xmldoc] = identifier[ligolw] . identifier[Document] ()
identifier[xmldoc] . identifier[appendChild] ( identifier[ligolw] . identifier[LIGO_LW] ())
identifier[simtable] = identifier[lsctables] . identifier[New] ( identifier[lsctables] . identifier[SimInspiralTable] )
identifier[xmldoc] . identifier[childNodes] [ literal[int] ]. identifier[appendChild] ( identifier[simtable] )
keyword[if] identifier[static_args] keyword[is] keyword[None] :
identifier[static_args] ={}
keyword[if] identifier[write_params] keyword[is] keyword[None] :
identifier[write_params] = identifier[samples] . identifier[fieldnames]
keyword[for] identifier[ii] keyword[in] identifier[range] ( identifier[samples] . identifier[size] ):
identifier[sim] = identifier[lsctables] . identifier[SimInspiral] ()
keyword[for] identifier[col] keyword[in] identifier[sim] . identifier[__slots__] :
identifier[setattr] ( identifier[sim] , identifier[col] , keyword[None] )
keyword[for] identifier[field] keyword[in] identifier[write_params] :
identifier[data] = identifier[samples] [ identifier[ii] ][ identifier[field] ]
identifier[set_sim_data] ( identifier[sim] , identifier[field] , identifier[data] )
keyword[for] ( identifier[field] , identifier[value] ) keyword[in] identifier[static_args] . identifier[items] ():
identifier[set_sim_data] ( identifier[sim] , identifier[field] , identifier[value] )
identifier[simtable] . identifier[append] ( identifier[sim] )
identifier[ligolw_utils] . identifier[write_filename] ( identifier[xmldoc] , identifier[filename] ,
identifier[gz] = identifier[filename] . identifier[endswith] ( literal[string] )) | def write(filename, samples, write_params=None, static_args=None):
"""Writes the injection samples to the given xml.
Parameters
----------
filename : str
The name of the file to write to.
samples : io.FieldArray
FieldArray of parameters.
write_params : list, optional
Only write the given parameter names. All given names must be keys
in ``samples``. Default is to write all parameters in ``samples``.
static_args : dict, optional
Dictionary mapping static parameter names to values. These are
written to the ``attrs``.
"""
xmldoc = ligolw.Document()
xmldoc.appendChild(ligolw.LIGO_LW())
simtable = lsctables.New(lsctables.SimInspiralTable)
xmldoc.childNodes[0].appendChild(simtable)
if static_args is None:
static_args = {} # depends on [control=['if'], data=['static_args']]
if write_params is None:
write_params = samples.fieldnames # depends on [control=['if'], data=['write_params']]
for ii in range(samples.size):
sim = lsctables.SimInspiral()
# initialize all elements to None
for col in sim.__slots__:
setattr(sim, col, None) # depends on [control=['for'], data=['col']]
for field in write_params:
data = samples[ii][field]
set_sim_data(sim, field, data) # depends on [control=['for'], data=['field']]
# set any static args
for (field, value) in static_args.items():
set_sim_data(sim, field, value) # depends on [control=['for'], data=[]]
simtable.append(sim) # depends on [control=['for'], data=['ii']]
ligolw_utils.write_filename(xmldoc, filename, gz=filename.endswith('gz')) |
def create_address(self, account_id, **params):
"""https://developers.coinbase.com/api/v2#create-address"""
response = self._post('v2', 'accounts', account_id, 'addresses', data=params)
return self._make_api_object(response, Address) | def function[create_address, parameter[self, account_id]]:
constant[https://developers.coinbase.com/api/v2#create-address]
variable[response] assign[=] call[name[self]._post, parameter[constant[v2], constant[accounts], name[account_id], constant[addresses]]]
return[call[name[self]._make_api_object, parameter[name[response], name[Address]]]] | keyword[def] identifier[create_address] ( identifier[self] , identifier[account_id] ,** identifier[params] ):
literal[string]
identifier[response] = identifier[self] . identifier[_post] ( literal[string] , literal[string] , identifier[account_id] , literal[string] , identifier[data] = identifier[params] )
keyword[return] identifier[self] . identifier[_make_api_object] ( identifier[response] , identifier[Address] ) | def create_address(self, account_id, **params):
"""https://developers.coinbase.com/api/v2#create-address"""
response = self._post('v2', 'accounts', account_id, 'addresses', data=params)
return self._make_api_object(response, Address) |
def sinter(self, keys, *args):
"""Emulate sinter."""
func = lambda left, right: left.intersection(right)
return self._apply_to_sets(func, "SINTER", keys, *args) | def function[sinter, parameter[self, keys]]:
constant[Emulate sinter.]
variable[func] assign[=] <ast.Lambda object at 0x7da2054a4310>
return[call[name[self]._apply_to_sets, parameter[name[func], constant[SINTER], name[keys], <ast.Starred object at 0x7da2054a6d40>]]] | keyword[def] identifier[sinter] ( identifier[self] , identifier[keys] ,* identifier[args] ):
literal[string]
identifier[func] = keyword[lambda] identifier[left] , identifier[right] : identifier[left] . identifier[intersection] ( identifier[right] )
keyword[return] identifier[self] . identifier[_apply_to_sets] ( identifier[func] , literal[string] , identifier[keys] ,* identifier[args] ) | def sinter(self, keys, *args):
"""Emulate sinter."""
func = lambda left, right: left.intersection(right)
return self._apply_to_sets(func, 'SINTER', keys, *args) |
def extend(self, items, replace=True):
'''
Append the items to the metadata.
'''
if isinstance(items, dict) or isinstance(items, SortableDict):
items = list(items.items())
for (key, value) in items:
self.append(key, value, replace=replace) | def function[extend, parameter[self, items, replace]]:
constant[
Append the items to the metadata.
]
if <ast.BoolOp object at 0x7da1afe529b0> begin[:]
variable[items] assign[=] call[name[list], parameter[call[name[items].items, parameter[]]]]
for taget[tuple[[<ast.Name object at 0x7da1afe89000>, <ast.Name object at 0x7da1afe8a290>]]] in starred[name[items]] begin[:]
call[name[self].append, parameter[name[key], name[value]]] | keyword[def] identifier[extend] ( identifier[self] , identifier[items] , identifier[replace] = keyword[True] ):
literal[string]
keyword[if] identifier[isinstance] ( identifier[items] , identifier[dict] ) keyword[or] identifier[isinstance] ( identifier[items] , identifier[SortableDict] ):
identifier[items] = identifier[list] ( identifier[items] . identifier[items] ())
keyword[for] ( identifier[key] , identifier[value] ) keyword[in] identifier[items] :
identifier[self] . identifier[append] ( identifier[key] , identifier[value] , identifier[replace] = identifier[replace] ) | def extend(self, items, replace=True):
"""
Append the items to the metadata.
"""
if isinstance(items, dict) or isinstance(items, SortableDict):
items = list(items.items()) # depends on [control=['if'], data=[]]
for (key, value) in items:
self.append(key, value, replace=replace) # depends on [control=['for'], data=[]] |
def execute(helper, config, args):
"""
Deploys to an environment
"""
env_config = parse_env_config(config, args.environment)
cname_prefix = env_config.get('cname_prefix', None)
env_name = args.environment
# change version
if args.version_label:
helper.deploy_version(env_name, args.version_label)
if not args.dont_wait:
helper.wait_for_environments(env_name, status='Ready', version_label=args.version_label)
# update it
env = parse_env_config(config, env_name)
option_settings = parse_option_settings(env.get('option_settings', {}))
helper.update_environment(env_name,
description=env.get('description', None),
option_settings=option_settings,
tier_type=env.get('tier_type'),
tier_name=env.get('tier_name'),
tier_version=env.get('tier_version'))
# wait
if not args.dont_wait:
helper.wait_for_environments(env_name, health='Green', status='Ready', version_label=args.version_label)
# delete unused
helper.delete_unused_versions(versions_to_keep=int( get(config, 'app.versions_to_keep', 10) )) | def function[execute, parameter[helper, config, args]]:
constant[
Deploys to an environment
]
variable[env_config] assign[=] call[name[parse_env_config], parameter[name[config], name[args].environment]]
variable[cname_prefix] assign[=] call[name[env_config].get, parameter[constant[cname_prefix], constant[None]]]
variable[env_name] assign[=] name[args].environment
if name[args].version_label begin[:]
call[name[helper].deploy_version, parameter[name[env_name], name[args].version_label]]
if <ast.UnaryOp object at 0x7da1afe8af80> begin[:]
call[name[helper].wait_for_environments, parameter[name[env_name]]]
variable[env] assign[=] call[name[parse_env_config], parameter[name[config], name[env_name]]]
variable[option_settings] assign[=] call[name[parse_option_settings], parameter[call[name[env].get, parameter[constant[option_settings], dictionary[[], []]]]]]
call[name[helper].update_environment, parameter[name[env_name]]]
if <ast.UnaryOp object at 0x7da1afe1b520> begin[:]
call[name[helper].wait_for_environments, parameter[name[env_name]]]
call[name[helper].delete_unused_versions, parameter[]] | keyword[def] identifier[execute] ( identifier[helper] , identifier[config] , identifier[args] ):
literal[string]
identifier[env_config] = identifier[parse_env_config] ( identifier[config] , identifier[args] . identifier[environment] )
identifier[cname_prefix] = identifier[env_config] . identifier[get] ( literal[string] , keyword[None] )
identifier[env_name] = identifier[args] . identifier[environment]
keyword[if] identifier[args] . identifier[version_label] :
identifier[helper] . identifier[deploy_version] ( identifier[env_name] , identifier[args] . identifier[version_label] )
keyword[if] keyword[not] identifier[args] . identifier[dont_wait] :
identifier[helper] . identifier[wait_for_environments] ( identifier[env_name] , identifier[status] = literal[string] , identifier[version_label] = identifier[args] . identifier[version_label] )
identifier[env] = identifier[parse_env_config] ( identifier[config] , identifier[env_name] )
identifier[option_settings] = identifier[parse_option_settings] ( identifier[env] . identifier[get] ( literal[string] ,{}))
identifier[helper] . identifier[update_environment] ( identifier[env_name] ,
identifier[description] = identifier[env] . identifier[get] ( literal[string] , keyword[None] ),
identifier[option_settings] = identifier[option_settings] ,
identifier[tier_type] = identifier[env] . identifier[get] ( literal[string] ),
identifier[tier_name] = identifier[env] . identifier[get] ( literal[string] ),
identifier[tier_version] = identifier[env] . identifier[get] ( literal[string] ))
keyword[if] keyword[not] identifier[args] . identifier[dont_wait] :
identifier[helper] . identifier[wait_for_environments] ( identifier[env_name] , identifier[health] = literal[string] , identifier[status] = literal[string] , identifier[version_label] = identifier[args] . identifier[version_label] )
identifier[helper] . identifier[delete_unused_versions] ( identifier[versions_to_keep] = identifier[int] ( identifier[get] ( identifier[config] , literal[string] , literal[int] ))) | def execute(helper, config, args):
"""
Deploys to an environment
"""
env_config = parse_env_config(config, args.environment)
cname_prefix = env_config.get('cname_prefix', None)
env_name = args.environment
# change version
if args.version_label:
helper.deploy_version(env_name, args.version_label)
if not args.dont_wait:
helper.wait_for_environments(env_name, status='Ready', version_label=args.version_label) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
# update it
env = parse_env_config(config, env_name)
option_settings = parse_option_settings(env.get('option_settings', {}))
helper.update_environment(env_name, description=env.get('description', None), option_settings=option_settings, tier_type=env.get('tier_type'), tier_name=env.get('tier_name'), tier_version=env.get('tier_version'))
# wait
if not args.dont_wait:
helper.wait_for_environments(env_name, health='Green', status='Ready', version_label=args.version_label) # depends on [control=['if'], data=[]]
# delete unused
helper.delete_unused_versions(versions_to_keep=int(get(config, 'app.versions_to_keep', 10))) |
def patch_prefetch_one_level():
"""
This patch address Django bug https://code.djangoproject.com/ticket/24873,
which was merged into Django master
in commit 025c6553771a09b80563baedb5b8300a8b01312f
into django.db.models.query.
The code that follows is identical to the code in the above commit,
with all comments stripped out.
"""
import copy
import django
def prefetch_one_level(instances, prefetcher, lookup, level):
rel_qs, rel_obj_attr, instance_attr, single, cache_name = (
prefetcher.get_prefetch_queryset(
instances, lookup.get_current_queryset(level)))
additional_lookups = [
copy.copy(additional_lookup) for additional_lookup
in getattr(rel_qs, '_prefetch_related_lookups', [])
]
if additional_lookups:
rel_qs._prefetch_related_lookups = []
all_related_objects = list(rel_qs)
rel_obj_cache = {}
for rel_obj in all_related_objects:
rel_attr_val = rel_obj_attr(rel_obj)
rel_obj_cache.setdefault(rel_attr_val, []).append(rel_obj)
for obj in instances:
instance_attr_val = instance_attr(obj)
vals = rel_obj_cache.get(instance_attr_val, [])
to_attr, as_attr = lookup.get_current_to_attr(level)
if single:
val = vals[0] if vals else None
to_attr = to_attr if as_attr else cache_name
setattr(obj, to_attr, val)
else:
if as_attr:
setattr(obj, to_attr, vals)
else:
qs = getattr(obj, to_attr).all()
qs._result_cache = vals
qs._prefetch_done = True
obj._prefetched_objects_cache[cache_name] = qs
return all_related_objects, additional_lookups
# apply the patch
from django.db.models import query
if django.VERSION < (2, 0, 0):
query.prefetch_one_level = prefetch_one_level | def function[patch_prefetch_one_level, parameter[]]:
constant[
This patch address Django bug https://code.djangoproject.com/ticket/24873,
which was merged into Django master
in commit 025c6553771a09b80563baedb5b8300a8b01312f
into django.db.models.query.
The code that follows is identical to the code in the above commit,
with all comments stripped out.
]
import module[copy]
import module[django]
def function[prefetch_one_level, parameter[instances, prefetcher, lookup, level]]:
<ast.Tuple object at 0x7da18eb54640> assign[=] call[name[prefetcher].get_prefetch_queryset, parameter[name[instances], call[name[lookup].get_current_queryset, parameter[name[level]]]]]
variable[additional_lookups] assign[=] <ast.ListComp object at 0x7da18eb56170>
if name[additional_lookups] begin[:]
name[rel_qs]._prefetch_related_lookups assign[=] list[[]]
variable[all_related_objects] assign[=] call[name[list], parameter[name[rel_qs]]]
variable[rel_obj_cache] assign[=] dictionary[[], []]
for taget[name[rel_obj]] in starred[name[all_related_objects]] begin[:]
variable[rel_attr_val] assign[=] call[name[rel_obj_attr], parameter[name[rel_obj]]]
call[call[name[rel_obj_cache].setdefault, parameter[name[rel_attr_val], list[[]]]].append, parameter[name[rel_obj]]]
for taget[name[obj]] in starred[name[instances]] begin[:]
variable[instance_attr_val] assign[=] call[name[instance_attr], parameter[name[obj]]]
variable[vals] assign[=] call[name[rel_obj_cache].get, parameter[name[instance_attr_val], list[[]]]]
<ast.Tuple object at 0x7da18eb577f0> assign[=] call[name[lookup].get_current_to_attr, parameter[name[level]]]
if name[single] begin[:]
variable[val] assign[=] <ast.IfExp object at 0x7da18eb55db0>
variable[to_attr] assign[=] <ast.IfExp object at 0x7da18eb541f0>
call[name[setattr], parameter[name[obj], name[to_attr], name[val]]]
return[tuple[[<ast.Name object at 0x7da18eb569e0>, <ast.Name object at 0x7da18eb54880>]]]
from relative_module[django.db.models] import module[query]
if compare[name[django].VERSION less[<] tuple[[<ast.Constant object at 0x7da18eb55090>, <ast.Constant object at 0x7da18eb55cf0>, <ast.Constant object at 0x7da18eb57700>]]] begin[:]
name[query].prefetch_one_level assign[=] name[prefetch_one_level] | keyword[def] identifier[patch_prefetch_one_level] ():
literal[string]
keyword[import] identifier[copy]
keyword[import] identifier[django]
keyword[def] identifier[prefetch_one_level] ( identifier[instances] , identifier[prefetcher] , identifier[lookup] , identifier[level] ):
identifier[rel_qs] , identifier[rel_obj_attr] , identifier[instance_attr] , identifier[single] , identifier[cache_name] =(
identifier[prefetcher] . identifier[get_prefetch_queryset] (
identifier[instances] , identifier[lookup] . identifier[get_current_queryset] ( identifier[level] )))
identifier[additional_lookups] =[
identifier[copy] . identifier[copy] ( identifier[additional_lookup] ) keyword[for] identifier[additional_lookup]
keyword[in] identifier[getattr] ( identifier[rel_qs] , literal[string] ,[])
]
keyword[if] identifier[additional_lookups] :
identifier[rel_qs] . identifier[_prefetch_related_lookups] =[]
identifier[all_related_objects] = identifier[list] ( identifier[rel_qs] )
identifier[rel_obj_cache] ={}
keyword[for] identifier[rel_obj] keyword[in] identifier[all_related_objects] :
identifier[rel_attr_val] = identifier[rel_obj_attr] ( identifier[rel_obj] )
identifier[rel_obj_cache] . identifier[setdefault] ( identifier[rel_attr_val] ,[]). identifier[append] ( identifier[rel_obj] )
keyword[for] identifier[obj] keyword[in] identifier[instances] :
identifier[instance_attr_val] = identifier[instance_attr] ( identifier[obj] )
identifier[vals] = identifier[rel_obj_cache] . identifier[get] ( identifier[instance_attr_val] ,[])
identifier[to_attr] , identifier[as_attr] = identifier[lookup] . identifier[get_current_to_attr] ( identifier[level] )
keyword[if] identifier[single] :
identifier[val] = identifier[vals] [ literal[int] ] keyword[if] identifier[vals] keyword[else] keyword[None]
identifier[to_attr] = identifier[to_attr] keyword[if] identifier[as_attr] keyword[else] identifier[cache_name]
identifier[setattr] ( identifier[obj] , identifier[to_attr] , identifier[val] )
keyword[else] :
keyword[if] identifier[as_attr] :
identifier[setattr] ( identifier[obj] , identifier[to_attr] , identifier[vals] )
keyword[else] :
identifier[qs] = identifier[getattr] ( identifier[obj] , identifier[to_attr] ). identifier[all] ()
identifier[qs] . identifier[_result_cache] = identifier[vals]
identifier[qs] . identifier[_prefetch_done] = keyword[True]
identifier[obj] . identifier[_prefetched_objects_cache] [ identifier[cache_name] ]= identifier[qs]
keyword[return] identifier[all_related_objects] , identifier[additional_lookups]
keyword[from] identifier[django] . identifier[db] . identifier[models] keyword[import] identifier[query]
keyword[if] identifier[django] . identifier[VERSION] <( literal[int] , literal[int] , literal[int] ):
identifier[query] . identifier[prefetch_one_level] = identifier[prefetch_one_level] | def patch_prefetch_one_level():
"""
This patch address Django bug https://code.djangoproject.com/ticket/24873,
which was merged into Django master
in commit 025c6553771a09b80563baedb5b8300a8b01312f
into django.db.models.query.
The code that follows is identical to the code in the above commit,
with all comments stripped out.
"""
import copy
import django
def prefetch_one_level(instances, prefetcher, lookup, level):
(rel_qs, rel_obj_attr, instance_attr, single, cache_name) = prefetcher.get_prefetch_queryset(instances, lookup.get_current_queryset(level))
additional_lookups = [copy.copy(additional_lookup) for additional_lookup in getattr(rel_qs, '_prefetch_related_lookups', [])]
if additional_lookups:
rel_qs._prefetch_related_lookups = [] # depends on [control=['if'], data=[]]
all_related_objects = list(rel_qs)
rel_obj_cache = {}
for rel_obj in all_related_objects:
rel_attr_val = rel_obj_attr(rel_obj)
rel_obj_cache.setdefault(rel_attr_val, []).append(rel_obj) # depends on [control=['for'], data=['rel_obj']]
for obj in instances:
instance_attr_val = instance_attr(obj)
vals = rel_obj_cache.get(instance_attr_val, [])
(to_attr, as_attr) = lookup.get_current_to_attr(level)
if single:
val = vals[0] if vals else None
to_attr = to_attr if as_attr else cache_name
setattr(obj, to_attr, val) # depends on [control=['if'], data=[]]
elif as_attr:
setattr(obj, to_attr, vals) # depends on [control=['if'], data=[]]
else:
qs = getattr(obj, to_attr).all()
qs._result_cache = vals
qs._prefetch_done = True
obj._prefetched_objects_cache[cache_name] = qs # depends on [control=['for'], data=['obj']]
return (all_related_objects, additional_lookups)
# apply the patch
from django.db.models import query
if django.VERSION < (2, 0, 0):
query.prefetch_one_level = prefetch_one_level # depends on [control=['if'], data=[]] |
def nodes_on_bdry(self):
"""Encoding of grid points lying on the boundary.
Examples
--------
Using global option (default ``False``):
>>> part = odl.nonuniform_partition([0, 2, 3], [1, 3])
>>> part.nodes_on_bdry
False
>>> part = odl.nonuniform_partition([0, 2, 3], [1, 3],
... nodes_on_bdry=True)
>>> part.nodes_on_bdry
True
``False`` in axis 0, ``True`` in axis 1:
>>> part = odl.nonuniform_partition([0, 2, 3], [1, 3],
... nodes_on_bdry=[False, True])
>>> part.nodes_on_bdry
(False, True)
In axis 0, ``False`` left and ``True`` right, in axis 1 ``False``:
>>> part = odl.nonuniform_partition([0, 2, 3], [1, 3],
... nodes_on_bdry=[[False, True],
... False])
>>> part.nodes_on_bdry
((False, True), False)
"""
if self.size == 0:
return True
nodes_on_bdry = []
for on_bdry in self.nodes_on_bdry_byaxis:
left, right = on_bdry
if left == right:
nodes_on_bdry.append(left)
else:
nodes_on_bdry.append((left, right))
if all(on_bdry == nodes_on_bdry[0] for on_bdry in nodes_on_bdry[1:]):
return nodes_on_bdry[0]
else:
return tuple(nodes_on_bdry) | def function[nodes_on_bdry, parameter[self]]:
constant[Encoding of grid points lying on the boundary.
Examples
--------
Using global option (default ``False``):
>>> part = odl.nonuniform_partition([0, 2, 3], [1, 3])
>>> part.nodes_on_bdry
False
>>> part = odl.nonuniform_partition([0, 2, 3], [1, 3],
... nodes_on_bdry=True)
>>> part.nodes_on_bdry
True
``False`` in axis 0, ``True`` in axis 1:
>>> part = odl.nonuniform_partition([0, 2, 3], [1, 3],
... nodes_on_bdry=[False, True])
>>> part.nodes_on_bdry
(False, True)
In axis 0, ``False`` left and ``True`` right, in axis 1 ``False``:
>>> part = odl.nonuniform_partition([0, 2, 3], [1, 3],
... nodes_on_bdry=[[False, True],
... False])
>>> part.nodes_on_bdry
((False, True), False)
]
if compare[name[self].size equal[==] constant[0]] begin[:]
return[constant[True]]
variable[nodes_on_bdry] assign[=] list[[]]
for taget[name[on_bdry]] in starred[name[self].nodes_on_bdry_byaxis] begin[:]
<ast.Tuple object at 0x7da1b1ea2f50> assign[=] name[on_bdry]
if compare[name[left] equal[==] name[right]] begin[:]
call[name[nodes_on_bdry].append, parameter[name[left]]]
if call[name[all], parameter[<ast.GeneratorExp object at 0x7da1b1ea1180>]] begin[:]
return[call[name[nodes_on_bdry]][constant[0]]] | keyword[def] identifier[nodes_on_bdry] ( identifier[self] ):
literal[string]
keyword[if] identifier[self] . identifier[size] == literal[int] :
keyword[return] keyword[True]
identifier[nodes_on_bdry] =[]
keyword[for] identifier[on_bdry] keyword[in] identifier[self] . identifier[nodes_on_bdry_byaxis] :
identifier[left] , identifier[right] = identifier[on_bdry]
keyword[if] identifier[left] == identifier[right] :
identifier[nodes_on_bdry] . identifier[append] ( identifier[left] )
keyword[else] :
identifier[nodes_on_bdry] . identifier[append] (( identifier[left] , identifier[right] ))
keyword[if] identifier[all] ( identifier[on_bdry] == identifier[nodes_on_bdry] [ literal[int] ] keyword[for] identifier[on_bdry] keyword[in] identifier[nodes_on_bdry] [ literal[int] :]):
keyword[return] identifier[nodes_on_bdry] [ literal[int] ]
keyword[else] :
keyword[return] identifier[tuple] ( identifier[nodes_on_bdry] ) | def nodes_on_bdry(self):
"""Encoding of grid points lying on the boundary.
Examples
--------
Using global option (default ``False``):
>>> part = odl.nonuniform_partition([0, 2, 3], [1, 3])
>>> part.nodes_on_bdry
False
>>> part = odl.nonuniform_partition([0, 2, 3], [1, 3],
... nodes_on_bdry=True)
>>> part.nodes_on_bdry
True
``False`` in axis 0, ``True`` in axis 1:
>>> part = odl.nonuniform_partition([0, 2, 3], [1, 3],
... nodes_on_bdry=[False, True])
>>> part.nodes_on_bdry
(False, True)
In axis 0, ``False`` left and ``True`` right, in axis 1 ``False``:
>>> part = odl.nonuniform_partition([0, 2, 3], [1, 3],
... nodes_on_bdry=[[False, True],
... False])
>>> part.nodes_on_bdry
((False, True), False)
"""
if self.size == 0:
return True # depends on [control=['if'], data=[]]
nodes_on_bdry = []
for on_bdry in self.nodes_on_bdry_byaxis:
(left, right) = on_bdry
if left == right:
nodes_on_bdry.append(left) # depends on [control=['if'], data=['left']]
else:
nodes_on_bdry.append((left, right)) # depends on [control=['for'], data=['on_bdry']]
if all((on_bdry == nodes_on_bdry[0] for on_bdry in nodes_on_bdry[1:])):
return nodes_on_bdry[0] # depends on [control=['if'], data=[]]
else:
return tuple(nodes_on_bdry) |
def lstm_unroll(num_lstm_layer, seq_len, num_hidden, num_label, loss_type=None):
"""
Creates an unrolled LSTM symbol for inference if loss_type is not specified, and for training
if loss_type is specified. loss_type must be one of 'ctc' or 'warpctc'
Parameters
----------
num_lstm_layer: int
seq_len: int
num_hidden: int
num_label: int
loss_type: str
'ctc' or 'warpctc'
Returns
-------
mxnet.symbol.symbol.Symbol
"""
# Create the base (shared between training and inference) and add loss to the end
pred = _lstm_unroll_base(num_lstm_layer, seq_len, num_hidden)
if loss_type:
# Training mode, add loss
return _add_ctc_loss(pred, seq_len, num_label, loss_type)
else:
# Inference mode, add softmax
return mx.sym.softmax(data=pred, name='softmax') | def function[lstm_unroll, parameter[num_lstm_layer, seq_len, num_hidden, num_label, loss_type]]:
constant[
Creates an unrolled LSTM symbol for inference if loss_type is not specified, and for training
if loss_type is specified. loss_type must be one of 'ctc' or 'warpctc'
Parameters
----------
num_lstm_layer: int
seq_len: int
num_hidden: int
num_label: int
loss_type: str
'ctc' or 'warpctc'
Returns
-------
mxnet.symbol.symbol.Symbol
]
variable[pred] assign[=] call[name[_lstm_unroll_base], parameter[name[num_lstm_layer], name[seq_len], name[num_hidden]]]
if name[loss_type] begin[:]
return[call[name[_add_ctc_loss], parameter[name[pred], name[seq_len], name[num_label], name[loss_type]]]] | keyword[def] identifier[lstm_unroll] ( identifier[num_lstm_layer] , identifier[seq_len] , identifier[num_hidden] , identifier[num_label] , identifier[loss_type] = keyword[None] ):
literal[string]
identifier[pred] = identifier[_lstm_unroll_base] ( identifier[num_lstm_layer] , identifier[seq_len] , identifier[num_hidden] )
keyword[if] identifier[loss_type] :
keyword[return] identifier[_add_ctc_loss] ( identifier[pred] , identifier[seq_len] , identifier[num_label] , identifier[loss_type] )
keyword[else] :
keyword[return] identifier[mx] . identifier[sym] . identifier[softmax] ( identifier[data] = identifier[pred] , identifier[name] = literal[string] ) | def lstm_unroll(num_lstm_layer, seq_len, num_hidden, num_label, loss_type=None):
"""
Creates an unrolled LSTM symbol for inference if loss_type is not specified, and for training
if loss_type is specified. loss_type must be one of 'ctc' or 'warpctc'
Parameters
----------
num_lstm_layer: int
seq_len: int
num_hidden: int
num_label: int
loss_type: str
'ctc' or 'warpctc'
Returns
-------
mxnet.symbol.symbol.Symbol
"""
# Create the base (shared between training and inference) and add loss to the end
pred = _lstm_unroll_base(num_lstm_layer, seq_len, num_hidden)
if loss_type:
# Training mode, add loss
return _add_ctc_loss(pred, seq_len, num_label, loss_type) # depends on [control=['if'], data=[]]
else:
# Inference mode, add softmax
return mx.sym.softmax(data=pred, name='softmax') |
def foreign_field_func(field_name, short_description=None, admin_order_field=None):
"""
Allow to use ForeignKey field attributes at list_display in a simple way.
Example:
from misc.admin import foreign_field_func as ff
class SongAdmin(admin.ModelAdmin):
list_display = ['name', 'time', 'artist', 'album', ff('track__num', "Track number"), ff('album__total_tracks')]
"""
def accessor(obj):
val = obj
for part in field_name.split('__'):
val = getattr(val, part)
return val
if short_description:
accessor.short_description = short_description
else:
accessor.__name__ = field_name
if admin_order_field:
accessor.admin_order_field = admin_order_field
else:
accessor.admin_order_field = (field_name,)
return accessor | def function[foreign_field_func, parameter[field_name, short_description, admin_order_field]]:
constant[
Allow to use ForeignKey field attributes at list_display in a simple way.
Example:
from misc.admin import foreign_field_func as ff
class SongAdmin(admin.ModelAdmin):
list_display = ['name', 'time', 'artist', 'album', ff('track__num', "Track number"), ff('album__total_tracks')]
]
def function[accessor, parameter[obj]]:
variable[val] assign[=] name[obj]
for taget[name[part]] in starred[call[name[field_name].split, parameter[constant[__]]]] begin[:]
variable[val] assign[=] call[name[getattr], parameter[name[val], name[part]]]
return[name[val]]
if name[short_description] begin[:]
name[accessor].short_description assign[=] name[short_description]
if name[admin_order_field] begin[:]
name[accessor].admin_order_field assign[=] name[admin_order_field]
return[name[accessor]] | keyword[def] identifier[foreign_field_func] ( identifier[field_name] , identifier[short_description] = keyword[None] , identifier[admin_order_field] = keyword[None] ):
literal[string]
keyword[def] identifier[accessor] ( identifier[obj] ):
identifier[val] = identifier[obj]
keyword[for] identifier[part] keyword[in] identifier[field_name] . identifier[split] ( literal[string] ):
identifier[val] = identifier[getattr] ( identifier[val] , identifier[part] )
keyword[return] identifier[val]
keyword[if] identifier[short_description] :
identifier[accessor] . identifier[short_description] = identifier[short_description]
keyword[else] :
identifier[accessor] . identifier[__name__] = identifier[field_name]
keyword[if] identifier[admin_order_field] :
identifier[accessor] . identifier[admin_order_field] = identifier[admin_order_field]
keyword[else] :
identifier[accessor] . identifier[admin_order_field] =( identifier[field_name] ,)
keyword[return] identifier[accessor] | def foreign_field_func(field_name, short_description=None, admin_order_field=None):
"""
Allow to use ForeignKey field attributes at list_display in a simple way.
Example:
from misc.admin import foreign_field_func as ff
class SongAdmin(admin.ModelAdmin):
list_display = ['name', 'time', 'artist', 'album', ff('track__num', "Track number"), ff('album__total_tracks')]
"""
def accessor(obj):
val = obj
for part in field_name.split('__'):
val = getattr(val, part) # depends on [control=['for'], data=['part']]
return val
if short_description:
accessor.short_description = short_description # depends on [control=['if'], data=[]]
else:
accessor.__name__ = field_name
if admin_order_field:
accessor.admin_order_field = admin_order_field # depends on [control=['if'], data=[]]
else:
accessor.admin_order_field = (field_name,)
return accessor |
def build_marc(recid, single_keywords, composite_keywords,
spires=False, author_keywords=None, acronyms=None):
"""Create xml record.
:var recid: integer
:var single_keywords: dictionary of kws
:var composite_keywords: dictionary of kws
:keyword spires: please don't use, left for historical
reasons
:keyword author_keywords: dictionary of extracted keywords
:keyword acronyms: dictionary of extracted acronyms
:return: str, marxml
"""
output = ['<collection><record>\n'
'<controlfield tag="001">%s</controlfield>' % recid]
# no need to sort
single_keywords = single_keywords.items()
composite_keywords = composite_keywords.items()
output.append(_output_marc(
single_keywords,
composite_keywords,
author_keywords,
acronyms
))
output.append('</record></collection>')
return '\n'.join(output) | def function[build_marc, parameter[recid, single_keywords, composite_keywords, spires, author_keywords, acronyms]]:
constant[Create xml record.
:var recid: integer
:var single_keywords: dictionary of kws
:var composite_keywords: dictionary of kws
:keyword spires: please don't use, left for historical
reasons
:keyword author_keywords: dictionary of extracted keywords
:keyword acronyms: dictionary of extracted acronyms
:return: str, marxml
]
variable[output] assign[=] list[[<ast.BinOp object at 0x7da204347760>]]
variable[single_keywords] assign[=] call[name[single_keywords].items, parameter[]]
variable[composite_keywords] assign[=] call[name[composite_keywords].items, parameter[]]
call[name[output].append, parameter[call[name[_output_marc], parameter[name[single_keywords], name[composite_keywords], name[author_keywords], name[acronyms]]]]]
call[name[output].append, parameter[constant[</record></collection>]]]
return[call[constant[
].join, parameter[name[output]]]] | keyword[def] identifier[build_marc] ( identifier[recid] , identifier[single_keywords] , identifier[composite_keywords] ,
identifier[spires] = keyword[False] , identifier[author_keywords] = keyword[None] , identifier[acronyms] = keyword[None] ):
literal[string]
identifier[output] =[ literal[string]
literal[string] % identifier[recid] ]
identifier[single_keywords] = identifier[single_keywords] . identifier[items] ()
identifier[composite_keywords] = identifier[composite_keywords] . identifier[items] ()
identifier[output] . identifier[append] ( identifier[_output_marc] (
identifier[single_keywords] ,
identifier[composite_keywords] ,
identifier[author_keywords] ,
identifier[acronyms]
))
identifier[output] . identifier[append] ( literal[string] )
keyword[return] literal[string] . identifier[join] ( identifier[output] ) | def build_marc(recid, single_keywords, composite_keywords, spires=False, author_keywords=None, acronyms=None):
"""Create xml record.
:var recid: integer
:var single_keywords: dictionary of kws
:var composite_keywords: dictionary of kws
:keyword spires: please don't use, left for historical
reasons
:keyword author_keywords: dictionary of extracted keywords
:keyword acronyms: dictionary of extracted acronyms
:return: str, marxml
"""
output = ['<collection><record>\n<controlfield tag="001">%s</controlfield>' % recid]
# no need to sort
single_keywords = single_keywords.items()
composite_keywords = composite_keywords.items()
output.append(_output_marc(single_keywords, composite_keywords, author_keywords, acronyms))
output.append('</record></collection>')
return '\n'.join(output) |
def _platform_pylib_exts(): # nocover
"""
Returns .so, .pyd, or .dylib depending on linux, win or mac.
On python3 return the previous with and without abi (e.g.
.cpython-35m-x86_64-linux-gnu) flags. On python2 returns with
and without multiarch.
"""
import sysconfig
valid_exts = []
if six.PY2:
# see also 'SHLIB_EXT'
base_ext = '.' + sysconfig.get_config_var('SO').split('.')[-1]
else:
# return with and without API flags
# handle PEP 3149 -- ABI version tagged .so files
base_ext = '.' + sysconfig.get_config_var('EXT_SUFFIX').split('.')[-1]
for tag in _extension_module_tags():
valid_exts.append('.' + tag + base_ext)
valid_exts.append(base_ext)
return tuple(valid_exts) | def function[_platform_pylib_exts, parameter[]]:
constant[
Returns .so, .pyd, or .dylib depending on linux, win or mac.
On python3 return the previous with and without abi (e.g.
.cpython-35m-x86_64-linux-gnu) flags. On python2 returns with
and without multiarch.
]
import module[sysconfig]
variable[valid_exts] assign[=] list[[]]
if name[six].PY2 begin[:]
variable[base_ext] assign[=] binary_operation[constant[.] + call[call[call[name[sysconfig].get_config_var, parameter[constant[SO]]].split, parameter[constant[.]]]][<ast.UnaryOp object at 0x7da1b0086dd0>]]
for taget[name[tag]] in starred[call[name[_extension_module_tags], parameter[]]] begin[:]
call[name[valid_exts].append, parameter[binary_operation[binary_operation[constant[.] + name[tag]] + name[base_ext]]]]
call[name[valid_exts].append, parameter[name[base_ext]]]
return[call[name[tuple], parameter[name[valid_exts]]]] | keyword[def] identifier[_platform_pylib_exts] ():
literal[string]
keyword[import] identifier[sysconfig]
identifier[valid_exts] =[]
keyword[if] identifier[six] . identifier[PY2] :
identifier[base_ext] = literal[string] + identifier[sysconfig] . identifier[get_config_var] ( literal[string] ). identifier[split] ( literal[string] )[- literal[int] ]
keyword[else] :
identifier[base_ext] = literal[string] + identifier[sysconfig] . identifier[get_config_var] ( literal[string] ). identifier[split] ( literal[string] )[- literal[int] ]
keyword[for] identifier[tag] keyword[in] identifier[_extension_module_tags] ():
identifier[valid_exts] . identifier[append] ( literal[string] + identifier[tag] + identifier[base_ext] )
identifier[valid_exts] . identifier[append] ( identifier[base_ext] )
keyword[return] identifier[tuple] ( identifier[valid_exts] ) | def _platform_pylib_exts(): # nocover
'\n Returns .so, .pyd, or .dylib depending on linux, win or mac.\n On python3 return the previous with and without abi (e.g.\n .cpython-35m-x86_64-linux-gnu) flags. On python2 returns with\n and without multiarch.\n '
import sysconfig
valid_exts = []
if six.PY2:
# see also 'SHLIB_EXT'
base_ext = '.' + sysconfig.get_config_var('SO').split('.')[-1] # depends on [control=['if'], data=[]]
else:
# return with and without API flags
# handle PEP 3149 -- ABI version tagged .so files
base_ext = '.' + sysconfig.get_config_var('EXT_SUFFIX').split('.')[-1]
for tag in _extension_module_tags():
valid_exts.append('.' + tag + base_ext) # depends on [control=['for'], data=['tag']]
valid_exts.append(base_ext)
return tuple(valid_exts) |
def display_hook(prompt, session, context, matches, longest_match_len):
# type: (str, ShellSession, BundleContext, List[str], int) -> None
"""
Displays the available services matches and the service details
:param prompt: Shell prompt string
:param session: Current shell session (for display)
:param context: BundleContext of the shell
:param matches: List of words matching the substitution
:param longest_match_len: Length of the largest match
"""
try:
# Prepare a line pattern for each match
match_pattern = "{{0: >{}}}: {{1}}".format(longest_match_len)
# Sort matching IDs
matches = sorted(int(match) for match in matches)
# Print the match and the associated name
session.write_line()
for svc_id in matches:
svc_ref = context.get_service_reference(
None, "({}={})".format(SERVICE_ID, svc_id)
)
session.write_line(match_pattern, svc_id, str(svc_ref))
# Print the prompt, then current line
session.write(prompt)
session.write_line_no_feed(readline.get_line_buffer())
readline.redisplay()
except Exception as ex:
session.write_line("\n{}\n\n", ex) | def function[display_hook, parameter[prompt, session, context, matches, longest_match_len]]:
constant[
Displays the available services matches and the service details
:param prompt: Shell prompt string
:param session: Current shell session (for display)
:param context: BundleContext of the shell
:param matches: List of words matching the substitution
:param longest_match_len: Length of the largest match
]
<ast.Try object at 0x7da1b03aca60> | keyword[def] identifier[display_hook] ( identifier[prompt] , identifier[session] , identifier[context] , identifier[matches] , identifier[longest_match_len] ):
literal[string]
keyword[try] :
identifier[match_pattern] = literal[string] . identifier[format] ( identifier[longest_match_len] )
identifier[matches] = identifier[sorted] ( identifier[int] ( identifier[match] ) keyword[for] identifier[match] keyword[in] identifier[matches] )
identifier[session] . identifier[write_line] ()
keyword[for] identifier[svc_id] keyword[in] identifier[matches] :
identifier[svc_ref] = identifier[context] . identifier[get_service_reference] (
keyword[None] , literal[string] . identifier[format] ( identifier[SERVICE_ID] , identifier[svc_id] )
)
identifier[session] . identifier[write_line] ( identifier[match_pattern] , identifier[svc_id] , identifier[str] ( identifier[svc_ref] ))
identifier[session] . identifier[write] ( identifier[prompt] )
identifier[session] . identifier[write_line_no_feed] ( identifier[readline] . identifier[get_line_buffer] ())
identifier[readline] . identifier[redisplay] ()
keyword[except] identifier[Exception] keyword[as] identifier[ex] :
identifier[session] . identifier[write_line] ( literal[string] , identifier[ex] ) | def display_hook(prompt, session, context, matches, longest_match_len):
# type: (str, ShellSession, BundleContext, List[str], int) -> None
'\n Displays the available services matches and the service details\n\n :param prompt: Shell prompt string\n :param session: Current shell session (for display)\n :param context: BundleContext of the shell\n :param matches: List of words matching the substitution\n :param longest_match_len: Length of the largest match\n '
try:
# Prepare a line pattern for each match
match_pattern = '{{0: >{}}}: {{1}}'.format(longest_match_len)
# Sort matching IDs
matches = sorted((int(match) for match in matches))
# Print the match and the associated name
session.write_line()
for svc_id in matches:
svc_ref = context.get_service_reference(None, '({}={})'.format(SERVICE_ID, svc_id))
session.write_line(match_pattern, svc_id, str(svc_ref)) # depends on [control=['for'], data=['svc_id']]
# Print the prompt, then current line
session.write(prompt)
session.write_line_no_feed(readline.get_line_buffer())
readline.redisplay() # depends on [control=['try'], data=[]]
except Exception as ex:
session.write_line('\n{}\n\n', ex) # depends on [control=['except'], data=['ex']] |
def copy(self):
"""
Make deep copy of this key jar.
:return: A :py:class:`oidcmsg.key_jar.KeyJar` instance
"""
kj = KeyJar()
for issuer in self.owners():
kj[issuer] = [kb.copy() for kb in self[issuer]]
kj.verify_ssl = self.verify_ssl
return kj | def function[copy, parameter[self]]:
constant[
Make deep copy of this key jar.
:return: A :py:class:`oidcmsg.key_jar.KeyJar` instance
]
variable[kj] assign[=] call[name[KeyJar], parameter[]]
for taget[name[issuer]] in starred[call[name[self].owners, parameter[]]] begin[:]
call[name[kj]][name[issuer]] assign[=] <ast.ListComp object at 0x7da1b0537a30>
name[kj].verify_ssl assign[=] name[self].verify_ssl
return[name[kj]] | keyword[def] identifier[copy] ( identifier[self] ):
literal[string]
identifier[kj] = identifier[KeyJar] ()
keyword[for] identifier[issuer] keyword[in] identifier[self] . identifier[owners] ():
identifier[kj] [ identifier[issuer] ]=[ identifier[kb] . identifier[copy] () keyword[for] identifier[kb] keyword[in] identifier[self] [ identifier[issuer] ]]
identifier[kj] . identifier[verify_ssl] = identifier[self] . identifier[verify_ssl]
keyword[return] identifier[kj] | def copy(self):
"""
Make deep copy of this key jar.
:return: A :py:class:`oidcmsg.key_jar.KeyJar` instance
"""
kj = KeyJar()
for issuer in self.owners():
kj[issuer] = [kb.copy() for kb in self[issuer]] # depends on [control=['for'], data=['issuer']]
kj.verify_ssl = self.verify_ssl
return kj |
def casperjs_capture(stream, url, method=None, width=None, height=None,
selector=None, data=None, waitfor=None, size=None,
crop=None, render='png', wait=None):
"""
Captures web pages using ``casperjs``
"""
if isinstance(stream, six.string_types):
output = stream
else:
with NamedTemporaryFile('wb+', suffix='.%s' % render, delete=False) as f:
output = f.name
try:
cmd = CASPERJS_CMD + [url, output]
# Extra command-line options
cmd += ['--format=%s' % render]
if method:
cmd += ['--method=%s' % method]
if width:
cmd += ['--width=%s' % width]
if height:
cmd += ['--height=%s' % height]
if selector:
cmd += ['--selector=%s' % selector]
if data:
cmd += ['--data="%s"' % json.dumps(data)]
if waitfor:
cmd += ['--waitfor=%s' % waitfor]
if wait:
cmd += ['--wait=%s' % wait]
logger.debug(cmd)
# Run CasperJS process
proc = subprocess.Popen(cmd, **casperjs_command_kwargs())
stdout = proc.communicate()[0]
process_casperjs_stdout(stdout)
size = parse_size(size)
render = parse_render(render)
if size or (render and render != 'png' and render != 'pdf'):
# pdf isn't an image, therefore we can't postprocess it.
image_postprocess(output, stream, size, crop, render)
else:
if stream != output:
# From file to stream
with open(output, 'rb') as out:
stream.write(out.read())
stream.flush()
finally:
if stream != output:
os.unlink(output) | def function[casperjs_capture, parameter[stream, url, method, width, height, selector, data, waitfor, size, crop, render, wait]]:
constant[
Captures web pages using ``casperjs``
]
if call[name[isinstance], parameter[name[stream], name[six].string_types]] begin[:]
variable[output] assign[=] name[stream]
<ast.Try object at 0x7da20e9b17e0> | keyword[def] identifier[casperjs_capture] ( identifier[stream] , identifier[url] , identifier[method] = keyword[None] , identifier[width] = keyword[None] , identifier[height] = keyword[None] ,
identifier[selector] = keyword[None] , identifier[data] = keyword[None] , identifier[waitfor] = keyword[None] , identifier[size] = keyword[None] ,
identifier[crop] = keyword[None] , identifier[render] = literal[string] , identifier[wait] = keyword[None] ):
literal[string]
keyword[if] identifier[isinstance] ( identifier[stream] , identifier[six] . identifier[string_types] ):
identifier[output] = identifier[stream]
keyword[else] :
keyword[with] identifier[NamedTemporaryFile] ( literal[string] , identifier[suffix] = literal[string] % identifier[render] , identifier[delete] = keyword[False] ) keyword[as] identifier[f] :
identifier[output] = identifier[f] . identifier[name]
keyword[try] :
identifier[cmd] = identifier[CASPERJS_CMD] +[ identifier[url] , identifier[output] ]
identifier[cmd] +=[ literal[string] % identifier[render] ]
keyword[if] identifier[method] :
identifier[cmd] +=[ literal[string] % identifier[method] ]
keyword[if] identifier[width] :
identifier[cmd] +=[ literal[string] % identifier[width] ]
keyword[if] identifier[height] :
identifier[cmd] +=[ literal[string] % identifier[height] ]
keyword[if] identifier[selector] :
identifier[cmd] +=[ literal[string] % identifier[selector] ]
keyword[if] identifier[data] :
identifier[cmd] +=[ literal[string] % identifier[json] . identifier[dumps] ( identifier[data] )]
keyword[if] identifier[waitfor] :
identifier[cmd] +=[ literal[string] % identifier[waitfor] ]
keyword[if] identifier[wait] :
identifier[cmd] +=[ literal[string] % identifier[wait] ]
identifier[logger] . identifier[debug] ( identifier[cmd] )
identifier[proc] = identifier[subprocess] . identifier[Popen] ( identifier[cmd] ,** identifier[casperjs_command_kwargs] ())
identifier[stdout] = identifier[proc] . identifier[communicate] ()[ literal[int] ]
identifier[process_casperjs_stdout] ( identifier[stdout] )
identifier[size] = identifier[parse_size] ( identifier[size] )
identifier[render] = identifier[parse_render] ( identifier[render] )
keyword[if] identifier[size] keyword[or] ( identifier[render] keyword[and] identifier[render] != literal[string] keyword[and] identifier[render] != literal[string] ):
identifier[image_postprocess] ( identifier[output] , identifier[stream] , identifier[size] , identifier[crop] , identifier[render] )
keyword[else] :
keyword[if] identifier[stream] != identifier[output] :
keyword[with] identifier[open] ( identifier[output] , literal[string] ) keyword[as] identifier[out] :
identifier[stream] . identifier[write] ( identifier[out] . identifier[read] ())
identifier[stream] . identifier[flush] ()
keyword[finally] :
keyword[if] identifier[stream] != identifier[output] :
identifier[os] . identifier[unlink] ( identifier[output] ) | def casperjs_capture(stream, url, method=None, width=None, height=None, selector=None, data=None, waitfor=None, size=None, crop=None, render='png', wait=None):
"""
Captures web pages using ``casperjs``
"""
if isinstance(stream, six.string_types):
output = stream # depends on [control=['if'], data=[]]
else:
with NamedTemporaryFile('wb+', suffix='.%s' % render, delete=False) as f:
output = f.name # depends on [control=['with'], data=['f']]
try:
cmd = CASPERJS_CMD + [url, output]
# Extra command-line options
cmd += ['--format=%s' % render]
if method:
cmd += ['--method=%s' % method] # depends on [control=['if'], data=[]]
if width:
cmd += ['--width=%s' % width] # depends on [control=['if'], data=[]]
if height:
cmd += ['--height=%s' % height] # depends on [control=['if'], data=[]]
if selector:
cmd += ['--selector=%s' % selector] # depends on [control=['if'], data=[]]
if data:
cmd += ['--data="%s"' % json.dumps(data)] # depends on [control=['if'], data=[]]
if waitfor:
cmd += ['--waitfor=%s' % waitfor] # depends on [control=['if'], data=[]]
if wait:
cmd += ['--wait=%s' % wait] # depends on [control=['if'], data=[]]
logger.debug(cmd)
# Run CasperJS process
proc = subprocess.Popen(cmd, **casperjs_command_kwargs())
stdout = proc.communicate()[0]
process_casperjs_stdout(stdout)
size = parse_size(size)
render = parse_render(render)
if size or (render and render != 'png' and (render != 'pdf')):
# pdf isn't an image, therefore we can't postprocess it.
image_postprocess(output, stream, size, crop, render) # depends on [control=['if'], data=[]]
elif stream != output:
# From file to stream
with open(output, 'rb') as out:
stream.write(out.read()) # depends on [control=['with'], data=['out']]
stream.flush() # depends on [control=['if'], data=['stream', 'output']] # depends on [control=['try'], data=[]]
finally:
if stream != output:
os.unlink(output) # depends on [control=['if'], data=['output']] |
def normalize_hex(hex_value):
"""
Normalize a hexadecimal color value to 6 digits, lowercase.
"""
match = HEX_COLOR_RE.match(hex_value)
if match is None:
raise ValueError(
u"'{}' is not a valid hexadecimal color value.".format(hex_value)
)
hex_digits = match.group(1)
if len(hex_digits) == 3:
hex_digits = u''.join(2 * s for s in hex_digits)
return u'#{}'.format(hex_digits.lower()) | def function[normalize_hex, parameter[hex_value]]:
constant[
Normalize a hexadecimal color value to 6 digits, lowercase.
]
variable[match] assign[=] call[name[HEX_COLOR_RE].match, parameter[name[hex_value]]]
if compare[name[match] is constant[None]] begin[:]
<ast.Raise object at 0x7da20e9621a0>
variable[hex_digits] assign[=] call[name[match].group, parameter[constant[1]]]
if compare[call[name[len], parameter[name[hex_digits]]] equal[==] constant[3]] begin[:]
variable[hex_digits] assign[=] call[constant[].join, parameter[<ast.GeneratorExp object at 0x7da1b0c644f0>]]
return[call[constant[#{}].format, parameter[call[name[hex_digits].lower, parameter[]]]]] | keyword[def] identifier[normalize_hex] ( identifier[hex_value] ):
literal[string]
identifier[match] = identifier[HEX_COLOR_RE] . identifier[match] ( identifier[hex_value] )
keyword[if] identifier[match] keyword[is] keyword[None] :
keyword[raise] identifier[ValueError] (
literal[string] . identifier[format] ( identifier[hex_value] )
)
identifier[hex_digits] = identifier[match] . identifier[group] ( literal[int] )
keyword[if] identifier[len] ( identifier[hex_digits] )== literal[int] :
identifier[hex_digits] = literal[string] . identifier[join] ( literal[int] * identifier[s] keyword[for] identifier[s] keyword[in] identifier[hex_digits] )
keyword[return] literal[string] . identifier[format] ( identifier[hex_digits] . identifier[lower] ()) | def normalize_hex(hex_value):
"""
Normalize a hexadecimal color value to 6 digits, lowercase.
"""
match = HEX_COLOR_RE.match(hex_value)
if match is None:
raise ValueError(u"'{}' is not a valid hexadecimal color value.".format(hex_value)) # depends on [control=['if'], data=[]]
hex_digits = match.group(1)
if len(hex_digits) == 3:
hex_digits = u''.join((2 * s for s in hex_digits)) # depends on [control=['if'], data=[]]
return u'#{}'.format(hex_digits.lower()) |
def _connectToWP(self):
"""Establish the actual TCP connection to Flickr"""
if self.connected_to_wp:
logger.debug("Already connected to wp")
return True
# Load config from file
info=json.load(open(WP_LOGIN_FILE,'r'))
self.wp = Client(info['url'],\
info['username'],\
info['password'])
logger.debug("Connecting to wp")
self.connected_to_wp=True
return True | def function[_connectToWP, parameter[self]]:
constant[Establish the actual TCP connection to Flickr]
if name[self].connected_to_wp begin[:]
call[name[logger].debug, parameter[constant[Already connected to wp]]]
return[constant[True]]
variable[info] assign[=] call[name[json].load, parameter[call[name[open], parameter[name[WP_LOGIN_FILE], constant[r]]]]]
name[self].wp assign[=] call[name[Client], parameter[call[name[info]][constant[url]], call[name[info]][constant[username]], call[name[info]][constant[password]]]]
call[name[logger].debug, parameter[constant[Connecting to wp]]]
name[self].connected_to_wp assign[=] constant[True]
return[constant[True]] | keyword[def] identifier[_connectToWP] ( identifier[self] ):
literal[string]
keyword[if] identifier[self] . identifier[connected_to_wp] :
identifier[logger] . identifier[debug] ( literal[string] )
keyword[return] keyword[True]
identifier[info] = identifier[json] . identifier[load] ( identifier[open] ( identifier[WP_LOGIN_FILE] , literal[string] ))
identifier[self] . identifier[wp] = identifier[Client] ( identifier[info] [ literal[string] ], identifier[info] [ literal[string] ], identifier[info] [ literal[string] ])
identifier[logger] . identifier[debug] ( literal[string] )
identifier[self] . identifier[connected_to_wp] = keyword[True]
keyword[return] keyword[True] | def _connectToWP(self):
"""Establish the actual TCP connection to Flickr"""
if self.connected_to_wp:
logger.debug('Already connected to wp')
return True # depends on [control=['if'], data=[]]
# Load config from file
info = json.load(open(WP_LOGIN_FILE, 'r'))
self.wp = Client(info['url'], info['username'], info['password'])
logger.debug('Connecting to wp')
self.connected_to_wp = True
return True |
def reactivate(self):
"""
Reactivates this subscription.
If a customer's subscription is canceled with ``at_period_end`` set to True and it has not yet reached the end
of the billing period, it can be reactivated. Subscriptions canceled immediately cannot be reactivated.
(Source: https://stripe.com/docs/subscriptions/canceling-pausing)
.. warning:: Reactivating a fully canceled Subscription will fail silently. Be sure to check the returned \
Subscription's status.
"""
stripe_subscription = self.api_retrieve()
stripe_subscription.plan = self.plan.id
stripe_subscription.cancel_at_period_end = False
return Subscription.sync_from_stripe_data(stripe_subscription.save()) | def function[reactivate, parameter[self]]:
constant[
Reactivates this subscription.
If a customer's subscription is canceled with ``at_period_end`` set to True and it has not yet reached the end
of the billing period, it can be reactivated. Subscriptions canceled immediately cannot be reactivated.
(Source: https://stripe.com/docs/subscriptions/canceling-pausing)
.. warning:: Reactivating a fully canceled Subscription will fail silently. Be sure to check the returned Subscription's status.
]
variable[stripe_subscription] assign[=] call[name[self].api_retrieve, parameter[]]
name[stripe_subscription].plan assign[=] name[self].plan.id
name[stripe_subscription].cancel_at_period_end assign[=] constant[False]
return[call[name[Subscription].sync_from_stripe_data, parameter[call[name[stripe_subscription].save, parameter[]]]]] | keyword[def] identifier[reactivate] ( identifier[self] ):
literal[string]
identifier[stripe_subscription] = identifier[self] . identifier[api_retrieve] ()
identifier[stripe_subscription] . identifier[plan] = identifier[self] . identifier[plan] . identifier[id]
identifier[stripe_subscription] . identifier[cancel_at_period_end] = keyword[False]
keyword[return] identifier[Subscription] . identifier[sync_from_stripe_data] ( identifier[stripe_subscription] . identifier[save] ()) | def reactivate(self):
"""
Reactivates this subscription.
If a customer's subscription is canceled with ``at_period_end`` set to True and it has not yet reached the end
of the billing period, it can be reactivated. Subscriptions canceled immediately cannot be reactivated.
(Source: https://stripe.com/docs/subscriptions/canceling-pausing)
.. warning:: Reactivating a fully canceled Subscription will fail silently. Be sure to check the returned Subscription's status.
"""
stripe_subscription = self.api_retrieve()
stripe_subscription.plan = self.plan.id
stripe_subscription.cancel_at_period_end = False
return Subscription.sync_from_stripe_data(stripe_subscription.save()) |
def create(self, product, data, store_view=None, identifierType=None):
"""
Upload a new product image.
:param product: ID or SKU of product
:param data: `dict` of image data (label, position, exclude, types)
Example: { 'label': 'description of photo',
'position': '1', 'exclude': '0',
'types': ['image', 'small_image', 'thumbnail']}
:param store_view: Store view ID or Code
:param identifierType: Defines whether the product or SKU value is
passed in the "product" parameter.
:return: string - image file name
"""
return self.call('catalog_product_attribute_media.create',
[product, data, store_view, identifierType]) | def function[create, parameter[self, product, data, store_view, identifierType]]:
constant[
Upload a new product image.
:param product: ID or SKU of product
:param data: `dict` of image data (label, position, exclude, types)
Example: { 'label': 'description of photo',
'position': '1', 'exclude': '0',
'types': ['image', 'small_image', 'thumbnail']}
:param store_view: Store view ID or Code
:param identifierType: Defines whether the product or SKU value is
passed in the "product" parameter.
:return: string - image file name
]
return[call[name[self].call, parameter[constant[catalog_product_attribute_media.create], list[[<ast.Name object at 0x7da1b04d3e80>, <ast.Name object at 0x7da1b04d2bf0>, <ast.Name object at 0x7da1b04d1f30>, <ast.Name object at 0x7da1b04d0850>]]]]] | keyword[def] identifier[create] ( identifier[self] , identifier[product] , identifier[data] , identifier[store_view] = keyword[None] , identifier[identifierType] = keyword[None] ):
literal[string]
keyword[return] identifier[self] . identifier[call] ( literal[string] ,
[ identifier[product] , identifier[data] , identifier[store_view] , identifier[identifierType] ]) | def create(self, product, data, store_view=None, identifierType=None):
"""
Upload a new product image.
:param product: ID or SKU of product
:param data: `dict` of image data (label, position, exclude, types)
Example: { 'label': 'description of photo',
'position': '1', 'exclude': '0',
'types': ['image', 'small_image', 'thumbnail']}
:param store_view: Store view ID or Code
:param identifierType: Defines whether the product or SKU value is
passed in the "product" parameter.
:return: string - image file name
"""
return self.call('catalog_product_attribute_media.create', [product, data, store_view, identifierType]) |
def extended_analog(self, pin, data):
"""
This method will send an extended data analog output command to the selected pin
:param pin: 0 - 127
:param data: 0 - 0xfffff
"""
analog_data = [pin, data & 0x7f, (data >> 7) & 0x7f, (data >> 14) & 0x7f]
self._command_handler.send_sysex(self._command_handler.EXTENDED_ANALOG, analog_data) | def function[extended_analog, parameter[self, pin, data]]:
constant[
This method will send an extended data analog output command to the selected pin
:param pin: 0 - 127
:param data: 0 - 0xfffff
]
variable[analog_data] assign[=] list[[<ast.Name object at 0x7da20e957010>, <ast.BinOp object at 0x7da2047ebfd0>, <ast.BinOp object at 0x7da2047ead40>, <ast.BinOp object at 0x7da2047ea8c0>]]
call[name[self]._command_handler.send_sysex, parameter[name[self]._command_handler.EXTENDED_ANALOG, name[analog_data]]] | keyword[def] identifier[extended_analog] ( identifier[self] , identifier[pin] , identifier[data] ):
literal[string]
identifier[analog_data] =[ identifier[pin] , identifier[data] & literal[int] ,( identifier[data] >> literal[int] )& literal[int] ,( identifier[data] >> literal[int] )& literal[int] ]
identifier[self] . identifier[_command_handler] . identifier[send_sysex] ( identifier[self] . identifier[_command_handler] . identifier[EXTENDED_ANALOG] , identifier[analog_data] ) | def extended_analog(self, pin, data):
"""
This method will send an extended data analog output command to the selected pin
:param pin: 0 - 127
:param data: 0 - 0xfffff
"""
analog_data = [pin, data & 127, data >> 7 & 127, data >> 14 & 127]
self._command_handler.send_sysex(self._command_handler.EXTENDED_ANALOG, analog_data) |
def get_reservations(self, sessionid, timeout=None):
"""Returns a list of location IDs and names."""
url = "{}{}".format(BASE_URL, "/reservations/")
cookies = dict(sessionid=sessionid)
try:
resp = requests.get(url, timeout=timeout, cookies=cookies)
except resp.exceptions.HTTPError as error:
raise APIError("Server Error: {}".format(error))
except requests.exceptions.ConnectTimeout:
raise APIError("Timeout Error")
html = resp.content.decode("utf8")
if "https://weblogin.pennkey.upenn.edu" in html:
raise APIError("Wharton Auth Failed. Session ID is not valid.")
soup = BeautifulSoup(html, "html5lib")
reservations = []
media = soup.find_all("div", {'class': "Media-body"})
for res in media:
times = res.find_all("span", {'class': "list-view-item__end-time"})
reservation = {
"date": res.find("span", {'class': "list-view-item__start-time u-display-block"}).get_text(),
"startTime": times[0].get_text(),
"endTime": times[1].get_text(),
"location": res.find("span", {'class': "list-view-item-building"}).get_text(),
"booking_id": int(res.find("a")['href'].split("delete/")[1][:-1])
}
reservations.append(reservation)
return reservations | def function[get_reservations, parameter[self, sessionid, timeout]]:
constant[Returns a list of location IDs and names.]
variable[url] assign[=] call[constant[{}{}].format, parameter[name[BASE_URL], constant[/reservations/]]]
variable[cookies] assign[=] call[name[dict], parameter[]]
<ast.Try object at 0x7da204963fd0>
variable[html] assign[=] call[name[resp].content.decode, parameter[constant[utf8]]]
if compare[constant[https://weblogin.pennkey.upenn.edu] in name[html]] begin[:]
<ast.Raise object at 0x7da204962980>
variable[soup] assign[=] call[name[BeautifulSoup], parameter[name[html], constant[html5lib]]]
variable[reservations] assign[=] list[[]]
variable[media] assign[=] call[name[soup].find_all, parameter[constant[div], dictionary[[<ast.Constant object at 0x7da2049623e0>], [<ast.Constant object at 0x7da204961b70>]]]]
for taget[name[res]] in starred[name[media]] begin[:]
variable[times] assign[=] call[name[res].find_all, parameter[constant[span], dictionary[[<ast.Constant object at 0x7da20eb286d0>], [<ast.Constant object at 0x7da20eb2a140>]]]]
variable[reservation] assign[=] dictionary[[<ast.Constant object at 0x7da20eb28c10>, <ast.Constant object at 0x7da204963dc0>, <ast.Constant object at 0x7da204962a70>, <ast.Constant object at 0x7da204961cf0>, <ast.Constant object at 0x7da2049614b0>], [<ast.Call object at 0x7da204960b50>, <ast.Call object at 0x7da1b2586c20>, <ast.Call object at 0x7da1b2584f40>, <ast.Call object at 0x7da1b2584eb0>, <ast.Call object at 0x7da1b2585de0>]]
call[name[reservations].append, parameter[name[reservation]]]
return[name[reservations]] | keyword[def] identifier[get_reservations] ( identifier[self] , identifier[sessionid] , identifier[timeout] = keyword[None] ):
literal[string]
identifier[url] = literal[string] . identifier[format] ( identifier[BASE_URL] , literal[string] )
identifier[cookies] = identifier[dict] ( identifier[sessionid] = identifier[sessionid] )
keyword[try] :
identifier[resp] = identifier[requests] . identifier[get] ( identifier[url] , identifier[timeout] = identifier[timeout] , identifier[cookies] = identifier[cookies] )
keyword[except] identifier[resp] . identifier[exceptions] . identifier[HTTPError] keyword[as] identifier[error] :
keyword[raise] identifier[APIError] ( literal[string] . identifier[format] ( identifier[error] ))
keyword[except] identifier[requests] . identifier[exceptions] . identifier[ConnectTimeout] :
keyword[raise] identifier[APIError] ( literal[string] )
identifier[html] = identifier[resp] . identifier[content] . identifier[decode] ( literal[string] )
keyword[if] literal[string] keyword[in] identifier[html] :
keyword[raise] identifier[APIError] ( literal[string] )
identifier[soup] = identifier[BeautifulSoup] ( identifier[html] , literal[string] )
identifier[reservations] =[]
identifier[media] = identifier[soup] . identifier[find_all] ( literal[string] ,{ literal[string] : literal[string] })
keyword[for] identifier[res] keyword[in] identifier[media] :
identifier[times] = identifier[res] . identifier[find_all] ( literal[string] ,{ literal[string] : literal[string] })
identifier[reservation] ={
literal[string] : identifier[res] . identifier[find] ( literal[string] ,{ literal[string] : literal[string] }). identifier[get_text] (),
literal[string] : identifier[times] [ literal[int] ]. identifier[get_text] (),
literal[string] : identifier[times] [ literal[int] ]. identifier[get_text] (),
literal[string] : identifier[res] . identifier[find] ( literal[string] ,{ literal[string] : literal[string] }). identifier[get_text] (),
literal[string] : identifier[int] ( identifier[res] . identifier[find] ( literal[string] )[ literal[string] ]. identifier[split] ( literal[string] )[ literal[int] ][:- literal[int] ])
}
identifier[reservations] . identifier[append] ( identifier[reservation] )
keyword[return] identifier[reservations] | def get_reservations(self, sessionid, timeout=None):
"""Returns a list of location IDs and names."""
url = '{}{}'.format(BASE_URL, '/reservations/')
cookies = dict(sessionid=sessionid)
try:
resp = requests.get(url, timeout=timeout, cookies=cookies) # depends on [control=['try'], data=[]]
except resp.exceptions.HTTPError as error:
raise APIError('Server Error: {}'.format(error)) # depends on [control=['except'], data=['error']]
except requests.exceptions.ConnectTimeout:
raise APIError('Timeout Error') # depends on [control=['except'], data=[]]
html = resp.content.decode('utf8')
if 'https://weblogin.pennkey.upenn.edu' in html:
raise APIError('Wharton Auth Failed. Session ID is not valid.') # depends on [control=['if'], data=[]]
soup = BeautifulSoup(html, 'html5lib')
reservations = []
media = soup.find_all('div', {'class': 'Media-body'})
for res in media:
times = res.find_all('span', {'class': 'list-view-item__end-time'})
reservation = {'date': res.find('span', {'class': 'list-view-item__start-time u-display-block'}).get_text(), 'startTime': times[0].get_text(), 'endTime': times[1].get_text(), 'location': res.find('span', {'class': 'list-view-item-building'}).get_text(), 'booking_id': int(res.find('a')['href'].split('delete/')[1][:-1])}
reservations.append(reservation) # depends on [control=['for'], data=['res']]
return reservations |
def read(self, user=True, defaults=True):
"""Find and read the files for this configuration and set them
as the sources for this configuration. To disable either
discovered user configuration files or the in-package defaults,
set `user` or `defaults` to `False`.
"""
if user:
self._add_user_source()
if defaults:
self._add_default_source() | def function[read, parameter[self, user, defaults]]:
constant[Find and read the files for this configuration and set them
as the sources for this configuration. To disable either
discovered user configuration files or the in-package defaults,
set `user` or `defaults` to `False`.
]
if name[user] begin[:]
call[name[self]._add_user_source, parameter[]]
if name[defaults] begin[:]
call[name[self]._add_default_source, parameter[]] | keyword[def] identifier[read] ( identifier[self] , identifier[user] = keyword[True] , identifier[defaults] = keyword[True] ):
literal[string]
keyword[if] identifier[user] :
identifier[self] . identifier[_add_user_source] ()
keyword[if] identifier[defaults] :
identifier[self] . identifier[_add_default_source] () | def read(self, user=True, defaults=True):
"""Find and read the files for this configuration and set them
as the sources for this configuration. To disable either
discovered user configuration files or the in-package defaults,
set `user` or `defaults` to `False`.
"""
if user:
self._add_user_source() # depends on [control=['if'], data=[]]
if defaults:
self._add_default_source() # depends on [control=['if'], data=[]] |
def dumps(obj, root_tag):
"""Serialize :arg:`obj` to an XML :class:`str`.
"""
xml = _get_xml_value(obj)
if xml:
# Remove invalid XML
xml = RE_ILLEGAL_XML.sub('', xml)
if root_tag is None:
return xml
else:
root = root_tag
return '<' + root + '>' + xml + '</' + root + '>' | def function[dumps, parameter[obj, root_tag]]:
constant[Serialize :arg:`obj` to an XML :class:`str`.
]
variable[xml] assign[=] call[name[_get_xml_value], parameter[name[obj]]]
if name[xml] begin[:]
variable[xml] assign[=] call[name[RE_ILLEGAL_XML].sub, parameter[constant[], name[xml]]]
if compare[name[root_tag] is constant[None]] begin[:]
return[name[xml]] | keyword[def] identifier[dumps] ( identifier[obj] , identifier[root_tag] ):
literal[string]
identifier[xml] = identifier[_get_xml_value] ( identifier[obj] )
keyword[if] identifier[xml] :
identifier[xml] = identifier[RE_ILLEGAL_XML] . identifier[sub] ( literal[string] , identifier[xml] )
keyword[if] identifier[root_tag] keyword[is] keyword[None] :
keyword[return] identifier[xml]
keyword[else] :
identifier[root] = identifier[root_tag]
keyword[return] literal[string] + identifier[root] + literal[string] + identifier[xml] + literal[string] + identifier[root] + literal[string] | def dumps(obj, root_tag):
"""Serialize :arg:`obj` to an XML :class:`str`.
"""
xml = _get_xml_value(obj)
if xml:
# Remove invalid XML
xml = RE_ILLEGAL_XML.sub('', xml) # depends on [control=['if'], data=[]]
if root_tag is None:
return xml # depends on [control=['if'], data=[]]
else:
root = root_tag
return '<' + root + '>' + xml + '</' + root + '>' |
def __inner_eval(self, data_name, data_idx, feval=None):
"""Evaluate training or validation data."""
if data_idx >= self.__num_dataset:
raise ValueError("Data_idx should be smaller than number of dataset")
self.__get_eval_info()
ret = []
if self.__num_inner_eval > 0:
result = np.zeros(self.__num_inner_eval, dtype=np.float64)
tmp_out_len = ctypes.c_int(0)
_safe_call(_LIB.LGBM_BoosterGetEval(
self.handle,
ctypes.c_int(data_idx),
ctypes.byref(tmp_out_len),
result.ctypes.data_as(ctypes.POINTER(ctypes.c_double))))
if tmp_out_len.value != self.__num_inner_eval:
raise ValueError("Wrong length of eval results")
for i in range_(self.__num_inner_eval):
ret.append((data_name, self.__name_inner_eval[i],
result[i], self.__higher_better_inner_eval[i]))
if feval is not None:
if data_idx == 0:
cur_data = self.train_set
else:
cur_data = self.valid_sets[data_idx - 1]
feval_ret = feval(self.__inner_predict(data_idx), cur_data)
if isinstance(feval_ret, list):
for eval_name, val, is_higher_better in feval_ret:
ret.append((data_name, eval_name, val, is_higher_better))
else:
eval_name, val, is_higher_better = feval_ret
ret.append((data_name, eval_name, val, is_higher_better))
return ret | def function[__inner_eval, parameter[self, data_name, data_idx, feval]]:
constant[Evaluate training or validation data.]
if compare[name[data_idx] greater_or_equal[>=] name[self].__num_dataset] begin[:]
<ast.Raise object at 0x7da2047e84f0>
call[name[self].__get_eval_info, parameter[]]
variable[ret] assign[=] list[[]]
if compare[name[self].__num_inner_eval greater[>] constant[0]] begin[:]
variable[result] assign[=] call[name[np].zeros, parameter[name[self].__num_inner_eval]]
variable[tmp_out_len] assign[=] call[name[ctypes].c_int, parameter[constant[0]]]
call[name[_safe_call], parameter[call[name[_LIB].LGBM_BoosterGetEval, parameter[name[self].handle, call[name[ctypes].c_int, parameter[name[data_idx]]], call[name[ctypes].byref, parameter[name[tmp_out_len]]], call[name[result].ctypes.data_as, parameter[call[name[ctypes].POINTER, parameter[name[ctypes].c_double]]]]]]]]
if compare[name[tmp_out_len].value not_equal[!=] name[self].__num_inner_eval] begin[:]
<ast.Raise object at 0x7da1b1e152d0>
for taget[name[i]] in starred[call[name[range_], parameter[name[self].__num_inner_eval]]] begin[:]
call[name[ret].append, parameter[tuple[[<ast.Name object at 0x7da1b1e15390>, <ast.Subscript object at 0x7da1b1e16bc0>, <ast.Subscript object at 0x7da1b1e15ba0>, <ast.Subscript object at 0x7da1b1e144f0>]]]]
if compare[name[feval] is_not constant[None]] begin[:]
if compare[name[data_idx] equal[==] constant[0]] begin[:]
variable[cur_data] assign[=] name[self].train_set
variable[feval_ret] assign[=] call[name[feval], parameter[call[name[self].__inner_predict, parameter[name[data_idx]]], name[cur_data]]]
if call[name[isinstance], parameter[name[feval_ret], name[list]]] begin[:]
for taget[tuple[[<ast.Name object at 0x7da1b1e150f0>, <ast.Name object at 0x7da1b1e16bf0>, <ast.Name object at 0x7da1b1e14070>]]] in starred[name[feval_ret]] begin[:]
call[name[ret].append, parameter[tuple[[<ast.Name object at 0x7da204347460>, <ast.Name object at 0x7da204344f10>, <ast.Name object at 0x7da204344e80>, <ast.Name object at 0x7da204347c40>]]]]
return[name[ret]] | keyword[def] identifier[__inner_eval] ( identifier[self] , identifier[data_name] , identifier[data_idx] , identifier[feval] = keyword[None] ):
literal[string]
keyword[if] identifier[data_idx] >= identifier[self] . identifier[__num_dataset] :
keyword[raise] identifier[ValueError] ( literal[string] )
identifier[self] . identifier[__get_eval_info] ()
identifier[ret] =[]
keyword[if] identifier[self] . identifier[__num_inner_eval] > literal[int] :
identifier[result] = identifier[np] . identifier[zeros] ( identifier[self] . identifier[__num_inner_eval] , identifier[dtype] = identifier[np] . identifier[float64] )
identifier[tmp_out_len] = identifier[ctypes] . identifier[c_int] ( literal[int] )
identifier[_safe_call] ( identifier[_LIB] . identifier[LGBM_BoosterGetEval] (
identifier[self] . identifier[handle] ,
identifier[ctypes] . identifier[c_int] ( identifier[data_idx] ),
identifier[ctypes] . identifier[byref] ( identifier[tmp_out_len] ),
identifier[result] . identifier[ctypes] . identifier[data_as] ( identifier[ctypes] . identifier[POINTER] ( identifier[ctypes] . identifier[c_double] ))))
keyword[if] identifier[tmp_out_len] . identifier[value] != identifier[self] . identifier[__num_inner_eval] :
keyword[raise] identifier[ValueError] ( literal[string] )
keyword[for] identifier[i] keyword[in] identifier[range_] ( identifier[self] . identifier[__num_inner_eval] ):
identifier[ret] . identifier[append] (( identifier[data_name] , identifier[self] . identifier[__name_inner_eval] [ identifier[i] ],
identifier[result] [ identifier[i] ], identifier[self] . identifier[__higher_better_inner_eval] [ identifier[i] ]))
keyword[if] identifier[feval] keyword[is] keyword[not] keyword[None] :
keyword[if] identifier[data_idx] == literal[int] :
identifier[cur_data] = identifier[self] . identifier[train_set]
keyword[else] :
identifier[cur_data] = identifier[self] . identifier[valid_sets] [ identifier[data_idx] - literal[int] ]
identifier[feval_ret] = identifier[feval] ( identifier[self] . identifier[__inner_predict] ( identifier[data_idx] ), identifier[cur_data] )
keyword[if] identifier[isinstance] ( identifier[feval_ret] , identifier[list] ):
keyword[for] identifier[eval_name] , identifier[val] , identifier[is_higher_better] keyword[in] identifier[feval_ret] :
identifier[ret] . identifier[append] (( identifier[data_name] , identifier[eval_name] , identifier[val] , identifier[is_higher_better] ))
keyword[else] :
identifier[eval_name] , identifier[val] , identifier[is_higher_better] = identifier[feval_ret]
identifier[ret] . identifier[append] (( identifier[data_name] , identifier[eval_name] , identifier[val] , identifier[is_higher_better] ))
keyword[return] identifier[ret] | def __inner_eval(self, data_name, data_idx, feval=None):
"""Evaluate training or validation data."""
if data_idx >= self.__num_dataset:
raise ValueError('Data_idx should be smaller than number of dataset') # depends on [control=['if'], data=[]]
self.__get_eval_info()
ret = []
if self.__num_inner_eval > 0:
result = np.zeros(self.__num_inner_eval, dtype=np.float64)
tmp_out_len = ctypes.c_int(0)
_safe_call(_LIB.LGBM_BoosterGetEval(self.handle, ctypes.c_int(data_idx), ctypes.byref(tmp_out_len), result.ctypes.data_as(ctypes.POINTER(ctypes.c_double))))
if tmp_out_len.value != self.__num_inner_eval:
raise ValueError('Wrong length of eval results') # depends on [control=['if'], data=[]]
for i in range_(self.__num_inner_eval):
ret.append((data_name, self.__name_inner_eval[i], result[i], self.__higher_better_inner_eval[i])) # depends on [control=['for'], data=['i']] # depends on [control=['if'], data=[]]
if feval is not None:
if data_idx == 0:
cur_data = self.train_set # depends on [control=['if'], data=[]]
else:
cur_data = self.valid_sets[data_idx - 1]
feval_ret = feval(self.__inner_predict(data_idx), cur_data)
if isinstance(feval_ret, list):
for (eval_name, val, is_higher_better) in feval_ret:
ret.append((data_name, eval_name, val, is_higher_better)) # depends on [control=['for'], data=[]] # depends on [control=['if'], data=[]]
else:
(eval_name, val, is_higher_better) = feval_ret
ret.append((data_name, eval_name, val, is_higher_better)) # depends on [control=['if'], data=['feval']]
return ret |
def verify_master(self, payload, master_pub=True):
'''
Verify that the master is the same one that was previously accepted.
:param dict payload: The incoming payload. This is a dictionary which may have the following keys:
'aes': The shared AES key
'enc': The format of the message. ('clear', 'pub', etc)
'publish_port': The TCP port which published the message
'token': The encrypted token used to verify the message.
'pub_key': The RSA public key of the sender.
:param bool master_pub: Operate as if minion had no master pubkey when it sent auth request, i.e. don't verify
the minion signature
:rtype: str
:return: An empty string on verification failure. On success, the decrypted AES message in the payload.
'''
m_pub_fn = os.path.join(self.opts['pki_dir'], self.mpub)
m_pub_exists = os.path.isfile(m_pub_fn)
if m_pub_exists and master_pub and not self.opts['open_mode']:
with salt.utils.files.fopen(m_pub_fn) as fp_:
local_master_pub = fp_.read()
if payload['pub_key'].replace('\n', '').replace('\r', '') != \
local_master_pub.replace('\n', '').replace('\r', ''):
if not self.check_auth_deps(payload):
return ''
if self.opts['verify_master_pubkey_sign']:
if self.verify_signing_master(payload):
return self.extract_aes(payload, master_pub=False)
else:
return ''
else:
# This is not the last master we connected to
log.error(
'The master key has changed, the salt master could '
'have been subverted, verify salt master\'s public '
'key'
)
return ''
else:
if not self.check_auth_deps(payload):
return ''
# verify the signature of the pubkey even if it has
# not changed compared with the one we already have
if self.opts['always_verify_signature']:
if self.verify_signing_master(payload):
return self.extract_aes(payload)
else:
log.error(
'The masters public could not be verified. Is the '
'verification pubkey %s up to date?',
self.opts['master_sign_key_name'] + '.pub'
)
return ''
else:
return self.extract_aes(payload)
else:
if not self.check_auth_deps(payload):
return ''
# verify the masters pubkey signature if the minion
# has not received any masters pubkey before
if self.opts['verify_master_pubkey_sign']:
if self.verify_signing_master(payload):
return self.extract_aes(payload, master_pub=False)
else:
return ''
else:
if not m_pub_exists:
# the minion has not received any masters pubkey yet, write
# the newly received pubkey to minion_master.pub
with salt.utils.files.fopen(m_pub_fn, 'wb+') as fp_:
fp_.write(salt.utils.stringutils.to_bytes(payload['pub_key']))
return self.extract_aes(payload, master_pub=False) | def function[verify_master, parameter[self, payload, master_pub]]:
constant[
Verify that the master is the same one that was previously accepted.
:param dict payload: The incoming payload. This is a dictionary which may have the following keys:
'aes': The shared AES key
'enc': The format of the message. ('clear', 'pub', etc)
'publish_port': The TCP port which published the message
'token': The encrypted token used to verify the message.
'pub_key': The RSA public key of the sender.
:param bool master_pub: Operate as if minion had no master pubkey when it sent auth request, i.e. don't verify
the minion signature
:rtype: str
:return: An empty string on verification failure. On success, the decrypted AES message in the payload.
]
variable[m_pub_fn] assign[=] call[name[os].path.join, parameter[call[name[self].opts][constant[pki_dir]], name[self].mpub]]
variable[m_pub_exists] assign[=] call[name[os].path.isfile, parameter[name[m_pub_fn]]]
if <ast.BoolOp object at 0x7da18ede44c0> begin[:]
with call[name[salt].utils.files.fopen, parameter[name[m_pub_fn]]] begin[:]
variable[local_master_pub] assign[=] call[name[fp_].read, parameter[]]
if compare[call[call[call[name[payload]][constant[pub_key]].replace, parameter[constant[
], constant[]]].replace, parameter[constant[
], constant[]]] not_equal[!=] call[call[name[local_master_pub].replace, parameter[constant[
], constant[]]].replace, parameter[constant[
], constant[]]]] begin[:]
if <ast.UnaryOp object at 0x7da18ede6920> begin[:]
return[constant[]]
if call[name[self].opts][constant[verify_master_pubkey_sign]] begin[:]
if call[name[self].verify_signing_master, parameter[name[payload]]] begin[:]
return[call[name[self].extract_aes, parameter[name[payload]]]] | keyword[def] identifier[verify_master] ( identifier[self] , identifier[payload] , identifier[master_pub] = keyword[True] ):
literal[string]
identifier[m_pub_fn] = identifier[os] . identifier[path] . identifier[join] ( identifier[self] . identifier[opts] [ literal[string] ], identifier[self] . identifier[mpub] )
identifier[m_pub_exists] = identifier[os] . identifier[path] . identifier[isfile] ( identifier[m_pub_fn] )
keyword[if] identifier[m_pub_exists] keyword[and] identifier[master_pub] keyword[and] keyword[not] identifier[self] . identifier[opts] [ literal[string] ]:
keyword[with] identifier[salt] . identifier[utils] . identifier[files] . identifier[fopen] ( identifier[m_pub_fn] ) keyword[as] identifier[fp_] :
identifier[local_master_pub] = identifier[fp_] . identifier[read] ()
keyword[if] identifier[payload] [ literal[string] ]. identifier[replace] ( literal[string] , literal[string] ). identifier[replace] ( literal[string] , literal[string] )!= identifier[local_master_pub] . identifier[replace] ( literal[string] , literal[string] ). identifier[replace] ( literal[string] , literal[string] ):
keyword[if] keyword[not] identifier[self] . identifier[check_auth_deps] ( identifier[payload] ):
keyword[return] literal[string]
keyword[if] identifier[self] . identifier[opts] [ literal[string] ]:
keyword[if] identifier[self] . identifier[verify_signing_master] ( identifier[payload] ):
keyword[return] identifier[self] . identifier[extract_aes] ( identifier[payload] , identifier[master_pub] = keyword[False] )
keyword[else] :
keyword[return] literal[string]
keyword[else] :
identifier[log] . identifier[error] (
literal[string]
literal[string]
literal[string]
)
keyword[return] literal[string]
keyword[else] :
keyword[if] keyword[not] identifier[self] . identifier[check_auth_deps] ( identifier[payload] ):
keyword[return] literal[string]
keyword[if] identifier[self] . identifier[opts] [ literal[string] ]:
keyword[if] identifier[self] . identifier[verify_signing_master] ( identifier[payload] ):
keyword[return] identifier[self] . identifier[extract_aes] ( identifier[payload] )
keyword[else] :
identifier[log] . identifier[error] (
literal[string]
literal[string] ,
identifier[self] . identifier[opts] [ literal[string] ]+ literal[string]
)
keyword[return] literal[string]
keyword[else] :
keyword[return] identifier[self] . identifier[extract_aes] ( identifier[payload] )
keyword[else] :
keyword[if] keyword[not] identifier[self] . identifier[check_auth_deps] ( identifier[payload] ):
keyword[return] literal[string]
keyword[if] identifier[self] . identifier[opts] [ literal[string] ]:
keyword[if] identifier[self] . identifier[verify_signing_master] ( identifier[payload] ):
keyword[return] identifier[self] . identifier[extract_aes] ( identifier[payload] , identifier[master_pub] = keyword[False] )
keyword[else] :
keyword[return] literal[string]
keyword[else] :
keyword[if] keyword[not] identifier[m_pub_exists] :
keyword[with] identifier[salt] . identifier[utils] . identifier[files] . identifier[fopen] ( identifier[m_pub_fn] , literal[string] ) keyword[as] identifier[fp_] :
identifier[fp_] . identifier[write] ( identifier[salt] . identifier[utils] . identifier[stringutils] . identifier[to_bytes] ( identifier[payload] [ literal[string] ]))
keyword[return] identifier[self] . identifier[extract_aes] ( identifier[payload] , identifier[master_pub] = keyword[False] ) | def verify_master(self, payload, master_pub=True):
"""
Verify that the master is the same one that was previously accepted.
:param dict payload: The incoming payload. This is a dictionary which may have the following keys:
'aes': The shared AES key
'enc': The format of the message. ('clear', 'pub', etc)
'publish_port': The TCP port which published the message
'token': The encrypted token used to verify the message.
'pub_key': The RSA public key of the sender.
:param bool master_pub: Operate as if minion had no master pubkey when it sent auth request, i.e. don't verify
the minion signature
:rtype: str
:return: An empty string on verification failure. On success, the decrypted AES message in the payload.
"""
m_pub_fn = os.path.join(self.opts['pki_dir'], self.mpub)
m_pub_exists = os.path.isfile(m_pub_fn)
if m_pub_exists and master_pub and (not self.opts['open_mode']):
with salt.utils.files.fopen(m_pub_fn) as fp_:
local_master_pub = fp_.read() # depends on [control=['with'], data=['fp_']]
if payload['pub_key'].replace('\n', '').replace('\r', '') != local_master_pub.replace('\n', '').replace('\r', ''):
if not self.check_auth_deps(payload):
return '' # depends on [control=['if'], data=[]]
if self.opts['verify_master_pubkey_sign']:
if self.verify_signing_master(payload):
return self.extract_aes(payload, master_pub=False) # depends on [control=['if'], data=[]]
else:
return '' # depends on [control=['if'], data=[]]
else:
# This is not the last master we connected to
log.error("The master key has changed, the salt master could have been subverted, verify salt master's public key")
return '' # depends on [control=['if'], data=[]]
else:
if not self.check_auth_deps(payload):
return '' # depends on [control=['if'], data=[]]
# verify the signature of the pubkey even if it has
# not changed compared with the one we already have
if self.opts['always_verify_signature']:
if self.verify_signing_master(payload):
return self.extract_aes(payload) # depends on [control=['if'], data=[]]
else:
log.error('The masters public could not be verified. Is the verification pubkey %s up to date?', self.opts['master_sign_key_name'] + '.pub')
return '' # depends on [control=['if'], data=[]]
else:
return self.extract_aes(payload) # depends on [control=['if'], data=[]]
else:
if not self.check_auth_deps(payload):
return '' # depends on [control=['if'], data=[]]
# verify the masters pubkey signature if the minion
# has not received any masters pubkey before
if self.opts['verify_master_pubkey_sign']:
if self.verify_signing_master(payload):
return self.extract_aes(payload, master_pub=False) # depends on [control=['if'], data=[]]
else:
return '' # depends on [control=['if'], data=[]]
else:
if not m_pub_exists:
# the minion has not received any masters pubkey yet, write
# the newly received pubkey to minion_master.pub
with salt.utils.files.fopen(m_pub_fn, 'wb+') as fp_:
fp_.write(salt.utils.stringutils.to_bytes(payload['pub_key'])) # depends on [control=['with'], data=['fp_']] # depends on [control=['if'], data=[]]
return self.extract_aes(payload, master_pub=False) |
def check_weather(self):
'''
Query the configured/queried station and return the weather data
'''
if self.station_id is None:
# Failed to get the nearest station ID when first launched, so
# retry it.
self.get_station_id()
self.data['update_error'] = ''
try:
query_url = STATION_QUERY_URL % (self.api_key,
'conditions',
self.station_id)
try:
response = self.api_request(query_url)['current_observation']
self.forecast_url = response.pop('ob_url', None)
except KeyError:
self.logger.error('No weather data found for %s', self.station_id)
self.data['update_error'] = self.update_error
return
if self.forecast:
query_url = STATION_QUERY_URL % (self.api_key,
'forecast',
self.station_id)
try:
forecast = self.api_request(query_url)['forecast']
forecast = forecast['simpleforecast']['forecastday'][0]
except (KeyError, IndexError, TypeError):
self.logger.error(
'No forecast data found for %s', self.station_id)
# This is a non-fatal error, so don't return but do set the
# error flag.
self.data['update_error'] = self.update_error
unit = 'celsius' if self.units == 'metric' else 'fahrenheit'
low_temp = forecast.get('low', {}).get(unit, '')
high_temp = forecast.get('high', {}).get(unit, '')
else:
low_temp = high_temp = ''
if self.units == 'metric':
temp_unit = 'c'
speed_unit = 'kph'
distance_unit = 'km'
pressure_unit = 'mb'
else:
temp_unit = 'f'
speed_unit = 'mph'
distance_unit = 'mi'
pressure_unit = 'in'
def _find(key, data=None, default=''):
if data is None:
data = response
return str(data.get(key, default))
try:
observation_epoch = _find('observation_epoch') or _find('local_epoch')
observation_time = datetime.fromtimestamp(int(observation_epoch))
except (TypeError, ValueError):
log.debug(
'Observation time \'%s\' is not a UNIX timestamp',
observation_epoch
)
observation_time = datetime.fromtimestamp(0)
self.data['city'] = _find('city', response['observation_location'])
self.data['condition'] = _find('weather')
self.data['observation_time'] = observation_time
self.data['current_temp'] = _find('temp_' + temp_unit).split('.')[0]
self.data['low_temp'] = low_temp
self.data['high_temp'] = high_temp
self.data['temp_unit'] = '°' + temp_unit.upper()
self.data['feelslike'] = _find('feelslike_' + temp_unit)
self.data['dewpoint'] = _find('dewpoint_' + temp_unit)
self.data['wind_speed'] = _find('wind_' + speed_unit)
self.data['wind_unit'] = speed_unit
self.data['wind_direction'] = _find('wind_dir')
self.data['wind_gust'] = _find('wind_gust_' + speed_unit)
self.data['pressure'] = _find('pressure_' + pressure_unit)
self.data['pressure_unit'] = pressure_unit
self.data['pressure_trend'] = _find('pressure_trend')
self.data['visibility'] = _find('visibility_' + distance_unit)
self.data['visibility_unit'] = distance_unit
self.data['humidity'] = _find('relative_humidity').rstrip('%')
self.data['uv_index'] = _find('UV')
except Exception:
# Don't let an uncaught exception kill the update thread
self.logger.error(
'Uncaught error occurred while checking weather. '
'Exception follows:', exc_info=True
)
self.data['update_error'] = self.update_error | def function[check_weather, parameter[self]]:
constant[
Query the configured/queried station and return the weather data
]
if compare[name[self].station_id is constant[None]] begin[:]
call[name[self].get_station_id, parameter[]]
call[name[self].data][constant[update_error]] assign[=] constant[]
<ast.Try object at 0x7da18bcc9a50> | keyword[def] identifier[check_weather] ( identifier[self] ):
literal[string]
keyword[if] identifier[self] . identifier[station_id] keyword[is] keyword[None] :
identifier[self] . identifier[get_station_id] ()
identifier[self] . identifier[data] [ literal[string] ]= literal[string]
keyword[try] :
identifier[query_url] = identifier[STATION_QUERY_URL] %( identifier[self] . identifier[api_key] ,
literal[string] ,
identifier[self] . identifier[station_id] )
keyword[try] :
identifier[response] = identifier[self] . identifier[api_request] ( identifier[query_url] )[ literal[string] ]
identifier[self] . identifier[forecast_url] = identifier[response] . identifier[pop] ( literal[string] , keyword[None] )
keyword[except] identifier[KeyError] :
identifier[self] . identifier[logger] . identifier[error] ( literal[string] , identifier[self] . identifier[station_id] )
identifier[self] . identifier[data] [ literal[string] ]= identifier[self] . identifier[update_error]
keyword[return]
keyword[if] identifier[self] . identifier[forecast] :
identifier[query_url] = identifier[STATION_QUERY_URL] %( identifier[self] . identifier[api_key] ,
literal[string] ,
identifier[self] . identifier[station_id] )
keyword[try] :
identifier[forecast] = identifier[self] . identifier[api_request] ( identifier[query_url] )[ literal[string] ]
identifier[forecast] = identifier[forecast] [ literal[string] ][ literal[string] ][ literal[int] ]
keyword[except] ( identifier[KeyError] , identifier[IndexError] , identifier[TypeError] ):
identifier[self] . identifier[logger] . identifier[error] (
literal[string] , identifier[self] . identifier[station_id] )
identifier[self] . identifier[data] [ literal[string] ]= identifier[self] . identifier[update_error]
identifier[unit] = literal[string] keyword[if] identifier[self] . identifier[units] == literal[string] keyword[else] literal[string]
identifier[low_temp] = identifier[forecast] . identifier[get] ( literal[string] ,{}). identifier[get] ( identifier[unit] , literal[string] )
identifier[high_temp] = identifier[forecast] . identifier[get] ( literal[string] ,{}). identifier[get] ( identifier[unit] , literal[string] )
keyword[else] :
identifier[low_temp] = identifier[high_temp] = literal[string]
keyword[if] identifier[self] . identifier[units] == literal[string] :
identifier[temp_unit] = literal[string]
identifier[speed_unit] = literal[string]
identifier[distance_unit] = literal[string]
identifier[pressure_unit] = literal[string]
keyword[else] :
identifier[temp_unit] = literal[string]
identifier[speed_unit] = literal[string]
identifier[distance_unit] = literal[string]
identifier[pressure_unit] = literal[string]
keyword[def] identifier[_find] ( identifier[key] , identifier[data] = keyword[None] , identifier[default] = literal[string] ):
keyword[if] identifier[data] keyword[is] keyword[None] :
identifier[data] = identifier[response]
keyword[return] identifier[str] ( identifier[data] . identifier[get] ( identifier[key] , identifier[default] ))
keyword[try] :
identifier[observation_epoch] = identifier[_find] ( literal[string] ) keyword[or] identifier[_find] ( literal[string] )
identifier[observation_time] = identifier[datetime] . identifier[fromtimestamp] ( identifier[int] ( identifier[observation_epoch] ))
keyword[except] ( identifier[TypeError] , identifier[ValueError] ):
identifier[log] . identifier[debug] (
literal[string] ,
identifier[observation_epoch]
)
identifier[observation_time] = identifier[datetime] . identifier[fromtimestamp] ( literal[int] )
identifier[self] . identifier[data] [ literal[string] ]= identifier[_find] ( literal[string] , identifier[response] [ literal[string] ])
identifier[self] . identifier[data] [ literal[string] ]= identifier[_find] ( literal[string] )
identifier[self] . identifier[data] [ literal[string] ]= identifier[observation_time]
identifier[self] . identifier[data] [ literal[string] ]= identifier[_find] ( literal[string] + identifier[temp_unit] ). identifier[split] ( literal[string] )[ literal[int] ]
identifier[self] . identifier[data] [ literal[string] ]= identifier[low_temp]
identifier[self] . identifier[data] [ literal[string] ]= identifier[high_temp]
identifier[self] . identifier[data] [ literal[string] ]= literal[string] + identifier[temp_unit] . identifier[upper] ()
identifier[self] . identifier[data] [ literal[string] ]= identifier[_find] ( literal[string] + identifier[temp_unit] )
identifier[self] . identifier[data] [ literal[string] ]= identifier[_find] ( literal[string] + identifier[temp_unit] )
identifier[self] . identifier[data] [ literal[string] ]= identifier[_find] ( literal[string] + identifier[speed_unit] )
identifier[self] . identifier[data] [ literal[string] ]= identifier[speed_unit]
identifier[self] . identifier[data] [ literal[string] ]= identifier[_find] ( literal[string] )
identifier[self] . identifier[data] [ literal[string] ]= identifier[_find] ( literal[string] + identifier[speed_unit] )
identifier[self] . identifier[data] [ literal[string] ]= identifier[_find] ( literal[string] + identifier[pressure_unit] )
identifier[self] . identifier[data] [ literal[string] ]= identifier[pressure_unit]
identifier[self] . identifier[data] [ literal[string] ]= identifier[_find] ( literal[string] )
identifier[self] . identifier[data] [ literal[string] ]= identifier[_find] ( literal[string] + identifier[distance_unit] )
identifier[self] . identifier[data] [ literal[string] ]= identifier[distance_unit]
identifier[self] . identifier[data] [ literal[string] ]= identifier[_find] ( literal[string] ). identifier[rstrip] ( literal[string] )
identifier[self] . identifier[data] [ literal[string] ]= identifier[_find] ( literal[string] )
keyword[except] identifier[Exception] :
identifier[self] . identifier[logger] . identifier[error] (
literal[string]
literal[string] , identifier[exc_info] = keyword[True]
)
identifier[self] . identifier[data] [ literal[string] ]= identifier[self] . identifier[update_error] | def check_weather(self):
"""
Query the configured/queried station and return the weather data
"""
if self.station_id is None:
# Failed to get the nearest station ID when first launched, so
# retry it.
self.get_station_id() # depends on [control=['if'], data=[]]
self.data['update_error'] = ''
try:
query_url = STATION_QUERY_URL % (self.api_key, 'conditions', self.station_id)
try:
response = self.api_request(query_url)['current_observation']
self.forecast_url = response.pop('ob_url', None) # depends on [control=['try'], data=[]]
except KeyError:
self.logger.error('No weather data found for %s', self.station_id)
self.data['update_error'] = self.update_error
return # depends on [control=['except'], data=[]]
if self.forecast:
query_url = STATION_QUERY_URL % (self.api_key, 'forecast', self.station_id)
try:
forecast = self.api_request(query_url)['forecast']
forecast = forecast['simpleforecast']['forecastday'][0] # depends on [control=['try'], data=[]]
except (KeyError, IndexError, TypeError):
self.logger.error('No forecast data found for %s', self.station_id)
# This is a non-fatal error, so don't return but do set the
# error flag.
self.data['update_error'] = self.update_error # depends on [control=['except'], data=[]]
unit = 'celsius' if self.units == 'metric' else 'fahrenheit'
low_temp = forecast.get('low', {}).get(unit, '')
high_temp = forecast.get('high', {}).get(unit, '') # depends on [control=['if'], data=[]]
else:
low_temp = high_temp = ''
if self.units == 'metric':
temp_unit = 'c'
speed_unit = 'kph'
distance_unit = 'km'
pressure_unit = 'mb' # depends on [control=['if'], data=[]]
else:
temp_unit = 'f'
speed_unit = 'mph'
distance_unit = 'mi'
pressure_unit = 'in'
def _find(key, data=None, default=''):
if data is None:
data = response # depends on [control=['if'], data=['data']]
return str(data.get(key, default))
try:
observation_epoch = _find('observation_epoch') or _find('local_epoch')
observation_time = datetime.fromtimestamp(int(observation_epoch)) # depends on [control=['try'], data=[]]
except (TypeError, ValueError):
log.debug("Observation time '%s' is not a UNIX timestamp", observation_epoch)
observation_time = datetime.fromtimestamp(0) # depends on [control=['except'], data=[]]
self.data['city'] = _find('city', response['observation_location'])
self.data['condition'] = _find('weather')
self.data['observation_time'] = observation_time
self.data['current_temp'] = _find('temp_' + temp_unit).split('.')[0]
self.data['low_temp'] = low_temp
self.data['high_temp'] = high_temp
self.data['temp_unit'] = '°' + temp_unit.upper()
self.data['feelslike'] = _find('feelslike_' + temp_unit)
self.data['dewpoint'] = _find('dewpoint_' + temp_unit)
self.data['wind_speed'] = _find('wind_' + speed_unit)
self.data['wind_unit'] = speed_unit
self.data['wind_direction'] = _find('wind_dir')
self.data['wind_gust'] = _find('wind_gust_' + speed_unit)
self.data['pressure'] = _find('pressure_' + pressure_unit)
self.data['pressure_unit'] = pressure_unit
self.data['pressure_trend'] = _find('pressure_trend')
self.data['visibility'] = _find('visibility_' + distance_unit)
self.data['visibility_unit'] = distance_unit
self.data['humidity'] = _find('relative_humidity').rstrip('%')
self.data['uv_index'] = _find('UV') # depends on [control=['try'], data=[]]
except Exception:
# Don't let an uncaught exception kill the update thread
self.logger.error('Uncaught error occurred while checking weather. Exception follows:', exc_info=True)
self.data['update_error'] = self.update_error # depends on [control=['except'], data=[]] |
def find_one(self, cls, id):
"""Required functionality."""
try:
db_result = self.get_class_table(cls).lookup(id)
except ItemNotFound:
# according to docs, this shouldn't be required, but it IS
db_result = None
if not db_result:
return None
obj = cls.from_data(db_result['value'])
return obj | def function[find_one, parameter[self, cls, id]]:
constant[Required functionality.]
<ast.Try object at 0x7da18dc05570>
if <ast.UnaryOp object at 0x7da18dc04c70> begin[:]
return[constant[None]]
variable[obj] assign[=] call[name[cls].from_data, parameter[call[name[db_result]][constant[value]]]]
return[name[obj]] | keyword[def] identifier[find_one] ( identifier[self] , identifier[cls] , identifier[id] ):
literal[string]
keyword[try] :
identifier[db_result] = identifier[self] . identifier[get_class_table] ( identifier[cls] ). identifier[lookup] ( identifier[id] )
keyword[except] identifier[ItemNotFound] :
identifier[db_result] = keyword[None]
keyword[if] keyword[not] identifier[db_result] :
keyword[return] keyword[None]
identifier[obj] = identifier[cls] . identifier[from_data] ( identifier[db_result] [ literal[string] ])
keyword[return] identifier[obj] | def find_one(self, cls, id):
"""Required functionality."""
try:
db_result = self.get_class_table(cls).lookup(id) # depends on [control=['try'], data=[]]
except ItemNotFound:
# according to docs, this shouldn't be required, but it IS
db_result = None # depends on [control=['except'], data=[]]
if not db_result:
return None # depends on [control=['if'], data=[]]
obj = cls.from_data(db_result['value'])
return obj |
def add_identities(cls, db, identities, backend):
""" Load identities list from backend in Sorting Hat """
logger.info("Adding the identities to SortingHat")
total = 0
for identity in identities:
try:
cls.add_identity(db, identity, backend)
total += 1
except Exception as e:
logger.error("Unexcepted error when adding identities: %s" % e)
continue
logger.info("Total identities added to SH: %i", total) | def function[add_identities, parameter[cls, db, identities, backend]]:
constant[ Load identities list from backend in Sorting Hat ]
call[name[logger].info, parameter[constant[Adding the identities to SortingHat]]]
variable[total] assign[=] constant[0]
for taget[name[identity]] in starred[name[identities]] begin[:]
<ast.Try object at 0x7da1b0ff8880>
call[name[logger].info, parameter[constant[Total identities added to SH: %i], name[total]]] | keyword[def] identifier[add_identities] ( identifier[cls] , identifier[db] , identifier[identities] , identifier[backend] ):
literal[string]
identifier[logger] . identifier[info] ( literal[string] )
identifier[total] = literal[int]
keyword[for] identifier[identity] keyword[in] identifier[identities] :
keyword[try] :
identifier[cls] . identifier[add_identity] ( identifier[db] , identifier[identity] , identifier[backend] )
identifier[total] += literal[int]
keyword[except] identifier[Exception] keyword[as] identifier[e] :
identifier[logger] . identifier[error] ( literal[string] % identifier[e] )
keyword[continue]
identifier[logger] . identifier[info] ( literal[string] , identifier[total] ) | def add_identities(cls, db, identities, backend):
""" Load identities list from backend in Sorting Hat """
logger.info('Adding the identities to SortingHat')
total = 0
for identity in identities:
try:
cls.add_identity(db, identity, backend)
total += 1 # depends on [control=['try'], data=[]]
except Exception as e:
logger.error('Unexcepted error when adding identities: %s' % e)
continue # depends on [control=['except'], data=['e']] # depends on [control=['for'], data=['identity']]
logger.info('Total identities added to SH: %i', total) |
async def info(self, obj_id=None):
'''Get info about object id
|coro|
Parameters
----------
obj_id : str, list
if not provided, server info is retured(as a dict).
Otherwise, an object with that id is returned
(or objects if `obj_id` is a list).
'''
if obj_id:
try:
return await self.process(obj_id)
except JSONDecodeError:
raise LookupError('Error object with that id does not exist', obj_id)
else:
return await self.connector.getJson('/system/info/public', remote=False) | <ast.AsyncFunctionDef object at 0x7da1b28707f0> | keyword[async] keyword[def] identifier[info] ( identifier[self] , identifier[obj_id] = keyword[None] ):
literal[string]
keyword[if] identifier[obj_id] :
keyword[try] :
keyword[return] keyword[await] identifier[self] . identifier[process] ( identifier[obj_id] )
keyword[except] identifier[JSONDecodeError] :
keyword[raise] identifier[LookupError] ( literal[string] , identifier[obj_id] )
keyword[else] :
keyword[return] keyword[await] identifier[self] . identifier[connector] . identifier[getJson] ( literal[string] , identifier[remote] = keyword[False] ) | async def info(self, obj_id=None):
"""Get info about object id
|coro|
Parameters
----------
obj_id : str, list
if not provided, server info is retured(as a dict).
Otherwise, an object with that id is returned
(or objects if `obj_id` is a list).
"""
if obj_id:
try:
return await self.process(obj_id) # depends on [control=['try'], data=[]]
except JSONDecodeError:
raise LookupError('Error object with that id does not exist', obj_id) # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]]
else:
return await self.connector.getJson('/system/info/public', remote=False) |
def create_session(self, user, password, domain, session_name):
"""Creates a new guest session for controlling the guest. The new session
will be started asynchronously, meaning on return of this function it is
not guaranteed that the guest session is in a started and/or usable state.
To wait for successful startup, use the :py:func:`IGuestSession.wait_for`
call.
A guest session represents one impersonated user account in the guest, so
every operation will use the same credentials specified when creating
the session object via :py:func:`IGuest.create_session` . Anonymous
sessions, that is, sessions without specifying a valid
user account in the guest are not allowed reasons of security.
There can be a maximum of 32 sessions at once per VM. An error will
be returned if this has been reached.
For more information please consult :py:class:`IGuestSession`
in user of type str
User name this session will be using to control the guest; has to exist
and have the appropriate rights to execute programs in the VM. Must not
be empty.
in password of type str
Password of the user account to be used. Empty passwords are allowed.
in domain of type str
Domain name of the user account to be used if the guest is part of
a domain. Optional. This feature is not implemented yet.
in session_name of type str
The session's friendly name. Optional, can be empty.
return guest_session of type :class:`IGuestSession`
The newly created session object.
raises :class:`VBoxErrorIprtError`
Error creating guest session.
raises :class:`VBoxErrorMaximumReached`
The maximum of concurrent guest sessions has been reached.
"""
if not isinstance(user, basestring):
raise TypeError("user can only be an instance of type basestring")
if not isinstance(password, basestring):
raise TypeError("password can only be an instance of type basestring")
if not isinstance(domain, basestring):
raise TypeError("domain can only be an instance of type basestring")
if not isinstance(session_name, basestring):
raise TypeError("session_name can only be an instance of type basestring")
guest_session = self._call("createSession",
in_p=[user, password, domain, session_name])
guest_session = IGuestSession(guest_session)
return guest_session | def function[create_session, parameter[self, user, password, domain, session_name]]:
constant[Creates a new guest session for controlling the guest. The new session
will be started asynchronously, meaning on return of this function it is
not guaranteed that the guest session is in a started and/or usable state.
To wait for successful startup, use the :py:func:`IGuestSession.wait_for`
call.
A guest session represents one impersonated user account in the guest, so
every operation will use the same credentials specified when creating
the session object via :py:func:`IGuest.create_session` . Anonymous
sessions, that is, sessions without specifying a valid
user account in the guest are not allowed reasons of security.
There can be a maximum of 32 sessions at once per VM. An error will
be returned if this has been reached.
For more information please consult :py:class:`IGuestSession`
in user of type str
User name this session will be using to control the guest; has to exist
and have the appropriate rights to execute programs in the VM. Must not
be empty.
in password of type str
Password of the user account to be used. Empty passwords are allowed.
in domain of type str
Domain name of the user account to be used if the guest is part of
a domain. Optional. This feature is not implemented yet.
in session_name of type str
The session's friendly name. Optional, can be empty.
return guest_session of type :class:`IGuestSession`
The newly created session object.
raises :class:`VBoxErrorIprtError`
Error creating guest session.
raises :class:`VBoxErrorMaximumReached`
The maximum of concurrent guest sessions has been reached.
]
if <ast.UnaryOp object at 0x7da20c7cad70> begin[:]
<ast.Raise object at 0x7da20c7c9840>
if <ast.UnaryOp object at 0x7da20c7ca350> begin[:]
<ast.Raise object at 0x7da204960070>
if <ast.UnaryOp object at 0x7da20c6c47f0> begin[:]
<ast.Raise object at 0x7da20c6c5150>
if <ast.UnaryOp object at 0x7da20c6c43a0> begin[:]
<ast.Raise object at 0x7da20c6c7b20>
variable[guest_session] assign[=] call[name[self]._call, parameter[constant[createSession]]]
variable[guest_session] assign[=] call[name[IGuestSession], parameter[name[guest_session]]]
return[name[guest_session]] | keyword[def] identifier[create_session] ( identifier[self] , identifier[user] , identifier[password] , identifier[domain] , identifier[session_name] ):
literal[string]
keyword[if] keyword[not] identifier[isinstance] ( identifier[user] , identifier[basestring] ):
keyword[raise] identifier[TypeError] ( literal[string] )
keyword[if] keyword[not] identifier[isinstance] ( identifier[password] , identifier[basestring] ):
keyword[raise] identifier[TypeError] ( literal[string] )
keyword[if] keyword[not] identifier[isinstance] ( identifier[domain] , identifier[basestring] ):
keyword[raise] identifier[TypeError] ( literal[string] )
keyword[if] keyword[not] identifier[isinstance] ( identifier[session_name] , identifier[basestring] ):
keyword[raise] identifier[TypeError] ( literal[string] )
identifier[guest_session] = identifier[self] . identifier[_call] ( literal[string] ,
identifier[in_p] =[ identifier[user] , identifier[password] , identifier[domain] , identifier[session_name] ])
identifier[guest_session] = identifier[IGuestSession] ( identifier[guest_session] )
keyword[return] identifier[guest_session] | def create_session(self, user, password, domain, session_name):
"""Creates a new guest session for controlling the guest. The new session
will be started asynchronously, meaning on return of this function it is
not guaranteed that the guest session is in a started and/or usable state.
To wait for successful startup, use the :py:func:`IGuestSession.wait_for`
call.
A guest session represents one impersonated user account in the guest, so
every operation will use the same credentials specified when creating
the session object via :py:func:`IGuest.create_session` . Anonymous
sessions, that is, sessions without specifying a valid
user account in the guest are not allowed reasons of security.
There can be a maximum of 32 sessions at once per VM. An error will
be returned if this has been reached.
For more information please consult :py:class:`IGuestSession`
in user of type str
User name this session will be using to control the guest; has to exist
and have the appropriate rights to execute programs in the VM. Must not
be empty.
in password of type str
Password of the user account to be used. Empty passwords are allowed.
in domain of type str
Domain name of the user account to be used if the guest is part of
a domain. Optional. This feature is not implemented yet.
in session_name of type str
The session's friendly name. Optional, can be empty.
return guest_session of type :class:`IGuestSession`
The newly created session object.
raises :class:`VBoxErrorIprtError`
Error creating guest session.
raises :class:`VBoxErrorMaximumReached`
The maximum of concurrent guest sessions has been reached.
"""
if not isinstance(user, basestring):
raise TypeError('user can only be an instance of type basestring') # depends on [control=['if'], data=[]]
if not isinstance(password, basestring):
raise TypeError('password can only be an instance of type basestring') # depends on [control=['if'], data=[]]
if not isinstance(domain, basestring):
raise TypeError('domain can only be an instance of type basestring') # depends on [control=['if'], data=[]]
if not isinstance(session_name, basestring):
raise TypeError('session_name can only be an instance of type basestring') # depends on [control=['if'], data=[]]
guest_session = self._call('createSession', in_p=[user, password, domain, session_name])
guest_session = IGuestSession(guest_session)
return guest_session |
def get_query_args(
self,
keep_blank_values: bool = False,
strict_parsing: bool = False,
encoding: str = "utf-8",
errors: str = "replace",
) -> list:
"""
Method to parse `query_string` using `urllib.parse.parse_qsl`.
This methods is used by `query_args` property.
Can be used directly if you need to change default parameters.
:param keep_blank_values: flag indicating whether blank values in
percent-encoded queries should be treated as blank strings.
A true value indicates that blanks should be retained as blank
strings. The default false value indicates that blank values
are to be ignored and treated as if they were not included.
:type keep_blank_values: bool
:param strict_parsing: flag indicating what to do with parsing errors.
If false (the default), errors are silently ignored. If true,
errors raise a ValueError exception.
:type strict_parsing: bool
:param encoding: specify how to decode percent-encoded sequences
into Unicode characters, as accepted by the bytes.decode() method.
:type encoding: str
:param errors: specify how to decode percent-encoded sequences
into Unicode characters, as accepted by the bytes.decode() method.
:type errors: str
:return: list
"""
if not self.parsed_not_grouped_args[
(keep_blank_values, strict_parsing, encoding, errors)
]:
if self.query_string:
self.parsed_not_grouped_args[
(keep_blank_values, strict_parsing, encoding, errors)
] = parse_qsl(
qs=self.query_string,
keep_blank_values=keep_blank_values,
strict_parsing=strict_parsing,
encoding=encoding,
errors=errors,
)
return self.parsed_not_grouped_args[
(keep_blank_values, strict_parsing, encoding, errors)
] | def function[get_query_args, parameter[self, keep_blank_values, strict_parsing, encoding, errors]]:
constant[
Method to parse `query_string` using `urllib.parse.parse_qsl`.
This methods is used by `query_args` property.
Can be used directly if you need to change default parameters.
:param keep_blank_values: flag indicating whether blank values in
percent-encoded queries should be treated as blank strings.
A true value indicates that blanks should be retained as blank
strings. The default false value indicates that blank values
are to be ignored and treated as if they were not included.
:type keep_blank_values: bool
:param strict_parsing: flag indicating what to do with parsing errors.
If false (the default), errors are silently ignored. If true,
errors raise a ValueError exception.
:type strict_parsing: bool
:param encoding: specify how to decode percent-encoded sequences
into Unicode characters, as accepted by the bytes.decode() method.
:type encoding: str
:param errors: specify how to decode percent-encoded sequences
into Unicode characters, as accepted by the bytes.decode() method.
:type errors: str
:return: list
]
if <ast.UnaryOp object at 0x7da1b1f77c70> begin[:]
if name[self].query_string begin[:]
call[name[self].parsed_not_grouped_args][tuple[[<ast.Name object at 0x7da1b1f747f0>, <ast.Name object at 0x7da1b1f75780>, <ast.Name object at 0x7da1b1f75390>, <ast.Name object at 0x7da1b1f75090>]]] assign[=] call[name[parse_qsl], parameter[]]
return[call[name[self].parsed_not_grouped_args][tuple[[<ast.Name object at 0x7da1b1f75b40>, <ast.Name object at 0x7da1b1f77250>, <ast.Name object at 0x7da1b1f76b60>, <ast.Name object at 0x7da1b1f767d0>]]]] | keyword[def] identifier[get_query_args] (
identifier[self] ,
identifier[keep_blank_values] : identifier[bool] = keyword[False] ,
identifier[strict_parsing] : identifier[bool] = keyword[False] ,
identifier[encoding] : identifier[str] = literal[string] ,
identifier[errors] : identifier[str] = literal[string] ,
)-> identifier[list] :
literal[string]
keyword[if] keyword[not] identifier[self] . identifier[parsed_not_grouped_args] [
( identifier[keep_blank_values] , identifier[strict_parsing] , identifier[encoding] , identifier[errors] )
]:
keyword[if] identifier[self] . identifier[query_string] :
identifier[self] . identifier[parsed_not_grouped_args] [
( identifier[keep_blank_values] , identifier[strict_parsing] , identifier[encoding] , identifier[errors] )
]= identifier[parse_qsl] (
identifier[qs] = identifier[self] . identifier[query_string] ,
identifier[keep_blank_values] = identifier[keep_blank_values] ,
identifier[strict_parsing] = identifier[strict_parsing] ,
identifier[encoding] = identifier[encoding] ,
identifier[errors] = identifier[errors] ,
)
keyword[return] identifier[self] . identifier[parsed_not_grouped_args] [
( identifier[keep_blank_values] , identifier[strict_parsing] , identifier[encoding] , identifier[errors] )
] | def get_query_args(self, keep_blank_values: bool=False, strict_parsing: bool=False, encoding: str='utf-8', errors: str='replace') -> list:
"""
Method to parse `query_string` using `urllib.parse.parse_qsl`.
This methods is used by `query_args` property.
Can be used directly if you need to change default parameters.
:param keep_blank_values: flag indicating whether blank values in
percent-encoded queries should be treated as blank strings.
A true value indicates that blanks should be retained as blank
strings. The default false value indicates that blank values
are to be ignored and treated as if they were not included.
:type keep_blank_values: bool
:param strict_parsing: flag indicating what to do with parsing errors.
If false (the default), errors are silently ignored. If true,
errors raise a ValueError exception.
:type strict_parsing: bool
:param encoding: specify how to decode percent-encoded sequences
into Unicode characters, as accepted by the bytes.decode() method.
:type encoding: str
:param errors: specify how to decode percent-encoded sequences
into Unicode characters, as accepted by the bytes.decode() method.
:type errors: str
:return: list
"""
if not self.parsed_not_grouped_args[keep_blank_values, strict_parsing, encoding, errors]:
if self.query_string:
self.parsed_not_grouped_args[keep_blank_values, strict_parsing, encoding, errors] = parse_qsl(qs=self.query_string, keep_blank_values=keep_blank_values, strict_parsing=strict_parsing, encoding=encoding, errors=errors) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
return self.parsed_not_grouped_args[keep_blank_values, strict_parsing, encoding, errors] |
def rest(url, req="GET", data=None):
"""Main function to be called from this module.
send a request using method 'req' and to the url. the _rest() function
will add the base_url to this, so 'url' should be something like '/ips'.
"""
load_variables()
return _rest(base_url + url, req, data) | def function[rest, parameter[url, req, data]]:
constant[Main function to be called from this module.
send a request using method 'req' and to the url. the _rest() function
will add the base_url to this, so 'url' should be something like '/ips'.
]
call[name[load_variables], parameter[]]
return[call[name[_rest], parameter[binary_operation[name[base_url] + name[url]], name[req], name[data]]]] | keyword[def] identifier[rest] ( identifier[url] , identifier[req] = literal[string] , identifier[data] = keyword[None] ):
literal[string]
identifier[load_variables] ()
keyword[return] identifier[_rest] ( identifier[base_url] + identifier[url] , identifier[req] , identifier[data] ) | def rest(url, req='GET', data=None):
"""Main function to be called from this module.
send a request using method 'req' and to the url. the _rest() function
will add the base_url to this, so 'url' should be something like '/ips'.
"""
load_variables()
return _rest(base_url + url, req, data) |
def attempt_connection(self):
"""
Establish a multicast connection - uses 2 sockets (one for sending, the other for receiving)
"""
self.socket = socket.socket(socket.AF_INET, socket.SOCK_DGRAM, socket.IPPROTO_UDP)
self.socket.setsockopt(socket.IPPROTO_IP, socket.IP_MULTICAST_TTL, 2)
self.receiver_socket = socket.socket(socket.AF_INET, socket.SOCK_DGRAM, socket.IPPROTO_UDP)
self.receiver_socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
self.receiver_socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEPORT, 1)
self.receiver_socket.bind(('', MCAST_PORT))
mreq = struct.pack("4sl", socket.inet_aton(MCAST_GRP), socket.INADDR_ANY)
self.receiver_socket.setsockopt(socket.IPPROTO_IP, socket.IP_ADD_MEMBERSHIP, mreq)
if not self.socket or not self.receiver_socket:
raise exception.ConnectFailedException() | def function[attempt_connection, parameter[self]]:
constant[
Establish a multicast connection - uses 2 sockets (one for sending, the other for receiving)
]
name[self].socket assign[=] call[name[socket].socket, parameter[name[socket].AF_INET, name[socket].SOCK_DGRAM, name[socket].IPPROTO_UDP]]
call[name[self].socket.setsockopt, parameter[name[socket].IPPROTO_IP, name[socket].IP_MULTICAST_TTL, constant[2]]]
name[self].receiver_socket assign[=] call[name[socket].socket, parameter[name[socket].AF_INET, name[socket].SOCK_DGRAM, name[socket].IPPROTO_UDP]]
call[name[self].receiver_socket.setsockopt, parameter[name[socket].SOL_SOCKET, name[socket].SO_REUSEADDR, constant[1]]]
call[name[self].receiver_socket.setsockopt, parameter[name[socket].SOL_SOCKET, name[socket].SO_REUSEPORT, constant[1]]]
call[name[self].receiver_socket.bind, parameter[tuple[[<ast.Constant object at 0x7da2044c0c70>, <ast.Name object at 0x7da2044c0ca0>]]]]
variable[mreq] assign[=] call[name[struct].pack, parameter[constant[4sl], call[name[socket].inet_aton, parameter[name[MCAST_GRP]]], name[socket].INADDR_ANY]]
call[name[self].receiver_socket.setsockopt, parameter[name[socket].IPPROTO_IP, name[socket].IP_ADD_MEMBERSHIP, name[mreq]]]
if <ast.BoolOp object at 0x7da2044c0400> begin[:]
<ast.Raise object at 0x7da2044c37c0> | keyword[def] identifier[attempt_connection] ( identifier[self] ):
literal[string]
identifier[self] . identifier[socket] = identifier[socket] . identifier[socket] ( identifier[socket] . identifier[AF_INET] , identifier[socket] . identifier[SOCK_DGRAM] , identifier[socket] . identifier[IPPROTO_UDP] )
identifier[self] . identifier[socket] . identifier[setsockopt] ( identifier[socket] . identifier[IPPROTO_IP] , identifier[socket] . identifier[IP_MULTICAST_TTL] , literal[int] )
identifier[self] . identifier[receiver_socket] = identifier[socket] . identifier[socket] ( identifier[socket] . identifier[AF_INET] , identifier[socket] . identifier[SOCK_DGRAM] , identifier[socket] . identifier[IPPROTO_UDP] )
identifier[self] . identifier[receiver_socket] . identifier[setsockopt] ( identifier[socket] . identifier[SOL_SOCKET] , identifier[socket] . identifier[SO_REUSEADDR] , literal[int] )
identifier[self] . identifier[receiver_socket] . identifier[setsockopt] ( identifier[socket] . identifier[SOL_SOCKET] , identifier[socket] . identifier[SO_REUSEPORT] , literal[int] )
identifier[self] . identifier[receiver_socket] . identifier[bind] (( literal[string] , identifier[MCAST_PORT] ))
identifier[mreq] = identifier[struct] . identifier[pack] ( literal[string] , identifier[socket] . identifier[inet_aton] ( identifier[MCAST_GRP] ), identifier[socket] . identifier[INADDR_ANY] )
identifier[self] . identifier[receiver_socket] . identifier[setsockopt] ( identifier[socket] . identifier[IPPROTO_IP] , identifier[socket] . identifier[IP_ADD_MEMBERSHIP] , identifier[mreq] )
keyword[if] keyword[not] identifier[self] . identifier[socket] keyword[or] keyword[not] identifier[self] . identifier[receiver_socket] :
keyword[raise] identifier[exception] . identifier[ConnectFailedException] () | def attempt_connection(self):
"""
Establish a multicast connection - uses 2 sockets (one for sending, the other for receiving)
"""
self.socket = socket.socket(socket.AF_INET, socket.SOCK_DGRAM, socket.IPPROTO_UDP)
self.socket.setsockopt(socket.IPPROTO_IP, socket.IP_MULTICAST_TTL, 2)
self.receiver_socket = socket.socket(socket.AF_INET, socket.SOCK_DGRAM, socket.IPPROTO_UDP)
self.receiver_socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
self.receiver_socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEPORT, 1)
self.receiver_socket.bind(('', MCAST_PORT))
mreq = struct.pack('4sl', socket.inet_aton(MCAST_GRP), socket.INADDR_ANY)
self.receiver_socket.setsockopt(socket.IPPROTO_IP, socket.IP_ADD_MEMBERSHIP, mreq)
if not self.socket or not self.receiver_socket:
raise exception.ConnectFailedException() # depends on [control=['if'], data=[]] |
def GetNetworkAddressWithTime(self):
"""
Get a network address object.
Returns:
NetworkAddressWithTime: if we have a connection to a node.
None: otherwise.
"""
if self.port is not None and self.host is not None and self.Version is not None:
return NetworkAddressWithTime(self.host, self.port, self.Version.Services)
return None | def function[GetNetworkAddressWithTime, parameter[self]]:
constant[
Get a network address object.
Returns:
NetworkAddressWithTime: if we have a connection to a node.
None: otherwise.
]
if <ast.BoolOp object at 0x7da2046218d0> begin[:]
return[call[name[NetworkAddressWithTime], parameter[name[self].host, name[self].port, name[self].Version.Services]]]
return[constant[None]] | keyword[def] identifier[GetNetworkAddressWithTime] ( identifier[self] ):
literal[string]
keyword[if] identifier[self] . identifier[port] keyword[is] keyword[not] keyword[None] keyword[and] identifier[self] . identifier[host] keyword[is] keyword[not] keyword[None] keyword[and] identifier[self] . identifier[Version] keyword[is] keyword[not] keyword[None] :
keyword[return] identifier[NetworkAddressWithTime] ( identifier[self] . identifier[host] , identifier[self] . identifier[port] , identifier[self] . identifier[Version] . identifier[Services] )
keyword[return] keyword[None] | def GetNetworkAddressWithTime(self):
"""
Get a network address object.
Returns:
NetworkAddressWithTime: if we have a connection to a node.
None: otherwise.
"""
if self.port is not None and self.host is not None and (self.Version is not None):
return NetworkAddressWithTime(self.host, self.port, self.Version.Services) # depends on [control=['if'], data=[]]
return None |
def process_request(self, request, client_address):
"""Start a new thread to process the request.
This is lovingly copied and pasted from ThreadingMixIn, with the addition of setting the name
of the thread. It's a shame that ThreadingMixIn doesn't provide a customization hook.
"""
t = threading.Thread(
target=self.process_request_thread,
args=(request, client_address),
name="PailgunRequestThread",
)
t.daemon = self.daemon_threads
t.start() | def function[process_request, parameter[self, request, client_address]]:
constant[Start a new thread to process the request.
This is lovingly copied and pasted from ThreadingMixIn, with the addition of setting the name
of the thread. It's a shame that ThreadingMixIn doesn't provide a customization hook.
]
variable[t] assign[=] call[name[threading].Thread, parameter[]]
name[t].daemon assign[=] name[self].daemon_threads
call[name[t].start, parameter[]] | keyword[def] identifier[process_request] ( identifier[self] , identifier[request] , identifier[client_address] ):
literal[string]
identifier[t] = identifier[threading] . identifier[Thread] (
identifier[target] = identifier[self] . identifier[process_request_thread] ,
identifier[args] =( identifier[request] , identifier[client_address] ),
identifier[name] = literal[string] ,
)
identifier[t] . identifier[daemon] = identifier[self] . identifier[daemon_threads]
identifier[t] . identifier[start] () | def process_request(self, request, client_address):
"""Start a new thread to process the request.
This is lovingly copied and pasted from ThreadingMixIn, with the addition of setting the name
of the thread. It's a shame that ThreadingMixIn doesn't provide a customization hook.
"""
t = threading.Thread(target=self.process_request_thread, args=(request, client_address), name='PailgunRequestThread')
t.daemon = self.daemon_threads
t.start() |
def delete(self, expected_value=None, return_values=None):
"""
Delete the item from DynamoDB.
:type expected_value: dict
:param expected_value: A dictionary of name/value pairs that you expect.
This dictionary should have name/value pairs where the name
is the name of the attribute and the value is either the value
you are expecting or False if you expect the attribute not to
exist.
:type return_values: str
:param return_values: Controls the return of attribute
name-value pairs before then were changed. Possible
values are: None or 'ALL_OLD'. If 'ALL_OLD' is
specified and the item is overwritten, the content
of the old item is returned.
"""
return self.table.layer2.delete_item(self, expected_value,
return_values) | def function[delete, parameter[self, expected_value, return_values]]:
constant[
Delete the item from DynamoDB.
:type expected_value: dict
:param expected_value: A dictionary of name/value pairs that you expect.
This dictionary should have name/value pairs where the name
is the name of the attribute and the value is either the value
you are expecting or False if you expect the attribute not to
exist.
:type return_values: str
:param return_values: Controls the return of attribute
name-value pairs before then were changed. Possible
values are: None or 'ALL_OLD'. If 'ALL_OLD' is
specified and the item is overwritten, the content
of the old item is returned.
]
return[call[name[self].table.layer2.delete_item, parameter[name[self], name[expected_value], name[return_values]]]] | keyword[def] identifier[delete] ( identifier[self] , identifier[expected_value] = keyword[None] , identifier[return_values] = keyword[None] ):
literal[string]
keyword[return] identifier[self] . identifier[table] . identifier[layer2] . identifier[delete_item] ( identifier[self] , identifier[expected_value] ,
identifier[return_values] ) | def delete(self, expected_value=None, return_values=None):
"""
Delete the item from DynamoDB.
:type expected_value: dict
:param expected_value: A dictionary of name/value pairs that you expect.
This dictionary should have name/value pairs where the name
is the name of the attribute and the value is either the value
you are expecting or False if you expect the attribute not to
exist.
:type return_values: str
:param return_values: Controls the return of attribute
name-value pairs before then were changed. Possible
values are: None or 'ALL_OLD'. If 'ALL_OLD' is
specified and the item is overwritten, the content
of the old item is returned.
"""
return self.table.layer2.delete_item(self, expected_value, return_values) |
def _control(self, state):
""" Control device state.
Possible states are ON or OFF.
:param state: Switch to this state.
"""
# Renew subscription if necessary
if not self._subscription_is_recent():
self._subscribe()
cmd = MAGIC + CONTROL + self._mac + PADDING_1 + PADDING_2 + state
_LOGGER.debug("Sending new state to %s: %s", self.host, ord(state))
ack_state = self._udp_transact(cmd, self._control_resp, state)
if ack_state is None:
raise S20Exception(
"Device didn't acknowledge control request: {}".format(
self.host)) | def function[_control, parameter[self, state]]:
constant[ Control device state.
Possible states are ON or OFF.
:param state: Switch to this state.
]
if <ast.UnaryOp object at 0x7da1b2564c10> begin[:]
call[name[self]._subscribe, parameter[]]
variable[cmd] assign[=] binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[name[MAGIC] + name[CONTROL]] + name[self]._mac] + name[PADDING_1]] + name[PADDING_2]] + name[state]]
call[name[_LOGGER].debug, parameter[constant[Sending new state to %s: %s], name[self].host, call[name[ord], parameter[name[state]]]]]
variable[ack_state] assign[=] call[name[self]._udp_transact, parameter[name[cmd], name[self]._control_resp, name[state]]]
if compare[name[ack_state] is constant[None]] begin[:]
<ast.Raise object at 0x7da1b2534df0> | keyword[def] identifier[_control] ( identifier[self] , identifier[state] ):
literal[string]
keyword[if] keyword[not] identifier[self] . identifier[_subscription_is_recent] ():
identifier[self] . identifier[_subscribe] ()
identifier[cmd] = identifier[MAGIC] + identifier[CONTROL] + identifier[self] . identifier[_mac] + identifier[PADDING_1] + identifier[PADDING_2] + identifier[state]
identifier[_LOGGER] . identifier[debug] ( literal[string] , identifier[self] . identifier[host] , identifier[ord] ( identifier[state] ))
identifier[ack_state] = identifier[self] . identifier[_udp_transact] ( identifier[cmd] , identifier[self] . identifier[_control_resp] , identifier[state] )
keyword[if] identifier[ack_state] keyword[is] keyword[None] :
keyword[raise] identifier[S20Exception] (
literal[string] . identifier[format] (
identifier[self] . identifier[host] )) | def _control(self, state):
""" Control device state.
Possible states are ON or OFF.
:param state: Switch to this state.
"""
# Renew subscription if necessary
if not self._subscription_is_recent():
self._subscribe() # depends on [control=['if'], data=[]]
cmd = MAGIC + CONTROL + self._mac + PADDING_1 + PADDING_2 + state
_LOGGER.debug('Sending new state to %s: %s', self.host, ord(state))
ack_state = self._udp_transact(cmd, self._control_resp, state)
if ack_state is None:
raise S20Exception("Device didn't acknowledge control request: {}".format(self.host)) # depends on [control=['if'], data=[]] |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.