code stringlengths 75 104k | code_sememe stringlengths 47 309k | token_type stringlengths 215 214k | code_dependency stringlengths 75 155k |
|---|---|---|---|
def _api_post(self, url, **kwargs):
    """
    POST to ``url`` with the API headers attached and return the decoded
    JSON body.

    :param url: endpoint to POST to.
    :param kwargs: extra keyword arguments forwarded to ``session.post``.
    :raises ServerException: when the response status is not ok; the
        message carries the status code plus the body text (or the HTTP
        reason phrase when the body is empty).
    """
    headers = self._get_api_headers()
    response = self.session.post(url=url, headers=headers, **kwargs)
    if response.ok:
        return response.json()
    detail = response.text or response.reason
    raise ServerException('{0}: {1}'.format(response.status_code, detail))
constant[
Convenience method for posting
]
variable[response] assign[=] call[name[self].session.post, parameter[]]
if <ast.UnaryOp object at 0x7da1b01192a0> begin[:]
<ast.Raise object at 0x7da1b0118130>
return[call[name[response].json, parameter[]]] | keyword[def] identifier[_api_post] ( identifier[self] , identifier[url] ,** identifier[kwargs] ):
literal[string]
identifier[response] = identifier[self] . identifier[session] . identifier[post] (
identifier[url] = identifier[url] ,
identifier[headers] = identifier[self] . identifier[_get_api_headers] (),
** identifier[kwargs]
)
keyword[if] keyword[not] identifier[response] . identifier[ok] :
keyword[raise] identifier[ServerException] (
literal[string] . identifier[format] (
identifier[response] . identifier[status_code] ,
identifier[response] . identifier[text] keyword[or] identifier[response] . identifier[reason]
))
keyword[return] identifier[response] . identifier[json] () | def _api_post(self, url, **kwargs):
"""
Convenience method for posting
"""
response = self.session.post(url=url, headers=self._get_api_headers(), **kwargs)
if not response.ok:
raise ServerException('{0}: {1}'.format(response.status_code, response.text or response.reason)) # depends on [control=['if'], data=[]]
return response.json() |
async def _get_protocol(self):
    """Return the client protocol for requests, creating it lazily.

    The context creation is wrapped in a Task once and cached on the
    instance, so repeated awaits share the same underlying protocol.
    """
    if self._protocol is None:
        self._protocol = asyncio.Task(
            Context.create_client_context(loop=self._loop))
    return await self._protocol
literal[string]
keyword[if] identifier[self] . identifier[_protocol] keyword[is] keyword[None] :
identifier[self] . identifier[_protocol] = identifier[asyncio] . identifier[Task] ( identifier[Context] . identifier[create_client_context] (
identifier[loop] = identifier[self] . identifier[_loop] ))
keyword[return] ( keyword[await] identifier[self] . identifier[_protocol] ) | async def _get_protocol(self):
"""Get the protocol for the request."""
if self._protocol is None:
self._protocol = asyncio.Task(Context.create_client_context(loop=self._loop)) # depends on [control=['if'], data=[]]
return await self._protocol |
def getftype(self, name):
    """Returns the python type for the specified field name. The field list is
    cached so multiple calls do not invoke a getFields request each time.
    @param name(string) The name of the SOLR field
    @returns Python type of the field; if the SOLR type is not one of the
        known scalar types, the raw SOLR type string is returned unchanged,
        and unknown field names default to ``str``.
    """
    # Single lookup table instead of four chained if-blocks.
    solr_to_py = {
        'string': str, 'text': str, 'stext': str, 'text_ws': str,
        'sint': int, 'integer': int, 'long': int, 'slong': int,
        'sdouble': float, 'double': float, 'sfloat': float, 'float': float,
        'boolean': bool,
    }
    fields = self.getFields()
    try:
        ftype = fields['fields'][name]['type']
    except (KeyError, TypeError):
        # Narrowed from a bare `except Exception`: only a missing key or an
        # unexpected getFields payload shape falls back to str; real errors
        # (e.g. getFields raising) now propagate instead of being hidden.
        return str
    return solr_to_py.get(ftype, ftype)
constant[Returns the python type for the specified field name. The field list is
cached so multiple calls do not invoke a getFields request each time.
@param name(string) The name of the SOLR field
@returns Python type of the field.
]
variable[fields] assign[=] call[name[self].getFields, parameter[]]
<ast.Try object at 0x7da1b1855480>
if compare[call[name[fld]][constant[type]] in list[[<ast.Constant object at 0x7da1b1854be0>, <ast.Constant object at 0x7da1b1857640>, <ast.Constant object at 0x7da1b18549a0>, <ast.Constant object at 0x7da1b1855300>]]] begin[:]
return[name[str]]
if compare[call[name[fld]][constant[type]] in list[[<ast.Constant object at 0x7da1b18554b0>, <ast.Constant object at 0x7da1b1855390>, <ast.Constant object at 0x7da1b18553c0>, <ast.Constant object at 0x7da1b18552a0>]]] begin[:]
return[name[int]]
if compare[call[name[fld]][constant[type]] in list[[<ast.Constant object at 0x7da1b18559f0>, <ast.Constant object at 0x7da1b1855a50>, <ast.Constant object at 0x7da1b1855bd0>, <ast.Constant object at 0x7da1b1856350>]]] begin[:]
return[name[float]]
if compare[call[name[fld]][constant[type]] in list[[<ast.Constant object at 0x7da1b1855210>]]] begin[:]
return[name[bool]]
return[call[name[fld]][constant[type]]] | keyword[def] identifier[getftype] ( identifier[self] , identifier[name] ):
literal[string]
identifier[fields] = identifier[self] . identifier[getFields] ()
keyword[try] :
identifier[fld] = identifier[fields] [ literal[string] ][ identifier[name] ]
keyword[except] identifier[Exception] :
keyword[return] identifier[str]
keyword[if] identifier[fld] [ literal[string] ] keyword[in] [ literal[string] , literal[string] , literal[string] , literal[string] ]:
keyword[return] identifier[str]
keyword[if] identifier[fld] [ literal[string] ] keyword[in] [ literal[string] , literal[string] , literal[string] , literal[string] ]:
keyword[return] identifier[int]
keyword[if] identifier[fld] [ literal[string] ] keyword[in] [ literal[string] , literal[string] , literal[string] , literal[string] ]:
keyword[return] identifier[float]
keyword[if] identifier[fld] [ literal[string] ] keyword[in] [ literal[string] ]:
keyword[return] identifier[bool]
keyword[return] identifier[fld] [ literal[string] ] | def getftype(self, name):
"""Returns the python type for the specified field name. The field list is
cached so multiple calls do not invoke a getFields request each time.
@param name(string) The name of the SOLR field
@returns Python type of the field.
"""
fields = self.getFields()
try:
fld = fields['fields'][name] # depends on [control=['try'], data=[]]
except Exception:
return str # depends on [control=['except'], data=[]]
if fld['type'] in ['string', 'text', 'stext', 'text_ws']:
return str # depends on [control=['if'], data=[]]
if fld['type'] in ['sint', 'integer', 'long', 'slong']:
return int # depends on [control=['if'], data=[]]
if fld['type'] in ['sdouble', 'double', 'sfloat', 'float']:
return float # depends on [control=['if'], data=[]]
if fld['type'] in ['boolean']:
return bool # depends on [control=['if'], data=[]]
return fld['type'] |
def get_request(self, request):
    """ Get download requests
    Create a list of DownloadRequests for all Sentinel-2 acquisitions within request's time interval and
    acceptable cloud coverage.
    :param request: OGC-type request with specified bounding box, time interval, and cloud coverage for specific
        product.
    :type request: OgcRequest or GeopediaRequest
    :return: list of DownloadRequests
    """
    download_requests = []
    for geometry in request.geometry_list:
        # One JSON download request per geometry in the OGC request.
        download_requests.append(DownloadRequest(
            url=self.get_url(request=request, geometry=geometry),
            filename=self.get_filename(request, geometry),
            data_type=MimeType.JSON,
            headers=OgcConstants.HEADERS))
    return download_requests
constant[ Get download requests
Create a list of DownloadRequests for all Sentinel-2 acquisitions within request's time interval and
acceptable cloud coverage.
:param request: OGC-type request with specified bounding box, time interval, and cloud coverage for specific
product.
:type request: OgcRequest or GeopediaRequest
:return: list of DownloadRequests
]
return[<ast.ListComp object at 0x7da1b180cca0>] | keyword[def] identifier[get_request] ( identifier[self] , identifier[request] ):
literal[string]
keyword[return] [ identifier[DownloadRequest] ( identifier[url] = identifier[self] . identifier[get_url] ( identifier[request] = identifier[request] , identifier[geometry] = identifier[geometry] ),
identifier[filename] = identifier[self] . identifier[get_filename] ( identifier[request] , identifier[geometry] ),
identifier[data_type] = identifier[MimeType] . identifier[JSON] , identifier[headers] = identifier[OgcConstants] . identifier[HEADERS] )
keyword[for] identifier[geometry] keyword[in] identifier[request] . identifier[geometry_list] ] | def get_request(self, request):
""" Get download requests
Create a list of DownloadRequests for all Sentinel-2 acquisitions within request's time interval and
acceptable cloud coverage.
:param request: OGC-type request with specified bounding box, time interval, and cloud coverage for specific
product.
:type request: OgcRequest or GeopediaRequest
:return: list of DownloadRequests
"""
return [DownloadRequest(url=self.get_url(request=request, geometry=geometry), filename=self.get_filename(request, geometry), data_type=MimeType.JSON, headers=OgcConstants.HEADERS) for geometry in request.geometry_list] |
def _process_infohash_list(infohash_list):
    """
    Convert one or many infohashes into qBittorrent API friendly values.

    :param infohash_list: a single infohash string, or an iterable
        (list, tuple, set, generator) of infohash strings.
    :return: dict with a ``hashes`` key; multiple hashes are lowercased
        and joined with ``|``.
    """
    if isinstance(infohash_list, str):
        hashes = infohash_list.lower()
    else:
        # Generalized from `isinstance(infohash_list, list)`: any iterable
        # of hash strings is accepted (a tuple previously crashed on
        # `.lower()` in the else-branch).
        hashes = '|'.join(h.lower() for h in infohash_list)
    return {'hashes': hashes}
constant[
Method to convert the infohash_list to qBittorrent API friendly values.
:param infohash_list: List of infohash.
]
if call[name[isinstance], parameter[name[infohash_list], name[list]]] begin[:]
variable[data] assign[=] dictionary[[<ast.Constant object at 0x7da2044c0d00>], [<ast.Call object at 0x7da2044c2c20>]]
return[name[data]] | keyword[def] identifier[_process_infohash_list] ( identifier[infohash_list] ):
literal[string]
keyword[if] identifier[isinstance] ( identifier[infohash_list] , identifier[list] ):
identifier[data] ={ literal[string] : literal[string] . identifier[join] ([ identifier[h] . identifier[lower] () keyword[for] identifier[h] keyword[in] identifier[infohash_list] ])}
keyword[else] :
identifier[data] ={ literal[string] : identifier[infohash_list] . identifier[lower] ()}
keyword[return] identifier[data] | def _process_infohash_list(infohash_list):
"""
Method to convert the infohash_list to qBittorrent API friendly values.
:param infohash_list: List of infohash.
"""
if isinstance(infohash_list, list):
data = {'hashes': '|'.join([h.lower() for h in infohash_list])} # depends on [control=['if'], data=[]]
else:
data = {'hashes': infohash_list.lower()}
return data |
def get_affected_by_sync(
package_descriptors, targets,
testing_repo_data, main_repo_data):
"""
For each package and target check if it is affected by a sync.
This is the case when the package version in the testing repo is different
from the version in the main repo.
:return: a dict indexed by package names containing
dicts indexed by targets containing a boolean flag
"""
affected_by_sync = {}
for package_descriptor in package_descriptors.values():
pkg_name = package_descriptor.pkg_name
debian_pkg_name = package_descriptor.debian_pkg_name
affected_by_sync[pkg_name] = {}
for target in targets:
testing_version = _strip_version_suffix(
testing_repo_data.get(target, {}).get(debian_pkg_name, None))
main_version = _strip_version_suffix(
main_repo_data.get(target, {}).get(debian_pkg_name, None))
affected_by_sync[pkg_name][target] = \
testing_version != main_version
return affected_by_sync | def function[get_affected_by_sync, parameter[package_descriptors, targets, testing_repo_data, main_repo_data]]:
constant[
For each package and target check if it is affected by a sync.
This is the case when the package version in the testing repo is different
from the version in the main repo.
:return: a dict indexed by package names containing
dicts indexed by targets containing a boolean flag
]
variable[affected_by_sync] assign[=] dictionary[[], []]
for taget[name[package_descriptor]] in starred[call[name[package_descriptors].values, parameter[]]] begin[:]
variable[pkg_name] assign[=] name[package_descriptor].pkg_name
variable[debian_pkg_name] assign[=] name[package_descriptor].debian_pkg_name
call[name[affected_by_sync]][name[pkg_name]] assign[=] dictionary[[], []]
for taget[name[target]] in starred[name[targets]] begin[:]
variable[testing_version] assign[=] call[name[_strip_version_suffix], parameter[call[call[name[testing_repo_data].get, parameter[name[target], dictionary[[], []]]].get, parameter[name[debian_pkg_name], constant[None]]]]]
variable[main_version] assign[=] call[name[_strip_version_suffix], parameter[call[call[name[main_repo_data].get, parameter[name[target], dictionary[[], []]]].get, parameter[name[debian_pkg_name], constant[None]]]]]
call[call[name[affected_by_sync]][name[pkg_name]]][name[target]] assign[=] compare[name[testing_version] not_equal[!=] name[main_version]]
return[name[affected_by_sync]] | keyword[def] identifier[get_affected_by_sync] (
identifier[package_descriptors] , identifier[targets] ,
identifier[testing_repo_data] , identifier[main_repo_data] ):
literal[string]
identifier[affected_by_sync] ={}
keyword[for] identifier[package_descriptor] keyword[in] identifier[package_descriptors] . identifier[values] ():
identifier[pkg_name] = identifier[package_descriptor] . identifier[pkg_name]
identifier[debian_pkg_name] = identifier[package_descriptor] . identifier[debian_pkg_name]
identifier[affected_by_sync] [ identifier[pkg_name] ]={}
keyword[for] identifier[target] keyword[in] identifier[targets] :
identifier[testing_version] = identifier[_strip_version_suffix] (
identifier[testing_repo_data] . identifier[get] ( identifier[target] ,{}). identifier[get] ( identifier[debian_pkg_name] , keyword[None] ))
identifier[main_version] = identifier[_strip_version_suffix] (
identifier[main_repo_data] . identifier[get] ( identifier[target] ,{}). identifier[get] ( identifier[debian_pkg_name] , keyword[None] ))
identifier[affected_by_sync] [ identifier[pkg_name] ][ identifier[target] ]= identifier[testing_version] != identifier[main_version]
keyword[return] identifier[affected_by_sync] | def get_affected_by_sync(package_descriptors, targets, testing_repo_data, main_repo_data):
"""
For each package and target check if it is affected by a sync.
This is the case when the package version in the testing repo is different
from the version in the main repo.
:return: a dict indexed by package names containing
dicts indexed by targets containing a boolean flag
"""
affected_by_sync = {}
for package_descriptor in package_descriptors.values():
pkg_name = package_descriptor.pkg_name
debian_pkg_name = package_descriptor.debian_pkg_name
affected_by_sync[pkg_name] = {}
for target in targets:
testing_version = _strip_version_suffix(testing_repo_data.get(target, {}).get(debian_pkg_name, None))
main_version = _strip_version_suffix(main_repo_data.get(target, {}).get(debian_pkg_name, None))
affected_by_sync[pkg_name][target] = testing_version != main_version # depends on [control=['for'], data=['target']] # depends on [control=['for'], data=['package_descriptor']]
return affected_by_sync |
def run(addr, *commands, **kwargs):
    """
    Non-threaded batch command runner returning output results

    Opens a VarnishHandler for ``addr``, executes the given commands, closes
    the handler and returns the collected results.

    :param addr: address passed to VarnishHandler.
    :param commands: either tuples of command specs (each spec being
        ``(name, arg1, ...)``), or a single command name followed by its
        positional arguments.
    :param kwargs: forwarded to VarnishHandler.
    """
    results = []
    handler = VarnishHandler(addr, **kwargs)
    for cmd in commands:
        if isinstance(cmd, tuple) and len(cmd)>1:
            # Tuple-of-specs form: each element c is a (name, args...) spec;
            # dotted command names map to handler method names.
            results.extend([getattr(handler, c[0].replace('.','_'))(*c[1:]) for c in cmd])
        else:
            # Plain form: the first command is the method name and the
            # remaining *commands entries are its arguments, so only one
            # command can run in this mode — hence the break.
            results.append(getattr(handler, cmd.replace('.','_'))(*commands[1:]))
            break
    handler.close()
    return results
constant[
Non-threaded batch command runner returning output results
]
variable[results] assign[=] list[[]]
variable[handler] assign[=] call[name[VarnishHandler], parameter[name[addr]]]
for taget[name[cmd]] in starred[name[commands]] begin[:]
if <ast.BoolOp object at 0x7da18fe931f0> begin[:]
call[name[results].extend, parameter[<ast.ListComp object at 0x7da18fe91330>]]
call[name[handler].close, parameter[]]
return[name[results]] | keyword[def] identifier[run] ( identifier[addr] ,* identifier[commands] ,** identifier[kwargs] ):
literal[string]
identifier[results] =[]
identifier[handler] = identifier[VarnishHandler] ( identifier[addr] ,** identifier[kwargs] )
keyword[for] identifier[cmd] keyword[in] identifier[commands] :
keyword[if] identifier[isinstance] ( identifier[cmd] , identifier[tuple] ) keyword[and] identifier[len] ( identifier[cmd] )> literal[int] :
identifier[results] . identifier[extend] ([ identifier[getattr] ( identifier[handler] , identifier[c] [ literal[int] ]. identifier[replace] ( literal[string] , literal[string] ))(* identifier[c] [ literal[int] :]) keyword[for] identifier[c] keyword[in] identifier[cmd] ])
keyword[else] :
identifier[results] . identifier[append] ( identifier[getattr] ( identifier[handler] , identifier[cmd] . identifier[replace] ( literal[string] , literal[string] ))(* identifier[commands] [ literal[int] :]))
keyword[break]
identifier[handler] . identifier[close] ()
keyword[return] identifier[results] | def run(addr, *commands, **kwargs):
"""
Non-threaded batch command runner returning output results
"""
results = []
handler = VarnishHandler(addr, **kwargs)
for cmd in commands:
if isinstance(cmd, tuple) and len(cmd) > 1:
results.extend([getattr(handler, c[0].replace('.', '_'))(*c[1:]) for c in cmd]) # depends on [control=['if'], data=[]]
else:
results.append(getattr(handler, cmd.replace('.', '_'))(*commands[1:]))
break # depends on [control=['for'], data=['cmd']]
handler.close()
return results |
def annotations(self, qname=True):
    """
    Return every triple of the ontology, sorted.

    By default (``qname=True``) the first two URIs of each triple are
    shortened to qnames via ``uri2niceString``; the third element is
    passed through unchanged. With ``qname=False`` the raw triples are
    returned sorted.
    """
    if not qname:
        return sorted(self.triples)
    shortened = [
        [uri2niceString(s, self.namespaces), uri2niceString(p, self.namespaces), o]
        for s, p, o in self.triples
    ]
    return sorted(shortened)
constant[
wrapper that returns all triples for an onto.
By default resources URIs are transformed into qnames
]
if name[qname] begin[:]
return[call[name[sorted], parameter[<ast.GeneratorExp object at 0x7da1b11aa110>]]] | keyword[def] identifier[annotations] ( identifier[self] , identifier[qname] = keyword[True] ):
literal[string]
keyword[if] identifier[qname] :
keyword[return] identifier[sorted] ([( identifier[uri2niceString] ( identifier[x] , identifier[self] . identifier[namespaces] )
),( identifier[uri2niceString] ( identifier[y] , identifier[self] . identifier[namespaces] )), identifier[z] ]
keyword[for] identifier[x] , identifier[y] , identifier[z] keyword[in] identifier[self] . identifier[triples] )
keyword[else] :
keyword[return] identifier[sorted] ( identifier[self] . identifier[triples] ) | def annotations(self, qname=True):
"""
wrapper that returns all triples for an onto.
By default resources URIs are transformed into qnames
"""
if qname:
return sorted(([uri2niceString(x, self.namespaces), uri2niceString(y, self.namespaces), z] for (x, y, z) in self.triples)) # depends on [control=['if'], data=[]]
else:
return sorted(self.triples) |
def queryName(self, queryName):
    """Specifies the name of the :class:`StreamingQuery` that can be started with
    :func:`start`. This name must be unique among all the currently active queries
    in the associated SparkSession.
    .. note:: Evolving.
    :param queryName: unique name for the query
    :raises ValueError: if ``queryName`` is not a non-empty string.
    >>> writer = sdf.writeStream.queryName('streaming_query')
    """
    # isinstance (rather than the exact `type(x) != str` comparison) also
    # accepts str subclasses; the name must be non-empty after stripping.
    if not queryName or not isinstance(queryName, str) or len(queryName.strip()) == 0:
        raise ValueError('The queryName must be a non-empty string. Got: %s' % queryName)
    self._jwrite = self._jwrite.queryName(queryName)
    return self
constant[Specifies the name of the :class:`StreamingQuery` that can be started with
:func:`start`. This name must be unique among all the currently active queries
in the associated SparkSession.
.. note:: Evolving.
:param queryName: unique name for the query
>>> writer = sdf.writeStream.queryName('streaming_query')
]
if <ast.BoolOp object at 0x7da1b20a81f0> begin[:]
<ast.Raise object at 0x7da1b20a9810>
name[self]._jwrite assign[=] call[name[self]._jwrite.queryName, parameter[name[queryName]]]
return[name[self]] | keyword[def] identifier[queryName] ( identifier[self] , identifier[queryName] ):
literal[string]
keyword[if] keyword[not] identifier[queryName] keyword[or] identifier[type] ( identifier[queryName] )!= identifier[str] keyword[or] identifier[len] ( identifier[queryName] . identifier[strip] ())== literal[int] :
keyword[raise] identifier[ValueError] ( literal[string] % identifier[queryName] )
identifier[self] . identifier[_jwrite] = identifier[self] . identifier[_jwrite] . identifier[queryName] ( identifier[queryName] )
keyword[return] identifier[self] | def queryName(self, queryName):
"""Specifies the name of the :class:`StreamingQuery` that can be started with
:func:`start`. This name must be unique among all the currently active queries
in the associated SparkSession.
.. note:: Evolving.
:param queryName: unique name for the query
>>> writer = sdf.writeStream.queryName('streaming_query')
"""
if not queryName or type(queryName) != str or len(queryName.strip()) == 0:
raise ValueError('The queryName must be a non-empty string. Got: %s' % queryName) # depends on [control=['if'], data=[]]
self._jwrite = self._jwrite.queryName(queryName)
return self |
def expand_var(v, env):
    """ If v is a variable reference (for example: '$myvar'), replace it using the supplied
    env dictionary.
    Args:
        v: the variable to replace if needed.
        env: user supplied dictionary.
    Raises:
        Exception if v is a variable reference but it is not found in env.
    """
    # Using len() and v[0] instead of startswith keeps this Unicode-safe.
    if len(v) == 0 or v[0] != '$':
        return v
    ref = v[1:]
    # '$$name' is an escape returning '$name' unexpanded; a lone '$'
    # likewise collapses to the empty string.
    if len(ref) == 0 or ref[0] == '$':
        return ref
    if ref not in env:
        raise Exception('Cannot expand variable $%s' % ref)
    return env[ref]
constant[ If v is a variable reference (for example: '$myvar'), replace it using the supplied
env dictionary.
Args:
v: the variable to replace if needed.
env: user supplied dictionary.
Raises:
Exception if v is a variable reference but it is not found in env.
]
if compare[call[name[len], parameter[name[v]]] equal[==] constant[0]] begin[:]
return[name[v]]
if compare[call[name[v]][constant[0]] equal[==] constant[$]] begin[:]
variable[v] assign[=] call[name[v]][<ast.Slice object at 0x7da18dc05120>]
if <ast.BoolOp object at 0x7da18dc051b0> begin[:]
if compare[name[v] in name[env]] begin[:]
variable[v] assign[=] call[name[env]][name[v]]
return[name[v]] | keyword[def] identifier[expand_var] ( identifier[v] , identifier[env] ):
literal[string]
keyword[if] identifier[len] ( identifier[v] )== literal[int] :
keyword[return] identifier[v]
keyword[if] identifier[v] [ literal[int] ]== literal[string] :
identifier[v] = identifier[v] [ literal[int] :]
keyword[if] identifier[len] ( identifier[v] ) keyword[and] identifier[v] [ literal[int] ]!= literal[string] :
keyword[if] identifier[v] keyword[in] identifier[env] :
identifier[v] = identifier[env] [ identifier[v] ]
keyword[else] :
keyword[raise] identifier[Exception] ( literal[string] % identifier[v] )
keyword[return] identifier[v] | def expand_var(v, env):
""" If v is a variable reference (for example: '$myvar'), replace it using the supplied
env dictionary.
Args:
v: the variable to replace if needed.
env: user supplied dictionary.
Raises:
Exception if v is a variable reference but it is not found in env.
"""
if len(v) == 0:
return v # depends on [control=['if'], data=[]]
# Using len() and v[0] instead of startswith makes this Unicode-safe.
if v[0] == '$':
v = v[1:]
if len(v) and v[0] != '$':
if v in env:
v = env[v] # depends on [control=['if'], data=['v', 'env']]
else:
raise Exception('Cannot expand variable $%s' % v) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
return v |
def prior_model_name_constant_tuples_dict(self):
    """
    Returns
    -------
    class_constants_dict: {String: [Constant]}
        A dictionary mapping_matrix the names of priors to lists of associated constants
    """
    result = {}
    for name, prior_model in self.prior_model_tuples:
        result[name] = list(prior_model.constant_tuples)
    return result
constant[
Returns
-------
class_constants_dict: {String: [Constant]}
A dictionary mapping_matrix the names of priors to lists of associated constants
]
return[<ast.DictComp object at 0x7da18fe91b40>] | keyword[def] identifier[prior_model_name_constant_tuples_dict] ( identifier[self] ):
literal[string]
keyword[return] { identifier[name] : identifier[list] ( identifier[prior_model] . identifier[constant_tuples] ) keyword[for] identifier[name] , identifier[prior_model] keyword[in] identifier[self] . identifier[prior_model_tuples] } | def prior_model_name_constant_tuples_dict(self):
"""
Returns
-------
class_constants_dict: {String: [Constant]}
A dictionary mapping_matrix the names of priors to lists of associated constants
"""
return {name: list(prior_model.constant_tuples) for (name, prior_model) in self.prior_model_tuples} |
def chimera_layout(G, scale=1., center=None, dim=2):
    """Positions the nodes of graph G in a Chimera cross topology.
    NumPy (http://scipy.org) is required for this function.
    Parameters
    ----------
    G : NetworkX graph
        Should be a Chimera graph or a subgraph of a
        Chimera graph. If every node in G has a `chimera_index`
        attribute, those are used to place the nodes. Otherwise makes
        a best-effort attempt to find positions.
    scale : float (default 1.)
        Scale factor. When scale = 1, all positions fit within [0, 1]
        on the x-axis and [-1, 0] on the y-axis.
    center : None or array (default None)
        Coordinates of the top left corner.
    dim : int (default 2)
        Number of dimensions. When dim > 2, all extra dimensions are
        set to 0.
    Returns
    -------
    pos : dict
        A dictionary of positions keyed by node.
    Examples
    --------
    >>> G = dnx.chimera_graph(1)
    >>> pos = dnx.chimera_layout(G)
    """
    if not isinstance(G, nx.Graph):
        # Accept an edge list (or similar iterable) by promoting it to a Graph.
        empty_graph = nx.Graph()
        empty_graph.add_edges_from(G)
        G = empty_graph
    # now we get chimera coordinates for the translation
    # first, check if we made it
    if G.graph.get("family") == "chimera":
        # Graph carries its Chimera dimensions in the graph attributes.
        m = G.graph['rows']
        n = G.graph['columns']
        t = G.graph['tile']
        # get a node placement function
        xy_coords = chimera_node_placer_2d(m, n, t, scale, center, dim)
        if G.graph.get('labels') == 'coordinate':
            # Nodes are already (i, j, u, k) coordinate tuples.
            pos = {v: xy_coords(*v) for v in G.nodes()}
        elif G.graph.get('data'):
            # Chimera indices are stored per-node in the node data dicts.
            pos = {v: xy_coords(*dat['chimera_index']) for v, dat in G.nodes(data=True)}
        else:
            # Linear labels: convert each node to its coordinate tuple first.
            coord = chimera_coordinates(m, n, t)
            pos = {v: xy_coords(*coord.tuple(v)) for v in G.nodes()}
    else:
        # best case scenario, each node in G has a chimera_index attribute. Otherwise
        # we will try to determine it using the find_chimera_indices function.
        if all('chimera_index' in dat for __, dat in G.nodes(data=True)):
            chimera_indices = {v: dat['chimera_index'] for v, dat in G.nodes(data=True)}
        else:
            chimera_indices = find_chimera_indices(G)
        # we could read these off of the name attribute for G, but we would want the values in
        # the nodes to override the name in case of conflict.
        m = max(idx[0] for idx in itervalues(chimera_indices)) + 1
        n = max(idx[1] for idx in itervalues(chimera_indices)) + 1
        t = max(idx[3] for idx in itervalues(chimera_indices)) + 1
        xy_coords = chimera_node_placer_2d(m, n, t, scale, center, dim)
        # compute our coordinates
        pos = {v: xy_coords(i, j, u, k) for v, (i, j, u, k) in iteritems(chimera_indices)}
    return pos
constant[Positions the nodes of graph G in a Chimera cross topology.
NumPy (http://scipy.org) is required for this function.
Parameters
----------
G : NetworkX graph
Should be a Chimera graph or a subgraph of a
Chimera graph. If every node in G has a `chimera_index`
attribute, those are used to place the nodes. Otherwise makes
a best-effort attempt to find positions.
scale : float (default 1.)
Scale factor. When scale = 1, all positions fit within [0, 1]
on the x-axis and [-1, 0] on the y-axis.
center : None or array (default None)
Coordinates of the top left corner.
dim : int (default 2)
Number of dimensions. When dim > 2, all extra dimensions are
set to 0.
Returns
-------
pos : dict
A dictionary of positions keyed by node.
Examples
--------
>>> G = dnx.chimera_graph(1)
>>> pos = dnx.chimera_layout(G)
]
if <ast.UnaryOp object at 0x7da1b088bc40> begin[:]
variable[empty_graph] assign[=] call[name[nx].Graph, parameter[]]
call[name[empty_graph].add_edges_from, parameter[name[G]]]
variable[G] assign[=] name[empty_graph]
if compare[call[name[G].graph.get, parameter[constant[family]]] equal[==] constant[chimera]] begin[:]
variable[m] assign[=] call[name[G].graph][constant[rows]]
variable[n] assign[=] call[name[G].graph][constant[columns]]
variable[t] assign[=] call[name[G].graph][constant[tile]]
variable[xy_coords] assign[=] call[name[chimera_node_placer_2d], parameter[name[m], name[n], name[t], name[scale], name[center], name[dim]]]
if compare[call[name[G].graph.get, parameter[constant[labels]]] equal[==] constant[coordinate]] begin[:]
variable[pos] assign[=] <ast.DictComp object at 0x7da1b088b970>
return[name[pos]] | keyword[def] identifier[chimera_layout] ( identifier[G] , identifier[scale] = literal[int] , identifier[center] = keyword[None] , identifier[dim] = literal[int] ):
literal[string]
keyword[if] keyword[not] identifier[isinstance] ( identifier[G] , identifier[nx] . identifier[Graph] ):
identifier[empty_graph] = identifier[nx] . identifier[Graph] ()
identifier[empty_graph] . identifier[add_edges_from] ( identifier[G] )
identifier[G] = identifier[empty_graph]
keyword[if] identifier[G] . identifier[graph] . identifier[get] ( literal[string] )== literal[string] :
identifier[m] = identifier[G] . identifier[graph] [ literal[string] ]
identifier[n] = identifier[G] . identifier[graph] [ literal[string] ]
identifier[t] = identifier[G] . identifier[graph] [ literal[string] ]
identifier[xy_coords] = identifier[chimera_node_placer_2d] ( identifier[m] , identifier[n] , identifier[t] , identifier[scale] , identifier[center] , identifier[dim] )
keyword[if] identifier[G] . identifier[graph] . identifier[get] ( literal[string] )== literal[string] :
identifier[pos] ={ identifier[v] : identifier[xy_coords] (* identifier[v] ) keyword[for] identifier[v] keyword[in] identifier[G] . identifier[nodes] ()}
keyword[elif] identifier[G] . identifier[graph] . identifier[get] ( literal[string] ):
identifier[pos] ={ identifier[v] : identifier[xy_coords] (* identifier[dat] [ literal[string] ]) keyword[for] identifier[v] , identifier[dat] keyword[in] identifier[G] . identifier[nodes] ( identifier[data] = keyword[True] )}
keyword[else] :
identifier[coord] = identifier[chimera_coordinates] ( identifier[m] , identifier[n] , identifier[t] )
identifier[pos] ={ identifier[v] : identifier[xy_coords] (* identifier[coord] . identifier[tuple] ( identifier[v] )) keyword[for] identifier[v] keyword[in] identifier[G] . identifier[nodes] ()}
keyword[else] :
keyword[if] identifier[all] ( literal[string] keyword[in] identifier[dat] keyword[for] identifier[__] , identifier[dat] keyword[in] identifier[G] . identifier[nodes] ( identifier[data] = keyword[True] )):
identifier[chimera_indices] ={ identifier[v] : identifier[dat] [ literal[string] ] keyword[for] identifier[v] , identifier[dat] keyword[in] identifier[G] . identifier[nodes] ( identifier[data] = keyword[True] )}
keyword[else] :
identifier[chimera_indices] = identifier[find_chimera_indices] ( identifier[G] )
identifier[m] = identifier[max] ( identifier[idx] [ literal[int] ] keyword[for] identifier[idx] keyword[in] identifier[itervalues] ( identifier[chimera_indices] ))+ literal[int]
identifier[n] = identifier[max] ( identifier[idx] [ literal[int] ] keyword[for] identifier[idx] keyword[in] identifier[itervalues] ( identifier[chimera_indices] ))+ literal[int]
identifier[t] = identifier[max] ( identifier[idx] [ literal[int] ] keyword[for] identifier[idx] keyword[in] identifier[itervalues] ( identifier[chimera_indices] ))+ literal[int]
identifier[xy_coords] = identifier[chimera_node_placer_2d] ( identifier[m] , identifier[n] , identifier[t] , identifier[scale] , identifier[center] , identifier[dim] )
identifier[pos] ={ identifier[v] : identifier[xy_coords] ( identifier[i] , identifier[j] , identifier[u] , identifier[k] ) keyword[for] identifier[v] ,( identifier[i] , identifier[j] , identifier[u] , identifier[k] ) keyword[in] identifier[iteritems] ( identifier[chimera_indices] )}
keyword[return] identifier[pos] | def chimera_layout(G, scale=1.0, center=None, dim=2):
"""Positions the nodes of graph G in a Chimera cross topology.
NumPy (http://scipy.org) is required for this function.
Parameters
----------
G : NetworkX graph
Should be a Chimera graph or a subgraph of a
Chimera graph. If every node in G has a `chimera_index`
attribute, those are used to place the nodes. Otherwise makes
a best-effort attempt to find positions.
scale : float (default 1.)
Scale factor. When scale = 1, all positions fit within [0, 1]
on the x-axis and [-1, 0] on the y-axis.
center : None or array (default None)
Coordinates of the top left corner.
dim : int (default 2)
Number of dimensions. When dim > 2, all extra dimensions are
set to 0.
Returns
-------
pos : dict
A dictionary of positions keyed by node.
Examples
--------
>>> G = dnx.chimera_graph(1)
>>> pos = dnx.chimera_layout(G)
"""
if not isinstance(G, nx.Graph):
empty_graph = nx.Graph()
empty_graph.add_edges_from(G)
G = empty_graph # depends on [control=['if'], data=[]]
# now we get chimera coordinates for the translation
# first, check if we made it
if G.graph.get('family') == 'chimera':
m = G.graph['rows']
n = G.graph['columns']
t = G.graph['tile']
# get a node placement function
xy_coords = chimera_node_placer_2d(m, n, t, scale, center, dim)
if G.graph.get('labels') == 'coordinate':
pos = {v: xy_coords(*v) for v in G.nodes()} # depends on [control=['if'], data=[]]
elif G.graph.get('data'):
pos = {v: xy_coords(*dat['chimera_index']) for (v, dat) in G.nodes(data=True)} # depends on [control=['if'], data=[]]
else:
coord = chimera_coordinates(m, n, t)
pos = {v: xy_coords(*coord.tuple(v)) for v in G.nodes()} # depends on [control=['if'], data=[]]
else:
# best case scenario, each node in G has a chimera_index attribute. Otherwise
# we will try to determine it using the find_chimera_indices function.
if all(('chimera_index' in dat for (__, dat) in G.nodes(data=True))):
chimera_indices = {v: dat['chimera_index'] for (v, dat) in G.nodes(data=True)} # depends on [control=['if'], data=[]]
else:
chimera_indices = find_chimera_indices(G)
# we could read these off of the name attribute for G, but we would want the values in
# the nodes to override the name in case of conflict.
m = max((idx[0] for idx in itervalues(chimera_indices))) + 1
n = max((idx[1] for idx in itervalues(chimera_indices))) + 1
t = max((idx[3] for idx in itervalues(chimera_indices))) + 1
xy_coords = chimera_node_placer_2d(m, n, t, scale, center, dim)
# compute our coordinates
pos = {v: xy_coords(i, j, u, k) for (v, (i, j, u, k)) in iteritems(chimera_indices)}
return pos |
def daArray(arry, dtype=float):
    """
    Build a numpy-backed distributed array from array-like data.

    @param arry array-like data used to initialise the distributed array
    @param dtype element type of the resulting array (defaults to float)
    @return DistArray with the same shape/dtype as the converted input,
            filled with its values
    """
    # numpy.float was a deprecated alias for the builtin float and was
    # removed in NumPy 1.24; using the builtin keeps behavior identical
    # while working on modern NumPy.
    src = numpy.array(arry, dtype)
    res = DistArray(src.shape, src.dtype)
    res[:] = src
    return res
constant[
Array constructor for numpy distributed array
@param arry numpy-like array
]
variable[a] assign[=] call[name[numpy].array, parameter[name[arry], name[dtype]]]
variable[res] assign[=] call[name[DistArray], parameter[name[a].shape, name[a].dtype]]
call[name[res]][<ast.Slice object at 0x7da1b0bceef0>] assign[=] name[a]
return[name[res]] | keyword[def] identifier[daArray] ( identifier[arry] , identifier[dtype] = identifier[numpy] . identifier[float] ):
literal[string]
identifier[a] = identifier[numpy] . identifier[array] ( identifier[arry] , identifier[dtype] )
identifier[res] = identifier[DistArray] ( identifier[a] . identifier[shape] , identifier[a] . identifier[dtype] )
identifier[res] [:]= identifier[a]
keyword[return] identifier[res] | def daArray(arry, dtype=numpy.float):
"""
Array constructor for numpy distributed array
@param arry numpy-like array
"""
a = numpy.array(arry, dtype)
res = DistArray(a.shape, a.dtype)
res[:] = a
return res |
def strip_figures(figure):
    """
    Split a Plotly figure into one single-trace figure per trace.

    Parameters:
    -----------
    figure : Figure
            Plotly Figure

    Returns a list of figure dicts, each carrying exactly one trace from
    ``figure['data']`` together with the original layout.
    """
    layout = figure['layout']
    return [dict(data=[trace], layout=layout) for trace in figure['data']]
constant[
Strips a figure into multiple figures with a trace on each of them
Parameters:
-----------
figure : Figure
Plotly Figure
]
variable[fig] assign[=] list[[]]
for taget[name[trace]] in starred[call[name[figure]][constant[data]]] begin[:]
call[name[fig].append, parameter[call[name[dict], parameter[]]]]
return[name[fig]] | keyword[def] identifier[strip_figures] ( identifier[figure] ):
literal[string]
identifier[fig] =[]
keyword[for] identifier[trace] keyword[in] identifier[figure] [ literal[string] ]:
identifier[fig] . identifier[append] ( identifier[dict] ( identifier[data] =[ identifier[trace] ], identifier[layout] = identifier[figure] [ literal[string] ]))
keyword[return] identifier[fig] | def strip_figures(figure):
"""
Strips a figure into multiple figures with a trace on each of them
Parameters:
-----------
figure : Figure
Plotly Figure
"""
fig = []
for trace in figure['data']:
fig.append(dict(data=[trace], layout=figure['layout'])) # depends on [control=['for'], data=['trace']]
return fig |
def iexSymbolsDF(token='', version=''):
    '''This call returns an array of symbols the Investors Exchange supports for trading.

    This list is updated daily as of 7:45 a.m. ET. Symbols may be added or removed by the Investors Exchange after the list was produced.

    https://iexcloud.io/docs/api/#iex-symbols
    8am, 9am, 12pm, 1pm UTC daily

    Args:
        token (string); Access token
        version (string); API version

    Returns:
        DataFrame: result indexed by symbol, with date columns parsed
    '''
    frame = pd.DataFrame(iexSymbols(token, version))
    # Normalise date-like columns, then key the frame on the symbol.
    _toDatetime(frame)
    _reindex(frame, 'symbol')
    return frame
constant[This call returns an array of symbols the Investors Exchange supports for trading.
This list is updated daily as of 7:45 a.m. ET. Symbols may be added or removed by the Investors Exchange after the list was produced.
https://iexcloud.io/docs/api/#iex-symbols
8am, 9am, 12pm, 1pm UTC daily
Args:
token (string); Access token
version (string); API version
Returns:
DataFrame: result
]
variable[df] assign[=] call[name[pd].DataFrame, parameter[call[name[iexSymbols], parameter[name[token], name[version]]]]]
call[name[_toDatetime], parameter[name[df]]]
call[name[_reindex], parameter[name[df], constant[symbol]]]
return[name[df]] | keyword[def] identifier[iexSymbolsDF] ( identifier[token] = literal[string] , identifier[version] = literal[string] ):
literal[string]
identifier[df] = identifier[pd] . identifier[DataFrame] ( identifier[iexSymbols] ( identifier[token] , identifier[version] ))
identifier[_toDatetime] ( identifier[df] )
identifier[_reindex] ( identifier[df] , literal[string] )
keyword[return] identifier[df] | def iexSymbolsDF(token='', version=''):
"""This call returns an array of symbols the Investors Exchange supports for trading.
This list is updated daily as of 7:45 a.m. ET. Symbols may be added or removed by the Investors Exchange after the list was produced.
https://iexcloud.io/docs/api/#iex-symbols
8am, 9am, 12pm, 1pm UTC daily
Args:
token (string); Access token
version (string); API version
Returns:
DataFrame: result
"""
df = pd.DataFrame(iexSymbols(token, version))
_toDatetime(df)
_reindex(df, 'symbol')
return df |
def detach_all(self):
    """
    Detach from every tracked class and object.

    Puts the original constructors back in place and empties all of the
    tracking containers.
    """
    self.detach_all_classes()
    # Drop every tracked object and its index entries.
    self.objects.clear()
    self.index.clear()
    # Empty the list in place so existing references observe the change.
    del self._keepalive[:]
self._keepalive[:] = [] | def function[detach_all, parameter[self]]:
constant[
Detach from all tracked classes and objects.
Restore the original constructors and cleanse the tracking lists.
]
call[name[self].detach_all_classes, parameter[]]
call[name[self].objects.clear, parameter[]]
call[name[self].index.clear, parameter[]]
call[name[self]._keepalive][<ast.Slice object at 0x7da2043447c0>] assign[=] list[[]] | keyword[def] identifier[detach_all] ( identifier[self] ):
literal[string]
identifier[self] . identifier[detach_all_classes] ()
identifier[self] . identifier[objects] . identifier[clear] ()
identifier[self] . identifier[index] . identifier[clear] ()
identifier[self] . identifier[_keepalive] [:]=[] | def detach_all(self):
"""
Detach from all tracked classes and objects.
Restore the original constructors and cleanse the tracking lists.
"""
self.detach_all_classes()
self.objects.clear()
self.index.clear()
self._keepalive[:] = [] |
def call_handlers(self, msg):
""" Reimplemented to emit signals instead of making callbacks.
"""
# Emit the generic signal.
self.message_received.emit(msg)
# Emit signals for specialized message types.
msg_type = msg['header']['msg_type']
signal = getattr(self, msg_type, None)
if signal:
signal.emit(msg)
if not self._handlers_called:
self.first_reply.emit()
self._handlers_called = True | def function[call_handlers, parameter[self, msg]]:
constant[ Reimplemented to emit signals instead of making callbacks.
]
call[name[self].message_received.emit, parameter[name[msg]]]
variable[msg_type] assign[=] call[call[name[msg]][constant[header]]][constant[msg_type]]
variable[signal] assign[=] call[name[getattr], parameter[name[self], name[msg_type], constant[None]]]
if name[signal] begin[:]
call[name[signal].emit, parameter[name[msg]]]
if <ast.UnaryOp object at 0x7da18f8116c0> begin[:]
call[name[self].first_reply.emit, parameter[]]
name[self]._handlers_called assign[=] constant[True] | keyword[def] identifier[call_handlers] ( identifier[self] , identifier[msg] ):
literal[string]
identifier[self] . identifier[message_received] . identifier[emit] ( identifier[msg] )
identifier[msg_type] = identifier[msg] [ literal[string] ][ literal[string] ]
identifier[signal] = identifier[getattr] ( identifier[self] , identifier[msg_type] , keyword[None] )
keyword[if] identifier[signal] :
identifier[signal] . identifier[emit] ( identifier[msg] )
keyword[if] keyword[not] identifier[self] . identifier[_handlers_called] :
identifier[self] . identifier[first_reply] . identifier[emit] ()
identifier[self] . identifier[_handlers_called] = keyword[True] | def call_handlers(self, msg):
""" Reimplemented to emit signals instead of making callbacks.
"""
# Emit the generic signal.
self.message_received.emit(msg)
# Emit signals for specialized message types.
msg_type = msg['header']['msg_type']
signal = getattr(self, msg_type, None)
if signal:
signal.emit(msg) # depends on [control=['if'], data=[]]
if not self._handlers_called:
self.first_reply.emit()
self._handlers_called = True # depends on [control=['if'], data=[]] |
def insert_sequences_into_tree(aln, moltype, params=None,
                               write_log=True):
    """Returns a tree from Alignment object aln.

    aln: an xxx.Alignment object, or data that can be used to build one.

    moltype: cogent.core.moltype.MolType object (not used here; kept for
        interface compatibility with the other insertion backends).

    params: dict of parameters to pass in to the pplacer app controller.
        Must contain an '--out-dir' entry.  None is treated as an empty
        dict.

    write_log: when True, write pplacer's stdout to a log file inside
        params['--out-dir'].

    The result will be an xxx.Alignment object, or None if tree fails.
    """
    # Fix for the mutable-default-argument pitfall: the old default
    # ({}) was a single dict shared across calls.
    if params is None:
        params = {}
    # convert aln to phy since seq_names need fixed to run through pplacer
    new_aln = get_align_for_phylip(StringIO(aln))
    # convert aln to fasta in case it is not already a fasta file
    aln2 = Alignment(new_aln)
    seqs = aln2.toFasta()
    ih = '_input_as_multiline_string'

    pplacer_app = Pplacer(params=params,
                          InputHandler=ih,
                          WorkingDir=None,
                          SuppressStderr=False,
                          SuppressStdout=False)

    pplacer_result = pplacer_app(seqs)

    # write a log file capturing pplacer's stdout
    if write_log:
        log_fp = join(params["--out-dir"], 'log_pplacer_' + \
                  split(get_tmp_filename())[-1])
        log_file = open(log_fp, 'w')
        try:
            log_file.write(pplacer_result['StdOut'].read())
        finally:
            # ensure the handle is released even if the read/write fails
            log_file.close()

    # use guppy to convert json file into a placement tree
    guppy_params = {'tog': None}

    new_tree = build_tree_from_json_using_params(pplacer_result['json'].name, \
                                                 output_dir=params['--out-dir'], \
                                                 params=guppy_params)

    pplacer_result.cleanUp()

    return new_tree
constant[Returns a tree from Alignment object aln.
aln: an xxx.Alignment object, or data that can be used to build one.
moltype: cogent.core.moltype.MolType object
params: dict of parameters to pass in to the RAxML app controller.
The result will be an xxx.Alignment object, or None if tree fails.
]
variable[new_aln] assign[=] call[name[get_align_for_phylip], parameter[call[name[StringIO], parameter[name[aln]]]]]
variable[aln2] assign[=] call[name[Alignment], parameter[name[new_aln]]]
variable[seqs] assign[=] call[name[aln2].toFasta, parameter[]]
variable[ih] assign[=] constant[_input_as_multiline_string]
variable[pplacer_app] assign[=] call[name[Pplacer], parameter[]]
variable[pplacer_result] assign[=] call[name[pplacer_app], parameter[name[seqs]]]
if name[write_log] begin[:]
variable[log_fp] assign[=] call[name[join], parameter[call[name[params]][constant[--out-dir]], binary_operation[constant[log_pplacer_] + call[call[name[split], parameter[call[name[get_tmp_filename], parameter[]]]]][<ast.UnaryOp object at 0x7da1b0b80610>]]]]
variable[log_file] assign[=] call[name[open], parameter[name[log_fp], constant[w]]]
call[name[log_file].write, parameter[call[call[name[pplacer_result]][constant[StdOut]].read, parameter[]]]]
call[name[log_file].close, parameter[]]
variable[guppy_params] assign[=] dictionary[[<ast.Constant object at 0x7da1b0b819c0>], [<ast.Constant object at 0x7da1b0b82950>]]
variable[new_tree] assign[=] call[name[build_tree_from_json_using_params], parameter[call[name[pplacer_result]][constant[json]].name]]
call[name[pplacer_result].cleanUp, parameter[]]
return[name[new_tree]] | keyword[def] identifier[insert_sequences_into_tree] ( identifier[aln] , identifier[moltype] , identifier[params] ={},
identifier[write_log] = keyword[True] ):
literal[string]
identifier[new_aln] = identifier[get_align_for_phylip] ( identifier[StringIO] ( identifier[aln] ))
identifier[aln2] = identifier[Alignment] ( identifier[new_aln] )
identifier[seqs] = identifier[aln2] . identifier[toFasta] ()
identifier[ih] = literal[string]
identifier[pplacer_app] = identifier[Pplacer] ( identifier[params] = identifier[params] ,
identifier[InputHandler] = identifier[ih] ,
identifier[WorkingDir] = keyword[None] ,
identifier[SuppressStderr] = keyword[False] ,
identifier[SuppressStdout] = keyword[False] )
identifier[pplacer_result] = identifier[pplacer_app] ( identifier[seqs] )
keyword[if] identifier[write_log] :
identifier[log_fp] = identifier[join] ( identifier[params] [ literal[string] ], literal[string] + identifier[split] ( identifier[get_tmp_filename] ())[- literal[int] ])
identifier[log_file] = identifier[open] ( identifier[log_fp] , literal[string] )
identifier[log_file] . identifier[write] ( identifier[pplacer_result] [ literal[string] ]. identifier[read] ())
identifier[log_file] . identifier[close] ()
identifier[guppy_params] ={ literal[string] : keyword[None] }
identifier[new_tree] = identifier[build_tree_from_json_using_params] ( identifier[pplacer_result] [ literal[string] ]. identifier[name] , identifier[output_dir] = identifier[params] [ literal[string] ], identifier[params] = identifier[guppy_params] )
identifier[pplacer_result] . identifier[cleanUp] ()
keyword[return] identifier[new_tree] | def insert_sequences_into_tree(aln, moltype, params={}, write_log=True):
"""Returns a tree from Alignment object aln.
aln: an xxx.Alignment object, or data that can be used to build one.
moltype: cogent.core.moltype.MolType object
params: dict of parameters to pass in to the RAxML app controller.
The result will be an xxx.Alignment object, or None if tree fails.
"""
# convert aln to phy since seq_names need fixed to run through pplacer
new_aln = get_align_for_phylip(StringIO(aln))
# convert aln to fasta in case it is not already a fasta file
aln2 = Alignment(new_aln)
seqs = aln2.toFasta()
ih = '_input_as_multiline_string'
pplacer_app = Pplacer(params=params, InputHandler=ih, WorkingDir=None, SuppressStderr=False, SuppressStdout=False)
pplacer_result = pplacer_app(seqs)
# write a log file
if write_log:
log_fp = join(params['--out-dir'], 'log_pplacer_' + split(get_tmp_filename())[-1])
log_file = open(log_fp, 'w')
log_file.write(pplacer_result['StdOut'].read())
log_file.close() # depends on [control=['if'], data=[]]
# use guppy to convert json file into a placement tree
guppy_params = {'tog': None}
new_tree = build_tree_from_json_using_params(pplacer_result['json'].name, output_dir=params['--out-dir'], params=guppy_params)
pplacer_result.cleanUp()
return new_tree |
def _first_batch(sock_info, db, coll, query, ntoreturn,
                 slave_ok, codec_options, read_preference, cmd, listeners):
    """Simple query helper for retrieving a first (and possibly only) batch.

    Sends *query* as a legacy wire-protocol query against ``db.coll`` over
    *sock_info*, then reshapes the reply so it looks like a command
    response, publishing command monitoring events through *listeners*
    when they are enabled.
    """
    query = _Query(
        0, db, coll, 0, query, None, codec_options,
        read_preference, ntoreturn, 0, DEFAULT_READ_CONCERN, None, None,
        None)

    # The command name (first key of the command document) is reported in
    # the monitoring events below.
    name = next(iter(cmd))
    publish = listeners.enabled_for_commands
    if publish:
        start = datetime.datetime.now()
    request_id, msg, max_doc_size = query.get_message(slave_ok, sock_info)
    if publish:
        # Time spent encoding the message is folded into the reported
        # command duration; the timer then restarts for the round trip.
        encoding_duration = datetime.datetime.now() - start
        listeners.publish_command_start(
            cmd, db, request_id, sock_info.address)
        start = datetime.datetime.now()

    sock_info.send_message(msg, max_doc_size)
    reply = sock_info.receive_message(request_id)
    try:
        docs = reply.unpack_response(None, codec_options)
    except Exception as exc:
        if publish:
            duration = (datetime.datetime.now() - start) + encoding_duration
            # Server-reported failures carry structured details; anything
            # else is converted into a failure document.
            if isinstance(exc, (NotMasterError, OperationFailure)):
                failure = exc.details
            else:
                failure = _convert_exception(exc)
            listeners.publish_command_failure(
                duration, failure, name, request_id, sock_info.address)
        raise
    # listIndexes
    if 'cursor' in cmd:
        # Emulate a cursor-style command response ({'cursor': {...}}).
        result = {
            u'cursor': {
                u'firstBatch': docs,
                u'id': reply.cursor_id,
                u'ns': u'%s.%s' % (db, coll)
            },
            u'ok': 1.0
        }
    # fsyncUnlock, currentOp
    else:
        # Single-document responses: first returned doc (or empty dict),
        # marked successful.
        result = docs[0] if docs else {}
        result[u'ok'] = 1.0
    if publish:
        duration = (datetime.datetime.now() - start) + encoding_duration
        listeners.publish_command_success(
            duration, result, name, request_id, sock_info.address)
    return result
constant[Simple query helper for retrieving a first (and possibly only) batch.]
variable[query] assign[=] call[name[_Query], parameter[constant[0], name[db], name[coll], constant[0], name[query], constant[None], name[codec_options], name[read_preference], name[ntoreturn], constant[0], name[DEFAULT_READ_CONCERN], constant[None], constant[None], constant[None]]]
variable[name] assign[=] call[name[next], parameter[call[name[iter], parameter[name[cmd]]]]]
variable[publish] assign[=] name[listeners].enabled_for_commands
if name[publish] begin[:]
variable[start] assign[=] call[name[datetime].datetime.now, parameter[]]
<ast.Tuple object at 0x7da2054a50f0> assign[=] call[name[query].get_message, parameter[name[slave_ok], name[sock_info]]]
if name[publish] begin[:]
variable[encoding_duration] assign[=] binary_operation[call[name[datetime].datetime.now, parameter[]] - name[start]]
call[name[listeners].publish_command_start, parameter[name[cmd], name[db], name[request_id], name[sock_info].address]]
variable[start] assign[=] call[name[datetime].datetime.now, parameter[]]
call[name[sock_info].send_message, parameter[name[msg], name[max_doc_size]]]
variable[reply] assign[=] call[name[sock_info].receive_message, parameter[name[request_id]]]
<ast.Try object at 0x7da2054a5d80>
if compare[constant[cursor] in name[cmd]] begin[:]
variable[result] assign[=] dictionary[[<ast.Constant object at 0x7da2054a4a90>, <ast.Constant object at 0x7da2054a5480>], [<ast.Dict object at 0x7da2054a6620>, <ast.Constant object at 0x7da2054a4670>]]
if name[publish] begin[:]
variable[duration] assign[=] binary_operation[binary_operation[call[name[datetime].datetime.now, parameter[]] - name[start]] + name[encoding_duration]]
call[name[listeners].publish_command_success, parameter[name[duration], name[result], name[name], name[request_id], name[sock_info].address]]
return[name[result]] | keyword[def] identifier[_first_batch] ( identifier[sock_info] , identifier[db] , identifier[coll] , identifier[query] , identifier[ntoreturn] ,
identifier[slave_ok] , identifier[codec_options] , identifier[read_preference] , identifier[cmd] , identifier[listeners] ):
literal[string]
identifier[query] = identifier[_Query] (
literal[int] , identifier[db] , identifier[coll] , literal[int] , identifier[query] , keyword[None] , identifier[codec_options] ,
identifier[read_preference] , identifier[ntoreturn] , literal[int] , identifier[DEFAULT_READ_CONCERN] , keyword[None] , keyword[None] ,
keyword[None] )
identifier[name] = identifier[next] ( identifier[iter] ( identifier[cmd] ))
identifier[publish] = identifier[listeners] . identifier[enabled_for_commands]
keyword[if] identifier[publish] :
identifier[start] = identifier[datetime] . identifier[datetime] . identifier[now] ()
identifier[request_id] , identifier[msg] , identifier[max_doc_size] = identifier[query] . identifier[get_message] ( identifier[slave_ok] , identifier[sock_info] )
keyword[if] identifier[publish] :
identifier[encoding_duration] = identifier[datetime] . identifier[datetime] . identifier[now] ()- identifier[start]
identifier[listeners] . identifier[publish_command_start] (
identifier[cmd] , identifier[db] , identifier[request_id] , identifier[sock_info] . identifier[address] )
identifier[start] = identifier[datetime] . identifier[datetime] . identifier[now] ()
identifier[sock_info] . identifier[send_message] ( identifier[msg] , identifier[max_doc_size] )
identifier[reply] = identifier[sock_info] . identifier[receive_message] ( identifier[request_id] )
keyword[try] :
identifier[docs] = identifier[reply] . identifier[unpack_response] ( keyword[None] , identifier[codec_options] )
keyword[except] identifier[Exception] keyword[as] identifier[exc] :
keyword[if] identifier[publish] :
identifier[duration] =( identifier[datetime] . identifier[datetime] . identifier[now] ()- identifier[start] )+ identifier[encoding_duration]
keyword[if] identifier[isinstance] ( identifier[exc] ,( identifier[NotMasterError] , identifier[OperationFailure] )):
identifier[failure] = identifier[exc] . identifier[details]
keyword[else] :
identifier[failure] = identifier[_convert_exception] ( identifier[exc] )
identifier[listeners] . identifier[publish_command_failure] (
identifier[duration] , identifier[failure] , identifier[name] , identifier[request_id] , identifier[sock_info] . identifier[address] )
keyword[raise]
keyword[if] literal[string] keyword[in] identifier[cmd] :
identifier[result] ={
literal[string] :{
literal[string] : identifier[docs] ,
literal[string] : identifier[reply] . identifier[cursor_id] ,
literal[string] : literal[string] %( identifier[db] , identifier[coll] )
},
literal[string] : literal[int]
}
keyword[else] :
identifier[result] = identifier[docs] [ literal[int] ] keyword[if] identifier[docs] keyword[else] {}
identifier[result] [ literal[string] ]= literal[int]
keyword[if] identifier[publish] :
identifier[duration] =( identifier[datetime] . identifier[datetime] . identifier[now] ()- identifier[start] )+ identifier[encoding_duration]
identifier[listeners] . identifier[publish_command_success] (
identifier[duration] , identifier[result] , identifier[name] , identifier[request_id] , identifier[sock_info] . identifier[address] )
keyword[return] identifier[result] | def _first_batch(sock_info, db, coll, query, ntoreturn, slave_ok, codec_options, read_preference, cmd, listeners):
"""Simple query helper for retrieving a first (and possibly only) batch."""
query = _Query(0, db, coll, 0, query, None, codec_options, read_preference, ntoreturn, 0, DEFAULT_READ_CONCERN, None, None, None)
name = next(iter(cmd))
publish = listeners.enabled_for_commands
if publish:
start = datetime.datetime.now() # depends on [control=['if'], data=[]]
(request_id, msg, max_doc_size) = query.get_message(slave_ok, sock_info)
if publish:
encoding_duration = datetime.datetime.now() - start
listeners.publish_command_start(cmd, db, request_id, sock_info.address)
start = datetime.datetime.now() # depends on [control=['if'], data=[]]
sock_info.send_message(msg, max_doc_size)
reply = sock_info.receive_message(request_id)
try:
docs = reply.unpack_response(None, codec_options) # depends on [control=['try'], data=[]]
except Exception as exc:
if publish:
duration = datetime.datetime.now() - start + encoding_duration
if isinstance(exc, (NotMasterError, OperationFailure)):
failure = exc.details # depends on [control=['if'], data=[]]
else:
failure = _convert_exception(exc)
listeners.publish_command_failure(duration, failure, name, request_id, sock_info.address) # depends on [control=['if'], data=[]]
raise # depends on [control=['except'], data=['exc']]
# listIndexes
if 'cursor' in cmd:
result = {u'cursor': {u'firstBatch': docs, u'id': reply.cursor_id, u'ns': u'%s.%s' % (db, coll)}, u'ok': 1.0} # depends on [control=['if'], data=[]]
else:
# fsyncUnlock, currentOp
result = docs[0] if docs else {}
result[u'ok'] = 1.0
if publish:
duration = datetime.datetime.now() - start + encoding_duration
listeners.publish_command_success(duration, result, name, request_id, sock_info.address) # depends on [control=['if'], data=[]]
return result |
def _updateMetadata(self, variantFile):
"""
Updates the metadata for his variant set based on the specified
variant file
"""
metadata = self._getMetadataFromVcf(variantFile)
if self._metadata is None:
self._metadata = metadata | def function[_updateMetadata, parameter[self, variantFile]]:
constant[
Updates the metadata for his variant set based on the specified
variant file
]
variable[metadata] assign[=] call[name[self]._getMetadataFromVcf, parameter[name[variantFile]]]
if compare[name[self]._metadata is constant[None]] begin[:]
name[self]._metadata assign[=] name[metadata] | keyword[def] identifier[_updateMetadata] ( identifier[self] , identifier[variantFile] ):
literal[string]
identifier[metadata] = identifier[self] . identifier[_getMetadataFromVcf] ( identifier[variantFile] )
keyword[if] identifier[self] . identifier[_metadata] keyword[is] keyword[None] :
identifier[self] . identifier[_metadata] = identifier[metadata] | def _updateMetadata(self, variantFile):
"""
Updates the metadata for his variant set based on the specified
variant file
"""
metadata = self._getMetadataFromVcf(variantFile)
if self._metadata is None:
self._metadata = metadata # depends on [control=['if'], data=[]] |
def replace_validation(self):
    """Replace the validation configuration in the selected profile.

    Reads the playbook output variables for the selected profile from the
    redis hash named by its ``tc_playbook_db_context`` arg, builds one
    redis/eq validation rule per non-null variable, then rewrites the
    profile's ``validations`` list and ``tc_playbook_out_variables``
    entry in the profile file on disk.

    TODO: Update this method.
    """
    self.validate_profile_exists()
    profile_data = self.profiles.get(self.args.profile_name)

    # check redis
    # if redis is None:
    #     self.handle_error('Could not get connection to Redis')

    # load hash
    redis_hash = profile_data.get('data', {}).get('args', {}).get('tc_playbook_db_context')
    if redis_hash is None:
        self.handle_error('Could not find redis hash (db context).')

    # load data
    data = self.redis.hgetall(redis_hash)
    if data is None:
        self.handle_error('Could not load data for hash {}.'.format(redis_hash))

    validations = {'rules': [], 'outputs': []}
    # NOTE(review): the loop below rebinds ``data`` (the hash dict) to
    # each decoded value — intentional here since the dict is only
    # iterated once via items().
    for v, d in data.items():
        variable = v.decode('utf-8')
        # data = d.decode('utf-8')
        data = json.loads(d.decode('utf-8'))

        # if data == 'null':
        # skip variables whose stored value is JSON null
        if data is None:
            continue

        validations['outputs'].append(variable)

        # null check
        # one equality rule per variable, checked against redis
        od = OrderedDict()
        od['data'] = data
        od['data_type'] = 'redis'
        od['operator'] = 'eq'
        od['variable'] = variable
        # if variable.endswith('Array'):
        #     od['data'] = json.loads(data)
        #     od['data_type'] = 'redis'
        #     od['operator'] = 'eq'
        #     od['variable'] = variable
        # elif variable.endswith('Binary'):
        #     od['data'] = json.loads(data)
        #     od['data_type'] = 'redis'
        #     od['operator'] = 'eq'
        #     od['variable'] = variable
        # elif variable.endswith('String'):
        #     od['data'] = json.loads(data)
        #     od['data_type'] = 'redis'
        #     od['operator'] = 'eq'
        #     od['variable'] = variable
        validations['rules'].append(od)

    # rewrite the matching profile entry in place, preserving the rest
    # of the profile file
    fqfn = profile_data.get('fqfn')
    with open(fqfn, 'r+') as fh:
        data = json.load(fh)
        for profile in data:
            if profile.get('profile_name') == self.args.profile_name:
                profile['validations'] = validations.get('rules')
                profile['args']['default']['tc_playbook_out_variables'] = ','.join(
                    validations.get('outputs')
                )

        fh.seek(0)
        fh.write(json.dumps(data, indent=2, sort_keys=True))
        fh.truncate()
constant[Replace the validation configuration in the selected profile.
TODO: Update this method.
]
call[name[self].validate_profile_exists, parameter[]]
variable[profile_data] assign[=] call[name[self].profiles.get, parameter[name[self].args.profile_name]]
variable[redis_hash] assign[=] call[call[call[name[profile_data].get, parameter[constant[data], dictionary[[], []]]].get, parameter[constant[args], dictionary[[], []]]].get, parameter[constant[tc_playbook_db_context]]]
if compare[name[redis_hash] is constant[None]] begin[:]
call[name[self].handle_error, parameter[constant[Could not find redis hash (db context).]]]
variable[data] assign[=] call[name[self].redis.hgetall, parameter[name[redis_hash]]]
if compare[name[data] is constant[None]] begin[:]
call[name[self].handle_error, parameter[call[constant[Could not load data for hash {}.].format, parameter[name[redis_hash]]]]]
variable[validations] assign[=] dictionary[[<ast.Constant object at 0x7da1b23460e0>, <ast.Constant object at 0x7da1b2347a00>], [<ast.List object at 0x7da1b2345600>, <ast.List object at 0x7da1b2344ee0>]]
for taget[tuple[[<ast.Name object at 0x7da1b2347670>, <ast.Name object at 0x7da1b23479a0>]]] in starred[call[name[data].items, parameter[]]] begin[:]
variable[variable] assign[=] call[name[v].decode, parameter[constant[utf-8]]]
variable[data] assign[=] call[name[json].loads, parameter[call[name[d].decode, parameter[constant[utf-8]]]]]
if compare[name[data] is constant[None]] begin[:]
continue
call[call[name[validations]][constant[outputs]].append, parameter[name[variable]]]
variable[od] assign[=] call[name[OrderedDict], parameter[]]
call[name[od]][constant[data]] assign[=] name[data]
call[name[od]][constant[data_type]] assign[=] constant[redis]
call[name[od]][constant[operator]] assign[=] constant[eq]
call[name[od]][constant[variable]] assign[=] name[variable]
call[call[name[validations]][constant[rules]].append, parameter[name[od]]]
variable[fqfn] assign[=] call[name[profile_data].get, parameter[constant[fqfn]]]
with call[name[open], parameter[name[fqfn], constant[r+]]] begin[:]
variable[data] assign[=] call[name[json].load, parameter[name[fh]]]
for taget[name[profile]] in starred[name[data]] begin[:]
if compare[call[name[profile].get, parameter[constant[profile_name]]] equal[==] name[self].args.profile_name] begin[:]
call[name[profile]][constant[validations]] assign[=] call[name[validations].get, parameter[constant[rules]]]
call[call[call[name[profile]][constant[args]]][constant[default]]][constant[tc_playbook_out_variables]] assign[=] call[constant[,].join, parameter[call[name[validations].get, parameter[constant[outputs]]]]]
call[name[fh].seek, parameter[constant[0]]]
call[name[fh].write, parameter[call[name[json].dumps, parameter[name[data]]]]]
call[name[fh].truncate, parameter[]] | keyword[def] identifier[replace_validation] ( identifier[self] ):
literal[string]
identifier[self] . identifier[validate_profile_exists] ()
identifier[profile_data] = identifier[self] . identifier[profiles] . identifier[get] ( identifier[self] . identifier[args] . identifier[profile_name] )
identifier[redis_hash] = identifier[profile_data] . identifier[get] ( literal[string] ,{}). identifier[get] ( literal[string] ,{}). identifier[get] ( literal[string] )
keyword[if] identifier[redis_hash] keyword[is] keyword[None] :
identifier[self] . identifier[handle_error] ( literal[string] )
identifier[data] = identifier[self] . identifier[redis] . identifier[hgetall] ( identifier[redis_hash] )
keyword[if] identifier[data] keyword[is] keyword[None] :
identifier[self] . identifier[handle_error] ( literal[string] . identifier[format] ( identifier[redis_hash] ))
identifier[validations] ={ literal[string] :[], literal[string] :[]}
keyword[for] identifier[v] , identifier[d] keyword[in] identifier[data] . identifier[items] ():
identifier[variable] = identifier[v] . identifier[decode] ( literal[string] )
identifier[data] = identifier[json] . identifier[loads] ( identifier[d] . identifier[decode] ( literal[string] ))
keyword[if] identifier[data] keyword[is] keyword[None] :
keyword[continue]
identifier[validations] [ literal[string] ]. identifier[append] ( identifier[variable] )
identifier[od] = identifier[OrderedDict] ()
identifier[od] [ literal[string] ]= identifier[data]
identifier[od] [ literal[string] ]= literal[string]
identifier[od] [ literal[string] ]= literal[string]
identifier[od] [ literal[string] ]= identifier[variable]
identifier[validations] [ literal[string] ]. identifier[append] ( identifier[od] )
identifier[fqfn] = identifier[profile_data] . identifier[get] ( literal[string] )
keyword[with] identifier[open] ( identifier[fqfn] , literal[string] ) keyword[as] identifier[fh] :
identifier[data] = identifier[json] . identifier[load] ( identifier[fh] )
keyword[for] identifier[profile] keyword[in] identifier[data] :
keyword[if] identifier[profile] . identifier[get] ( literal[string] )== identifier[self] . identifier[args] . identifier[profile_name] :
identifier[profile] [ literal[string] ]= identifier[validations] . identifier[get] ( literal[string] )
identifier[profile] [ literal[string] ][ literal[string] ][ literal[string] ]= literal[string] . identifier[join] (
identifier[validations] . identifier[get] ( literal[string] )
)
identifier[fh] . identifier[seek] ( literal[int] )
identifier[fh] . identifier[write] ( identifier[json] . identifier[dumps] ( identifier[data] , identifier[indent] = literal[int] , identifier[sort_keys] = keyword[True] ))
identifier[fh] . identifier[truncate] () | def replace_validation(self):
"""Replace the validation configuration in the selected profile.
TODO: Update this method.
"""
self.validate_profile_exists()
profile_data = self.profiles.get(self.args.profile_name)
# check redis
# if redis is None:
# self.handle_error('Could not get connection to Redis')
# load hash
redis_hash = profile_data.get('data', {}).get('args', {}).get('tc_playbook_db_context')
if redis_hash is None:
self.handle_error('Could not find redis hash (db context).') # depends on [control=['if'], data=[]]
# load data
data = self.redis.hgetall(redis_hash)
if data is None:
self.handle_error('Could not load data for hash {}.'.format(redis_hash)) # depends on [control=['if'], data=[]]
validations = {'rules': [], 'outputs': []}
for (v, d) in data.items():
variable = v.decode('utf-8')
# data = d.decode('utf-8')
data = json.loads(d.decode('utf-8'))
# if data == 'null':
if data is None:
continue # depends on [control=['if'], data=[]]
validations['outputs'].append(variable)
# null check
od = OrderedDict()
od['data'] = data
od['data_type'] = 'redis'
od['operator'] = 'eq'
od['variable'] = variable
# if variable.endswith('Array'):
# od['data'] = json.loads(data)
# od['data_type'] = 'redis'
# od['operator'] = 'eq'
# od['variable'] = variable
# elif variable.endswith('Binary'):
# od['data'] = json.loads(data)
# od['data_type'] = 'redis'
# od['operator'] = 'eq'
# od['variable'] = variable
# elif variable.endswith('String'):
# od['data'] = json.loads(data)
# od['data_type'] = 'redis'
# od['operator'] = 'eq'
# od['variable'] = variable
validations['rules'].append(od) # depends on [control=['for'], data=[]]
fqfn = profile_data.get('fqfn')
with open(fqfn, 'r+') as fh:
data = json.load(fh)
for profile in data:
if profile.get('profile_name') == self.args.profile_name:
profile['validations'] = validations.get('rules')
profile['args']['default']['tc_playbook_out_variables'] = ','.join(validations.get('outputs')) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['profile']]
fh.seek(0)
fh.write(json.dumps(data, indent=2, sort_keys=True))
fh.truncate() # depends on [control=['with'], data=['fh']] |
def convert_upsample_bilinear(params, w_name, scope_name, inputs, layers, weights, names):
    """
    Convert an upsample_bilinear2d layer to a Keras Lambda layer.

    Args:
        params: dictionary with layer parameters ('output_size', 'align_corners')
        w_name: name prefix in state_dict
        scope_name: pytorch scope name
        inputs: pytorch node inputs
        layers: dictionary with keras tensors
        weights: pytorch state_dict (unused here)
        names: naming scheme selector ('short', 'keep', or anything else)
    """
    print('Converting upsample...')
    # Derive a layer name under the requested scheme.  NOTE(review): the
    # name is never attached to the Lambda layer below; it is kept only so
    # the RNG consumption matches the other converters.
    if names == 'short':
        tf_name = 'UPSL' + random_string(4)
    elif names == 'keep':
        tf_name = w_name
    else:
        tf_name = w_name + str(random.random())

    size = params['output_size']
    corners = params['align_corners'] > 0

    def target_layer(x, size=size, align_corners=corners):
        import tensorflow as tf
        # NCHW -> NHWC, bilinear resize, then back to NCHW.
        channels_last = tf.transpose(x, [0, 2, 3, 1])
        resized = tf.image.resize_images(channels_last, size, align_corners=align_corners)
        return tf.transpose(resized, [0, 3, 1, 2])

    layers[scope_name] = keras.layers.Lambda(target_layer)(layers[inputs[0]])
constant[
Convert upsample_bilinear2d layer.
Args:
params: dictionary with layer parameters
w_name: name prefix in state_dict
scope_name: pytorch scope name
inputs: pytorch node inputs
layers: dictionary with keras tensors
weights: pytorch state_dict
names: use short names for keras layers
]
call[name[print], parameter[constant[Converting upsample...]]]
if compare[name[names] equal[==] constant[short]] begin[:]
variable[tf_name] assign[=] binary_operation[constant[UPSL] + call[name[random_string], parameter[constant[4]]]]
variable[output_size] assign[=] call[name[params]][constant[output_size]]
variable[align_corners] assign[=] compare[call[name[params]][constant[align_corners]] greater[>] constant[0]]
def function[target_layer, parameter[x, size, align_corners]]:
import module[tensorflow] as alias[tf]
variable[x] assign[=] call[name[tf].transpose, parameter[name[x], list[[<ast.Constant object at 0x7da20e9562f0>, <ast.Constant object at 0x7da20e954be0>, <ast.Constant object at 0x7da20e954c40>, <ast.Constant object at 0x7da20e955a20>]]]]
variable[x] assign[=] call[name[tf].image.resize_images, parameter[name[x], name[size]]]
variable[x] assign[=] call[name[tf].transpose, parameter[name[x], list[[<ast.Constant object at 0x7da1b01e1390>, <ast.Constant object at 0x7da1b01e3a30>, <ast.Constant object at 0x7da1b01e0070>, <ast.Constant object at 0x7da1b01e1990>]]]]
return[name[x]]
variable[lambda_layer] assign[=] call[name[keras].layers.Lambda, parameter[name[target_layer]]]
call[name[layers]][name[scope_name]] assign[=] call[name[lambda_layer], parameter[call[name[layers]][call[name[inputs]][constant[0]]]]] | keyword[def] identifier[convert_upsample_bilinear] ( identifier[params] , identifier[w_name] , identifier[scope_name] , identifier[inputs] , identifier[layers] , identifier[weights] , identifier[names] ):
literal[string]
identifier[print] ( literal[string] )
keyword[if] identifier[names] == literal[string] :
identifier[tf_name] = literal[string] + identifier[random_string] ( literal[int] )
keyword[elif] identifier[names] == literal[string] :
identifier[tf_name] = identifier[w_name]
keyword[else] :
identifier[tf_name] = identifier[w_name] + identifier[str] ( identifier[random] . identifier[random] ())
identifier[output_size] = identifier[params] [ literal[string] ]
identifier[align_corners] = identifier[params] [ literal[string] ]> literal[int]
keyword[def] identifier[target_layer] ( identifier[x] , identifier[size] = identifier[output_size] , identifier[align_corners] = identifier[align_corners] ):
keyword[import] identifier[tensorflow] keyword[as] identifier[tf]
identifier[x] = identifier[tf] . identifier[transpose] ( identifier[x] ,[ literal[int] , literal[int] , literal[int] , literal[int] ])
identifier[x] = identifier[tf] . identifier[image] . identifier[resize_images] ( identifier[x] , identifier[size] , identifier[align_corners] = identifier[align_corners] )
identifier[x] = identifier[tf] . identifier[transpose] ( identifier[x] ,[ literal[int] , literal[int] , literal[int] , literal[int] ])
keyword[return] identifier[x]
identifier[lambda_layer] = identifier[keras] . identifier[layers] . identifier[Lambda] ( identifier[target_layer] )
identifier[layers] [ identifier[scope_name] ]= identifier[lambda_layer] ( identifier[layers] [ identifier[inputs] [ literal[int] ]]) | def convert_upsample_bilinear(params, w_name, scope_name, inputs, layers, weights, names):
"""
Convert upsample_bilinear2d layer.
Args:
params: dictionary with layer parameters
w_name: name prefix in state_dict
scope_name: pytorch scope name
inputs: pytorch node inputs
layers: dictionary with keras tensors
weights: pytorch state_dict
names: use short names for keras layers
"""
print('Converting upsample...')
if names == 'short':
tf_name = 'UPSL' + random_string(4) # depends on [control=['if'], data=[]]
elif names == 'keep':
tf_name = w_name # depends on [control=['if'], data=[]]
else:
tf_name = w_name + str(random.random())
output_size = params['output_size']
align_corners = params['align_corners'] > 0
def target_layer(x, size=output_size, align_corners=align_corners):
import tensorflow as tf
x = tf.transpose(x, [0, 2, 3, 1])
x = tf.image.resize_images(x, size, align_corners=align_corners)
x = tf.transpose(x, [0, 3, 1, 2])
return x
lambda_layer = keras.layers.Lambda(target_layer)
layers[scope_name] = lambda_layer(layers[inputs[0]]) |
def _handle_data(self, msg, args, kwargs):
"""Handles data and returns `True` or `False` if everything is done."""
stop = False
try:
if msg == 'DONE':
stop = True
elif msg == 'STORE':
if 'msg' in kwargs:
store_msg = kwargs.pop('msg')
else:
store_msg = args[0]
args = args[1:]
if 'stuff_to_store' in kwargs:
stuff_to_store = kwargs.pop('stuff_to_store')
else:
stuff_to_store = args[0]
args = args[1:]
trajectory_name = kwargs['trajectory_name']
if self._trajectory_name != trajectory_name:
if self._storage_service.is_open:
self._close_file()
self._trajectory_name = trajectory_name
self._open_file()
self._storage_service.store(store_msg, stuff_to_store, *args, **kwargs)
self._storage_service.store(pypetconstants.FLUSH, None)
self._check_and_collect_garbage()
else:
raise RuntimeError('You queued something that was not '
'intended to be queued. I did not understand message '
'`%s`.' % msg)
except Exception:
self._logger.exception('ERROR occurred during storing!')
time.sleep(0.01)
pass # We don't want to kill the queue process in case of an error
return stop | def function[_handle_data, parameter[self, msg, args, kwargs]]:
constant[Handles data and returns `True` or `False` if everything is done.]
variable[stop] assign[=] constant[False]
<ast.Try object at 0x7da1b01e2710>
return[name[stop]] | keyword[def] identifier[_handle_data] ( identifier[self] , identifier[msg] , identifier[args] , identifier[kwargs] ):
literal[string]
identifier[stop] = keyword[False]
keyword[try] :
keyword[if] identifier[msg] == literal[string] :
identifier[stop] = keyword[True]
keyword[elif] identifier[msg] == literal[string] :
keyword[if] literal[string] keyword[in] identifier[kwargs] :
identifier[store_msg] = identifier[kwargs] . identifier[pop] ( literal[string] )
keyword[else] :
identifier[store_msg] = identifier[args] [ literal[int] ]
identifier[args] = identifier[args] [ literal[int] :]
keyword[if] literal[string] keyword[in] identifier[kwargs] :
identifier[stuff_to_store] = identifier[kwargs] . identifier[pop] ( literal[string] )
keyword[else] :
identifier[stuff_to_store] = identifier[args] [ literal[int] ]
identifier[args] = identifier[args] [ literal[int] :]
identifier[trajectory_name] = identifier[kwargs] [ literal[string] ]
keyword[if] identifier[self] . identifier[_trajectory_name] != identifier[trajectory_name] :
keyword[if] identifier[self] . identifier[_storage_service] . identifier[is_open] :
identifier[self] . identifier[_close_file] ()
identifier[self] . identifier[_trajectory_name] = identifier[trajectory_name]
identifier[self] . identifier[_open_file] ()
identifier[self] . identifier[_storage_service] . identifier[store] ( identifier[store_msg] , identifier[stuff_to_store] ,* identifier[args] ,** identifier[kwargs] )
identifier[self] . identifier[_storage_service] . identifier[store] ( identifier[pypetconstants] . identifier[FLUSH] , keyword[None] )
identifier[self] . identifier[_check_and_collect_garbage] ()
keyword[else] :
keyword[raise] identifier[RuntimeError] ( literal[string]
literal[string]
literal[string] % identifier[msg] )
keyword[except] identifier[Exception] :
identifier[self] . identifier[_logger] . identifier[exception] ( literal[string] )
identifier[time] . identifier[sleep] ( literal[int] )
keyword[pass]
keyword[return] identifier[stop] | def _handle_data(self, msg, args, kwargs):
"""Handles data and returns `True` or `False` if everything is done."""
stop = False
try:
if msg == 'DONE':
stop = True # depends on [control=['if'], data=[]]
elif msg == 'STORE':
if 'msg' in kwargs:
store_msg = kwargs.pop('msg') # depends on [control=['if'], data=['kwargs']]
else:
store_msg = args[0]
args = args[1:]
if 'stuff_to_store' in kwargs:
stuff_to_store = kwargs.pop('stuff_to_store') # depends on [control=['if'], data=['kwargs']]
else:
stuff_to_store = args[0]
args = args[1:]
trajectory_name = kwargs['trajectory_name']
if self._trajectory_name != trajectory_name:
if self._storage_service.is_open:
self._close_file() # depends on [control=['if'], data=[]]
self._trajectory_name = trajectory_name
self._open_file() # depends on [control=['if'], data=['trajectory_name']]
self._storage_service.store(store_msg, stuff_to_store, *args, **kwargs)
self._storage_service.store(pypetconstants.FLUSH, None)
self._check_and_collect_garbage() # depends on [control=['if'], data=[]]
else:
raise RuntimeError('You queued something that was not intended to be queued. I did not understand message `%s`.' % msg) # depends on [control=['try'], data=[]]
except Exception:
self._logger.exception('ERROR occurred during storing!')
time.sleep(0.01)
pass # We don't want to kill the queue process in case of an error # depends on [control=['except'], data=[]]
return stop |
def remove_files(self):
    """Remove intermediate files left behind by a run.

    Deletes a fixed set of temporary structure/image files plus one
    ``.svg`` per plotted residue (named from two fields of the residue
    key).  Files that do not exist are skipped silently.
    """
    file_list = ["molecule.svg", "lig.pdb", "HIS.pdb", "PHE.pdb",
                 "TRP.pdb", "TYR.pdb", "lig.mol", "test.xtc"]
    # One SVG per plotted residue, built from the residue key fields.
    for residue in self.topol_data.dict_of_plotted_res.keys():
        file_list.append(residue[1] + residue[2] + ".svg")
    for f in file_list:
        # Truth-test the predicate directly instead of comparing '== True'.
        if os.path.isfile(f):
            os.remove(f)
constant[Removes intermediate files.]
variable[file_list] assign[=] list[[<ast.Constant object at 0x7da1b25d36a0>, <ast.Constant object at 0x7da1b25d3df0>, <ast.Constant object at 0x7da1b25d3a30>, <ast.Constant object at 0x7da1b25d1ab0>, <ast.Constant object at 0x7da1b25d1c60>, <ast.Constant object at 0x7da1b25d10c0>, <ast.Constant object at 0x7da1b25d37f0>, <ast.Constant object at 0x7da1b25d1a50>]]
for taget[name[residue]] in starred[call[name[self].topol_data.dict_of_plotted_res.keys, parameter[]]] begin[:]
call[name[file_list].append, parameter[binary_operation[binary_operation[call[name[residue]][constant[1]] + call[name[residue]][constant[2]]] + constant[.svg]]]]
for taget[name[f]] in starred[name[file_list]] begin[:]
if compare[call[name[os].path.isfile, parameter[name[f]]] equal[==] constant[True]] begin[:]
call[name[os].remove, parameter[name[f]]] | keyword[def] identifier[remove_files] ( identifier[self] ):
literal[string]
identifier[file_list] =[ literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] ]
keyword[for] identifier[residue] keyword[in] identifier[self] . identifier[topol_data] . identifier[dict_of_plotted_res] . identifier[keys] ():
identifier[file_list] . identifier[append] ( identifier[residue] [ literal[int] ]+ identifier[residue] [ literal[int] ]+ literal[string] )
keyword[for] identifier[f] keyword[in] identifier[file_list] :
keyword[if] identifier[os] . identifier[path] . identifier[isfile] ( identifier[f] )== keyword[True] :
identifier[os] . identifier[remove] ( identifier[f] ) | def remove_files(self):
"""Removes intermediate files."""
file_list = ['molecule.svg', 'lig.pdb', 'HIS.pdb', 'PHE.pdb', 'TRP.pdb', 'TYR.pdb', 'lig.mol', 'test.xtc']
for residue in self.topol_data.dict_of_plotted_res.keys():
file_list.append(residue[1] + residue[2] + '.svg') # depends on [control=['for'], data=['residue']]
for f in file_list:
if os.path.isfile(f) == True:
os.remove(f) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['f']] |
def assert_not_visible(self, selector, testid=None, **kwargs):
    """Assert that the element is not visible in the dom
    Args:
        selector (str): the selector used to find the element
        test_id (str): the test_id or a str
    Kwargs:
        wait_until_not_visible (bool)
        highlight (bool)
    Returns:
        bool: True is the assertion succeed; False otherwise.
    """
    self.info_log(
        "Assert not visible selector(%s) testid(%s)" % (selector, testid)
    )
    highlight = kwargs.get(
        'highlight',
        BROME_CONFIG['highlight']['highlight_on_assertion_failure']
    )
    self.debug_log("effective highlight: %s" % highlight)
    wait_until_not_visible = kwargs.get(
        'wait_until_not_visible',
        BROME_CONFIG['proxy_driver']['wait_until_not_visible_before_assert_not_visible']  # noqa
    )
    self.debug_log(
        "effective wait_until_not_visible: %s" % wait_until_not_visible
    )
    if wait_until_not_visible:
        self.wait_until_not_visible(selector, raise_exception=False)
    element = self.find(
        selector,
        raise_exception=False,
        wait_until_visible=False,
        wait_until_present=False
    )
    # Short-circuit: only probe visibility when an element was found.
    still_visible = bool(element) and element.is_displayed(raise_exception=False)
    if not still_visible:
        # Element absent or hidden: the assertion holds.
        if testid is not None:
            self.create_test_result(testid, True)
        return True
    # Assertion failed: gather geometry for the report / video overlay.
    data = self.execute_script(
        "return arguments[0].getBoundingClientRect();",
        element._element
    )
    if highlight:
        element.highlight(
            style=BROME_CONFIG['highlight']['style_on_assertion_failure']  # noqa
        )
    if testid is not None:
        self.create_test_result(testid, False, extra_data={
            'bounding_client_rect': data,
            'video_x_offset': self.browser_config.get('video_x_offset', 0),  # noqa
            'video_y_offset': self.browser_config.get('video_y_offset', 0)  # noqa
        })
    return False
constant[Assert that the element is not visible in the dom
Args:
selector (str): the selector used to find the element
test_id (str): the test_id or a str
Kwargs:
wait_until_not_visible (bool)
highlight (bool)
Returns:
bool: True is the assertion succeed; False otherwise.
]
call[name[self].info_log, parameter[binary_operation[constant[Assert not visible selector(%s) testid(%s)] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da20c7c80d0>, <ast.Name object at 0x7da20c7c9000>]]]]]
variable[highlight] assign[=] call[name[kwargs].get, parameter[constant[highlight], call[call[name[BROME_CONFIG]][constant[highlight]]][constant[highlight_on_assertion_failure]]]]
call[name[self].debug_log, parameter[binary_operation[constant[effective highlight: %s] <ast.Mod object at 0x7da2590d6920> name[highlight]]]]
variable[wait_until_not_visible] assign[=] call[name[kwargs].get, parameter[constant[wait_until_not_visible], call[call[name[BROME_CONFIG]][constant[proxy_driver]]][constant[wait_until_not_visible_before_assert_not_visible]]]]
call[name[self].debug_log, parameter[binary_operation[constant[effective wait_until_not_visible: %s] <ast.Mod object at 0x7da2590d6920> name[wait_until_not_visible]]]]
if name[wait_until_not_visible] begin[:]
call[name[self].wait_until_not_visible, parameter[name[selector]]]
variable[element] assign[=] call[name[self].find, parameter[name[selector]]]
if <ast.BoolOp object at 0x7da20c6e51e0> begin[:]
variable[data] assign[=] call[name[self].execute_script, parameter[constant[return arguments[0].getBoundingClientRect();], name[element]._element]]
if name[highlight] begin[:]
call[name[element].highlight, parameter[]]
if compare[name[testid] is_not constant[None]] begin[:]
call[name[self].create_test_result, parameter[name[testid], constant[False]]]
return[constant[False]] | keyword[def] identifier[assert_not_visible] ( identifier[self] , identifier[selector] , identifier[testid] = keyword[None] ,** identifier[kwargs] ):
literal[string]
identifier[self] . identifier[info_log] (
literal[string] %( identifier[selector] , identifier[testid] )
)
identifier[highlight] = identifier[kwargs] . identifier[get] (
literal[string] ,
identifier[BROME_CONFIG] [ literal[string] ][ literal[string] ]
)
identifier[self] . identifier[debug_log] ( literal[string] % identifier[highlight] )
identifier[wait_until_not_visible] = identifier[kwargs] . identifier[get] (
literal[string] ,
identifier[BROME_CONFIG] [ literal[string] ][ literal[string] ]
)
identifier[self] . identifier[debug_log] (
literal[string] % identifier[wait_until_not_visible]
)
keyword[if] identifier[wait_until_not_visible] :
identifier[self] . identifier[wait_until_not_visible] ( identifier[selector] , identifier[raise_exception] = keyword[False] )
identifier[element] = identifier[self] . identifier[find] (
identifier[selector] ,
identifier[raise_exception] = keyword[False] ,
identifier[wait_until_visible] = keyword[False] ,
identifier[wait_until_present] = keyword[False]
)
keyword[if] identifier[element] keyword[and] identifier[element] . identifier[is_displayed] ( identifier[raise_exception] = keyword[False] ):
identifier[data] = identifier[self] . identifier[execute_script] (
literal[string] ,
identifier[element] . identifier[_element]
)
keyword[if] identifier[highlight] :
identifier[element] . identifier[highlight] (
identifier[style] = identifier[BROME_CONFIG] [ literal[string] ][ literal[string] ]
)
keyword[if] identifier[testid] keyword[is] keyword[not] keyword[None] :
identifier[self] . identifier[create_test_result] ( identifier[testid] , keyword[False] , identifier[extra_data] ={
literal[string] : identifier[data] ,
literal[string] : identifier[self] . identifier[browser_config] . identifier[get] ( literal[string] , literal[int] ),
literal[string] : identifier[self] . identifier[browser_config] . identifier[get] ( literal[string] , literal[int] )
})
keyword[return] keyword[False]
keyword[else] :
keyword[if] identifier[testid] keyword[is] keyword[not] keyword[None] :
identifier[self] . identifier[create_test_result] ( identifier[testid] , keyword[True] )
keyword[return] keyword[True] | def assert_not_visible(self, selector, testid=None, **kwargs):
"""Assert that the element is not visible in the dom
Args:
selector (str): the selector used to find the element
test_id (str): the test_id or a str
Kwargs:
wait_until_not_visible (bool)
highlight (bool)
Returns:
bool: True is the assertion succeed; False otherwise.
"""
self.info_log('Assert not visible selector(%s) testid(%s)' % (selector, testid))
highlight = kwargs.get('highlight', BROME_CONFIG['highlight']['highlight_on_assertion_failure'])
self.debug_log('effective highlight: %s' % highlight) # noqa
wait_until_not_visible = kwargs.get('wait_until_not_visible', BROME_CONFIG['proxy_driver']['wait_until_not_visible_before_assert_not_visible'])
self.debug_log('effective wait_until_not_visible: %s' % wait_until_not_visible)
if wait_until_not_visible:
self.wait_until_not_visible(selector, raise_exception=False) # depends on [control=['if'], data=[]]
element = self.find(selector, raise_exception=False, wait_until_visible=False, wait_until_present=False)
if element and element.is_displayed(raise_exception=False):
data = self.execute_script('return arguments[0].getBoundingClientRect();', element._element)
if highlight: # noqa
element.highlight(style=BROME_CONFIG['highlight']['style_on_assertion_failure']) # depends on [control=['if'], data=[]]
if testid is not None: # noqa
# noqa
self.create_test_result(testid, False, extra_data={'bounding_client_rect': data, 'video_x_offset': self.browser_config.get('video_x_offset', 0), 'video_y_offset': self.browser_config.get('video_y_offset', 0)}) # depends on [control=['if'], data=['testid']]
return False # depends on [control=['if'], data=[]]
else:
if testid is not None:
self.create_test_result(testid, True) # depends on [control=['if'], data=['testid']]
return True |
def bus_line_names(self):
    """Append bus injection and line flow names to `varname`"""
    # Nothing to name unless flow computation is enabled.
    if not self.system.tds.config.compute_flows:
        return
    system = self.system
    system.Bus._varname_inj()
    system.Line._varname_flow()
    system.Area._varname_inter()
constant[Append bus injection and line flow names to `varname`]
if name[self].system.tds.config.compute_flows begin[:]
call[name[self].system.Bus._varname_inj, parameter[]]
call[name[self].system.Line._varname_flow, parameter[]]
call[name[self].system.Area._varname_inter, parameter[]] | keyword[def] identifier[bus_line_names] ( identifier[self] ):
literal[string]
keyword[if] identifier[self] . identifier[system] . identifier[tds] . identifier[config] . identifier[compute_flows] :
identifier[self] . identifier[system] . identifier[Bus] . identifier[_varname_inj] ()
identifier[self] . identifier[system] . identifier[Line] . identifier[_varname_flow] ()
identifier[self] . identifier[system] . identifier[Area] . identifier[_varname_inter] () | def bus_line_names(self):
"""Append bus injection and line flow names to `varname`"""
if self.system.tds.config.compute_flows:
self.system.Bus._varname_inj()
self.system.Line._varname_flow()
self.system.Area._varname_inter() # depends on [control=['if'], data=[]] |
def check_config(filename):
    """Check configuration file of TonicDNS CLI.

    Argument:
        filename: config file name (default is ~/.tdclirc)

    Returns:
        tuple of (server, username, password, auto_update_soa); each item
        is the configured string, or False when its section or option is
        missing from the file.
    """
    # SafeConfigParser was deprecated and removed in Python 3.12;
    # ConfigParser is the drop-in replacement with identical behavior here.
    conf = configparser.ConfigParser(allow_no_value=False)
    conf.read(filename)

    def _option(section, option):
        """Return the option value, or False when section/option is absent."""
        try:
            return conf.get(section, option)
        except (configparser.NoSectionError, configparser.NoOptionError):
            return False

    server = _option('global', 'server')
    username = _option('auth', 'username')
    password = _option('auth', 'password')
    auto_update_soa = _option('global', 'soa_update')
    return server, username, password, auto_update_soa
constant[Check configuration file of TonicDNS CLI.
Argument:
filename: config file name (default is ~/.tdclirc)
]
variable[conf] assign[=] call[name[configparser].SafeConfigParser, parameter[]]
call[name[conf].read, parameter[name[filename]]]
<ast.Try object at 0x7da1b143ff40>
<ast.Try object at 0x7da1b143fa90>
<ast.Try object at 0x7da1b143ea10>
<ast.Try object at 0x7da1b143f2b0>
return[tuple[[<ast.Name object at 0x7da1b14c6170>, <ast.Name object at 0x7da1b14c5a50>, <ast.Name object at 0x7da1b14c7b80>, <ast.Name object at 0x7da1b14c59f0>]]] | keyword[def] identifier[check_config] ( identifier[filename] ):
literal[string]
identifier[conf] = identifier[configparser] . identifier[SafeConfigParser] ( identifier[allow_no_value] = keyword[False] )
identifier[conf] . identifier[read] ( identifier[filename] )
keyword[try] :
identifier[server] = identifier[conf] . identifier[get] ( literal[string] , literal[string] )
keyword[except] identifier[configparser] . identifier[NoSectionError] :
identifier[server] = keyword[False]
keyword[except] identifier[configparser] . identifier[NoOptionError] :
identifier[server] = keyword[False]
keyword[try] :
identifier[username] = identifier[conf] . identifier[get] ( literal[string] , literal[string] )
keyword[except] identifier[configparser] . identifier[NoSectionError] :
identifier[username] = keyword[False]
keyword[except] identifier[configparser] . identifier[NoOptionError] :
identifier[username] = keyword[False]
keyword[try] :
identifier[password] = identifier[conf] . identifier[get] ( literal[string] , literal[string] )
keyword[except] identifier[configparser] . identifier[NoSectionError] :
identifier[password] = keyword[False]
keyword[except] identifier[configparser] . identifier[NoOptionError] :
identifier[password] = keyword[False]
keyword[try] :
identifier[auto_update_soa] = identifier[conf] . identifier[get] ( literal[string] , literal[string] )
keyword[except] identifier[configparser] . identifier[NoSectionError] :
identifier[auto_update_soa] = keyword[False]
keyword[except] identifier[configparser] . identifier[NoOptionError] :
identifier[auto_update_soa] = keyword[False]
keyword[return] identifier[server] , identifier[username] , identifier[password] , identifier[auto_update_soa] | def check_config(filename):
"""Check configuration file of TonicDNS CLI.
Argument:
filename: config file name (default is ~/.tdclirc)
"""
conf = configparser.SafeConfigParser(allow_no_value=False)
conf.read(filename)
try:
server = conf.get('global', 'server') # depends on [control=['try'], data=[]]
except configparser.NoSectionError:
server = False # depends on [control=['except'], data=[]]
except configparser.NoOptionError:
server = False # depends on [control=['except'], data=[]]
try:
username = conf.get('auth', 'username') # depends on [control=['try'], data=[]]
except configparser.NoSectionError:
username = False # depends on [control=['except'], data=[]]
except configparser.NoOptionError:
username = False # depends on [control=['except'], data=[]]
try:
password = conf.get('auth', 'password') # depends on [control=['try'], data=[]]
except configparser.NoSectionError:
password = False # depends on [control=['except'], data=[]]
except configparser.NoOptionError:
password = False # depends on [control=['except'], data=[]]
try:
auto_update_soa = conf.get('global', 'soa_update') # depends on [control=['try'], data=[]]
except configparser.NoSectionError:
auto_update_soa = False # depends on [control=['except'], data=[]]
except configparser.NoOptionError:
auto_update_soa = False # depends on [control=['except'], data=[]]
return (server, username, password, auto_update_soa) |
def configure(self, options, conf):
    """Configure plugin.
    """
    # Respect an earlier decision that configuration is not allowed.
    if not self.can_configure:
        return
    self.conf = conf
    if getattr(options, 'noDeprecated', False):
        self.enabled = False
constant[Configure plugin.
]
if <ast.UnaryOp object at 0x7da207f9b010> begin[:]
return[None]
name[self].conf assign[=] name[conf]
variable[disable] assign[=] call[name[getattr], parameter[name[options], constant[noDeprecated], constant[False]]]
if name[disable] begin[:]
name[self].enabled assign[=] constant[False] | keyword[def] identifier[configure] ( identifier[self] , identifier[options] , identifier[conf] ):
literal[string]
keyword[if] keyword[not] identifier[self] . identifier[can_configure] :
keyword[return]
identifier[self] . identifier[conf] = identifier[conf]
identifier[disable] = identifier[getattr] ( identifier[options] , literal[string] , keyword[False] )
keyword[if] identifier[disable] :
identifier[self] . identifier[enabled] = keyword[False] | def configure(self, options, conf):
"""Configure plugin.
"""
if not self.can_configure:
return # depends on [control=['if'], data=[]]
self.conf = conf
disable = getattr(options, 'noDeprecated', False)
if disable:
self.enabled = False # depends on [control=['if'], data=[]] |
def get_form(self, request, obj=None, **kwargs):
    """
    Provides a flexible way to get the right form according to the context
    For the add view it checks whether the app_config is set; if not, a special form
    to select the namespace is shown, which is reloaded after namespace selection.
    If only one namespace exists, the current is selected and the normal form
    is used.
    """
    form = super(ModelAppHookConfig, self).get_form(request, obj, **kwargs)
    if self.app_config_attribute not in form.base_fields:
        return form
    default_config = self._app_config_select(request, obj)
    if default_config:
        # Preselect the namespace and mirror the choice into GET so
        # dependent form machinery sees the selection.
        form.base_fields[self.app_config_attribute].initial = default_config
        query = copy.copy(request.GET)
        query[self.app_config_attribute] = default_config.pk
        request.GET = query
    elif default_config is None and request.method == 'GET':
        # No namespace chosen yet: show a reduced form that only offers
        # the app-config selector; the full form loads after selection.
        class NamespaceSelectForm(form):
            class Meta(form.Meta):
                fields = (self.app_config_attribute,)
        form = NamespaceSelectForm
    return self._set_config_defaults(request, form, obj)
constant[
Provides a flexible way to get the right form according to the context
For the add view it checks whether the app_config is set; if not, a special form
to select the namespace is shown, which is reloaded after namespace selection.
If only one namespace exists, the current is selected and the normal form
is used.
]
variable[form] assign[=] call[call[name[super], parameter[name[ModelAppHookConfig], name[self]]].get_form, parameter[name[request], name[obj]]]
if compare[name[self].app_config_attribute <ast.NotIn object at 0x7da2590d7190> name[form].base_fields] begin[:]
return[name[form]]
variable[app_config_default] assign[=] call[name[self]._app_config_select, parameter[name[request], name[obj]]]
if name[app_config_default] begin[:]
call[name[form].base_fields][name[self].app_config_attribute].initial assign[=] name[app_config_default]
variable[get] assign[=] call[name[copy].copy, parameter[name[request].GET]]
call[name[get]][name[self].app_config_attribute] assign[=] name[app_config_default].pk
name[request].GET assign[=] name[get]
variable[form] assign[=] call[name[self]._set_config_defaults, parameter[name[request], name[form], name[obj]]]
return[name[form]] | keyword[def] identifier[get_form] ( identifier[self] , identifier[request] , identifier[obj] = keyword[None] ,** identifier[kwargs] ):
literal[string]
identifier[form] = identifier[super] ( identifier[ModelAppHookConfig] , identifier[self] ). identifier[get_form] ( identifier[request] , identifier[obj] ,** identifier[kwargs] )
keyword[if] identifier[self] . identifier[app_config_attribute] keyword[not] keyword[in] identifier[form] . identifier[base_fields] :
keyword[return] identifier[form]
identifier[app_config_default] = identifier[self] . identifier[_app_config_select] ( identifier[request] , identifier[obj] )
keyword[if] identifier[app_config_default] :
identifier[form] . identifier[base_fields] [ identifier[self] . identifier[app_config_attribute] ]. identifier[initial] = identifier[app_config_default]
identifier[get] = identifier[copy] . identifier[copy] ( identifier[request] . identifier[GET] )
identifier[get] [ identifier[self] . identifier[app_config_attribute] ]= identifier[app_config_default] . identifier[pk]
identifier[request] . identifier[GET] = identifier[get]
keyword[elif] identifier[app_config_default] keyword[is] keyword[None] keyword[and] identifier[request] . identifier[method] == literal[string] :
keyword[class] identifier[InitialForm] ( identifier[form] ):
keyword[class] identifier[Meta] ( identifier[form] . identifier[Meta] ):
identifier[fields] =( identifier[self] . identifier[app_config_attribute] ,)
identifier[form] = identifier[InitialForm]
identifier[form] = identifier[self] . identifier[_set_config_defaults] ( identifier[request] , identifier[form] , identifier[obj] )
keyword[return] identifier[form] | def get_form(self, request, obj=None, **kwargs):
"""
Provides a flexible way to get the right form according to the context
For the add view it checks whether the app_config is set; if not, a special form
to select the namespace is shown, which is reloaded after namespace selection.
If only one namespace exists, the current is selected and the normal form
is used.
"""
form = super(ModelAppHookConfig, self).get_form(request, obj, **kwargs)
if self.app_config_attribute not in form.base_fields:
return form # depends on [control=['if'], data=[]]
app_config_default = self._app_config_select(request, obj)
if app_config_default:
form.base_fields[self.app_config_attribute].initial = app_config_default
get = copy.copy(request.GET)
get[self.app_config_attribute] = app_config_default.pk
request.GET = get # depends on [control=['if'], data=[]]
elif app_config_default is None and request.method == 'GET':
class InitialForm(form):
class Meta(form.Meta):
fields = (self.app_config_attribute,)
form = InitialForm # depends on [control=['if'], data=[]]
form = self._set_config_defaults(request, form, obj)
return form |
def write_xbm(matrix, version, out, scale=1, border=None, name='img'):
    """\
    Writes the matrix as an `XBM <https://en.wikipedia.org/wiki/X_BitMap>`_ image.

    :param matrix: Matrix holding the modules to serialize.
    :param int version: The (Micro) QR code version.
    :param out: Filename or a file-like object accepting text data.
    :param scale: Size of a single module (default: 1, i.e. 1 x 1 in the
            provided unit per module).
    :param int border: Size of the quiet zone. ``None`` (default) selects the
            recommended border (``4`` for QR codes, ``2`` for Micro QR codes).
    :param name: Prefix for the generated C variable names (default: "img"):
            ```#define <prefix>_width``` ```static unsigned char <prefix>_bits[]```
    """
    rows = matrix_iter(matrix, version, scale, border)
    border = get_border(version, border)
    width, height = get_symbol_size(version, scale=scale, border=border)
    with writable(out, 'wt') as f:
        emit = f.write
        emit('#define {0}_width {1}\n'
             '#define {0}_height {2}\n'
             'static unsigned char {0}_bits[] = {{\n'.format(name, width, height))
        for row_no, row in enumerate(rows, start=1):
            octets = []
            # Pack 8 modules per byte. XBM stores bits little endian, so the
            # first module of each group becomes the least significant bit.
            for group in zip_longest(*[iter(row)] * 8, fillvalue=0x0):
                value = 0
                for bit in reversed(group):
                    value = (value << 1) + bit
                octets.append('0x{0:02x}'.format(value))
            emit(' ')
            emit(', '.join(octets))
            emit(',\n' if row_no < height else '\n')
        emit('};\n')
constant[ Serializes the matrix as `XBM <https://en.wikipedia.org/wiki/X_BitMap>`_ image.
:param matrix: The matrix to serialize.
:param int version: The (Micro) QR code version
:param out: Filename or a file-like object supporting to write text data.
:param scale: Indicates the size of a single module (default: 1 which
corresponds to 1 x 1 in the provided unit per module).
:param int border: Integer indicating the size of the quiet zone.
If set to ``None`` (default), the recommended border size
will be used (``4`` for QR Codes, ``2`` for a Micro QR Codes).
:param name: Prefix for the variable names. Default: "img".
The prefix is used to construct the variable names:
```#define <prefix>_width``` ```static unsigned char <prefix>_bits[]```
]
variable[row_iter] assign[=] call[name[matrix_iter], parameter[name[matrix], name[version], name[scale], name[border]]]
variable[border] assign[=] call[name[get_border], parameter[name[version], name[border]]]
<ast.Tuple object at 0x7da1b0c51e70> assign[=] call[name[get_symbol_size], parameter[name[version]]]
with call[name[writable], parameter[name[out], constant[wt]]] begin[:]
variable[write] assign[=] name[f].write
call[name[write], parameter[call[constant[#define {0}_width {1}
#define {0}_height {2}
static unsigned char {0}_bits[] = {{
].format, parameter[name[name], name[width], name[height]]]]]
for taget[tuple[[<ast.Name object at 0x7da1b0c53400>, <ast.Name object at 0x7da1b0c51d20>]]] in starred[call[name[enumerate], parameter[name[row_iter]]]] begin[:]
variable[iter_] assign[=] call[name[zip_longest], parameter[<ast.Starred object at 0x7da1b0c50eb0>]]
variable[bits] assign[=] <ast.ListComp object at 0x7da1b0c53940>
call[name[write], parameter[constant[ ]]]
call[name[write], parameter[call[constant[, ].join, parameter[name[bits]]]]]
call[name[write], parameter[<ast.IfExp object at 0x7da1b0c53eb0>]]
call[name[write], parameter[constant[};
]]] | keyword[def] identifier[write_xbm] ( identifier[matrix] , identifier[version] , identifier[out] , identifier[scale] = literal[int] , identifier[border] = keyword[None] , identifier[name] = literal[string] ):
literal[string]
identifier[row_iter] = identifier[matrix_iter] ( identifier[matrix] , identifier[version] , identifier[scale] , identifier[border] )
identifier[border] = identifier[get_border] ( identifier[version] , identifier[border] )
identifier[width] , identifier[height] = identifier[get_symbol_size] ( identifier[version] , identifier[scale] = identifier[scale] , identifier[border] = identifier[border] )
keyword[with] identifier[writable] ( identifier[out] , literal[string] ) keyword[as] identifier[f] :
identifier[write] = identifier[f] . identifier[write]
identifier[write] ( literal[string]
literal[string]
literal[string] . identifier[format] ( identifier[name] , identifier[width] , identifier[height] ))
keyword[for] identifier[i] , identifier[row] keyword[in] identifier[enumerate] ( identifier[row_iter] , identifier[start] = literal[int] ):
identifier[iter_] = identifier[zip_longest] (*[ identifier[iter] ( identifier[row] )]* literal[int] , identifier[fillvalue] = literal[int] )
identifier[bits] =[ literal[string] . identifier[format] ( identifier[reduce] ( keyword[lambda] identifier[x] , identifier[y] :( identifier[x] << literal[int] )+ identifier[y] , identifier[bits] [::- literal[int] ])) keyword[for] identifier[bits] keyword[in] identifier[iter_] ]
identifier[write] ( literal[string] )
identifier[write] ( literal[string] . identifier[join] ( identifier[bits] ))
identifier[write] ( literal[string] keyword[if] identifier[i] < identifier[height] keyword[else] literal[string] )
identifier[write] ( literal[string] ) | def write_xbm(matrix, version, out, scale=1, border=None, name='img'):
""" Serializes the matrix as `XBM <https://en.wikipedia.org/wiki/X_BitMap>`_ image.
:param matrix: The matrix to serialize.
:param int version: The (Micro) QR code version
:param out: Filename or a file-like object supporting to write text data.
:param scale: Indicates the size of a single module (default: 1 which
corresponds to 1 x 1 in the provided unit per module).
:param int border: Integer indicating the size of the quiet zone.
If set to ``None`` (default), the recommended border size
will be used (``4`` for QR Codes, ``2`` for a Micro QR Codes).
:param name: Prefix for the variable names. Default: "img".
The prefix is used to construct the variable names:
```#define <prefix>_width``` ```static unsigned char <prefix>_bits[]```
"""
row_iter = matrix_iter(matrix, version, scale, border)
border = get_border(version, border)
(width, height) = get_symbol_size(version, scale=scale, border=border)
with writable(out, 'wt') as f:
write = f.write
write('#define {0}_width {1}\n#define {0}_height {2}\nstatic unsigned char {0}_bits[] = {{\n'.format(name, width, height))
for (i, row) in enumerate(row_iter, start=1):
iter_ = zip_longest(*[iter(row)] * 8, fillvalue=0)
# Reverse bits since XBM uses little endian
bits = ['0x{0:02x}'.format(reduce(lambda x, y: (x << 1) + y, bits[::-1])) for bits in iter_]
write(' ')
write(', '.join(bits))
write(',\n' if i < height else '\n') # depends on [control=['for'], data=[]]
write('};\n') # depends on [control=['with'], data=['f']] |
def serve(handler, sock_path, timeout=UNIX_SOCKET_TIMEOUT):
    """
    Start the ssh-agent server on a UNIX-domain socket.

    Yields a dict of environment variables (``SSH_AUTH_SOCK`` /
    ``SSH_AGENT_PID``) that point ssh clients at this agent, and stops the
    server thread when the caller's context ends.

    If no connection is made during the specified timeout,
    retry until the context is over.
    """
    # 'ssh -V' prints its version banner to stderr; capture it for debugging.
    ssh_version = subprocess.check_output(['ssh', '-V'],
                                          stderr=subprocess.STDOUT)
    log.debug('local SSH version: %r', ssh_version)
    environ = {'SSH_AUTH_SOCK': sock_path, 'SSH_AGENT_PID': str(os.getpid())}
    # Shared lock handed to every connection handler via the `mutex` argument,
    # so concurrent connections do not access the device simultaneously.
    device_mutex = threading.Lock()
    with server.unix_domain_socket_server(sock_path) as sock:
        # Bound blocking socket operations to `timeout` seconds (see docstring).
        sock.settimeout(timeout)
        quit_event = threading.Event()
        handle_conn = functools.partial(server.handle_connection,
                                        handler=handler,
                                        mutex=device_mutex)
        kwargs = dict(sock=sock,
                      handle_conn=handle_conn,
                      quit_event=quit_event)
        # NOTE(review): this generator yields exactly once -- presumably it is
        # wrapped with contextlib.contextmanager at the decorator site; confirm.
        with server.spawn(server.server_thread, kwargs):
            try:
                yield environ
            finally:
                # Signal the server thread to exit once the caller is done.
                log.debug('closing server')
                quit_event.set()
constant[
Start the ssh-agent server on a UNIX-domain socket.
If no connection is made during the specified timeout,
retry until the context is over.
]
variable[ssh_version] assign[=] call[name[subprocess].check_output, parameter[list[[<ast.Constant object at 0x7da1b1212e90>, <ast.Constant object at 0x7da1b1212b30>]]]]
call[name[log].debug, parameter[constant[local SSH version: %r], name[ssh_version]]]
variable[environ] assign[=] dictionary[[<ast.Constant object at 0x7da1b1212bf0>, <ast.Constant object at 0x7da1b1213580>], [<ast.Name object at 0x7da1b1213760>, <ast.Call object at 0x7da1b12126b0>]]
variable[device_mutex] assign[=] call[name[threading].Lock, parameter[]]
with call[name[server].unix_domain_socket_server, parameter[name[sock_path]]] begin[:]
call[name[sock].settimeout, parameter[name[timeout]]]
variable[quit_event] assign[=] call[name[threading].Event, parameter[]]
variable[handle_conn] assign[=] call[name[functools].partial, parameter[name[server].handle_connection]]
variable[kwargs] assign[=] call[name[dict], parameter[]]
with call[name[server].spawn, parameter[name[server].server_thread, name[kwargs]]] begin[:]
<ast.Try object at 0x7da1b12413c0> | keyword[def] identifier[serve] ( identifier[handler] , identifier[sock_path] , identifier[timeout] = identifier[UNIX_SOCKET_TIMEOUT] ):
literal[string]
identifier[ssh_version] = identifier[subprocess] . identifier[check_output] ([ literal[string] , literal[string] ],
identifier[stderr] = identifier[subprocess] . identifier[STDOUT] )
identifier[log] . identifier[debug] ( literal[string] , identifier[ssh_version] )
identifier[environ] ={ literal[string] : identifier[sock_path] , literal[string] : identifier[str] ( identifier[os] . identifier[getpid] ())}
identifier[device_mutex] = identifier[threading] . identifier[Lock] ()
keyword[with] identifier[server] . identifier[unix_domain_socket_server] ( identifier[sock_path] ) keyword[as] identifier[sock] :
identifier[sock] . identifier[settimeout] ( identifier[timeout] )
identifier[quit_event] = identifier[threading] . identifier[Event] ()
identifier[handle_conn] = identifier[functools] . identifier[partial] ( identifier[server] . identifier[handle_connection] ,
identifier[handler] = identifier[handler] ,
identifier[mutex] = identifier[device_mutex] )
identifier[kwargs] = identifier[dict] ( identifier[sock] = identifier[sock] ,
identifier[handle_conn] = identifier[handle_conn] ,
identifier[quit_event] = identifier[quit_event] )
keyword[with] identifier[server] . identifier[spawn] ( identifier[server] . identifier[server_thread] , identifier[kwargs] ):
keyword[try] :
keyword[yield] identifier[environ]
keyword[finally] :
identifier[log] . identifier[debug] ( literal[string] )
identifier[quit_event] . identifier[set] () | def serve(handler, sock_path, timeout=UNIX_SOCKET_TIMEOUT):
"""
Start the ssh-agent server on a UNIX-domain socket.
If no connection is made during the specified timeout,
retry until the context is over.
"""
ssh_version = subprocess.check_output(['ssh', '-V'], stderr=subprocess.STDOUT)
log.debug('local SSH version: %r', ssh_version)
environ = {'SSH_AUTH_SOCK': sock_path, 'SSH_AGENT_PID': str(os.getpid())}
device_mutex = threading.Lock()
with server.unix_domain_socket_server(sock_path) as sock:
sock.settimeout(timeout)
quit_event = threading.Event()
handle_conn = functools.partial(server.handle_connection, handler=handler, mutex=device_mutex)
kwargs = dict(sock=sock, handle_conn=handle_conn, quit_event=quit_event)
with server.spawn(server.server_thread, kwargs):
try:
yield environ # depends on [control=['try'], data=[]]
finally:
log.debug('closing server')
quit_event.set() # depends on [control=['with'], data=[]] # depends on [control=['with'], data=['sock']] |
def score_url(self, url):
        """
        Score *url* so that tuple comparison can pick the preferred URL
        for a given project release (lower tuples sort first).
        """
        parts = urlparse(url)
        is_not_https = parts.scheme != 'https'
        is_legacy_host = 'pypi.python.org' in parts.netloc
        return is_not_https, is_legacy_host, posixpath.basename(parts.path)
constant[
Give an url a score which can be used to choose preferred URLs
for a given project release.
]
variable[t] assign[=] call[name[urlparse], parameter[name[url]]]
return[tuple[[<ast.Compare object at 0x7da1b17efc40>, <ast.Compare object at 0x7da1b17ef190>, <ast.Call object at 0x7da1b17ee1a0>]]] | keyword[def] identifier[score_url] ( identifier[self] , identifier[url] ):
literal[string]
identifier[t] = identifier[urlparse] ( identifier[url] )
keyword[return] ( identifier[t] . identifier[scheme] != literal[string] , literal[string] keyword[in] identifier[t] . identifier[netloc] ,
identifier[posixpath] . identifier[basename] ( identifier[t] . identifier[path] )) | def score_url(self, url):
"""
Give an url a score which can be used to choose preferred URLs
for a given project release.
"""
t = urlparse(url)
return (t.scheme != 'https', 'pypi.python.org' in t.netloc, posixpath.basename(t.path)) |
def response_add(self, request, obj, post_url_continue=None, **kwargs):
        """Return the post-add response, redirected into the tree view.

        Tree items are administered within the tree itself, so the default
        'continue editing' target is replaced with the item's own page.
        """
        if post_url_continue is None:
            post_url_continue = '../item_%s/' % obj.pk
        response = super(TreeItemAdmin, self).response_add(request, obj, post_url_continue)
        return self._redirect(request, response)
constant[Redirects to the appropriate items' 'continue' page on item add.
As we administer tree items within tree itself, we
should make some changes to redirection process.
]
if compare[name[post_url_continue] is constant[None]] begin[:]
variable[post_url_continue] assign[=] binary_operation[constant[../item_%s/] <ast.Mod object at 0x7da2590d6920> name[obj].pk]
return[call[name[self]._redirect, parameter[name[request], call[call[name[super], parameter[name[TreeItemAdmin], name[self]]].response_add, parameter[name[request], name[obj], name[post_url_continue]]]]]] | keyword[def] identifier[response_add] ( identifier[self] , identifier[request] , identifier[obj] , identifier[post_url_continue] = keyword[None] ,** identifier[kwargs] ):
literal[string]
keyword[if] identifier[post_url_continue] keyword[is] keyword[None] :
identifier[post_url_continue] = literal[string] % identifier[obj] . identifier[pk]
keyword[return] identifier[self] . identifier[_redirect] ( identifier[request] , identifier[super] ( identifier[TreeItemAdmin] , identifier[self] ). identifier[response_add] ( identifier[request] , identifier[obj] , identifier[post_url_continue] )) | def response_add(self, request, obj, post_url_continue=None, **kwargs):
"""Redirects to the appropriate items' 'continue' page on item add.
As we administer tree items within tree itself, we
should make some changes to redirection process.
"""
if post_url_continue is None:
post_url_continue = '../item_%s/' % obj.pk # depends on [control=['if'], data=['post_url_continue']]
return self._redirect(request, super(TreeItemAdmin, self).response_add(request, obj, post_url_continue)) |
def wait_until_final(self, poll_interval=1, timeout=60):
        """It will poll the URL to grab the latest status resource in a given
        timeout and time interval.

        Args:
            poll_interval (int): how often to poll the status service,
                in seconds.
            timeout (int): how long to poll the URL until giving up, in
                seconds. Use <= 0 to wait forever
        """
        # Use a monotonic clock for the deadline: unlike time.time(), it is
        # immune to NTP updates and manual system clock changes, which could
        # otherwise stretch or cut short the timeout.
        start_time = time.monotonic()
        elapsed = 0
        # Keep refreshing until the resource reports completion or the
        # deadline passes.
        while (self.status != "complete" and
               (timeout <= 0 or elapsed < timeout)):
            time.sleep(poll_interval)
            self.refresh()
            elapsed = time.monotonic() - start_time
constant[It will poll the URL to grab the latest status resource in a given
timeout and time interval.
Args:
poll_interval (int): how often to poll the status service.
timeout (int): how long to poll the URL until giving up. Use <= 0
to wait forever
]
variable[start_time] assign[=] call[name[time].time, parameter[]]
variable[elapsed] assign[=] constant[0]
while <ast.BoolOp object at 0x7da1aff6dcf0> begin[:]
call[name[time].sleep, parameter[name[poll_interval]]]
call[name[self].refresh, parameter[]]
variable[elapsed] assign[=] binary_operation[call[name[time].time, parameter[]] - name[start_time]] | keyword[def] identifier[wait_until_final] ( identifier[self] , identifier[poll_interval] = literal[int] , identifier[timeout] = literal[int] ):
literal[string]
identifier[start_time] = identifier[time] . identifier[time] ()
identifier[elapsed] = literal[int]
keyword[while] ( identifier[self] . identifier[status] != literal[string] keyword[and]
( identifier[timeout] <= literal[int] keyword[or] identifier[elapsed] < identifier[timeout] )):
identifier[time] . identifier[sleep] ( identifier[poll_interval] )
identifier[self] . identifier[refresh] ()
identifier[elapsed] = identifier[time] . identifier[time] ()- identifier[start_time] | def wait_until_final(self, poll_interval=1, timeout=60):
"""It will poll the URL to grab the latest status resource in a given
timeout and time interval.
Args:
poll_interval (int): how often to poll the status service.
timeout (int): how long to poll the URL until giving up. Use <= 0
to wait forever
"""
start_time = time.time()
elapsed = 0
while self.status != 'complete' and (timeout <= 0 or elapsed < timeout):
time.sleep(poll_interval)
self.refresh()
elapsed = time.time() - start_time # depends on [control=['while'], data=[]] |
def _ReadFlowResponseCounts(self, request_keys, cursor=None):
    """Reads counts of responses for the given requests."""
    query = """
      SELECT
        flow_requests.client_id, flow_requests.flow_id,
        flow_requests.request_id, COUNT(*)
      FROM flow_responses, flow_requests
      WHERE ({conditions}) AND
            flow_requests.client_id = flow_responses.client_id AND
            flow_requests.flow_id = flow_responses.flow_id AND
            flow_requests.request_id = flow_responses.request_id AND
            flow_requests.needs_processing = FALSE
      GROUP BY
        flow_requests.client_id,
        flow_requests.flow_id,
        flow_requests.request_id
    """
    condition_template = """
      (flow_requests.client_id=%s AND
       flow_requests.flow_id=%s AND
       flow_requests.request_id=%s)"""
    # One placeholder group per requested (client, flow, request) triple.
    args = []
    for client_id, flow_id, request_id in request_keys:
        args.extend((db_utils.ClientIDToInt(client_id),
                     db_utils.FlowIDToInt(flow_id),
                     request_id))
    conditions = " OR ".join([condition_template] * len(request_keys))
    cursor.execute(query.format(conditions=conditions), args)
    # Map each (client_id, flow_id, request_id) triple back to its count.
    response_counts = {}
    for client_id_int, flow_id_int, request_id, count in cursor.fetchall():
        key = (db_utils.IntToClientID(client_id_int),
               db_utils.IntToFlowID(flow_id_int),
               request_id)
        response_counts[key] = count
    return response_counts
constant[Reads counts of responses for the given requests.]
variable[query] assign[=] constant[
SELECT
flow_requests.client_id, flow_requests.flow_id,
flow_requests.request_id, COUNT(*)
FROM flow_responses, flow_requests
WHERE ({conditions}) AND
flow_requests.client_id = flow_responses.client_id AND
flow_requests.flow_id = flow_responses.flow_id AND
flow_requests.request_id = flow_responses.request_id AND
flow_requests.needs_processing = FALSE
GROUP BY
flow_requests.client_id,
flow_requests.flow_id,
flow_requests.request_id
]
variable[condition_template] assign[=] constant[
(flow_requests.client_id=%s AND
flow_requests.flow_id=%s AND
flow_requests.request_id=%s)]
variable[conditions] assign[=] binary_operation[list[[<ast.Name object at 0x7da1b1c1b550>]] * call[name[len], parameter[name[request_keys]]]]
variable[args] assign[=] list[[]]
for taget[tuple[[<ast.Name object at 0x7da1b1c1bd30>, <ast.Name object at 0x7da1b1c1ada0>, <ast.Name object at 0x7da1b1c18520>]]] in starred[name[request_keys]] begin[:]
call[name[args].append, parameter[call[name[db_utils].ClientIDToInt, parameter[name[client_id]]]]]
call[name[args].append, parameter[call[name[db_utils].FlowIDToInt, parameter[name[flow_id]]]]]
call[name[args].append, parameter[name[request_id]]]
variable[query] assign[=] call[name[query].format, parameter[]]
call[name[cursor].execute, parameter[name[query], name[args]]]
variable[response_counts] assign[=] dictionary[[], []]
for taget[tuple[[<ast.Name object at 0x7da1b1b05ba0>, <ast.Name object at 0x7da1b1b05b10>, <ast.Name object at 0x7da1b1b06b00>, <ast.Name object at 0x7da1b1b06770>]]] in starred[call[name[cursor].fetchall, parameter[]]] begin[:]
variable[request_key] assign[=] tuple[[<ast.Call object at 0x7da1b1b07a30>, <ast.Call object at 0x7da1b1b07430>, <ast.Name object at 0x7da1b1b06fb0>]]
call[name[response_counts]][name[request_key]] assign[=] name[count]
return[name[response_counts]] | keyword[def] identifier[_ReadFlowResponseCounts] ( identifier[self] , identifier[request_keys] , identifier[cursor] = keyword[None] ):
literal[string]
identifier[query] = literal[string]
identifier[condition_template] = literal[string]
identifier[conditions] =[ identifier[condition_template] ]* identifier[len] ( identifier[request_keys] )
identifier[args] =[]
keyword[for] identifier[client_id] , identifier[flow_id] , identifier[request_id] keyword[in] identifier[request_keys] :
identifier[args] . identifier[append] ( identifier[db_utils] . identifier[ClientIDToInt] ( identifier[client_id] ))
identifier[args] . identifier[append] ( identifier[db_utils] . identifier[FlowIDToInt] ( identifier[flow_id] ))
identifier[args] . identifier[append] ( identifier[request_id] )
identifier[query] = identifier[query] . identifier[format] ( identifier[conditions] = literal[string] . identifier[join] ( identifier[conditions] ))
identifier[cursor] . identifier[execute] ( identifier[query] , identifier[args] )
identifier[response_counts] ={}
keyword[for] ( identifier[client_id_int] , identifier[flow_id_int] , identifier[request_id] , identifier[count] ) keyword[in] identifier[cursor] . identifier[fetchall] ():
identifier[request_key] =( identifier[db_utils] . identifier[IntToClientID] ( identifier[client_id_int] ),
identifier[db_utils] . identifier[IntToFlowID] ( identifier[flow_id_int] ), identifier[request_id] )
identifier[response_counts] [ identifier[request_key] ]= identifier[count]
keyword[return] identifier[response_counts] | def _ReadFlowResponseCounts(self, request_keys, cursor=None):
"""Reads counts of responses for the given requests."""
query = '\n SELECT\n flow_requests.client_id, flow_requests.flow_id,\n flow_requests.request_id, COUNT(*)\n FROM flow_responses, flow_requests\n WHERE ({conditions}) AND\n flow_requests.client_id = flow_responses.client_id AND\n flow_requests.flow_id = flow_responses.flow_id AND\n flow_requests.request_id = flow_responses.request_id AND\n flow_requests.needs_processing = FALSE\n GROUP BY\n flow_requests.client_id,\n flow_requests.flow_id,\n flow_requests.request_id\n '
condition_template = '\n (flow_requests.client_id=%s AND\n flow_requests.flow_id=%s AND\n flow_requests.request_id=%s)'
conditions = [condition_template] * len(request_keys)
args = []
for (client_id, flow_id, request_id) in request_keys:
args.append(db_utils.ClientIDToInt(client_id))
args.append(db_utils.FlowIDToInt(flow_id))
args.append(request_id) # depends on [control=['for'], data=[]]
query = query.format(conditions=' OR '.join(conditions))
cursor.execute(query, args)
response_counts = {}
for (client_id_int, flow_id_int, request_id, count) in cursor.fetchall():
request_key = (db_utils.IntToClientID(client_id_int), db_utils.IntToFlowID(flow_id_int), request_id)
response_counts[request_key] = count # depends on [control=['for'], data=[]]
return response_counts |
def get_info(dirs, parallel, resources=None):
    """Return cluster or local filesystem resources gathered ahead of time.
    """
    # An explicit machine description in the resources input wins outright.
    if resources and isinstance(resources, dict) and "machine" in resources:
        minfo = resources["machine"]
        assert "memory" in minfo, "Require memory specification (Gb) in machine resources: %s" % minfo
        assert "cores" in minfo, "Require core specification in machine resources: %s" % minfo
        return minfo
    # Outside a (non-local) ipython run, probe the current machine directly.
    if parallel["type"] not in ["ipython"] or parallel["queue"] == "localrun":
        return _combine_machine_info(machine_info())
    cache_file = _get_cache_file(dirs, parallel)
    if not utils.file_exists(cache_file):
        return {}
    with open(cache_file) as in_handle:
        minfo = yaml.safe_load(in_handle)
    return _combine_machine_info(minfo)
constant[Retrieve cluster or local filesystem resources from pre-retrieved information.
]
if <ast.BoolOp object at 0x7da1b18d9300> begin[:]
variable[minfo] assign[=] call[name[resources]][constant[machine]]
assert[compare[constant[memory] in name[minfo]]]
assert[compare[constant[cores] in name[minfo]]]
return[name[minfo]]
if <ast.BoolOp object at 0x7da1b18d8520> begin[:]
variable[cache_file] assign[=] call[name[_get_cache_file], parameter[name[dirs], name[parallel]]]
if call[name[utils].file_exists, parameter[name[cache_file]]] begin[:]
with call[name[open], parameter[name[cache_file]]] begin[:]
variable[minfo] assign[=] call[name[yaml].safe_load, parameter[name[in_handle]]]
return[call[name[_combine_machine_info], parameter[name[minfo]]]] | keyword[def] identifier[get_info] ( identifier[dirs] , identifier[parallel] , identifier[resources] = keyword[None] ):
literal[string]
keyword[if] identifier[resources] keyword[and] identifier[isinstance] ( identifier[resources] , identifier[dict] ) keyword[and] literal[string] keyword[in] identifier[resources] :
identifier[minfo] = identifier[resources] [ literal[string] ]
keyword[assert] literal[string] keyword[in] identifier[minfo] , literal[string] % identifier[minfo]
keyword[assert] literal[string] keyword[in] identifier[minfo] , literal[string] % identifier[minfo]
keyword[return] identifier[minfo]
keyword[if] identifier[parallel] [ literal[string] ] keyword[in] [ literal[string] ] keyword[and] keyword[not] identifier[parallel] [ literal[string] ]== literal[string] :
identifier[cache_file] = identifier[_get_cache_file] ( identifier[dirs] , identifier[parallel] )
keyword[if] identifier[utils] . identifier[file_exists] ( identifier[cache_file] ):
keyword[with] identifier[open] ( identifier[cache_file] ) keyword[as] identifier[in_handle] :
identifier[minfo] = identifier[yaml] . identifier[safe_load] ( identifier[in_handle] )
keyword[return] identifier[_combine_machine_info] ( identifier[minfo] )
keyword[else] :
keyword[return] {}
keyword[else] :
keyword[return] identifier[_combine_machine_info] ( identifier[machine_info] ()) | def get_info(dirs, parallel, resources=None):
"""Retrieve cluster or local filesystem resources from pre-retrieved information.
"""
# Allow custom specification of cores/memory in resources
if resources and isinstance(resources, dict) and ('machine' in resources):
minfo = resources['machine']
assert 'memory' in minfo, 'Require memory specification (Gb) in machine resources: %s' % minfo
assert 'cores' in minfo, 'Require core specification in machine resources: %s' % minfo
return minfo # depends on [control=['if'], data=[]]
if parallel['type'] in ['ipython'] and (not parallel['queue'] == 'localrun'):
cache_file = _get_cache_file(dirs, parallel)
if utils.file_exists(cache_file):
with open(cache_file) as in_handle:
minfo = yaml.safe_load(in_handle) # depends on [control=['with'], data=['in_handle']]
return _combine_machine_info(minfo) # depends on [control=['if'], data=[]]
else:
return {} # depends on [control=['if'], data=[]]
else:
return _combine_machine_info(machine_info()) |
def _ImportModuleHookBySuffix(name, package=None):
  """Callback when a module is imported through importlib.import_module.

  Wraps the real importlib.import_module so that import processing runs
  after the module has been loaded.

  Args:
    name: Absolute or relative module name passed to import_module.
    package: Anchor package for relative imports, or None.

  Returns:
    The imported module object.
  """
  # NOTE(review): presumably balanced by a decrement inside
  # _ProcessImportBySuffix so processing fires once per outermost import;
  # confirm against that helper.
  _IncrementNestLevel()
  try:
    # Really import modules.
    module = _real_import_module(name, package)
  finally:
    # Normalize relative names ('.foo') to absolute before processing;
    # runs even if the import itself raised.
    if name.startswith('.'):
      if package:
        name = _ResolveRelativeImport(name, package)
      else:
        # Should not happen. Relative imports require the package argument.
        name = None
    if name:
      _ProcessImportBySuffix(name, None, None)
  return module
constant[Callback when a module is imported through importlib.import_module.]
call[name[_IncrementNestLevel], parameter[]]
<ast.Try object at 0x7da204962380>
return[name[module]] | keyword[def] identifier[_ImportModuleHookBySuffix] ( identifier[name] , identifier[package] = keyword[None] ):
literal[string]
identifier[_IncrementNestLevel] ()
keyword[try] :
identifier[module] = identifier[_real_import_module] ( identifier[name] , identifier[package] )
keyword[finally] :
keyword[if] identifier[name] . identifier[startswith] ( literal[string] ):
keyword[if] identifier[package] :
identifier[name] = identifier[_ResolveRelativeImport] ( identifier[name] , identifier[package] )
keyword[else] :
identifier[name] = keyword[None]
keyword[if] identifier[name] :
identifier[_ProcessImportBySuffix] ( identifier[name] , keyword[None] , keyword[None] )
keyword[return] identifier[module] | def _ImportModuleHookBySuffix(name, package=None):
"""Callback when a module is imported through importlib.import_module."""
_IncrementNestLevel()
try:
# Really import modules.
module = _real_import_module(name, package) # depends on [control=['try'], data=[]]
finally:
if name.startswith('.'):
if package:
name = _ResolveRelativeImport(name, package) # depends on [control=['if'], data=[]]
else:
# Should not happen. Relative imports require the package argument.
name = None # depends on [control=['if'], data=[]]
if name:
_ProcessImportBySuffix(name, None, None) # depends on [control=['if'], data=[]]
return module |
def detail(request, action_id):
    """
    Render the detail page for a single ``Action`` (pretty boring, mainly
    used for get_absolute_url).
    """
    action = get_object_or_404(models.Action, pk=action_id)
    return render(request, 'actstream/detail.html', {'action': action})
constant[
``Action`` detail view (pretty boring, mainly used for get_absolute_url)
]
return[call[name[render], parameter[name[request], constant[actstream/detail.html], dictionary[[<ast.Constant object at 0x7da1b1d81c00>], [<ast.Call object at 0x7da1b1d830d0>]]]]] | keyword[def] identifier[detail] ( identifier[request] , identifier[action_id] ):
literal[string]
keyword[return] identifier[render] (
identifier[request] ,
literal[string] ,
{
literal[string] : identifier[get_object_or_404] ( identifier[models] . identifier[Action] , identifier[pk] = identifier[action_id] )
}
) | def detail(request, action_id):
"""
``Action`` detail view (pretty boring, mainly used for get_absolute_url)
"""
return render(request, 'actstream/detail.html', {'action': get_object_or_404(models.Action, pk=action_id)}) |
def to_database(self, manager=None):
        """Upload the model to the PyBEL database.

        Thin wrapper around :py:func:`pybel.to_database`.

        Parameters
        ----------
        manager : Optional[pybel.manager.Manager]
            A PyBEL database manager. If None, PyBEL first checks the
            ``PYBEL_CONNECTION`` configuration value, then the
            ``PYBEL_REMOTE_HOST`` environment variable, and finally uses
            an SQLite database in the PyBEL data directory (configured
            automatically by PyBEL).

        Returns
        -------
        network : Optional[pybel.manager.models.Network]
            The SQLAlchemy model representing the uploaded network, or
            None if the upload fails.
        """
        return pybel.to_database(self.model, manager=manager)
constant[Send the model to the PyBEL database
This function wraps :py:func:`pybel.to_database`.
Parameters
----------
manager : Optional[pybel.manager.Manager]
A PyBEL database manager. If none, first checks the PyBEL
configuration for ``PYBEL_CONNECTION`` then checks the
environment variable ``PYBEL_REMOTE_HOST``. Finally,
defaults to using SQLite database in PyBEL data directory
(automatically configured by PyBEL)
Returns
-------
network : Optional[pybel.manager.models.Network]
The SQLAlchemy model representing the network that was uploaded.
Returns None if upload fails.
]
variable[network] assign[=] call[name[pybel].to_database, parameter[name[self].model]]
return[name[network]] | keyword[def] identifier[to_database] ( identifier[self] , identifier[manager] = keyword[None] ):
literal[string]
identifier[network] = identifier[pybel] . identifier[to_database] ( identifier[self] . identifier[model] , identifier[manager] = identifier[manager] )
keyword[return] identifier[network] | def to_database(self, manager=None):
"""Send the model to the PyBEL database
This function wraps :py:func:`pybel.to_database`.
Parameters
----------
manager : Optional[pybel.manager.Manager]
A PyBEL database manager. If none, first checks the PyBEL
configuration for ``PYBEL_CONNECTION`` then checks the
environment variable ``PYBEL_REMOTE_HOST``. Finally,
defaults to using SQLite database in PyBEL data directory
(automatically configured by PyBEL)
Returns
-------
network : Optional[pybel.manager.models.Network]
The SQLAlchemy model representing the network that was uploaded.
Returns None if upload fails.
"""
network = pybel.to_database(self.model, manager=manager)
return network |
def select_uri_implementation(ecore_model_path):
    """Select the right URI implementation regarding the Ecore model path schema."""
    # Remote (http/https) locations need the HTTP-capable URI class.
    is_remote = URL_PATTERN.match(ecore_model_path) is not None
    return (pyecore.resources.resource.HttpURI
            if is_remote
            else pyecore.resources.URI)
constant[Select the right URI implementation regarding the Ecore model path schema.]
if call[name[URL_PATTERN].match, parameter[name[ecore_model_path]]] begin[:]
return[name[pyecore].resources.resource.HttpURI]
return[name[pyecore].resources.URI] | keyword[def] identifier[select_uri_implementation] ( identifier[ecore_model_path] ):
literal[string]
keyword[if] identifier[URL_PATTERN] . identifier[match] ( identifier[ecore_model_path] ):
keyword[return] identifier[pyecore] . identifier[resources] . identifier[resource] . identifier[HttpURI]
keyword[return] identifier[pyecore] . identifier[resources] . identifier[URI] | def select_uri_implementation(ecore_model_path):
"""Select the right URI implementation regarding the Ecore model path schema."""
if URL_PATTERN.match(ecore_model_path):
return pyecore.resources.resource.HttpURI # depends on [control=['if'], data=[]]
return pyecore.resources.URI |
def add_update(self, *args, **kwargs):
        """Add a record on the first call; update it on subsequent calls.

        The record id is cached on ``self._ai_rec_id`` and returned.
        """
        if self._ai_rec_id:
            # Preserve the cached id across update(), which may clobber it
            # as a side effect.
            cached_id = self._ai_rec_id
            self.update(*args, **kwargs)
            self._ai_rec_id = cached_id
        else:
            self._ai_rec_id = self.add(*args, **kwargs)
        return self._ai_rec_id
constant[A records is added, then on subsequent calls, updated]
if <ast.UnaryOp object at 0x7da18dc05180> begin[:]
name[self]._ai_rec_id assign[=] call[name[self].add, parameter[<ast.Starred object at 0x7da18dc04370>]]
return[name[self]._ai_rec_id] | keyword[def] identifier[add_update] ( identifier[self] ,* identifier[args] ,** identifier[kwargs] ):
literal[string]
keyword[if] keyword[not] identifier[self] . identifier[_ai_rec_id] :
identifier[self] . identifier[_ai_rec_id] = identifier[self] . identifier[add] (* identifier[args] ,** identifier[kwargs] )
keyword[else] :
identifier[au_save] = identifier[self] . identifier[_ai_rec_id]
identifier[self] . identifier[update] (* identifier[args] ,** identifier[kwargs] )
identifier[self] . identifier[_ai_rec_id] = identifier[au_save]
keyword[return] identifier[self] . identifier[_ai_rec_id] | def add_update(self, *args, **kwargs):
"""A records is added, then on subsequent calls, updated"""
if not self._ai_rec_id:
self._ai_rec_id = self.add(*args, **kwargs) # depends on [control=['if'], data=[]]
else:
au_save = self._ai_rec_id
self.update(*args, **kwargs)
self._ai_rec_id = au_save
return self._ai_rec_id |
def ensure_schema(self):
        """Create the backing file and its schema if they do not exist yet."""
        self._ensure_filename()
        schema_missing = not os.path.isfile(self.filename)
        if schema_missing:
            self.create_schema()
constant[Create file and schema if it does not exist yet.]
call[name[self]._ensure_filename, parameter[]]
if <ast.UnaryOp object at 0x7da1b28fc4c0> begin[:]
call[name[self].create_schema, parameter[]] | keyword[def] identifier[ensure_schema] ( identifier[self] ):
literal[string]
identifier[self] . identifier[_ensure_filename] ()
keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[isfile] ( identifier[self] . identifier[filename] ):
identifier[self] . identifier[create_schema] () | def ensure_schema(self):
"""Create file and schema if it does not exist yet."""
self._ensure_filename()
if not os.path.isfile(self.filename):
self.create_schema() # depends on [control=['if'], data=[]] |
async def fetch(self) -> Response:
        """Fetch all the information by using aiohttp.

        Applies the optional per-request delay, performs the HTTP request
        under a timeout, wraps the raw aiohttp response in ``Response`` and
        runs the optional async ``VALID`` hook (retry middleware).  On
        timeout or any other exception the request is retried through
        ``self._retry``; the client session is closed in all cases.
        """
        # Politeness delay, in seconds, taken from request_config['DELAY'].
        if self.request_config.get('DELAY', 0) > 0:
            await asyncio.sleep(self.request_config['DELAY'])
        timeout = self.request_config.get('TIMEOUT', 10)
        try:
            async with async_timeout.timeout(timeout):
                resp = await self._make_request()
                try:
                    resp_data = await resp.text(encoding=self.encoding)
                except UnicodeDecodeError:
                    # Payload is not decodable text (e.g. binary) - keep bytes.
                    resp_data = await resp.read()
                response = Response(
                    url=self.url,
                    method=self.method,
                    encoding=resp.get_encoding(),
                    html=resp_data,
                    metadata=self.metadata,
                    cookies=resp.cookies,
                    headers=resp.headers,
                    history=resp.history,
                    status=resp.status,
                    aws_json=resp.json,
                    aws_text=resp.text,
                    aws_read=resp.read)
                # Retry middleware: an async VALID callback may replace the
                # response (e.g. to mark it invalid and force a retry).
                aws_valid_response = self.request_config.get('VALID')
                if aws_valid_response and iscoroutinefunction(aws_valid_response):
                    response = await aws_valid_response(response)
                if response.ok:
                    return response
                else:
                    return await self._retry(error_msg='request url failed!')
        except asyncio.TimeoutError:
            return await self._retry(error_msg='timeout')
        except Exception as e:
            return await self._retry(error_msg=e)
        finally:
            # Close client session
            await self._close_request_session()
literal[string]
keyword[if] identifier[self] . identifier[request_config] . identifier[get] ( literal[string] , literal[int] )> literal[int] :
keyword[await] identifier[asyncio] . identifier[sleep] ( identifier[self] . identifier[request_config] [ literal[string] ])
identifier[timeout] = identifier[self] . identifier[request_config] . identifier[get] ( literal[string] , literal[int] )
keyword[try] :
keyword[async] keyword[with] identifier[async_timeout] . identifier[timeout] ( identifier[timeout] ):
identifier[resp] = keyword[await] identifier[self] . identifier[_make_request] ()
keyword[try] :
identifier[resp_data] = keyword[await] identifier[resp] . identifier[text] ( identifier[encoding] = identifier[self] . identifier[encoding] )
keyword[except] identifier[UnicodeDecodeError] :
identifier[resp_data] = keyword[await] identifier[resp] . identifier[read] ()
identifier[response] = identifier[Response] (
identifier[url] = identifier[self] . identifier[url] ,
identifier[method] = identifier[self] . identifier[method] ,
identifier[encoding] = identifier[resp] . identifier[get_encoding] (),
identifier[html] = identifier[resp_data] ,
identifier[metadata] = identifier[self] . identifier[metadata] ,
identifier[cookies] = identifier[resp] . identifier[cookies] ,
identifier[headers] = identifier[resp] . identifier[headers] ,
identifier[history] = identifier[resp] . identifier[history] ,
identifier[status] = identifier[resp] . identifier[status] ,
identifier[aws_json] = identifier[resp] . identifier[json] ,
identifier[aws_text] = identifier[resp] . identifier[text] ,
identifier[aws_read] = identifier[resp] . identifier[read] )
identifier[aws_valid_response] = identifier[self] . identifier[request_config] . identifier[get] ( literal[string] )
keyword[if] identifier[aws_valid_response] keyword[and] identifier[iscoroutinefunction] ( identifier[aws_valid_response] ):
identifier[response] = keyword[await] identifier[aws_valid_response] ( identifier[response] )
keyword[if] identifier[response] . identifier[ok] :
keyword[return] identifier[response]
keyword[else] :
keyword[return] keyword[await] identifier[self] . identifier[_retry] ( identifier[error_msg] = literal[string] )
keyword[except] identifier[asyncio] . identifier[TimeoutError] :
keyword[return] keyword[await] identifier[self] . identifier[_retry] ( identifier[error_msg] = literal[string] )
keyword[except] identifier[Exception] keyword[as] identifier[e] :
keyword[return] keyword[await] identifier[self] . identifier[_retry] ( identifier[error_msg] = identifier[e] )
keyword[finally] :
keyword[await] identifier[self] . identifier[_close_request_session] () | async def fetch(self) -> Response:
"""Fetch all the information by using aiohttp"""
if self.request_config.get('DELAY', 0) > 0:
await asyncio.sleep(self.request_config['DELAY']) # depends on [control=['if'], data=[]]
timeout = self.request_config.get('TIMEOUT', 10)
try:
async with async_timeout.timeout(timeout):
resp = await self._make_request()
try:
resp_data = await resp.text(encoding=self.encoding) # depends on [control=['try'], data=[]]
except UnicodeDecodeError:
resp_data = await resp.read() # depends on [control=['except'], data=[]]
response = Response(url=self.url, method=self.method, encoding=resp.get_encoding(), html=resp_data, metadata=self.metadata, cookies=resp.cookies, headers=resp.headers, history=resp.history, status=resp.status, aws_json=resp.json, aws_text=resp.text, aws_read=resp.read)
# Retry middleware
aws_valid_response = self.request_config.get('VALID')
if aws_valid_response and iscoroutinefunction(aws_valid_response):
response = await aws_valid_response(response) # depends on [control=['if'], data=[]]
if response.ok:
return response # depends on [control=['if'], data=[]]
else:
return await self._retry(error_msg='request url failed!') # depends on [control=['try'], data=[]]
except asyncio.TimeoutError:
return await self._retry(error_msg='timeout') # depends on [control=['except'], data=[]]
except Exception as e:
return await self._retry(error_msg=e) # depends on [control=['except'], data=['e']]
finally:
# Close client session
await self._close_request_session() |
def get_rtc_table(self):
        """Returns global RTC table.

        Creates the table if it does not exist.
        """
        table = self._global_tables.get(RF_RTC_UC)
        if not table:
            # Lazily build the table on first access and register it under
            # both lookup maps so later lookups return the same instance.
            table = RtcTable(self._core_service, self._signal_bus)
            self._global_tables[RF_RTC_UC] = table
            self._tables[(None, RF_RTC_UC)] = table
        return table
constant[Returns global RTC table.
Creates the table if it does not exist.
]
variable[rtc_table] assign[=] call[name[self]._global_tables.get, parameter[name[RF_RTC_UC]]]
if <ast.UnaryOp object at 0x7da1b1a36da0> begin[:]
variable[rtc_table] assign[=] call[name[RtcTable], parameter[name[self]._core_service, name[self]._signal_bus]]
call[name[self]._global_tables][name[RF_RTC_UC]] assign[=] name[rtc_table]
call[name[self]._tables][tuple[[<ast.Constant object at 0x7da1b1a35ff0>, <ast.Name object at 0x7da1b1a361d0>]]] assign[=] name[rtc_table]
return[name[rtc_table]] | keyword[def] identifier[get_rtc_table] ( identifier[self] ):
literal[string]
identifier[rtc_table] = identifier[self] . identifier[_global_tables] . identifier[get] ( identifier[RF_RTC_UC] )
keyword[if] keyword[not] identifier[rtc_table] :
identifier[rtc_table] = identifier[RtcTable] ( identifier[self] . identifier[_core_service] , identifier[self] . identifier[_signal_bus] )
identifier[self] . identifier[_global_tables] [ identifier[RF_RTC_UC] ]= identifier[rtc_table]
identifier[self] . identifier[_tables] [( keyword[None] , identifier[RF_RTC_UC] )]= identifier[rtc_table]
keyword[return] identifier[rtc_table] | def get_rtc_table(self):
"""Returns global RTC table.
Creates the table if it does not exist.
"""
rtc_table = self._global_tables.get(RF_RTC_UC)
# Lazy initialization of the table.
if not rtc_table:
rtc_table = RtcTable(self._core_service, self._signal_bus)
self._global_tables[RF_RTC_UC] = rtc_table
self._tables[None, RF_RTC_UC] = rtc_table # depends on [control=['if'], data=[]]
return rtc_table |
def create_pipeline_field(self, pipeline_key, name, field_type, **kwargs):
        '''Creates a pipeline field with the provided attributes.

        Args:
            pipeline_key    key of the pipeline to attach the field to
            name            required field name string
            field_type      required type string [TEXT_INPUT, DATE or PERSON]
            kwargs          extra field attributes
        return (status code, field dict)
        '''
        parts = [self.api_uri, self.pipelines_suffix, pipeline_key,
                 self.fields_suffix]
        return self._create_field('/'.join(parts), name, field_type, **kwargs)
constant[Creates a pipeline field with the provided attributes.
Args:
pipeline_key specifying the pipeline to add the field to
name required name string
field_type required type string [TEXT_INPUT, DATE or PERSON]
kwargs {}
return (status code, field dict)
]
variable[uri] assign[=] call[constant[/].join, parameter[list[[<ast.Attribute object at 0x7da1b1596c50>, <ast.Attribute object at 0x7da1b1596320>, <ast.Name object at 0x7da1b1595240>, <ast.Attribute object at 0x7da1b15957b0>]]]]
<ast.Tuple object at 0x7da1b1594430> assign[=] call[name[self]._create_field, parameter[name[uri], name[name], name[field_type]]]
return[tuple[[<ast.Name object at 0x7da1b15954e0>, <ast.Name object at 0x7da1b1597f70>]]] | keyword[def] identifier[create_pipeline_field] ( identifier[self] , identifier[pipeline_key] , identifier[name] , identifier[field_type] ,** identifier[kwargs] ):
literal[string]
identifier[uri] = literal[string] . identifier[join] ([ identifier[self] . identifier[api_uri] ,
identifier[self] . identifier[pipelines_suffix] ,
identifier[pipeline_key] ,
identifier[self] . identifier[fields_suffix]
])
identifier[code] , identifier[data] = identifier[self] . identifier[_create_field] ( identifier[uri] , identifier[name] , identifier[field_type] ,** identifier[kwargs] )
keyword[return] identifier[code] , identifier[data] | def create_pipeline_field(self, pipeline_key, name, field_type, **kwargs):
"""Creates a pipeline field with the provided attributes.
Args:
pipeline_key specifying the pipeline to add the field to
name required name string
field_type required type string [TEXT_INPUT, DATE or PERSON]
kwargs {}
return (status code, field dict)
"""
uri = '/'.join([self.api_uri, self.pipelines_suffix, pipeline_key, self.fields_suffix])
(code, data) = self._create_field(uri, name, field_type, **kwargs)
return (code, data) |
def presentation_type(self):
        """Return the page's presentation_type, or '' when unavailable."""
        page = self.page
        if page:
            ptype = page.presentation_type
            if ptype:
                return ptype
        return ""
constant[
It returns page's presentation_type
]
if <ast.BoolOp object at 0x7da1b133d420> begin[:]
return[name[self].page.presentation_type]
return[constant[]] | keyword[def] identifier[presentation_type] ( identifier[self] ):
literal[string]
keyword[if] identifier[self] . identifier[page] keyword[and] identifier[self] . identifier[page] . identifier[presentation_type] :
keyword[return] identifier[self] . identifier[page] . identifier[presentation_type]
keyword[return] literal[string] | def presentation_type(self):
"""
It returns page's presentation_type
"""
if self.page and self.page.presentation_type:
return self.page.presentation_type # depends on [control=['if'], data=[]]
return '' |
def _matches_patterns(self, matches, context):
        """
        Search for all matches of the current patterns against the input string.

        :param matches: matches list
        :type matches: Matches
        :param context: context to use
        :type context: dict
        :return:
        :rtype:
        """
        if self.disabled(context):
            return
        for pattern in self.effective_patterns(context):
            if pattern.disabled(context):
                log(pattern.log_level, "Pattern is disabled. (%s)", pattern)
                continue
            pattern_matches = pattern.matches(matches.input_string, context)
            if pattern_matches:
                log(pattern.log_level, "Pattern has %s match(es). (%s)", len(pattern_matches), pattern)
            # Markers and plain matches go to separate collections.
            for match in pattern_matches:
                if match.marker:
                    log(pattern.log_level, "Marker found. (%s)", match)
                    matches.markers.append(match)
                else:
                    log(pattern.log_level, "Match found. (%s)", match)
                    matches.append(match)
constant[
Search for all matches with current paterns agains input_string
:param matches: matches list
:type matches: Matches
:param context: context to use
:type context: dict
:return:
:rtype:
]
if <ast.UnaryOp object at 0x7da18dc06c80> begin[:]
variable[patterns] assign[=] call[name[self].effective_patterns, parameter[name[context]]]
for taget[name[pattern]] in starred[name[patterns]] begin[:]
if <ast.UnaryOp object at 0x7da20c76eb90> begin[:]
variable[pattern_matches] assign[=] call[name[pattern].matches, parameter[name[matches].input_string, name[context]]]
if name[pattern_matches] begin[:]
call[name[log], parameter[name[pattern].log_level, constant[Pattern has %s match(es). (%s)], call[name[len], parameter[name[pattern_matches]]], name[pattern]]]
for taget[name[match]] in starred[name[pattern_matches]] begin[:]
if name[match].marker begin[:]
call[name[log], parameter[name[pattern].log_level, constant[Marker found. (%s)], name[match]]]
call[name[matches].markers.append, parameter[name[match]]] | keyword[def] identifier[_matches_patterns] ( identifier[self] , identifier[matches] , identifier[context] ):
literal[string]
keyword[if] keyword[not] identifier[self] . identifier[disabled] ( identifier[context] ):
identifier[patterns] = identifier[self] . identifier[effective_patterns] ( identifier[context] )
keyword[for] identifier[pattern] keyword[in] identifier[patterns] :
keyword[if] keyword[not] identifier[pattern] . identifier[disabled] ( identifier[context] ):
identifier[pattern_matches] = identifier[pattern] . identifier[matches] ( identifier[matches] . identifier[input_string] , identifier[context] )
keyword[if] identifier[pattern_matches] :
identifier[log] ( identifier[pattern] . identifier[log_level] , literal[string] , identifier[len] ( identifier[pattern_matches] ), identifier[pattern] )
keyword[else] :
keyword[pass]
keyword[for] identifier[match] keyword[in] identifier[pattern_matches] :
keyword[if] identifier[match] . identifier[marker] :
identifier[log] ( identifier[pattern] . identifier[log_level] , literal[string] , identifier[match] )
identifier[matches] . identifier[markers] . identifier[append] ( identifier[match] )
keyword[else] :
identifier[log] ( identifier[pattern] . identifier[log_level] , literal[string] , identifier[match] )
identifier[matches] . identifier[append] ( identifier[match] )
keyword[else] :
identifier[log] ( identifier[pattern] . identifier[log_level] , literal[string] , identifier[pattern] ) | def _matches_patterns(self, matches, context):
"""
Search for all matches with current paterns agains input_string
:param matches: matches list
:type matches: Matches
:param context: context to use
:type context: dict
:return:
:rtype:
"""
if not self.disabled(context):
patterns = self.effective_patterns(context)
for pattern in patterns:
if not pattern.disabled(context):
pattern_matches = pattern.matches(matches.input_string, context)
if pattern_matches:
log(pattern.log_level, 'Pattern has %s match(es). (%s)', len(pattern_matches), pattern) # depends on [control=['if'], data=[]]
else:
pass
# log(pattern.log_level, "Pattern doesn't match. (%s)" % (pattern,))
for match in pattern_matches:
if match.marker:
log(pattern.log_level, 'Marker found. (%s)', match)
matches.markers.append(match) # depends on [control=['if'], data=[]]
else:
log(pattern.log_level, 'Match found. (%s)', match)
matches.append(match) # depends on [control=['for'], data=['match']] # depends on [control=['if'], data=[]]
else:
log(pattern.log_level, 'Pattern is disabled. (%s)', pattern) # depends on [control=['for'], data=['pattern']] # depends on [control=['if'], data=[]] |
def beginlock(self, container):
        "Start to acquire lock in another routine. Call trylock or lock later to acquire the lock. Call unlock to cancel the lock routine"
        if self.locked:
            return True
        if not self.lockroutine:
            # Spawn the background acquisition routine; it may (or may not)
            # have acquired the lock by the time subroutine() returns.
            self.lockroutine = container.subroutine(self._lockroutine(container), False)
            return self.locked
        return False
constant[Start to acquire lock in another routine. Call trylock or lock later to acquire the lock. Call unlock to cancel the lock routine]
if name[self].locked begin[:]
return[constant[True]]
if name[self].lockroutine begin[:]
return[constant[False]]
name[self].lockroutine assign[=] call[name[container].subroutine, parameter[call[name[self]._lockroutine, parameter[name[container]]], constant[False]]]
return[name[self].locked] | keyword[def] identifier[beginlock] ( identifier[self] , identifier[container] ):
literal[string]
keyword[if] identifier[self] . identifier[locked] :
keyword[return] keyword[True]
keyword[if] identifier[self] . identifier[lockroutine] :
keyword[return] keyword[False]
identifier[self] . identifier[lockroutine] = identifier[container] . identifier[subroutine] ( identifier[self] . identifier[_lockroutine] ( identifier[container] ), keyword[False] )
keyword[return] identifier[self] . identifier[locked] | def beginlock(self, container):
"""Start to acquire lock in another routine. Call trylock or lock later to acquire the lock. Call unlock to cancel the lock routine"""
if self.locked:
return True # depends on [control=['if'], data=[]]
if self.lockroutine:
return False # depends on [control=['if'], data=[]]
self.lockroutine = container.subroutine(self._lockroutine(container), False)
return self.locked |
def extract_root_meta(cls, serializer, resource):
        """
        Calls a `get_root_meta` function on a serializer, if it exists.
        """
        # A list serializer wraps the real one in `.child`.
        if hasattr(serializer, 'child'):
            serializer = serializer.child
            many = True
        else:
            many = False
        meta = {}
        root_meta_getter = getattr(serializer, 'get_root_meta', None)
        if root_meta_getter:
            extra = root_meta_getter(resource, many)
            assert isinstance(extra, dict), 'get_root_meta must return a dict'
            meta.update(extra)
        return meta
constant[
Calls a `get_root_meta` function on a serializer, if it exists.
]
variable[many] assign[=] constant[False]
if call[name[hasattr], parameter[name[serializer], constant[child]]] begin[:]
variable[many] assign[=] constant[True]
variable[serializer] assign[=] name[serializer].child
variable[data] assign[=] dictionary[[], []]
if call[name[getattr], parameter[name[serializer], constant[get_root_meta], constant[None]]] begin[:]
variable[json_api_meta] assign[=] call[name[serializer].get_root_meta, parameter[name[resource], name[many]]]
assert[call[name[isinstance], parameter[name[json_api_meta], name[dict]]]]
call[name[data].update, parameter[name[json_api_meta]]]
return[name[data]] | keyword[def] identifier[extract_root_meta] ( identifier[cls] , identifier[serializer] , identifier[resource] ):
literal[string]
identifier[many] = keyword[False]
keyword[if] identifier[hasattr] ( identifier[serializer] , literal[string] ):
identifier[many] = keyword[True]
identifier[serializer] = identifier[serializer] . identifier[child]
identifier[data] ={}
keyword[if] identifier[getattr] ( identifier[serializer] , literal[string] , keyword[None] ):
identifier[json_api_meta] = identifier[serializer] . identifier[get_root_meta] ( identifier[resource] , identifier[many] )
keyword[assert] identifier[isinstance] ( identifier[json_api_meta] , identifier[dict] ), literal[string]
identifier[data] . identifier[update] ( identifier[json_api_meta] )
keyword[return] identifier[data] | def extract_root_meta(cls, serializer, resource):
"""
Calls a `get_root_meta` function on a serializer, if it exists.
"""
many = False
if hasattr(serializer, 'child'):
many = True
serializer = serializer.child # depends on [control=['if'], data=[]]
data = {}
if getattr(serializer, 'get_root_meta', None):
json_api_meta = serializer.get_root_meta(resource, many)
assert isinstance(json_api_meta, dict), 'get_root_meta must return a dict'
data.update(json_api_meta) # depends on [control=['if'], data=[]]
return data |
def rio_save(self, filename, fformat=None, fill_value=None,
                 dtype=np.uint8, compute=True, tags=None,
                 keep_palette=False, cmap=None,
                 **format_kwargs):
        """Save the image using rasterio.

        Overviews can be added to the file using the `overviews` kwarg, eg::

          img.rio_save('myfile.tif', overviews=[2, 4, 8, 16])

        Args:
            filename: output path; its extension picks the format when
                *fformat* is None.
            fformat: explicit format key ('jpg', 'png', 'tif', 'jp2') or a
                rasterio driver name.
            fill_value: nodata value written to the file.
            dtype: output data type (default ``np.uint8``).
            compute: when True, store the dask data immediately and return
                the result of ``da.store``; when False, return
                ``(data, r_file)`` and the caller is responsible for storing
                and closing.
            tags: extra tags to write to the file.
            keep_palette: skip setting color interpretation (palette data).
            cmap: colormap to write (uint8 data only).
            **format_kwargs: extra rasterio creation options.
        """
        # Derive the format from the file extension when not given explicitly.
        fformat = fformat or os.path.splitext(filename)[1][1:4]
        drivers = {'jpg': 'JPEG',
                   'png': 'PNG',
                   'tif': 'GTiff',
                   'jp2': 'JP2OpenJPEG'}
        driver = drivers.get(fformat, fformat)
        if tags is None:
            tags = {}
        data, mode = self.finalize(fill_value, dtype=dtype,
                                   keep_palette=keep_palette, cmap=cmap)
        # rasterio expects band-major ordering.
        data = data.transpose('bands', 'y', 'x')
        data.attrs = self.data.attrs
        crs = None
        gcps = None
        transform = None
        if driver in ['GTiff', 'JP2OpenJPEG']:
            # DEFLATE compression only for integer data (not forced on floats).
            if not np.issubdtype(data.dtype, np.floating):
                format_kwargs.setdefault('compress', 'DEFLATE')
            photometric_map = {
                'RGB': 'RGB',
                'RGBA': 'RGB',
                'CMYK': 'CMYK',
                'CMYKA': 'CMYK',
                'YCBCR': 'YCBCR',
                'YCBCRA': 'YCBCR',
            }
            if mode.upper() in photometric_map:
                format_kwargs.setdefault('photometric',
                                         photometric_map[mode.upper()])
            # Geolocation: prefer an 'area' attribute exposing proj_dict +
            # area_extent; on AttributeError fall back to GCPs stored on the
            # area's lons attrs (swath-style geometry - confirm with callers).
            try:
                crs = rasterio.crs.CRS(data.attrs['area'].proj_dict)
                west, south, east, north = data.attrs['area'].area_extent
                height, width = data.sizes['y'], data.sizes['x']
                transform = rasterio.transform.from_bounds(west, south,
                                                           east, north,
                                                           width, height)
            except KeyError:  # No area
                logger.info("Couldn't create geotransform")
            except AttributeError:
                try:
                    gcps = data.attrs['area'].lons.attrs['gcps']
                    crs = data.attrs['area'].lons.attrs['crs']
                except KeyError:
                    logger.info("Couldn't create geotransform")
            if "start_time" in data.attrs:
                stime = data.attrs['start_time']
                stime_str = stime.strftime("%Y:%m:%d %H:%M:%S")
                tags.setdefault('TIFFTAG_DATETIME', stime_str)
        elif driver == 'JPEG' and 'A' in mode:
            raise ValueError('JPEG does not support alpha')
        # FIXME add metadata
        r_file = RIOFile(filename, 'w', driver=driver,
                         width=data.sizes['x'], height=data.sizes['y'],
                         count=data.sizes['bands'],
                         dtype=dtype,
                         nodata=fill_value,
                         crs=crs,
                         transform=transform,
                         gcps=gcps,
                         **format_kwargs)
        r_file.open()
        if not keep_palette:
            r_file.colorinterp = color_interp(data)
        r_file.rfile.update_tags(**tags)
        if keep_palette and cmap is not None:
            # Rasterio colormaps are 8-bit only.
            if data.dtype != 'uint8':
                raise ValueError('Rasterio only supports 8-bit colormaps')
            try:
                from trollimage.colormap import Colormap
                cmap = cmap.to_rio() if isinstance(cmap, Colormap) else cmap
                r_file.rfile.write_colormap(1, cmap)
            except AttributeError:
                raise ValueError("Colormap is not formatted correctly")
        if compute:
            # write data to the file now
            res = da.store(data.data, r_file)
            r_file.close()
            return res
        # provide the data object and the opened file so the caller can
        # store them when they would like. Caller is responsible for
        # closing the file
        return data.data, r_file
constant[Save the image using rasterio.
Overviews can be added to the file using the `overviews` kwarg, eg::
img.rio_save('myfile.tif', overviews=[2, 4, 8, 16])
]
variable[fformat] assign[=] <ast.BoolOp object at 0x7da1b050b520>
variable[drivers] assign[=] dictionary[[<ast.Constant object at 0x7da1b050bf10>, <ast.Constant object at 0x7da1b050bee0>, <ast.Constant object at 0x7da1b04bc1f0>, <ast.Constant object at 0x7da1b04bcb50>], [<ast.Constant object at 0x7da1b04bd090>, <ast.Constant object at 0x7da1b04bc640>, <ast.Constant object at 0x7da1b04bea10>, <ast.Constant object at 0x7da1b04bdea0>]]
variable[driver] assign[=] call[name[drivers].get, parameter[name[fformat], name[fformat]]]
if compare[name[tags] is constant[None]] begin[:]
variable[tags] assign[=] dictionary[[], []]
<ast.Tuple object at 0x7da1b04bca00> assign[=] call[name[self].finalize, parameter[name[fill_value]]]
variable[data] assign[=] call[name[data].transpose, parameter[constant[bands], constant[y], constant[x]]]
name[data].attrs assign[=] name[self].data.attrs
variable[crs] assign[=] constant[None]
variable[gcps] assign[=] constant[None]
variable[transform] assign[=] constant[None]
if compare[name[driver] in list[[<ast.Constant object at 0x7da1b049a5f0>, <ast.Constant object at 0x7da1b049b220>]]] begin[:]
if <ast.UnaryOp object at 0x7da1b0499ff0> begin[:]
call[name[format_kwargs].setdefault, parameter[constant[compress], constant[DEFLATE]]]
variable[photometric_map] assign[=] dictionary[[<ast.Constant object at 0x7da1b04bc190>, <ast.Constant object at 0x7da1b04bfb50>, <ast.Constant object at 0x7da1b04bdd80>, <ast.Constant object at 0x7da1b04bfa30>, <ast.Constant object at 0x7da1b04be800>, <ast.Constant object at 0x7da1b04be710>], [<ast.Constant object at 0x7da1b04bf0a0>, <ast.Constant object at 0x7da1b04be980>, <ast.Constant object at 0x7da1b04bdf60>, <ast.Constant object at 0x7da1b04bde10>, <ast.Constant object at 0x7da1b04bc310>, <ast.Constant object at 0x7da1b04be860>]]
if compare[call[name[mode].upper, parameter[]] in name[photometric_map]] begin[:]
call[name[format_kwargs].setdefault, parameter[constant[photometric], call[name[photometric_map]][call[name[mode].upper, parameter[]]]]]
<ast.Try object at 0x7da1b04bf490>
if compare[constant[start_time] in name[data].attrs] begin[:]
variable[stime] assign[=] call[name[data].attrs][constant[start_time]]
variable[stime_str] assign[=] call[name[stime].strftime, parameter[constant[%Y:%m:%d %H:%M:%S]]]
call[name[tags].setdefault, parameter[constant[TIFFTAG_DATETIME], name[stime_str]]]
variable[r_file] assign[=] call[name[RIOFile], parameter[name[filename], constant[w]]]
call[name[r_file].open, parameter[]]
if <ast.UnaryOp object at 0x7da1b04d7a90> begin[:]
name[r_file].colorinterp assign[=] call[name[color_interp], parameter[name[data]]]
call[name[r_file].rfile.update_tags, parameter[]]
if <ast.BoolOp object at 0x7da1b04d6e90> begin[:]
if compare[name[data].dtype not_equal[!=] constant[uint8]] begin[:]
<ast.Raise object at 0x7da1b04d76a0>
<ast.Try object at 0x7da1b04d4700>
if name[compute] begin[:]
variable[res] assign[=] call[name[da].store, parameter[name[data].data, name[r_file]]]
call[name[r_file].close, parameter[]]
return[name[res]]
return[tuple[[<ast.Attribute object at 0x7da1b04d7490>, <ast.Name object at 0x7da1b04d6680>]]] | keyword[def] identifier[rio_save] ( identifier[self] , identifier[filename] , identifier[fformat] = keyword[None] , identifier[fill_value] = keyword[None] ,
identifier[dtype] = identifier[np] . identifier[uint8] , identifier[compute] = keyword[True] , identifier[tags] = keyword[None] ,
identifier[keep_palette] = keyword[False] , identifier[cmap] = keyword[None] ,
** identifier[format_kwargs] ):
literal[string]
identifier[fformat] = identifier[fformat] keyword[or] identifier[os] . identifier[path] . identifier[splitext] ( identifier[filename] )[ literal[int] ][ literal[int] : literal[int] ]
identifier[drivers] ={ literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[string] }
identifier[driver] = identifier[drivers] . identifier[get] ( identifier[fformat] , identifier[fformat] )
keyword[if] identifier[tags] keyword[is] keyword[None] :
identifier[tags] ={}
identifier[data] , identifier[mode] = identifier[self] . identifier[finalize] ( identifier[fill_value] , identifier[dtype] = identifier[dtype] ,
identifier[keep_palette] = identifier[keep_palette] , identifier[cmap] = identifier[cmap] )
identifier[data] = identifier[data] . identifier[transpose] ( literal[string] , literal[string] , literal[string] )
identifier[data] . identifier[attrs] = identifier[self] . identifier[data] . identifier[attrs]
identifier[crs] = keyword[None]
identifier[gcps] = keyword[None]
identifier[transform] = keyword[None]
keyword[if] identifier[driver] keyword[in] [ literal[string] , literal[string] ]:
keyword[if] keyword[not] identifier[np] . identifier[issubdtype] ( identifier[data] . identifier[dtype] , identifier[np] . identifier[floating] ):
identifier[format_kwargs] . identifier[setdefault] ( literal[string] , literal[string] )
identifier[photometric_map] ={
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[string] ,
}
keyword[if] identifier[mode] . identifier[upper] () keyword[in] identifier[photometric_map] :
identifier[format_kwargs] . identifier[setdefault] ( literal[string] ,
identifier[photometric_map] [ identifier[mode] . identifier[upper] ()])
keyword[try] :
identifier[crs] = identifier[rasterio] . identifier[crs] . identifier[CRS] ( identifier[data] . identifier[attrs] [ literal[string] ]. identifier[proj_dict] )
identifier[west] , identifier[south] , identifier[east] , identifier[north] = identifier[data] . identifier[attrs] [ literal[string] ]. identifier[area_extent]
identifier[height] , identifier[width] = identifier[data] . identifier[sizes] [ literal[string] ], identifier[data] . identifier[sizes] [ literal[string] ]
identifier[transform] = identifier[rasterio] . identifier[transform] . identifier[from_bounds] ( identifier[west] , identifier[south] ,
identifier[east] , identifier[north] ,
identifier[width] , identifier[height] )
keyword[except] identifier[KeyError] :
identifier[logger] . identifier[info] ( literal[string] )
keyword[except] identifier[AttributeError] :
keyword[try] :
identifier[gcps] = identifier[data] . identifier[attrs] [ literal[string] ]. identifier[lons] . identifier[attrs] [ literal[string] ]
identifier[crs] = identifier[data] . identifier[attrs] [ literal[string] ]. identifier[lons] . identifier[attrs] [ literal[string] ]
keyword[except] identifier[KeyError] :
identifier[logger] . identifier[info] ( literal[string] )
keyword[if] literal[string] keyword[in] identifier[data] . identifier[attrs] :
identifier[stime] = identifier[data] . identifier[attrs] [ literal[string] ]
identifier[stime_str] = identifier[stime] . identifier[strftime] ( literal[string] )
identifier[tags] . identifier[setdefault] ( literal[string] , identifier[stime_str] )
keyword[elif] identifier[driver] == literal[string] keyword[and] literal[string] keyword[in] identifier[mode] :
keyword[raise] identifier[ValueError] ( literal[string] )
identifier[r_file] = identifier[RIOFile] ( identifier[filename] , literal[string] , identifier[driver] = identifier[driver] ,
identifier[width] = identifier[data] . identifier[sizes] [ literal[string] ], identifier[height] = identifier[data] . identifier[sizes] [ literal[string] ],
identifier[count] = identifier[data] . identifier[sizes] [ literal[string] ],
identifier[dtype] = identifier[dtype] ,
identifier[nodata] = identifier[fill_value] ,
identifier[crs] = identifier[crs] ,
identifier[transform] = identifier[transform] ,
identifier[gcps] = identifier[gcps] ,
** identifier[format_kwargs] )
identifier[r_file] . identifier[open] ()
keyword[if] keyword[not] identifier[keep_palette] :
identifier[r_file] . identifier[colorinterp] = identifier[color_interp] ( identifier[data] )
identifier[r_file] . identifier[rfile] . identifier[update_tags] (** identifier[tags] )
keyword[if] identifier[keep_palette] keyword[and] identifier[cmap] keyword[is] keyword[not] keyword[None] :
keyword[if] identifier[data] . identifier[dtype] != literal[string] :
keyword[raise] identifier[ValueError] ( literal[string] )
keyword[try] :
keyword[from] identifier[trollimage] . identifier[colormap] keyword[import] identifier[Colormap]
identifier[cmap] = identifier[cmap] . identifier[to_rio] () keyword[if] identifier[isinstance] ( identifier[cmap] , identifier[Colormap] ) keyword[else] identifier[cmap]
identifier[r_file] . identifier[rfile] . identifier[write_colormap] ( literal[int] , identifier[cmap] )
keyword[except] identifier[AttributeError] :
keyword[raise] identifier[ValueError] ( literal[string] )
keyword[if] identifier[compute] :
identifier[res] = identifier[da] . identifier[store] ( identifier[data] . identifier[data] , identifier[r_file] )
identifier[r_file] . identifier[close] ()
keyword[return] identifier[res]
keyword[return] identifier[data] . identifier[data] , identifier[r_file] | def rio_save(self, filename, fformat=None, fill_value=None, dtype=np.uint8, compute=True, tags=None, keep_palette=False, cmap=None, **format_kwargs):
"""Save the image using rasterio.
Overviews can be added to the file using the `overviews` kwarg, eg::
img.rio_save('myfile.tif', overviews=[2, 4, 8, 16])
"""
fformat = fformat or os.path.splitext(filename)[1][1:4]
drivers = {'jpg': 'JPEG', 'png': 'PNG', 'tif': 'GTiff', 'jp2': 'JP2OpenJPEG'}
driver = drivers.get(fformat, fformat)
if tags is None:
tags = {} # depends on [control=['if'], data=['tags']]
(data, mode) = self.finalize(fill_value, dtype=dtype, keep_palette=keep_palette, cmap=cmap)
data = data.transpose('bands', 'y', 'x')
data.attrs = self.data.attrs
crs = None
gcps = None
transform = None
if driver in ['GTiff', 'JP2OpenJPEG']:
if not np.issubdtype(data.dtype, np.floating):
format_kwargs.setdefault('compress', 'DEFLATE') # depends on [control=['if'], data=[]]
photometric_map = {'RGB': 'RGB', 'RGBA': 'RGB', 'CMYK': 'CMYK', 'CMYKA': 'CMYK', 'YCBCR': 'YCBCR', 'YCBCRA': 'YCBCR'}
if mode.upper() in photometric_map:
format_kwargs.setdefault('photometric', photometric_map[mode.upper()]) # depends on [control=['if'], data=['photometric_map']]
try:
crs = rasterio.crs.CRS(data.attrs['area'].proj_dict)
(west, south, east, north) = data.attrs['area'].area_extent
(height, width) = (data.sizes['y'], data.sizes['x'])
transform = rasterio.transform.from_bounds(west, south, east, north, width, height) # depends on [control=['try'], data=[]]
except KeyError: # No area
logger.info("Couldn't create geotransform") # depends on [control=['except'], data=[]]
except AttributeError:
try:
gcps = data.attrs['area'].lons.attrs['gcps']
crs = data.attrs['area'].lons.attrs['crs'] # depends on [control=['try'], data=[]]
except KeyError:
logger.info("Couldn't create geotransform") # depends on [control=['except'], data=[]] # depends on [control=['except'], data=[]]
if 'start_time' in data.attrs:
stime = data.attrs['start_time']
stime_str = stime.strftime('%Y:%m:%d %H:%M:%S')
tags.setdefault('TIFFTAG_DATETIME', stime_str) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
elif driver == 'JPEG' and 'A' in mode:
raise ValueError('JPEG does not support alpha') # depends on [control=['if'], data=[]]
# FIXME add metadata
r_file = RIOFile(filename, 'w', driver=driver, width=data.sizes['x'], height=data.sizes['y'], count=data.sizes['bands'], dtype=dtype, nodata=fill_value, crs=crs, transform=transform, gcps=gcps, **format_kwargs)
r_file.open()
if not keep_palette:
r_file.colorinterp = color_interp(data) # depends on [control=['if'], data=[]]
r_file.rfile.update_tags(**tags)
if keep_palette and cmap is not None:
if data.dtype != 'uint8':
raise ValueError('Rasterio only supports 8-bit colormaps') # depends on [control=['if'], data=[]]
try:
from trollimage.colormap import Colormap
cmap = cmap.to_rio() if isinstance(cmap, Colormap) else cmap
r_file.rfile.write_colormap(1, cmap) # depends on [control=['try'], data=[]]
except AttributeError:
raise ValueError('Colormap is not formatted correctly') # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]]
if compute:
# write data to the file now
res = da.store(data.data, r_file)
r_file.close()
return res # depends on [control=['if'], data=[]]
# provide the data object and the opened file so the caller can
# store them when they would like. Caller is responsible for
# closing the file
return (data.data, r_file) |
def _get_toplevel(path, user=None, password=None, output_encoding=None):
    '''
    Resolve the root directory of the git worktree containing ``path``
    by running ``git rev-parse --show-toplevel``.
    '''
    rev_parse_cmd = ['git', 'rev-parse', '--show-toplevel']
    result = _git_run(rev_parse_cmd,
                      cwd=path,
                      user=user,
                      password=password,
                      output_encoding=output_encoding)
    return result['stdout']
constant[
Use git rev-parse to return the top level of a repo
]
return[call[call[name[_git_run], parameter[list[[<ast.Constant object at 0x7da20c7c9990>, <ast.Constant object at 0x7da20c7ca770>, <ast.Constant object at 0x7da20c7cba30>]]]]][constant[stdout]]] | keyword[def] identifier[_get_toplevel] ( identifier[path] , identifier[user] = keyword[None] , identifier[password] = keyword[None] , identifier[output_encoding] = keyword[None] ):
literal[string]
keyword[return] identifier[_git_run] (
[ literal[string] , literal[string] , literal[string] ],
identifier[cwd] = identifier[path] ,
identifier[user] = identifier[user] ,
identifier[password] = identifier[password] ,
identifier[output_encoding] = identifier[output_encoding] )[ literal[string] ] | def _get_toplevel(path, user=None, password=None, output_encoding=None):
"""
Use git rev-parse to return the top level of a repo
"""
return _git_run(['git', 'rev-parse', '--show-toplevel'], cwd=path, user=user, password=password, output_encoding=output_encoding)['stdout'] |
def _set_uda_offset1(self, v, load=False):
    """
    Setter method for uda_offset1, mapped from YANG variable /uda_key/profile/uda_profile_offsets/uda_offset1 (union)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_uda_offset1 is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_uda_offset1() directly.
    """
    # Generated (pyangbind-style) setter: validate ``v`` against the YANG
    # union type and store the wrapped value. ``load`` mirrors the
    # generated-setter signature; it is not used in this body.
    # If the incoming value is already a union wrapper, unwrap it to its
    # underlying native type before re-validating below (presumably set
    # by the generated union handling — TODO confirm).
    if hasattr(v, "_utype"):
      v = v._utype(v)
    try:
      # Wrap/validate: the value must match one of the union members —
      # the literal "ignore" keyword or an integer in the range 0..63.
      t = YANGDynClass(v,base=[RestrictedClassType(base_type=unicode, restriction_type="dict_key", restriction_arg={u'ignore': {'value': 0}},),RestrictedClassType(base_type=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), restriction_dict={'range': [u'0 .. 63']}),], is_leaf=True, yang_name="uda-offset1", rest_name="uda-offset1", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'offset1', u'cli-drop-node-name': None, u'cli-incomplete-command': None, u'cli-suppress-no': None}}, namespace='urn:brocade.com:mgmt:brocade-uda-access-list', defining_module='brocade-uda-access-list', yang_type='union', is_config=True)
    except (TypeError, ValueError):
      # Re-raise with the structured error payload the generated API
      # contract expects (error string, YANG type, generating expression).
      raise ValueError({
        'error-string': """uda_offset1 must be of a type compatible with union""",
        'defined-type': "brocade-uda-access-list:union",
        'generated-type': """YANGDynClass(base=[RestrictedClassType(base_type=unicode, restriction_type="dict_key", restriction_arg={u'ignore': {'value': 0}},),RestrictedClassType(base_type=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), restriction_dict={'range': [u'0 .. 63']}),], is_leaf=True, yang_name="uda-offset1", rest_name="uda-offset1", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'offset1', u'cli-drop-node-name': None, u'cli-incomplete-command': None, u'cli-suppress-no': None}}, namespace='urn:brocade.com:mgmt:brocade-uda-access-list', defining_module='brocade-uda-access-list', yang_type='union', is_config=True)""",
      })
    self.__uda_offset1 = t
    # Fire the change hook when the instance defines one.
    if hasattr(self, '_set'):
      self._set()
constant[
Setter method for uda_offset1, mapped from YANG variable /uda_key/profile/uda_profile_offsets/uda_offset1 (union)
If this variable is read-only (config: false) in the
source YANG file, then _set_uda_offset1 is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_uda_offset1() directly.
]
if call[name[hasattr], parameter[name[v], constant[_utype]]] begin[:]
variable[v] assign[=] call[name[v]._utype, parameter[name[v]]]
<ast.Try object at 0x7da18eb55180>
name[self].__uda_offset1 assign[=] name[t]
if call[name[hasattr], parameter[name[self], constant[_set]]] begin[:]
call[name[self]._set, parameter[]] | keyword[def] identifier[_set_uda_offset1] ( identifier[self] , identifier[v] , identifier[load] = keyword[False] ):
literal[string]
keyword[if] identifier[hasattr] ( identifier[v] , literal[string] ):
identifier[v] = identifier[v] . identifier[_utype] ( identifier[v] )
keyword[try] :
identifier[t] = identifier[YANGDynClass] ( identifier[v] , identifier[base] =[ identifier[RestrictedClassType] ( identifier[base_type] = identifier[unicode] , identifier[restriction_type] = literal[string] , identifier[restriction_arg] ={ literal[string] :{ literal[string] : literal[int] }},), identifier[RestrictedClassType] ( identifier[base_type] = identifier[RestrictedClassType] ( identifier[base_type] = identifier[long] , identifier[restriction_dict] ={ literal[string] :[ literal[string] ]}, identifier[int_size] = literal[int] ), identifier[restriction_dict] ={ literal[string] :[ literal[string] ]}),], identifier[is_leaf] = keyword[True] , identifier[yang_name] = literal[string] , identifier[rest_name] = literal[string] , identifier[parent] = identifier[self] , identifier[path_helper] = identifier[self] . identifier[_path_helper] , identifier[extmethods] = identifier[self] . identifier[_extmethods] , identifier[register_paths] = keyword[True] , identifier[extensions] ={ literal[string] :{ literal[string] : literal[string] , literal[string] : keyword[None] , literal[string] : keyword[None] , literal[string] : keyword[None] }}, identifier[namespace] = literal[string] , identifier[defining_module] = literal[string] , identifier[yang_type] = literal[string] , identifier[is_config] = keyword[True] )
keyword[except] ( identifier[TypeError] , identifier[ValueError] ):
keyword[raise] identifier[ValueError] ({
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[string] ,
})
identifier[self] . identifier[__uda_offset1] = identifier[t]
keyword[if] identifier[hasattr] ( identifier[self] , literal[string] ):
identifier[self] . identifier[_set] () | def _set_uda_offset1(self, v, load=False):
"""
Setter method for uda_offset1, mapped from YANG variable /uda_key/profile/uda_profile_offsets/uda_offset1 (union)
If this variable is read-only (config: false) in the
source YANG file, then _set_uda_offset1 is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_uda_offset1() directly.
"""
if hasattr(v, '_utype'):
v = v._utype(v) # depends on [control=['if'], data=[]]
try:
t = YANGDynClass(v, base=[RestrictedClassType(base_type=unicode, restriction_type='dict_key', restriction_arg={u'ignore': {'value': 0}}), RestrictedClassType(base_type=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), restriction_dict={'range': [u'0 .. 63']})], is_leaf=True, yang_name='uda-offset1', rest_name='uda-offset1', parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'offset1', u'cli-drop-node-name': None, u'cli-incomplete-command': None, u'cli-suppress-no': None}}, namespace='urn:brocade.com:mgmt:brocade-uda-access-list', defining_module='brocade-uda-access-list', yang_type='union', is_config=True) # depends on [control=['try'], data=[]]
except (TypeError, ValueError):
raise ValueError({'error-string': 'uda_offset1 must be of a type compatible with union', 'defined-type': 'brocade-uda-access-list:union', 'generated-type': 'YANGDynClass(base=[RestrictedClassType(base_type=unicode, restriction_type="dict_key", restriction_arg={u\'ignore\': {\'value\': 0}},),RestrictedClassType(base_type=RestrictedClassType(base_type=long, restriction_dict={\'range\': [\'0..4294967295\']}, int_size=32), restriction_dict={\'range\': [u\'0 .. 63\']}),], is_leaf=True, yang_name="uda-offset1", rest_name="uda-offset1", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u\'tailf-common\': {u\'info\': u\'offset1\', u\'cli-drop-node-name\': None, u\'cli-incomplete-command\': None, u\'cli-suppress-no\': None}}, namespace=\'urn:brocade.com:mgmt:brocade-uda-access-list\', defining_module=\'brocade-uda-access-list\', yang_type=\'union\', is_config=True)'}) # depends on [control=['except'], data=[]]
self.__uda_offset1 = t
if hasattr(self, '_set'):
self._set() # depends on [control=['if'], data=[]] |
def registration_authority_entity_id(self):
        """
        Return the registration authority's entity id, or ``None``.

        Not every record carries this value; it is unclear whether that
        is a bug or just inconsistently registered data, so a missing
        key is treated as "no id" rather than an error.
        """
        try:
            authority = self.raw[ATTR_ENTITY_REGISTRATION_AUTHORITY]
            entity_id = authority[ATTR_ENTITY_REGISTRATION_AUTHORITY_ENTITY_ID]
            return entity_id[ATTR_DOLLAR_SIGN]
        except KeyError:
            return None
constant[
Some entities return the register entity id,
but other do not. Unsure if this is a bug or
inconsistently registered data.
]
if compare[name[ATTR_ENTITY_REGISTRATION_AUTHORITY] in name[self].raw] begin[:]
<ast.Try object at 0x7da1b1a76bf0> | keyword[def] identifier[registration_authority_entity_id] ( identifier[self] ):
literal[string]
keyword[if] identifier[ATTR_ENTITY_REGISTRATION_AUTHORITY] keyword[in] identifier[self] . identifier[raw] :
keyword[try] :
keyword[return] identifier[self] . identifier[raw] [
identifier[ATTR_ENTITY_REGISTRATION_AUTHORITY] ][
identifier[ATTR_ENTITY_REGISTRATION_AUTHORITY_ENTITY_ID] ][
identifier[ATTR_DOLLAR_SIGN] ]
keyword[except] identifier[KeyError] :
keyword[pass] | def registration_authority_entity_id(self):
"""
Some entities return the register entity id,
but other do not. Unsure if this is a bug or
inconsistently registered data.
"""
if ATTR_ENTITY_REGISTRATION_AUTHORITY in self.raw:
try:
return self.raw[ATTR_ENTITY_REGISTRATION_AUTHORITY][ATTR_ENTITY_REGISTRATION_AUTHORITY_ENTITY_ID][ATTR_DOLLAR_SIGN] # depends on [control=['try'], data=[]]
except KeyError:
pass # depends on [control=['except'], data=[]] # depends on [control=['if'], data=['ATTR_ENTITY_REGISTRATION_AUTHORITY']] |
def import_submodules(package_name: str) -> None:
    """
    Recursively import every submodule beneath ``package_name``.

    This exists mainly so that users of AllenNLP as a library can point
    at their own custom packages and have the classes in them imported
    (and therefore registered).
    """
    importlib.invalidate_caches()
    # Python doesn't always put the current directory on sys.path, and
    # `--include-package` users pretty much always want it there; a
    # duplicate entry at the end is harmless.
    sys.path.append('.')
    top_level = importlib.import_module(package_name)
    search_locations = getattr(top_level, '__path__', [])
    root = search_locations[0] if search_locations else ''
    # walk_packages only yields direct children, so recurse per child.
    for finder, child_name, _is_pkg in pkgutil.walk_packages(search_locations):
        # walk_packages can also surface unrelated third-party packages
        # that happen to be on the path; keep only modules discovered
        # under our own package root.
        if root and finder.path != root:
            continue
        import_submodules(f"{package_name}.{child_name}")
constant[
Import all submodules under the given package.
Primarily useful so that people using AllenNLP as a library
can specify their own custom packages and have their custom
classes get loaded and registered.
]
call[name[importlib].invalidate_caches, parameter[]]
call[name[sys].path.append, parameter[constant[.]]]
variable[module] assign[=] call[name[importlib].import_module, parameter[name[package_name]]]
variable[path] assign[=] call[name[getattr], parameter[name[module], constant[__path__], list[[]]]]
variable[path_string] assign[=] <ast.IfExp object at 0x7da2041daf20>
for taget[tuple[[<ast.Name object at 0x7da2041db880>, <ast.Name object at 0x7da2041daa70>, <ast.Name object at 0x7da2041db100>]]] in starred[call[name[pkgutil].walk_packages, parameter[name[path]]]] begin[:]
if <ast.BoolOp object at 0x7da2041da440> begin[:]
continue
variable[subpackage] assign[=] <ast.JoinedStr object at 0x7da2041daf50>
call[name[import_submodules], parameter[name[subpackage]]] | keyword[def] identifier[import_submodules] ( identifier[package_name] : identifier[str] )-> keyword[None] :
literal[string]
identifier[importlib] . identifier[invalidate_caches] ()
identifier[sys] . identifier[path] . identifier[append] ( literal[string] )
identifier[module] = identifier[importlib] . identifier[import_module] ( identifier[package_name] )
identifier[path] = identifier[getattr] ( identifier[module] , literal[string] ,[])
identifier[path_string] = literal[string] keyword[if] keyword[not] identifier[path] keyword[else] identifier[path] [ literal[int] ]
keyword[for] identifier[module_finder] , identifier[name] , identifier[_] keyword[in] identifier[pkgutil] . identifier[walk_packages] ( identifier[path] ):
keyword[if] identifier[path_string] keyword[and] identifier[module_finder] . identifier[path] != identifier[path_string] :
keyword[continue]
identifier[subpackage] = literal[string]
identifier[import_submodules] ( identifier[subpackage] ) | def import_submodules(package_name: str) -> None:
"""
Import all submodules under the given package.
Primarily useful so that people using AllenNLP as a library
can specify their own custom packages and have their custom
classes get loaded and registered.
"""
importlib.invalidate_caches()
# For some reason, python doesn't always add this by default to your path, but you pretty much
# always want it when using `--include-package`. And if it's already there, adding it again at
# the end won't hurt anything.
sys.path.append('.')
# Import at top level
module = importlib.import_module(package_name)
path = getattr(module, '__path__', [])
path_string = '' if not path else path[0]
# walk_packages only finds immediate children, so need to recurse.
for (module_finder, name, _) in pkgutil.walk_packages(path):
# Sometimes when you import third-party libraries that are on your path,
# `pkgutil.walk_packages` returns those too, so we need to skip them.
if path_string and module_finder.path != path_string:
continue # depends on [control=['if'], data=[]]
subpackage = f'{package_name}.{name}'
import_submodules(subpackage) # depends on [control=['for'], data=[]] |
def contractString(self, contract, seperator="_"):
        """ returns string from contract tuple

        Flattens a contract (an IB contract object or an already-converted
        contract tuple) into a single upper-case identifier; spaces become
        underscores and a trailing "_STK" is dropped for stocks.
        """
        localSymbol = ""
        contractTuple = contract
        # Accept either an IB contract object or a ready-made tuple;
        # objects are converted (their localSymbol is remembered for the
        # futures-expiry handling below).
        if type(contract) != tuple:
            localSymbol = contract.m_localSymbol
            contractTuple = self.contract_to_tuple(contract)
        # build identifier
        try:
            if contractTuple[1] in ("OPT", "FOP"):
                # if contractTuple[5]*100 - int(contractTuple[5]*100):
                # strike = contractTuple[5]
                # else:
                # strike = "{0:.2f}".format(contractTuple[5])
                # Strike rendered as 5 zero-padded integer digits plus the
                # 3 decimal digits, e.g. 127.5 -> "00127500".
                strike = '{:0>5d}'.format(int(contractTuple[5])) + \
                    format(contractTuple[5], '.3f').split('.')[1]
                # symbol + expiry + right (first char, C/P) + strike
                contractString = (contractTuple[0] + str(contractTuple[4]) +
                                  contractTuple[6][0] + strike, contractTuple[1])
                # contractTuple[6], str(strike).replace(".", ""))
            elif contractTuple[1] == "FUT":
                exp = ' ' # default
                # round expiry day to expiry month
                if localSymbol != "":
                    # exp = localSymbol[2:3]+str(contractTuple[4][:4])
                    # Month code taken from the local symbol, year from the
                    # recorded expiry (presumably a YYYY-prefixed string —
                    # TODO confirm localSymbolExpiry format).
                    exp = localSymbol[2:3] + self.localSymbolExpiry[localSymbol][:4]
                # No usable local symbol: derive month code + year from the
                # tuple's YYYYMMDD expiry via the MONTH_CODES lookup table.
                if ' ' in exp:
                    exp = str(contractTuple[4])[:6]
                    exp = dataTypes["MONTH_CODES"][int(exp[4:6])] + str(int(exp[:4]))
                contractString = (contractTuple[0] + exp, contractTuple[1])
            elif contractTuple[1] == "CASH":
                # Forex pair: symbol + currency (e.g. EUR + USD).
                contractString = (contractTuple[0] + contractTuple[3], contractTuple[1])
            else: # STK
                contractString = (contractTuple[0], contractTuple[1])
            # construct string
            contractString = seperator.join(
                str(v) for v in contractString).replace(seperator + "STK", "")
        except Exception:
            # Best-effort fallback: any failure above degrades to the bare
            # symbol instead of raising.
            contractString = contractTuple[0]
        return contractString.replace(" ", "_").upper()
constant[ returns string from contract tuple ]
variable[localSymbol] assign[=] constant[]
variable[contractTuple] assign[=] name[contract]
if compare[call[name[type], parameter[name[contract]]] not_equal[!=] name[tuple]] begin[:]
variable[localSymbol] assign[=] name[contract].m_localSymbol
variable[contractTuple] assign[=] call[name[self].contract_to_tuple, parameter[name[contract]]]
<ast.Try object at 0x7da20c6c6440>
return[call[call[name[contractString].replace, parameter[constant[ ], constant[_]]].upper, parameter[]]] | keyword[def] identifier[contractString] ( identifier[self] , identifier[contract] , identifier[seperator] = literal[string] ):
literal[string]
identifier[localSymbol] = literal[string]
identifier[contractTuple] = identifier[contract]
keyword[if] identifier[type] ( identifier[contract] )!= identifier[tuple] :
identifier[localSymbol] = identifier[contract] . identifier[m_localSymbol]
identifier[contractTuple] = identifier[self] . identifier[contract_to_tuple] ( identifier[contract] )
keyword[try] :
keyword[if] identifier[contractTuple] [ literal[int] ] keyword[in] ( literal[string] , literal[string] ):
identifier[strike] = literal[string] . identifier[format] ( identifier[int] ( identifier[contractTuple] [ literal[int] ]))+ identifier[format] ( identifier[contractTuple] [ literal[int] ], literal[string] ). identifier[split] ( literal[string] )[ literal[int] ]
identifier[contractString] =( identifier[contractTuple] [ literal[int] ]+ identifier[str] ( identifier[contractTuple] [ literal[int] ])+
identifier[contractTuple] [ literal[int] ][ literal[int] ]+ identifier[strike] , identifier[contractTuple] [ literal[int] ])
keyword[elif] identifier[contractTuple] [ literal[int] ]== literal[string] :
identifier[exp] = literal[string]
keyword[if] identifier[localSymbol] != literal[string] :
identifier[exp] = identifier[localSymbol] [ literal[int] : literal[int] ]+ identifier[self] . identifier[localSymbolExpiry] [ identifier[localSymbol] ][: literal[int] ]
keyword[if] literal[string] keyword[in] identifier[exp] :
identifier[exp] = identifier[str] ( identifier[contractTuple] [ literal[int] ])[: literal[int] ]
identifier[exp] = identifier[dataTypes] [ literal[string] ][ identifier[int] ( identifier[exp] [ literal[int] : literal[int] ])]+ identifier[str] ( identifier[int] ( identifier[exp] [: literal[int] ]))
identifier[contractString] =( identifier[contractTuple] [ literal[int] ]+ identifier[exp] , identifier[contractTuple] [ literal[int] ])
keyword[elif] identifier[contractTuple] [ literal[int] ]== literal[string] :
identifier[contractString] =( identifier[contractTuple] [ literal[int] ]+ identifier[contractTuple] [ literal[int] ], identifier[contractTuple] [ literal[int] ])
keyword[else] :
identifier[contractString] =( identifier[contractTuple] [ literal[int] ], identifier[contractTuple] [ literal[int] ])
identifier[contractString] = identifier[seperator] . identifier[join] (
identifier[str] ( identifier[v] ) keyword[for] identifier[v] keyword[in] identifier[contractString] ). identifier[replace] ( identifier[seperator] + literal[string] , literal[string] )
keyword[except] identifier[Exception] :
identifier[contractString] = identifier[contractTuple] [ literal[int] ]
keyword[return] identifier[contractString] . identifier[replace] ( literal[string] , literal[string] ). identifier[upper] () | def contractString(self, contract, seperator='_'):
""" returns string from contract tuple """
localSymbol = ''
contractTuple = contract
if type(contract) != tuple:
localSymbol = contract.m_localSymbol
contractTuple = self.contract_to_tuple(contract) # depends on [control=['if'], data=[]]
# build identifier
try:
if contractTuple[1] in ('OPT', 'FOP'):
# if contractTuple[5]*100 - int(contractTuple[5]*100):
# strike = contractTuple[5]
# else:
# strike = "{0:.2f}".format(contractTuple[5])
strike = '{:0>5d}'.format(int(contractTuple[5])) + format(contractTuple[5], '.3f').split('.')[1]
contractString = (contractTuple[0] + str(contractTuple[4]) + contractTuple[6][0] + strike, contractTuple[1]) # depends on [control=['if'], data=[]]
# contractTuple[6], str(strike).replace(".", ""))
elif contractTuple[1] == 'FUT':
exp = ' ' # default
# round expiry day to expiry month
if localSymbol != '':
# exp = localSymbol[2:3]+str(contractTuple[4][:4])
exp = localSymbol[2:3] + self.localSymbolExpiry[localSymbol][:4] # depends on [control=['if'], data=['localSymbol']]
if ' ' in exp:
exp = str(contractTuple[4])[:6]
exp = dataTypes['MONTH_CODES'][int(exp[4:6])] + str(int(exp[:4])) # depends on [control=['if'], data=['exp']]
contractString = (contractTuple[0] + exp, contractTuple[1]) # depends on [control=['if'], data=[]]
elif contractTuple[1] == 'CASH':
contractString = (contractTuple[0] + contractTuple[3], contractTuple[1]) # depends on [control=['if'], data=[]]
else: # STK
contractString = (contractTuple[0], contractTuple[1])
# construct string
contractString = seperator.join((str(v) for v in contractString)).replace(seperator + 'STK', '') # depends on [control=['try'], data=[]]
except Exception:
contractString = contractTuple[0] # depends on [control=['except'], data=[]]
return contractString.replace(' ', '_').upper() |
def _displayFeatures(self, fig, features, minX, maxX, offsetAdjuster):
    """
    Add the given C{features} to the figure in C{fig}.

    @param fig: A matplotlib figure.
    @param features: A C{FeatureList} instance.
    @param minX: The smallest x coordinate.
    @param maxX: The largest x coordinate.
    @param offsetAdjuster: a function for adjusting feature X axis offsets
        for plotting.
    """
    labels = []
    for row, feature in enumerate(features):
        # Each feature is drawn as a horizontal bar on its own row,
        # stacked downwards (negative y) in feature order.
        y = row * -0.2
        xStart = offsetAdjuster(feature.start)
        xEnd = offsetAdjuster(feature.end)
        fig.plot([xStart, xEnd], [y, y], color=feature.color, linewidth=2)
        labels.append(feature.legendLabel())
    # minX and maxX are already offset-adjusted (they come from
    # computePlotInfo in blast.py), so no adjustment is applied here.
    fig.axis([minX, maxX, (len(features) + 1) * -0.2, 0.2])
    if labels:
        # Shrink the plot area and place a legend above the figure.
        box = fig.get_position()
        fig.set_position([box.x0, box.y0, box.width, box.height * 0.2])
        fig.legend(labels, loc='lower center', bbox_to_anchor=(0.5, 1.4),
                   fancybox=True, shadow=True, ncol=2)
constant[
Add the given C{features} to the figure in C{fig}.
@param fig: A matplotlib figure.
@param features: A C{FeatureList} instance.
@param minX: The smallest x coordinate.
@param maxX: The largest x coordinate.
@param offsetAdjuster: a function for adjusting feature X axis offsets
for plotting.
]
variable[labels] assign[=] list[[]]
for taget[tuple[[<ast.Name object at 0x7da20c6aa2f0>, <ast.Name object at 0x7da20c6a9c60>]]] in starred[call[name[enumerate], parameter[name[features]]]] begin[:]
call[name[fig].plot, parameter[list[[<ast.Call object at 0x7da20c6ab8b0>, <ast.Call object at 0x7da20c6aa170>]], list[[<ast.BinOp object at 0x7da20c6ab970>, <ast.BinOp object at 0x7da20c6a8d60>]]]]
call[name[labels].append, parameter[call[name[feature].legendLabel, parameter[]]]]
call[name[fig].axis, parameter[list[[<ast.Name object at 0x7da20c6a8190>, <ast.Name object at 0x7da20c6aa740>, <ast.BinOp object at 0x7da20c6a9f90>, <ast.Constant object at 0x7da20c6aad10>]]]]
if name[labels] begin[:]
variable[box] assign[=] call[name[fig].get_position, parameter[]]
call[name[fig].set_position, parameter[list[[<ast.Attribute object at 0x7da20c6aaaa0>, <ast.Attribute object at 0x7da2041d87c0>, <ast.Attribute object at 0x7da2041db2e0>, <ast.BinOp object at 0x7da2041db940>]]]]
call[name[fig].legend, parameter[name[labels]]] | keyword[def] identifier[_displayFeatures] ( identifier[self] , identifier[fig] , identifier[features] , identifier[minX] , identifier[maxX] , identifier[offsetAdjuster] ):
literal[string]
identifier[labels] =[]
keyword[for] identifier[index] , identifier[feature] keyword[in] identifier[enumerate] ( identifier[features] ):
identifier[fig] . identifier[plot] ([ identifier[offsetAdjuster] ( identifier[feature] . identifier[start] ),
identifier[offsetAdjuster] ( identifier[feature] . identifier[end] )],
[ identifier[index] *- literal[int] , identifier[index] *- literal[int] ], identifier[color] = identifier[feature] . identifier[color] ,
identifier[linewidth] = literal[int] )
identifier[labels] . identifier[append] ( identifier[feature] . identifier[legendLabel] ())
identifier[fig] . identifier[axis] ([ identifier[minX] , identifier[maxX] ,( identifier[len] ( identifier[features] )+ literal[int] )*- literal[int] , literal[int] ])
keyword[if] identifier[labels] :
identifier[box] = identifier[fig] . identifier[get_position] ()
identifier[fig] . identifier[set_position] ([ identifier[box] . identifier[x0] , identifier[box] . identifier[y0] ,
identifier[box] . identifier[width] , identifier[box] . identifier[height] * literal[int] ])
identifier[fig] . identifier[legend] ( identifier[labels] , identifier[loc] = literal[string] , identifier[bbox_to_anchor] =( literal[int] , literal[int] ),
identifier[fancybox] = keyword[True] , identifier[shadow] = keyword[True] , identifier[ncol] = literal[int] ) | def _displayFeatures(self, fig, features, minX, maxX, offsetAdjuster):
"""
Add the given C{features} to the figure in C{fig}.
@param fig: A matplotlib figure.
@param features: A C{FeatureList} instance.
@param minX: The smallest x coordinate.
@param maxX: The largest x coordinate.
@param offsetAdjuster: a function for adjusting feature X axis offsets
for plotting.
"""
labels = []
for (index, feature) in enumerate(features):
fig.plot([offsetAdjuster(feature.start), offsetAdjuster(feature.end)], [index * -0.2, index * -0.2], color=feature.color, linewidth=2)
labels.append(feature.legendLabel()) # depends on [control=['for'], data=[]]
# Note that minX and maxX do not need to be adjusted by the offset
# adjuster. They are the already-adjusted min/max values as
# computed in computePlotInfo in blast.py
fig.axis([minX, maxX, (len(features) + 1) * -0.2, 0.2])
if labels:
# Put a legend above the figure.
box = fig.get_position()
fig.set_position([box.x0, box.y0, box.width, box.height * 0.2])
fig.legend(labels, loc='lower center', bbox_to_anchor=(0.5, 1.4), fancybox=True, shadow=True, ncol=2) # depends on [control=['if'], data=[]] |
def set_output(self, outfile):
    ''' Set the output file; currently only useful with context-managers.

        Note:
            This function is experimental and may not last.
    '''
    previous = self._orig_stdout
    if previous:
        # Put the interpreter's original stdout back before swapping
        # in a writer bound to the new stream.
        sys.stdout = previous
    self._stream = outfile
    sys.stdout = _LineWriter(self, outfile, self.default)
constant[ Set's the output file, currently only useful with context-managers.
Note:
This function is experimental and may not last.
]
if name[self]._orig_stdout begin[:]
name[sys].stdout assign[=] name[self]._orig_stdout
name[self]._stream assign[=] name[outfile]
name[sys].stdout assign[=] call[name[_LineWriter], parameter[name[self], name[self]._stream, name[self].default]] | keyword[def] identifier[set_output] ( identifier[self] , identifier[outfile] ):
literal[string]
keyword[if] identifier[self] . identifier[_orig_stdout] :
identifier[sys] . identifier[stdout] = identifier[self] . identifier[_orig_stdout]
identifier[self] . identifier[_stream] = identifier[outfile]
identifier[sys] . identifier[stdout] = identifier[_LineWriter] ( identifier[self] , identifier[self] . identifier[_stream] , identifier[self] . identifier[default] ) | def set_output(self, outfile):
""" Set's the output file, currently only useful with context-managers.
Note:
This function is experimental and may not last.
"""
if self._orig_stdout: # restore Usted
sys.stdout = self._orig_stdout # depends on [control=['if'], data=[]]
self._stream = outfile
sys.stdout = _LineWriter(self, self._stream, self.default) |
def describe_target_groups(names=None,
                           target_group_arns=None,
                           load_balancer_arn=None,
                           region=None,
                           key=None,
                           keyid=None,
                           profile=None):
    '''
    Describes the specified target groups or all of your target groups. By default,
    all target groups are described. Alternatively, you can specify one of the
    following to filter the results: the ARN of the load balancer, the names of
    one or more target groups, or the ARNs of one or more target groups.

    Returns: list of target group dicts on success, False on a client error.

    CLI example:

    .. code-block:: bash

        salt myminion boto_elbv2.describe_target_groups
        salt myminion boto_elbv2.describe_target_groups target_group_name
        salt myminion boto_elbv2.describe_target_groups "[tg_name,tg_name]"
    '''
    if names and target_group_arns:
        raise SaltInvocationError('At most one of names or target_group_arns may '
                                  'be provided')
    # At most one of the two filters is set at this point; pick it.
    target_groups = names or target_group_arns or None

    tg_list = []
    if target_groups:
        if isinstance(target_groups, (str, six.text_type)):
            # A single name/ARN was passed as a plain string.
            tg_list.append(target_groups)
        else:
            tg_list.extend(target_groups)

    conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile)
    try:
        if names:
            ret = conn.describe_target_groups(Names=tg_list)['TargetGroups']
        elif target_group_arns:
            ret = conn.describe_target_groups(TargetGroupArns=tg_list)['TargetGroups']
        elif load_balancer_arn:
            ret = conn.describe_target_groups(LoadBalancerArn=load_balancer_arn)['TargetGroups']
        else:
            # No filter given: page through every target group, following
            # NextMarker until the service stops returning one.
            ret = []
            next_marker = ''
            while True:
                r = conn.describe_target_groups(Marker=next_marker)
                ret.extend(r['TargetGroups'])
                if 'NextMarker' in r:
                    next_marker = r['NextMarker']
                else:
                    break
        return ret if ret else []
    except ClientError as error:
        log.warning(error)
        return False
constant[
Describes the specified target groups or all of your target groups. By default,
all target groups are described. Alternatively, you can specify one of the
following to filter the results: the ARN of the load balancer, the names of
one or more target groups, or the ARNs of one or more target groups.
Returns: list
CLI example:
.. code-block:: bash
salt myminion boto_elbv2.describe_target_groups
salt myminion boto_elbv2.describe_target_groups target_group_name
salt myminion boto_elbv2.describe_target_groups "[tg_name,tg_name]"
]
if <ast.BoolOp object at 0x7da1b2044550> begin[:]
<ast.Raise object at 0x7da1b2046740>
if name[names] begin[:]
variable[target_groups] assign[=] name[names]
variable[tg_list] assign[=] list[[]]
if name[target_groups] begin[:]
if <ast.BoolOp object at 0x7da1b2046950> begin[:]
call[name[tg_list].append, parameter[name[target_groups]]]
variable[conn] assign[=] call[name[_get_conn], parameter[]]
<ast.Try object at 0x7da1b2045b70> | keyword[def] identifier[describe_target_groups] ( identifier[names] = keyword[None] ,
identifier[target_group_arns] = keyword[None] ,
identifier[load_balancer_arn] = keyword[None] ,
identifier[region] = keyword[None] ,
identifier[key] = keyword[None] ,
identifier[keyid] = keyword[None] ,
identifier[profile] = keyword[None] ):
literal[string]
keyword[if] identifier[names] keyword[and] identifier[target_group_arns] :
keyword[raise] identifier[SaltInvocationError] ( literal[string]
literal[string] )
keyword[if] identifier[names] :
identifier[target_groups] = identifier[names]
keyword[elif] identifier[target_group_arns] :
identifier[target_groups] = identifier[target_group_arns]
keyword[else] :
identifier[target_groups] = keyword[None]
identifier[tg_list] =[]
keyword[if] identifier[target_groups] :
keyword[if] identifier[isinstance] ( identifier[target_groups] , identifier[str] ) keyword[or] identifier[isinstance] ( identifier[target_groups] , identifier[six] . identifier[text_type] ):
identifier[tg_list] . identifier[append] ( identifier[target_groups] )
keyword[else] :
keyword[for] identifier[group] keyword[in] identifier[target_groups] :
identifier[tg_list] . identifier[append] ( identifier[group] )
identifier[conn] = identifier[_get_conn] ( identifier[region] = identifier[region] , identifier[key] = identifier[key] , identifier[keyid] = identifier[keyid] , identifier[profile] = identifier[profile] )
keyword[try] :
keyword[if] identifier[names] :
identifier[ret] = identifier[conn] . identifier[describe_target_groups] ( identifier[Names] = identifier[tg_list] )[ literal[string] ]
keyword[elif] identifier[target_group_arns] :
identifier[ret] = identifier[conn] . identifier[describe_target_groups] ( identifier[TargetGroupArns] = identifier[tg_list] )[ literal[string] ]
keyword[elif] identifier[load_balancer_arn] :
identifier[ret] = identifier[conn] . identifier[describe_target_groups] ( identifier[LoadBalancerArn] = identifier[load_balancer_arn] )[ literal[string] ]
keyword[else] :
identifier[ret] =[]
identifier[next_marker] = literal[string]
keyword[while] keyword[True] :
identifier[r] = identifier[conn] . identifier[describe_target_groups] ( identifier[Marker] = identifier[next_marker] )
keyword[for] identifier[alb] keyword[in] identifier[r] [ literal[string] ]:
identifier[ret] . identifier[append] ( identifier[alb] )
keyword[if] literal[string] keyword[in] identifier[r] :
identifier[next_marker] = identifier[r] [ literal[string] ]
keyword[else] :
keyword[break]
keyword[return] identifier[ret] keyword[if] identifier[ret] keyword[else] []
keyword[except] identifier[ClientError] keyword[as] identifier[error] :
identifier[log] . identifier[warning] ( identifier[error] )
keyword[return] keyword[False] | def describe_target_groups(names=None, target_group_arns=None, load_balancer_arn=None, region=None, key=None, keyid=None, profile=None):
"""
Describes the specified target groups or all of your target groups. By default,
all target groups are described. Alternatively, you can specify one of the
following to filter the results: the ARN of the load balancer, the names of
one or more target groups, or the ARNs of one or more target groups.
Returns: list
CLI example:
.. code-block:: bash
salt myminion boto_elbv2.describe_target_groups
salt myminion boto_elbv2.describe_target_groups target_group_name
salt myminion boto_elbv2.describe_target_groups "[tg_name,tg_name]"
"""
if names and target_group_arns:
raise SaltInvocationError('At most one of names or target_group_arns may be provided') # depends on [control=['if'], data=[]]
if names:
target_groups = names # depends on [control=['if'], data=[]]
elif target_group_arns:
target_groups = target_group_arns # depends on [control=['if'], data=[]]
else:
target_groups = None
tg_list = []
if target_groups:
if isinstance(target_groups, str) or isinstance(target_groups, six.text_type):
tg_list.append(target_groups) # depends on [control=['if'], data=[]]
else:
for group in target_groups:
tg_list.append(group) # depends on [control=['for'], data=['group']] # depends on [control=['if'], data=[]]
conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile)
try:
if names:
ret = conn.describe_target_groups(Names=tg_list)['TargetGroups'] # depends on [control=['if'], data=[]]
elif target_group_arns:
ret = conn.describe_target_groups(TargetGroupArns=tg_list)['TargetGroups'] # depends on [control=['if'], data=[]]
elif load_balancer_arn:
ret = conn.describe_target_groups(LoadBalancerArn=load_balancer_arn)['TargetGroups'] # depends on [control=['if'], data=[]]
else:
ret = []
next_marker = ''
while True:
r = conn.describe_target_groups(Marker=next_marker)
for alb in r['TargetGroups']:
ret.append(alb) # depends on [control=['for'], data=['alb']]
if 'NextMarker' in r:
next_marker = r['NextMarker'] # depends on [control=['if'], data=['r']]
else:
break # depends on [control=['while'], data=[]]
return ret if ret else [] # depends on [control=['try'], data=[]]
except ClientError as error:
log.warning(error)
return False # depends on [control=['except'], data=['error']] |
def get_objects_dex(self):
    """
    Yields all dex objects including their Analysis objects

    :returns: tuple of (sha256, DalvikVMFormat, Analysis)
    """
    # TODO: there is no variant like get_objects_apk
    vms = self.analyzed_vms
    for digest, dex in self.analyzed_dex.items():
        yield digest, dex, vms[digest]
constant[
Yields all dex objects inclduing their Analysis objects
:returns: tuple of (sha256, DalvikVMFormat, Analysis)
]
for taget[tuple[[<ast.Name object at 0x7da20c6c6ce0>, <ast.Name object at 0x7da2045644c0>]]] in starred[call[name[self].analyzed_dex.items, parameter[]]] begin[:]
<ast.Yield object at 0x7da204566380> | keyword[def] identifier[get_objects_dex] ( identifier[self] ):
literal[string]
keyword[for] identifier[digest] , identifier[d] keyword[in] identifier[self] . identifier[analyzed_dex] . identifier[items] ():
keyword[yield] identifier[digest] , identifier[d] , identifier[self] . identifier[analyzed_vms] [ identifier[digest] ] | def get_objects_dex(self):
"""
Yields all dex objects inclduing their Analysis objects
:returns: tuple of (sha256, DalvikVMFormat, Analysis)
"""
# TODO: there is no variant like get_objects_apk
for (digest, d) in self.analyzed_dex.items():
yield (digest, d, self.analyzed_vms[digest]) # depends on [control=['for'], data=[]] |
def set_led_brightness(self, brightness):
    """Set the LED brightness for the current group/button."""
    # Property 0x07 is the LED-brightness slot; build the SET message
    # and hand it to the device's send path with the usual callback.
    cmd = self._create_set_property_msg("_led_brightness", 0x07, brightness)
    self._send_method(cmd, self._property_set)
constant[Set the LED brightness for the current group/button.]
variable[set_cmd] assign[=] call[name[self]._create_set_property_msg, parameter[constant[_led_brightness], constant[7], name[brightness]]]
call[name[self]._send_method, parameter[name[set_cmd], name[self]._property_set]] | keyword[def] identifier[set_led_brightness] ( identifier[self] , identifier[brightness] ):
literal[string]
identifier[set_cmd] = identifier[self] . identifier[_create_set_property_msg] ( literal[string] , literal[int] ,
identifier[brightness] )
identifier[self] . identifier[_send_method] ( identifier[set_cmd] , identifier[self] . identifier[_property_set] ) | def set_led_brightness(self, brightness):
"""Set the LED brightness for the current group/button."""
set_cmd = self._create_set_property_msg('_led_brightness', 7, brightness)
self._send_method(set_cmd, self._property_set) |
def loadmat(path):
    r"""
    Load a matlab data file.

    Parameters
    ----------
    path : string
        Path to the mat file from the data folder, without the .mat extension.

    Returns
    -------
    data : dict
        dictionary with variable names as keys, and loaded matrices as
        values.

    Examples
    --------
    >>> from pygsp import utils
    >>> data = utils.loadmat('pointclouds/bunny')
    >>> data['bunny'].shape
    (2503, 3)

    """
    # Read the .mat file shipped inside the pygsp package data, then hand
    # the raw bytes to scipy via an in-memory buffer.
    raw = pkgutil.get_data('pygsp', 'data/' + path + '.mat')
    return scipy.io.loadmat(io.BytesIO(raw))
constant[
Load a matlab data file.
Parameters
----------
path : string
Path to the mat file from the data folder, without the .mat extension.
Returns
-------
data : dict
dictionary with variable names as keys, and loaded matrices as
values.
Examples
--------
>>> from pygsp import utils
>>> data = utils.loadmat('pointclouds/bunny')
>>> data['bunny'].shape
(2503, 3)
]
variable[data] assign[=] call[name[pkgutil].get_data, parameter[constant[pygsp], binary_operation[binary_operation[constant[data/] + name[path]] + constant[.mat]]]]
variable[data] assign[=] call[name[io].BytesIO, parameter[name[data]]]
return[call[name[scipy].io.loadmat, parameter[name[data]]]] | keyword[def] identifier[loadmat] ( identifier[path] ):
literal[string]
identifier[data] = identifier[pkgutil] . identifier[get_data] ( literal[string] , literal[string] + identifier[path] + literal[string] )
identifier[data] = identifier[io] . identifier[BytesIO] ( identifier[data] )
keyword[return] identifier[scipy] . identifier[io] . identifier[loadmat] ( identifier[data] ) | def loadmat(path):
"""
Load a matlab data file.
Parameters
----------
path : string
Path to the mat file from the data folder, without the .mat extension.
Returns
-------
data : dict
dictionary with variable names as keys, and loaded matrices as
values.
Examples
--------
>>> from pygsp import utils
>>> data = utils.loadmat('pointclouds/bunny')
>>> data['bunny'].shape
(2503, 3)
"""
data = pkgutil.get_data('pygsp', 'data/' + path + '.mat')
data = io.BytesIO(data)
return scipy.io.loadmat(data) |
def set_gateway(self, gateway):
    '''
    :param crabpy.gateway.crab.CrabGateway gateway: Gateway to use.
    '''
    # Propagate the gateway to this object and to its nested gewest.
    self.gateway = self.gewest.gateway = gateway
constant[
:param crabpy.gateway.crab.CrabGateway gateway: Gateway to use.
]
name[self].gateway assign[=] name[gateway]
name[self].gewest.gateway assign[=] name[gateway] | keyword[def] identifier[set_gateway] ( identifier[self] , identifier[gateway] ):
literal[string]
identifier[self] . identifier[gateway] = identifier[gateway]
identifier[self] . identifier[gewest] . identifier[gateway] = identifier[gateway] | def set_gateway(self, gateway):
"""
:param crabpy.gateway.crab.CrabGateway gateway: Gateway to use.
"""
self.gateway = gateway
self.gewest.gateway = gateway |
def proj(vec, vec_onto):
    """ Vector projection.

    Calculated as:

    .. math::

        \\mathsf{vec\\_onto} * \\frac{\\mathsf{vec}\\cdot\\mathsf{vec\\_onto}}
        {\\mathsf{vec\\_onto}\\cdot\\mathsf{vec\\_onto}}

    Parameters
    ----------
    vec
        length-R |npfloat_| --
        Vector to project

    vec_onto
        length-R |npfloat_| --
        Vector onto which `vec` is to be projected

    Returns
    -------
    proj_vec
        length-R |npfloat_| --
        Projection of `vec` onto `vec_onto`

    Raises
    ------
    ValueError
        If either argument is not one-dimensional, or if their lengths
        differ.
    """

    # Imports
    import numpy as np

    # Ensure one-dimensional inputs of matching length.
    if not len(vec.shape) == 1:
        raise ValueError("'vec' is not a vector")
    ## end if
    if not len(vec_onto.shape) == 1:
        raise ValueError("'vec_onto' is not a vector")
    ## end if
    if not vec.shape[0] == vec_onto.shape[0]:
        raise ValueError("Shape mismatch between vectors")
    ## end if

    # Scale vec_onto by (vec . vec_onto) / (vec_onto . vec_onto).
    # float(...) replaces the former np.float_(np.asscalar(...)):
    # np.asscalar was removed in NumPy 1.23 and np.float_ in NumPy 2.0,
    # so the original formulation crashes on current NumPy.
    proj_vec = float(np.dot(vec, vec_onto)) / \
            float(np.dot(vec_onto, vec_onto)) * vec_onto
    return proj_vec
constant[ Vector projection.
Calculated as:
.. math::
\mathsf{vec\_onto} * \frac{\mathsf{vec}\cdot\mathsf{vec\_onto}}
{\mathsf{vec\_onto}\cdot\mathsf{vec\_onto}}
Parameters
----------
vec
length-R |npfloat_| --
Vector to project
vec_onto
length-R |npfloat_| --
Vector onto which `vec` is to be projected
Returns
-------
proj_vec
length-R |npfloat_| --
Projection of `vec` onto `vec_onto`
]
import module[numpy] as alias[np]
if <ast.UnaryOp object at 0x7da1b2538e20> begin[:]
<ast.Raise object at 0x7da1b2539000>
if <ast.UnaryOp object at 0x7da1b2538c40> begin[:]
<ast.Raise object at 0x7da1b2539d80>
if <ast.UnaryOp object at 0x7da1b253a0e0> begin[:]
<ast.Raise object at 0x7da1b2539570>
variable[proj_vec] assign[=] binary_operation[binary_operation[call[name[np].float_, parameter[call[name[np].asscalar, parameter[call[name[np].dot, parameter[name[vec].T, name[vec_onto]]]]]]] / call[name[np].float_, parameter[call[name[np].asscalar, parameter[call[name[np].dot, parameter[name[vec_onto].T, name[vec_onto]]]]]]]] * name[vec_onto]]
return[name[proj_vec]] | keyword[def] identifier[proj] ( identifier[vec] , identifier[vec_onto] ):
literal[string]
keyword[import] identifier[numpy] keyword[as] identifier[np]
keyword[if] keyword[not] identifier[len] ( identifier[vec] . identifier[shape] )== literal[int] :
keyword[raise] identifier[ValueError] ( literal[string] )
keyword[if] keyword[not] identifier[len] ( identifier[vec_onto] . identifier[shape] )== literal[int] :
keyword[raise] identifier[ValueError] ( literal[string] )
keyword[if] keyword[not] identifier[vec] . identifier[shape] [ literal[int] ]== identifier[vec_onto] . identifier[shape] [ literal[int] ]:
keyword[raise] identifier[ValueError] ( literal[string] )
identifier[proj_vec] = identifier[np] . identifier[float_] ( identifier[np] . identifier[asscalar] ( identifier[np] . identifier[dot] ( identifier[vec] . identifier[T] , identifier[vec_onto] )))/ identifier[np] . identifier[float_] ( identifier[np] . identifier[asscalar] ( identifier[np] . identifier[dot] ( identifier[vec_onto] . identifier[T] , identifier[vec_onto] )))* identifier[vec_onto]
keyword[return] identifier[proj_vec] | def proj(vec, vec_onto):
""" Vector projection.
Calculated as:
.. math::
\\mathsf{vec\\_onto} * \\frac{\\mathsf{vec}\\cdot\\mathsf{vec\\_onto}}
{\\mathsf{vec\\_onto}\\cdot\\mathsf{vec\\_onto}}
Parameters
----------
vec
length-R |npfloat_| --
Vector to project
vec_onto
length-R |npfloat_| --
Vector onto which `vec` is to be projected
Returns
-------
proj_vec
length-R |npfloat_| --
Projection of `vec` onto `vec_onto`
"""
# Imports
import numpy as np
# Ensure vectors
if not len(vec.shape) == 1:
raise ValueError("'vec' is not a vector") # depends on [control=['if'], data=[]]
## end if
if not len(vec_onto.shape) == 1:
raise ValueError("'vec_onto' is not a vector") # depends on [control=['if'], data=[]]
## end if
if not vec.shape[0] == vec_onto.shape[0]:
raise ValueError('Shape mismatch between vectors') # depends on [control=['if'], data=[]]
## end if
# Calculate the projection and return
proj_vec = np.float_(np.asscalar(np.dot(vec.T, vec_onto))) / np.float_(np.asscalar(np.dot(vec_onto.T, vec_onto))) * vec_onto
return proj_vec |
def _get_connection(self):
    """ Returns connection to sqlite db.

    Returns:
        connection to the sqlite db who stores mpr data.
    """
    if getattr(self, '_connection', None):
        logger.debug('Connection to sqlite db already exists. Using existing one.')
    else:
        # Translate the SQLAlchemy-style DSN into what apsw expects:
        # a bare 'sqlite://' means an in-memory database, otherwise
        # strip the scheme prefix to get the file path.
        dsn = ':memory:' if self._dsn == 'sqlite://' else self._dsn.replace('sqlite:///', '')
        logger.debug(
            'Creating new apsw connection.\n dsn: {}, config_dsn: {}'
            .format(dsn, self._dsn))
        self._connection = apsw.Connection(dsn)
    return self._connection
constant[ Returns connection to sqlite db.
Returns:
connection to the sqlite db who stores mpr data.
]
if call[name[getattr], parameter[name[self], constant[_connection], constant[None]]] begin[:]
call[name[logger].debug, parameter[constant[Connection to sqlite db already exists. Using existing one.]]]
return[name[self]._connection] | keyword[def] identifier[_get_connection] ( identifier[self] ):
literal[string]
keyword[if] identifier[getattr] ( identifier[self] , literal[string] , keyword[None] ):
identifier[logger] . identifier[debug] ( literal[string] )
keyword[else] :
identifier[dsn] = identifier[self] . identifier[_dsn]
keyword[if] identifier[dsn] == literal[string] :
identifier[dsn] = literal[string]
keyword[else] :
identifier[dsn] = identifier[dsn] . identifier[replace] ( literal[string] , literal[string] )
identifier[logger] . identifier[debug] (
literal[string]
. identifier[format] ( identifier[dsn] , identifier[self] . identifier[_dsn] ))
identifier[self] . identifier[_connection] = identifier[apsw] . identifier[Connection] ( identifier[dsn] )
keyword[return] identifier[self] . identifier[_connection] | def _get_connection(self):
""" Returns connection to sqlite db.
Returns:
connection to the sqlite db who stores mpr data.
"""
if getattr(self, '_connection', None):
logger.debug('Connection to sqlite db already exists. Using existing one.') # depends on [control=['if'], data=[]]
else:
dsn = self._dsn
if dsn == 'sqlite://':
dsn = ':memory:' # depends on [control=['if'], data=['dsn']]
else:
dsn = dsn.replace('sqlite:///', '')
logger.debug('Creating new apsw connection.\n dsn: {}, config_dsn: {}'.format(dsn, self._dsn))
self._connection = apsw.Connection(dsn)
return self._connection |
def save_as(self, new_filename):
        """
        Save our file with the name provided.

        Args:
            new_filename:  New name for the workbook file. String.

        Returns:
            Nothing.

        """
        # Serialize the in-memory datasource XML tree under the new name;
        # the xfile helper handles the actual file writing/packaging.
        xfile._save_file(self._filename, self._datasourceTree, new_filename)
constant[
Save our file with the name provided.
Args:
new_filename: New name for the workbook file. String.
Returns:
Nothing.
]
call[name[xfile]._save_file, parameter[name[self]._filename, name[self]._datasourceTree, name[new_filename]]] | keyword[def] identifier[save_as] ( identifier[self] , identifier[new_filename] ):
literal[string]
identifier[xfile] . identifier[_save_file] ( identifier[self] . identifier[_filename] , identifier[self] . identifier[_datasourceTree] , identifier[new_filename] ) | def save_as(self, new_filename):
"""
Save our file with the name provided.
Args:
new_filename: New name for the workbook file. String.
Returns:
Nothing.
"""
xfile._save_file(self._filename, self._datasourceTree, new_filename) |
def tagsInString_process(self, d_DICOM, astr, *args, **kwargs):
        """
        This method substitutes DICOM tags that are '%'-tagged
        in a string template with the actual tag lookup.

        For example, an output filename that is specified as the
        following string:

            %PatientAge-%PatientID-output.txt

        will be parsed to

            006Y-4412364-ouptut.txt

        It is also possible to apply certain permutations/functions
        to a tag. For example, a function is identified by an underscore
        prefixed and suffixed string as part of the DICOM tag. If
        found, this function is applied to the tag value. For example,

            %PatientAge-%_md5|4_PatientID-output.txt

        will apply an md5 hash to the PatientID and use the first 4
        characters:

            006Y-7f38-output.txt

        Returns a dict with 'status' (always True), 'b_tagsFound'
        (whether any '%' tag matched a real DICOM tag) and 'str_result'
        (the template with all matched tags substituted).
        """
        b_tagsFound = False
        str_replace = '' # The lookup/processed tag value
        l_tags = [] # The input string split by '%'
        l_tagsToSub = [] # Remove any noise etc from each tag
        l_funcTag = [] # a function/tag list
        l_args = [] # the 'args' of the function
        func = '' # the function to apply
        tag = '' # the tag in the funcTag combo
        chars = '' # the number of resultant chars from func
        # result to use
        if '%' in astr:
            # Everything after each '%' is a candidate "tag" chunk; the
            # chunk may carry a leading _func|arg_ decorator and trailing
            # literal text.
            l_tags = astr.split('%')[1:]
            # Find which tags (mangled) in string match actual tags
            l_tagsToSub = [i for i in d_DICOM['l_tagRaw'] if any(i in b for b in l_tags)]
            # Need to arrange l_tagsToSub in same order as l_tags
            # (sort key: index of the first chunk that contains the tag;
            # assumes every matched tag occurs in some chunk).
            l_tagsToSubSort = sorted(
                l_tagsToSub,
                key = lambda x: [i for i, s in enumerate(l_tags) if x in s][0]
            )
            for tag, func in zip(l_tagsToSubSort, l_tags):
                b_tagsFound = True
                str_replace = d_DICOM['d_dicomSimple'][tag]
                if 'md5' in func:
                    # _md5|N_: hash the tag value, optionally keeping only
                    # the first N hex characters.
                    str_replace = hashlib.md5(str_replace.encode('utf-8')).hexdigest()
                    l_funcTag = func.split('_')[1:]
                    func = l_funcTag[0]
                    l_args = func.split('|')
                    if len(l_args) > 1:
                        chars = l_args[1]
                        str_replace = str_replace[0:int(chars)]
                    # Remove the _func_ decorator from the template so only
                    # the bare %tag remains to be substituted below.
                    astr = astr.replace('_%s_' % func, '')
                if 'strmsk' in func:
                    # _strmsk|MASK_: overlay MASK onto the value character by
                    # character; '*' in the mask keeps the original character.
                    l_funcTag = func.split('_')[1:]
                    func = l_funcTag[0]
                    str_msk = func.split('|')[1]
                    l_n = []
                    for i, j in zip(list(str_replace), list(str_msk)):
                        if j == '*': l_n.append(i)
                        else: l_n.append(j)
                    str_replace = ''.join(l_n)
                    astr = astr.replace('_%s_' % func, '')
                if 'nospc' in func:
                    # _nospc|C_: collapse runs of non-alphanumerics and join
                    # the remaining words with C (empty string by default).
                    # pudb.set_trace()
                    l_funcTag = func.split('_')[1:]
                    func = l_funcTag[0]
                    l_args = func.split('|')
                    str_char = ''
                    if len(l_args) > 1:
                        str_char = l_args[1]
                    # strip out all non-alphnumeric chars and
                    # replace with space
                    str_replace = re.sub(r'\W+', ' ', str_replace)
                    # replace all spaces with str_char
                    str_replace = str_char.join(str_replace.split())
                    astr = astr.replace('_%s_' % func, '')

                astr = astr.replace('%' + tag, str_replace)

        return {
            'status': True,
            'b_tagsFound': b_tagsFound,
            'str_result': astr
        }
constant[
This method substitutes DICOM tags that are '%'-tagged
in a string template with the actual tag lookup.
For example, an output filename that is specified as the
following string:
%PatientAge-%PatientID-output.txt
will be parsed to
006Y-4412364-ouptut.txt
It is also possible to apply certain permutations/functions
to a tag. For example, a function is identified by an underscore
prefixed and suffixed string as part of the DICOM tag. If
found, this function is applied to the tag value. For example,
%PatientAge-%_md5|4_PatientID-output.txt
will apply an md5 hash to the PatientID and use the first 4
characters:
006Y-7f38-output.txt
]
variable[b_tagsFound] assign[=] constant[False]
variable[str_replace] assign[=] constant[]
variable[l_tags] assign[=] list[[]]
variable[l_tagsToSub] assign[=] list[[]]
variable[l_funcTag] assign[=] list[[]]
variable[l_args] assign[=] list[[]]
variable[func] assign[=] constant[]
variable[tag] assign[=] constant[]
variable[chars] assign[=] constant[]
if compare[constant[%] in name[astr]] begin[:]
variable[l_tags] assign[=] call[call[name[astr].split, parameter[constant[%]]]][<ast.Slice object at 0x7da2044c3340>]
variable[l_tagsToSub] assign[=] <ast.ListComp object at 0x7da2044c3b80>
variable[l_tagsToSubSort] assign[=] call[name[sorted], parameter[name[l_tagsToSub]]]
for taget[tuple[[<ast.Name object at 0x7da2044c0e80>, <ast.Name object at 0x7da2044c3ee0>]]] in starred[call[name[zip], parameter[name[l_tagsToSubSort], name[l_tags]]]] begin[:]
variable[b_tagsFound] assign[=] constant[True]
variable[str_replace] assign[=] call[call[name[d_DICOM]][constant[d_dicomSimple]]][name[tag]]
if compare[constant[md5] in name[func]] begin[:]
variable[str_replace] assign[=] call[call[name[hashlib].md5, parameter[call[name[str_replace].encode, parameter[constant[utf-8]]]]].hexdigest, parameter[]]
variable[l_funcTag] assign[=] call[call[name[func].split, parameter[constant[_]]]][<ast.Slice object at 0x7da207f9a980>]
variable[func] assign[=] call[name[l_funcTag]][constant[0]]
variable[l_args] assign[=] call[name[func].split, parameter[constant[|]]]
if compare[call[name[len], parameter[name[l_args]]] greater[>] constant[1]] begin[:]
variable[chars] assign[=] call[name[l_args]][constant[1]]
variable[str_replace] assign[=] call[name[str_replace]][<ast.Slice object at 0x7da18f810040>]
variable[astr] assign[=] call[name[astr].replace, parameter[binary_operation[constant[_%s_] <ast.Mod object at 0x7da2590d6920> name[func]], constant[]]]
if compare[constant[strmsk] in name[func]] begin[:]
variable[l_funcTag] assign[=] call[call[name[func].split, parameter[constant[_]]]][<ast.Slice object at 0x7da18f8109d0>]
variable[func] assign[=] call[name[l_funcTag]][constant[0]]
variable[str_msk] assign[=] call[call[name[func].split, parameter[constant[|]]]][constant[1]]
variable[l_n] assign[=] list[[]]
for taget[tuple[[<ast.Name object at 0x7da20e961510>, <ast.Name object at 0x7da20e9626b0>]]] in starred[call[name[zip], parameter[call[name[list], parameter[name[str_replace]]], call[name[list], parameter[name[str_msk]]]]]] begin[:]
if compare[name[j] equal[==] constant[*]] begin[:]
call[name[l_n].append, parameter[name[i]]]
variable[str_replace] assign[=] call[constant[].join, parameter[name[l_n]]]
variable[astr] assign[=] call[name[astr].replace, parameter[binary_operation[constant[_%s_] <ast.Mod object at 0x7da2590d6920> name[func]], constant[]]]
if compare[constant[nospc] in name[func]] begin[:]
variable[l_funcTag] assign[=] call[call[name[func].split, parameter[constant[_]]]][<ast.Slice object at 0x7da20c6e6770>]
variable[func] assign[=] call[name[l_funcTag]][constant[0]]
variable[l_args] assign[=] call[name[func].split, parameter[constant[|]]]
variable[str_char] assign[=] constant[]
if compare[call[name[len], parameter[name[l_args]]] greater[>] constant[1]] begin[:]
variable[str_char] assign[=] call[name[l_args]][constant[1]]
variable[str_replace] assign[=] call[name[re].sub, parameter[constant[\W+], constant[ ], name[str_replace]]]
variable[str_replace] assign[=] call[name[str_char].join, parameter[call[name[str_replace].split, parameter[]]]]
variable[astr] assign[=] call[name[astr].replace, parameter[binary_operation[constant[_%s_] <ast.Mod object at 0x7da2590d6920> name[func]], constant[]]]
variable[astr] assign[=] call[name[astr].replace, parameter[binary_operation[constant[%] + name[tag]], name[str_replace]]]
return[dictionary[[<ast.Constant object at 0x7da204565ff0>, <ast.Constant object at 0x7da2045675e0>, <ast.Constant object at 0x7da204565420>], [<ast.Constant object at 0x7da204564d00>, <ast.Name object at 0x7da204567d00>, <ast.Name object at 0x7da204565600>]]] | keyword[def] identifier[tagsInString_process] ( identifier[self] , identifier[d_DICOM] , identifier[astr] ,* identifier[args] ,** identifier[kwargs] ):
literal[string]
identifier[b_tagsFound] = keyword[False]
identifier[str_replace] = literal[string]
identifier[l_tags] =[]
identifier[l_tagsToSub] =[]
identifier[l_funcTag] =[]
identifier[l_args] =[]
identifier[func] = literal[string]
identifier[tag] = literal[string]
identifier[chars] = literal[string]
keyword[if] literal[string] keyword[in] identifier[astr] :
identifier[l_tags] = identifier[astr] . identifier[split] ( literal[string] )[ literal[int] :]
identifier[l_tagsToSub] =[ identifier[i] keyword[for] identifier[i] keyword[in] identifier[d_DICOM] [ literal[string] ] keyword[if] identifier[any] ( identifier[i] keyword[in] identifier[b] keyword[for] identifier[b] keyword[in] identifier[l_tags] )]
identifier[l_tagsToSubSort] = identifier[sorted] (
identifier[l_tagsToSub] ,
identifier[key] = keyword[lambda] identifier[x] :[ identifier[i] keyword[for] identifier[i] , identifier[s] keyword[in] identifier[enumerate] ( identifier[l_tags] ) keyword[if] identifier[x] keyword[in] identifier[s] ][ literal[int] ]
)
keyword[for] identifier[tag] , identifier[func] keyword[in] identifier[zip] ( identifier[l_tagsToSubSort] , identifier[l_tags] ):
identifier[b_tagsFound] = keyword[True]
identifier[str_replace] = identifier[d_DICOM] [ literal[string] ][ identifier[tag] ]
keyword[if] literal[string] keyword[in] identifier[func] :
identifier[str_replace] = identifier[hashlib] . identifier[md5] ( identifier[str_replace] . identifier[encode] ( literal[string] )). identifier[hexdigest] ()
identifier[l_funcTag] = identifier[func] . identifier[split] ( literal[string] )[ literal[int] :]
identifier[func] = identifier[l_funcTag] [ literal[int] ]
identifier[l_args] = identifier[func] . identifier[split] ( literal[string] )
keyword[if] identifier[len] ( identifier[l_args] )> literal[int] :
identifier[chars] = identifier[l_args] [ literal[int] ]
identifier[str_replace] = identifier[str_replace] [ literal[int] : identifier[int] ( identifier[chars] )]
identifier[astr] = identifier[astr] . identifier[replace] ( literal[string] % identifier[func] , literal[string] )
keyword[if] literal[string] keyword[in] identifier[func] :
identifier[l_funcTag] = identifier[func] . identifier[split] ( literal[string] )[ literal[int] :]
identifier[func] = identifier[l_funcTag] [ literal[int] ]
identifier[str_msk] = identifier[func] . identifier[split] ( literal[string] )[ literal[int] ]
identifier[l_n] =[]
keyword[for] identifier[i] , identifier[j] keyword[in] identifier[zip] ( identifier[list] ( identifier[str_replace] ), identifier[list] ( identifier[str_msk] )):
keyword[if] identifier[j] == literal[string] : identifier[l_n] . identifier[append] ( identifier[i] )
keyword[else] : identifier[l_n] . identifier[append] ( identifier[j] )
identifier[str_replace] = literal[string] . identifier[join] ( identifier[l_n] )
identifier[astr] = identifier[astr] . identifier[replace] ( literal[string] % identifier[func] , literal[string] )
keyword[if] literal[string] keyword[in] identifier[func] :
identifier[l_funcTag] = identifier[func] . identifier[split] ( literal[string] )[ literal[int] :]
identifier[func] = identifier[l_funcTag] [ literal[int] ]
identifier[l_args] = identifier[func] . identifier[split] ( literal[string] )
identifier[str_char] = literal[string]
keyword[if] identifier[len] ( identifier[l_args] )> literal[int] :
identifier[str_char] = identifier[l_args] [ literal[int] ]
identifier[str_replace] = identifier[re] . identifier[sub] ( literal[string] , literal[string] , identifier[str_replace] )
identifier[str_replace] = identifier[str_char] . identifier[join] ( identifier[str_replace] . identifier[split] ())
identifier[astr] = identifier[astr] . identifier[replace] ( literal[string] % identifier[func] , literal[string] )
identifier[astr] = identifier[astr] . identifier[replace] ( literal[string] + identifier[tag] , identifier[str_replace] )
keyword[return] {
literal[string] : keyword[True] ,
literal[string] : identifier[b_tagsFound] ,
literal[string] : identifier[astr]
} | def tagsInString_process(self, d_DICOM, astr, *args, **kwargs):
"""
This method substitutes DICOM tags that are '%'-tagged
in a string template with the actual tag lookup.
For example, an output filename that is specified as the
following string:
%PatientAge-%PatientID-output.txt
will be parsed to
006Y-4412364-ouptut.txt
It is also possible to apply certain permutations/functions
to a tag. For example, a function is identified by an underscore
prefixed and suffixed string as part of the DICOM tag. If
found, this function is applied to the tag value. For example,
%PatientAge-%_md5|4_PatientID-output.txt
will apply an md5 hash to the PatientID and use the first 4
characters:
006Y-7f38-output.txt
"""
b_tagsFound = False
str_replace = '' # The lookup/processed tag value
l_tags = [] # The input string split by '%'
l_tagsToSub = [] # Remove any noise etc from each tag
l_funcTag = [] # a function/tag list
l_args = [] # the 'args' of the function
func = '' # the function to apply
tag = '' # the tag in the funcTag combo
chars = '' # the number of resultant chars from func
# result to use
if '%' in astr:
l_tags = astr.split('%')[1:]
# Find which tags (mangled) in string match actual tags
l_tagsToSub = [i for i in d_DICOM['l_tagRaw'] if any((i in b for b in l_tags))]
# Need to arrange l_tagsToSub in same order as l_tags
l_tagsToSubSort = sorted(l_tagsToSub, key=lambda x: [i for (i, s) in enumerate(l_tags) if x in s][0])
for (tag, func) in zip(l_tagsToSubSort, l_tags):
b_tagsFound = True
str_replace = d_DICOM['d_dicomSimple'][tag]
if 'md5' in func:
str_replace = hashlib.md5(str_replace.encode('utf-8')).hexdigest()
l_funcTag = func.split('_')[1:]
func = l_funcTag[0]
l_args = func.split('|')
if len(l_args) > 1:
chars = l_args[1]
str_replace = str_replace[0:int(chars)] # depends on [control=['if'], data=[]]
astr = astr.replace('_%s_' % func, '') # depends on [control=['if'], data=['func']]
if 'strmsk' in func:
l_funcTag = func.split('_')[1:]
func = l_funcTag[0]
str_msk = func.split('|')[1]
l_n = []
for (i, j) in zip(list(str_replace), list(str_msk)):
if j == '*':
l_n.append(i) # depends on [control=['if'], data=[]]
else:
l_n.append(j) # depends on [control=['for'], data=[]]
str_replace = ''.join(l_n)
astr = astr.replace('_%s_' % func, '') # depends on [control=['if'], data=['func']]
if 'nospc' in func:
# pudb.set_trace()
l_funcTag = func.split('_')[1:]
func = l_funcTag[0]
l_args = func.split('|')
str_char = ''
if len(l_args) > 1:
str_char = l_args[1] # depends on [control=['if'], data=[]] # strip out all non-alphnumeric chars and
# replace with space
str_replace = re.sub('\\W+', ' ', str_replace)
# replace all spaces with str_char
str_replace = str_char.join(str_replace.split())
astr = astr.replace('_%s_' % func, '') # depends on [control=['if'], data=['func']]
astr = astr.replace('%' + tag, str_replace) # depends on [control=['for'], data=[]] # depends on [control=['if'], data=['astr']]
return {'status': True, 'b_tagsFound': b_tagsFound, 'str_result': astr} |
def _spark_predict(self, cls, X, *args, **kwargs):
"""Wraps a Scikit-learn Linear model's predict method to use with RDD
input.
Parameters
----------
cls : class object
The sklearn linear model's class to wrap.
Z : ArrayRDD
The distributed data to predict in a DictRDD.
Returns
-------
self: the wrapped class
"""
return X.map(lambda X: super(cls, self).predict(X, *args, **kwargs)) | def function[_spark_predict, parameter[self, cls, X]]:
constant[Wraps a Scikit-learn Linear model's predict method to use with RDD
input.
Parameters
----------
cls : class object
The sklearn linear model's class to wrap.
Z : ArrayRDD
The distributed data to predict in a DictRDD.
Returns
-------
self: the wrapped class
]
return[call[name[X].map, parameter[<ast.Lambda object at 0x7da18dc04fd0>]]] | keyword[def] identifier[_spark_predict] ( identifier[self] , identifier[cls] , identifier[X] ,* identifier[args] ,** identifier[kwargs] ):
literal[string]
keyword[return] identifier[X] . identifier[map] ( keyword[lambda] identifier[X] : identifier[super] ( identifier[cls] , identifier[self] ). identifier[predict] ( identifier[X] ,* identifier[args] ,** identifier[kwargs] )) | def _spark_predict(self, cls, X, *args, **kwargs):
"""Wraps a Scikit-learn Linear model's predict method to use with RDD
input.
Parameters
----------
cls : class object
The sklearn linear model's class to wrap.
Z : ArrayRDD
The distributed data to predict in a DictRDD.
Returns
-------
self: the wrapped class
"""
return X.map(lambda X: super(cls, self).predict(X, *args, **kwargs)) |
def get_plain_image_as_widget(self):
    """Return the current window contents as a QImage.

    Used for generating thumbnails; overlaid graphics are not
    included.
    """
    window_arr = self.getwin_array(order=self.rgb_order)
    return self._get_qimage(window_arr, self.qimg_fmt)
return image | def function[get_plain_image_as_widget, parameter[self]]:
constant[Used for generating thumbnails. Does not include overlaid
graphics.
]
variable[arr] assign[=] call[name[self].getwin_array, parameter[]]
variable[image] assign[=] call[name[self]._get_qimage, parameter[name[arr], name[self].qimg_fmt]]
return[name[image]] | keyword[def] identifier[get_plain_image_as_widget] ( identifier[self] ):
literal[string]
identifier[arr] = identifier[self] . identifier[getwin_array] ( identifier[order] = identifier[self] . identifier[rgb_order] )
identifier[image] = identifier[self] . identifier[_get_qimage] ( identifier[arr] , identifier[self] . identifier[qimg_fmt] )
keyword[return] identifier[image] | def get_plain_image_as_widget(self):
"""Used for generating thumbnails. Does not include overlaid
graphics.
"""
arr = self.getwin_array(order=self.rgb_order)
image = self._get_qimage(arr, self.qimg_fmt)
return image |
def ndiffs(x, alpha=0.05, test='kpss', max_d=2, **kwargs):
    """Estimate the ARIMA differencing term, ``d``.

    Repeatedly applies a stationarity test to successively differenced
    copies of the series, stopping as soon as the series is judged
    stationary or ``max_d`` differences have been taken.

    Parameters
    ----------
    x : array-like, shape=(n_samples, [n_features])
        The array (time series) to difference.

    alpha : float, optional (default=0.05)
        Level of the test; p-values below this threshold are deemed
        significant.

    test : str, optional (default='kpss')
        Type of unit root test of stationarity to use in order to
        test the stationarity of the time-series. One of
        ('kpss', 'adf', 'pp').

    max_d : int, optional (default=2)
        Maximum number of non-seasonal differences allowed. Must
        be a positive integer. The estimated value of ``d`` will not
        exceed ``max_d``.

    Returns
    -------
    d : int
        The estimated differencing term: the maximum value of ``d``
        such that ``d <= max_d`` and the time series is judged
        stationary. A constant series yields 0.

    References
    ----------
    .. [1] R's auto_arima ndiffs function: https://bit.ly/2Bu8CHN
    """
    if max_d <= 0:
        raise ValueError('max_d must be a positive integer')

    # Resolve the requested stationarity test and keep its decision fn
    should_diff = get_callable(test, VALID_TESTS)(alpha, **kwargs).should_diff

    series = column_or_1d(check_array(x, ensure_2d=False,
                                      force_all_finite=True, dtype=DTYPE))

    # Base case: a constant series needs no differencing
    d = 0
    if is_constant(series):
        return d

    # Initial test on the raw series
    pval, dodiff = should_diff(series)
    if np.isnan(pval):
        # Test inconclusive on the raw series
        return 0

    while dodiff and d < max_d:
        d += 1
        series = diff(series)
        if is_constant(series):
            return d
        pval, dodiff = should_diff(series)
        if np.isnan(pval):
            # Fall back to the last level with a valid p-value
            return d - 1

    # Reached max_d or the series was judged stationary
    return d
constant[Estimate ARIMA differencing term, ``d``.
Perform a test of stationarity for different levels of ``d`` to
estimate the number of differences required to make a given time
series stationary. Will select the maximum value of ``d`` for which
the time series is judged stationary by the statistical test.
Parameters
----------
x : array-like, shape=(n_samples, [n_features])
The array (time series) to difference.
alpha : float, optional (default=0.05)
Level of the test. This is the value above below which the P-value
will be deemed significant.
test : str, optional (default='kpss')
Type of unit root test of stationarity to use in order to
test the stationarity of the time-series. One of ('kpss', 'adf', 'pp')
max_d : int, optional (default=2)
Maximum number of non-seasonal differences allowed. Must
be a positive integer. The estimated value of ``d`` will not
exceed ``max_d``.
Returns
-------
d : int
The estimated differencing term. This is the maximum value of ``d``
such that ``d <= max_d`` and the time series is judged stationary.
If the time series is constant, will return 0.
References
----------
.. [1] R's auto_arima ndiffs function: https://bit.ly/2Bu8CHN
]
if compare[name[max_d] less_or_equal[<=] constant[0]] begin[:]
<ast.Raise object at 0x7da1b1e95330>
variable[testfunc] assign[=] call[call[name[get_callable], parameter[name[test], name[VALID_TESTS]]], parameter[name[alpha]]].should_diff
variable[x] assign[=] call[name[column_or_1d], parameter[call[name[check_array], parameter[name[x]]]]]
variable[d] assign[=] constant[0]
if call[name[is_constant], parameter[name[x]]] begin[:]
return[name[d]]
<ast.Tuple object at 0x7da1b1e97430> assign[=] call[name[testfunc], parameter[name[x]]]
if call[name[np].isnan, parameter[name[pval]]] begin[:]
return[constant[0]]
while <ast.BoolOp object at 0x7da1b1e952a0> begin[:]
<ast.AugAssign object at 0x7da1b1e94f70>
variable[x] assign[=] call[name[diff], parameter[name[x]]]
if call[name[is_constant], parameter[name[x]]] begin[:]
return[name[d]]
<ast.Tuple object at 0x7da1b1e326b0> assign[=] call[name[testfunc], parameter[name[x]]]
if call[name[np].isnan, parameter[name[pval]]] begin[:]
return[binary_operation[name[d] - constant[1]]]
return[name[d]] | keyword[def] identifier[ndiffs] ( identifier[x] , identifier[alpha] = literal[int] , identifier[test] = literal[string] , identifier[max_d] = literal[int] ,** identifier[kwargs] ):
literal[string]
keyword[if] identifier[max_d] <= literal[int] :
keyword[raise] identifier[ValueError] ( literal[string] )
identifier[testfunc] = identifier[get_callable] ( identifier[test] , identifier[VALID_TESTS] )( identifier[alpha] ,** identifier[kwargs] ). identifier[should_diff]
identifier[x] = identifier[column_or_1d] ( identifier[check_array] ( identifier[x] , identifier[ensure_2d] = keyword[False] ,
identifier[force_all_finite] = keyword[True] , identifier[dtype] = identifier[DTYPE] ))
identifier[d] = literal[int]
keyword[if] identifier[is_constant] ( identifier[x] ):
keyword[return] identifier[d]
identifier[pval] , identifier[dodiff] = identifier[testfunc] ( identifier[x] )
keyword[if] identifier[np] . identifier[isnan] ( identifier[pval] ):
keyword[return] literal[int]
keyword[while] identifier[dodiff] keyword[and] identifier[d] < identifier[max_d] :
identifier[d] += literal[int]
identifier[x] = identifier[diff] ( identifier[x] )
keyword[if] identifier[is_constant] ( identifier[x] ):
keyword[return] identifier[d]
identifier[pval] , identifier[dodiff] = identifier[testfunc] ( identifier[x] )
keyword[if] identifier[np] . identifier[isnan] ( identifier[pval] ):
keyword[return] identifier[d] - literal[int]
keyword[return] identifier[d] | def ndiffs(x, alpha=0.05, test='kpss', max_d=2, **kwargs):
"""Estimate ARIMA differencing term, ``d``.
Perform a test of stationarity for different levels of ``d`` to
estimate the number of differences required to make a given time
series stationary. Will select the maximum value of ``d`` for which
the time series is judged stationary by the statistical test.
Parameters
----------
x : array-like, shape=(n_samples, [n_features])
The array (time series) to difference.
alpha : float, optional (default=0.05)
Level of the test. This is the value above below which the P-value
will be deemed significant.
test : str, optional (default='kpss')
Type of unit root test of stationarity to use in order to
test the stationarity of the time-series. One of ('kpss', 'adf', 'pp')
max_d : int, optional (default=2)
Maximum number of non-seasonal differences allowed. Must
be a positive integer. The estimated value of ``d`` will not
exceed ``max_d``.
Returns
-------
d : int
The estimated differencing term. This is the maximum value of ``d``
such that ``d <= max_d`` and the time series is judged stationary.
If the time series is constant, will return 0.
References
----------
.. [1] R's auto_arima ndiffs function: https://bit.ly/2Bu8CHN
"""
if max_d <= 0:
raise ValueError('max_d must be a positive integer') # depends on [control=['if'], data=[]]
# get the test
testfunc = get_callable(test, VALID_TESTS)(alpha, **kwargs).should_diff
x = column_or_1d(check_array(x, ensure_2d=False, force_all_finite=True, dtype=DTYPE))
# base case, if constant return 0
d = 0
if is_constant(x):
return d # depends on [control=['if'], data=[]]
# get initial diff
(pval, dodiff) = testfunc(x)
# if initially NaN, return 0
if np.isnan(pval):
return 0 # (d is zero, but this is more explicit to the reader) # depends on [control=['if'], data=[]]
# Begin loop.
while dodiff and d < max_d:
d += 1
# do differencing
x = diff(x)
if is_constant(x):
return d # depends on [control=['if'], data=[]]
# get new result
(pval, dodiff) = testfunc(x)
# if it's NaN now, take the last non-null one
if np.isnan(pval):
return d - 1 # depends on [control=['if'], data=[]] # depends on [control=['while'], data=[]]
# when d >= max_d
return d |
def bprecess(ra0, dec0, mu_radec=None, parallax=None, rad_vel=None, epoch=None):
    """
    NAME:
          BPRECESS
    PURPOSE:
          Precess positions from J2000.0 (FK5) to B1950.0 (FK4)
    EXPLANATION:
          Calculates the mean place of a star at B1950.0 on the FK4 system from
          the mean place at J2000.0 on the FK5 system.
    CALLING SEQUENCE:
          bprecess, ra, dec, ra_1950, dec_1950, [ MU_RADEC = , PARALLAX =
                                          RAD_VEL =, EPOCH = ]
    INPUTS:
          RA,DEC - Input J2000 right ascension and declination in *degrees*.
                  Scalar or N element vector
    OUTPUTS:
          RA_1950, DEC_1950 - The corresponding B1950 right ascension and
                  declination in *degrees*.    Same number of elements as
                  RA,DEC but always double precision.
    OPTIONAL INPUT-OUTPUT KEYWORDS
          MU_RADEC - 2xN element double precision vector containing the proper
                    motion in seconds of arc per tropical *century* in right
                    ascension and declination.
          PARALLAX - N_element vector giving stellar parallax (seconds of arc)
          RAD_VEL  - N_element vector giving radial velocity in km/s
          The values of MU_RADEC, PARALLAX, and RADVEL will all be modified
          upon output to contain the values of these quantities in the
          B1950 system.  The parallax and radial velocity will have a very
          minor influence on the B1950 position.
          EPOCH - scalar giving epoch of original observations, default 2000.0d
              This keyword value is only used if the MU_RADEC keyword is not set.
    NOTES:
          The algorithm is taken from the Explanatory Supplement to the
          Astronomical Almanac 1992, page 186.
          Also see Aoki et al (1983), A&A, 128,263
          BPRECESS distinguishes between the following two cases:
          (1) The proper motion is known and non-zero
          (2) the proper motion is unknown or known to be exactly zero (i.e.
                  extragalactic radio sources).   In this case, the reverse of
                  the algorithm in Appendix 2 of Aoki et al. (1983) is used to
                  ensure that the output proper motion is  exactly zero. Better
                  precision can be achieved in this case by inputting the EPOCH
                  of the original observations.
          The error in using the IDL procedure PRECESS for converting between
          B1950 and J1950 can be up to 12",  mainly in right ascension.   If
          better accuracy than this is needed then BPRECESS should be used.
          An unsystematic comparison of BPRECESS with the IPAC precession
          routine (http://nedwww.ipac.caltech.edu/forms/calculator.html) always
          gives differences less than 0.15".
    EXAMPLE:
          The SAO2000 catalogue gives the J2000 position and proper motion for
          the star HD 119288.   Find the B1950 position.
          RA(2000) = 13h 42m 12.740s      Dec(2000) = 8d 23' 17.69''
          Mu(RA) = -.0257 s/yr      Mu(Dec) = -.090 ''/yr
          IDL> mu_radec = 100D* [ -15D*.0257, -0.090 ]
          IDL> ra = ten(13, 42, 12.740)*15.D
          IDL> dec = ten(8, 23, 17.69)
          IDL> bprecess, ra, dec, ra1950, dec1950, mu_radec = mu_radec
          IDL> print, adstring(ra1950, dec1950,2)
                  ===> 13h 39m 44.526s    +08d 38' 28.63"
    REVISION HISTORY:
          Written,    W. Landsman                October, 1992
          Vectorized, W. Landsman                February, 1994
          Treat case where proper motion not known or exactly zero  November 1994
          Handling of arrays larger than 32767   Lars L. Christensen, march, 1995
          Converted to IDL V5.0   W. Landsman   September 1997
          Fixed bug where A term not initialized for vector input
              W. Landsman        February 2000
          Converted to python             Sergey Koposov july 2010
    """
    # Promote scalar inputs to 1-element arrays; 'scal' records whether
    # to unwrap the result back to scalars before returning.
    scal = True
    if isinstance(ra0, ndarray):
        ra = ra0
        dec = dec0
        n = ra.size
        scal = False
    else:
        n = 1
        ra = array([ra0])
        dec = array([dec0])
    # Default radial velocity to zero; otherwise coerce to an array and
    # require one value per input position.
    if rad_vel is None:
        rad_vel = zeros(n)
    else:
        if not isinstance(rad_vel, ndarray):
            rad_vel = array([rad_vel],dtype=float)
        if rad_vel.size != n:
            raise Exception('ERROR - RAD_VEL keyword vector must be of the same length as RA and DEC')
    # Proper motion, if supplied, must provide (RA, Dec) rates for every
    # position (2*n elements). Multiplying by 1. forces a copy so the
    # in-place updates below do not alias the caller's array.
    if (mu_radec is not None):
        if (array(mu_radec).size != 2 * n):
            raise Exception('ERROR - MU_RADEC keyword (proper motion) be dimensioned (2,' + strtrim(n, 2) + ')')
        mu_radec = mu_radec * 1.
    if parallax is None:
        parallax = zeros(n)
    else:
        if not isinstance(parallax, ndarray):
            parallax = array([parallax],dtype=float)
    # Epoch of the original observations; only used when mu_radec is None.
    if epoch is None:
        epoch = 2000.0e0
    # NOTE(review): 'radeg' appears unused below (deg2rad/rad2deg are used
    # instead); kept for fidelity with the IDL original.
    radeg = 180.e0 / pi
    # Convert arc seconds to radians.
    sec_to_radian = lambda x : deg2rad(x/3600.)
    # 6x6 transformation matrix for the position+velocity 6-vector
    # (per the docstring: Explanatory Supplement 1992, p.186).
    m = array([array([+0.9999256795e0, -0.0111814828e0, -0.0048590040e0, -0.000551e0, -0.238560e0, +0.435730e0]),
               array([+0.0111814828e0, +0.9999374849e0, -0.0000271557e0, +0.238509e0, -0.002667e0, -0.008541e0]),
               array([+0.0048590039e0, -0.0000271771e0, +0.9999881946e0, -0.435614e0, +0.012254e0, +0.002117e0]),
               array([-0.00000242389840e0, +0.00000002710544e0, +0.00000001177742e0, +0.99990432e0, -0.01118145e0, -0.00485852e0]),
               array([-0.00000002710544e0, -0.00000242392702e0, +0.00000000006585e0, +0.01118145e0, +0.99991613e0, -0.00002716e0]),
               array([-0.00000001177742e0, +0.00000000006585e0, -0.00000242404995e0, +0.00485852e0, -0.00002717e0, +0.99996684e0])])
    a_dot = 1e-3 * array([1.244e0, -1.579e0, -0.660e0]) #in arc seconds per century
    # Precompute trig of the input coordinates (radians).
    ra_rad = deg2rad(ra)
    dec_rad = deg2rad(dec)
    cosra = cos(ra_rad)
    sinra = sin(ra_rad)
    cosdec = cos(dec_rad)
    sindec = sin(dec_rad)
    # Output accumulators (radians until the final rad2deg).
    dec_1950 = dec * 0.
    ra_1950 = ra * 0.
    for i in range(n):
        # Following statement moved inside loop in Feb 2000.
        # E-term of aberration vector (see the E-terms comment below).
        a = 1e-6 * array([-1.62557e0, -0.31919e0, -0.13843e0]) #in radians
        # Unit position vector for source i.
        r0 = array([cosra[i] * cosdec[i], sinra[i] * cosdec[i], sindec[i]])
        if (mu_radec is not None):
            mu_a = mu_radec[i,0]
            mu_d = mu_radec[i,1]
            # Velocity vector from proper motion plus the radial-velocity
            # / parallax contribution (21.095 scales km/s * arcsec).
            r0_dot = array([-mu_a * sinra[i] * cosdec[i] - mu_d * cosra[i] * sindec[i], mu_a * cosra[i] * cosdec[i] - mu_d * sinra[i] * sindec[i], mu_d * cosdec[i]]) + 21.095e0 * rad_vel[i] * parallax[i] * r0
        else:
            r0_dot = array([0.0e0, 0.0e0, 0.0e0])
        # Apply the 6x6 transform to the (position, velocity) 6-vector.
        r_0 = concatenate((r0, r0_dot))
        r_1 = transpose(dot(transpose(m), transpose(r_0)))
        # Include the effects of the E-terms of aberration to form r and r_dot.
        r1 = r_1[0:3]
        r1_dot = r_1[3:6]
        if mu_radec is None:
            # Unknown proper motion: propagate position from the stated
            # epoch back to 1950 (rates are per *century*, hence /100).
            r1 = r1 + sec_to_radian ( r1_dot * (epoch - 1950.0e0) / 100. )
            a = a + sec_to_radian ( a_dot * (epoch - 1950.0e0) / 100. )
        x1 = r_1[0]   ;   y1 = r_1[1]    ;   z1 = r_1[2]
        rmag = sqrt(x1 ** 2 + y1 ** 2 + z1 ** 2)
        s1 = r1 / rmag    ;   s1_dot = r1_dot / rmag
        s = s1
        # Iteratively remove the E-term component along s (3 fixed passes).
        for j in arange(0, 3):
            r = s1 + a - ((s * a).sum()) * s
            s = r / rmag
        x = r[0]          ;   y = r[1]     ;   z = r[2]
        r2 = x ** 2 + y ** 2 + z ** 2
        rmag = sqrt(r2)
        if mu_radec is not None:
            # Convert the corrected velocity vector back to proper-motion
            # rates in RA and Dec (written back in place).
            r_dot = s1_dot + a_dot - ((s * a_dot).sum()) * s
            x_dot = r_dot[0]  ;   y_dot = r_dot[1]  ;   z_dot = r_dot[2]
            mu_radec[i,0] = (x * y_dot - y * x_dot) / (x ** 2 + y ** 2)
            mu_radec[i,1] = (z_dot * (x ** 2 + y ** 2) - z * (x * x_dot + y * y_dot)) / (r2 * sqrt(x ** 2 + y ** 2))
        dec_1950[i] = arcsin(z / rmag)
        ra_1950[i] = arctan2(y, x)
        # NOTE(review): if parallax[i] > 0 while mu_radec is None, x_dot /
        # y_dot / z_dot were never assigned in this iteration and the next
        # line raises NameError — confirm callers always pass mu_radec
        # together with a positive parallax.
        if parallax[i] > 0.:
            rad_vel[i] = (x * x_dot + y * y_dot + z * z_dot) / (21.095 * parallax[i] * rmag)
            parallax[i] = parallax[i] / rmag
    # Wrap negative right ascensions into [0, 2*pi).
    neg = (ra_1950 < 0)
    if neg.any() > 0:
        ra_1950[neg] = ra_1950[neg] + 2.e0 * pi
    # Convert results from radians to degrees.
    ra_1950 = rad2deg(ra_1950)
    dec_1950 = rad2deg(dec_1950)
    # Make output scalar if input was scalar
    if scal:
        return ra_1950[0],dec_1950[0]
    else:
        return ra_1950, dec_1950
constant[
NAME:
BPRECESS
PURPOSE:
Precess positions from J2000.0 (FK5) to B1950.0 (FK4)
EXPLANATION:
Calculates the mean place of a star at B1950.0 on the FK4 system from
the mean place at J2000.0 on the FK5 system.
CALLING SEQUENCE:
bprecess, ra, dec, ra_1950, dec_1950, [ MU_RADEC = , PARALLAX =
RAD_VEL =, EPOCH = ]
INPUTS:
RA,DEC - Input J2000 right ascension and declination in *degrees*.
Scalar or N element vector
OUTPUTS:
RA_1950, DEC_1950 - The corresponding B1950 right ascension and
declination in *degrees*. Same number of elements as
RA,DEC but always double precision.
OPTIONAL INPUT-OUTPUT KEYWORDS
MU_RADEC - 2xN element double precision vector containing the proper
motion in seconds of arc per tropical *century* in right
ascension and declination.
PARALLAX - N_element vector giving stellar parallax (seconds of arc)
RAD_VEL - N_element vector giving radial velocity in km/s
The values of MU_RADEC, PARALLAX, and RADVEL will all be modified
upon output to contain the values of these quantities in the
B1950 system. The parallax and radial velocity will have a very
minor influence on the B1950 position.
EPOCH - scalar giving epoch of original observations, default 2000.0d
This keyword value is only used if the MU_RADEC keyword is not set.
NOTES:
The algorithm is taken from the Explanatory Supplement to the
Astronomical Almanac 1992, page 186.
Also see Aoki et al (1983), A&A, 128,263
BPRECESS distinguishes between the following two cases:
(1) The proper motion is known and non-zero
(2) the proper motion is unknown or known to be exactly zero (i.e.
extragalactic radio sources). In this case, the reverse of
the algorithm in Appendix 2 of Aoki et al. (1983) is used to
ensure that the output proper motion is exactly zero. Better
precision can be achieved in this case by inputting the EPOCH
of the original observations.
The error in using the IDL procedure PRECESS for converting between
B1950 and J1950 can be up to 12", mainly in right ascension. If
better accuracy than this is needed then BPRECESS should be used.
An unsystematic comparison of BPRECESS with the IPAC precession
routine (http://nedwww.ipac.caltech.edu/forms/calculator.html) always
gives differences less than 0.15".
EXAMPLE:
The SAO2000 catalogue gives the J2000 position and proper motion for
the star HD 119288. Find the B1950 position.
RA(2000) = 13h 42m 12.740s Dec(2000) = 8d 23' 17.69''
Mu(RA) = -.0257 s/yr Mu(Dec) = -.090 ''/yr
IDL> mu_radec = 100D* [ -15D*.0257, -0.090 ]
IDL> ra = ten(13, 42, 12.740)*15.D
IDL> dec = ten(8, 23, 17.69)
IDL> bprecess, ra, dec, ra1950, dec1950, mu_radec = mu_radec
IDL> print, adstring(ra1950, dec1950,2)
===> 13h 39m 44.526s +08d 38' 28.63"
REVISION HISTORY:
Written, W. Landsman October, 1992
Vectorized, W. Landsman February, 1994
Treat case where proper motion not known or exactly zero November 1994
Handling of arrays larger than 32767 Lars L. Christensen, march, 1995
Converted to IDL V5.0 W. Landsman September 1997
Fixed bug where A term not initialized for vector input
W. Landsman February 2000
Converted to python Sergey Koposov july 2010
]
variable[scal] assign[=] constant[True]
if call[name[isinstance], parameter[name[ra0], name[ndarray]]] begin[:]
variable[ra] assign[=] name[ra0]
variable[dec] assign[=] name[dec0]
variable[n] assign[=] name[ra].size
variable[scal] assign[=] constant[False]
if compare[name[rad_vel] is constant[None]] begin[:]
variable[rad_vel] assign[=] call[name[zeros], parameter[name[n]]]
if compare[name[mu_radec] is_not constant[None]] begin[:]
if compare[call[name[array], parameter[name[mu_radec]]].size not_equal[!=] binary_operation[constant[2] * name[n]]] begin[:]
<ast.Raise object at 0x7da204345480>
variable[mu_radec] assign[=] binary_operation[name[mu_radec] * constant[1.0]]
if compare[name[parallax] is constant[None]] begin[:]
variable[parallax] assign[=] call[name[zeros], parameter[name[n]]]
if compare[name[epoch] is constant[None]] begin[:]
variable[epoch] assign[=] constant[2000.0]
variable[radeg] assign[=] binary_operation[constant[180.0] / name[pi]]
variable[sec_to_radian] assign[=] <ast.Lambda object at 0x7da2043469e0>
variable[m] assign[=] call[name[array], parameter[list[[<ast.Call object at 0x7da204347880>, <ast.Call object at 0x7da204345330>, <ast.Call object at 0x7da204346a70>, <ast.Call object at 0x7da204345570>, <ast.Call object at 0x7da204345ff0>, <ast.Call object at 0x7da204345f00>]]]]
variable[a_dot] assign[=] binary_operation[constant[0.001] * call[name[array], parameter[list[[<ast.Constant object at 0x7da204344820>, <ast.UnaryOp object at 0x7da204346620>, <ast.UnaryOp object at 0x7da2043472b0>]]]]]
variable[ra_rad] assign[=] call[name[deg2rad], parameter[name[ra]]]
variable[dec_rad] assign[=] call[name[deg2rad], parameter[name[dec]]]
variable[cosra] assign[=] call[name[cos], parameter[name[ra_rad]]]
variable[sinra] assign[=] call[name[sin], parameter[name[ra_rad]]]
variable[cosdec] assign[=] call[name[cos], parameter[name[dec_rad]]]
variable[sindec] assign[=] call[name[sin], parameter[name[dec_rad]]]
variable[dec_1950] assign[=] binary_operation[name[dec] * constant[0.0]]
variable[ra_1950] assign[=] binary_operation[name[ra] * constant[0.0]]
for taget[name[i]] in starred[call[name[range], parameter[name[n]]]] begin[:]
variable[a] assign[=] binary_operation[constant[1e-06] * call[name[array], parameter[list[[<ast.UnaryOp object at 0x7da204345e10>, <ast.UnaryOp object at 0x7da2043466e0>, <ast.UnaryOp object at 0x7da204346ad0>]]]]]
variable[r0] assign[=] call[name[array], parameter[list[[<ast.BinOp object at 0x7da204346320>, <ast.BinOp object at 0x7da2047ebfd0>, <ast.Subscript object at 0x7da2047ea620>]]]]
if compare[name[mu_radec] is_not constant[None]] begin[:]
variable[mu_a] assign[=] call[name[mu_radec]][tuple[[<ast.Name object at 0x7da2047e8d30>, <ast.Constant object at 0x7da2047ea440>]]]
variable[mu_d] assign[=] call[name[mu_radec]][tuple[[<ast.Name object at 0x7da2047e89d0>, <ast.Constant object at 0x7da2047e86d0>]]]
variable[r0_dot] assign[=] binary_operation[call[name[array], parameter[list[[<ast.BinOp object at 0x7da2047e9300>, <ast.BinOp object at 0x7da2047eaad0>, <ast.BinOp object at 0x7da2047ea4d0>]]]] + binary_operation[binary_operation[binary_operation[constant[21.095] * call[name[rad_vel]][name[i]]] * call[name[parallax]][name[i]]] * name[r0]]]
variable[r_0] assign[=] call[name[concatenate], parameter[tuple[[<ast.Name object at 0x7da2047eac20>, <ast.Name object at 0x7da2047ea530>]]]]
variable[r_1] assign[=] call[name[transpose], parameter[call[name[dot], parameter[call[name[transpose], parameter[name[m]]], call[name[transpose], parameter[name[r_0]]]]]]]
variable[r1] assign[=] call[name[r_1]][<ast.Slice object at 0x7da2047e8fd0>]
variable[r1_dot] assign[=] call[name[r_1]][<ast.Slice object at 0x7da2047e8d90>]
if compare[name[mu_radec] is constant[None]] begin[:]
variable[r1] assign[=] binary_operation[name[r1] + call[name[sec_to_radian], parameter[binary_operation[binary_operation[name[r1_dot] * binary_operation[name[epoch] - constant[1950.0]]] / constant[100.0]]]]]
variable[a] assign[=] binary_operation[name[a] + call[name[sec_to_radian], parameter[binary_operation[binary_operation[name[a_dot] * binary_operation[name[epoch] - constant[1950.0]]] / constant[100.0]]]]]
variable[x1] assign[=] call[name[r_1]][constant[0]]
variable[y1] assign[=] call[name[r_1]][constant[1]]
variable[z1] assign[=] call[name[r_1]][constant[2]]
variable[rmag] assign[=] call[name[sqrt], parameter[binary_operation[binary_operation[binary_operation[name[x1] ** constant[2]] + binary_operation[name[y1] ** constant[2]]] + binary_operation[name[z1] ** constant[2]]]]]
variable[s1] assign[=] binary_operation[name[r1] / name[rmag]]
variable[s1_dot] assign[=] binary_operation[name[r1_dot] / name[rmag]]
variable[s] assign[=] name[s1]
for taget[name[j]] in starred[call[name[arange], parameter[constant[0], constant[3]]]] begin[:]
variable[r] assign[=] binary_operation[binary_operation[name[s1] + name[a]] - binary_operation[call[binary_operation[name[s] * name[a]].sum, parameter[]] * name[s]]]
variable[s] assign[=] binary_operation[name[r] / name[rmag]]
variable[x] assign[=] call[name[r]][constant[0]]
variable[y] assign[=] call[name[r]][constant[1]]
variable[z] assign[=] call[name[r]][constant[2]]
variable[r2] assign[=] binary_operation[binary_operation[binary_operation[name[x] ** constant[2]] + binary_operation[name[y] ** constant[2]]] + binary_operation[name[z] ** constant[2]]]
variable[rmag] assign[=] call[name[sqrt], parameter[name[r2]]]
if compare[name[mu_radec] is_not constant[None]] begin[:]
variable[r_dot] assign[=] binary_operation[binary_operation[name[s1_dot] + name[a_dot]] - binary_operation[call[binary_operation[name[s] * name[a_dot]].sum, parameter[]] * name[s]]]
variable[x_dot] assign[=] call[name[r_dot]][constant[0]]
variable[y_dot] assign[=] call[name[r_dot]][constant[1]]
variable[z_dot] assign[=] call[name[r_dot]][constant[2]]
call[name[mu_radec]][tuple[[<ast.Name object at 0x7da18bccb100>, <ast.Constant object at 0x7da18bccb730>]]] assign[=] binary_operation[binary_operation[binary_operation[name[x] * name[y_dot]] - binary_operation[name[y] * name[x_dot]]] / binary_operation[binary_operation[name[x] ** constant[2]] + binary_operation[name[y] ** constant[2]]]]
call[name[mu_radec]][tuple[[<ast.Name object at 0x7da18bcc9480>, <ast.Constant object at 0x7da18bccbbe0>]]] assign[=] binary_operation[binary_operation[binary_operation[name[z_dot] * binary_operation[binary_operation[name[x] ** constant[2]] + binary_operation[name[y] ** constant[2]]]] - binary_operation[name[z] * binary_operation[binary_operation[name[x] * name[x_dot]] + binary_operation[name[y] * name[y_dot]]]]] / binary_operation[name[r2] * call[name[sqrt], parameter[binary_operation[binary_operation[name[x] ** constant[2]] + binary_operation[name[y] ** constant[2]]]]]]]
call[name[dec_1950]][name[i]] assign[=] call[name[arcsin], parameter[binary_operation[name[z] / name[rmag]]]]
call[name[ra_1950]][name[i]] assign[=] call[name[arctan2], parameter[name[y], name[x]]]
if compare[call[name[parallax]][name[i]] greater[>] constant[0.0]] begin[:]
call[name[rad_vel]][name[i]] assign[=] binary_operation[binary_operation[binary_operation[binary_operation[name[x] * name[x_dot]] + binary_operation[name[y] * name[y_dot]]] + binary_operation[name[z] * name[z_dot]]] / binary_operation[binary_operation[constant[21.095] * call[name[parallax]][name[i]]] * name[rmag]]]
call[name[parallax]][name[i]] assign[=] binary_operation[call[name[parallax]][name[i]] / name[rmag]]
variable[neg] assign[=] compare[name[ra_1950] less[<] constant[0]]
if compare[call[name[neg].any, parameter[]] greater[>] constant[0]] begin[:]
call[name[ra_1950]][name[neg]] assign[=] binary_operation[call[name[ra_1950]][name[neg]] + binary_operation[constant[2.0] * name[pi]]]
variable[ra_1950] assign[=] call[name[rad2deg], parameter[name[ra_1950]]]
variable[dec_1950] assign[=] call[name[rad2deg], parameter[name[dec_1950]]]
if name[scal] begin[:]
return[tuple[[<ast.Subscript object at 0x7da1b2347970>, <ast.Subscript object at 0x7da1b2347850>]]] | keyword[def] identifier[bprecess] ( identifier[ra0] , identifier[dec0] , identifier[mu_radec] = keyword[None] , identifier[parallax] = keyword[None] , identifier[rad_vel] = keyword[None] , identifier[epoch] = keyword[None] ):
literal[string]
identifier[scal] = keyword[True]
keyword[if] identifier[isinstance] ( identifier[ra0] , identifier[ndarray] ):
identifier[ra] = identifier[ra0]
identifier[dec] = identifier[dec0]
identifier[n] = identifier[ra] . identifier[size]
identifier[scal] = keyword[False]
keyword[else] :
identifier[n] = literal[int]
identifier[ra] = identifier[array] ([ identifier[ra0] ])
identifier[dec] = identifier[array] ([ identifier[dec0] ])
keyword[if] identifier[rad_vel] keyword[is] keyword[None] :
identifier[rad_vel] = identifier[zeros] ( identifier[n] )
keyword[else] :
keyword[if] keyword[not] identifier[isinstance] ( identifier[rad_vel] , identifier[ndarray] ):
identifier[rad_vel] = identifier[array] ([ identifier[rad_vel] ], identifier[dtype] = identifier[float] )
keyword[if] identifier[rad_vel] . identifier[size] != identifier[n] :
keyword[raise] identifier[Exception] ( literal[string] )
keyword[if] ( identifier[mu_radec] keyword[is] keyword[not] keyword[None] ):
keyword[if] ( identifier[array] ( identifier[mu_radec] ). identifier[size] != literal[int] * identifier[n] ):
keyword[raise] identifier[Exception] ( literal[string] + identifier[strtrim] ( identifier[n] , literal[int] )+ literal[string] )
identifier[mu_radec] = identifier[mu_radec] * literal[int]
keyword[if] identifier[parallax] keyword[is] keyword[None] :
identifier[parallax] = identifier[zeros] ( identifier[n] )
keyword[else] :
keyword[if] keyword[not] identifier[isinstance] ( identifier[parallax] , identifier[ndarray] ):
identifier[parallax] = identifier[array] ([ identifier[parallax] ], identifier[dtype] = identifier[float] )
keyword[if] identifier[epoch] keyword[is] keyword[None] :
identifier[epoch] = literal[int]
identifier[radeg] = literal[int] / identifier[pi]
identifier[sec_to_radian] = keyword[lambda] identifier[x] : identifier[deg2rad] ( identifier[x] / literal[int] )
identifier[m] = identifier[array] ([ identifier[array] ([+ literal[int] ,- literal[int] ,- literal[int] ,- literal[int] ,- literal[int] ,+ literal[int] ]),
identifier[array] ([+ literal[int] ,+ literal[int] ,- literal[int] ,+ literal[int] ,- literal[int] ,- literal[int] ]),
identifier[array] ([+ literal[int] ,- literal[int] ,+ literal[int] ,- literal[int] ,+ literal[int] ,+ literal[int] ]),
identifier[array] ([- literal[int] ,+ literal[int] ,+ literal[int] ,+ literal[int] ,- literal[int] ,- literal[int] ]),
identifier[array] ([- literal[int] ,- literal[int] ,+ literal[int] ,+ literal[int] ,+ literal[int] ,- literal[int] ]),
identifier[array] ([- literal[int] ,+ literal[int] ,- literal[int] ,+ literal[int] ,- literal[int] ,+ literal[int] ])])
identifier[a_dot] = literal[int] * identifier[array] ([ literal[int] ,- literal[int] ,- literal[int] ])
identifier[ra_rad] = identifier[deg2rad] ( identifier[ra] )
identifier[dec_rad] = identifier[deg2rad] ( identifier[dec] )
identifier[cosra] = identifier[cos] ( identifier[ra_rad] )
identifier[sinra] = identifier[sin] ( identifier[ra_rad] )
identifier[cosdec] = identifier[cos] ( identifier[dec_rad] )
identifier[sindec] = identifier[sin] ( identifier[dec_rad] )
identifier[dec_1950] = identifier[dec] * literal[int]
identifier[ra_1950] = identifier[ra] * literal[int]
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[n] ):
identifier[a] = literal[int] * identifier[array] ([- literal[int] ,- literal[int] ,- literal[int] ])
identifier[r0] = identifier[array] ([ identifier[cosra] [ identifier[i] ]* identifier[cosdec] [ identifier[i] ], identifier[sinra] [ identifier[i] ]* identifier[cosdec] [ identifier[i] ], identifier[sindec] [ identifier[i] ]])
keyword[if] ( identifier[mu_radec] keyword[is] keyword[not] keyword[None] ):
identifier[mu_a] = identifier[mu_radec] [ identifier[i] , literal[int] ]
identifier[mu_d] = identifier[mu_radec] [ identifier[i] , literal[int] ]
identifier[r0_dot] = identifier[array] ([- identifier[mu_a] * identifier[sinra] [ identifier[i] ]* identifier[cosdec] [ identifier[i] ]- identifier[mu_d] * identifier[cosra] [ identifier[i] ]* identifier[sindec] [ identifier[i] ], identifier[mu_a] * identifier[cosra] [ identifier[i] ]* identifier[cosdec] [ identifier[i] ]- identifier[mu_d] * identifier[sinra] [ identifier[i] ]* identifier[sindec] [ identifier[i] ], identifier[mu_d] * identifier[cosdec] [ identifier[i] ]])+ literal[int] * identifier[rad_vel] [ identifier[i] ]* identifier[parallax] [ identifier[i] ]* identifier[r0]
keyword[else] :
identifier[r0_dot] = identifier[array] ([ literal[int] , literal[int] , literal[int] ])
identifier[r_0] = identifier[concatenate] (( identifier[r0] , identifier[r0_dot] ))
identifier[r_1] = identifier[transpose] ( identifier[dot] ( identifier[transpose] ( identifier[m] ), identifier[transpose] ( identifier[r_0] )))
identifier[r1] = identifier[r_1] [ literal[int] : literal[int] ]
identifier[r1_dot] = identifier[r_1] [ literal[int] : literal[int] ]
keyword[if] identifier[mu_radec] keyword[is] keyword[None] :
identifier[r1] = identifier[r1] + identifier[sec_to_radian] ( identifier[r1_dot] *( identifier[epoch] - literal[int] )/ literal[int] )
identifier[a] = identifier[a] + identifier[sec_to_radian] ( identifier[a_dot] *( identifier[epoch] - literal[int] )/ literal[int] )
identifier[x1] = identifier[r_1] [ literal[int] ]; identifier[y1] = identifier[r_1] [ literal[int] ]; identifier[z1] = identifier[r_1] [ literal[int] ]
identifier[rmag] = identifier[sqrt] ( identifier[x1] ** literal[int] + identifier[y1] ** literal[int] + identifier[z1] ** literal[int] )
identifier[s1] = identifier[r1] / identifier[rmag] ; identifier[s1_dot] = identifier[r1_dot] / identifier[rmag]
identifier[s] = identifier[s1]
keyword[for] identifier[j] keyword[in] identifier[arange] ( literal[int] , literal[int] ):
identifier[r] = identifier[s1] + identifier[a] -(( identifier[s] * identifier[a] ). identifier[sum] ())* identifier[s]
identifier[s] = identifier[r] / identifier[rmag]
identifier[x] = identifier[r] [ literal[int] ]; identifier[y] = identifier[r] [ literal[int] ]; identifier[z] = identifier[r] [ literal[int] ]
identifier[r2] = identifier[x] ** literal[int] + identifier[y] ** literal[int] + identifier[z] ** literal[int]
identifier[rmag] = identifier[sqrt] ( identifier[r2] )
keyword[if] identifier[mu_radec] keyword[is] keyword[not] keyword[None] :
identifier[r_dot] = identifier[s1_dot] + identifier[a_dot] -(( identifier[s] * identifier[a_dot] ). identifier[sum] ())* identifier[s]
identifier[x_dot] = identifier[r_dot] [ literal[int] ]; identifier[y_dot] = identifier[r_dot] [ literal[int] ]; identifier[z_dot] = identifier[r_dot] [ literal[int] ]
identifier[mu_radec] [ identifier[i] , literal[int] ]=( identifier[x] * identifier[y_dot] - identifier[y] * identifier[x_dot] )/( identifier[x] ** literal[int] + identifier[y] ** literal[int] )
identifier[mu_radec] [ identifier[i] , literal[int] ]=( identifier[z_dot] *( identifier[x] ** literal[int] + identifier[y] ** literal[int] )- identifier[z] *( identifier[x] * identifier[x_dot] + identifier[y] * identifier[y_dot] ))/( identifier[r2] * identifier[sqrt] ( identifier[x] ** literal[int] + identifier[y] ** literal[int] ))
identifier[dec_1950] [ identifier[i] ]= identifier[arcsin] ( identifier[z] / identifier[rmag] )
identifier[ra_1950] [ identifier[i] ]= identifier[arctan2] ( identifier[y] , identifier[x] )
keyword[if] identifier[parallax] [ identifier[i] ]> literal[int] :
identifier[rad_vel] [ identifier[i] ]=( identifier[x] * identifier[x_dot] + identifier[y] * identifier[y_dot] + identifier[z] * identifier[z_dot] )/( literal[int] * identifier[parallax] [ identifier[i] ]* identifier[rmag] )
identifier[parallax] [ identifier[i] ]= identifier[parallax] [ identifier[i] ]/ identifier[rmag]
identifier[neg] =( identifier[ra_1950] < literal[int] )
keyword[if] identifier[neg] . identifier[any] ()> literal[int] :
identifier[ra_1950] [ identifier[neg] ]= identifier[ra_1950] [ identifier[neg] ]+ literal[int] * identifier[pi]
identifier[ra_1950] = identifier[rad2deg] ( identifier[ra_1950] )
identifier[dec_1950] = identifier[rad2deg] ( identifier[dec_1950] )
keyword[if] identifier[scal] :
keyword[return] identifier[ra_1950] [ literal[int] ], identifier[dec_1950] [ literal[int] ]
keyword[else] :
keyword[return] identifier[ra_1950] , identifier[dec_1950] | def bprecess(ra0, dec0, mu_radec=None, parallax=None, rad_vel=None, epoch=None):
"""
NAME:
BPRECESS
PURPOSE:
Precess positions from J2000.0 (FK5) to B1950.0 (FK4)
EXPLANATION:
Calculates the mean place of a star at B1950.0 on the FK4 system from
the mean place at J2000.0 on the FK5 system.
CALLING SEQUENCE:
bprecess, ra, dec, ra_1950, dec_1950, [ MU_RADEC = , PARALLAX =
RAD_VEL =, EPOCH = ]
INPUTS:
RA,DEC - Input J2000 right ascension and declination in *degrees*.
Scalar or N element vector
OUTPUTS:
RA_1950, DEC_1950 - The corresponding B1950 right ascension and
declination in *degrees*. Same number of elements as
RA,DEC but always double precision.
OPTIONAL INPUT-OUTPUT KEYWORDS
MU_RADEC - 2xN element double precision vector containing the proper
motion in seconds of arc per tropical *century* in right
ascension and declination.
PARALLAX - N_element vector giving stellar parallax (seconds of arc)
RAD_VEL - N_element vector giving radial velocity in km/s
The values of MU_RADEC, PARALLAX, and RADVEL will all be modified
upon output to contain the values of these quantities in the
B1950 system. The parallax and radial velocity will have a very
minor influence on the B1950 position.
EPOCH - scalar giving epoch of original observations, default 2000.0d
This keyword value is only used if the MU_RADEC keyword is not set.
NOTES:
The algorithm is taken from the Explanatory Supplement to the
Astronomical Almanac 1992, page 186.
Also see Aoki et al (1983), A&A, 128,263
BPRECESS distinguishes between the following two cases:
(1) The proper motion is known and non-zero
(2) the proper motion is unknown or known to be exactly zero (i.e.
extragalactic radio sources). In this case, the reverse of
the algorithm in Appendix 2 of Aoki et al. (1983) is used to
ensure that the output proper motion is exactly zero. Better
precision can be achieved in this case by inputting the EPOCH
of the original observations.
The error in using the IDL procedure PRECESS for converting between
B1950 and J1950 can be up to 12", mainly in right ascension. If
better accuracy than this is needed then BPRECESS should be used.
An unsystematic comparison of BPRECESS with the IPAC precession
routine (http://nedwww.ipac.caltech.edu/forms/calculator.html) always
gives differences less than 0.15".
EXAMPLE:
The SAO2000 catalogue gives the J2000 position and proper motion for
the star HD 119288. Find the B1950 position.
RA(2000) = 13h 42m 12.740s Dec(2000) = 8d 23' 17.69''
Mu(RA) = -.0257 s/yr Mu(Dec) = -.090 ''/yr
IDL> mu_radec = 100D* [ -15D*.0257, -0.090 ]
IDL> ra = ten(13, 42, 12.740)*15.D
IDL> dec = ten(8, 23, 17.69)
IDL> bprecess, ra, dec, ra1950, dec1950, mu_radec = mu_radec
IDL> print, adstring(ra1950, dec1950,2)
===> 13h 39m 44.526s +08d 38' 28.63"
REVISION HISTORY:
Written, W. Landsman October, 1992
Vectorized, W. Landsman February, 1994
Treat case where proper motion not known or exactly zero November 1994
Handling of arrays larger than 32767 Lars L. Christensen, march, 1995
Converted to IDL V5.0 W. Landsman September 1997
Fixed bug where A term not initialized for vector input
W. Landsman February 2000
Converted to python Sergey Koposov july 2010
"""
scal = True
if isinstance(ra0, ndarray):
ra = ra0
dec = dec0
n = ra.size
scal = False # depends on [control=['if'], data=[]]
else:
n = 1
ra = array([ra0])
dec = array([dec0])
if rad_vel is None:
rad_vel = zeros(n) # depends on [control=['if'], data=['rad_vel']]
else:
if not isinstance(rad_vel, ndarray):
rad_vel = array([rad_vel], dtype=float) # depends on [control=['if'], data=[]]
if rad_vel.size != n:
raise Exception('ERROR - RAD_VEL keyword vector must be of the same length as RA and DEC') # depends on [control=['if'], data=[]]
if mu_radec is not None:
if array(mu_radec).size != 2 * n:
raise Exception('ERROR - MU_RADEC keyword (proper motion) be dimensioned (2,' + strtrim(n, 2) + ')') # depends on [control=['if'], data=[]]
mu_radec = mu_radec * 1.0 # depends on [control=['if'], data=['mu_radec']]
if parallax is None:
parallax = zeros(n) # depends on [control=['if'], data=['parallax']]
elif not isinstance(parallax, ndarray):
parallax = array([parallax], dtype=float) # depends on [control=['if'], data=[]]
if epoch is None:
epoch = 2000.0 # depends on [control=['if'], data=['epoch']]
radeg = 180.0 / pi
sec_to_radian = lambda x: deg2rad(x / 3600.0)
m = array([array([+0.9999256795, -0.0111814828, -0.004859004, -0.000551, -0.23856, +0.43573]), array([+0.0111814828, +0.9999374849, -2.71557e-05, +0.238509, -0.002667, -0.008541]), array([+0.0048590039, -2.71771e-05, +0.9999881946, -0.435614, +0.012254, +0.002117]), array([-2.4238984e-06, +2.710544e-08, +1.177742e-08, +0.99990432, -0.01118145, -0.00485852]), array([-2.710544e-08, -2.42392702e-06, +6.585e-11, +0.01118145, +0.99991613, -2.716e-05]), array([-1.177742e-08, +6.585e-11, -2.42404995e-06, +0.00485852, -2.717e-05, +0.99996684])])
a_dot = 0.001 * array([1.244, -1.579, -0.66]) #in arc seconds per century
ra_rad = deg2rad(ra)
dec_rad = deg2rad(dec)
cosra = cos(ra_rad)
sinra = sin(ra_rad)
cosdec = cos(dec_rad)
sindec = sin(dec_rad)
dec_1950 = dec * 0.0
ra_1950 = ra * 0.0
for i in range(n):
# Following statement moved inside loop in Feb 2000.
a = 1e-06 * array([-1.62557, -0.31919, -0.13843]) #in radians
r0 = array([cosra[i] * cosdec[i], sinra[i] * cosdec[i], sindec[i]])
if mu_radec is not None:
mu_a = mu_radec[i, 0]
mu_d = mu_radec[i, 1]
r0_dot = array([-mu_a * sinra[i] * cosdec[i] - mu_d * cosra[i] * sindec[i], mu_a * cosra[i] * cosdec[i] - mu_d * sinra[i] * sindec[i], mu_d * cosdec[i]]) + 21.095 * rad_vel[i] * parallax[i] * r0 # depends on [control=['if'], data=['mu_radec']]
else:
r0_dot = array([0.0, 0.0, 0.0])
r_0 = concatenate((r0, r0_dot))
r_1 = transpose(dot(transpose(m), transpose(r_0)))
# Include the effects of the E-terms of aberration to form r and r_dot.
r1 = r_1[0:3]
r1_dot = r_1[3:6]
if mu_radec is None:
r1 = r1 + sec_to_radian(r1_dot * (epoch - 1950.0) / 100.0)
a = a + sec_to_radian(a_dot * (epoch - 1950.0) / 100.0) # depends on [control=['if'], data=[]]
x1 = r_1[0]
y1 = r_1[1]
z1 = r_1[2]
rmag = sqrt(x1 ** 2 + y1 ** 2 + z1 ** 2)
s1 = r1 / rmag
s1_dot = r1_dot / rmag
s = s1
for j in arange(0, 3):
r = s1 + a - (s * a).sum() * s
s = r / rmag # depends on [control=['for'], data=[]]
x = r[0]
y = r[1]
z = r[2]
r2 = x ** 2 + y ** 2 + z ** 2
rmag = sqrt(r2)
if mu_radec is not None:
r_dot = s1_dot + a_dot - (s * a_dot).sum() * s
x_dot = r_dot[0]
y_dot = r_dot[1]
z_dot = r_dot[2]
mu_radec[i, 0] = (x * y_dot - y * x_dot) / (x ** 2 + y ** 2)
mu_radec[i, 1] = (z_dot * (x ** 2 + y ** 2) - z * (x * x_dot + y * y_dot)) / (r2 * sqrt(x ** 2 + y ** 2)) # depends on [control=['if'], data=['mu_radec']]
dec_1950[i] = arcsin(z / rmag)
ra_1950[i] = arctan2(y, x)
if parallax[i] > 0.0:
rad_vel[i] = (x * x_dot + y * y_dot + z * z_dot) / (21.095 * parallax[i] * rmag)
parallax[i] = parallax[i] / rmag # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['i']]
neg = ra_1950 < 0
if neg.any() > 0:
ra_1950[neg] = ra_1950[neg] + 2.0 * pi # depends on [control=['if'], data=[]]
ra_1950 = rad2deg(ra_1950)
dec_1950 = rad2deg(dec_1950)
# Make output scalar if input was scalar
if scal:
return (ra_1950[0], dec_1950[0]) # depends on [control=['if'], data=[]]
else:
return (ra_1950, dec_1950) |
def get_cost_per_kg(self, comp):
"""
Get best estimate of minimum cost/kg based on known data
Args:
comp:
Composition as a pymatgen.core.structure.Composition
Returns:
float of cost/kg
"""
comp = comp if isinstance(comp, Composition) else Composition(comp)
return self.get_cost_per_mol(comp) / (
comp.weight.to("kg") * const.N_A) | def function[get_cost_per_kg, parameter[self, comp]]:
constant[
Get best estimate of minimum cost/kg based on known data
Args:
comp:
Composition as a pymatgen.core.structure.Composition
Returns:
float of cost/kg
]
variable[comp] assign[=] <ast.IfExp object at 0x7da18c4cf790>
return[binary_operation[call[name[self].get_cost_per_mol, parameter[name[comp]]] / binary_operation[call[name[comp].weight.to, parameter[constant[kg]]] * name[const].N_A]]] | keyword[def] identifier[get_cost_per_kg] ( identifier[self] , identifier[comp] ):
literal[string]
identifier[comp] = identifier[comp] keyword[if] identifier[isinstance] ( identifier[comp] , identifier[Composition] ) keyword[else] identifier[Composition] ( identifier[comp] )
keyword[return] identifier[self] . identifier[get_cost_per_mol] ( identifier[comp] )/(
identifier[comp] . identifier[weight] . identifier[to] ( literal[string] )* identifier[const] . identifier[N_A] ) | def get_cost_per_kg(self, comp):
"""
Get best estimate of minimum cost/kg based on known data
Args:
comp:
Composition as a pymatgen.core.structure.Composition
Returns:
float of cost/kg
"""
comp = comp if isinstance(comp, Composition) else Composition(comp)
return self.get_cost_per_mol(comp) / (comp.weight.to('kg') * const.N_A) |
def gMagnitudeError(G):
"""
Calculate the single-field-of-view-transit photometric standard error in the G band as a function
of G. A 20% margin is included.
Parameters
----------
G - Value(s) of G-band magnitude.
Returns
-------
The G band photometric standard error in units of magnitude.
"""
z=calcZ(G)
return 1.0e-3*sqrt(0.04895*z*z + 1.8633*z + 0.0001985) * _scienceMargin | def function[gMagnitudeError, parameter[G]]:
constant[
Calculate the single-field-of-view-transit photometric standard error in the G band as a function
of G. A 20% margin is included.
Parameters
----------
G - Value(s) of G-band magnitude.
Returns
-------
The G band photometric standard error in units of magnitude.
]
variable[z] assign[=] call[name[calcZ], parameter[name[G]]]
return[binary_operation[binary_operation[constant[0.001] * call[name[sqrt], parameter[binary_operation[binary_operation[binary_operation[binary_operation[constant[0.04895] * name[z]] * name[z]] + binary_operation[constant[1.8633] * name[z]]] + constant[0.0001985]]]]] * name[_scienceMargin]]] | keyword[def] identifier[gMagnitudeError] ( identifier[G] ):
literal[string]
identifier[z] = identifier[calcZ] ( identifier[G] )
keyword[return] literal[int] * identifier[sqrt] ( literal[int] * identifier[z] * identifier[z] + literal[int] * identifier[z] + literal[int] )* identifier[_scienceMargin] | def gMagnitudeError(G):
"""
Calculate the single-field-of-view-transit photometric standard error in the G band as a function
of G. A 20% margin is included.
Parameters
----------
G - Value(s) of G-band magnitude.
Returns
-------
The G band photometric standard error in units of magnitude.
"""
z = calcZ(G)
return 0.001 * sqrt(0.04895 * z * z + 1.8633 * z + 0.0001985) * _scienceMargin |
def new(self, array):
"""
Convert an array of compatible length into a DictArray:
>>> d = DictArray({'PGA': [0.01, 0.02, 0.04], 'PGV': [0.1, 0.2]})
>>> d.new(numpy.arange(0, 5, 1)) # array of lenght 5 = 3 + 2
<DictArray
PGA: [0 1 2]
PGV: [3 4]>
"""
assert len(self.array) == len(array)
arr = object.__new__(self.__class__)
arr.dt = self.dt
arr.slicedic = self.slicedic
arr.array = array
return arr | def function[new, parameter[self, array]]:
constant[
Convert an array of compatible length into a DictArray:
>>> d = DictArray({'PGA': [0.01, 0.02, 0.04], 'PGV': [0.1, 0.2]})
>>> d.new(numpy.arange(0, 5, 1)) # array of lenght 5 = 3 + 2
<DictArray
PGA: [0 1 2]
PGV: [3 4]>
]
assert[compare[call[name[len], parameter[name[self].array]] equal[==] call[name[len], parameter[name[array]]]]]
variable[arr] assign[=] call[name[object].__new__, parameter[name[self].__class__]]
name[arr].dt assign[=] name[self].dt
name[arr].slicedic assign[=] name[self].slicedic
name[arr].array assign[=] name[array]
return[name[arr]] | keyword[def] identifier[new] ( identifier[self] , identifier[array] ):
literal[string]
keyword[assert] identifier[len] ( identifier[self] . identifier[array] )== identifier[len] ( identifier[array] )
identifier[arr] = identifier[object] . identifier[__new__] ( identifier[self] . identifier[__class__] )
identifier[arr] . identifier[dt] = identifier[self] . identifier[dt]
identifier[arr] . identifier[slicedic] = identifier[self] . identifier[slicedic]
identifier[arr] . identifier[array] = identifier[array]
keyword[return] identifier[arr] | def new(self, array):
"""
Convert an array of compatible length into a DictArray:
>>> d = DictArray({'PGA': [0.01, 0.02, 0.04], 'PGV': [0.1, 0.2]})
>>> d.new(numpy.arange(0, 5, 1)) # array of lenght 5 = 3 + 2
<DictArray
PGA: [0 1 2]
PGV: [3 4]>
"""
assert len(self.array) == len(array)
arr = object.__new__(self.__class__)
arr.dt = self.dt
arr.slicedic = self.slicedic
arr.array = array
return arr |
def get_strategy(cls, strategyName, inform_callback, sensor,
                 *params, **kwargs):
    """Factory method to create a strategy object.

    Parameters
    ----------
    strategyName : str
        Name of strategy.
    inform_callback : callable, signature inform_callback(sensor, reading)
        Callback to receive inform messages.
    sensor : Sensor object
        Sensor to sample.
    params : list of objects
        Custom sampling parameters for specified strategy.

    Keyword Arguments
    -----------------
    ioloop : tornado.ioloop.IOLoop instance, optional
        Tornado ioloop to use, otherwise tornado.ioloop.IOLoop.current()

    Returns
    -------
    strategy : :class:`SampleStrategy` object
        The created sampling strategy.
    """
    if strategyName not in cls.SAMPLING_LOOKUP_REV:
        raise ValueError("Unknown sampling strategy '%s'. "
                         "Known strategies are %s."
                         % (strategyName, cls.SAMPLING_LOOKUP.values()))
    strategyType = cls.SAMPLING_LOOKUP_REV[strategyName]
    # Dispatch table mapping each strategy constant to its implementation
    # class; an unmapped type falls through and returns None, mirroring
    # the original if/elif chain.
    strategy_classes = {
        cls.NONE: SampleNone,
        cls.AUTO: SampleAuto,
        cls.EVENT: SampleEvent,
        cls.DIFFERENTIAL: SampleDifferential,
        cls.PERIOD: SamplePeriod,
        cls.EVENT_RATE: SampleEventRate,
        cls.DIFFERENTIAL_RATE: SampleDifferentialRate,
    }
    strategy_class = strategy_classes.get(strategyType)
    if strategy_class is not None:
        return strategy_class(inform_callback, sensor, *params, **kwargs)
constant[Factory method to create a strategy object.
Parameters
----------
strategyName : str
Name of strategy.
inform_callback : callable, signature inform_callback(sensor, reading)
Callback to receive inform messages.
sensor : Sensor object
Sensor to sample.
params : list of objects
Custom sampling parameters for specified strategy.
Keyword Arguments
-----------------
ioloop : tornado.ioloop.IOLoop instance, optional
Tornado ioloop to use, otherwise tornado.ioloop.IOLoop.current()
Returns
-------
strategy : :class:`SampleStrategy` object
The created sampling strategy.
]
if compare[name[strategyName] <ast.NotIn object at 0x7da2590d7190> name[cls].SAMPLING_LOOKUP_REV] begin[:]
<ast.Raise object at 0x7da1b056ba90>
variable[strategyType] assign[=] call[name[cls].SAMPLING_LOOKUP_REV][name[strategyName]]
if compare[name[strategyType] equal[==] name[cls].NONE] begin[:]
return[call[name[SampleNone], parameter[name[inform_callback], name[sensor], <ast.Starred object at 0x7da1b056a950>]]] | keyword[def] identifier[get_strategy] ( identifier[cls] , identifier[strategyName] , identifier[inform_callback] , identifier[sensor] ,
* identifier[params] ,** identifier[kwargs] ):
literal[string]
keyword[if] identifier[strategyName] keyword[not] keyword[in] identifier[cls] . identifier[SAMPLING_LOOKUP_REV] :
keyword[raise] identifier[ValueError] ( literal[string]
literal[string]
%( identifier[strategyName] , identifier[cls] . identifier[SAMPLING_LOOKUP] . identifier[values] ()))
identifier[strategyType] = identifier[cls] . identifier[SAMPLING_LOOKUP_REV] [ identifier[strategyName] ]
keyword[if] identifier[strategyType] == identifier[cls] . identifier[NONE] :
keyword[return] identifier[SampleNone] ( identifier[inform_callback] , identifier[sensor] ,* identifier[params] ,** identifier[kwargs] )
keyword[elif] identifier[strategyType] == identifier[cls] . identifier[AUTO] :
keyword[return] identifier[SampleAuto] ( identifier[inform_callback] , identifier[sensor] ,* identifier[params] ,** identifier[kwargs] )
keyword[elif] identifier[strategyType] == identifier[cls] . identifier[EVENT] :
keyword[return] identifier[SampleEvent] ( identifier[inform_callback] , identifier[sensor] ,* identifier[params] ,** identifier[kwargs] )
keyword[elif] identifier[strategyType] == identifier[cls] . identifier[DIFFERENTIAL] :
keyword[return] identifier[SampleDifferential] ( identifier[inform_callback] , identifier[sensor] ,
* identifier[params] ,** identifier[kwargs] )
keyword[elif] identifier[strategyType] == identifier[cls] . identifier[PERIOD] :
keyword[return] identifier[SamplePeriod] ( identifier[inform_callback] , identifier[sensor] ,* identifier[params] ,** identifier[kwargs] )
keyword[elif] identifier[strategyType] == identifier[cls] . identifier[EVENT_RATE] :
keyword[return] identifier[SampleEventRate] ( identifier[inform_callback] , identifier[sensor] ,* identifier[params] ,** identifier[kwargs] )
keyword[elif] identifier[strategyType] == identifier[cls] . identifier[DIFFERENTIAL_RATE] :
keyword[return] identifier[SampleDifferentialRate] ( identifier[inform_callback] , identifier[sensor] ,
* identifier[params] ,** identifier[kwargs] ) | def get_strategy(cls, strategyName, inform_callback, sensor, *params, **kwargs):
"""Factory method to create a strategy object.
Parameters
----------
strategyName : str
Name of strategy.
inform_callback : callable, signature inform_callback(sensor, reading)
Callback to receive inform messages.
sensor : Sensor object
Sensor to sample.
params : list of objects
Custom sampling parameters for specified strategy.
Keyword Arguments
-----------------
ioloop : tornado.ioloop.IOLoop instance, optional
Tornado ioloop to use, otherwise tornado.ioloop.IOLoop.current()
Returns
-------
strategy : :class:`SampleStrategy` object
The created sampling strategy.
"""
if strategyName not in cls.SAMPLING_LOOKUP_REV:
raise ValueError("Unknown sampling strategy '%s'. Known strategies are %s." % (strategyName, cls.SAMPLING_LOOKUP.values())) # depends on [control=['if'], data=['strategyName']]
strategyType = cls.SAMPLING_LOOKUP_REV[strategyName]
if strategyType == cls.NONE:
return SampleNone(inform_callback, sensor, *params, **kwargs) # depends on [control=['if'], data=[]]
elif strategyType == cls.AUTO:
return SampleAuto(inform_callback, sensor, *params, **kwargs) # depends on [control=['if'], data=[]]
elif strategyType == cls.EVENT:
return SampleEvent(inform_callback, sensor, *params, **kwargs) # depends on [control=['if'], data=[]]
elif strategyType == cls.DIFFERENTIAL:
return SampleDifferential(inform_callback, sensor, *params, **kwargs) # depends on [control=['if'], data=[]]
elif strategyType == cls.PERIOD:
return SamplePeriod(inform_callback, sensor, *params, **kwargs) # depends on [control=['if'], data=[]]
elif strategyType == cls.EVENT_RATE:
return SampleEventRate(inform_callback, sensor, *params, **kwargs) # depends on [control=['if'], data=[]]
elif strategyType == cls.DIFFERENTIAL_RATE:
return SampleDifferentialRate(inform_callback, sensor, *params, **kwargs) # depends on [control=['if'], data=[]] |
def remove_routes(self, item, routes):
    """Remove *item* from each of the given routes.

    The item is removed from every listed route's item list (ignored if
    not present); a route whose item list becomes empty is dropped from
    ``self._routes`` entirely.

    :param item: the item to detach from the routes.
    :param routes: iterable of route keys to process.
    """
    for route in routes:
        items = self._routes.get(route)
        if items is None:
            # Unknown route -- nothing to remove. (The original code
            # would have raised AttributeError on ``None.remove``.)
            continue
        try:
            items.remove(item)
            LOG.debug('removed item from route %s', route)
        except ValueError:
            # Item was not registered on this route; best-effort removal.
            pass
        if not items:
            self._routes.pop(route)
            LOG.debug('removed route %s', route)
constant[Removes item from matching routes]
for taget[name[route]] in starred[name[routes]] begin[:]
variable[items] assign[=] call[name[self]._routes.get, parameter[name[route]]]
<ast.Try object at 0x7da18eb556c0>
if <ast.UnaryOp object at 0x7da18eb57a60> begin[:]
call[name[self]._routes.pop, parameter[name[route]]]
call[name[LOG].debug, parameter[constant[removed route %s], name[route]]] | keyword[def] identifier[remove_routes] ( identifier[self] , identifier[item] , identifier[routes] ):
literal[string]
keyword[for] identifier[route] keyword[in] identifier[routes] :
identifier[items] = identifier[self] . identifier[_routes] . identifier[get] ( identifier[route] )
keyword[try] :
identifier[items] . identifier[remove] ( identifier[item] )
identifier[LOG] . identifier[debug] ( literal[string] , identifier[route] )
keyword[except] identifier[ValueError] :
keyword[pass]
keyword[if] keyword[not] identifier[items] :
identifier[self] . identifier[_routes] . identifier[pop] ( identifier[route] )
identifier[LOG] . identifier[debug] ( literal[string] , identifier[route] ) | def remove_routes(self, item, routes):
"""Removes item from matching routes"""
for route in routes:
items = self._routes.get(route)
try:
items.remove(item)
LOG.debug('removed item from route %s', route) # depends on [control=['try'], data=[]]
except ValueError:
pass # depends on [control=['except'], data=[]]
if not items:
self._routes.pop(route)
LOG.debug('removed route %s', route) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['route']] |
def support_autoupload_param_hostip(self, **kwargs):
    """Auto Generated Code

    Builds the ``config/support/autoupload-param/hostip`` XML payload
    from ``kwargs['hostip']`` and hands it to the configured callback
    (``kwargs['callback']`` or ``self._callback``).
    """
    callback = kwargs.pop('callback', self._callback)
    config = ET.Element("config")
    support = ET.SubElement(
        config, "support", xmlns="urn:brocade.com:mgmt:brocade-ras")
    autoupload_param = ET.SubElement(support, "autoupload-param")
    hostip_el = ET.SubElement(autoupload_param, "hostip")
    hostip_el.text = kwargs.pop('hostip')
    return callback(config)
constant[Auto Generated Code
]
variable[config] assign[=] call[name[ET].Element, parameter[constant[config]]]
variable[support] assign[=] call[name[ET].SubElement, parameter[name[config], constant[support]]]
variable[autoupload_param] assign[=] call[name[ET].SubElement, parameter[name[support], constant[autoupload-param]]]
variable[hostip] assign[=] call[name[ET].SubElement, parameter[name[autoupload_param], constant[hostip]]]
name[hostip].text assign[=] call[name[kwargs].pop, parameter[constant[hostip]]]
variable[callback] assign[=] call[name[kwargs].pop, parameter[constant[callback], name[self]._callback]]
return[call[name[callback], parameter[name[config]]]] | keyword[def] identifier[support_autoupload_param_hostip] ( identifier[self] ,** identifier[kwargs] ):
literal[string]
identifier[config] = identifier[ET] . identifier[Element] ( literal[string] )
identifier[support] = identifier[ET] . identifier[SubElement] ( identifier[config] , literal[string] , identifier[xmlns] = literal[string] )
identifier[autoupload_param] = identifier[ET] . identifier[SubElement] ( identifier[support] , literal[string] )
identifier[hostip] = identifier[ET] . identifier[SubElement] ( identifier[autoupload_param] , literal[string] )
identifier[hostip] . identifier[text] = identifier[kwargs] . identifier[pop] ( literal[string] )
identifier[callback] = identifier[kwargs] . identifier[pop] ( literal[string] , identifier[self] . identifier[_callback] )
keyword[return] identifier[callback] ( identifier[config] ) | def support_autoupload_param_hostip(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element('config')
support = ET.SubElement(config, 'support', xmlns='urn:brocade.com:mgmt:brocade-ras')
autoupload_param = ET.SubElement(support, 'autoupload-param')
hostip = ET.SubElement(autoupload_param, 'hostip')
hostip.text = kwargs.pop('hostip')
callback = kwargs.pop('callback', self._callback)
return callback(config) |
def _ensure_append(self, new_items, append_to, index=0):
"""Ensure an item is appended to a list or create a new empty list
:param new_items: the item(s) to append
:type new_items: list(obj)
:param append_to: the list on which to append the items
:type append_to: list()
:param index: index of the list on which to append the items
:type index: int
"""
append_to = append_to or []
append_to.insert(index, new_items)
return append_to | def function[_ensure_append, parameter[self, new_items, append_to, index]]:
constant[Ensure an item is appended to a list or create a new empty list
:param new_items: the item(s) to append
:type new_items: list(obj)
:param append_to: the list on which to append the items
:type append_to: list()
:param index: index of the list on which to append the items
:type index: int
]
variable[append_to] assign[=] <ast.BoolOp object at 0x7da20c6a8190>
call[name[append_to].insert, parameter[name[index], name[new_items]]]
return[name[append_to]] | keyword[def] identifier[_ensure_append] ( identifier[self] , identifier[new_items] , identifier[append_to] , identifier[index] = literal[int] ):
literal[string]
identifier[append_to] = identifier[append_to] keyword[or] []
identifier[append_to] . identifier[insert] ( identifier[index] , identifier[new_items] )
keyword[return] identifier[append_to] | def _ensure_append(self, new_items, append_to, index=0):
"""Ensure an item is appended to a list or create a new empty list
:param new_items: the item(s) to append
:type new_items: list(obj)
:param append_to: the list on which to append the items
:type append_to: list()
:param index: index of the list on which to append the items
:type index: int
"""
append_to = append_to or []
append_to.insert(index, new_items)
return append_to |
def reference(self, symbol, count=1):
    """Record *count* references to *symbol*.

    References to the catch symbol are tallied locally on this scope;
    any other symbol is forwarded to the parent scope's counter.
    """
    if symbol != self.catch_symbol:
        self.parent.reference(symbol, count)
        return
    self.catch_symbol_usage += count
constant[
However, if referenced, ensure that the counter is applied to
the catch symbol.
]
if compare[name[symbol] equal[==] name[self].catch_symbol] begin[:]
<ast.AugAssign object at 0x7da18ede5150> | keyword[def] identifier[reference] ( identifier[self] , identifier[symbol] , identifier[count] = literal[int] ):
literal[string]
keyword[if] identifier[symbol] == identifier[self] . identifier[catch_symbol] :
identifier[self] . identifier[catch_symbol_usage] += identifier[count]
keyword[else] :
identifier[self] . identifier[parent] . identifier[reference] ( identifier[symbol] , identifier[count] ) | def reference(self, symbol, count=1):
"""
However, if referenced, ensure that the counter is applied to
the catch symbol.
"""
if symbol == self.catch_symbol:
self.catch_symbol_usage += count # depends on [control=['if'], data=[]]
else:
self.parent.reference(symbol, count) |
def workflow_list(obj):
    """ List all available workflows. """
    try:
        for wf in list_workflows(config=obj['config']):
            # First docstring line serves as the one-line summary.
            if wf.docstring is not None:
                summary = wf.docstring.split('\n')[0]
            else:
                summary = ''
            name_col = _style(obj['show_color'], wf.name, bold=True)
            click.echo('{:23} {}'.format(name_col, summary))
    except WorkflowDefinitionError as e:
        message = ('The graph {} in workflow {} is not a directed acyclic graph'
                   .format(e.graph_name, e.workflow_name))
        click.echo(_style(obj['show_color'], message, fg='red', bold=True))
constant[ List all available workflows. ]
<ast.Try object at 0x7da1b1040790> | keyword[def] identifier[workflow_list] ( identifier[obj] ):
literal[string]
keyword[try] :
keyword[for] identifier[wf] keyword[in] identifier[list_workflows] ( identifier[config] = identifier[obj] [ literal[string] ]):
identifier[click] . identifier[echo] ( literal[string] . identifier[format] (
identifier[_style] ( identifier[obj] [ literal[string] ], identifier[wf] . identifier[name] , identifier[bold] = keyword[True] ),
identifier[wf] . identifier[docstring] . identifier[split] ( literal[string] )[ literal[int] ] keyword[if] identifier[wf] . identifier[docstring] keyword[is] keyword[not] keyword[None] keyword[else] literal[string] ))
keyword[except] identifier[WorkflowDefinitionError] keyword[as] identifier[e] :
identifier[click] . identifier[echo] ( identifier[_style] ( identifier[obj] [ literal[string] ],
literal[string] .
identifier[format] ( identifier[e] . identifier[graph_name] , identifier[e] . identifier[workflow_name] ), identifier[fg] = literal[string] , identifier[bold] = keyword[True] )) | def workflow_list(obj):
""" List all available workflows. """
try:
for wf in list_workflows(config=obj['config']):
click.echo('{:23} {}'.format(_style(obj['show_color'], wf.name, bold=True), wf.docstring.split('\n')[0] if wf.docstring is not None else '')) # depends on [control=['for'], data=['wf']] # depends on [control=['try'], data=[]]
except WorkflowDefinitionError as e:
click.echo(_style(obj['show_color'], 'The graph {} in workflow {} is not a directed acyclic graph'.format(e.graph_name, e.workflow_name), fg='red', bold=True)) # depends on [control=['except'], data=['e']] |
def _perform_validation(self, path, value, results):
    """
    Validates a given value against the schema and configured validation rules.
    :param path: a dot notation path to the value.
    :param value: a value to be validated.
    :param results: a list with validation results to add new results.
    """
    # Fall back to the generic label "value" when no path was supplied.
    name = path if path != None else "value"
    value = ObjectReader.get_value(value)
    # Run the base-class schema checks first (they may append results).
    super(ArraySchema, self)._perform_validation(path, value, results)
    # Nothing further to check for a missing value.
    if value == None:
        return
    if isinstance(value, list) or isinstance(value, set) or isinstance(value, tuple):
        # Validate each element against the configured element type,
        # addressing it by its zero-based index appended to the path.
        index = 0
        for element in value:
            element_path = str(index) if path == None or len(path) == 0 else path + "." + str(index)
            self._perform_type_validation(element_path, self.value_type, element, results)
            index += 1
    else:
        # Non-sequence values are reported as a type error result.
        results.append(
            ValidationResult(
                path,
                ValidationResultType.Error,
                "VALUE_ISNOT_ARRAY",
                name + " type must be List or Array",
                "List",
                type(value)
            )
        )
constant[
Validates a given value against the schema and configured validation rules.
:param path: a dot notation path to the value.
:param value: a value to be validated.
:param results: a list with validation results to add new results.
]
variable[name] assign[=] <ast.IfExp object at 0x7da18f722fb0>
variable[value] assign[=] call[name[ObjectReader].get_value, parameter[name[value]]]
call[call[name[super], parameter[name[ArraySchema], name[self]]]._perform_validation, parameter[name[path], name[value], name[results]]]
if compare[name[value] equal[==] constant[None]] begin[:]
return[None]
if <ast.BoolOp object at 0x7da1b170cb50> begin[:]
variable[index] assign[=] constant[0]
for taget[name[element]] in starred[name[value]] begin[:]
variable[element_path] assign[=] <ast.IfExp object at 0x7da1b170c6a0>
call[name[self]._perform_type_validation, parameter[name[element_path], name[self].value_type, name[element], name[results]]]
<ast.AugAssign object at 0x7da20e9b2320> | keyword[def] identifier[_perform_validation] ( identifier[self] , identifier[path] , identifier[value] , identifier[results] ):
literal[string]
identifier[name] = identifier[path] keyword[if] identifier[path] != keyword[None] keyword[else] literal[string]
identifier[value] = identifier[ObjectReader] . identifier[get_value] ( identifier[value] )
identifier[super] ( identifier[ArraySchema] , identifier[self] ). identifier[_perform_validation] ( identifier[path] , identifier[value] , identifier[results] )
keyword[if] identifier[value] == keyword[None] :
keyword[return]
keyword[if] identifier[isinstance] ( identifier[value] , identifier[list] ) keyword[or] identifier[isinstance] ( identifier[value] , identifier[set] ) keyword[or] identifier[isinstance] ( identifier[value] , identifier[tuple] ):
identifier[index] = literal[int]
keyword[for] identifier[element] keyword[in] identifier[value] :
identifier[element_path] = identifier[str] ( identifier[index] ) keyword[if] identifier[path] == keyword[None] keyword[or] identifier[len] ( identifier[path] )== literal[int] keyword[else] identifier[path] + literal[string] + identifier[str] ( identifier[index] )
identifier[self] . identifier[_perform_type_validation] ( identifier[element_path] , identifier[self] . identifier[value_type] , identifier[element] , identifier[results] )
identifier[index] += literal[int]
keyword[else] :
identifier[results] . identifier[append] (
identifier[ValidationResult] (
identifier[path] ,
identifier[ValidationResultType] . identifier[Error] ,
literal[string] ,
identifier[name] + literal[string] ,
literal[string] ,
identifier[type] ( identifier[value] )
)
) | def _perform_validation(self, path, value, results):
"""
Validates a given value against the schema and configured validation rules.
:param path: a dot notation path to the value.
:param value: a value to be validated.
:param results: a list with validation results to add new results.
"""
name = path if path != None else 'value'
value = ObjectReader.get_value(value)
super(ArraySchema, self)._perform_validation(path, value, results)
if value == None:
return # depends on [control=['if'], data=[]]
if isinstance(value, list) or isinstance(value, set) or isinstance(value, tuple):
index = 0
for element in value:
element_path = str(index) if path == None or len(path) == 0 else path + '.' + str(index)
self._perform_type_validation(element_path, self.value_type, element, results)
index += 1 # depends on [control=['for'], data=['element']] # depends on [control=['if'], data=[]]
else:
results.append(ValidationResult(path, ValidationResultType.Error, 'VALUE_ISNOT_ARRAY', name + ' type must be List or Array', 'List', type(value))) |
def compose(*fs: Any) -> Callable:
    """Compose functions from left to right.

    ``compose(f, g)(x)`` is ``f(g(x))``: the right-most function is
    applied first and each result is fed to the function on its left.

    :param fs: one or more callables to compose.
    :return: the composed callable.
    :raises TypeError: if no functions are given.
    """
    # foldl1 over a sequence with no initializer is exactly
    # functools.reduce; use the stdlib instead of the hand-rolled fold.
    from functools import reduce  # local import: module deps unchanged
    return reduce(lambda f, g: lambda *x: f(g(*x)), fs)
constant[ Compose functions from left to right.
e.g. compose(f, g)(x) = f(g(x))
]
return[call[name[foldl1], parameter[<ast.Lambda object at 0x7da1b26adde0>, name[fs]]]] | keyword[def] identifier[compose] (* identifier[fs] : identifier[Any] )-> identifier[Callable] :
literal[string]
keyword[return] identifier[foldl1] ( keyword[lambda] identifier[f] , identifier[g] : keyword[lambda] * identifier[x] : identifier[f] ( identifier[g] (* identifier[x] )), identifier[fs] ) | def compose(*fs: Any) -> Callable:
""" Compose functions from left to right.
e.g. compose(f, g)(x) = f(g(x))
"""
return foldl1(lambda f, g: lambda *x: f(g(*x)), fs) |
def zoom(image, factor, dimension, hdr = False, order = 3):
    """
    Zooms the provided image by the supplied factor in the supplied dimension.
    The factor is an integer determining how many slices should be put between each
    existing pair.
    If an image header (hdr) is supplied, its voxel spacing gets updated.

    @param image: the image volume to zoom
    @param factor: number of new slices to interpolate between each existing pair
    @param dimension: the (0-based) axis along which to zoom
    @param hdr: optional image header whose pixel spacing is updated in-place
    @param order: spline interpolation order passed to the interpolator
    @return: (zoomed image, header) -- header is False when none was supplied
    """
    # check if supplied dimension is valid
    if dimension >= image.ndim:
        # BUGFIX: argparse.ArgumentError requires (argument, message); the
        # original single-argument call raised a TypeError instead of the
        # intended exception. Passing None as the argument keeps the
        # exception type while constructing it correctly.
        raise argparse.ArgumentError(None, 'The supplied zoom-dimension {} exceeds the image dimensionality of 0 to {}.'.format(dimension, image.ndim - 1))

    # get logger
    logger = Logger.getInstance()
    logger.debug('Old shape = {}.'.format(image.shape))

    # perform the zoom: only the requested axis is rescaled; the factor is
    # chosen so that `factor` new slices appear between each existing pair
    zoom = [1] * image.ndim
    zoom[dimension] = (image.shape[dimension] + (image.shape[dimension] - 1) * factor) / float(image.shape[dimension])
    logger.debug('Reshaping with = {}.'.format(zoom))
    image = interpolation.zoom(image, zoom, order=order)

    logger.debug('New shape = {}.'.format(image.shape))

    if hdr:
        # keep the physical extent constant: spacing shrinks by (factor + 1)
        new_spacing = list(header.get_pixel_spacing(hdr))
        new_spacing[dimension] = new_spacing[dimension] / float(factor + 1)
        logger.debug('Setting pixel spacing from {} to {}....'.format(header.get_pixel_spacing(hdr), new_spacing))
        header.set_pixel_spacing(hdr, tuple(new_spacing))

    return image, hdr
constant[
Zooms the provided image by the supplied factor in the supplied dimension.
The factor is an integer determining how many slices should be put between each
existing pair.
If an image header (hdr) is supplied, its voxel spacing gets updated.
Returns the image and the updated header or false.
]
if compare[name[dimension] greater_or_equal[>=] name[image].ndim] begin[:]
<ast.Raise object at 0x7da18ede7d90>
variable[logger] assign[=] call[name[Logger].getInstance, parameter[]]
call[name[logger].debug, parameter[call[constant[Old shape = {}.].format, parameter[name[image].shape]]]]
variable[zoom] assign[=] binary_operation[list[[<ast.Constant object at 0x7da18ede7880>]] * name[image].ndim]
call[name[zoom]][name[dimension]] assign[=] binary_operation[binary_operation[call[name[image].shape][name[dimension]] + binary_operation[binary_operation[call[name[image].shape][name[dimension]] - constant[1]] * name[factor]]] / call[name[float], parameter[call[name[image].shape][name[dimension]]]]]
call[name[logger].debug, parameter[call[constant[Reshaping with = {}.].format, parameter[name[zoom]]]]]
variable[image] assign[=] call[name[interpolation].zoom, parameter[name[image], name[zoom]]]
call[name[logger].debug, parameter[call[constant[New shape = {}.].format, parameter[name[image].shape]]]]
if name[hdr] begin[:]
variable[new_spacing] assign[=] call[name[list], parameter[call[name[header].get_pixel_spacing, parameter[name[hdr]]]]]
call[name[new_spacing]][name[dimension]] assign[=] binary_operation[call[name[new_spacing]][name[dimension]] / call[name[float], parameter[binary_operation[name[factor] + constant[1]]]]]
call[name[logger].debug, parameter[call[constant[Setting pixel spacing from {} to {}....].format, parameter[call[name[header].get_pixel_spacing, parameter[name[hdr]]], name[new_spacing]]]]]
call[name[header].set_pixel_spacing, parameter[name[hdr], call[name[tuple], parameter[name[new_spacing]]]]]
return[tuple[[<ast.Name object at 0x7da18f09e290>, <ast.Name object at 0x7da18f09c5e0>]]] | keyword[def] identifier[zoom] ( identifier[image] , identifier[factor] , identifier[dimension] , identifier[hdr] = keyword[False] , identifier[order] = literal[int] ):
literal[string]
keyword[if] identifier[dimension] >= identifier[image] . identifier[ndim] :
keyword[raise] identifier[argparse] . identifier[ArgumentError] ( literal[string] . identifier[format] ( identifier[dimension] , identifier[image] . identifier[ndim] - literal[int] ))
identifier[logger] = identifier[Logger] . identifier[getInstance] ()
identifier[logger] . identifier[debug] ( literal[string] . identifier[format] ( identifier[image] . identifier[shape] ))
identifier[zoom] =[ literal[int] ]* identifier[image] . identifier[ndim]
identifier[zoom] [ identifier[dimension] ]=( identifier[image] . identifier[shape] [ identifier[dimension] ]+( identifier[image] . identifier[shape] [ identifier[dimension] ]- literal[int] )* identifier[factor] )/ identifier[float] ( identifier[image] . identifier[shape] [ identifier[dimension] ])
identifier[logger] . identifier[debug] ( literal[string] . identifier[format] ( identifier[zoom] ))
identifier[image] = identifier[interpolation] . identifier[zoom] ( identifier[image] , identifier[zoom] , identifier[order] = identifier[order] )
identifier[logger] . identifier[debug] ( literal[string] . identifier[format] ( identifier[image] . identifier[shape] ))
keyword[if] identifier[hdr] :
identifier[new_spacing] = identifier[list] ( identifier[header] . identifier[get_pixel_spacing] ( identifier[hdr] ))
identifier[new_spacing] [ identifier[dimension] ]= identifier[new_spacing] [ identifier[dimension] ]/ identifier[float] ( identifier[factor] + literal[int] )
identifier[logger] . identifier[debug] ( literal[string] . identifier[format] ( identifier[header] . identifier[get_pixel_spacing] ( identifier[hdr] ), identifier[new_spacing] ))
identifier[header] . identifier[set_pixel_spacing] ( identifier[hdr] , identifier[tuple] ( identifier[new_spacing] ))
keyword[return] identifier[image] , identifier[hdr] | def zoom(image, factor, dimension, hdr=False, order=3):
"""
Zooms the provided image by the supplied factor in the supplied dimension.
The factor is an integer determining how many slices should be put between each
existing pair.
If an image header (hdr) is supplied, its voxel spacing gets updated.
Returns the image and the updated header or false.
"""
# check if supplied dimension is valid
if dimension >= image.ndim:
raise argparse.ArgumentError('The supplied zoom-dimension {} exceeds the image dimensionality of 0 to {}.'.format(dimension, image.ndim - 1)) # depends on [control=['if'], data=['dimension']]
# get logger
logger = Logger.getInstance()
logger.debug('Old shape = {}.'.format(image.shape))
# perform the zoom
zoom = [1] * image.ndim
zoom[dimension] = (image.shape[dimension] + (image.shape[dimension] - 1) * factor) / float(image.shape[dimension])
logger.debug('Reshaping with = {}.'.format(zoom))
image = interpolation.zoom(image, zoom, order=order)
logger.debug('New shape = {}.'.format(image.shape))
if hdr:
new_spacing = list(header.get_pixel_spacing(hdr))
new_spacing[dimension] = new_spacing[dimension] / float(factor + 1)
logger.debug('Setting pixel spacing from {} to {}....'.format(header.get_pixel_spacing(hdr), new_spacing))
header.set_pixel_spacing(hdr, tuple(new_spacing)) # depends on [control=['if'], data=[]]
return (image, hdr) |
def apply_boundary_conditions(self, **kwargs):
    """Maps values to be in [0, 2pi) (the domain) first, before applying
    any additional boundary conditions.

    Parameters
    ----------
    \**kwargs :
        The keyword args should be the name of a parameter and value to
        apply its boundary conditions to. The arguments need not include
        all of the parameters in self.

    Returns
    -------
    dict
        A dictionary of the parameter names and the conditioned values.
    """
    # Map each value into the angle's domain; a dict comprehension is the
    # idiomatic form of the original dict-of-pairs construction.
    kwargs = {p: self._domain.apply_conditions(val)
              for p, val in kwargs.items()}
    # Now apply any additional (parent-class) boundary conditions.
    return super(UniformAngle, self).apply_boundary_conditions(**kwargs)
constant[Maps values to be in [0, 2pi) (the domain) first, before applying
any additional boundary conditions.
Parameters
----------
\**kwargs :
The keyword args should be the name of a parameter and value to
apply its boundary conditions to. The arguments need not include
all of the parameters in self.
Returns
-------
dict
A dictionary of the parameter names and the conditioned values.
]
variable[kwargs] assign[=] call[name[dict], parameter[<ast.ListComp object at 0x7da20c6e62f0>]]
return[call[call[name[super], parameter[name[UniformAngle], name[self]]].apply_boundary_conditions, parameter[]]] | keyword[def] identifier[apply_boundary_conditions] ( identifier[self] ,** identifier[kwargs] ):
literal[string]
identifier[kwargs] = identifier[dict] ([[ identifier[p] , identifier[self] . identifier[_domain] . identifier[apply_conditions] ( identifier[val] )]
keyword[for] identifier[p] , identifier[val] keyword[in] identifier[kwargs] . identifier[items] ()])
keyword[return] identifier[super] ( identifier[UniformAngle] , identifier[self] ). identifier[apply_boundary_conditions] (** identifier[kwargs] ) | def apply_boundary_conditions(self, **kwargs):
"""Maps values to be in [0, 2pi) (the domain) first, before applying
any additional boundary conditions.
Parameters
----------
\\**kwargs :
The keyword args should be the name of a parameter and value to
apply its boundary conditions to. The arguments need not include
all of the parameters in self.
Returns
-------
dict
A dictionary of the parameter names and the conditioned values.
"""
# map values to be within the domain
kwargs = dict([[p, self._domain.apply_conditions(val)] for (p, val) in kwargs.items()])
# now apply additional conditions
return super(UniformAngle, self).apply_boundary_conditions(**kwargs) |
def fdefine(self, name, type, order):
    """Define a field. To initialize a newly created vdata with
    fields created with fdefine(), assign a tuple of field names
    to the _fields attribute or call the setfields() method.
    Args::
      name     field name
      type     field data type (one of HC.xxx)
      order    field order (number of values in the field)
    Returns::
      None
    C library equivalent : VSfdefine
    """
    # _checkErr raises if VSfdefine reports a non-success status.
    _checkErr('fdefine', _C.VSfdefine(self._id, name, type, order),
              'cannot define field')
constant[Define a field. To initialize a newly created vdata with
fields created with fdefine(), assign a tuple of field names
to the _fields attribute or call the setfields() method.
Args::
name field name
type field data type (one of HC.xxx)
order field order (number of values in the field)
Returns::
None
C library equivalent : VSfdefine
]
call[name[_checkErr], parameter[constant[fdefine], call[name[_C].VSfdefine, parameter[name[self]._id, name[name], name[type], name[order]]], constant[cannot define field]]] | keyword[def] identifier[fdefine] ( identifier[self] , identifier[name] , identifier[type] , identifier[order] ):
literal[string]
identifier[_checkErr] ( literal[string] , identifier[_C] . identifier[VSfdefine] ( identifier[self] . identifier[_id] , identifier[name] , identifier[type] , identifier[order] ),
literal[string] ) | def fdefine(self, name, type, order):
"""Define a field. To initialize a newly created vdata with
fields created with fdefine(), assign a tuple of field names
to the _fields attribute or call the setfields() method.
Args::
name field name
type field data type (one of HC.xxx)
order field order (number of values in the field)
Returns::
None
C library equivalent : VSfdefine
"""
_checkErr('fdefine', _C.VSfdefine(self._id, name, type, order), 'cannot define field') |
def child_added(self, child):
    """ Overwrite the view """
    # A child without a widget contributes nothing; keep the current view.
    new_view = child.widget
    if new_view is None:
        return
    self.toast.setView(new_view)
constant[ Overwrite the view ]
variable[view] assign[=] name[child].widget
if compare[name[view] is_not constant[None]] begin[:]
call[name[self].toast.setView, parameter[name[view]]] | keyword[def] identifier[child_added] ( identifier[self] , identifier[child] ):
literal[string]
identifier[view] = identifier[child] . identifier[widget]
keyword[if] identifier[view] keyword[is] keyword[not] keyword[None] :
identifier[self] . identifier[toast] . identifier[setView] ( identifier[view] ) | def child_added(self, child):
""" Overwrite the view """
view = child.widget
if view is not None:
self.toast.setView(view) # depends on [control=['if'], data=['view']] |
def process_metric(self, metric, scraper_config, metric_transformers=None):
    """
    Handle a prometheus metric according to the following flow:
    - search scraper_config['metrics_mapper'] for a prometheus.metric <--> datadog.metric mapping
    - call check method with the same name as the metric
    - log some info if none of the above worked

    `metric_transformers` is a dict of <metric name>:<function to run when the metric name is encountered>

    Note: when no direct `metrics_mapper` entry exists, transformers (if
    a `metric_transformers` dict was provided) take precedence; wildcard
    patterns from `metrics_mapper` are only consulted when
    `metric_transformers` is None.
    """
    # If targeted metric, store labels
    self._store_labels(metric, scraper_config)
    if metric.name in scraper_config['ignore_metrics']:
        return  # Ignore the metric
    # Filter metric to see if we can enrich with joined labels
    self._join_labels(metric, scraper_config)
    # In dry-run mode only the label collection above is performed;
    # nothing is submitted.
    if scraper_config['_dry_run']:
        return
    try:
        # Direct name -> name mapping; KeyError means no exact entry.
        self.submit_openmetric(scraper_config['metrics_mapper'][metric.name], metric, scraper_config)
    except KeyError:
        if metric_transformers is not None:
            if metric.name in metric_transformers:
                try:
                    # Get the transformer function for this specific metric
                    transformer = metric_transformers[metric.name]
                    transformer(metric, scraper_config)
                except Exception as err:
                    # Best-effort: a failing transformer must not abort the scrape.
                    self.log.warning("Error handling metric: {} - error: {}".format(metric.name, err))
            else:
                self.log.debug(
                    "Unable to handle metric: {0} - error: "
                    "No handler function named '{0}' defined".format(metric.name)
                )
        else:
            # build the wildcard list if first pass; cached on
            # scraper_config so it is computed only once per config
            if scraper_config['_metrics_wildcards'] is None:
                scraper_config['_metrics_wildcards'] = [x for x in scraper_config['metrics_mapper'] if '*' in x]
            # try matching wildcard (generic check); every matching
            # pattern triggers a submission under the metric's own name
            for wildcard in scraper_config['_metrics_wildcards']:
                if fnmatchcase(metric.name, wildcard):
                    self.submit_openmetric(metric.name, metric, scraper_config)
constant[
Handle a prometheus metric according to the following flow:
- search scraper_config['metrics_mapper'] for a prometheus.metric <--> datadog.metric mapping
- call check method with the same name as the metric
- log some info if none of the above worked
`metric_transformers` is a dict of <metric name>:<function to run when the metric name is encountered>
]
call[name[self]._store_labels, parameter[name[metric], name[scraper_config]]]
if compare[name[metric].name in call[name[scraper_config]][constant[ignore_metrics]]] begin[:]
return[None]
call[name[self]._join_labels, parameter[name[metric], name[scraper_config]]]
if call[name[scraper_config]][constant[_dry_run]] begin[:]
return[None]
<ast.Try object at 0x7da1b26ad510> | keyword[def] identifier[process_metric] ( identifier[self] , identifier[metric] , identifier[scraper_config] , identifier[metric_transformers] = keyword[None] ):
literal[string]
identifier[self] . identifier[_store_labels] ( identifier[metric] , identifier[scraper_config] )
keyword[if] identifier[metric] . identifier[name] keyword[in] identifier[scraper_config] [ literal[string] ]:
keyword[return]
identifier[self] . identifier[_join_labels] ( identifier[metric] , identifier[scraper_config] )
keyword[if] identifier[scraper_config] [ literal[string] ]:
keyword[return]
keyword[try] :
identifier[self] . identifier[submit_openmetric] ( identifier[scraper_config] [ literal[string] ][ identifier[metric] . identifier[name] ], identifier[metric] , identifier[scraper_config] )
keyword[except] identifier[KeyError] :
keyword[if] identifier[metric_transformers] keyword[is] keyword[not] keyword[None] :
keyword[if] identifier[metric] . identifier[name] keyword[in] identifier[metric_transformers] :
keyword[try] :
identifier[transformer] = identifier[metric_transformers] [ identifier[metric] . identifier[name] ]
identifier[transformer] ( identifier[metric] , identifier[scraper_config] )
keyword[except] identifier[Exception] keyword[as] identifier[err] :
identifier[self] . identifier[log] . identifier[warning] ( literal[string] . identifier[format] ( identifier[metric] . identifier[name] , identifier[err] ))
keyword[else] :
identifier[self] . identifier[log] . identifier[debug] (
literal[string]
literal[string] . identifier[format] ( identifier[metric] . identifier[name] )
)
keyword[else] :
keyword[if] identifier[scraper_config] [ literal[string] ] keyword[is] keyword[None] :
identifier[scraper_config] [ literal[string] ]=[ identifier[x] keyword[for] identifier[x] keyword[in] identifier[scraper_config] [ literal[string] ] keyword[if] literal[string] keyword[in] identifier[x] ]
keyword[for] identifier[wildcard] keyword[in] identifier[scraper_config] [ literal[string] ]:
keyword[if] identifier[fnmatchcase] ( identifier[metric] . identifier[name] , identifier[wildcard] ):
identifier[self] . identifier[submit_openmetric] ( identifier[metric] . identifier[name] , identifier[metric] , identifier[scraper_config] ) | def process_metric(self, metric, scraper_config, metric_transformers=None):
"""
Handle a prometheus metric according to the following flow:
- search scraper_config['metrics_mapper'] for a prometheus.metric <--> datadog.metric mapping
- call check method with the same name as the metric
- log some info if none of the above worked
`metric_transformers` is a dict of <metric name>:<function to run when the metric name is encountered>
"""
# If targeted metric, store labels
self._store_labels(metric, scraper_config)
if metric.name in scraper_config['ignore_metrics']:
return # Ignore the metric # depends on [control=['if'], data=[]]
# Filter metric to see if we can enrich with joined labels
self._join_labels(metric, scraper_config)
if scraper_config['_dry_run']:
return # depends on [control=['if'], data=[]]
try:
self.submit_openmetric(scraper_config['metrics_mapper'][metric.name], metric, scraper_config) # depends on [control=['try'], data=[]]
except KeyError:
if metric_transformers is not None:
if metric.name in metric_transformers:
try:
# Get the transformer function for this specific metric
transformer = metric_transformers[metric.name]
transformer(metric, scraper_config) # depends on [control=['try'], data=[]]
except Exception as err:
self.log.warning('Error handling metric: {} - error: {}'.format(metric.name, err)) # depends on [control=['except'], data=['err']] # depends on [control=['if'], data=['metric_transformers']]
else:
self.log.debug("Unable to handle metric: {0} - error: No handler function named '{0}' defined".format(metric.name)) # depends on [control=['if'], data=['metric_transformers']]
else:
# build the wildcard list if first pass
if scraper_config['_metrics_wildcards'] is None:
scraper_config['_metrics_wildcards'] = [x for x in scraper_config['metrics_mapper'] if '*' in x] # depends on [control=['if'], data=[]]
# try matching wildcard (generic check)
for wildcard in scraper_config['_metrics_wildcards']:
if fnmatchcase(metric.name, wildcard):
self.submit_openmetric(metric.name, metric, scraper_config) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['wildcard']] # depends on [control=['except'], data=[]] |
def metadata(self):
    """Return a list of metadata dicts, one per layer holding a value.

    Each entry records the layer name, the stored value and its source.
    The entry whose value would be selected if the node's value were
    requested (the last layer in ``self._layers``) is marked with
    ``'default': True``.
    """
    entries = []
    for layer in self._layers:
        if layer not in self._values:
            continue
        source, value = self._values[layer]
        entries.append({
            'layer': layer,
            'value': value,
            'source': source,
            'default': layer == self._layers[-1],
        })
    return entries
constant[Returns all values and associated metadata for this node as a dict.
The value which would be selected if the node's value was requested
is indicated by the `default` flag.
]
variable[result] assign[=] list[[]]
for taget[name[layer]] in starred[name[self]._layers] begin[:]
if compare[name[layer] in name[self]._values] begin[:]
call[name[result].append, parameter[dictionary[[<ast.Constant object at 0x7da18dc98f70>, <ast.Constant object at 0x7da18dc99690>, <ast.Constant object at 0x7da18dc9a5f0>, <ast.Constant object at 0x7da18dc98190>], [<ast.Name object at 0x7da18dc98c70>, <ast.Subscript object at 0x7da20c6a9960>, <ast.Subscript object at 0x7da20c6a9bd0>, <ast.Compare object at 0x7da20c6ab3d0>]]]]
return[name[result]] | keyword[def] identifier[metadata] ( identifier[self] ):
literal[string]
identifier[result] =[]
keyword[for] identifier[layer] keyword[in] identifier[self] . identifier[_layers] :
keyword[if] identifier[layer] keyword[in] identifier[self] . identifier[_values] :
identifier[result] . identifier[append] ({
literal[string] : identifier[layer] ,
literal[string] : identifier[self] . identifier[_values] [ identifier[layer] ][ literal[int] ],
literal[string] : identifier[self] . identifier[_values] [ identifier[layer] ][ literal[int] ],
literal[string] : identifier[layer] == identifier[self] . identifier[_layers] [- literal[int] ]
})
keyword[return] identifier[result] | def metadata(self):
"""Returns all values and associated metadata for this node as a dict.
The value which would be selected if the node's value was requested
is indicated by the `default` flag.
"""
result = []
for layer in self._layers:
if layer in self._values:
result.append({'layer': layer, 'value': self._values[layer][1], 'source': self._values[layer][0], 'default': layer == self._layers[-1]}) # depends on [control=['if'], data=['layer']] # depends on [control=['for'], data=['layer']]
return result |
def category(self, category):
    """
    Switch the current search to *category*, reset paging back to the
    first page, and return this search object so calls can be chained.
    """
    self.url.category = category
    self.url.set_page(1)
    return self
constant[
Change category of current search and return self
]
name[self].url.category assign[=] name[category]
call[name[self].url.set_page, parameter[constant[1]]]
return[name[self]] | keyword[def] identifier[category] ( identifier[self] , identifier[category] ):
literal[string]
identifier[self] . identifier[url] . identifier[category] = identifier[category]
identifier[self] . identifier[url] . identifier[set_page] ( literal[int] )
keyword[return] identifier[self] | def category(self, category):
"""
Change category of current search and return self
"""
self.url.category = category
self.url.set_page(1)
return self |
def extract_module_locals(depth=0):
    """Return (module, locals) of the function `depth` frames above the caller."""
    # +1 skips this function's own frame.
    caller = sys._getframe(depth + 1)
    mod_name = caller.f_globals['__name__']
    return (sys.modules[mod_name], caller.f_locals)
constant[Returns (module, locals) of the funciton `depth` frames away from the caller]
variable[f] assign[=] call[name[sys]._getframe, parameter[binary_operation[name[depth] + constant[1]]]]
variable[global_ns] assign[=] name[f].f_globals
variable[module] assign[=] call[name[sys].modules][call[name[global_ns]][constant[__name__]]]
return[tuple[[<ast.Name object at 0x7da18f810970>, <ast.Attribute object at 0x7da18f813ac0>]]] | keyword[def] identifier[extract_module_locals] ( identifier[depth] = literal[int] ):
literal[string]
identifier[f] = identifier[sys] . identifier[_getframe] ( identifier[depth] + literal[int] )
identifier[global_ns] = identifier[f] . identifier[f_globals]
identifier[module] = identifier[sys] . identifier[modules] [ identifier[global_ns] [ literal[string] ]]
keyword[return] ( identifier[module] , identifier[f] . identifier[f_locals] ) | def extract_module_locals(depth=0):
"""Returns (module, locals) of the funciton `depth` frames away from the caller"""
f = sys._getframe(depth + 1)
global_ns = f.f_globals
module = sys.modules[global_ns['__name__']]
return (module, f.f_locals) |
def import_media(filename=None):
    """
    Extracts media dump into your local media root.
    Please note that this might overwrite existing local files.
    Usage::
        fab import_media
        fab import_media:filename=foobar.tar.gz
    """
    if not filename:
        filename = settings.MEDIA_DUMP_FILENAME
    project_root = os.getcwd()
    dump_path = os.path.join(project_root, filename)
    # Probe for the dump quietly, without aborting on a non-zero exit.
    with fab_settings(hide('everything'), warn_only=True):
        dump_missing = local('test -e "$(echo %s)"' % dump_path).failed
    if dump_missing:
        abort(red('ERROR: There is no media backup that could be imported in'
                  ' {0}. We need a file called {1} in that folder.'.format(
                      project_root, filename)))
    # Copy the dump into the media root folder ...
    with lcd(project_root):
        local('cp {0} {1}'.format(filename, settings.MEDIA_ROOT))
    # ... then unpack it there and clean up the archive.
    with lcd(settings.MEDIA_ROOT):
        local('tar -xvf {0}'.format(filename))
        local('rm -rf {0}'.format(filename))
constant[
Extracts media dump into your local media root.
Please note that this might overwrite existing local files.
Usage::
fab import_media
fab import_media:filename=foobar.tar.gz
]
if <ast.UnaryOp object at 0x7da20c794640> begin[:]
variable[filename] assign[=] name[settings].MEDIA_DUMP_FILENAME
variable[project_root] assign[=] call[name[os].getcwd, parameter[]]
with call[name[fab_settings], parameter[call[name[hide], parameter[constant[everything]]]]] begin[:]
variable[is_backup_missing] assign[=] call[name[local], parameter[binary_operation[constant[test -e "$(echo %s)"] <ast.Mod object at 0x7da2590d6920> call[name[os].path.join, parameter[name[project_root], name[filename]]]]]].failed
if name[is_backup_missing] begin[:]
call[name[abort], parameter[call[name[red], parameter[call[constant[ERROR: There is no media backup that could be imported in {0}. We need a file called {1} in that folder.].format, parameter[name[project_root], name[filename]]]]]]]
with call[name[lcd], parameter[name[project_root]]] begin[:]
call[name[local], parameter[call[constant[cp {0} {1}].format, parameter[name[filename], name[settings].MEDIA_ROOT]]]]
with call[name[lcd], parameter[name[settings].MEDIA_ROOT]] begin[:]
call[name[local], parameter[call[constant[tar -xvf {0}].format, parameter[name[filename]]]]]
call[name[local], parameter[call[constant[rm -rf {0}].format, parameter[name[filename]]]]] | keyword[def] identifier[import_media] ( identifier[filename] = keyword[None] ):
literal[string]
keyword[if] keyword[not] identifier[filename] :
identifier[filename] = identifier[settings] . identifier[MEDIA_DUMP_FILENAME]
identifier[project_root] = identifier[os] . identifier[getcwd] ()
keyword[with] identifier[fab_settings] ( identifier[hide] ( literal[string] ), identifier[warn_only] = keyword[True] ):
identifier[is_backup_missing] = identifier[local] ( literal[string] % identifier[os] . identifier[path] . identifier[join] (
identifier[project_root] , identifier[filename] )). identifier[failed]
keyword[if] identifier[is_backup_missing] :
identifier[abort] ( identifier[red] ( literal[string]
literal[string] . identifier[format] (
identifier[project_root] , identifier[filename] )))
keyword[with] identifier[lcd] ( identifier[project_root] ):
identifier[local] ( literal[string] . identifier[format] ( identifier[filename] , identifier[settings] . identifier[MEDIA_ROOT] ))
keyword[with] identifier[lcd] ( identifier[settings] . identifier[MEDIA_ROOT] ):
identifier[local] ( literal[string] . identifier[format] ( identifier[filename] ))
identifier[local] ( literal[string] . identifier[format] ( identifier[filename] )) | def import_media(filename=None):
"""
Extracts media dump into your local media root.
Please note that this might overwrite existing local files.
Usage::
fab import_media
fab import_media:filename=foobar.tar.gz
"""
if not filename:
filename = settings.MEDIA_DUMP_FILENAME # depends on [control=['if'], data=[]]
project_root = os.getcwd()
with fab_settings(hide('everything'), warn_only=True):
is_backup_missing = local('test -e "$(echo %s)"' % os.path.join(project_root, filename)).failed # depends on [control=['with'], data=[]]
if is_backup_missing:
abort(red('ERROR: There is no media backup that could be imported in {0}. We need a file called {1} in that folder.'.format(project_root, filename))) # depends on [control=['if'], data=[]]
# copy the dump into the media root folder
with lcd(project_root):
local('cp {0} {1}'.format(filename, settings.MEDIA_ROOT)) # depends on [control=['with'], data=[]]
# extract and remove media dump
with lcd(settings.MEDIA_ROOT):
local('tar -xvf {0}'.format(filename))
local('rm -rf {0}'.format(filename)) # depends on [control=['with'], data=[]] |
def sub_dirs(path, invisible=False):
    """
    Child directories (non-recursive)

    Dot-prefixed ("invisible") directories are skipped unless
    *invisible* is True.
    """
    result = []
    for entry in os.listdir(path):
        if not os.path.isdir(os.path.join(path, entry)):
            continue
        if not invisible and entry.startswith('.'):
            continue
        result.append(entry)
    return result
constant[
Child directories (non-recursive)
]
variable[dirs] assign[=] <ast.ListComp object at 0x7da1b1240550>
if <ast.UnaryOp object at 0x7da1b1240220> begin[:]
variable[dirs] assign[=] <ast.ListComp object at 0x7da1b1242740>
return[name[dirs]] | keyword[def] identifier[sub_dirs] ( identifier[path] , identifier[invisible] = keyword[False] ):
literal[string]
identifier[dirs] =[ identifier[x] keyword[for] identifier[x] keyword[in] identifier[os] . identifier[listdir] ( identifier[path] ) keyword[if] identifier[os] . identifier[path] . identifier[isdir] ( identifier[os] . identifier[path] . identifier[join] ( identifier[path] , identifier[x] ))]
keyword[if] keyword[not] identifier[invisible] :
identifier[dirs] =[ identifier[x] keyword[for] identifier[x] keyword[in] identifier[dirs] keyword[if] keyword[not] identifier[x] . identifier[startswith] ( literal[string] )]
keyword[return] identifier[dirs] | def sub_dirs(path, invisible=False):
"""
Child directories (non-recursive)
"""
dirs = [x for x in os.listdir(path) if os.path.isdir(os.path.join(path, x))]
if not invisible:
dirs = [x for x in dirs if not x.startswith('.')] # depends on [control=['if'], data=[]]
return dirs |
def _create_column(values, dtype):
"Creates a column from values with dtype"
if str(dtype) == "tensor(int64)":
return numpy.array(values, dtype=numpy.int64)
elif str(dtype) == "tensor(float)":
return numpy.array(values, dtype=numpy.float32)
else:
raise OnnxRuntimeAssertionError("Unable to create one column from dtype '{0}'".format(dtype)) | def function[_create_column, parameter[values, dtype]]:
constant[Creates a column from values with dtype]
if compare[call[name[str], parameter[name[dtype]]] equal[==] constant[tensor(int64)]] begin[:]
return[call[name[numpy].array, parameter[name[values]]]] | keyword[def] identifier[_create_column] ( identifier[values] , identifier[dtype] ):
literal[string]
keyword[if] identifier[str] ( identifier[dtype] )== literal[string] :
keyword[return] identifier[numpy] . identifier[array] ( identifier[values] , identifier[dtype] = identifier[numpy] . identifier[int64] )
keyword[elif] identifier[str] ( identifier[dtype] )== literal[string] :
keyword[return] identifier[numpy] . identifier[array] ( identifier[values] , identifier[dtype] = identifier[numpy] . identifier[float32] )
keyword[else] :
keyword[raise] identifier[OnnxRuntimeAssertionError] ( literal[string] . identifier[format] ( identifier[dtype] )) | def _create_column(values, dtype):
"""Creates a column from values with dtype"""
if str(dtype) == 'tensor(int64)':
return numpy.array(values, dtype=numpy.int64) # depends on [control=['if'], data=[]]
elif str(dtype) == 'tensor(float)':
return numpy.array(values, dtype=numpy.float32) # depends on [control=['if'], data=[]]
else:
raise OnnxRuntimeAssertionError("Unable to create one column from dtype '{0}'".format(dtype)) |
def custom_filter_text(self, value, search):
    """Support general query using the 'text' attribute."""
    if isinstance(value, list):
        value = ' '.join(value)
    # (field, boost for the exact field, boost for its ngram sub-field)
    weighted_fields = [
        ('slug', 10.0, 5.0),
        ('name', 10.0, 5.0),
        ('contributor_name', 5.0, 2.0),
        ('owner_names', 5.0, 2.0),
    ]
    should = []
    for field, boost, ngram_boost in weighted_fields:
        should.append(
            Q('match', **{field: {'query': value, 'operator': 'and', 'boost': boost}})
        )
        should.append(
            Q('match', **{field + '.ngrams': {'query': value, 'operator': 'and', 'boost': ngram_boost}})
        )
    should.append(Q('match', descriptor_data={'query': value, 'operator': 'and'}))
    # Registered extensions may contribute additional text queries.
    for extension in composer.get_extensions(self):
        if hasattr(extension, 'text_filter'):
            should += extension.text_filter(value)
    return search.query('bool', should=should)
constant[Support general query using the 'text' attribute.]
if call[name[isinstance], parameter[name[value], name[list]]] begin[:]
variable[value] assign[=] call[constant[ ].join, parameter[name[value]]]
variable[should] assign[=] list[[<ast.Call object at 0x7da1b1af3850>, <ast.Call object at 0x7da1b1af03a0>, <ast.Call object at 0x7da1b1af3a00>, <ast.Call object at 0x7da1b1af3c10>, <ast.Call object at 0x7da1b1af3e80>, <ast.Call object at 0x7da1b1af1180>, <ast.Call object at 0x7da1b1af0760>, <ast.Call object at 0x7da1b1af0160>, <ast.Call object at 0x7da1b1af16f0>]]
for taget[name[extension]] in starred[call[name[composer].get_extensions, parameter[name[self]]]] begin[:]
if call[name[hasattr], parameter[name[extension], constant[text_filter]]] begin[:]
<ast.AugAssign object at 0x7da1b1af0a90>
variable[search] assign[=] call[name[search].query, parameter[constant[bool]]]
return[name[search]] | keyword[def] identifier[custom_filter_text] ( identifier[self] , identifier[value] , identifier[search] ):
literal[string]
keyword[if] identifier[isinstance] ( identifier[value] , identifier[list] ):
identifier[value] = literal[string] . identifier[join] ( identifier[value] )
identifier[should] =[
identifier[Q] ( literal[string] , identifier[slug] ={ literal[string] : identifier[value] , literal[string] : literal[string] , literal[string] : literal[int] }),
identifier[Q] ( literal[string] ,**{ literal[string] :{ literal[string] : identifier[value] , literal[string] : literal[string] , literal[string] : literal[int] }}),
identifier[Q] ( literal[string] , identifier[name] ={ literal[string] : identifier[value] , literal[string] : literal[string] , literal[string] : literal[int] }),
identifier[Q] ( literal[string] ,**{ literal[string] :{ literal[string] : identifier[value] , literal[string] : literal[string] , literal[string] : literal[int] }}),
identifier[Q] ( literal[string] , identifier[contributor_name] ={ literal[string] : identifier[value] , literal[string] : literal[string] , literal[string] : literal[int] }),
identifier[Q] ( literal[string] ,**{ literal[string] :{ literal[string] : identifier[value] , literal[string] : literal[string] , literal[string] : literal[int] }}),
identifier[Q] ( literal[string] , identifier[owner_names] ={ literal[string] : identifier[value] , literal[string] : literal[string] , literal[string] : literal[int] }),
identifier[Q] ( literal[string] ,**{ literal[string] :{ literal[string] : identifier[value] , literal[string] : literal[string] , literal[string] : literal[int] }}),
identifier[Q] ( literal[string] , identifier[descriptor_data] ={ literal[string] : identifier[value] , literal[string] : literal[string] }),
]
keyword[for] identifier[extension] keyword[in] identifier[composer] . identifier[get_extensions] ( identifier[self] ):
keyword[if] identifier[hasattr] ( identifier[extension] , literal[string] ):
identifier[should] += identifier[extension] . identifier[text_filter] ( identifier[value] )
identifier[search] = identifier[search] . identifier[query] ( literal[string] , identifier[should] = identifier[should] )
keyword[return] identifier[search] | def custom_filter_text(self, value, search):
"""Support general query using the 'text' attribute."""
if isinstance(value, list):
value = ' '.join(value) # depends on [control=['if'], data=[]]
should = [Q('match', slug={'query': value, 'operator': 'and', 'boost': 10.0}), Q('match', **{'slug.ngrams': {'query': value, 'operator': 'and', 'boost': 5.0}}), Q('match', name={'query': value, 'operator': 'and', 'boost': 10.0}), Q('match', **{'name.ngrams': {'query': value, 'operator': 'and', 'boost': 5.0}}), Q('match', contributor_name={'query': value, 'operator': 'and', 'boost': 5.0}), Q('match', **{'contributor_name.ngrams': {'query': value, 'operator': 'and', 'boost': 2.0}}), Q('match', owner_names={'query': value, 'operator': 'and', 'boost': 5.0}), Q('match', **{'owner_names.ngrams': {'query': value, 'operator': 'and', 'boost': 2.0}}), Q('match', descriptor_data={'query': value, 'operator': 'and'})]
# Add registered text extensions.
for extension in composer.get_extensions(self):
if hasattr(extension, 'text_filter'):
should += extension.text_filter(value) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['extension']]
search = search.query('bool', should=should)
return search |
def createEvent(self, physicalInterfaceId, eventTypeId, eventId):
    """
    Create an event mapping for a physical interface.
    Parameters:
      physicalInterfaceId (string) - value returned by the platform when creating the physical interface
      eventTypeId (string) - value returned by the platform when creating the event type
      eventId (string) - matches the event id used by the device in the MQTT topic
    Throws APIException on failure.
    """
    url = ApiClient.allEventsUrl % (self.host, "/draft", physicalInterfaceId)
    payload = json.dumps({"eventId": eventId, "eventTypeId": eventTypeId})
    resp = requests.post(
        url,
        auth=self.credentials,
        headers={"Content-Type": "application/json"},
        data=payload,
        verify=self.verify,
    )
    # Anything other than 201 Created is treated as an error.
    if resp.status_code != 201:
        raise ibmiotf.APIException(resp.status_code, "HTTP error creating event mapping", resp)
    self.logger.debug("Event mapping created")
    return resp.json()
constant[
Create an event mapping for a physical interface.
Parameters:
physicalInterfaceId (string) - value returned by the platform when creating the physical interface
eventTypeId (string) - value returned by the platform when creating the event type
eventId (string) - matches the event id used by the device in the MQTT topic
Throws APIException on failure.
]
variable[req] assign[=] binary_operation[name[ApiClient].allEventsUrl <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Attribute object at 0x7da1b05c8c40>, <ast.Constant object at 0x7da1b05cbc70>, <ast.Name object at 0x7da1b05c8580>]]]
variable[body] assign[=] dictionary[[<ast.Constant object at 0x7da1b05ca8f0>, <ast.Constant object at 0x7da1b05cbdf0>], [<ast.Name object at 0x7da1b05cb430>, <ast.Name object at 0x7da1b05cae60>]]
variable[resp] assign[=] call[name[requests].post, parameter[name[req]]]
if compare[name[resp].status_code equal[==] constant[201]] begin[:]
call[name[self].logger.debug, parameter[constant[Event mapping created]]]
return[call[name[resp].json, parameter[]]] | keyword[def] identifier[createEvent] ( identifier[self] , identifier[physicalInterfaceId] , identifier[eventTypeId] , identifier[eventId] ):
literal[string]
identifier[req] = identifier[ApiClient] . identifier[allEventsUrl] %( identifier[self] . identifier[host] , literal[string] , identifier[physicalInterfaceId] )
identifier[body] ={ literal[string] : identifier[eventId] , literal[string] : identifier[eventTypeId] }
identifier[resp] = identifier[requests] . identifier[post] ( identifier[req] , identifier[auth] = identifier[self] . identifier[credentials] , identifier[headers] ={ literal[string] : literal[string] }, identifier[data] = identifier[json] . identifier[dumps] ( identifier[body] ),
identifier[verify] = identifier[self] . identifier[verify] )
keyword[if] identifier[resp] . identifier[status_code] == literal[int] :
identifier[self] . identifier[logger] . identifier[debug] ( literal[string] )
keyword[else] :
keyword[raise] identifier[ibmiotf] . identifier[APIException] ( identifier[resp] . identifier[status_code] , literal[string] , identifier[resp] )
keyword[return] identifier[resp] . identifier[json] () | def createEvent(self, physicalInterfaceId, eventTypeId, eventId):
"""
Create an event mapping for a physical interface.
Parameters:
physicalInterfaceId (string) - value returned by the platform when creating the physical interface
eventTypeId (string) - value returned by the platform when creating the event type
eventId (string) - matches the event id used by the device in the MQTT topic
Throws APIException on failure.
"""
req = ApiClient.allEventsUrl % (self.host, '/draft', physicalInterfaceId)
body = {'eventId': eventId, 'eventTypeId': eventTypeId}
resp = requests.post(req, auth=self.credentials, headers={'Content-Type': 'application/json'}, data=json.dumps(body), verify=self.verify)
if resp.status_code == 201:
self.logger.debug('Event mapping created') # depends on [control=['if'], data=[]]
else:
raise ibmiotf.APIException(resp.status_code, 'HTTP error creating event mapping', resp)
return resp.json() |
def _new_dynspace(
    self,
    name=None,
    bases=None,
    formula=None,
    refs=None,
    arguments=None,
    source=None,
):
    """Create and register a new dynamic root space.

    When *name* is omitted, a fresh name is drawn from ``self.spacenamer``.
    Whether supplied or generated, the name must be a valid identifier that
    is not already present in this space's namespace.  If *bases* is a
    non-empty sequence, the new space is linked to the dynamic base derived
    from them.
    """
    # Resolve the name, then validate it (generated names are checked too).
    if name is None:
        name = self.spacenamer.get_next(self.namespace)
    if name in self.namespace:
        raise ValueError("Name '%s' already exists." % name)
    if not is_valid_name(name):
        raise ValueError("Invalid name '%s'." % name)

    new_space = RootDynamicSpaceImpl(
        parent=self,
        name=name,
        formula=formula,
        refs=refs,
        source=source,
        arguments=arguments,
    )
    new_space.is_derived = False
    self._set_space(new_space)

    # Wire the new space into its dynamic base, if any bases were given.
    if bases:
        base = self._get_dynamic_base(bases)
        new_space._dynbase = base
        base._dynamic_subs.append(new_space)

    return new_space
constant[Create a new dynamic root space.]
if compare[name[name] is constant[None]] begin[:]
variable[name] assign[=] call[name[self].spacenamer.get_next, parameter[name[self].namespace]]
if compare[name[name] in name[self].namespace] begin[:]
<ast.Raise object at 0x7da1b00db610>
if <ast.UnaryOp object at 0x7da1b00dbee0> begin[:]
<ast.Raise object at 0x7da1b00db490>
variable[space] assign[=] call[name[RootDynamicSpaceImpl], parameter[]]
name[space].is_derived assign[=] constant[False]
call[name[self]._set_space, parameter[name[space]]]
if name[bases] begin[:]
variable[dynbase] assign[=] call[name[self]._get_dynamic_base, parameter[name[bases]]]
name[space]._dynbase assign[=] name[dynbase]
call[name[dynbase]._dynamic_subs.append, parameter[name[space]]]
return[name[space]] | keyword[def] identifier[_new_dynspace] (
identifier[self] ,
identifier[name] = keyword[None] ,
identifier[bases] = keyword[None] ,
identifier[formula] = keyword[None] ,
identifier[refs] = keyword[None] ,
identifier[arguments] = keyword[None] ,
identifier[source] = keyword[None] ,
):
literal[string]
keyword[if] identifier[name] keyword[is] keyword[None] :
identifier[name] = identifier[self] . identifier[spacenamer] . identifier[get_next] ( identifier[self] . identifier[namespace] )
keyword[if] identifier[name] keyword[in] identifier[self] . identifier[namespace] :
keyword[raise] identifier[ValueError] ( literal[string] % identifier[name] )
keyword[if] keyword[not] identifier[is_valid_name] ( identifier[name] ):
keyword[raise] identifier[ValueError] ( literal[string] % identifier[name] )
identifier[space] = identifier[RootDynamicSpaceImpl] (
identifier[parent] = identifier[self] ,
identifier[name] = identifier[name] ,
identifier[formula] = identifier[formula] ,
identifier[refs] = identifier[refs] ,
identifier[source] = identifier[source] ,
identifier[arguments] = identifier[arguments] ,
)
identifier[space] . identifier[is_derived] = keyword[False]
identifier[self] . identifier[_set_space] ( identifier[space] )
keyword[if] identifier[bases] :
identifier[dynbase] = identifier[self] . identifier[_get_dynamic_base] ( identifier[bases] )
identifier[space] . identifier[_dynbase] = identifier[dynbase]
identifier[dynbase] . identifier[_dynamic_subs] . identifier[append] ( identifier[space] )
keyword[return] identifier[space] | def _new_dynspace(self, name=None, bases=None, formula=None, refs=None, arguments=None, source=None):
"""Create a new dynamic root space."""
if name is None:
name = self.spacenamer.get_next(self.namespace) # depends on [control=['if'], data=['name']]
if name in self.namespace:
raise ValueError("Name '%s' already exists." % name) # depends on [control=['if'], data=['name']]
if not is_valid_name(name):
raise ValueError("Invalid name '%s'." % name) # depends on [control=['if'], data=[]]
space = RootDynamicSpaceImpl(parent=self, name=name, formula=formula, refs=refs, source=source, arguments=arguments)
space.is_derived = False
self._set_space(space)
if bases: # i.e. not []
dynbase = self._get_dynamic_base(bases)
space._dynbase = dynbase
dynbase._dynamic_subs.append(space) # depends on [control=['if'], data=[]]
return space |
def folderitems(self):
    """TODO: Refactor to non-classic mode

    Delegates row construction to the parent class, then sorts the
    accumulated category list in place before returning the rows.
    """
    rows = super(ServicesView, self).folderitems()
    self.categories.sort()
    return rows
return items | def function[folderitems, parameter[self]]:
constant[TODO: Refactor to non-classic mode
]
variable[items] assign[=] call[call[name[super], parameter[name[ServicesView], name[self]]].folderitems, parameter[]]
call[name[self].categories.sort, parameter[]]
return[name[items]] | keyword[def] identifier[folderitems] ( identifier[self] ):
literal[string]
identifier[items] = identifier[super] ( identifier[ServicesView] , identifier[self] ). identifier[folderitems] ()
identifier[self] . identifier[categories] . identifier[sort] ()
keyword[return] identifier[items] | def folderitems(self):
"""TODO: Refactor to non-classic mode
"""
items = super(ServicesView, self).folderitems()
self.categories.sort()
return items |
def history(self, request, uuid=None):
    """
    Return historical snapshots of this quota object.

    Historical data endpoints could be available for any objects (currently
    implemented for quotas and events count). The data is available at
    *<object_endpoint>/history/*, for example: */api/quotas/<uuid>/history/*.

    There are two ways to define datetime points for historical data:

    1. Send *?point=<timestamp>* parameters (may be repeated). Response will
       contain historical data for each given point in the same order.
    2. Send *?start=<timestamp>*, *?end=<timestamp>*, *?points_count=<integer>*
       parameters. Result will contain <points_count> points from <start>
       to <end>.

    Response format:

    .. code-block:: javascript

        [
            {
                "point": <timestamp>,
                "object": {<object_representation>}
            },
            ...
        ]

    NB! There will not be any "object" key for a point if there is no
    version of the object recorded at or before that timestamp.
    """
    # Collect only the query parameters that were actually supplied; the
    # serializer validates the two mutually exclusive ways of specifying
    # points (explicit point list vs. start/end/points_count).
    mapped = {
        'start': request.query_params.get('start'),
        'end': request.query_params.get('end'),
        'points_count': request.query_params.get('points_count'),
        'point_list': request.query_params.getlist('point'),
    }
    history_serializer = HistorySerializer(data={k: v for k, v in mapped.items() if v})
    history_serializer.is_valid(raise_exception=True)
    quota = self.get_object()
    # Serializer for the *current* object; its data is reused as a template
    # for each historical point below.
    serializer = self.get_serializer(quota)
    serialized_versions = []
    for point_date in history_serializer.get_filter_data():
        serialized = {'point': datetime_to_timestamp(point_date)}
        # All versions recorded at or before this point in time.
        # NOTE(review): .first() below assumes django-reversion's default
        # ordering yields the most recent matching version first — confirm
        # against the installed reversion release.
        version = Version.objects.get_for_object(quota).filter(revision__date_created__lte=point_date)
        if version.exists():
            # make copy of serialized data and update fields that are stored in version
            version_object = version.first()._object_version.object
            serialized['object'] = serializer.data.copy()
            serialized['object'].update({
                f: getattr(version_object, f) for f in quota.get_version_fields()
            })
        serialized_versions.append(serialized)
    return response.Response(serialized_versions, status=status.HTTP_200_OK)
constant[
Historical data endpoints could be available for any objects (currently
implemented for quotas and events count). The data is available at *<object_endpoint>/history/*,
for example: */api/quotas/<uuid>/history/*.
There are two ways to define datetime points for historical data.
1. Send *?point=<timestamp>* parameter that can list. Response will contain historical data for each given point
in the same order.
2. Send *?start=<timestamp>*, *?end=<timestamp>*, *?points_count=<integer>* parameters.
Result will contain <points_count> points from <start> to <end>.
Response format:
.. code-block:: javascript
[
{
"point": <timestamp>,
"object": {<object_representation>}
},
{
"point": <timestamp>
"object": {<object_representation>}
},
...
]
NB! There will not be any "object" for corresponding point in response if there
is no data about object for a given timestamp.
]
variable[mapped] assign[=] dictionary[[<ast.Constant object at 0x7da1b0fe7a90>, <ast.Constant object at 0x7da1b0fe7280>, <ast.Constant object at 0x7da1b0fe5060>, <ast.Constant object at 0x7da1b0fe7250>], [<ast.Call object at 0x7da1b0fe59f0>, <ast.Call object at 0x7da1b0fe48e0>, <ast.Call object at 0x7da1b0fe60e0>, <ast.Call object at 0x7da1b0fe6cb0>]]
variable[history_serializer] assign[=] call[name[HistorySerializer], parameter[]]
call[name[history_serializer].is_valid, parameter[]]
variable[quota] assign[=] call[name[self].get_object, parameter[]]
variable[serializer] assign[=] call[name[self].get_serializer, parameter[name[quota]]]
variable[serialized_versions] assign[=] list[[]]
for taget[name[point_date]] in starred[call[name[history_serializer].get_filter_data, parameter[]]] begin[:]
variable[serialized] assign[=] dictionary[[<ast.Constant object at 0x7da1b0fe7520>], [<ast.Call object at 0x7da1b0fe4bb0>]]
variable[version] assign[=] call[call[name[Version].objects.get_for_object, parameter[name[quota]]].filter, parameter[]]
if call[name[version].exists, parameter[]] begin[:]
variable[version_object] assign[=] call[name[version].first, parameter[]]._object_version.object
call[name[serialized]][constant[object]] assign[=] call[name[serializer].data.copy, parameter[]]
call[call[name[serialized]][constant[object]].update, parameter[<ast.DictComp object at 0x7da1b0fe7bb0>]]
call[name[serialized_versions].append, parameter[name[serialized]]]
return[call[name[response].Response, parameter[name[serialized_versions]]]] | keyword[def] identifier[history] ( identifier[self] , identifier[request] , identifier[uuid] = keyword[None] ):
literal[string]
identifier[mapped] ={
literal[string] : identifier[request] . identifier[query_params] . identifier[get] ( literal[string] ),
literal[string] : identifier[request] . identifier[query_params] . identifier[get] ( literal[string] ),
literal[string] : identifier[request] . identifier[query_params] . identifier[get] ( literal[string] ),
literal[string] : identifier[request] . identifier[query_params] . identifier[getlist] ( literal[string] ),
}
identifier[history_serializer] = identifier[HistorySerializer] ( identifier[data] ={ identifier[k] : identifier[v] keyword[for] identifier[k] , identifier[v] keyword[in] identifier[mapped] . identifier[items] () keyword[if] identifier[v] })
identifier[history_serializer] . identifier[is_valid] ( identifier[raise_exception] = keyword[True] )
identifier[quota] = identifier[self] . identifier[get_object] ()
identifier[serializer] = identifier[self] . identifier[get_serializer] ( identifier[quota] )
identifier[serialized_versions] =[]
keyword[for] identifier[point_date] keyword[in] identifier[history_serializer] . identifier[get_filter_data] ():
identifier[serialized] ={ literal[string] : identifier[datetime_to_timestamp] ( identifier[point_date] )}
identifier[version] = identifier[Version] . identifier[objects] . identifier[get_for_object] ( identifier[quota] ). identifier[filter] ( identifier[revision__date_created__lte] = identifier[point_date] )
keyword[if] identifier[version] . identifier[exists] ():
identifier[version_object] = identifier[version] . identifier[first] (). identifier[_object_version] . identifier[object]
identifier[serialized] [ literal[string] ]= identifier[serializer] . identifier[data] . identifier[copy] ()
identifier[serialized] [ literal[string] ]. identifier[update] ({
identifier[f] : identifier[getattr] ( identifier[version_object] , identifier[f] ) keyword[for] identifier[f] keyword[in] identifier[quota] . identifier[get_version_fields] ()
})
identifier[serialized_versions] . identifier[append] ( identifier[serialized] )
keyword[return] identifier[response] . identifier[Response] ( identifier[serialized_versions] , identifier[status] = identifier[status] . identifier[HTTP_200_OK] ) | def history(self, request, uuid=None):
"""
Historical data endpoints could be available for any objects (currently
implemented for quotas and events count). The data is available at *<object_endpoint>/history/*,
for example: */api/quotas/<uuid>/history/*.
There are two ways to define datetime points for historical data.
1. Send *?point=<timestamp>* parameter that can list. Response will contain historical data for each given point
in the same order.
2. Send *?start=<timestamp>*, *?end=<timestamp>*, *?points_count=<integer>* parameters.
Result will contain <points_count> points from <start> to <end>.
Response format:
.. code-block:: javascript
[
{
"point": <timestamp>,
"object": {<object_representation>}
},
{
"point": <timestamp>
"object": {<object_representation>}
},
...
]
NB! There will not be any "object" for corresponding point in response if there
is no data about object for a given timestamp.
"""
mapped = {'start': request.query_params.get('start'), 'end': request.query_params.get('end'), 'points_count': request.query_params.get('points_count'), 'point_list': request.query_params.getlist('point')}
history_serializer = HistorySerializer(data={k: v for (k, v) in mapped.items() if v})
history_serializer.is_valid(raise_exception=True)
quota = self.get_object()
serializer = self.get_serializer(quota)
serialized_versions = []
for point_date in history_serializer.get_filter_data():
serialized = {'point': datetime_to_timestamp(point_date)}
version = Version.objects.get_for_object(quota).filter(revision__date_created__lte=point_date)
if version.exists():
# make copy of serialized data and update field that are stored in version
version_object = version.first()._object_version.object
serialized['object'] = serializer.data.copy()
serialized['object'].update({f: getattr(version_object, f) for f in quota.get_version_fields()}) # depends on [control=['if'], data=[]]
serialized_versions.append(serialized) # depends on [control=['for'], data=['point_date']]
return response.Response(serialized_versions, status=status.HTTP_200_OK) |
def injections_from_cli(opts):
    """Read injection parameters from the inference file(s).

    Parameters
    ----------
    opts : argparser
        Argparser object that has the command-line objects to parse.

    Returns
    -------
    FieldArray
        Array of the injection parameters from all of the input files given
        by ``opts.input_file``.
    """
    filenames = opts.input_file
    # A single filename may arrive as a bare string; normalize to a list.
    if isinstance(filenames, str):
        filenames = [filenames]
    combined = None
    # Read each input file's injections and concatenate them.
    for filename in filenames:
        fp = loadfile(filename, 'r')
        injs = fp.read_injections()
        combined = injs if combined is None else combined.append(injs)
    return combined
constant[Gets injection parameters from the inference file(s).
Parameters
----------
opts : argparser
Argparser object that has the command-line objects to parse.
Returns
-------
FieldArray
Array of the injection parameters from all of the input files given
by ``opts.input_file``.
]
variable[input_files] assign[=] name[opts].input_file
if call[name[isinstance], parameter[name[input_files], name[str]]] begin[:]
variable[input_files] assign[=] list[[<ast.Name object at 0x7da18dc07430>]]
variable[injections] assign[=] constant[None]
for taget[name[input_file]] in starred[name[input_files]] begin[:]
variable[fp] assign[=] call[name[loadfile], parameter[name[input_file], constant[r]]]
variable[these_injs] assign[=] call[name[fp].read_injections, parameter[]]
if compare[name[injections] is constant[None]] begin[:]
variable[injections] assign[=] name[these_injs]
return[name[injections]] | keyword[def] identifier[injections_from_cli] ( identifier[opts] ):
literal[string]
identifier[input_files] = identifier[opts] . identifier[input_file]
keyword[if] identifier[isinstance] ( identifier[input_files] , identifier[str] ):
identifier[input_files] =[ identifier[input_files] ]
identifier[injections] = keyword[None]
keyword[for] identifier[input_file] keyword[in] identifier[input_files] :
identifier[fp] = identifier[loadfile] ( identifier[input_file] , literal[string] )
identifier[these_injs] = identifier[fp] . identifier[read_injections] ()
keyword[if] identifier[injections] keyword[is] keyword[None] :
identifier[injections] = identifier[these_injs]
keyword[else] :
identifier[injections] = identifier[injections] . identifier[append] ( identifier[these_injs] )
keyword[return] identifier[injections] | def injections_from_cli(opts):
"""Gets injection parameters from the inference file(s).
Parameters
----------
opts : argparser
Argparser object that has the command-line objects to parse.
Returns
-------
FieldArray
Array of the injection parameters from all of the input files given
by ``opts.input_file``.
"""
input_files = opts.input_file
if isinstance(input_files, str):
input_files = [input_files] # depends on [control=['if'], data=[]]
injections = None
# loop over all input files getting the injection files
for input_file in input_files:
fp = loadfile(input_file, 'r')
these_injs = fp.read_injections()
if injections is None:
injections = these_injs # depends on [control=['if'], data=['injections']]
else:
injections = injections.append(these_injs) # depends on [control=['for'], data=['input_file']]
return injections |
def taskGroupCreationRequested(self, *args, **kwargs):
    """
    tc-gh requested the Queue service to create all the tasks in a group

    Supposed to signal that the taskCreate API has been called for every
    task in the task group for this particular repo and this particular
    organization. Currently used for creating initial status indicators in
    the GitHub UI using the Statuses API.

    This exchange can also be bound to RabbitMQ queues by custom routes —
    pass the array of routes as a second argument to the publish method.
    Currently the statuses routes are used to bind the handler that creates
    the initial status.

    This exchange outputs: ``v1/task-group-creation-requested.json#``

    Routing keys:
      * routingKeyKind: always ``"primary"`` for the formalized routing
        key. (required)
      * organization: the GitHub organization which had an event; periods
        replaced by ``%``, other special characters aside from ``-`` and
        ``_`` stripped. (required)
      * repository: the GitHub repository which had an event; same
        character normalization as organization. (required)
    """
    # Build the routing-key descriptors first, then the exchange reference.
    routing_key = [
        {
            'constant': 'primary',
            'multipleWords': False,
            'name': 'routingKeyKind',
        },
        {
            'multipleWords': False,
            'name': 'organization',
        },
        {
            'multipleWords': False,
            'name': 'repository',
        },
    ]
    ref = {
        'exchange': 'task-group-creation-requested',
        'name': 'taskGroupCreationRequested',
        'routingKey': routing_key,
        'schema': 'v1/task-group-creation-requested.json#',
    }
    return self._makeTopicExchange(ref, *args, **kwargs)
constant[
tc-gh requested the Queue service to create all the tasks in a group
supposed to signal that taskCreate API has been called for every task in the task group
for this particular repo and this particular organization
currently used for creating initial status indicators in GitHub UI using Statuses API.
This particular exchange can also be bound to RabbitMQ queues by custom routes - for that,
Pass in the array of routes as a second argument to the publish method. Currently, we do
use the statuses routes to bind the handler that creates the initial status.
This exchange outputs: ``v1/task-group-creation-requested.json#``This exchange takes the following keys:
* routingKeyKind: Identifier for the routing-key kind. This is always `"primary"` for the formalized routing key. (required)
* organization: The GitHub `organization` which had an event. All periods have been replaced by % - such that foo.bar becomes foo%bar - and all other special characters aside from - and _ have been stripped. (required)
* repository: The GitHub `repository` which had an event.All periods have been replaced by % - such that foo.bar becomes foo%bar - and all other special characters aside from - and _ have been stripped. (required)
]
variable[ref] assign[=] dictionary[[<ast.Constant object at 0x7da204961d50>, <ast.Constant object at 0x7da2049615a0>, <ast.Constant object at 0x7da204963160>, <ast.Constant object at 0x7da204960b50>], [<ast.Constant object at 0x7da204963730>, <ast.Constant object at 0x7da204962680>, <ast.List object at 0x7da204963d30>, <ast.Constant object at 0x7da204961000>]]
return[call[name[self]._makeTopicExchange, parameter[name[ref], <ast.Starred object at 0x7da2049601c0>]]] | keyword[def] identifier[taskGroupCreationRequested] ( identifier[self] ,* identifier[args] ,** identifier[kwargs] ):
literal[string]
identifier[ref] ={
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] :[
{
literal[string] : literal[string] ,
literal[string] : keyword[False] ,
literal[string] : literal[string] ,
},
{
literal[string] : keyword[False] ,
literal[string] : literal[string] ,
},
{
literal[string] : keyword[False] ,
literal[string] : literal[string] ,
},
],
literal[string] : literal[string] ,
}
keyword[return] identifier[self] . identifier[_makeTopicExchange] ( identifier[ref] ,* identifier[args] ,** identifier[kwargs] ) | def taskGroupCreationRequested(self, *args, **kwargs):
"""
tc-gh requested the Queue service to create all the tasks in a group
supposed to signal that taskCreate API has been called for every task in the task group
for this particular repo and this particular organization
currently used for creating initial status indicators in GitHub UI using Statuses API.
This particular exchange can also be bound to RabbitMQ queues by custom routes - for that,
Pass in the array of routes as a second argument to the publish method. Currently, we do
use the statuses routes to bind the handler that creates the initial status.
This exchange outputs: ``v1/task-group-creation-requested.json#``This exchange takes the following keys:
* routingKeyKind: Identifier for the routing-key kind. This is always `"primary"` for the formalized routing key. (required)
* organization: The GitHub `organization` which had an event. All periods have been replaced by % - such that foo.bar becomes foo%bar - and all other special characters aside from - and _ have been stripped. (required)
* repository: The GitHub `repository` which had an event.All periods have been replaced by % - such that foo.bar becomes foo%bar - and all other special characters aside from - and _ have been stripped. (required)
"""
ref = {'exchange': 'task-group-creation-requested', 'name': 'taskGroupCreationRequested', 'routingKey': [{'constant': 'primary', 'multipleWords': False, 'name': 'routingKeyKind'}, {'multipleWords': False, 'name': 'organization'}, {'multipleWords': False, 'name': 'repository'}], 'schema': 'v1/task-group-creation-requested.json#'}
return self._makeTopicExchange(ref, *args, **kwargs) |
def ReplaceInFile(filename, old, new, encoding=None):
    '''
    Replaces all occurrences of "old" by "new" in the given file.

    :param unicode filename:
        The name of the file.

    :param unicode old:
        The string to search for.

    :param unicode new:
        Replacement string.

    :return unicode:
        The new contents of the file.
    '''
    # Read, substitute, and write back in one pass; the rewritten text is
    # returned so callers can inspect the result.
    updated = GetFileContents(filename, encoding=encoding).replace(old, new)
    CreateFile(filename, updated, encoding=encoding)
    return updated
constant[
Replaces all occurrences of "old" by "new" in the given file.
:param unicode filename:
The name of the file.
:param unicode old:
The string to search for.
:param unicode new:
Replacement string.
:return unicode:
The new contents of the file.
]
variable[contents] assign[=] call[name[GetFileContents], parameter[name[filename]]]
variable[contents] assign[=] call[name[contents].replace, parameter[name[old], name[new]]]
call[name[CreateFile], parameter[name[filename], name[contents]]]
return[name[contents]] | keyword[def] identifier[ReplaceInFile] ( identifier[filename] , identifier[old] , identifier[new] , identifier[encoding] = keyword[None] ):
literal[string]
identifier[contents] = identifier[GetFileContents] ( identifier[filename] , identifier[encoding] = identifier[encoding] )
identifier[contents] = identifier[contents] . identifier[replace] ( identifier[old] , identifier[new] )
identifier[CreateFile] ( identifier[filename] , identifier[contents] , identifier[encoding] = identifier[encoding] )
keyword[return] identifier[contents] | def ReplaceInFile(filename, old, new, encoding=None):
"""
Replaces all occurrences of "old" by "new" in the given file.
:param unicode filename:
The name of the file.
:param unicode old:
The string to search for.
:param unicode new:
Replacement string.
:return unicode:
The new contents of the file.
"""
contents = GetFileContents(filename, encoding=encoding)
contents = contents.replace(old, new)
CreateFile(filename, contents, encoding=encoding)
return contents |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.