code stringlengths 75 104k | code_sememe stringlengths 47 309k | token_type stringlengths 215 214k | code_dependency stringlengths 75 155k |
|---|---|---|---|
def edge_cost(self, node_a, node_b):
"""Returns the cost of moving between the edge that connects node_a to node_b.
Returns +inf if no such edge exists."""
cost = float('inf')
node_object_a = self.get_node(node_a)
for edge_id in node_object_a['edges']:
edge = self.get_edge(edge_id)
tpl = (node_a, node_b)
if edge['vertices'] == tpl:
cost = edge['cost']
break
return cost | def function[edge_cost, parameter[self, node_a, node_b]]:
constant[Returns the cost of moving between the edge that connects node_a to node_b.
Returns +inf if no such edge exists.]
variable[cost] assign[=] call[name[float], parameter[constant[inf]]]
variable[node_object_a] assign[=] call[name[self].get_node, parameter[name[node_a]]]
for taget[name[edge_id]] in starred[call[name[node_object_a]][constant[edges]]] begin[:]
variable[edge] assign[=] call[name[self].get_edge, parameter[name[edge_id]]]
variable[tpl] assign[=] tuple[[<ast.Name object at 0x7da1b28f02e0>, <ast.Name object at 0x7da1b28f1000>]]
if compare[call[name[edge]][constant[vertices]] equal[==] name[tpl]] begin[:]
variable[cost] assign[=] call[name[edge]][constant[cost]]
break
return[name[cost]] | keyword[def] identifier[edge_cost] ( identifier[self] , identifier[node_a] , identifier[node_b] ):
literal[string]
identifier[cost] = identifier[float] ( literal[string] )
identifier[node_object_a] = identifier[self] . identifier[get_node] ( identifier[node_a] )
keyword[for] identifier[edge_id] keyword[in] identifier[node_object_a] [ literal[string] ]:
identifier[edge] = identifier[self] . identifier[get_edge] ( identifier[edge_id] )
identifier[tpl] =( identifier[node_a] , identifier[node_b] )
keyword[if] identifier[edge] [ literal[string] ]== identifier[tpl] :
identifier[cost] = identifier[edge] [ literal[string] ]
keyword[break]
keyword[return] identifier[cost] | def edge_cost(self, node_a, node_b):
"""Returns the cost of moving between the edge that connects node_a to node_b.
Returns +inf if no such edge exists."""
cost = float('inf')
node_object_a = self.get_node(node_a)
for edge_id in node_object_a['edges']:
edge = self.get_edge(edge_id)
tpl = (node_a, node_b)
if edge['vertices'] == tpl:
cost = edge['cost']
break # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['edge_id']]
return cost |
def _get_request_content(self, message=None):
'''Updates all messages in message with default message
parameters.
:param message: A collection of Postmark message data
:type message: a collection of message `dict`s
:rtype: JSON encoded `str`
'''
if not message:
raise MessageError('No messages to send.')
if len(message) > MAX_BATCH_MESSAGES:
err = 'Maximum {0} messages allowed in batch'
raise MessageError(err.format(MAX_BATCH_MESSAGES))
message = [self._cast_message(message=msg) for msg in message]
message = [msg.data() for msg in message]
return json.dumps(message, ensure_ascii=True) | def function[_get_request_content, parameter[self, message]]:
constant[Updates all messages in message with default message
parameters.
:param message: A collection of Postmark message data
:type message: a collection of message `dict`s
:rtype: JSON encoded `str`
]
if <ast.UnaryOp object at 0x7da18f58df30> begin[:]
<ast.Raise object at 0x7da18f58d930>
if compare[call[name[len], parameter[name[message]]] greater[>] name[MAX_BATCH_MESSAGES]] begin[:]
variable[err] assign[=] constant[Maximum {0} messages allowed in batch]
<ast.Raise object at 0x7da18bc71bd0>
variable[message] assign[=] <ast.ListComp object at 0x7da20c76dcf0>
variable[message] assign[=] <ast.ListComp object at 0x7da18f8102b0>
return[call[name[json].dumps, parameter[name[message]]]] | keyword[def] identifier[_get_request_content] ( identifier[self] , identifier[message] = keyword[None] ):
literal[string]
keyword[if] keyword[not] identifier[message] :
keyword[raise] identifier[MessageError] ( literal[string] )
keyword[if] identifier[len] ( identifier[message] )> identifier[MAX_BATCH_MESSAGES] :
identifier[err] = literal[string]
keyword[raise] identifier[MessageError] ( identifier[err] . identifier[format] ( identifier[MAX_BATCH_MESSAGES] ))
identifier[message] =[ identifier[self] . identifier[_cast_message] ( identifier[message] = identifier[msg] ) keyword[for] identifier[msg] keyword[in] identifier[message] ]
identifier[message] =[ identifier[msg] . identifier[data] () keyword[for] identifier[msg] keyword[in] identifier[message] ]
keyword[return] identifier[json] . identifier[dumps] ( identifier[message] , identifier[ensure_ascii] = keyword[True] ) | def _get_request_content(self, message=None):
"""Updates all messages in message with default message
parameters.
:param message: A collection of Postmark message data
:type message: a collection of message `dict`s
:rtype: JSON encoded `str`
"""
if not message:
raise MessageError('No messages to send.') # depends on [control=['if'], data=[]]
if len(message) > MAX_BATCH_MESSAGES:
err = 'Maximum {0} messages allowed in batch'
raise MessageError(err.format(MAX_BATCH_MESSAGES)) # depends on [control=['if'], data=['MAX_BATCH_MESSAGES']]
message = [self._cast_message(message=msg) for msg in message]
message = [msg.data() for msg in message]
return json.dumps(message, ensure_ascii=True) |
def _get_corpus_properties(self, corpus_name):
"""Check whether a corpus is available for import.
:type corpus_name: str
:param corpus_name: Name of available corpus.
:rtype : str
"""
try:
# corpora = LANGUAGE_CORPORA[self.language]
corpora = self.all_corpora
except NameError as name_error:
msg = 'Corpus not available for language ' \
'"%s": %s' % (self.language, name_error)
logger.error(msg)
raise CorpusImportError(msg)
for corpus_properties in corpora:
if corpus_properties['name'] == corpus_name:
return corpus_properties
msg = 'Corpus "%s" not available for the ' \
'"%s" language.' % (corpus_name, self.language)
logger.error(msg)
raise CorpusImportError(msg) | def function[_get_corpus_properties, parameter[self, corpus_name]]:
constant[Check whether a corpus is available for import.
:type corpus_name: str
:param corpus_name: Name of available corpus.
:rtype : str
]
<ast.Try object at 0x7da2044c1b10>
for taget[name[corpus_properties]] in starred[name[corpora]] begin[:]
if compare[call[name[corpus_properties]][constant[name]] equal[==] name[corpus_name]] begin[:]
return[name[corpus_properties]]
variable[msg] assign[=] binary_operation[constant[Corpus "%s" not available for the "%s" language.] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da2044c0850>, <ast.Attribute object at 0x7da2044c1360>]]]
call[name[logger].error, parameter[name[msg]]]
<ast.Raise object at 0x7da2044c1330> | keyword[def] identifier[_get_corpus_properties] ( identifier[self] , identifier[corpus_name] ):
literal[string]
keyword[try] :
identifier[corpora] = identifier[self] . identifier[all_corpora]
keyword[except] identifier[NameError] keyword[as] identifier[name_error] :
identifier[msg] = literal[string] literal[string] %( identifier[self] . identifier[language] , identifier[name_error] )
identifier[logger] . identifier[error] ( identifier[msg] )
keyword[raise] identifier[CorpusImportError] ( identifier[msg] )
keyword[for] identifier[corpus_properties] keyword[in] identifier[corpora] :
keyword[if] identifier[corpus_properties] [ literal[string] ]== identifier[corpus_name] :
keyword[return] identifier[corpus_properties]
identifier[msg] = literal[string] literal[string] %( identifier[corpus_name] , identifier[self] . identifier[language] )
identifier[logger] . identifier[error] ( identifier[msg] )
keyword[raise] identifier[CorpusImportError] ( identifier[msg] ) | def _get_corpus_properties(self, corpus_name):
"""Check whether a corpus is available for import.
:type corpus_name: str
:param corpus_name: Name of available corpus.
:rtype : str
"""
try:
# corpora = LANGUAGE_CORPORA[self.language]
corpora = self.all_corpora # depends on [control=['try'], data=[]]
except NameError as name_error:
msg = 'Corpus not available for language "%s": %s' % (self.language, name_error)
logger.error(msg)
raise CorpusImportError(msg) # depends on [control=['except'], data=['name_error']]
for corpus_properties in corpora:
if corpus_properties['name'] == corpus_name:
return corpus_properties # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['corpus_properties']]
msg = 'Corpus "%s" not available for the "%s" language.' % (corpus_name, self.language)
logger.error(msg)
raise CorpusImportError(msg) |
def read_2000256_list(self):
"""
Read 2000 times a 64 byte value from the stream.
Returns:
list: a list containing 2000 64 byte values in reversed form.
"""
items = []
for _ in range(0, 2000):
data = self.read_bytes(64)
ba = bytearray(binascii.unhexlify(data))
ba.reverse()
items.append(ba.hex().encode('utf-8'))
return items | def function[read_2000256_list, parameter[self]]:
constant[
Read 2000 times a 64 byte value from the stream.
Returns:
list: a list containing 2000 64 byte values in reversed form.
]
variable[items] assign[=] list[[]]
for taget[name[_]] in starred[call[name[range], parameter[constant[0], constant[2000]]]] begin[:]
variable[data] assign[=] call[name[self].read_bytes, parameter[constant[64]]]
variable[ba] assign[=] call[name[bytearray], parameter[call[name[binascii].unhexlify, parameter[name[data]]]]]
call[name[ba].reverse, parameter[]]
call[name[items].append, parameter[call[call[name[ba].hex, parameter[]].encode, parameter[constant[utf-8]]]]]
return[name[items]] | keyword[def] identifier[read_2000256_list] ( identifier[self] ):
literal[string]
identifier[items] =[]
keyword[for] identifier[_] keyword[in] identifier[range] ( literal[int] , literal[int] ):
identifier[data] = identifier[self] . identifier[read_bytes] ( literal[int] )
identifier[ba] = identifier[bytearray] ( identifier[binascii] . identifier[unhexlify] ( identifier[data] ))
identifier[ba] . identifier[reverse] ()
identifier[items] . identifier[append] ( identifier[ba] . identifier[hex] (). identifier[encode] ( literal[string] ))
keyword[return] identifier[items] | def read_2000256_list(self):
"""
Read 2000 times a 64 byte value from the stream.
Returns:
list: a list containing 2000 64 byte values in reversed form.
"""
items = []
for _ in range(0, 2000):
data = self.read_bytes(64)
ba = bytearray(binascii.unhexlify(data))
ba.reverse()
items.append(ba.hex().encode('utf-8')) # depends on [control=['for'], data=[]]
return items |
def module_help(self, module):
"""Describes the key flags of a module.
Args:
module: module|str, the module to describe the key flags for.
Returns:
str, describing the key flags of a module.
"""
helplist = []
self._render_our_module_key_flags(module, helplist)
return '\n'.join(helplist) | def function[module_help, parameter[self, module]]:
constant[Describes the key flags of a module.
Args:
module: module|str, the module to describe the key flags for.
Returns:
str, describing the key flags of a module.
]
variable[helplist] assign[=] list[[]]
call[name[self]._render_our_module_key_flags, parameter[name[module], name[helplist]]]
return[call[constant[
].join, parameter[name[helplist]]]] | keyword[def] identifier[module_help] ( identifier[self] , identifier[module] ):
literal[string]
identifier[helplist] =[]
identifier[self] . identifier[_render_our_module_key_flags] ( identifier[module] , identifier[helplist] )
keyword[return] literal[string] . identifier[join] ( identifier[helplist] ) | def module_help(self, module):
"""Describes the key flags of a module.
Args:
module: module|str, the module to describe the key flags for.
Returns:
str, describing the key flags of a module.
"""
helplist = []
self._render_our_module_key_flags(module, helplist)
return '\n'.join(helplist) |
def add_point_region(self, y: float, x: float) -> Graphic:
"""Add a point graphic to the data item.
:param x: The x coordinate, in relative units [0.0, 1.0]
:param y: The y coordinate, in relative units [0.0, 1.0]
:return: The :py:class:`nion.swift.Facade.Graphic` object that was added.
.. versionadded:: 1.0
Scriptable: Yes
"""
graphic = Graphics.PointGraphic()
graphic.position = Geometry.FloatPoint(y, x)
self.__display_item.add_graphic(graphic)
return Graphic(graphic) | def function[add_point_region, parameter[self, y, x]]:
constant[Add a point graphic to the data item.
:param x: The x coordinate, in relative units [0.0, 1.0]
:param y: The y coordinate, in relative units [0.0, 1.0]
:return: The :py:class:`nion.swift.Facade.Graphic` object that was added.
.. versionadded:: 1.0
Scriptable: Yes
]
variable[graphic] assign[=] call[name[Graphics].PointGraphic, parameter[]]
name[graphic].position assign[=] call[name[Geometry].FloatPoint, parameter[name[y], name[x]]]
call[name[self].__display_item.add_graphic, parameter[name[graphic]]]
return[call[name[Graphic], parameter[name[graphic]]]] | keyword[def] identifier[add_point_region] ( identifier[self] , identifier[y] : identifier[float] , identifier[x] : identifier[float] )-> identifier[Graphic] :
literal[string]
identifier[graphic] = identifier[Graphics] . identifier[PointGraphic] ()
identifier[graphic] . identifier[position] = identifier[Geometry] . identifier[FloatPoint] ( identifier[y] , identifier[x] )
identifier[self] . identifier[__display_item] . identifier[add_graphic] ( identifier[graphic] )
keyword[return] identifier[Graphic] ( identifier[graphic] ) | def add_point_region(self, y: float, x: float) -> Graphic:
"""Add a point graphic to the data item.
:param x: The x coordinate, in relative units [0.0, 1.0]
:param y: The y coordinate, in relative units [0.0, 1.0]
:return: The :py:class:`nion.swift.Facade.Graphic` object that was added.
.. versionadded:: 1.0
Scriptable: Yes
"""
graphic = Graphics.PointGraphic()
graphic.position = Geometry.FloatPoint(y, x)
self.__display_item.add_graphic(graphic)
return Graphic(graphic) |
def _pin_mongos(self, server):
"""Pin this session to the given mongos Server."""
self._transaction.sharded = True
self._transaction.pinned_address = server.description.address | def function[_pin_mongos, parameter[self, server]]:
constant[Pin this session to the given mongos Server.]
name[self]._transaction.sharded assign[=] constant[True]
name[self]._transaction.pinned_address assign[=] name[server].description.address | keyword[def] identifier[_pin_mongos] ( identifier[self] , identifier[server] ):
literal[string]
identifier[self] . identifier[_transaction] . identifier[sharded] = keyword[True]
identifier[self] . identifier[_transaction] . identifier[pinned_address] = identifier[server] . identifier[description] . identifier[address] | def _pin_mongos(self, server):
"""Pin this session to the given mongos Server."""
self._transaction.sharded = True
self._transaction.pinned_address = server.description.address |
def fromMarkdown(md, *args, **kwargs):
"""
Creates abstraction using path to file
:param str path: path to markdown file
:return: TreeOfContents object
"""
return TOC.fromHTML(markdown(md, *args, **kwargs)) | def function[fromMarkdown, parameter[md]]:
constant[
Creates abstraction using path to file
:param str path: path to markdown file
:return: TreeOfContents object
]
return[call[name[TOC].fromHTML, parameter[call[name[markdown], parameter[name[md], <ast.Starred object at 0x7da1b1104550>]]]]] | keyword[def] identifier[fromMarkdown] ( identifier[md] ,* identifier[args] ,** identifier[kwargs] ):
literal[string]
keyword[return] identifier[TOC] . identifier[fromHTML] ( identifier[markdown] ( identifier[md] ,* identifier[args] ,** identifier[kwargs] )) | def fromMarkdown(md, *args, **kwargs):
"""
Creates abstraction using path to file
:param str path: path to markdown file
:return: TreeOfContents object
"""
return TOC.fromHTML(markdown(md, *args, **kwargs)) |
def next_location(self, raw=False):
'''Returns the next location.
Args:
raw (bool): If True, the original string contained in the Location
field will be returned. Otherwise, the URL will be
normalized to a complete URL.
Returns:
str, None: If str, the location. Otherwise, no next location.
'''
if self._response:
location = self._response.fields.get('location')
if not location or raw:
return location
return wpull.url.urljoin(self._response.request.url_info.url,
location) | def function[next_location, parameter[self, raw]]:
constant[Returns the next location.
Args:
raw (bool): If True, the original string contained in the Location
field will be returned. Otherwise, the URL will be
normalized to a complete URL.
Returns:
str, None: If str, the location. Otherwise, no next location.
]
if name[self]._response begin[:]
variable[location] assign[=] call[name[self]._response.fields.get, parameter[constant[location]]]
if <ast.BoolOp object at 0x7da1b26af760> begin[:]
return[name[location]]
return[call[name[wpull].url.urljoin, parameter[name[self]._response.request.url_info.url, name[location]]]] | keyword[def] identifier[next_location] ( identifier[self] , identifier[raw] = keyword[False] ):
literal[string]
keyword[if] identifier[self] . identifier[_response] :
identifier[location] = identifier[self] . identifier[_response] . identifier[fields] . identifier[get] ( literal[string] )
keyword[if] keyword[not] identifier[location] keyword[or] identifier[raw] :
keyword[return] identifier[location]
keyword[return] identifier[wpull] . identifier[url] . identifier[urljoin] ( identifier[self] . identifier[_response] . identifier[request] . identifier[url_info] . identifier[url] ,
identifier[location] ) | def next_location(self, raw=False):
"""Returns the next location.
Args:
raw (bool): If True, the original string contained in the Location
field will be returned. Otherwise, the URL will be
normalized to a complete URL.
Returns:
str, None: If str, the location. Otherwise, no next location.
"""
if self._response:
location = self._response.fields.get('location')
if not location or raw:
return location # depends on [control=['if'], data=[]]
return wpull.url.urljoin(self._response.request.url_info.url, location) # depends on [control=['if'], data=[]] |
def get_file_from_s3(job, s3_url, encryption_key=None, per_file_encryption=True,
write_to_jobstore=True):
"""
Download a supplied URL that points to a file on Amazon S3. If the file is encrypted using
sse-c (with the user-provided key or with a hash of the usesr provided master key) then the
encryption keys will be used when downloading. The file is downloaded and written to the
jobstore if requested.
:param str s3_url: URL for the file (can be s3, S3 or https)
:param str encryption_key: Path to the master key
:param bool per_file_encryption: If encrypted, was the file encrypted using the per-file method?
:param bool write_to_jobstore: Should the file be written to the job store?
:return: Path to the downloaded file or fsID (if write_to_jobstore was True)
:rtype: str|toil.fileStore.FileID
"""
work_dir = job.fileStore.getLocalTempDir()
parsed_url = urlparse(s3_url)
if parsed_url.scheme == 'https':
download_url = 'S3:/' + parsed_url.path # path contains the second /
elif parsed_url.scheme in ('s3', 'S3'):
download_url = s3_url
else:
raise RuntimeError('Unexpected url scheme: %s' % s3_url)
filename = '/'.join([work_dir, os.path.basename(s3_url)])
# This is common to encrypted and unencrypted downloads
download_call = ['s3am', 'download', '--download-exists', 'resume']
# If an encryption key was provided, use it.
if encryption_key:
download_call.extend(['--sse-key-file', encryption_key])
if per_file_encryption:
download_call.append('--sse-key-is-master')
# This is also common to both types of downloads
download_call.extend([download_url, filename])
attempt = 0
exception = ''
while True:
try:
with open(work_dir + '/stderr', 'w') as stderr_file:
subprocess.check_call(download_call, stderr=stderr_file)
except subprocess.CalledProcessError:
# The last line of the stderr will have the error
with open(stderr_file.name) as stderr_file:
for line in stderr_file:
line = line.strip()
if line:
exception = line
if exception.startswith('boto'):
exception = exception.split(': ')
if exception[-1].startswith('403'):
raise RuntimeError('s3am failed with a "403 Forbidden" error while obtaining '
'(%s). Did you use the correct credentials?' % s3_url)
elif exception[-1].startswith('400'):
raise RuntimeError('s3am failed with a "400 Bad Request" error while obtaining '
'(%s). Are you trying to download an encrypted file without '
'a key, or an unencrypted file with one?' % s3_url)
else:
raise RuntimeError('s3am failed with (%s) while downloading (%s)' %
(': '.join(exception), s3_url))
elif exception.startswith('AttributeError'):
exception = exception.split(': ')
if exception[-1].startswith("'NoneType'"):
raise RuntimeError('Does (%s) exist on s3?' % s3_url)
else:
raise RuntimeError('s3am failed with (%s) while downloading (%s)' %
(': '.join(exception), s3_url))
else:
if attempt < 3:
attempt += 1
continue
else:
raise RuntimeError('Could not diagnose the error while downloading (%s)' %
s3_url)
except OSError:
raise RuntimeError('Failed to find "s3am". Install via "apt-get install --pre s3am"')
else:
break
finally:
os.remove(stderr_file.name)
assert os.path.exists(filename)
if write_to_jobstore:
filename = job.fileStore.writeGlobalFile(filename)
return filename | def function[get_file_from_s3, parameter[job, s3_url, encryption_key, per_file_encryption, write_to_jobstore]]:
constant[
Download a supplied URL that points to a file on Amazon S3. If the file is encrypted using
sse-c (with the user-provided key or with a hash of the usesr provided master key) then the
encryption keys will be used when downloading. The file is downloaded and written to the
jobstore if requested.
:param str s3_url: URL for the file (can be s3, S3 or https)
:param str encryption_key: Path to the master key
:param bool per_file_encryption: If encrypted, was the file encrypted using the per-file method?
:param bool write_to_jobstore: Should the file be written to the job store?
:return: Path to the downloaded file or fsID (if write_to_jobstore was True)
:rtype: str|toil.fileStore.FileID
]
variable[work_dir] assign[=] call[name[job].fileStore.getLocalTempDir, parameter[]]
variable[parsed_url] assign[=] call[name[urlparse], parameter[name[s3_url]]]
if compare[name[parsed_url].scheme equal[==] constant[https]] begin[:]
variable[download_url] assign[=] binary_operation[constant[S3:/] + name[parsed_url].path]
variable[filename] assign[=] call[constant[/].join, parameter[list[[<ast.Name object at 0x7da20c7cac50>, <ast.Call object at 0x7da20c7c9e10>]]]]
variable[download_call] assign[=] list[[<ast.Constant object at 0x7da20c7cae30>, <ast.Constant object at 0x7da20c7c9ea0>, <ast.Constant object at 0x7da20c7cbe80>, <ast.Constant object at 0x7da20c7c8b50>]]
if name[encryption_key] begin[:]
call[name[download_call].extend, parameter[list[[<ast.Constant object at 0x7da20c7c8fa0>, <ast.Name object at 0x7da20c7c8cd0>]]]]
if name[per_file_encryption] begin[:]
call[name[download_call].append, parameter[constant[--sse-key-is-master]]]
call[name[download_call].extend, parameter[list[[<ast.Name object at 0x7da20c7cb820>, <ast.Name object at 0x7da20c7cbf40>]]]]
variable[attempt] assign[=] constant[0]
variable[exception] assign[=] constant[]
while constant[True] begin[:]
<ast.Try object at 0x7da18eb56410>
assert[call[name[os].path.exists, parameter[name[filename]]]]
if name[write_to_jobstore] begin[:]
variable[filename] assign[=] call[name[job].fileStore.writeGlobalFile, parameter[name[filename]]]
return[name[filename]] | keyword[def] identifier[get_file_from_s3] ( identifier[job] , identifier[s3_url] , identifier[encryption_key] = keyword[None] , identifier[per_file_encryption] = keyword[True] ,
identifier[write_to_jobstore] = keyword[True] ):
literal[string]
identifier[work_dir] = identifier[job] . identifier[fileStore] . identifier[getLocalTempDir] ()
identifier[parsed_url] = identifier[urlparse] ( identifier[s3_url] )
keyword[if] identifier[parsed_url] . identifier[scheme] == literal[string] :
identifier[download_url] = literal[string] + identifier[parsed_url] . identifier[path]
keyword[elif] identifier[parsed_url] . identifier[scheme] keyword[in] ( literal[string] , literal[string] ):
identifier[download_url] = identifier[s3_url]
keyword[else] :
keyword[raise] identifier[RuntimeError] ( literal[string] % identifier[s3_url] )
identifier[filename] = literal[string] . identifier[join] ([ identifier[work_dir] , identifier[os] . identifier[path] . identifier[basename] ( identifier[s3_url] )])
identifier[download_call] =[ literal[string] , literal[string] , literal[string] , literal[string] ]
keyword[if] identifier[encryption_key] :
identifier[download_call] . identifier[extend] ([ literal[string] , identifier[encryption_key] ])
keyword[if] identifier[per_file_encryption] :
identifier[download_call] . identifier[append] ( literal[string] )
identifier[download_call] . identifier[extend] ([ identifier[download_url] , identifier[filename] ])
identifier[attempt] = literal[int]
identifier[exception] = literal[string]
keyword[while] keyword[True] :
keyword[try] :
keyword[with] identifier[open] ( identifier[work_dir] + literal[string] , literal[string] ) keyword[as] identifier[stderr_file] :
identifier[subprocess] . identifier[check_call] ( identifier[download_call] , identifier[stderr] = identifier[stderr_file] )
keyword[except] identifier[subprocess] . identifier[CalledProcessError] :
keyword[with] identifier[open] ( identifier[stderr_file] . identifier[name] ) keyword[as] identifier[stderr_file] :
keyword[for] identifier[line] keyword[in] identifier[stderr_file] :
identifier[line] = identifier[line] . identifier[strip] ()
keyword[if] identifier[line] :
identifier[exception] = identifier[line]
keyword[if] identifier[exception] . identifier[startswith] ( literal[string] ):
identifier[exception] = identifier[exception] . identifier[split] ( literal[string] )
keyword[if] identifier[exception] [- literal[int] ]. identifier[startswith] ( literal[string] ):
keyword[raise] identifier[RuntimeError] ( literal[string]
literal[string] % identifier[s3_url] )
keyword[elif] identifier[exception] [- literal[int] ]. identifier[startswith] ( literal[string] ):
keyword[raise] identifier[RuntimeError] ( literal[string]
literal[string]
literal[string] % identifier[s3_url] )
keyword[else] :
keyword[raise] identifier[RuntimeError] ( literal[string] %
( literal[string] . identifier[join] ( identifier[exception] ), identifier[s3_url] ))
keyword[elif] identifier[exception] . identifier[startswith] ( literal[string] ):
identifier[exception] = identifier[exception] . identifier[split] ( literal[string] )
keyword[if] identifier[exception] [- literal[int] ]. identifier[startswith] ( literal[string] ):
keyword[raise] identifier[RuntimeError] ( literal[string] % identifier[s3_url] )
keyword[else] :
keyword[raise] identifier[RuntimeError] ( literal[string] %
( literal[string] . identifier[join] ( identifier[exception] ), identifier[s3_url] ))
keyword[else] :
keyword[if] identifier[attempt] < literal[int] :
identifier[attempt] += literal[int]
keyword[continue]
keyword[else] :
keyword[raise] identifier[RuntimeError] ( literal[string] %
identifier[s3_url] )
keyword[except] identifier[OSError] :
keyword[raise] identifier[RuntimeError] ( literal[string] )
keyword[else] :
keyword[break]
keyword[finally] :
identifier[os] . identifier[remove] ( identifier[stderr_file] . identifier[name] )
keyword[assert] identifier[os] . identifier[path] . identifier[exists] ( identifier[filename] )
keyword[if] identifier[write_to_jobstore] :
identifier[filename] = identifier[job] . identifier[fileStore] . identifier[writeGlobalFile] ( identifier[filename] )
keyword[return] identifier[filename] | def get_file_from_s3(job, s3_url, encryption_key=None, per_file_encryption=True, write_to_jobstore=True):
"""
Download a supplied URL that points to a file on Amazon S3. If the file is encrypted using
sse-c (with the user-provided key or with a hash of the usesr provided master key) then the
encryption keys will be used when downloading. The file is downloaded and written to the
jobstore if requested.
:param str s3_url: URL for the file (can be s3, S3 or https)
:param str encryption_key: Path to the master key
:param bool per_file_encryption: If encrypted, was the file encrypted using the per-file method?
:param bool write_to_jobstore: Should the file be written to the job store?
:return: Path to the downloaded file or fsID (if write_to_jobstore was True)
:rtype: str|toil.fileStore.FileID
"""
work_dir = job.fileStore.getLocalTempDir()
parsed_url = urlparse(s3_url)
if parsed_url.scheme == 'https':
download_url = 'S3:/' + parsed_url.path # path contains the second / # depends on [control=['if'], data=[]]
elif parsed_url.scheme in ('s3', 'S3'):
download_url = s3_url # depends on [control=['if'], data=[]]
else:
raise RuntimeError('Unexpected url scheme: %s' % s3_url)
filename = '/'.join([work_dir, os.path.basename(s3_url)])
# This is common to encrypted and unencrypted downloads
download_call = ['s3am', 'download', '--download-exists', 'resume']
# If an encryption key was provided, use it.
if encryption_key:
download_call.extend(['--sse-key-file', encryption_key])
if per_file_encryption:
download_call.append('--sse-key-is-master') # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
# This is also common to both types of downloads
download_call.extend([download_url, filename])
attempt = 0
exception = ''
while True:
try:
with open(work_dir + '/stderr', 'w') as stderr_file:
subprocess.check_call(download_call, stderr=stderr_file) # depends on [control=['with'], data=['stderr_file']] # depends on [control=['try'], data=[]]
except subprocess.CalledProcessError:
# The last line of the stderr will have the error
with open(stderr_file.name) as stderr_file:
for line in stderr_file:
line = line.strip()
if line:
exception = line # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['line']] # depends on [control=['with'], data=['stderr_file']]
if exception.startswith('boto'):
exception = exception.split(': ')
if exception[-1].startswith('403'):
raise RuntimeError('s3am failed with a "403 Forbidden" error while obtaining (%s). Did you use the correct credentials?' % s3_url) # depends on [control=['if'], data=[]]
elif exception[-1].startswith('400'):
raise RuntimeError('s3am failed with a "400 Bad Request" error while obtaining (%s). Are you trying to download an encrypted file without a key, or an unencrypted file with one?' % s3_url) # depends on [control=['if'], data=[]]
else:
raise RuntimeError('s3am failed with (%s) while downloading (%s)' % (': '.join(exception), s3_url)) # depends on [control=['if'], data=[]]
elif exception.startswith('AttributeError'):
exception = exception.split(': ')
if exception[-1].startswith("'NoneType'"):
raise RuntimeError('Does (%s) exist on s3?' % s3_url) # depends on [control=['if'], data=[]]
else:
raise RuntimeError('s3am failed with (%s) while downloading (%s)' % (': '.join(exception), s3_url)) # depends on [control=['if'], data=[]]
elif attempt < 3:
attempt += 1
continue # depends on [control=['if'], data=['attempt']]
else:
raise RuntimeError('Could not diagnose the error while downloading (%s)' % s3_url) # depends on [control=['except'], data=[]]
except OSError:
raise RuntimeError('Failed to find "s3am". Install via "apt-get install --pre s3am"') # depends on [control=['except'], data=[]]
else:
break
finally:
os.remove(stderr_file.name) # depends on [control=['while'], data=[]]
assert os.path.exists(filename)
if write_to_jobstore:
filename = job.fileStore.writeGlobalFile(filename) # depends on [control=['if'], data=[]]
return filename |
def Graph2Pandas_converter(self):
'''Updates self.g or self.path bc you could only choose 1'''
if isinstance(self.path, str) or isinstance(self.path, p):
self.path = str(self.path)
filetype = p(self.path).suffix
if filetype == '.pickle':
self.g = pickle.load(open(self.path, 'rb'))
if isinstance(self.g, rdflib.graph.Graph):
return self.get_sparql_dataframe()
else:
print('WARNING:: function df() wont work unless an ontology source is loaded')
return self.g
elif filetype == '.ttl' or filetype == '.rdf':
self.g = rdflib.Graph()
self.g.parse(self.path, format='turtle')
return self.get_sparql_dataframe()
elif filetype == '.nt':
self.g = rdflib.Graph()
self.g.parse(self.path, format='nt')
return self.get_sparql_dataframe()
elif filetype == '.owl' or filetype == '.xrdf':
self.g = rdflib.Graph()
try:
self.g.parse(self.path, format='xml')
except:
# some owl formats are more rdf than owl
self.g.parse(self.path, format='turtle')
return self.get_sparql_dataframe()
else:
exit('Format options: owl, ttl, df_pickle, rdflib.Graph()')
try:
return self.get_sparql_dataframe()
self.path = None
except:
exit('Format options: owl, ttl, df_pickle, rdflib.Graph()')
elif isinstance(self.g, rdflib.graph.Graph):
self.path = None
return self.get_sparql_dataframe()
else:
exit('Obj given is not str, pathlib obj, or an rdflib.Graph()') | def function[Graph2Pandas_converter, parameter[self]]:
constant[Updates self.g or self.path bc you could only choose 1]
if <ast.BoolOp object at 0x7da1b1a7f310> begin[:]
name[self].path assign[=] call[name[str], parameter[name[self].path]]
variable[filetype] assign[=] call[name[p], parameter[name[self].path]].suffix
if compare[name[filetype] equal[==] constant[.pickle]] begin[:]
name[self].g assign[=] call[name[pickle].load, parameter[call[name[open], parameter[name[self].path, constant[rb]]]]]
if call[name[isinstance], parameter[name[self].g, name[rdflib].graph.Graph]] begin[:]
return[call[name[self].get_sparql_dataframe, parameter[]]]
<ast.Try object at 0x7da1b1aa4a30> | keyword[def] identifier[Graph2Pandas_converter] ( identifier[self] ):
literal[string]
keyword[if] identifier[isinstance] ( identifier[self] . identifier[path] , identifier[str] ) keyword[or] identifier[isinstance] ( identifier[self] . identifier[path] , identifier[p] ):
identifier[self] . identifier[path] = identifier[str] ( identifier[self] . identifier[path] )
identifier[filetype] = identifier[p] ( identifier[self] . identifier[path] ). identifier[suffix]
keyword[if] identifier[filetype] == literal[string] :
identifier[self] . identifier[g] = identifier[pickle] . identifier[load] ( identifier[open] ( identifier[self] . identifier[path] , literal[string] ))
keyword[if] identifier[isinstance] ( identifier[self] . identifier[g] , identifier[rdflib] . identifier[graph] . identifier[Graph] ):
keyword[return] identifier[self] . identifier[get_sparql_dataframe] ()
keyword[else] :
identifier[print] ( literal[string] )
keyword[return] identifier[self] . identifier[g]
keyword[elif] identifier[filetype] == literal[string] keyword[or] identifier[filetype] == literal[string] :
identifier[self] . identifier[g] = identifier[rdflib] . identifier[Graph] ()
identifier[self] . identifier[g] . identifier[parse] ( identifier[self] . identifier[path] , identifier[format] = literal[string] )
keyword[return] identifier[self] . identifier[get_sparql_dataframe] ()
keyword[elif] identifier[filetype] == literal[string] :
identifier[self] . identifier[g] = identifier[rdflib] . identifier[Graph] ()
identifier[self] . identifier[g] . identifier[parse] ( identifier[self] . identifier[path] , identifier[format] = literal[string] )
keyword[return] identifier[self] . identifier[get_sparql_dataframe] ()
keyword[elif] identifier[filetype] == literal[string] keyword[or] identifier[filetype] == literal[string] :
identifier[self] . identifier[g] = identifier[rdflib] . identifier[Graph] ()
keyword[try] :
identifier[self] . identifier[g] . identifier[parse] ( identifier[self] . identifier[path] , identifier[format] = literal[string] )
keyword[except] :
identifier[self] . identifier[g] . identifier[parse] ( identifier[self] . identifier[path] , identifier[format] = literal[string] )
keyword[return] identifier[self] . identifier[get_sparql_dataframe] ()
keyword[else] :
identifier[exit] ( literal[string] )
keyword[try] :
keyword[return] identifier[self] . identifier[get_sparql_dataframe] ()
identifier[self] . identifier[path] = keyword[None]
keyword[except] :
identifier[exit] ( literal[string] )
keyword[elif] identifier[isinstance] ( identifier[self] . identifier[g] , identifier[rdflib] . identifier[graph] . identifier[Graph] ):
identifier[self] . identifier[path] = keyword[None]
keyword[return] identifier[self] . identifier[get_sparql_dataframe] ()
keyword[else] :
identifier[exit] ( literal[string] ) | def Graph2Pandas_converter(self):
"""Updates self.g or self.path bc you could only choose 1"""
if isinstance(self.path, str) or isinstance(self.path, p):
self.path = str(self.path)
filetype = p(self.path).suffix
if filetype == '.pickle':
self.g = pickle.load(open(self.path, 'rb'))
if isinstance(self.g, rdflib.graph.Graph):
return self.get_sparql_dataframe() # depends on [control=['if'], data=[]]
else:
print('WARNING:: function df() wont work unless an ontology source is loaded')
return self.g # depends on [control=['if'], data=[]]
elif filetype == '.ttl' or filetype == '.rdf':
self.g = rdflib.Graph()
self.g.parse(self.path, format='turtle')
return self.get_sparql_dataframe() # depends on [control=['if'], data=[]]
elif filetype == '.nt':
self.g = rdflib.Graph()
self.g.parse(self.path, format='nt')
return self.get_sparql_dataframe() # depends on [control=['if'], data=[]]
elif filetype == '.owl' or filetype == '.xrdf':
self.g = rdflib.Graph()
try:
self.g.parse(self.path, format='xml') # depends on [control=['try'], data=[]]
except:
# some owl formats are more rdf than owl
self.g.parse(self.path, format='turtle') # depends on [control=['except'], data=[]]
return self.get_sparql_dataframe() # depends on [control=['if'], data=[]]
else:
exit('Format options: owl, ttl, df_pickle, rdflib.Graph()')
try:
return self.get_sparql_dataframe()
self.path = None # depends on [control=['try'], data=[]]
except:
exit('Format options: owl, ttl, df_pickle, rdflib.Graph()') # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]]
elif isinstance(self.g, rdflib.graph.Graph):
self.path = None
return self.get_sparql_dataframe() # depends on [control=['if'], data=[]]
else:
exit('Obj given is not str, pathlib obj, or an rdflib.Graph()') |
def create_readme_with_long_description():
'''Try to convert content of README.md into rst format using pypandoc,
write it into README and return it.
If pypandoc cannot be imported write content of README.md unchanged into
README and return it.
'''
this_dir = os.path.abspath(os.path.dirname(__file__))
readme_md = os.path.join(this_dir, 'README.md')
readme = os.path.join(this_dir, 'README')
if os.path.exists(readme_md):
# this is the case when running `python setup.py sdist`
if os.path.exists(readme):
os.remove(readme)
try:
import pypandoc
long_description = pypandoc.convert(readme_md, 'rst', format='md')
except(ImportError):
with open(readme_md, encoding='utf-8') as in_:
long_description = in_.read()
with open(readme, 'w') as out:
out.write(long_description)
else:
# this is in case of `pip install fabsetup-x.y.z.tar.gz`
with open(readme, encoding='utf-8') as in_:
long_description = in_.read()
return long_description | def function[create_readme_with_long_description, parameter[]]:
constant[Try to convert content of README.md into rst format using pypandoc,
write it into README and return it.
If pypandoc cannot be imported write content of README.md unchanged into
README and return it.
]
variable[this_dir] assign[=] call[name[os].path.abspath, parameter[call[name[os].path.dirname, parameter[name[__file__]]]]]
variable[readme_md] assign[=] call[name[os].path.join, parameter[name[this_dir], constant[README.md]]]
variable[readme] assign[=] call[name[os].path.join, parameter[name[this_dir], constant[README]]]
if call[name[os].path.exists, parameter[name[readme_md]]] begin[:]
if call[name[os].path.exists, parameter[name[readme]]] begin[:]
call[name[os].remove, parameter[name[readme]]]
<ast.Try object at 0x7da1b257f040>
with call[name[open], parameter[name[readme], constant[w]]] begin[:]
call[name[out].write, parameter[name[long_description]]]
return[name[long_description]] | keyword[def] identifier[create_readme_with_long_description] ():
literal[string]
identifier[this_dir] = identifier[os] . identifier[path] . identifier[abspath] ( identifier[os] . identifier[path] . identifier[dirname] ( identifier[__file__] ))
identifier[readme_md] = identifier[os] . identifier[path] . identifier[join] ( identifier[this_dir] , literal[string] )
identifier[readme] = identifier[os] . identifier[path] . identifier[join] ( identifier[this_dir] , literal[string] )
keyword[if] identifier[os] . identifier[path] . identifier[exists] ( identifier[readme_md] ):
keyword[if] identifier[os] . identifier[path] . identifier[exists] ( identifier[readme] ):
identifier[os] . identifier[remove] ( identifier[readme] )
keyword[try] :
keyword[import] identifier[pypandoc]
identifier[long_description] = identifier[pypandoc] . identifier[convert] ( identifier[readme_md] , literal[string] , identifier[format] = literal[string] )
keyword[except] ( identifier[ImportError] ):
keyword[with] identifier[open] ( identifier[readme_md] , identifier[encoding] = literal[string] ) keyword[as] identifier[in_] :
identifier[long_description] = identifier[in_] . identifier[read] ()
keyword[with] identifier[open] ( identifier[readme] , literal[string] ) keyword[as] identifier[out] :
identifier[out] . identifier[write] ( identifier[long_description] )
keyword[else] :
keyword[with] identifier[open] ( identifier[readme] , identifier[encoding] = literal[string] ) keyword[as] identifier[in_] :
identifier[long_description] = identifier[in_] . identifier[read] ()
keyword[return] identifier[long_description] | def create_readme_with_long_description():
"""Try to convert content of README.md into rst format using pypandoc,
write it into README and return it.
If pypandoc cannot be imported write content of README.md unchanged into
README and return it.
"""
this_dir = os.path.abspath(os.path.dirname(__file__))
readme_md = os.path.join(this_dir, 'README.md')
readme = os.path.join(this_dir, 'README')
if os.path.exists(readme_md):
# this is the case when running `python setup.py sdist`
if os.path.exists(readme):
os.remove(readme) # depends on [control=['if'], data=[]]
try:
import pypandoc
long_description = pypandoc.convert(readme_md, 'rst', format='md') # depends on [control=['try'], data=[]]
except ImportError:
with open(readme_md, encoding='utf-8') as in_:
long_description = in_.read() # depends on [control=['with'], data=['in_']] # depends on [control=['except'], data=[]]
with open(readme, 'w') as out:
out.write(long_description) # depends on [control=['with'], data=['out']] # depends on [control=['if'], data=[]]
else:
# this is in case of `pip install fabsetup-x.y.z.tar.gz`
with open(readme, encoding='utf-8') as in_:
long_description = in_.read() # depends on [control=['with'], data=['in_']]
return long_description |
def _set_publication_info_field(self, field_name, value):
"""Put a value in the publication info of the reference."""
self._ensure_reference_field('publication_info', {})
self.obj['reference']['publication_info'][field_name] = value | def function[_set_publication_info_field, parameter[self, field_name, value]]:
constant[Put a value in the publication info of the reference.]
call[name[self]._ensure_reference_field, parameter[constant[publication_info], dictionary[[], []]]]
call[call[call[name[self].obj][constant[reference]]][constant[publication_info]]][name[field_name]] assign[=] name[value] | keyword[def] identifier[_set_publication_info_field] ( identifier[self] , identifier[field_name] , identifier[value] ):
literal[string]
identifier[self] . identifier[_ensure_reference_field] ( literal[string] ,{})
identifier[self] . identifier[obj] [ literal[string] ][ literal[string] ][ identifier[field_name] ]= identifier[value] | def _set_publication_info_field(self, field_name, value):
"""Put a value in the publication info of the reference."""
self._ensure_reference_field('publication_info', {})
self.obj['reference']['publication_info'][field_name] = value |
def fix_e225(self, result):
"""Fix missing whitespace around operator."""
target = self.source[result['line'] - 1]
offset = result['column'] - 1
fixed = target[:offset] + ' ' + target[offset:]
# Only proceed if non-whitespace characters match.
# And make sure we don't break the indentation.
if (
fixed.replace(' ', '') == target.replace(' ', '') and
_get_indentation(fixed) == _get_indentation(target)
):
self.source[result['line'] - 1] = fixed
error_code = result.get('id', 0)
try:
ts = generate_tokens(fixed)
except (SyntaxError, tokenize.TokenError):
return
if not check_syntax(fixed.lstrip()):
return
errors = list(
pycodestyle.missing_whitespace_around_operator(fixed, ts))
for e in reversed(errors):
if error_code != e[1].split()[0]:
continue
offset = e[0][1]
fixed = fixed[:offset] + ' ' + fixed[offset:]
self.source[result['line'] - 1] = fixed
else:
return [] | def function[fix_e225, parameter[self, result]]:
constant[Fix missing whitespace around operator.]
variable[target] assign[=] call[name[self].source][binary_operation[call[name[result]][constant[line]] - constant[1]]]
variable[offset] assign[=] binary_operation[call[name[result]][constant[column]] - constant[1]]
variable[fixed] assign[=] binary_operation[binary_operation[call[name[target]][<ast.Slice object at 0x7da1b21c7100>] + constant[ ]] + call[name[target]][<ast.Slice object at 0x7da1b21c7280>]]
if <ast.BoolOp object at 0x7da1b21c7d60> begin[:]
call[name[self].source][binary_operation[call[name[result]][constant[line]] - constant[1]]] assign[=] name[fixed]
variable[error_code] assign[=] call[name[result].get, parameter[constant[id], constant[0]]]
<ast.Try object at 0x7da1b21c6ec0>
if <ast.UnaryOp object at 0x7da2041d9120> begin[:]
return[None]
variable[errors] assign[=] call[name[list], parameter[call[name[pycodestyle].missing_whitespace_around_operator, parameter[name[fixed], name[ts]]]]]
for taget[name[e]] in starred[call[name[reversed], parameter[name[errors]]]] begin[:]
if compare[name[error_code] not_equal[!=] call[call[call[name[e]][constant[1]].split, parameter[]]][constant[0]]] begin[:]
continue
variable[offset] assign[=] call[call[name[e]][constant[0]]][constant[1]]
variable[fixed] assign[=] binary_operation[binary_operation[call[name[fixed]][<ast.Slice object at 0x7da1b21da9e0>] + constant[ ]] + call[name[fixed]][<ast.Slice object at 0x7da1b21db820>]]
call[name[self].source][binary_operation[call[name[result]][constant[line]] - constant[1]]] assign[=] name[fixed] | keyword[def] identifier[fix_e225] ( identifier[self] , identifier[result] ):
literal[string]
identifier[target] = identifier[self] . identifier[source] [ identifier[result] [ literal[string] ]- literal[int] ]
identifier[offset] = identifier[result] [ literal[string] ]- literal[int]
identifier[fixed] = identifier[target] [: identifier[offset] ]+ literal[string] + identifier[target] [ identifier[offset] :]
keyword[if] (
identifier[fixed] . identifier[replace] ( literal[string] , literal[string] )== identifier[target] . identifier[replace] ( literal[string] , literal[string] ) keyword[and]
identifier[_get_indentation] ( identifier[fixed] )== identifier[_get_indentation] ( identifier[target] )
):
identifier[self] . identifier[source] [ identifier[result] [ literal[string] ]- literal[int] ]= identifier[fixed]
identifier[error_code] = identifier[result] . identifier[get] ( literal[string] , literal[int] )
keyword[try] :
identifier[ts] = identifier[generate_tokens] ( identifier[fixed] )
keyword[except] ( identifier[SyntaxError] , identifier[tokenize] . identifier[TokenError] ):
keyword[return]
keyword[if] keyword[not] identifier[check_syntax] ( identifier[fixed] . identifier[lstrip] ()):
keyword[return]
identifier[errors] = identifier[list] (
identifier[pycodestyle] . identifier[missing_whitespace_around_operator] ( identifier[fixed] , identifier[ts] ))
keyword[for] identifier[e] keyword[in] identifier[reversed] ( identifier[errors] ):
keyword[if] identifier[error_code] != identifier[e] [ literal[int] ]. identifier[split] ()[ literal[int] ]:
keyword[continue]
identifier[offset] = identifier[e] [ literal[int] ][ literal[int] ]
identifier[fixed] = identifier[fixed] [: identifier[offset] ]+ literal[string] + identifier[fixed] [ identifier[offset] :]
identifier[self] . identifier[source] [ identifier[result] [ literal[string] ]- literal[int] ]= identifier[fixed]
keyword[else] :
keyword[return] [] | def fix_e225(self, result):
"""Fix missing whitespace around operator."""
target = self.source[result['line'] - 1]
offset = result['column'] - 1
fixed = target[:offset] + ' ' + target[offset:]
# Only proceed if non-whitespace characters match.
# And make sure we don't break the indentation.
if fixed.replace(' ', '') == target.replace(' ', '') and _get_indentation(fixed) == _get_indentation(target):
self.source[result['line'] - 1] = fixed
error_code = result.get('id', 0)
try:
ts = generate_tokens(fixed) # depends on [control=['try'], data=[]]
except (SyntaxError, tokenize.TokenError):
return # depends on [control=['except'], data=[]]
if not check_syntax(fixed.lstrip()):
return # depends on [control=['if'], data=[]]
errors = list(pycodestyle.missing_whitespace_around_operator(fixed, ts))
for e in reversed(errors):
if error_code != e[1].split()[0]:
continue # depends on [control=['if'], data=[]]
offset = e[0][1]
fixed = fixed[:offset] + ' ' + fixed[offset:] # depends on [control=['for'], data=['e']]
self.source[result['line'] - 1] = fixed # depends on [control=['if'], data=[]]
else:
return [] |
def set_split_extents_by_tile_shape(self):
"""
Sets split shape :attr:`split_shape` and
split extents (:attr:`split_begs` and :attr:`split_ends`)
from value of :attr:`tile_shape`.
"""
self.split_shape = ((self.array_shape - 1) // self.tile_shape) + 1
self.split_begs = [[], ] * len(self.array_shape)
self.split_ends = [[], ] * len(self.array_shape)
for i in range(len(self.array_shape)):
self.split_begs[i] = _np.arange(0, self.array_shape[i], self.tile_shape[i])
self.split_ends[i] = _np.zeros_like(self.split_begs[i])
self.split_ends[i][0:-1] = self.split_begs[i][1:]
self.split_ends[i][-1] = self.array_shape[i] | def function[set_split_extents_by_tile_shape, parameter[self]]:
constant[
Sets split shape :attr:`split_shape` and
split extents (:attr:`split_begs` and :attr:`split_ends`)
from value of :attr:`tile_shape`.
]
name[self].split_shape assign[=] binary_operation[binary_operation[binary_operation[name[self].array_shape - constant[1]] <ast.FloorDiv object at 0x7da2590d6bc0> name[self].tile_shape] + constant[1]]
name[self].split_begs assign[=] binary_operation[list[[<ast.List object at 0x7da1b0a21c00>]] * call[name[len], parameter[name[self].array_shape]]]
name[self].split_ends assign[=] binary_operation[list[[<ast.List object at 0x7da1b0a73940>]] * call[name[len], parameter[name[self].array_shape]]]
for taget[name[i]] in starred[call[name[range], parameter[call[name[len], parameter[name[self].array_shape]]]]] begin[:]
call[name[self].split_begs][name[i]] assign[=] call[name[_np].arange, parameter[constant[0], call[name[self].array_shape][name[i]], call[name[self].tile_shape][name[i]]]]
call[name[self].split_ends][name[i]] assign[=] call[name[_np].zeros_like, parameter[call[name[self].split_begs][name[i]]]]
call[call[name[self].split_ends][name[i]]][<ast.Slice object at 0x7da1b0a20b20>] assign[=] call[call[name[self].split_begs][name[i]]][<ast.Slice object at 0x7da1b0a219c0>]
call[call[name[self].split_ends][name[i]]][<ast.UnaryOp object at 0x7da1b0a229e0>] assign[=] call[name[self].array_shape][name[i]] | keyword[def] identifier[set_split_extents_by_tile_shape] ( identifier[self] ):
literal[string]
identifier[self] . identifier[split_shape] =(( identifier[self] . identifier[array_shape] - literal[int] )// identifier[self] . identifier[tile_shape] )+ literal[int]
identifier[self] . identifier[split_begs] =[[],]* identifier[len] ( identifier[self] . identifier[array_shape] )
identifier[self] . identifier[split_ends] =[[],]* identifier[len] ( identifier[self] . identifier[array_shape] )
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[len] ( identifier[self] . identifier[array_shape] )):
identifier[self] . identifier[split_begs] [ identifier[i] ]= identifier[_np] . identifier[arange] ( literal[int] , identifier[self] . identifier[array_shape] [ identifier[i] ], identifier[self] . identifier[tile_shape] [ identifier[i] ])
identifier[self] . identifier[split_ends] [ identifier[i] ]= identifier[_np] . identifier[zeros_like] ( identifier[self] . identifier[split_begs] [ identifier[i] ])
identifier[self] . identifier[split_ends] [ identifier[i] ][ literal[int] :- literal[int] ]= identifier[self] . identifier[split_begs] [ identifier[i] ][ literal[int] :]
identifier[self] . identifier[split_ends] [ identifier[i] ][- literal[int] ]= identifier[self] . identifier[array_shape] [ identifier[i] ] | def set_split_extents_by_tile_shape(self):
"""
Sets split shape :attr:`split_shape` and
split extents (:attr:`split_begs` and :attr:`split_ends`)
from value of :attr:`tile_shape`.
"""
self.split_shape = (self.array_shape - 1) // self.tile_shape + 1
self.split_begs = [[]] * len(self.array_shape)
self.split_ends = [[]] * len(self.array_shape)
for i in range(len(self.array_shape)):
self.split_begs[i] = _np.arange(0, self.array_shape[i], self.tile_shape[i])
self.split_ends[i] = _np.zeros_like(self.split_begs[i])
self.split_ends[i][0:-1] = self.split_begs[i][1:]
self.split_ends[i][-1] = self.array_shape[i] # depends on [control=['for'], data=['i']] |
def get_message(self, metadata=False, asctime=True):
"""
Return the message after merging any user-supplied arguments with the message.
Args:
metadata: True if function and module name should be added.
asctime: True if time string should be added.
"""
msg = self.msg if is_string(self.msg) else str(self.msg)
if self.args:
try:
msg = msg % self.args
except:
msg += str(self.args)
if asctime: msg = "[" + self.asctime + "] " + msg
# Add metadata
if metadata:
msg += "\nCalled by %s at %s:%s\n" % (self.func_name, self.pathname, self.lineno)
return msg | def function[get_message, parameter[self, metadata, asctime]]:
constant[
Return the message after merging any user-supplied arguments with the message.
Args:
metadata: True if function and module name should be added.
asctime: True if time string should be added.
]
variable[msg] assign[=] <ast.IfExp object at 0x7da204346020>
if name[self].args begin[:]
<ast.Try object at 0x7da2043462f0>
if name[asctime] begin[:]
variable[msg] assign[=] binary_operation[binary_operation[binary_operation[constant[[] + name[self].asctime] + constant[] ]] + name[msg]]
if name[metadata] begin[:]
<ast.AugAssign object at 0x7da2043459c0>
return[name[msg]] | keyword[def] identifier[get_message] ( identifier[self] , identifier[metadata] = keyword[False] , identifier[asctime] = keyword[True] ):
literal[string]
identifier[msg] = identifier[self] . identifier[msg] keyword[if] identifier[is_string] ( identifier[self] . identifier[msg] ) keyword[else] identifier[str] ( identifier[self] . identifier[msg] )
keyword[if] identifier[self] . identifier[args] :
keyword[try] :
identifier[msg] = identifier[msg] % identifier[self] . identifier[args]
keyword[except] :
identifier[msg] += identifier[str] ( identifier[self] . identifier[args] )
keyword[if] identifier[asctime] : identifier[msg] = literal[string] + identifier[self] . identifier[asctime] + literal[string] + identifier[msg]
keyword[if] identifier[metadata] :
identifier[msg] += literal[string] %( identifier[self] . identifier[func_name] , identifier[self] . identifier[pathname] , identifier[self] . identifier[lineno] )
keyword[return] identifier[msg] | def get_message(self, metadata=False, asctime=True):
"""
Return the message after merging any user-supplied arguments with the message.
Args:
metadata: True if function and module name should be added.
asctime: True if time string should be added.
"""
msg = self.msg if is_string(self.msg) else str(self.msg)
if self.args:
try:
msg = msg % self.args # depends on [control=['try'], data=[]]
except:
msg += str(self.args) # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]]
if asctime:
msg = '[' + self.asctime + '] ' + msg # depends on [control=['if'], data=[]]
# Add metadata
if metadata:
msg += '\nCalled by %s at %s:%s\n' % (self.func_name, self.pathname, self.lineno) # depends on [control=['if'], data=[]]
return msg |
def write_config_file_value(key, value):
"""
Writes an environment variable configuration to the current
config file. This will be read in on the next restart.
The config file is created if not present.
Note: The variables will not take effect until after restart.
"""
filename = get_config_file()
config = _ConfigParser.SafeConfigParser()
config.read(filename)
__section = "Environment"
if not(config.has_section(__section)):
config.add_section(__section)
config.set(__section, key, value)
with open(filename, 'w') as config_file:
config.write(config_file) | def function[write_config_file_value, parameter[key, value]]:
constant[
Writes an environment variable configuration to the current
config file. This will be read in on the next restart.
The config file is created if not present.
Note: The variables will not take effect until after restart.
]
variable[filename] assign[=] call[name[get_config_file], parameter[]]
variable[config] assign[=] call[name[_ConfigParser].SafeConfigParser, parameter[]]
call[name[config].read, parameter[name[filename]]]
variable[__section] assign[=] constant[Environment]
if <ast.UnaryOp object at 0x7da20c993f10> begin[:]
call[name[config].add_section, parameter[name[__section]]]
call[name[config].set, parameter[name[__section], name[key], name[value]]]
with call[name[open], parameter[name[filename], constant[w]]] begin[:]
call[name[config].write, parameter[name[config_file]]] | keyword[def] identifier[write_config_file_value] ( identifier[key] , identifier[value] ):
literal[string]
identifier[filename] = identifier[get_config_file] ()
identifier[config] = identifier[_ConfigParser] . identifier[SafeConfigParser] ()
identifier[config] . identifier[read] ( identifier[filename] )
identifier[__section] = literal[string]
keyword[if] keyword[not] ( identifier[config] . identifier[has_section] ( identifier[__section] )):
identifier[config] . identifier[add_section] ( identifier[__section] )
identifier[config] . identifier[set] ( identifier[__section] , identifier[key] , identifier[value] )
keyword[with] identifier[open] ( identifier[filename] , literal[string] ) keyword[as] identifier[config_file] :
identifier[config] . identifier[write] ( identifier[config_file] ) | def write_config_file_value(key, value):
"""
Writes an environment variable configuration to the current
config file. This will be read in on the next restart.
The config file is created if not present.
Note: The variables will not take effect until after restart.
"""
filename = get_config_file()
config = _ConfigParser.SafeConfigParser()
config.read(filename)
__section = 'Environment'
if not config.has_section(__section):
config.add_section(__section) # depends on [control=['if'], data=[]]
config.set(__section, key, value)
with open(filename, 'w') as config_file:
config.write(config_file) # depends on [control=['with'], data=['config_file']] |
def rebuildtable(cls):
"""Regenerate the entire closuretree."""
cls._closure_model.objects.all().delete()
cls._closure_model.objects.bulk_create([cls._closure_model(
parent_id=x['pk'],
child_id=x['pk'],
depth=0
) for x in cls.objects.values("pk")])
for node in cls.objects.all():
node._closure_createlink() | def function[rebuildtable, parameter[cls]]:
constant[Regenerate the entire closuretree.]
call[call[name[cls]._closure_model.objects.all, parameter[]].delete, parameter[]]
call[name[cls]._closure_model.objects.bulk_create, parameter[<ast.ListComp object at 0x7da18fe914b0>]]
for taget[name[node]] in starred[call[name[cls].objects.all, parameter[]]] begin[:]
call[name[node]._closure_createlink, parameter[]] | keyword[def] identifier[rebuildtable] ( identifier[cls] ):
literal[string]
identifier[cls] . identifier[_closure_model] . identifier[objects] . identifier[all] (). identifier[delete] ()
identifier[cls] . identifier[_closure_model] . identifier[objects] . identifier[bulk_create] ([ identifier[cls] . identifier[_closure_model] (
identifier[parent_id] = identifier[x] [ literal[string] ],
identifier[child_id] = identifier[x] [ literal[string] ],
identifier[depth] = literal[int]
) keyword[for] identifier[x] keyword[in] identifier[cls] . identifier[objects] . identifier[values] ( literal[string] )])
keyword[for] identifier[node] keyword[in] identifier[cls] . identifier[objects] . identifier[all] ():
identifier[node] . identifier[_closure_createlink] () | def rebuildtable(cls):
"""Regenerate the entire closuretree."""
cls._closure_model.objects.all().delete()
cls._closure_model.objects.bulk_create([cls._closure_model(parent_id=x['pk'], child_id=x['pk'], depth=0) for x in cls.objects.values('pk')])
for node in cls.objects.all():
node._closure_createlink() # depends on [control=['for'], data=['node']] |
def gpr(src, rec, depth, res, freqtime, cf, gain=None, ab=11, aniso=None,
        epermH=None, epermV=None, mpermH=None, mpermV=None, xdirect=False,
        ht='quad', htarg=None, ft='fft', ftarg=None, opt=None, loop=None,
        verb=2):
    r"""Return the Ground-Penetrating Radar signal.
    THIS FUNCTION IS EXPERIMENTAL, USE WITH CAUTION.
    It is rather an example how you can calculate GPR responses; however, DO
    NOT RELY ON IT! It works only well with QUAD or QWE (``quad``, ``qwe``) for
    the Hankel transform, and with FFT (``fft``) for the Fourier transform.
    It calls internally ``dipole`` for the frequency-domain calculation. It
    subsequently convolves the response with a Ricker wavelet with central
    frequency ``cf``, carries out the Fourier transform, and applies a gain
    to the response.
    For input parameters see the function ``dipole``, except for:
    Parameters
    ----------
    cf : float
        Centre frequency of GPR-signal, in Hz. Sensible values are between
        10 MHz and 3000 MHz.
    gain : float
        Power of gain function. If None, no gain is applied.
    Returns
    -------
    EM : ndarray
        GPR response
    """
    if verb > 2:
        print(" GPR : EXPERIMENTAL, USE WITH CAUTION")
        print(" > centre freq : " + str(cf))
        print(" > gain : " + str(gain))
    # === 1. CHECK TIME ============
    # Check times and Fourier Transform arguments, get required frequencies
    time, freq, ft, ftarg = check_time(freqtime, 0, ft, ftarg, verb)
    # === 2. CALL DIPOLE ============
    EM = dipole(src, rec, depth, res, freq, None, ab, aniso, epermH, epermV,
                mpermH, mpermV, xdirect, ht, htarg, ft, ftarg, opt, loop, verb)
    # === 3. GPR STUFF
    # Get required parameters
    src, nsrc = check_dipole(src, 'src', 0)
    rec, nrec = check_dipole(rec, 'rec', 0)
    off, _ = get_off_ang(src, rec, nsrc, nrec, 0)
    # Reshape output from dipole: frequencies down the rows, one column
    # per source-receiver pair.
    EM = EM.reshape((-1, nrec*nsrc), order='F')
    # Multiply with ricker wavelet (spectrum of a Ricker with centre
    # frequency ``cf``).
    cfc = -(np.r_[0, freq[:-1]]/cf)**2
    fwave = cfc*np.exp(cfc)
    EM *= fwave[:, None]
    # Do f->t transform
    EM, conv = tem(EM, off, freq, time, 0, ft, ftarg)
    # In case of QWE/QUAD, print Warning if not converged
    conv_warning(conv, ftarg, 'Fourier', verb)
    # Apply gain only when requested.  ``gain=None`` means "no gain" per
    # the docstring; previously the gain factor was applied
    # unconditionally, and ``(time*10**9)**None`` raised a TypeError.
    if gain is not None:
        EM *= (1 + np.abs((time*10**9)**gain))[:, None]
    # Make pure real
    EM = EM.real
    # Reshape for number of sources
    EM = np.squeeze(EM.reshape((-1, nrec, nsrc), order='F'))
    return EM
constant[Return the Ground-Penetrating Radar signal.
THIS FUNCTION IS EXPERIMENTAL, USE WITH CAUTION.
It is rather an example how you can calculate GPR responses; however, DO
NOT RELY ON IT! It works only well with QUAD or QWE (``quad``, ``qwe``) for
the Hankel transform, and with FFT (``fft``) for the Fourier transform.
It calls internally ``dipole`` for the frequency-domain calculation. It
subsequently convolves the response with a Ricker wavelet with central
frequency ``cf``. If signal!=None, it carries out the Fourier transform and
applies a gain to the response.
For input parameters see the function ``dipole``, except for:
Parameters
----------
cf : float
Centre frequency of GPR-signal, in Hz. Sensible values are between
10 MHz and 3000 MHz.
gain : float
Power of gain function. If None, no gain is applied. Only used if
signal!=None.
Returns
-------
EM : ndarray
GPR response
]
if compare[name[verb] greater[>] constant[2]] begin[:]
call[name[print], parameter[constant[ GPR : EXPERIMENTAL, USE WITH CAUTION]]]
call[name[print], parameter[binary_operation[constant[ > centre freq : ] + call[name[str], parameter[name[cf]]]]]]
call[name[print], parameter[binary_operation[constant[ > gain : ] + call[name[str], parameter[name[gain]]]]]]
<ast.Tuple object at 0x7da18f09d450> assign[=] call[name[check_time], parameter[name[freqtime], constant[0], name[ft], name[ftarg], name[verb]]]
variable[EM] assign[=] call[name[dipole], parameter[name[src], name[rec], name[depth], name[res], name[freq], constant[None], name[ab], name[aniso], name[epermH], name[epermV], name[mpermH], name[mpermV], name[xdirect], name[ht], name[htarg], name[ft], name[ftarg], name[opt], name[loop], name[verb]]]
<ast.Tuple object at 0x7da18f09c190> assign[=] call[name[check_dipole], parameter[name[src], constant[src], constant[0]]]
<ast.Tuple object at 0x7da18f09e830> assign[=] call[name[check_dipole], parameter[name[rec], constant[rec], constant[0]]]
<ast.Tuple object at 0x7da18f09d030> assign[=] call[name[get_off_ang], parameter[name[src], name[rec], name[nsrc], name[nrec], constant[0]]]
variable[EM] assign[=] call[name[EM].reshape, parameter[tuple[[<ast.UnaryOp object at 0x7da18f09f250>, <ast.BinOp object at 0x7da18f09f0a0>]]]]
variable[cfc] assign[=] <ast.UnaryOp object at 0x7da207f989d0>
variable[fwave] assign[=] binary_operation[name[cfc] * call[name[np].exp, parameter[name[cfc]]]]
<ast.AugAssign object at 0x7da207f992a0>
<ast.Tuple object at 0x7da207f9ab00> assign[=] call[name[tem], parameter[name[EM], name[off], name[freq], name[time], constant[0], name[ft], name[ftarg]]]
call[name[conv_warning], parameter[name[conv], name[ftarg], constant[Fourier], name[verb]]]
<ast.AugAssign object at 0x7da207f9a6b0>
variable[EM] assign[=] name[EM].real
variable[EM] assign[=] call[name[np].squeeze, parameter[call[name[EM].reshape, parameter[tuple[[<ast.UnaryOp object at 0x7da1b0d57760>, <ast.Name object at 0x7da1b0d54490>, <ast.Name object at 0x7da1b0d54e80>]]]]]]
return[name[EM]] | keyword[def] identifier[gpr] ( identifier[src] , identifier[rec] , identifier[depth] , identifier[res] , identifier[freqtime] , identifier[cf] , identifier[gain] = keyword[None] , identifier[ab] = literal[int] , identifier[aniso] = keyword[None] ,
identifier[epermH] = keyword[None] , identifier[epermV] = keyword[None] , identifier[mpermH] = keyword[None] , identifier[mpermV] = keyword[None] , identifier[xdirect] = keyword[False] ,
identifier[ht] = literal[string] , identifier[htarg] = keyword[None] , identifier[ft] = literal[string] , identifier[ftarg] = keyword[None] , identifier[opt] = keyword[None] , identifier[loop] = keyword[None] ,
identifier[verb] = literal[int] ):
literal[string]
keyword[if] identifier[verb] > literal[int] :
identifier[print] ( literal[string] )
identifier[print] ( literal[string] + identifier[str] ( identifier[cf] ))
identifier[print] ( literal[string] + identifier[str] ( identifier[gain] ))
identifier[time] , identifier[freq] , identifier[ft] , identifier[ftarg] = identifier[check_time] ( identifier[freqtime] , literal[int] , identifier[ft] , identifier[ftarg] , identifier[verb] )
identifier[EM] = identifier[dipole] ( identifier[src] , identifier[rec] , identifier[depth] , identifier[res] , identifier[freq] , keyword[None] , identifier[ab] , identifier[aniso] , identifier[epermH] , identifier[epermV] ,
identifier[mpermH] , identifier[mpermV] , identifier[xdirect] , identifier[ht] , identifier[htarg] , identifier[ft] , identifier[ftarg] , identifier[opt] , identifier[loop] , identifier[verb] )
identifier[src] , identifier[nsrc] = identifier[check_dipole] ( identifier[src] , literal[string] , literal[int] )
identifier[rec] , identifier[nrec] = identifier[check_dipole] ( identifier[rec] , literal[string] , literal[int] )
identifier[off] , identifier[_] = identifier[get_off_ang] ( identifier[src] , identifier[rec] , identifier[nsrc] , identifier[nrec] , literal[int] )
identifier[EM] = identifier[EM] . identifier[reshape] ((- literal[int] , identifier[nrec] * identifier[nsrc] ), identifier[order] = literal[string] )
identifier[cfc] =-( identifier[np] . identifier[r_] [ literal[int] , identifier[freq] [:- literal[int] ]]/ identifier[cf] )** literal[int]
identifier[fwave] = identifier[cfc] * identifier[np] . identifier[exp] ( identifier[cfc] )
identifier[EM] *= identifier[fwave] [:, keyword[None] ]
identifier[EM] , identifier[conv] = identifier[tem] ( identifier[EM] , identifier[off] , identifier[freq] , identifier[time] , literal[int] , identifier[ft] , identifier[ftarg] )
identifier[conv_warning] ( identifier[conv] , identifier[ftarg] , literal[string] , identifier[verb] )
identifier[EM] *=( literal[int] + identifier[np] . identifier[abs] (( identifier[time] * literal[int] ** literal[int] )** identifier[gain] ))[:, keyword[None] ]
identifier[EM] = identifier[EM] . identifier[real]
identifier[EM] = identifier[np] . identifier[squeeze] ( identifier[EM] . identifier[reshape] ((- literal[int] , identifier[nrec] , identifier[nsrc] ), identifier[order] = literal[string] ))
keyword[return] identifier[EM] | def gpr(src, rec, depth, res, freqtime, cf, gain=None, ab=11, aniso=None, epermH=None, epermV=None, mpermH=None, mpermV=None, xdirect=False, ht='quad', htarg=None, ft='fft', ftarg=None, opt=None, loop=None, verb=2):
"""Return the Ground-Penetrating Radar signal.
THIS FUNCTION IS EXPERIMENTAL, USE WITH CAUTION.
It is rather an example how you can calculate GPR responses; however, DO
NOT RELY ON IT! It works only well with QUAD or QWE (``quad``, ``qwe``) for
the Hankel transform, and with FFT (``fft``) for the Fourier transform.
It calls internally ``dipole`` for the frequency-domain calculation. It
subsequently convolves the response with a Ricker wavelet with central
frequency ``cf``. If signal!=None, it carries out the Fourier transform and
applies a gain to the response.
For input parameters see the function ``dipole``, except for:
Parameters
----------
cf : float
Centre frequency of GPR-signal, in Hz. Sensible values are between
10 MHz and 3000 MHz.
gain : float
Power of gain function. If None, no gain is applied. Only used if
signal!=None.
Returns
-------
EM : ndarray
GPR response
"""
if verb > 2:
print(' GPR : EXPERIMENTAL, USE WITH CAUTION')
print(' > centre freq : ' + str(cf))
print(' > gain : ' + str(gain)) # depends on [control=['if'], data=[]]
# === 1. CHECK TIME ============
# Check times and Fourier Transform arguments, get required frequencies
(time, freq, ft, ftarg) = check_time(freqtime, 0, ft, ftarg, verb)
# === 2. CALL DIPOLE ============
EM = dipole(src, rec, depth, res, freq, None, ab, aniso, epermH, epermV, mpermH, mpermV, xdirect, ht, htarg, ft, ftarg, opt, loop, verb)
# === 3. GPR STUFF
# Get required parameters
(src, nsrc) = check_dipole(src, 'src', 0)
(rec, nrec) = check_dipole(rec, 'rec', 0)
(off, _) = get_off_ang(src, rec, nsrc, nrec, 0)
# Reshape output from dipole
EM = EM.reshape((-1, nrec * nsrc), order='F')
# Multiply with ricker wavelet
cfc = -(np.r_[0, freq[:-1]] / cf) ** 2
fwave = cfc * np.exp(cfc)
EM *= fwave[:, None]
# Do f->t transform
(EM, conv) = tem(EM, off, freq, time, 0, ft, ftarg)
# In case of QWE/QUAD, print Warning if not converged
conv_warning(conv, ftarg, 'Fourier', verb)
# Apply gain; make pure real
EM *= (1 + np.abs((time * 10 ** 9) ** gain))[:, None]
EM = EM.real
# Reshape for number of sources
EM = np.squeeze(EM.reshape((-1, nrec, nsrc), order='F'))
return EM |
def charge(self):
    r'''Charge of a chemical, computed with RDKit from a chemical's SMILES.
    If RDKit is not available, holds None.
    Examples
    --------
    >>> Chemical('sodium ion').charge
    1
    '''
    try:
        # Prefer RDKit's formal charge when a molecule object exists;
        # otherwise derive the charge from the chemical formula.
        if not self.rdkitmol:
            return charge_from_formula(self.formula)
        else:
            return Chem.GetFormalCharge(self.rdkitmol)
    except Exception:
        # Narrowed from a bare ``except:`` so SystemExit and
        # KeyboardInterrupt are no longer swallowed; any RDKit failure
        # still falls back to the formula-based charge.
        return charge_from_formula(self.formula)
constant[Charge of a chemical, computed with RDKit from a chemical's SMILES.
If RDKit is not available, holds None.
Examples
--------
>>> Chemical('sodium ion').charge
1
]
<ast.Try object at 0x7da204344040> | keyword[def] identifier[charge] ( identifier[self] ):
literal[string]
keyword[try] :
keyword[if] keyword[not] identifier[self] . identifier[rdkitmol] :
keyword[return] identifier[charge_from_formula] ( identifier[self] . identifier[formula] )
keyword[else] :
keyword[return] identifier[Chem] . identifier[GetFormalCharge] ( identifier[self] . identifier[rdkitmol] )
keyword[except] :
keyword[return] identifier[charge_from_formula] ( identifier[self] . identifier[formula] ) | def charge(self):
"""Charge of a chemical, computed with RDKit from a chemical's SMILES.
If RDKit is not available, holds None.
Examples
--------
>>> Chemical('sodium ion').charge
1
"""
try:
if not self.rdkitmol:
return charge_from_formula(self.formula) # depends on [control=['if'], data=[]]
else:
return Chem.GetFormalCharge(self.rdkitmol) # depends on [control=['try'], data=[]]
except:
return charge_from_formula(self.formula) # depends on [control=['except'], data=[]] |
def getGuestList(request):
    '''
    This function handles the filtering of available eventregistration-related
    invoice items and is used on the revenue reporting form.
    '''
    # Every one of these must hold before any guest data is returned:
    # a POST request carrying both ids, from an authenticated user with
    # the guest-list viewing permission.
    allowed = (
        request.method == 'POST' and
        request.POST.get('guestlist_id') and
        request.POST.get('event_id') and
        request.user.is_authenticated and
        request.user.has_perm('guestlist.view_guestlist')
    )
    if not allowed:
        return JsonResponse({})

    guest_list = GuestList.objects.filter(
        id=request.POST.get('guestlist_id'),
    ).first()
    this_event = Event.objects.filter(
        id=request.POST.get('event_id'),
    ).first()

    # Respond with an empty object when either lookup found nothing.
    if guest_list is None or this_event is None:
        return JsonResponse({})

    return JsonResponse({
        'names': guest_list.getListForEvent(this_event),
    })
constant[
This function handles the filtering of available eventregistration-related
invoice items and is used on the revenue reporting form.
]
if <ast.UnaryOp object at 0x7da1b159aa70> begin[:]
return[call[name[JsonResponse], parameter[dictionary[[], []]]]]
variable[guestList] assign[=] call[call[name[GuestList].objects.filter, parameter[]].first, parameter[]]
variable[event] assign[=] call[call[name[Event].objects.filter, parameter[]].first, parameter[]]
if <ast.BoolOp object at 0x7da1b159ad40> begin[:]
return[call[name[JsonResponse], parameter[dictionary[[], []]]]]
return[call[name[JsonResponse], parameter[dictionary[[<ast.Constant object at 0x7da1b15987c0>], [<ast.Call object at 0x7da1b1599f30>]]]]] | keyword[def] identifier[getGuestList] ( identifier[request] ):
literal[string]
keyword[if] keyword[not] (
identifier[request] . identifier[method] == literal[string] keyword[and]
identifier[request] . identifier[POST] . identifier[get] ( literal[string] ) keyword[and]
identifier[request] . identifier[POST] . identifier[get] ( literal[string] ) keyword[and]
identifier[request] . identifier[user] . identifier[is_authenticated] keyword[and]
identifier[request] . identifier[user] . identifier[has_perm] ( literal[string] )
):
keyword[return] identifier[JsonResponse] ({})
identifier[guestList] = identifier[GuestList] . identifier[objects] . identifier[filter] ( identifier[id] = identifier[request] . identifier[POST] . identifier[get] ( literal[string] )). identifier[first] ()
identifier[event] = identifier[Event] . identifier[objects] . identifier[filter] ( identifier[id] = identifier[request] . identifier[POST] . identifier[get] ( literal[string] )). identifier[first] ()
keyword[if] keyword[not] identifier[guestList] keyword[or] keyword[not] identifier[event] :
keyword[return] identifier[JsonResponse] ({})
keyword[return] identifier[JsonResponse] ({
literal[string] : identifier[guestList] . identifier[getListForEvent] ( identifier[event] ),
}) | def getGuestList(request):
"""
This function handles the filtering of available eventregistration-related
invoice items and is used on the revenue reporting form.
"""
if not (request.method == 'POST' and request.POST.get('guestlist_id') and request.POST.get('event_id') and request.user.is_authenticated and request.user.has_perm('guestlist.view_guestlist')):
return JsonResponse({}) # depends on [control=['if'], data=[]]
guestList = GuestList.objects.filter(id=request.POST.get('guestlist_id')).first()
event = Event.objects.filter(id=request.POST.get('event_id')).first()
if not guestList or not event:
return JsonResponse({}) # depends on [control=['if'], data=[]]
return JsonResponse({'names': guestList.getListForEvent(event)}) |
def normalize(self):
    """Replace rectangle with its finite version.

    Swaps the coordinate pairs in place so that x0 <= x1 and y0 <= y1,
    then returns ``self`` for chaining.
    """
    for lo, hi in (("x0", "x1"), ("y0", "y1")):
        first, second = getattr(self, lo), getattr(self, hi)
        if second < first:
            setattr(self, lo, second)
            setattr(self, hi, first)
    return self
constant[Replace rectangle with its finite version.]
if compare[name[self].x1 less[<] name[self].x0] begin[:]
<ast.Tuple object at 0x7da18f8129e0> assign[=] tuple[[<ast.Attribute object at 0x7da18f812290>, <ast.Attribute object at 0x7da18f812bc0>]]
if compare[name[self].y1 less[<] name[self].y0] begin[:]
<ast.Tuple object at 0x7da18f811f30> assign[=] tuple[[<ast.Attribute object at 0x7da18f8102e0>, <ast.Attribute object at 0x7da18f812c20>]]
return[name[self]] | keyword[def] identifier[normalize] ( identifier[self] ):
literal[string]
keyword[if] identifier[self] . identifier[x1] < identifier[self] . identifier[x0] :
identifier[self] . identifier[x0] , identifier[self] . identifier[x1] = identifier[self] . identifier[x1] , identifier[self] . identifier[x0]
keyword[if] identifier[self] . identifier[y1] < identifier[self] . identifier[y0] :
identifier[self] . identifier[y0] , identifier[self] . identifier[y1] = identifier[self] . identifier[y1] , identifier[self] . identifier[y0]
keyword[return] identifier[self] | def normalize(self):
"""Replace rectangle with its finite version."""
if self.x1 < self.x0:
(self.x0, self.x1) = (self.x1, self.x0) # depends on [control=['if'], data=[]]
if self.y1 < self.y0:
(self.y0, self.y1) = (self.y1, self.y0) # depends on [control=['if'], data=[]]
return self |
def predict_variant_coding_effect_on_transcript(
        variant,
        transcript,
        trimmed_cdna_ref,
        trimmed_cdna_alt,
        transcript_offset):
    """
    Given a minimal cDNA ref/alt nucleotide string pair and an offset into a
    given transcript, determine the coding effect of this nucleotide substitution
    onto the translated protein.
    Parameters
    ----------
    variant : Variant
    transcript : Transcript
    trimmed_cdna_ref : str
        Reference nucleotides we expect to find in the transcript's CDS
    trimmed_cdna_alt : str
        Alternate nucleotides we're replacing the reference with
    transcript_offset : int
        Offset into the full transcript sequence of the ref->alt substitution
    Returns
    -------
    The coding effect predicted by ``predict_in_frame_coding_effect`` when
    the length change is a multiple of three, otherwise by
    ``predict_frameshift_coding_effect``.
    Raises
    ------
    ValueError
        If the transcript is incomplete or its coding sequence is shorter
        than one codon.
    AssertionError
        If the transcript sequence disagrees with the expected reference
        nucleotides, or the computed CDS offset falls outside the CDS.
    """
    # Coding-effect prediction requires annotated start/stop codons.
    if not transcript.complete:
        raise ValueError(
            ("Can't annotate coding effect for %s"
             " on incomplete transcript %s" % (variant, transcript)))
    sequence = transcript.sequence
    n_ref = len(trimmed_cdna_ref)
    n_alt = len(trimmed_cdna_alt)
    # reference nucleotides found on the transcript, if these don't match
    # what we were told to expect from the variant then raise an exception
    ref_nucleotides_from_transcript = str(
        sequence[transcript_offset:transcript_offset + n_ref])
    # Make sure that the reference sequence agrees with what we expected
    # from the VCF
    assert ref_nucleotides_from_transcript == trimmed_cdna_ref, \
        "%s: expected ref '%s' at offset %d of %s, transcript has '%s'" % (
            variant,
            trimmed_cdna_ref,
            transcript_offset,
            transcript,
            ref_nucleotides_from_transcript)
    start_codon_offset = transcript.first_start_codon_spliced_offset
    stop_codon_offset = transcript.last_stop_codon_spliced_offset
    # Length of the coding sequence, in nucleotides (offsets are inclusive).
    cds_len = stop_codon_offset - start_codon_offset + 1
    # A CDS shorter than a single codon cannot encode any amino acid.
    if cds_len < 3:
        raise ValueError(
            "Coding sequence for %s is too short: '%s'" % (
                transcript,
                transcript.sequence[start_codon_offset:stop_codon_offset + 1]))
    if n_ref == 0 and transcript.strand == "-":
        # By convention, genomic insertions happen *after* their base 1 position on
        # a chromosome. On the reverse strand, however, an insertion has to go
        # before the nucleotide at some transcript offset.
        # Example:
        #    chromosome sequence:
        #        TTT|GATCTCGTA|CCC
        #    transcript on reverse strand:
        #        CCC|ATGCTCTAG|TTT
        #    where the CDS is emphasized:
        #            ATGCTCTAG
        # If we have a genomic insertion g.6insATT
        # the genomic sequence becomes:
        #       TTT|GAT_ATT_CTCGTA|CCC
        # (insert the "ATT" after the "T" at position 6)
        # On the reverse strand this becomes:
        #       CCC|ATGCTC_TTA_TAG|TTT
        # (insert the "ATT" *before* the "T" at position 10)
        #
        # To preserve the interpretation of the start offset as the base
        # before the insertion, need to subtract one
        cds_offset = transcript_offset - start_codon_offset - 1
    else:
        cds_offset = transcript_offset - start_codon_offset
    assert cds_offset < cds_len, \
        "Expected CDS offset (%d) < |CDS| (%d) for %s on %s" % (
            cds_offset, cds_len, variant, transcript)
    sequence_from_start_codon = str(sequence[start_codon_offset:])
    # is this an in-frame mutations?
    # (an indel whose net length change is a multiple of three keeps the
    # reading frame; anything else shifts it)
    if (n_ref - n_alt) % 3 == 0:
        return predict_in_frame_coding_effect(
            variant=variant,
            transcript=transcript,
            trimmed_cdna_ref=trimmed_cdna_ref,
            trimmed_cdna_alt=trimmed_cdna_alt,
            cds_offset=cds_offset,
            sequence_from_start_codon=sequence_from_start_codon)
    else:
        return predict_frameshift_coding_effect(
            variant=variant,
            transcript=transcript,
            trimmed_cdna_ref=trimmed_cdna_ref,
            trimmed_cdna_alt=trimmed_cdna_alt,
            cds_offset=cds_offset,
            sequence_from_start_codon=sequence_from_start_codon)
constant[
Given a minimal cDNA ref/alt nucleotide string pair and an offset into a
given transcript, determine the coding effect of this nucleotide substitution
onto the translated protein.
Parameters
----------
variant : Variant
transcript : Transcript
trimmed_cdna_ref : str
Reference nucleotides we expect to find in the transcript's CDS
trimmed_cdna_alt : str
Alternate nucleotides we're replacing the reference with
transcript_offset : int
Offset into the full transcript sequence of the ref->alt substitution
]
if <ast.UnaryOp object at 0x7da1b05359f0> begin[:]
<ast.Raise object at 0x7da1b056be20>
variable[sequence] assign[=] name[transcript].sequence
variable[n_ref] assign[=] call[name[len], parameter[name[trimmed_cdna_ref]]]
variable[n_alt] assign[=] call[name[len], parameter[name[trimmed_cdna_alt]]]
variable[ref_nucleotides_from_transcript] assign[=] call[name[str], parameter[call[name[sequence]][<ast.Slice object at 0x7da1b056b4f0>]]]
assert[compare[name[ref_nucleotides_from_transcript] equal[==] name[trimmed_cdna_ref]]]
variable[start_codon_offset] assign[=] name[transcript].first_start_codon_spliced_offset
variable[stop_codon_offset] assign[=] name[transcript].last_stop_codon_spliced_offset
variable[cds_len] assign[=] binary_operation[binary_operation[name[stop_codon_offset] - name[start_codon_offset]] + constant[1]]
if compare[name[cds_len] less[<] constant[3]] begin[:]
<ast.Raise object at 0x7da1b05692a0>
if <ast.BoolOp object at 0x7da1b05697e0> begin[:]
variable[cds_offset] assign[=] binary_operation[binary_operation[name[transcript_offset] - name[start_codon_offset]] - constant[1]]
assert[compare[name[cds_offset] less[<] name[cds_len]]]
variable[sequence_from_start_codon] assign[=] call[name[str], parameter[call[name[sequence]][<ast.Slice object at 0x7da1b056a140>]]]
if compare[binary_operation[binary_operation[name[n_ref] - name[n_alt]] <ast.Mod object at 0x7da2590d6920> constant[3]] equal[==] constant[0]] begin[:]
return[call[name[predict_in_frame_coding_effect], parameter[]]] | keyword[def] identifier[predict_variant_coding_effect_on_transcript] (
identifier[variant] ,
identifier[transcript] ,
identifier[trimmed_cdna_ref] ,
identifier[trimmed_cdna_alt] ,
identifier[transcript_offset] ):
literal[string]
keyword[if] keyword[not] identifier[transcript] . identifier[complete] :
keyword[raise] identifier[ValueError] (
( literal[string]
literal[string] %( identifier[variant] , identifier[transcript] )))
identifier[sequence] = identifier[transcript] . identifier[sequence]
identifier[n_ref] = identifier[len] ( identifier[trimmed_cdna_ref] )
identifier[n_alt] = identifier[len] ( identifier[trimmed_cdna_alt] )
identifier[ref_nucleotides_from_transcript] = identifier[str] (
identifier[sequence] [ identifier[transcript_offset] : identifier[transcript_offset] + identifier[n_ref] ])
keyword[assert] identifier[ref_nucleotides_from_transcript] == identifier[trimmed_cdna_ref] , literal[string] %(
identifier[variant] ,
identifier[trimmed_cdna_ref] ,
identifier[transcript_offset] ,
identifier[transcript] ,
identifier[ref_nucleotides_from_transcript] )
identifier[start_codon_offset] = identifier[transcript] . identifier[first_start_codon_spliced_offset]
identifier[stop_codon_offset] = identifier[transcript] . identifier[last_stop_codon_spliced_offset]
identifier[cds_len] = identifier[stop_codon_offset] - identifier[start_codon_offset] + literal[int]
keyword[if] identifier[cds_len] < literal[int] :
keyword[raise] identifier[ValueError] (
literal[string] %(
identifier[transcript] ,
identifier[transcript] . identifier[sequence] [ identifier[start_codon_offset] : identifier[stop_codon_offset] + literal[int] ]))
keyword[if] identifier[n_ref] == literal[int] keyword[and] identifier[transcript] . identifier[strand] == literal[string] :
identifier[cds_offset] = identifier[transcript_offset] - identifier[start_codon_offset] - literal[int]
keyword[else] :
identifier[cds_offset] = identifier[transcript_offset] - identifier[start_codon_offset]
keyword[assert] identifier[cds_offset] < identifier[cds_len] , literal[string] %(
identifier[cds_offset] , identifier[cds_len] , identifier[variant] , identifier[transcript] )
identifier[sequence_from_start_codon] = identifier[str] ( identifier[sequence] [ identifier[start_codon_offset] :])
keyword[if] ( identifier[n_ref] - identifier[n_alt] )% literal[int] == literal[int] :
keyword[return] identifier[predict_in_frame_coding_effect] (
identifier[variant] = identifier[variant] ,
identifier[transcript] = identifier[transcript] ,
identifier[trimmed_cdna_ref] = identifier[trimmed_cdna_ref] ,
identifier[trimmed_cdna_alt] = identifier[trimmed_cdna_alt] ,
identifier[cds_offset] = identifier[cds_offset] ,
identifier[sequence_from_start_codon] = identifier[sequence_from_start_codon] )
keyword[else] :
keyword[return] identifier[predict_frameshift_coding_effect] (
identifier[variant] = identifier[variant] ,
identifier[transcript] = identifier[transcript] ,
identifier[trimmed_cdna_ref] = identifier[trimmed_cdna_ref] ,
identifier[trimmed_cdna_alt] = identifier[trimmed_cdna_alt] ,
identifier[cds_offset] = identifier[cds_offset] ,
identifier[sequence_from_start_codon] = identifier[sequence_from_start_codon] ) | def predict_variant_coding_effect_on_transcript(variant, transcript, trimmed_cdna_ref, trimmed_cdna_alt, transcript_offset):
"""
Given a minimal cDNA ref/alt nucleotide string pair and an offset into a
given transcript, determine the coding effect of this nucleotide substitution
onto the translated protein.
Parameters
----------
variant : Variant
transcript : Transcript
trimmed_cdna_ref : str
Reference nucleotides we expect to find in the transcript's CDS
trimmed_cdna_alt : str
Alternate nucleotides we're replacing the reference with
transcript_offset : int
Offset into the full transcript sequence of the ref->alt substitution
"""
if not transcript.complete:
raise ValueError("Can't annotate coding effect for %s on incomplete transcript %s" % (variant, transcript)) # depends on [control=['if'], data=[]]
sequence = transcript.sequence
n_ref = len(trimmed_cdna_ref)
n_alt = len(trimmed_cdna_alt)
# reference nucleotides found on the transcript, if these don't match
# what we were told to expect from the variant then raise an exception
ref_nucleotides_from_transcript = str(sequence[transcript_offset:transcript_offset + n_ref])
# Make sure that the reference sequence agrees with what we expected
# from the VCF
assert ref_nucleotides_from_transcript == trimmed_cdna_ref, "%s: expected ref '%s' at offset %d of %s, transcript has '%s'" % (variant, trimmed_cdna_ref, transcript_offset, transcript, ref_nucleotides_from_transcript)
start_codon_offset = transcript.first_start_codon_spliced_offset
stop_codon_offset = transcript.last_stop_codon_spliced_offset
cds_len = stop_codon_offset - start_codon_offset + 1
if cds_len < 3:
raise ValueError("Coding sequence for %s is too short: '%s'" % (transcript, transcript.sequence[start_codon_offset:stop_codon_offset + 1])) # depends on [control=['if'], data=[]]
if n_ref == 0 and transcript.strand == '-':
# By convention, genomic insertions happen *after* their base 1 position on
# a chromosome. On the reverse strand, however, an insertion has to go
# before the nucleotide at some transcript offset.
# Example:
# chromosome sequence:
# TTT|GATCTCGTA|CCC
# transcript on reverse strand:
# CCC|ATGCTCTAG|TTT
# where the CDS is emphasized:
# ATGCTCTAG
# If we have a genomic insertion g.6insATT
# the genomic sequence becomes:
# TTT|GAT_ATT_CTCGTA|CCC
# (insert the "ATT" after the "T" at position 6)
# On the reverse strand this becomes:
# CCC|ATGCTC_TTA_TAG|TTT
# (insert the "ATT" *before* the "T" at position 10)
#
# To preserve the interpretation of the start offset as the base
# before the insertion, need to subtract one
cds_offset = transcript_offset - start_codon_offset - 1 # depends on [control=['if'], data=[]]
else:
cds_offset = transcript_offset - start_codon_offset
assert cds_offset < cds_len, 'Expected CDS offset (%d) < |CDS| (%d) for %s on %s' % (cds_offset, cds_len, variant, transcript)
sequence_from_start_codon = str(sequence[start_codon_offset:])
# is this an in-frame mutations?
if (n_ref - n_alt) % 3 == 0:
return predict_in_frame_coding_effect(variant=variant, transcript=transcript, trimmed_cdna_ref=trimmed_cdna_ref, trimmed_cdna_alt=trimmed_cdna_alt, cds_offset=cds_offset, sequence_from_start_codon=sequence_from_start_codon) # depends on [control=['if'], data=[]]
else:
return predict_frameshift_coding_effect(variant=variant, transcript=transcript, trimmed_cdna_ref=trimmed_cdna_ref, trimmed_cdna_alt=trimmed_cdna_alt, cds_offset=cds_offset, sequence_from_start_codon=sequence_from_start_codon) |
def execute(
    command,
    abort=True,
    capture=False,
    verbose=False,
    echo=False,
    stream=None,
):
    """Run a command locally through the shell.

    Arguments:
        command: a command to execute.
        abort: If True, a non-zero return code will trigger an exception.
        capture: If True, returns the output of the command.
            If False, returns the ``subprocess.Popen`` object.
        echo: if True, prints the command before executing it.
        verbose: If True, stdout/stderr of the command are written to
            ``stream`` as the command runs; otherwise they are captured
            through pipes.
        stream: If set, stdout/stderr will be redirected to the given stream
            (defaults to ``sys.stdout``). Ignored if `capture` is True.

    Raises:
        Exception: if ``abort`` is True and the command exits non-zero.
    """
    stream = stream or sys.stdout
    if echo:
        out = stream
        # NOTE(review): no trailing newline is written after the echoed
        # command -- confirm whether that is intentional.
        out.write(u'$ %s' % command)
    # Capture stdout and stderr in the same stream
    command = u'%s 2>&1' % command
    if verbose:
        out = stream
        err = stream
    else:
        out = subprocess.PIPE
        err = subprocess.PIPE
    # shell=True is required so the `2>&1` redirection above is honoured.
    process = subprocess.Popen(
        command,
        shell=True,
        stdout=out,
        stderr=err,
    )
    # propagate SIGTERM to all child processes within
    # the process group. this prevents subprocesses from
    # being orphaned when the current process is terminated
    signal.signal(
        signal.SIGTERM,
        make_terminate_handler(process)
    )
    # Wait for the process to complete
    stdout, _ = process.communicate()
    stdout = stdout.strip() if stdout else ''
    # `unicode` is a Python 2 builtin -- this function targets Python 2.
    if not isinstance(stdout, unicode):
        stdout = stdout.decode('utf-8')
    if abort and process.returncode != 0:
        # `%` binds tighter than the conditional expression, so the
        # decorated output section is appended only when stdout is non-empty.
        message = (
            u'Error #%d running "%s"%s' % (
                process.returncode,
                command,
                ':\n====================\n'
                '%s\n'
                '====================\n' % (
                    stdout
                ) if stdout else ''
            )
        )
        raise Exception(message)
    if capture:
        return stdout
    else:
        return process
constant[Run a command locally.
Arguments:
command: a command to execute.
abort: If True, a non-zero return code will trigger an exception.
capture: If True, returns the output of the command.
If False, returns a subprocess result.
echo: if True, prints the command before executing it.
verbose: If True, prints the output of the command.
stream: If set, stdout/stderr will be redirected to the given stream.
Ignored if `capture` is True.
]
variable[stream] assign[=] <ast.BoolOp object at 0x7da20e9b32e0>
if name[echo] begin[:]
variable[out] assign[=] name[stream]
call[name[out].write, parameter[binary_operation[constant[$ %s] <ast.Mod object at 0x7da2590d6920> name[command]]]]
variable[command] assign[=] binary_operation[constant[%s 2>&1] <ast.Mod object at 0x7da2590d6920> name[command]]
if name[verbose] begin[:]
variable[out] assign[=] name[stream]
variable[err] assign[=] name[stream]
variable[process] assign[=] call[name[subprocess].Popen, parameter[name[command]]]
call[name[signal].signal, parameter[name[signal].SIGTERM, call[name[make_terminate_handler], parameter[name[process]]]]]
<ast.Tuple object at 0x7da20e9b0280> assign[=] call[name[process].communicate, parameter[]]
variable[stdout] assign[=] <ast.IfExp object at 0x7da20e9b0a60>
if <ast.UnaryOp object at 0x7da20e9b20e0> begin[:]
variable[stdout] assign[=] call[name[stdout].decode, parameter[constant[utf-8]]]
if <ast.BoolOp object at 0x7da20e9b2680> begin[:]
variable[message] assign[=] binary_operation[constant[Error #%d running "%s"%s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Attribute object at 0x7da20e9b08e0>, <ast.Name object at 0x7da20e9b0e80>, <ast.IfExp object at 0x7da20e9b0760>]]]
<ast.Raise object at 0x7da20e9b2740>
if name[capture] begin[:]
return[name[stdout]] | keyword[def] identifier[execute] (
identifier[command] ,
identifier[abort] = keyword[True] ,
identifier[capture] = keyword[False] ,
identifier[verbose] = keyword[False] ,
identifier[echo] = keyword[False] ,
identifier[stream] = keyword[None] ,
):
literal[string]
identifier[stream] = identifier[stream] keyword[or] identifier[sys] . identifier[stdout]
keyword[if] identifier[echo] :
identifier[out] = identifier[stream]
identifier[out] . identifier[write] ( literal[string] % identifier[command] )
identifier[command] = literal[string] % identifier[command]
keyword[if] identifier[verbose] :
identifier[out] = identifier[stream]
identifier[err] = identifier[stream]
keyword[else] :
identifier[out] = identifier[subprocess] . identifier[PIPE]
identifier[err] = identifier[subprocess] . identifier[PIPE]
identifier[process] = identifier[subprocess] . identifier[Popen] (
identifier[command] ,
identifier[shell] = keyword[True] ,
identifier[stdout] = identifier[out] ,
identifier[stderr] = identifier[err] ,
)
identifier[signal] . identifier[signal] (
identifier[signal] . identifier[SIGTERM] ,
identifier[make_terminate_handler] ( identifier[process] )
)
identifier[stdout] , identifier[_] = identifier[process] . identifier[communicate] ()
identifier[stdout] = identifier[stdout] . identifier[strip] () keyword[if] identifier[stdout] keyword[else] literal[string]
keyword[if] keyword[not] identifier[isinstance] ( identifier[stdout] , identifier[unicode] ):
identifier[stdout] = identifier[stdout] . identifier[decode] ( literal[string] )
keyword[if] identifier[abort] keyword[and] identifier[process] . identifier[returncode] != literal[int] :
identifier[message] =(
literal[string] %(
identifier[process] . identifier[returncode] ,
identifier[command] ,
literal[string]
literal[string]
literal[string] %(
identifier[stdout]
) keyword[if] identifier[stdout] keyword[else] literal[string]
)
)
keyword[raise] identifier[Exception] ( identifier[message] )
keyword[if] identifier[capture] :
keyword[return] identifier[stdout]
keyword[else] :
keyword[return] identifier[process] | def execute(command, abort=True, capture=False, verbose=False, echo=False, stream=None):
"""Run a command locally.
Arguments:
command: a command to execute.
abort: If True, a non-zero return code will trigger an exception.
capture: If True, returns the output of the command.
If False, returns a subprocess result.
echo: if True, prints the command before executing it.
verbose: If True, prints the output of the command.
stream: If set, stdout/stderr will be redirected to the given stream.
Ignored if `capture` is True.
"""
stream = stream or sys.stdout
if echo:
out = stream
out.write(u'$ %s' % command) # depends on [control=['if'], data=[]]
# Capture stdout and stderr in the same stream
command = u'%s 2>&1' % command
if verbose:
out = stream
err = stream # depends on [control=['if'], data=[]]
else:
out = subprocess.PIPE
err = subprocess.PIPE
process = subprocess.Popen(command, shell=True, stdout=out, stderr=err)
# propagate SIGTERM to all child processes within
# the process group. this prevents subprocesses from
# being orphaned when the current process is terminated
signal.signal(signal.SIGTERM, make_terminate_handler(process))
# Wait for the process to complete
(stdout, _) = process.communicate()
stdout = stdout.strip() if stdout else ''
if not isinstance(stdout, unicode):
stdout = stdout.decode('utf-8') # depends on [control=['if'], data=[]]
if abort and process.returncode != 0:
message = u'Error #%d running "%s"%s' % (process.returncode, command, ':\n====================\n%s\n====================\n' % stdout if stdout else '')
raise Exception(message) # depends on [control=['if'], data=[]]
if capture:
return stdout # depends on [control=['if'], data=[]]
else:
return process |
def configs_in(src_dir):
    """Yield each JSON config found in `src_dir`, parsed into Python objects."""
    for name in files_in_dir(src_dir, 'json'):
        path = os.path.join(src_dir, name)
        with open(path, 'rb') as handle:
            yield json.load(handle)
constant[Enumerate all configs in src_dir]
for taget[name[filename]] in starred[call[name[files_in_dir], parameter[name[src_dir], constant[json]]]] begin[:]
with call[name[open], parameter[call[name[os].path.join, parameter[name[src_dir], name[filename]]], constant[rb]]] begin[:]
<ast.Yield object at 0x7da1b1f71ae0> | keyword[def] identifier[configs_in] ( identifier[src_dir] ):
literal[string]
keyword[for] identifier[filename] keyword[in] identifier[files_in_dir] ( identifier[src_dir] , literal[string] ):
keyword[with] identifier[open] ( identifier[os] . identifier[path] . identifier[join] ( identifier[src_dir] , identifier[filename] ), literal[string] ) keyword[as] identifier[in_f] :
keyword[yield] identifier[json] . identifier[load] ( identifier[in_f] ) | def configs_in(src_dir):
"""Enumerate all configs in src_dir"""
for filename in files_in_dir(src_dir, 'json'):
with open(os.path.join(src_dir, filename), 'rb') as in_f:
yield json.load(in_f) # depends on [control=['with'], data=['in_f']] # depends on [control=['for'], data=['filename']] |
def create_list(self, list_json):
    '''
    Build a List object out of its raw JSON representation.

    Returns:
        List: The list described by `list_json`.
    '''
    attributes = {
        'trello_client': self,
        'list_id': list_json['id'],
        'name': list_json['name'],
        'data': list_json,
    }
    return trolly.list.List(**attributes)
constant[
Create List object from JSON object
Returns:
List: The list from the given `list_json`.
]
return[call[name[trolly].list.List, parameter[]]] | keyword[def] identifier[create_list] ( identifier[self] , identifier[list_json] ):
literal[string]
keyword[return] identifier[trolly] . identifier[list] . identifier[List] (
identifier[trello_client] = identifier[self] ,
identifier[list_id] = identifier[list_json] [ literal[string] ],
identifier[name] = identifier[list_json] [ literal[string] ],
identifier[data] = identifier[list_json] ,
) | def create_list(self, list_json):
"""
Create List object from JSON object
Returns:
List: The list from the given `list_json`.
"""
return trolly.list.List(trello_client=self, list_id=list_json['id'], name=list_json['name'], data=list_json) |
def add_control_number(self, tag, value):
    """Add a control-number 00x for given tag with value.

    Arguments:
        tag: the control field tag (presumably a MARC 00x tag such as
            '001' -- confirm against `record_add_field`'s contract).
        value: the plain value stored in the control field.
    """
    # Control fields carry a single value; no indicators or subfields.
    record_add_field(self.record,
                     tag,
                     controlfield_value=value)
constant[Add a control-number 00x for given tag with value.]
call[name[record_add_field], parameter[name[self].record, name[tag]]] | keyword[def] identifier[add_control_number] ( identifier[self] , identifier[tag] , identifier[value] ):
literal[string]
identifier[record_add_field] ( identifier[self] . identifier[record] ,
identifier[tag] ,
identifier[controlfield_value] = identifier[value] ) | def add_control_number(self, tag, value):
"""Add a control-number 00x for given tag with value."""
record_add_field(self.record, tag, controlfield_value=value) |
def get_chat_administrators(self, *args, **kwargs):
    """See :func:`get_chat_administrators`.

    Bound convenience wrapper: forwards positional arguments unchanged,
    merges this instance's overrides into the keyword arguments, and
    executes the resulting request immediately via ``.run()``.
    """
    return get_chat_administrators(*args, **self._merge_overrides(**kwargs)).run()
constant[See :func:`get_chat_administrators`]
return[call[call[name[get_chat_administrators], parameter[<ast.Starred object at 0x7da1b0e9cf70>]].run, parameter[]]] | keyword[def] identifier[get_chat_administrators] ( identifier[self] ,* identifier[args] ,** identifier[kwargs] ):
literal[string]
keyword[return] identifier[get_chat_administrators] (* identifier[args] ,** identifier[self] . identifier[_merge_overrides] (** identifier[kwargs] )). identifier[run] () | def get_chat_administrators(self, *args, **kwargs):
"""See :func:`get_chat_administrators`"""
return get_chat_administrators(*args, **self._merge_overrides(**kwargs)).run() |
def toggle_wrap_mode(self, checked):
    """Toggle wrap mode.

    Arguments:
        checked: new wrap-mode state (True enables line wrapping).
    """
    # Apply the state to the shell widget, then record it in the
    # plugin's option store so the choice is remembered.
    self.shell.toggle_wrap_mode(checked)
    self.set_option('wrap', checked)
constant[Toggle wrap mode]
call[name[self].shell.toggle_wrap_mode, parameter[name[checked]]]
call[name[self].set_option, parameter[constant[wrap], name[checked]]] | keyword[def] identifier[toggle_wrap_mode] ( identifier[self] , identifier[checked] ):
literal[string]
identifier[self] . identifier[shell] . identifier[toggle_wrap_mode] ( identifier[checked] )
identifier[self] . identifier[set_option] ( literal[string] , identifier[checked] ) | def toggle_wrap_mode(self, checked):
"""Toggle wrap mode"""
self.shell.toggle_wrap_mode(checked)
self.set_option('wrap', checked) |
def get_prev_block_hash(block_representation, coin_symbol='btc', api_key=None):
    '''
    Takes a block_representation and returns the previous block hash
    '''
    overview = get_block_overview(
        block_representation=block_representation,
        coin_symbol=coin_symbol,
        txn_limit=1,
        api_key=api_key,
    )
    return overview['prev_block']
constant[
Takes a block_representation and returns the previous block hash
]
return[call[call[name[get_block_overview], parameter[]]][constant[prev_block]]] | keyword[def] identifier[get_prev_block_hash] ( identifier[block_representation] , identifier[coin_symbol] = literal[string] , identifier[api_key] = keyword[None] ):
literal[string]
keyword[return] identifier[get_block_overview] ( identifier[block_representation] = identifier[block_representation] ,
identifier[coin_symbol] = identifier[coin_symbol] , identifier[txn_limit] = literal[int] , identifier[api_key] = identifier[api_key] )[ literal[string] ] | def get_prev_block_hash(block_representation, coin_symbol='btc', api_key=None):
"""
Takes a block_representation and returns the previous block hash
"""
return get_block_overview(block_representation=block_representation, coin_symbol=coin_symbol, txn_limit=1, api_key=api_key)['prev_block'] |
def is_not_null_predicate(
    raw_crash, dumps, processed_crash, processor, key=''
):
    """a predicate that converts the key'd source to boolean.

    parameters:
        raw_crash - dict
        dumps - placeholder in a fat interface - unused
        processed_crash - placeholder in a fat interface - unused
        processor - placeholder in a fat interface - unused
    """
    # Missing key counts as "null"; otherwise apply truthiness.
    try:
        value = raw_crash[key]
    except KeyError:
        return False
    return bool(value)
constant[a predicate that converts the key'd source to boolean.
parameters:
raw_crash - dict
dumps - placeholder in a fat interface - unused
processed_crash - placeholder in a fat interface - unused
processor - placeholder in a fat interface - unused
]
<ast.Try object at 0x7da18dc9be20> | keyword[def] identifier[is_not_null_predicate] (
identifier[raw_crash] , identifier[dumps] , identifier[processed_crash] , identifier[processor] , identifier[key] = literal[string]
):
literal[string]
keyword[try] :
keyword[return] identifier[bool] ( identifier[raw_crash] [ identifier[key] ])
keyword[except] identifier[KeyError] :
keyword[return] keyword[False] | def is_not_null_predicate(raw_crash, dumps, processed_crash, processor, key=''):
"""a predicate that converts the key'd source to boolean.
parameters:
raw_crash - dict
dumps - placeholder in a fat interface - unused
processed_crash - placeholder in a fat interface - unused
processor - placeholder in a fat interface - unused
"""
try:
return bool(raw_crash[key]) # depends on [control=['try'], data=[]]
except KeyError:
return False # depends on [control=['except'], data=[]] |
def create_collection(cls, collection, **kwargs):
    """Create Collection

    Create a new Collection.
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async=True
    >>> thread = api.create_collection(collection, async=True)
    >>> result = thread.get()

    :param async bool
    :param Collection collection: Attributes of collection to create (required)
    :return: Collection
             If the method is called asynchronously,
             returns the request thread.
    """
    kwargs['_return_http_data_only'] = True
    # Both the async and sync paths delegate to the same helper; the
    # helper returns the request thread when kwargs['async'] is truthy
    # and the response data otherwise, so a single delegation suffices.
    return cls._create_collection_with_http_info(collection, **kwargs)
constant[Create Collection
Create a new Collection
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.create_collection(collection, async=True)
>>> result = thread.get()
:param async bool
:param Collection collection: Attributes of collection to create (required)
:return: Collection
If the method is called asynchronously,
returns the request thread.
]
call[name[kwargs]][constant[_return_http_data_only]] assign[=] constant[True]
if call[name[kwargs].get, parameter[constant[async]]] begin[:]
return[call[name[cls]._create_collection_with_http_info, parameter[name[collection]]]] | keyword[def] identifier[create_collection] ( identifier[cls] , identifier[collection] ,** identifier[kwargs] ):
literal[string]
identifier[kwargs] [ literal[string] ]= keyword[True]
keyword[if] identifier[kwargs] . identifier[get] ( literal[string] ):
keyword[return] identifier[cls] . identifier[_create_collection_with_http_info] ( identifier[collection] ,** identifier[kwargs] )
keyword[else] :
( identifier[data] )= identifier[cls] . identifier[_create_collection_with_http_info] ( identifier[collection] ,** identifier[kwargs] )
keyword[return] identifier[data] | def create_collection(cls, collection, **kwargs):
"""Create Collection
Create a new Collection
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.create_collection(collection, async=True)
>>> result = thread.get()
:param async bool
:param Collection collection: Attributes of collection to create (required)
:return: Collection
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return cls._create_collection_with_http_info(collection, **kwargs) # depends on [control=['if'], data=[]]
else:
data = cls._create_collection_with_http_info(collection, **kwargs)
return data |
def cov(self, col1, col2):
    """
    Calculate the sample covariance for the given columns, specified by their names, as a
    double value. :func:`DataFrame.cov` and :func:`DataFrameStatFunctions.cov` are aliases.

    :param col1: The name of the first column
    :param col2: The name of the second column
    """
    # Validate both column names before delegating to the JVM side.
    for arg_name, arg_value in (('col1', col1), ('col2', col2)):
        if not isinstance(arg_value, basestring):
            raise ValueError("%s should be a string." % arg_name)
    return self._jdf.stat().cov(col1, col2)
constant[
Calculate the sample covariance for the given columns, specified by their names, as a
double value. :func:`DataFrame.cov` and :func:`DataFrameStatFunctions.cov` are aliases.
:param col1: The name of the first column
:param col2: The name of the second column
]
if <ast.UnaryOp object at 0x7da207f9ab00> begin[:]
<ast.Raise object at 0x7da207f9a1a0>
if <ast.UnaryOp object at 0x7da18f58e1d0> begin[:]
<ast.Raise object at 0x7da18f58c520>
return[call[call[name[self]._jdf.stat, parameter[]].cov, parameter[name[col1], name[col2]]]] | keyword[def] identifier[cov] ( identifier[self] , identifier[col1] , identifier[col2] ):
literal[string]
keyword[if] keyword[not] identifier[isinstance] ( identifier[col1] , identifier[basestring] ):
keyword[raise] identifier[ValueError] ( literal[string] )
keyword[if] keyword[not] identifier[isinstance] ( identifier[col2] , identifier[basestring] ):
keyword[raise] identifier[ValueError] ( literal[string] )
keyword[return] identifier[self] . identifier[_jdf] . identifier[stat] (). identifier[cov] ( identifier[col1] , identifier[col2] ) | def cov(self, col1, col2):
"""
Calculate the sample covariance for the given columns, specified by their names, as a
double value. :func:`DataFrame.cov` and :func:`DataFrameStatFunctions.cov` are aliases.
:param col1: The name of the first column
:param col2: The name of the second column
"""
if not isinstance(col1, basestring):
raise ValueError('col1 should be a string.') # depends on [control=['if'], data=[]]
if not isinstance(col2, basestring):
raise ValueError('col2 should be a string.') # depends on [control=['if'], data=[]]
return self._jdf.stat().cov(col1, col2) |
def nug(self):
    r'''Kinematic viscosity of the gas phase of the chemical at its
    current temperature and pressure, in units of [m^2/s].
    .. math::
        \nu = \frac{\mu}{\rho}
    Computed from the temperature- and pressure-dependent gas viscosity
    (``mug``) and gas density (``rhog``) properties; returns None when
    either underlying property is unavailable.
    Examples
    --------
    >>> Chemical('methane', T=115).nug
    2.5056924327995865e-06
    '''
    viscosity = self.mug
    density = self.rhog
    if viscosity and density:
        return nu_mu_converter(mu=viscosity, rho=density)
    return None
constant[Kinematic viscosity of the gas phase of the chemical at its
current temperature and pressure, in units of [m^2/s].
.. math::
\nu = \frac{\mu}{\rho}
Utilizes the temperature and pressure dependent object oriented
interfaces :obj:`thermo.volume.VolumeGas`,
:obj:`thermo.viscosity.ViscosityGas` to calculate the
actual properties.
Examples
--------
>>> Chemical('methane', T=115).nug
2.5056924327995865e-06
]
<ast.Tuple object at 0x7da2043441c0> assign[=] tuple[[<ast.Attribute object at 0x7da204345d20>, <ast.Attribute object at 0x7da204347580>]]
if call[name[all], parameter[list[[<ast.Name object at 0x7da204345d50>, <ast.Name object at 0x7da2043448b0>]]]] begin[:]
return[call[name[nu_mu_converter], parameter[]]]
return[constant[None]] | keyword[def] identifier[nug] ( identifier[self] ):
literal[string]
identifier[mug] , identifier[rhog] = identifier[self] . identifier[mug] , identifier[self] . identifier[rhog]
keyword[if] identifier[all] ([ identifier[mug] , identifier[rhog] ]):
keyword[return] identifier[nu_mu_converter] ( identifier[mu] = identifier[mug] , identifier[rho] = identifier[rhog] )
keyword[return] keyword[None] | def nug(self):
"""Kinematic viscosity of the gas phase of the chemical at its
current temperature and pressure, in units of [m^2/s].
.. math::
\\nu = \\frac{\\mu}{\\rho}
Utilizes the temperature and pressure dependent object oriented
interfaces :obj:`thermo.volume.VolumeGas`,
:obj:`thermo.viscosity.ViscosityGas` to calculate the
actual properties.
Examples
--------
>>> Chemical('methane', T=115).nug
2.5056924327995865e-06
"""
(mug, rhog) = (self.mug, self.rhog)
if all([mug, rhog]):
return nu_mu_converter(mu=mug, rho=rhog) # depends on [control=['if'], data=[]]
return None |
def _bresenham(self, faces, dx):
r'''
A Bresenham line function to generate points to fill in for the fibers
'''
line_points = []
for face in faces:
# Get in hull order
fx = face[:, 0]
fy = face[:, 1]
fz = face[:, 2]
# Find the axis with the smallest spread and remove it to make 2D
if (np.std(fx) < np.std(fy)) and (np.std(fx) < np.std(fz)):
f2d = np.vstack((fy, fz)).T
elif (np.std(fy) < np.std(fx)) and (np.std(fy) < np.std(fz)):
f2d = np.vstack((fx, fz)).T
else:
f2d = np.vstack((fx, fy)).T
hull = sptl.ConvexHull(f2d, qhull_options='QJ Pp')
face = np.around(face[hull.vertices].astype(float), 6)
for i in range(len(face)):
vec = face[i]-face[i-1]
vec_length = np.linalg.norm(vec)
increments = np.ceil(vec_length/dx)
check_p_old = np.array([-1, -1, -1])
for x in np.linspace(0, 1, increments):
check_p_new = face[i-1]+(vec*x)
if np.sum(check_p_new - check_p_old) != 0:
line_points.append(check_p_new)
check_p_old = check_p_new
return np.asarray(line_points) | def function[_bresenham, parameter[self, faces, dx]]:
constant[
A Bresenham line function to generate points to fill in for the fibers
]
variable[line_points] assign[=] list[[]]
for taget[name[face]] in starred[name[faces]] begin[:]
variable[fx] assign[=] call[name[face]][tuple[[<ast.Slice object at 0x7da204567280>, <ast.Constant object at 0x7da2045643d0>]]]
variable[fy] assign[=] call[name[face]][tuple[[<ast.Slice object at 0x7da204564d00>, <ast.Constant object at 0x7da204565cf0>]]]
variable[fz] assign[=] call[name[face]][tuple[[<ast.Slice object at 0x7da204566c50>, <ast.Constant object at 0x7da2045672b0>]]]
if <ast.BoolOp object at 0x7da2045643a0> begin[:]
variable[f2d] assign[=] call[name[np].vstack, parameter[tuple[[<ast.Name object at 0x7da204567e20>, <ast.Name object at 0x7da204567970>]]]].T
variable[hull] assign[=] call[name[sptl].ConvexHull, parameter[name[f2d]]]
variable[face] assign[=] call[name[np].around, parameter[call[call[name[face]][name[hull].vertices].astype, parameter[name[float]]], constant[6]]]
for taget[name[i]] in starred[call[name[range], parameter[call[name[len], parameter[name[face]]]]]] begin[:]
variable[vec] assign[=] binary_operation[call[name[face]][name[i]] - call[name[face]][binary_operation[name[i] - constant[1]]]]
variable[vec_length] assign[=] call[name[np].linalg.norm, parameter[name[vec]]]
variable[increments] assign[=] call[name[np].ceil, parameter[binary_operation[name[vec_length] / name[dx]]]]
variable[check_p_old] assign[=] call[name[np].array, parameter[list[[<ast.UnaryOp object at 0x7da204566140>, <ast.UnaryOp object at 0x7da204564d60>, <ast.UnaryOp object at 0x7da204565510>]]]]
for taget[name[x]] in starred[call[name[np].linspace, parameter[constant[0], constant[1], name[increments]]]] begin[:]
variable[check_p_new] assign[=] binary_operation[call[name[face]][binary_operation[name[i] - constant[1]]] + binary_operation[name[vec] * name[x]]]
if compare[call[name[np].sum, parameter[binary_operation[name[check_p_new] - name[check_p_old]]]] not_equal[!=] constant[0]] begin[:]
call[name[line_points].append, parameter[name[check_p_new]]]
variable[check_p_old] assign[=] name[check_p_new]
return[call[name[np].asarray, parameter[name[line_points]]]] | keyword[def] identifier[_bresenham] ( identifier[self] , identifier[faces] , identifier[dx] ):
literal[string]
identifier[line_points] =[]
keyword[for] identifier[face] keyword[in] identifier[faces] :
identifier[fx] = identifier[face] [:, literal[int] ]
identifier[fy] = identifier[face] [:, literal[int] ]
identifier[fz] = identifier[face] [:, literal[int] ]
keyword[if] ( identifier[np] . identifier[std] ( identifier[fx] )< identifier[np] . identifier[std] ( identifier[fy] )) keyword[and] ( identifier[np] . identifier[std] ( identifier[fx] )< identifier[np] . identifier[std] ( identifier[fz] )):
identifier[f2d] = identifier[np] . identifier[vstack] (( identifier[fy] , identifier[fz] )). identifier[T]
keyword[elif] ( identifier[np] . identifier[std] ( identifier[fy] )< identifier[np] . identifier[std] ( identifier[fx] )) keyword[and] ( identifier[np] . identifier[std] ( identifier[fy] )< identifier[np] . identifier[std] ( identifier[fz] )):
identifier[f2d] = identifier[np] . identifier[vstack] (( identifier[fx] , identifier[fz] )). identifier[T]
keyword[else] :
identifier[f2d] = identifier[np] . identifier[vstack] (( identifier[fx] , identifier[fy] )). identifier[T]
identifier[hull] = identifier[sptl] . identifier[ConvexHull] ( identifier[f2d] , identifier[qhull_options] = literal[string] )
identifier[face] = identifier[np] . identifier[around] ( identifier[face] [ identifier[hull] . identifier[vertices] ]. identifier[astype] ( identifier[float] ), literal[int] )
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[len] ( identifier[face] )):
identifier[vec] = identifier[face] [ identifier[i] ]- identifier[face] [ identifier[i] - literal[int] ]
identifier[vec_length] = identifier[np] . identifier[linalg] . identifier[norm] ( identifier[vec] )
identifier[increments] = identifier[np] . identifier[ceil] ( identifier[vec_length] / identifier[dx] )
identifier[check_p_old] = identifier[np] . identifier[array] ([- literal[int] ,- literal[int] ,- literal[int] ])
keyword[for] identifier[x] keyword[in] identifier[np] . identifier[linspace] ( literal[int] , literal[int] , identifier[increments] ):
identifier[check_p_new] = identifier[face] [ identifier[i] - literal[int] ]+( identifier[vec] * identifier[x] )
keyword[if] identifier[np] . identifier[sum] ( identifier[check_p_new] - identifier[check_p_old] )!= literal[int] :
identifier[line_points] . identifier[append] ( identifier[check_p_new] )
identifier[check_p_old] = identifier[check_p_new]
keyword[return] identifier[np] . identifier[asarray] ( identifier[line_points] ) | def _bresenham(self, faces, dx):
"""
A Bresenham line function to generate points to fill in for the fibers
"""
line_points = []
for face in faces:
# Get in hull order
fx = face[:, 0]
fy = face[:, 1]
fz = face[:, 2]
# Find the axis with the smallest spread and remove it to make 2D
if np.std(fx) < np.std(fy) and np.std(fx) < np.std(fz):
f2d = np.vstack((fy, fz)).T # depends on [control=['if'], data=[]]
elif np.std(fy) < np.std(fx) and np.std(fy) < np.std(fz):
f2d = np.vstack((fx, fz)).T # depends on [control=['if'], data=[]]
else:
f2d = np.vstack((fx, fy)).T
hull = sptl.ConvexHull(f2d, qhull_options='QJ Pp')
face = np.around(face[hull.vertices].astype(float), 6)
for i in range(len(face)):
vec = face[i] - face[i - 1]
vec_length = np.linalg.norm(vec)
increments = np.ceil(vec_length / dx)
check_p_old = np.array([-1, -1, -1])
for x in np.linspace(0, 1, increments):
check_p_new = face[i - 1] + vec * x
if np.sum(check_p_new - check_p_old) != 0:
line_points.append(check_p_new)
check_p_old = check_p_new # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['x']] # depends on [control=['for'], data=['i']] # depends on [control=['for'], data=['face']]
return np.asarray(line_points) |
def selection_range(self): # XXX: shouldn't this return `None` if there is no selection???
    """
    Return (from, to) tuple of the selection.
    start and end position are included.
    This doesn't take the selection type into account. Use
    `selection_ranges` instead.
    """
    if not self.selection:
        return self.cursor_position, self.cursor_position
    bounds = sorted([self.cursor_position,
                     self.selection.original_cursor_position])
    return bounds[0], bounds[1]
constant[
Return (from, to) tuple of the selection.
start and end position are included.
This doesn't take the selection type into account. Use
`selection_ranges` instead.
]
if name[self].selection begin[:]
<ast.Tuple object at 0x7da18f00faf0> assign[=] call[name[sorted], parameter[list[[<ast.Attribute object at 0x7da18f00d630>, <ast.Attribute object at 0x7da18f00c610>]]]]
return[tuple[[<ast.Name object at 0x7da18f00f160>, <ast.Name object at 0x7da18f00c0a0>]]] | keyword[def] identifier[selection_range] ( identifier[self] ):
literal[string]
keyword[if] identifier[self] . identifier[selection] :
identifier[from_] , identifier[to] = identifier[sorted] ([ identifier[self] . identifier[cursor_position] , identifier[self] . identifier[selection] . identifier[original_cursor_position] ])
keyword[else] :
identifier[from_] , identifier[to] = identifier[self] . identifier[cursor_position] , identifier[self] . identifier[cursor_position]
keyword[return] identifier[from_] , identifier[to] | def selection_range(self): # XXX: shouldn't this return `None` if there is no selection???
"\n Return (from, to) tuple of the selection.\n start and end position are included.\n\n This doesn't take the selection type into account. Use\n `selection_ranges` instead.\n "
if self.selection:
(from_, to) = sorted([self.cursor_position, self.selection.original_cursor_position]) # depends on [control=['if'], data=[]]
else:
(from_, to) = (self.cursor_position, self.cursor_position)
return (from_, to) |
def excerpts(n_samples, n_excerpts=None, excerpt_size=None):
    """Yield (start, end) where start is included and end is excluded."""
    # At least two excerpts are required for a meaningful step size.
    assert n_excerpts >= 2
    step = _excerpt_step(n_samples,
                         n_excerpts=n_excerpts,
                         excerpt_size=excerpt_size)
    for index in range(n_excerpts):
        begin = index * step
        if begin >= n_samples:
            # Remaining excerpts would start past the end of the data.
            break
        # Clamp the excerpt so it never runs past the last sample.
        yield begin, min(begin + excerpt_size, n_samples)
constant[Yield (start, end) where start is included and end is excluded.]
assert[compare[name[n_excerpts] greater_or_equal[>=] constant[2]]]
variable[step] assign[=] call[name[_excerpt_step], parameter[name[n_samples]]]
for taget[name[i]] in starred[call[name[range], parameter[name[n_excerpts]]]] begin[:]
variable[start] assign[=] binary_operation[name[i] * name[step]]
if compare[name[start] greater_or_equal[>=] name[n_samples]] begin[:]
break
variable[end] assign[=] call[name[min], parameter[binary_operation[name[start] + name[excerpt_size]], name[n_samples]]]
<ast.Yield object at 0x7da1b12f32e0> | keyword[def] identifier[excerpts] ( identifier[n_samples] , identifier[n_excerpts] = keyword[None] , identifier[excerpt_size] = keyword[None] ):
literal[string]
keyword[assert] identifier[n_excerpts] >= literal[int]
identifier[step] = identifier[_excerpt_step] ( identifier[n_samples] ,
identifier[n_excerpts] = identifier[n_excerpts] ,
identifier[excerpt_size] = identifier[excerpt_size] )
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[n_excerpts] ):
identifier[start] = identifier[i] * identifier[step]
keyword[if] identifier[start] >= identifier[n_samples] :
keyword[break]
identifier[end] = identifier[min] ( identifier[start] + identifier[excerpt_size] , identifier[n_samples] )
keyword[yield] identifier[start] , identifier[end] | def excerpts(n_samples, n_excerpts=None, excerpt_size=None):
"""Yield (start, end) where start is included and end is excluded."""
assert n_excerpts >= 2
step = _excerpt_step(n_samples, n_excerpts=n_excerpts, excerpt_size=excerpt_size)
for i in range(n_excerpts):
start = i * step
if start >= n_samples:
break # depends on [control=['if'], data=[]]
end = min(start + excerpt_size, n_samples)
yield (start, end) # depends on [control=['for'], data=['i']] |
def sliding_tensor(mv_time_series, width, step, order='F'):
    '''
    segments multivariate time series with sliding window

    Parameters
    ----------
    mv_time_series : array like shape [n_samples, n_variables]
        multivariate time series or sequence
    width : int > 0
        segment width in samples
    step : int > 0
        stepsize for sliding in samples

    Returns
    -------
    data : array like shape [n_segments, width, n_variables]
        segmented multivariate time series data
    '''
    n_variables = mv_time_series.shape[1]
    # Segment each variable (column) independently, then stack the
    # per-variable windows along a new trailing axis.
    segments = []
    for var_idx in range(n_variables):
        segments.append(sliding_window(mv_time_series[:, var_idx], width, step, order))
    return np.stack(segments, axis=2)
constant[
segments multivariate time series with sliding window
Parameters
----------
mv_time_series : array like shape [n_samples, n_variables]
multivariate time series or sequence
width : int > 0
segment width in samples
step : int > 0
stepsize for sliding in samples
Returns
-------
data : array like shape [n_segments, width, n_variables]
segmented multivariate time series data
]
variable[D] assign[=] call[name[mv_time_series].shape][constant[1]]
variable[data] assign[=] <ast.ListComp object at 0x7da204347100>
return[call[name[np].stack, parameter[name[data]]]] | keyword[def] identifier[sliding_tensor] ( identifier[mv_time_series] , identifier[width] , identifier[step] , identifier[order] = literal[string] ):
literal[string]
identifier[D] = identifier[mv_time_series] . identifier[shape] [ literal[int] ]
identifier[data] =[ identifier[sliding_window] ( identifier[mv_time_series] [:, identifier[j] ], identifier[width] , identifier[step] , identifier[order] ) keyword[for] identifier[j] keyword[in] identifier[range] ( identifier[D] )]
keyword[return] identifier[np] . identifier[stack] ( identifier[data] , identifier[axis] = literal[int] ) | def sliding_tensor(mv_time_series, width, step, order='F'):
"""
segments multivariate time series with sliding window
Parameters
----------
mv_time_series : array like shape [n_samples, n_variables]
multivariate time series or sequence
width : int > 0
segment width in samples
step : int > 0
stepsize for sliding in samples
Returns
-------
data : array like shape [n_segments, width, n_variables]
segmented multivariate time series data
"""
D = mv_time_series.shape[1]
data = [sliding_window(mv_time_series[:, j], width, step, order) for j in range(D)]
return np.stack(data, axis=2) |
def add_profile_point(self,
                      value,
                      source='',
                      reference='',
                      method='',
                      ticket='',
                      campaign=None,
                      confidence=None,
                      bucket_list=None):
    """
    Add a profile point to CRITs.

    Args:
        value: The profile point itself
        source: Source of the information
        reference: A reference where more information can be found
        method: The method for adding this profile point
        ticket: A ticket associated with this profile point
        campaign: If the profile point has a campaign, add it here
        confidence: The confidence this profile point belongs to the
            given campaign
        bucket_list: Bucket list items for this profile point
            (a list of strings; defaults to none)

    Returns:
        JSON object for the profile point or None if it failed.
    """
    # Bug fix: the ``method`` argument was previously discarded (the
    # payload hard-coded '').  ``bucket_list`` also used a mutable
    # default ([]); it now defaults to None, which is equivalent for
    # callers.
    data = {
        'api_key': self.api_key,
        'username': self.username,
        'source': source,
        'reference': reference,
        'method': method,
        'campaign': campaign,
        'confidence': confidence,
        'bucket_list': ','.join(bucket_list or []),
        'ticket': ticket,
        'value': value,
    }

    r = requests.post("{0}/profile_points/".format(self.url), data=data,
                      verify=self.verify, proxies=self.proxies)
    if r.status_code == 200:
        log.debug("Profile Point uploaded successfully - {}".format(value))
        pp = json.loads(r.text)
        return pp
    # Non-200 response: upload failed.
    return None
constant[
Add an indicator to CRITs
Args:
value: The profile point itself
source: Source of the information
reference: A reference where more information can be found
method: The method for adding this indicator
campaign: If the indicator has a campaign, add it here
confidence: The confidence this indicator belongs to the given
campaign
bucket_list: Bucket list items for this indicator
ticket: A ticket associated with this indicator
Returns:
JSON object for the indicator or None if it failed.
]
variable[data] assign[=] dictionary[[<ast.Constant object at 0x7da2045643a0>, <ast.Constant object at 0x7da204564be0>, <ast.Constant object at 0x7da204567130>, <ast.Constant object at 0x7da204565c60>, <ast.Constant object at 0x7da204565ed0>, <ast.Constant object at 0x7da2045640a0>, <ast.Constant object at 0x7da204564ca0>, <ast.Constant object at 0x7da2045657b0>, <ast.Constant object at 0x7da2045661d0>, <ast.Constant object at 0x7da204566bc0>], [<ast.Attribute object at 0x7da204567190>, <ast.Attribute object at 0x7da2045663e0>, <ast.Name object at 0x7da204567280>, <ast.Name object at 0x7da204566230>, <ast.Constant object at 0x7da2045652a0>, <ast.Name object at 0x7da204567040>, <ast.Name object at 0x7da2045645e0>, <ast.Call object at 0x7da204565540>, <ast.Name object at 0x7da204564490>, <ast.Name object at 0x7da204566b60>]]
variable[r] assign[=] call[name[requests].post, parameter[call[constant[{0}/profile_points/].format, parameter[name[self].url]]]]
if compare[name[r].status_code equal[==] constant[200]] begin[:]
call[name[log].debug, parameter[call[constant[Profile Point uploaded successfully - {}].format, parameter[name[value]]]]]
variable[pp] assign[=] call[name[json].loads, parameter[name[r].text]]
return[name[pp]]
return[constant[None]] | keyword[def] identifier[add_profile_point] ( identifier[self] ,
identifier[value] ,
identifier[source] = literal[string] ,
identifier[reference] = literal[string] ,
identifier[method] = literal[string] ,
identifier[ticket] = literal[string] ,
identifier[campaign] = keyword[None] ,
identifier[confidence] = keyword[None] ,
identifier[bucket_list] =[]):
literal[string]
identifier[data] ={
literal[string] : identifier[self] . identifier[api_key] ,
literal[string] : identifier[self] . identifier[username] ,
literal[string] : identifier[source] ,
literal[string] : identifier[reference] ,
literal[string] : literal[string] ,
literal[string] : identifier[campaign] ,
literal[string] : identifier[confidence] ,
literal[string] : literal[string] . identifier[join] ( identifier[bucket_list] ),
literal[string] : identifier[ticket] ,
literal[string] : identifier[value] ,
}
identifier[r] = identifier[requests] . identifier[post] ( literal[string] . identifier[format] ( identifier[self] . identifier[url] ), identifier[data] = identifier[data] ,
identifier[verify] = identifier[self] . identifier[verify] , identifier[proxies] = identifier[self] . identifier[proxies] )
keyword[if] identifier[r] . identifier[status_code] == literal[int] :
identifier[log] . identifier[debug] ( literal[string] . identifier[format] ( identifier[value] ))
identifier[pp] = identifier[json] . identifier[loads] ( identifier[r] . identifier[text] )
keyword[return] identifier[pp]
keyword[return] keyword[None] | def add_profile_point(self, value, source='', reference='', method='', ticket='', campaign=None, confidence=None, bucket_list=[]):
"""
Add an indicator to CRITs
Args:
value: The profile point itself
source: Source of the information
reference: A reference where more information can be found
method: The method for adding this indicator
campaign: If the indicator has a campaign, add it here
confidence: The confidence this indicator belongs to the given
campaign
bucket_list: Bucket list items for this indicator
ticket: A ticket associated with this indicator
Returns:
JSON object for the indicator or None if it failed.
"""
# Time to upload these indicators
data = {'api_key': self.api_key, 'username': self.username, 'source': source, 'reference': reference, 'method': '', 'campaign': campaign, 'confidence': confidence, 'bucket_list': ','.join(bucket_list), 'ticket': ticket, 'value': value}
r = requests.post('{0}/profile_points/'.format(self.url), data=data, verify=self.verify, proxies=self.proxies)
if r.status_code == 200:
log.debug('Profile Point uploaded successfully - {}'.format(value))
pp = json.loads(r.text)
return pp # depends on [control=['if'], data=[]]
return None |
def create_sensor(sensor_id, sensor, async_set_state_callback):
    """Simplify creating sensor by not needing to know type."""
    # Ordered dispatch table; iteration order matches the original
    # if-chain so overlapping type sets resolve identically.
    dispatch = (
        (CONSUMPTION, Consumption),
        (CARBONMONOXIDE, CarbonMonoxide),
        (DAYLIGHT, Daylight),
        (FIRE, Fire),
        (GENERICFLAG, GenericFlag),
        (GENERICSTATUS, GenericStatus),
        (HUMIDITY, Humidity),
        (LIGHTLEVEL, LightLevel),
        (OPENCLOSE, OpenClose),
        (POWER, Power),
        (PRESENCE, Presence),
        (PRESSURE, Pressure),
        (SWITCH, Switch),
        (TEMPERATURE, Temperature),
        (THERMOSTAT, Thermostat),
        (VIBRATION, Vibration),
        (WATER, Water),
    )
    sensor_type = sensor['type']
    for type_group, sensor_class in dispatch:
        if sensor_type in type_group:
            if sensor_class is Thermostat:
                # Thermostats are the only sensors that can be written
                # to, so they take the set-state callback.
                return sensor_class(sensor_id, sensor, async_set_state_callback)
            return sensor_class(sensor_id, sensor)
    # Unknown sensor type: fall through with None, as before.
    return None
constant[Simplify creating sensor by not needing to know type.]
if compare[call[name[sensor]][constant[type]] in name[CONSUMPTION]] begin[:]
return[call[name[Consumption], parameter[name[sensor_id], name[sensor]]]]
if compare[call[name[sensor]][constant[type]] in name[CARBONMONOXIDE]] begin[:]
return[call[name[CarbonMonoxide], parameter[name[sensor_id], name[sensor]]]]
if compare[call[name[sensor]][constant[type]] in name[DAYLIGHT]] begin[:]
return[call[name[Daylight], parameter[name[sensor_id], name[sensor]]]]
if compare[call[name[sensor]][constant[type]] in name[FIRE]] begin[:]
return[call[name[Fire], parameter[name[sensor_id], name[sensor]]]]
if compare[call[name[sensor]][constant[type]] in name[GENERICFLAG]] begin[:]
return[call[name[GenericFlag], parameter[name[sensor_id], name[sensor]]]]
if compare[call[name[sensor]][constant[type]] in name[GENERICSTATUS]] begin[:]
return[call[name[GenericStatus], parameter[name[sensor_id], name[sensor]]]]
if compare[call[name[sensor]][constant[type]] in name[HUMIDITY]] begin[:]
return[call[name[Humidity], parameter[name[sensor_id], name[sensor]]]]
if compare[call[name[sensor]][constant[type]] in name[LIGHTLEVEL]] begin[:]
return[call[name[LightLevel], parameter[name[sensor_id], name[sensor]]]]
if compare[call[name[sensor]][constant[type]] in name[OPENCLOSE]] begin[:]
return[call[name[OpenClose], parameter[name[sensor_id], name[sensor]]]]
if compare[call[name[sensor]][constant[type]] in name[POWER]] begin[:]
return[call[name[Power], parameter[name[sensor_id], name[sensor]]]]
if compare[call[name[sensor]][constant[type]] in name[PRESENCE]] begin[:]
return[call[name[Presence], parameter[name[sensor_id], name[sensor]]]]
if compare[call[name[sensor]][constant[type]] in name[PRESSURE]] begin[:]
return[call[name[Pressure], parameter[name[sensor_id], name[sensor]]]]
if compare[call[name[sensor]][constant[type]] in name[SWITCH]] begin[:]
return[call[name[Switch], parameter[name[sensor_id], name[sensor]]]]
if compare[call[name[sensor]][constant[type]] in name[TEMPERATURE]] begin[:]
return[call[name[Temperature], parameter[name[sensor_id], name[sensor]]]]
if compare[call[name[sensor]][constant[type]] in name[THERMOSTAT]] begin[:]
return[call[name[Thermostat], parameter[name[sensor_id], name[sensor], name[async_set_state_callback]]]]
if compare[call[name[sensor]][constant[type]] in name[VIBRATION]] begin[:]
return[call[name[Vibration], parameter[name[sensor_id], name[sensor]]]]
if compare[call[name[sensor]][constant[type]] in name[WATER]] begin[:]
return[call[name[Water], parameter[name[sensor_id], name[sensor]]]] | keyword[def] identifier[create_sensor] ( identifier[sensor_id] , identifier[sensor] , identifier[async_set_state_callback] ):
literal[string]
keyword[if] identifier[sensor] [ literal[string] ] keyword[in] identifier[CONSUMPTION] :
keyword[return] identifier[Consumption] ( identifier[sensor_id] , identifier[sensor] )
keyword[if] identifier[sensor] [ literal[string] ] keyword[in] identifier[CARBONMONOXIDE] :
keyword[return] identifier[CarbonMonoxide] ( identifier[sensor_id] , identifier[sensor] )
keyword[if] identifier[sensor] [ literal[string] ] keyword[in] identifier[DAYLIGHT] :
keyword[return] identifier[Daylight] ( identifier[sensor_id] , identifier[sensor] )
keyword[if] identifier[sensor] [ literal[string] ] keyword[in] identifier[FIRE] :
keyword[return] identifier[Fire] ( identifier[sensor_id] , identifier[sensor] )
keyword[if] identifier[sensor] [ literal[string] ] keyword[in] identifier[GENERICFLAG] :
keyword[return] identifier[GenericFlag] ( identifier[sensor_id] , identifier[sensor] )
keyword[if] identifier[sensor] [ literal[string] ] keyword[in] identifier[GENERICSTATUS] :
keyword[return] identifier[GenericStatus] ( identifier[sensor_id] , identifier[sensor] )
keyword[if] identifier[sensor] [ literal[string] ] keyword[in] identifier[HUMIDITY] :
keyword[return] identifier[Humidity] ( identifier[sensor_id] , identifier[sensor] )
keyword[if] identifier[sensor] [ literal[string] ] keyword[in] identifier[LIGHTLEVEL] :
keyword[return] identifier[LightLevel] ( identifier[sensor_id] , identifier[sensor] )
keyword[if] identifier[sensor] [ literal[string] ] keyword[in] identifier[OPENCLOSE] :
keyword[return] identifier[OpenClose] ( identifier[sensor_id] , identifier[sensor] )
keyword[if] identifier[sensor] [ literal[string] ] keyword[in] identifier[POWER] :
keyword[return] identifier[Power] ( identifier[sensor_id] , identifier[sensor] )
keyword[if] identifier[sensor] [ literal[string] ] keyword[in] identifier[PRESENCE] :
keyword[return] identifier[Presence] ( identifier[sensor_id] , identifier[sensor] )
keyword[if] identifier[sensor] [ literal[string] ] keyword[in] identifier[PRESSURE] :
keyword[return] identifier[Pressure] ( identifier[sensor_id] , identifier[sensor] )
keyword[if] identifier[sensor] [ literal[string] ] keyword[in] identifier[SWITCH] :
keyword[return] identifier[Switch] ( identifier[sensor_id] , identifier[sensor] )
keyword[if] identifier[sensor] [ literal[string] ] keyword[in] identifier[TEMPERATURE] :
keyword[return] identifier[Temperature] ( identifier[sensor_id] , identifier[sensor] )
keyword[if] identifier[sensor] [ literal[string] ] keyword[in] identifier[THERMOSTAT] :
keyword[return] identifier[Thermostat] ( identifier[sensor_id] , identifier[sensor] , identifier[async_set_state_callback] )
keyword[if] identifier[sensor] [ literal[string] ] keyword[in] identifier[VIBRATION] :
keyword[return] identifier[Vibration] ( identifier[sensor_id] , identifier[sensor] )
keyword[if] identifier[sensor] [ literal[string] ] keyword[in] identifier[WATER] :
keyword[return] identifier[Water] ( identifier[sensor_id] , identifier[sensor] ) | def create_sensor(sensor_id, sensor, async_set_state_callback):
"""Simplify creating sensor by not needing to know type."""
if sensor['type'] in CONSUMPTION:
return Consumption(sensor_id, sensor) # depends on [control=['if'], data=[]]
if sensor['type'] in CARBONMONOXIDE:
return CarbonMonoxide(sensor_id, sensor) # depends on [control=['if'], data=[]]
if sensor['type'] in DAYLIGHT:
return Daylight(sensor_id, sensor) # depends on [control=['if'], data=[]]
if sensor['type'] in FIRE:
return Fire(sensor_id, sensor) # depends on [control=['if'], data=[]]
if sensor['type'] in GENERICFLAG:
return GenericFlag(sensor_id, sensor) # depends on [control=['if'], data=[]]
if sensor['type'] in GENERICSTATUS:
return GenericStatus(sensor_id, sensor) # depends on [control=['if'], data=[]]
if sensor['type'] in HUMIDITY:
return Humidity(sensor_id, sensor) # depends on [control=['if'], data=[]]
if sensor['type'] in LIGHTLEVEL:
return LightLevel(sensor_id, sensor) # depends on [control=['if'], data=[]]
if sensor['type'] in OPENCLOSE:
return OpenClose(sensor_id, sensor) # depends on [control=['if'], data=[]]
if sensor['type'] in POWER:
return Power(sensor_id, sensor) # depends on [control=['if'], data=[]]
if sensor['type'] in PRESENCE:
return Presence(sensor_id, sensor) # depends on [control=['if'], data=[]]
if sensor['type'] in PRESSURE:
return Pressure(sensor_id, sensor) # depends on [control=['if'], data=[]]
if sensor['type'] in SWITCH:
return Switch(sensor_id, sensor) # depends on [control=['if'], data=[]]
if sensor['type'] in TEMPERATURE:
return Temperature(sensor_id, sensor) # depends on [control=['if'], data=[]]
if sensor['type'] in THERMOSTAT:
return Thermostat(sensor_id, sensor, async_set_state_callback) # depends on [control=['if'], data=[]]
if sensor['type'] in VIBRATION:
return Vibration(sensor_id, sensor) # depends on [control=['if'], data=[]]
if sensor['type'] in WATER:
return Water(sensor_id, sensor) # depends on [control=['if'], data=[]] |
def get_polypeptide_within(self, chain_id, resnum, angstroms, only_protein=True,
                           use_ca=False, custom_coord=None, return_resnums=False):
    """Get a Polypeptide object of the amino acids within X angstroms of the specified chain + residue number.

    Args:
        chain_id (str): Chain ID of the residue number
        resnum (int): Residue number of the structure
        angstroms (float): Radius of the search sphere
        only_protein (bool): If only protein atoms (no HETATMS) should be included in the returned sequence
        use_ca (bool): If the alpha-carbon atom should be used for searching, default is False (last atom of residue used)
        custom_coord (list): custom XYZ coord
        return_resnums (bool): if a list of residue numbers should also be returned

    Returns:
        Bio.PDB.Polypeptide.Polypeptide: Biopython Polypeptide object
        (and, when ``return_resnums`` is True, a list of ints as well)
    """
    # Reuse an already-parsed structure when available.
    parsed = self.structure if self.structure else self.parse_structure()

    nearby = ssbio.protein.structure.properties.residues.within(
        resnum=resnum, chain_id=chain_id, model=parsed.first_model,
        angstroms=angstroms, use_ca=use_ca, custom_coord=custom_coord)

    if only_protein:
        # Hetero-flag ' ' marks standard (non-HETATM) residues.
        nearby = [res for res in nearby if res.id[0] == ' ']

    polypeptide = Polypeptide(nearby)
    if return_resnums:
        return polypeptide, [int(res.id[1]) for res in nearby]
    return polypeptide
constant[Get a Polypeptide object of the amino acids within X angstroms of the specified chain + residue number.
Args:
resnum (int): Residue number of the structure
chain_id (str): Chain ID of the residue number
angstroms (float): Radius of the search sphere
only_protein (bool): If only protein atoms (no HETATMS) should be included in the returned sequence
use_ca (bool): If the alpha-carbon atom should be used for searching, default is False (last atom of residue used)
custom_coord (list): custom XYZ coord
return_resnums (bool): if list of resnums should be returned
Returns:
Bio.PDB.Polypeptide.Polypeptide: Biopython Polypeptide object
]
if name[self].structure begin[:]
variable[parsed] assign[=] name[self].structure
variable[residue_list] assign[=] call[name[ssbio].protein.structure.properties.residues.within, parameter[]]
if name[only_protein] begin[:]
variable[filtered_residue_list] assign[=] <ast.ListComp object at 0x7da1b0ea3190>
variable[residue_list_combined] assign[=] call[name[Polypeptide], parameter[name[filtered_residue_list]]]
if name[return_resnums] begin[:]
variable[resnums] assign[=] <ast.ListComp object at 0x7da1b0c5ae00>
return[tuple[[<ast.Name object at 0x7da1b0c583a0>, <ast.Name object at 0x7da1b0c585e0>]]]
return[name[residue_list_combined]] | keyword[def] identifier[get_polypeptide_within] ( identifier[self] , identifier[chain_id] , identifier[resnum] , identifier[angstroms] , identifier[only_protein] = keyword[True] ,
identifier[use_ca] = keyword[False] , identifier[custom_coord] = keyword[None] , identifier[return_resnums] = keyword[False] ):
literal[string]
keyword[if] identifier[self] . identifier[structure] :
identifier[parsed] = identifier[self] . identifier[structure]
keyword[else] :
identifier[parsed] = identifier[self] . identifier[parse_structure] ()
identifier[residue_list] = identifier[ssbio] . identifier[protein] . identifier[structure] . identifier[properties] . identifier[residues] . identifier[within] ( identifier[resnum] = identifier[resnum] , identifier[chain_id] = identifier[chain_id] ,
identifier[model] = identifier[parsed] . identifier[first_model] ,
identifier[angstroms] = identifier[angstroms] , identifier[use_ca] = identifier[use_ca] ,
identifier[custom_coord] = identifier[custom_coord] )
keyword[if] identifier[only_protein] :
identifier[filtered_residue_list] =[ identifier[x] keyword[for] identifier[x] keyword[in] identifier[residue_list] keyword[if] identifier[x] . identifier[id] [ literal[int] ]== literal[string] ]
keyword[else] :
identifier[filtered_residue_list] = identifier[residue_list]
identifier[residue_list_combined] = identifier[Polypeptide] ( identifier[filtered_residue_list] )
keyword[if] identifier[return_resnums] :
identifier[resnums] =[ identifier[int] ( identifier[x] . identifier[id] [ literal[int] ]) keyword[for] identifier[x] keyword[in] identifier[filtered_residue_list] ]
keyword[return] identifier[residue_list_combined] , identifier[resnums]
keyword[return] identifier[residue_list_combined] | def get_polypeptide_within(self, chain_id, resnum, angstroms, only_protein=True, use_ca=False, custom_coord=None, return_resnums=False):
"""Get a Polypeptide object of the amino acids within X angstroms of the specified chain + residue number.
Args:
resnum (int): Residue number of the structure
chain_id (str): Chain ID of the residue number
angstroms (float): Radius of the search sphere
only_protein (bool): If only protein atoms (no HETATMS) should be included in the returned sequence
use_ca (bool): If the alpha-carbon atom should be used for searching, default is False (last atom of residue used)
custom_coord (list): custom XYZ coord
return_resnums (bool): if list of resnums should be returned
Returns:
Bio.PDB.Polypeptide.Polypeptide: Biopython Polypeptide object
"""
# XTODO: documentation, unit test
if self.structure:
parsed = self.structure # depends on [control=['if'], data=[]]
else:
parsed = self.parse_structure()
residue_list = ssbio.protein.structure.properties.residues.within(resnum=resnum, chain_id=chain_id, model=parsed.first_model, angstroms=angstroms, use_ca=use_ca, custom_coord=custom_coord)
if only_protein:
filtered_residue_list = [x for x in residue_list if x.id[0] == ' '] # depends on [control=['if'], data=[]]
else:
filtered_residue_list = residue_list
residue_list_combined = Polypeptide(filtered_residue_list)
if return_resnums:
resnums = [int(x.id[1]) for x in filtered_residue_list]
return (residue_list_combined, resnums) # depends on [control=['if'], data=[]]
return residue_list_combined |
def get_palette_pdf_string(self):
    "Returns palette pre-formatted for use in PDF"
    # Hex dump layout: '< ' prefix, ' rrggbb' per color, '>' suffix.
    per_color = len(' rrggbb')
    total_len = len('< ') + per_color * self._cdata.ncolors + len('>')
    return bytes(ffi.buffer(self._cdata.cmapdatahex, total_len))
constant[Returns palette pre-formatted for use in PDF]
variable[buflen] assign[=] binary_operation[binary_operation[call[name[len], parameter[constant[< ]]] + binary_operation[call[name[len], parameter[constant[ rrggbb]]] * name[self]._cdata.ncolors]] + call[name[len], parameter[constant[>]]]]
variable[buf] assign[=] call[name[ffi].buffer, parameter[name[self]._cdata.cmapdatahex, name[buflen]]]
return[call[name[bytes], parameter[name[buf]]]] | keyword[def] identifier[get_palette_pdf_string] ( identifier[self] ):
literal[string]
identifier[buflen] = identifier[len] ( literal[string] )+ identifier[len] ( literal[string] )* identifier[self] . identifier[_cdata] . identifier[ncolors] + identifier[len] ( literal[string] )
identifier[buf] = identifier[ffi] . identifier[buffer] ( identifier[self] . identifier[_cdata] . identifier[cmapdatahex] , identifier[buflen] )
keyword[return] identifier[bytes] ( identifier[buf] ) | def get_palette_pdf_string(self):
"""Returns palette pre-formatted for use in PDF"""
buflen = len('< ') + len(' rrggbb') * self._cdata.ncolors + len('>')
buf = ffi.buffer(self._cdata.cmapdatahex, buflen)
return bytes(buf) |
def _ref_has_region_assembled_twice(nucmer_hits, ref_seq, threshold):
    '''Returns true iff there is a part of the reference that is assembled
    more than once (ie covered by >1 nucmer hit).
    Needs a minimum proportion of the ref to be assembled more than once,
    determined by threshold.
    nucmer_hits = hits made by self._parse_nucmer_coords_file.'''
    ref_coords = AssemblyCompare.nucmer_hits_to_ref_coords(nucmer_hits)

    # Flatten the per-contig interval lists into one sorted list.
    all_intervals = []
    for interval_list in ref_coords.values():
        all_intervals.extend(interval_list)
    all_intervals.sort()

    if len(all_intervals) < 2:
        # A single hit can never cover any base twice.
        return False

    # Per-base hit depth across the reference (intervals are inclusive).
    depth = {}
    for interval in all_intervals:
        for position in range(interval.start, interval.end + 1):
            depth[position] = depth.get(position, 0) + 1

    multi_covered = sum(1 for hits in depth.values() if hits > 1)
    return multi_covered / len(ref_seq) >= threshold
constant[Returns true iff there is a part of the reference that is assembled
more than once (ie covered by >1 nucmer hit).
Needs a minimum proportin of the ref to be assembled more than once,
determined by threshold.
nucmer_hits = hits made by self._parse_nucmer_coords_file.]
variable[coords] assign[=] call[name[AssemblyCompare].nucmer_hits_to_ref_coords, parameter[name[nucmer_hits]]]
variable[covered] assign[=] list[[]]
for taget[name[coords_list]] in starred[call[name[coords].values, parameter[]]] begin[:]
call[name[covered].extend, parameter[name[coords_list]]]
call[name[covered].sort, parameter[]]
if compare[call[name[len], parameter[name[covered]]] less_or_equal[<=] constant[1]] begin[:]
return[constant[False]]
variable[coverage] assign[=] dictionary[[], []]
for taget[name[i]] in starred[name[covered]] begin[:]
for taget[name[j]] in starred[call[name[range], parameter[name[i].start, binary_operation[name[i].end + constant[1]]]]] begin[:]
call[name[coverage]][name[j]] assign[=] binary_operation[call[name[coverage].get, parameter[name[j], constant[0]]] + constant[1]]
variable[bases_depth_at_least_two] assign[=] call[name[len], parameter[<ast.ListComp object at 0x7da18fe93c10>]]
return[compare[binary_operation[name[bases_depth_at_least_two] / call[name[len], parameter[name[ref_seq]]]] greater_or_equal[>=] name[threshold]]] | keyword[def] identifier[_ref_has_region_assembled_twice] ( identifier[nucmer_hits] , identifier[ref_seq] , identifier[threshold] ):
literal[string]
identifier[coords] = identifier[AssemblyCompare] . identifier[nucmer_hits_to_ref_coords] ( identifier[nucmer_hits] )
identifier[covered] =[]
keyword[for] identifier[coords_list] keyword[in] identifier[coords] . identifier[values] ():
identifier[covered] . identifier[extend] ( identifier[coords_list] )
identifier[covered] . identifier[sort] ()
keyword[if] identifier[len] ( identifier[covered] )<= literal[int] :
keyword[return] keyword[False]
identifier[coverage] ={}
keyword[for] identifier[i] keyword[in] identifier[covered] :
keyword[for] identifier[j] keyword[in] identifier[range] ( identifier[i] . identifier[start] , identifier[i] . identifier[end] + literal[int] ):
identifier[coverage] [ identifier[j] ]= identifier[coverage] . identifier[get] ( identifier[j] , literal[int] )+ literal[int]
identifier[bases_depth_at_least_two] = identifier[len] ([ literal[int] keyword[for] identifier[x] keyword[in] identifier[coverage] . identifier[values] () keyword[if] identifier[x] > literal[int] ])
keyword[return] identifier[bases_depth_at_least_two] / identifier[len] ( identifier[ref_seq] )>= identifier[threshold] | def _ref_has_region_assembled_twice(nucmer_hits, ref_seq, threshold):
"""Returns true iff there is a part of the reference that is assembled
more than once (ie covered by >1 nucmer hit).
Needs a minimum proportin of the ref to be assembled more than once,
determined by threshold.
nucmer_hits = hits made by self._parse_nucmer_coords_file."""
coords = AssemblyCompare.nucmer_hits_to_ref_coords(nucmer_hits)
covered = []
for coords_list in coords.values():
covered.extend(coords_list) # depends on [control=['for'], data=['coords_list']]
covered.sort()
if len(covered) <= 1:
return False # depends on [control=['if'], data=[]]
coverage = {}
for i in covered:
for j in range(i.start, i.end + 1):
coverage[j] = coverage.get(j, 0) + 1 # depends on [control=['for'], data=['j']] # depends on [control=['for'], data=['i']]
bases_depth_at_least_two = len([1 for x in coverage.values() if x > 1])
return bases_depth_at_least_two / len(ref_seq) >= threshold |
def write(self, *args):
"""See ConfigParser.write(). Also writes secure items to keystore."""
ConfigParser.write(self, *args)
if self.keyring_available:
for key, thing in self._unsaved.items():
action = thing[0]
value = thing[1]
if action == 'set':
keyring.set_password(self.keyring_name, key, value)
elif action == 'delete':
try:
keyring.delete_password(self.keyring_name, key)
except:
pass
self._unsaved = {} | def function[write, parameter[self]]:
constant[See ConfigParser.write(). Also writes secure items to keystore.]
call[name[ConfigParser].write, parameter[name[self], <ast.Starred object at 0x7da18dc053f0>]]
if name[self].keyring_available begin[:]
for taget[tuple[[<ast.Name object at 0x7da18dc078b0>, <ast.Name object at 0x7da18dc06350>]]] in starred[call[name[self]._unsaved.items, parameter[]]] begin[:]
variable[action] assign[=] call[name[thing]][constant[0]]
variable[value] assign[=] call[name[thing]][constant[1]]
if compare[name[action] equal[==] constant[set]] begin[:]
call[name[keyring].set_password, parameter[name[self].keyring_name, name[key], name[value]]]
name[self]._unsaved assign[=] dictionary[[], []] | keyword[def] identifier[write] ( identifier[self] ,* identifier[args] ):
literal[string]
identifier[ConfigParser] . identifier[write] ( identifier[self] ,* identifier[args] )
keyword[if] identifier[self] . identifier[keyring_available] :
keyword[for] identifier[key] , identifier[thing] keyword[in] identifier[self] . identifier[_unsaved] . identifier[items] ():
identifier[action] = identifier[thing] [ literal[int] ]
identifier[value] = identifier[thing] [ literal[int] ]
keyword[if] identifier[action] == literal[string] :
identifier[keyring] . identifier[set_password] ( identifier[self] . identifier[keyring_name] , identifier[key] , identifier[value] )
keyword[elif] identifier[action] == literal[string] :
keyword[try] :
identifier[keyring] . identifier[delete_password] ( identifier[self] . identifier[keyring_name] , identifier[key] )
keyword[except] :
keyword[pass]
identifier[self] . identifier[_unsaved] ={} | def write(self, *args):
"""See ConfigParser.write(). Also writes secure items to keystore."""
ConfigParser.write(self, *args)
if self.keyring_available:
for (key, thing) in self._unsaved.items():
action = thing[0]
value = thing[1]
if action == 'set':
keyring.set_password(self.keyring_name, key, value) # depends on [control=['if'], data=[]]
elif action == 'delete':
try:
keyring.delete_password(self.keyring_name, key) # depends on [control=['try'], data=[]]
except:
pass # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]] # depends on [control=['if'], data=[]]
self._unsaved = {} |
def parameter_scope(name, scope=None):
"""
Grouping parameters registered by parametric functions
listed in :mod:`nnabla.parametric_functions`.
Args:
name (str): Parameter scope name.
scope (OrderedDict, optional):
Specifiy current parameter scope as a local dictionary.
The default value is ``None``. In this case,
the current parameter scope maintained in global is used.
Example:
.. code-block:: python
import nnabla as nn
import nnabla.parametric_functions as PF
import nnabla.functions as F
with nn.parameter_scope('conv1'):
conv_out1 = PF.convolution(x, 32, (5, 5))
bn_out1 = PF.batch_normalization(conv_out1)
act_out1 = F.relu(bn_out1)
with nn.parameter_scope('conv2'):
conv_out2 = PF.convolution(act_out1, 64, (3, 3))
bn_out2 = PF.batch_normalization(conv_out2)
act_out2 = F.relu(bn_out2)
Nesting `with` blocks allows you to nest parameter scopes.
This can also be done by using "/" inside the parameter names.
Example:
.. code-block:: python
with nn.parameter_scope('network1'):
with nn.parameter_scope('conv1'):
conv_out1 = PF.convolution(x, 32, (5, 5))
bn_out1 = PF.batch_normalization(conv_out1)
act_out1 = F.relu(bn_out1)
with nn.parameter_scope('conv2'):
conv_out2 = PF.convolution(act_out1, 64, (3, 3))
bn_out2 = PF.batch_normalization(conv_out2)
act_out2 = F.relu(bn_out2)
is equivalent to
.. code-block:: python
with nn.parameter_scope('network1/conv1'):
conv_out1 = PF.convolution(x, 32, (5, 5))
bn_out1 = PF.batch_normalization(conv_out1)
act_out1 = F.relu(bn_out1)
with nn.parameter_scope('network1/conv2'):
conv_out2 = PF.convolution(act_out1, 64, (3, 3))
bn_out2 = PF.batch_normalization(conv_out2)
act_out2 = F.relu(bn_out2)
"""
global current_scope
names = name.strip('/').split('/')
if not names:
raise ValueError(
'Invalid argument of parameter_scope("{}").'.format(name))
prev_scope = current_scope
if scope is None:
scope = current_scope
else:
if not isinstance(scope, dict):
raise ValueError(
'Scope must be a dictionary. {} is given.'.format(type(scope)))
for name in names:
parent_scope = scope
# When name is empty, the given scope is used as a current scope.
if name:
# Creates a new scope dict if it doesn't exist.
# `dict.get` returns default value (OrderedDict())
# if scope contains `name`
scope = scope.get(name, OrderedDict())
assert isinstance(scope, dict)
parent_scope[name] = scope
current_scope = scope
try:
yield current_scope
finally:
current_scope = prev_scope | def function[parameter_scope, parameter[name, scope]]:
constant[
Grouping parameters registered by parametric functions
listed in :mod:`nnabla.parametric_functions`.
Args:
name (str): Parameter scope name.
scope (OrderedDict, optional):
Specifiy current parameter scope as a local dictionary.
The default value is ``None``. In this case,
the current parameter scope maintained in global is used.
Example:
.. code-block:: python
import nnabla as nn
import nnabla.parametric_functions as PF
import nnabla.functions as F
with nn.parameter_scope('conv1'):
conv_out1 = PF.convolution(x, 32, (5, 5))
bn_out1 = PF.batch_normalization(conv_out1)
act_out1 = F.relu(bn_out1)
with nn.parameter_scope('conv2'):
conv_out2 = PF.convolution(act_out1, 64, (3, 3))
bn_out2 = PF.batch_normalization(conv_out2)
act_out2 = F.relu(bn_out2)
Nesting `with` blocks allows you to nest parameter scopes.
This can also be done by using "/" inside the parameter names.
Example:
.. code-block:: python
with nn.parameter_scope('network1'):
with nn.parameter_scope('conv1'):
conv_out1 = PF.convolution(x, 32, (5, 5))
bn_out1 = PF.batch_normalization(conv_out1)
act_out1 = F.relu(bn_out1)
with nn.parameter_scope('conv2'):
conv_out2 = PF.convolution(act_out1, 64, (3, 3))
bn_out2 = PF.batch_normalization(conv_out2)
act_out2 = F.relu(bn_out2)
is equivalent to
.. code-block:: python
with nn.parameter_scope('network1/conv1'):
conv_out1 = PF.convolution(x, 32, (5, 5))
bn_out1 = PF.batch_normalization(conv_out1)
act_out1 = F.relu(bn_out1)
with nn.parameter_scope('network1/conv2'):
conv_out2 = PF.convolution(act_out1, 64, (3, 3))
bn_out2 = PF.batch_normalization(conv_out2)
act_out2 = F.relu(bn_out2)
]
<ast.Global object at 0x7da20c6aa230>
variable[names] assign[=] call[call[name[name].strip, parameter[constant[/]]].split, parameter[constant[/]]]
if <ast.UnaryOp object at 0x7da20e957790> begin[:]
<ast.Raise object at 0x7da20e954610>
variable[prev_scope] assign[=] name[current_scope]
if compare[name[scope] is constant[None]] begin[:]
variable[scope] assign[=] name[current_scope]
for taget[name[name]] in starred[name[names]] begin[:]
variable[parent_scope] assign[=] name[scope]
if name[name] begin[:]
variable[scope] assign[=] call[name[scope].get, parameter[name[name], call[name[OrderedDict], parameter[]]]]
assert[call[name[isinstance], parameter[name[scope], name[dict]]]]
call[name[parent_scope]][name[name]] assign[=] name[scope]
variable[current_scope] assign[=] name[scope]
<ast.Try object at 0x7da1b26aee90> | keyword[def] identifier[parameter_scope] ( identifier[name] , identifier[scope] = keyword[None] ):
literal[string]
keyword[global] identifier[current_scope]
identifier[names] = identifier[name] . identifier[strip] ( literal[string] ). identifier[split] ( literal[string] )
keyword[if] keyword[not] identifier[names] :
keyword[raise] identifier[ValueError] (
literal[string] . identifier[format] ( identifier[name] ))
identifier[prev_scope] = identifier[current_scope]
keyword[if] identifier[scope] keyword[is] keyword[None] :
identifier[scope] = identifier[current_scope]
keyword[else] :
keyword[if] keyword[not] identifier[isinstance] ( identifier[scope] , identifier[dict] ):
keyword[raise] identifier[ValueError] (
literal[string] . identifier[format] ( identifier[type] ( identifier[scope] )))
keyword[for] identifier[name] keyword[in] identifier[names] :
identifier[parent_scope] = identifier[scope]
keyword[if] identifier[name] :
identifier[scope] = identifier[scope] . identifier[get] ( identifier[name] , identifier[OrderedDict] ())
keyword[assert] identifier[isinstance] ( identifier[scope] , identifier[dict] )
identifier[parent_scope] [ identifier[name] ]= identifier[scope]
identifier[current_scope] = identifier[scope]
keyword[try] :
keyword[yield] identifier[current_scope]
keyword[finally] :
identifier[current_scope] = identifier[prev_scope] | def parameter_scope(name, scope=None):
"""
Grouping parameters registered by parametric functions
listed in :mod:`nnabla.parametric_functions`.
Args:
name (str): Parameter scope name.
scope (OrderedDict, optional):
Specifiy current parameter scope as a local dictionary.
The default value is ``None``. In this case,
the current parameter scope maintained in global is used.
Example:
.. code-block:: python
import nnabla as nn
import nnabla.parametric_functions as PF
import nnabla.functions as F
with nn.parameter_scope('conv1'):
conv_out1 = PF.convolution(x, 32, (5, 5))
bn_out1 = PF.batch_normalization(conv_out1)
act_out1 = F.relu(bn_out1)
with nn.parameter_scope('conv2'):
conv_out2 = PF.convolution(act_out1, 64, (3, 3))
bn_out2 = PF.batch_normalization(conv_out2)
act_out2 = F.relu(bn_out2)
Nesting `with` blocks allows you to nest parameter scopes.
This can also be done by using "/" inside the parameter names.
Example:
.. code-block:: python
with nn.parameter_scope('network1'):
with nn.parameter_scope('conv1'):
conv_out1 = PF.convolution(x, 32, (5, 5))
bn_out1 = PF.batch_normalization(conv_out1)
act_out1 = F.relu(bn_out1)
with nn.parameter_scope('conv2'):
conv_out2 = PF.convolution(act_out1, 64, (3, 3))
bn_out2 = PF.batch_normalization(conv_out2)
act_out2 = F.relu(bn_out2)
is equivalent to
.. code-block:: python
with nn.parameter_scope('network1/conv1'):
conv_out1 = PF.convolution(x, 32, (5, 5))
bn_out1 = PF.batch_normalization(conv_out1)
act_out1 = F.relu(bn_out1)
with nn.parameter_scope('network1/conv2'):
conv_out2 = PF.convolution(act_out1, 64, (3, 3))
bn_out2 = PF.batch_normalization(conv_out2)
act_out2 = F.relu(bn_out2)
"""
global current_scope
names = name.strip('/').split('/')
if not names:
raise ValueError('Invalid argument of parameter_scope("{}").'.format(name)) # depends on [control=['if'], data=[]]
prev_scope = current_scope
if scope is None:
scope = current_scope # depends on [control=['if'], data=['scope']]
elif not isinstance(scope, dict):
raise ValueError('Scope must be a dictionary. {} is given.'.format(type(scope))) # depends on [control=['if'], data=[]]
for name in names:
parent_scope = scope
# When name is empty, the given scope is used as a current scope.
if name:
# Creates a new scope dict if it doesn't exist.
# `dict.get` returns default value (OrderedDict())
# if scope contains `name`
scope = scope.get(name, OrderedDict())
assert isinstance(scope, dict)
parent_scope[name] = scope # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['name']]
current_scope = scope
try:
yield current_scope # depends on [control=['try'], data=[]]
finally:
current_scope = prev_scope |
def lookupSpatialReferenceID(cls, directory, filename):
"""
Look up spatial reference system using the projection file.
Args:
directory (str):
filename (str):
Return:
int: Spatial Reference ID
"""
path = os.path.join(directory, filename)
with open(path, 'r') as f:
srid = lookupSpatialReferenceID(f.read())
return srid | def function[lookupSpatialReferenceID, parameter[cls, directory, filename]]:
constant[
Look up spatial reference system using the projection file.
Args:
directory (str):
filename (str):
Return:
int: Spatial Reference ID
]
variable[path] assign[=] call[name[os].path.join, parameter[name[directory], name[filename]]]
with call[name[open], parameter[name[path], constant[r]]] begin[:]
variable[srid] assign[=] call[name[lookupSpatialReferenceID], parameter[call[name[f].read, parameter[]]]]
return[name[srid]] | keyword[def] identifier[lookupSpatialReferenceID] ( identifier[cls] , identifier[directory] , identifier[filename] ):
literal[string]
identifier[path] = identifier[os] . identifier[path] . identifier[join] ( identifier[directory] , identifier[filename] )
keyword[with] identifier[open] ( identifier[path] , literal[string] ) keyword[as] identifier[f] :
identifier[srid] = identifier[lookupSpatialReferenceID] ( identifier[f] . identifier[read] ())
keyword[return] identifier[srid] | def lookupSpatialReferenceID(cls, directory, filename):
"""
Look up spatial reference system using the projection file.
Args:
directory (str):
filename (str):
Return:
int: Spatial Reference ID
"""
path = os.path.join(directory, filename)
with open(path, 'r') as f:
srid = lookupSpatialReferenceID(f.read()) # depends on [control=['with'], data=['f']]
return srid |
def parse_manifest(template_lines):
"""List of file names included by the MANIFEST.in template lines."""
manifest_files = distutils.filelist.FileList()
for line in template_lines:
if line.strip():
manifest_files.process_template_line(line)
return manifest_files.files | def function[parse_manifest, parameter[template_lines]]:
constant[List of file names included by the MANIFEST.in template lines.]
variable[manifest_files] assign[=] call[name[distutils].filelist.FileList, parameter[]]
for taget[name[line]] in starred[name[template_lines]] begin[:]
if call[name[line].strip, parameter[]] begin[:]
call[name[manifest_files].process_template_line, parameter[name[line]]]
return[name[manifest_files].files] | keyword[def] identifier[parse_manifest] ( identifier[template_lines] ):
literal[string]
identifier[manifest_files] = identifier[distutils] . identifier[filelist] . identifier[FileList] ()
keyword[for] identifier[line] keyword[in] identifier[template_lines] :
keyword[if] identifier[line] . identifier[strip] ():
identifier[manifest_files] . identifier[process_template_line] ( identifier[line] )
keyword[return] identifier[manifest_files] . identifier[files] | def parse_manifest(template_lines):
"""List of file names included by the MANIFEST.in template lines."""
manifest_files = distutils.filelist.FileList()
for line in template_lines:
if line.strip():
manifest_files.process_template_line(line) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['line']]
return manifest_files.files |
def get_hosts_dict(self) -> Dict:
"""
Returns serialized dictionary of hosts from inventory
"""
return {
k: deserializer.inventory.InventoryElement.serialize(v).dict()
for k, v in self.hosts.items()
} | def function[get_hosts_dict, parameter[self]]:
constant[
Returns serialized dictionary of hosts from inventory
]
return[<ast.DictComp object at 0x7da1b1cc0fd0>] | keyword[def] identifier[get_hosts_dict] ( identifier[self] )-> identifier[Dict] :
literal[string]
keyword[return] {
identifier[k] : identifier[deserializer] . identifier[inventory] . identifier[InventoryElement] . identifier[serialize] ( identifier[v] ). identifier[dict] ()
keyword[for] identifier[k] , identifier[v] keyword[in] identifier[self] . identifier[hosts] . identifier[items] ()
} | def get_hosts_dict(self) -> Dict:
"""
Returns serialized dictionary of hosts from inventory
"""
return {k: deserializer.inventory.InventoryElement.serialize(v).dict() for (k, v) in self.hosts.items()} |
def remove(self, priority, observer, callble):
"""
Remove one observer, which had priority and callble.
"""
self.flush()
for i in range(len(self) - 1, -1, -1):
p,o,c = self[i]
if priority==p and observer==o and callble==c:
del self._poc[i] | def function[remove, parameter[self, priority, observer, callble]]:
constant[
Remove one observer, which had priority and callble.
]
call[name[self].flush, parameter[]]
for taget[name[i]] in starred[call[name[range], parameter[binary_operation[call[name[len], parameter[name[self]]] - constant[1]], <ast.UnaryOp object at 0x7da1b0ececb0>, <ast.UnaryOp object at 0x7da1b0ecec50>]]] begin[:]
<ast.Tuple object at 0x7da1b0ecdb70> assign[=] call[name[self]][name[i]]
if <ast.BoolOp object at 0x7da1b0ecf5e0> begin[:]
<ast.Delete object at 0x7da1b0ecf460> | keyword[def] identifier[remove] ( identifier[self] , identifier[priority] , identifier[observer] , identifier[callble] ):
literal[string]
identifier[self] . identifier[flush] ()
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[len] ( identifier[self] )- literal[int] ,- literal[int] ,- literal[int] ):
identifier[p] , identifier[o] , identifier[c] = identifier[self] [ identifier[i] ]
keyword[if] identifier[priority] == identifier[p] keyword[and] identifier[observer] == identifier[o] keyword[and] identifier[callble] == identifier[c] :
keyword[del] identifier[self] . identifier[_poc] [ identifier[i] ] | def remove(self, priority, observer, callble):
"""
Remove one observer, which had priority and callble.
"""
self.flush()
for i in range(len(self) - 1, -1, -1):
(p, o, c) = self[i]
if priority == p and observer == o and (callble == c):
del self._poc[i] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['i']] |
def get_comment_template_name(comment):
"""
Internal function for the rendering of comments.
"""
ctype = ContentType.objects.get_for_id(comment.content_type_id)
return [
"comments/%s/%s/comment.html" % (ctype.app_label, ctype.model),
"comments/%s/comment.html" % ctype.app_label,
"comments/comment.html"
] | def function[get_comment_template_name, parameter[comment]]:
constant[
Internal function for the rendering of comments.
]
variable[ctype] assign[=] call[name[ContentType].objects.get_for_id, parameter[name[comment].content_type_id]]
return[list[[<ast.BinOp object at 0x7da1b043cbb0>, <ast.BinOp object at 0x7da1b043e2f0>, <ast.Constant object at 0x7da1b043f100>]]] | keyword[def] identifier[get_comment_template_name] ( identifier[comment] ):
literal[string]
identifier[ctype] = identifier[ContentType] . identifier[objects] . identifier[get_for_id] ( identifier[comment] . identifier[content_type_id] )
keyword[return] [
literal[string] %( identifier[ctype] . identifier[app_label] , identifier[ctype] . identifier[model] ),
literal[string] % identifier[ctype] . identifier[app_label] ,
literal[string]
] | def get_comment_template_name(comment):
"""
Internal function for the rendering of comments.
"""
ctype = ContentType.objects.get_for_id(comment.content_type_id)
return ['comments/%s/%s/comment.html' % (ctype.app_label, ctype.model), 'comments/%s/comment.html' % ctype.app_label, 'comments/comment.html'] |
def skew(self):
r"""
Skewness coefficient value as a result of an uncertainty calculation,
defined as::
_____ m3
\/beta1 = ------
std**3
where m3 is the third central moment and std is the standard deviation
"""
mn = self.mean
sd = self.std
sk = 0.0 if abs(sd) <= 1e-8 else np.mean((self._mcpts - mn) ** 3) / sd ** 3
return sk | def function[skew, parameter[self]]:
constant[
Skewness coefficient value as a result of an uncertainty calculation,
defined as::
_____ m3
\/beta1 = ------
std**3
where m3 is the third central moment and std is the standard deviation
]
variable[mn] assign[=] name[self].mean
variable[sd] assign[=] name[self].std
variable[sk] assign[=] <ast.IfExp object at 0x7da1b0fdbe20>
return[name[sk]] | keyword[def] identifier[skew] ( identifier[self] ):
literal[string]
identifier[mn] = identifier[self] . identifier[mean]
identifier[sd] = identifier[self] . identifier[std]
identifier[sk] = literal[int] keyword[if] identifier[abs] ( identifier[sd] )<= literal[int] keyword[else] identifier[np] . identifier[mean] (( identifier[self] . identifier[_mcpts] - identifier[mn] )** literal[int] )/ identifier[sd] ** literal[int]
keyword[return] identifier[sk] | def skew(self):
"""
Skewness coefficient value as a result of an uncertainty calculation,
defined as::
_____ m3
\\/beta1 = ------
std**3
where m3 is the third central moment and std is the standard deviation
"""
mn = self.mean
sd = self.std
sk = 0.0 if abs(sd) <= 1e-08 else np.mean((self._mcpts - mn) ** 3) / sd ** 3
return sk |
def assert_on_branch(branch_name):
# type: (str) -> None
""" Print error and exit if *branch_name* is not the current branch.
Args:
branch_name (str):
The supposed name of the current branch.
"""
branch = git.current_branch(refresh=True)
if branch.name != branch_name:
if context.get('pretend', False):
log.info("Would assert that you're on a <33>{}<32> branch",
branch_name)
else:
log.err("You're not on a <33>{}<31> branch!", branch_name)
sys.exit(1) | def function[assert_on_branch, parameter[branch_name]]:
constant[ Print error and exit if *branch_name* is not the current branch.
Args:
branch_name (str):
The supposed name of the current branch.
]
variable[branch] assign[=] call[name[git].current_branch, parameter[]]
if compare[name[branch].name not_equal[!=] name[branch_name]] begin[:]
if call[name[context].get, parameter[constant[pretend], constant[False]]] begin[:]
call[name[log].info, parameter[constant[Would assert that you're on a <33>{}<32> branch], name[branch_name]]] | keyword[def] identifier[assert_on_branch] ( identifier[branch_name] ):
literal[string]
identifier[branch] = identifier[git] . identifier[current_branch] ( identifier[refresh] = keyword[True] )
keyword[if] identifier[branch] . identifier[name] != identifier[branch_name] :
keyword[if] identifier[context] . identifier[get] ( literal[string] , keyword[False] ):
identifier[log] . identifier[info] ( literal[string] ,
identifier[branch_name] )
keyword[else] :
identifier[log] . identifier[err] ( literal[string] , identifier[branch_name] )
identifier[sys] . identifier[exit] ( literal[int] ) | def assert_on_branch(branch_name):
# type: (str) -> None
' Print error and exit if *branch_name* is not the current branch.\n\n Args:\n branch_name (str):\n The supposed name of the current branch.\n '
branch = git.current_branch(refresh=True)
if branch.name != branch_name:
if context.get('pretend', False):
log.info("Would assert that you're on a <33>{}<32> branch", branch_name) # depends on [control=['if'], data=[]]
else:
log.err("You're not on a <33>{}<31> branch!", branch_name)
sys.exit(1) # depends on [control=['if'], data=['branch_name']] |
def __update_filter(self):
"""
Create a combined filter. Set the resulting filter into the document controller.
"""
filters = list()
if self.__date_filter:
filters.append(self.__date_filter)
if self.__text_filter:
filters.append(self.__text_filter)
self.document_controller.display_filter = ListModel.AndFilter(filters) | def function[__update_filter, parameter[self]]:
constant[
Create a combined filter. Set the resulting filter into the document controller.
]
variable[filters] assign[=] call[name[list], parameter[]]
if name[self].__date_filter begin[:]
call[name[filters].append, parameter[name[self].__date_filter]]
if name[self].__text_filter begin[:]
call[name[filters].append, parameter[name[self].__text_filter]]
name[self].document_controller.display_filter assign[=] call[name[ListModel].AndFilter, parameter[name[filters]]] | keyword[def] identifier[__update_filter] ( identifier[self] ):
literal[string]
identifier[filters] = identifier[list] ()
keyword[if] identifier[self] . identifier[__date_filter] :
identifier[filters] . identifier[append] ( identifier[self] . identifier[__date_filter] )
keyword[if] identifier[self] . identifier[__text_filter] :
identifier[filters] . identifier[append] ( identifier[self] . identifier[__text_filter] )
identifier[self] . identifier[document_controller] . identifier[display_filter] = identifier[ListModel] . identifier[AndFilter] ( identifier[filters] ) | def __update_filter(self):
"""
Create a combined filter. Set the resulting filter into the document controller.
"""
filters = list()
if self.__date_filter:
filters.append(self.__date_filter) # depends on [control=['if'], data=[]]
if self.__text_filter:
filters.append(self.__text_filter) # depends on [control=['if'], data=[]]
self.document_controller.display_filter = ListModel.AndFilter(filters) |
def get_group_creator(self, group):
"""
Get the group's creator
:type group: str
:param group: group name
:rtype: list
:return: creator userId
"""
data = {
'name': group,
}
response = _fix_group(self.post('getGroupCreator', data))
return response | def function[get_group_creator, parameter[self, group]]:
constant[
Get the group's creator
:type group: str
:param group: group name
:rtype: list
:return: creator userId
]
variable[data] assign[=] dictionary[[<ast.Constant object at 0x7da20e961ab0>], [<ast.Name object at 0x7da20e9631f0>]]
variable[response] assign[=] call[name[_fix_group], parameter[call[name[self].post, parameter[constant[getGroupCreator], name[data]]]]]
return[name[response]] | keyword[def] identifier[get_group_creator] ( identifier[self] , identifier[group] ):
literal[string]
identifier[data] ={
literal[string] : identifier[group] ,
}
identifier[response] = identifier[_fix_group] ( identifier[self] . identifier[post] ( literal[string] , identifier[data] ))
keyword[return] identifier[response] | def get_group_creator(self, group):
"""
Get the group's creator
:type group: str
:param group: group name
:rtype: list
:return: creator userId
"""
data = {'name': group}
response = _fix_group(self.post('getGroupCreator', data))
return response |
def logpdf(self, X):
"""
Log PDF for Inverse Wishart prior
Parameters
----------
X : float
Covariance matrix for which the prior is being formed over
Returns
----------
- log(p(X))
"""
return invwishart.logpdf(X, df=self.v, scale=self.Psi) | def function[logpdf, parameter[self, X]]:
constant[
Log PDF for Inverse Wishart prior
Parameters
----------
X : float
Covariance matrix for which the prior is being formed over
Returns
----------
- log(p(X))
]
return[call[name[invwishart].logpdf, parameter[name[X]]]] | keyword[def] identifier[logpdf] ( identifier[self] , identifier[X] ):
literal[string]
keyword[return] identifier[invwishart] . identifier[logpdf] ( identifier[X] , identifier[df] = identifier[self] . identifier[v] , identifier[scale] = identifier[self] . identifier[Psi] ) | def logpdf(self, X):
"""
Log PDF for Inverse Wishart prior
Parameters
----------
X : float
Covariance matrix for which the prior is being formed over
Returns
----------
- log(p(X))
"""
return invwishart.logpdf(X, df=self.v, scale=self.Psi) |
def facebook_request(self, path, callback, access_token=None,
post_args=None, **args):
"""Fetches the given relative API path, e.g., "/btaylor/picture"
If the request is a POST, post_args should be provided. Query
string arguments should be given as keyword arguments.
An introduction to the Facebook Graph API can be found at
http://developers.facebook.com/docs/api
Many methods require an OAuth access token which you can obtain
through authorize_redirect() and get_authenticated_user(). The
user returned through that process includes an 'access_token'
attribute that can be used to make authenticated requests via
this method. Example usage::
class MainHandler(tornado.web.RequestHandler,
tornado.auth.FacebookGraphMixin):
@tornado.web.authenticated
@tornado.web.asynchronous
def get(self):
self.facebook_request(
"/me/feed",
post_args={"message": "I am posting from my Tornado application!"},
access_token=self.current_user["access_token"],
callback=self.async_callback(self._on_post))
def _on_post(self, new_entry):
if not new_entry:
# Call failed; perhaps missing permission?
self.authorize_redirect()
return
self.finish("Posted a message!")
"""
url = "https://graph.facebook.com" + path
all_args = {}
if access_token:
all_args["access_token"] = access_token
all_args.update(args)
all_args.update(post_args or {})
if all_args: url += "?" + urllib.urlencode(all_args)
callback = self.async_callback(self._on_facebook_request, callback)
http = httpclient.AsyncHTTPClient()
if post_args is not None:
http.fetch(url, method="POST", body=urllib.urlencode(post_args),
callback=callback)
else:
http.fetch(url, callback=callback) | def function[facebook_request, parameter[self, path, callback, access_token, post_args]]:
constant[Fetches the given relative API path, e.g., "/btaylor/picture"
If the request is a POST, post_args should be provided. Query
string arguments should be given as keyword arguments.
An introduction to the Facebook Graph API can be found at
http://developers.facebook.com/docs/api
Many methods require an OAuth access token which you can obtain
through authorize_redirect() and get_authenticated_user(). The
user returned through that process includes an 'access_token'
attribute that can be used to make authenticated requests via
this method. Example usage::
class MainHandler(tornado.web.RequestHandler,
tornado.auth.FacebookGraphMixin):
@tornado.web.authenticated
@tornado.web.asynchronous
def get(self):
self.facebook_request(
"/me/feed",
post_args={"message": "I am posting from my Tornado application!"},
access_token=self.current_user["access_token"],
callback=self.async_callback(self._on_post))
def _on_post(self, new_entry):
if not new_entry:
# Call failed; perhaps missing permission?
self.authorize_redirect()
return
self.finish("Posted a message!")
]
variable[url] assign[=] binary_operation[constant[https://graph.facebook.com] + name[path]]
variable[all_args] assign[=] dictionary[[], []]
if name[access_token] begin[:]
call[name[all_args]][constant[access_token]] assign[=] name[access_token]
call[name[all_args].update, parameter[name[args]]]
call[name[all_args].update, parameter[<ast.BoolOp object at 0x7da1b01c0a00>]]
if name[all_args] begin[:]
<ast.AugAssign object at 0x7da18fe92920>
variable[callback] assign[=] call[name[self].async_callback, parameter[name[self]._on_facebook_request, name[callback]]]
variable[http] assign[=] call[name[httpclient].AsyncHTTPClient, parameter[]]
if compare[name[post_args] is_not constant[None]] begin[:]
call[name[http].fetch, parameter[name[url]]] | keyword[def] identifier[facebook_request] ( identifier[self] , identifier[path] , identifier[callback] , identifier[access_token] = keyword[None] ,
identifier[post_args] = keyword[None] ,** identifier[args] ):
literal[string]
identifier[url] = literal[string] + identifier[path]
identifier[all_args] ={}
keyword[if] identifier[access_token] :
identifier[all_args] [ literal[string] ]= identifier[access_token]
identifier[all_args] . identifier[update] ( identifier[args] )
identifier[all_args] . identifier[update] ( identifier[post_args] keyword[or] {})
keyword[if] identifier[all_args] : identifier[url] += literal[string] + identifier[urllib] . identifier[urlencode] ( identifier[all_args] )
identifier[callback] = identifier[self] . identifier[async_callback] ( identifier[self] . identifier[_on_facebook_request] , identifier[callback] )
identifier[http] = identifier[httpclient] . identifier[AsyncHTTPClient] ()
keyword[if] identifier[post_args] keyword[is] keyword[not] keyword[None] :
identifier[http] . identifier[fetch] ( identifier[url] , identifier[method] = literal[string] , identifier[body] = identifier[urllib] . identifier[urlencode] ( identifier[post_args] ),
identifier[callback] = identifier[callback] )
keyword[else] :
identifier[http] . identifier[fetch] ( identifier[url] , identifier[callback] = identifier[callback] ) | def facebook_request(self, path, callback, access_token=None, post_args=None, **args):
"""Fetches the given relative API path, e.g., "/btaylor/picture"
If the request is a POST, post_args should be provided. Query
string arguments should be given as keyword arguments.
An introduction to the Facebook Graph API can be found at
http://developers.facebook.com/docs/api
Many methods require an OAuth access token which you can obtain
through authorize_redirect() and get_authenticated_user(). The
user returned through that process includes an 'access_token'
attribute that can be used to make authenticated requests via
this method. Example usage::
class MainHandler(tornado.web.RequestHandler,
tornado.auth.FacebookGraphMixin):
@tornado.web.authenticated
@tornado.web.asynchronous
def get(self):
self.facebook_request(
"/me/feed",
post_args={"message": "I am posting from my Tornado application!"},
access_token=self.current_user["access_token"],
callback=self.async_callback(self._on_post))
def _on_post(self, new_entry):
if not new_entry:
# Call failed; perhaps missing permission?
self.authorize_redirect()
return
self.finish("Posted a message!")
"""
url = 'https://graph.facebook.com' + path
all_args = {}
if access_token:
all_args['access_token'] = access_token
all_args.update(args)
all_args.update(post_args or {}) # depends on [control=['if'], data=[]]
if all_args:
url += '?' + urllib.urlencode(all_args) # depends on [control=['if'], data=[]]
callback = self.async_callback(self._on_facebook_request, callback)
http = httpclient.AsyncHTTPClient()
if post_args is not None:
http.fetch(url, method='POST', body=urllib.urlencode(post_args), callback=callback) # depends on [control=['if'], data=['post_args']]
else:
http.fetch(url, callback=callback) |
def cache(ctx, clear_subliminal):
    """Cache management.

    When ``clear_subliminal`` is truthy, delete every file in the cache
    directory whose name starts with ``cache_file`` (the cache backend may
    append suffixes to the base name); otherwise report that nothing was
    done.

    :param ctx: click context; the cache directory is read from
        ``ctx.parent.params['cache_dir']``.
    :param clear_subliminal: flag requesting removal of subliminal's cache.
    """
    if clear_subliminal:
        # Hoist the glob pattern out of the loop; use 'cache_path' rather
        # than 'file' so the builtin is not shadowed.
        pattern = os.path.join(ctx.parent.params['cache_dir'], cache_file) + '*'
        for cache_path in glob.glob(pattern):
            os.remove(cache_path)
        click.echo('Subliminal\'s cache cleared.')
    else:
        click.echo('Nothing done.')
constant[Cache management.]
if name[clear_subliminal] begin[:]
for taget[name[file]] in starred[call[name[glob].glob, parameter[binary_operation[call[name[os].path.join, parameter[call[name[ctx].parent.params][constant[cache_dir]], name[cache_file]]] + constant[*]]]]] begin[:]
call[name[os].remove, parameter[name[file]]]
call[name[click].echo, parameter[constant[Subliminal's cache cleared.]]] | keyword[def] identifier[cache] ( identifier[ctx] , identifier[clear_subliminal] ):
literal[string]
keyword[if] identifier[clear_subliminal] :
keyword[for] identifier[file] keyword[in] identifier[glob] . identifier[glob] ( identifier[os] . identifier[path] . identifier[join] ( identifier[ctx] . identifier[parent] . identifier[params] [ literal[string] ], identifier[cache_file] )+ literal[string] ):
identifier[os] . identifier[remove] ( identifier[file] )
identifier[click] . identifier[echo] ( literal[string] )
keyword[else] :
identifier[click] . identifier[echo] ( literal[string] ) | def cache(ctx, clear_subliminal):
"""Cache management."""
if clear_subliminal:
for file in glob.glob(os.path.join(ctx.parent.params['cache_dir'], cache_file) + '*'):
os.remove(file) # depends on [control=['for'], data=['file']]
click.echo("Subliminal's cache cleared.") # depends on [control=['if'], data=[]]
else:
click.echo('Nothing done.') |
def extract_isosurface(pvol):
    """ Extracts the six boundary isosurfaces of a spline volume.

    The following example illustrates one of the usage scenarios:

    .. code-block:: python
        :linenos:

        from geomdl import construct, multi
        from geomdl.visualization import VisMPL

        # Assuming that "myvol" variable stores your spline volume information
        isosrf = construct.extract_isosurface(myvol)

        # Create a surface container to store extracted isosurface
        msurf = multi.SurfaceContainer(isosrf)

        # Set visualization components
        msurf.vis = VisMPL.VisSurface(VisMPL.VisConfig(ctrlpts=False))

        # Render isosurface
        msurf.render()

    :param pvol: input volume
    :type pvol: abstract.Volume
    :return: isosurface (as a tuple of surfaces)
    :rtype: tuple
    """
    # Validate the input: must be a single spline volume
    if pvol.pdimension != 3:
        raise GeomdlException("The input should be a spline volume")
    if len(pvol) != 1:
        raise GeomdlException("Can only operate on single spline volumes")

    # Extract all parametric surfaces, then keep the first and last surface
    # of each parametric plane (uv, uw, vw) -- these bound the volume.
    surfaces = extract_surfaces(pvol)
    boundary = []
    for plane in ('uv', 'uw', 'vw'):
        boundary.append(surfaces[plane][0])
        boundary.append(surfaces[plane][-1])
    return tuple(boundary)
constant[ Extracts the largest isosurface from a volume.
The following example illustrates one of the usage scenarios:
.. code-block:: python
:linenos:
from geomdl import construct, multi
from geomdl.visualization import VisMPL
# Assuming that "myvol" variable stores your spline volume information
isosrf = construct.extract_isosurface(myvol)
# Create a surface container to store extracted isosurface
msurf = multi.SurfaceContainer(isosrf)
# Set visualization components
msurf.vis = VisMPL.VisSurface(VisMPL.VisConfig(ctrlpts=False))
# Render isosurface
msurf.render()
:param pvol: input volume
:type pvol: abstract.Volume
:return: isosurface (as a tuple of surfaces)
:rtype: tuple
]
if compare[name[pvol].pdimension not_equal[!=] constant[3]] begin[:]
<ast.Raise object at 0x7da2041d8130>
if compare[call[name[len], parameter[name[pvol]]] not_equal[!=] constant[1]] begin[:]
<ast.Raise object at 0x7da2041db1f0>
variable[isosrf] assign[=] call[name[extract_surfaces], parameter[name[pvol]]]
return[tuple[[<ast.Subscript object at 0x7da20c76c670>, <ast.Subscript object at 0x7da20c76dfc0>, <ast.Subscript object at 0x7da20c76c8e0>, <ast.Subscript object at 0x7da20c76cc40>, <ast.Subscript object at 0x7da20c76ee60>, <ast.Subscript object at 0x7da20c76fd30>]]] | keyword[def] identifier[extract_isosurface] ( identifier[pvol] ):
literal[string]
keyword[if] identifier[pvol] . identifier[pdimension] != literal[int] :
keyword[raise] identifier[GeomdlException] ( literal[string] )
keyword[if] identifier[len] ( identifier[pvol] )!= literal[int] :
keyword[raise] identifier[GeomdlException] ( literal[string] )
identifier[isosrf] = identifier[extract_surfaces] ( identifier[pvol] )
keyword[return] identifier[isosrf] [ literal[string] ][ literal[int] ], identifier[isosrf] [ literal[string] ][- literal[int] ], identifier[isosrf] [ literal[string] ][ literal[int] ], identifier[isosrf] [ literal[string] ][- literal[int] ], identifier[isosrf] [ literal[string] ][ literal[int] ], identifier[isosrf] [ literal[string] ][- literal[int] ] | def extract_isosurface(pvol):
""" Extracts the largest isosurface from a volume.
The following example illustrates one of the usage scenarios:
.. code-block:: python
:linenos:
from geomdl import construct, multi
from geomdl.visualization import VisMPL
# Assuming that "myvol" variable stores your spline volume information
isosrf = construct.extract_isosurface(myvol)
# Create a surface container to store extracted isosurface
msurf = multi.SurfaceContainer(isosrf)
# Set visualization components
msurf.vis = VisMPL.VisSurface(VisMPL.VisConfig(ctrlpts=False))
# Render isosurface
msurf.render()
:param pvol: input volume
:type pvol: abstract.Volume
:return: isosurface (as a tuple of surfaces)
:rtype: tuple
"""
if pvol.pdimension != 3:
raise GeomdlException('The input should be a spline volume') # depends on [control=['if'], data=[]]
if len(pvol) != 1:
raise GeomdlException('Can only operate on single spline volumes') # depends on [control=['if'], data=[]]
# Extract surfaces from the parametric volume
isosrf = extract_surfaces(pvol)
# Return the isosurface
return (isosrf['uv'][0], isosrf['uv'][-1], isosrf['uw'][0], isosrf['uw'][-1], isosrf['vw'][0], isosrf['vw'][-1]) |
def p_compound_list(p):
    '''compound_list : list
                     | newline_list list1'''
    # NOTE: the docstring above is the PLY grammar rule and must not change.
    if len(p) == 2:
        # First production: propagate the parsed 'list' node unchanged.
        p[0] = p[1]
        return
    parts = p[2]
    if len(parts) > 1:
        # Several commands: wrap them in a synthetic 'list' node spanning
        # all of the parts.
        p[0] = ast.node(kind='list', parts=parts, pos=_partsspan(parts))
    else:
        # A single command needs no wrapping node.
        p[0] = parts[0]
constant[compound_list : list
| newline_list list1]
if compare[call[name[len], parameter[name[p]]] equal[==] constant[2]] begin[:]
call[name[p]][constant[0]] assign[=] call[name[p]][constant[1]] | keyword[def] identifier[p_compound_list] ( identifier[p] ):
literal[string]
keyword[if] identifier[len] ( identifier[p] )== literal[int] :
identifier[p] [ literal[int] ]= identifier[p] [ literal[int] ]
keyword[else] :
identifier[parts] = identifier[p] [ literal[int] ]
keyword[if] identifier[len] ( identifier[parts] )> literal[int] :
identifier[p] [ literal[int] ]= identifier[ast] . identifier[node] ( identifier[kind] = literal[string] , identifier[parts] = identifier[parts] , identifier[pos] = identifier[_partsspan] ( identifier[parts] ))
keyword[else] :
identifier[p] [ literal[int] ]= identifier[parts] [ literal[int] ] | def p_compound_list(p):
"""compound_list : list
| newline_list list1"""
if len(p) == 2:
p[0] = p[1] # depends on [control=['if'], data=[]]
else:
parts = p[2]
if len(parts) > 1:
p[0] = ast.node(kind='list', parts=parts, pos=_partsspan(parts)) # depends on [control=['if'], data=[]]
else:
p[0] = parts[0] |
def rsky_distribution(self, rmax=None, smooth=0.1, nbins=100):
        """
        Distribution of projected sky separations.

        Builds a histogram of ``self.Rsky`` and wraps it in a
        :class:`simpledists.Hist_Distribution`.

        :param rmax: (optional)
            Maximum radius for the distribution.  Defaults to
            ``self.maxrad`` when that attribute exists, otherwise the
            99th percentile of ``self.Rsky``.
        :param smooth: (optional)
            Smoothing parameter for :class:`simpledists.Hist_Distribution`.
        :param nbins: (optional)
            Number of histogram bins.
        :return:
            :class:`simpledists.Hist_Distribution` describing the Rsky
            distribution.
        """
        if rmax is None:
            # Prefer an explicit maximum radius on the object; otherwise
            # fall back to the empirical 99th percentile of separations.
            rmax = (self.maxrad if hasattr(self, 'maxrad')
                    else np.percentile(self.Rsky, 99))
        # NOTE(review): Rsky appears to be a unit-carrying quantity
        # (``.value`` is used here) -- confirm rmax units match upstream.
        return dists.Hist_Distribution(self.Rsky.value, bins=nbins,
                                       maxval=rmax, smooth=smooth)
constant[
Distribution of projected separations
Returns a :class:`simpledists.Hist_Distribution` object.
:param rmax: (optional)
Maximum radius to calculate distribution.
:param dr: (optional)
Bin width for histogram
:param smooth: (optional)
Smoothing parameter for :class:`simpledists.Hist_Distribution`
:param nbins: (optional)
Number of bins for histogram
:return:
:class:`simpledists.Hist_Distribution` describing Rsky distribution
]
if compare[name[rmax] is constant[None]] begin[:]
if call[name[hasattr], parameter[name[self], constant[maxrad]]] begin[:]
variable[rmax] assign[=] name[self].maxrad
variable[dist] assign[=] call[name[dists].Hist_Distribution, parameter[name[self].Rsky.value]]
return[name[dist]] | keyword[def] identifier[rsky_distribution] ( identifier[self] , identifier[rmax] = keyword[None] , identifier[smooth] = literal[int] , identifier[nbins] = literal[int] ):
literal[string]
keyword[if] identifier[rmax] keyword[is] keyword[None] :
keyword[if] identifier[hasattr] ( identifier[self] , literal[string] ):
identifier[rmax] = identifier[self] . identifier[maxrad]
keyword[else] :
identifier[rmax] = identifier[np] . identifier[percentile] ( identifier[self] . identifier[Rsky] , literal[int] )
identifier[dist] = identifier[dists] . identifier[Hist_Distribution] ( identifier[self] . identifier[Rsky] . identifier[value] , identifier[bins] = identifier[nbins] , identifier[maxval] = identifier[rmax] , identifier[smooth] = identifier[smooth] )
keyword[return] identifier[dist] | def rsky_distribution(self, rmax=None, smooth=0.1, nbins=100):
"""
Distribution of projected separations
Returns a :class:`simpledists.Hist_Distribution` object.
:param rmax: (optional)
Maximum radius to calculate distribution.
:param dr: (optional)
Bin width for histogram
:param smooth: (optional)
Smoothing parameter for :class:`simpledists.Hist_Distribution`
:param nbins: (optional)
Number of bins for histogram
:return:
:class:`simpledists.Hist_Distribution` describing Rsky distribution
"""
if rmax is None:
if hasattr(self, 'maxrad'):
rmax = self.maxrad # depends on [control=['if'], data=[]]
else:
rmax = np.percentile(self.Rsky, 99) # depends on [control=['if'], data=['rmax']]
dist = dists.Hist_Distribution(self.Rsky.value, bins=nbins, maxval=rmax, smooth=smooth)
return dist |
def deserialize(self):
        """
        Based on a text based representation of an RSA key this method
        instantiates a
        cryptography.hazmat.primitives.asymmetric.rsa.RSAPrivateKey or
        RSAPublicKey instance

        Key material may come from the JWK integer parameters (``n``,
        ``e`` and optionally the private parts) and/or from an X.509
        certificate chain in ``x5c``.  On success ``self.pub_key`` (and
        ``self.priv_key`` for private keys) is populated; raises
        DeSerializationNotPossible when no usable key material is found
        or when the components are inconsistent.
        """
        # first look for the public parts of a RSA key
        if self.n and self.e:
            try:
                numbers = {}
                # loop over all the parameters that define a RSA key
                for param in self.longs:
                    item = getattr(self, param)
                    if not item:
                        # parameter absent or empty -> not part of this key
                        continue
                    else:
                        try:
                            # deser() decodes the textual parameter into an
                            # int-compatible value (presumably base64url,
                            # big-endian -- TODO confirm against deser())
                            val = int(deser(item))
                        except Exception:
                            raise
                        else:
                            numbers[param] = val
                # the private exponent 'd' marks full private key material
                if 'd' in numbers:
                    self.priv_key = rsa_construct_private(numbers)
                    self.pub_key = self.priv_key.public_key()
                else:
                    self.pub_key = rsa_construct_public(numbers)
            except ValueError as err:
                # inconsistent/invalid numbers -> cannot build a key
                raise DeSerializationNotPossible("%s" % err)
        if self.x5c:
            # build the certificate chain from the base64-encoded DER blobs
            _cert_chain = []
            for der_data in self.x5c:
                _cert_chain.append(der_cert(base64.b64decode(der_data)))
            if self.x5t: # verify the cert thumbprint
                if isinstance(self.x5t, bytes):
                    _x5t = self.x5t
                else:
                    _x5t = self.x5t.encode('ascii')
                if _x5t != x5t_calculation(self.x5c[0]):
                    raise DeSerializationNotPossible(
                        "The thumbprint 'x5t' does not match the certificate.")
            if self.pub_key:
                # key was also given as numbers: the leaf certificate must
                # describe the same public key
                if not rsa_eq(self.pub_key, _cert_chain[0].public_key()):
                    raise ValueError(
                        'key described by components and key in x5c not equal')
            else:
                self.pub_key = _cert_chain[0].public_key()
                self._serialize(self.pub_key)
            if len(self.x5c) > 1: # verify chain
                # NOTE(review): chain verification is not implemented here
                pass
        if not self.priv_key and not self.pub_key:
            raise DeSerializationNotPossible()
constant[
Based on a text based representation of an RSA key this method
instantiates a
cryptography.hazmat.primitives.asymmetric.rsa.RSAPrivateKey or
RSAPublicKey instance
]
if <ast.BoolOp object at 0x7da20c6a84c0> begin[:]
<ast.Try object at 0x7da20c6aa350>
if name[self].x5c begin[:]
variable[_cert_chain] assign[=] list[[]]
for taget[name[der_data]] in starred[name[self].x5c] begin[:]
call[name[_cert_chain].append, parameter[call[name[der_cert], parameter[call[name[base64].b64decode, parameter[name[der_data]]]]]]]
if name[self].x5t begin[:]
if call[name[isinstance], parameter[name[self].x5t, name[bytes]]] begin[:]
variable[_x5t] assign[=] name[self].x5t
if compare[name[_x5t] not_equal[!=] call[name[x5t_calculation], parameter[call[name[self].x5c][constant[0]]]]] begin[:]
<ast.Raise object at 0x7da1b056b9d0>
if name[self].pub_key begin[:]
if <ast.UnaryOp object at 0x7da1b0569ea0> begin[:]
<ast.Raise object at 0x7da1b056ac50>
call[name[self]._serialize, parameter[name[self].pub_key]]
if compare[call[name[len], parameter[name[self].x5c]] greater[>] constant[1]] begin[:]
pass
if <ast.BoolOp object at 0x7da1b0529810> begin[:]
<ast.Raise object at 0x7da1b052ae90> | keyword[def] identifier[deserialize] ( identifier[self] ):
literal[string]
keyword[if] identifier[self] . identifier[n] keyword[and] identifier[self] . identifier[e] :
keyword[try] :
identifier[numbers] ={}
keyword[for] identifier[param] keyword[in] identifier[self] . identifier[longs] :
identifier[item] = identifier[getattr] ( identifier[self] , identifier[param] )
keyword[if] keyword[not] identifier[item] :
keyword[continue]
keyword[else] :
keyword[try] :
identifier[val] = identifier[int] ( identifier[deser] ( identifier[item] ))
keyword[except] identifier[Exception] :
keyword[raise]
keyword[else] :
identifier[numbers] [ identifier[param] ]= identifier[val]
keyword[if] literal[string] keyword[in] identifier[numbers] :
identifier[self] . identifier[priv_key] = identifier[rsa_construct_private] ( identifier[numbers] )
identifier[self] . identifier[pub_key] = identifier[self] . identifier[priv_key] . identifier[public_key] ()
keyword[else] :
identifier[self] . identifier[pub_key] = identifier[rsa_construct_public] ( identifier[numbers] )
keyword[except] identifier[ValueError] keyword[as] identifier[err] :
keyword[raise] identifier[DeSerializationNotPossible] ( literal[string] % identifier[err] )
keyword[if] identifier[self] . identifier[x5c] :
identifier[_cert_chain] =[]
keyword[for] identifier[der_data] keyword[in] identifier[self] . identifier[x5c] :
identifier[_cert_chain] . identifier[append] ( identifier[der_cert] ( identifier[base64] . identifier[b64decode] ( identifier[der_data] )))
keyword[if] identifier[self] . identifier[x5t] :
keyword[if] identifier[isinstance] ( identifier[self] . identifier[x5t] , identifier[bytes] ):
identifier[_x5t] = identifier[self] . identifier[x5t]
keyword[else] :
identifier[_x5t] = identifier[self] . identifier[x5t] . identifier[encode] ( literal[string] )
keyword[if] identifier[_x5t] != identifier[x5t_calculation] ( identifier[self] . identifier[x5c] [ literal[int] ]):
keyword[raise] identifier[DeSerializationNotPossible] (
literal[string] )
keyword[if] identifier[self] . identifier[pub_key] :
keyword[if] keyword[not] identifier[rsa_eq] ( identifier[self] . identifier[pub_key] , identifier[_cert_chain] [ literal[int] ]. identifier[public_key] ()):
keyword[raise] identifier[ValueError] (
literal[string] )
keyword[else] :
identifier[self] . identifier[pub_key] = identifier[_cert_chain] [ literal[int] ]. identifier[public_key] ()
identifier[self] . identifier[_serialize] ( identifier[self] . identifier[pub_key] )
keyword[if] identifier[len] ( identifier[self] . identifier[x5c] )> literal[int] :
keyword[pass]
keyword[if] keyword[not] identifier[self] . identifier[priv_key] keyword[and] keyword[not] identifier[self] . identifier[pub_key] :
keyword[raise] identifier[DeSerializationNotPossible] () | def deserialize(self):
"""
Based on a text based representation of an RSA key this method
instantiates a
cryptography.hazmat.primitives.asymmetric.rsa.RSAPrivateKey or
RSAPublicKey instance
"""
# first look for the public parts of a RSA key
if self.n and self.e:
try:
numbers = {}
# loop over all the parameters that define a RSA key
for param in self.longs:
item = getattr(self, param)
if not item:
continue # depends on [control=['if'], data=[]]
else:
try:
val = int(deser(item)) # depends on [control=['try'], data=[]]
except Exception:
raise # depends on [control=['except'], data=[]]
else:
numbers[param] = val # depends on [control=['for'], data=['param']]
if 'd' in numbers:
self.priv_key = rsa_construct_private(numbers)
self.pub_key = self.priv_key.public_key() # depends on [control=['if'], data=['numbers']]
else:
self.pub_key = rsa_construct_public(numbers) # depends on [control=['try'], data=[]]
except ValueError as err:
raise DeSerializationNotPossible('%s' % err) # depends on [control=['except'], data=['err']] # depends on [control=['if'], data=[]]
if self.x5c:
_cert_chain = []
for der_data in self.x5c:
_cert_chain.append(der_cert(base64.b64decode(der_data))) # depends on [control=['for'], data=['der_data']]
if self.x5t: # verify the cert thumbprint
if isinstance(self.x5t, bytes):
_x5t = self.x5t # depends on [control=['if'], data=[]]
else:
_x5t = self.x5t.encode('ascii')
if _x5t != x5t_calculation(self.x5c[0]):
raise DeSerializationNotPossible("The thumbprint 'x5t' does not match the certificate.") # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
if self.pub_key:
if not rsa_eq(self.pub_key, _cert_chain[0].public_key()):
raise ValueError('key described by components and key in x5c not equal') # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
else:
self.pub_key = _cert_chain[0].public_key()
self._serialize(self.pub_key)
if len(self.x5c) > 1: # verify chain
pass # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
if not self.priv_key and (not self.pub_key):
raise DeSerializationNotPossible() # depends on [control=['if'], data=[]] |
def _dataframe_fields(self):
"""
Creates a dictionary of all fields to include with DataFrame.
With the result of the calls to class properties changing based on the
class index value, the dictionary should be regenerated every time the
index is changed when the dataframe property is requested.
Returns
-------
dictionary
Returns a dictionary where the keys are the shortened ``string``
attribute names and the values are the actual value for each
attribute for the specified index.
"""
fields_to_include = {
'assist_percentage': self.assist_percentage,
'assists': self.assists,
'block_percentage': self.block_percentage,
'blocks': self.blocks,
'box_plus_minus': self.box_plus_minus,
'conference': self.conference,
'defensive_box_plus_minus': self.defensive_box_plus_minus,
'defensive_rebound_percentage': self.defensive_rebound_percentage,
'defensive_rebounds': self.defensive_rebounds,
'defensive_win_shares': self.defensive_win_shares,
'effective_field_goal_percentage':
self.effective_field_goal_percentage,
'field_goal_attempts': self.field_goal_attempts,
'field_goal_percentage': self.field_goal_percentage,
'field_goals': self.field_goals,
'free_throw_attempt_rate': self.free_throw_attempt_rate,
'free_throw_attempts': self.free_throw_attempts,
'free_throw_percentage': self.free_throw_percentage,
'free_throws': self.free_throws,
'games_played': self.games_played,
'games_started': self.games_started,
'height': self.height,
'minutes_played': self.minutes_played,
'offensive_box_plus_minus': self.offensive_box_plus_minus,
'offensive_rebound_percentage': self.offensive_rebound_percentage,
'offensive_rebounds': self.offensive_rebounds,
'offensive_win_shares': self.offensive_win_shares,
'personal_fouls': self.personal_fouls,
'player_efficiency_rating': self.player_efficiency_rating,
'player_id': self.player_id,
'points': self.points,
'points_produced': self.points_produced,
'position': self.position,
'steal_percentage': self.steal_percentage,
'steals': self.steals,
'team_abbreviation': self.team_abbreviation,
'three_point_attempt_rate': self.three_point_attempt_rate,
'three_point_attempts': self.three_point_attempts,
'three_point_percentage': self.three_point_percentage,
'three_pointers': self.three_pointers,
'total_rebound_percentage': self.total_rebound_percentage,
'total_rebounds': self.total_rebounds,
'true_shooting_percentage': self.true_shooting_percentage,
'turnover_percentage': self.turnover_percentage,
'turnovers': self.turnovers,
'two_point_attempts': self.two_point_attempts,
'two_point_percentage': self.two_point_percentage,
'two_pointers': self.two_pointers,
'usage_percentage': self.usage_percentage,
'weight': self.weight,
'win_shares': self.win_shares,
'win_shares_per_40_minutes': self.win_shares_per_40_minutes,
}
return fields_to_include | def function[_dataframe_fields, parameter[self]]:
constant[
Creates a dictionary of all fields to include with DataFrame.
With the result of the calls to class properties changing based on the
class index value, the dictionary should be regenerated every time the
index is changed when the dataframe property is requested.
Returns
-------
dictionary
Returns a dictionary where the keys are the shortened ``string``
attribute names and the values are the actual value for each
attribute for the specified index.
]
variable[fields_to_include] assign[=] dictionary[[<ast.Constant object at 0x7da1b0b351b0>, <ast.Constant object at 0x7da1b0b35cf0>, <ast.Constant object at 0x7da1b0b36920>, <ast.Constant object at 0x7da1b0b36cb0>, <ast.Constant object at 0x7da1b0b35b10>, <ast.Constant object at 0x7da1b0b35e70>, <ast.Constant object at 0x7da1b0b37100>, <ast.Constant object at 0x7da1b0b36770>, <ast.Constant object at 0x7da1b0b35300>, <ast.Constant object at 0x7da1b0b37910>, <ast.Constant object at 0x7da1b0b36f80>, <ast.Constant object at 0x7da1b0b35180>, <ast.Constant object at 0x7da1b0b34ee0>, <ast.Constant object at 0x7da1b0b362f0>, <ast.Constant object at 0x7da1b0b35bd0>, <ast.Constant object at 0x7da1b0b37ee0>, <ast.Constant object at 0x7da1b0b356f0>, <ast.Constant object at 0x7da1b0b35a80>, <ast.Constant object at 0x7da1b0b36620>, <ast.Constant object at 0x7da1b0b36020>, <ast.Constant object at 0x7da1b0b35d50>, <ast.Constant object at 0x7da1b0b36290>, <ast.Constant object at 0x7da1b0b34fa0>, <ast.Constant object at 0x7da1b0b37c10>, <ast.Constant object at 0x7da1b0b37310>, <ast.Constant object at 0x7da1b0b36980>, <ast.Constant object at 0x7da1b0b366b0>, <ast.Constant object at 0x7da1b0b37460>, <ast.Constant object at 0x7da1b0b36e90>, <ast.Constant object at 0x7da1b0b375e0>, <ast.Constant object at 0x7da1b0b36170>, <ast.Constant object at 0x7da1b0b354e0>, <ast.Constant object at 0x7da1b0b37550>, <ast.Constant object at 0x7da1b0b354b0>, <ast.Constant object at 0x7da1b0b37f40>, <ast.Constant object at 0x7da1b0b37b50>, <ast.Constant object at 0x7da1b0b350c0>, <ast.Constant object at 0x7da1b0b37760>, <ast.Constant object at 0x7da1b0b36080>, <ast.Constant object at 0x7da1b0b35ea0>, <ast.Constant object at 0x7da1b0b371f0>, <ast.Constant object at 0x7da1b0b37ac0>, <ast.Constant object at 0x7da1b0b374c0>, <ast.Constant object at 0x7da1b0b35a50>, <ast.Constant object at 0x7da1b0b370a0>, <ast.Constant object at 0x7da1b0b353c0>, <ast.Constant object at 0x7da1b0b36230>, <ast.Constant object 
at 0x7da1b0b37fd0>, <ast.Constant object at 0x7da1b0b37970>, <ast.Constant object at 0x7da204565f30>, <ast.Constant object at 0x7da204565060>], [<ast.Attribute object at 0x7da204566c20>, <ast.Attribute object at 0x7da204564af0>, <ast.Attribute object at 0x7da204567f10>, <ast.Attribute object at 0x7da204567070>, <ast.Attribute object at 0x7da204566470>, <ast.Attribute object at 0x7da204566aa0>, <ast.Attribute object at 0x7da204565000>, <ast.Attribute object at 0x7da204567a00>, <ast.Attribute object at 0x7da204565870>, <ast.Attribute object at 0x7da2045652a0>, <ast.Attribute object at 0x7da204565300>, <ast.Attribute object at 0x7da204564be0>, <ast.Attribute object at 0x7da204567040>, <ast.Attribute object at 0x7da2045671f0>, <ast.Attribute object at 0x7da2045642e0>, <ast.Attribute object at 0x7da204567520>, <ast.Attribute object at 0x7da2045660b0>, <ast.Attribute object at 0x7da204567340>, <ast.Attribute object at 0x7da2045678b0>, <ast.Attribute object at 0x7da204566920>, <ast.Attribute object at 0x7da2045644f0>, <ast.Attribute object at 0x7da204564eb0>, <ast.Attribute object at 0x7da204564b50>, <ast.Attribute object at 0x7da204566440>, <ast.Attribute object at 0x7da204566d40>, <ast.Attribute object at 0x7da204566e90>, <ast.Attribute object at 0x7da204564ca0>, <ast.Attribute object at 0x7da204564250>, <ast.Attribute object at 0x7da1b0bcb7c0>, <ast.Attribute object at 0x7da1b0bc90c0>, <ast.Attribute object at 0x7da1b0bcbcd0>, <ast.Attribute object at 0x7da1b0bc9e70>, <ast.Attribute object at 0x7da1b0bcb2e0>, <ast.Attribute object at 0x7da1b0bcacb0>, <ast.Attribute object at 0x7da1b0bc8af0>, <ast.Attribute object at 0x7da1b0bca3e0>, <ast.Attribute object at 0x7da1b0bc8670>, <ast.Attribute object at 0x7da1b0bcbdc0>, <ast.Attribute object at 0x7da1b0bca410>, <ast.Attribute object at 0x7da1b0bca860>, <ast.Attribute object at 0x7da1b0bcbaf0>, <ast.Attribute object at 0x7da1b0bc8ca0>, <ast.Attribute object at 0x7da1b0bcb700>, <ast.Attribute object at 0x7da1b0bcb910>, 
<ast.Attribute object at 0x7da1b0bc8c70>, <ast.Attribute object at 0x7da1b0bcad70>, <ast.Attribute object at 0x7da1b0bc92d0>, <ast.Attribute object at 0x7da1b0bcb010>, <ast.Attribute object at 0x7da1b0bcb970>, <ast.Attribute object at 0x7da1b0bcb6d0>, <ast.Attribute object at 0x7da1b0bc9d80>]]
return[name[fields_to_include]] | keyword[def] identifier[_dataframe_fields] ( identifier[self] ):
literal[string]
identifier[fields_to_include] ={
literal[string] : identifier[self] . identifier[assist_percentage] ,
literal[string] : identifier[self] . identifier[assists] ,
literal[string] : identifier[self] . identifier[block_percentage] ,
literal[string] : identifier[self] . identifier[blocks] ,
literal[string] : identifier[self] . identifier[box_plus_minus] ,
literal[string] : identifier[self] . identifier[conference] ,
literal[string] : identifier[self] . identifier[defensive_box_plus_minus] ,
literal[string] : identifier[self] . identifier[defensive_rebound_percentage] ,
literal[string] : identifier[self] . identifier[defensive_rebounds] ,
literal[string] : identifier[self] . identifier[defensive_win_shares] ,
literal[string] :
identifier[self] . identifier[effective_field_goal_percentage] ,
literal[string] : identifier[self] . identifier[field_goal_attempts] ,
literal[string] : identifier[self] . identifier[field_goal_percentage] ,
literal[string] : identifier[self] . identifier[field_goals] ,
literal[string] : identifier[self] . identifier[free_throw_attempt_rate] ,
literal[string] : identifier[self] . identifier[free_throw_attempts] ,
literal[string] : identifier[self] . identifier[free_throw_percentage] ,
literal[string] : identifier[self] . identifier[free_throws] ,
literal[string] : identifier[self] . identifier[games_played] ,
literal[string] : identifier[self] . identifier[games_started] ,
literal[string] : identifier[self] . identifier[height] ,
literal[string] : identifier[self] . identifier[minutes_played] ,
literal[string] : identifier[self] . identifier[offensive_box_plus_minus] ,
literal[string] : identifier[self] . identifier[offensive_rebound_percentage] ,
literal[string] : identifier[self] . identifier[offensive_rebounds] ,
literal[string] : identifier[self] . identifier[offensive_win_shares] ,
literal[string] : identifier[self] . identifier[personal_fouls] ,
literal[string] : identifier[self] . identifier[player_efficiency_rating] ,
literal[string] : identifier[self] . identifier[player_id] ,
literal[string] : identifier[self] . identifier[points] ,
literal[string] : identifier[self] . identifier[points_produced] ,
literal[string] : identifier[self] . identifier[position] ,
literal[string] : identifier[self] . identifier[steal_percentage] ,
literal[string] : identifier[self] . identifier[steals] ,
literal[string] : identifier[self] . identifier[team_abbreviation] ,
literal[string] : identifier[self] . identifier[three_point_attempt_rate] ,
literal[string] : identifier[self] . identifier[three_point_attempts] ,
literal[string] : identifier[self] . identifier[three_point_percentage] ,
literal[string] : identifier[self] . identifier[three_pointers] ,
literal[string] : identifier[self] . identifier[total_rebound_percentage] ,
literal[string] : identifier[self] . identifier[total_rebounds] ,
literal[string] : identifier[self] . identifier[true_shooting_percentage] ,
literal[string] : identifier[self] . identifier[turnover_percentage] ,
literal[string] : identifier[self] . identifier[turnovers] ,
literal[string] : identifier[self] . identifier[two_point_attempts] ,
literal[string] : identifier[self] . identifier[two_point_percentage] ,
literal[string] : identifier[self] . identifier[two_pointers] ,
literal[string] : identifier[self] . identifier[usage_percentage] ,
literal[string] : identifier[self] . identifier[weight] ,
literal[string] : identifier[self] . identifier[win_shares] ,
literal[string] : identifier[self] . identifier[win_shares_per_40_minutes] ,
}
keyword[return] identifier[fields_to_include] | def _dataframe_fields(self):
"""
Creates a dictionary of all fields to include with DataFrame.
With the result of the calls to class properties changing based on the
class index value, the dictionary should be regenerated every time the
index is changed when the dataframe property is requested.
Returns
-------
dictionary
Returns a dictionary where the keys are the shortened ``string``
attribute names and the values are the actual value for each
attribute for the specified index.
"""
fields_to_include = {'assist_percentage': self.assist_percentage, 'assists': self.assists, 'block_percentage': self.block_percentage, 'blocks': self.blocks, 'box_plus_minus': self.box_plus_minus, 'conference': self.conference, 'defensive_box_plus_minus': self.defensive_box_plus_minus, 'defensive_rebound_percentage': self.defensive_rebound_percentage, 'defensive_rebounds': self.defensive_rebounds, 'defensive_win_shares': self.defensive_win_shares, 'effective_field_goal_percentage': self.effective_field_goal_percentage, 'field_goal_attempts': self.field_goal_attempts, 'field_goal_percentage': self.field_goal_percentage, 'field_goals': self.field_goals, 'free_throw_attempt_rate': self.free_throw_attempt_rate, 'free_throw_attempts': self.free_throw_attempts, 'free_throw_percentage': self.free_throw_percentage, 'free_throws': self.free_throws, 'games_played': self.games_played, 'games_started': self.games_started, 'height': self.height, 'minutes_played': self.minutes_played, 'offensive_box_plus_minus': self.offensive_box_plus_minus, 'offensive_rebound_percentage': self.offensive_rebound_percentage, 'offensive_rebounds': self.offensive_rebounds, 'offensive_win_shares': self.offensive_win_shares, 'personal_fouls': self.personal_fouls, 'player_efficiency_rating': self.player_efficiency_rating, 'player_id': self.player_id, 'points': self.points, 'points_produced': self.points_produced, 'position': self.position, 'steal_percentage': self.steal_percentage, 'steals': self.steals, 'team_abbreviation': self.team_abbreviation, 'three_point_attempt_rate': self.three_point_attempt_rate, 'three_point_attempts': self.three_point_attempts, 'three_point_percentage': self.three_point_percentage, 'three_pointers': self.three_pointers, 'total_rebound_percentage': self.total_rebound_percentage, 'total_rebounds': self.total_rebounds, 'true_shooting_percentage': self.true_shooting_percentage, 'turnover_percentage': self.turnover_percentage, 'turnovers': self.turnovers, 'two_point_attempts': 
self.two_point_attempts, 'two_point_percentage': self.two_point_percentage, 'two_pointers': self.two_pointers, 'usage_percentage': self.usage_percentage, 'weight': self.weight, 'win_shares': self.win_shares, 'win_shares_per_40_minutes': self.win_shares_per_40_minutes}
return fields_to_include |
def backward(self, grad_output):
    r"""Apply the adjoint of the derivative at ``grad_output``.

    This method is usually not called explicitly but as a part of the
    ``cost.backward()`` pass of a backpropagation step.

    Parameters
    ----------
    grad_output : `torch.tensor._TensorBase`
        Tensor to which the Jacobian should be applied. See Notes
        for details.

    Returns
    -------
    result : `torch.autograd.variable.Variable`
        Variable holding the result of applying the Jacobian to
        ``grad_output``. See Notes for details.

    Examples
    --------
    Compute the Jacobian adjoint of the matrix operator, which is the
    operator of the transposed matrix. We compose with the ``sum``
    functional to be able to evaluate ``grad``:

    >>> matrix = np.array([[1, 0, 1],
    ...                    [0, 1, 1]], dtype='float32')
    >>> odl_op = odl.MatrixOperator(matrix)
    >>> torch_op = OperatorAsAutogradFunction(odl_op)
    >>> x = torch.Tensor([1, 2, 3])
    >>> x_var = torch.autograd.Variable(x, requires_grad=True)
    >>> op_x_var = torch_op(x_var)
    >>> cost = op_x_var.sum()
    >>> cost.backward()
    >>> x_var.grad  # should be matrix.T.dot([1, 1])
    Variable containing:
     1
     1
     2
    [torch.FloatTensor of size 3]

    Compute the gradient of a custom functional:

    >>> odl_func = odl.solvers.L2NormSquared(odl.rn(3, dtype='float32'))
    >>> torch_func = OperatorAsAutogradFunction(odl_func)
    >>> x = torch.Tensor([1, 2, 3])
    >>> x_var = torch.autograd.Variable(x, requires_grad=True)
    >>> func_x_var = torch_func(x_var)
    >>> func_x_var
    Variable containing:
     14
    [torch.FloatTensor of size 1]
    >>> func_x_var.backward()
    >>> x_var.grad  # Should be 2 * x
    Variable containing:
     2
     4
     6
    [torch.FloatTensor of size 3]

    Notes
    -----
    This method applies the contribution of this node, i.e., the
    transpose of the Jacobian of its outputs with respect to its inputs,
    to the gradients of some cost function with respect to the outputs
    of this node.

    Example: Assume that this node computes :math:`x \mapsto C(f(x))`,
    where :math:`x` is a tensor variable and :math:`C` is a scalar-valued
    function. In ODL language, what ``backward`` should compute is

    .. math::
        \nabla(C \circ f)(x) = f'(x)^*\big(\nabla C (f(x))\big)

    according to the chain rule. In ODL code, this corresponds to ::

        f.derivative(x).adjoint(C.gradient(f(x))).

    Hence, the parameter ``grad_output`` is a tensor variable containing
    :math:`y = \nabla C(f(x))`. Then, ``backward`` boils down to
    computing ``[f'(x)^*(y)]`` using the input ``x`` stored during
    the previous `forward` pass.
    """
    # TODO: implement directly for GPU data
    if not self.operator.is_linear:
        # Non-linear operators need the linearization point; it was
        # stored during the previous `forward` pass (see Notes).
        input_arr = self.saved_variables[0].data.cpu().numpy()
        if any(s == 0 for s in input_arr.strides):
            # TODO: remove when Numpy issue #9165 is fixed
            # https://github.com/numpy/numpy/pull/9177
            input_arr = input_arr.copy()
    grad = None
    # ODL weights spaces, pytorch doesn't, so we need to handle this
    try:
        dom_weight = self.operator.domain.weighting.const
    except AttributeError:
        # No constant weighting on the domain; treat as unweighted.
        dom_weight = 1.0
    try:
        ran_weight = self.operator.range.weighting.const
    except AttributeError:
        # No constant weighting on the range; treat as unweighted.
        ran_weight = 1.0
    scaling = dom_weight / ran_weight
    if self.needs_input_grad[0]:
        grad_output_arr = grad_output.cpu().numpy()
        if any(s == 0 for s in grad_output_arr.strides):
            # TODO: remove when Numpy issue #9165 is fixed
            # https://github.com/numpy/numpy/pull/9177
            grad_output_arr = grad_output_arr.copy()
        if self.operator.is_linear:
            # For linear operators the derivative is the operator itself,
            # so its adjoint can be used directly.
            adjoint = self.operator.adjoint
        else:
            adjoint = self.operator.derivative(input_arr).adjoint
        grad_odl = adjoint(grad_output_arr)
        if scaling != 1.0:
            # Compensate for the weighting mismatch between the ODL
            # adjoint and the unweighted PyTorch tensors.
            grad_odl *= scaling
        # ``ndmin=1`` ensures at least a 1-d array even for scalar results.
        grad = torch.from_numpy(np.array(grad_odl, copy=False, ndmin=1))
        grad = grad.to(grad_output.device)
    # ``grad`` remains None when the input does not require a gradient.
    return grad
constant[Apply the adjoint of the derivative at ``grad_output``.
This method is usually not called explicitly but as a part of the
``cost.backward()`` pass of a backpropagation step.
Parameters
----------
grad_output : `torch.tensor._TensorBase`
Tensor to which the Jacobian should be applied. See Notes
for details.
Returns
-------
result : `torch.autograd.variable.Variable`
Variable holding the result of applying the Jacobian to
``grad_output``. See Notes for details.
Examples
--------
Compute the Jacobian adjoint of the matrix operator, which is the
operator of the transposed matrix. We compose with the ``sum``
functional to be able to evaluate ``grad``:
>>> matrix = np.array([[1, 0, 1],
... [0, 1, 1]], dtype='float32')
>>> odl_op = odl.MatrixOperator(matrix)
>>> torch_op = OperatorAsAutogradFunction(odl_op)
>>> x = torch.Tensor([1, 2, 3])
>>> x_var = torch.autograd.Variable(x, requires_grad=True)
>>> op_x_var = torch_op(x_var)
>>> cost = op_x_var.sum()
>>> cost.backward()
>>> x_var.grad # should be matrix.T.dot([1, 1])
Variable containing:
1
1
2
[torch.FloatTensor of size 3]
Compute the gradient of a custom functional:
>>> odl_func = odl.solvers.L2NormSquared(odl.rn(3, dtype='float32'))
>>> torch_func = OperatorAsAutogradFunction(odl_func)
>>> x = torch.Tensor([1, 2, 3])
>>> x_var = torch.autograd.Variable(x, requires_grad=True)
>>> func_x_var = torch_func(x_var)
>>> func_x_var
Variable containing:
14
[torch.FloatTensor of size 1]
>>> func_x_var.backward()
>>> x_var.grad # Should be 2 * x
Variable containing:
2
4
6
[torch.FloatTensor of size 3]
Notes
-----
This method applies the contribution of this node, i.e., the
transpose of the Jacobian of its outputs with respect to its inputs,
to the gradients of some cost function with respect to the outputs
of this node.
Example: Assume that this node computes :math:`x \mapsto C(f(x))`,
where :math:`x` is a tensor variable and :math:`C` is a scalar-valued
function. In ODL language, what ``backward`` should compute is
.. math::
\nabla(C \circ f)(x) = f'(x)^*\big(\nabla C (f(x))\big)
according to the chain rule. In ODL code, this corresponds to ::
f.derivative(x).adjoint(C.gradient(f(x))).
Hence, the parameter ``grad_output`` is a tensor variable containing
:math:`y = \nabla C(f(x))`. Then, ``backward`` boils down to
computing ``[f'(x)^*(y)]`` using the input ``x`` stored during
the previous `forward` pass.
]
if <ast.UnaryOp object at 0x7da1b1ea3e20> begin[:]
variable[input_arr] assign[=] call[call[call[name[self].saved_variables][constant[0]].data.cpu, parameter[]].numpy, parameter[]]
if call[name[any], parameter[<ast.GeneratorExp object at 0x7da1b1ea00d0>]] begin[:]
variable[input_arr] assign[=] call[name[input_arr].copy, parameter[]]
variable[grad] assign[=] constant[None]
<ast.Try object at 0x7da1b1ea0400>
<ast.Try object at 0x7da1b1ea0670>
variable[scaling] assign[=] binary_operation[name[dom_weight] / name[ran_weight]]
if call[name[self].needs_input_grad][constant[0]] begin[:]
variable[grad_output_arr] assign[=] call[call[name[grad_output].cpu, parameter[]].numpy, parameter[]]
if call[name[any], parameter[<ast.GeneratorExp object at 0x7da1b1ea0ca0>]] begin[:]
variable[grad_output_arr] assign[=] call[name[grad_output_arr].copy, parameter[]]
if name[self].operator.is_linear begin[:]
variable[adjoint] assign[=] name[self].operator.adjoint
variable[grad_odl] assign[=] call[name[adjoint], parameter[name[grad_output_arr]]]
if compare[name[scaling] not_equal[!=] constant[1.0]] begin[:]
<ast.AugAssign object at 0x7da1b1ea1de0>
variable[grad] assign[=] call[name[torch].from_numpy, parameter[call[name[np].array, parameter[name[grad_odl]]]]]
variable[grad] assign[=] call[name[grad].to, parameter[name[grad_output].device]]
return[name[grad]] | keyword[def] identifier[backward] ( identifier[self] , identifier[grad_output] ):
literal[string]
keyword[if] keyword[not] identifier[self] . identifier[operator] . identifier[is_linear] :
identifier[input_arr] = identifier[self] . identifier[saved_variables] [ literal[int] ]. identifier[data] . identifier[cpu] (). identifier[numpy] ()
keyword[if] identifier[any] ( identifier[s] == literal[int] keyword[for] identifier[s] keyword[in] identifier[input_arr] . identifier[strides] ):
identifier[input_arr] = identifier[input_arr] . identifier[copy] ()
identifier[grad] = keyword[None]
keyword[try] :
identifier[dom_weight] = identifier[self] . identifier[operator] . identifier[domain] . identifier[weighting] . identifier[const]
keyword[except] identifier[AttributeError] :
identifier[dom_weight] = literal[int]
keyword[try] :
identifier[ran_weight] = identifier[self] . identifier[operator] . identifier[range] . identifier[weighting] . identifier[const]
keyword[except] identifier[AttributeError] :
identifier[ran_weight] = literal[int]
identifier[scaling] = identifier[dom_weight] / identifier[ran_weight]
keyword[if] identifier[self] . identifier[needs_input_grad] [ literal[int] ]:
identifier[grad_output_arr] = identifier[grad_output] . identifier[cpu] (). identifier[numpy] ()
keyword[if] identifier[any] ( identifier[s] == literal[int] keyword[for] identifier[s] keyword[in] identifier[grad_output_arr] . identifier[strides] ):
identifier[grad_output_arr] = identifier[grad_output_arr] . identifier[copy] ()
keyword[if] identifier[self] . identifier[operator] . identifier[is_linear] :
identifier[adjoint] = identifier[self] . identifier[operator] . identifier[adjoint]
keyword[else] :
identifier[adjoint] = identifier[self] . identifier[operator] . identifier[derivative] ( identifier[input_arr] ). identifier[adjoint]
identifier[grad_odl] = identifier[adjoint] ( identifier[grad_output_arr] )
keyword[if] identifier[scaling] != literal[int] :
identifier[grad_odl] *= identifier[scaling]
identifier[grad] = identifier[torch] . identifier[from_numpy] ( identifier[np] . identifier[array] ( identifier[grad_odl] , identifier[copy] = keyword[False] , identifier[ndmin] = literal[int] ))
identifier[grad] = identifier[grad] . identifier[to] ( identifier[grad_output] . identifier[device] )
keyword[return] identifier[grad] | def backward(self, grad_output):
"""Apply the adjoint of the derivative at ``grad_output``.
This method is usually not called explicitly but as a part of the
``cost.backward()`` pass of a backpropagation step.
Parameters
----------
grad_output : `torch.tensor._TensorBase`
Tensor to which the Jacobian should be applied. See Notes
for details.
Returns
-------
result : `torch.autograd.variable.Variable`
Variable holding the result of applying the Jacobian to
``grad_output``. See Notes for details.
Examples
--------
Compute the Jacobian adjoint of the matrix operator, which is the
operator of the transposed matrix. We compose with the ``sum``
functional to be able to evaluate ``grad``:
>>> matrix = np.array([[1, 0, 1],
... [0, 1, 1]], dtype='float32')
>>> odl_op = odl.MatrixOperator(matrix)
>>> torch_op = OperatorAsAutogradFunction(odl_op)
>>> x = torch.Tensor([1, 2, 3])
>>> x_var = torch.autograd.Variable(x, requires_grad=True)
>>> op_x_var = torch_op(x_var)
>>> cost = op_x_var.sum()
>>> cost.backward()
>>> x_var.grad # should be matrix.T.dot([1, 1])
Variable containing:
1
1
2
[torch.FloatTensor of size 3]
Compute the gradient of a custom functional:
>>> odl_func = odl.solvers.L2NormSquared(odl.rn(3, dtype='float32'))
>>> torch_func = OperatorAsAutogradFunction(odl_func)
>>> x = torch.Tensor([1, 2, 3])
>>> x_var = torch.autograd.Variable(x, requires_grad=True)
>>> func_x_var = torch_func(x_var)
>>> func_x_var
Variable containing:
14
[torch.FloatTensor of size 1]
>>> func_x_var.backward()
>>> x_var.grad # Should be 2 * x
Variable containing:
2
4
6
[torch.FloatTensor of size 3]
Notes
-----
This method applies the contribution of this node, i.e., the
transpose of the Jacobian of its outputs with respect to its inputs,
to the gradients of some cost function with respect to the outputs
of this node.
Example: Assume that this node computes :math:`x \\mapsto C(f(x))`,
where :math:`x` is a tensor variable and :math:`C` is a scalar-valued
function. In ODL language, what ``backward`` should compute is
.. math::
\\nabla(C \\circ f)(x) = f'(x)^*\\big(\\nabla C (f(x))\\big)
according to the chain rule. In ODL code, this corresponds to ::
f.derivative(x).adjoint(C.gradient(f(x))).
Hence, the parameter ``grad_output`` is a tensor variable containing
:math:`y = \\nabla C(f(x))`. Then, ``backward`` boils down to
computing ``[f'(x)^*(y)]`` using the input ``x`` stored during
the previous `forward` pass.
"""
# TODO: implement directly for GPU data
if not self.operator.is_linear:
input_arr = self.saved_variables[0].data.cpu().numpy()
if any((s == 0 for s in input_arr.strides)):
# TODO: remove when Numpy issue #9165 is fixed
# https://github.com/numpy/numpy/pull/9177
input_arr = input_arr.copy() # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
grad = None
# ODL weights spaces, pytorch doesn't, so we need to handle this
try:
dom_weight = self.operator.domain.weighting.const # depends on [control=['try'], data=[]]
except AttributeError:
dom_weight = 1.0 # depends on [control=['except'], data=[]]
try:
ran_weight = self.operator.range.weighting.const # depends on [control=['try'], data=[]]
except AttributeError:
ran_weight = 1.0 # depends on [control=['except'], data=[]]
scaling = dom_weight / ran_weight
if self.needs_input_grad[0]:
grad_output_arr = grad_output.cpu().numpy()
if any((s == 0 for s in grad_output_arr.strides)):
# TODO: remove when Numpy issue #9165 is fixed
# https://github.com/numpy/numpy/pull/9177
grad_output_arr = grad_output_arr.copy() # depends on [control=['if'], data=[]]
if self.operator.is_linear:
adjoint = self.operator.adjoint # depends on [control=['if'], data=[]]
else:
adjoint = self.operator.derivative(input_arr).adjoint
grad_odl = adjoint(grad_output_arr)
if scaling != 1.0:
grad_odl *= scaling # depends on [control=['if'], data=['scaling']]
grad = torch.from_numpy(np.array(grad_odl, copy=False, ndmin=1))
grad = grad.to(grad_output.device) # depends on [control=['if'], data=[]]
return grad |
def FlipLogic(self, **unused_kwargs):
"""Flip the boolean logic of the expression.
If an expression is configured to return True when the condition
is met this logic will flip that to False, and vice versa.
"""
if hasattr(self, 'flipped') and self.flipped:
raise errors.ParseError(
'The operator \'not\' can only be expressed once.')
if self.current_expression.args:
raise errors.ParseError(
'Unable to place the keyword \'not\' after an argument.')
self.flipped = True
# Check if this flip operation should be allowed.
self.FlipAllowed()
if hasattr(self.current_expression, 'FlipBool'):
self.current_expression.FlipBool()
logging.debug('Negative matching [flipping boolean logic].')
else:
logging.warning(
'Unable to perform a negative match, issuing a positive one.') | def function[FlipLogic, parameter[self]]:
constant[Flip the boolean logic of the expression.
If an expression is configured to return True when the condition
is met this logic will flip that to False, and vice versa.
]
if <ast.BoolOp object at 0x7da18ede5ed0> begin[:]
<ast.Raise object at 0x7da18ede4ac0>
if name[self].current_expression.args begin[:]
<ast.Raise object at 0x7da18ede4190>
name[self].flipped assign[=] constant[True]
call[name[self].FlipAllowed, parameter[]]
if call[name[hasattr], parameter[name[self].current_expression, constant[FlipBool]]] begin[:]
call[name[self].current_expression.FlipBool, parameter[]]
call[name[logging].debug, parameter[constant[Negative matching [flipping boolean logic].]]] | keyword[def] identifier[FlipLogic] ( identifier[self] ,** identifier[unused_kwargs] ):
literal[string]
keyword[if] identifier[hasattr] ( identifier[self] , literal[string] ) keyword[and] identifier[self] . identifier[flipped] :
keyword[raise] identifier[errors] . identifier[ParseError] (
literal[string] )
keyword[if] identifier[self] . identifier[current_expression] . identifier[args] :
keyword[raise] identifier[errors] . identifier[ParseError] (
literal[string] )
identifier[self] . identifier[flipped] = keyword[True]
identifier[self] . identifier[FlipAllowed] ()
keyword[if] identifier[hasattr] ( identifier[self] . identifier[current_expression] , literal[string] ):
identifier[self] . identifier[current_expression] . identifier[FlipBool] ()
identifier[logging] . identifier[debug] ( literal[string] )
keyword[else] :
identifier[logging] . identifier[warning] (
literal[string] ) | def FlipLogic(self, **unused_kwargs):
"""Flip the boolean logic of the expression.
If an expression is configured to return True when the condition
is met this logic will flip that to False, and vice versa.
"""
if hasattr(self, 'flipped') and self.flipped:
raise errors.ParseError("The operator 'not' can only be expressed once.") # depends on [control=['if'], data=[]]
if self.current_expression.args:
raise errors.ParseError("Unable to place the keyword 'not' after an argument.") # depends on [control=['if'], data=[]]
self.flipped = True
# Check if this flip operation should be allowed.
self.FlipAllowed()
if hasattr(self.current_expression, 'FlipBool'):
self.current_expression.FlipBool()
logging.debug('Negative matching [flipping boolean logic].') # depends on [control=['if'], data=[]]
else:
logging.warning('Unable to perform a negative match, issuing a positive one.') |
def isatty_to_env(cls, stdin, stdout, stderr):
    """Generate nailgun tty capability environment variables based on checking a set of fds.

    :param file stdin: The stream to check for stdin tty capabilities.
    :param file stdout: The stream to check for stdout tty capabilities.
    :param file stderr: The stream to check for stderr tty capabilities.
    :returns: A dict containing the tty capability environment variables.
    """
    env = {}
    for descriptor, stream in zip(STDIO_DESCRIPTORS, (stdin, stdout, stderr)):
        # Record tty capability for every stream; record the tty device
        # path only for streams that actually are ttys.
        tty = stream.isatty()
        env[cls.TTY_ENV_TMPL.format(descriptor)] = cls.encode_env_var_value(int(tty))
        if tty:
            env[cls.TTY_PATH_ENV.format(descriptor)] = os.ttyname(stream.fileno()) or b''
    return env
constant[Generate nailgun tty capability environment variables based on checking a set of fds.
:param file stdin: The stream to check for stdin tty capabilities.
:param file stdout: The stream to check for stdout tty capabilities.
:param file stderr: The stream to check for stderr tty capabilities.
:returns: A dict containing the tty capability environment variables.
]
def function[gen_env_vars, parameter[]]:
for taget[tuple[[<ast.Name object at 0x7da1b227beb0>, <ast.Name object at 0x7da1b227a800>]]] in starred[call[name[zip], parameter[name[STDIO_DESCRIPTORS], tuple[[<ast.Name object at 0x7da1b2279f30>, <ast.Name object at 0x7da1b227a080>, <ast.Name object at 0x7da1b22799c0>]]]]] begin[:]
variable[is_atty] assign[=] call[name[fd].isatty, parameter[]]
<ast.Yield object at 0x7da1b1d6f700>
if name[is_atty] begin[:]
<ast.Yield object at 0x7da1b1d6de40>
return[call[name[dict], parameter[call[name[gen_env_vars], parameter[]]]]] | keyword[def] identifier[isatty_to_env] ( identifier[cls] , identifier[stdin] , identifier[stdout] , identifier[stderr] ):
literal[string]
keyword[def] identifier[gen_env_vars] ():
keyword[for] identifier[fd_id] , identifier[fd] keyword[in] identifier[zip] ( identifier[STDIO_DESCRIPTORS] ,( identifier[stdin] , identifier[stdout] , identifier[stderr] )):
identifier[is_atty] = identifier[fd] . identifier[isatty] ()
keyword[yield] ( identifier[cls] . identifier[TTY_ENV_TMPL] . identifier[format] ( identifier[fd_id] ), identifier[cls] . identifier[encode_env_var_value] ( identifier[int] ( identifier[is_atty] )))
keyword[if] identifier[is_atty] :
keyword[yield] ( identifier[cls] . identifier[TTY_PATH_ENV] . identifier[format] ( identifier[fd_id] ), identifier[os] . identifier[ttyname] ( identifier[fd] . identifier[fileno] ()) keyword[or] literal[string] )
keyword[return] identifier[dict] ( identifier[gen_env_vars] ()) | def isatty_to_env(cls, stdin, stdout, stderr):
"""Generate nailgun tty capability environment variables based on checking a set of fds.
:param file stdin: The stream to check for stdin tty capabilities.
:param file stdout: The stream to check for stdout tty capabilities.
:param file stderr: The stream to check for stderr tty capabilities.
:returns: A dict containing the tty capability environment variables.
"""
def gen_env_vars():
for (fd_id, fd) in zip(STDIO_DESCRIPTORS, (stdin, stdout, stderr)):
is_atty = fd.isatty()
yield (cls.TTY_ENV_TMPL.format(fd_id), cls.encode_env_var_value(int(is_atty)))
if is_atty:
yield (cls.TTY_PATH_ENV.format(fd_id), os.ttyname(fd.fileno()) or b'') # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]]
return dict(gen_env_vars()) |
def form_valid(self, redirect_to=None):
    """Save object.

    Called when form is validated.

    :param redirect_to: real url (created with url_for) to redirect to,
        instead of the view by default.
    :returns: a redirect response on success, the value returned by
        ``handle_commit_exception`` when it is not None, or the
        re-rendered view (``self.get()``) on a failed commit.
    """
    session = db.session()
    # Suspend autoflush while the form data is copied onto the object,
    # so no half-populated state reaches the database prematurely.
    with session.no_autoflush:
        self.before_populate_obj()
        self.form.populate_obj(self.obj)
        session.add(self.obj)
        self.after_populate_obj()
    try:
        # Flush first so database-level errors surface before the
        # activity is sent and the transaction is committed.
        session.flush()
        self.send_activity()
        session.commit()
    except ValidationError as e:
        # Domain validation failed: give subclasses a chance to handle
        # the error, otherwise roll back and re-render with a message.
        rv = self.handle_commit_exception(e)
        if rv is not None:
            return rv
        session.rollback()
        flash(str(e), "error")
        return self.get()
    except sa.exc.IntegrityError as e:
        # Database constraint violation; the flashed message assumes a
        # uniqueness constraint on the entity name -- NOTE(review):
        # other integrity errors would show the same message.
        rv = self.handle_commit_exception(e)
        if rv is not None:
            return rv
        session.rollback()
        logger.error(e)
        flash(_("An entity with this name already exists in the system."), "error")
        return self.get()
    else:
        # Success path: run post-commit hook, flash and redirect.
        self.commit_success()
        flash(self.message_success(), "success")
        if redirect_to:
            return redirect(redirect_to)
        else:
            return self.redirect_to_view()
constant[Save object.
Called when form is validated.
:param redirect_to: real url (created with url_for) to redirect to,
instead of the view by default.
]
variable[session] assign[=] call[name[db].session, parameter[]]
with name[session].no_autoflush begin[:]
call[name[self].before_populate_obj, parameter[]]
call[name[self].form.populate_obj, parameter[name[self].obj]]
call[name[session].add, parameter[name[self].obj]]
call[name[self].after_populate_obj, parameter[]]
<ast.Try object at 0x7da20c76e740> | keyword[def] identifier[form_valid] ( identifier[self] , identifier[redirect_to] = keyword[None] ):
literal[string]
identifier[session] = identifier[db] . identifier[session] ()
keyword[with] identifier[session] . identifier[no_autoflush] :
identifier[self] . identifier[before_populate_obj] ()
identifier[self] . identifier[form] . identifier[populate_obj] ( identifier[self] . identifier[obj] )
identifier[session] . identifier[add] ( identifier[self] . identifier[obj] )
identifier[self] . identifier[after_populate_obj] ()
keyword[try] :
identifier[session] . identifier[flush] ()
identifier[self] . identifier[send_activity] ()
identifier[session] . identifier[commit] ()
keyword[except] identifier[ValidationError] keyword[as] identifier[e] :
identifier[rv] = identifier[self] . identifier[handle_commit_exception] ( identifier[e] )
keyword[if] identifier[rv] keyword[is] keyword[not] keyword[None] :
keyword[return] identifier[rv]
identifier[session] . identifier[rollback] ()
identifier[flash] ( identifier[str] ( identifier[e] ), literal[string] )
keyword[return] identifier[self] . identifier[get] ()
keyword[except] identifier[sa] . identifier[exc] . identifier[IntegrityError] keyword[as] identifier[e] :
identifier[rv] = identifier[self] . identifier[handle_commit_exception] ( identifier[e] )
keyword[if] identifier[rv] keyword[is] keyword[not] keyword[None] :
keyword[return] identifier[rv]
identifier[session] . identifier[rollback] ()
identifier[logger] . identifier[error] ( identifier[e] )
identifier[flash] ( identifier[_] ( literal[string] ), literal[string] )
keyword[return] identifier[self] . identifier[get] ()
keyword[else] :
identifier[self] . identifier[commit_success] ()
identifier[flash] ( identifier[self] . identifier[message_success] (), literal[string] )
keyword[if] identifier[redirect_to] :
keyword[return] identifier[redirect] ( identifier[redirect_to] )
keyword[else] :
keyword[return] identifier[self] . identifier[redirect_to_view] () | def form_valid(self, redirect_to=None):
"""Save object.
Called when form is validated.
:param redirect_to: real url (created with url_for) to redirect to,
instead of the view by default.
"""
session = db.session()
with session.no_autoflush:
self.before_populate_obj()
self.form.populate_obj(self.obj)
session.add(self.obj)
self.after_populate_obj() # depends on [control=['with'], data=[]]
try:
session.flush()
self.send_activity()
session.commit() # depends on [control=['try'], data=[]]
except ValidationError as e:
rv = self.handle_commit_exception(e)
if rv is not None:
return rv # depends on [control=['if'], data=['rv']]
session.rollback()
flash(str(e), 'error')
return self.get() # depends on [control=['except'], data=['e']]
except sa.exc.IntegrityError as e:
rv = self.handle_commit_exception(e)
if rv is not None:
return rv # depends on [control=['if'], data=['rv']]
session.rollback()
logger.error(e)
flash(_('An entity with this name already exists in the system.'), 'error')
return self.get() # depends on [control=['except'], data=['e']]
else:
self.commit_success()
flash(self.message_success(), 'success')
if redirect_to:
return redirect(redirect_to) # depends on [control=['if'], data=[]]
else:
return self.redirect_to_view() |
def __regex(self, path, text, pattern, filter, annotators=None, properties=None):
    """Send a regex-related request to the CoreNLP server.

    :param (str | unicode) path: the path for the regex endpoint
    :param text: raw text for the CoreNLPServer to apply the regex to
    :param (str | unicode) pattern: regex pattern
    :param (bool) filter: option to filter sentences that contain matches,
        if false returns matches
    :param annotators: optional list of annotator names; defaults to
        ``self.default_annotators``
    :param properties: optional request properties; when omitted, a copy
        of ``self.default_properties`` is used
    :return: the decoded JSON response from the server
    :raises TimeoutException: when the server reports a timeout
    :raises AnnotationException: on any other HTTP error or when the
        server returns a non-JSON body
    """
    self.ensure_alive()
    if properties is None:
        # Copy the shared defaults before updating: the original code
        # mutated ``self.default_properties`` in place, leaking these
        # request-specific settings into every subsequent call.
        properties = dict(self.default_properties)
        properties.update({
            'annotators': ','.join(annotators or self.default_annotators),
            'inputFormat': 'text',
            'outputFormat': self.default_output_format,
            'serializer': 'edu.stanford.nlp.pipeline.ProtobufAnnotationSerializer'
        })
    elif "annotators" not in properties:
        properties.update({'annotators': ','.join(annotators or self.default_annotators)})
    # HACK: For some stupid reason, CoreNLPServer will timeout if we
    # need to annotate something from scratch. So, we need to call
    # this to ensure that the _regex call doesn't timeout.
    self.annotate(text, properties=properties)
    try:
        # Error occurs unless put properties in params
        input_format = properties.get("inputFormat", "text")
        if input_format == "text":
            ctype = "text/plain; charset=utf-8"
        elif input_format == "serialized":
            ctype = "application/x-protobuf"
        else:
            raise ValueError("Unrecognized inputFormat " + input_format)
        # change request method from `get` to `post` as required by CoreNLP
        r = requests.post(
            self.endpoint + path, params={
                'pattern': pattern,
                'filter': filter,
                'properties': str(properties)
            }, data=text,
            headers={'content-type': ctype},
            # NOTE(review): self.timeout appears to be in milliseconds
            # (requests expects seconds) -- confirm against the caller.
            timeout=(self.timeout * 2) / 1000,
        )
        r.raise_for_status()
        return json.loads(r.text)
    except requests.HTTPError:
        # raise_for_status() failed; surface server-side timeouts with a
        # distinct exception type.
        if r.text.startswith("Timeout"):
            raise TimeoutException(r.text)
        else:
            raise AnnotationException(r.text)
    except json.JSONDecodeError:
        # 2xx response whose body is not valid JSON.
        raise AnnotationException(r.text)
constant[Send a regex-related request to the CoreNLP server.
:param (str | unicode) path: the path for the regex endpoint
:param text: raw text for the CoreNLPServer to apply the regex
:param (str | unicode) pattern: regex pattern
:param (bool) filter: option to filter sentences that contain matches, if false returns matches
:param properties: option to filter sentences that contain matches, if false returns matches
:return: request result
]
call[name[self].ensure_alive, parameter[]]
if compare[name[properties] is constant[None]] begin[:]
variable[properties] assign[=] name[self].default_properties
call[name[properties].update, parameter[dictionary[[<ast.Constant object at 0x7da18f721390>, <ast.Constant object at 0x7da18f7215d0>, <ast.Constant object at 0x7da18f720f40>, <ast.Constant object at 0x7da18f7238e0>], [<ast.Call object at 0x7da18f7200d0>, <ast.Constant object at 0x7da18f7225f0>, <ast.Attribute object at 0x7da18f722170>, <ast.Constant object at 0x7da18f722110>]]]]
call[name[self].annotate, parameter[name[text]]]
<ast.Try object at 0x7da18f722950> | keyword[def] identifier[__regex] ( identifier[self] , identifier[path] , identifier[text] , identifier[pattern] , identifier[filter] , identifier[annotators] = keyword[None] , identifier[properties] = keyword[None] ):
literal[string]
identifier[self] . identifier[ensure_alive] ()
keyword[if] identifier[properties] keyword[is] keyword[None] :
identifier[properties] = identifier[self] . identifier[default_properties]
identifier[properties] . identifier[update] ({
literal[string] : literal[string] . identifier[join] ( identifier[annotators] keyword[or] identifier[self] . identifier[default_annotators] ),
literal[string] : literal[string] ,
literal[string] : identifier[self] . identifier[default_output_format] ,
literal[string] : literal[string]
})
keyword[elif] literal[string] keyword[not] keyword[in] identifier[properties] :
identifier[properties] . identifier[update] ({ literal[string] : literal[string] . identifier[join] ( identifier[annotators] keyword[or] identifier[self] . identifier[default_annotators] )})
identifier[self] . identifier[annotate] ( identifier[text] , identifier[properties] = identifier[properties] )
keyword[try] :
identifier[input_format] = identifier[properties] . identifier[get] ( literal[string] , literal[string] )
keyword[if] identifier[input_format] == literal[string] :
identifier[ctype] = literal[string]
keyword[elif] identifier[input_format] == literal[string] :
identifier[ctype] = literal[string]
keyword[else] :
keyword[raise] identifier[ValueError] ( literal[string] + identifier[input_format] )
identifier[r] = identifier[requests] . identifier[post] (
identifier[self] . identifier[endpoint] + identifier[path] , identifier[params] ={
literal[string] : identifier[pattern] ,
literal[string] : identifier[filter] ,
literal[string] : identifier[str] ( identifier[properties] )
}, identifier[data] = identifier[text] ,
identifier[headers] ={ literal[string] : identifier[ctype] },
identifier[timeout] =( identifier[self] . identifier[timeout] * literal[int] )/ literal[int] ,
)
identifier[r] . identifier[raise_for_status] ()
keyword[return] identifier[json] . identifier[loads] ( identifier[r] . identifier[text] )
keyword[except] identifier[requests] . identifier[HTTPError] keyword[as] identifier[e] :
keyword[if] identifier[r] . identifier[text] . identifier[startswith] ( literal[string] ):
keyword[raise] identifier[TimeoutException] ( identifier[r] . identifier[text] )
keyword[else] :
keyword[raise] identifier[AnnotationException] ( identifier[r] . identifier[text] )
keyword[except] identifier[json] . identifier[JSONDecodeError] :
keyword[raise] identifier[AnnotationException] ( identifier[r] . identifier[text] ) | def __regex(self, path, text, pattern, filter, annotators=None, properties=None):
"""Send a regex-related request to the CoreNLP server.
:param (str | unicode) path: the path for the regex endpoint
:param text: raw text for the CoreNLPServer to apply the regex
:param (str | unicode) pattern: regex pattern
:param (bool) filter: option to filter sentences that contain matches, if false returns matches
:param properties: option to filter sentences that contain matches, if false returns matches
:return: request result
"""
self.ensure_alive()
if properties is None:
properties = self.default_properties
properties.update({'annotators': ','.join(annotators or self.default_annotators), 'inputFormat': 'text', 'outputFormat': self.default_output_format, 'serializer': 'edu.stanford.nlp.pipeline.ProtobufAnnotationSerializer'}) # depends on [control=['if'], data=['properties']]
elif 'annotators' not in properties:
properties.update({'annotators': ','.join(annotators or self.default_annotators)}) # depends on [control=['if'], data=['properties']]
# HACK: For some stupid reason, CoreNLPServer will timeout if we
# need to annotate something from scratch. So, we need to call
# this to ensure that the _regex call doesn't timeout.
self.annotate(text, properties=properties)
try:
# Error occurs unless put properties in params
input_format = properties.get('inputFormat', 'text')
if input_format == 'text':
ctype = 'text/plain; charset=utf-8' # depends on [control=['if'], data=[]]
elif input_format == 'serialized':
ctype = 'application/x-protobuf' # depends on [control=['if'], data=[]]
else:
raise ValueError('Unrecognized inputFormat ' + input_format)
# change request method from `get` to `post` as required by CoreNLP
r = requests.post(self.endpoint + path, params={'pattern': pattern, 'filter': filter, 'properties': str(properties)}, data=text, headers={'content-type': ctype}, timeout=self.timeout * 2 / 1000)
r.raise_for_status()
return json.loads(r.text) # depends on [control=['try'], data=[]]
except requests.HTTPError as e:
if r.text.startswith('Timeout'):
raise TimeoutException(r.text) # depends on [control=['if'], data=[]]
else:
raise AnnotationException(r.text) # depends on [control=['except'], data=[]]
except json.JSONDecodeError:
raise AnnotationException(r.text) # depends on [control=['except'], data=[]] |
def expand_star(mod_name):
    """Expand something like 'unuk.tasks.*' into a list of all the modules
    there.
    The trailing '.*' is stripped, the package is imported, and every
    non-underscore-prefixed ``.py`` file in the package directory becomes
    one ``package.module`` entry (``__init__.py`` and other ``_*`` files
    are excluded by the ``[!_]`` character class).
    """
    package = mod_name[:-2]  # drop the trailing '.*'
    mod_dir = os.path.dirname(
        __import__(package, {}, {}, ['']).__file__)
    expanded = []
    # glob.glob1 is an undocumented helper (deprecated since Python 3.10);
    # glob.glob on the joined path plus basename() is the supported
    # equivalent and keeps the same hidden-file / pattern semantics.
    for path in glob.glob(os.path.join(mod_dir, "[!_]*.py")):
        expanded.append('%s.%s' % (package, os.path.basename(path)[:-3]))
    return expanded
constant[Expand something like 'unuk.tasks.*' into a list of all the modules
there.
]
variable[expanded] assign[=] list[[]]
variable[mod_dir] assign[=] call[name[os].path.dirname, parameter[call[name[__import__], parameter[call[name[mod_name]][<ast.Slice object at 0x7da18f58f970>], dictionary[[], []], dictionary[[], []], list[[<ast.Constant object at 0x7da18f58ee90>]]]].__file__]]
for taget[name[f]] in starred[call[name[glob].glob1, parameter[name[mod_dir], constant[[!_]*.py]]]] begin[:]
call[name[expanded].append, parameter[binary_operation[constant[%s.%s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Subscript object at 0x7da18f58e1a0>, <ast.Subscript object at 0x7da20e9b33d0>]]]]]
return[name[expanded]] | keyword[def] identifier[expand_star] ( identifier[mod_name] ):
literal[string]
identifier[expanded] =[]
identifier[mod_dir] = identifier[os] . identifier[path] . identifier[dirname] (
identifier[__import__] ( identifier[mod_name] [:- literal[int] ],{},{},[ literal[string] ]). identifier[__file__] )
keyword[for] identifier[f] keyword[in] identifier[glob] . identifier[glob1] ( identifier[mod_dir] , literal[string] ):
identifier[expanded] . identifier[append] ( literal[string] %( identifier[mod_name] [:- literal[int] ], identifier[f] [:- literal[int] ]))
keyword[return] identifier[expanded] | def expand_star(mod_name):
"""Expand something like 'unuk.tasks.*' into a list of all the modules
there.
"""
expanded = []
mod_dir = os.path.dirname(__import__(mod_name[:-2], {}, {}, ['']).__file__)
for f in glob.glob1(mod_dir, '[!_]*.py'):
expanded.append('%s.%s' % (mod_name[:-2], f[:-3])) # depends on [control=['for'], data=['f']]
return expanded |
def _digest_hasher(hasher, hashlen, base):
    """ counterpart to _update_hasher """
    # Full 128-char hex digest -> re-encoded in the requested (larger)
    # base to shorten it -> truncated to the requested length.
    encoded = _convert_hexstr_base(hasher.hexdigest(), base)
    return encoded[:hashlen]
constant[ counterpart to _update_hasher ]
variable[hex_text] assign[=] call[name[hasher].hexdigest, parameter[]]
variable[base_text] assign[=] call[name[_convert_hexstr_base], parameter[name[hex_text], name[base]]]
variable[text] assign[=] call[name[base_text]][<ast.Slice object at 0x7da207f024d0>]
return[name[text]] | keyword[def] identifier[_digest_hasher] ( identifier[hasher] , identifier[hashlen] , identifier[base] ):
literal[string]
identifier[hex_text] = identifier[hasher] . identifier[hexdigest] ()
identifier[base_text] = identifier[_convert_hexstr_base] ( identifier[hex_text] , identifier[base] )
identifier[text] = identifier[base_text] [: identifier[hashlen] ]
keyword[return] identifier[text] | def _digest_hasher(hasher, hashlen, base):
""" counterpart to _update_hasher """
# Get a 128 character hex string
hex_text = hasher.hexdigest()
# Shorten length of string (by increasing base)
base_text = _convert_hexstr_base(hex_text, base)
# Truncate
text = base_text[:hashlen]
return text |
def get_items(self) -> Iterator[StoryItem]:
    """Retrieve all associated highlight items."""
    # Lazily populate self._items first, then wrap each raw item dict.
    self._fetch_items()
    for raw_item in self._items:
        yield StoryItem(self._context, raw_item, self.owner_profile)
constant[Retrieve all associated highlight items.]
call[name[self]._fetch_items, parameter[]]
<ast.YieldFrom object at 0x7da18f00e590> | keyword[def] identifier[get_items] ( identifier[self] )-> identifier[Iterator] [ identifier[StoryItem] ]:
literal[string]
identifier[self] . identifier[_fetch_items] ()
keyword[yield] keyword[from] ( identifier[StoryItem] ( identifier[self] . identifier[_context] , identifier[item] , identifier[self] . identifier[owner_profile] ) keyword[for] identifier[item] keyword[in] identifier[self] . identifier[_items] ) | def get_items(self) -> Iterator[StoryItem]:
"""Retrieve all associated highlight items."""
self._fetch_items()
yield from (StoryItem(self._context, item, self.owner_profile) for item in self._items) |
def setattr(self, key, value):
    u"""Sets an attribute on a node.
    >>> xml = etree.Element('root')
    >>> Node(xml).setattr('text', 'text2')
    >>> Node(xml).getattr('text')
    'text2'
    >>> Node(xml).setattr('attr', 'val')
    >>> Node(xml).getattr('attr')
    'val'
    """
    # Everything is stored as a string; 'text' is the element body,
    # anything else becomes an XML attribute.
    rendered = str(value)
    if key != 'text':
        self._xml.set(key, rendered)
    else:
        self._xml.text = rendered
constant[Sets an attribute on a node.
>>> xml = etree.Element('root')
>>> Node(xml).setattr('text', 'text2')
>>> Node(xml).getattr('text')
'text2'
>>> Node(xml).setattr('attr', 'val')
>>> Node(xml).getattr('attr')
'val'
]
if compare[name[key] equal[==] constant[text]] begin[:]
name[self]._xml.text assign[=] call[name[str], parameter[name[value]]] | keyword[def] identifier[setattr] ( identifier[self] , identifier[key] , identifier[value] ):
literal[string]
keyword[if] identifier[key] == literal[string] :
identifier[self] . identifier[_xml] . identifier[text] = identifier[str] ( identifier[value] )
keyword[else] :
identifier[self] . identifier[_xml] . identifier[set] ( identifier[key] , identifier[str] ( identifier[value] )) | def setattr(self, key, value):
u"""Sets an attribute on a node.
>>> xml = etree.Element('root')
>>> Node(xml).setattr('text', 'text2')
>>> Node(xml).getattr('text')
'text2'
>>> Node(xml).setattr('attr', 'val')
>>> Node(xml).getattr('attr')
'val'
"""
if key == 'text':
self._xml.text = str(value) # depends on [control=['if'], data=[]]
else:
self._xml.set(key, str(value)) |
def insert_id(self, sname, skind, skinds, stype):
    """Inserts a new identifier at the end of the symbol table, if possible.
    Returns the symbol index, or raises SemanticException if a symbol with
    this name (matching one of the given kinds) already exists.
    sname - symbol name
    skind - symbol kind
    skinds - symbol kinds to check for
    stype - symbol type
    """
    index = self.lookup_symbol(sname, skinds)
    # `is None` rather than `== None`: identity is the correct (PEP 8)
    # "not found" test and cannot be fooled by an index type that
    # overrides __eq__.
    if index is None:
        return self.insert_symbol(sname, skind, stype)
    raise SemanticException("Redefinition of '%s'" % sname)
constant[Inserts a new identifier at the end of the symbol table, if possible.
Returns symbol index, or raises an exception if the symbol alredy exists
sname - symbol name
skind - symbol kind
skinds - symbol kinds to check for
stype - symbol type
]
variable[index] assign[=] call[name[self].lookup_symbol, parameter[name[sname], name[skinds]]]
if compare[name[index] equal[==] constant[None]] begin[:]
variable[index] assign[=] call[name[self].insert_symbol, parameter[name[sname], name[skind], name[stype]]]
return[name[index]] | keyword[def] identifier[insert_id] ( identifier[self] , identifier[sname] , identifier[skind] , identifier[skinds] , identifier[stype] ):
literal[string]
identifier[index] = identifier[self] . identifier[lookup_symbol] ( identifier[sname] , identifier[skinds] )
keyword[if] identifier[index] == keyword[None] :
identifier[index] = identifier[self] . identifier[insert_symbol] ( identifier[sname] , identifier[skind] , identifier[stype] )
keyword[return] identifier[index]
keyword[else] :
keyword[raise] identifier[SemanticException] ( literal[string] % identifier[sname] ) | def insert_id(self, sname, skind, skinds, stype):
"""Inserts a new identifier at the end of the symbol table, if possible.
Returns symbol index, or raises an exception if the symbol alredy exists
sname - symbol name
skind - symbol kind
skinds - symbol kinds to check for
stype - symbol type
"""
index = self.lookup_symbol(sname, skinds)
if index == None:
index = self.insert_symbol(sname, skind, stype)
return index # depends on [control=['if'], data=['index']]
else:
raise SemanticException("Redefinition of '%s'" % sname) |
def list_jobs(tail):
    """Show info about the existing crawler jobs."""
    # Newest jobs first; tail == 0 means "no limit".
    query = db.session.query(models.CrawlerJob).order_by(
        models.CrawlerJob.id.desc())
    if tail != 0:
        query = query.limit(tail)
    _show_table(results=query.yield_per(10).all())
constant[Show info about the existing crawler jobs.]
variable[query] assign[=] call[call[name[db].session.query, parameter[name[models].CrawlerJob]].order_by, parameter[call[name[models].CrawlerJob.id.desc, parameter[]]]]
if compare[name[tail] not_equal[!=] constant[0]] begin[:]
variable[query] assign[=] call[name[query].limit, parameter[name[tail]]]
variable[results] assign[=] call[call[name[query].yield_per, parameter[constant[10]]].all, parameter[]]
call[name[_show_table], parameter[]] | keyword[def] identifier[list_jobs] ( identifier[tail] ):
literal[string]
identifier[query] =(
identifier[db] . identifier[session] . identifier[query] ( identifier[models] . identifier[CrawlerJob] )
. identifier[order_by] ( identifier[models] . identifier[CrawlerJob] . identifier[id] . identifier[desc] ())
)
keyword[if] identifier[tail] != literal[int] :
identifier[query] = identifier[query] . identifier[limit] ( identifier[tail] )
identifier[results] = identifier[query] . identifier[yield_per] ( literal[int] ). identifier[all] ()
identifier[_show_table] ( identifier[results] = identifier[results] ) | def list_jobs(tail):
"""Show info about the existing crawler jobs."""
query = db.session.query(models.CrawlerJob).order_by(models.CrawlerJob.id.desc())
if tail != 0:
query = query.limit(tail) # depends on [control=['if'], data=['tail']]
results = query.yield_per(10).all()
_show_table(results=results) |
def gaussian_smear(self, r):
    """
    Applies an isotropic Gaussian smear of width (standard deviation) r to
    the potential field. This is necessary to avoid finding paths through
    narrow minima or nodes that may exist in the field (although any
    potential or charge distribution generated from GGA should be
    relatively smooth anyway). The smearing obeys periodic
    boundary conditions at the edges of the cell.
    :param r - Smearing width in cartesian coordinates, in the same units
    as the structure lattice vectors
    """
    # NOTE(review): __s appears to be a pymatgen-style Structure and __v
    # the volumetric grid being smeared -- confirm against the class.
    # Since scaling factor in fractional coords is not isotropic, have to
    # have different radii in 3 directions
    a_lat = self.__s.lattice.a
    b_lat = self.__s.lattice.b
    c_lat = self.__s.lattice.c
    # Conversion factors for discretization of v
    v_dim = self.__v.shape
    # Smearing radius expressed in fractional coordinates per axis ...
    r_frac = (r / a_lat, r / b_lat, r / c_lat)
    # ... and as a (rounded-up) number of grid points per axis.
    r_disc = (int(math.ceil(r_frac[0] * v_dim[0])),
              int(math.ceil(r_frac[1] * v_dim[1])),
              int(math.ceil(r_frac[2] * v_dim[2])))
    # Apply smearing
    # Gaussian filter
    # Kernel spans +/- 2*r_disc grid points in each direction (about
    # two standard deviations of the smear).
    gauss_dist = np.zeros(
        (r_disc[0] * 4 + 1, r_disc[1] * 4 + 1, r_disc[2] * 4 + 1))
    for g_a in np.arange(-2.0 * r_disc[0], 2.0 * r_disc[0] + 1, 1.0):
        for g_b in np.arange(-2.0 * r_disc[1], 2.0 * r_disc[1] + 1, 1.0):
            for g_c in np.arange(-2.0 * r_disc[2], 2.0 * r_disc[2] + 1,
                                 1.0):
                # Distance from the kernel center to this grid point,
                # measured in cartesian space (via the lattice matrix) and
                # in units of r, so the kernel stays isotropic even on a
                # non-orthogonal, anisotropic fractional grid.
                g = np.array(
                    [g_a / v_dim[0], g_b / v_dim[1], g_c / v_dim[2]]).T
                gauss_dist[int(g_a + r_disc[0])][int(g_b + r_disc[1])][
                    int(g_c + r_disc[2])] = la.norm(
                    np.dot(self.__s.lattice.matrix, g)) / r
    # Evaluate the normal PDF at those distances and normalize so the
    # kernel sums to 1 (preserves the integral of the field).
    gauss = scipy.stats.norm.pdf(gauss_dist)
    gauss = gauss / np.sum(gauss, dtype=float)
    # Pad with periodic images (mode='wrap') so the convolution obeys
    # periodic boundary conditions; mode='valid' then crops the result
    # back to the original grid size.
    padded_v = np.pad(self.__v, (
        (r_disc[0], r_disc[0]), (r_disc[1], r_disc[1]), (r_disc[2], r_disc[2])),
        mode='wrap')
    smeared_v = scipy.signal.convolve(padded_v, gauss, mode='valid')
    self.__v = smeared_v
self.__v = smeared_v | def function[gaussian_smear, parameter[self, r]]:
constant[
Applies an isotropic Gaussian smear of width (standard deviation) r to
the potential field. This is necessary to avoid finding paths through
narrow minima or nodes that may exist in the field (although any
potential or charge distribution generated from GGA should be
relatively smooth anyway). The smearing obeys periodic
boundary conditions at the edges of the cell.
:param r - Smearing width in cartesian coordinates, in the same units
as the structure lattice vectors
]
variable[a_lat] assign[=] name[self].__s.lattice.a
variable[b_lat] assign[=] name[self].__s.lattice.b
variable[c_lat] assign[=] name[self].__s.lattice.c
variable[v_dim] assign[=] name[self].__v.shape
variable[r_frac] assign[=] tuple[[<ast.BinOp object at 0x7da18eb54ee0>, <ast.BinOp object at 0x7da18eb55f90>, <ast.BinOp object at 0x7da18eb578b0>]]
variable[r_disc] assign[=] tuple[[<ast.Call object at 0x7da18eb553f0>, <ast.Call object at 0x7da18eb57f70>, <ast.Call object at 0x7da18eb56590>]]
variable[gauss_dist] assign[=] call[name[np].zeros, parameter[tuple[[<ast.BinOp object at 0x7da18eb56680>, <ast.BinOp object at 0x7da18eb541c0>, <ast.BinOp object at 0x7da18eb569b0>]]]]
for taget[name[g_a]] in starred[call[name[np].arange, parameter[binary_operation[<ast.UnaryOp object at 0x7da18eb56560> * call[name[r_disc]][constant[0]]], binary_operation[binary_operation[constant[2.0] * call[name[r_disc]][constant[0]]] + constant[1]], constant[1.0]]]] begin[:]
for taget[name[g_b]] in starred[call[name[np].arange, parameter[binary_operation[<ast.UnaryOp object at 0x7da18eb54e80> * call[name[r_disc]][constant[1]]], binary_operation[binary_operation[constant[2.0] * call[name[r_disc]][constant[1]]] + constant[1]], constant[1.0]]]] begin[:]
for taget[name[g_c]] in starred[call[name[np].arange, parameter[binary_operation[<ast.UnaryOp object at 0x7da18eb56c80> * call[name[r_disc]][constant[2]]], binary_operation[binary_operation[constant[2.0] * call[name[r_disc]][constant[2]]] + constant[1]], constant[1.0]]]] begin[:]
variable[g] assign[=] call[name[np].array, parameter[list[[<ast.BinOp object at 0x7da18eb54a90>, <ast.BinOp object at 0x7da18eb55ab0>, <ast.BinOp object at 0x7da18eb56ce0>]]]].T
call[call[call[name[gauss_dist]][call[name[int], parameter[binary_operation[name[g_a] + call[name[r_disc]][constant[0]]]]]]][call[name[int], parameter[binary_operation[name[g_b] + call[name[r_disc]][constant[1]]]]]]][call[name[int], parameter[binary_operation[name[g_c] + call[name[r_disc]][constant[2]]]]]] assign[=] binary_operation[call[name[la].norm, parameter[call[name[np].dot, parameter[name[self].__s.lattice.matrix, name[g]]]]] / name[r]]
variable[gauss] assign[=] call[name[scipy].stats.norm.pdf, parameter[name[gauss_dist]]]
variable[gauss] assign[=] binary_operation[name[gauss] / call[name[np].sum, parameter[name[gauss]]]]
variable[padded_v] assign[=] call[name[np].pad, parameter[name[self].__v, tuple[[<ast.Tuple object at 0x7da18eb57850>, <ast.Tuple object at 0x7da18eb560e0>, <ast.Tuple object at 0x7da18eb571f0>]]]]
variable[smeared_v] assign[=] call[name[scipy].signal.convolve, parameter[name[padded_v], name[gauss]]]
name[self].__v assign[=] name[smeared_v] | keyword[def] identifier[gaussian_smear] ( identifier[self] , identifier[r] ):
literal[string]
identifier[a_lat] = identifier[self] . identifier[__s] . identifier[lattice] . identifier[a]
identifier[b_lat] = identifier[self] . identifier[__s] . identifier[lattice] . identifier[b]
identifier[c_lat] = identifier[self] . identifier[__s] . identifier[lattice] . identifier[c]
identifier[v_dim] = identifier[self] . identifier[__v] . identifier[shape]
identifier[r_frac] =( identifier[r] / identifier[a_lat] , identifier[r] / identifier[b_lat] , identifier[r] / identifier[c_lat] )
identifier[r_disc] =( identifier[int] ( identifier[math] . identifier[ceil] ( identifier[r_frac] [ literal[int] ]* identifier[v_dim] [ literal[int] ])),
identifier[int] ( identifier[math] . identifier[ceil] ( identifier[r_frac] [ literal[int] ]* identifier[v_dim] [ literal[int] ])),
identifier[int] ( identifier[math] . identifier[ceil] ( identifier[r_frac] [ literal[int] ]* identifier[v_dim] [ literal[int] ])))
identifier[gauss_dist] = identifier[np] . identifier[zeros] (
( identifier[r_disc] [ literal[int] ]* literal[int] + literal[int] , identifier[r_disc] [ literal[int] ]* literal[int] + literal[int] , identifier[r_disc] [ literal[int] ]* literal[int] + literal[int] ))
keyword[for] identifier[g_a] keyword[in] identifier[np] . identifier[arange] (- literal[int] * identifier[r_disc] [ literal[int] ], literal[int] * identifier[r_disc] [ literal[int] ]+ literal[int] , literal[int] ):
keyword[for] identifier[g_b] keyword[in] identifier[np] . identifier[arange] (- literal[int] * identifier[r_disc] [ literal[int] ], literal[int] * identifier[r_disc] [ literal[int] ]+ literal[int] , literal[int] ):
keyword[for] identifier[g_c] keyword[in] identifier[np] . identifier[arange] (- literal[int] * identifier[r_disc] [ literal[int] ], literal[int] * identifier[r_disc] [ literal[int] ]+ literal[int] ,
literal[int] ):
identifier[g] = identifier[np] . identifier[array] (
[ identifier[g_a] / identifier[v_dim] [ literal[int] ], identifier[g_b] / identifier[v_dim] [ literal[int] ], identifier[g_c] / identifier[v_dim] [ literal[int] ]]). identifier[T]
identifier[gauss_dist] [ identifier[int] ( identifier[g_a] + identifier[r_disc] [ literal[int] ])][ identifier[int] ( identifier[g_b] + identifier[r_disc] [ literal[int] ])][
identifier[int] ( identifier[g_c] + identifier[r_disc] [ literal[int] ])]= identifier[la] . identifier[norm] (
identifier[np] . identifier[dot] ( identifier[self] . identifier[__s] . identifier[lattice] . identifier[matrix] , identifier[g] ))/ identifier[r]
identifier[gauss] = identifier[scipy] . identifier[stats] . identifier[norm] . identifier[pdf] ( identifier[gauss_dist] )
identifier[gauss] = identifier[gauss] / identifier[np] . identifier[sum] ( identifier[gauss] , identifier[dtype] = identifier[float] )
identifier[padded_v] = identifier[np] . identifier[pad] ( identifier[self] . identifier[__v] ,(
( identifier[r_disc] [ literal[int] ], identifier[r_disc] [ literal[int] ]),( identifier[r_disc] [ literal[int] ], identifier[r_disc] [ literal[int] ]),( identifier[r_disc] [ literal[int] ], identifier[r_disc] [ literal[int] ])),
identifier[mode] = literal[string] )
identifier[smeared_v] = identifier[scipy] . identifier[signal] . identifier[convolve] ( identifier[padded_v] , identifier[gauss] , identifier[mode] = literal[string] )
identifier[self] . identifier[__v] = identifier[smeared_v] | def gaussian_smear(self, r):
"""
Applies an isotropic Gaussian smear of width (standard deviation) r to
the potential field. This is necessary to avoid finding paths through
narrow minima or nodes that may exist in the field (although any
potential or charge distribution generated from GGA should be
relatively smooth anyway). The smearing obeys periodic
boundary conditions at the edges of the cell.
:param r - Smearing width in cartesian coordinates, in the same units
as the structure lattice vectors
"""
# Since scaling factor in fractional coords is not isotropic, have to
# have different radii in 3 directions
a_lat = self.__s.lattice.a
b_lat = self.__s.lattice.b
c_lat = self.__s.lattice.c
# Conversion factors for discretization of v
v_dim = self.__v.shape
r_frac = (r / a_lat, r / b_lat, r / c_lat)
r_disc = (int(math.ceil(r_frac[0] * v_dim[0])), int(math.ceil(r_frac[1] * v_dim[1])), int(math.ceil(r_frac[2] * v_dim[2])))
# Apply smearing
# Gaussian filter
gauss_dist = np.zeros((r_disc[0] * 4 + 1, r_disc[1] * 4 + 1, r_disc[2] * 4 + 1))
for g_a in np.arange(-2.0 * r_disc[0], 2.0 * r_disc[0] + 1, 1.0):
for g_b in np.arange(-2.0 * r_disc[1], 2.0 * r_disc[1] + 1, 1.0):
for g_c in np.arange(-2.0 * r_disc[2], 2.0 * r_disc[2] + 1, 1.0):
g = np.array([g_a / v_dim[0], g_b / v_dim[1], g_c / v_dim[2]]).T
gauss_dist[int(g_a + r_disc[0])][int(g_b + r_disc[1])][int(g_c + r_disc[2])] = la.norm(np.dot(self.__s.lattice.matrix, g)) / r # depends on [control=['for'], data=['g_c']] # depends on [control=['for'], data=['g_b']] # depends on [control=['for'], data=['g_a']]
gauss = scipy.stats.norm.pdf(gauss_dist)
gauss = gauss / np.sum(gauss, dtype=float)
padded_v = np.pad(self.__v, ((r_disc[0], r_disc[0]), (r_disc[1], r_disc[1]), (r_disc[2], r_disc[2])), mode='wrap')
smeared_v = scipy.signal.convolve(padded_v, gauss, mode='valid')
self.__v = smeared_v |
def list(self, limit=None, marker=None, end_marker=None, prefix=None):
    """
    List the containers in this account, using the parameters to control
    the pagination of containers, since by default only the first 10,000
    containers are returned.
    """
    # Pure delegation: all pagination options pass straight through to
    # the manager.
    paging = {
        "limit": limit,
        "marker": marker,
        "end_marker": end_marker,
        "prefix": prefix,
    }
    return self._manager.list(**paging)
constant[
List the containers in this account, using the parameters to control
the pagination of containers, since by default only the first 10,000
containers are returned.
]
return[call[name[self]._manager.list, parameter[]]] | keyword[def] identifier[list] ( identifier[self] , identifier[limit] = keyword[None] , identifier[marker] = keyword[None] , identifier[end_marker] = keyword[None] , identifier[prefix] = keyword[None] ):
literal[string]
keyword[return] identifier[self] . identifier[_manager] . identifier[list] ( identifier[limit] = identifier[limit] , identifier[marker] = identifier[marker] ,
identifier[end_marker] = identifier[end_marker] , identifier[prefix] = identifier[prefix] ) | def list(self, limit=None, marker=None, end_marker=None, prefix=None):
"""
List the containers in this account, using the parameters to control
the pagination of containers, since by default only the first 10,000
containers are returned.
"""
return self._manager.list(limit=limit, marker=marker, end_marker=end_marker, prefix=prefix) |
def start_loop(self, datapath):
    """start QUERY thread."""
    # Remember the datapath queries will be sent on.
    self._datapath = datapath
    # Spawn the background thread that sends queries (see _send_query).
    # NOTE(review): hub is presumably ryu.lib.hub (green threads) -- confirm.
    self._querier_thread = hub.spawn(self._send_query)
    self.logger.info("started a querier.")
constant[start QUERY thread.]
name[self]._datapath assign[=] name[datapath]
name[self]._querier_thread assign[=] call[name[hub].spawn, parameter[name[self]._send_query]]
call[name[self].logger.info, parameter[constant[started a querier.]]] | keyword[def] identifier[start_loop] ( identifier[self] , identifier[datapath] ):
literal[string]
identifier[self] . identifier[_datapath] = identifier[datapath]
identifier[self] . identifier[_querier_thread] = identifier[hub] . identifier[spawn] ( identifier[self] . identifier[_send_query] )
identifier[self] . identifier[logger] . identifier[info] ( literal[string] ) | def start_loop(self, datapath):
"""start QUERY thread."""
self._datapath = datapath
self._querier_thread = hub.spawn(self._send_query)
self.logger.info('started a querier.') |
def Shacham_1980(Re, eD):
    r'''Calculates Darcy friction factor using the method in Shacham (1980) [2]_
    as shown in [1]_.
    .. math::
        \frac{1}{\sqrt{f_f}} = -4\log\left[\frac{\epsilon}{3.7D} -
        \frac{5.02}{Re} \log\left(\frac{\epsilon}{3.7D}
        + \frac{14.5}{Re}\right)\right]
    Parameters
    ----------
    Re : float
        Reynolds number, [-]
    eD : float
        Relative roughness, [-]
    Returns
    -------
    fd : float
        Darcy friction factor [-]
    Notes
    -----
    Range is 4E3 <= Re <= 4E8
    Examples
    --------
    >>> Shacham_1980(1E5, 1E-4)
    0.01860641215097828
    References
    ----------
    .. [1] Winning, H. and T. Coole. "Explicit Friction Factor Accuracy and
       Computational Efficiency for Turbulent Flow in Pipes." Flow, Turbulence
       and Combustion 90, no. 1 (January 1, 2013): 1-27.
       doi:10.1007/s10494-012-9419-7
    .. [2] Shacham, M. "Comments on: 'An Explicit Equation for Friction
       Factor in Pipe.'" Industrial & Engineering Chemistry Fundamentals 19,
       no. 2 (May 1, 1980): 228-228. doi:10.1021/i160074a019.
    '''
    # Inner logarithm argument, then the explicit Fanning friction factor.
    inner = eD/3.7 + 14.5/Re
    fanning = (-4*log10(eD/3.7 - 5.02/Re*log10(inner)))**-2
    # The correlation gives the Fanning factor; Darcy is four times it.
    return 4*fanning
constant[Calculates Darcy friction factor using the method in Shacham (1980) [2]_
as shown in [1]_.
.. math::
\frac{1}{\sqrt{f_f}} = -4\log\left[\frac{\epsilon}{3.7D} -
\frac{5.02}{Re} \log\left(\frac{\epsilon}{3.7D}
+ \frac{14.5}{Re}\right)\right]
Parameters
----------
Re : float
Reynolds number, [-]
eD : float
Relative roughness, [-]
Returns
-------
fd : float
Darcy friction factor [-]
Notes
-----
Range is 4E3 <= Re <= 4E8
Examples
--------
>>> Shacham_1980(1E5, 1E-4)
0.01860641215097828
References
----------
.. [1] Winning, H. and T. Coole. "Explicit Friction Factor Accuracy and
Computational Efficiency for Turbulent Flow in Pipes." Flow, Turbulence
and Combustion 90, no. 1 (January 1, 2013): 1-27.
doi:10.1007/s10494-012-9419-7
.. [2] Shacham, M. "Comments on: 'An Explicit Equation for Friction
Factor in Pipe.'" Industrial & Engineering Chemistry Fundamentals 19,
no. 2 (May 1, 1980): 228-228. doi:10.1021/i160074a019.
]
variable[ff] assign[=] binary_operation[binary_operation[<ast.UnaryOp object at 0x7da1b12ba440> * call[name[log10], parameter[binary_operation[binary_operation[name[eD] / constant[3.7]] - binary_operation[binary_operation[constant[5.02] / name[Re]] * call[name[log10], parameter[binary_operation[binary_operation[name[eD] / constant[3.7]] + binary_operation[constant[14.5] / name[Re]]]]]]]]]] ** <ast.UnaryOp object at 0x7da1b12bb490>]
return[binary_operation[constant[4] * name[ff]]] | keyword[def] identifier[Shacham_1980] ( identifier[Re] , identifier[eD] ):
literal[string]
identifier[ff] =(- literal[int] * identifier[log10] ( identifier[eD] / literal[int] - literal[int] / identifier[Re] * identifier[log10] ( identifier[eD] / literal[int] + literal[int] / identifier[Re] )))**- literal[int]
keyword[return] literal[int] * identifier[ff] | def Shacham_1980(Re, eD):
"""Calculates Darcy friction factor using the method in Shacham (1980) [2]_
as shown in [1]_.
.. math::
\\frac{1}{\\sqrt{f_f}} = -4\\log\\left[\\frac{\\epsilon}{3.7D} -
\\frac{5.02}{Re} \\log\\left(\\frac{\\epsilon}{3.7D}
+ \\frac{14.5}{Re}\\right)\\right]
Parameters
----------
Re : float
Reynolds number, [-]
eD : float
Relative roughness, [-]
Returns
-------
fd : float
Darcy friction factor [-]
Notes
-----
Range is 4E3 <= Re <= 4E8
Examples
--------
>>> Shacham_1980(1E5, 1E-4)
0.01860641215097828
References
----------
.. [1] Winning, H. and T. Coole. "Explicit Friction Factor Accuracy and
Computational Efficiency for Turbulent Flow in Pipes." Flow, Turbulence
and Combustion 90, no. 1 (January 1, 2013): 1-27.
doi:10.1007/s10494-012-9419-7
.. [2] Shacham, M. "Comments on: 'An Explicit Equation for Friction
Factor in Pipe.'" Industrial & Engineering Chemistry Fundamentals 19,
no. 2 (May 1, 1980): 228-228. doi:10.1021/i160074a019.
"""
ff = (-4 * log10(eD / 3.7 - 5.02 / Re * log10(eD / 3.7 + 14.5 / Re))) ** (-2)
return 4 * ff |
def sigterm_handler(signum, stack_frame):
    """
    Just tell the server to exit.
    WARNING: There are race conditions, for example with TimeoutSocket.accept.
    We don't care: the user can just rekill the process after like 1 sec. if
    the first kill did not work.
    """
    # pylint: disable-msg=W0613
    global _KILLED
    # Run each registered command's shutdown hook, if it defines one.
    # NOTE(review): dict.iteritems() exists only on Python 2; on Python 3
    # this raises AttributeError -- would need .items() there.
    for name, cmd in _COMMANDS.iteritems():
        if cmd.at_stop:
            LOG.info("at_stop: %r", name)
            cmd.at_stop()
    # Flag checked by the serving loop(s) so they stop accepting work.
    _KILLED = True
    if _HTTP_SERVER:
        # Stop the HTTP server and release its listening socket.
        _HTTP_SERVER.kill()
        _HTTP_SERVER.server_close()
constant[
Just tell the server to exit.
WARNING: There are race conditions, for example with TimeoutSocket.accept.
We don't care: the user can just rekill the process after like 1 sec. if
the first kill did not work.
]
<ast.Global object at 0x7da2049629b0>
for taget[tuple[[<ast.Name object at 0x7da2049619f0>, <ast.Name object at 0x7da204960d30>]]] in starred[call[name[_COMMANDS].iteritems, parameter[]]] begin[:]
if name[cmd].at_stop begin[:]
call[name[LOG].info, parameter[constant[at_stop: %r], name[name]]]
call[name[cmd].at_stop, parameter[]]
variable[_KILLED] assign[=] constant[True]
if name[_HTTP_SERVER] begin[:]
call[name[_HTTP_SERVER].kill, parameter[]]
call[name[_HTTP_SERVER].server_close, parameter[]] | keyword[def] identifier[sigterm_handler] ( identifier[signum] , identifier[stack_frame] ):
literal[string]
keyword[global] identifier[_KILLED]
keyword[for] identifier[name] , identifier[cmd] keyword[in] identifier[_COMMANDS] . identifier[iteritems] ():
keyword[if] identifier[cmd] . identifier[at_stop] :
identifier[LOG] . identifier[info] ( literal[string] , identifier[name] )
identifier[cmd] . identifier[at_stop] ()
identifier[_KILLED] = keyword[True]
keyword[if] identifier[_HTTP_SERVER] :
identifier[_HTTP_SERVER] . identifier[kill] ()
identifier[_HTTP_SERVER] . identifier[server_close] () | def sigterm_handler(signum, stack_frame):
"""
Just tell the server to exit.
WARNING: There are race conditions, for example with TimeoutSocket.accept.
We don't care: the user can just rekill the process after like 1 sec. if
the first kill did not work.
"""
# pylint: disable-msg=W0613
global _KILLED
for (name, cmd) in _COMMANDS.iteritems():
if cmd.at_stop:
LOG.info('at_stop: %r', name)
cmd.at_stop() # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]]
_KILLED = True
if _HTTP_SERVER:
_HTTP_SERVER.kill()
_HTTP_SERVER.server_close() # depends on [control=['if'], data=[]] |
def terms(self):
    """Initialization terms and options for Property"""
    # Bundle everything needed to reconstruct this property declaratively.
    return PropertyTerms(
        self.name,
        self.__class__,
        self._args,
        self._kwargs,
        self.meta,
    )
constant[Initialization terms and options for Property]
variable[terms] assign[=] call[name[PropertyTerms], parameter[name[self].name, name[self].__class__, name[self]._args, name[self]._kwargs, name[self].meta]]
return[name[terms]] | keyword[def] identifier[terms] ( identifier[self] ):
literal[string]
identifier[terms] = identifier[PropertyTerms] (
identifier[self] . identifier[name] ,
identifier[self] . identifier[__class__] ,
identifier[self] . identifier[_args] ,
identifier[self] . identifier[_kwargs] ,
identifier[self] . identifier[meta]
)
keyword[return] identifier[terms] | def terms(self):
"""Initialization terms and options for Property"""
terms = PropertyTerms(self.name, self.__class__, self._args, self._kwargs, self.meta)
return terms |
def client(self, service_name, version, component, **kw):
    """Build a ServiceClient wrapping a discovered GCP service API.
    Args:
        service_name (str): The GCP service to build (e.g. 'compute').
        version (str): The GCP service version for the API.
        component (str): The API component handed to the ServiceClient.
        **kw: Optional 'developer_key' and 'cache_discovery' settings
            forwarded to the discovery call.
    Returns:
        ServiceClient: The client for the requested service/component.
    """
    developer_key = kw.get('developer_key')
    cache_discovery = kw.get('cache_discovery', False)
    http = self._http or _build_http()
    gcp_service = _create_service_api(
        self._credentials,
        service_name,
        version,
        developer_key,
        cache_discovery,
        http)
    return ServiceClient(
        gcp_service=gcp_service,
        component=component,
        credentials=self._credentials,
        rate_limiter=self._rate_limiter,
        use_cached_http=self._use_cached_http,
        http=self._http)
constant[Safely initialize a repository class to a property.
Args:
repository_class (class): The class to initialize.
version (str): The gcp service version for the repository.
Returns:
object: An instance of repository_class.
]
variable[service] assign[=] call[name[_create_service_api], parameter[name[self]._credentials, name[service_name], name[version], call[name[kw].get, parameter[constant[developer_key]]], call[name[kw].get, parameter[constant[cache_discovery], constant[False]]], <ast.BoolOp object at 0x7da18c4cd690>]]
return[call[name[ServiceClient], parameter[]]] | keyword[def] identifier[client] ( identifier[self] , identifier[service_name] , identifier[version] , identifier[component] ,** identifier[kw] ):
literal[string]
identifier[service] = identifier[_create_service_api] (
identifier[self] . identifier[_credentials] ,
identifier[service_name] ,
identifier[version] ,
identifier[kw] . identifier[get] ( literal[string] ),
identifier[kw] . identifier[get] ( literal[string] , keyword[False] ),
identifier[self] . identifier[_http] keyword[or] identifier[_build_http] ())
keyword[return] identifier[ServiceClient] (
identifier[gcp_service] = identifier[service] ,
identifier[component] = identifier[component] ,
identifier[credentials] = identifier[self] . identifier[_credentials] ,
identifier[rate_limiter] = identifier[self] . identifier[_rate_limiter] ,
identifier[use_cached_http] = identifier[self] . identifier[_use_cached_http] ,
identifier[http] = identifier[self] . identifier[_http] ) | def client(self, service_name, version, component, **kw):
"""Safely initialize a repository class to a property.
Args:
repository_class (class): The class to initialize.
version (str): The gcp service version for the repository.
Returns:
object: An instance of repository_class.
"""
service = _create_service_api(self._credentials, service_name, version, kw.get('developer_key'), kw.get('cache_discovery', False), self._http or _build_http())
return ServiceClient(gcp_service=service, component=component, credentials=self._credentials, rate_limiter=self._rate_limiter, use_cached_http=self._use_cached_http, http=self._http) |
def get_description(self, lang=None):
    """Fetch the description metadata entry of the object.

    :param lang: Language in which the description should be retrieved
    :return: Description of the object
    :rtype: Literal
    """
    # Resolve the CTS description predicate once, then delegate the
    # language-aware lookup to the metadata collection.
    description_predicate = RDF_NAMESPACES.CTS.description
    return self.metadata.get_single(key=description_predicate, lang=lang)
constant[ Get the DC description of the object
:param lang: Lang to retrieve
:return: Description string representation
:rtype: Literal
]
return[call[name[self].metadata.get_single, parameter[]]] | keyword[def] identifier[get_description] ( identifier[self] , identifier[lang] = keyword[None] ):
literal[string]
keyword[return] identifier[self] . identifier[metadata] . identifier[get_single] ( identifier[key] = identifier[RDF_NAMESPACES] . identifier[CTS] . identifier[description] , identifier[lang] = identifier[lang] ) | def get_description(self, lang=None):
""" Get the DC description of the object
:param lang: Lang to retrieve
:return: Description string representation
:rtype: Literal
"""
return self.metadata.get_single(key=RDF_NAMESPACES.CTS.description, lang=lang) |
def get_me(self):
    """Return the authenticated user as a LoggedInRedditor object.

    Note: This function is only intended to be used with an 'identity'
    providing OAuth2 grant.
    """
    data = self.request_json(self.config['me'])
    redditor = objects.Redditor(self, data['name'], data)
    # Upgrade the plain Redditor to the logged-in variant in place so the
    # instance gains the LoggedInRedditor methods without re-fetching.
    redditor.__class__ = objects.LoggedInRedditor
    return redditor
constant[Return a LoggedInRedditor object.
Note: This function is only intended to be used with an 'identity'
providing OAuth2 grant.
]
variable[response] assign[=] call[name[self].request_json, parameter[call[name[self].config][constant[me]]]]
variable[user] assign[=] call[name[objects].Redditor, parameter[name[self], call[name[response]][constant[name]], name[response]]]
name[user].__class__ assign[=] name[objects].LoggedInRedditor
return[name[user]] | keyword[def] identifier[get_me] ( identifier[self] ):
literal[string]
identifier[response] = identifier[self] . identifier[request_json] ( identifier[self] . identifier[config] [ literal[string] ])
identifier[user] = identifier[objects] . identifier[Redditor] ( identifier[self] , identifier[response] [ literal[string] ], identifier[response] )
identifier[user] . identifier[__class__] = identifier[objects] . identifier[LoggedInRedditor]
keyword[return] identifier[user] | def get_me(self):
"""Return a LoggedInRedditor object.
Note: This function is only intended to be used with an 'identity'
providing OAuth2 grant.
"""
response = self.request_json(self.config['me'])
user = objects.Redditor(self, response['name'], response)
user.__class__ = objects.LoggedInRedditor
return user |
def prepare_c3(data: Union[List[Tuple[str, int]], Mapping[str, int]],
               y_axis_label: str = 'y',
               x_axis_label: str = 'x',
               ) -> str:
    """Serialize bar-chart data into the two-row C3 JSON column format.

    :param data: Either ``(label, count)`` pairs, or a mapping of label to
        count, to display as a bar chart
    :param y_axis_label: The Y axis label
    :param x_axis_label: X axis internal label. Should be left as default 'x')
    :return: A JSON string of two C3 data columns (labels row, values row)
    """
    if isinstance(data, list):
        pairs = data
    else:
        # Mappings are ordered by descending count so the tallest bar is first.
        pairs = sorted(data.items(), key=itemgetter(1), reverse=True)

    if pairs:
        labels, values = zip(*pairs)
    else:
        log.info(f'no values found for {x_axis_label}, {y_axis_label}')
        labels, values = [], []

    return json.dumps([
        [x_axis_label, *labels],
        [y_axis_label, *values],
    ])
]) | def function[prepare_c3, parameter[data, y_axis_label, x_axis_label]]:
constant[Prepares C3 JSON for making a bar chart from a Counter
:param data: A dictionary of {str: int} to display as bar chart
:param y_axis_label: The Y axis label
:param x_axis_label: X axis internal label. Should be left as default 'x')
:return: A JSON dictionary for making a C3 bar chart
]
if <ast.UnaryOp object at 0x7da20c7961d0> begin[:]
variable[data] assign[=] call[name[sorted], parameter[call[name[data].items, parameter[]]]]
<ast.Try object at 0x7da20c795e70>
return[call[name[json].dumps, parameter[list[[<ast.BinOp object at 0x7da1aff766b0>, <ast.BinOp object at 0x7da1aff76ef0>]]]]] | keyword[def] identifier[prepare_c3] ( identifier[data] : identifier[Union] [ identifier[List] [ identifier[Tuple] [ identifier[str] , identifier[int] ]], identifier[Mapping] [ identifier[str] , identifier[int] ]],
identifier[y_axis_label] : identifier[str] = literal[string] ,
identifier[x_axis_label] : identifier[str] = literal[string] ,
)-> identifier[str] :
literal[string]
keyword[if] keyword[not] identifier[isinstance] ( identifier[data] , identifier[list] ):
identifier[data] = identifier[sorted] ( identifier[data] . identifier[items] (), identifier[key] = identifier[itemgetter] ( literal[int] ), identifier[reverse] = keyword[True] )
keyword[try] :
identifier[labels] , identifier[values] = identifier[zip] (* identifier[data] )
keyword[except] identifier[ValueError] :
identifier[log] . identifier[info] ( literal[string] )
identifier[labels] , identifier[values] =[],[]
keyword[return] identifier[json] . identifier[dumps] ([
[ identifier[x_axis_label] ]+ identifier[list] ( identifier[labels] ),
[ identifier[y_axis_label] ]+ identifier[list] ( identifier[values] ),
]) | def prepare_c3(data: Union[List[Tuple[str, int]], Mapping[str, int]], y_axis_label: str='y', x_axis_label: str='x') -> str:
"""Prepares C3 JSON for making a bar chart from a Counter
:param data: A dictionary of {str: int} to display as bar chart
:param y_axis_label: The Y axis label
:param x_axis_label: X axis internal label. Should be left as default 'x')
:return: A JSON dictionary for making a C3 bar chart
"""
if not isinstance(data, list):
data = sorted(data.items(), key=itemgetter(1), reverse=True) # depends on [control=['if'], data=[]]
try:
(labels, values) = zip(*data) # depends on [control=['try'], data=[]]
except ValueError:
log.info(f'no values found for {x_axis_label}, {y_axis_label}')
(labels, values) = ([], []) # depends on [control=['except'], data=[]]
return json.dumps([[x_axis_label] + list(labels), [y_axis_label] + list(values)]) |
def getParamLabels(self):
    """Return the flattened model parameter labels chosen for this model.

    Parameters
    ----------
    retval: a dictionary of model parameter labels. For each entry
            the key is the name of the parameter and the value is the
            value chosen for it (nested groups become nested dicts).
            Returns None when the unwrapped params carry no
            "particleState" entry.
    """
    params = self.__unwrapParams()
    # Hypersearch v2 stores the flattened parameter settings in "particleState"
    if "particleState" not in params:
        return None
    labels = dict()
    # Depth-first work stack of (item, destination-dict) pairs.
    pending = [(item, labels)
               for item in params["particleState"]["varStates"].iteritems()]
    while pending:
        (name, state), target = pending.pop()
        if ("position" in state and "bestPosition" in state and
                "velocity" in state):
            # Leaf var state: record the particle's current position.
            target[name] = state["position"]
        else:
            # Nested group: descend into it via the work stack.
            if name not in target:
                target[name] = dict()
            pending.extend((item, target[name]) for item in state.iteritems())
    return labels
constant[
Parameters:
----------------------------------------------------------------------
retval: a dictionary of model parameter labels. For each entry
the key is the name of the parameter and the value
is the value chosen for it.
]
variable[params] assign[=] call[name[self].__unwrapParams, parameter[]]
if compare[constant[particleState] in name[params]] begin[:]
variable[retval] assign[=] call[name[dict], parameter[]]
variable[queue] assign[=] <ast.ListComp object at 0x7da18bc72e60>
while compare[call[name[len], parameter[name[queue]]] greater[>] constant[0]] begin[:]
<ast.Tuple object at 0x7da18bc71480> assign[=] call[name[queue].pop, parameter[]]
<ast.Tuple object at 0x7da18bc73880> assign[=] name[pair]
if <ast.BoolOp object at 0x7da18bc72da0> begin[:]
call[name[output]][name[k]] assign[=] call[name[v]][constant[position]]
return[name[retval]] | keyword[def] identifier[getParamLabels] ( identifier[self] ):
literal[string]
identifier[params] = identifier[self] . identifier[__unwrapParams] ()
keyword[if] literal[string] keyword[in] identifier[params] :
identifier[retval] = identifier[dict] ()
identifier[queue] =[( identifier[pair] , identifier[retval] ) keyword[for] identifier[pair] keyword[in]
identifier[params] [ literal[string] ][ literal[string] ]. identifier[iteritems] ()]
keyword[while] identifier[len] ( identifier[queue] )> literal[int] :
identifier[pair] , identifier[output] = identifier[queue] . identifier[pop] ()
identifier[k] , identifier[v] = identifier[pair]
keyword[if] ( literal[string] keyword[in] identifier[v] keyword[and] literal[string] keyword[in] identifier[v] keyword[and]
literal[string] keyword[in] identifier[v] ):
identifier[output] [ identifier[k] ]= identifier[v] [ literal[string] ]
keyword[else] :
keyword[if] identifier[k] keyword[not] keyword[in] identifier[output] :
identifier[output] [ identifier[k] ]= identifier[dict] ()
identifier[queue] . identifier[extend] (( identifier[pair] , identifier[output] [ identifier[k] ]) keyword[for] identifier[pair] keyword[in] identifier[v] . identifier[iteritems] ())
keyword[return] identifier[retval] | def getParamLabels(self):
"""
Parameters:
----------------------------------------------------------------------
retval: a dictionary of model parameter labels. For each entry
the key is the name of the parameter and the value
is the value chosen for it.
"""
params = self.__unwrapParams()
# Hypersearch v2 stores the flattened parameter settings in "particleState"
if 'particleState' in params:
retval = dict()
queue = [(pair, retval) for pair in params['particleState']['varStates'].iteritems()]
while len(queue) > 0:
(pair, output) = queue.pop()
(k, v) = pair
if 'position' in v and 'bestPosition' in v and ('velocity' in v):
output[k] = v['position'] # depends on [control=['if'], data=[]]
else:
if k not in output:
output[k] = dict() # depends on [control=['if'], data=['k', 'output']]
queue.extend(((pair, output[k]) for pair in v.iteritems())) # depends on [control=['while'], data=[]]
return retval # depends on [control=['if'], data=['params']] |
def at_least_one_schema_is_allowed(database):
    """
    Return True if the user may upload a CSV to at least one schema of
    ``database``.

    Upload is always allowed for users with access to the database itself or
    to all datasources.  Otherwise it is allowed only when the database
    declares a non-empty ``schemas_allowed_for_csv_upload`` list AND the user
    can access at least one schema on that list.  In particular, a database
    without schema support and with an empty allowed list forbids upload for
    users lacking database/all-datasource access.
    """
    # Broad access short-circuits any schema-level checks.
    if security_manager.database_access(database):
        return True
    if security_manager.all_datasource_access():
        return True
    # Otherwise fall back to the explicit CSV-upload schema whitelist.
    schemas = database.get_schema_access_for_csv_upload()
    if not schemas:
        return False
    if security_manager.schemas_accessible_by_user(database, schemas, False):
        return True
    return False
constant[
If the user has access to the database or all datasource
1. if schemas_allowed_for_csv_upload is empty
a) if database does not support schema
user is able to upload csv without specifying schema name
b) if database supports schema
user is able to upload csv to any schema
2. if schemas_allowed_for_csv_upload is not empty
a) if database does not support schema
This situation is impossible and upload will fail
b) if database supports schema
user is able to upload to schema in schemas_allowed_for_csv_upload
elif the user does not access to the database or all datasource
1. if schemas_allowed_for_csv_upload is empty
a) if database does not support schema
user is unable to upload csv
b) if database supports schema
user is unable to upload csv
2. if schemas_allowed_for_csv_upload is not empty
a) if database does not support schema
This situation is impossible and user is unable to upload csv
b) if database supports schema
user is able to upload to schema in schemas_allowed_for_csv_upload
]
if <ast.BoolOp object at 0x7da1b20bae60> begin[:]
return[constant[True]]
variable[schemas] assign[=] call[name[database].get_schema_access_for_csv_upload, parameter[]]
if <ast.BoolOp object at 0x7da1b20ba710> begin[:]
return[constant[True]]
return[constant[False]] | keyword[def] identifier[at_least_one_schema_is_allowed] ( identifier[database] ):
literal[string]
keyword[if] ( identifier[security_manager] . identifier[database_access] ( identifier[database] ) keyword[or]
identifier[security_manager] . identifier[all_datasource_access] ()):
keyword[return] keyword[True]
identifier[schemas] = identifier[database] . identifier[get_schema_access_for_csv_upload] ()
keyword[if] ( identifier[schemas] keyword[and]
identifier[security_manager] . identifier[schemas_accessible_by_user] (
identifier[database] , identifier[schemas] , keyword[False] )):
keyword[return] keyword[True]
keyword[return] keyword[False] | def at_least_one_schema_is_allowed(database):
"""
If the user has access to the database or all datasource
1. if schemas_allowed_for_csv_upload is empty
a) if database does not support schema
user is able to upload csv without specifying schema name
b) if database supports schema
user is able to upload csv to any schema
2. if schemas_allowed_for_csv_upload is not empty
a) if database does not support schema
This situation is impossible and upload will fail
b) if database supports schema
user is able to upload to schema in schemas_allowed_for_csv_upload
elif the user does not access to the database or all datasource
1. if schemas_allowed_for_csv_upload is empty
a) if database does not support schema
user is unable to upload csv
b) if database supports schema
user is unable to upload csv
2. if schemas_allowed_for_csv_upload is not empty
a) if database does not support schema
This situation is impossible and user is unable to upload csv
b) if database supports schema
user is able to upload to schema in schemas_allowed_for_csv_upload
"""
if security_manager.database_access(database) or security_manager.all_datasource_access():
return True # depends on [control=['if'], data=[]]
schemas = database.get_schema_access_for_csv_upload()
if schemas and security_manager.schemas_accessible_by_user(database, schemas, False):
return True # depends on [control=['if'], data=[]]
return False |
def orbit_posvel(Ms,eccs,semimajors,mreds,obspos=None):
    """Returns positions in projected AU and velocities in km/s for given mean anomalies.

    Positions and velocities are returned as ``SkyCoord`` objects.  Eccentric
    anomalies come from ``orbitutils.kepler.Efn``, which interpolates rather
    than solving Kepler's equation exactly.

    Parameters
    ----------
    Ms, eccs, semimajors, mreds : float or array-like
        Mean anomalies, eccentricities, semimajor axes (AU), reduced masses
        (Msun).  Presumably mutually broadcast-compatible -- TODO confirm
        against callers.
    obspos : ``None``, (x,y,z) tuple or ``SkyCoord`` object
        Locations of observers for which to return coordinates.
        If ``None`` then populate randomly on sphere. If (x,y,z) or
        ``SkyCoord`` object provided, then use those.

    Returns
    -------
    pos,vel : ``SkyCoord``
        Objects representing the positions and velocities, the coordinates
        of which are ``Quantity`` objects that have units. Positions are in
        projected AU and velocities in km/s.
    """
    Es = Efn(Ms,eccs)  # eccentric anomalies by interpolation
    # NOTE(review): rs (orbital radius) and nus (true anomaly) are computed
    # but never used below -- possibly kept for debugging; confirm.
    rs = semimajors*(1-eccs*np.cos(Es))
    nus = 2 * np.arctan2(np.sqrt(1+eccs)*np.sin(Es/2),np.sqrt(1-eccs)*np.cos(Es/2))
    # Position in the orbital plane, origin at the focus.
    xs = semimajors*(np.cos(Es) - eccs) #AU
    ys = semimajors*np.sqrt(1-eccs**2)*np.sin(Es) #AU
    # dE/dt from Kepler's equation; G, MSUN, AU are presumably cgs constants,
    # so the /1e5 below converts cm/s -> km/s -- TODO confirm unit system.
    Edots = np.sqrt(G*mreds*MSUN/(semimajors*AU)**3)/(1-eccs*np.cos(Es))
    xdots = -semimajors*AU*np.sin(Es)*Edots/1e5 #km/s
    ydots = semimajors*AU*np.sqrt(1-eccs**2)*np.cos(Es)*Edots/1e5 # km/s
    n = np.size(xs)
    orbpos = SkyCoord(xs,ys,0*u.AU,representation='cartesian',unit='AU')
    orbvel = SkyCoord(xdots,ydots,0*u.km/u.s,representation='cartesian',unit='km/s')
    if obspos is None:
        obspos = random_spherepos(n) #observer position, uniform on the sphere
    # Accept a bare (x, y, z) tuple as an observer position.
    if type(obspos) == type((1,2,3)):
        obspos = SkyCoord(obspos[0],obspos[1],obspos[2],
                          representation='cartesian').represent_as('physicsspherical')
    if not hasattr(obspos,'theta'): #if obspos not physics spherical, make it
        obspos = obspos.represent_as('physicsspherical')
    #random orientation of the sky 'x-y' coordinates
    psi = rand.random(n)*2*np.pi
    #transform positions and velocities into observer coordinates
    x,y,z = orbitproject(orbpos.x,orbpos.y,obspos.theta,obspos.phi,psi)
    vx,vy,vz = orbitproject(orbvel.x,orbvel.y,obspos.theta,obspos.phi,psi)
    return (SkyCoord(x,y,z,representation='cartesian'),
            SkyCoord(vx,vy,vz,representation='cartesian'))
constant[returns positions in projected AU and velocities in km/s for given mean anomalies
Returns positions and velocities as SkyCoord objects. Uses
``orbitutils.kepler.Efn`` to calculate eccentric anomalies using interpolation.
Parameters
----------
Ms, eccs, semimajors, mreds : float or array-like
Mean anomalies, eccentricities, semimajor axes (AU), reduced masses (Msun).
obspos : ``None``, (x,y,z) tuple or ``SkyCoord`` object
Locations of observers for which to return coordinates.
If ``None`` then populate randomly on sphere. If (x,y,z) or
``SkyCoord`` object provided, then use those.
Returns
-------
pos,vel : ``SkyCoord``
Objects representing the positions and velocities, the coordinates
of which are ``Quantity`` objects that have units. Positions are in
projected AU and velocities in km/s.
]
variable[Es] assign[=] call[name[Efn], parameter[name[Ms], name[eccs]]]
variable[rs] assign[=] binary_operation[name[semimajors] * binary_operation[constant[1] - binary_operation[name[eccs] * call[name[np].cos, parameter[name[Es]]]]]]
variable[nus] assign[=] binary_operation[constant[2] * call[name[np].arctan2, parameter[binary_operation[call[name[np].sqrt, parameter[binary_operation[constant[1] + name[eccs]]]] * call[name[np].sin, parameter[binary_operation[name[Es] / constant[2]]]]], binary_operation[call[name[np].sqrt, parameter[binary_operation[constant[1] - name[eccs]]]] * call[name[np].cos, parameter[binary_operation[name[Es] / constant[2]]]]]]]]
variable[xs] assign[=] binary_operation[name[semimajors] * binary_operation[call[name[np].cos, parameter[name[Es]]] - name[eccs]]]
variable[ys] assign[=] binary_operation[binary_operation[name[semimajors] * call[name[np].sqrt, parameter[binary_operation[constant[1] - binary_operation[name[eccs] ** constant[2]]]]]] * call[name[np].sin, parameter[name[Es]]]]
variable[Edots] assign[=] binary_operation[call[name[np].sqrt, parameter[binary_operation[binary_operation[binary_operation[name[G] * name[mreds]] * name[MSUN]] / binary_operation[binary_operation[name[semimajors] * name[AU]] ** constant[3]]]]] / binary_operation[constant[1] - binary_operation[name[eccs] * call[name[np].cos, parameter[name[Es]]]]]]
variable[xdots] assign[=] binary_operation[binary_operation[binary_operation[binary_operation[<ast.UnaryOp object at 0x7da1b28f1030> * name[AU]] * call[name[np].sin, parameter[name[Es]]]] * name[Edots]] / constant[100000.0]]
variable[ydots] assign[=] binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[name[semimajors] * name[AU]] * call[name[np].sqrt, parameter[binary_operation[constant[1] - binary_operation[name[eccs] ** constant[2]]]]]] * call[name[np].cos, parameter[name[Es]]]] * name[Edots]] / constant[100000.0]]
variable[n] assign[=] call[name[np].size, parameter[name[xs]]]
variable[orbpos] assign[=] call[name[SkyCoord], parameter[name[xs], name[ys], binary_operation[constant[0] * name[u].AU]]]
variable[orbvel] assign[=] call[name[SkyCoord], parameter[name[xdots], name[ydots], binary_operation[binary_operation[constant[0] * name[u].km] / name[u].s]]]
if compare[name[obspos] is constant[None]] begin[:]
variable[obspos] assign[=] call[name[random_spherepos], parameter[name[n]]]
if compare[call[name[type], parameter[name[obspos]]] equal[==] call[name[type], parameter[tuple[[<ast.Constant object at 0x7da1b28de680>, <ast.Constant object at 0x7da1b28ded10>, <ast.Constant object at 0x7da1b28df310>]]]]] begin[:]
variable[obspos] assign[=] call[call[name[SkyCoord], parameter[call[name[obspos]][constant[0]], call[name[obspos]][constant[1]], call[name[obspos]][constant[2]]]].represent_as, parameter[constant[physicsspherical]]]
if <ast.UnaryOp object at 0x7da1b28dde70> begin[:]
variable[obspos] assign[=] call[name[obspos].represent_as, parameter[constant[physicsspherical]]]
variable[psi] assign[=] binary_operation[binary_operation[call[name[rand].random, parameter[name[n]]] * constant[2]] * name[np].pi]
<ast.Tuple object at 0x7da1b28afd00> assign[=] call[name[orbitproject], parameter[name[orbpos].x, name[orbpos].y, name[obspos].theta, name[obspos].phi, name[psi]]]
<ast.Tuple object at 0x7da1b28ac880> assign[=] call[name[orbitproject], parameter[name[orbvel].x, name[orbvel].y, name[obspos].theta, name[obspos].phi, name[psi]]]
return[tuple[[<ast.Call object at 0x7da1b28ad6f0>, <ast.Call object at 0x7da1b28ac130>]]] | keyword[def] identifier[orbit_posvel] ( identifier[Ms] , identifier[eccs] , identifier[semimajors] , identifier[mreds] , identifier[obspos] = keyword[None] ):
literal[string]
identifier[Es] = identifier[Efn] ( identifier[Ms] , identifier[eccs] )
identifier[rs] = identifier[semimajors] *( literal[int] - identifier[eccs] * identifier[np] . identifier[cos] ( identifier[Es] ))
identifier[nus] = literal[int] * identifier[np] . identifier[arctan2] ( identifier[np] . identifier[sqrt] ( literal[int] + identifier[eccs] )* identifier[np] . identifier[sin] ( identifier[Es] / literal[int] ), identifier[np] . identifier[sqrt] ( literal[int] - identifier[eccs] )* identifier[np] . identifier[cos] ( identifier[Es] / literal[int] ))
identifier[xs] = identifier[semimajors] *( identifier[np] . identifier[cos] ( identifier[Es] )- identifier[eccs] )
identifier[ys] = identifier[semimajors] * identifier[np] . identifier[sqrt] ( literal[int] - identifier[eccs] ** literal[int] )* identifier[np] . identifier[sin] ( identifier[Es] )
identifier[Edots] = identifier[np] . identifier[sqrt] ( identifier[G] * identifier[mreds] * identifier[MSUN] /( identifier[semimajors] * identifier[AU] )** literal[int] )/( literal[int] - identifier[eccs] * identifier[np] . identifier[cos] ( identifier[Es] ))
identifier[xdots] =- identifier[semimajors] * identifier[AU] * identifier[np] . identifier[sin] ( identifier[Es] )* identifier[Edots] / literal[int]
identifier[ydots] = identifier[semimajors] * identifier[AU] * identifier[np] . identifier[sqrt] ( literal[int] - identifier[eccs] ** literal[int] )* identifier[np] . identifier[cos] ( identifier[Es] )* identifier[Edots] / literal[int]
identifier[n] = identifier[np] . identifier[size] ( identifier[xs] )
identifier[orbpos] = identifier[SkyCoord] ( identifier[xs] , identifier[ys] , literal[int] * identifier[u] . identifier[AU] , identifier[representation] = literal[string] , identifier[unit] = literal[string] )
identifier[orbvel] = identifier[SkyCoord] ( identifier[xdots] , identifier[ydots] , literal[int] * identifier[u] . identifier[km] / identifier[u] . identifier[s] , identifier[representation] = literal[string] , identifier[unit] = literal[string] )
keyword[if] identifier[obspos] keyword[is] keyword[None] :
identifier[obspos] = identifier[random_spherepos] ( identifier[n] )
keyword[if] identifier[type] ( identifier[obspos] )== identifier[type] (( literal[int] , literal[int] , literal[int] )):
identifier[obspos] = identifier[SkyCoord] ( identifier[obspos] [ literal[int] ], identifier[obspos] [ literal[int] ], identifier[obspos] [ literal[int] ],
identifier[representation] = literal[string] ). identifier[represent_as] ( literal[string] )
keyword[if] keyword[not] identifier[hasattr] ( identifier[obspos] , literal[string] ):
identifier[obspos] = identifier[obspos] . identifier[represent_as] ( literal[string] )
identifier[psi] = identifier[rand] . identifier[random] ( identifier[n] )* literal[int] * identifier[np] . identifier[pi]
identifier[x] , identifier[y] , identifier[z] = identifier[orbitproject] ( identifier[orbpos] . identifier[x] , identifier[orbpos] . identifier[y] , identifier[obspos] . identifier[theta] , identifier[obspos] . identifier[phi] , identifier[psi] )
identifier[vx] , identifier[vy] , identifier[vz] = identifier[orbitproject] ( identifier[orbvel] . identifier[x] , identifier[orbvel] . identifier[y] , identifier[obspos] . identifier[theta] , identifier[obspos] . identifier[phi] , identifier[psi] )
keyword[return] ( identifier[SkyCoord] ( identifier[x] , identifier[y] , identifier[z] , identifier[representation] = literal[string] ),
identifier[SkyCoord] ( identifier[vx] , identifier[vy] , identifier[vz] , identifier[representation] = literal[string] )) | def orbit_posvel(Ms, eccs, semimajors, mreds, obspos=None):
"""returns positions in projected AU and velocities in km/s for given mean anomalies
Returns positions and velocities as SkyCoord objects. Uses
``orbitutils.kepler.Efn`` to calculate eccentric anomalies using interpolation.
Parameters
----------
Ms, eccs, semimajors, mreds : float or array-like
Mean anomalies, eccentricities, semimajor axes (AU), reduced masses (Msun).
obspos : ``None``, (x,y,z) tuple or ``SkyCoord`` object
Locations of observers for which to return coordinates.
If ``None`` then populate randomly on sphere. If (x,y,z) or
``SkyCoord`` object provided, then use those.
Returns
-------
pos,vel : ``SkyCoord``
Objects representing the positions and velocities, the coordinates
of which are ``Quantity`` objects that have units. Positions are in
projected AU and velocities in km/s.
"""
Es = Efn(Ms, eccs) #eccentric anomalies by interpolation
rs = semimajors * (1 - eccs * np.cos(Es))
nus = 2 * np.arctan2(np.sqrt(1 + eccs) * np.sin(Es / 2), np.sqrt(1 - eccs) * np.cos(Es / 2))
xs = semimajors * (np.cos(Es) - eccs) #AU
ys = semimajors * np.sqrt(1 - eccs ** 2) * np.sin(Es) #AU
Edots = np.sqrt(G * mreds * MSUN / (semimajors * AU) ** 3) / (1 - eccs * np.cos(Es))
xdots = -semimajors * AU * np.sin(Es) * Edots / 100000.0 #km/s
ydots = semimajors * AU * np.sqrt(1 - eccs ** 2) * np.cos(Es) * Edots / 100000.0 # km/s
n = np.size(xs)
orbpos = SkyCoord(xs, ys, 0 * u.AU, representation='cartesian', unit='AU')
orbvel = SkyCoord(xdots, ydots, 0 * u.km / u.s, representation='cartesian', unit='km/s')
if obspos is None:
obspos = random_spherepos(n) #observer position # depends on [control=['if'], data=['obspos']]
if type(obspos) == type((1, 2, 3)):
obspos = SkyCoord(obspos[0], obspos[1], obspos[2], representation='cartesian').represent_as('physicsspherical') # depends on [control=['if'], data=[]]
if not hasattr(obspos, 'theta'): #if obspos not physics spherical, make it
obspos = obspos.represent_as('physicsspherical') # depends on [control=['if'], data=[]]
#random orientation of the sky 'x-y' coordinates
psi = rand.random(n) * 2 * np.pi
#transform positions and velocities into observer coordinates
(x, y, z) = orbitproject(orbpos.x, orbpos.y, obspos.theta, obspos.phi, psi)
(vx, vy, vz) = orbitproject(orbvel.x, orbvel.y, obspos.theta, obspos.phi, psi)
return (SkyCoord(x, y, z, representation='cartesian'), SkyCoord(vx, vy, vz, representation='cartesian')) |
def file_zip(self, keys, confirm_download=None, meta_only=None):
    """file/zip

    http://www.mediafire.com/developers/core_api/1.3/file/#zip
    """
    # Build the query first so the request call stays on one line.
    params = QueryParams({
        'keys': keys,
        'confirm_download': confirm_download,
        'meta_only': meta_only,
    })
    return self.request('file/zip', params)
constant[file/zip
http://www.mediafire.com/developers/core_api/1.3/file/#zip
]
return[call[name[self].request, parameter[constant[file/zip], call[name[QueryParams], parameter[dictionary[[<ast.Constant object at 0x7da1b0ebca30>, <ast.Constant object at 0x7da1b0ebd210>, <ast.Constant object at 0x7da1b0ebecb0>], [<ast.Name object at 0x7da1b0ebd900>, <ast.Name object at 0x7da1b0ebcac0>, <ast.Name object at 0x7da1b0ebd6f0>]]]]]]] | keyword[def] identifier[file_zip] ( identifier[self] , identifier[keys] , identifier[confirm_download] = keyword[None] , identifier[meta_only] = keyword[None] ):
literal[string]
keyword[return] identifier[self] . identifier[request] ( literal[string] , identifier[QueryParams] ({
literal[string] : identifier[keys] ,
literal[string] : identifier[confirm_download] ,
literal[string] : identifier[meta_only]
})) | def file_zip(self, keys, confirm_download=None, meta_only=None):
"""file/zip
http://www.mediafire.com/developers/core_api/1.3/file/#zip
"""
return self.request('file/zip', QueryParams({'keys': keys, 'confirm_download': confirm_download, 'meta_only': meta_only})) |
def restart_service(service_name, minimum_running_time=None):
    '''
    Restart OpenStack service immediately, or only if it's running longer than
    specified value

    CLI Example:

    .. code-block:: bash

        salt '*' openstack_mng.restart_service neutron
        salt '*' openstack_mng.restart_service neutron minimum_running_time=600

    service_name
        Name of the OpenStack service (e.g. ``neutron``) whose backing system
        services should be restarted.

    minimum_running_time
        If given (seconds), only restart the backing system services that have
        been active at least this long, plus any that are not currently
        active.  In that mode, returns True if at least one service was
        successfully restarted; otherwise returns whether the unconditional
        restart command succeeded.
    '''
    if not minimum_running_time:
        # Simple case: unconditionally restart every backing service and
        # report the success of the whole command.
        os_cmd = ['/usr/bin/openstack-service', 'restart', service_name]
        return __salt__['cmd.retcode'](os_cmd) == 0

    # System uptime in seconds.  It is loop-invariant, so read /proc/uptime
    # once instead of once per service as the original code did.
    with salt.utils.files.fopen('/proc/uptime') as rfh:
        uptime = float(
            salt.utils.stringutils.to_unicode(rfh.read()).split(' ')[0])

    ret_code = False
    # System services backing the requested OpenStack service.
    services = __salt__['cmd.run'](
        ['/usr/bin/openstack-service', 'list', service_name]).split('\n')
    for service in services:
        service_info = __salt__['service.show'](service)
        # ExecMainStartTimestampMonotonic is microseconds since boot; a start
        # time earlier than (uptime - minimum_running_time) means the service
        # has been running long enough to be restarted.
        started = int(service_info.get('ExecMainStartTimestampMonotonic', 0)) / 1000000
        expr_time = started < uptime - minimum_running_time
        expr_active = service_info.get('ActiveState') == "active"
        if expr_time or not expr_active:
            # Restart this specific system service.
            if __salt__['service.restart'](service):
                ret_code = True
    return ret_code
constant[
Restart OpenStack service immediately, or only if it's running longer than
specified value
CLI Example:
.. code-block:: bash
salt '*' openstack_mng.restart_service neutron
salt '*' openstack_mng.restart_service neutron minimum_running_time=600
]
if name[minimum_running_time] begin[:]
variable[ret_code] assign[=] constant[False]
variable[services] assign[=] call[call[call[name[__salt__]][constant[cmd.run]], parameter[list[[<ast.Constant object at 0x7da2044c0f40>, <ast.Constant object at 0x7da2044c2e60>, <ast.Name object at 0x7da2044c2a10>]]]].split, parameter[constant[
]]]
for taget[name[service]] in starred[name[services]] begin[:]
variable[service_info] assign[=] call[call[name[__salt__]][constant[service.show]], parameter[name[service]]]
with call[name[salt].utils.files.fopen, parameter[constant[/proc/uptime]]] begin[:]
variable[boot_time] assign[=] call[name[float], parameter[call[call[call[name[salt].utils.stringutils.to_unicode, parameter[call[name[rfh].read, parameter[]]]].split, parameter[constant[ ]]]][constant[0]]]]
variable[expr_time] assign[=] compare[binary_operation[call[name[int], parameter[call[name[service_info].get, parameter[constant[ExecMainStartTimestampMonotonic], constant[0]]]]] / constant[1000000]] less[<] binary_operation[name[boot_time] - name[minimum_running_time]]]
variable[expr_active] assign[=] compare[call[name[service_info].get, parameter[constant[ActiveState]]] equal[==] constant[active]]
if <ast.BoolOp object at 0x7da18f00eb60> begin[:]
variable[ret] assign[=] call[call[name[__salt__]][constant[service.restart]], parameter[name[service]]]
if name[ret] begin[:]
variable[ret_code] assign[=] constant[True]
return[name[ret_code]] | keyword[def] identifier[restart_service] ( identifier[service_name] , identifier[minimum_running_time] = keyword[None] ):
literal[string]
keyword[if] identifier[minimum_running_time] :
identifier[ret_code] = keyword[False]
identifier[services] = identifier[__salt__] [ literal[string] ]([ literal[string] , literal[string] , identifier[service_name] ]). identifier[split] ( literal[string] )
keyword[for] identifier[service] keyword[in] identifier[services] :
identifier[service_info] = identifier[__salt__] [ literal[string] ]( identifier[service] )
keyword[with] identifier[salt] . identifier[utils] . identifier[files] . identifier[fopen] ( literal[string] ) keyword[as] identifier[rfh] :
identifier[boot_time] = identifier[float] (
identifier[salt] . identifier[utils] . identifier[stringutils] . identifier[to_unicode] (
identifier[rfh] . identifier[read] ()
). identifier[split] ( literal[string] )[ literal[int] ]
)
identifier[expr_time] = identifier[int] ( identifier[service_info] . identifier[get] ( literal[string] , literal[int] ))/ literal[int] < identifier[boot_time] - identifier[minimum_running_time]
identifier[expr_active] = identifier[service_info] . identifier[get] ( literal[string] )== literal[string]
keyword[if] identifier[expr_time] keyword[or] keyword[not] identifier[expr_active] :
identifier[ret] = identifier[__salt__] [ literal[string] ]( identifier[service] )
keyword[if] identifier[ret] :
identifier[ret_code] = keyword[True]
keyword[return] identifier[ret_code]
keyword[else] :
identifier[os_cmd] =[ literal[string] , literal[string] , identifier[service_name] ]
keyword[return] identifier[__salt__] [ literal[string] ]( identifier[os_cmd] )== literal[int] | def restart_service(service_name, minimum_running_time=None):
"""
Restart OpenStack service immediately, or only if it's running longer than
specified value
CLI Example:
.. code-block:: bash
salt '*' openstack_mng.restart_service neutron
salt '*' openstack_mng.restart_service neutron minimum_running_time=600
"""
if minimum_running_time:
ret_code = False
# get system services list for interesting openstack service
services = __salt__['cmd.run'](['/usr/bin/openstack-service', 'list', service_name]).split('\n')
for service in services:
service_info = __salt__['service.show'](service)
with salt.utils.files.fopen('/proc/uptime') as rfh:
boot_time = float(salt.utils.stringutils.to_unicode(rfh.read()).split(' ')[0]) # depends on [control=['with'], data=['rfh']]
expr_time = int(service_info.get('ExecMainStartTimestampMonotonic', 0)) / 1000000 < boot_time - minimum_running_time
expr_active = service_info.get('ActiveState') == 'active'
if expr_time or not expr_active:
# restart specific system service
ret = __salt__['service.restart'](service)
if ret:
ret_code = True # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['service']]
return ret_code # depends on [control=['if'], data=[]]
else:
# just restart
os_cmd = ['/usr/bin/openstack-service', 'restart', service_name]
return __salt__['cmd.retcode'](os_cmd) == 0 |
def par_xstep(i):
    r"""Minimise the Augmented Lagrangian with respect to
    :math:`\mathbf{x}_{G_i}`, one of the disjoint sub-problems of the
    :math:`\mathbf{x}` update.

    Parameters
    ----------
    i : int
        Index of the coefficient grouping to update
    """
    global mp_X
    global mp_DX
    # Bounds of this grouping within the shared coefficient arrays.
    lo, hi = mp_grp[i], mp_grp[i + 1]
    Dfi = mp_Df[lo:hi]
    alpha_sq = mp_alpha**2
    YU0f = sl.rfftn(mp_Y0[[i]] - mp_U0[[i]], mp_Nv, mp_axisN)
    YU1f = sl.rfftn(mp_Y1[lo:hi] - 1/mp_alpha*mp_U1[lo:hi], mp_Nv, mp_axisN)
    if mp_Cd == 1:
        # Single-channel dictionary: use the cached Sherman-Morrison solve.
        rhs = np.conj(Dfi) * YU0f + alpha_sq * YU1f
        Xf = sl.solvedbi_sm(Dfi, alpha_sq, rhs, mp_cache[i], axis=mp_axisM)
    else:
        # Multi-channel dictionary: iterated Sherman-Morrison solve.
        rhs = sl.inner(np.conj(Dfi), YU0f, axis=mp_C) + alpha_sq * YU1f
        Xf = sl.solvemdbi_ism(Dfi, alpha_sq, rhs, mp_axisM, mp_axisC)
    # Write the updated coefficients and the D X product back into the
    # shared (multiprocessing) arrays.
    mp_X[lo:hi] = sl.irfftn(Xf, mp_Nv, mp_axisN)
    mp_DX[i] = sl.irfftn(sl.inner(Dfi, Xf, mp_axisM), mp_Nv, mp_axisN)
constant[Minimise Augmented Lagrangian with respect to
:math:`\mathbf{x}_{G_i}`, one of the disjoint problems of optimizing
:math:`\mathbf{x}`.
Parameters
----------
i : int
Index of grouping to update
]
<ast.Global object at 0x7da1b061b220>
<ast.Global object at 0x7da1b0619b70>
variable[YU0f] assign[=] call[name[sl].rfftn, parameter[binary_operation[call[name[mp_Y0]][list[[<ast.Name object at 0x7da1b0618af0>]]] - call[name[mp_U0]][list[[<ast.Name object at 0x7da1b06180d0>]]]], name[mp_Nv], name[mp_axisN]]]
variable[YU1f] assign[=] call[name[sl].rfftn, parameter[binary_operation[call[name[mp_Y1]][<ast.Slice object at 0x7da1b0619090>] - binary_operation[binary_operation[constant[1] / name[mp_alpha]] * call[name[mp_U1]][<ast.Slice object at 0x7da1b0618df0>]]], name[mp_Nv], name[mp_axisN]]]
if compare[name[mp_Cd] equal[==] constant[1]] begin[:]
variable[b] assign[=] binary_operation[binary_operation[call[name[np].conj, parameter[call[name[mp_Df]][<ast.Slice object at 0x7da1b0618310>]]] * name[YU0f]] + binary_operation[binary_operation[name[mp_alpha] ** constant[2]] * name[YU1f]]]
variable[Xf] assign[=] call[name[sl].solvedbi_sm, parameter[call[name[mp_Df]][<ast.Slice object at 0x7da1b061b670>], binary_operation[name[mp_alpha] ** constant[2]], name[b], call[name[mp_cache]][name[i]]]]
call[name[mp_X]][<ast.Slice object at 0x7da1b061b760>] assign[=] call[name[sl].irfftn, parameter[name[Xf], name[mp_Nv], name[mp_axisN]]]
call[name[mp_DX]][name[i]] assign[=] call[name[sl].irfftn, parameter[call[name[sl].inner, parameter[call[name[mp_Df]][<ast.Slice object at 0x7da1b06e86d0>], name[Xf], name[mp_axisM]]], name[mp_Nv], name[mp_axisN]]] | keyword[def] identifier[par_xstep] ( identifier[i] ):
literal[string]
keyword[global] identifier[mp_X]
keyword[global] identifier[mp_DX]
identifier[YU0f] = identifier[sl] . identifier[rfftn] ( identifier[mp_Y0] [[ identifier[i] ]]- identifier[mp_U0] [[ identifier[i] ]], identifier[mp_Nv] , identifier[mp_axisN] )
identifier[YU1f] = identifier[sl] . identifier[rfftn] ( identifier[mp_Y1] [ identifier[mp_grp] [ identifier[i] ]: identifier[mp_grp] [ identifier[i] + literal[int] ]]-
literal[int] / identifier[mp_alpha] * identifier[mp_U1] [ identifier[mp_grp] [ identifier[i] ]: identifier[mp_grp] [ identifier[i] + literal[int] ]], identifier[mp_Nv] , identifier[mp_axisN] )
keyword[if] identifier[mp_Cd] == literal[int] :
identifier[b] = identifier[np] . identifier[conj] ( identifier[mp_Df] [ identifier[mp_grp] [ identifier[i] ]: identifier[mp_grp] [ identifier[i] + literal[int] ]])* identifier[YU0f] + identifier[mp_alpha] ** literal[int] * identifier[YU1f]
identifier[Xf] = identifier[sl] . identifier[solvedbi_sm] ( identifier[mp_Df] [ identifier[mp_grp] [ identifier[i] ]: identifier[mp_grp] [ identifier[i] + literal[int] ]], identifier[mp_alpha] ** literal[int] , identifier[b] ,
identifier[mp_cache] [ identifier[i] ], identifier[axis] = identifier[mp_axisM] )
keyword[else] :
identifier[b] = identifier[sl] . identifier[inner] ( identifier[np] . identifier[conj] ( identifier[mp_Df] [ identifier[mp_grp] [ identifier[i] ]: identifier[mp_grp] [ identifier[i] + literal[int] ]]), identifier[YU0f] ,
identifier[axis] = identifier[mp_C] )+ identifier[mp_alpha] ** literal[int] * identifier[YU1f]
identifier[Xf] = identifier[sl] . identifier[solvemdbi_ism] ( identifier[mp_Df] [ identifier[mp_grp] [ identifier[i] ]: identifier[mp_grp] [ identifier[i] + literal[int] ]], identifier[mp_alpha] ** literal[int] , identifier[b] ,
identifier[mp_axisM] , identifier[mp_axisC] )
identifier[mp_X] [ identifier[mp_grp] [ identifier[i] ]: identifier[mp_grp] [ identifier[i] + literal[int] ]]= identifier[sl] . identifier[irfftn] ( identifier[Xf] , identifier[mp_Nv] ,
identifier[mp_axisN] )
identifier[mp_DX] [ identifier[i] ]= identifier[sl] . identifier[irfftn] ( identifier[sl] . identifier[inner] ( identifier[mp_Df] [ identifier[mp_grp] [ identifier[i] ]: identifier[mp_grp] [ identifier[i] + literal[int] ]], identifier[Xf] ,
identifier[mp_axisM] ), identifier[mp_Nv] , identifier[mp_axisN] ) | def par_xstep(i):
"""Minimise Augmented Lagrangian with respect to
:math:`\\mathbf{x}_{G_i}`, one of the disjoint problems of optimizing
:math:`\\mathbf{x}`.
Parameters
----------
i : int
Index of grouping to update
"""
global mp_X
global mp_DX
YU0f = sl.rfftn(mp_Y0[[i]] - mp_U0[[i]], mp_Nv, mp_axisN)
YU1f = sl.rfftn(mp_Y1[mp_grp[i]:mp_grp[i + 1]] - 1 / mp_alpha * mp_U1[mp_grp[i]:mp_grp[i + 1]], mp_Nv, mp_axisN)
if mp_Cd == 1:
b = np.conj(mp_Df[mp_grp[i]:mp_grp[i + 1]]) * YU0f + mp_alpha ** 2 * YU1f
Xf = sl.solvedbi_sm(mp_Df[mp_grp[i]:mp_grp[i + 1]], mp_alpha ** 2, b, mp_cache[i], axis=mp_axisM) # depends on [control=['if'], data=[]]
else:
b = sl.inner(np.conj(mp_Df[mp_grp[i]:mp_grp[i + 1]]), YU0f, axis=mp_C) + mp_alpha ** 2 * YU1f
Xf = sl.solvemdbi_ism(mp_Df[mp_grp[i]:mp_grp[i + 1]], mp_alpha ** 2, b, mp_axisM, mp_axisC)
mp_X[mp_grp[i]:mp_grp[i + 1]] = sl.irfftn(Xf, mp_Nv, mp_axisN)
mp_DX[i] = sl.irfftn(sl.inner(mp_Df[mp_grp[i]:mp_grp[i + 1]], Xf, mp_axisM), mp_Nv, mp_axisN) |
def zap_disk(block_device):
    '''
    Wipe the partition table from a block device. Relies on sgdisk,
    which ships in the 'gdisk' package on Ubuntu.

    :param block_device: str: Full path of block device to clean.
    '''
    # sgdisk can exit non-zero here; that is fine because the dd passes
    # below overwrite whatever it leaves behind.
    for extra_args in (['--zap-all', '--'], ['--clear', '--mbrtogpt', '--']):
        call(['sgdisk'] + extra_args + [block_device])
    size_output = check_output(['blockdev', '--getsz',
                                block_device]).decode('UTF-8')
    # Device size in 512-byte sectors; back off 100 sectors so the final
    # dd clobbers the backup GPT at the end of the device.
    gpt_end = int(size_output.split()[0]) - 100
    check_call(['dd', 'if=/dev/zero', 'of=%s' % (block_device),
                'bs=1M', 'count=1'])
    check_call(['dd', 'if=/dev/zero', 'of=%s' % (block_device),
                'bs=512', 'count=100', 'seek=%s' % (gpt_end)])
constant[
Clear a block device of partition table. Relies on sgdisk, which is
installed as pat of the 'gdisk' package in Ubuntu.
:param block_device: str: Full path of block device to clean.
]
call[name[call], parameter[list[[<ast.Constant object at 0x7da1b121bc10>, <ast.Constant object at 0x7da1b1218550>, <ast.Constant object at 0x7da1b121af80>, <ast.Name object at 0x7da1b121b610>]]]]
call[name[call], parameter[list[[<ast.Constant object at 0x7da1b121b0d0>, <ast.Constant object at 0x7da1b12194e0>, <ast.Constant object at 0x7da1b121a500>, <ast.Constant object at 0x7da1b1218070>, <ast.Name object at 0x7da1b121b640>]]]]
variable[dev_end] assign[=] call[call[name[check_output], parameter[list[[<ast.Constant object at 0x7da1b121aaa0>, <ast.Constant object at 0x7da1b121a5c0>, <ast.Name object at 0x7da1b1219990>]]]].decode, parameter[constant[UTF-8]]]
variable[gpt_end] assign[=] binary_operation[call[name[int], parameter[call[call[name[dev_end].split, parameter[]]][constant[0]]]] - constant[100]]
call[name[check_call], parameter[list[[<ast.Constant object at 0x7da1b121a860>, <ast.Constant object at 0x7da1b1219210>, <ast.BinOp object at 0x7da1b121b340>, <ast.Constant object at 0x7da18bc70f40>, <ast.Constant object at 0x7da18bc738e0>]]]]
call[name[check_call], parameter[list[[<ast.Constant object at 0x7da18bc72c20>, <ast.Constant object at 0x7da18bc714b0>, <ast.BinOp object at 0x7da18bc73f70>, <ast.Constant object at 0x7da18bc73340>, <ast.Constant object at 0x7da18bc70670>, <ast.BinOp object at 0x7da18bc73eb0>]]]] | keyword[def] identifier[zap_disk] ( identifier[block_device] ):
literal[string]
identifier[call] ([ literal[string] , literal[string] , literal[string] , identifier[block_device] ])
identifier[call] ([ literal[string] , literal[string] , literal[string] , literal[string] , identifier[block_device] ])
identifier[dev_end] = identifier[check_output] ([ literal[string] , literal[string] ,
identifier[block_device] ]). identifier[decode] ( literal[string] )
identifier[gpt_end] = identifier[int] ( identifier[dev_end] . identifier[split] ()[ literal[int] ])- literal[int]
identifier[check_call] ([ literal[string] , literal[string] , literal[string] %( identifier[block_device] ),
literal[string] , literal[string] ])
identifier[check_call] ([ literal[string] , literal[string] , literal[string] %( identifier[block_device] ),
literal[string] , literal[string] , literal[string] %( identifier[gpt_end] )]) | def zap_disk(block_device):
"""
Clear a block device of partition table. Relies on sgdisk, which is
installed as pat of the 'gdisk' package in Ubuntu.
:param block_device: str: Full path of block device to clean.
"""
# https://github.com/ceph/ceph/commit/fdd7f8d83afa25c4e09aaedd90ab93f3b64a677b
# sometimes sgdisk exits non-zero; this is OK, dd will clean up
call(['sgdisk', '--zap-all', '--', block_device])
call(['sgdisk', '--clear', '--mbrtogpt', '--', block_device])
dev_end = check_output(['blockdev', '--getsz', block_device]).decode('UTF-8')
gpt_end = int(dev_end.split()[0]) - 100
check_call(['dd', 'if=/dev/zero', 'of=%s' % block_device, 'bs=1M', 'count=1'])
check_call(['dd', 'if=/dev/zero', 'of=%s' % block_device, 'bs=512', 'count=100', 'seek=%s' % gpt_end]) |
def mapred(self, inputs, query, timeout=None):
    """
    Run a MapReduce query and return the decoded JSON result.
    """
    # Build the JSON job description (timeout is optional).
    payload = self._construct_mapred_json(inputs, query, timeout)
    # POST the job to the mapred endpoint.
    request_headers = {'Content-Type': 'application/json'}
    status, headers, body = self._request(
        'POST', self.mapred_path(), request_headers, payload)
    # Anything other than 200 is a failed job.
    if status != 200:
        raise RiakError(
            'Error running MapReduce operation. Headers: %s Body: %s' %
            (repr(headers), repr(body)))
    return json.loads(bytes_to_str(body))
constant[
Run a MapReduce query.
]
variable[content] assign[=] call[name[self]._construct_mapred_json, parameter[name[inputs], name[query], name[timeout]]]
variable[url] assign[=] call[name[self].mapred_path, parameter[]]
variable[headers] assign[=] dictionary[[<ast.Constant object at 0x7da18eb567a0>], [<ast.Constant object at 0x7da18eb543a0>]]
<ast.Tuple object at 0x7da18eb55480> assign[=] call[name[self]._request, parameter[constant[POST], name[url], name[headers], name[content]]]
if compare[name[status] not_equal[!=] constant[200]] begin[:]
<ast.Raise object at 0x7da18eb56c50>
variable[result] assign[=] call[name[json].loads, parameter[call[name[bytes_to_str], parameter[name[body]]]]]
return[name[result]] | keyword[def] identifier[mapred] ( identifier[self] , identifier[inputs] , identifier[query] , identifier[timeout] = keyword[None] ):
literal[string]
identifier[content] = identifier[self] . identifier[_construct_mapred_json] ( identifier[inputs] , identifier[query] , identifier[timeout] )
identifier[url] = identifier[self] . identifier[mapred_path] ()
identifier[headers] ={ literal[string] : literal[string] }
identifier[status] , identifier[headers] , identifier[body] = identifier[self] . identifier[_request] ( literal[string] , identifier[url] , identifier[headers] , identifier[content] )
keyword[if] identifier[status] != literal[int] :
keyword[raise] identifier[RiakError] (
literal[string] %
( identifier[repr] ( identifier[headers] ), identifier[repr] ( identifier[body] )))
identifier[result] = identifier[json] . identifier[loads] ( identifier[bytes_to_str] ( identifier[body] ))
keyword[return] identifier[result] | def mapred(self, inputs, query, timeout=None):
"""
Run a MapReduce query.
"""
# Construct the job, optionally set the timeout...
content = self._construct_mapred_json(inputs, query, timeout)
# Do the request...
url = self.mapred_path()
headers = {'Content-Type': 'application/json'}
(status, headers, body) = self._request('POST', url, headers, content)
# Make sure the expected status code came back...
if status != 200:
raise RiakError('Error running MapReduce operation. Headers: %s Body: %s' % (repr(headers), repr(body))) # depends on [control=['if'], data=[]]
result = json.loads(bytes_to_str(body))
return result |
def _scalar_pattern_uniform_op_left(func):
    """Decorator for operator overloading when ScalarPatternUniform is on
    the left."""
    @wraps(func)
    def verif(self, patt):
        # Pattern-pattern case: shapes must agree.
        if isinstance(patt, ScalarPatternUniform):
            if self._dsphere.shape != patt._dsphere.shape:
                raise ValueError(err_msg['SP_sz_msmtch'] % \
                                 (self.nrows, self.ncols,
                                  patt.nrows, patt.ncols))
            return ScalarPatternUniform(func(self, self._dsphere,
                                             patt._dsphere),
                                        doublesphere=True)
        # Pattern-scalar case: broadcast the number over the sphere.
        if isinstance(patt, numbers.Number):
            return ScalarPatternUniform(func(self, self._dsphere, patt),
                                        doublesphere=True)
        raise TypeError(err_msg['no_combi_SP'])
    return verif
constant[Decorator for operator overloading when ScalarPatternUniform is on
the left.]
def function[verif, parameter[self, patt]]:
if call[name[isinstance], parameter[name[patt], name[ScalarPatternUniform]]] begin[:]
if compare[name[self]._dsphere.shape equal[==] name[patt]._dsphere.shape] begin[:]
return[call[name[ScalarPatternUniform], parameter[call[name[func], parameter[name[self], name[self]._dsphere, name[patt]._dsphere]]]]]
return[name[verif]] | keyword[def] identifier[_scalar_pattern_uniform_op_left] ( identifier[func] ):
literal[string]
@ identifier[wraps] ( identifier[func] )
keyword[def] identifier[verif] ( identifier[self] , identifier[patt] ):
keyword[if] identifier[isinstance] ( identifier[patt] , identifier[ScalarPatternUniform] ):
keyword[if] identifier[self] . identifier[_dsphere] . identifier[shape] == identifier[patt] . identifier[_dsphere] . identifier[shape] :
keyword[return] identifier[ScalarPatternUniform] ( identifier[func] ( identifier[self] , identifier[self] . identifier[_dsphere] ,
identifier[patt] . identifier[_dsphere] ),
identifier[doublesphere] = keyword[True] )
keyword[else] :
keyword[raise] identifier[ValueError] ( identifier[err_msg] [ literal[string] ]%( identifier[self] . identifier[nrows] , identifier[self] . identifier[ncols] ,
identifier[patt] . identifier[nrows] , identifier[patt] . identifier[ncols] ))
keyword[elif] identifier[isinstance] ( identifier[patt] , identifier[numbers] . identifier[Number] ):
keyword[return] identifier[ScalarPatternUniform] ( identifier[func] ( identifier[self] , identifier[self] . identifier[_dsphere] , identifier[patt] ),
identifier[doublesphere] = keyword[True] )
keyword[else] :
keyword[raise] identifier[TypeError] ( identifier[err_msg] [ literal[string] ])
keyword[return] identifier[verif] | def _scalar_pattern_uniform_op_left(func):
"""Decorator for operator overloading when ScalarPatternUniform is on
the left."""
@wraps(func)
def verif(self, patt):
if isinstance(patt, ScalarPatternUniform):
if self._dsphere.shape == patt._dsphere.shape:
return ScalarPatternUniform(func(self, self._dsphere, patt._dsphere), doublesphere=True) # depends on [control=['if'], data=[]]
else:
raise ValueError(err_msg['SP_sz_msmtch'] % (self.nrows, self.ncols, patt.nrows, patt.ncols)) # depends on [control=['if'], data=[]]
elif isinstance(patt, numbers.Number):
return ScalarPatternUniform(func(self, self._dsphere, patt), doublesphere=True) # depends on [control=['if'], data=[]]
else:
raise TypeError(err_msg['no_combi_SP'])
return verif |
def mkdtemp(*args, **kwargs):
    """Create a temporary directory for use in a with-context.

    keyword remove: Delete the directory when leaving the context if
    True. Default is True.
    All other arguments are forwarded to ``tempfile.mkdtemp``.
    """
    cleanup = kwargs.pop('remove', True)
    created = tempfile.mkdtemp(*args, **kwargs)
    try:
        yield created
    finally:
        # Runs on normal exit, exception, and generator close alike.
        if cleanup:
            shutil.rmtree(created)
constant[Create a temporary directory in a with-context
keyword remove: Remove the directory when leaving the
context if True. Default is True.
other keywords arguments are given to the tempfile.mkdtemp
function.
]
variable[remove] assign[=] call[name[kwargs].pop, parameter[constant[remove], constant[True]]]
variable[path] assign[=] call[name[tempfile].mkdtemp, parameter[<ast.Starred object at 0x7da18f09d300>]]
<ast.Try object at 0x7da18f09fc10> | keyword[def] identifier[mkdtemp] (* identifier[args] ,** identifier[kwargs] ):
literal[string]
identifier[remove] = identifier[kwargs] . identifier[pop] ( literal[string] , keyword[True] )
identifier[path] = identifier[tempfile] . identifier[mkdtemp] (* identifier[args] ,** identifier[kwargs] )
keyword[try] :
keyword[yield] identifier[path]
keyword[finally] :
keyword[if] identifier[remove] :
identifier[shutil] . identifier[rmtree] ( identifier[path] ) | def mkdtemp(*args, **kwargs):
"""Create a temporary directory in a with-context
keyword remove: Remove the directory when leaving the
context if True. Default is True.
other keywords arguments are given to the tempfile.mkdtemp
function.
"""
remove = kwargs.pop('remove', True)
path = tempfile.mkdtemp(*args, **kwargs)
try:
yield path # depends on [control=['try'], data=[]]
finally:
if remove:
shutil.rmtree(path) # depends on [control=['if'], data=[]] |
def parse_instancepath(self, tup_tree):
    """
    Parse an INSTANCEPATH element and return the instance path it
    represents as a CIMInstanceName object.

    ::

        <!ELEMENT INSTANCEPATH (NAMESPACEPATH, INSTANCENAME)>
    """
    self.check_node(tup_tree, 'INSTANCEPATH')
    children = kids(tup_tree)
    if len(children) != 2:
        raise CIMXMLParseError(
            _format("Element {0!A} has invalid number of child elements "
                    "{1!A} (expecting two child elements "
                    "(NAMESPACEPATH, INSTANCENAME))",
                    name(tup_tree), children),
            conn_id=self.conn_id)
    ns_elem, iname_elem = children
    host, namespace = self.parse_namespacepath(ns_elem)
    # Attach the namespace path components to the parsed instance name.
    path = self.parse_instancename(iname_elem)
    path.host = host
    path.namespace = namespace
    return path
constant[
Parse an INSTANCEPATH element and return the instance path it
represents as a CIMInstanceName object.
::
<!ELEMENT INSTANCEPATH (NAMESPACEPATH, INSTANCENAME)>
]
call[name[self].check_node, parameter[name[tup_tree], constant[INSTANCEPATH]]]
variable[k] assign[=] call[name[kids], parameter[name[tup_tree]]]
if compare[call[name[len], parameter[name[k]]] not_equal[!=] constant[2]] begin[:]
<ast.Raise object at 0x7da18bcca8c0>
<ast.Tuple object at 0x7da18bccb490> assign[=] call[name[self].parse_namespacepath, parameter[call[name[k]][constant[0]]]]
variable[inst_path] assign[=] call[name[self].parse_instancename, parameter[call[name[k]][constant[1]]]]
name[inst_path].host assign[=] name[host]
name[inst_path].namespace assign[=] name[namespace]
return[name[inst_path]] | keyword[def] identifier[parse_instancepath] ( identifier[self] , identifier[tup_tree] ):
literal[string]
identifier[self] . identifier[check_node] ( identifier[tup_tree] , literal[string] )
identifier[k] = identifier[kids] ( identifier[tup_tree] )
keyword[if] identifier[len] ( identifier[k] )!= literal[int] :
keyword[raise] identifier[CIMXMLParseError] (
identifier[_format] ( literal[string]
literal[string]
literal[string] , identifier[name] ( identifier[tup_tree] ), identifier[k] ),
identifier[conn_id] = identifier[self] . identifier[conn_id] )
identifier[host] , identifier[namespace] = identifier[self] . identifier[parse_namespacepath] ( identifier[k] [ literal[int] ])
identifier[inst_path] = identifier[self] . identifier[parse_instancename] ( identifier[k] [ literal[int] ])
identifier[inst_path] . identifier[host] = identifier[host]
identifier[inst_path] . identifier[namespace] = identifier[namespace]
keyword[return] identifier[inst_path] | def parse_instancepath(self, tup_tree):
"""
Parse an INSTANCEPATH element and return the instance path it
represents as a CIMInstanceName object.
::
<!ELEMENT INSTANCEPATH (NAMESPACEPATH, INSTANCENAME)>
"""
self.check_node(tup_tree, 'INSTANCEPATH')
k = kids(tup_tree)
if len(k) != 2:
raise CIMXMLParseError(_format('Element {0!A} has invalid number of child elements {1!A} (expecting two child elements (NAMESPACEPATH, INSTANCENAME))', name(tup_tree), k), conn_id=self.conn_id) # depends on [control=['if'], data=[]]
(host, namespace) = self.parse_namespacepath(k[0])
inst_path = self.parse_instancename(k[1])
inst_path.host = host
inst_path.namespace = namespace
return inst_path |
def _sb_r1(self, term, r1_prefixes=None):
    """Return the R1 region, as defined in the Porter2 specification.

    Parameters
    ----------
    term : str
        The term to examine
    r1_prefixes : set
        Prefixes to consider

    Returns
    -------
    int
        Length of the R1 region
    """
    # A recognised prefix fixes the start of R1 immediately.
    if hasattr(r1_prefixes, '__iter__'):
        for prefix in r1_prefixes:
            if term.startswith(prefix):
                return len(prefix)
    # Otherwise R1 starts after the first non-vowel that follows a vowel.
    seen_vowel = False
    for pos, ch in enumerate(term):
        if ch in self._vowels:
            seen_vowel = True
        elif seen_vowel:
            return pos + 1
    return len(term)
constant[Return the R1 region, as defined in the Porter2 specification.
Parameters
----------
term : str
The term to examine
r1_prefixes : set
Prefixes to consider
Returns
-------
int
Length of the R1 region
]
variable[vowel_found] assign[=] constant[False]
if call[name[hasattr], parameter[name[r1_prefixes], constant[__iter__]]] begin[:]
for taget[name[prefix]] in starred[name[r1_prefixes]] begin[:]
if compare[call[name[term]][<ast.Slice object at 0x7da1b01410f0>] equal[==] name[prefix]] begin[:]
return[call[name[len], parameter[name[prefix]]]]
for taget[name[i]] in starred[call[name[range], parameter[call[name[len], parameter[name[term]]]]]] begin[:]
if <ast.BoolOp object at 0x7da1b0140af0> begin[:]
variable[vowel_found] assign[=] constant[True]
return[call[name[len], parameter[name[term]]]] | keyword[def] identifier[_sb_r1] ( identifier[self] , identifier[term] , identifier[r1_prefixes] = keyword[None] ):
literal[string]
identifier[vowel_found] = keyword[False]
keyword[if] identifier[hasattr] ( identifier[r1_prefixes] , literal[string] ):
keyword[for] identifier[prefix] keyword[in] identifier[r1_prefixes] :
keyword[if] identifier[term] [: identifier[len] ( identifier[prefix] )]== identifier[prefix] :
keyword[return] identifier[len] ( identifier[prefix] )
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[len] ( identifier[term] )):
keyword[if] keyword[not] identifier[vowel_found] keyword[and] identifier[term] [ identifier[i] ] keyword[in] identifier[self] . identifier[_vowels] :
identifier[vowel_found] = keyword[True]
keyword[elif] identifier[vowel_found] keyword[and] identifier[term] [ identifier[i] ] keyword[not] keyword[in] identifier[self] . identifier[_vowels] :
keyword[return] identifier[i] + literal[int]
keyword[return] identifier[len] ( identifier[term] ) | def _sb_r1(self, term, r1_prefixes=None):
"""Return the R1 region, as defined in the Porter2 specification.
Parameters
----------
term : str
The term to examine
r1_prefixes : set
Prefixes to consider
Returns
-------
int
Length of the R1 region
"""
vowel_found = False
if hasattr(r1_prefixes, '__iter__'):
for prefix in r1_prefixes:
if term[:len(prefix)] == prefix:
return len(prefix) # depends on [control=['if'], data=['prefix']] # depends on [control=['for'], data=['prefix']] # depends on [control=['if'], data=[]]
for i in range(len(term)):
if not vowel_found and term[i] in self._vowels:
vowel_found = True # depends on [control=['if'], data=[]]
elif vowel_found and term[i] not in self._vowels:
return i + 1 # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['i']]
return len(term) |
def checkpoint(self):
    """Saves execution state to `self._metadata_checkpoint_dir`.

    Overwrites the current session checkpoint, which starts when self
    is instantiated.
    """
    checkpoint_dir = self._metadata_checkpoint_dir
    # Checkpointing is optional; a falsy dir disables it entirely.
    if not checkpoint_dir:
        return
    if not os.path.exists(checkpoint_dir):
        os.makedirs(checkpoint_dir)
    state = {
        "checkpoints": list(
            self.trial_executor.get_checkpoints().values()),
        "runner_data": self.__getstate__(),
        "timestamp": time.time()
    }
    # Write to a temp file first, then rename, so a crash mid-write
    # never leaves a corrupt checkpoint behind.
    tmp_path = os.path.join(checkpoint_dir, ".tmp_checkpoint")
    with open(tmp_path, "w") as out:
        json.dump(state, out, indent=2, cls=_TuneFunctionEncoder)
    final_path = os.path.join(
        checkpoint_dir,
        TrialRunner.CKPT_FILE_TMPL.format(self._session_str))
    os.rename(tmp_path, final_path)
    return checkpoint_dir
constant[Saves execution state to `self._metadata_checkpoint_dir`.
Overwrites the current session checkpoint, which starts when self
is instantiated.
]
if <ast.UnaryOp object at 0x7da20e9600d0> begin[:]
return[None]
variable[metadata_checkpoint_dir] assign[=] name[self]._metadata_checkpoint_dir
if <ast.UnaryOp object at 0x7da20e963610> begin[:]
call[name[os].makedirs, parameter[name[metadata_checkpoint_dir]]]
variable[runner_state] assign[=] dictionary[[<ast.Constant object at 0x7da18eb57100>, <ast.Constant object at 0x7da18eb55bd0>, <ast.Constant object at 0x7da18eb544c0>], [<ast.Call object at 0x7da18eb57f40>, <ast.Call object at 0x7da18eb56b30>, <ast.Call object at 0x7da18eb57400>]]
variable[tmp_file_name] assign[=] call[name[os].path.join, parameter[name[metadata_checkpoint_dir], constant[.tmp_checkpoint]]]
with call[name[open], parameter[name[tmp_file_name], constant[w]]] begin[:]
call[name[json].dump, parameter[name[runner_state], name[f]]]
call[name[os].rename, parameter[name[tmp_file_name], call[name[os].path.join, parameter[name[metadata_checkpoint_dir], call[name[TrialRunner].CKPT_FILE_TMPL.format, parameter[name[self]._session_str]]]]]]
return[name[metadata_checkpoint_dir]] | keyword[def] identifier[checkpoint] ( identifier[self] ):
literal[string]
keyword[if] keyword[not] identifier[self] . identifier[_metadata_checkpoint_dir] :
keyword[return]
identifier[metadata_checkpoint_dir] = identifier[self] . identifier[_metadata_checkpoint_dir]
keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[exists] ( identifier[metadata_checkpoint_dir] ):
identifier[os] . identifier[makedirs] ( identifier[metadata_checkpoint_dir] )
identifier[runner_state] ={
literal[string] : identifier[list] (
identifier[self] . identifier[trial_executor] . identifier[get_checkpoints] (). identifier[values] ()),
literal[string] : identifier[self] . identifier[__getstate__] (),
literal[string] : identifier[time] . identifier[time] ()
}
identifier[tmp_file_name] = identifier[os] . identifier[path] . identifier[join] ( identifier[metadata_checkpoint_dir] ,
literal[string] )
keyword[with] identifier[open] ( identifier[tmp_file_name] , literal[string] ) keyword[as] identifier[f] :
identifier[json] . identifier[dump] ( identifier[runner_state] , identifier[f] , identifier[indent] = literal[int] , identifier[cls] = identifier[_TuneFunctionEncoder] )
identifier[os] . identifier[rename] (
identifier[tmp_file_name] ,
identifier[os] . identifier[path] . identifier[join] ( identifier[metadata_checkpoint_dir] ,
identifier[TrialRunner] . identifier[CKPT_FILE_TMPL] . identifier[format] ( identifier[self] . identifier[_session_str] )))
keyword[return] identifier[metadata_checkpoint_dir] | def checkpoint(self):
"""Saves execution state to `self._metadata_checkpoint_dir`.
Overwrites the current session checkpoint, which starts when self
is instantiated.
"""
if not self._metadata_checkpoint_dir:
return # depends on [control=['if'], data=[]]
metadata_checkpoint_dir = self._metadata_checkpoint_dir
if not os.path.exists(metadata_checkpoint_dir):
os.makedirs(metadata_checkpoint_dir) # depends on [control=['if'], data=[]]
runner_state = {'checkpoints': list(self.trial_executor.get_checkpoints().values()), 'runner_data': self.__getstate__(), 'timestamp': time.time()}
tmp_file_name = os.path.join(metadata_checkpoint_dir, '.tmp_checkpoint')
with open(tmp_file_name, 'w') as f:
json.dump(runner_state, f, indent=2, cls=_TuneFunctionEncoder) # depends on [control=['with'], data=['f']]
os.rename(tmp_file_name, os.path.join(metadata_checkpoint_dir, TrialRunner.CKPT_FILE_TMPL.format(self._session_str)))
return metadata_checkpoint_dir |
def add_check(self, check):
    """
    Please use rather `register_check` as a decorator.
    """
    callback = self._add_check_callback
    # The callback may veto registration by returning a falsy value.
    if callback is not None and not callback(self, check):
        return False
    self._checkid2index[check.id] = len(self._checks)
    self._checks.append(check)
    return True
constant[
Please use rather `register_check` as a decorator.
]
if compare[name[self]._add_check_callback is_not constant[None]] begin[:]
if <ast.UnaryOp object at 0x7da20c7c8370> begin[:]
return[constant[False]]
call[name[self]._checkid2index][name[check].id] assign[=] call[name[len], parameter[name[self]._checks]]
call[name[self]._checks.append, parameter[name[check]]]
return[constant[True]] | keyword[def] identifier[add_check] ( identifier[self] , identifier[check] ):
literal[string]
keyword[if] identifier[self] . identifier[_add_check_callback] keyword[is] keyword[not] keyword[None] :
keyword[if] keyword[not] identifier[self] . identifier[_add_check_callback] ( identifier[self] , identifier[check] ):
keyword[return] keyword[False]
identifier[self] . identifier[_checkid2index] [ identifier[check] . identifier[id] ]= identifier[len] ( identifier[self] . identifier[_checks] )
identifier[self] . identifier[_checks] . identifier[append] ( identifier[check] )
keyword[return] keyword[True] | def add_check(self, check):
"""
Please use rather `register_check` as a decorator.
"""
if self._add_check_callback is not None:
if not self._add_check_callback(self, check):
# rejected, skip!
return False # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
self._checkid2index[check.id] = len(self._checks)
self._checks.append(check)
return True |
def full_newton(s, nodes1, t, nodes2):
    r"""Perform a Newton iteration until convergence to a solution.

    This assumes :math:`s` and :math:`t` are sufficiently close to an
    intersection. It **does not** govern the maximum distance away
    that the solution can lie, though the subdivided intervals that contain
    :math:`s` and :math:`t` could be used.

    To avoid round-off issues near ``0.0``, this reverses the direction
    of a curve and replaces the parameter value :math:`\nu` with
    :math:`1 - \nu` whenever :math:`\nu < \tau` (here we use a threshold
    :math:`\tau` equal to :math:`2^{-10}`, i.e. ``ZERO_THRESHOLD``).

    Args:
        s (float): The parameter along the first curve where the iteration
            will start.
        nodes1 (numpy.ndarray): Control points of the first curve.
        t (float): The parameter along the second curve where the iteration
            will start.
        nodes2 (numpy.ndarray): Control points of the second curve.

    Returns:
        Tuple[float, float]: The pair of :math:`s` and :math:`t` values that
        Newton's method converged to.
    """
    # Decide per-parameter whether to work with the reversed curve, then
    # undo the substitution on the refined values afterwards.
    flip_s = s < ZERO_THRESHOLD
    flip_t = t < ZERO_THRESHOLD
    if flip_s:
        s = 1.0 - s
        nodes1 = np.asfortranarray(nodes1[:, ::-1])
    if flip_t:
        t = 1.0 - t
        nodes2 = np.asfortranarray(nodes2[:, ::-1])
    refined_s, refined_t = full_newton_nonzero(s, nodes1, t, nodes2)
    if flip_s:
        refined_s = 1.0 - refined_s
    if flip_t:
        refined_t = 1.0 - refined_t
    return refined_s, refined_t
constant[Perform a Newton iteration until convergence to a solution.
This assumes :math:`s` and :math:`t` are sufficiently close to an
intersection. It **does not** govern the maximum distance away
that the solution can lie, though the subdivided intervals that contain
:math:`s` and :math:`t` could be used.
To avoid round-off issues near ``0.0``, this reverses the direction
of a curve and replaces the parameter value :math:`\nu` with
:math:`1 - \nu` whenever :math:`\nu < \tau` (here we use a threshold
:math:`\tau` equal to :math:`2^{-10}`, i.e. ``ZERO_THRESHOLD``).
Args:
s (float): The parameter along the first curve where the iteration
will start.
nodes1 (numpy.ndarray): Control points of the first curve.
t (float): The parameter along the second curve where the iteration
will start.
nodes2 (numpy.ndarray): Control points of the second curve.
Returns:
Tuple[float, float]: The pair of :math:`s` and :math:`t` values that
Newton's method converged to.
]
if compare[name[s] less[<] name[ZERO_THRESHOLD]] begin[:]
variable[reversed1] assign[=] call[name[np].asfortranarray, parameter[call[name[nodes1]][tuple[[<ast.Slice object at 0x7da204622860>, <ast.Slice object at 0x7da2046229e0>]]]]]
if compare[name[t] less[<] name[ZERO_THRESHOLD]] begin[:]
variable[reversed2] assign[=] call[name[np].asfortranarray, parameter[call[name[nodes2]][tuple[[<ast.Slice object at 0x7da204623b50>, <ast.Slice object at 0x7da204621810>]]]]]
<ast.Tuple object at 0x7da2046219f0> assign[=] call[name[full_newton_nonzero], parameter[binary_operation[constant[1.0] - name[s]], name[reversed1], binary_operation[constant[1.0] - name[t]], name[reversed2]]]
return[tuple[[<ast.BinOp object at 0x7da204620e80>, <ast.BinOp object at 0x7da204622bf0>]]] | keyword[def] identifier[full_newton] ( identifier[s] , identifier[nodes1] , identifier[t] , identifier[nodes2] ):
literal[string]
keyword[if] identifier[s] < identifier[ZERO_THRESHOLD] :
identifier[reversed1] = identifier[np] . identifier[asfortranarray] ( identifier[nodes1] [:,::- literal[int] ])
keyword[if] identifier[t] < identifier[ZERO_THRESHOLD] :
identifier[reversed2] = identifier[np] . identifier[asfortranarray] ( identifier[nodes2] [:,::- literal[int] ])
identifier[refined_s] , identifier[refined_t] = identifier[full_newton_nonzero] (
literal[int] - identifier[s] , identifier[reversed1] , literal[int] - identifier[t] , identifier[reversed2]
)
keyword[return] literal[int] - identifier[refined_s] , literal[int] - identifier[refined_t]
keyword[else] :
identifier[refined_s] , identifier[refined_t] = identifier[full_newton_nonzero] (
literal[int] - identifier[s] , identifier[reversed1] , identifier[t] , identifier[nodes2]
)
keyword[return] literal[int] - identifier[refined_s] , identifier[refined_t]
keyword[else] :
keyword[if] identifier[t] < identifier[ZERO_THRESHOLD] :
identifier[reversed2] = identifier[np] . identifier[asfortranarray] ( identifier[nodes2] [:,::- literal[int] ])
identifier[refined_s] , identifier[refined_t] = identifier[full_newton_nonzero] (
identifier[s] , identifier[nodes1] , literal[int] - identifier[t] , identifier[reversed2]
)
keyword[return] identifier[refined_s] , literal[int] - identifier[refined_t]
keyword[else] :
keyword[return] identifier[full_newton_nonzero] ( identifier[s] , identifier[nodes1] , identifier[t] , identifier[nodes2] ) | def full_newton(s, nodes1, t, nodes2):
"""Perform a Newton iteration until convergence to a solution.
This assumes :math:`s` and :math:`t` are sufficiently close to an
intersection. It **does not** govern the maximum distance away
that the solution can lie, though the subdivided intervals that contain
:math:`s` and :math:`t` could be used.
To avoid round-off issues near ``0.0``, this reverses the direction
of a curve and replaces the parameter value :math:`\\nu` with
:math:`1 - \\nu` whenever :math:`\\nu < \\tau` (here we use a threshold
:math:`\\tau` equal to :math:`2^{-10}`, i.e. ``ZERO_THRESHOLD``).
Args:
s (float): The parameter along the first curve where the iteration
will start.
nodes1 (numpy.ndarray): Control points of the first curve.
t (float): The parameter along the second curve where the iteration
will start.
nodes2 (numpy.ndarray): Control points of the second curve.
Returns:
Tuple[float, float]: The pair of :math:`s` and :math:`t` values that
Newton's method converged to.
"""
if s < ZERO_THRESHOLD:
reversed1 = np.asfortranarray(nodes1[:, ::-1])
if t < ZERO_THRESHOLD:
reversed2 = np.asfortranarray(nodes2[:, ::-1])
(refined_s, refined_t) = full_newton_nonzero(1.0 - s, reversed1, 1.0 - t, reversed2)
return (1.0 - refined_s, 1.0 - refined_t) # depends on [control=['if'], data=['t']]
else:
(refined_s, refined_t) = full_newton_nonzero(1.0 - s, reversed1, t, nodes2)
return (1.0 - refined_s, refined_t) # depends on [control=['if'], data=['s', 'ZERO_THRESHOLD']]
elif t < ZERO_THRESHOLD:
reversed2 = np.asfortranarray(nodes2[:, ::-1])
(refined_s, refined_t) = full_newton_nonzero(s, nodes1, 1.0 - t, reversed2)
return (refined_s, 1.0 - refined_t) # depends on [control=['if'], data=['t']]
else:
return full_newton_nonzero(s, nodes1, t, nodes2) |
def _uncythonized_model(self, beta):
    """ Creates the structure of the model

    Parameters
    ----------
    beta : np.array
        Contains untransformed starting values for latent variables

    Returns
    ----------
    theta : np.array
        Contains the predicted values for the time series
    Y : np.array
        Contains the length-adjusted time series (accounting for lags)
    scores : np.array
        Contains the scores for the time series
    coefficients : np.array
        Contains the dynamic regression coefficients over time
    """
    # Map each latent variable from its untransformed space back to the
    # parameter space via its prior's transform.
    transformed = np.array(
        [self.latent_variables.z_list[i].prior.transform(beta[i])
         for i in range(beta.shape[0])]
    )

    n_obs = self.model_Y.shape[0]
    # One coefficient path per regressor; column 0 holds the initial values.
    coefficients = np.zeros((self.X.shape[1], n_obs + 1))
    coefficients[:, 0] = self.initial_values
    theta = np.zeros(n_obs + 1)

    model_scale, model_shape, model_skewness = self._get_scale_and_shape(transformed)

    # Run the score-driven recursion over the whole series.
    theta, self.model_scores, coefficients = gas_reg_recursion(
        transformed, theta, self.X, coefficients, self.model_scores,
        self.model_Y, n_obs, self.family.reg_score_function, self.link,
        model_scale, model_shape, model_skewness, self.max_lag,
    )

    # Drop the trailing one-step-ahead element of theta.
    return theta[:-1], self.model_Y, self.model_scores, coefficients
constant[ Creates the structure of the model
Parameters
----------
beta : np.array
Contains untransformed starting values for latent variables
Returns
----------
theta : np.array
Contains the predicted values for the time series
Y : np.array
Contains the length-adjusted time series (accounting for lags)
scores : np.array
Contains the scores for the time series
]
variable[parm] assign[=] call[name[np].array, parameter[<ast.ListComp object at 0x7da20c6aafb0>]]
variable[coefficients] assign[=] call[name[np].zeros, parameter[tuple[[<ast.Subscript object at 0x7da20c6a8730>, <ast.BinOp object at 0x7da20c6aaf80>]]]]
call[name[coefficients]][tuple[[<ast.Slice object at 0x7da20c6abf70>, <ast.Constant object at 0x7da20c6aa0e0>]]] assign[=] name[self].initial_values
variable[theta] assign[=] call[name[np].zeros, parameter[binary_operation[call[name[self].model_Y.shape][constant[0]] + constant[1]]]]
<ast.Tuple object at 0x7da2045653f0> assign[=] call[name[self]._get_scale_and_shape, parameter[name[parm]]]
<ast.Tuple object at 0x7da18f00cca0> assign[=] call[name[gas_reg_recursion], parameter[name[parm], name[theta], name[self].X, name[coefficients], name[self].model_scores, name[self].model_Y, call[name[self].model_Y.shape][constant[0]], name[self].family.reg_score_function, name[self].link, name[model_scale], name[model_shape], name[model_skewness], name[self].max_lag]]
return[tuple[[<ast.Subscript object at 0x7da20c6aa230>, <ast.Attribute object at 0x7da20c6a9720>, <ast.Attribute object at 0x7da20c6aa890>, <ast.Name object at 0x7da20c6a9d50>]]] | keyword[def] identifier[_uncythonized_model] ( identifier[self] , identifier[beta] ):
literal[string]
identifier[parm] = identifier[np] . identifier[array] ([ identifier[self] . identifier[latent_variables] . identifier[z_list] [ identifier[k] ]. identifier[prior] . identifier[transform] ( identifier[beta] [ identifier[k] ]) keyword[for] identifier[k] keyword[in] identifier[range] ( identifier[beta] . identifier[shape] [ literal[int] ])])
identifier[coefficients] = identifier[np] . identifier[zeros] (( identifier[self] . identifier[X] . identifier[shape] [ literal[int] ], identifier[self] . identifier[model_Y] . identifier[shape] [ literal[int] ]+ literal[int] ))
identifier[coefficients] [:, literal[int] ]= identifier[self] . identifier[initial_values]
identifier[theta] = identifier[np] . identifier[zeros] ( identifier[self] . identifier[model_Y] . identifier[shape] [ literal[int] ]+ literal[int] )
identifier[model_scale] , identifier[model_shape] , identifier[model_skewness] = identifier[self] . identifier[_get_scale_and_shape] ( identifier[parm] )
identifier[theta] , identifier[self] . identifier[model_scores] , identifier[coefficients] = identifier[gas_reg_recursion] ( identifier[parm] , identifier[theta] , identifier[self] . identifier[X] , identifier[coefficients] , identifier[self] . identifier[model_scores] , identifier[self] . identifier[model_Y] , identifier[self] . identifier[model_Y] . identifier[shape] [ literal[int] ],
identifier[self] . identifier[family] . identifier[reg_score_function] , identifier[self] . identifier[link] , identifier[model_scale] , identifier[model_shape] , identifier[model_skewness] , identifier[self] . identifier[max_lag] )
keyword[return] identifier[theta] [:- literal[int] ], identifier[self] . identifier[model_Y] , identifier[self] . identifier[model_scores] , identifier[coefficients] | def _uncythonized_model(self, beta):
""" Creates the structure of the model
Parameters
----------
beta : np.array
Contains untransformed starting values for latent variables
Returns
----------
theta : np.array
Contains the predicted values for the time series
Y : np.array
Contains the length-adjusted time series (accounting for lags)
scores : np.array
Contains the scores for the time series
"""
parm = np.array([self.latent_variables.z_list[k].prior.transform(beta[k]) for k in range(beta.shape[0])])
coefficients = np.zeros((self.X.shape[1], self.model_Y.shape[0] + 1))
coefficients[:, 0] = self.initial_values
theta = np.zeros(self.model_Y.shape[0] + 1)
(model_scale, model_shape, model_skewness) = self._get_scale_and_shape(parm)
# Loop over time series
(theta, self.model_scores, coefficients) = gas_reg_recursion(parm, theta, self.X, coefficients, self.model_scores, self.model_Y, self.model_Y.shape[0], self.family.reg_score_function, self.link, model_scale, model_shape, model_skewness, self.max_lag)
return (theta[:-1], self.model_Y, self.model_scores, coefficients) |
def get_centered_pagination(current, total, visible=5):
    '''Return the range of pages to render in a pagination menu.

    The current page is always kept in the middle of the visible window
    except for the edge cases, where the window is clamped to [1, total].

    Returns a dict ``{prev, first, current, last, next}`` where ``prev``
    and ``next`` are ``None`` when the current page is at the respective
    edge.

    :param current: the current page (1-based)
    :param total: total number of pages available
    :param visible: number of pages visible in the menu
    '''
    # Half-window must be an integer: under Python 3, `visible / 2` is
    # float division and would yield fractional page numbers.
    inc = visible // 2
    first = current - inc
    last = current + inc
    if total <= visible:
        # Fewer pages than the window: show everything.
        first = 1
        last = total
    elif last > total:
        # Clamp the window against the right edge.
        first = total - (visible - 1)
        last = total
    elif first < 1:
        # Clamp the window against the left edge.
        first = 1
        last = visible
    return dict(prev=current - 1 if current > 1 else None,
                first=first,
                current=current,
                last=last,
                next=current + 1 if current < total else None)
constant[ Return the range of pages to render in a pagination menu.
The current page is always kept in the middle except
for the edge cases.
Reeturns a dict
{ prev, first, current, last, next }
:param current: the current page
:param total: total number of pages available
:param visible: number of pages visible
]
variable[inc] assign[=] binary_operation[name[visible] / constant[2]]
variable[first] assign[=] binary_operation[name[current] - name[inc]]
variable[last] assign[=] binary_operation[name[current] + name[inc]]
if compare[name[total] less_or_equal[<=] name[visible]] begin[:]
variable[first] assign[=] constant[1]
variable[last] assign[=] name[total]
return[call[name[dict], parameter[]]] | keyword[def] identifier[get_centered_pagination] ( identifier[current] , identifier[total] , identifier[visible] = literal[int] ):
literal[string]
identifier[inc] = identifier[visible] / literal[int]
identifier[first] = identifier[current] - identifier[inc]
identifier[last] = identifier[current] + identifier[inc]
keyword[if] ( identifier[total] <= identifier[visible] ):
identifier[first] = literal[int]
identifier[last] = identifier[total]
keyword[elif] ( identifier[last] > identifier[total] ):
identifier[first] = identifier[total] -( identifier[visible] - literal[int] )
identifier[last] = identifier[total]
keyword[elif] ( identifier[first] < literal[int] ):
identifier[first] = literal[int]
identifier[last] = identifier[visible]
keyword[return] identifier[dict] ( identifier[prev] = identifier[current] - literal[int] keyword[if] ( identifier[current] > literal[int] ) keyword[else] keyword[None] ,
identifier[first] = identifier[first] ,
identifier[current] = identifier[current] ,
identifier[last] = identifier[last] ,
identifier[next] = identifier[current] + literal[int] keyword[if] ( identifier[current] < identifier[total] ) keyword[else] keyword[None] ) | def get_centered_pagination(current, total, visible=5):
""" Return the range of pages to render in a pagination menu.
The current page is always kept in the middle except
for the edge cases.
Reeturns a dict
{ prev, first, current, last, next }
:param current: the current page
:param total: total number of pages available
:param visible: number of pages visible
"""
inc = visible / 2
first = current - inc
last = current + inc
if total <= visible:
first = 1
last = total # depends on [control=['if'], data=['total']]
elif last > total:
first = total - (visible - 1)
last = total # depends on [control=['if'], data=['last', 'total']]
elif first < 1:
first = 1
last = visible # depends on [control=['if'], data=['first']]
return dict(prev=current - 1 if current > 1 else None, first=first, current=current, last=last, next=current + 1 if current < total else None) |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.