code
stringlengths 75
104k
| code_sememe
stringlengths 47
309k
| token_type
stringlengths 215
214k
| code_dependency
stringlengths 75
155k
|
|---|---|---|---|
def meth_wdl(args):
''' Retrieve WDL for given version of a repository method'''
r = fapi.get_repository_method(args.namespace, args.method,
args.snapshot_id, True)
fapi._check_response_code(r, 200)
return r.text
|
def function[meth_wdl, parameter[args]]:
constant[ Retrieve WDL for given version of a repository method]
variable[r] assign[=] call[name[fapi].get_repository_method, parameter[name[args].namespace, name[args].method, name[args].snapshot_id, constant[True]]]
call[name[fapi]._check_response_code, parameter[name[r], constant[200]]]
return[name[r].text]
|
keyword[def] identifier[meth_wdl] ( identifier[args] ):
literal[string]
identifier[r] = identifier[fapi] . identifier[get_repository_method] ( identifier[args] . identifier[namespace] , identifier[args] . identifier[method] ,
identifier[args] . identifier[snapshot_id] , keyword[True] )
identifier[fapi] . identifier[_check_response_code] ( identifier[r] , literal[int] )
keyword[return] identifier[r] . identifier[text]
|
def meth_wdl(args):
""" Retrieve WDL for given version of a repository method"""
r = fapi.get_repository_method(args.namespace, args.method, args.snapshot_id, True)
fapi._check_response_code(r, 200)
return r.text
|
def p_annotation_date_1(self, p):
"""annotation_date : ANNOTATION_DATE DATE"""
try:
if six.PY2:
value = p[2].decode(encoding='utf-8')
else:
value = p[2]
self.builder.add_annotation_date(self.document, value)
except CardinalityError:
self.more_than_one_error('AnnotationDate', p.lineno(1))
except OrderError:
self.order_error('AnnotationDate', 'Annotator', p.lineno(1))
|
def function[p_annotation_date_1, parameter[self, p]]:
constant[annotation_date : ANNOTATION_DATE DATE]
<ast.Try object at 0x7da1b014d4e0>
|
keyword[def] identifier[p_annotation_date_1] ( identifier[self] , identifier[p] ):
literal[string]
keyword[try] :
keyword[if] identifier[six] . identifier[PY2] :
identifier[value] = identifier[p] [ literal[int] ]. identifier[decode] ( identifier[encoding] = literal[string] )
keyword[else] :
identifier[value] = identifier[p] [ literal[int] ]
identifier[self] . identifier[builder] . identifier[add_annotation_date] ( identifier[self] . identifier[document] , identifier[value] )
keyword[except] identifier[CardinalityError] :
identifier[self] . identifier[more_than_one_error] ( literal[string] , identifier[p] . identifier[lineno] ( literal[int] ))
keyword[except] identifier[OrderError] :
identifier[self] . identifier[order_error] ( literal[string] , literal[string] , identifier[p] . identifier[lineno] ( literal[int] ))
|
def p_annotation_date_1(self, p):
"""annotation_date : ANNOTATION_DATE DATE"""
try:
if six.PY2:
value = p[2].decode(encoding='utf-8') # depends on [control=['if'], data=[]]
else:
value = p[2]
self.builder.add_annotation_date(self.document, value) # depends on [control=['try'], data=[]]
except CardinalityError:
self.more_than_one_error('AnnotationDate', p.lineno(1)) # depends on [control=['except'], data=[]]
except OrderError:
self.order_error('AnnotationDate', 'Annotator', p.lineno(1)) # depends on [control=['except'], data=[]]
|
def recvRtspReply(self):
"""Receive RTSP reply from the server."""
while True:
reply = self.rtspSocket.recv(1024)
if reply:
self.parseRtspReply(reply)
# Close the RTSP socket upon requesting Teardown
if self.requestSent == self.TEARDOWN:
self.rtspSocket.shutdown(socket.SHUT_RDWR)
self.rtspSocket.close()
break
|
def function[recvRtspReply, parameter[self]]:
constant[Receive RTSP reply from the server.]
while constant[True] begin[:]
variable[reply] assign[=] call[name[self].rtspSocket.recv, parameter[constant[1024]]]
if name[reply] begin[:]
call[name[self].parseRtspReply, parameter[name[reply]]]
if compare[name[self].requestSent equal[==] name[self].TEARDOWN] begin[:]
call[name[self].rtspSocket.shutdown, parameter[name[socket].SHUT_RDWR]]
call[name[self].rtspSocket.close, parameter[]]
break
|
keyword[def] identifier[recvRtspReply] ( identifier[self] ):
literal[string]
keyword[while] keyword[True] :
identifier[reply] = identifier[self] . identifier[rtspSocket] . identifier[recv] ( literal[int] )
keyword[if] identifier[reply] :
identifier[self] . identifier[parseRtspReply] ( identifier[reply] )
keyword[if] identifier[self] . identifier[requestSent] == identifier[self] . identifier[TEARDOWN] :
identifier[self] . identifier[rtspSocket] . identifier[shutdown] ( identifier[socket] . identifier[SHUT_RDWR] )
identifier[self] . identifier[rtspSocket] . identifier[close] ()
keyword[break]
|
def recvRtspReply(self):
"""Receive RTSP reply from the server."""
while True:
reply = self.rtspSocket.recv(1024)
if reply:
self.parseRtspReply(reply) # depends on [control=['if'], data=[]]
# Close the RTSP socket upon requesting Teardown
if self.requestSent == self.TEARDOWN:
self.rtspSocket.shutdown(socket.SHUT_RDWR)
self.rtspSocket.close()
break # depends on [control=['if'], data=[]] # depends on [control=['while'], data=[]]
|
def load_django(self, query: "django query"):
"""Load the main dataframe from a django orm query
:param query: django query from a model
:type query: django query
:example: ``ds.load_django(Mymodel.objects.all())``
"""
try:
self.df = self._load_django(query)
except Exception as e:
self.err(e, "Can not load data from query")
|
def function[load_django, parameter[self, query]]:
constant[Load the main dataframe from a django orm query
:param query: django query from a model
:type query: django query
:example: ``ds.load_django(Mymodel.objects.all())``
]
<ast.Try object at 0x7da20c6c5ba0>
|
keyword[def] identifier[load_django] ( identifier[self] , identifier[query] : literal[string] ):
literal[string]
keyword[try] :
identifier[self] . identifier[df] = identifier[self] . identifier[_load_django] ( identifier[query] )
keyword[except] identifier[Exception] keyword[as] identifier[e] :
identifier[self] . identifier[err] ( identifier[e] , literal[string] )
|
def load_django(self, query: 'django query'):
"""Load the main dataframe from a django orm query
:param query: django query from a model
:type query: django query
:example: ``ds.load_django(Mymodel.objects.all())``
"""
try:
self.df = self._load_django(query) # depends on [control=['try'], data=[]]
except Exception as e:
self.err(e, 'Can not load data from query') # depends on [control=['except'], data=['e']]
|
def verifiable(self):
"""
Returns True if this link can be verified after download, False if it
cannot, and None if we cannot determine.
"""
trusted = self.trusted or getattr(self.comes_from, "trusted", None)
if trusted is not None and trusted:
# This link came from a trusted source. It *may* be verifiable but
# first we need to see if this page is operating under the new
# API version.
try:
api_version = getattr(self.comes_from, "api_version", None)
api_version = int(api_version)
except (ValueError, TypeError):
api_version = None
if api_version is None or api_version <= 1:
# This link is either trusted, or it came from a trusted,
# however it is not operating under the API version 2 so
# we can't make any claims about if it's safe or not
return
if self.hash:
# This link came from a trusted source and it has a hash, so we
# can consider it safe.
return True
else:
# This link came from a trusted source, using the new API
# version, and it does not have a hash. It is NOT verifiable
return False
elif trusted is not None:
# This link came from an untrusted source and we cannot trust it
return False
|
def function[verifiable, parameter[self]]:
constant[
Returns True if this link can be verified after download, False if it
cannot, and None if we cannot determine.
]
variable[trusted] assign[=] <ast.BoolOp object at 0x7da204622fe0>
if <ast.BoolOp object at 0x7da204621ae0> begin[:]
<ast.Try object at 0x7da204620160>
if <ast.BoolOp object at 0x7da204621f30> begin[:]
return[None]
if name[self].hash begin[:]
return[constant[True]]
|
keyword[def] identifier[verifiable] ( identifier[self] ):
literal[string]
identifier[trusted] = identifier[self] . identifier[trusted] keyword[or] identifier[getattr] ( identifier[self] . identifier[comes_from] , literal[string] , keyword[None] )
keyword[if] identifier[trusted] keyword[is] keyword[not] keyword[None] keyword[and] identifier[trusted] :
keyword[try] :
identifier[api_version] = identifier[getattr] ( identifier[self] . identifier[comes_from] , literal[string] , keyword[None] )
identifier[api_version] = identifier[int] ( identifier[api_version] )
keyword[except] ( identifier[ValueError] , identifier[TypeError] ):
identifier[api_version] = keyword[None]
keyword[if] identifier[api_version] keyword[is] keyword[None] keyword[or] identifier[api_version] <= literal[int] :
keyword[return]
keyword[if] identifier[self] . identifier[hash] :
keyword[return] keyword[True]
keyword[else] :
keyword[return] keyword[False]
keyword[elif] identifier[trusted] keyword[is] keyword[not] keyword[None] :
keyword[return] keyword[False]
|
def verifiable(self):
"""
Returns True if this link can be verified after download, False if it
cannot, and None if we cannot determine.
"""
trusted = self.trusted or getattr(self.comes_from, 'trusted', None)
if trusted is not None and trusted:
# This link came from a trusted source. It *may* be verifiable but
# first we need to see if this page is operating under the new
# API version.
try:
api_version = getattr(self.comes_from, 'api_version', None)
api_version = int(api_version) # depends on [control=['try'], data=[]]
except (ValueError, TypeError):
api_version = None # depends on [control=['except'], data=[]]
if api_version is None or api_version <= 1:
# This link is either trusted, or it came from a trusted,
# however it is not operating under the API version 2 so
# we can't make any claims about if it's safe or not
return # depends on [control=['if'], data=[]]
if self.hash:
# This link came from a trusted source and it has a hash, so we
# can consider it safe.
return True # depends on [control=['if'], data=[]]
else:
# This link came from a trusted source, using the new API
# version, and it does not have a hash. It is NOT verifiable
return False # depends on [control=['if'], data=[]]
elif trusted is not None:
# This link came from an untrusted source and we cannot trust it
return False # depends on [control=['if'], data=[]]
|
def merge_text_nodes_on(self, node):
"""Merges all consecutive non-translatable text nodes into one"""
if not isinstance(node, ContainerNode) or not node.children:
return
new_children = []
text_run = []
for i in node.children:
if isinstance(i, Text) and not i.translatable:
text_run.append(i.escaped())
else:
if text_run:
new_children.append(EscapedText(''.join(text_run)))
text_run = []
new_children.append(i)
if text_run:
new_children.append(EscapedText(''.join(text_run)))
node.children = new_children
for i in node.children:
self.merge_text_nodes_on(i)
|
def function[merge_text_nodes_on, parameter[self, node]]:
constant[Merges all consecutive non-translatable text nodes into one]
if <ast.BoolOp object at 0x7da20c795cc0> begin[:]
return[None]
variable[new_children] assign[=] list[[]]
variable[text_run] assign[=] list[[]]
for taget[name[i]] in starred[name[node].children] begin[:]
if <ast.BoolOp object at 0x7da20c7960b0> begin[:]
call[name[text_run].append, parameter[call[name[i].escaped, parameter[]]]]
if name[text_run] begin[:]
call[name[new_children].append, parameter[call[name[EscapedText], parameter[call[constant[].join, parameter[name[text_run]]]]]]]
name[node].children assign[=] name[new_children]
for taget[name[i]] in starred[name[node].children] begin[:]
call[name[self].merge_text_nodes_on, parameter[name[i]]]
|
keyword[def] identifier[merge_text_nodes_on] ( identifier[self] , identifier[node] ):
literal[string]
keyword[if] keyword[not] identifier[isinstance] ( identifier[node] , identifier[ContainerNode] ) keyword[or] keyword[not] identifier[node] . identifier[children] :
keyword[return]
identifier[new_children] =[]
identifier[text_run] =[]
keyword[for] identifier[i] keyword[in] identifier[node] . identifier[children] :
keyword[if] identifier[isinstance] ( identifier[i] , identifier[Text] ) keyword[and] keyword[not] identifier[i] . identifier[translatable] :
identifier[text_run] . identifier[append] ( identifier[i] . identifier[escaped] ())
keyword[else] :
keyword[if] identifier[text_run] :
identifier[new_children] . identifier[append] ( identifier[EscapedText] ( literal[string] . identifier[join] ( identifier[text_run] )))
identifier[text_run] =[]
identifier[new_children] . identifier[append] ( identifier[i] )
keyword[if] identifier[text_run] :
identifier[new_children] . identifier[append] ( identifier[EscapedText] ( literal[string] . identifier[join] ( identifier[text_run] )))
identifier[node] . identifier[children] = identifier[new_children]
keyword[for] identifier[i] keyword[in] identifier[node] . identifier[children] :
identifier[self] . identifier[merge_text_nodes_on] ( identifier[i] )
|
def merge_text_nodes_on(self, node):
"""Merges all consecutive non-translatable text nodes into one"""
if not isinstance(node, ContainerNode) or not node.children:
return # depends on [control=['if'], data=[]]
new_children = []
text_run = []
for i in node.children:
if isinstance(i, Text) and (not i.translatable):
text_run.append(i.escaped()) # depends on [control=['if'], data=[]]
else:
if text_run:
new_children.append(EscapedText(''.join(text_run)))
text_run = [] # depends on [control=['if'], data=[]]
new_children.append(i) # depends on [control=['for'], data=['i']]
if text_run:
new_children.append(EscapedText(''.join(text_run))) # depends on [control=['if'], data=[]]
node.children = new_children
for i in node.children:
self.merge_text_nodes_on(i) # depends on [control=['for'], data=['i']]
|
def calc_euler_tour(g, start, end):
'''Calculates an Euler tour over the graph g from vertex start to vertex end.
Assumes start and end are odd-degree vertices and that there are no other odd-degree
vertices.'''
even_g = nx.subgraph(g, g.nodes()).copy()
if end in even_g.neighbors(start):
# If start and end are neighbors, remove the edge
even_g.remove_edge(start, end)
comps = list(nx.connected_components(even_g))
# If the graph did not split, just find the euler circuit
if len(comps) == 1:
trail = list(nx.eulerian_circuit(even_g, start))
trail.append((start, end))
elif len(comps) == 2:
subg1 = nx.subgraph(even_g, comps[0])
subg2 = nx.subgraph(even_g, comps[1])
start_subg, end_subg = (subg1, subg2) if start in subg1.nodes() else (subg2, subg1)
trail = list(nx.eulerian_circuit(start_subg, start)) + [(start, end)] + list(nx.eulerian_circuit(end_subg, end))
else:
raise Exception('Unknown edge case with connected components of size {0}:\n{1}'.format(len(comps), comps))
else:
# If they are not neighbors, we add an imaginary edge and calculate the euler circuit
even_g.add_edge(start, end)
circ = list(nx.eulerian_circuit(even_g, start))
try:
trail_start = circ.index((start, end))
except:
trail_start = circ.index((end, start))
trail = circ[trail_start+1:] + circ[:trail_start]
return trail
|
def function[calc_euler_tour, parameter[g, start, end]]:
constant[Calculates an Euler tour over the graph g from vertex start to vertex end.
Assumes start and end are odd-degree vertices and that there are no other odd-degree
vertices.]
variable[even_g] assign[=] call[call[name[nx].subgraph, parameter[name[g], call[name[g].nodes, parameter[]]]].copy, parameter[]]
if compare[name[end] in call[name[even_g].neighbors, parameter[name[start]]]] begin[:]
call[name[even_g].remove_edge, parameter[name[start], name[end]]]
variable[comps] assign[=] call[name[list], parameter[call[name[nx].connected_components, parameter[name[even_g]]]]]
if compare[call[name[len], parameter[name[comps]]] equal[==] constant[1]] begin[:]
variable[trail] assign[=] call[name[list], parameter[call[name[nx].eulerian_circuit, parameter[name[even_g], name[start]]]]]
call[name[trail].append, parameter[tuple[[<ast.Name object at 0x7da1b0eaab60>, <ast.Name object at 0x7da1b0eaa590>]]]]
return[name[trail]]
|
keyword[def] identifier[calc_euler_tour] ( identifier[g] , identifier[start] , identifier[end] ):
literal[string]
identifier[even_g] = identifier[nx] . identifier[subgraph] ( identifier[g] , identifier[g] . identifier[nodes] ()). identifier[copy] ()
keyword[if] identifier[end] keyword[in] identifier[even_g] . identifier[neighbors] ( identifier[start] ):
identifier[even_g] . identifier[remove_edge] ( identifier[start] , identifier[end] )
identifier[comps] = identifier[list] ( identifier[nx] . identifier[connected_components] ( identifier[even_g] ))
keyword[if] identifier[len] ( identifier[comps] )== literal[int] :
identifier[trail] = identifier[list] ( identifier[nx] . identifier[eulerian_circuit] ( identifier[even_g] , identifier[start] ))
identifier[trail] . identifier[append] (( identifier[start] , identifier[end] ))
keyword[elif] identifier[len] ( identifier[comps] )== literal[int] :
identifier[subg1] = identifier[nx] . identifier[subgraph] ( identifier[even_g] , identifier[comps] [ literal[int] ])
identifier[subg2] = identifier[nx] . identifier[subgraph] ( identifier[even_g] , identifier[comps] [ literal[int] ])
identifier[start_subg] , identifier[end_subg] =( identifier[subg1] , identifier[subg2] ) keyword[if] identifier[start] keyword[in] identifier[subg1] . identifier[nodes] () keyword[else] ( identifier[subg2] , identifier[subg1] )
identifier[trail] = identifier[list] ( identifier[nx] . identifier[eulerian_circuit] ( identifier[start_subg] , identifier[start] ))+[( identifier[start] , identifier[end] )]+ identifier[list] ( identifier[nx] . identifier[eulerian_circuit] ( identifier[end_subg] , identifier[end] ))
keyword[else] :
keyword[raise] identifier[Exception] ( literal[string] . identifier[format] ( identifier[len] ( identifier[comps] ), identifier[comps] ))
keyword[else] :
identifier[even_g] . identifier[add_edge] ( identifier[start] , identifier[end] )
identifier[circ] = identifier[list] ( identifier[nx] . identifier[eulerian_circuit] ( identifier[even_g] , identifier[start] ))
keyword[try] :
identifier[trail_start] = identifier[circ] . identifier[index] (( identifier[start] , identifier[end] ))
keyword[except] :
identifier[trail_start] = identifier[circ] . identifier[index] (( identifier[end] , identifier[start] ))
identifier[trail] = identifier[circ] [ identifier[trail_start] + literal[int] :]+ identifier[circ] [: identifier[trail_start] ]
keyword[return] identifier[trail]
|
def calc_euler_tour(g, start, end):
"""Calculates an Euler tour over the graph g from vertex start to vertex end.
Assumes start and end are odd-degree vertices and that there are no other odd-degree
vertices."""
even_g = nx.subgraph(g, g.nodes()).copy()
if end in even_g.neighbors(start):
# If start and end are neighbors, remove the edge
even_g.remove_edge(start, end)
comps = list(nx.connected_components(even_g))
# If the graph did not split, just find the euler circuit
if len(comps) == 1:
trail = list(nx.eulerian_circuit(even_g, start))
trail.append((start, end)) # depends on [control=['if'], data=[]]
elif len(comps) == 2:
subg1 = nx.subgraph(even_g, comps[0])
subg2 = nx.subgraph(even_g, comps[1])
(start_subg, end_subg) = (subg1, subg2) if start in subg1.nodes() else (subg2, subg1)
trail = list(nx.eulerian_circuit(start_subg, start)) + [(start, end)] + list(nx.eulerian_circuit(end_subg, end)) # depends on [control=['if'], data=[]]
else:
raise Exception('Unknown edge case with connected components of size {0}:\n{1}'.format(len(comps), comps)) # depends on [control=['if'], data=['end']]
else:
# If they are not neighbors, we add an imaginary edge and calculate the euler circuit
even_g.add_edge(start, end)
circ = list(nx.eulerian_circuit(even_g, start))
try:
trail_start = circ.index((start, end)) # depends on [control=['try'], data=[]]
except:
trail_start = circ.index((end, start)) # depends on [control=['except'], data=[]]
trail = circ[trail_start + 1:] + circ[:trail_start]
return trail
|
def fix_spelling(self, words, join=True, joinstring=' '):
"""Simple function for quickly correcting misspelled words.
Parameters
----------
words: list of str or str
Either a list of pretokenized words or a string. In case of a string, it will be splitted using
default behaviour of string.split() function.
join: boolean (default: True)
Should we join the list of words into a single string.
joinstring: str (default: ' ')
The string that will be used to join together the fixed words.
Returns
-------
str
In case join is True
list of str
In case join is False.
"""
fixed_words = []
for word in self.spellcheck(words, suggestions=True):
if word['spelling']:
fixed_words.append(word['text'])
else:
suggestions = word['suggestions']
if len(suggestions) > 0:
fixed_words.append(suggestions[0])
else:
fixed_words.append(word['text'])
if join:
return joinstring.join(fixed_words)
else:
return fixed_words
|
def function[fix_spelling, parameter[self, words, join, joinstring]]:
constant[Simple function for quickly correcting misspelled words.
Parameters
----------
words: list of str or str
Either a list of pretokenized words or a string. In case of a string, it will be splitted using
default behaviour of string.split() function.
join: boolean (default: True)
Should we join the list of words into a single string.
joinstring: str (default: ' ')
The string that will be used to join together the fixed words.
Returns
-------
str
In case join is True
list of str
In case join is False.
]
variable[fixed_words] assign[=] list[[]]
for taget[name[word]] in starred[call[name[self].spellcheck, parameter[name[words]]]] begin[:]
if call[name[word]][constant[spelling]] begin[:]
call[name[fixed_words].append, parameter[call[name[word]][constant[text]]]]
if name[join] begin[:]
return[call[name[joinstring].join, parameter[name[fixed_words]]]]
|
keyword[def] identifier[fix_spelling] ( identifier[self] , identifier[words] , identifier[join] = keyword[True] , identifier[joinstring] = literal[string] ):
literal[string]
identifier[fixed_words] =[]
keyword[for] identifier[word] keyword[in] identifier[self] . identifier[spellcheck] ( identifier[words] , identifier[suggestions] = keyword[True] ):
keyword[if] identifier[word] [ literal[string] ]:
identifier[fixed_words] . identifier[append] ( identifier[word] [ literal[string] ])
keyword[else] :
identifier[suggestions] = identifier[word] [ literal[string] ]
keyword[if] identifier[len] ( identifier[suggestions] )> literal[int] :
identifier[fixed_words] . identifier[append] ( identifier[suggestions] [ literal[int] ])
keyword[else] :
identifier[fixed_words] . identifier[append] ( identifier[word] [ literal[string] ])
keyword[if] identifier[join] :
keyword[return] identifier[joinstring] . identifier[join] ( identifier[fixed_words] )
keyword[else] :
keyword[return] identifier[fixed_words]
|
def fix_spelling(self, words, join=True, joinstring=' '):
"""Simple function for quickly correcting misspelled words.
Parameters
----------
words: list of str or str
Either a list of pretokenized words or a string. In case of a string, it will be splitted using
default behaviour of string.split() function.
join: boolean (default: True)
Should we join the list of words into a single string.
joinstring: str (default: ' ')
The string that will be used to join together the fixed words.
Returns
-------
str
In case join is True
list of str
In case join is False.
"""
fixed_words = []
for word in self.spellcheck(words, suggestions=True):
if word['spelling']:
fixed_words.append(word['text']) # depends on [control=['if'], data=[]]
else:
suggestions = word['suggestions']
if len(suggestions) > 0:
fixed_words.append(suggestions[0]) # depends on [control=['if'], data=[]]
else:
fixed_words.append(word['text']) # depends on [control=['for'], data=['word']]
if join:
return joinstring.join(fixed_words) # depends on [control=['if'], data=[]]
else:
return fixed_words
|
def __search(self):
"""
Performs the search.
"""
self.__search_results = []
editorsFiles = self.__container.default_target in self.__location.targets and \
[editor.file for editor in self.__container.script_editor.list_editors()] or []
self.__search_editors_files(editorsFiles)
self.__search_files(self.__location.files)
for directory in self.__location.directories:
if self.__interrupt:
return
files_walker = foundations.walkers.files_walker(directory,
self.__location.filters_in,
list(itertools.chain(self.__location.filters_out,
self.__location.files,
editorsFiles)))
self.__search_files(files_walker)
not self.__interrupt and self.searchFinished.emit(self.__search_results)
|
def function[__search, parameter[self]]:
constant[
Performs the search.
]
name[self].__search_results assign[=] list[[]]
variable[editorsFiles] assign[=] <ast.BoolOp object at 0x7da1b0912680>
call[name[self].__search_editors_files, parameter[name[editorsFiles]]]
call[name[self].__search_files, parameter[name[self].__location.files]]
for taget[name[directory]] in starred[name[self].__location.directories] begin[:]
if name[self].__interrupt begin[:]
return[None]
variable[files_walker] assign[=] call[name[foundations].walkers.files_walker, parameter[name[directory], name[self].__location.filters_in, call[name[list], parameter[call[name[itertools].chain, parameter[name[self].__location.filters_out, name[self].__location.files, name[editorsFiles]]]]]]]
call[name[self].__search_files, parameter[name[files_walker]]]
<ast.BoolOp object at 0x7da18bcc87c0>
|
keyword[def] identifier[__search] ( identifier[self] ):
literal[string]
identifier[self] . identifier[__search_results] =[]
identifier[editorsFiles] = identifier[self] . identifier[__container] . identifier[default_target] keyword[in] identifier[self] . identifier[__location] . identifier[targets] keyword[and] [ identifier[editor] . identifier[file] keyword[for] identifier[editor] keyword[in] identifier[self] . identifier[__container] . identifier[script_editor] . identifier[list_editors] ()] keyword[or] []
identifier[self] . identifier[__search_editors_files] ( identifier[editorsFiles] )
identifier[self] . identifier[__search_files] ( identifier[self] . identifier[__location] . identifier[files] )
keyword[for] identifier[directory] keyword[in] identifier[self] . identifier[__location] . identifier[directories] :
keyword[if] identifier[self] . identifier[__interrupt] :
keyword[return]
identifier[files_walker] = identifier[foundations] . identifier[walkers] . identifier[files_walker] ( identifier[directory] ,
identifier[self] . identifier[__location] . identifier[filters_in] ,
identifier[list] ( identifier[itertools] . identifier[chain] ( identifier[self] . identifier[__location] . identifier[filters_out] ,
identifier[self] . identifier[__location] . identifier[files] ,
identifier[editorsFiles] )))
identifier[self] . identifier[__search_files] ( identifier[files_walker] )
keyword[not] identifier[self] . identifier[__interrupt] keyword[and] identifier[self] . identifier[searchFinished] . identifier[emit] ( identifier[self] . identifier[__search_results] )
|
def __search(self):
"""
Performs the search.
"""
self.__search_results = []
editorsFiles = self.__container.default_target in self.__location.targets and [editor.file for editor in self.__container.script_editor.list_editors()] or []
self.__search_editors_files(editorsFiles)
self.__search_files(self.__location.files)
for directory in self.__location.directories:
if self.__interrupt:
return # depends on [control=['if'], data=[]]
files_walker = foundations.walkers.files_walker(directory, self.__location.filters_in, list(itertools.chain(self.__location.filters_out, self.__location.files, editorsFiles)))
self.__search_files(files_walker) # depends on [control=['for'], data=['directory']]
not self.__interrupt and self.searchFinished.emit(self.__search_results)
|
def get_package_version(path):
'''Extracts the version'''
with open(VERSION_FILE, "rt") as f:
verstrline = f.read()
VERSION = r"^version = ['\"]([^'\"]*)['\"]"
results = re.search(VERSION, verstrline, re.M)
if results:
version = results.group(1)
else:
raise RuntimeError("Unable to find version string in {}.".format(path))
return version
|
def function[get_package_version, parameter[path]]:
constant[Extracts the version]
with call[name[open], parameter[name[VERSION_FILE], constant[rt]]] begin[:]
variable[verstrline] assign[=] call[name[f].read, parameter[]]
variable[VERSION] assign[=] constant[^version = ['\"]([^'\"]*)['\"]]
variable[results] assign[=] call[name[re].search, parameter[name[VERSION], name[verstrline], name[re].M]]
if name[results] begin[:]
variable[version] assign[=] call[name[results].group, parameter[constant[1]]]
return[name[version]]
|
keyword[def] identifier[get_package_version] ( identifier[path] ):
literal[string]
keyword[with] identifier[open] ( identifier[VERSION_FILE] , literal[string] ) keyword[as] identifier[f] :
identifier[verstrline] = identifier[f] . identifier[read] ()
identifier[VERSION] = literal[string]
identifier[results] = identifier[re] . identifier[search] ( identifier[VERSION] , identifier[verstrline] , identifier[re] . identifier[M] )
keyword[if] identifier[results] :
identifier[version] = identifier[results] . identifier[group] ( literal[int] )
keyword[else] :
keyword[raise] identifier[RuntimeError] ( literal[string] . identifier[format] ( identifier[path] ))
keyword[return] identifier[version]
|
def get_package_version(path):
"""Extracts the version"""
with open(VERSION_FILE, 'rt') as f:
verstrline = f.read() # depends on [control=['with'], data=['f']]
VERSION = '^version = [\'\\"]([^\'\\"]*)[\'\\"]'
results = re.search(VERSION, verstrline, re.M)
if results:
version = results.group(1) # depends on [control=['if'], data=[]]
else:
raise RuntimeError('Unable to find version string in {}.'.format(path))
return version
|
def python_mime(fn):
"""
Decorator, which adds correct MIME type for python source to the decorated
bottle API function.
"""
@wraps(fn)
def python_mime_decorator(*args, **kwargs):
response.content_type = "text/x-python"
return fn(*args, **kwargs)
return python_mime_decorator
|
def function[python_mime, parameter[fn]]:
constant[
Decorator, which adds correct MIME type for python source to the decorated
bottle API function.
]
def function[python_mime_decorator, parameter[]]:
name[response].content_type assign[=] constant[text/x-python]
return[call[name[fn], parameter[<ast.Starred object at 0x7da1b2344220>]]]
return[name[python_mime_decorator]]
|
keyword[def] identifier[python_mime] ( identifier[fn] ):
literal[string]
@ identifier[wraps] ( identifier[fn] )
keyword[def] identifier[python_mime_decorator] (* identifier[args] ,** identifier[kwargs] ):
identifier[response] . identifier[content_type] = literal[string]
keyword[return] identifier[fn] (* identifier[args] ,** identifier[kwargs] )
keyword[return] identifier[python_mime_decorator]
|
def python_mime(fn):
"""
Decorator, which adds correct MIME type for python source to the decorated
bottle API function.
"""
@wraps(fn)
def python_mime_decorator(*args, **kwargs):
response.content_type = 'text/x-python'
return fn(*args, **kwargs)
return python_mime_decorator
|
def p_else_part_endif(p):
""" else_part_inline : ELSE NEWLINE program_co endif
| ELSE NEWLINE statements_co endif
| ELSE NEWLINE co_statements_co endif
| ELSE NEWLINE endif
| ELSE NEWLINE LABEL statements_co endif
| ELSE NEWLINE LABEL co_statements_co endif
| ELSE statements_co endif
| ELSE co_statements_co endif
"""
if p[2] == '\n':
if len(p) == 4:
p[0] = [make_nop(), p[3]]
elif len(p) == 6:
p[0] = [make_label(p[3], p.lineno(3)), p[4], p[5]]
else:
p[0] = [p[3], p[4]]
else:
p[0] = [p[2], p[3]]
|
def function[p_else_part_endif, parameter[p]]:
constant[ else_part_inline : ELSE NEWLINE program_co endif
| ELSE NEWLINE statements_co endif
| ELSE NEWLINE co_statements_co endif
| ELSE NEWLINE endif
| ELSE NEWLINE LABEL statements_co endif
| ELSE NEWLINE LABEL co_statements_co endif
| ELSE statements_co endif
| ELSE co_statements_co endif
]
if compare[call[name[p]][constant[2]] equal[==] constant[
]] begin[:]
if compare[call[name[len], parameter[name[p]]] equal[==] constant[4]] begin[:]
call[name[p]][constant[0]] assign[=] list[[<ast.Call object at 0x7da1b0651a80>, <ast.Subscript object at 0x7da1b0651510>]]
|
keyword[def] identifier[p_else_part_endif] ( identifier[p] ):
literal[string]
keyword[if] identifier[p] [ literal[int] ]== literal[string] :
keyword[if] identifier[len] ( identifier[p] )== literal[int] :
identifier[p] [ literal[int] ]=[ identifier[make_nop] (), identifier[p] [ literal[int] ]]
keyword[elif] identifier[len] ( identifier[p] )== literal[int] :
identifier[p] [ literal[int] ]=[ identifier[make_label] ( identifier[p] [ literal[int] ], identifier[p] . identifier[lineno] ( literal[int] )), identifier[p] [ literal[int] ], identifier[p] [ literal[int] ]]
keyword[else] :
identifier[p] [ literal[int] ]=[ identifier[p] [ literal[int] ], identifier[p] [ literal[int] ]]
keyword[else] :
identifier[p] [ literal[int] ]=[ identifier[p] [ literal[int] ], identifier[p] [ literal[int] ]]
|
def p_else_part_endif(p):
""" else_part_inline : ELSE NEWLINE program_co endif
| ELSE NEWLINE statements_co endif
| ELSE NEWLINE co_statements_co endif
| ELSE NEWLINE endif
| ELSE NEWLINE LABEL statements_co endif
| ELSE NEWLINE LABEL co_statements_co endif
| ELSE statements_co endif
| ELSE co_statements_co endif
"""
if p[2] == '\n':
if len(p) == 4:
p[0] = [make_nop(), p[3]] # depends on [control=['if'], data=[]]
elif len(p) == 6:
p[0] = [make_label(p[3], p.lineno(3)), p[4], p[5]] # depends on [control=['if'], data=[]]
else:
p[0] = [p[3], p[4]] # depends on [control=['if'], data=[]]
else:
p[0] = [p[2], p[3]]
|
def _download_vswhere():
"""
Download vswhere to DOWNLOAD_PATH.
"""
print('downloading from', _get_latest_release_url())
try:
from urllib.request import urlopen
with urlopen(_get_latest_release_url()) as response, open(DOWNLOAD_PATH, 'wb') as outfile:
shutil.copyfileobj(response, outfile)
except ImportError:
# Python 2
import urllib
urllib.urlretrieve(_get_latest_release_url(), DOWNLOAD_PATH)
|
def function[_download_vswhere, parameter[]]:
constant[
Download vswhere to DOWNLOAD_PATH.
]
call[name[print], parameter[constant[downloading from], call[name[_get_latest_release_url], parameter[]]]]
<ast.Try object at 0x7da1b0aa6440>
|
keyword[def] identifier[_download_vswhere] ():
literal[string]
identifier[print] ( literal[string] , identifier[_get_latest_release_url] ())
keyword[try] :
keyword[from] identifier[urllib] . identifier[request] keyword[import] identifier[urlopen]
keyword[with] identifier[urlopen] ( identifier[_get_latest_release_url] ()) keyword[as] identifier[response] , identifier[open] ( identifier[DOWNLOAD_PATH] , literal[string] ) keyword[as] identifier[outfile] :
identifier[shutil] . identifier[copyfileobj] ( identifier[response] , identifier[outfile] )
keyword[except] identifier[ImportError] :
keyword[import] identifier[urllib]
identifier[urllib] . identifier[urlretrieve] ( identifier[_get_latest_release_url] (), identifier[DOWNLOAD_PATH] )
|
def _download_vswhere():
"""
Download vswhere to DOWNLOAD_PATH.
"""
print('downloading from', _get_latest_release_url())
try:
from urllib.request import urlopen
with urlopen(_get_latest_release_url()) as response, open(DOWNLOAD_PATH, 'wb') as outfile:
shutil.copyfileobj(response, outfile) # depends on [control=['with'], data=['response']] # depends on [control=['try'], data=[]]
except ImportError:
# Python 2
import urllib
urllib.urlretrieve(_get_latest_release_url(), DOWNLOAD_PATH) # depends on [control=['except'], data=[]]
|
def get_season(self, season_key, card_type="micro_card"):
"""
Calling Season API.
Arg:
season_key: key of the season
card_type: optional, default to micro_card. Accepted values are
micro_card & summary_card
Return:
json data
"""
season_url = self.api_path + "season/" + season_key + "/"
params = {}
params["card_type"] = card_type
response = self.get_response(season_url, params)
return response
|
def function[get_season, parameter[self, season_key, card_type]]:
constant[
Calling Season API.
Arg:
season_key: key of the season
card_type: optional, default to micro_card. Accepted values are
micro_card & summary_card
Return:
json data
]
variable[season_url] assign[=] binary_operation[binary_operation[binary_operation[name[self].api_path + constant[season/]] + name[season_key]] + constant[/]]
variable[params] assign[=] dictionary[[], []]
call[name[params]][constant[card_type]] assign[=] name[card_type]
variable[response] assign[=] call[name[self].get_response, parameter[name[season_url], name[params]]]
return[name[response]]
|
keyword[def] identifier[get_season] ( identifier[self] , identifier[season_key] , identifier[card_type] = literal[string] ):
literal[string]
identifier[season_url] = identifier[self] . identifier[api_path] + literal[string] + identifier[season_key] + literal[string]
identifier[params] ={}
identifier[params] [ literal[string] ]= identifier[card_type]
identifier[response] = identifier[self] . identifier[get_response] ( identifier[season_url] , identifier[params] )
keyword[return] identifier[response]
|
def get_season(self, season_key, card_type='micro_card'):
"""
Calling Season API.
Arg:
season_key: key of the season
card_type: optional, default to micro_card. Accepted values are
micro_card & summary_card
Return:
json data
"""
season_url = self.api_path + 'season/' + season_key + '/'
params = {}
params['card_type'] = card_type
response = self.get_response(season_url, params)
return response
|
def ellipse(n=1000, adaptive=False):
"""
Get a parameterized set of vectors defining
ellipse for a major and minor axis length.
Resulting vector bundle has major axes
along axes given.
"""
u = N.linspace(0,2*N.pi,n)
# Get a bundle of vectors defining
# a full rotation around the unit circle
return N.array([N.cos(u),N.sin(u)]).T
|
def function[ellipse, parameter[n, adaptive]]:
constant[
Get a parameterized set of vectors defining
ellipse for a major and minor axis length.
Resulting vector bundle has major axes
along axes given.
]
variable[u] assign[=] call[name[N].linspace, parameter[constant[0], binary_operation[constant[2] * name[N].pi], name[n]]]
return[call[name[N].array, parameter[list[[<ast.Call object at 0x7da1b184b820>, <ast.Call object at 0x7da1b184ae90>]]]].T]
|
keyword[def] identifier[ellipse] ( identifier[n] = literal[int] , identifier[adaptive] = keyword[False] ):
literal[string]
identifier[u] = identifier[N] . identifier[linspace] ( literal[int] , literal[int] * identifier[N] . identifier[pi] , identifier[n] )
keyword[return] identifier[N] . identifier[array] ([ identifier[N] . identifier[cos] ( identifier[u] ), identifier[N] . identifier[sin] ( identifier[u] )]). identifier[T]
|
def ellipse(n=1000, adaptive=False):
"""
Get a parameterized set of vectors defining
ellipse for a major and minor axis length.
Resulting vector bundle has major axes
along axes given.
"""
u = N.linspace(0, 2 * N.pi, n)
# Get a bundle of vectors defining
# a full rotation around the unit circle
return N.array([N.cos(u), N.sin(u)]).T
|
def get_dimension(self):
"""Get the dimension of the returned feature. This equals the number
of elements in the returned list of numbers."""
return int(round(float(self.strokes**2)/2 + float(self.strokes)/2))
|
def function[get_dimension, parameter[self]]:
constant[Get the dimension of the returned feature. This equals the number
of elements in the returned list of numbers.]
return[call[name[int], parameter[call[name[round], parameter[binary_operation[binary_operation[call[name[float], parameter[binary_operation[name[self].strokes ** constant[2]]]] / constant[2]] + binary_operation[call[name[float], parameter[name[self].strokes]] / constant[2]]]]]]]]
|
keyword[def] identifier[get_dimension] ( identifier[self] ):
literal[string]
keyword[return] identifier[int] ( identifier[round] ( identifier[float] ( identifier[self] . identifier[strokes] ** literal[int] )/ literal[int] + identifier[float] ( identifier[self] . identifier[strokes] )/ literal[int] ))
|
def get_dimension(self):
"""Get the dimension of the returned feature. This equals the number
of elements in the returned list of numbers."""
return int(round(float(self.strokes ** 2) / 2 + float(self.strokes) / 2))
|
def nested_insert(self, item_list):
""" Create a series of nested LIVVDicts given a list """
if len(item_list) == 1:
self[item_list[0]] = LIVVDict()
elif len(item_list) > 1:
if item_list[0] not in self:
self[item_list[0]] = LIVVDict()
self[item_list[0]].nested_insert(item_list[1:])
|
def function[nested_insert, parameter[self, item_list]]:
constant[ Create a series of nested LIVVDicts given a list ]
if compare[call[name[len], parameter[name[item_list]]] equal[==] constant[1]] begin[:]
call[name[self]][call[name[item_list]][constant[0]]] assign[=] call[name[LIVVDict], parameter[]]
|
keyword[def] identifier[nested_insert] ( identifier[self] , identifier[item_list] ):
literal[string]
keyword[if] identifier[len] ( identifier[item_list] )== literal[int] :
identifier[self] [ identifier[item_list] [ literal[int] ]]= identifier[LIVVDict] ()
keyword[elif] identifier[len] ( identifier[item_list] )> literal[int] :
keyword[if] identifier[item_list] [ literal[int] ] keyword[not] keyword[in] identifier[self] :
identifier[self] [ identifier[item_list] [ literal[int] ]]= identifier[LIVVDict] ()
identifier[self] [ identifier[item_list] [ literal[int] ]]. identifier[nested_insert] ( identifier[item_list] [ literal[int] :])
|
def nested_insert(self, item_list):
""" Create a series of nested LIVVDicts given a list """
if len(item_list) == 1:
self[item_list[0]] = LIVVDict() # depends on [control=['if'], data=[]]
elif len(item_list) > 1:
if item_list[0] not in self:
self[item_list[0]] = LIVVDict() # depends on [control=['if'], data=['self']]
self[item_list[0]].nested_insert(item_list[1:]) # depends on [control=['if'], data=[]]
|
def rotate(self):
'''Move the first address to the last position.'''
item = self._address_infos.pop(0)
self._address_infos.append(item)
|
def function[rotate, parameter[self]]:
constant[Move the first address to the last position.]
variable[item] assign[=] call[name[self]._address_infos.pop, parameter[constant[0]]]
call[name[self]._address_infos.append, parameter[name[item]]]
|
keyword[def] identifier[rotate] ( identifier[self] ):
literal[string]
identifier[item] = identifier[self] . identifier[_address_infos] . identifier[pop] ( literal[int] )
identifier[self] . identifier[_address_infos] . identifier[append] ( identifier[item] )
|
def rotate(self):
"""Move the first address to the last position."""
item = self._address_infos.pop(0)
self._address_infos.append(item)
|
def shortcut_app_id(shortcut):
"""
Generates the app id for a given shortcut. Steam uses app ids as a unique
identifier for games, but since shortcuts dont have a canonical serverside
representation they need to be generated on the fly. The important part
about this function is that it will generate the same app id as Steam does
for a given shortcut
"""
algorithm = Crc(width = 32, poly = 0x04C11DB7, reflect_in = True, xor_in = 0xffffffff, reflect_out = True, xor_out = 0xffffffff)
crc_input = ''.join([shortcut.exe,shortcut.name])
high_32 = algorithm.bit_by_bit(crc_input) | 0x80000000
full_64 = (high_32 << 32) | 0x02000000
return str(full_64)
|
def function[shortcut_app_id, parameter[shortcut]]:
constant[
Generates the app id for a given shortcut. Steam uses app ids as a unique
identifier for games, but since shortcuts dont have a canonical serverside
representation they need to be generated on the fly. The important part
about this function is that it will generate the same app id as Steam does
for a given shortcut
]
variable[algorithm] assign[=] call[name[Crc], parameter[]]
variable[crc_input] assign[=] call[constant[].join, parameter[list[[<ast.Attribute object at 0x7da20c6a9de0>, <ast.Attribute object at 0x7da20c6ab2b0>]]]]
variable[high_32] assign[=] binary_operation[call[name[algorithm].bit_by_bit, parameter[name[crc_input]]] <ast.BitOr object at 0x7da2590d6aa0> constant[2147483648]]
variable[full_64] assign[=] binary_operation[binary_operation[name[high_32] <ast.LShift object at 0x7da2590d69e0> constant[32]] <ast.BitOr object at 0x7da2590d6aa0> constant[33554432]]
return[call[name[str], parameter[name[full_64]]]]
|
keyword[def] identifier[shortcut_app_id] ( identifier[shortcut] ):
literal[string]
identifier[algorithm] = identifier[Crc] ( identifier[width] = literal[int] , identifier[poly] = literal[int] , identifier[reflect_in] = keyword[True] , identifier[xor_in] = literal[int] , identifier[reflect_out] = keyword[True] , identifier[xor_out] = literal[int] )
identifier[crc_input] = literal[string] . identifier[join] ([ identifier[shortcut] . identifier[exe] , identifier[shortcut] . identifier[name] ])
identifier[high_32] = identifier[algorithm] . identifier[bit_by_bit] ( identifier[crc_input] )| literal[int]
identifier[full_64] =( identifier[high_32] << literal[int] )| literal[int]
keyword[return] identifier[str] ( identifier[full_64] )
|
def shortcut_app_id(shortcut):
"""
Generates the app id for a given shortcut. Steam uses app ids as a unique
identifier for games, but since shortcuts dont have a canonical serverside
representation they need to be generated on the fly. The important part
about this function is that it will generate the same app id as Steam does
for a given shortcut
"""
algorithm = Crc(width=32, poly=79764919, reflect_in=True, xor_in=4294967295, reflect_out=True, xor_out=4294967295)
crc_input = ''.join([shortcut.exe, shortcut.name])
high_32 = algorithm.bit_by_bit(crc_input) | 2147483648
full_64 = high_32 << 32 | 33554432
return str(full_64)
|
def keypair_setup():
"""Creates keypair if necessary, saves private key locally, returns contents
of private key file."""
os.system('mkdir -p ' + u.PRIVATE_KEY_LOCATION)
keypair_name = u.get_keypair_name()
keypair = u.get_keypair_dict().get(keypair_name, None)
keypair_fn = u.get_keypair_fn()
if keypair:
print("Reusing keypair " + keypair_name)
# check that local pem file exists and is readable
assert os.path.exists(
keypair_fn), "Keypair %s exists, but corresponding .pem file %s is not found, delete keypair %s through console and run again to recreate keypair/.pem together" % (
keypair_name, keypair_fn, keypair_name)
keypair_contents = open(keypair_fn).read()
assert len(keypair_contents) > 0
else:
print("Creating keypair " + keypair_name)
ec2 = u.get_ec2_resource()
assert not os.path.exists(
keypair_fn), "previous keypair exists, delete it with 'sudo rm %s' and also delete corresponding keypair through console" % (
keypair_fn)
keypair = ec2.create_key_pair(KeyName=keypair_name)
open(keypair_fn, 'w').write(keypair.key_material)
os.system('chmod 400 ' + keypair_fn)
return keypair
|
def function[keypair_setup, parameter[]]:
constant[Creates keypair if necessary, saves private key locally, returns contents
of private key file.]
call[name[os].system, parameter[binary_operation[constant[mkdir -p ] + name[u].PRIVATE_KEY_LOCATION]]]
variable[keypair_name] assign[=] call[name[u].get_keypair_name, parameter[]]
variable[keypair] assign[=] call[call[name[u].get_keypair_dict, parameter[]].get, parameter[name[keypair_name], constant[None]]]
variable[keypair_fn] assign[=] call[name[u].get_keypair_fn, parameter[]]
if name[keypair] begin[:]
call[name[print], parameter[binary_operation[constant[Reusing keypair ] + name[keypair_name]]]]
assert[call[name[os].path.exists, parameter[name[keypair_fn]]]]
variable[keypair_contents] assign[=] call[call[name[open], parameter[name[keypair_fn]]].read, parameter[]]
assert[compare[call[name[len], parameter[name[keypair_contents]]] greater[>] constant[0]]]
return[name[keypair]]
|
keyword[def] identifier[keypair_setup] ():
literal[string]
identifier[os] . identifier[system] ( literal[string] + identifier[u] . identifier[PRIVATE_KEY_LOCATION] )
identifier[keypair_name] = identifier[u] . identifier[get_keypair_name] ()
identifier[keypair] = identifier[u] . identifier[get_keypair_dict] (). identifier[get] ( identifier[keypair_name] , keyword[None] )
identifier[keypair_fn] = identifier[u] . identifier[get_keypair_fn] ()
keyword[if] identifier[keypair] :
identifier[print] ( literal[string] + identifier[keypair_name] )
keyword[assert] identifier[os] . identifier[path] . identifier[exists] (
identifier[keypair_fn] ), literal[string] %(
identifier[keypair_name] , identifier[keypair_fn] , identifier[keypair_name] )
identifier[keypair_contents] = identifier[open] ( identifier[keypair_fn] ). identifier[read] ()
keyword[assert] identifier[len] ( identifier[keypair_contents] )> literal[int]
keyword[else] :
identifier[print] ( literal[string] + identifier[keypair_name] )
identifier[ec2] = identifier[u] . identifier[get_ec2_resource] ()
keyword[assert] keyword[not] identifier[os] . identifier[path] . identifier[exists] (
identifier[keypair_fn] ), literal[string] %(
identifier[keypair_fn] )
identifier[keypair] = identifier[ec2] . identifier[create_key_pair] ( identifier[KeyName] = identifier[keypair_name] )
identifier[open] ( identifier[keypair_fn] , literal[string] ). identifier[write] ( identifier[keypair] . identifier[key_material] )
identifier[os] . identifier[system] ( literal[string] + identifier[keypair_fn] )
keyword[return] identifier[keypair]
|
def keypair_setup():
"""Creates keypair if necessary, saves private key locally, returns contents
of private key file."""
os.system('mkdir -p ' + u.PRIVATE_KEY_LOCATION)
keypair_name = u.get_keypair_name()
keypair = u.get_keypair_dict().get(keypair_name, None)
keypair_fn = u.get_keypair_fn()
if keypair:
print('Reusing keypair ' + keypair_name)
# check that local pem file exists and is readable
assert os.path.exists(keypair_fn), 'Keypair %s exists, but corresponding .pem file %s is not found, delete keypair %s through console and run again to recreate keypair/.pem together' % (keypair_name, keypair_fn, keypair_name)
keypair_contents = open(keypair_fn).read()
assert len(keypair_contents) > 0 # depends on [control=['if'], data=[]]
else:
print('Creating keypair ' + keypair_name)
ec2 = u.get_ec2_resource()
assert not os.path.exists(keypair_fn), "previous keypair exists, delete it with 'sudo rm %s' and also delete corresponding keypair through console" % keypair_fn
keypair = ec2.create_key_pair(KeyName=keypair_name)
open(keypair_fn, 'w').write(keypair.key_material)
os.system('chmod 400 ' + keypair_fn)
return keypair
|
def get_localized_property(context, field=None, language=None):
'''
When accessing to the name of the field itself, the value
in the current language will be returned. Unless it's set,
the value in the default language will be returned.
'''
if language:
return getattr(context, get_real_fieldname(field, language))
if hasattr(settings, 'FALLBACK_LANGUAGES'):
attrs = [translation.get_language()]
attrs += get_fallback_languages()
else:
attrs = [
translation.get_language(),
translation.get_language()[:2],
settings.LANGUAGE_CODE,
]
def predicate(x):
value = getattr(context, get_real_fieldname(field, x), None)
return value if valid_for_gettext(value) else None
return first_match(predicate, attrs)
|
def function[get_localized_property, parameter[context, field, language]]:
constant[
When accessing to the name of the field itself, the value
in the current language will be returned. Unless it's set,
the value in the default language will be returned.
]
if name[language] begin[:]
return[call[name[getattr], parameter[name[context], call[name[get_real_fieldname], parameter[name[field], name[language]]]]]]
if call[name[hasattr], parameter[name[settings], constant[FALLBACK_LANGUAGES]]] begin[:]
variable[attrs] assign[=] list[[<ast.Call object at 0x7da20c76e3e0>]]
<ast.AugAssign object at 0x7da20c76f100>
def function[predicate, parameter[x]]:
variable[value] assign[=] call[name[getattr], parameter[name[context], call[name[get_real_fieldname], parameter[name[field], name[x]]], constant[None]]]
return[<ast.IfExp object at 0x7da20c76dc30>]
return[call[name[first_match], parameter[name[predicate], name[attrs]]]]
|
keyword[def] identifier[get_localized_property] ( identifier[context] , identifier[field] = keyword[None] , identifier[language] = keyword[None] ):
literal[string]
keyword[if] identifier[language] :
keyword[return] identifier[getattr] ( identifier[context] , identifier[get_real_fieldname] ( identifier[field] , identifier[language] ))
keyword[if] identifier[hasattr] ( identifier[settings] , literal[string] ):
identifier[attrs] =[ identifier[translation] . identifier[get_language] ()]
identifier[attrs] += identifier[get_fallback_languages] ()
keyword[else] :
identifier[attrs] =[
identifier[translation] . identifier[get_language] (),
identifier[translation] . identifier[get_language] ()[: literal[int] ],
identifier[settings] . identifier[LANGUAGE_CODE] ,
]
keyword[def] identifier[predicate] ( identifier[x] ):
identifier[value] = identifier[getattr] ( identifier[context] , identifier[get_real_fieldname] ( identifier[field] , identifier[x] ), keyword[None] )
keyword[return] identifier[value] keyword[if] identifier[valid_for_gettext] ( identifier[value] ) keyword[else] keyword[None]
keyword[return] identifier[first_match] ( identifier[predicate] , identifier[attrs] )
|
def get_localized_property(context, field=None, language=None):
"""
When accessing to the name of the field itself, the value
in the current language will be returned. Unless it's set,
the value in the default language will be returned.
"""
if language:
return getattr(context, get_real_fieldname(field, language)) # depends on [control=['if'], data=[]]
if hasattr(settings, 'FALLBACK_LANGUAGES'):
attrs = [translation.get_language()]
attrs += get_fallback_languages() # depends on [control=['if'], data=[]]
else:
attrs = [translation.get_language(), translation.get_language()[:2], settings.LANGUAGE_CODE]
def predicate(x):
value = getattr(context, get_real_fieldname(field, x), None)
return value if valid_for_gettext(value) else None
return first_match(predicate, attrs)
|
def render_source(output_dir, package_spec, jenv=JENV):
"""
Render and output
"""
path, module_name = package_spec.filepath
java_template = jenv.get_template(TEMPLATE_NAME)
module_path = "com." + package_spec.identifier
yaml_filepath = "/".join(package_spec.filepath) + ".yaml"
includes = [".".join(i.split(".")[:-1]) for i in package_spec.includes]
includes = [i for i in includes if i != "types"]
for msg in package_spec.definitions:
msg_name = classnameify(msg.identifier) if msg.sbp_id else msg.identifier
l = "/".join(package_spec.filepath)
destination_filename = "%s/com/%s/%s.java" % (output_dir, l , msg_name)
# Create the output directory if it doesn't exist
if not os.path.exists(os.path.dirname(destination_filename)):
os.mkdir(os.path.dirname(destination_filename))
with open(destination_filename, 'w+') as f:
print(destination_filename)
f.write(java_template.render(m=msg,
filepath=yaml_filepath,
module_path=module_path,
include=includes,
description=package_spec.description))
|
def function[render_source, parameter[output_dir, package_spec, jenv]]:
constant[
Render and output
]
<ast.Tuple object at 0x7da1b26ad750> assign[=] name[package_spec].filepath
variable[java_template] assign[=] call[name[jenv].get_template, parameter[name[TEMPLATE_NAME]]]
variable[module_path] assign[=] binary_operation[constant[com.] + name[package_spec].identifier]
variable[yaml_filepath] assign[=] binary_operation[call[constant[/].join, parameter[name[package_spec].filepath]] + constant[.yaml]]
variable[includes] assign[=] <ast.ListComp object at 0x7da1b26ad630>
variable[includes] assign[=] <ast.ListComp object at 0x7da1b26ac0d0>
for taget[name[msg]] in starred[name[package_spec].definitions] begin[:]
variable[msg_name] assign[=] <ast.IfExp object at 0x7da18eb541c0>
variable[l] assign[=] call[constant[/].join, parameter[name[package_spec].filepath]]
variable[destination_filename] assign[=] binary_operation[constant[%s/com/%s/%s.java] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da18eb544c0>, <ast.Name object at 0x7da18eb561d0>, <ast.Name object at 0x7da18eb579a0>]]]
if <ast.UnaryOp object at 0x7da18eb572e0> begin[:]
call[name[os].mkdir, parameter[call[name[os].path.dirname, parameter[name[destination_filename]]]]]
with call[name[open], parameter[name[destination_filename], constant[w+]]] begin[:]
call[name[print], parameter[name[destination_filename]]]
call[name[f].write, parameter[call[name[java_template].render, parameter[]]]]
|
keyword[def] identifier[render_source] ( identifier[output_dir] , identifier[package_spec] , identifier[jenv] = identifier[JENV] ):
literal[string]
identifier[path] , identifier[module_name] = identifier[package_spec] . identifier[filepath]
identifier[java_template] = identifier[jenv] . identifier[get_template] ( identifier[TEMPLATE_NAME] )
identifier[module_path] = literal[string] + identifier[package_spec] . identifier[identifier]
identifier[yaml_filepath] = literal[string] . identifier[join] ( identifier[package_spec] . identifier[filepath] )+ literal[string]
identifier[includes] =[ literal[string] . identifier[join] ( identifier[i] . identifier[split] ( literal[string] )[:- literal[int] ]) keyword[for] identifier[i] keyword[in] identifier[package_spec] . identifier[includes] ]
identifier[includes] =[ identifier[i] keyword[for] identifier[i] keyword[in] identifier[includes] keyword[if] identifier[i] != literal[string] ]
keyword[for] identifier[msg] keyword[in] identifier[package_spec] . identifier[definitions] :
identifier[msg_name] = identifier[classnameify] ( identifier[msg] . identifier[identifier] ) keyword[if] identifier[msg] . identifier[sbp_id] keyword[else] identifier[msg] . identifier[identifier]
identifier[l] = literal[string] . identifier[join] ( identifier[package_spec] . identifier[filepath] )
identifier[destination_filename] = literal[string] %( identifier[output_dir] , identifier[l] , identifier[msg_name] )
keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[exists] ( identifier[os] . identifier[path] . identifier[dirname] ( identifier[destination_filename] )):
identifier[os] . identifier[mkdir] ( identifier[os] . identifier[path] . identifier[dirname] ( identifier[destination_filename] ))
keyword[with] identifier[open] ( identifier[destination_filename] , literal[string] ) keyword[as] identifier[f] :
identifier[print] ( identifier[destination_filename] )
identifier[f] . identifier[write] ( identifier[java_template] . identifier[render] ( identifier[m] = identifier[msg] ,
identifier[filepath] = identifier[yaml_filepath] ,
identifier[module_path] = identifier[module_path] ,
identifier[include] = identifier[includes] ,
identifier[description] = identifier[package_spec] . identifier[description] ))
|
def render_source(output_dir, package_spec, jenv=JENV):
"""
Render and output
"""
(path, module_name) = package_spec.filepath
java_template = jenv.get_template(TEMPLATE_NAME)
module_path = 'com.' + package_spec.identifier
yaml_filepath = '/'.join(package_spec.filepath) + '.yaml'
includes = ['.'.join(i.split('.')[:-1]) for i in package_spec.includes]
includes = [i for i in includes if i != 'types']
for msg in package_spec.definitions:
msg_name = classnameify(msg.identifier) if msg.sbp_id else msg.identifier
l = '/'.join(package_spec.filepath)
destination_filename = '%s/com/%s/%s.java' % (output_dir, l, msg_name)
# Create the output directory if it doesn't exist
if not os.path.exists(os.path.dirname(destination_filename)):
os.mkdir(os.path.dirname(destination_filename)) # depends on [control=['if'], data=[]]
with open(destination_filename, 'w+') as f:
print(destination_filename)
f.write(java_template.render(m=msg, filepath=yaml_filepath, module_path=module_path, include=includes, description=package_spec.description)) # depends on [control=['with'], data=['f']] # depends on [control=['for'], data=['msg']]
|
def write(self, word):
"""
.. _SOwrite:
Write the ``chr`` representation of ``word`` to the ``open_stream``.
If ``chr(word)`` fails due ``OverflowError``, a ``"?"`` will be written.
"""
self.repr_.setvalue(word)
try:
self.open_stream.write(chr(self.repr_.getvalue()))
except OverflowError:
self.open_stream.write("?")
|
def function[write, parameter[self, word]]:
constant[
.. _SOwrite:
Write the ``chr`` representation of ``word`` to the ``open_stream``.
If ``chr(word)`` fails due ``OverflowError``, a ``"?"`` will be written.
]
call[name[self].repr_.setvalue, parameter[name[word]]]
<ast.Try object at 0x7da1b1462b30>
|
keyword[def] identifier[write] ( identifier[self] , identifier[word] ):
literal[string]
identifier[self] . identifier[repr_] . identifier[setvalue] ( identifier[word] )
keyword[try] :
identifier[self] . identifier[open_stream] . identifier[write] ( identifier[chr] ( identifier[self] . identifier[repr_] . identifier[getvalue] ()))
keyword[except] identifier[OverflowError] :
identifier[self] . identifier[open_stream] . identifier[write] ( literal[string] )
|
def write(self, word):
"""
.. _SOwrite:
Write the ``chr`` representation of ``word`` to the ``open_stream``.
If ``chr(word)`` fails due ``OverflowError``, a ``"?"`` will be written.
"""
self.repr_.setvalue(word)
try:
self.open_stream.write(chr(self.repr_.getvalue())) # depends on [control=['try'], data=[]]
except OverflowError:
self.open_stream.write('?') # depends on [control=['except'], data=[]]
|
def _updateNumbers(self, linenumers):
"""
add/remove line numbers
"""
b = self.blockCount()
c = b - linenumers
if c > 0:
# remove lines numbers
for _ in range(c):
# remove last line:
self.setFocus()
storeCursorPos = self.textCursor()
self.moveCursor(
QtGui.QTextCursor.End,
QtGui.QTextCursor.MoveAnchor)
self.moveCursor(
QtGui.QTextCursor.StartOfLine,
QtGui.QTextCursor.MoveAnchor)
self.moveCursor(
QtGui.QTextCursor.End,
QtGui.QTextCursor.KeepAnchor)
self.textCursor().removeSelectedText()
self.textCursor().deletePreviousChar()
self.setTextCursor(storeCursorPos)
elif c < 0:
# add line numbers
for i in range(-c):
self.appendPlainText(str(b + i + 1))
|
def function[_updateNumbers, parameter[self, linenumers]]:
constant[
add/remove line numbers
]
variable[b] assign[=] call[name[self].blockCount, parameter[]]
variable[c] assign[=] binary_operation[name[b] - name[linenumers]]
if compare[name[c] greater[>] constant[0]] begin[:]
for taget[name[_]] in starred[call[name[range], parameter[name[c]]]] begin[:]
call[name[self].setFocus, parameter[]]
variable[storeCursorPos] assign[=] call[name[self].textCursor, parameter[]]
call[name[self].moveCursor, parameter[name[QtGui].QTextCursor.End, name[QtGui].QTextCursor.MoveAnchor]]
call[name[self].moveCursor, parameter[name[QtGui].QTextCursor.StartOfLine, name[QtGui].QTextCursor.MoveAnchor]]
call[name[self].moveCursor, parameter[name[QtGui].QTextCursor.End, name[QtGui].QTextCursor.KeepAnchor]]
call[call[name[self].textCursor, parameter[]].removeSelectedText, parameter[]]
call[call[name[self].textCursor, parameter[]].deletePreviousChar, parameter[]]
call[name[self].setTextCursor, parameter[name[storeCursorPos]]]
|
keyword[def] identifier[_updateNumbers] ( identifier[self] , identifier[linenumers] ):
literal[string]
identifier[b] = identifier[self] . identifier[blockCount] ()
identifier[c] = identifier[b] - identifier[linenumers]
keyword[if] identifier[c] > literal[int] :
keyword[for] identifier[_] keyword[in] identifier[range] ( identifier[c] ):
identifier[self] . identifier[setFocus] ()
identifier[storeCursorPos] = identifier[self] . identifier[textCursor] ()
identifier[self] . identifier[moveCursor] (
identifier[QtGui] . identifier[QTextCursor] . identifier[End] ,
identifier[QtGui] . identifier[QTextCursor] . identifier[MoveAnchor] )
identifier[self] . identifier[moveCursor] (
identifier[QtGui] . identifier[QTextCursor] . identifier[StartOfLine] ,
identifier[QtGui] . identifier[QTextCursor] . identifier[MoveAnchor] )
identifier[self] . identifier[moveCursor] (
identifier[QtGui] . identifier[QTextCursor] . identifier[End] ,
identifier[QtGui] . identifier[QTextCursor] . identifier[KeepAnchor] )
identifier[self] . identifier[textCursor] (). identifier[removeSelectedText] ()
identifier[self] . identifier[textCursor] (). identifier[deletePreviousChar] ()
identifier[self] . identifier[setTextCursor] ( identifier[storeCursorPos] )
keyword[elif] identifier[c] < literal[int] :
keyword[for] identifier[i] keyword[in] identifier[range] (- identifier[c] ):
identifier[self] . identifier[appendPlainText] ( identifier[str] ( identifier[b] + identifier[i] + literal[int] ))
|
def _updateNumbers(self, linenumers):
"""
add/remove line numbers
"""
b = self.blockCount()
c = b - linenumers
if c > 0:
# remove lines numbers
for _ in range(c):
# remove last line:
self.setFocus()
storeCursorPos = self.textCursor()
self.moveCursor(QtGui.QTextCursor.End, QtGui.QTextCursor.MoveAnchor)
self.moveCursor(QtGui.QTextCursor.StartOfLine, QtGui.QTextCursor.MoveAnchor)
self.moveCursor(QtGui.QTextCursor.End, QtGui.QTextCursor.KeepAnchor)
self.textCursor().removeSelectedText()
self.textCursor().deletePreviousChar()
self.setTextCursor(storeCursorPos) # depends on [control=['for'], data=[]] # depends on [control=['if'], data=['c']]
elif c < 0:
# add line numbers
for i in range(-c):
self.appendPlainText(str(b + i + 1)) # depends on [control=['for'], data=['i']] # depends on [control=['if'], data=['c']]
|
def emit_only(self, event: str, func_names: Union[str, List[str]], *args,
**kwargs) -> None:
""" Specifically only emits certain subscribed events.
:param event: Name of the event.
:type event: str
:param func_names: Function(s) to emit.
:type func_names: Union[ str | List[str] ]
"""
if isinstance(func_names, str):
func_names = [func_names]
for func in self._event_funcs(event):
if func.__name__ in func_names:
func(*args, **kwargs)
|
def function[emit_only, parameter[self, event, func_names]]:
constant[ Specifically only emits certain subscribed events.
:param event: Name of the event.
:type event: str
:param func_names: Function(s) to emit.
:type func_names: Union[ str | List[str] ]
]
if call[name[isinstance], parameter[name[func_names], name[str]]] begin[:]
variable[func_names] assign[=] list[[<ast.Name object at 0x7da18f720790>]]
for taget[name[func]] in starred[call[name[self]._event_funcs, parameter[name[event]]]] begin[:]
if compare[name[func].__name__ in name[func_names]] begin[:]
call[name[func], parameter[<ast.Starred object at 0x7da18f7216c0>]]
|
keyword[def] identifier[emit_only] ( identifier[self] , identifier[event] : identifier[str] , identifier[func_names] : identifier[Union] [ identifier[str] , identifier[List] [ identifier[str] ]],* identifier[args] ,
** identifier[kwargs] )-> keyword[None] :
literal[string]
keyword[if] identifier[isinstance] ( identifier[func_names] , identifier[str] ):
identifier[func_names] =[ identifier[func_names] ]
keyword[for] identifier[func] keyword[in] identifier[self] . identifier[_event_funcs] ( identifier[event] ):
keyword[if] identifier[func] . identifier[__name__] keyword[in] identifier[func_names] :
identifier[func] (* identifier[args] ,** identifier[kwargs] )
|
def emit_only(self, event: str, func_names: Union[str, List[str]], *args, **kwargs) -> None:
""" Specifically only emits certain subscribed events.
:param event: Name of the event.
:type event: str
:param func_names: Function(s) to emit.
:type func_names: Union[ str | List[str] ]
"""
if isinstance(func_names, str):
func_names = [func_names] # depends on [control=['if'], data=[]]
for func in self._event_funcs(event):
if func.__name__ in func_names:
func(*args, **kwargs) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['func']]
|
def set_meta(self, instance):
"""
Set django-meta stuff from LandingPageModel instance.
"""
self.use_title_tag = True
self.title = instance.title
|
def function[set_meta, parameter[self, instance]]:
constant[
Set django-meta stuff from LandingPageModel instance.
]
name[self].use_title_tag assign[=] constant[True]
name[self].title assign[=] name[instance].title
|
keyword[def] identifier[set_meta] ( identifier[self] , identifier[instance] ):
literal[string]
identifier[self] . identifier[use_title_tag] = keyword[True]
identifier[self] . identifier[title] = identifier[instance] . identifier[title]
|
def set_meta(self, instance):
"""
Set django-meta stuff from LandingPageModel instance.
"""
self.use_title_tag = True
self.title = instance.title
|
def get_image(self, float_key="floats", to_chw=True):
"""
get image list from ImageFrame
"""
tensors = callBigDlFunc(self.bigdl_type,
"localImageFrameToImageTensor", self.value, float_key, to_chw)
return map(lambda tensor: tensor.to_ndarray(), tensors)
|
def function[get_image, parameter[self, float_key, to_chw]]:
constant[
get image list from ImageFrame
]
variable[tensors] assign[=] call[name[callBigDlFunc], parameter[name[self].bigdl_type, constant[localImageFrameToImageTensor], name[self].value, name[float_key], name[to_chw]]]
return[call[name[map], parameter[<ast.Lambda object at 0x7da2054a66b0>, name[tensors]]]]
|
keyword[def] identifier[get_image] ( identifier[self] , identifier[float_key] = literal[string] , identifier[to_chw] = keyword[True] ):
literal[string]
identifier[tensors] = identifier[callBigDlFunc] ( identifier[self] . identifier[bigdl_type] ,
literal[string] , identifier[self] . identifier[value] , identifier[float_key] , identifier[to_chw] )
keyword[return] identifier[map] ( keyword[lambda] identifier[tensor] : identifier[tensor] . identifier[to_ndarray] (), identifier[tensors] )
|
def get_image(self, float_key='floats', to_chw=True):
"""
get image list from ImageFrame
"""
tensors = callBigDlFunc(self.bigdl_type, 'localImageFrameToImageTensor', self.value, float_key, to_chw)
return map(lambda tensor: tensor.to_ndarray(), tensors)
|
def discover(cls, device, timeout_sec=TIMEOUT_SEC):
"""Wait until the specified device has discovered the expected services
and characteristics for this service. Should be called once before other
calls are made on the service. Returns true if the service has been
discovered in the specified timeout, or false if not discovered.
"""
device.discover(cls.SERVICES, cls.CHARACTERISTICS, timeout_sec)
|
def function[discover, parameter[cls, device, timeout_sec]]:
constant[Wait until the specified device has discovered the expected services
and characteristics for this service. Should be called once before other
calls are made on the service. Returns true if the service has been
discovered in the specified timeout, or false if not discovered.
]
call[name[device].discover, parameter[name[cls].SERVICES, name[cls].CHARACTERISTICS, name[timeout_sec]]]
|
keyword[def] identifier[discover] ( identifier[cls] , identifier[device] , identifier[timeout_sec] = identifier[TIMEOUT_SEC] ):
literal[string]
identifier[device] . identifier[discover] ( identifier[cls] . identifier[SERVICES] , identifier[cls] . identifier[CHARACTERISTICS] , identifier[timeout_sec] )
|
def discover(cls, device, timeout_sec=TIMEOUT_SEC):
"""Wait until the specified device has discovered the expected services
and characteristics for this service. Should be called once before other
calls are made on the service. Returns true if the service has been
discovered in the specified timeout, or false if not discovered.
"""
device.discover(cls.SERVICES, cls.CHARACTERISTICS, timeout_sec)
|
def ParseSudoersEntry(self, entry, sudoers_config):
"""Parse an entry and add it to the given SudoersConfig rdfvalue."""
key = entry[0]
if key in SudoersFieldParser.ALIAS_TYPES:
# Alias.
alias_entry = rdf_config_file.SudoersAlias(
type=SudoersFieldParser.ALIAS_TYPES.get(key), name=entry[1])
# Members of this alias, comma-separated.
members, _ = self._ExtractList(entry[2:], ignores=(",", "="))
field = SudoersFieldParser.ALIAS_FIELDS.get(key)
getattr(alias_entry, field).Extend(members)
sudoers_config.aliases.append(alias_entry)
elif key.startswith(SudoersFieldParser.DEFAULTS_KEY):
# Default.
# Identify scope if one exists (Defaults<scope> ...)
scope = None
if len(key) > len(SudoersFieldParser.DEFAULTS_KEY):
scope = key[len(SudoersFieldParser.DEFAULTS_KEY) + 1:]
# There can be multiple defaults on a line, for the one scope.
entry = entry[1:]
defaults, _ = self._ExtractList(entry)
for default in defaults:
default_entry = rdf_config_file.SudoersDefault(scope=scope)
# Extract key name and value(s).
default_name = default
value = []
if "=" in default_name:
default_name, remainder = default_name.split("=", 1)
value = [remainder]
default_entry.name = default_name
if entry:
default_entry.value = " ".join(value)
sudoers_config.defaults.append(default_entry)
elif key in SudoersFieldParser.INCLUDE_KEYS:
# TODO(user): make #includedir more obvious in the RDFValue somewhere
target = " ".join(entry[1:])
sudoers_config.includes.append(target)
else:
users, entry = self._ExtractList(entry)
hosts, entry = self._ExtractList(entry, terminators=("=",))
# Remove = from <user> <host> = <specs>
if entry[0] == "=":
entry = entry[1:]
# Command specification.
sudoers_entry = rdf_config_file.SudoersEntry(
users=users, hosts=hosts, cmdspec=entry)
sudoers_config.entries.append(sudoers_entry)
|
def function[ParseSudoersEntry, parameter[self, entry, sudoers_config]]:
constant[Parse an entry and add it to the given SudoersConfig rdfvalue.]
variable[key] assign[=] call[name[entry]][constant[0]]
if compare[name[key] in name[SudoersFieldParser].ALIAS_TYPES] begin[:]
variable[alias_entry] assign[=] call[name[rdf_config_file].SudoersAlias, parameter[]]
<ast.Tuple object at 0x7da1b1b05cc0> assign[=] call[name[self]._ExtractList, parameter[call[name[entry]][<ast.Slice object at 0x7da1b1b07820>]]]
variable[field] assign[=] call[name[SudoersFieldParser].ALIAS_FIELDS.get, parameter[name[key]]]
call[call[name[getattr], parameter[name[alias_entry], name[field]]].Extend, parameter[name[members]]]
call[name[sudoers_config].aliases.append, parameter[name[alias_entry]]]
|
keyword[def] identifier[ParseSudoersEntry] ( identifier[self] , identifier[entry] , identifier[sudoers_config] ):
literal[string]
identifier[key] = identifier[entry] [ literal[int] ]
keyword[if] identifier[key] keyword[in] identifier[SudoersFieldParser] . identifier[ALIAS_TYPES] :
identifier[alias_entry] = identifier[rdf_config_file] . identifier[SudoersAlias] (
identifier[type] = identifier[SudoersFieldParser] . identifier[ALIAS_TYPES] . identifier[get] ( identifier[key] ), identifier[name] = identifier[entry] [ literal[int] ])
identifier[members] , identifier[_] = identifier[self] . identifier[_ExtractList] ( identifier[entry] [ literal[int] :], identifier[ignores] =( literal[string] , literal[string] ))
identifier[field] = identifier[SudoersFieldParser] . identifier[ALIAS_FIELDS] . identifier[get] ( identifier[key] )
identifier[getattr] ( identifier[alias_entry] , identifier[field] ). identifier[Extend] ( identifier[members] )
identifier[sudoers_config] . identifier[aliases] . identifier[append] ( identifier[alias_entry] )
keyword[elif] identifier[key] . identifier[startswith] ( identifier[SudoersFieldParser] . identifier[DEFAULTS_KEY] ):
identifier[scope] = keyword[None]
keyword[if] identifier[len] ( identifier[key] )> identifier[len] ( identifier[SudoersFieldParser] . identifier[DEFAULTS_KEY] ):
identifier[scope] = identifier[key] [ identifier[len] ( identifier[SudoersFieldParser] . identifier[DEFAULTS_KEY] )+ literal[int] :]
identifier[entry] = identifier[entry] [ literal[int] :]
identifier[defaults] , identifier[_] = identifier[self] . identifier[_ExtractList] ( identifier[entry] )
keyword[for] identifier[default] keyword[in] identifier[defaults] :
identifier[default_entry] = identifier[rdf_config_file] . identifier[SudoersDefault] ( identifier[scope] = identifier[scope] )
identifier[default_name] = identifier[default]
identifier[value] =[]
keyword[if] literal[string] keyword[in] identifier[default_name] :
identifier[default_name] , identifier[remainder] = identifier[default_name] . identifier[split] ( literal[string] , literal[int] )
identifier[value] =[ identifier[remainder] ]
identifier[default_entry] . identifier[name] = identifier[default_name]
keyword[if] identifier[entry] :
identifier[default_entry] . identifier[value] = literal[string] . identifier[join] ( identifier[value] )
identifier[sudoers_config] . identifier[defaults] . identifier[append] ( identifier[default_entry] )
keyword[elif] identifier[key] keyword[in] identifier[SudoersFieldParser] . identifier[INCLUDE_KEYS] :
identifier[target] = literal[string] . identifier[join] ( identifier[entry] [ literal[int] :])
identifier[sudoers_config] . identifier[includes] . identifier[append] ( identifier[target] )
keyword[else] :
identifier[users] , identifier[entry] = identifier[self] . identifier[_ExtractList] ( identifier[entry] )
identifier[hosts] , identifier[entry] = identifier[self] . identifier[_ExtractList] ( identifier[entry] , identifier[terminators] =( literal[string] ,))
keyword[if] identifier[entry] [ literal[int] ]== literal[string] :
identifier[entry] = identifier[entry] [ literal[int] :]
identifier[sudoers_entry] = identifier[rdf_config_file] . identifier[SudoersEntry] (
identifier[users] = identifier[users] , identifier[hosts] = identifier[hosts] , identifier[cmdspec] = identifier[entry] )
identifier[sudoers_config] . identifier[entries] . identifier[append] ( identifier[sudoers_entry] )
|
def ParseSudoersEntry(self, entry, sudoers_config):
"""Parse an entry and add it to the given SudoersConfig rdfvalue."""
key = entry[0]
if key in SudoersFieldParser.ALIAS_TYPES:
# Alias.
alias_entry = rdf_config_file.SudoersAlias(type=SudoersFieldParser.ALIAS_TYPES.get(key), name=entry[1])
# Members of this alias, comma-separated.
(members, _) = self._ExtractList(entry[2:], ignores=(',', '='))
field = SudoersFieldParser.ALIAS_FIELDS.get(key)
getattr(alias_entry, field).Extend(members)
sudoers_config.aliases.append(alias_entry) # depends on [control=['if'], data=['key']]
elif key.startswith(SudoersFieldParser.DEFAULTS_KEY):
# Default.
# Identify scope if one exists (Defaults<scope> ...)
scope = None
if len(key) > len(SudoersFieldParser.DEFAULTS_KEY):
scope = key[len(SudoersFieldParser.DEFAULTS_KEY) + 1:] # depends on [control=['if'], data=[]]
# There can be multiple defaults on a line, for the one scope.
entry = entry[1:]
(defaults, _) = self._ExtractList(entry)
for default in defaults:
default_entry = rdf_config_file.SudoersDefault(scope=scope)
# Extract key name and value(s).
default_name = default
value = []
if '=' in default_name:
(default_name, remainder) = default_name.split('=', 1)
value = [remainder] # depends on [control=['if'], data=['default_name']]
default_entry.name = default_name
if entry:
default_entry.value = ' '.join(value) # depends on [control=['if'], data=[]]
sudoers_config.defaults.append(default_entry) # depends on [control=['for'], data=['default']] # depends on [control=['if'], data=[]]
elif key in SudoersFieldParser.INCLUDE_KEYS:
# TODO(user): make #includedir more obvious in the RDFValue somewhere
target = ' '.join(entry[1:])
sudoers_config.includes.append(target) # depends on [control=['if'], data=[]]
else:
(users, entry) = self._ExtractList(entry)
(hosts, entry) = self._ExtractList(entry, terminators=('=',))
# Remove = from <user> <host> = <specs>
if entry[0] == '=':
entry = entry[1:] # depends on [control=['if'], data=[]]
# Command specification.
sudoers_entry = rdf_config_file.SudoersEntry(users=users, hosts=hosts, cmdspec=entry)
sudoers_config.entries.append(sudoers_entry)
|
def flatten(items,enter=lambda x:isinstance(x, list)):
# http://stackoverflow.com/a/40857703
# https://github.com/ctmakro/canton/blob/master/canton/misc.py
"""Yield items from any nested iterable; see REF."""
for x in items:
if enter(x):
yield from flatten(x)
else:
yield x
|
def function[flatten, parameter[items, enter]]:
constant[Yield items from any nested iterable; see REF.]
for taget[name[x]] in starred[name[items]] begin[:]
if call[name[enter], parameter[name[x]]] begin[:]
<ast.YieldFrom object at 0x7da1b2033430>
|
keyword[def] identifier[flatten] ( identifier[items] , identifier[enter] = keyword[lambda] identifier[x] : identifier[isinstance] ( identifier[x] , identifier[list] )):
literal[string]
keyword[for] identifier[x] keyword[in] identifier[items] :
keyword[if] identifier[enter] ( identifier[x] ):
keyword[yield] keyword[from] identifier[flatten] ( identifier[x] )
keyword[else] :
keyword[yield] identifier[x]
|
def flatten(items, enter=lambda x: isinstance(x, list)):
# http://stackoverflow.com/a/40857703
# https://github.com/ctmakro/canton/blob/master/canton/misc.py
'Yield items from any nested iterable; see REF.'
for x in items:
if enter(x):
yield from flatten(x) # depends on [control=['if'], data=[]]
else:
yield x # depends on [control=['for'], data=['x']]
|
def release():
"Cut a new release"
version = run('python setup.py --version').stdout.strip()
assert version, 'No version found in setup.py?'
print('### Releasing new version: {0}'.format(version))
run('git tag {0}'.format(version))
run('git push --tags')
run('python setup.py sdist bdist_wheel')
run('twine upload -s dist/*')
|
def function[release, parameter[]]:
constant[Cut a new release]
variable[version] assign[=] call[call[name[run], parameter[constant[python setup.py --version]]].stdout.strip, parameter[]]
assert[name[version]]
call[name[print], parameter[call[constant[### Releasing new version: {0}].format, parameter[name[version]]]]]
call[name[run], parameter[call[constant[git tag {0}].format, parameter[name[version]]]]]
call[name[run], parameter[constant[git push --tags]]]
call[name[run], parameter[constant[python setup.py sdist bdist_wheel]]]
call[name[run], parameter[constant[twine upload -s dist/*]]]
|
keyword[def] identifier[release] ():
literal[string]
identifier[version] = identifier[run] ( literal[string] ). identifier[stdout] . identifier[strip] ()
keyword[assert] identifier[version] , literal[string]
identifier[print] ( literal[string] . identifier[format] ( identifier[version] ))
identifier[run] ( literal[string] . identifier[format] ( identifier[version] ))
identifier[run] ( literal[string] )
identifier[run] ( literal[string] )
identifier[run] ( literal[string] )
|
def release():
"""Cut a new release"""
version = run('python setup.py --version').stdout.strip()
assert version, 'No version found in setup.py?'
print('### Releasing new version: {0}'.format(version))
run('git tag {0}'.format(version))
run('git push --tags')
run('python setup.py sdist bdist_wheel')
run('twine upload -s dist/*')
|
def _print_foreign_playlist_message(self):
""" reset previous message """
self.operation_mode = self.window_mode = NORMAL_MODE
self.refreshBody()
""" display new message """
txt='''A playlist by this name:
__"|{0}|"
already exists in the config directory.
This playlist was saved as:
__"|{1}|"
'''.format(self._cnf.foreign_filename_only_no_extension,
self._cnf.stations_filename_only_no_extension)
self._show_help(txt, FOREIGN_PLAYLIST_MESSAGE_MODE,
caption = ' Foreign playlist ',
prompt = ' Press any key ',
is_message=True)
|
def function[_print_foreign_playlist_message, parameter[self]]:
constant[ reset previous message ]
name[self].operation_mode assign[=] name[NORMAL_MODE]
call[name[self].refreshBody, parameter[]]
constant[ display new message ]
variable[txt] assign[=] call[constant[A playlist by this name:
__"|{0}|"
already exists in the config directory.
This playlist was saved as:
__"|{1}|"
].format, parameter[name[self]._cnf.foreign_filename_only_no_extension, name[self]._cnf.stations_filename_only_no_extension]]
call[name[self]._show_help, parameter[name[txt], name[FOREIGN_PLAYLIST_MESSAGE_MODE]]]
|
keyword[def] identifier[_print_foreign_playlist_message] ( identifier[self] ):
literal[string]
identifier[self] . identifier[operation_mode] = identifier[self] . identifier[window_mode] = identifier[NORMAL_MODE]
identifier[self] . identifier[refreshBody] ()
literal[string]
identifier[txt] = literal[string] . identifier[format] ( identifier[self] . identifier[_cnf] . identifier[foreign_filename_only_no_extension] ,
identifier[self] . identifier[_cnf] . identifier[stations_filename_only_no_extension] )
identifier[self] . identifier[_show_help] ( identifier[txt] , identifier[FOREIGN_PLAYLIST_MESSAGE_MODE] ,
identifier[caption] = literal[string] ,
identifier[prompt] = literal[string] ,
identifier[is_message] = keyword[True] )
|
def _print_foreign_playlist_message(self):
""" reset previous message """
self.operation_mode = self.window_mode = NORMAL_MODE
self.refreshBody()
' display new message '
txt = 'A playlist by this name:\n __"|{0}|"\n already exists in the config directory.\n\n This playlist was saved as:\n __"|{1}|"\n '.format(self._cnf.foreign_filename_only_no_extension, self._cnf.stations_filename_only_no_extension)
self._show_help(txt, FOREIGN_PLAYLIST_MESSAGE_MODE, caption=' Foreign playlist ', prompt=' Press any key ', is_message=True)
|
def actions(self, state):
'''Returns a list of the pieces we can move to the empty space.'''
rows = string_to_list(state)
row_e, col_e = find_location(rows, 'e')
actions = []
if row_e > 0:
actions.append(rows[row_e - 1][col_e])
if row_e < 2:
actions.append(rows[row_e + 1][col_e])
if col_e > 0:
actions.append(rows[row_e][col_e - 1])
if col_e < 2:
actions.append(rows[row_e][col_e + 1])
return actions
|
def function[actions, parameter[self, state]]:
constant[Returns a list of the pieces we can move to the empty space.]
variable[rows] assign[=] call[name[string_to_list], parameter[name[state]]]
<ast.Tuple object at 0x7da1b1504160> assign[=] call[name[find_location], parameter[name[rows], constant[e]]]
variable[actions] assign[=] list[[]]
if compare[name[row_e] greater[>] constant[0]] begin[:]
call[name[actions].append, parameter[call[call[name[rows]][binary_operation[name[row_e] - constant[1]]]][name[col_e]]]]
if compare[name[row_e] less[<] constant[2]] begin[:]
call[name[actions].append, parameter[call[call[name[rows]][binary_operation[name[row_e] + constant[1]]]][name[col_e]]]]
if compare[name[col_e] greater[>] constant[0]] begin[:]
call[name[actions].append, parameter[call[call[name[rows]][name[row_e]]][binary_operation[name[col_e] - constant[1]]]]]
if compare[name[col_e] less[<] constant[2]] begin[:]
call[name[actions].append, parameter[call[call[name[rows]][name[row_e]]][binary_operation[name[col_e] + constant[1]]]]]
return[name[actions]]
|
keyword[def] identifier[actions] ( identifier[self] , identifier[state] ):
literal[string]
identifier[rows] = identifier[string_to_list] ( identifier[state] )
identifier[row_e] , identifier[col_e] = identifier[find_location] ( identifier[rows] , literal[string] )
identifier[actions] =[]
keyword[if] identifier[row_e] > literal[int] :
identifier[actions] . identifier[append] ( identifier[rows] [ identifier[row_e] - literal[int] ][ identifier[col_e] ])
keyword[if] identifier[row_e] < literal[int] :
identifier[actions] . identifier[append] ( identifier[rows] [ identifier[row_e] + literal[int] ][ identifier[col_e] ])
keyword[if] identifier[col_e] > literal[int] :
identifier[actions] . identifier[append] ( identifier[rows] [ identifier[row_e] ][ identifier[col_e] - literal[int] ])
keyword[if] identifier[col_e] < literal[int] :
identifier[actions] . identifier[append] ( identifier[rows] [ identifier[row_e] ][ identifier[col_e] + literal[int] ])
keyword[return] identifier[actions]
|
def actions(self, state):
"""Returns a list of the pieces we can move to the empty space."""
rows = string_to_list(state)
(row_e, col_e) = find_location(rows, 'e')
actions = []
if row_e > 0:
actions.append(rows[row_e - 1][col_e]) # depends on [control=['if'], data=['row_e']]
if row_e < 2:
actions.append(rows[row_e + 1][col_e]) # depends on [control=['if'], data=['row_e']]
if col_e > 0:
actions.append(rows[row_e][col_e - 1]) # depends on [control=['if'], data=['col_e']]
if col_e < 2:
actions.append(rows[row_e][col_e + 1]) # depends on [control=['if'], data=['col_e']]
return actions
|
def get_log_entry(self, log_entry_id):
"""Gets the ``LogEntry`` specified by its ``Id``.
In plenary mode, the exact ``Id`` is found or a ``NotFound``
results. Otherwise, the returned ``LogEntry`` may have a
different ``Id`` than requested, such as the case where a
duplicate ``Id`` was assigned to a ``LogEntry`` and retained for
compatibility.
arg: log_entry_id (osid.id.Id): the ``Id`` of the
``LogEntry`` to retrieve
return: (osid.logging.LogEntry) - the returned ``LogEntry``
raise: NotFound - no ``LogEntry`` found with the given ``Id``
raise: NullArgument - ``log_entry_id`` is ``null``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.ResourceLookupSession.get_resource
# NOTE: This implementation currently ignores plenary view
collection = JSONClientValidated('logging',
collection='LogEntry',
runtime=self._runtime)
result = collection.find_one(
dict({'_id': ObjectId(self._get_id(log_entry_id, 'logging').get_identifier())},
**self._view_filter()))
return objects.LogEntry(osid_object_map=result, runtime=self._runtime, proxy=self._proxy)
|
def function[get_log_entry, parameter[self, log_entry_id]]:
constant[Gets the ``LogEntry`` specified by its ``Id``.
In plenary mode, the exact ``Id`` is found or a ``NotFound``
results. Otherwise, the returned ``LogEntry`` may have a
different ``Id`` than requested, such as the case where a
duplicate ``Id`` was assigned to a ``LogEntry`` and retained for
compatibility.
arg: log_entry_id (osid.id.Id): the ``Id`` of the
``LogEntry`` to retrieve
return: (osid.logging.LogEntry) - the returned ``LogEntry``
raise: NotFound - no ``LogEntry`` found with the given ``Id``
raise: NullArgument - ``log_entry_id`` is ``null``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
*compliance: mandatory -- This method must be implemented.*
]
variable[collection] assign[=] call[name[JSONClientValidated], parameter[constant[logging]]]
variable[result] assign[=] call[name[collection].find_one, parameter[call[name[dict], parameter[dictionary[[<ast.Constant object at 0x7da2041dbe20>], [<ast.Call object at 0x7da2041d8dc0>]]]]]]
return[call[name[objects].LogEntry, parameter[]]]
|
keyword[def] identifier[get_log_entry] ( identifier[self] , identifier[log_entry_id] ):
literal[string]
identifier[collection] = identifier[JSONClientValidated] ( literal[string] ,
identifier[collection] = literal[string] ,
identifier[runtime] = identifier[self] . identifier[_runtime] )
identifier[result] = identifier[collection] . identifier[find_one] (
identifier[dict] ({ literal[string] : identifier[ObjectId] ( identifier[self] . identifier[_get_id] ( identifier[log_entry_id] , literal[string] ). identifier[get_identifier] ())},
** identifier[self] . identifier[_view_filter] ()))
keyword[return] identifier[objects] . identifier[LogEntry] ( identifier[osid_object_map] = identifier[result] , identifier[runtime] = identifier[self] . identifier[_runtime] , identifier[proxy] = identifier[self] . identifier[_proxy] )
|
def get_log_entry(self, log_entry_id):
"""Gets the ``LogEntry`` specified by its ``Id``.
In plenary mode, the exact ``Id`` is found or a ``NotFound``
results. Otherwise, the returned ``LogEntry`` may have a
different ``Id`` than requested, such as the case where a
duplicate ``Id`` was assigned to a ``LogEntry`` and retained for
compatibility.
arg: log_entry_id (osid.id.Id): the ``Id`` of the
``LogEntry`` to retrieve
return: (osid.logging.LogEntry) - the returned ``LogEntry``
raise: NotFound - no ``LogEntry`` found with the given ``Id``
raise: NullArgument - ``log_entry_id`` is ``null``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.ResourceLookupSession.get_resource
# NOTE: This implementation currently ignores plenary view
collection = JSONClientValidated('logging', collection='LogEntry', runtime=self._runtime)
result = collection.find_one(dict({'_id': ObjectId(self._get_id(log_entry_id, 'logging').get_identifier())}, **self._view_filter()))
return objects.LogEntry(osid_object_map=result, runtime=self._runtime, proxy=self._proxy)
|
def wait_for_parent_image_build(self, nvr):
"""
Given image NVR, wait for the build that produced it to show up in koji.
If it doesn't within the timeout, raise an error.
:return build info dict with 'nvr' and 'id' keys
"""
self.log.info('Waiting for Koji build for parent image %s', nvr)
poll_start = time.time()
while time.time() - poll_start < self.poll_timeout:
build = self.koji_session.getBuild(nvr)
if build:
self.log.info('Parent image Koji build found with id %s', build.get('id'))
if build['state'] != koji.BUILD_STATES['COMPLETE']:
exc_msg = ('Parent image Koji build for {} with id {} state is not COMPLETE.')
raise KojiParentBuildMissing(exc_msg.format(nvr, build.get('id')))
return build
time.sleep(self.poll_interval)
raise KojiParentBuildMissing('Parent image Koji build NOT found for {}!'.format(nvr))
|
def function[wait_for_parent_image_build, parameter[self, nvr]]:
constant[
Given image NVR, wait for the build that produced it to show up in koji.
If it doesn't within the timeout, raise an error.
:return build info dict with 'nvr' and 'id' keys
]
call[name[self].log.info, parameter[constant[Waiting for Koji build for parent image %s], name[nvr]]]
variable[poll_start] assign[=] call[name[time].time, parameter[]]
while compare[binary_operation[call[name[time].time, parameter[]] - name[poll_start]] less[<] name[self].poll_timeout] begin[:]
variable[build] assign[=] call[name[self].koji_session.getBuild, parameter[name[nvr]]]
if name[build] begin[:]
call[name[self].log.info, parameter[constant[Parent image Koji build found with id %s], call[name[build].get, parameter[constant[id]]]]]
if compare[call[name[build]][constant[state]] not_equal[!=] call[name[koji].BUILD_STATES][constant[COMPLETE]]] begin[:]
variable[exc_msg] assign[=] constant[Parent image Koji build for {} with id {} state is not COMPLETE.]
<ast.Raise object at 0x7da207f9ba00>
return[name[build]]
call[name[time].sleep, parameter[name[self].poll_interval]]
<ast.Raise object at 0x7da207f98730>
|
keyword[def] identifier[wait_for_parent_image_build] ( identifier[self] , identifier[nvr] ):
literal[string]
identifier[self] . identifier[log] . identifier[info] ( literal[string] , identifier[nvr] )
identifier[poll_start] = identifier[time] . identifier[time] ()
keyword[while] identifier[time] . identifier[time] ()- identifier[poll_start] < identifier[self] . identifier[poll_timeout] :
identifier[build] = identifier[self] . identifier[koji_session] . identifier[getBuild] ( identifier[nvr] )
keyword[if] identifier[build] :
identifier[self] . identifier[log] . identifier[info] ( literal[string] , identifier[build] . identifier[get] ( literal[string] ))
keyword[if] identifier[build] [ literal[string] ]!= identifier[koji] . identifier[BUILD_STATES] [ literal[string] ]:
identifier[exc_msg] =( literal[string] )
keyword[raise] identifier[KojiParentBuildMissing] ( identifier[exc_msg] . identifier[format] ( identifier[nvr] , identifier[build] . identifier[get] ( literal[string] )))
keyword[return] identifier[build]
identifier[time] . identifier[sleep] ( identifier[self] . identifier[poll_interval] )
keyword[raise] identifier[KojiParentBuildMissing] ( literal[string] . identifier[format] ( identifier[nvr] ))
|
def wait_for_parent_image_build(self, nvr):
"""
Given image NVR, wait for the build that produced it to show up in koji.
If it doesn't within the timeout, raise an error.
:return build info dict with 'nvr' and 'id' keys
"""
self.log.info('Waiting for Koji build for parent image %s', nvr)
poll_start = time.time()
while time.time() - poll_start < self.poll_timeout:
build = self.koji_session.getBuild(nvr)
if build:
self.log.info('Parent image Koji build found with id %s', build.get('id'))
if build['state'] != koji.BUILD_STATES['COMPLETE']:
exc_msg = 'Parent image Koji build for {} with id {} state is not COMPLETE.'
raise KojiParentBuildMissing(exc_msg.format(nvr, build.get('id'))) # depends on [control=['if'], data=[]]
return build # depends on [control=['if'], data=[]]
time.sleep(self.poll_interval) # depends on [control=['while'], data=[]]
raise KojiParentBuildMissing('Parent image Koji build NOT found for {}!'.format(nvr))
|
def plot_polygon(polygon, show=True, **kwargs):
"""
Plot a shapely polygon using matplotlib.
Parameters
------------
polygon : shapely.geometry.Polygon
Polygon to be plotted
show : bool
If True will display immediately
**kwargs
Passed to plt.plot
"""
import matplotlib.pyplot as plt
def plot_single(single):
plt.plot(*single.exterior.xy, **kwargs)
for interior in single.interiors:
plt.plot(*interior.xy, **kwargs)
# make aspect ratio non- stupid
plt.axes().set_aspect('equal', 'datalim')
if util.is_sequence(polygon):
[plot_single(i) for i in polygon]
else:
plot_single(polygon)
if show:
plt.show()
|
def function[plot_polygon, parameter[polygon, show]]:
constant[
Plot a shapely polygon using matplotlib.
Parameters
------------
polygon : shapely.geometry.Polygon
Polygon to be plotted
show : bool
If True will display immediately
**kwargs
Passed to plt.plot
]
import module[matplotlib.pyplot] as alias[plt]
def function[plot_single, parameter[single]]:
call[name[plt].plot, parameter[<ast.Starred object at 0x7da20c7c9330>]]
for taget[name[interior]] in starred[name[single].interiors] begin[:]
call[name[plt].plot, parameter[<ast.Starred object at 0x7da20c7ca500>]]
call[call[name[plt].axes, parameter[]].set_aspect, parameter[constant[equal], constant[datalim]]]
if call[name[util].is_sequence, parameter[name[polygon]]] begin[:]
<ast.ListComp object at 0x7da20c7cbc10>
if name[show] begin[:]
call[name[plt].show, parameter[]]
|
keyword[def] identifier[plot_polygon] ( identifier[polygon] , identifier[show] = keyword[True] ,** identifier[kwargs] ):
literal[string]
keyword[import] identifier[matplotlib] . identifier[pyplot] keyword[as] identifier[plt]
keyword[def] identifier[plot_single] ( identifier[single] ):
identifier[plt] . identifier[plot] (* identifier[single] . identifier[exterior] . identifier[xy] ,** identifier[kwargs] )
keyword[for] identifier[interior] keyword[in] identifier[single] . identifier[interiors] :
identifier[plt] . identifier[plot] (* identifier[interior] . identifier[xy] ,** identifier[kwargs] )
identifier[plt] . identifier[axes] (). identifier[set_aspect] ( literal[string] , literal[string] )
keyword[if] identifier[util] . identifier[is_sequence] ( identifier[polygon] ):
[ identifier[plot_single] ( identifier[i] ) keyword[for] identifier[i] keyword[in] identifier[polygon] ]
keyword[else] :
identifier[plot_single] ( identifier[polygon] )
keyword[if] identifier[show] :
identifier[plt] . identifier[show] ()
|
def plot_polygon(polygon, show=True, **kwargs):
"""
Plot a shapely polygon using matplotlib.
Parameters
------------
polygon : shapely.geometry.Polygon
Polygon to be plotted
show : bool
If True will display immediately
**kwargs
Passed to plt.plot
"""
import matplotlib.pyplot as plt
def plot_single(single):
plt.plot(*single.exterior.xy, **kwargs)
for interior in single.interiors:
plt.plot(*interior.xy, **kwargs) # depends on [control=['for'], data=['interior']]
# make aspect ratio non- stupid
plt.axes().set_aspect('equal', 'datalim')
if util.is_sequence(polygon):
[plot_single(i) for i in polygon] # depends on [control=['if'], data=[]]
else:
plot_single(polygon)
if show:
plt.show() # depends on [control=['if'], data=[]]
|
def get_parent_gradebooks(self, gradebook_id):
"""Gets the parents of the given gradebook.
arg: gradebook_id (osid.id.Id): the ``Id`` of a gradebook
return: (osid.grading.GradebookList) - the parents of the
gradebook
raise: NotFound - ``gradebook_id`` is not found
raise: NullArgument - ``gradebook_id`` is ``null``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.BinHierarchySession.get_parent_bins
if self._catalog_session is not None:
return self._catalog_session.get_parent_catalogs(catalog_id=gradebook_id)
return GradebookLookupSession(
self._proxy,
self._runtime).get_gradebooks_by_ids(
list(self.get_parent_gradebook_ids(gradebook_id)))
|
def function[get_parent_gradebooks, parameter[self, gradebook_id]]:
constant[Gets the parents of the given gradebook.
arg: gradebook_id (osid.id.Id): the ``Id`` of a gradebook
return: (osid.grading.GradebookList) - the parents of the
gradebook
raise: NotFound - ``gradebook_id`` is not found
raise: NullArgument - ``gradebook_id`` is ``null``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
*compliance: mandatory -- This method must be implemented.*
]
if compare[name[self]._catalog_session is_not constant[None]] begin[:]
return[call[name[self]._catalog_session.get_parent_catalogs, parameter[]]]
return[call[call[name[GradebookLookupSession], parameter[name[self]._proxy, name[self]._runtime]].get_gradebooks_by_ids, parameter[call[name[list], parameter[call[name[self].get_parent_gradebook_ids, parameter[name[gradebook_id]]]]]]]]
|
keyword[def] identifier[get_parent_gradebooks] ( identifier[self] , identifier[gradebook_id] ):
literal[string]
keyword[if] identifier[self] . identifier[_catalog_session] keyword[is] keyword[not] keyword[None] :
keyword[return] identifier[self] . identifier[_catalog_session] . identifier[get_parent_catalogs] ( identifier[catalog_id] = identifier[gradebook_id] )
keyword[return] identifier[GradebookLookupSession] (
identifier[self] . identifier[_proxy] ,
identifier[self] . identifier[_runtime] ). identifier[get_gradebooks_by_ids] (
identifier[list] ( identifier[self] . identifier[get_parent_gradebook_ids] ( identifier[gradebook_id] )))
|
def get_parent_gradebooks(self, gradebook_id):
"""Gets the parents of the given gradebook.
arg: gradebook_id (osid.id.Id): the ``Id`` of a gradebook
return: (osid.grading.GradebookList) - the parents of the
gradebook
raise: NotFound - ``gradebook_id`` is not found
raise: NullArgument - ``gradebook_id`` is ``null``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.BinHierarchySession.get_parent_bins
if self._catalog_session is not None:
return self._catalog_session.get_parent_catalogs(catalog_id=gradebook_id) # depends on [control=['if'], data=[]]
return GradebookLookupSession(self._proxy, self._runtime).get_gradebooks_by_ids(list(self.get_parent_gradebook_ids(gradebook_id)))
|
def cublasSgemm(handle, transa, transb, m, n, k, alpha, A, lda, B, ldb, beta, C, ldc):
"""
Matrix-matrix product for real general matrix.
"""
status = _libcublas.cublasSgemm_v2(handle,
_CUBLAS_OP[transa],
_CUBLAS_OP[transb], m, n, k,
ctypes.byref(ctypes.c_float(alpha)),
int(A), lda, int(B), ldb,
ctypes.byref(ctypes.c_float(beta)),
int(C), ldc)
cublasCheckStatus(status)
|
def function[cublasSgemm, parameter[handle, transa, transb, m, n, k, alpha, A, lda, B, ldb, beta, C, ldc]]:
constant[
Matrix-matrix product for real general matrix.
]
variable[status] assign[=] call[name[_libcublas].cublasSgemm_v2, parameter[name[handle], call[name[_CUBLAS_OP]][name[transa]], call[name[_CUBLAS_OP]][name[transb]], name[m], name[n], name[k], call[name[ctypes].byref, parameter[call[name[ctypes].c_float, parameter[name[alpha]]]]], call[name[int], parameter[name[A]]], name[lda], call[name[int], parameter[name[B]]], name[ldb], call[name[ctypes].byref, parameter[call[name[ctypes].c_float, parameter[name[beta]]]]], call[name[int], parameter[name[C]]], name[ldc]]]
call[name[cublasCheckStatus], parameter[name[status]]]
|
keyword[def] identifier[cublasSgemm] ( identifier[handle] , identifier[transa] , identifier[transb] , identifier[m] , identifier[n] , identifier[k] , identifier[alpha] , identifier[A] , identifier[lda] , identifier[B] , identifier[ldb] , identifier[beta] , identifier[C] , identifier[ldc] ):
literal[string]
identifier[status] = identifier[_libcublas] . identifier[cublasSgemm_v2] ( identifier[handle] ,
identifier[_CUBLAS_OP] [ identifier[transa] ],
identifier[_CUBLAS_OP] [ identifier[transb] ], identifier[m] , identifier[n] , identifier[k] ,
identifier[ctypes] . identifier[byref] ( identifier[ctypes] . identifier[c_float] ( identifier[alpha] )),
identifier[int] ( identifier[A] ), identifier[lda] , identifier[int] ( identifier[B] ), identifier[ldb] ,
identifier[ctypes] . identifier[byref] ( identifier[ctypes] . identifier[c_float] ( identifier[beta] )),
identifier[int] ( identifier[C] ), identifier[ldc] )
identifier[cublasCheckStatus] ( identifier[status] )
|
def cublasSgemm(handle, transa, transb, m, n, k, alpha, A, lda, B, ldb, beta, C, ldc):
"""
Matrix-matrix product for real general matrix.
"""
status = _libcublas.cublasSgemm_v2(handle, _CUBLAS_OP[transa], _CUBLAS_OP[transb], m, n, k, ctypes.byref(ctypes.c_float(alpha)), int(A), lda, int(B), ldb, ctypes.byref(ctypes.c_float(beta)), int(C), ldc)
cublasCheckStatus(status)
|
def getVolumes(self, obj=None, renderer=None):
"""
Return the list of the rendered Volumes.
"""
if renderer is None:
renderer = self.renderer
elif isinstance(renderer, int):
renderer = self.renderers.index(renderer)
else:
return []
if obj is None or isinstance(obj, int):
if obj is None:
acs = renderer.GetVolumes()
elif obj >= len(self.renderers):
colors.printc("~timesError in getVolumes: non existing renderer", obj, c=1)
return []
else:
acs = self.renderers[obj].GetVolumes()
vols = []
acs.InitTraversal()
for i in range(acs.GetNumberOfItems()):
a = acs.GetNextItem()
if a.GetPickable():
r = self.renderers.index(renderer)
if a == self.axes_exist[r]:
continue
vols.append(a)
return vols
|
def function[getVolumes, parameter[self, obj, renderer]]:
constant[
Return the list of the rendered Volumes.
]
if compare[name[renderer] is constant[None]] begin[:]
variable[renderer] assign[=] name[self].renderer
if <ast.BoolOp object at 0x7da1b06086d0> begin[:]
if compare[name[obj] is constant[None]] begin[:]
variable[acs] assign[=] call[name[renderer].GetVolumes, parameter[]]
variable[vols] assign[=] list[[]]
call[name[acs].InitTraversal, parameter[]]
for taget[name[i]] in starred[call[name[range], parameter[call[name[acs].GetNumberOfItems, parameter[]]]]] begin[:]
variable[a] assign[=] call[name[acs].GetNextItem, parameter[]]
if call[name[a].GetPickable, parameter[]] begin[:]
variable[r] assign[=] call[name[self].renderers.index, parameter[name[renderer]]]
if compare[name[a] equal[==] call[name[self].axes_exist][name[r]]] begin[:]
continue
call[name[vols].append, parameter[name[a]]]
return[name[vols]]
|
keyword[def] identifier[getVolumes] ( identifier[self] , identifier[obj] = keyword[None] , identifier[renderer] = keyword[None] ):
literal[string]
keyword[if] identifier[renderer] keyword[is] keyword[None] :
identifier[renderer] = identifier[self] . identifier[renderer]
keyword[elif] identifier[isinstance] ( identifier[renderer] , identifier[int] ):
identifier[renderer] = identifier[self] . identifier[renderers] . identifier[index] ( identifier[renderer] )
keyword[else] :
keyword[return] []
keyword[if] identifier[obj] keyword[is] keyword[None] keyword[or] identifier[isinstance] ( identifier[obj] , identifier[int] ):
keyword[if] identifier[obj] keyword[is] keyword[None] :
identifier[acs] = identifier[renderer] . identifier[GetVolumes] ()
keyword[elif] identifier[obj] >= identifier[len] ( identifier[self] . identifier[renderers] ):
identifier[colors] . identifier[printc] ( literal[string] , identifier[obj] , identifier[c] = literal[int] )
keyword[return] []
keyword[else] :
identifier[acs] = identifier[self] . identifier[renderers] [ identifier[obj] ]. identifier[GetVolumes] ()
identifier[vols] =[]
identifier[acs] . identifier[InitTraversal] ()
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[acs] . identifier[GetNumberOfItems] ()):
identifier[a] = identifier[acs] . identifier[GetNextItem] ()
keyword[if] identifier[a] . identifier[GetPickable] ():
identifier[r] = identifier[self] . identifier[renderers] . identifier[index] ( identifier[renderer] )
keyword[if] identifier[a] == identifier[self] . identifier[axes_exist] [ identifier[r] ]:
keyword[continue]
identifier[vols] . identifier[append] ( identifier[a] )
keyword[return] identifier[vols]
|
def getVolumes(self, obj=None, renderer=None):
"""
Return the list of the rendered Volumes.
"""
if renderer is None:
renderer = self.renderer # depends on [control=['if'], data=['renderer']]
elif isinstance(renderer, int):
renderer = self.renderers.index(renderer) # depends on [control=['if'], data=[]]
else:
return []
if obj is None or isinstance(obj, int):
if obj is None:
acs = renderer.GetVolumes() # depends on [control=['if'], data=[]]
elif obj >= len(self.renderers):
colors.printc('~timesError in getVolumes: non existing renderer', obj, c=1)
return [] # depends on [control=['if'], data=['obj']]
else:
acs = self.renderers[obj].GetVolumes()
vols = []
acs.InitTraversal()
for i in range(acs.GetNumberOfItems()):
a = acs.GetNextItem()
if a.GetPickable():
r = self.renderers.index(renderer)
if a == self.axes_exist[r]:
continue # depends on [control=['if'], data=[]]
vols.append(a) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]]
return vols # depends on [control=['if'], data=[]]
|
def square(self, n_coeffs, do_overlap_add=False):
"""
Compute a "square" view of the frequency adaptive transform, by
resampling each frequency band such that they all contain the same
number of samples, and performing an overlap-add procedure in the
case where the sample frequency and duration differ
:param n_coeffs: The common size to which each frequency band should
be resampled
"""
resampled_bands = [
self._resample(band, n_coeffs)
for band in self.iter_bands()]
stacked = np.vstack(resampled_bands).T
fdim = FrequencyDimension(self.scale)
# TODO: This feels like it could be wrapped up nicely elsewhere
chunk_frequency = Picoseconds(int(np.round(
self.time_dimension.duration / Picoseconds(1) / n_coeffs)))
td = TimeDimension(frequency=chunk_frequency)
arr = ConstantRateTimeSeries(ArrayWithUnits(
stacked.reshape(-1, n_coeffs, self.n_bands),
dimensions=[self.time_dimension, td, fdim]))
if not do_overlap_add:
return arr
# Begin the overlap add procedure
overlap_ratio = self.time_dimension.overlap_ratio
if overlap_ratio == 0:
# no overlap add is necessary
return ArrayWithUnits(stacked, [td, fdim])
step_size_samples = int(n_coeffs * overlap_ratio)
first_dim = int(np.round(
(stacked.shape[0] * overlap_ratio) + (n_coeffs * overlap_ratio)))
output = ArrayWithUnits(
np.zeros((first_dim, self.n_bands)),
dimensions=[td, fdim])
for i, chunk in enumerate(arr):
start = step_size_samples * i
stop = start + n_coeffs
output[start: stop] += chunk.reshape((-1, self.n_bands))
return output
|
def function[square, parameter[self, n_coeffs, do_overlap_add]]:
constant[
Compute a "square" view of the frequency adaptive transform, by
resampling each frequency band such that they all contain the same
number of samples, and performing an overlap-add procedure in the
case where the sample frequency and duration differ
:param n_coeffs: The common size to which each frequency band should
be resampled
]
variable[resampled_bands] assign[=] <ast.ListComp object at 0x7da1b19a8850>
variable[stacked] assign[=] call[name[np].vstack, parameter[name[resampled_bands]]].T
variable[fdim] assign[=] call[name[FrequencyDimension], parameter[name[self].scale]]
variable[chunk_frequency] assign[=] call[name[Picoseconds], parameter[call[name[int], parameter[call[name[np].round, parameter[binary_operation[binary_operation[name[self].time_dimension.duration / call[name[Picoseconds], parameter[constant[1]]]] / name[n_coeffs]]]]]]]]
variable[td] assign[=] call[name[TimeDimension], parameter[]]
variable[arr] assign[=] call[name[ConstantRateTimeSeries], parameter[call[name[ArrayWithUnits], parameter[call[name[stacked].reshape, parameter[<ast.UnaryOp object at 0x7da1b1a1d0f0>, name[n_coeffs], name[self].n_bands]]]]]]
if <ast.UnaryOp object at 0x7da1b1a1f2b0> begin[:]
return[name[arr]]
variable[overlap_ratio] assign[=] name[self].time_dimension.overlap_ratio
if compare[name[overlap_ratio] equal[==] constant[0]] begin[:]
return[call[name[ArrayWithUnits], parameter[name[stacked], list[[<ast.Name object at 0x7da1b1a1faf0>, <ast.Name object at 0x7da1b1a1fb20>]]]]]
variable[step_size_samples] assign[=] call[name[int], parameter[binary_operation[name[n_coeffs] * name[overlap_ratio]]]]
variable[first_dim] assign[=] call[name[int], parameter[call[name[np].round, parameter[binary_operation[binary_operation[call[name[stacked].shape][constant[0]] * name[overlap_ratio]] + binary_operation[name[n_coeffs] * name[overlap_ratio]]]]]]]
variable[output] assign[=] call[name[ArrayWithUnits], parameter[call[name[np].zeros, parameter[tuple[[<ast.Name object at 0x7da1b1b0f7c0>, <ast.Attribute object at 0x7da1b1b0cb80>]]]]]]
for taget[tuple[[<ast.Name object at 0x7da1b1b0cd60>, <ast.Name object at 0x7da1b1b0e920>]]] in starred[call[name[enumerate], parameter[name[arr]]]] begin[:]
variable[start] assign[=] binary_operation[name[step_size_samples] * name[i]]
variable[stop] assign[=] binary_operation[name[start] + name[n_coeffs]]
<ast.AugAssign object at 0x7da1b1b0cf40>
return[name[output]]
|
keyword[def] identifier[square] ( identifier[self] , identifier[n_coeffs] , identifier[do_overlap_add] = keyword[False] ):
literal[string]
identifier[resampled_bands] =[
identifier[self] . identifier[_resample] ( identifier[band] , identifier[n_coeffs] )
keyword[for] identifier[band] keyword[in] identifier[self] . identifier[iter_bands] ()]
identifier[stacked] = identifier[np] . identifier[vstack] ( identifier[resampled_bands] ). identifier[T]
identifier[fdim] = identifier[FrequencyDimension] ( identifier[self] . identifier[scale] )
identifier[chunk_frequency] = identifier[Picoseconds] ( identifier[int] ( identifier[np] . identifier[round] (
identifier[self] . identifier[time_dimension] . identifier[duration] / identifier[Picoseconds] ( literal[int] )/ identifier[n_coeffs] )))
identifier[td] = identifier[TimeDimension] ( identifier[frequency] = identifier[chunk_frequency] )
identifier[arr] = identifier[ConstantRateTimeSeries] ( identifier[ArrayWithUnits] (
identifier[stacked] . identifier[reshape] (- literal[int] , identifier[n_coeffs] , identifier[self] . identifier[n_bands] ),
identifier[dimensions] =[ identifier[self] . identifier[time_dimension] , identifier[td] , identifier[fdim] ]))
keyword[if] keyword[not] identifier[do_overlap_add] :
keyword[return] identifier[arr]
identifier[overlap_ratio] = identifier[self] . identifier[time_dimension] . identifier[overlap_ratio]
keyword[if] identifier[overlap_ratio] == literal[int] :
keyword[return] identifier[ArrayWithUnits] ( identifier[stacked] ,[ identifier[td] , identifier[fdim] ])
identifier[step_size_samples] = identifier[int] ( identifier[n_coeffs] * identifier[overlap_ratio] )
identifier[first_dim] = identifier[int] ( identifier[np] . identifier[round] (
( identifier[stacked] . identifier[shape] [ literal[int] ]* identifier[overlap_ratio] )+( identifier[n_coeffs] * identifier[overlap_ratio] )))
identifier[output] = identifier[ArrayWithUnits] (
identifier[np] . identifier[zeros] (( identifier[first_dim] , identifier[self] . identifier[n_bands] )),
identifier[dimensions] =[ identifier[td] , identifier[fdim] ])
keyword[for] identifier[i] , identifier[chunk] keyword[in] identifier[enumerate] ( identifier[arr] ):
identifier[start] = identifier[step_size_samples] * identifier[i]
identifier[stop] = identifier[start] + identifier[n_coeffs]
identifier[output] [ identifier[start] : identifier[stop] ]+= identifier[chunk] . identifier[reshape] ((- literal[int] , identifier[self] . identifier[n_bands] ))
keyword[return] identifier[output]
|
def square(self, n_coeffs, do_overlap_add=False):
"""
Compute a "square" view of the frequency adaptive transform, by
resampling each frequency band such that they all contain the same
number of samples, and performing an overlap-add procedure in the
case where the sample frequency and duration differ
:param n_coeffs: The common size to which each frequency band should
be resampled
"""
resampled_bands = [self._resample(band, n_coeffs) for band in self.iter_bands()]
stacked = np.vstack(resampled_bands).T
fdim = FrequencyDimension(self.scale)
# TODO: This feels like it could be wrapped up nicely elsewhere
chunk_frequency = Picoseconds(int(np.round(self.time_dimension.duration / Picoseconds(1) / n_coeffs)))
td = TimeDimension(frequency=chunk_frequency)
arr = ConstantRateTimeSeries(ArrayWithUnits(stacked.reshape(-1, n_coeffs, self.n_bands), dimensions=[self.time_dimension, td, fdim]))
if not do_overlap_add:
return arr # depends on [control=['if'], data=[]]
# Begin the overlap add procedure
overlap_ratio = self.time_dimension.overlap_ratio
if overlap_ratio == 0:
# no overlap add is necessary
return ArrayWithUnits(stacked, [td, fdim]) # depends on [control=['if'], data=[]]
step_size_samples = int(n_coeffs * overlap_ratio)
first_dim = int(np.round(stacked.shape[0] * overlap_ratio + n_coeffs * overlap_ratio))
output = ArrayWithUnits(np.zeros((first_dim, self.n_bands)), dimensions=[td, fdim])
for (i, chunk) in enumerate(arr):
start = step_size_samples * i
stop = start + n_coeffs
output[start:stop] += chunk.reshape((-1, self.n_bands)) # depends on [control=['for'], data=[]]
return output
|
def do_timer(self, args, arguments):
"""
::
Usage:
timer on
timer off
timer list
timer start NAME
timer stop NAME
timer resume NAME
timer reset [NAME]
Description (NOT IMPLEMENTED YET):
timer on | off
switches timers on and off not yet implemented.
If the timer is on each command will be timed and its
time is printed after the command. Please note that
background command times are not added.
timer list
list all timers
timer start NAME
starts the timer with the name. A start resets the timer to 0.
timer stop NAME
stops the timer
timer resume NAME
resumes the timer
timer reset NAME
resets the named timer to 0. If no name is specified all
timers are reset
Implementation note: we have a stopwatch in cloudmesh,
that we could copy into cmd3
"""
# print arguments
# print "args", args
args = args.lower()
if args in ("on", "off"):
self.with_timers = (args == "on")
Console.ok ("Timers are now: {0}".format(args))
if args == 'list':
self.list_timers()
else:
self.do_timer.__doc__
|
def function[do_timer, parameter[self, args, arguments]]:
constant[
::
Usage:
timer on
timer off
timer list
timer start NAME
timer stop NAME
timer resume NAME
timer reset [NAME]
Description (NOT IMPLEMENTED YET):
timer on | off
switches timers on and off not yet implemented.
If the timer is on each command will be timed and its
time is printed after the command. Please note that
background command times are not added.
timer list
list all timers
timer start NAME
starts the timer with the name. A start resets the timer to 0.
timer stop NAME
stops the timer
timer resume NAME
resumes the timer
timer reset NAME
resets the named timer to 0. If no name is specified all
timers are reset
Implementation note: we have a stopwatch in cloudmesh,
that we could copy into cmd3
]
variable[args] assign[=] call[name[args].lower, parameter[]]
if compare[name[args] in tuple[[<ast.Constant object at 0x7da20c6a9630>, <ast.Constant object at 0x7da20c6ab010>]]] begin[:]
name[self].with_timers assign[=] compare[name[args] equal[==] constant[on]]
call[name[Console].ok, parameter[call[constant[Timers are now: {0}].format, parameter[name[args]]]]]
if compare[name[args] equal[==] constant[list]] begin[:]
call[name[self].list_timers, parameter[]]
|
keyword[def] identifier[do_timer] ( identifier[self] , identifier[args] , identifier[arguments] ):
literal[string]
identifier[args] = identifier[args] . identifier[lower] ()
keyword[if] identifier[args] keyword[in] ( literal[string] , literal[string] ):
identifier[self] . identifier[with_timers] =( identifier[args] == literal[string] )
identifier[Console] . identifier[ok] ( literal[string] . identifier[format] ( identifier[args] ))
keyword[if] identifier[args] == literal[string] :
identifier[self] . identifier[list_timers] ()
keyword[else] :
identifier[self] . identifier[do_timer] . identifier[__doc__]
|
def do_timer(self, args, arguments):
"""
::
Usage:
timer on
timer off
timer list
timer start NAME
timer stop NAME
timer resume NAME
timer reset [NAME]
Description (NOT IMPLEMENTED YET):
timer on | off
switches timers on and off not yet implemented.
If the timer is on each command will be timed and its
time is printed after the command. Please note that
background command times are not added.
timer list
list all timers
timer start NAME
starts the timer with the name. A start resets the timer to 0.
timer stop NAME
stops the timer
timer resume NAME
resumes the timer
timer reset NAME
resets the named timer to 0. If no name is specified all
timers are reset
Implementation note: we have a stopwatch in cloudmesh,
that we could copy into cmd3
"""
# print arguments
# print "args", args
args = args.lower()
if args in ('on', 'off'):
self.with_timers = args == 'on'
Console.ok('Timers are now: {0}'.format(args)) # depends on [control=['if'], data=['args']]
if args == 'list':
self.list_timers() # depends on [control=['if'], data=[]]
else:
self.do_timer.__doc__
|
def _adjust_cwd(self, path):
"""
Return an adjusted path if we're emulating a "current working
directory" for the server.
"""
if type(path) is unicode:
path = path.encode('utf-8')
if self._cwd is None:
return path
if (len(path) > 0) and (path[0] == '/'):
# absolute path
return path
if self._cwd == '/':
return self._cwd + path
return self._cwd + '/' + path
|
def function[_adjust_cwd, parameter[self, path]]:
constant[
Return an adjusted path if we're emulating a "current working
directory" for the server.
]
if compare[call[name[type], parameter[name[path]]] is name[unicode]] begin[:]
variable[path] assign[=] call[name[path].encode, parameter[constant[utf-8]]]
if compare[name[self]._cwd is constant[None]] begin[:]
return[name[path]]
if <ast.BoolOp object at 0x7da1b10c6b90> begin[:]
return[name[path]]
if compare[name[self]._cwd equal[==] constant[/]] begin[:]
return[binary_operation[name[self]._cwd + name[path]]]
return[binary_operation[binary_operation[name[self]._cwd + constant[/]] + name[path]]]
|
keyword[def] identifier[_adjust_cwd] ( identifier[self] , identifier[path] ):
literal[string]
keyword[if] identifier[type] ( identifier[path] ) keyword[is] identifier[unicode] :
identifier[path] = identifier[path] . identifier[encode] ( literal[string] )
keyword[if] identifier[self] . identifier[_cwd] keyword[is] keyword[None] :
keyword[return] identifier[path]
keyword[if] ( identifier[len] ( identifier[path] )> literal[int] ) keyword[and] ( identifier[path] [ literal[int] ]== literal[string] ):
keyword[return] identifier[path]
keyword[if] identifier[self] . identifier[_cwd] == literal[string] :
keyword[return] identifier[self] . identifier[_cwd] + identifier[path]
keyword[return] identifier[self] . identifier[_cwd] + literal[string] + identifier[path]
|
def _adjust_cwd(self, path):
"""
Return an adjusted path if we're emulating a "current working
directory" for the server.
"""
if type(path) is unicode:
path = path.encode('utf-8') # depends on [control=['if'], data=[]]
if self._cwd is None:
return path # depends on [control=['if'], data=[]]
if len(path) > 0 and path[0] == '/':
# absolute path
return path # depends on [control=['if'], data=[]]
if self._cwd == '/':
return self._cwd + path # depends on [control=['if'], data=[]]
return self._cwd + '/' + path
|
def simpleAttrs(self):
"""provide a copy of this player's attributes as a dictionary, but with objects flattened into a string representation of the object"""
simpleAttrs = {}
for k,v in iteritems(self.attrs):
if k in ["_matches"]: continue # attributes to specifically ignore
try: simpleAttrs[k] = v.type
except: simpleAttrs[k] = v
return simpleAttrs
|
def function[simpleAttrs, parameter[self]]:
constant[provide a copy of this player's attributes as a dictionary, but with objects flattened into a string representation of the object]
variable[simpleAttrs] assign[=] dictionary[[], []]
for taget[tuple[[<ast.Name object at 0x7da1b0911930>, <ast.Name object at 0x7da1b09135e0>]]] in starred[call[name[iteritems], parameter[name[self].attrs]]] begin[:]
if compare[name[k] in list[[<ast.Constant object at 0x7da1b0911870>]]] begin[:]
continue
<ast.Try object at 0x7da1b09116c0>
return[name[simpleAttrs]]
|
keyword[def] identifier[simpleAttrs] ( identifier[self] ):
literal[string]
identifier[simpleAttrs] ={}
keyword[for] identifier[k] , identifier[v] keyword[in] identifier[iteritems] ( identifier[self] . identifier[attrs] ):
keyword[if] identifier[k] keyword[in] [ literal[string] ]: keyword[continue]
keyword[try] : identifier[simpleAttrs] [ identifier[k] ]= identifier[v] . identifier[type]
keyword[except] : identifier[simpleAttrs] [ identifier[k] ]= identifier[v]
keyword[return] identifier[simpleAttrs]
|
def simpleAttrs(self):
"""provide a copy of this player's attributes as a dictionary, but with objects flattened into a string representation of the object"""
simpleAttrs = {}
for (k, v) in iteritems(self.attrs):
if k in ['_matches']:
continue # attributes to specifically ignore # depends on [control=['if'], data=[]]
try:
simpleAttrs[k] = v.type # depends on [control=['try'], data=[]]
except:
simpleAttrs[k] = v # depends on [control=['except'], data=[]] # depends on [control=['for'], data=[]]
return simpleAttrs
|
async def getChatMember(self, chat_id, user_id):
""" See: https://core.telegram.org/bots/api#getchatmember """
p = _strip(locals())
return await self._api_request('getChatMember', _rectify(p))
|
<ast.AsyncFunctionDef object at 0x7da1b1bad900>
|
keyword[async] keyword[def] identifier[getChatMember] ( identifier[self] , identifier[chat_id] , identifier[user_id] ):
literal[string]
identifier[p] = identifier[_strip] ( identifier[locals] ())
keyword[return] keyword[await] identifier[self] . identifier[_api_request] ( literal[string] , identifier[_rectify] ( identifier[p] ))
|
async def getChatMember(self, chat_id, user_id):
""" See: https://core.telegram.org/bots/api#getchatmember """
p = _strip(locals())
return await self._api_request('getChatMember', _rectify(p))
|
def validate_VALUERANGE(in_value, restriction):
"""
Test to ensure that a value sits between a lower and upper bound.
Parameters: A Decimal value and a tuple, containing a lower and upper bound,
both as Decimal values.
"""
if len(restriction) != 2:
raise ValidationError("Template ERROR: Only two values can be specified in a date range.")
value = _get_val(in_value)
if type(value) is list:
for subval in value:
if type(subval) is tuple:
subval = subval[1]
validate_VALUERANGE(subval, restriction)
else:
min_val = Decimal(restriction[0])
max_val = Decimal(restriction[1])
val = Decimal(value)
if val < min_val or val > max_val:
raise ValidationError("VALUERANGE: %s, %s"%(min_val, max_val))
|
def function[validate_VALUERANGE, parameter[in_value, restriction]]:
constant[
Test to ensure that a value sits between a lower and upper bound.
Parameters: A Decimal value and a tuple, containing a lower and upper bound,
both as Decimal values.
]
if compare[call[name[len], parameter[name[restriction]]] not_equal[!=] constant[2]] begin[:]
<ast.Raise object at 0x7da20c796320>
variable[value] assign[=] call[name[_get_val], parameter[name[in_value]]]
if compare[call[name[type], parameter[name[value]]] is name[list]] begin[:]
for taget[name[subval]] in starred[name[value]] begin[:]
if compare[call[name[type], parameter[name[subval]]] is name[tuple]] begin[:]
variable[subval] assign[=] call[name[subval]][constant[1]]
call[name[validate_VALUERANGE], parameter[name[subval], name[restriction]]]
|
keyword[def] identifier[validate_VALUERANGE] ( identifier[in_value] , identifier[restriction] ):
literal[string]
keyword[if] identifier[len] ( identifier[restriction] )!= literal[int] :
keyword[raise] identifier[ValidationError] ( literal[string] )
identifier[value] = identifier[_get_val] ( identifier[in_value] )
keyword[if] identifier[type] ( identifier[value] ) keyword[is] identifier[list] :
keyword[for] identifier[subval] keyword[in] identifier[value] :
keyword[if] identifier[type] ( identifier[subval] ) keyword[is] identifier[tuple] :
identifier[subval] = identifier[subval] [ literal[int] ]
identifier[validate_VALUERANGE] ( identifier[subval] , identifier[restriction] )
keyword[else] :
identifier[min_val] = identifier[Decimal] ( identifier[restriction] [ literal[int] ])
identifier[max_val] = identifier[Decimal] ( identifier[restriction] [ literal[int] ])
identifier[val] = identifier[Decimal] ( identifier[value] )
keyword[if] identifier[val] < identifier[min_val] keyword[or] identifier[val] > identifier[max_val] :
keyword[raise] identifier[ValidationError] ( literal[string] %( identifier[min_val] , identifier[max_val] ))
|
def validate_VALUERANGE(in_value, restriction):
"""
Test to ensure that a value sits between a lower and upper bound.
Parameters: A Decimal value and a tuple, containing a lower and upper bound,
both as Decimal values.
"""
if len(restriction) != 2:
raise ValidationError('Template ERROR: Only two values can be specified in a date range.') # depends on [control=['if'], data=[]]
value = _get_val(in_value)
if type(value) is list:
for subval in value:
if type(subval) is tuple:
subval = subval[1] # depends on [control=['if'], data=[]]
validate_VALUERANGE(subval, restriction) # depends on [control=['for'], data=['subval']] # depends on [control=['if'], data=[]]
else:
min_val = Decimal(restriction[0])
max_val = Decimal(restriction[1])
val = Decimal(value)
if val < min_val or val > max_val:
raise ValidationError('VALUERANGE: %s, %s' % (min_val, max_val)) # depends on [control=['if'], data=[]]
|
def maintained_selection():
"""Maintain selection during context
Example:
>>> with maintained_selection():
... # Modify selection
... node.setSelected(on=False, clear_all_selected=True)
>>> # Selection restored
"""
previous_selection = hou.selectedNodes()
try:
yield
finally:
if previous_selection:
for node in previous_selection:
node.setSelected(on=True)
else:
for node in previous_selection:
node.setSelected(on=False)
|
def function[maintained_selection, parameter[]]:
constant[Maintain selection during context
Example:
>>> with maintained_selection():
... # Modify selection
... node.setSelected(on=False, clear_all_selected=True)
>>> # Selection restored
]
variable[previous_selection] assign[=] call[name[hou].selectedNodes, parameter[]]
<ast.Try object at 0x7da1b25d9390>
|
keyword[def] identifier[maintained_selection] ():
literal[string]
identifier[previous_selection] = identifier[hou] . identifier[selectedNodes] ()
keyword[try] :
keyword[yield]
keyword[finally] :
keyword[if] identifier[previous_selection] :
keyword[for] identifier[node] keyword[in] identifier[previous_selection] :
identifier[node] . identifier[setSelected] ( identifier[on] = keyword[True] )
keyword[else] :
keyword[for] identifier[node] keyword[in] identifier[previous_selection] :
identifier[node] . identifier[setSelected] ( identifier[on] = keyword[False] )
|
def maintained_selection():
"""Maintain selection during context
Example:
>>> with maintained_selection():
... # Modify selection
... node.setSelected(on=False, clear_all_selected=True)
>>> # Selection restored
"""
previous_selection = hou.selectedNodes()
try:
yield # depends on [control=['try'], data=[]]
finally:
if previous_selection:
for node in previous_selection:
node.setSelected(on=True) # depends on [control=['for'], data=['node']] # depends on [control=['if'], data=[]]
else:
for node in previous_selection:
node.setSelected(on=False) # depends on [control=['for'], data=['node']]
|
def _get_tz(tz):
""" for a tz-aware type, return an encoded zone """
zone = timezones.get_timezone(tz)
if zone is None:
zone = tz.utcoffset().total_seconds()
return zone
|
def function[_get_tz, parameter[tz]]:
constant[ for a tz-aware type, return an encoded zone ]
variable[zone] assign[=] call[name[timezones].get_timezone, parameter[name[tz]]]
if compare[name[zone] is constant[None]] begin[:]
variable[zone] assign[=] call[call[name[tz].utcoffset, parameter[]].total_seconds, parameter[]]
return[name[zone]]
|
keyword[def] identifier[_get_tz] ( identifier[tz] ):
literal[string]
identifier[zone] = identifier[timezones] . identifier[get_timezone] ( identifier[tz] )
keyword[if] identifier[zone] keyword[is] keyword[None] :
identifier[zone] = identifier[tz] . identifier[utcoffset] (). identifier[total_seconds] ()
keyword[return] identifier[zone]
|
def _get_tz(tz):
""" for a tz-aware type, return an encoded zone """
zone = timezones.get_timezone(tz)
if zone is None:
zone = tz.utcoffset().total_seconds() # depends on [control=['if'], data=['zone']]
return zone
|
def send(self, out, addr=_MDNS_ADDR, port=_MDNS_PORT):
"""Sends an outgoing packet."""
# This is a quick test to see if we can parse the packets we generate
#temp = DNSIncoming(out.packet())
for i in self.intf.values():
try:
return i.sendto(out.packet(), 0, (addr, port))
except:
traceback.print_exc()
# Ignore this, it may be a temporary loss of network connection
return -1
|
def function[send, parameter[self, out, addr, port]]:
constant[Sends an outgoing packet.]
for taget[name[i]] in starred[call[name[self].intf.values, parameter[]]] begin[:]
<ast.Try object at 0x7da1b0fea4a0>
|
keyword[def] identifier[send] ( identifier[self] , identifier[out] , identifier[addr] = identifier[_MDNS_ADDR] , identifier[port] = identifier[_MDNS_PORT] ):
literal[string]
keyword[for] identifier[i] keyword[in] identifier[self] . identifier[intf] . identifier[values] ():
keyword[try] :
keyword[return] identifier[i] . identifier[sendto] ( identifier[out] . identifier[packet] (), literal[int] ,( identifier[addr] , identifier[port] ))
keyword[except] :
identifier[traceback] . identifier[print_exc] ()
keyword[return] - literal[int]
|
def send(self, out, addr=_MDNS_ADDR, port=_MDNS_PORT):
"""Sends an outgoing packet."""
# This is a quick test to see if we can parse the packets we generate
#temp = DNSIncoming(out.packet())
for i in self.intf.values():
try:
return i.sendto(out.packet(), 0, (addr, port)) # depends on [control=['try'], data=[]]
except:
traceback.print_exc()
# Ignore this, it may be a temporary loss of network connection
return -1 # depends on [control=['except'], data=[]] # depends on [control=['for'], data=['i']]
|
def concatenate(cls, overlay):
"""
Concatenates an NdOverlay of Image types into a single 3D
xarray Dataset.
"""
if not isinstance(overlay, NdOverlay):
raise ValueError('Only NdOverlays can be concatenated')
xarr = xr.concat([v.data.transpose() for v in overlay.values()],
pd.Index(overlay.keys(), name=overlay.kdims[0].name))
params = dict(get_param_values(overlay.last),
vdims=overlay.last.vdims,
kdims=overlay.kdims+overlay.last.kdims)
return Dataset(xarr.transpose(), datatype=['xarray'], **params)
|
def function[concatenate, parameter[cls, overlay]]:
constant[
Concatenates an NdOverlay of Image types into a single 3D
xarray Dataset.
]
if <ast.UnaryOp object at 0x7da20c7cab90> begin[:]
<ast.Raise object at 0x7da20c7ca980>
variable[xarr] assign[=] call[name[xr].concat, parameter[<ast.ListComp object at 0x7da20c7c92a0>, call[name[pd].Index, parameter[call[name[overlay].keys, parameter[]]]]]]
variable[params] assign[=] call[name[dict], parameter[call[name[get_param_values], parameter[name[overlay].last]]]]
return[call[name[Dataset], parameter[call[name[xarr].transpose, parameter[]]]]]
|
keyword[def] identifier[concatenate] ( identifier[cls] , identifier[overlay] ):
literal[string]
keyword[if] keyword[not] identifier[isinstance] ( identifier[overlay] , identifier[NdOverlay] ):
keyword[raise] identifier[ValueError] ( literal[string] )
identifier[xarr] = identifier[xr] . identifier[concat] ([ identifier[v] . identifier[data] . identifier[transpose] () keyword[for] identifier[v] keyword[in] identifier[overlay] . identifier[values] ()],
identifier[pd] . identifier[Index] ( identifier[overlay] . identifier[keys] (), identifier[name] = identifier[overlay] . identifier[kdims] [ literal[int] ]. identifier[name] ))
identifier[params] = identifier[dict] ( identifier[get_param_values] ( identifier[overlay] . identifier[last] ),
identifier[vdims] = identifier[overlay] . identifier[last] . identifier[vdims] ,
identifier[kdims] = identifier[overlay] . identifier[kdims] + identifier[overlay] . identifier[last] . identifier[kdims] )
keyword[return] identifier[Dataset] ( identifier[xarr] . identifier[transpose] (), identifier[datatype] =[ literal[string] ],** identifier[params] )
|
def concatenate(cls, overlay):
"""
Concatenates an NdOverlay of Image types into a single 3D
xarray Dataset.
"""
if not isinstance(overlay, NdOverlay):
raise ValueError('Only NdOverlays can be concatenated') # depends on [control=['if'], data=[]]
xarr = xr.concat([v.data.transpose() for v in overlay.values()], pd.Index(overlay.keys(), name=overlay.kdims[0].name))
params = dict(get_param_values(overlay.last), vdims=overlay.last.vdims, kdims=overlay.kdims + overlay.last.kdims)
return Dataset(xarr.transpose(), datatype=['xarray'], **params)
|
def _proxy(self):
"""
Generate an instance context for the instance, the context is capable of
performing various actions. All instance actions are proxied to the context
:returns: TaskQueueContext for this TaskQueueInstance
:rtype: twilio.rest.taskrouter.v1.workspace.task_queue.TaskQueueContext
"""
if self._context is None:
self._context = TaskQueueContext(
self._version,
workspace_sid=self._solution['workspace_sid'],
sid=self._solution['sid'],
)
return self._context
|
def function[_proxy, parameter[self]]:
constant[
Generate an instance context for the instance, the context is capable of
performing various actions. All instance actions are proxied to the context
:returns: TaskQueueContext for this TaskQueueInstance
:rtype: twilio.rest.taskrouter.v1.workspace.task_queue.TaskQueueContext
]
if compare[name[self]._context is constant[None]] begin[:]
name[self]._context assign[=] call[name[TaskQueueContext], parameter[name[self]._version]]
return[name[self]._context]
|
keyword[def] identifier[_proxy] ( identifier[self] ):
literal[string]
keyword[if] identifier[self] . identifier[_context] keyword[is] keyword[None] :
identifier[self] . identifier[_context] = identifier[TaskQueueContext] (
identifier[self] . identifier[_version] ,
identifier[workspace_sid] = identifier[self] . identifier[_solution] [ literal[string] ],
identifier[sid] = identifier[self] . identifier[_solution] [ literal[string] ],
)
keyword[return] identifier[self] . identifier[_context]
|
def _proxy(self):
"""
Generate an instance context for the instance, the context is capable of
performing various actions. All instance actions are proxied to the context
:returns: TaskQueueContext for this TaskQueueInstance
:rtype: twilio.rest.taskrouter.v1.workspace.task_queue.TaskQueueContext
"""
if self._context is None:
self._context = TaskQueueContext(self._version, workspace_sid=self._solution['workspace_sid'], sid=self._solution['sid']) # depends on [control=['if'], data=[]]
return self._context
|
def table_dump(self, table):
"""dump all the rows of the given table name"""
if not table: raise ValueError("no table")
print('------- dumping table {}'.format(table))
pipes = ["gzip"]
outfile_path = self._get_outfile_path(table)
cmd = self._get_args(
"pg_dump",
"--table={}".format(table),
#"--data-only",
"--clean",
"--no-owner",
"--column-inserts",
)
cmd = ' '.join(cmd)
cmd += ' | {}'.format(' | '.join(pipes))
cmd += ' > {}'.format(outfile_path)
self._run_cmd(cmd)
print('------- dumped table {}'.format(table))
return True
|
def function[table_dump, parameter[self, table]]:
constant[dump all the rows of the given table name]
if <ast.UnaryOp object at 0x7da20c76ff70> begin[:]
<ast.Raise object at 0x7da20c76fee0>
call[name[print], parameter[call[constant[------- dumping table {}].format, parameter[name[table]]]]]
variable[pipes] assign[=] list[[<ast.Constant object at 0x7da20c76dd50>]]
variable[outfile_path] assign[=] call[name[self]._get_outfile_path, parameter[name[table]]]
variable[cmd] assign[=] call[name[self]._get_args, parameter[constant[pg_dump], call[constant[--table={}].format, parameter[name[table]]], constant[--clean], constant[--no-owner], constant[--column-inserts]]]
variable[cmd] assign[=] call[constant[ ].join, parameter[name[cmd]]]
<ast.AugAssign object at 0x7da20e962590>
<ast.AugAssign object at 0x7da20e962770>
call[name[self]._run_cmd, parameter[name[cmd]]]
call[name[print], parameter[call[constant[------- dumped table {}].format, parameter[name[table]]]]]
return[constant[True]]
|
keyword[def] identifier[table_dump] ( identifier[self] , identifier[table] ):
literal[string]
keyword[if] keyword[not] identifier[table] : keyword[raise] identifier[ValueError] ( literal[string] )
identifier[print] ( literal[string] . identifier[format] ( identifier[table] ))
identifier[pipes] =[ literal[string] ]
identifier[outfile_path] = identifier[self] . identifier[_get_outfile_path] ( identifier[table] )
identifier[cmd] = identifier[self] . identifier[_get_args] (
literal[string] ,
literal[string] . identifier[format] ( identifier[table] ),
literal[string] ,
literal[string] ,
literal[string] ,
)
identifier[cmd] = literal[string] . identifier[join] ( identifier[cmd] )
identifier[cmd] += literal[string] . identifier[format] ( literal[string] . identifier[join] ( identifier[pipes] ))
identifier[cmd] += literal[string] . identifier[format] ( identifier[outfile_path] )
identifier[self] . identifier[_run_cmd] ( identifier[cmd] )
identifier[print] ( literal[string] . identifier[format] ( identifier[table] ))
keyword[return] keyword[True]
|
def table_dump(self, table):
"""dump all the rows of the given table name"""
if not table:
raise ValueError('no table') # depends on [control=['if'], data=[]]
print('------- dumping table {}'.format(table))
pipes = ['gzip']
outfile_path = self._get_outfile_path(table)
#"--data-only",
cmd = self._get_args('pg_dump', '--table={}'.format(table), '--clean', '--no-owner', '--column-inserts')
cmd = ' '.join(cmd)
cmd += ' | {}'.format(' | '.join(pipes))
cmd += ' > {}'.format(outfile_path)
self._run_cmd(cmd)
print('------- dumped table {}'.format(table))
return True
|
def IDENTITY(val):
'''
This is a basic "equality" index keygen, primarily meant to be used for
things like::
Model.query.filter(col='value')
Where ``FULL_TEXT`` would transform a sentence like "A Simple Sentence" into
an inverted index searchable by the words "a", "simple", and/or "sentence",
``IDENTITY`` will only be searchable by the orginal full sentence with the
same capitalization - "A Simple Sentence". See ``IDENTITY_CI`` for the
same function, only case-insensitive.
'''
if not val:
return None
if not isinstance(val, six.string_types_ex):
val = str(val)
return [val]
|
def function[IDENTITY, parameter[val]]:
constant[
This is a basic "equality" index keygen, primarily meant to be used for
things like::
Model.query.filter(col='value')
Where ``FULL_TEXT`` would transform a sentence like "A Simple Sentence" into
an inverted index searchable by the words "a", "simple", and/or "sentence",
``IDENTITY`` will only be searchable by the orginal full sentence with the
same capitalization - "A Simple Sentence". See ``IDENTITY_CI`` for the
same function, only case-insensitive.
]
if <ast.UnaryOp object at 0x7da1b05c68f0> begin[:]
return[constant[None]]
if <ast.UnaryOp object at 0x7da1b05c7f40> begin[:]
variable[val] assign[=] call[name[str], parameter[name[val]]]
return[list[[<ast.Name object at 0x7da1b05c69b0>]]]
|
keyword[def] identifier[IDENTITY] ( identifier[val] ):
literal[string]
keyword[if] keyword[not] identifier[val] :
keyword[return] keyword[None]
keyword[if] keyword[not] identifier[isinstance] ( identifier[val] , identifier[six] . identifier[string_types_ex] ):
identifier[val] = identifier[str] ( identifier[val] )
keyword[return] [ identifier[val] ]
|
def IDENTITY(val):
"""
This is a basic "equality" index keygen, primarily meant to be used for
things like::
Model.query.filter(col='value')
Where ``FULL_TEXT`` would transform a sentence like "A Simple Sentence" into
an inverted index searchable by the words "a", "simple", and/or "sentence",
``IDENTITY`` will only be searchable by the orginal full sentence with the
same capitalization - "A Simple Sentence". See ``IDENTITY_CI`` for the
same function, only case-insensitive.
"""
if not val:
return None # depends on [control=['if'], data=[]]
if not isinstance(val, six.string_types_ex):
val = str(val) # depends on [control=['if'], data=[]]
return [val]
|
def current_git_dir():
"""Locate the .git directory."""
path = os.path.abspath(os.curdir)
while path != '/':
if os.path.isdir(os.path.join(path, '.git')):
return os.path.join(path, '.git')
path = os.path.dirname(path)
return None
|
def function[current_git_dir, parameter[]]:
constant[Locate the .git directory.]
variable[path] assign[=] call[name[os].path.abspath, parameter[name[os].curdir]]
while compare[name[path] not_equal[!=] constant[/]] begin[:]
if call[name[os].path.isdir, parameter[call[name[os].path.join, parameter[name[path], constant[.git]]]]] begin[:]
return[call[name[os].path.join, parameter[name[path], constant[.git]]]]
variable[path] assign[=] call[name[os].path.dirname, parameter[name[path]]]
return[constant[None]]
|
keyword[def] identifier[current_git_dir] ():
literal[string]
identifier[path] = identifier[os] . identifier[path] . identifier[abspath] ( identifier[os] . identifier[curdir] )
keyword[while] identifier[path] != literal[string] :
keyword[if] identifier[os] . identifier[path] . identifier[isdir] ( identifier[os] . identifier[path] . identifier[join] ( identifier[path] , literal[string] )):
keyword[return] identifier[os] . identifier[path] . identifier[join] ( identifier[path] , literal[string] )
identifier[path] = identifier[os] . identifier[path] . identifier[dirname] ( identifier[path] )
keyword[return] keyword[None]
|
def current_git_dir():
"""Locate the .git directory."""
path = os.path.abspath(os.curdir)
while path != '/':
if os.path.isdir(os.path.join(path, '.git')):
return os.path.join(path, '.git') # depends on [control=['if'], data=[]]
path = os.path.dirname(path) # depends on [control=['while'], data=['path']]
return None
|
def on_epoch_end(self) -> None:
'Updates indexes after each epoch for shuffling'
self.indexes = np.arange(self.nrows)
if self.shuffle:
np.random.shuffle(self.indexes)
|
def function[on_epoch_end, parameter[self]]:
constant[Updates indexes after each epoch for shuffling]
name[self].indexes assign[=] call[name[np].arange, parameter[name[self].nrows]]
if name[self].shuffle begin[:]
call[name[np].random.shuffle, parameter[name[self].indexes]]
|
keyword[def] identifier[on_epoch_end] ( identifier[self] )-> keyword[None] :
literal[string]
identifier[self] . identifier[indexes] = identifier[np] . identifier[arange] ( identifier[self] . identifier[nrows] )
keyword[if] identifier[self] . identifier[shuffle] :
identifier[np] . identifier[random] . identifier[shuffle] ( identifier[self] . identifier[indexes] )
|
def on_epoch_end(self) -> None:
"""Updates indexes after each epoch for shuffling"""
self.indexes = np.arange(self.nrows)
if self.shuffle:
np.random.shuffle(self.indexes) # depends on [control=['if'], data=[]]
|
def works(self, type_id):
"""
This method retrieve a iterable of Works of the given type.
args: Crossref allowed document Types (String)
return: Works()
"""
context = '%s/%s' % (self.ENDPOINT, str(type_id))
return Works(context=context)
|
def function[works, parameter[self, type_id]]:
constant[
This method retrieve a iterable of Works of the given type.
args: Crossref allowed document Types (String)
return: Works()
]
variable[context] assign[=] binary_operation[constant[%s/%s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Attribute object at 0x7da1b113a7a0>, <ast.Call object at 0x7da1b113aaa0>]]]
return[call[name[Works], parameter[]]]
|
keyword[def] identifier[works] ( identifier[self] , identifier[type_id] ):
literal[string]
identifier[context] = literal[string] %( identifier[self] . identifier[ENDPOINT] , identifier[str] ( identifier[type_id] ))
keyword[return] identifier[Works] ( identifier[context] = identifier[context] )
|
def works(self, type_id):
"""
This method retrieve a iterable of Works of the given type.
args: Crossref allowed document Types (String)
return: Works()
"""
context = '%s/%s' % (self.ENDPOINT, str(type_id))
return Works(context=context)
|
def archive_observer(population, num_generations, num_evaluations, args):
"""Print the current archive to the screen.
This function displays the current archive of the evolutionary
computation to the screen.
.. Arguments:
population -- the population of Individuals
num_generations -- the number of elapsed generations
num_evaluations -- the number of candidate solution evaluations
args -- a dictionary of keyword arguments
"""
archive = args['_ec'].archive
print('----------------------------------------------------------------------------')
print(' Archive ({0:5} individuals)'.format(len(archive)))
print('----------------------------------------------------------------------------')
for a in archive:
print(a)
print('----------------------------------------------------------------------------')
|
def function[archive_observer, parameter[population, num_generations, num_evaluations, args]]:
constant[Print the current archive to the screen.
This function displays the current archive of the evolutionary
computation to the screen.
.. Arguments:
population -- the population of Individuals
num_generations -- the number of elapsed generations
num_evaluations -- the number of candidate solution evaluations
args -- a dictionary of keyword arguments
]
variable[archive] assign[=] call[name[args]][constant[_ec]].archive
call[name[print], parameter[constant[----------------------------------------------------------------------------]]]
call[name[print], parameter[call[constant[ Archive ({0:5} individuals)].format, parameter[call[name[len], parameter[name[archive]]]]]]]
call[name[print], parameter[constant[----------------------------------------------------------------------------]]]
for taget[name[a]] in starred[name[archive]] begin[:]
call[name[print], parameter[name[a]]]
call[name[print], parameter[constant[----------------------------------------------------------------------------]]]
|
keyword[def] identifier[archive_observer] ( identifier[population] , identifier[num_generations] , identifier[num_evaluations] , identifier[args] ):
literal[string]
identifier[archive] = identifier[args] [ literal[string] ]. identifier[archive]
identifier[print] ( literal[string] )
identifier[print] ( literal[string] . identifier[format] ( identifier[len] ( identifier[archive] )))
identifier[print] ( literal[string] )
keyword[for] identifier[a] keyword[in] identifier[archive] :
identifier[print] ( identifier[a] )
identifier[print] ( literal[string] )
|
def archive_observer(population, num_generations, num_evaluations, args):
"""Print the current archive to the screen.
This function displays the current archive of the evolutionary
computation to the screen.
.. Arguments:
population -- the population of Individuals
num_generations -- the number of elapsed generations
num_evaluations -- the number of candidate solution evaluations
args -- a dictionary of keyword arguments
"""
archive = args['_ec'].archive
print('----------------------------------------------------------------------------')
print(' Archive ({0:5} individuals)'.format(len(archive)))
print('----------------------------------------------------------------------------')
for a in archive:
print(a) # depends on [control=['for'], data=['a']]
print('----------------------------------------------------------------------------')
|
def register_new_node(suffix_node_id=None):
"""Factory method, registers new node.
"""
node_id = uuid4()
event = Node.Created(originator_id=node_id, suffix_node_id=suffix_node_id)
entity = Node.mutate(event=event)
publish(event)
return entity
|
def function[register_new_node, parameter[suffix_node_id]]:
constant[Factory method, registers new node.
]
variable[node_id] assign[=] call[name[uuid4], parameter[]]
variable[event] assign[=] call[name[Node].Created, parameter[]]
variable[entity] assign[=] call[name[Node].mutate, parameter[]]
call[name[publish], parameter[name[event]]]
return[name[entity]]
|
keyword[def] identifier[register_new_node] ( identifier[suffix_node_id] = keyword[None] ):
literal[string]
identifier[node_id] = identifier[uuid4] ()
identifier[event] = identifier[Node] . identifier[Created] ( identifier[originator_id] = identifier[node_id] , identifier[suffix_node_id] = identifier[suffix_node_id] )
identifier[entity] = identifier[Node] . identifier[mutate] ( identifier[event] = identifier[event] )
identifier[publish] ( identifier[event] )
keyword[return] identifier[entity]
|
def register_new_node(suffix_node_id=None):
"""Factory method, registers new node.
"""
node_id = uuid4()
event = Node.Created(originator_id=node_id, suffix_node_id=suffix_node_id)
entity = Node.mutate(event=event)
publish(event)
return entity
|
def _colorbar_format(minval, maxval):
"""Return the format string for the colorbar."""
if not (np.isfinite(minval) and np.isfinite(maxval)):
return str(maxval)
else:
return '%.{}f'.format(_digits(minval, maxval))
|
def function[_colorbar_format, parameter[minval, maxval]]:
constant[Return the format string for the colorbar.]
if <ast.UnaryOp object at 0x7da1b1e90760> begin[:]
return[call[name[str], parameter[name[maxval]]]]
|
keyword[def] identifier[_colorbar_format] ( identifier[minval] , identifier[maxval] ):
literal[string]
keyword[if] keyword[not] ( identifier[np] . identifier[isfinite] ( identifier[minval] ) keyword[and] identifier[np] . identifier[isfinite] ( identifier[maxval] )):
keyword[return] identifier[str] ( identifier[maxval] )
keyword[else] :
keyword[return] literal[string] . identifier[format] ( identifier[_digits] ( identifier[minval] , identifier[maxval] ))
|
def _colorbar_format(minval, maxval):
"""Return the format string for the colorbar."""
if not (np.isfinite(minval) and np.isfinite(maxval)):
return str(maxval) # depends on [control=['if'], data=[]]
else:
return '%.{}f'.format(_digits(minval, maxval))
|
def _vasprun_float(f):
"""
Large numbers are often represented as ********* in the vasprun.
This function parses these values as np.nan
"""
try:
return float(f)
except ValueError as e:
f = f.strip()
if f == '*' * len(f):
warnings.warn('Float overflow (*******) encountered in vasprun')
return np.nan
raise e
|
def function[_vasprun_float, parameter[f]]:
constant[
Large numbers are often represented as ********* in the vasprun.
This function parses these values as np.nan
]
<ast.Try object at 0x7da204347010>
|
keyword[def] identifier[_vasprun_float] ( identifier[f] ):
literal[string]
keyword[try] :
keyword[return] identifier[float] ( identifier[f] )
keyword[except] identifier[ValueError] keyword[as] identifier[e] :
identifier[f] = identifier[f] . identifier[strip] ()
keyword[if] identifier[f] == literal[string] * identifier[len] ( identifier[f] ):
identifier[warnings] . identifier[warn] ( literal[string] )
keyword[return] identifier[np] . identifier[nan]
keyword[raise] identifier[e]
|
def _vasprun_float(f):
"""
Large numbers are often represented as ********* in the vasprun.
This function parses these values as np.nan
"""
try:
return float(f) # depends on [control=['try'], data=[]]
except ValueError as e:
f = f.strip()
if f == '*' * len(f):
warnings.warn('Float overflow (*******) encountered in vasprun')
return np.nan # depends on [control=['if'], data=[]]
raise e # depends on [control=['except'], data=['e']]
|
def _plot_figure(self, idx, fig_format='json'):
"""
Returns the figure in html format on the
first call and
"""
self.plot.update(idx)
if self.embed:
return self.renderer.diff(self.plot)
|
def function[_plot_figure, parameter[self, idx, fig_format]]:
constant[
Returns the figure in html format on the
first call and
]
call[name[self].plot.update, parameter[name[idx]]]
if name[self].embed begin[:]
return[call[name[self].renderer.diff, parameter[name[self].plot]]]
|
keyword[def] identifier[_plot_figure] ( identifier[self] , identifier[idx] , identifier[fig_format] = literal[string] ):
literal[string]
identifier[self] . identifier[plot] . identifier[update] ( identifier[idx] )
keyword[if] identifier[self] . identifier[embed] :
keyword[return] identifier[self] . identifier[renderer] . identifier[diff] ( identifier[self] . identifier[plot] )
|
def _plot_figure(self, idx, fig_format='json'):
"""
Returns the figure in html format on the
first call and
"""
self.plot.update(idx)
if self.embed:
return self.renderer.diff(self.plot) # depends on [control=['if'], data=[]]
|
def exclude(self, func=None):
"""
Return a new Collection excluding some items
Parameters:
func : function(Node) -> Scalar
A function that, when called on each item
in the collection, returns a boolean-like
value. If no function is provided, then
truthy items will be removed.
Returns:
A new Collection consisting of the items
where bool(func(item)) == False
"""
func = _make_callable(func)
inverse = lambda x: not func(x)
return self.filter(inverse)
|
def function[exclude, parameter[self, func]]:
constant[
Return a new Collection excluding some items
Parameters:
func : function(Node) -> Scalar
A function that, when called on each item
in the collection, returns a boolean-like
value. If no function is provided, then
truthy items will be removed.
Returns:
A new Collection consisting of the items
where bool(func(item)) == False
]
variable[func] assign[=] call[name[_make_callable], parameter[name[func]]]
variable[inverse] assign[=] <ast.Lambda object at 0x7da2041d8700>
return[call[name[self].filter, parameter[name[inverse]]]]
|
keyword[def] identifier[exclude] ( identifier[self] , identifier[func] = keyword[None] ):
literal[string]
identifier[func] = identifier[_make_callable] ( identifier[func] )
identifier[inverse] = keyword[lambda] identifier[x] : keyword[not] identifier[func] ( identifier[x] )
keyword[return] identifier[self] . identifier[filter] ( identifier[inverse] )
|
def exclude(self, func=None):
"""
Return a new Collection excluding some items
Parameters:
func : function(Node) -> Scalar
A function that, when called on each item
in the collection, returns a boolean-like
value. If no function is provided, then
truthy items will be removed.
Returns:
A new Collection consisting of the items
where bool(func(item)) == False
"""
func = _make_callable(func)
inverse = lambda x: not func(x)
return self.filter(inverse)
|
def create_anchor_from_header(header, existing_anchors=None):
"""
Creates GitHub style auto-generated anchor tags from header line strings
:param header: The portion of the line that should be converted
:param existing_anchors: A dictionary of AnchorHub tags to auto-generated
anchors
:return: A string auto-generated anchor in the GitHub format
"""
# Strip white space on the left/right and make lower case
out = header.strip().lower()
# Replace groups of white space with hyphens
out = re.sub(r"\s+", lambda x: "-", out, flags=re.UNICODE)
# Remove characters that aren't alphanumeric, hyphens, or spaces
out = re.sub(r"[^\w\- ]+", lambda x: "", out, flags=re.UNICODE)
if existing_anchors and out in existing_anchors.values():
i = 1
while (out + "-" + str(i)) in existing_anchors:
i += 1
return out + "-" + str(i)
else:
return out
|
def function[create_anchor_from_header, parameter[header, existing_anchors]]:
constant[
Creates GitHub style auto-generated anchor tags from header line strings
:param header: The portion of the line that should be converted
:param existing_anchors: A dictionary of AnchorHub tags to auto-generated
anchors
:return: A string auto-generated anchor in the GitHub format
]
variable[out] assign[=] call[call[name[header].strip, parameter[]].lower, parameter[]]
variable[out] assign[=] call[name[re].sub, parameter[constant[\s+], <ast.Lambda object at 0x7da1b087bca0>, name[out]]]
variable[out] assign[=] call[name[re].sub, parameter[constant[[^\w\- ]+], <ast.Lambda object at 0x7da1b087ae00>, name[out]]]
if <ast.BoolOp object at 0x7da1b087b0a0> begin[:]
variable[i] assign[=] constant[1]
while compare[binary_operation[binary_operation[name[out] + constant[-]] + call[name[str], parameter[name[i]]]] in name[existing_anchors]] begin[:]
<ast.AugAssign object at 0x7da1b0879fc0>
return[binary_operation[binary_operation[name[out] + constant[-]] + call[name[str], parameter[name[i]]]]]
|
keyword[def] identifier[create_anchor_from_header] ( identifier[header] , identifier[existing_anchors] = keyword[None] ):
literal[string]
identifier[out] = identifier[header] . identifier[strip] (). identifier[lower] ()
identifier[out] = identifier[re] . identifier[sub] ( literal[string] , keyword[lambda] identifier[x] : literal[string] , identifier[out] , identifier[flags] = identifier[re] . identifier[UNICODE] )
identifier[out] = identifier[re] . identifier[sub] ( literal[string] , keyword[lambda] identifier[x] : literal[string] , identifier[out] , identifier[flags] = identifier[re] . identifier[UNICODE] )
keyword[if] identifier[existing_anchors] keyword[and] identifier[out] keyword[in] identifier[existing_anchors] . identifier[values] ():
identifier[i] = literal[int]
keyword[while] ( identifier[out] + literal[string] + identifier[str] ( identifier[i] )) keyword[in] identifier[existing_anchors] :
identifier[i] += literal[int]
keyword[return] identifier[out] + literal[string] + identifier[str] ( identifier[i] )
keyword[else] :
keyword[return] identifier[out]
|
def create_anchor_from_header(header, existing_anchors=None):
"""
Creates GitHub style auto-generated anchor tags from header line strings
:param header: The portion of the line that should be converted
:param existing_anchors: A dictionary of AnchorHub tags to auto-generated
anchors
:return: A string auto-generated anchor in the GitHub format
"""
# Strip white space on the left/right and make lower case
out = header.strip().lower()
# Replace groups of white space with hyphens
out = re.sub('\\s+', lambda x: '-', out, flags=re.UNICODE)
# Remove characters that aren't alphanumeric, hyphens, or spaces
out = re.sub('[^\\w\\- ]+', lambda x: '', out, flags=re.UNICODE)
if existing_anchors and out in existing_anchors.values():
i = 1
while out + '-' + str(i) in existing_anchors:
i += 1 # depends on [control=['while'], data=[]]
return out + '-' + str(i) # depends on [control=['if'], data=[]]
else:
return out
|
def natsorted(seq, key=None, reverse=False, alg=ns.DEFAULT):
"""
Sorts an iterable naturally.
Parameters
----------
seq : iterable
The input to sort.
key : callable, optional
A key used to determine how to sort each element of the iterable.
It is **not** applied recursively.
It should accept a single argument and return a single value.
reverse : {{True, False}}, optional
Return the list in reversed sorted order. The default is
`False`.
alg : ns enum, optional
This option is used to control which algorithm `natsort`
uses when sorting. For details into these options, please see
the :class:`ns` class documentation. The default is `ns.INT`.
Returns
-------
out: list
The sorted input.
See Also
--------
natsort_keygen : Generates the key that makes natural sorting possible.
realsorted : A wrapper for ``natsorted(seq, alg=ns.REAL)``.
humansorted : A wrapper for ``natsorted(seq, alg=ns.LOCALE)``.
index_natsorted : Returns the sorted indexes from `natsorted`.
Examples
--------
Use `natsorted` just like the builtin `sorted`::
>>> a = ['num3', 'num5', 'num2']
>>> natsorted(a)
[{u}'num2', {u}'num3', {u}'num5']
"""
key = natsort_keygen(key, alg)
return sorted(seq, reverse=reverse, key=key)
|
def function[natsorted, parameter[seq, key, reverse, alg]]:
constant[
Sorts an iterable naturally.
Parameters
----------
seq : iterable
The input to sort.
key : callable, optional
A key used to determine how to sort each element of the iterable.
It is **not** applied recursively.
It should accept a single argument and return a single value.
reverse : {{True, False}}, optional
Return the list in reversed sorted order. The default is
`False`.
alg : ns enum, optional
This option is used to control which algorithm `natsort`
uses when sorting. For details into these options, please see
the :class:`ns` class documentation. The default is `ns.INT`.
Returns
-------
out: list
The sorted input.
See Also
--------
natsort_keygen : Generates the key that makes natural sorting possible.
realsorted : A wrapper for ``natsorted(seq, alg=ns.REAL)``.
humansorted : A wrapper for ``natsorted(seq, alg=ns.LOCALE)``.
index_natsorted : Returns the sorted indexes from `natsorted`.
Examples
--------
Use `natsorted` just like the builtin `sorted`::
>>> a = ['num3', 'num5', 'num2']
>>> natsorted(a)
[{u}'num2', {u}'num3', {u}'num5']
]
variable[key] assign[=] call[name[natsort_keygen], parameter[name[key], name[alg]]]
return[call[name[sorted], parameter[name[seq]]]]
|
keyword[def] identifier[natsorted] ( identifier[seq] , identifier[key] = keyword[None] , identifier[reverse] = keyword[False] , identifier[alg] = identifier[ns] . identifier[DEFAULT] ):
literal[string]
identifier[key] = identifier[natsort_keygen] ( identifier[key] , identifier[alg] )
keyword[return] identifier[sorted] ( identifier[seq] , identifier[reverse] = identifier[reverse] , identifier[key] = identifier[key] )
|
def natsorted(seq, key=None, reverse=False, alg=ns.DEFAULT):
"""
Sorts an iterable naturally.
Parameters
----------
seq : iterable
The input to sort.
key : callable, optional
A key used to determine how to sort each element of the iterable.
It is **not** applied recursively.
It should accept a single argument and return a single value.
reverse : {{True, False}}, optional
Return the list in reversed sorted order. The default is
`False`.
alg : ns enum, optional
This option is used to control which algorithm `natsort`
uses when sorting. For details into these options, please see
the :class:`ns` class documentation. The default is `ns.INT`.
Returns
-------
out: list
The sorted input.
See Also
--------
natsort_keygen : Generates the key that makes natural sorting possible.
realsorted : A wrapper for ``natsorted(seq, alg=ns.REAL)``.
humansorted : A wrapper for ``natsorted(seq, alg=ns.LOCALE)``.
index_natsorted : Returns the sorted indexes from `natsorted`.
Examples
--------
Use `natsorted` just like the builtin `sorted`::
>>> a = ['num3', 'num5', 'num2']
>>> natsorted(a)
[{u}'num2', {u}'num3', {u}'num5']
"""
key = natsort_keygen(key, alg)
return sorted(seq, reverse=reverse, key=key)
|
def validate_keys(dict_, expected, funcname):
"""Validate that a dictionary has an expected set of keys.
"""
expected = set(expected)
received = set(dict_)
missing = expected - received
if missing:
raise ValueError(
"Missing keys in {}:\n"
"Expected Keys: {}\n"
"Received Keys: {}".format(
funcname,
sorted(expected),
sorted(received),
)
)
unexpected = received - expected
if unexpected:
raise ValueError(
"Unexpected keys in {}:\n"
"Expected Keys: {}\n"
"Received Keys: {}".format(
funcname,
sorted(expected),
sorted(received),
)
)
|
def function[validate_keys, parameter[dict_, expected, funcname]]:
constant[Validate that a dictionary has an expected set of keys.
]
variable[expected] assign[=] call[name[set], parameter[name[expected]]]
variable[received] assign[=] call[name[set], parameter[name[dict_]]]
variable[missing] assign[=] binary_operation[name[expected] - name[received]]
if name[missing] begin[:]
<ast.Raise object at 0x7da1b1e8e1d0>
variable[unexpected] assign[=] binary_operation[name[received] - name[expected]]
if name[unexpected] begin[:]
<ast.Raise object at 0x7da1b1e8dde0>
|
keyword[def] identifier[validate_keys] ( identifier[dict_] , identifier[expected] , identifier[funcname] ):
literal[string]
identifier[expected] = identifier[set] ( identifier[expected] )
identifier[received] = identifier[set] ( identifier[dict_] )
identifier[missing] = identifier[expected] - identifier[received]
keyword[if] identifier[missing] :
keyword[raise] identifier[ValueError] (
literal[string]
literal[string]
literal[string] . identifier[format] (
identifier[funcname] ,
identifier[sorted] ( identifier[expected] ),
identifier[sorted] ( identifier[received] ),
)
)
identifier[unexpected] = identifier[received] - identifier[expected]
keyword[if] identifier[unexpected] :
keyword[raise] identifier[ValueError] (
literal[string]
literal[string]
literal[string] . identifier[format] (
identifier[funcname] ,
identifier[sorted] ( identifier[expected] ),
identifier[sorted] ( identifier[received] ),
)
)
|
def validate_keys(dict_, expected, funcname):
"""Validate that a dictionary has an expected set of keys.
"""
expected = set(expected)
received = set(dict_)
missing = expected - received
if missing:
raise ValueError('Missing keys in {}:\nExpected Keys: {}\nReceived Keys: {}'.format(funcname, sorted(expected), sorted(received))) # depends on [control=['if'], data=[]]
unexpected = received - expected
if unexpected:
raise ValueError('Unexpected keys in {}:\nExpected Keys: {}\nReceived Keys: {}'.format(funcname, sorted(expected), sorted(received))) # depends on [control=['if'], data=[]]
|
def relation_for_unit(unit=None, rid=None):
"""Get the json represenation of a unit's relation"""
unit = unit or remote_unit()
relation = relation_get(unit=unit, rid=rid)
for key in relation:
if key.endswith('-list'):
relation[key] = relation[key].split()
relation['__unit__'] = unit
return relation
|
def function[relation_for_unit, parameter[unit, rid]]:
constant[Get the json represenation of a unit's relation]
variable[unit] assign[=] <ast.BoolOp object at 0x7da18bc720b0>
variable[relation] assign[=] call[name[relation_get], parameter[]]
for taget[name[key]] in starred[name[relation]] begin[:]
if call[name[key].endswith, parameter[constant[-list]]] begin[:]
call[name[relation]][name[key]] assign[=] call[call[name[relation]][name[key]].split, parameter[]]
call[name[relation]][constant[__unit__]] assign[=] name[unit]
return[name[relation]]
|
keyword[def] identifier[relation_for_unit] ( identifier[unit] = keyword[None] , identifier[rid] = keyword[None] ):
literal[string]
identifier[unit] = identifier[unit] keyword[or] identifier[remote_unit] ()
identifier[relation] = identifier[relation_get] ( identifier[unit] = identifier[unit] , identifier[rid] = identifier[rid] )
keyword[for] identifier[key] keyword[in] identifier[relation] :
keyword[if] identifier[key] . identifier[endswith] ( literal[string] ):
identifier[relation] [ identifier[key] ]= identifier[relation] [ identifier[key] ]. identifier[split] ()
identifier[relation] [ literal[string] ]= identifier[unit]
keyword[return] identifier[relation]
|
def relation_for_unit(unit=None, rid=None):
"""Get the json represenation of a unit's relation"""
unit = unit or remote_unit()
relation = relation_get(unit=unit, rid=rid)
for key in relation:
if key.endswith('-list'):
relation[key] = relation[key].split() # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['key']]
relation['__unit__'] = unit
return relation
|
def locality_preserving_projections(self, coordinates, num_dims=None):
'''Locality Preserving Projections (LPP, linearized Laplacian Eigenmaps).'''
X = np.atleast_2d(coordinates) # n x d
L = self.laplacian(normed=True) # n x n
u,s,_ = np.linalg.svd(X.T.dot(X))
Fplus = np.linalg.pinv(u * np.sqrt(s)) # d x d
n, d = X.shape
if n >= d: # optimized order: F(X'LX)F'
T = Fplus.dot(X.T.dot(L.dot(X))).dot(Fplus.T)
else: # optimized order: (FX')L(XF')
T = Fplus.dot(X.T).dot(L.dot(X.dot(Fplus.T)))
L = 0.5*(T+T.T)
return _null_space(L, num_vecs=num_dims, overwrite=True)
|
def function[locality_preserving_projections, parameter[self, coordinates, num_dims]]:
constant[Locality Preserving Projections (LPP, linearized Laplacian Eigenmaps).]
variable[X] assign[=] call[name[np].atleast_2d, parameter[name[coordinates]]]
variable[L] assign[=] call[name[self].laplacian, parameter[]]
<ast.Tuple object at 0x7da18bcc9f30> assign[=] call[name[np].linalg.svd, parameter[call[name[X].T.dot, parameter[name[X]]]]]
variable[Fplus] assign[=] call[name[np].linalg.pinv, parameter[binary_operation[name[u] * call[name[np].sqrt, parameter[name[s]]]]]]
<ast.Tuple object at 0x7da18bcc9e70> assign[=] name[X].shape
if compare[name[n] greater_or_equal[>=] name[d]] begin[:]
variable[T] assign[=] call[call[name[Fplus].dot, parameter[call[name[X].T.dot, parameter[call[name[L].dot, parameter[name[X]]]]]]].dot, parameter[name[Fplus].T]]
variable[L] assign[=] binary_operation[constant[0.5] * binary_operation[name[T] + name[T].T]]
return[call[name[_null_space], parameter[name[L]]]]
|
keyword[def] identifier[locality_preserving_projections] ( identifier[self] , identifier[coordinates] , identifier[num_dims] = keyword[None] ):
literal[string]
identifier[X] = identifier[np] . identifier[atleast_2d] ( identifier[coordinates] )
identifier[L] = identifier[self] . identifier[laplacian] ( identifier[normed] = keyword[True] )
identifier[u] , identifier[s] , identifier[_] = identifier[np] . identifier[linalg] . identifier[svd] ( identifier[X] . identifier[T] . identifier[dot] ( identifier[X] ))
identifier[Fplus] = identifier[np] . identifier[linalg] . identifier[pinv] ( identifier[u] * identifier[np] . identifier[sqrt] ( identifier[s] ))
identifier[n] , identifier[d] = identifier[X] . identifier[shape]
keyword[if] identifier[n] >= identifier[d] :
identifier[T] = identifier[Fplus] . identifier[dot] ( identifier[X] . identifier[T] . identifier[dot] ( identifier[L] . identifier[dot] ( identifier[X] ))). identifier[dot] ( identifier[Fplus] . identifier[T] )
keyword[else] :
identifier[T] = identifier[Fplus] . identifier[dot] ( identifier[X] . identifier[T] ). identifier[dot] ( identifier[L] . identifier[dot] ( identifier[X] . identifier[dot] ( identifier[Fplus] . identifier[T] )))
identifier[L] = literal[int] *( identifier[T] + identifier[T] . identifier[T] )
keyword[return] identifier[_null_space] ( identifier[L] , identifier[num_vecs] = identifier[num_dims] , identifier[overwrite] = keyword[True] )
|
def locality_preserving_projections(self, coordinates, num_dims=None):
"""Locality Preserving Projections (LPP, linearized Laplacian Eigenmaps)."""
X = np.atleast_2d(coordinates) # n x d
L = self.laplacian(normed=True) # n x n
(u, s, _) = np.linalg.svd(X.T.dot(X))
Fplus = np.linalg.pinv(u * np.sqrt(s)) # d x d
(n, d) = X.shape
if n >= d: # optimized order: F(X'LX)F'
T = Fplus.dot(X.T.dot(L.dot(X))).dot(Fplus.T) # depends on [control=['if'], data=[]]
else: # optimized order: (FX')L(XF')
T = Fplus.dot(X.T).dot(L.dot(X.dot(Fplus.T)))
L = 0.5 * (T + T.T)
return _null_space(L, num_vecs=num_dims, overwrite=True)
|
def writeFasta(sequence, sequence_name, output_file):
"""
Writes a fasta sequence into a file.
:param sequence: a string with the sequence to be written
:param sequence_name: name of the the fasta sequence
:param output_file: /path/to/file.fa to be written
:returns: nothing
"""
i=0
f=open(output_file,'w')
f.write(">"+str(sequence_name)+"\n")
while i <= len(sequence):
f.write(sequence[i:i+60]+"\n")
i=i+60
f.close()
|
def function[writeFasta, parameter[sequence, sequence_name, output_file]]:
constant[
Writes a fasta sequence into a file.
:param sequence: a string with the sequence to be written
:param sequence_name: name of the the fasta sequence
:param output_file: /path/to/file.fa to be written
:returns: nothing
]
variable[i] assign[=] constant[0]
variable[f] assign[=] call[name[open], parameter[name[output_file], constant[w]]]
call[name[f].write, parameter[binary_operation[binary_operation[constant[>] + call[name[str], parameter[name[sequence_name]]]] + constant[
]]]]
while compare[name[i] less_or_equal[<=] call[name[len], parameter[name[sequence]]]] begin[:]
call[name[f].write, parameter[binary_operation[call[name[sequence]][<ast.Slice object at 0x7da20e9b0760>] + constant[
]]]]
variable[i] assign[=] binary_operation[name[i] + constant[60]]
call[name[f].close, parameter[]]
|
keyword[def] identifier[writeFasta] ( identifier[sequence] , identifier[sequence_name] , identifier[output_file] ):
literal[string]
identifier[i] = literal[int]
identifier[f] = identifier[open] ( identifier[output_file] , literal[string] )
identifier[f] . identifier[write] ( literal[string] + identifier[str] ( identifier[sequence_name] )+ literal[string] )
keyword[while] identifier[i] <= identifier[len] ( identifier[sequence] ):
identifier[f] . identifier[write] ( identifier[sequence] [ identifier[i] : identifier[i] + literal[int] ]+ literal[string] )
identifier[i] = identifier[i] + literal[int]
identifier[f] . identifier[close] ()
|
def writeFasta(sequence, sequence_name, output_file):
"""
Writes a fasta sequence into a file.
:param sequence: a string with the sequence to be written
:param sequence_name: name of the the fasta sequence
:param output_file: /path/to/file.fa to be written
:returns: nothing
"""
i = 0
f = open(output_file, 'w')
f.write('>' + str(sequence_name) + '\n')
while i <= len(sequence):
f.write(sequence[i:i + 60] + '\n')
i = i + 60 # depends on [control=['while'], data=['i']]
f.close()
|
def parse_prices_from_file_stream(self, file_stream) -> List[PriceModel]:
"""
Reads a file stream (i.e. from web form) containing a csv prices
into a list of Price models.
"""
content = file_stream.read().decode("utf-8")
file_stream.close()
if not content:
raise ValueError("The file is empty!")
result = self.get_prices_from_csv(content)
return result
|
def function[parse_prices_from_file_stream, parameter[self, file_stream]]:
constant[
Reads a file stream (i.e. from web form) containing a csv prices
into a list of Price models.
]
variable[content] assign[=] call[call[name[file_stream].read, parameter[]].decode, parameter[constant[utf-8]]]
call[name[file_stream].close, parameter[]]
if <ast.UnaryOp object at 0x7da207f9bfd0> begin[:]
<ast.Raise object at 0x7da207f9aa70>
variable[result] assign[=] call[name[self].get_prices_from_csv, parameter[name[content]]]
return[name[result]]
|
keyword[def] identifier[parse_prices_from_file_stream] ( identifier[self] , identifier[file_stream] )-> identifier[List] [ identifier[PriceModel] ]:
literal[string]
identifier[content] = identifier[file_stream] . identifier[read] (). identifier[decode] ( literal[string] )
identifier[file_stream] . identifier[close] ()
keyword[if] keyword[not] identifier[content] :
keyword[raise] identifier[ValueError] ( literal[string] )
identifier[result] = identifier[self] . identifier[get_prices_from_csv] ( identifier[content] )
keyword[return] identifier[result]
|
def parse_prices_from_file_stream(self, file_stream) -> List[PriceModel]:
"""
Reads a file stream (i.e. from web form) containing a csv prices
into a list of Price models.
"""
content = file_stream.read().decode('utf-8')
file_stream.close()
if not content:
raise ValueError('The file is empty!') # depends on [control=['if'], data=[]]
result = self.get_prices_from_csv(content)
return result
|
def maha_dist(df):
"""Compute the squared Mahalanobis Distance for each row in the dataframe
Given a list of rows `x`, each with `p` elements, a vector :math:\mu of the
row means of length `p`, and the :math:pxp covarence matrix of the columns :math:\Sigma,
The returned value for each row is:
.. math::
D^{2} = (x - \mu)^{T} \Sigma^{-1} (x - \mu)
Args:
df: The input DataFrame
Returns:
Series: The squared Mahalanobis Distance for each row
Notes:
This implimentation is based on the `R function`_ for the same mahalanobis calculation
.. _R function:
https://stat.ethz.ch/R-manual/R-devel/library/stats/html/mahalanobis.html
"""
mean = df.mean()
S_1 = np.linalg.inv(df.cov())
def fun(row):
A = np.dot((row.T - mean), S_1)
return np.dot(A, (row-mean))
return df.apply(fun, axis=1)
|
def function[maha_dist, parameter[df]]:
constant[Compute the squared Mahalanobis Distance for each row in the dataframe
Given a list of rows `x`, each with `p` elements, a vector :math:\mu of the
row means of length `p`, and the :math:pxp covarence matrix of the columns :math:\Sigma,
The returned value for each row is:
.. math::
D^{2} = (x - \mu)^{T} \Sigma^{-1} (x - \mu)
Args:
df: The input DataFrame
Returns:
Series: The squared Mahalanobis Distance for each row
Notes:
This implimentation is based on the `R function`_ for the same mahalanobis calculation
.. _R function:
https://stat.ethz.ch/R-manual/R-devel/library/stats/html/mahalanobis.html
]
variable[mean] assign[=] call[name[df].mean, parameter[]]
variable[S_1] assign[=] call[name[np].linalg.inv, parameter[call[name[df].cov, parameter[]]]]
def function[fun, parameter[row]]:
variable[A] assign[=] call[name[np].dot, parameter[binary_operation[name[row].T - name[mean]], name[S_1]]]
return[call[name[np].dot, parameter[name[A], binary_operation[name[row] - name[mean]]]]]
return[call[name[df].apply, parameter[name[fun]]]]
|
keyword[def] identifier[maha_dist] ( identifier[df] ):
literal[string]
identifier[mean] = identifier[df] . identifier[mean] ()
identifier[S_1] = identifier[np] . identifier[linalg] . identifier[inv] ( identifier[df] . identifier[cov] ())
keyword[def] identifier[fun] ( identifier[row] ):
identifier[A] = identifier[np] . identifier[dot] (( identifier[row] . identifier[T] - identifier[mean] ), identifier[S_1] )
keyword[return] identifier[np] . identifier[dot] ( identifier[A] ,( identifier[row] - identifier[mean] ))
keyword[return] identifier[df] . identifier[apply] ( identifier[fun] , identifier[axis] = literal[int] )
|
def maha_dist(df):
"""Compute the squared Mahalanobis Distance for each row in the dataframe
Given a list of rows `x`, each with `p` elements, a vector :math:\\mu of the
row means of length `p`, and the :math:pxp covarence matrix of the columns :math:\\Sigma,
The returned value for each row is:
.. math::
D^{2} = (x - \\mu)^{T} \\Sigma^{-1} (x - \\mu)
Args:
df: The input DataFrame
Returns:
Series: The squared Mahalanobis Distance for each row
Notes:
This implimentation is based on the `R function`_ for the same mahalanobis calculation
.. _R function:
https://stat.ethz.ch/R-manual/R-devel/library/stats/html/mahalanobis.html
"""
mean = df.mean()
S_1 = np.linalg.inv(df.cov())
def fun(row):
A = np.dot(row.T - mean, S_1)
return np.dot(A, row - mean)
return df.apply(fun, axis=1)
|
def get_observatory_status(self, observatory_id, status_time=None):
"""
Get details of the specified camera's status
:param string observatory_id:
a observatory ID, as returned by list_observatories()
:param float status_time:
optional, if specified attempts to get the status for the given camera at a particular point in time
specified as a datetime instance. This is useful if you want to retrieve the status of the camera at the
time a given event or file was produced. If this is None or not specified the time is 'now'.
:return:
a dictionary, or None if there was either no observatory found.
"""
if status_time is None:
response = requests.get(
self.base_url + '/obstory/{0}/statusdict'.format(observatory_id))
else:
response = requests.get(
self.base_url + '/obstory/{0}/statusdict/{1}'.format(observatory_id, str(status_time)))
if response.status_code == 200:
d = safe_load(response.text)
if 'status' in d:
return d['status']
return None
|
def function[get_observatory_status, parameter[self, observatory_id, status_time]]:
constant[
Get details of the specified camera's status
:param string observatory_id:
a observatory ID, as returned by list_observatories()
:param float status_time:
optional, if specified attempts to get the status for the given camera at a particular point in time
specified as a datetime instance. This is useful if you want to retrieve the status of the camera at the
time a given event or file was produced. If this is None or not specified the time is 'now'.
:return:
a dictionary, or None if there was either no observatory found.
]
if compare[name[status_time] is constant[None]] begin[:]
variable[response] assign[=] call[name[requests].get, parameter[binary_operation[name[self].base_url + call[constant[/obstory/{0}/statusdict].format, parameter[name[observatory_id]]]]]]
if compare[name[response].status_code equal[==] constant[200]] begin[:]
variable[d] assign[=] call[name[safe_load], parameter[name[response].text]]
if compare[constant[status] in name[d]] begin[:]
return[call[name[d]][constant[status]]]
return[constant[None]]
|
keyword[def] identifier[get_observatory_status] ( identifier[self] , identifier[observatory_id] , identifier[status_time] = keyword[None] ):
literal[string]
keyword[if] identifier[status_time] keyword[is] keyword[None] :
identifier[response] = identifier[requests] . identifier[get] (
identifier[self] . identifier[base_url] + literal[string] . identifier[format] ( identifier[observatory_id] ))
keyword[else] :
identifier[response] = identifier[requests] . identifier[get] (
identifier[self] . identifier[base_url] + literal[string] . identifier[format] ( identifier[observatory_id] , identifier[str] ( identifier[status_time] )))
keyword[if] identifier[response] . identifier[status_code] == literal[int] :
identifier[d] = identifier[safe_load] ( identifier[response] . identifier[text] )
keyword[if] literal[string] keyword[in] identifier[d] :
keyword[return] identifier[d] [ literal[string] ]
keyword[return] keyword[None]
|
def get_observatory_status(self, observatory_id, status_time=None):
"""
Get details of the specified camera's status
:param string observatory_id:
a observatory ID, as returned by list_observatories()
:param float status_time:
optional, if specified attempts to get the status for the given camera at a particular point in time
specified as a datetime instance. This is useful if you want to retrieve the status of the camera at the
time a given event or file was produced. If this is None or not specified the time is 'now'.
:return:
a dictionary, or None if there was either no observatory found.
"""
if status_time is None:
response = requests.get(self.base_url + '/obstory/{0}/statusdict'.format(observatory_id)) # depends on [control=['if'], data=[]]
else:
response = requests.get(self.base_url + '/obstory/{0}/statusdict/{1}'.format(observatory_id, str(status_time)))
if response.status_code == 200:
d = safe_load(response.text)
if 'status' in d:
return d['status'] # depends on [control=['if'], data=['d']] # depends on [control=['if'], data=[]]
return None
|
def wkt_to_rectangle(extent):
"""Compute the rectangle from a WKT string.
It returns None if the extent is not valid WKT rectangle.
:param extent: The extent.
:type extent: basestring
:return: The rectangle or None if it is not a valid WKT rectangle.
:rtype: QgsRectangle
"""
geometry = QgsGeometry.fromWkt(extent)
if not geometry.isGeosValid():
return None
polygon = geometry.asPolygon()[0]
if len(polygon) != 5:
return None
if polygon[0] != polygon[4]:
return None
rectangle = QgsRectangle(
QgsPointXY(polygon[0].x(), polygon[0].y()),
QgsPointXY(polygon[2].x(), polygon[2].y()))
return rectangle
|
def function[wkt_to_rectangle, parameter[extent]]:
constant[Compute the rectangle from a WKT string.
It returns None if the extent is not valid WKT rectangle.
:param extent: The extent.
:type extent: basestring
:return: The rectangle or None if it is not a valid WKT rectangle.
:rtype: QgsRectangle
]
variable[geometry] assign[=] call[name[QgsGeometry].fromWkt, parameter[name[extent]]]
if <ast.UnaryOp object at 0x7da20c991060> begin[:]
return[constant[None]]
variable[polygon] assign[=] call[call[name[geometry].asPolygon, parameter[]]][constant[0]]
if compare[call[name[len], parameter[name[polygon]]] not_equal[!=] constant[5]] begin[:]
return[constant[None]]
if compare[call[name[polygon]][constant[0]] not_equal[!=] call[name[polygon]][constant[4]]] begin[:]
return[constant[None]]
variable[rectangle] assign[=] call[name[QgsRectangle], parameter[call[name[QgsPointXY], parameter[call[call[name[polygon]][constant[0]].x, parameter[]], call[call[name[polygon]][constant[0]].y, parameter[]]]], call[name[QgsPointXY], parameter[call[call[name[polygon]][constant[2]].x, parameter[]], call[call[name[polygon]][constant[2]].y, parameter[]]]]]]
return[name[rectangle]]
|
keyword[def] identifier[wkt_to_rectangle] ( identifier[extent] ):
literal[string]
identifier[geometry] = identifier[QgsGeometry] . identifier[fromWkt] ( identifier[extent] )
keyword[if] keyword[not] identifier[geometry] . identifier[isGeosValid] ():
keyword[return] keyword[None]
identifier[polygon] = identifier[geometry] . identifier[asPolygon] ()[ literal[int] ]
keyword[if] identifier[len] ( identifier[polygon] )!= literal[int] :
keyword[return] keyword[None]
keyword[if] identifier[polygon] [ literal[int] ]!= identifier[polygon] [ literal[int] ]:
keyword[return] keyword[None]
identifier[rectangle] = identifier[QgsRectangle] (
identifier[QgsPointXY] ( identifier[polygon] [ literal[int] ]. identifier[x] (), identifier[polygon] [ literal[int] ]. identifier[y] ()),
identifier[QgsPointXY] ( identifier[polygon] [ literal[int] ]. identifier[x] (), identifier[polygon] [ literal[int] ]. identifier[y] ()))
keyword[return] identifier[rectangle]
|
def wkt_to_rectangle(extent):
"""Compute the rectangle from a WKT string.
It returns None if the extent is not valid WKT rectangle.
:param extent: The extent.
:type extent: basestring
:return: The rectangle or None if it is not a valid WKT rectangle.
:rtype: QgsRectangle
"""
geometry = QgsGeometry.fromWkt(extent)
if not geometry.isGeosValid():
return None # depends on [control=['if'], data=[]]
polygon = geometry.asPolygon()[0]
if len(polygon) != 5:
return None # depends on [control=['if'], data=[]]
if polygon[0] != polygon[4]:
return None # depends on [control=['if'], data=[]]
rectangle = QgsRectangle(QgsPointXY(polygon[0].x(), polygon[0].y()), QgsPointXY(polygon[2].x(), polygon[2].y()))
return rectangle
|
def find_parent_outputs(provider: Provider, utxo: TxIn) -> TxOut:
'''due to design of the btcpy library, TxIn object must be converted to TxOut object before signing'''
network_params = net_query(provider.network)
index = utxo.txout # utxo index
return TxOut.from_json(provider.getrawtransaction(utxo.txid,
1)['vout'][index],
network=network_params)
|
def function[find_parent_outputs, parameter[provider, utxo]]:
constant[due to design of the btcpy library, TxIn object must be converted to TxOut object before signing]
variable[network_params] assign[=] call[name[net_query], parameter[name[provider].network]]
variable[index] assign[=] name[utxo].txout
return[call[name[TxOut].from_json, parameter[call[call[call[name[provider].getrawtransaction, parameter[name[utxo].txid, constant[1]]]][constant[vout]]][name[index]]]]]
|
keyword[def] identifier[find_parent_outputs] ( identifier[provider] : identifier[Provider] , identifier[utxo] : identifier[TxIn] )-> identifier[TxOut] :
literal[string]
identifier[network_params] = identifier[net_query] ( identifier[provider] . identifier[network] )
identifier[index] = identifier[utxo] . identifier[txout]
keyword[return] identifier[TxOut] . identifier[from_json] ( identifier[provider] . identifier[getrawtransaction] ( identifier[utxo] . identifier[txid] ,
literal[int] )[ literal[string] ][ identifier[index] ],
identifier[network] = identifier[network_params] )
|
def find_parent_outputs(provider: Provider, utxo: TxIn) -> TxOut:
"""due to design of the btcpy library, TxIn object must be converted to TxOut object before signing"""
network_params = net_query(provider.network)
index = utxo.txout # utxo index
return TxOut.from_json(provider.getrawtransaction(utxo.txid, 1)['vout'][index], network=network_params)
|
def round_sig_error2(x, ex1, ex2, n):
'''Find min(ex1,ex2) rounded to n sig-figs and make the floating point x
and max(ex,ex2) match the number of decimals.'''
minerr = min(ex1,ex2)
minstex = round_sig(minerr,n)
if minstex.find('.') < 0:
extra_zeros = len(minstex) - n
sigfigs = len(str(int(x))) - extra_zeros
stx = round_sig(x,sigfigs)
maxstex = round_sig(max(ex1,ex2),sigfigs)
else:
num_after_dec = len(string.split(minstex,'.')[1])
stx = ("%%.%df" % num_after_dec) % (x)
maxstex = ("%%.%df" % num_after_dec) % (max(ex1,ex2))
if ex1 < ex2:
return stx,minstex,maxstex
else:
return stx,maxstex,minstex
|
def function[round_sig_error2, parameter[x, ex1, ex2, n]]:
constant[Find min(ex1,ex2) rounded to n sig-figs and make the floating point x
and max(ex,ex2) match the number of decimals.]
variable[minerr] assign[=] call[name[min], parameter[name[ex1], name[ex2]]]
variable[minstex] assign[=] call[name[round_sig], parameter[name[minerr], name[n]]]
if compare[call[name[minstex].find, parameter[constant[.]]] less[<] constant[0]] begin[:]
variable[extra_zeros] assign[=] binary_operation[call[name[len], parameter[name[minstex]]] - name[n]]
variable[sigfigs] assign[=] binary_operation[call[name[len], parameter[call[name[str], parameter[call[name[int], parameter[name[x]]]]]]] - name[extra_zeros]]
variable[stx] assign[=] call[name[round_sig], parameter[name[x], name[sigfigs]]]
variable[maxstex] assign[=] call[name[round_sig], parameter[call[name[max], parameter[name[ex1], name[ex2]]], name[sigfigs]]]
if compare[name[ex1] less[<] name[ex2]] begin[:]
return[tuple[[<ast.Name object at 0x7da1b10a43a0>, <ast.Name object at 0x7da1b10a63b0>, <ast.Name object at 0x7da1b10a76d0>]]]
|
keyword[def] identifier[round_sig_error2] ( identifier[x] , identifier[ex1] , identifier[ex2] , identifier[n] ):
literal[string]
identifier[minerr] = identifier[min] ( identifier[ex1] , identifier[ex2] )
identifier[minstex] = identifier[round_sig] ( identifier[minerr] , identifier[n] )
keyword[if] identifier[minstex] . identifier[find] ( literal[string] )< literal[int] :
identifier[extra_zeros] = identifier[len] ( identifier[minstex] )- identifier[n]
identifier[sigfigs] = identifier[len] ( identifier[str] ( identifier[int] ( identifier[x] )))- identifier[extra_zeros]
identifier[stx] = identifier[round_sig] ( identifier[x] , identifier[sigfigs] )
identifier[maxstex] = identifier[round_sig] ( identifier[max] ( identifier[ex1] , identifier[ex2] ), identifier[sigfigs] )
keyword[else] :
identifier[num_after_dec] = identifier[len] ( identifier[string] . identifier[split] ( identifier[minstex] , literal[string] )[ literal[int] ])
identifier[stx] =( literal[string] % identifier[num_after_dec] )%( identifier[x] )
identifier[maxstex] =( literal[string] % identifier[num_after_dec] )%( identifier[max] ( identifier[ex1] , identifier[ex2] ))
keyword[if] identifier[ex1] < identifier[ex2] :
keyword[return] identifier[stx] , identifier[minstex] , identifier[maxstex]
keyword[else] :
keyword[return] identifier[stx] , identifier[maxstex] , identifier[minstex]
|
def round_sig_error2(x, ex1, ex2, n):
"""Find min(ex1,ex2) rounded to n sig-figs and make the floating point x
and max(ex,ex2) match the number of decimals."""
minerr = min(ex1, ex2)
minstex = round_sig(minerr, n)
if minstex.find('.') < 0:
extra_zeros = len(minstex) - n
sigfigs = len(str(int(x))) - extra_zeros
stx = round_sig(x, sigfigs)
maxstex = round_sig(max(ex1, ex2), sigfigs) # depends on [control=['if'], data=[]]
else:
num_after_dec = len(string.split(minstex, '.')[1])
stx = '%%.%df' % num_after_dec % x
maxstex = '%%.%df' % num_after_dec % max(ex1, ex2)
if ex1 < ex2:
return (stx, minstex, maxstex) # depends on [control=['if'], data=[]]
else:
return (stx, maxstex, minstex)
|
def consume(self, callback, *, no_local=False, no_ack=False, exclusive=False, arguments=None):
"""
Start a consumer on the queue. Messages will be delivered asynchronously to the consumer.
The callback function will be called whenever a new message arrives on the queue.
Advanced usage: the callback object must be callable
(it must be a function or define a ``__call__`` method),
but may also define some further methods:
* ``callback.on_cancel()``: called with no parameters when the consumer is successfully cancelled.
* ``callback.on_error(exc)``: called when the channel is closed due to an error.
The argument passed is the exception which caused the error.
This method is a :ref:`coroutine <coroutine>`.
:param callable callback: a callback to be called when a message is delivered.
The callback must accept a single argument (an instance of :class:`~asynqp.message.IncomingMessage`).
:keyword bool no_local: If true, the server will not deliver messages that were
published by this connection.
:keyword bool no_ack: If true, messages delivered to the consumer don't require acknowledgement.
:keyword bool exclusive: If true, only this consumer can access the queue.
:keyword dict arguments: Table of optional parameters for extensions to the AMQP protocol. See :ref:`extensions`.
:return: The newly created :class:`Consumer` object.
"""
if self.deleted:
raise Deleted("Queue {} was deleted".format(self.name))
self.sender.send_BasicConsume(self.name, no_local, no_ack, exclusive, arguments or {})
tag = yield from self.synchroniser.wait(spec.BasicConsumeOK)
consumer = Consumer(
tag, callback, self.sender, self.synchroniser, self.reader,
loop=self._loop)
self.consumers.add_consumer(consumer)
self.reader.ready()
return consumer
|
def function[consume, parameter[self, callback]]:
constant[
Start a consumer on the queue. Messages will be delivered asynchronously to the consumer.
The callback function will be called whenever a new message arrives on the queue.
Advanced usage: the callback object must be callable
(it must be a function or define a ``__call__`` method),
but may also define some further methods:
* ``callback.on_cancel()``: called with no parameters when the consumer is successfully cancelled.
* ``callback.on_error(exc)``: called when the channel is closed due to an error.
The argument passed is the exception which caused the error.
This method is a :ref:`coroutine <coroutine>`.
:param callable callback: a callback to be called when a message is delivered.
The callback must accept a single argument (an instance of :class:`~asynqp.message.IncomingMessage`).
:keyword bool no_local: If true, the server will not deliver messages that were
published by this connection.
:keyword bool no_ack: If true, messages delivered to the consumer don't require acknowledgement.
:keyword bool exclusive: If true, only this consumer can access the queue.
:keyword dict arguments: Table of optional parameters for extensions to the AMQP protocol. See :ref:`extensions`.
:return: The newly created :class:`Consumer` object.
]
if name[self].deleted begin[:]
<ast.Raise object at 0x7da1b2345000>
call[name[self].sender.send_BasicConsume, parameter[name[self].name, name[no_local], name[no_ack], name[exclusive], <ast.BoolOp object at 0x7da20cabe590>]]
variable[tag] assign[=] <ast.YieldFrom object at 0x7da20cabd690>
variable[consumer] assign[=] call[name[Consumer], parameter[name[tag], name[callback], name[self].sender, name[self].synchroniser, name[self].reader]]
call[name[self].consumers.add_consumer, parameter[name[consumer]]]
call[name[self].reader.ready, parameter[]]
return[name[consumer]]
|
keyword[def] identifier[consume] ( identifier[self] , identifier[callback] ,*, identifier[no_local] = keyword[False] , identifier[no_ack] = keyword[False] , identifier[exclusive] = keyword[False] , identifier[arguments] = keyword[None] ):
literal[string]
keyword[if] identifier[self] . identifier[deleted] :
keyword[raise] identifier[Deleted] ( literal[string] . identifier[format] ( identifier[self] . identifier[name] ))
identifier[self] . identifier[sender] . identifier[send_BasicConsume] ( identifier[self] . identifier[name] , identifier[no_local] , identifier[no_ack] , identifier[exclusive] , identifier[arguments] keyword[or] {})
identifier[tag] = keyword[yield] keyword[from] identifier[self] . identifier[synchroniser] . identifier[wait] ( identifier[spec] . identifier[BasicConsumeOK] )
identifier[consumer] = identifier[Consumer] (
identifier[tag] , identifier[callback] , identifier[self] . identifier[sender] , identifier[self] . identifier[synchroniser] , identifier[self] . identifier[reader] ,
identifier[loop] = identifier[self] . identifier[_loop] )
identifier[self] . identifier[consumers] . identifier[add_consumer] ( identifier[consumer] )
identifier[self] . identifier[reader] . identifier[ready] ()
keyword[return] identifier[consumer]
|
def consume(self, callback, *, no_local=False, no_ack=False, exclusive=False, arguments=None):
"""
Start a consumer on the queue. Messages will be delivered asynchronously to the consumer.
The callback function will be called whenever a new message arrives on the queue.
Advanced usage: the callback object must be callable
(it must be a function or define a ``__call__`` method),
but may also define some further methods:
* ``callback.on_cancel()``: called with no parameters when the consumer is successfully cancelled.
* ``callback.on_error(exc)``: called when the channel is closed due to an error.
The argument passed is the exception which caused the error.
This method is a :ref:`coroutine <coroutine>`.
:param callable callback: a callback to be called when a message is delivered.
The callback must accept a single argument (an instance of :class:`~asynqp.message.IncomingMessage`).
:keyword bool no_local: If true, the server will not deliver messages that were
published by this connection.
:keyword bool no_ack: If true, messages delivered to the consumer don't require acknowledgement.
:keyword bool exclusive: If true, only this consumer can access the queue.
:keyword dict arguments: Table of optional parameters for extensions to the AMQP protocol. See :ref:`extensions`.
:return: The newly created :class:`Consumer` object.
"""
if self.deleted:
raise Deleted('Queue {} was deleted'.format(self.name)) # depends on [control=['if'], data=[]]
self.sender.send_BasicConsume(self.name, no_local, no_ack, exclusive, arguments or {})
tag = (yield from self.synchroniser.wait(spec.BasicConsumeOK))
consumer = Consumer(tag, callback, self.sender, self.synchroniser, self.reader, loop=self._loop)
self.consumers.add_consumer(consumer)
self.reader.ready()
return consumer
|
def timer(self, key, **dims):
"""Adds timer with dimensions to the registry"""
return super(RegexRegistry, self).timer(self._get_key(key), **dims)
|
def function[timer, parameter[self, key]]:
constant[Adds timer with dimensions to the registry]
return[call[call[name[super], parameter[name[RegexRegistry], name[self]]].timer, parameter[call[name[self]._get_key, parameter[name[key]]]]]]
|
keyword[def] identifier[timer] ( identifier[self] , identifier[key] ,** identifier[dims] ):
literal[string]
keyword[return] identifier[super] ( identifier[RegexRegistry] , identifier[self] ). identifier[timer] ( identifier[self] . identifier[_get_key] ( identifier[key] ),** identifier[dims] )
|
def timer(self, key, **dims):
"""Adds timer with dimensions to the registry"""
return super(RegexRegistry, self).timer(self._get_key(key), **dims)
|
def cosine(vec1, vec2):
"""Compare vectors. Borrowed from A. Parish."""
if norm(vec1) > 0 and norm(vec2) > 0:
return dot(vec1, vec2) / (norm(vec1) * norm(vec2))
else:
return 0.0
|
def function[cosine, parameter[vec1, vec2]]:
constant[Compare vectors. Borrowed from A. Parish.]
if <ast.BoolOp object at 0x7da20c6aaf50> begin[:]
return[binary_operation[call[name[dot], parameter[name[vec1], name[vec2]]] / binary_operation[call[name[norm], parameter[name[vec1]]] * call[name[norm], parameter[name[vec2]]]]]]
|
keyword[def] identifier[cosine] ( identifier[vec1] , identifier[vec2] ):
literal[string]
keyword[if] identifier[norm] ( identifier[vec1] )> literal[int] keyword[and] identifier[norm] ( identifier[vec2] )> literal[int] :
keyword[return] identifier[dot] ( identifier[vec1] , identifier[vec2] )/( identifier[norm] ( identifier[vec1] )* identifier[norm] ( identifier[vec2] ))
keyword[else] :
keyword[return] literal[int]
|
def cosine(vec1, vec2):
"""Compare vectors. Borrowed from A. Parish."""
if norm(vec1) > 0 and norm(vec2) > 0:
return dot(vec1, vec2) / (norm(vec1) * norm(vec2)) # depends on [control=['if'], data=[]]
else:
return 0.0
|
def _find_script(script_name):
""" Find the script.
If the input is not a file, then $PATH will be searched.
"""
if os.path.isfile(script_name):
return script_name
path = os.getenv('PATH', os.defpath).split(os.pathsep)
for folder in path:
if not folder:
continue
fn = os.path.join(folder, script_name)
if os.path.isfile(fn):
return fn
sys.stderr.write('Could not find script {0}\n'.format(script_name))
raise SystemExit(1)
|
def function[_find_script, parameter[script_name]]:
constant[ Find the script.
If the input is not a file, then $PATH will be searched.
]
if call[name[os].path.isfile, parameter[name[script_name]]] begin[:]
return[name[script_name]]
variable[path] assign[=] call[call[name[os].getenv, parameter[constant[PATH], name[os].defpath]].split, parameter[name[os].pathsep]]
for taget[name[folder]] in starred[name[path]] begin[:]
if <ast.UnaryOp object at 0x7da18f723580> begin[:]
continue
variable[fn] assign[=] call[name[os].path.join, parameter[name[folder], name[script_name]]]
if call[name[os].path.isfile, parameter[name[fn]]] begin[:]
return[name[fn]]
call[name[sys].stderr.write, parameter[call[constant[Could not find script {0}
].format, parameter[name[script_name]]]]]
<ast.Raise object at 0x7da18f7221a0>
|
keyword[def] identifier[_find_script] ( identifier[script_name] ):
literal[string]
keyword[if] identifier[os] . identifier[path] . identifier[isfile] ( identifier[script_name] ):
keyword[return] identifier[script_name]
identifier[path] = identifier[os] . identifier[getenv] ( literal[string] , identifier[os] . identifier[defpath] ). identifier[split] ( identifier[os] . identifier[pathsep] )
keyword[for] identifier[folder] keyword[in] identifier[path] :
keyword[if] keyword[not] identifier[folder] :
keyword[continue]
identifier[fn] = identifier[os] . identifier[path] . identifier[join] ( identifier[folder] , identifier[script_name] )
keyword[if] identifier[os] . identifier[path] . identifier[isfile] ( identifier[fn] ):
keyword[return] identifier[fn]
identifier[sys] . identifier[stderr] . identifier[write] ( literal[string] . identifier[format] ( identifier[script_name] ))
keyword[raise] identifier[SystemExit] ( literal[int] )
|
def _find_script(script_name):
""" Find the script.
If the input is not a file, then $PATH will be searched.
"""
if os.path.isfile(script_name):
return script_name # depends on [control=['if'], data=[]]
path = os.getenv('PATH', os.defpath).split(os.pathsep)
for folder in path:
if not folder:
continue # depends on [control=['if'], data=[]]
fn = os.path.join(folder, script_name)
if os.path.isfile(fn):
return fn # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['folder']]
sys.stderr.write('Could not find script {0}\n'.format(script_name))
raise SystemExit(1)
|
def dispatch(self, event):
"""Given an event, send it to all the subscribers.
Args
event (:class:`~bigchaindb.events.EventTypes`): the event to
dispatch to all the subscribers.
"""
for event_types, queues in self.queues.items():
if event.type & event_types:
for queue in queues:
queue.put(event)
|
def function[dispatch, parameter[self, event]]:
constant[Given an event, send it to all the subscribers.
Args
event (:class:`~bigchaindb.events.EventTypes`): the event to
dispatch to all the subscribers.
]
for taget[tuple[[<ast.Name object at 0x7da1b1b60bb0>, <ast.Name object at 0x7da1b1b63ac0>]]] in starred[call[name[self].queues.items, parameter[]]] begin[:]
if binary_operation[name[event].type <ast.BitAnd object at 0x7da2590d6b60> name[event_types]] begin[:]
for taget[name[queue]] in starred[name[queues]] begin[:]
call[name[queue].put, parameter[name[event]]]
|
keyword[def] identifier[dispatch] ( identifier[self] , identifier[event] ):
literal[string]
keyword[for] identifier[event_types] , identifier[queues] keyword[in] identifier[self] . identifier[queues] . identifier[items] ():
keyword[if] identifier[event] . identifier[type] & identifier[event_types] :
keyword[for] identifier[queue] keyword[in] identifier[queues] :
identifier[queue] . identifier[put] ( identifier[event] )
|
def dispatch(self, event):
"""Given an event, send it to all the subscribers.
Args
event (:class:`~bigchaindb.events.EventTypes`): the event to
dispatch to all the subscribers.
"""
for (event_types, queues) in self.queues.items():
if event.type & event_types:
for queue in queues:
queue.put(event) # depends on [control=['for'], data=['queue']] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]]
|
def _next_sample_index(self):
"""ShuffledMux chooses its next sample stream randomly,
conditioned on the stream weights.
"""
return self.rng.choice(self.n_streams,
p=(self.stream_weights_ /
self.weight_norm_))
|
def function[_next_sample_index, parameter[self]]:
constant[ShuffledMux chooses its next sample stream randomly,
conditioned on the stream weights.
]
return[call[name[self].rng.choice, parameter[name[self].n_streams]]]
|
keyword[def] identifier[_next_sample_index] ( identifier[self] ):
literal[string]
keyword[return] identifier[self] . identifier[rng] . identifier[choice] ( identifier[self] . identifier[n_streams] ,
identifier[p] =( identifier[self] . identifier[stream_weights_] /
identifier[self] . identifier[weight_norm_] ))
|
def _next_sample_index(self):
"""ShuffledMux chooses its next sample stream randomly,
conditioned on the stream weights.
"""
return self.rng.choice(self.n_streams, p=self.stream_weights_ / self.weight_norm_)
|
def sendmail(self, to, message):
"""Send mail to one or more recipients. The required arguments are a
list of RFC 822 to-address strings (a bare string will be treated as a
list with 1 address), and a message string.
"""
# If we were passed a bare string as the To: address, convert it to
# a single element list.
if isinstance(to, str):
to = [ to, ]
# Send one email with the appropriate recipient list.
server = self._smtp_server()
server.sendmail(self.get_rfc2822_address(), to, message)
server.quit()
|
def function[sendmail, parameter[self, to, message]]:
constant[Send mail to one or more recipients. The required arguments are a
list of RFC 822 to-address strings (a bare string will be treated as a
list with 1 address), and a message string.
]
if call[name[isinstance], parameter[name[to], name[str]]] begin[:]
variable[to] assign[=] list[[<ast.Name object at 0x7da1b1416440>]]
variable[server] assign[=] call[name[self]._smtp_server, parameter[]]
call[name[server].sendmail, parameter[call[name[self].get_rfc2822_address, parameter[]], name[to], name[message]]]
call[name[server].quit, parameter[]]
|
keyword[def] identifier[sendmail] ( identifier[self] , identifier[to] , identifier[message] ):
literal[string]
keyword[if] identifier[isinstance] ( identifier[to] , identifier[str] ):
identifier[to] =[ identifier[to] ,]
identifier[server] = identifier[self] . identifier[_smtp_server] ()
identifier[server] . identifier[sendmail] ( identifier[self] . identifier[get_rfc2822_address] (), identifier[to] , identifier[message] )
identifier[server] . identifier[quit] ()
|
def sendmail(self, to, message):
"""Send mail to one or more recipients. The required arguments are a
list of RFC 822 to-address strings (a bare string will be treated as a
list with 1 address), and a message string.
"""
# If we were passed a bare string as the To: address, convert it to
# a single element list.
if isinstance(to, str):
to = [to] # depends on [control=['if'], data=[]]
# Send one email with the appropriate recipient list.
server = self._smtp_server()
server.sendmail(self.get_rfc2822_address(), to, message)
server.quit()
|
def image_create(comptparms, clrspc):
"""Creates a new image structure.
Wraps the openjp2 library function opj_image_create.
Parameters
----------
cmptparms : comptparms_t
The component parameters.
clrspc : int
Specifies the color space.
Returns
-------
image : ImageType
Reference to ImageType instance.
"""
OPENJP2.opj_image_create.argtypes = [ctypes.c_uint32,
ctypes.POINTER(ImageComptParmType),
COLOR_SPACE_TYPE]
OPENJP2.opj_image_create.restype = ctypes.POINTER(ImageType)
image = OPENJP2.opj_image_create(len(comptparms),
comptparms,
clrspc)
return image
|
def function[image_create, parameter[comptparms, clrspc]]:
constant[Creates a new image structure.
Wraps the openjp2 library function opj_image_create.
Parameters
----------
cmptparms : comptparms_t
The component parameters.
clrspc : int
Specifies the color space.
Returns
-------
image : ImageType
Reference to ImageType instance.
]
name[OPENJP2].opj_image_create.argtypes assign[=] list[[<ast.Attribute object at 0x7da20c6c4a90>, <ast.Call object at 0x7da20c6c7910>, <ast.Name object at 0x7da20c6c4f40>]]
name[OPENJP2].opj_image_create.restype assign[=] call[name[ctypes].POINTER, parameter[name[ImageType]]]
variable[image] assign[=] call[name[OPENJP2].opj_image_create, parameter[call[name[len], parameter[name[comptparms]]], name[comptparms], name[clrspc]]]
return[name[image]]
|
keyword[def] identifier[image_create] ( identifier[comptparms] , identifier[clrspc] ):
literal[string]
identifier[OPENJP2] . identifier[opj_image_create] . identifier[argtypes] =[ identifier[ctypes] . identifier[c_uint32] ,
identifier[ctypes] . identifier[POINTER] ( identifier[ImageComptParmType] ),
identifier[COLOR_SPACE_TYPE] ]
identifier[OPENJP2] . identifier[opj_image_create] . identifier[restype] = identifier[ctypes] . identifier[POINTER] ( identifier[ImageType] )
identifier[image] = identifier[OPENJP2] . identifier[opj_image_create] ( identifier[len] ( identifier[comptparms] ),
identifier[comptparms] ,
identifier[clrspc] )
keyword[return] identifier[image]
|
def image_create(comptparms, clrspc):
"""Creates a new image structure.
Wraps the openjp2 library function opj_image_create.
Parameters
----------
cmptparms : comptparms_t
The component parameters.
clrspc : int
Specifies the color space.
Returns
-------
image : ImageType
Reference to ImageType instance.
"""
OPENJP2.opj_image_create.argtypes = [ctypes.c_uint32, ctypes.POINTER(ImageComptParmType), COLOR_SPACE_TYPE]
OPENJP2.opj_image_create.restype = ctypes.POINTER(ImageType)
image = OPENJP2.opj_image_create(len(comptparms), comptparms, clrspc)
return image
|
def hosts2map (hosts):
"""
Return a set of named hosts, and a list of subnets (host/netmask
adresses).
Only IPv4 host/netmasks are supported.
"""
hostset = set()
nets = []
for host in hosts:
if _host_cidrmask_re.match(host):
host, mask = host.split("/")
mask = int(mask)
if not is_valid_cidrmask(mask):
log.error(LOG_CHECK,
"CIDR mask %d is not a valid network mask", mask)
continue
if not is_valid_ipv4(host):
log.error(LOG_CHECK, "host %r is not a valid ip address", host)
continue
nets.append(dq2net(host, cidr2mask(mask)))
elif _host_netmask_re.match(host):
host, mask = host.split("/")
if not is_valid_ipv4(host):
log.error(LOG_CHECK, "host %r is not a valid ip address", host)
continue
if not is_valid_ipv4(mask):
log.error(LOG_CHECK,
"mask %r is not a valid ip network mask", mask)
continue
nets.append(dq2net(host, netmask2mask(mask)))
elif is_valid_ip(host):
hostset.add(expand_ip(host)[0])
else:
hostset |= set(resolve_host(host))
return (hostset, nets)
|
def function[hosts2map, parameter[hosts]]:
constant[
Return a set of named hosts, and a list of subnets (host/netmask
adresses).
Only IPv4 host/netmasks are supported.
]
variable[hostset] assign[=] call[name[set], parameter[]]
variable[nets] assign[=] list[[]]
for taget[name[host]] in starred[name[hosts]] begin[:]
if call[name[_host_cidrmask_re].match, parameter[name[host]]] begin[:]
<ast.Tuple object at 0x7da20e962bc0> assign[=] call[name[host].split, parameter[constant[/]]]
variable[mask] assign[=] call[name[int], parameter[name[mask]]]
if <ast.UnaryOp object at 0x7da20e9619c0> begin[:]
call[name[log].error, parameter[name[LOG_CHECK], constant[CIDR mask %d is not a valid network mask], name[mask]]]
continue
if <ast.UnaryOp object at 0x7da1b2347ca0> begin[:]
call[name[log].error, parameter[name[LOG_CHECK], constant[host %r is not a valid ip address], name[host]]]
continue
call[name[nets].append, parameter[call[name[dq2net], parameter[name[host], call[name[cidr2mask], parameter[name[mask]]]]]]]
return[tuple[[<ast.Name object at 0x7da1b2347d30>, <ast.Name object at 0x7da1b2347550>]]]
|
keyword[def] identifier[hosts2map] ( identifier[hosts] ):
literal[string]
identifier[hostset] = identifier[set] ()
identifier[nets] =[]
keyword[for] identifier[host] keyword[in] identifier[hosts] :
keyword[if] identifier[_host_cidrmask_re] . identifier[match] ( identifier[host] ):
identifier[host] , identifier[mask] = identifier[host] . identifier[split] ( literal[string] )
identifier[mask] = identifier[int] ( identifier[mask] )
keyword[if] keyword[not] identifier[is_valid_cidrmask] ( identifier[mask] ):
identifier[log] . identifier[error] ( identifier[LOG_CHECK] ,
literal[string] , identifier[mask] )
keyword[continue]
keyword[if] keyword[not] identifier[is_valid_ipv4] ( identifier[host] ):
identifier[log] . identifier[error] ( identifier[LOG_CHECK] , literal[string] , identifier[host] )
keyword[continue]
identifier[nets] . identifier[append] ( identifier[dq2net] ( identifier[host] , identifier[cidr2mask] ( identifier[mask] )))
keyword[elif] identifier[_host_netmask_re] . identifier[match] ( identifier[host] ):
identifier[host] , identifier[mask] = identifier[host] . identifier[split] ( literal[string] )
keyword[if] keyword[not] identifier[is_valid_ipv4] ( identifier[host] ):
identifier[log] . identifier[error] ( identifier[LOG_CHECK] , literal[string] , identifier[host] )
keyword[continue]
keyword[if] keyword[not] identifier[is_valid_ipv4] ( identifier[mask] ):
identifier[log] . identifier[error] ( identifier[LOG_CHECK] ,
literal[string] , identifier[mask] )
keyword[continue]
identifier[nets] . identifier[append] ( identifier[dq2net] ( identifier[host] , identifier[netmask2mask] ( identifier[mask] )))
keyword[elif] identifier[is_valid_ip] ( identifier[host] ):
identifier[hostset] . identifier[add] ( identifier[expand_ip] ( identifier[host] )[ literal[int] ])
keyword[else] :
identifier[hostset] |= identifier[set] ( identifier[resolve_host] ( identifier[host] ))
keyword[return] ( identifier[hostset] , identifier[nets] )
|
def hosts2map(hosts):
"""
Return a set of named hosts, and a list of subnets (host/netmask
adresses).
Only IPv4 host/netmasks are supported.
"""
hostset = set()
nets = []
for host in hosts:
if _host_cidrmask_re.match(host):
(host, mask) = host.split('/')
mask = int(mask)
if not is_valid_cidrmask(mask):
log.error(LOG_CHECK, 'CIDR mask %d is not a valid network mask', mask)
continue # depends on [control=['if'], data=[]]
if not is_valid_ipv4(host):
log.error(LOG_CHECK, 'host %r is not a valid ip address', host)
continue # depends on [control=['if'], data=[]]
nets.append(dq2net(host, cidr2mask(mask))) # depends on [control=['if'], data=[]]
elif _host_netmask_re.match(host):
(host, mask) = host.split('/')
if not is_valid_ipv4(host):
log.error(LOG_CHECK, 'host %r is not a valid ip address', host)
continue # depends on [control=['if'], data=[]]
if not is_valid_ipv4(mask):
log.error(LOG_CHECK, 'mask %r is not a valid ip network mask', mask)
continue # depends on [control=['if'], data=[]]
nets.append(dq2net(host, netmask2mask(mask))) # depends on [control=['if'], data=[]]
elif is_valid_ip(host):
hostset.add(expand_ip(host)[0]) # depends on [control=['if'], data=[]]
else:
hostset |= set(resolve_host(host)) # depends on [control=['for'], data=['host']]
return (hostset, nets)
|
def bc02(data):
"""
get APWP from Besse and Courtillot 2002 paper
Parameters
----------
Takes input as [plate, site_lat, site_lon, age]
plate : string (options: AF, ANT, AU, EU, GL, IN, NA, SA)
site_lat : float
site_lon : float
age : float in Myr
Returns
----------
"""
plate, site_lat, site_lon, age = data[0], data[1], data[2], data[3]
apwp = get_plate_data(plate)
recs = apwp.split()
#
# put it into usable form in plate_data
#
k, plate_data = 0, []
while k < len(recs) - 3:
rec = [float(recs[k]), float(recs[k + 1]), float(recs[k + 2])]
plate_data.append(rec)
k = k + 3
#
# find the right pole for the age
#
for i in range(len(plate_data)):
if age >= plate_data[i][0] and age <= plate_data[i + 1][0]:
if (age - plate_data[i][0]) < (plate_data[i][0] - age):
rec = i
else:
rec = i + 1
break
pole_lat = plate_data[rec][1]
pole_lon = plate_data[rec][2]
return pole_lat, pole_lon
|
def function[bc02, parameter[data]]:
constant[
get APWP from Besse and Courtillot 2002 paper
Parameters
----------
Takes input as [plate, site_lat, site_lon, age]
plate : string (options: AF, ANT, AU, EU, GL, IN, NA, SA)
site_lat : float
site_lon : float
age : float in Myr
Returns
----------
]
<ast.Tuple object at 0x7da18c4cc9d0> assign[=] tuple[[<ast.Subscript object at 0x7da18c4ce2f0>, <ast.Subscript object at 0x7da18c4cdcc0>, <ast.Subscript object at 0x7da18c4cdc60>, <ast.Subscript object at 0x7da1b04e4c40>]]
variable[apwp] assign[=] call[name[get_plate_data], parameter[name[plate]]]
variable[recs] assign[=] call[name[apwp].split, parameter[]]
<ast.Tuple object at 0x7da2044c2350> assign[=] tuple[[<ast.Constant object at 0x7da2044c39a0>, <ast.List object at 0x7da2044c20e0>]]
while compare[name[k] less[<] binary_operation[call[name[len], parameter[name[recs]]] - constant[3]]] begin[:]
variable[rec] assign[=] list[[<ast.Call object at 0x7da1b0566080>, <ast.Call object at 0x7da1b0564c40>, <ast.Call object at 0x7da1b0564910>]]
call[name[plate_data].append, parameter[name[rec]]]
variable[k] assign[=] binary_operation[name[k] + constant[3]]
for taget[name[i]] in starred[call[name[range], parameter[call[name[len], parameter[name[plate_data]]]]]] begin[:]
if <ast.BoolOp object at 0x7da1b05655a0> begin[:]
if compare[binary_operation[name[age] - call[call[name[plate_data]][name[i]]][constant[0]]] less[<] binary_operation[call[call[name[plate_data]][name[i]]][constant[0]] - name[age]]] begin[:]
variable[rec] assign[=] name[i]
break
variable[pole_lat] assign[=] call[call[name[plate_data]][name[rec]]][constant[1]]
variable[pole_lon] assign[=] call[call[name[plate_data]][name[rec]]][constant[2]]
return[tuple[[<ast.Name object at 0x7da1b0564610>, <ast.Name object at 0x7da1b05645e0>]]]
|
keyword[def] identifier[bc02] ( identifier[data] ):
literal[string]
identifier[plate] , identifier[site_lat] , identifier[site_lon] , identifier[age] = identifier[data] [ literal[int] ], identifier[data] [ literal[int] ], identifier[data] [ literal[int] ], identifier[data] [ literal[int] ]
identifier[apwp] = identifier[get_plate_data] ( identifier[plate] )
identifier[recs] = identifier[apwp] . identifier[split] ()
identifier[k] , identifier[plate_data] = literal[int] ,[]
keyword[while] identifier[k] < identifier[len] ( identifier[recs] )- literal[int] :
identifier[rec] =[ identifier[float] ( identifier[recs] [ identifier[k] ]), identifier[float] ( identifier[recs] [ identifier[k] + literal[int] ]), identifier[float] ( identifier[recs] [ identifier[k] + literal[int] ])]
identifier[plate_data] . identifier[append] ( identifier[rec] )
identifier[k] = identifier[k] + literal[int]
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[len] ( identifier[plate_data] )):
keyword[if] identifier[age] >= identifier[plate_data] [ identifier[i] ][ literal[int] ] keyword[and] identifier[age] <= identifier[plate_data] [ identifier[i] + literal[int] ][ literal[int] ]:
keyword[if] ( identifier[age] - identifier[plate_data] [ identifier[i] ][ literal[int] ])<( identifier[plate_data] [ identifier[i] ][ literal[int] ]- identifier[age] ):
identifier[rec] = identifier[i]
keyword[else] :
identifier[rec] = identifier[i] + literal[int]
keyword[break]
identifier[pole_lat] = identifier[plate_data] [ identifier[rec] ][ literal[int] ]
identifier[pole_lon] = identifier[plate_data] [ identifier[rec] ][ literal[int] ]
keyword[return] identifier[pole_lat] , identifier[pole_lon]
|
def bc02(data):
"""
get APWP from Besse and Courtillot 2002 paper
Parameters
----------
Takes input as [plate, site_lat, site_lon, age]
plate : string (options: AF, ANT, AU, EU, GL, IN, NA, SA)
site_lat : float
site_lon : float
age : float in Myr
Returns
----------
"""
(plate, site_lat, site_lon, age) = (data[0], data[1], data[2], data[3])
apwp = get_plate_data(plate)
recs = apwp.split()
#
# put it into usable form in plate_data
#
(k, plate_data) = (0, [])
while k < len(recs) - 3:
rec = [float(recs[k]), float(recs[k + 1]), float(recs[k + 2])]
plate_data.append(rec)
k = k + 3 # depends on [control=['while'], data=['k']]
#
# find the right pole for the age
#
for i in range(len(plate_data)):
if age >= plate_data[i][0] and age <= plate_data[i + 1][0]:
if age - plate_data[i][0] < plate_data[i][0] - age:
rec = i # depends on [control=['if'], data=[]]
else:
rec = i + 1
break # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['i']]
pole_lat = plate_data[rec][1]
pole_lon = plate_data[rec][2]
return (pole_lat, pole_lon)
|
def send_deferred(self, auth):
"""Send all deferred requests for a particular CIK/auth."""
if self.deferred.has_requests(auth):
method_arg_pairs = self.deferred.get_method_args_pairs(auth)
calls = self._composeCalls(method_arg_pairs)
# should this call be made with no timeout? (e.g. is there a
# wait())
notimeout = self.deferred.get_notimeout(auth)
try:
r = self._callJsonRPC(auth, calls, returnreq=True, notimeout=notimeout)
finally:
# remove deferred calls
self.deferred.reset(auth)
return r
raise JsonRPCRequestException('No deferred requests to send.')
|
def function[send_deferred, parameter[self, auth]]:
constant[Send all deferred requests for a particular CIK/auth.]
if call[name[self].deferred.has_requests, parameter[name[auth]]] begin[:]
variable[method_arg_pairs] assign[=] call[name[self].deferred.get_method_args_pairs, parameter[name[auth]]]
variable[calls] assign[=] call[name[self]._composeCalls, parameter[name[method_arg_pairs]]]
variable[notimeout] assign[=] call[name[self].deferred.get_notimeout, parameter[name[auth]]]
<ast.Try object at 0x7da18bcc9f60>
return[name[r]]
<ast.Raise object at 0x7da18bcca020>
|
keyword[def] identifier[send_deferred] ( identifier[self] , identifier[auth] ):
literal[string]
keyword[if] identifier[self] . identifier[deferred] . identifier[has_requests] ( identifier[auth] ):
identifier[method_arg_pairs] = identifier[self] . identifier[deferred] . identifier[get_method_args_pairs] ( identifier[auth] )
identifier[calls] = identifier[self] . identifier[_composeCalls] ( identifier[method_arg_pairs] )
identifier[notimeout] = identifier[self] . identifier[deferred] . identifier[get_notimeout] ( identifier[auth] )
keyword[try] :
identifier[r] = identifier[self] . identifier[_callJsonRPC] ( identifier[auth] , identifier[calls] , identifier[returnreq] = keyword[True] , identifier[notimeout] = identifier[notimeout] )
keyword[finally] :
identifier[self] . identifier[deferred] . identifier[reset] ( identifier[auth] )
keyword[return] identifier[r]
keyword[raise] identifier[JsonRPCRequestException] ( literal[string] )
|
def send_deferred(self, auth):
"""Send all deferred requests for a particular CIK/auth."""
if self.deferred.has_requests(auth):
method_arg_pairs = self.deferred.get_method_args_pairs(auth)
calls = self._composeCalls(method_arg_pairs)
# should this call be made with no timeout? (e.g. is there a
# wait())
notimeout = self.deferred.get_notimeout(auth)
try:
r = self._callJsonRPC(auth, calls, returnreq=True, notimeout=notimeout) # depends on [control=['try'], data=[]]
finally:
# remove deferred calls
self.deferred.reset(auth)
return r # depends on [control=['if'], data=[]]
raise JsonRPCRequestException('No deferred requests to send.')
|
def getRejectionReasons(self,dd):
"""
'd' is a dictionary with the stored data in the widget like:
{u'selected': [u'a', u'b'], u'checkbox': True, u'other': 'dsadas', u'checkbox_other': True}
Returns a string with the options both from selected and input items
"""
keys = dd.keys()
reasons = []
if not('checkbox' in keys ) or not(dd['checkbox']):
return 0
if 'selected' in keys:
reasons += dd['selected']
if 'other' in keys and dd['checkbox_other']:
reasons.append(dd['other'])
if len(reasons) < 1:
return "Yes, unknow"
return ', '.join(reasons)
|
def function[getRejectionReasons, parameter[self, dd]]:
constant[
'd' is a dictionary with the stored data in the widget like:
{u'selected': [u'a', u'b'], u'checkbox': True, u'other': 'dsadas', u'checkbox_other': True}
Returns a string with the options both from selected and input items
]
variable[keys] assign[=] call[name[dd].keys, parameter[]]
variable[reasons] assign[=] list[[]]
if <ast.BoolOp object at 0x7da1b231e1a0> begin[:]
return[constant[0]]
if compare[constant[selected] in name[keys]] begin[:]
<ast.AugAssign object at 0x7da1b231ebf0>
if <ast.BoolOp object at 0x7da1b231dbd0> begin[:]
call[name[reasons].append, parameter[call[name[dd]][constant[other]]]]
if compare[call[name[len], parameter[name[reasons]]] less[<] constant[1]] begin[:]
return[constant[Yes, unknow]]
return[call[constant[, ].join, parameter[name[reasons]]]]
|
keyword[def] identifier[getRejectionReasons] ( identifier[self] , identifier[dd] ):
literal[string]
identifier[keys] = identifier[dd] . identifier[keys] ()
identifier[reasons] =[]
keyword[if] keyword[not] ( literal[string] keyword[in] identifier[keys] ) keyword[or] keyword[not] ( identifier[dd] [ literal[string] ]):
keyword[return] literal[int]
keyword[if] literal[string] keyword[in] identifier[keys] :
identifier[reasons] += identifier[dd] [ literal[string] ]
keyword[if] literal[string] keyword[in] identifier[keys] keyword[and] identifier[dd] [ literal[string] ]:
identifier[reasons] . identifier[append] ( identifier[dd] [ literal[string] ])
keyword[if] identifier[len] ( identifier[reasons] )< literal[int] :
keyword[return] literal[string]
keyword[return] literal[string] . identifier[join] ( identifier[reasons] )
|
def getRejectionReasons(self, dd):
"""
'd' is a dictionary with the stored data in the widget like:
{u'selected': [u'a', u'b'], u'checkbox': True, u'other': 'dsadas', u'checkbox_other': True}
Returns a string with the options both from selected and input items
"""
keys = dd.keys()
reasons = []
if not 'checkbox' in keys or not dd['checkbox']:
return 0 # depends on [control=['if'], data=[]]
if 'selected' in keys:
reasons += dd['selected'] # depends on [control=['if'], data=[]]
if 'other' in keys and dd['checkbox_other']:
reasons.append(dd['other']) # depends on [control=['if'], data=[]]
if len(reasons) < 1:
return 'Yes, unknow' # depends on [control=['if'], data=[]]
return ', '.join(reasons)
|
def wrapModel(self, model):
"""
Converts application-provided model objects to L{IResource} providers.
"""
res = IResource(model, None)
if res is None:
frag = INavigableFragment(model)
fragmentName = getattr(frag, 'fragmentName', None)
if fragmentName is not None:
fragDocFactory = self._getDocFactory(fragmentName)
if fragDocFactory is not None:
frag.docFactory = fragDocFactory
if frag.docFactory is None:
raise CouldNotLoadFromThemes(frag, self._preferredThemes())
useAthena = isinstance(frag, (athena.LiveFragment, athena.LiveElement))
return self._wrapNavFrag(frag, useAthena)
else:
return res
|
def function[wrapModel, parameter[self, model]]:
constant[
Converts application-provided model objects to L{IResource} providers.
]
variable[res] assign[=] call[name[IResource], parameter[name[model], constant[None]]]
if compare[name[res] is constant[None]] begin[:]
variable[frag] assign[=] call[name[INavigableFragment], parameter[name[model]]]
variable[fragmentName] assign[=] call[name[getattr], parameter[name[frag], constant[fragmentName], constant[None]]]
if compare[name[fragmentName] is_not constant[None]] begin[:]
variable[fragDocFactory] assign[=] call[name[self]._getDocFactory, parameter[name[fragmentName]]]
if compare[name[fragDocFactory] is_not constant[None]] begin[:]
name[frag].docFactory assign[=] name[fragDocFactory]
if compare[name[frag].docFactory is constant[None]] begin[:]
<ast.Raise object at 0x7da1b0a31180>
variable[useAthena] assign[=] call[name[isinstance], parameter[name[frag], tuple[[<ast.Attribute object at 0x7da1b0bd6d10>, <ast.Attribute object at 0x7da1b0bd45e0>]]]]
return[call[name[self]._wrapNavFrag, parameter[name[frag], name[useAthena]]]]
|
keyword[def] identifier[wrapModel] ( identifier[self] , identifier[model] ):
literal[string]
identifier[res] = identifier[IResource] ( identifier[model] , keyword[None] )
keyword[if] identifier[res] keyword[is] keyword[None] :
identifier[frag] = identifier[INavigableFragment] ( identifier[model] )
identifier[fragmentName] = identifier[getattr] ( identifier[frag] , literal[string] , keyword[None] )
keyword[if] identifier[fragmentName] keyword[is] keyword[not] keyword[None] :
identifier[fragDocFactory] = identifier[self] . identifier[_getDocFactory] ( identifier[fragmentName] )
keyword[if] identifier[fragDocFactory] keyword[is] keyword[not] keyword[None] :
identifier[frag] . identifier[docFactory] = identifier[fragDocFactory]
keyword[if] identifier[frag] . identifier[docFactory] keyword[is] keyword[None] :
keyword[raise] identifier[CouldNotLoadFromThemes] ( identifier[frag] , identifier[self] . identifier[_preferredThemes] ())
identifier[useAthena] = identifier[isinstance] ( identifier[frag] ,( identifier[athena] . identifier[LiveFragment] , identifier[athena] . identifier[LiveElement] ))
keyword[return] identifier[self] . identifier[_wrapNavFrag] ( identifier[frag] , identifier[useAthena] )
keyword[else] :
keyword[return] identifier[res]
|
def wrapModel(self, model):
"""
Converts application-provided model objects to L{IResource} providers.
"""
res = IResource(model, None)
if res is None:
frag = INavigableFragment(model)
fragmentName = getattr(frag, 'fragmentName', None)
if fragmentName is not None:
fragDocFactory = self._getDocFactory(fragmentName)
if fragDocFactory is not None:
frag.docFactory = fragDocFactory # depends on [control=['if'], data=['fragDocFactory']] # depends on [control=['if'], data=['fragmentName']]
if frag.docFactory is None:
raise CouldNotLoadFromThemes(frag, self._preferredThemes()) # depends on [control=['if'], data=[]]
useAthena = isinstance(frag, (athena.LiveFragment, athena.LiveElement))
return self._wrapNavFrag(frag, useAthena) # depends on [control=['if'], data=[]]
else:
return res
|
def compute(self, sensorToBodyByColumn, sensorToSpecificObjectByColumn):
"""
Compute the
"body's location relative to a specific object"
from an array of
"sensor's location relative to a specific object"
and an array of
"sensor's location relative to body"
These arrays consist of one module per cortical column.
This is a metric computation, similar to that of the
SensorToSpecificObjectModule, but with voting. In effect, the columns vote
on "the body's location relative to a specific object".
Note: Each column can vote for an arbitrary number of cells, but it can't
vote for a single cell more than once. This is necessary because we don't
want ambiguity in a column to cause some cells to get extra votes. There are
a few ways that this could be biologically plausible:
- Explanation 1: Nearby dendritic segments are independent coincidence
detectors, but perhaps their dendritic spikes don't sum. Meanwhile,
maybe dendritic spikes from far away dendritic segments do sum.
- Explanation 2: Dendritic spikes from different columns are separated
temporally, not spatially. All the spikes from one column "arrive" at
the cell at the same time, but the dendritic spikes from other columns
arrive at other times. With each of these temporally-separated dendritic
spikes, the unsupported cells are inhibited, or the spikes' effects are
summed.
- Explanation 3: Another population of cells within the cortical column
might calculate the "body's location relative to a specific object" in
this same "metric" way, but without tallying any votes. Then it relays
this SDR subcortically, voting 0 or 1 times for each cell.
@param sensorToBodyInputs (list of numpy arrays)
The "sensor's location relative to the body" input from each cortical column
@param sensorToSpecificObjectInputs (list of numpy arrays)
The "sensor's location relative to specific object" input from each
cortical column
"""
votesByCell = np.zeros(self.cellCount, dtype="int")
self.activeSegmentsByColumn = []
for (connections,
activeSensorToBodyCells,
activeSensorToSpecificObjectCells) in zip(self.connectionsByColumn,
sensorToBodyByColumn,
sensorToSpecificObjectByColumn):
overlaps = connections.computeActivity({
"sensorToBody": activeSensorToBodyCells,
"sensorToSpecificObject": activeSensorToSpecificObjectCells,
})
activeSegments = np.where(overlaps >= 2)[0]
votes = connections.mapSegmentsToCells(activeSegments)
votes = np.unique(votes) # Only allow a column to vote for a cell once.
votesByCell[votes] += 1
self.activeSegmentsByColumn.append(activeSegments)
candidates = np.where(votesByCell == np.max(votesByCell))[0]
# If possible, select only from current active cells.
#
# If we were to always activate all candidates, there would be an explosive
# back-and-forth between this layer and the sensorToSpecificObject layer.
self.activeCells = np.intersect1d(self.activeCells, candidates)
if self.activeCells.size == 0:
# Otherwise, activate all cells with the maximum number of active
# segments.
self.activeCells = candidates
self.inhibitedCells = np.setdiff1d(np.where(votesByCell > 0)[0],
self.activeCells)
|
def function[compute, parameter[self, sensorToBodyByColumn, sensorToSpecificObjectByColumn]]:
constant[
Compute the
"body's location relative to a specific object"
from an array of
"sensor's location relative to a specific object"
and an array of
"sensor's location relative to body"
These arrays consist of one module per cortical column.
This is a metric computation, similar to that of the
SensorToSpecificObjectModule, but with voting. In effect, the columns vote
on "the body's location relative to a specific object".
Note: Each column can vote for an arbitrary number of cells, but it can't
vote for a single cell more than once. This is necessary because we don't
want ambiguity in a column to cause some cells to get extra votes. There are
a few ways that this could be biologically plausible:
- Explanation 1: Nearby dendritic segments are independent coincidence
detectors, but perhaps their dendritic spikes don't sum. Meanwhile,
maybe dendritic spikes from far away dendritic segments do sum.
- Explanation 2: Dendritic spikes from different columns are separated
temporally, not spatially. All the spikes from one column "arrive" at
the cell at the same time, but the dendritic spikes from other columns
arrive at other times. With each of these temporally-separated dendritic
spikes, the unsupported cells are inhibited, or the spikes' effects are
summed.
- Explanation 3: Another population of cells within the cortical column
might calculate the "body's location relative to a specific object" in
this same "metric" way, but without tallying any votes. Then it relays
this SDR subcortically, voting 0 or 1 times for each cell.
@param sensorToBodyInputs (list of numpy arrays)
The "sensor's location relative to the body" input from each cortical column
@param sensorToSpecificObjectInputs (list of numpy arrays)
The "sensor's location relative to specific object" input from each
cortical column
]
variable[votesByCell] assign[=] call[name[np].zeros, parameter[name[self].cellCount]]
name[self].activeSegmentsByColumn assign[=] list[[]]
for taget[tuple[[<ast.Name object at 0x7da1b08443a0>, <ast.Name object at 0x7da1b0847a30>, <ast.Name object at 0x7da1b08442b0>]]] in starred[call[name[zip], parameter[name[self].connectionsByColumn, name[sensorToBodyByColumn], name[sensorToSpecificObjectByColumn]]]] begin[:]
variable[overlaps] assign[=] call[name[connections].computeActivity, parameter[dictionary[[<ast.Constant object at 0x7da1b0844580>, <ast.Constant object at 0x7da1b0844cd0>], [<ast.Name object at 0x7da1b0845bd0>, <ast.Name object at 0x7da1b08464a0>]]]]
variable[activeSegments] assign[=] call[call[name[np].where, parameter[compare[name[overlaps] greater_or_equal[>=] constant[2]]]]][constant[0]]
variable[votes] assign[=] call[name[connections].mapSegmentsToCells, parameter[name[activeSegments]]]
variable[votes] assign[=] call[name[np].unique, parameter[name[votes]]]
<ast.AugAssign object at 0x7da1b086c820>
call[name[self].activeSegmentsByColumn.append, parameter[name[activeSegments]]]
variable[candidates] assign[=] call[call[name[np].where, parameter[compare[name[votesByCell] equal[==] call[name[np].max, parameter[name[votesByCell]]]]]]][constant[0]]
name[self].activeCells assign[=] call[name[np].intersect1d, parameter[name[self].activeCells, name[candidates]]]
if compare[name[self].activeCells.size equal[==] constant[0]] begin[:]
name[self].activeCells assign[=] name[candidates]
name[self].inhibitedCells assign[=] call[name[np].setdiff1d, parameter[call[call[name[np].where, parameter[compare[name[votesByCell] greater[>] constant[0]]]]][constant[0]], name[self].activeCells]]
|
keyword[def] identifier[compute] ( identifier[self] , identifier[sensorToBodyByColumn] , identifier[sensorToSpecificObjectByColumn] ):
literal[string]
identifier[votesByCell] = identifier[np] . identifier[zeros] ( identifier[self] . identifier[cellCount] , identifier[dtype] = literal[string] )
identifier[self] . identifier[activeSegmentsByColumn] =[]
keyword[for] ( identifier[connections] ,
identifier[activeSensorToBodyCells] ,
identifier[activeSensorToSpecificObjectCells] ) keyword[in] identifier[zip] ( identifier[self] . identifier[connectionsByColumn] ,
identifier[sensorToBodyByColumn] ,
identifier[sensorToSpecificObjectByColumn] ):
identifier[overlaps] = identifier[connections] . identifier[computeActivity] ({
literal[string] : identifier[activeSensorToBodyCells] ,
literal[string] : identifier[activeSensorToSpecificObjectCells] ,
})
identifier[activeSegments] = identifier[np] . identifier[where] ( identifier[overlaps] >= literal[int] )[ literal[int] ]
identifier[votes] = identifier[connections] . identifier[mapSegmentsToCells] ( identifier[activeSegments] )
identifier[votes] = identifier[np] . identifier[unique] ( identifier[votes] )
identifier[votesByCell] [ identifier[votes] ]+= literal[int]
identifier[self] . identifier[activeSegmentsByColumn] . identifier[append] ( identifier[activeSegments] )
identifier[candidates] = identifier[np] . identifier[where] ( identifier[votesByCell] == identifier[np] . identifier[max] ( identifier[votesByCell] ))[ literal[int] ]
identifier[self] . identifier[activeCells] = identifier[np] . identifier[intersect1d] ( identifier[self] . identifier[activeCells] , identifier[candidates] )
keyword[if] identifier[self] . identifier[activeCells] . identifier[size] == literal[int] :
identifier[self] . identifier[activeCells] = identifier[candidates]
identifier[self] . identifier[inhibitedCells] = identifier[np] . identifier[setdiff1d] ( identifier[np] . identifier[where] ( identifier[votesByCell] > literal[int] )[ literal[int] ],
identifier[self] . identifier[activeCells] )
|
def compute(self, sensorToBodyByColumn, sensorToSpecificObjectByColumn):
"""
Compute the
"body's location relative to a specific object"
from an array of
"sensor's location relative to a specific object"
and an array of
"sensor's location relative to body"
These arrays consist of one module per cortical column.
This is a metric computation, similar to that of the
SensorToSpecificObjectModule, but with voting. In effect, the columns vote
on "the body's location relative to a specific object".
Note: Each column can vote for an arbitrary number of cells, but it can't
vote for a single cell more than once. This is necessary because we don't
want ambiguity in a column to cause some cells to get extra votes. There are
a few ways that this could be biologically plausible:
- Explanation 1: Nearby dendritic segments are independent coincidence
detectors, but perhaps their dendritic spikes don't sum. Meanwhile,
maybe dendritic spikes from far away dendritic segments do sum.
- Explanation 2: Dendritic spikes from different columns are separated
temporally, not spatially. All the spikes from one column "arrive" at
the cell at the same time, but the dendritic spikes from other columns
arrive at other times. With each of these temporally-separated dendritic
spikes, the unsupported cells are inhibited, or the spikes' effects are
summed.
- Explanation 3: Another population of cells within the cortical column
might calculate the "body's location relative to a specific object" in
this same "metric" way, but without tallying any votes. Then it relays
this SDR subcortically, voting 0 or 1 times for each cell.
@param sensorToBodyInputs (list of numpy arrays)
The "sensor's location relative to the body" input from each cortical column
@param sensorToSpecificObjectInputs (list of numpy arrays)
The "sensor's location relative to specific object" input from each
cortical column
"""
votesByCell = np.zeros(self.cellCount, dtype='int')
self.activeSegmentsByColumn = []
for (connections, activeSensorToBodyCells, activeSensorToSpecificObjectCells) in zip(self.connectionsByColumn, sensorToBodyByColumn, sensorToSpecificObjectByColumn):
overlaps = connections.computeActivity({'sensorToBody': activeSensorToBodyCells, 'sensorToSpecificObject': activeSensorToSpecificObjectCells})
activeSegments = np.where(overlaps >= 2)[0]
votes = connections.mapSegmentsToCells(activeSegments)
votes = np.unique(votes) # Only allow a column to vote for a cell once.
votesByCell[votes] += 1
self.activeSegmentsByColumn.append(activeSegments) # depends on [control=['for'], data=[]]
candidates = np.where(votesByCell == np.max(votesByCell))[0]
# If possible, select only from current active cells.
#
# If we were to always activate all candidates, there would be an explosive
# back-and-forth between this layer and the sensorToSpecificObject layer.
self.activeCells = np.intersect1d(self.activeCells, candidates)
if self.activeCells.size == 0:
# Otherwise, activate all cells with the maximum number of active
# segments.
self.activeCells = candidates # depends on [control=['if'], data=[]]
self.inhibitedCells = np.setdiff1d(np.where(votesByCell > 0)[0], self.activeCells)
|
def knot_removal_alpha_j(u, degree, knotvector, num, idx):
""" Computes :math:`\\alpha_{j}` coefficient for knot removal algorithm.
Please refer to Eq. 5.29 of The NURBS Book by Piegl & Tiller, 2nd Edition, p.184 for details.
:param u: knot
:type u: float
:param degree: degree
:type degree: int
:param knotvector: knot vector
:type knotvector: tuple
:param num: knot removal index
:type num: int
:param idx: iterator index
:type idx: int
:return: coefficient value
:rtype: float
"""
return (u - knotvector[idx - num]) / (knotvector[idx + degree + 1] - knotvector[idx - num])
|
def function[knot_removal_alpha_j, parameter[u, degree, knotvector, num, idx]]:
constant[ Computes :math:`\alpha_{j}` coefficient for knot removal algorithm.
Please refer to Eq. 5.29 of The NURBS Book by Piegl & Tiller, 2nd Edition, p.184 for details.
:param u: knot
:type u: float
:param degree: degree
:type degree: int
:param knotvector: knot vector
:type knotvector: tuple
:param num: knot removal index
:type num: int
:param idx: iterator index
:type idx: int
:return: coefficient value
:rtype: float
]
return[binary_operation[binary_operation[name[u] - call[name[knotvector]][binary_operation[name[idx] - name[num]]]] / binary_operation[call[name[knotvector]][binary_operation[binary_operation[name[idx] + name[degree]] + constant[1]]] - call[name[knotvector]][binary_operation[name[idx] - name[num]]]]]]
|
keyword[def] identifier[knot_removal_alpha_j] ( identifier[u] , identifier[degree] , identifier[knotvector] , identifier[num] , identifier[idx] ):
literal[string]
keyword[return] ( identifier[u] - identifier[knotvector] [ identifier[idx] - identifier[num] ])/( identifier[knotvector] [ identifier[idx] + identifier[degree] + literal[int] ]- identifier[knotvector] [ identifier[idx] - identifier[num] ])
|
def knot_removal_alpha_j(u, degree, knotvector, num, idx):
""" Computes :math:`\\alpha_{j}` coefficient for knot removal algorithm.
Please refer to Eq. 5.29 of The NURBS Book by Piegl & Tiller, 2nd Edition, p.184 for details.
:param u: knot
:type u: float
:param degree: degree
:type degree: int
:param knotvector: knot vector
:type knotvector: tuple
:param num: knot removal index
:type num: int
:param idx: iterator index
:type idx: int
:return: coefficient value
:rtype: float
"""
return (u - knotvector[idx - num]) / (knotvector[idx + degree + 1] - knotvector[idx - num])
|
def connectExec(connection, protocol, commandLine):
"""Connect a Protocol to a ssh exec session
"""
deferred = connectSession(connection, protocol)
@deferred.addCallback
def requestSubsystem(session):
return session.requestExec(commandLine)
return deferred
|
def function[connectExec, parameter[connection, protocol, commandLine]]:
constant[Connect a Protocol to a ssh exec session
]
variable[deferred] assign[=] call[name[connectSession], parameter[name[connection], name[protocol]]]
def function[requestSubsystem, parameter[session]]:
return[call[name[session].requestExec, parameter[name[commandLine]]]]
return[name[deferred]]
|
keyword[def] identifier[connectExec] ( identifier[connection] , identifier[protocol] , identifier[commandLine] ):
literal[string]
identifier[deferred] = identifier[connectSession] ( identifier[connection] , identifier[protocol] )
@ identifier[deferred] . identifier[addCallback]
keyword[def] identifier[requestSubsystem] ( identifier[session] ):
keyword[return] identifier[session] . identifier[requestExec] ( identifier[commandLine] )
keyword[return] identifier[deferred]
|
def connectExec(connection, protocol, commandLine):
"""Connect a Protocol to a ssh exec session
"""
deferred = connectSession(connection, protocol)
@deferred.addCallback
def requestSubsystem(session):
return session.requestExec(commandLine)
return deferred
|
def interrupt(self):
"""
Invoked by the renderering.Renderer, if the image has changed.
"""
self.image = io.BytesIO()
self.renderer.screen.save(self.image, "png")
|
def function[interrupt, parameter[self]]:
constant[
Invoked by the renderering.Renderer, if the image has changed.
]
name[self].image assign[=] call[name[io].BytesIO, parameter[]]
call[name[self].renderer.screen.save, parameter[name[self].image, constant[png]]]
|
keyword[def] identifier[interrupt] ( identifier[self] ):
literal[string]
identifier[self] . identifier[image] = identifier[io] . identifier[BytesIO] ()
identifier[self] . identifier[renderer] . identifier[screen] . identifier[save] ( identifier[self] . identifier[image] , literal[string] )
|
def interrupt(self):
"""
Invoked by the renderering.Renderer, if the image has changed.
"""
self.image = io.BytesIO()
self.renderer.screen.save(self.image, 'png')
|
def section_areas(neurites, neurite_type=NeuriteType.all):
'''section areas in a collection of neurites'''
return map_sections(sectionfunc.section_area, neurites, neurite_type=neurite_type)
|
def function[section_areas, parameter[neurites, neurite_type]]:
constant[section areas in a collection of neurites]
return[call[name[map_sections], parameter[name[sectionfunc].section_area, name[neurites]]]]
|
keyword[def] identifier[section_areas] ( identifier[neurites] , identifier[neurite_type] = identifier[NeuriteType] . identifier[all] ):
literal[string]
keyword[return] identifier[map_sections] ( identifier[sectionfunc] . identifier[section_area] , identifier[neurites] , identifier[neurite_type] = identifier[neurite_type] )
|
def section_areas(neurites, neurite_type=NeuriteType.all):
"""section areas in a collection of neurites"""
return map_sections(sectionfunc.section_area, neurites, neurite_type=neurite_type)
|
def save_four_ds9(rectwv_coeff, debugplot=0):
"""Save the 4 possible ds9 region files.
Parameters
----------
rectwv_coeff : RectWaveCoeff instance
Rectification and wavelength calibration coefficients for the
particular CSU configuration.
debugplot : int
Debugging level for messages and plots. For details see
'numina.array.display.pause_debugplot.py'.
"""
for limits, rectified, suffix in zip(
['frontiers', 'frontiers', 'boundaries', 'boundaries'],
[False, True, False, True],
['rawimage', 'rectified', 'rawimage', 'rectified']
):
output = rectwv_coeff_to_ds9(rectwv_coeff=rectwv_coeff,
limits=limits,
rectified=rectified)
filename = 'ds9_' + limits + '_' + suffix + '.reg'
if abs(debugplot) >= 10:
print('>>> Saving: ', filename)
save_ds9(output, filename)
|
def function[save_four_ds9, parameter[rectwv_coeff, debugplot]]:
constant[Save the 4 possible ds9 region files.
Parameters
----------
rectwv_coeff : RectWaveCoeff instance
Rectification and wavelength calibration coefficients for the
particular CSU configuration.
debugplot : int
Debugging level for messages and plots. For details see
'numina.array.display.pause_debugplot.py'.
]
for taget[tuple[[<ast.Name object at 0x7da2054a4e20>, <ast.Name object at 0x7da2054a74f0>, <ast.Name object at 0x7da2054a4f10>]]] in starred[call[name[zip], parameter[list[[<ast.Constant object at 0x7da2054a5240>, <ast.Constant object at 0x7da2054a4c70>, <ast.Constant object at 0x7da2054a4040>, <ast.Constant object at 0x7da2054a6140>]], list[[<ast.Constant object at 0x7da2054a7940>, <ast.Constant object at 0x7da2054a4190>, <ast.Constant object at 0x7da2054a7ca0>, <ast.Constant object at 0x7da2054a6080>]], list[[<ast.Constant object at 0x7da2054a51b0>, <ast.Constant object at 0x7da2054a77f0>, <ast.Constant object at 0x7da2054a4520>, <ast.Constant object at 0x7da2054a4b50>]]]]] begin[:]
variable[output] assign[=] call[name[rectwv_coeff_to_ds9], parameter[]]
variable[filename] assign[=] binary_operation[binary_operation[binary_operation[binary_operation[constant[ds9_] + name[limits]] + constant[_]] + name[suffix]] + constant[.reg]]
if compare[call[name[abs], parameter[name[debugplot]]] greater_or_equal[>=] constant[10]] begin[:]
call[name[print], parameter[constant[>>> Saving: ], name[filename]]]
call[name[save_ds9], parameter[name[output], name[filename]]]
|
keyword[def] identifier[save_four_ds9] ( identifier[rectwv_coeff] , identifier[debugplot] = literal[int] ):
literal[string]
keyword[for] identifier[limits] , identifier[rectified] , identifier[suffix] keyword[in] identifier[zip] (
[ literal[string] , literal[string] , literal[string] , literal[string] ],
[ keyword[False] , keyword[True] , keyword[False] , keyword[True] ],
[ literal[string] , literal[string] , literal[string] , literal[string] ]
):
identifier[output] = identifier[rectwv_coeff_to_ds9] ( identifier[rectwv_coeff] = identifier[rectwv_coeff] ,
identifier[limits] = identifier[limits] ,
identifier[rectified] = identifier[rectified] )
identifier[filename] = literal[string] + identifier[limits] + literal[string] + identifier[suffix] + literal[string]
keyword[if] identifier[abs] ( identifier[debugplot] )>= literal[int] :
identifier[print] ( literal[string] , identifier[filename] )
identifier[save_ds9] ( identifier[output] , identifier[filename] )
|
def save_four_ds9(rectwv_coeff, debugplot=0):
"""Save the 4 possible ds9 region files.
Parameters
----------
rectwv_coeff : RectWaveCoeff instance
Rectification and wavelength calibration coefficients for the
particular CSU configuration.
debugplot : int
Debugging level for messages and plots. For details see
'numina.array.display.pause_debugplot.py'.
"""
for (limits, rectified, suffix) in zip(['frontiers', 'frontiers', 'boundaries', 'boundaries'], [False, True, False, True], ['rawimage', 'rectified', 'rawimage', 'rectified']):
output = rectwv_coeff_to_ds9(rectwv_coeff=rectwv_coeff, limits=limits, rectified=rectified)
filename = 'ds9_' + limits + '_' + suffix + '.reg'
if abs(debugplot) >= 10:
print('>>> Saving: ', filename) # depends on [control=['if'], data=[]]
save_ds9(output, filename) # depends on [control=['for'], data=[]]
|
def phantompeakqualtools_general_stats(self):
""" Add columns to General Statistics table """
headers = OrderedDict()
headers['Estimated_Fragment_Length_bp'] = {
'title': 'Frag Length',
'description': 'Estimated fragment length (bp)',
'min': 0,
'format': '{:,.0f}'
}
headers['NSC'] = {
'title': 'NSC',
'description': 'Normalized strand cross-correlation',
'max': 10,
'min': 0,
'format': '{:,.2f}',
'scale': 'RdYlGn-rev'
}
headers['RSC'] = {
'title': 'RSC',
'description': 'Relative strand cross-correlation',
'max': 10,
'min': 0,
'format': '{:,.2f}',
'scale': 'RdYlBu-rev'
}
self.general_stats_addcols(self.phantompeakqualtools_data, headers)
|
def function[phantompeakqualtools_general_stats, parameter[self]]:
constant[ Add columns to General Statistics table ]
variable[headers] assign[=] call[name[OrderedDict], parameter[]]
call[name[headers]][constant[Estimated_Fragment_Length_bp]] assign[=] dictionary[[<ast.Constant object at 0x7da20cabd180>, <ast.Constant object at 0x7da20cabeb00>, <ast.Constant object at 0x7da20cabf820>, <ast.Constant object at 0x7da20cabd0c0>], [<ast.Constant object at 0x7da20cabfd90>, <ast.Constant object at 0x7da20cabf5e0>, <ast.Constant object at 0x7da20cabe650>, <ast.Constant object at 0x7da20cabea40>]]
call[name[headers]][constant[NSC]] assign[=] dictionary[[<ast.Constant object at 0x7da20cabdd20>, <ast.Constant object at 0x7da20cabfa30>, <ast.Constant object at 0x7da20cabc5e0>, <ast.Constant object at 0x7da20cabedd0>, <ast.Constant object at 0x7da20cabfa90>, <ast.Constant object at 0x7da20cabf490>], [<ast.Constant object at 0x7da20cabf700>, <ast.Constant object at 0x7da20cabfd60>, <ast.Constant object at 0x7da20cabdf00>, <ast.Constant object at 0x7da20cabf580>, <ast.Constant object at 0x7da20cabe980>, <ast.Constant object at 0x7da20cabe140>]]
call[name[headers]][constant[RSC]] assign[=] dictionary[[<ast.Constant object at 0x7da20cabe920>, <ast.Constant object at 0x7da20cabf1c0>, <ast.Constant object at 0x7da20cabc880>, <ast.Constant object at 0x7da20cabfb80>, <ast.Constant object at 0x7da20cabebf0>, <ast.Constant object at 0x7da20cabc790>], [<ast.Constant object at 0x7da20cabc0a0>, <ast.Constant object at 0x7da20cabcee0>, <ast.Constant object at 0x7da20cabee60>, <ast.Constant object at 0x7da20cabe860>, <ast.Constant object at 0x7da20cabfa00>, <ast.Constant object at 0x7da1b1e5cb20>]]
call[name[self].general_stats_addcols, parameter[name[self].phantompeakqualtools_data, name[headers]]]
|
keyword[def] identifier[phantompeakqualtools_general_stats] ( identifier[self] ):
literal[string]
identifier[headers] = identifier[OrderedDict] ()
identifier[headers] [ literal[string] ]={
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[int] ,
literal[string] : literal[string]
}
identifier[headers] [ literal[string] ]={
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[int] ,
literal[string] : literal[int] ,
literal[string] : literal[string] ,
literal[string] : literal[string]
}
identifier[headers] [ literal[string] ]={
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[int] ,
literal[string] : literal[int] ,
literal[string] : literal[string] ,
literal[string] : literal[string]
}
identifier[self] . identifier[general_stats_addcols] ( identifier[self] . identifier[phantompeakqualtools_data] , identifier[headers] )
|
def phantompeakqualtools_general_stats(self):
""" Add columns to General Statistics table """
headers = OrderedDict()
headers['Estimated_Fragment_Length_bp'] = {'title': 'Frag Length', 'description': 'Estimated fragment length (bp)', 'min': 0, 'format': '{:,.0f}'}
headers['NSC'] = {'title': 'NSC', 'description': 'Normalized strand cross-correlation', 'max': 10, 'min': 0, 'format': '{:,.2f}', 'scale': 'RdYlGn-rev'}
headers['RSC'] = {'title': 'RSC', 'description': 'Relative strand cross-correlation', 'max': 10, 'min': 0, 'format': '{:,.2f}', 'scale': 'RdYlBu-rev'}
self.general_stats_addcols(self.phantompeakqualtools_data, headers)
|
def get_data(self, name):
r"""
Get the data associated with a node.
:param name: Node name
:type name: :ref:`NodeName`
:rtype: any type or list of objects of any type
:raises:
* RuntimeError (Argument \`name\` is not valid)
* RuntimeError (Node *[name]* not in tree)
"""
if self._validate_node_name(name):
raise RuntimeError("Argument `name` is not valid")
self._node_in_tree(name)
return self._db[name]["data"]
|
def function[get_data, parameter[self, name]]:
constant[
Get the data associated with a node.
:param name: Node name
:type name: :ref:`NodeName`
:rtype: any type or list of objects of any type
:raises:
* RuntimeError (Argument \`name\` is not valid)
* RuntimeError (Node *[name]* not in tree)
]
if call[name[self]._validate_node_name, parameter[name[name]]] begin[:]
<ast.Raise object at 0x7da1b10d7e80>
call[name[self]._node_in_tree, parameter[name[name]]]
return[call[call[name[self]._db][name[name]]][constant[data]]]
|
keyword[def] identifier[get_data] ( identifier[self] , identifier[name] ):
literal[string]
keyword[if] identifier[self] . identifier[_validate_node_name] ( identifier[name] ):
keyword[raise] identifier[RuntimeError] ( literal[string] )
identifier[self] . identifier[_node_in_tree] ( identifier[name] )
keyword[return] identifier[self] . identifier[_db] [ identifier[name] ][ literal[string] ]
|
def get_data(self, name):
"""
Get the data associated with a node.
:param name: Node name
:type name: :ref:`NodeName`
:rtype: any type or list of objects of any type
:raises:
* RuntimeError (Argument \\`name\\` is not valid)
* RuntimeError (Node *[name]* not in tree)
"""
if self._validate_node_name(name):
raise RuntimeError('Argument `name` is not valid') # depends on [control=['if'], data=[]]
self._node_in_tree(name)
return self._db[name]['data']
|
def date(self):
""":return: datetime object"""
if self.commit_time:
return datetime.utcfromtimestamp(self.commit_time)
else:
return datetime.now()
|
def function[date, parameter[self]]:
constant[:return: datetime object]
if name[self].commit_time begin[:]
return[call[name[datetime].utcfromtimestamp, parameter[name[self].commit_time]]]
|
keyword[def] identifier[date] ( identifier[self] ):
literal[string]
keyword[if] identifier[self] . identifier[commit_time] :
keyword[return] identifier[datetime] . identifier[utcfromtimestamp] ( identifier[self] . identifier[commit_time] )
keyword[else] :
keyword[return] identifier[datetime] . identifier[now] ()
|
def date(self):
""":return: datetime object"""
if self.commit_time:
return datetime.utcfromtimestamp(self.commit_time) # depends on [control=['if'], data=[]]
else:
return datetime.now()
|
def get_comparable_values(self):
"""Return a tupple of values representing the unicity of the object
"""
return (not self.generic, int(self.code), str(self.message), str(self.description))
|
def function[get_comparable_values, parameter[self]]:
constant[Return a tupple of values representing the unicity of the object
]
return[tuple[[<ast.UnaryOp object at 0x7da207f00b20>, <ast.Call object at 0x7da207f00df0>, <ast.Call object at 0x7da207f03a90>, <ast.Call object at 0x7da207f03310>]]]
|
keyword[def] identifier[get_comparable_values] ( identifier[self] ):
literal[string]
keyword[return] ( keyword[not] identifier[self] . identifier[generic] , identifier[int] ( identifier[self] . identifier[code] ), identifier[str] ( identifier[self] . identifier[message] ), identifier[str] ( identifier[self] . identifier[description] ))
|
def get_comparable_values(self):
"""Return a tupple of values representing the unicity of the object
"""
return (not self.generic, int(self.code), str(self.message), str(self.description))
|
def _get_zeropoint(expnum, ccd, prefix=None, version='p'):
"""
Retrieve the zeropoint stored in the tags associated with this image.
@param expnum: Exposure number
@param ccd: ccd of the exposure
@param prefix: possible prefix (such as 'fk')
@param version: which version: p, s, or o ?
@return: zeropoint
"""
if prefix is not None:
DeprecationWarning("Prefix is no longer used here as the 'fk' and 's' have the same zeropoint.")
key = "zeropoint_{:1s}{:02d}".format(version, int(ccd))
return get_tag(expnum, key)
|
def function[_get_zeropoint, parameter[expnum, ccd, prefix, version]]:
constant[
Retrieve the zeropoint stored in the tags associated with this image.
@param expnum: Exposure number
@param ccd: ccd of the exposure
@param prefix: possible prefix (such as 'fk')
@param version: which version: p, s, or o ?
@return: zeropoint
]
if compare[name[prefix] is_not constant[None]] begin[:]
call[name[DeprecationWarning], parameter[constant[Prefix is no longer used here as the 'fk' and 's' have the same zeropoint.]]]
variable[key] assign[=] call[constant[zeropoint_{:1s}{:02d}].format, parameter[name[version], call[name[int], parameter[name[ccd]]]]]
return[call[name[get_tag], parameter[name[expnum], name[key]]]]
|
keyword[def] identifier[_get_zeropoint] ( identifier[expnum] , identifier[ccd] , identifier[prefix] = keyword[None] , identifier[version] = literal[string] ):
literal[string]
keyword[if] identifier[prefix] keyword[is] keyword[not] keyword[None] :
identifier[DeprecationWarning] ( literal[string] )
identifier[key] = literal[string] . identifier[format] ( identifier[version] , identifier[int] ( identifier[ccd] ))
keyword[return] identifier[get_tag] ( identifier[expnum] , identifier[key] )
|
def _get_zeropoint(expnum, ccd, prefix=None, version='p'):
"""
Retrieve the zeropoint stored in the tags associated with this image.
@param expnum: Exposure number
@param ccd: ccd of the exposure
@param prefix: possible prefix (such as 'fk')
@param version: which version: p, s, or o ?
@return: zeropoint
"""
if prefix is not None:
DeprecationWarning("Prefix is no longer used here as the 'fk' and 's' have the same zeropoint.") # depends on [control=['if'], data=[]]
key = 'zeropoint_{:1s}{:02d}'.format(version, int(ccd))
return get_tag(expnum, key)
|
def collapse_if_tuple(abi):
"""Converts a tuple from a dict to a parenthesized list of its types.
>>> from eth_utils.abi import collapse_if_tuple
>>> collapse_if_tuple(
... {
... 'components': [
... {'name': 'anAddress', 'type': 'address'},
... {'name': 'anInt', 'type': 'uint256'},
... {'name': 'someBytes', 'type': 'bytes'},
... ],
... 'type': 'tuple',
... }
... )
'(address,uint256,bytes)'
"""
typ = abi["type"]
if not typ.startswith("tuple"):
return typ
delimited = ",".join(collapse_if_tuple(c) for c in abi["components"])
# Whatever comes after "tuple" is the array dims. The ABI spec states that
# this will have the form "", "[]", or "[k]".
array_dim = typ[5:]
collapsed = "({}){}".format(delimited, array_dim)
return collapsed
|
def function[collapse_if_tuple, parameter[abi]]:
constant[Converts a tuple from a dict to a parenthesized list of its types.
>>> from eth_utils.abi import collapse_if_tuple
>>> collapse_if_tuple(
... {
... 'components': [
... {'name': 'anAddress', 'type': 'address'},
... {'name': 'anInt', 'type': 'uint256'},
... {'name': 'someBytes', 'type': 'bytes'},
... ],
... 'type': 'tuple',
... }
... )
'(address,uint256,bytes)'
]
variable[typ] assign[=] call[name[abi]][constant[type]]
if <ast.UnaryOp object at 0x7da18bc72e30> begin[:]
return[name[typ]]
variable[delimited] assign[=] call[constant[,].join, parameter[<ast.GeneratorExp object at 0x7da18bc700a0>]]
variable[array_dim] assign[=] call[name[typ]][<ast.Slice object at 0x7da18bc71b10>]
variable[collapsed] assign[=] call[constant[({}){}].format, parameter[name[delimited], name[array_dim]]]
return[name[collapsed]]
|
keyword[def] identifier[collapse_if_tuple] ( identifier[abi] ):
literal[string]
identifier[typ] = identifier[abi] [ literal[string] ]
keyword[if] keyword[not] identifier[typ] . identifier[startswith] ( literal[string] ):
keyword[return] identifier[typ]
identifier[delimited] = literal[string] . identifier[join] ( identifier[collapse_if_tuple] ( identifier[c] ) keyword[for] identifier[c] keyword[in] identifier[abi] [ literal[string] ])
identifier[array_dim] = identifier[typ] [ literal[int] :]
identifier[collapsed] = literal[string] . identifier[format] ( identifier[delimited] , identifier[array_dim] )
keyword[return] identifier[collapsed]
|
def collapse_if_tuple(abi):
"""Converts a tuple from a dict to a parenthesized list of its types.
>>> from eth_utils.abi import collapse_if_tuple
>>> collapse_if_tuple(
... {
... 'components': [
... {'name': 'anAddress', 'type': 'address'},
... {'name': 'anInt', 'type': 'uint256'},
... {'name': 'someBytes', 'type': 'bytes'},
... ],
... 'type': 'tuple',
... }
... )
'(address,uint256,bytes)'
"""
typ = abi['type']
if not typ.startswith('tuple'):
return typ # depends on [control=['if'], data=[]]
delimited = ','.join((collapse_if_tuple(c) for c in abi['components']))
# Whatever comes after "tuple" is the array dims. The ABI spec states that
# this will have the form "", "[]", or "[k]".
array_dim = typ[5:]
collapsed = '({}){}'.format(delimited, array_dim)
return collapsed
|
def set_exit_handler(self):
"""Set the signal handler to manage_signal (defined in this class)
Only set handlers for signal.SIGTERM, signal.SIGINT, signal.SIGUSR1, signal.SIGUSR2
:return: None
"""
signal.signal(signal.SIGINT, self.manage_signal)
signal.signal(signal.SIGTERM, self.manage_signal)
signal.signal(signal.SIGHUP, self.manage_signal)
signal.signal(signal.SIGQUIT, self.manage_signal)
|
def function[set_exit_handler, parameter[self]]:
constant[Set the signal handler to manage_signal (defined in this class)
Only set handlers for signal.SIGTERM, signal.SIGINT, signal.SIGUSR1, signal.SIGUSR2
:return: None
]
call[name[signal].signal, parameter[name[signal].SIGINT, name[self].manage_signal]]
call[name[signal].signal, parameter[name[signal].SIGTERM, name[self].manage_signal]]
call[name[signal].signal, parameter[name[signal].SIGHUP, name[self].manage_signal]]
call[name[signal].signal, parameter[name[signal].SIGQUIT, name[self].manage_signal]]
|
keyword[def] identifier[set_exit_handler] ( identifier[self] ):
literal[string]
identifier[signal] . identifier[signal] ( identifier[signal] . identifier[SIGINT] , identifier[self] . identifier[manage_signal] )
identifier[signal] . identifier[signal] ( identifier[signal] . identifier[SIGTERM] , identifier[self] . identifier[manage_signal] )
identifier[signal] . identifier[signal] ( identifier[signal] . identifier[SIGHUP] , identifier[self] . identifier[manage_signal] )
identifier[signal] . identifier[signal] ( identifier[signal] . identifier[SIGQUIT] , identifier[self] . identifier[manage_signal] )
|
def set_exit_handler(self):
"""Set the signal handler to manage_signal (defined in this class)
Only set handlers for signal.SIGTERM, signal.SIGINT, signal.SIGUSR1, signal.SIGUSR2
:return: None
"""
signal.signal(signal.SIGINT, self.manage_signal)
signal.signal(signal.SIGTERM, self.manage_signal)
signal.signal(signal.SIGHUP, self.manage_signal)
signal.signal(signal.SIGQUIT, self.manage_signal)
|
def decrease_writes_in_percent(
current_provisioning, percent, min_provisioned_writes, log_tag):
""" Decrease the current_provisioning with percent %
:type current_provisioning: int
:param current_provisioning: The current provisioning
:type percent: int
:param percent: How many percent should we decrease with
:returns: int -- New provisioning value
:type min_provisioned_writes: int
:param min_provisioned_writes: Configured min provisioned writes
:type log_tag: str
:param log_tag: Prefix for the log
"""
percent = float(percent)
decrease = int(float(current_provisioning)*(float(percent)/100))
updated_provisioning = current_provisioning - decrease
min_provisioned_writes = __get_min_writes(
current_provisioning,
min_provisioned_writes,
log_tag)
if updated_provisioning < min_provisioned_writes:
logger.info(
'{0} - Reached provisioned writes min limit: {1:d}'.format(
log_tag,
int(min_provisioned_writes)))
return min_provisioned_writes
logger.debug(
'{0} - Write provisioning will be decreased to {1:d} units'.format(
log_tag,
int(updated_provisioning)))
return updated_provisioning
|
def function[decrease_writes_in_percent, parameter[current_provisioning, percent, min_provisioned_writes, log_tag]]:
constant[ Decrease the current_provisioning with percent %
:type current_provisioning: int
:param current_provisioning: The current provisioning
:type percent: int
:param percent: How many percent should we decrease with
:returns: int -- New provisioning value
:type min_provisioned_writes: int
:param min_provisioned_writes: Configured min provisioned writes
:type log_tag: str
:param log_tag: Prefix for the log
]
variable[percent] assign[=] call[name[float], parameter[name[percent]]]
variable[decrease] assign[=] call[name[int], parameter[binary_operation[call[name[float], parameter[name[current_provisioning]]] * binary_operation[call[name[float], parameter[name[percent]]] / constant[100]]]]]
variable[updated_provisioning] assign[=] binary_operation[name[current_provisioning] - name[decrease]]
variable[min_provisioned_writes] assign[=] call[name[__get_min_writes], parameter[name[current_provisioning], name[min_provisioned_writes], name[log_tag]]]
if compare[name[updated_provisioning] less[<] name[min_provisioned_writes]] begin[:]
call[name[logger].info, parameter[call[constant[{0} - Reached provisioned writes min limit: {1:d}].format, parameter[name[log_tag], call[name[int], parameter[name[min_provisioned_writes]]]]]]]
return[name[min_provisioned_writes]]
call[name[logger].debug, parameter[call[constant[{0} - Write provisioning will be decreased to {1:d} units].format, parameter[name[log_tag], call[name[int], parameter[name[updated_provisioning]]]]]]]
return[name[updated_provisioning]]
|
keyword[def] identifier[decrease_writes_in_percent] (
identifier[current_provisioning] , identifier[percent] , identifier[min_provisioned_writes] , identifier[log_tag] ):
literal[string]
identifier[percent] = identifier[float] ( identifier[percent] )
identifier[decrease] = identifier[int] ( identifier[float] ( identifier[current_provisioning] )*( identifier[float] ( identifier[percent] )/ literal[int] ))
identifier[updated_provisioning] = identifier[current_provisioning] - identifier[decrease]
identifier[min_provisioned_writes] = identifier[__get_min_writes] (
identifier[current_provisioning] ,
identifier[min_provisioned_writes] ,
identifier[log_tag] )
keyword[if] identifier[updated_provisioning] < identifier[min_provisioned_writes] :
identifier[logger] . identifier[info] (
literal[string] . identifier[format] (
identifier[log_tag] ,
identifier[int] ( identifier[min_provisioned_writes] )))
keyword[return] identifier[min_provisioned_writes]
identifier[logger] . identifier[debug] (
literal[string] . identifier[format] (
identifier[log_tag] ,
identifier[int] ( identifier[updated_provisioning] )))
keyword[return] identifier[updated_provisioning]
|
def decrease_writes_in_percent(current_provisioning, percent, min_provisioned_writes, log_tag):
""" Decrease the current_provisioning with percent %
:type current_provisioning: int
:param current_provisioning: The current provisioning
:type percent: int
:param percent: How many percent should we decrease with
:returns: int -- New provisioning value
:type min_provisioned_writes: int
:param min_provisioned_writes: Configured min provisioned writes
:type log_tag: str
:param log_tag: Prefix for the log
"""
percent = float(percent)
decrease = int(float(current_provisioning) * (float(percent) / 100))
updated_provisioning = current_provisioning - decrease
min_provisioned_writes = __get_min_writes(current_provisioning, min_provisioned_writes, log_tag)
if updated_provisioning < min_provisioned_writes:
logger.info('{0} - Reached provisioned writes min limit: {1:d}'.format(log_tag, int(min_provisioned_writes)))
return min_provisioned_writes # depends on [control=['if'], data=['min_provisioned_writes']]
logger.debug('{0} - Write provisioning will be decreased to {1:d} units'.format(log_tag, int(updated_provisioning)))
return updated_provisioning
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.