code
stringlengths 75
104k
| code_sememe
stringlengths 47
309k
| token_type
stringlengths 215
214k
| code_dependency
stringlengths 75
155k
|
|---|---|---|---|
def cut(self, buffer):
    """
    Turn text object into `ClipboardData` instance.

    :param buffer: buffer whose document the text object operates on.
    :return: ``(new_document, clipboard_data)`` tuple as produced by
        `Document.cut_selection`.
    """
    # `operator_range` returns offsets relative to the current cursor
    # position; convert them to absolute positions in the buffer text.
    from_, to = self.operator_range(buffer.document)
    from_ += buffer.cursor_position
    to += buffer.cursor_position
    to -= 1 # SelectionState does not include the end position, `operator_range` does.
    # Build a temporary document whose selection spans the text object,
    # then let Document do the actual cutting.
    document = Document(buffer.text, to, SelectionState(
        original_cursor_position=from_, type=self.selection_type))
    new_document, clipboard_data = document.cut_selection()
    return new_document, clipboard_data
|
def function[cut, parameter[self, buffer]]:
constant[
Turn text object into `ClipboardData` instance.
]
<ast.Tuple object at 0x7da18bcca6b0> assign[=] call[name[self].operator_range, parameter[name[buffer].document]]
<ast.AugAssign object at 0x7da18bcca080>
<ast.AugAssign object at 0x7da18bcc9ed0>
<ast.AugAssign object at 0x7da18bcc9690>
variable[document] assign[=] call[name[Document], parameter[name[buffer].text, name[to], call[name[SelectionState], parameter[]]]]
<ast.Tuple object at 0x7da18bccba90> assign[=] call[name[document].cut_selection, parameter[]]
return[tuple[[<ast.Name object at 0x7da18bccacb0>, <ast.Name object at 0x7da18bcc8850>]]]
|
keyword[def] identifier[cut] ( identifier[self] , identifier[buffer] ):
literal[string]
identifier[from_] , identifier[to] = identifier[self] . identifier[operator_range] ( identifier[buffer] . identifier[document] )
identifier[from_] += identifier[buffer] . identifier[cursor_position]
identifier[to] += identifier[buffer] . identifier[cursor_position]
identifier[to] -= literal[int]
identifier[document] = identifier[Document] ( identifier[buffer] . identifier[text] , identifier[to] , identifier[SelectionState] (
identifier[original_cursor_position] = identifier[from_] , identifier[type] = identifier[self] . identifier[selection_type] ))
identifier[new_document] , identifier[clipboard_data] = identifier[document] . identifier[cut_selection] ()
keyword[return] identifier[new_document] , identifier[clipboard_data]
|
def cut(self, buffer):
"""
Turn text object into `ClipboardData` instance.
"""
(from_, to) = self.operator_range(buffer.document)
from_ += buffer.cursor_position
to += buffer.cursor_position
to -= 1 # SelectionState does not include the end position, `operator_range` does.
document = Document(buffer.text, to, SelectionState(original_cursor_position=from_, type=self.selection_type))
(new_document, clipboard_data) = document.cut_selection()
return (new_document, clipboard_data)
|
def humanize_bytes(size):
    """
    Render a byte count in human-readable form with a size suffix
    (KB, MB, GB, TB), e.g. ``1536 -> "1.500KB"``.

    :param size: non-negative integer byte count; ``0`` yields ``"0"``
        and ``None`` yields ``""``
    :return: string representation of the size, in human-readable form
    """
    if size == 0:
        return "0"
    if size is None:
        return ""
    assert size >= 0, "`size` cannot be negative, got %d" % size
    suffixes = "TGMK"
    maxl = len(suffixes)
    for idx in range(maxl + 1):
        shift = 10 * (maxl - idx)
        scaled = size >> shift
        if scaled == 0:
            # Too small for this prefix; try the next smaller one.
            continue
        # Choose fractional digits so roughly four significant characters
        # are shown: nd digits fit when size < 2**(shift + 12 - 3*nd).
        ndigits = next(
            (nd for nd in (3, 2, 1) if size >> (shift + 12 - nd * 3) == 0), 0)
        if ndigits == 0 or size == scaled << shift:
            # Exact multiple of the prefix (or no room for decimals).
            rounded_val = str(scaled)
        else:
            rounded_val = "%.*f" % (ndigits, size / (1 << shift))
        suffix = suffixes[idx] if idx < maxl else ""
        return "%s%sB" % (rounded_val, suffix)
|
def function[humanize_bytes, parameter[size]]:
constant[
Convert given number of bytes into a human readable representation, i.e. add
prefix such as KB, MB, GB, etc. The `size` argument must be a non-negative
integer.
:param size: integer representing byte size of something
:return: string representation of the size, in human-readable form
]
if compare[name[size] equal[==] constant[0]] begin[:]
return[constant[0]]
if compare[name[size] is constant[None]] begin[:]
return[constant[]]
assert[compare[name[size] greater_or_equal[>=] constant[0]]]
variable[suffixes] assign[=] constant[TGMK]
variable[maxl] assign[=] call[name[len], parameter[name[suffixes]]]
for taget[name[i]] in starred[call[name[range], parameter[binary_operation[name[maxl] + constant[1]]]]] begin[:]
variable[shift] assign[=] binary_operation[binary_operation[name[maxl] - name[i]] * constant[10]]
if compare[binary_operation[name[size] <ast.RShift object at 0x7da2590d6a40> name[shift]] equal[==] constant[0]] begin[:]
continue
variable[ndigits] assign[=] constant[0]
for taget[name[nd]] in starred[list[[<ast.Constant object at 0x7da1b1bc99f0>, <ast.Constant object at 0x7da1b1bee1a0>, <ast.Constant object at 0x7da1b1bee800>]]] begin[:]
if compare[binary_operation[name[size] <ast.RShift object at 0x7da2590d6a40> binary_operation[binary_operation[name[shift] + constant[12]] - binary_operation[name[nd] * constant[3]]]] equal[==] constant[0]] begin[:]
variable[ndigits] assign[=] name[nd]
break
if <ast.BoolOp object at 0x7da1b1bed8a0> begin[:]
variable[rounded_val] assign[=] call[name[str], parameter[binary_operation[name[size] <ast.RShift object at 0x7da2590d6a40> name[shift]]]]
return[binary_operation[constant[%s%sB] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1b1bab250>, <ast.IfExp object at 0x7da1b1bab3a0>]]]]
|
keyword[def] identifier[humanize_bytes] ( identifier[size] ):
literal[string]
keyword[if] identifier[size] == literal[int] : keyword[return] literal[string]
keyword[if] identifier[size] keyword[is] keyword[None] : keyword[return] literal[string]
keyword[assert] identifier[size] >= literal[int] , literal[string] % identifier[size]
identifier[suffixes] = literal[string]
identifier[maxl] = identifier[len] ( identifier[suffixes] )
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[maxl] + literal[int] ):
identifier[shift] =( identifier[maxl] - identifier[i] )* literal[int]
keyword[if] identifier[size] >> identifier[shift] == literal[int] : keyword[continue]
identifier[ndigits] = literal[int]
keyword[for] identifier[nd] keyword[in] [ literal[int] , literal[int] , literal[int] ]:
keyword[if] identifier[size] >>( identifier[shift] + literal[int] - identifier[nd] * literal[int] )== literal[int] :
identifier[ndigits] = identifier[nd]
keyword[break]
keyword[if] identifier[ndigits] == literal[int] keyword[or] identifier[size] ==( identifier[size] >> identifier[shift] )<< identifier[shift] :
identifier[rounded_val] = identifier[str] ( identifier[size] >> identifier[shift] )
keyword[else] :
identifier[rounded_val] = literal[string] %( identifier[ndigits] , identifier[size] /( literal[int] << identifier[shift] ))
keyword[return] literal[string] %( identifier[rounded_val] , identifier[suffixes] [ identifier[i] ] keyword[if] identifier[i] < identifier[maxl] keyword[else] literal[string] )
|
def humanize_bytes(size):
"""
Convert given number of bytes into a human readable representation, i.e. add
prefix such as KB, MB, GB, etc. The `size` argument must be a non-negative
integer.
:param size: integer representing byte size of something
:return: string representation of the size, in human-readable form
"""
if size == 0:
return '0' # depends on [control=['if'], data=[]]
if size is None:
return '' # depends on [control=['if'], data=[]]
assert size >= 0, '`size` cannot be negative, got %d' % size
suffixes = 'TGMK'
maxl = len(suffixes)
for i in range(maxl + 1):
shift = (maxl - i) * 10
if size >> shift == 0:
continue # depends on [control=['if'], data=[]]
ndigits = 0
for nd in [3, 2, 1]:
if size >> shift + 12 - nd * 3 == 0:
ndigits = nd
break # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['nd']]
if ndigits == 0 or size == size >> shift << shift:
rounded_val = str(size >> shift) # depends on [control=['if'], data=[]]
else:
rounded_val = '%.*f' % (ndigits, size / (1 << shift))
return '%s%sB' % (rounded_val, suffixes[i] if i < maxl else '') # depends on [control=['for'], data=['i']]
|
def fix_tour(self, tour):
    """
    Test each scaffold if dropping does not decrease LMS.

    :param tour: list of ``(scaffold, orientation)`` pairs
    :return: filtered list of ``(scaffold, orientation)`` pairs keeping
        only scaffolds whose presence strictly improves the LMS score on
        at least one linkage group
    """
    scaffolds, oos = zip(*tour)
    keep = set()
    for mlg in self.linkage_groups:
        lg = mlg.lg
        for s, o in tour:
            # Position of this scaffold in the tour.
            # NOTE(review): assumes scaffold names are unique within
            # `tour` (index() returns the first occurrence) -- confirm.
            i = scaffolds.index(s)
            # Marker series before (L) and after (U) the scaffold,
            # flattened into plain lists, plus the scaffold's own series M.
            L = [self.get_series(lg, x, xo) for x, xo in tour[:i]]
            U = [self.get_series(lg, x, xo) for x, xo in tour[i + 1:]]
            L, U = list(flatten(L)), list(flatten(U))
            M = self.get_series(lg, s, o)
            # LMS score with and without the scaffold; including a series
            # must never lower the score.
            score_with = lms(L + M + U)[0]
            score_without = lms(L + U)[0]
            assert score_with >= score_without
            if score_with > score_without:
                keep.add(s)
    dropped = len(tour) - len(keep)
    logging.debug("Dropped {0} minor scaffolds".format(dropped))
    # Preserve original tour order for the surviving scaffolds.
    return [(s, o) for (s, o) in tour if s in keep]
|
def function[fix_tour, parameter[self, tour]]:
constant[
Test each scaffold if dropping does not decrease LMS.
]
<ast.Tuple object at 0x7da204622590> assign[=] call[name[zip], parameter[<ast.Starred object at 0x7da2046231f0>]]
variable[keep] assign[=] call[name[set], parameter[]]
for taget[name[mlg]] in starred[name[self].linkage_groups] begin[:]
variable[lg] assign[=] name[mlg].lg
for taget[tuple[[<ast.Name object at 0x7da204621270>, <ast.Name object at 0x7da204622b60>]]] in starred[name[tour]] begin[:]
variable[i] assign[=] call[name[scaffolds].index, parameter[name[s]]]
variable[L] assign[=] <ast.ListComp object at 0x7da2046229b0>
variable[U] assign[=] <ast.ListComp object at 0x7da204623160>
<ast.Tuple object at 0x7da2046205b0> assign[=] tuple[[<ast.Call object at 0x7da204622650>, <ast.Call object at 0x7da204622ce0>]]
variable[M] assign[=] call[name[self].get_series, parameter[name[lg], name[s], name[o]]]
variable[score_with] assign[=] call[call[name[lms], parameter[binary_operation[binary_operation[name[L] + name[M]] + name[U]]]]][constant[0]]
variable[score_without] assign[=] call[call[name[lms], parameter[binary_operation[name[L] + name[U]]]]][constant[0]]
assert[compare[name[score_with] greater_or_equal[>=] name[score_without]]]
if compare[name[score_with] greater[>] name[score_without]] begin[:]
call[name[keep].add, parameter[name[s]]]
variable[dropped] assign[=] binary_operation[call[name[len], parameter[name[tour]]] - call[name[len], parameter[name[keep]]]]
call[name[logging].debug, parameter[call[constant[Dropped {0} minor scaffolds].format, parameter[name[dropped]]]]]
return[<ast.ListComp object at 0x7da204622f80>]
|
keyword[def] identifier[fix_tour] ( identifier[self] , identifier[tour] ):
literal[string]
identifier[scaffolds] , identifier[oos] = identifier[zip] (* identifier[tour] )
identifier[keep] = identifier[set] ()
keyword[for] identifier[mlg] keyword[in] identifier[self] . identifier[linkage_groups] :
identifier[lg] = identifier[mlg] . identifier[lg]
keyword[for] identifier[s] , identifier[o] keyword[in] identifier[tour] :
identifier[i] = identifier[scaffolds] . identifier[index] ( identifier[s] )
identifier[L] =[ identifier[self] . identifier[get_series] ( identifier[lg] , identifier[x] , identifier[xo] ) keyword[for] identifier[x] , identifier[xo] keyword[in] identifier[tour] [: identifier[i] ]]
identifier[U] =[ identifier[self] . identifier[get_series] ( identifier[lg] , identifier[x] , identifier[xo] ) keyword[for] identifier[x] , identifier[xo] keyword[in] identifier[tour] [ identifier[i] + literal[int] :]]
identifier[L] , identifier[U] = identifier[list] ( identifier[flatten] ( identifier[L] )), identifier[list] ( identifier[flatten] ( identifier[U] ))
identifier[M] = identifier[self] . identifier[get_series] ( identifier[lg] , identifier[s] , identifier[o] )
identifier[score_with] = identifier[lms] ( identifier[L] + identifier[M] + identifier[U] )[ literal[int] ]
identifier[score_without] = identifier[lms] ( identifier[L] + identifier[U] )[ literal[int] ]
keyword[assert] identifier[score_with] >= identifier[score_without]
keyword[if] identifier[score_with] > identifier[score_without] :
identifier[keep] . identifier[add] ( identifier[s] )
identifier[dropped] = identifier[len] ( identifier[tour] )- identifier[len] ( identifier[keep] )
identifier[logging] . identifier[debug] ( literal[string] . identifier[format] ( identifier[dropped] ))
keyword[return] [( identifier[s] , identifier[o] ) keyword[for] ( identifier[s] , identifier[o] ) keyword[in] identifier[tour] keyword[if] identifier[s] keyword[in] identifier[keep] ]
|
def fix_tour(self, tour):
"""
Test each scaffold if dropping does not decrease LMS.
"""
(scaffolds, oos) = zip(*tour)
keep = set()
for mlg in self.linkage_groups:
lg = mlg.lg
for (s, o) in tour:
i = scaffolds.index(s)
L = [self.get_series(lg, x, xo) for (x, xo) in tour[:i]]
U = [self.get_series(lg, x, xo) for (x, xo) in tour[i + 1:]]
(L, U) = (list(flatten(L)), list(flatten(U)))
M = self.get_series(lg, s, o)
score_with = lms(L + M + U)[0]
score_without = lms(L + U)[0]
assert score_with >= score_without
if score_with > score_without:
keep.add(s) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]] # depends on [control=['for'], data=['mlg']]
dropped = len(tour) - len(keep)
logging.debug('Dropped {0} minor scaffolds'.format(dropped))
return [(s, o) for (s, o) in tour if s in keep]
|
def read_graph(filename, directed=False, weighted=False, default_weight=None):
    """Read a graph from a text file
    :param filename: plain text file. All numbers are separated by space.
        Starts with a line containing n (#vertices) and m (#edges).
        Then m lines follow, for each edge.
        Vertices are numbered from 0 to n-1.
        Line for unweighted edge u,v contains two integers u, v.
        Line for weighted edge u,v contains three integers u, v, w[u,v].
    :param directed: true for a directed graph, false for undirected
    :param weighted: true for an edge weighted graph
    :returns: graph in listlist format, possibly followed by weight matrix
    :complexity: O(n + m) for unweighted graph,
                 :math:`O(n^2)` for weighted graph
    """
    with open(filename, 'r') as f:
        while True:
            line = f.readline()          # ignore leading comments
            if line[0] != '#':
                # First non-comment line holds the n and m counts.
                break
        nb_nodes, nb_edges = tuple(map(int, line.split()))
        # Adjacency lists, one (initially empty) list per vertex.
        graph = [[] for u in range(nb_nodes)]
        if weighted:
            # Full n x n weight matrix, default_weight for absent edges,
            # 0 on the diagonal.
            weight = [[default_weight] * nb_nodes for v in range(nb_nodes)]
            for v in range(nb_nodes):
                weight[v][v] = 0
            for _ in range(nb_edges):
                # readtab: project helper, presumably reads one line and
                # converts each field with int -- confirm its contract.
                u, v, w = readtab(f, int)
                graph[u].append(v)
                weight[u][v] = w
                if not directed:
                    # Mirror the edge for undirected graphs.
                    graph[v].append(u)
                    weight[v][u] = w
            return graph, weight
        else:
            for _ in range(nb_edges):
                # If the file contains weights, they are ignored.
                u, v = readtab(f, int)[:2]
                graph[u].append(v)
                if not directed:
                    graph[v].append(u)
            return graph
|
def function[read_graph, parameter[filename, directed, weighted, default_weight]]:
constant[Read a graph from a text file
:param filename: plain text file. All numbers are separated by space.
Starts with a line containing n (#vertices) and m (#edges).
Then m lines follow, for each edge.
Vertices are numbered from 0 to n-1.
Line for unweighted edge u,v contains two integers u, v.
Line for weighted edge u,v contains three integers u, v, w[u,v].
:param directed: true for a directed graph, false for undirected
:param weighted: true for an edge weighted graph
:returns: graph in listlist format, possibly followed by weight matrix
:complexity: O(n + m) for unweighted graph,
:math:`O(n^2)` for weighted graph
]
with call[name[open], parameter[name[filename], constant[r]]] begin[:]
while constant[True] begin[:]
variable[line] assign[=] call[name[f].readline, parameter[]]
if compare[call[name[line]][constant[0]] not_equal[!=] constant[#]] begin[:]
break
<ast.Tuple object at 0x7da1b067b4c0> assign[=] call[name[tuple], parameter[call[name[map], parameter[name[int], call[name[line].split, parameter[]]]]]]
variable[graph] assign[=] <ast.ListComp object at 0x7da1b067abc0>
if name[weighted] begin[:]
variable[weight] assign[=] <ast.ListComp object at 0x7da1b0679990>
for taget[name[v]] in starred[call[name[range], parameter[name[nb_nodes]]]] begin[:]
call[call[name[weight]][name[v]]][name[v]] assign[=] constant[0]
for taget[name[_]] in starred[call[name[range], parameter[name[nb_edges]]]] begin[:]
<ast.Tuple object at 0x7da1b067b070> assign[=] call[name[readtab], parameter[name[f], name[int]]]
call[call[name[graph]][name[u]].append, parameter[name[v]]]
call[call[name[weight]][name[u]]][name[v]] assign[=] name[w]
if <ast.UnaryOp object at 0x7da1b06781f0> begin[:]
call[call[name[graph]][name[v]].append, parameter[name[u]]]
call[call[name[weight]][name[v]]][name[u]] assign[=] name[w]
return[tuple[[<ast.Name object at 0x7da1b0679330>, <ast.Name object at 0x7da1b067be50>]]]
|
keyword[def] identifier[read_graph] ( identifier[filename] , identifier[directed] = keyword[False] , identifier[weighted] = keyword[False] , identifier[default_weight] = keyword[None] ):
literal[string]
keyword[with] identifier[open] ( identifier[filename] , literal[string] ) keyword[as] identifier[f] :
keyword[while] keyword[True] :
identifier[line] = identifier[f] . identifier[readline] ()
keyword[if] identifier[line] [ literal[int] ]!= literal[string] :
keyword[break]
identifier[nb_nodes] , identifier[nb_edges] = identifier[tuple] ( identifier[map] ( identifier[int] , identifier[line] . identifier[split] ()))
identifier[graph] =[[] keyword[for] identifier[u] keyword[in] identifier[range] ( identifier[nb_nodes] )]
keyword[if] identifier[weighted] :
identifier[weight] =[[ identifier[default_weight] ]* identifier[nb_nodes] keyword[for] identifier[v] keyword[in] identifier[range] ( identifier[nb_nodes] )]
keyword[for] identifier[v] keyword[in] identifier[range] ( identifier[nb_nodes] ):
identifier[weight] [ identifier[v] ][ identifier[v] ]= literal[int]
keyword[for] identifier[_] keyword[in] identifier[range] ( identifier[nb_edges] ):
identifier[u] , identifier[v] , identifier[w] = identifier[readtab] ( identifier[f] , identifier[int] )
identifier[graph] [ identifier[u] ]. identifier[append] ( identifier[v] )
identifier[weight] [ identifier[u] ][ identifier[v] ]= identifier[w]
keyword[if] keyword[not] identifier[directed] :
identifier[graph] [ identifier[v] ]. identifier[append] ( identifier[u] )
identifier[weight] [ identifier[v] ][ identifier[u] ]= identifier[w]
keyword[return] identifier[graph] , identifier[weight]
keyword[else] :
keyword[for] identifier[_] keyword[in] identifier[range] ( identifier[nb_edges] ):
identifier[u] , identifier[v] = identifier[readtab] ( identifier[f] , identifier[int] )[: literal[int] ]
identifier[graph] [ identifier[u] ]. identifier[append] ( identifier[v] )
keyword[if] keyword[not] identifier[directed] :
identifier[graph] [ identifier[v] ]. identifier[append] ( identifier[u] )
keyword[return] identifier[graph]
|
def read_graph(filename, directed=False, weighted=False, default_weight=None):
"""Read a graph from a text file
:param filename: plain text file. All numbers are separated by space.
Starts with a line containing n (#vertices) and m (#edges).
Then m lines follow, for each edge.
Vertices are numbered from 0 to n-1.
Line for unweighted edge u,v contains two integers u, v.
Line for weighted edge u,v contains three integers u, v, w[u,v].
:param directed: true for a directed graph, false for undirected
:param weighted: true for an edge weighted graph
:returns: graph in listlist format, possibly followed by weight matrix
:complexity: O(n + m) for unweighted graph,
:math:`O(n^2)` for weighted graph
"""
with open(filename, 'r') as f:
while True:
line = f.readline() # ignore leading comments
if line[0] != '#':
break # depends on [control=['if'], data=[]] # depends on [control=['while'], data=[]]
(nb_nodes, nb_edges) = tuple(map(int, line.split()))
graph = [[] for u in range(nb_nodes)]
if weighted:
weight = [[default_weight] * nb_nodes for v in range(nb_nodes)]
for v in range(nb_nodes):
weight[v][v] = 0 # depends on [control=['for'], data=['v']]
for _ in range(nb_edges):
(u, v, w) = readtab(f, int)
graph[u].append(v)
weight[u][v] = w
if not directed:
graph[v].append(u)
weight[v][u] = w # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]]
return (graph, weight) # depends on [control=['if'], data=[]]
else:
for _ in range(nb_edges):
# si le fichier contient des poids, ils seront ignorés
(u, v) = readtab(f, int)[:2]
graph[u].append(v)
if not directed:
graph[v].append(u) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]]
return graph # depends on [control=['with'], data=['f']]
|
def build_columns(self, X, verbose=False):
    """construct the model matrix columns for the term
    Parameters
    ----------
    X : array-like
        Input dataset with n rows
    verbose : bool
        whether to show warnings
    Returns
    -------
    scipy sparse array with n rows
    """
    # Build the marginal spline basis of the first sub-term, then fold in
    # each remaining sub-term via a row-wise tensor product.
    splines = self._terms[0].build_columns(X, verbose=verbose)
    for term in self._terms[1:]:
        marginal_splines = term.build_columns(X, verbose=verbose)
        splines = tensor_product(splines, marginal_splines)
    if self.by is not None:
        # 'by' variable: scale every basis column by the chosen feature
        # column of X (broadcast across columns).
        splines *= X[:, self.by][:, np.newaxis]
    return sp.sparse.csc_matrix(splines)
|
def function[build_columns, parameter[self, X, verbose]]:
constant[construct the model matrix columns for the term
Parameters
----------
X : array-like
Input dataset with n rows
verbose : bool
whether to show warnings
Returns
-------
scipy sparse array with n rows
]
variable[splines] assign[=] call[call[name[self]._terms][constant[0]].build_columns, parameter[name[X]]]
for taget[name[term]] in starred[call[name[self]._terms][<ast.Slice object at 0x7da18f00ef50>]] begin[:]
variable[marginal_splines] assign[=] call[name[term].build_columns, parameter[name[X]]]
variable[splines] assign[=] call[name[tensor_product], parameter[name[splines], name[marginal_splines]]]
if compare[name[self].by is_not constant[None]] begin[:]
<ast.AugAssign object at 0x7da18f00f6d0>
return[call[name[sp].sparse.csc_matrix, parameter[name[splines]]]]
|
keyword[def] identifier[build_columns] ( identifier[self] , identifier[X] , identifier[verbose] = keyword[False] ):
literal[string]
identifier[splines] = identifier[self] . identifier[_terms] [ literal[int] ]. identifier[build_columns] ( identifier[X] , identifier[verbose] = identifier[verbose] )
keyword[for] identifier[term] keyword[in] identifier[self] . identifier[_terms] [ literal[int] :]:
identifier[marginal_splines] = identifier[term] . identifier[build_columns] ( identifier[X] , identifier[verbose] = identifier[verbose] )
identifier[splines] = identifier[tensor_product] ( identifier[splines] , identifier[marginal_splines] )
keyword[if] identifier[self] . identifier[by] keyword[is] keyword[not] keyword[None] :
identifier[splines] *= identifier[X] [:, identifier[self] . identifier[by] ][:, identifier[np] . identifier[newaxis] ]
keyword[return] identifier[sp] . identifier[sparse] . identifier[csc_matrix] ( identifier[splines] )
|
def build_columns(self, X, verbose=False):
"""construct the model matrix columns for the term
Parameters
----------
X : array-like
Input dataset with n rows
verbose : bool
whether to show warnings
Returns
-------
scipy sparse array with n rows
"""
splines = self._terms[0].build_columns(X, verbose=verbose)
for term in self._terms[1:]:
marginal_splines = term.build_columns(X, verbose=verbose)
splines = tensor_product(splines, marginal_splines) # depends on [control=['for'], data=['term']]
if self.by is not None:
splines *= X[:, self.by][:, np.newaxis] # depends on [control=['if'], data=[]]
return sp.sparse.csc_matrix(splines)
|
def get_present_elements(self, locator, params=None, timeout=None, visible=False, parent=None):
    """
    Get elements present in the DOM.
    If timeout is 0 (zero) return WebElement instance or None, else we wait and retry for timeout and raise
    TimeoutException should the element not be found.
    :param locator: element identifier
    :param params: (optional) locator parameters
    :param timeout: (optional) time to wait for element (default: self._explicit_wait)
    :param visible: (optional) if the element should also be visible (default: False)
    :param parent: internal (see #get_present_children)
    :return: WebElement instance
    """
    # Error message depends on whether we are searching under a parent.
    if parent:
        message = "Children were never present"
    else:
        message = "Elements were never present!"
    # Require visibility only when asked for; plain presence otherwise.
    if visible:
        condition = ec.visibility_of_all_elements_located
    else:
        condition = ec.presence_of_all_elements_located
    return self._get(locator, condition, params, timeout, message, parent)
|
def function[get_present_elements, parameter[self, locator, params, timeout, visible, parent]]:
constant[
Get elements present in the DOM.
If timeout is 0 (zero) return WebElement instance or None, else we wait and retry for timeout and raise
TimeoutException should the element not be found.
:param locator: element identifier
:param params: (optional) locator parameters
:param timeout: (optional) time to wait for element (default: self._explicit_wait)
:param visible: (optional) if the element should also be visible (default: False)
:param parent: internal (see #get_present_children)
:return: WebElement instance
]
variable[error_msg] assign[=] <ast.IfExp object at 0x7da18bccb940>
variable[expected_condition] assign[=] <ast.IfExp object at 0x7da18bcc9750>
return[call[name[self]._get, parameter[name[locator], name[expected_condition], name[params], name[timeout], name[error_msg], name[parent]]]]
|
keyword[def] identifier[get_present_elements] ( identifier[self] , identifier[locator] , identifier[params] = keyword[None] , identifier[timeout] = keyword[None] , identifier[visible] = keyword[False] , identifier[parent] = keyword[None] ):
literal[string]
identifier[error_msg] = literal[string] keyword[if] identifier[parent] keyword[else] literal[string]
identifier[expected_condition] = identifier[ec] . identifier[visibility_of_all_elements_located] keyword[if] identifier[visible] keyword[else] identifier[ec] . identifier[presence_of_all_elements_located]
keyword[return] identifier[self] . identifier[_get] ( identifier[locator] , identifier[expected_condition] , identifier[params] , identifier[timeout] , identifier[error_msg] , identifier[parent] )
|
def get_present_elements(self, locator, params=None, timeout=None, visible=False, parent=None):
"""
Get elements present in the DOM.
If timeout is 0 (zero) return WebElement instance or None, else we wait and retry for timeout and raise
TimeoutException should the element not be found.
:param locator: element identifier
:param params: (optional) locator parameters
:param timeout: (optional) time to wait for element (default: self._explicit_wait)
:param visible: (optional) if the element should also be visible (default: False)
:param parent: internal (see #get_present_children)
:return: WebElement instance
"""
error_msg = 'Children were never present' if parent else 'Elements were never present!'
expected_condition = ec.visibility_of_all_elements_located if visible else ec.presence_of_all_elements_located
return self._get(locator, expected_condition, params, timeout, error_msg, parent)
|
def normalize(self):
    """
    Return a new StatsCounter with the same keys, where each value is the
    original value divided by the sum of all values (the normalization
    constant), so the values sum to 1.
    https://en.wikipedia.org/wiki/Normalization_(statistics)
    """
    denominator = float(sum(self.values()))
    normalized = {key: value / denominator for key, value in self.items()}
    return StatsCounter(normalized)
|
def function[normalize, parameter[self]]:
constant[
Sum the values in a Counter, then create a new Counter
where each new value (while keeping the original key)
is equal to the original value divided by sum of all the
original values (this is sometimes referred to as the
normalization constant).
https://en.wikipedia.org/wiki/Normalization_(statistics)
]
variable[total] assign[=] call[name[sum], parameter[call[name[self].values, parameter[]]]]
variable[stats] assign[=] <ast.DictComp object at 0x7da1b0aa7820>
return[call[name[StatsCounter], parameter[name[stats]]]]
|
keyword[def] identifier[normalize] ( identifier[self] ):
literal[string]
identifier[total] = identifier[sum] ( identifier[self] . identifier[values] ())
identifier[stats] ={ identifier[k] :( identifier[v] / identifier[float] ( identifier[total] )) keyword[for] identifier[k] , identifier[v] keyword[in] identifier[self] . identifier[items] ()}
keyword[return] identifier[StatsCounter] ( identifier[stats] )
|
def normalize(self):
"""
Sum the values in a Counter, then create a new Counter
where each new value (while keeping the original key)
is equal to the original value divided by sum of all the
original values (this is sometimes referred to as the
normalization constant).
https://en.wikipedia.org/wiki/Normalization_(statistics)
"""
total = sum(self.values())
stats = {k: v / float(total) for (k, v) in self.items()}
return StatsCounter(stats)
|
def log_player_buys_road(self, player, location):
    """
    Write a road-purchase entry to the game log.

    :param player: catan.game.Player
    :param location: string, see hexgrid.location()
    """
    message = '{0} buys road, builds at {1}'.format(player.color, location)
    self._logln(message)
|
def function[log_player_buys_road, parameter[self, player, location]]:
constant[
:param player: catan.game.Player
:param location: string, see hexgrid.location()
]
call[name[self]._logln, parameter[call[constant[{0} buys road, builds at {1}].format, parameter[name[player].color, name[location]]]]]
|
keyword[def] identifier[log_player_buys_road] ( identifier[self] , identifier[player] , identifier[location] ):
literal[string]
identifier[self] . identifier[_logln] ( literal[string] . identifier[format] (
identifier[player] . identifier[color] ,
identifier[location]
))
|
def log_player_buys_road(self, player, location):
"""
:param player: catan.game.Player
:param location: string, see hexgrid.location()
"""
self._logln('{0} buys road, builds at {1}'.format(player.color, location))
|
def spawn(self, options, port, background=False, prefix=""):
    "Spawn a daemon instance."
    # Full command line for the gpsd daemon; stays None until an
    # executable is located.
    self.spawncmd = None
    # Look for gpsd in GPSD_HOME env variable
    if os.environ.get('GPSD_HOME'):
        for path in os.environ['GPSD_HOME'].split(':'):
            _spawncmd = "%s/gpsd" % path
            if os.path.isfile(_spawncmd) and os.access(_spawncmd, os.X_OK):
                self.spawncmd = _spawncmd
                break
    # if we could not find it yet try PATH env variable for it
    if not self.spawncmd:
        if not '/usr/sbin' in os.environ['PATH']:
            # NOTE: mutates the process-wide PATH so later lookups (and
            # child processes) also see /usr/sbin.
            os.environ['PATH']=os.environ['PATH'] + ":/usr/sbin"
        for path in os.environ['PATH'].split(':'):
            _spawncmd = "%s/gpsd" % path
            if os.path.isfile(_spawncmd) and os.access(_spawncmd, os.X_OK):
                self.spawncmd = _spawncmd
                break
    if not self.spawncmd:
        raise DaemonError("Cannot execute gpsd: executable not found. Set GPSD_HOME env variable")
    # The -b option to suppress hanging on probe returns is needed to cope
    # with OpenBSD (and possibly other non-Linux systems) that don't support
    # anything we can use to implement the FakeGPS.read() method
    self.spawncmd += " -b -N -S %s -F %s -P %s %s" % (port, self.control_socket, self.pidfile, options)
    if prefix:
        self.spawncmd = prefix + " " + self.spawncmd.strip()
    if background:
        # Trailing '&' makes the shell detach the daemon.
        self.spawncmd += " &"
    # NOTE(review): os.system runs the command through a shell, so
    # `options`/`prefix` are not escaped -- callers must pass trusted
    # strings only.
    status = os.system(self.spawncmd)
    # Fail if the daemon was killed by a signal or exited non-zero.
    if os.WIFSIGNALED(status) or os.WEXITSTATUS(status):
        raise DaemonError("daemon exited with status %d" % status)
|
def function[spawn, parameter[self, options, port, background, prefix]]:
constant[Spawn a daemon instance.]
name[self].spawncmd assign[=] constant[None]
if call[name[os].environ.get, parameter[constant[GPSD_HOME]]] begin[:]
for taget[name[path]] in starred[call[call[name[os].environ][constant[GPSD_HOME]].split, parameter[constant[:]]]] begin[:]
variable[_spawncmd] assign[=] binary_operation[constant[%s/gpsd] <ast.Mod object at 0x7da2590d6920> name[path]]
if <ast.BoolOp object at 0x7da20c6c5b70> begin[:]
name[self].spawncmd assign[=] name[_spawncmd]
break
if <ast.UnaryOp object at 0x7da1b0baa8f0> begin[:]
if <ast.UnaryOp object at 0x7da1b0bab100> begin[:]
call[name[os].environ][constant[PATH]] assign[=] binary_operation[call[name[os].environ][constant[PATH]] + constant[:/usr/sbin]]
for taget[name[path]] in starred[call[call[name[os].environ][constant[PATH]].split, parameter[constant[:]]]] begin[:]
variable[_spawncmd] assign[=] binary_operation[constant[%s/gpsd] <ast.Mod object at 0x7da2590d6920> name[path]]
if <ast.BoolOp object at 0x7da1b0ba8b50> begin[:]
name[self].spawncmd assign[=] name[_spawncmd]
break
if <ast.UnaryOp object at 0x7da1b0ba9300> begin[:]
<ast.Raise object at 0x7da1b0baac50>
<ast.AugAssign object at 0x7da1b0ba98d0>
if name[prefix] begin[:]
name[self].spawncmd assign[=] binary_operation[binary_operation[name[prefix] + constant[ ]] + call[name[self].spawncmd.strip, parameter[]]]
if name[background] begin[:]
<ast.AugAssign object at 0x7da1b0ba88b0>
variable[status] assign[=] call[name[os].system, parameter[name[self].spawncmd]]
if <ast.BoolOp object at 0x7da1b0ba9de0> begin[:]
<ast.Raise object at 0x7da1b0babdc0>
|
keyword[def] identifier[spawn] ( identifier[self] , identifier[options] , identifier[port] , identifier[background] = keyword[False] , identifier[prefix] = literal[string] ):
literal[string]
identifier[self] . identifier[spawncmd] = keyword[None]
keyword[if] identifier[os] . identifier[environ] . identifier[get] ( literal[string] ):
keyword[for] identifier[path] keyword[in] identifier[os] . identifier[environ] [ literal[string] ]. identifier[split] ( literal[string] ):
identifier[_spawncmd] = literal[string] % identifier[path]
keyword[if] identifier[os] . identifier[path] . identifier[isfile] ( identifier[_spawncmd] ) keyword[and] identifier[os] . identifier[access] ( identifier[_spawncmd] , identifier[os] . identifier[X_OK] ):
identifier[self] . identifier[spawncmd] = identifier[_spawncmd]
keyword[break]
keyword[if] keyword[not] identifier[self] . identifier[spawncmd] :
keyword[if] keyword[not] literal[string] keyword[in] identifier[os] . identifier[environ] [ literal[string] ]:
identifier[os] . identifier[environ] [ literal[string] ]= identifier[os] . identifier[environ] [ literal[string] ]+ literal[string]
keyword[for] identifier[path] keyword[in] identifier[os] . identifier[environ] [ literal[string] ]. identifier[split] ( literal[string] ):
identifier[_spawncmd] = literal[string] % identifier[path]
keyword[if] identifier[os] . identifier[path] . identifier[isfile] ( identifier[_spawncmd] ) keyword[and] identifier[os] . identifier[access] ( identifier[_spawncmd] , identifier[os] . identifier[X_OK] ):
identifier[self] . identifier[spawncmd] = identifier[_spawncmd]
keyword[break]
keyword[if] keyword[not] identifier[self] . identifier[spawncmd] :
keyword[raise] identifier[DaemonError] ( literal[string] )
identifier[self] . identifier[spawncmd] += literal[string] %( identifier[port] , identifier[self] . identifier[control_socket] , identifier[self] . identifier[pidfile] , identifier[options] )
keyword[if] identifier[prefix] :
identifier[self] . identifier[spawncmd] = identifier[prefix] + literal[string] + identifier[self] . identifier[spawncmd] . identifier[strip] ()
keyword[if] identifier[background] :
identifier[self] . identifier[spawncmd] += literal[string]
identifier[status] = identifier[os] . identifier[system] ( identifier[self] . identifier[spawncmd] )
keyword[if] identifier[os] . identifier[WIFSIGNALED] ( identifier[status] ) keyword[or] identifier[os] . identifier[WEXITSTATUS] ( identifier[status] ):
keyword[raise] identifier[DaemonError] ( literal[string] % identifier[status] )
|
def spawn(self, options, port, background=False, prefix=''):
"""Spawn a daemon instance."""
self.spawncmd = None # Look for gpsd in GPSD_HOME env variable
if os.environ.get('GPSD_HOME'):
for path in os.environ['GPSD_HOME'].split(':'):
_spawncmd = '%s/gpsd' % path
if os.path.isfile(_spawncmd) and os.access(_spawncmd, os.X_OK):
self.spawncmd = _spawncmd
break # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['path']] # depends on [control=['if'], data=[]] # if we could not find it yet try PATH env variable for it
if not self.spawncmd:
if not '/usr/sbin' in os.environ['PATH']:
os.environ['PATH'] = os.environ['PATH'] + ':/usr/sbin' # depends on [control=['if'], data=[]]
for path in os.environ['PATH'].split(':'):
_spawncmd = '%s/gpsd' % path
if os.path.isfile(_spawncmd) and os.access(_spawncmd, os.X_OK):
self.spawncmd = _spawncmd
break # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['path']] # depends on [control=['if'], data=[]]
if not self.spawncmd:
raise DaemonError('Cannot execute gpsd: executable not found. Set GPSD_HOME env variable') # depends on [control=['if'], data=[]]
# The -b option to suppress hanging on probe returns is needed to cope
# with OpenBSD (and possibly other non-Linux systems) that don't support
# anything we can use to implement the FakeGPS.read() method
self.spawncmd += ' -b -N -S %s -F %s -P %s %s' % (port, self.control_socket, self.pidfile, options)
if prefix:
self.spawncmd = prefix + ' ' + self.spawncmd.strip() # depends on [control=['if'], data=[]]
if background:
self.spawncmd += ' &' # depends on [control=['if'], data=[]]
status = os.system(self.spawncmd)
if os.WIFSIGNALED(status) or os.WEXITSTATUS(status):
raise DaemonError('daemon exited with status %d' % status) # depends on [control=['if'], data=[]]
|
def map_plugin_coro(self, coro_name, *args, **kwargs):
"""
Call a plugin declared by plugin by its name
:param coro_name:
:param args:
:param kwargs:
:return:
"""
return (yield from self.map(self._call_coro, coro_name, *args, **kwargs))
|
def function[map_plugin_coro, parameter[self, coro_name]]:
constant[
Call a plugin declared by plugin by its name
:param coro_name:
:param args:
:param kwargs:
:return:
]
return[<ast.YieldFrom object at 0x7da18fe93b50>]
|
keyword[def] identifier[map_plugin_coro] ( identifier[self] , identifier[coro_name] ,* identifier[args] ,** identifier[kwargs] ):
literal[string]
keyword[return] ( keyword[yield] keyword[from] identifier[self] . identifier[map] ( identifier[self] . identifier[_call_coro] , identifier[coro_name] ,* identifier[args] ,** identifier[kwargs] ))
|
def map_plugin_coro(self, coro_name, *args, **kwargs):
"""
Call a plugin declared by plugin by its name
:param coro_name:
:param args:
:param kwargs:
:return:
"""
return (yield from self.map(self._call_coro, coro_name, *args, **kwargs))
|
def get_user_paginator(
cls,
instance,
page=1,
item_count=None,
items_per_page=50,
user_ids=None,
GET_params=None,
):
"""
returns paginator over users belonging to the group
:param instance:
:param page:
:param item_count:
:param items_per_page:
:param user_ids:
:param GET_params:
:return:
"""
if not GET_params:
GET_params = {}
GET_params.pop("page", None)
query = instance.users_dynamic
if user_ids:
query = query.filter(cls.models_proxy.UserGroup.user_id.in_(user_ids))
return SqlalchemyOrmPage(
query,
page=page,
item_count=item_count,
items_per_page=items_per_page,
**GET_params
)
|
def function[get_user_paginator, parameter[cls, instance, page, item_count, items_per_page, user_ids, GET_params]]:
constant[
returns paginator over users belonging to the group
:param instance:
:param page:
:param item_count:
:param items_per_page:
:param user_ids:
:param GET_params:
:return:
]
if <ast.UnaryOp object at 0x7da1b0fc6350> begin[:]
variable[GET_params] assign[=] dictionary[[], []]
call[name[GET_params].pop, parameter[constant[page], constant[None]]]
variable[query] assign[=] name[instance].users_dynamic
if name[user_ids] begin[:]
variable[query] assign[=] call[name[query].filter, parameter[call[name[cls].models_proxy.UserGroup.user_id.in_, parameter[name[user_ids]]]]]
return[call[name[SqlalchemyOrmPage], parameter[name[query]]]]
|
keyword[def] identifier[get_user_paginator] (
identifier[cls] ,
identifier[instance] ,
identifier[page] = literal[int] ,
identifier[item_count] = keyword[None] ,
identifier[items_per_page] = literal[int] ,
identifier[user_ids] = keyword[None] ,
identifier[GET_params] = keyword[None] ,
):
literal[string]
keyword[if] keyword[not] identifier[GET_params] :
identifier[GET_params] ={}
identifier[GET_params] . identifier[pop] ( literal[string] , keyword[None] )
identifier[query] = identifier[instance] . identifier[users_dynamic]
keyword[if] identifier[user_ids] :
identifier[query] = identifier[query] . identifier[filter] ( identifier[cls] . identifier[models_proxy] . identifier[UserGroup] . identifier[user_id] . identifier[in_] ( identifier[user_ids] ))
keyword[return] identifier[SqlalchemyOrmPage] (
identifier[query] ,
identifier[page] = identifier[page] ,
identifier[item_count] = identifier[item_count] ,
identifier[items_per_page] = identifier[items_per_page] ,
** identifier[GET_params]
)
|
def get_user_paginator(cls, instance, page=1, item_count=None, items_per_page=50, user_ids=None, GET_params=None):
"""
returns paginator over users belonging to the group
:param instance:
:param page:
:param item_count:
:param items_per_page:
:param user_ids:
:param GET_params:
:return:
"""
if not GET_params:
GET_params = {} # depends on [control=['if'], data=[]]
GET_params.pop('page', None)
query = instance.users_dynamic
if user_ids:
query = query.filter(cls.models_proxy.UserGroup.user_id.in_(user_ids)) # depends on [control=['if'], data=[]]
return SqlalchemyOrmPage(query, page=page, item_count=item_count, items_per_page=items_per_page, **GET_params)
|
def iter_series(self, workbook, row, col):
"""
Yield series dictionaries with values resolved to the final excel formulas.
"""
for series in self.__series:
series = dict(series)
series["values"] = series["values"].get_formula(workbook, row, col)
if "categories" in series:
series["categories"] = series["categories"].get_formula(workbook, row, col)
yield series
|
def function[iter_series, parameter[self, workbook, row, col]]:
constant[
Yield series dictionaries with values resolved to the final excel formulas.
]
for taget[name[series]] in starred[name[self].__series] begin[:]
variable[series] assign[=] call[name[dict], parameter[name[series]]]
call[name[series]][constant[values]] assign[=] call[call[name[series]][constant[values]].get_formula, parameter[name[workbook], name[row], name[col]]]
if compare[constant[categories] in name[series]] begin[:]
call[name[series]][constant[categories]] assign[=] call[call[name[series]][constant[categories]].get_formula, parameter[name[workbook], name[row], name[col]]]
<ast.Yield object at 0x7da1b2391210>
|
keyword[def] identifier[iter_series] ( identifier[self] , identifier[workbook] , identifier[row] , identifier[col] ):
literal[string]
keyword[for] identifier[series] keyword[in] identifier[self] . identifier[__series] :
identifier[series] = identifier[dict] ( identifier[series] )
identifier[series] [ literal[string] ]= identifier[series] [ literal[string] ]. identifier[get_formula] ( identifier[workbook] , identifier[row] , identifier[col] )
keyword[if] literal[string] keyword[in] identifier[series] :
identifier[series] [ literal[string] ]= identifier[series] [ literal[string] ]. identifier[get_formula] ( identifier[workbook] , identifier[row] , identifier[col] )
keyword[yield] identifier[series]
|
def iter_series(self, workbook, row, col):
"""
Yield series dictionaries with values resolved to the final excel formulas.
"""
for series in self.__series:
series = dict(series)
series['values'] = series['values'].get_formula(workbook, row, col)
if 'categories' in series:
series['categories'] = series['categories'].get_formula(workbook, row, col) # depends on [control=['if'], data=['series']]
yield series # depends on [control=['for'], data=['series']]
|
def get_asset_address(self, asset: str) -> bytes:
"""
This interface is used to get the smart contract address of ONT otr ONG.
:param asset: a string which is used to indicate which asset's contract address we want to get.
:return: the contract address of asset in the form of bytearray.
"""
if asset.upper() == 'ONT':
return self.__ont_contract
elif asset.upper() == 'ONG':
return self.__ong_contract
else:
raise SDKException(ErrorCode.other_error('asset is not equal to ONT or ONG.'))
|
def function[get_asset_address, parameter[self, asset]]:
constant[
This interface is used to get the smart contract address of ONT otr ONG.
:param asset: a string which is used to indicate which asset's contract address we want to get.
:return: the contract address of asset in the form of bytearray.
]
if compare[call[name[asset].upper, parameter[]] equal[==] constant[ONT]] begin[:]
return[name[self].__ont_contract]
|
keyword[def] identifier[get_asset_address] ( identifier[self] , identifier[asset] : identifier[str] )-> identifier[bytes] :
literal[string]
keyword[if] identifier[asset] . identifier[upper] ()== literal[string] :
keyword[return] identifier[self] . identifier[__ont_contract]
keyword[elif] identifier[asset] . identifier[upper] ()== literal[string] :
keyword[return] identifier[self] . identifier[__ong_contract]
keyword[else] :
keyword[raise] identifier[SDKException] ( identifier[ErrorCode] . identifier[other_error] ( literal[string] ))
|
def get_asset_address(self, asset: str) -> bytes:
"""
This interface is used to get the smart contract address of ONT otr ONG.
:param asset: a string which is used to indicate which asset's contract address we want to get.
:return: the contract address of asset in the form of bytearray.
"""
if asset.upper() == 'ONT':
return self.__ont_contract # depends on [control=['if'], data=[]]
elif asset.upper() == 'ONG':
return self.__ong_contract # depends on [control=['if'], data=[]]
else:
raise SDKException(ErrorCode.other_error('asset is not equal to ONT or ONG.'))
|
def save(self, sync_only=False):
"""
:param sync_only:
:type: bool
"""
entity = datastore.Entity(key=self._key)
entity["last_accessed"] = self.last_accessed
# todo: restore sync only
entity["data"] = self._data
if self.expires:
entity["expires"] = self.expires
self._client.put(entity)
|
def function[save, parameter[self, sync_only]]:
constant[
:param sync_only:
:type: bool
]
variable[entity] assign[=] call[name[datastore].Entity, parameter[]]
call[name[entity]][constant[last_accessed]] assign[=] name[self].last_accessed
call[name[entity]][constant[data]] assign[=] name[self]._data
if name[self].expires begin[:]
call[name[entity]][constant[expires]] assign[=] name[self].expires
call[name[self]._client.put, parameter[name[entity]]]
|
keyword[def] identifier[save] ( identifier[self] , identifier[sync_only] = keyword[False] ):
literal[string]
identifier[entity] = identifier[datastore] . identifier[Entity] ( identifier[key] = identifier[self] . identifier[_key] )
identifier[entity] [ literal[string] ]= identifier[self] . identifier[last_accessed]
identifier[entity] [ literal[string] ]= identifier[self] . identifier[_data]
keyword[if] identifier[self] . identifier[expires] :
identifier[entity] [ literal[string] ]= identifier[self] . identifier[expires]
identifier[self] . identifier[_client] . identifier[put] ( identifier[entity] )
|
def save(self, sync_only=False):
"""
:param sync_only:
:type: bool
"""
entity = datastore.Entity(key=self._key)
entity['last_accessed'] = self.last_accessed
# todo: restore sync only
entity['data'] = self._data
if self.expires:
entity['expires'] = self.expires # depends on [control=['if'], data=[]]
self._client.put(entity)
|
def setError(self, msg=None, title=None):
""" Shows and error message
"""
if msg is not None:
self.messageLabel.setText(msg)
if title is not None:
self.titleLabel.setText(title)
|
def function[setError, parameter[self, msg, title]]:
constant[ Shows and error message
]
if compare[name[msg] is_not constant[None]] begin[:]
call[name[self].messageLabel.setText, parameter[name[msg]]]
if compare[name[title] is_not constant[None]] begin[:]
call[name[self].titleLabel.setText, parameter[name[title]]]
|
keyword[def] identifier[setError] ( identifier[self] , identifier[msg] = keyword[None] , identifier[title] = keyword[None] ):
literal[string]
keyword[if] identifier[msg] keyword[is] keyword[not] keyword[None] :
identifier[self] . identifier[messageLabel] . identifier[setText] ( identifier[msg] )
keyword[if] identifier[title] keyword[is] keyword[not] keyword[None] :
identifier[self] . identifier[titleLabel] . identifier[setText] ( identifier[title] )
|
def setError(self, msg=None, title=None):
""" Shows and error message
"""
if msg is not None:
self.messageLabel.setText(msg) # depends on [control=['if'], data=['msg']]
if title is not None:
self.titleLabel.setText(title) # depends on [control=['if'], data=['title']]
|
def page(request, slug, template=u"pages/page.html", extra_context=None):
"""
Select a template for a page and render it. The request
object should have a ``page`` attribute that's added via
``yacms.pages.middleware.PageMiddleware``. The page is loaded
earlier via middleware to perform various other functions.
The urlpattern that maps to this view is a catch-all pattern, in
which case the page attribute won't exist, so raise a 404 then.
For template selection, a list of possible templates is built up
based on the current page. This list is order from most granular
match, starting with a custom template for the exact page, then
adding templates based on the page's parent page, that could be
used for sections of a site (eg all children of the parent).
Finally at the broadest level, a template for the page's content
type (it's model class) is checked for, and then if none of these
templates match, the default pages/page.html is used.
"""
from yacms.pages.middleware import PageMiddleware
if not PageMiddleware.installed():
raise ImproperlyConfigured("yacms.pages.middleware.PageMiddleware "
"(or a subclass of it) is missing from " +
"settings.MIDDLEWARE_CLASSES or " +
"settings.MIDDLEWARE")
if not hasattr(request, "page") or request.page.slug != slug:
raise Http404
# Check for a template name matching the page's slug. If the homepage
# is configured as a page instance, the template "pages/index.html" is
# used, since the slug "/" won't match a template name.
template_name = str(slug) if slug != home_slug() else "index"
templates = [u"pages/%s.html" % template_name]
method_template = request.page.get_content_model().get_template_name()
if method_template:
templates.insert(0, method_template)
if request.page.content_model is not None:
templates.append(u"pages/%s/%s.html" % (template_name,
request.page.content_model))
for parent in request.page.get_ascendants(for_user=request.user):
parent_template_name = str(parent.slug)
# Check for a template matching the page's content model.
if request.page.content_model is not None:
templates.append(u"pages/%s/%s.html" % (parent_template_name,
request.page.content_model))
# Check for a template matching the page's content model.
if request.page.content_model is not None:
templates.append(u"pages/%s.html" % request.page.content_model)
templates.append(template)
return TemplateResponse(request, templates, extra_context or {})
|
def function[page, parameter[request, slug, template, extra_context]]:
constant[
Select a template for a page and render it. The request
object should have a ``page`` attribute that's added via
``yacms.pages.middleware.PageMiddleware``. The page is loaded
earlier via middleware to perform various other functions.
The urlpattern that maps to this view is a catch-all pattern, in
which case the page attribute won't exist, so raise a 404 then.
For template selection, a list of possible templates is built up
based on the current page. This list is order from most granular
match, starting with a custom template for the exact page, then
adding templates based on the page's parent page, that could be
used for sections of a site (eg all children of the parent).
Finally at the broadest level, a template for the page's content
type (it's model class) is checked for, and then if none of these
templates match, the default pages/page.html is used.
]
from relative_module[yacms.pages.middleware] import module[PageMiddleware]
if <ast.UnaryOp object at 0x7da204621f30> begin[:]
<ast.Raise object at 0x7da204622950>
if <ast.BoolOp object at 0x7da204622c80> begin[:]
<ast.Raise object at 0x7da204623e80>
variable[template_name] assign[=] <ast.IfExp object at 0x7da204620b80>
variable[templates] assign[=] list[[<ast.BinOp object at 0x7da204623f40>]]
variable[method_template] assign[=] call[call[name[request].page.get_content_model, parameter[]].get_template_name, parameter[]]
if name[method_template] begin[:]
call[name[templates].insert, parameter[constant[0], name[method_template]]]
if compare[name[request].page.content_model is_not constant[None]] begin[:]
call[name[templates].append, parameter[binary_operation[constant[pages/%s/%s.html] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1b15f0a00>, <ast.Attribute object at 0x7da1b15f1480>]]]]]
for taget[name[parent]] in starred[call[name[request].page.get_ascendants, parameter[]]] begin[:]
variable[parent_template_name] assign[=] call[name[str], parameter[name[parent].slug]]
if compare[name[request].page.content_model is_not constant[None]] begin[:]
call[name[templates].append, parameter[binary_operation[constant[pages/%s/%s.html] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1b15f04c0>, <ast.Attribute object at 0x7da1b15f1ea0>]]]]]
if compare[name[request].page.content_model is_not constant[None]] begin[:]
call[name[templates].append, parameter[binary_operation[constant[pages/%s.html] <ast.Mod object at 0x7da2590d6920> name[request].page.content_model]]]
call[name[templates].append, parameter[name[template]]]
return[call[name[TemplateResponse], parameter[name[request], name[templates], <ast.BoolOp object at 0x7da1b15f0f40>]]]
|
keyword[def] identifier[page] ( identifier[request] , identifier[slug] , identifier[template] = literal[string] , identifier[extra_context] = keyword[None] ):
literal[string]
keyword[from] identifier[yacms] . identifier[pages] . identifier[middleware] keyword[import] identifier[PageMiddleware]
keyword[if] keyword[not] identifier[PageMiddleware] . identifier[installed] ():
keyword[raise] identifier[ImproperlyConfigured] ( literal[string]
literal[string] +
literal[string] +
literal[string] )
keyword[if] keyword[not] identifier[hasattr] ( identifier[request] , literal[string] ) keyword[or] identifier[request] . identifier[page] . identifier[slug] != identifier[slug] :
keyword[raise] identifier[Http404]
identifier[template_name] = identifier[str] ( identifier[slug] ) keyword[if] identifier[slug] != identifier[home_slug] () keyword[else] literal[string]
identifier[templates] =[ literal[string] % identifier[template_name] ]
identifier[method_template] = identifier[request] . identifier[page] . identifier[get_content_model] (). identifier[get_template_name] ()
keyword[if] identifier[method_template] :
identifier[templates] . identifier[insert] ( literal[int] , identifier[method_template] )
keyword[if] identifier[request] . identifier[page] . identifier[content_model] keyword[is] keyword[not] keyword[None] :
identifier[templates] . identifier[append] ( literal[string] %( identifier[template_name] ,
identifier[request] . identifier[page] . identifier[content_model] ))
keyword[for] identifier[parent] keyword[in] identifier[request] . identifier[page] . identifier[get_ascendants] ( identifier[for_user] = identifier[request] . identifier[user] ):
identifier[parent_template_name] = identifier[str] ( identifier[parent] . identifier[slug] )
keyword[if] identifier[request] . identifier[page] . identifier[content_model] keyword[is] keyword[not] keyword[None] :
identifier[templates] . identifier[append] ( literal[string] %( identifier[parent_template_name] ,
identifier[request] . identifier[page] . identifier[content_model] ))
keyword[if] identifier[request] . identifier[page] . identifier[content_model] keyword[is] keyword[not] keyword[None] :
identifier[templates] . identifier[append] ( literal[string] % identifier[request] . identifier[page] . identifier[content_model] )
identifier[templates] . identifier[append] ( identifier[template] )
keyword[return] identifier[TemplateResponse] ( identifier[request] , identifier[templates] , identifier[extra_context] keyword[or] {})
|
def page(request, slug, template=u'pages/page.html', extra_context=None):
"""
Select a template for a page and render it. The request
object should have a ``page`` attribute that's added via
``yacms.pages.middleware.PageMiddleware``. The page is loaded
earlier via middleware to perform various other functions.
The urlpattern that maps to this view is a catch-all pattern, in
which case the page attribute won't exist, so raise a 404 then.
For template selection, a list of possible templates is built up
based on the current page. This list is order from most granular
match, starting with a custom template for the exact page, then
adding templates based on the page's parent page, that could be
used for sections of a site (eg all children of the parent).
Finally at the broadest level, a template for the page's content
type (it's model class) is checked for, and then if none of these
templates match, the default pages/page.html is used.
"""
from yacms.pages.middleware import PageMiddleware
if not PageMiddleware.installed():
raise ImproperlyConfigured('yacms.pages.middleware.PageMiddleware (or a subclass of it) is missing from ' + 'settings.MIDDLEWARE_CLASSES or ' + 'settings.MIDDLEWARE') # depends on [control=['if'], data=[]]
if not hasattr(request, 'page') or request.page.slug != slug:
raise Http404 # depends on [control=['if'], data=[]]
# Check for a template name matching the page's slug. If the homepage
# is configured as a page instance, the template "pages/index.html" is
# used, since the slug "/" won't match a template name.
template_name = str(slug) if slug != home_slug() else 'index'
templates = [u'pages/%s.html' % template_name]
method_template = request.page.get_content_model().get_template_name()
if method_template:
templates.insert(0, method_template) # depends on [control=['if'], data=[]]
if request.page.content_model is not None:
templates.append(u'pages/%s/%s.html' % (template_name, request.page.content_model)) # depends on [control=['if'], data=[]]
for parent in request.page.get_ascendants(for_user=request.user):
parent_template_name = str(parent.slug)
# Check for a template matching the page's content model.
if request.page.content_model is not None:
templates.append(u'pages/%s/%s.html' % (parent_template_name, request.page.content_model)) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['parent']]
# Check for a template matching the page's content model.
if request.page.content_model is not None:
templates.append(u'pages/%s.html' % request.page.content_model) # depends on [control=['if'], data=[]]
templates.append(template)
return TemplateResponse(request, templates, extra_context or {})
|
def p_partselect_pointer_plus(self, p):
'partselect : pointer LBRACKET expression PLUSCOLON expression RBRACKET'
p[0] = Partselect(p[1], p[3], Plus(
p[3], p[5], lineno=p.lineno(1)), lineno=p.lineno(1))
p.set_lineno(0, p.lineno(1))
|
def function[p_partselect_pointer_plus, parameter[self, p]]:
constant[partselect : pointer LBRACKET expression PLUSCOLON expression RBRACKET]
call[name[p]][constant[0]] assign[=] call[name[Partselect], parameter[call[name[p]][constant[1]], call[name[p]][constant[3]], call[name[Plus], parameter[call[name[p]][constant[3]], call[name[p]][constant[5]]]]]]
call[name[p].set_lineno, parameter[constant[0], call[name[p].lineno, parameter[constant[1]]]]]
|
keyword[def] identifier[p_partselect_pointer_plus] ( identifier[self] , identifier[p] ):
literal[string]
identifier[p] [ literal[int] ]= identifier[Partselect] ( identifier[p] [ literal[int] ], identifier[p] [ literal[int] ], identifier[Plus] (
identifier[p] [ literal[int] ], identifier[p] [ literal[int] ], identifier[lineno] = identifier[p] . identifier[lineno] ( literal[int] )), identifier[lineno] = identifier[p] . identifier[lineno] ( literal[int] ))
identifier[p] . identifier[set_lineno] ( literal[int] , identifier[p] . identifier[lineno] ( literal[int] ))
|
def p_partselect_pointer_plus(self, p):
"""partselect : pointer LBRACKET expression PLUSCOLON expression RBRACKET"""
p[0] = Partselect(p[1], p[3], Plus(p[3], p[5], lineno=p.lineno(1)), lineno=p.lineno(1))
p.set_lineno(0, p.lineno(1))
|
def get_net_imbalance(count_per_broker):
"""Calculate and return net imbalance based on given count of
partitions or leaders per broker.
Net-imbalance in case of partitions implies total number of
extra partitions from optimal count over all brokers.
This is also implies, the minimum number of partition movements
required for overall balancing.
For leaders, net imbalance implies total number of extra brokers
as leaders from optimal count.
"""
net_imbalance = 0
opt_count, extra_allowed = \
compute_optimum(len(count_per_broker), sum(count_per_broker))
for count in count_per_broker:
extra_cnt, extra_allowed = \
get_extra_element_count(count, opt_count, extra_allowed)
net_imbalance += extra_cnt
return net_imbalance
|
def function[get_net_imbalance, parameter[count_per_broker]]:
constant[Calculate and return net imbalance based on given count of
partitions or leaders per broker.
Net-imbalance in case of partitions implies total number of
extra partitions from optimal count over all brokers.
This is also implies, the minimum number of partition movements
required for overall balancing.
For leaders, net imbalance implies total number of extra brokers
as leaders from optimal count.
]
variable[net_imbalance] assign[=] constant[0]
<ast.Tuple object at 0x7da1b0788610> assign[=] call[name[compute_optimum], parameter[call[name[len], parameter[name[count_per_broker]]], call[name[sum], parameter[name[count_per_broker]]]]]
for taget[name[count]] in starred[name[count_per_broker]] begin[:]
<ast.Tuple object at 0x7da1b078a0e0> assign[=] call[name[get_extra_element_count], parameter[name[count], name[opt_count], name[extra_allowed]]]
<ast.AugAssign object at 0x7da1b070ead0>
return[name[net_imbalance]]
|
keyword[def] identifier[get_net_imbalance] ( identifier[count_per_broker] ):
literal[string]
identifier[net_imbalance] = literal[int]
identifier[opt_count] , identifier[extra_allowed] = identifier[compute_optimum] ( identifier[len] ( identifier[count_per_broker] ), identifier[sum] ( identifier[count_per_broker] ))
keyword[for] identifier[count] keyword[in] identifier[count_per_broker] :
identifier[extra_cnt] , identifier[extra_allowed] = identifier[get_extra_element_count] ( identifier[count] , identifier[opt_count] , identifier[extra_allowed] )
identifier[net_imbalance] += identifier[extra_cnt]
keyword[return] identifier[net_imbalance]
|
def get_net_imbalance(count_per_broker):
"""Calculate and return net imbalance based on given count of
partitions or leaders per broker.
Net-imbalance in case of partitions implies total number of
extra partitions from optimal count over all brokers.
This is also implies, the minimum number of partition movements
required for overall balancing.
For leaders, net imbalance implies total number of extra brokers
as leaders from optimal count.
"""
net_imbalance = 0
(opt_count, extra_allowed) = compute_optimum(len(count_per_broker), sum(count_per_broker))
for count in count_per_broker:
(extra_cnt, extra_allowed) = get_extra_element_count(count, opt_count, extra_allowed)
net_imbalance += extra_cnt # depends on [control=['for'], data=['count']]
return net_imbalance
|
def write_skycatalog(self, filename):
    """Write the ``all_radec`` sky-position catalog for this image to a file.

    Parameters
    ----------
    filename : str
        Path of the output catalog file (overwritten if it exists).

    Notes
    -----
    Does nothing when ``self.all_radec`` is ``None``.  Positions are written
    one per line as ``RA Dec`` in degrees with 12 decimal places, preceded
    by a three-line comment header.
    """
    if self.all_radec is None:
        return
    ralist = self.all_radec[0]
    declist = self.all_radec[1]
    # Context manager guarantees the file is closed even if a write fails
    # (the original left the handle open on error).
    with open(filename, 'w') as f:
        f.write("#Sky positions for: " + self.name + '\n')
        f.write("#RA Dec\n")
        f.write("#(deg) (deg)\n")
        for ra, dec in zip(ralist, declist):
            f.write('%0.12f %0.12f\n' % (ra, dec))
|
def function[write_skycatalog, parameter[self, filename]]:
constant[ Write out the all_radec catalog for this image to a file.
]
if compare[name[self].all_radec is constant[None]] begin[:]
return[None]
variable[ralist] assign[=] call[name[self].all_radec][constant[0]]
variable[declist] assign[=] call[name[self].all_radec][constant[1]]
variable[f] assign[=] call[name[open], parameter[name[filename], constant[w]]]
call[name[f].write, parameter[binary_operation[binary_operation[constant[#Sky positions for: ] + name[self].name] + constant[
]]]]
call[name[f].write, parameter[constant[#RA Dec
]]]
call[name[f].write, parameter[constant[#(deg) (deg)
]]]
for taget[name[i]] in starred[call[name[range], parameter[call[name[len], parameter[name[ralist]]]]]] begin[:]
call[name[f].write, parameter[binary_operation[constant[%0.12f %0.12f
] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Subscript object at 0x7da1b1bb51b0>, <ast.Subscript object at 0x7da1b1bb53c0>]]]]]
call[name[f].close, parameter[]]
|
keyword[def] identifier[write_skycatalog] ( identifier[self] , identifier[filename] ):
literal[string]
keyword[if] identifier[self] . identifier[all_radec] keyword[is] keyword[None] :
keyword[return]
identifier[ralist] = identifier[self] . identifier[all_radec] [ literal[int] ]
identifier[declist] = identifier[self] . identifier[all_radec] [ literal[int] ]
identifier[f] = identifier[open] ( identifier[filename] , literal[string] )
identifier[f] . identifier[write] ( literal[string] + identifier[self] . identifier[name] + literal[string] )
identifier[f] . identifier[write] ( literal[string] )
identifier[f] . identifier[write] ( literal[string] )
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[len] ( identifier[ralist] )):
identifier[f] . identifier[write] ( literal[string] %( identifier[ralist] [ identifier[i] ], identifier[declist] [ identifier[i] ]))
identifier[f] . identifier[close] ()
|
def write_skycatalog(self, filename):
""" Write out the all_radec catalog for this image to a file.
"""
if self.all_radec is None:
return # depends on [control=['if'], data=[]]
ralist = self.all_radec[0] #.tolist()
declist = self.all_radec[1] #.tolist()
f = open(filename, 'w')
f.write('#Sky positions for: ' + self.name + '\n')
f.write('#RA Dec\n')
f.write('#(deg) (deg)\n')
for i in range(len(ralist)):
f.write('%0.12f %0.12f\n' % (ralist[i], declist[i])) # depends on [control=['for'], data=['i']]
f.close()
|
def generate_api_docs(package, api_dir, clean=False, printlog=True):
    """Generate a module level API documentation of a python package.

    Description
    -----------
    Generates markdown API files for each module in a Python package whereas
    the structure is as follows:
    `package/package.subpackage/package.subpackage.module.md`

    One markdown file is written per documented object name; several objects
    sharing a name within a subpackage are concatenated into the same file.
    Files whose content is unchanged are left untouched (their mtime is
    preserved).

    Parameters
    -----------
    package : Python package object
    api_dir : str
        Output directory path for the top-level package directory
    clean : bool (default: False)
        Removes previously existing API directory if True.
    printlog : bool (default: True)
        Prints a progress log to the standard output screen if True.
    """
    if printlog:
        print('\n\nGenerating Module Files\n%s\n' % (50 * '='))
    prefix = package.__name__ + "."
    # clear the previous version
    if clean:
        if os.path.isdir(api_dir):
            shutil.rmtree(api_dir)
    # get subpackages
    api_docs = {}
    for importer, pkg_name, is_pkg in pkgutil.iter_modules(
            package.__path__,
            prefix):
        if is_pkg:
            # pkg_name is already fully qualified thanks to the prefix above.
            subpackage = __import__(pkg_name, fromlist="dummy")
            # NOTE(review): this rebinds the outer `prefix`; iter_modules has
            # already captured the original value, so the reassignment appears
            # to have no effect — confirm before removing.
            prefix = subpackage.__name__ + "."
            # get functions and classes
            classes, functions = get_functions_and_classes(subpackage)
            target_dir = os.path.join(api_dir, subpackage.__name__)
            # create the subdirs
            if not os.path.isdir(target_dir):
                os.makedirs(target_dir)
                if printlog:
                    print('created %s' % target_dir)
            # create markdown documents in memory
            for obj in classes + functions:
                md_path = os.path.join(target_dir, obj[0]) + '.md'
                if md_path not in api_docs:
                    api_docs[md_path] = object_to_markdownpage(obj_name=obj[0],
                                                               obj=obj[1],
                                                               s='')
                else:
                    # Same output path seen again: append so no page is lost.
                    api_docs[md_path] += object_to_markdownpage(obj_name=(
                        obj[0]),
                        obj=obj[1],
                        s='')
    # write to files
    for d in sorted(api_docs):
        prev = ''
        if os.path.isfile(d):
            with open(d, 'r') as f:
                prev = f.read()
            # Skip rewriting identical content so file mtimes stay stable.
            if prev == api_docs[d]:
                msg = 'skipped'
            else:
                msg = 'updated'
        else:
            msg = 'created'
        if msg != 'skipped':
            with open(d, 'w') as f:
                f.write(api_docs[d])
        if printlog:
            print('%s %s' % (msg, d))
|
def function[generate_api_docs, parameter[package, api_dir, clean, printlog]]:
constant[Generate a module level API documentation of a python package.
Description
-----------
Generates markdown API files for each module in a Python package whereas
the structure is as follows:
`package/package.subpackage/package.subpackage.module.md`
Parameters
-----------
package : Python package object
api_dir : str
Output directory path for the top-level package directory
clean : bool (default: False)
Removes previously existing API directory if True.
printlog : bool (default: True)
Prints a progress log to the standard output screen if True.
]
if name[printlog] begin[:]
call[name[print], parameter[binary_operation[constant[
Generating Module Files
%s
] <ast.Mod object at 0x7da2590d6920> binary_operation[constant[50] * constant[=]]]]]
variable[prefix] assign[=] binary_operation[name[package].__name__ + constant[.]]
if name[clean] begin[:]
if call[name[os].path.isdir, parameter[name[api_dir]]] begin[:]
call[name[shutil].rmtree, parameter[name[api_dir]]]
variable[api_docs] assign[=] dictionary[[], []]
for taget[tuple[[<ast.Name object at 0x7da1b0ebcbe0>, <ast.Name object at 0x7da1b0ebe380>, <ast.Name object at 0x7da1b0ebf0d0>]]] in starred[call[name[pkgutil].iter_modules, parameter[name[package].__path__, name[prefix]]]] begin[:]
if name[is_pkg] begin[:]
variable[subpackage] assign[=] call[name[__import__], parameter[name[pkg_name]]]
variable[prefix] assign[=] binary_operation[name[subpackage].__name__ + constant[.]]
<ast.Tuple object at 0x7da1b0c402b0> assign[=] call[name[get_functions_and_classes], parameter[name[subpackage]]]
variable[target_dir] assign[=] call[name[os].path.join, parameter[name[api_dir], name[subpackage].__name__]]
if <ast.UnaryOp object at 0x7da207f9af80> begin[:]
call[name[os].makedirs, parameter[name[target_dir]]]
if name[printlog] begin[:]
call[name[print], parameter[binary_operation[constant[created %s] <ast.Mod object at 0x7da2590d6920> name[target_dir]]]]
for taget[name[obj]] in starred[binary_operation[name[classes] + name[functions]]] begin[:]
variable[md_path] assign[=] binary_operation[call[name[os].path.join, parameter[name[target_dir], call[name[obj]][constant[0]]]] + constant[.md]]
if compare[name[md_path] <ast.NotIn object at 0x7da2590d7190> name[api_docs]] begin[:]
call[name[api_docs]][name[md_path]] assign[=] call[name[object_to_markdownpage], parameter[]]
for taget[name[d]] in starred[call[name[sorted], parameter[name[api_docs]]]] begin[:]
variable[prev] assign[=] constant[]
if call[name[os].path.isfile, parameter[name[d]]] begin[:]
with call[name[open], parameter[name[d], constant[r]]] begin[:]
variable[prev] assign[=] call[name[f].read, parameter[]]
if compare[name[prev] equal[==] call[name[api_docs]][name[d]]] begin[:]
variable[msg] assign[=] constant[skipped]
if compare[name[msg] not_equal[!=] constant[skipped]] begin[:]
with call[name[open], parameter[name[d], constant[w]]] begin[:]
call[name[f].write, parameter[call[name[api_docs]][name[d]]]]
if name[printlog] begin[:]
call[name[print], parameter[binary_operation[constant[%s %s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1b0c67cd0>, <ast.Name object at 0x7da1b0c67520>]]]]]
|
keyword[def] identifier[generate_api_docs] ( identifier[package] , identifier[api_dir] , identifier[clean] = keyword[False] , identifier[printlog] = keyword[True] ):
literal[string]
keyword[if] identifier[printlog] :
identifier[print] ( literal[string] %( literal[int] * literal[string] ))
identifier[prefix] = identifier[package] . identifier[__name__] + literal[string]
keyword[if] identifier[clean] :
keyword[if] identifier[os] . identifier[path] . identifier[isdir] ( identifier[api_dir] ):
identifier[shutil] . identifier[rmtree] ( identifier[api_dir] )
identifier[api_docs] ={}
keyword[for] identifier[importer] , identifier[pkg_name] , identifier[is_pkg] keyword[in] identifier[pkgutil] . identifier[iter_modules] (
identifier[package] . identifier[__path__] ,
identifier[prefix] ):
keyword[if] identifier[is_pkg] :
identifier[subpackage] = identifier[__import__] ( identifier[pkg_name] , identifier[fromlist] = literal[string] )
identifier[prefix] = identifier[subpackage] . identifier[__name__] + literal[string]
identifier[classes] , identifier[functions] = identifier[get_functions_and_classes] ( identifier[subpackage] )
identifier[target_dir] = identifier[os] . identifier[path] . identifier[join] ( identifier[api_dir] , identifier[subpackage] . identifier[__name__] )
keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[isdir] ( identifier[target_dir] ):
identifier[os] . identifier[makedirs] ( identifier[target_dir] )
keyword[if] identifier[printlog] :
identifier[print] ( literal[string] % identifier[target_dir] )
keyword[for] identifier[obj] keyword[in] identifier[classes] + identifier[functions] :
identifier[md_path] = identifier[os] . identifier[path] . identifier[join] ( identifier[target_dir] , identifier[obj] [ literal[int] ])+ literal[string]
keyword[if] identifier[md_path] keyword[not] keyword[in] identifier[api_docs] :
identifier[api_docs] [ identifier[md_path] ]= identifier[object_to_markdownpage] ( identifier[obj_name] = identifier[obj] [ literal[int] ],
identifier[obj] = identifier[obj] [ literal[int] ],
identifier[s] = literal[string] )
keyword[else] :
identifier[api_docs] [ identifier[md_path] ]+= identifier[object_to_markdownpage] ( identifier[obj_name] =(
identifier[obj] [ literal[int] ]),
identifier[obj] = identifier[obj] [ literal[int] ],
identifier[s] = literal[string] )
keyword[for] identifier[d] keyword[in] identifier[sorted] ( identifier[api_docs] ):
identifier[prev] = literal[string]
keyword[if] identifier[os] . identifier[path] . identifier[isfile] ( identifier[d] ):
keyword[with] identifier[open] ( identifier[d] , literal[string] ) keyword[as] identifier[f] :
identifier[prev] = identifier[f] . identifier[read] ()
keyword[if] identifier[prev] == identifier[api_docs] [ identifier[d] ]:
identifier[msg] = literal[string]
keyword[else] :
identifier[msg] = literal[string]
keyword[else] :
identifier[msg] = literal[string]
keyword[if] identifier[msg] != literal[string] :
keyword[with] identifier[open] ( identifier[d] , literal[string] ) keyword[as] identifier[f] :
identifier[f] . identifier[write] ( identifier[api_docs] [ identifier[d] ])
keyword[if] identifier[printlog] :
identifier[print] ( literal[string] %( identifier[msg] , identifier[d] ))
|
def generate_api_docs(package, api_dir, clean=False, printlog=True):
"""Generate a module level API documentation of a python package.
Description
-----------
Generates markdown API files for each module in a Python package whereas
the structure is as follows:
`package/package.subpackage/package.subpackage.module.md`
Parameters
-----------
package : Python package object
api_dir : str
Output directory path for the top-level package directory
clean : bool (default: False)
Removes previously existing API directory if True.
printlog : bool (default: True)
Prints a progress log to the standard output screen if True.
"""
if printlog:
print('\n\nGenerating Module Files\n%s\n' % (50 * '=')) # depends on [control=['if'], data=[]]
prefix = package.__name__ + '.'
# clear the previous version
if clean:
if os.path.isdir(api_dir):
shutil.rmtree(api_dir) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
# get subpackages
api_docs = {}
for (importer, pkg_name, is_pkg) in pkgutil.iter_modules(package.__path__, prefix):
if is_pkg:
subpackage = __import__(pkg_name, fromlist='dummy')
prefix = subpackage.__name__ + '.'
# get functions and classes
(classes, functions) = get_functions_and_classes(subpackage)
target_dir = os.path.join(api_dir, subpackage.__name__)
# create the subdirs
if not os.path.isdir(target_dir):
os.makedirs(target_dir)
if printlog:
print('created %s' % target_dir) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
# create markdown documents in memory
for obj in classes + functions:
md_path = os.path.join(target_dir, obj[0]) + '.md'
if md_path not in api_docs:
api_docs[md_path] = object_to_markdownpage(obj_name=obj[0], obj=obj[1], s='') # depends on [control=['if'], data=['md_path', 'api_docs']]
else:
api_docs[md_path] += object_to_markdownpage(obj_name=obj[0], obj=obj[1], s='') # depends on [control=['for'], data=['obj']] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]]
# write to files
for d in sorted(api_docs):
prev = ''
if os.path.isfile(d):
with open(d, 'r') as f:
prev = f.read() # depends on [control=['with'], data=['f']]
if prev == api_docs[d]:
msg = 'skipped' # depends on [control=['if'], data=[]]
else:
msg = 'updated' # depends on [control=['if'], data=[]]
else:
msg = 'created'
if msg != 'skipped':
with open(d, 'w') as f:
f.write(api_docs[d]) # depends on [control=['with'], data=['f']] # depends on [control=['if'], data=[]]
if printlog:
print('%s %s' % (msg, d)) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['d']]
|
def convert_predictions_to_image_summaries(hook_args):
  """Optionally converts images from hooks_args to image summaries.

  Args:
    hook_args: DecodeHookArgs namedtuple

  Returns:
    summaries: list of tf.Summary values if hook_args.decode_hpara
  """
  decode_hparams = hook_args.decode_hparams
  if not decode_hparams.display_decoded_images:
    return []
  predictions = hook_args.predictions[0]
  # Sample ten predictions at random so that tensorboard does not hang.
  sampled = np.random.choice(predictions, size=10)
  summaries = []
  for idx, pred in enumerate(sampled):
    # Input summary first, then output, for each sampled prediction.
    summaries.append(
        image_to_tf_summary_value(pred["inputs"], tag="%d_input" % idx))
    summaries.append(
        image_to_tf_summary_value(pred["outputs"], tag="%d_output" % idx))
  return summaries
|
def function[convert_predictions_to_image_summaries, parameter[hook_args]]:
constant[Optionally converts images from hooks_args to image summaries.
Args:
hook_args: DecodeHookArgs namedtuple
Returns:
summaries: list of tf.Summary values if hook_args.decode_hpara
]
variable[decode_hparams] assign[=] name[hook_args].decode_hparams
if <ast.UnaryOp object at 0x7da1b1e17d30> begin[:]
return[list[[]]]
variable[predictions] assign[=] call[name[hook_args].predictions][constant[0]]
variable[all_summaries] assign[=] list[[]]
variable[rand_predictions] assign[=] call[name[np].random.choice, parameter[name[predictions]]]
for taget[tuple[[<ast.Name object at 0x7da1b1e17940>, <ast.Name object at 0x7da1b1e177f0>]]] in starred[call[name[enumerate], parameter[name[rand_predictions]]]] begin[:]
variable[output_summary] assign[=] call[name[image_to_tf_summary_value], parameter[call[name[prediction]][constant[outputs]]]]
variable[input_summary] assign[=] call[name[image_to_tf_summary_value], parameter[call[name[prediction]][constant[inputs]]]]
call[name[all_summaries].append, parameter[name[input_summary]]]
call[name[all_summaries].append, parameter[name[output_summary]]]
return[name[all_summaries]]
|
keyword[def] identifier[convert_predictions_to_image_summaries] ( identifier[hook_args] ):
literal[string]
identifier[decode_hparams] = identifier[hook_args] . identifier[decode_hparams]
keyword[if] keyword[not] identifier[decode_hparams] . identifier[display_decoded_images] :
keyword[return] []
identifier[predictions] = identifier[hook_args] . identifier[predictions] [ literal[int] ]
identifier[all_summaries] =[]
identifier[rand_predictions] = identifier[np] . identifier[random] . identifier[choice] ( identifier[predictions] , identifier[size] = literal[int] )
keyword[for] identifier[ind] , identifier[prediction] keyword[in] identifier[enumerate] ( identifier[rand_predictions] ):
identifier[output_summary] = identifier[image_to_tf_summary_value] (
identifier[prediction] [ literal[string] ], identifier[tag] = literal[string] % identifier[ind] )
identifier[input_summary] = identifier[image_to_tf_summary_value] (
identifier[prediction] [ literal[string] ], identifier[tag] = literal[string] % identifier[ind] )
identifier[all_summaries] . identifier[append] ( identifier[input_summary] )
identifier[all_summaries] . identifier[append] ( identifier[output_summary] )
keyword[return] identifier[all_summaries]
|
def convert_predictions_to_image_summaries(hook_args):
"""Optionally converts images from hooks_args to image summaries.
Args:
hook_args: DecodeHookArgs namedtuple
Returns:
summaries: list of tf.Summary values if hook_args.decode_hpara
"""
decode_hparams = hook_args.decode_hparams
if not decode_hparams.display_decoded_images:
return [] # depends on [control=['if'], data=[]]
predictions = hook_args.predictions[0]
# Display ten random inputs and outputs so that tensorboard does not hang.
all_summaries = []
rand_predictions = np.random.choice(predictions, size=10)
for (ind, prediction) in enumerate(rand_predictions):
output_summary = image_to_tf_summary_value(prediction['outputs'], tag='%d_output' % ind)
input_summary = image_to_tf_summary_value(prediction['inputs'], tag='%d_input' % ind)
all_summaries.append(input_summary)
all_summaries.append(output_summary) # depends on [control=['for'], data=[]]
return all_summaries
|
def section(request):
    """
    Determines the current site section from resolved view pattern and adds
    it to context['section']. Section defaults to the first specified section.

    Returns an empty dict when the SECTIONS setting is absent; otherwise a
    one-key dict ``{'section': <name>}``.  When several configured sections
    match the resolved pattern name, the last match wins.
    """
    # If SECTIONS setting is not specified, don't do anything.
    try:
        sections = settings.SECTIONS
    except AttributeError:
        return {}
    # Default return is first section.
    section = sections[0]['name']
    try:
        pattern_name = resolve_to_name(request.path_info)
    except Resolver404:
        pattern_name = None
    if pattern_name:
        # Reuse the already-fetched `sections` instead of re-reading
        # settings.SECTIONS (same object, one consistent access).
        for option in sections:
            if pattern_name in option['matching_pattern_names']:
                section = option['name']
    return {'section': section}
|
def function[section, parameter[request]]:
constant[
Determines the current site section from resolved view pattern and adds
it to context['section']. Section defaults to the first specified section.
]
<ast.Try object at 0x7da204345900>
variable[section] assign[=] call[call[name[sections]][constant[0]]][constant[name]]
<ast.Try object at 0x7da204346950>
if name[pattern_name] begin[:]
for taget[name[option]] in starred[name[settings].SECTIONS] begin[:]
if compare[name[pattern_name] in call[name[option]][constant[matching_pattern_names]]] begin[:]
variable[section] assign[=] call[name[option]][constant[name]]
return[dictionary[[<ast.Constant object at 0x7da204346650>], [<ast.Name object at 0x7da204347940>]]]
|
keyword[def] identifier[section] ( identifier[request] ):
literal[string]
keyword[try] :
identifier[sections] = identifier[settings] . identifier[SECTIONS]
keyword[except] identifier[AttributeError] :
keyword[return] {}
identifier[section] = identifier[sections] [ literal[int] ][ literal[string] ]
keyword[try] :
identifier[pattern_name] = identifier[resolve_to_name] ( identifier[request] . identifier[path_info] )
keyword[except] identifier[Resolver404] :
identifier[pattern_name] = keyword[None]
keyword[if] identifier[pattern_name] :
keyword[for] identifier[option] keyword[in] identifier[settings] . identifier[SECTIONS] :
keyword[if] identifier[pattern_name] keyword[in] identifier[option] [ literal[string] ]:
identifier[section] = identifier[option] [ literal[string] ]
keyword[return] { literal[string] : identifier[section] }
|
def section(request):
"""
Determines the current site section from resolved view pattern and adds
it to context['section']. Section defaults to the first specified section.
"""
# If SECTIONS setting is not specified, don't do anything.
try:
sections = settings.SECTIONS # depends on [control=['try'], data=[]]
except AttributeError:
return {} # depends on [control=['except'], data=[]]
# Default return is first section.
section = sections[0]['name']
try:
pattern_name = resolve_to_name(request.path_info) # depends on [control=['try'], data=[]]
except Resolver404:
pattern_name = None # depends on [control=['except'], data=[]]
if pattern_name:
for option in settings.SECTIONS:
if pattern_name in option['matching_pattern_names']:
section = option['name'] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['option']] # depends on [control=['if'], data=[]]
return {'section': section}
|
def bundles(ctx):
    """
    List discovered bundles.
    """
    discovered = _get_bundles(ctx.obj.data['env'])
    rows = [
        (b.name, f'{b.__module__}.{b.__class__.__name__}')
        for b in discovered
    ]
    print_table(('Name', 'Location'), rows)
|
def function[bundles, parameter[ctx]]:
constant[
List discovered bundles.
]
variable[bundles] assign[=] call[name[_get_bundles], parameter[call[name[ctx].obj.data][constant[env]]]]
call[name[print_table], parameter[tuple[[<ast.Constant object at 0x7da20c992860>, <ast.Constant object at 0x7da20c992ad0>]], <ast.ListComp object at 0x7da20c991420>]]
|
keyword[def] identifier[bundles] ( identifier[ctx] ):
literal[string]
identifier[bundles] = identifier[_get_bundles] ( identifier[ctx] . identifier[obj] . identifier[data] [ literal[string] ])
identifier[print_table] (( literal[string] , literal[string] ),
[( identifier[bundle] . identifier[name] , literal[string] )
keyword[for] identifier[bundle] keyword[in] identifier[bundles] ])
|
def bundles(ctx):
"""
List discovered bundles.
"""
bundles = _get_bundles(ctx.obj.data['env'])
print_table(('Name', 'Location'), [(bundle.name, f'{bundle.__module__}.{bundle.__class__.__name__}') for bundle in bundles])
|
def _set_scroll_area(self, force=False):
    """
    Args:
        force(bool): Set the scroll area even if no change in height and position is detected

    Sets the scroll window based on the counter positions.

    Side effects: updates ``self.scroll_offset`` and ``self.height``,
    registers the atexit/SIGWINCH handlers once, may emit newlines to
    ``self.stream``, and repositions the cursor on ``self.term`` (and on
    ``self.companion_term`` when present).
    """
    # Save scroll offset for resizing
    oldOffset = self.scroll_offset
    # One row past the lowest occupied counter position.
    self.scroll_offset = newOffset = max(self.counters.values()) + 1
    if not self.enabled:
        return
    # Set exit handling only once
    if not self.process_exit:
        atexit.register(self._at_exit)
        if not self.no_resize and RESIZE_SUPPORTED:
            signal.signal(signal.SIGWINCH, self._resize_handler)
        self.process_exit = True
    if self.set_scroll:
        term = self.term
        newHeight = term.height
        # Top row of the scroll region; clamped to 0 for tiny terminals.
        scrollPosition = max(0, newHeight - newOffset)
        if force or newOffset > oldOffset or newHeight != self.height:
            self.height = newHeight
            # Add line feeds so we don't overwrite existing output
            if newOffset - oldOffset > 0:
                term.move_to(0, max(0, newHeight - oldOffset))
                self.stream.write('\n' * (newOffset - oldOffset))
            # Reset scroll area
            self.term.change_scroll(scrollPosition)
        # Always reset position
        term.move_to(0, scrollPosition)
        if self.companion_term:
            # NOTE(review): companion terminal seems to mirror the cursor
            # position of the main terminal — confirm against its writer.
            self.companion_term.move_to(0, scrollPosition)
|
def function[_set_scroll_area, parameter[self, force]]:
constant[
Args:
force(bool): Set the scroll area even if no change in height and position is detected
Sets the scroll window based on the counter positions
]
variable[oldOffset] assign[=] name[self].scroll_offset
name[self].scroll_offset assign[=] binary_operation[call[name[max], parameter[call[name[self].counters.values, parameter[]]]] + constant[1]]
if <ast.UnaryOp object at 0x7da1b07f6080> begin[:]
return[None]
if <ast.UnaryOp object at 0x7da1b07f68c0> begin[:]
call[name[atexit].register, parameter[name[self]._at_exit]]
if <ast.BoolOp object at 0x7da1b07f48b0> begin[:]
call[name[signal].signal, parameter[name[signal].SIGWINCH, name[self]._resize_handler]]
name[self].process_exit assign[=] constant[True]
if name[self].set_scroll begin[:]
variable[term] assign[=] name[self].term
variable[newHeight] assign[=] name[term].height
variable[scrollPosition] assign[=] call[name[max], parameter[constant[0], binary_operation[name[newHeight] - name[newOffset]]]]
if <ast.BoolOp object at 0x7da1b07f5e70> begin[:]
name[self].height assign[=] name[newHeight]
if compare[binary_operation[name[newOffset] - name[oldOffset]] greater[>] constant[0]] begin[:]
call[name[term].move_to, parameter[constant[0], call[name[max], parameter[constant[0], binary_operation[name[newHeight] - name[oldOffset]]]]]]
call[name[self].stream.write, parameter[binary_operation[constant[
] * binary_operation[name[newOffset] - name[oldOffset]]]]]
call[name[self].term.change_scroll, parameter[name[scrollPosition]]]
call[name[term].move_to, parameter[constant[0], name[scrollPosition]]]
if name[self].companion_term begin[:]
call[name[self].companion_term.move_to, parameter[constant[0], name[scrollPosition]]]
|
keyword[def] identifier[_set_scroll_area] ( identifier[self] , identifier[force] = keyword[False] ):
literal[string]
identifier[oldOffset] = identifier[self] . identifier[scroll_offset]
identifier[self] . identifier[scroll_offset] = identifier[newOffset] = identifier[max] ( identifier[self] . identifier[counters] . identifier[values] ())+ literal[int]
keyword[if] keyword[not] identifier[self] . identifier[enabled] :
keyword[return]
keyword[if] keyword[not] identifier[self] . identifier[process_exit] :
identifier[atexit] . identifier[register] ( identifier[self] . identifier[_at_exit] )
keyword[if] keyword[not] identifier[self] . identifier[no_resize] keyword[and] identifier[RESIZE_SUPPORTED] :
identifier[signal] . identifier[signal] ( identifier[signal] . identifier[SIGWINCH] , identifier[self] . identifier[_resize_handler] )
identifier[self] . identifier[process_exit] = keyword[True]
keyword[if] identifier[self] . identifier[set_scroll] :
identifier[term] = identifier[self] . identifier[term]
identifier[newHeight] = identifier[term] . identifier[height]
identifier[scrollPosition] = identifier[max] ( literal[int] , identifier[newHeight] - identifier[newOffset] )
keyword[if] identifier[force] keyword[or] identifier[newOffset] > identifier[oldOffset] keyword[or] identifier[newHeight] != identifier[self] . identifier[height] :
identifier[self] . identifier[height] = identifier[newHeight]
keyword[if] identifier[newOffset] - identifier[oldOffset] > literal[int] :
identifier[term] . identifier[move_to] ( literal[int] , identifier[max] ( literal[int] , identifier[newHeight] - identifier[oldOffset] ))
identifier[self] . identifier[stream] . identifier[write] ( literal[string] *( identifier[newOffset] - identifier[oldOffset] ))
identifier[self] . identifier[term] . identifier[change_scroll] ( identifier[scrollPosition] )
identifier[term] . identifier[move_to] ( literal[int] , identifier[scrollPosition] )
keyword[if] identifier[self] . identifier[companion_term] :
identifier[self] . identifier[companion_term] . identifier[move_to] ( literal[int] , identifier[scrollPosition] )
|
def _set_scroll_area(self, force=False):
"""
Args:
force(bool): Set the scroll area even if no change in height and position is detected
Sets the scroll window based on the counter positions
"""
# Save scroll offset for resizing
oldOffset = self.scroll_offset
self.scroll_offset = newOffset = max(self.counters.values()) + 1
if not self.enabled:
return # depends on [control=['if'], data=[]]
# Set exit handling only once
if not self.process_exit:
atexit.register(self._at_exit)
if not self.no_resize and RESIZE_SUPPORTED:
signal.signal(signal.SIGWINCH, self._resize_handler) # depends on [control=['if'], data=[]]
self.process_exit = True # depends on [control=['if'], data=[]]
if self.set_scroll:
term = self.term
newHeight = term.height
scrollPosition = max(0, newHeight - newOffset)
if force or newOffset > oldOffset or newHeight != self.height:
self.height = newHeight
# Add line feeds so we don't overwrite existing output
if newOffset - oldOffset > 0:
term.move_to(0, max(0, newHeight - oldOffset))
self.stream.write('\n' * (newOffset - oldOffset)) # depends on [control=['if'], data=[]]
# Reset scroll area
self.term.change_scroll(scrollPosition) # depends on [control=['if'], data=[]]
# Always reset position
term.move_to(0, scrollPosition)
if self.companion_term:
self.companion_term.move_to(0, scrollPosition) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
|
def parse_jellyfish_data(self, f):
    """Parse a jellyfish histogram file and memorise its k-mer spectrum.

    Each line of ``f['f']`` is ``"<occurrence> <count>"``.  The stored
    histogram maps occurrence -> occurrence * count (total k-mers at that
    multiplicity).  The final line of a jellyfish histogram is a cumulative
    bin (every k-mer occurring at least that often), so it is dropped.

    Updates ``self.max_key``, ``self.jellyfish_max_x`` and
    ``self.jellyfish_data[f['s_name']]``; silently returns when the file
    yields no usable bins (the original raised KeyError/ValueError here).
    """
    histogram = {}
    occurence = 0
    for line in f['f']:
        fields = line.rstrip('\n').split(" ")
        occurence = int(fields[0])
        count = int(fields[1])
        histogram[occurence] = occurence * count
    if not histogram:
        # Empty file: nothing to delete or record.
        return
    # Delete last occurrence as it is the sum of all kmers occurring more
    # often than it (cumulative bin, not a real histogram point).
    del histogram[occurence]
    if not histogram:
        # Single-line file: the only bin was the cumulative one.
        return
    # Modal occurrence, used to scale the x-axis across samples.
    self.max_key = max(histogram, key=histogram.get)
    self.jellyfish_max_x = max(self.jellyfish_max_x, self.max_key)
    if f['s_name'] in self.jellyfish_data:
        log.debug("Duplicate sample name found! Overwriting: {}".format(f['s_name']))
    self.add_data_source(f)
    self.jellyfish_data[f['s_name']] = histogram
|
def function[parse_jellyfish_data, parameter[self, f]]:
constant[ Go through the hist file and memorise it ]
variable[histogram] assign[=] dictionary[[], []]
variable[occurence] assign[=] constant[0]
for taget[name[line]] in starred[call[name[f]][constant[f]]] begin[:]
variable[line] assign[=] call[name[line].rstrip, parameter[constant[
]]]
variable[occurence] assign[=] call[name[int], parameter[call[call[name[line].split, parameter[constant[ ]]]][constant[0]]]]
variable[count] assign[=] call[name[int], parameter[call[call[name[line].split, parameter[constant[ ]]]][constant[1]]]]
call[name[histogram]][name[occurence]] assign[=] binary_operation[name[occurence] * name[count]]
<ast.Delete object at 0x7da18ede46d0>
name[self].max_key assign[=] call[name[max], parameter[name[histogram]]]
name[self].jellyfish_max_x assign[=] call[name[max], parameter[name[self].jellyfish_max_x, name[self].max_key]]
if compare[call[name[len], parameter[name[histogram]]] greater[>] constant[0]] begin[:]
if compare[call[name[f]][constant[s_name]] in name[self].jellyfish_data] begin[:]
call[name[log].debug, parameter[call[constant[Duplicate sample name found! Overwriting: {}].format, parameter[call[name[f]][constant[s_name]]]]]]
call[name[self].add_data_source, parameter[name[f]]]
call[name[self].jellyfish_data][call[name[f]][constant[s_name]]] assign[=] name[histogram]
|
keyword[def] identifier[parse_jellyfish_data] ( identifier[self] , identifier[f] ):
literal[string]
identifier[histogram] ={}
identifier[occurence] = literal[int]
keyword[for] identifier[line] keyword[in] identifier[f] [ literal[string] ]:
identifier[line] = identifier[line] . identifier[rstrip] ( literal[string] )
identifier[occurence] = identifier[int] ( identifier[line] . identifier[split] ( literal[string] )[ literal[int] ])
identifier[count] = identifier[int] ( identifier[line] . identifier[split] ( literal[string] )[ literal[int] ])
identifier[histogram] [ identifier[occurence] ]= identifier[occurence] * identifier[count]
keyword[del] identifier[histogram] [ identifier[occurence] ]
identifier[self] . identifier[max_key] = identifier[max] ( identifier[histogram] , identifier[key] = identifier[histogram] . identifier[get] )
identifier[self] . identifier[jellyfish_max_x] = identifier[max] ( identifier[self] . identifier[jellyfish_max_x] , identifier[self] . identifier[max_key] )
keyword[if] identifier[len] ( identifier[histogram] )> literal[int] :
keyword[if] identifier[f] [ literal[string] ] keyword[in] identifier[self] . identifier[jellyfish_data] :
identifier[log] . identifier[debug] ( literal[string] . identifier[format] ( identifier[f] [ literal[string] ]))
identifier[self] . identifier[add_data_source] ( identifier[f] )
identifier[self] . identifier[jellyfish_data] [ identifier[f] [ literal[string] ]]= identifier[histogram]
|
def parse_jellyfish_data(self, f):
""" Go through the hist file and memorise it """
histogram = {}
occurence = 0
for line in f['f']:
line = line.rstrip('\n')
occurence = int(line.split(' ')[0])
count = int(line.split(' ')[1])
histogram[occurence] = occurence * count # depends on [control=['for'], data=['line']]
#delete last occurnece as it is the sum of all kmer occuring more often than it.
del histogram[occurence]
#sanity check
self.max_key = max(histogram, key=histogram.get)
self.jellyfish_max_x = max(self.jellyfish_max_x, self.max_key)
if len(histogram) > 0:
if f['s_name'] in self.jellyfish_data:
log.debug('Duplicate sample name found! Overwriting: {}'.format(f['s_name'])) # depends on [control=['if'], data=[]]
self.add_data_source(f)
self.jellyfish_data[f['s_name']] = histogram # depends on [control=['if'], data=[]]
|
def get_agent_lookup_session(self):
    """Return the ``OsidSession`` for the agent lookup service.

    return: (osid.authentication.AgentLookupSession) - an
            ``AgentLookupSession``
    raise: OperationFailed - unable to complete request
    raise: Unimplemented - ``supports_agent_lookup()`` is ``false``
    *compliance: optional -- This method must be implemented if
    ``supports_agent_lookup()`` is ``true``.*
    """
    # Only hand out a session when the service advertises support.
    if self.supports_agent_lookup():
        # pylint: disable=no-member
        return sessions.AgentLookupSession(runtime=self._runtime)
    raise errors.Unimplemented()
|
def function[get_agent_lookup_session, parameter[self]]:
constant[Gets the ``OsidSession`` associated with the agent lookup service.
return: (osid.authentication.AgentLookupSession) - an
``AgentLookupSession``
raise: OperationFailed - unable to complete request
raise: Unimplemented - ``supports_agent_lookup()`` is ``false``
*compliance: optional -- This method must be implemented if
``supports_agent_lookup()`` is ``true``.*
]
if <ast.UnaryOp object at 0x7da20c794c10> begin[:]
<ast.Raise object at 0x7da20c794b80>
return[call[name[sessions].AgentLookupSession, parameter[]]]
|
keyword[def] identifier[get_agent_lookup_session] ( identifier[self] ):
literal[string]
keyword[if] keyword[not] identifier[self] . identifier[supports_agent_lookup] ():
keyword[raise] identifier[errors] . identifier[Unimplemented] ()
keyword[return] identifier[sessions] . identifier[AgentLookupSession] ( identifier[runtime] = identifier[self] . identifier[_runtime] )
|
def get_agent_lookup_session(self):
"""Gets the ``OsidSession`` associated with the agent lookup service.
return: (osid.authentication.AgentLookupSession) - an
``AgentLookupSession``
raise: OperationFailed - unable to complete request
raise: Unimplemented - ``supports_agent_lookup()`` is ``false``
*compliance: optional -- This method must be implemented if
``supports_agent_lookup()`` is ``true``.*
"""
if not self.supports_agent_lookup():
raise errors.Unimplemented() # depends on [control=['if'], data=[]]
# pylint: disable=no-member
return sessions.AgentLookupSession(runtime=self._runtime)
|
def ones(self, name, **kwargs):
    """Create an array filled with ones.

    Keyword arguments are forwarded as for :func:`zarr.creation.ones`.
    """
    # Delegate through the synchronized write path.
    return self._write_op(self._ones_nosync, name, **kwargs)
|
def function[ones, parameter[self, name]]:
constant[Create an array. Keyword arguments as per
:func:`zarr.creation.ones`.]
return[call[name[self]._write_op, parameter[name[self]._ones_nosync, name[name]]]]
|
keyword[def] identifier[ones] ( identifier[self] , identifier[name] ,** identifier[kwargs] ):
literal[string]
keyword[return] identifier[self] . identifier[_write_op] ( identifier[self] . identifier[_ones_nosync] , identifier[name] ,** identifier[kwargs] )
|
def ones(self, name, **kwargs):
"""Create an array. Keyword arguments as per
:func:`zarr.creation.ones`."""
return self._write_op(self._ones_nosync, name, **kwargs)
|
def handle_json_wrapper_GET(self, handler, parsed_params):
    """Call handler and output the return value in JSON.

    Invokes ``handler(parsed_params)``, serializes the result with
    ``ResultEncoder`` and writes it back as a 200 HTTP response.

    FIX: removed the unused local ``schedule`` (it was fetched from
    ``self.server.schedule`` and never referenced).
    """
    result = handler(parsed_params)
    content = ResultEncoder().encode(result)
    self.send_response(200)
    # NOTE(review): Content-Type is text/plain even though the payload is
    # JSON — preserved as-is; changing it could affect existing clients.
    self.send_header('Content-Type', 'text/plain')
    self.send_header('Content-Length', str(len(content)))
    self.end_headers()
    self.wfile.write(content)
|
def function[handle_json_wrapper_GET, parameter[self, handler, parsed_params]]:
constant[Call handler and output the return value in JSON.]
variable[schedule] assign[=] name[self].server.schedule
variable[result] assign[=] call[name[handler], parameter[name[parsed_params]]]
variable[content] assign[=] call[call[name[ResultEncoder], parameter[]].encode, parameter[name[result]]]
call[name[self].send_response, parameter[constant[200]]]
call[name[self].send_header, parameter[constant[Content-Type], constant[text/plain]]]
call[name[self].send_header, parameter[constant[Content-Length], call[name[str], parameter[call[name[len], parameter[name[content]]]]]]]
call[name[self].end_headers, parameter[]]
call[name[self].wfile.write, parameter[name[content]]]
|
keyword[def] identifier[handle_json_wrapper_GET] ( identifier[self] , identifier[handler] , identifier[parsed_params] ):
literal[string]
identifier[schedule] = identifier[self] . identifier[server] . identifier[schedule]
identifier[result] = identifier[handler] ( identifier[parsed_params] )
identifier[content] = identifier[ResultEncoder] (). identifier[encode] ( identifier[result] )
identifier[self] . identifier[send_response] ( literal[int] )
identifier[self] . identifier[send_header] ( literal[string] , literal[string] )
identifier[self] . identifier[send_header] ( literal[string] , identifier[str] ( identifier[len] ( identifier[content] )))
identifier[self] . identifier[end_headers] ()
identifier[self] . identifier[wfile] . identifier[write] ( identifier[content] )
|
def handle_json_wrapper_GET(self, handler, parsed_params):
"""Call handler and output the return value in JSON."""
schedule = self.server.schedule
result = handler(parsed_params)
content = ResultEncoder().encode(result)
self.send_response(200)
self.send_header('Content-Type', 'text/plain')
self.send_header('Content-Length', str(len(content)))
self.end_headers()
self.wfile.write(content)
|
def get_mode(device):
    '''
    Report whether the quota system for this device is on or off
    CLI Example:
    .. code-block:: bash
        salt '*' quota.get_mode
    '''
    ret = {}
    out = __salt__['cmd.run']('quotaon -p {0}'.format(device),
                              python_shell=False)
    for line in out.splitlines():
        fields = line.strip().split()
        if fields[3] not in ret:
            if fields[0].startswith('quotaon'):
                # Lines emitted by quotaon itself (headers / errors).
                if fields[1].startswith('Mountpoint'):
                    ret[fields[4]] = 'disabled'
                elif fields[1].startswith('Cannot'):
                    ret[device] = 'Not found'
                    return ret
                continue
            # First sighting of this filesystem: record its device name
            # with the surrounding parentheses stripped.
            ret[fields[3]] = {
                'device': fields[4].replace('(', '').replace(')', ''),
            }
        ret[fields[3]][fields[0]] = fields[6]
    return ret
|
def function[get_mode, parameter[device]]:
constant[
Report whether the quota system for this device is on or off
CLI Example:
.. code-block:: bash
salt '*' quota.get_mode
]
variable[ret] assign[=] dictionary[[], []]
variable[cmd] assign[=] call[constant[quotaon -p {0}].format, parameter[name[device]]]
variable[out] assign[=] call[call[name[__salt__]][constant[cmd.run]], parameter[name[cmd]]]
for taget[name[line]] in starred[call[name[out].splitlines, parameter[]]] begin[:]
variable[comps] assign[=] call[call[name[line].strip, parameter[]].split, parameter[]]
if compare[call[name[comps]][constant[3]] <ast.NotIn object at 0x7da2590d7190> name[ret]] begin[:]
if call[call[name[comps]][constant[0]].startswith, parameter[constant[quotaon]]] begin[:]
if call[call[name[comps]][constant[1]].startswith, parameter[constant[Mountpoint]]] begin[:]
call[name[ret]][call[name[comps]][constant[4]]] assign[=] constant[disabled]
continue
continue
call[name[ret]][call[name[comps]][constant[3]]] assign[=] dictionary[[<ast.Constant object at 0x7da1b21c5cf0>], [<ast.Call object at 0x7da1b21c5c90>]]
call[call[name[ret]][call[name[comps]][constant[3]]]][call[name[comps]][constant[0]]] assign[=] call[name[comps]][constant[6]]
return[name[ret]]
|
keyword[def] identifier[get_mode] ( identifier[device] ):
literal[string]
identifier[ret] ={}
identifier[cmd] = literal[string] . identifier[format] ( identifier[device] )
identifier[out] = identifier[__salt__] [ literal[string] ]( identifier[cmd] , identifier[python_shell] = keyword[False] )
keyword[for] identifier[line] keyword[in] identifier[out] . identifier[splitlines] ():
identifier[comps] = identifier[line] . identifier[strip] (). identifier[split] ()
keyword[if] identifier[comps] [ literal[int] ] keyword[not] keyword[in] identifier[ret] :
keyword[if] identifier[comps] [ literal[int] ]. identifier[startswith] ( literal[string] ):
keyword[if] identifier[comps] [ literal[int] ]. identifier[startswith] ( literal[string] ):
identifier[ret] [ identifier[comps] [ literal[int] ]]= literal[string]
keyword[continue]
keyword[elif] identifier[comps] [ literal[int] ]. identifier[startswith] ( literal[string] ):
identifier[ret] [ identifier[device] ]= literal[string]
keyword[return] identifier[ret]
keyword[continue]
identifier[ret] [ identifier[comps] [ literal[int] ]]={
literal[string] : identifier[comps] [ literal[int] ]. identifier[replace] ( literal[string] , literal[string] ). identifier[replace] ( literal[string] , literal[string] ),
}
identifier[ret] [ identifier[comps] [ literal[int] ]][ identifier[comps] [ literal[int] ]]= identifier[comps] [ literal[int] ]
keyword[return] identifier[ret]
|
def get_mode(device):
"""
Report whether the quota system for this device is on or off
CLI Example:
.. code-block:: bash
salt '*' quota.get_mode
"""
ret = {}
cmd = 'quotaon -p {0}'.format(device)
out = __salt__['cmd.run'](cmd, python_shell=False)
for line in out.splitlines():
comps = line.strip().split()
if comps[3] not in ret:
if comps[0].startswith('quotaon'):
if comps[1].startswith('Mountpoint'):
ret[comps[4]] = 'disabled'
continue # depends on [control=['if'], data=[]]
elif comps[1].startswith('Cannot'):
ret[device] = 'Not found'
return ret # depends on [control=['if'], data=[]]
continue # depends on [control=['if'], data=[]]
ret[comps[3]] = {'device': comps[4].replace('(', '').replace(')', '')} # depends on [control=['if'], data=['ret']]
ret[comps[3]][comps[0]] = comps[6] # depends on [control=['for'], data=['line']]
return ret
|
def _prepare_env(self):  # pragma: no cover
    """Fetch the document's build environment and this directive's state.

    On first use, attaches a fresh ``DirectiveState`` to the environment
    under ``self.directive_name`` so usages of this directive can be
    tracked (e.g. to detect outdated documents later).
    """
    env = self.state.document.settings.env
    if hasattr(env, self.directive_name):
        state = getattr(env, self.directive_name)
    else:
        # First use: create and remember the tracking state.
        state = DirectiveState()
        setattr(env, self.directive_name, state)
    return env, state
|
def function[_prepare_env, parameter[self]]:
constant[Setup the document's environment, if necessary.]
variable[env] assign[=] name[self].state.document.settings.env
if <ast.UnaryOp object at 0x7da1b18000d0> begin[:]
variable[state] assign[=] call[name[DirectiveState], parameter[]]
call[name[setattr], parameter[name[env], name[self].directive_name, name[state]]]
return[tuple[[<ast.Name object at 0x7da1b18011e0>, <ast.Name object at 0x7da1b1802f20>]]]
|
keyword[def] identifier[_prepare_env] ( identifier[self] ):
literal[string]
identifier[env] = identifier[self] . identifier[state] . identifier[document] . identifier[settings] . identifier[env]
keyword[if] keyword[not] identifier[hasattr] ( identifier[env] , identifier[self] . identifier[directive_name] ):
identifier[state] = identifier[DirectiveState] ()
identifier[setattr] ( identifier[env] , identifier[self] . identifier[directive_name] , identifier[state] )
keyword[else] :
identifier[state] = identifier[getattr] ( identifier[env] , identifier[self] . identifier[directive_name] )
keyword[return] identifier[env] , identifier[state]
|
def _prepare_env(self): # pragma: no cover
"Setup the document's environment, if necessary."
env = self.state.document.settings.env
if not hasattr(env, self.directive_name):
# Track places where we use this directive, so we can check for
# outdated documents in the future.
state = DirectiveState()
setattr(env, self.directive_name, state) # depends on [control=['if'], data=[]]
else:
state = getattr(env, self.directive_name)
return (env, state)
|
def CopyToDateTimeString(self):
    """Copies the time elements to a date and time string.

    Returns:
      str: date and time value formatted as: "YYYY-MM-DD hh:mm:ss" or
          "YYYY-MM-DD hh:mm:ss.######" or None if time elements are
          missing.

    Raises:
      ValueError: if the precision value is unsupported.
    """
    # Both the seconds count and the fractional part must be present to
    # produce a formatted value.
    if self._number_of_seconds is None:
        return None
    if self.fraction_of_second is None:
        return None

    helper = precisions.PrecisionHelperFactory.CreatePrecisionHelper(
        self._precision)
    return helper.CopyToDateTimeString(
        self._time_elements_tuple, self.fraction_of_second)
|
def function[CopyToDateTimeString, parameter[self]]:
constant[Copies the time elements to a date and time string.
Returns:
str: date and time value formatted as: "YYYY-MM-DD hh:mm:ss" or
"YYYY-MM-DD hh:mm:ss.######" or None if time elements are missing.
Raises:
ValueError: if the precision value is unsupported.
]
if <ast.BoolOp object at 0x7da2044c2350> begin[:]
return[constant[None]]
variable[precision_helper] assign[=] call[name[precisions].PrecisionHelperFactory.CreatePrecisionHelper, parameter[name[self]._precision]]
return[call[name[precision_helper].CopyToDateTimeString, parameter[name[self]._time_elements_tuple, name[self].fraction_of_second]]]
|
keyword[def] identifier[CopyToDateTimeString] ( identifier[self] ):
literal[string]
keyword[if] identifier[self] . identifier[_number_of_seconds] keyword[is] keyword[None] keyword[or] identifier[self] . identifier[fraction_of_second] keyword[is] keyword[None] :
keyword[return] keyword[None]
identifier[precision_helper] = identifier[precisions] . identifier[PrecisionHelperFactory] . identifier[CreatePrecisionHelper] (
identifier[self] . identifier[_precision] )
keyword[return] identifier[precision_helper] . identifier[CopyToDateTimeString] (
identifier[self] . identifier[_time_elements_tuple] , identifier[self] . identifier[fraction_of_second] )
|
def CopyToDateTimeString(self):
"""Copies the time elements to a date and time string.
Returns:
str: date and time value formatted as: "YYYY-MM-DD hh:mm:ss" or
"YYYY-MM-DD hh:mm:ss.######" or None if time elements are missing.
Raises:
ValueError: if the precision value is unsupported.
"""
if self._number_of_seconds is None or self.fraction_of_second is None:
return None # depends on [control=['if'], data=[]]
precision_helper = precisions.PrecisionHelperFactory.CreatePrecisionHelper(self._precision)
return precision_helper.CopyToDateTimeString(self._time_elements_tuple, self.fraction_of_second)
|
def get_decoder(encoding, flexible=False):
    """
    RETURN FUNCTION TO PERFORM DECODE
    :param encoding: STRING OF THE ENCODING (None for a pass-through)
    :param flexible: True IF YOU WISH TO TRY OUR BEST, AND KEEP GOING
    :return: FUNCTION
    """
    # FIX: use `is None` instead of `== None` — identity comparison is
    # the correct (PEP 8) way to test for None.
    if encoding is None:
        def no_decode(v):
            # No encoding given: return the value unchanged.
            return v
        return no_decode
    if flexible:
        def do_decode1(v):
            # Best-effort decode: silently drop undecodable bytes.
            return v.decode(encoding, 'ignore')
        return do_decode1

    def do_decode2(v):
        # Strict decode: raises UnicodeDecodeError on bad input.
        return v.decode(encoding)
    return do_decode2
|
def function[get_decoder, parameter[encoding, flexible]]:
constant[
RETURN FUNCTION TO PERFORM DECODE
:param encoding: STRING OF THE ENCODING
:param flexible: True IF YOU WISH TO TRY OUR BEST, AND KEEP GOING
:return: FUNCTION
]
if compare[name[encoding] equal[==] constant[None]] begin[:]
def function[no_decode, parameter[v]]:
return[name[v]]
return[name[no_decode]]
|
keyword[def] identifier[get_decoder] ( identifier[encoding] , identifier[flexible] = keyword[False] ):
literal[string]
keyword[if] identifier[encoding] == keyword[None] :
keyword[def] identifier[no_decode] ( identifier[v] ):
keyword[return] identifier[v]
keyword[return] identifier[no_decode]
keyword[elif] identifier[flexible] :
keyword[def] identifier[do_decode1] ( identifier[v] ):
keyword[return] identifier[v] . identifier[decode] ( identifier[encoding] , literal[string] )
keyword[return] identifier[do_decode1]
keyword[else] :
keyword[def] identifier[do_decode2] ( identifier[v] ):
keyword[return] identifier[v] . identifier[decode] ( identifier[encoding] )
keyword[return] identifier[do_decode2]
|
def get_decoder(encoding, flexible=False):
"""
RETURN FUNCTION TO PERFORM DECODE
:param encoding: STRING OF THE ENCODING
:param flexible: True IF YOU WISH TO TRY OUR BEST, AND KEEP GOING
:return: FUNCTION
"""
if encoding == None:
def no_decode(v):
return v
return no_decode # depends on [control=['if'], data=[]]
elif flexible:
def do_decode1(v):
return v.decode(encoding, 'ignore')
return do_decode1 # depends on [control=['if'], data=[]]
else:
def do_decode2(v):
return v.decode(encoding)
return do_decode2
|
def failsafe(func):
    """
    Wraps an app factory to provide a fallback in case of import errors.
    Takes a factory function to generate a Flask app. If there is an error
    creating the app, it will return a dummy app that just returns the Flask
    error page for the exception.
    This works with the Flask code reloader so that if the app fails during
    initialization it will still monitor those files for changes and reload
    the app.
    """
    @functools.wraps(func)
    def wrapper(*args, **kwargs):
        # Files involved in the failure; handed to the dummy app so the
        # reloader keeps watching them for changes.
        extra_files = []
        try:
            return func(*args, **kwargs)
        except:
            # Bare except is deliberate here: *any* failure during app
            # construction (including SyntaxError raised at import time)
            # must be captured and re-displayed, not propagated.
            exc_type, exc_val, exc_tb = sys.exc_info()
            traceback.print_exc()
            # Walk the traceback to collect every source file on the
            # failure path.
            tb = exc_tb
            while tb:
                filename = tb.tb_frame.f_code.co_filename
                extra_files.append(filename)
                tb = tb.tb_next
            if isinstance(exc_val, SyntaxError):
                # A SyntaxError's offending file is not a traceback frame;
                # add it explicitly.
                extra_files.append(exc_val.filename)
            app = _FailSafeFlask(extra_files)
            # Debug mode so Flask renders the interactive error page.
            app.debug = True
            @app.route('/')
            @app.route('/<path:path>')
            def index(path='/'):
                # Every route re-raises the captured exception so the
                # browser shows the original failure.
                reraise(exc_type, exc_val, exc_tb)
            return app
    return wrapper
|
def function[failsafe, parameter[func]]:
constant[
Wraps an app factory to provide a fallback in case of import errors.
Takes a factory function to generate a Flask app. If there is an error
creating the app, it will return a dummy app that just returns the Flask
error page for the exception.
This works with the Flask code reloader so that if the app fails during
initialization it will still monitor those files for changes and reload
the app.
]
def function[wrapper, parameter[]]:
variable[extra_files] assign[=] list[[]]
<ast.Try object at 0x7da1b255fd00>
variable[tb] assign[=] name[exc_tb]
while name[tb] begin[:]
variable[filename] assign[=] name[tb].tb_frame.f_code.co_filename
call[name[extra_files].append, parameter[name[filename]]]
variable[tb] assign[=] name[tb].tb_next
if call[name[isinstance], parameter[name[exc_val], name[SyntaxError]]] begin[:]
call[name[extra_files].append, parameter[name[exc_val].filename]]
variable[app] assign[=] call[name[_FailSafeFlask], parameter[name[extra_files]]]
name[app].debug assign[=] constant[True]
def function[index, parameter[path]]:
call[name[reraise], parameter[name[exc_type], name[exc_val], name[exc_tb]]]
return[name[app]]
return[name[wrapper]]
|
keyword[def] identifier[failsafe] ( identifier[func] ):
literal[string]
@ identifier[functools] . identifier[wraps] ( identifier[func] )
keyword[def] identifier[wrapper] (* identifier[args] ,** identifier[kwargs] ):
identifier[extra_files] =[]
keyword[try] :
keyword[return] identifier[func] (* identifier[args] ,** identifier[kwargs] )
keyword[except] :
identifier[exc_type] , identifier[exc_val] , identifier[exc_tb] = identifier[sys] . identifier[exc_info] ()
identifier[traceback] . identifier[print_exc] ()
identifier[tb] = identifier[exc_tb]
keyword[while] identifier[tb] :
identifier[filename] = identifier[tb] . identifier[tb_frame] . identifier[f_code] . identifier[co_filename]
identifier[extra_files] . identifier[append] ( identifier[filename] )
identifier[tb] = identifier[tb] . identifier[tb_next]
keyword[if] identifier[isinstance] ( identifier[exc_val] , identifier[SyntaxError] ):
identifier[extra_files] . identifier[append] ( identifier[exc_val] . identifier[filename] )
identifier[app] = identifier[_FailSafeFlask] ( identifier[extra_files] )
identifier[app] . identifier[debug] = keyword[True]
@ identifier[app] . identifier[route] ( literal[string] )
@ identifier[app] . identifier[route] ( literal[string] )
keyword[def] identifier[index] ( identifier[path] = literal[string] ):
identifier[reraise] ( identifier[exc_type] , identifier[exc_val] , identifier[exc_tb] )
keyword[return] identifier[app]
keyword[return] identifier[wrapper]
|
def failsafe(func):
"""
Wraps an app factory to provide a fallback in case of import errors.
Takes a factory function to generate a Flask app. If there is an error
creating the app, it will return a dummy app that just returns the Flask
error page for the exception.
This works with the Flask code reloader so that if the app fails during
initialization it will still monitor those files for changes and reload
the app.
"""
@functools.wraps(func)
def wrapper(*args, **kwargs):
extra_files = []
try:
return func(*args, **kwargs) # depends on [control=['try'], data=[]]
except:
(exc_type, exc_val, exc_tb) = sys.exc_info()
traceback.print_exc() # depends on [control=['except'], data=[]]
tb = exc_tb
while tb:
filename = tb.tb_frame.f_code.co_filename
extra_files.append(filename)
tb = tb.tb_next # depends on [control=['while'], data=[]]
if isinstance(exc_val, SyntaxError):
extra_files.append(exc_val.filename) # depends on [control=['if'], data=[]]
app = _FailSafeFlask(extra_files)
app.debug = True
@app.route('/')
@app.route('/<path:path>')
def index(path='/'):
reraise(exc_type, exc_val, exc_tb)
return app
return wrapper
|
def parse_table_definition_file(file):
    '''
    Read and parse the XML of a table-definition file.
    @return: an ElementTree object for the table definition
    '''
    logging.info("Reading table definition from '%s'...", file)
    if not os.path.isfile(file):
        logging.error("File '%s' does not exist.", file)
        exit(1)

    try:
        table_def = ElementTree.ElementTree().parse(file)
    except IOError as e:
        logging.error('Could not read result file %s: %s', file, e)
        exit(1)
    except ElementTree.ParseError as e:
        logging.error('Table file %s is invalid: %s', file, e)
        exit(1)

    # The definition must be rooted at a <table> element.
    if table_def.tag != 'table':
        logging.error("Table file %s is invalid: It's root element is not named 'table'.", file)
        exit(1)
    return table_def
|
def function[parse_table_definition_file, parameter[file]]:
constant[
Read an parse the XML of a table-definition file.
@return: an ElementTree object for the table definition
]
call[name[logging].info, parameter[constant[Reading table definition from '%s'...], name[file]]]
if <ast.UnaryOp object at 0x7da2044c2f50> begin[:]
call[name[logging].error, parameter[constant[File '%s' does not exist.], name[file]]]
call[name[exit], parameter[constant[1]]]
<ast.Try object at 0x7da2044c1120>
if compare[constant[table] not_equal[!=] name[tableGenFile].tag] begin[:]
call[name[logging].error, parameter[constant[Table file %s is invalid: It's root element is not named 'table'.], name[file]]]
call[name[exit], parameter[constant[1]]]
return[name[tableGenFile]]
|
keyword[def] identifier[parse_table_definition_file] ( identifier[file] ):
literal[string]
identifier[logging] . identifier[info] ( literal[string] , identifier[file] )
keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[isfile] ( identifier[file] ):
identifier[logging] . identifier[error] ( literal[string] , identifier[file] )
identifier[exit] ( literal[int] )
keyword[try] :
identifier[tableGenFile] = identifier[ElementTree] . identifier[ElementTree] (). identifier[parse] ( identifier[file] )
keyword[except] identifier[IOError] keyword[as] identifier[e] :
identifier[logging] . identifier[error] ( literal[string] , identifier[file] , identifier[e] )
identifier[exit] ( literal[int] )
keyword[except] identifier[ElementTree] . identifier[ParseError] keyword[as] identifier[e] :
identifier[logging] . identifier[error] ( literal[string] , identifier[file] , identifier[e] )
identifier[exit] ( literal[int] )
keyword[if] literal[string] != identifier[tableGenFile] . identifier[tag] :
identifier[logging] . identifier[error] ( literal[string] , identifier[file] )
identifier[exit] ( literal[int] )
keyword[return] identifier[tableGenFile]
|
def parse_table_definition_file(file):
"""
Read an parse the XML of a table-definition file.
@return: an ElementTree object for the table definition
"""
logging.info("Reading table definition from '%s'...", file)
if not os.path.isfile(file):
logging.error("File '%s' does not exist.", file)
exit(1) # depends on [control=['if'], data=[]]
try:
tableGenFile = ElementTree.ElementTree().parse(file) # depends on [control=['try'], data=[]]
except IOError as e:
logging.error('Could not read result file %s: %s', file, e)
exit(1) # depends on [control=['except'], data=['e']]
except ElementTree.ParseError as e:
logging.error('Table file %s is invalid: %s', file, e)
exit(1) # depends on [control=['except'], data=['e']]
if 'table' != tableGenFile.tag:
logging.error("Table file %s is invalid: It's root element is not named 'table'.", file)
exit(1) # depends on [control=['if'], data=[]]
return tableGenFile
|
def list_build_configurations(page_size=200, page_index=0, sort="", q=""):
    """
    List all BuildConfigurations
    """
    response = list_build_configurations_raw(page_size, page_index, sort, q)
    # A falsy response yields None (implicit return), matching the
    # original behavior.
    return utils.format_json_list(response) if response else None
|
def function[list_build_configurations, parameter[page_size, page_index, sort, q]]:
constant[
List all BuildConfigurations
]
variable[data] assign[=] call[name[list_build_configurations_raw], parameter[name[page_size], name[page_index], name[sort], name[q]]]
if name[data] begin[:]
return[call[name[utils].format_json_list, parameter[name[data]]]]
|
keyword[def] identifier[list_build_configurations] ( identifier[page_size] = literal[int] , identifier[page_index] = literal[int] , identifier[sort] = literal[string] , identifier[q] = literal[string] ):
literal[string]
identifier[data] = identifier[list_build_configurations_raw] ( identifier[page_size] , identifier[page_index] , identifier[sort] , identifier[q] )
keyword[if] identifier[data] :
keyword[return] identifier[utils] . identifier[format_json_list] ( identifier[data] )
|
def list_build_configurations(page_size=200, page_index=0, sort='', q=''):
"""
List all BuildConfigurations
"""
data = list_build_configurations_raw(page_size, page_index, sort, q)
if data:
return utils.format_json_list(data) # depends on [control=['if'], data=[]]
|
def get_default_master_type(num_gpus=1):
    """Returns master_type for trainingInput."""
    # Supported GPU counts mapped to their machine-tier names.
    master_types = {
        0: "standard",
        1: "standard_p100",
        4: "complex_model_m_p100",
        8: "complex_model_l_gpu",
    }
    if num_gpus in master_types:
        return master_types[num_gpus]
    raise ValueError("Num gpus must be in %s" %
                     str(sorted(list(master_types.keys()))))
|
def function[get_default_master_type, parameter[num_gpus]]:
constant[Returns master_type for trainingInput.]
variable[gpus_to_master_map] assign[=] dictionary[[<ast.Constant object at 0x7da18f00e020>, <ast.Constant object at 0x7da18f00fa30>, <ast.Constant object at 0x7da18f00d8d0>, <ast.Constant object at 0x7da18f00fc10>], [<ast.Constant object at 0x7da18f00de40>, <ast.Constant object at 0x7da18f00dc60>, <ast.Constant object at 0x7da18f00df00>, <ast.Constant object at 0x7da18f00f6a0>]]
if compare[name[num_gpus] <ast.NotIn object at 0x7da2590d7190> name[gpus_to_master_map]] begin[:]
<ast.Raise object at 0x7da18f00f0d0>
return[call[name[gpus_to_master_map]][name[num_gpus]]]
|
keyword[def] identifier[get_default_master_type] ( identifier[num_gpus] = literal[int] ):
literal[string]
identifier[gpus_to_master_map] ={
literal[int] : literal[string] ,
literal[int] : literal[string] ,
literal[int] : literal[string] ,
literal[int] : literal[string] ,
}
keyword[if] identifier[num_gpus] keyword[not] keyword[in] identifier[gpus_to_master_map] :
keyword[raise] identifier[ValueError] ( literal[string] %
identifier[str] ( identifier[sorted] ( identifier[list] ( identifier[gpus_to_master_map] . identifier[keys] ()))))
keyword[return] identifier[gpus_to_master_map] [ identifier[num_gpus] ]
|
def get_default_master_type(num_gpus=1):
"""Returns master_type for trainingInput."""
gpus_to_master_map = {0: 'standard', 1: 'standard_p100', 4: 'complex_model_m_p100', 8: 'complex_model_l_gpu'}
if num_gpus not in gpus_to_master_map:
raise ValueError('Num gpus must be in %s' % str(sorted(list(gpus_to_master_map.keys())))) # depends on [control=['if'], data=['gpus_to_master_map']]
return gpus_to_master_map[num_gpus]
|
def commonprefix(items):
    """Get common prefix for completions

    Return the longest common prefix of a list of strings, but with
    special treatment of escape characters that might precede commands
    in IPython, such as %magic functions. Used in tab completion.
    For a more general function, see os.path.commonprefix
    """
    # min/max give the alphabetically first and last items; the last item
    # always carries the fewest leading escape (%) characters.
    lo_match = ESCAPE_RE.match(min(items))
    hi_match = ESCAPE_RE.match(max(items))
    prefix = ''
    if lo_match and hi_match:
        # Common *suffix* of the two escape runs, computed as the common
        # prefix of the reversed strings, reversed back.
        rev_lo = lo_match.group(0)[::-1]
        rev_hi = hi_match.group(0)[::-1]
        prefix = os.path.commonprefix((rev_lo, rev_hi))[::-1]
    stripped = [s.lstrip(ESCAPE_CHARS) for s in items]
    return prefix + os.path.commonprefix(stripped)
|
def function[commonprefix, parameter[items]]:
constant[Get common prefix for completions
Return the longest common prefix of a list of strings, but with special
treatment of escape characters that might precede commands in IPython,
such as %magic functions. Used in tab completion.
For a more general function, see os.path.commonprefix
]
variable[first_match] assign[=] call[name[ESCAPE_RE].match, parameter[call[name[min], parameter[name[items]]]]]
variable[last_match] assign[=] call[name[ESCAPE_RE].match, parameter[call[name[max], parameter[name[items]]]]]
if <ast.BoolOp object at 0x7da20c7cb6a0> begin[:]
variable[prefix] assign[=] call[call[name[os].path.commonprefix, parameter[tuple[[<ast.Subscript object at 0x7da20c7cb010>, <ast.Subscript object at 0x7da20c7cabf0>]]]]][<ast.Slice object at 0x7da20c7c9690>]
variable[items] assign[=] <ast.ListComp object at 0x7da20c7c9420>
return[binary_operation[name[prefix] + call[name[os].path.commonprefix, parameter[name[items]]]]]
|
keyword[def] identifier[commonprefix] ( identifier[items] ):
literal[string]
identifier[first_match] = identifier[ESCAPE_RE] . identifier[match] ( identifier[min] ( identifier[items] ))
identifier[last_match] = identifier[ESCAPE_RE] . identifier[match] ( identifier[max] ( identifier[items] ))
keyword[if] identifier[first_match] keyword[and] identifier[last_match] :
identifier[prefix] = identifier[os] . identifier[path] . identifier[commonprefix] (( identifier[first_match] . identifier[group] ( literal[int] )[::- literal[int] ], identifier[last_match] . identifier[group] ( literal[int] )[::- literal[int] ]))[::- literal[int] ]
keyword[else] :
identifier[prefix] = literal[string]
identifier[items] =[ identifier[s] . identifier[lstrip] ( identifier[ESCAPE_CHARS] ) keyword[for] identifier[s] keyword[in] identifier[items] ]
keyword[return] identifier[prefix] + identifier[os] . identifier[path] . identifier[commonprefix] ( identifier[items] )
|
def commonprefix(items):
"""Get common prefix for completions
Return the longest common prefix of a list of strings, but with special
treatment of escape characters that might precede commands in IPython,
such as %magic functions. Used in tab completion.
For a more general function, see os.path.commonprefix
"""
# the last item will always have the least leading % symbol
# min / max are first/last in alphabetical order
first_match = ESCAPE_RE.match(min(items))
last_match = ESCAPE_RE.match(max(items))
# common suffix is (common prefix of reversed items) reversed
if first_match and last_match:
prefix = os.path.commonprefix((first_match.group(0)[::-1], last_match.group(0)[::-1]))[::-1] # depends on [control=['if'], data=[]]
else:
prefix = ''
items = [s.lstrip(ESCAPE_CHARS) for s in items]
return prefix + os.path.commonprefix(items)
|
def get_alpn_proto_negotiated(self):
    """
    Get the protocol that was negotiated by ALPN.

    :returns: A bytestring of the protocol name. If no protocol has been
        negotiated yet, returns an empty string.
    """
    data = _ffi.new("unsigned char **")
    data_len = _ffi.new("unsigned int *")
    _lib.SSL_get0_alpn_selected(self._ssl, data, data_len)
    # BUG FIX: a freshly allocated cffi pointer is always truthy, so the
    # original ``if not data_len`` could never fire and the documented
    # "empty string when nothing negotiated" case was unreachable.
    # Test the pointed-to length instead.
    if data_len[0] == 0:
        return b''
    return _ffi.buffer(data[0], data_len[0])[:]
|
def function[get_alpn_proto_negotiated, parameter[self]]:
constant[
Get the protocol that was negotiated by ALPN.
:returns: A bytestring of the protocol name. If no protocol has been
negotiated yet, returns an empty string.
]
variable[data] assign[=] call[name[_ffi].new, parameter[constant[unsigned char **]]]
variable[data_len] assign[=] call[name[_ffi].new, parameter[constant[unsigned int *]]]
call[name[_lib].SSL_get0_alpn_selected, parameter[name[self]._ssl, name[data], name[data_len]]]
if <ast.UnaryOp object at 0x7da1b028ef80> begin[:]
return[constant[b'']]
return[call[call[name[_ffi].buffer, parameter[call[name[data]][constant[0]], call[name[data_len]][constant[0]]]]][<ast.Slice object at 0x7da1b028d4b0>]]
|
keyword[def] identifier[get_alpn_proto_negotiated] ( identifier[self] ):
literal[string]
identifier[data] = identifier[_ffi] . identifier[new] ( literal[string] )
identifier[data_len] = identifier[_ffi] . identifier[new] ( literal[string] )
identifier[_lib] . identifier[SSL_get0_alpn_selected] ( identifier[self] . identifier[_ssl] , identifier[data] , identifier[data_len] )
keyword[if] keyword[not] identifier[data_len] :
keyword[return] literal[string]
keyword[return] identifier[_ffi] . identifier[buffer] ( identifier[data] [ literal[int] ], identifier[data_len] [ literal[int] ])[:]
|
def get_alpn_proto_negotiated(self):
"""
Get the protocol that was negotiated by ALPN.
:returns: A bytestring of the protocol name. If no protocol has been
negotiated yet, returns an empty string.
"""
data = _ffi.new('unsigned char **')
data_len = _ffi.new('unsigned int *')
_lib.SSL_get0_alpn_selected(self._ssl, data, data_len)
if not data_len:
return b'' # depends on [control=['if'], data=[]]
return _ffi.buffer(data[0], data_len[0])[:]
|
def latrec(radius, longitude, latitude):
    """
    Convert latitudinal coordinates (radius, longitude, latitude) into
    rectangular (x, y, z) coordinates via the CSPICE ``latrec_c`` routine.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/latrec_c.html

    :param radius: Distance of a point from the origin.
    :type radius: float
    :param longitude: Longitude of point in radians.
    :type longitude: float
    :param latitude: Latitude of point in radians.
    :type latitude: float
    :return: Rectangular coordinates of the point.
    :rtype: 3-Element Array of floats
    """
    # Output buffer filled in place by the C routine.
    rectan = stypes.emptyDoubleVector(3)
    libspice.latrec_c(
        ctypes.c_double(radius),
        ctypes.c_double(longitude),
        ctypes.c_double(latitude),
        rectan,
    )
    return stypes.cVectorToPython(rectan)
|
def function[latrec, parameter[radius, longitude, latitude]]:
constant[
Convert from latitudinal coordinates to rectangular coordinates.
http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/latrec_c.html
:param radius: Distance of a point from the origin.
:type radius: float
:param longitude: Longitude of point in radians.
:type longitude: float
:param latitude: Latitude of point in radians.
:type latitude: float
:return: Rectangular coordinates of the point.
:rtype: 3-Element Array of floats
]
variable[radius] assign[=] call[name[ctypes].c_double, parameter[name[radius]]]
variable[longitude] assign[=] call[name[ctypes].c_double, parameter[name[longitude]]]
variable[latitude] assign[=] call[name[ctypes].c_double, parameter[name[latitude]]]
variable[rectan] assign[=] call[name[stypes].emptyDoubleVector, parameter[constant[3]]]
call[name[libspice].latrec_c, parameter[name[radius], name[longitude], name[latitude], name[rectan]]]
return[call[name[stypes].cVectorToPython, parameter[name[rectan]]]]
|
keyword[def] identifier[latrec] ( identifier[radius] , identifier[longitude] , identifier[latitude] ):
literal[string]
identifier[radius] = identifier[ctypes] . identifier[c_double] ( identifier[radius] )
identifier[longitude] = identifier[ctypes] . identifier[c_double] ( identifier[longitude] )
identifier[latitude] = identifier[ctypes] . identifier[c_double] ( identifier[latitude] )
identifier[rectan] = identifier[stypes] . identifier[emptyDoubleVector] ( literal[int] )
identifier[libspice] . identifier[latrec_c] ( identifier[radius] , identifier[longitude] , identifier[latitude] , identifier[rectan] )
keyword[return] identifier[stypes] . identifier[cVectorToPython] ( identifier[rectan] )
|
def latrec(radius, longitude, latitude):
"""
Convert from latitudinal coordinates to rectangular coordinates.
http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/latrec_c.html
:param radius: Distance of a point from the origin.
:type radius: float
:param longitude: Longitude of point in radians.
:type longitude: float
:param latitude: Latitude of point in radians.
:type latitude: float
:return: Rectangular coordinates of the point.
:rtype: 3-Element Array of floats
"""
radius = ctypes.c_double(radius)
longitude = ctypes.c_double(longitude)
latitude = ctypes.c_double(latitude)
rectan = stypes.emptyDoubleVector(3)
libspice.latrec_c(radius, longitude, latitude, rectan)
return stypes.cVectorToPython(rectan)
|
def removeTopology(self, topology_name, state_manager_name):
    """
    Remove the named topology (and its cached TopologyInfo, if any)
    from the local cache.
    """
    key = (topology_name, state_manager_name)
    remaining = []
    for topology in self.topologies:
        is_target = (topology.name == topology_name
                     and topology.state_manager_name == state_manager_name)
        if not is_target:
            remaining.append(topology)
        elif key in self.topologyInfos:
            # Drop the cached info entry along with the topology itself.
            self.topologyInfos.pop(key)
    self.topologies = remaining
|
def function[removeTopology, parameter[self, topology_name, state_manager_name]]:
constant[
Removes the topology from the local cache.
]
variable[topologies] assign[=] list[[]]
for taget[name[top]] in starred[name[self].topologies] begin[:]
if <ast.BoolOp object at 0x7da20c76d3c0> begin[:]
if compare[tuple[[<ast.Name object at 0x7da20c76c910>, <ast.Name object at 0x7da20c76c3d0>]] in name[self].topologyInfos] begin[:]
call[name[self].topologyInfos.pop, parameter[tuple[[<ast.Name object at 0x7da20c76dc60>, <ast.Name object at 0x7da20c76f7f0>]]]]
name[self].topologies assign[=] name[topologies]
|
keyword[def] identifier[removeTopology] ( identifier[self] , identifier[topology_name] , identifier[state_manager_name] ):
literal[string]
identifier[topologies] =[]
keyword[for] identifier[top] keyword[in] identifier[self] . identifier[topologies] :
keyword[if] ( identifier[top] . identifier[name] == identifier[topology_name] keyword[and]
identifier[top] . identifier[state_manager_name] == identifier[state_manager_name] ):
keyword[if] ( identifier[topology_name] , identifier[state_manager_name] ) keyword[in] identifier[self] . identifier[topologyInfos] :
identifier[self] . identifier[topologyInfos] . identifier[pop] (( identifier[topology_name] , identifier[state_manager_name] ))
keyword[else] :
identifier[topologies] . identifier[append] ( identifier[top] )
identifier[self] . identifier[topologies] = identifier[topologies]
|
def removeTopology(self, topology_name, state_manager_name):
"""
Removes the topology from the local cache.
"""
topologies = []
for top in self.topologies:
if top.name == topology_name and top.state_manager_name == state_manager_name:
# Remove topologyInfo
if (topology_name, state_manager_name) in self.topologyInfos:
self.topologyInfos.pop((topology_name, state_manager_name)) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
else:
topologies.append(top) # depends on [control=['for'], data=['top']]
self.topologies = topologies
|
def fitNull(self, init_method='emp_cov'):
    """Fit the null model on mtSet1, mirror it onto mtSet2, and return it."""
    null_model = self.mtSet1.fitNull(
        cache=False, factr=self.factr, init_method=init_method)
    # Expose the baseline negative log-likelihood under the generic key.
    null_model['NLL'] = null_model['NLL0']
    self.null = null_model
    self.mtSet2.null = copy.copy(null_model)
    return null_model
|
def function[fitNull, parameter[self, init_method]]:
constant[ fit null model ]
name[self].null assign[=] call[name[self].mtSet1.fitNull, parameter[]]
call[name[self].null][constant[NLL]] assign[=] call[name[self].null][constant[NLL0]]
name[self].mtSet2.null assign[=] call[name[copy].copy, parameter[name[self].null]]
return[name[self].null]
|
keyword[def] identifier[fitNull] ( identifier[self] , identifier[init_method] = literal[string] ):
literal[string]
identifier[self] . identifier[null] = identifier[self] . identifier[mtSet1] . identifier[fitNull] ( identifier[cache] = keyword[False] , identifier[factr] = identifier[self] . identifier[factr] , identifier[init_method] = identifier[init_method] )
identifier[self] . identifier[null] [ literal[string] ]= identifier[self] . identifier[null] [ literal[string] ]
identifier[self] . identifier[mtSet2] . identifier[null] = identifier[copy] . identifier[copy] ( identifier[self] . identifier[null] )
keyword[return] identifier[self] . identifier[null]
|
def fitNull(self, init_method='emp_cov'):
""" fit null model """
self.null = self.mtSet1.fitNull(cache=False, factr=self.factr, init_method=init_method)
self.null['NLL'] = self.null['NLL0']
self.mtSet2.null = copy.copy(self.null)
return self.null
|
def is_inside(directory, fname):
    """Return True when ``fname`` lies inside ``directory``.

    Both arguments should normally be passed through osutils.normpath
    first, so that ``.``/``..`` segments and repeated slashes are gone
    and separators are canonical for the platform. The empty byte
    string as a directory name means top-of-tree and matches everything.
    """
    # Exact match and top-of-tree are both considered "inside".
    if directory in (fname, b''):
        return True
    prefix = directory if directory.endswith(b'/') else directory + b'/'
    return fname.startswith(prefix)
|
def function[is_inside, parameter[directory, fname]]:
constant[True if fname is inside directory.
The parameters should typically be passed to osutils.normpath first, so
that . and .. and repeated slashes are eliminated, and the separators
are canonical for the platform.
The empty string as a dir name is taken as top-of-tree and matches
everything.
]
if compare[name[directory] equal[==] name[fname]] begin[:]
return[constant[True]]
if compare[name[directory] equal[==] constant[b'']] begin[:]
return[constant[True]]
if <ast.UnaryOp object at 0x7da1b0a22d10> begin[:]
<ast.AugAssign object at 0x7da1b0a20dc0>
return[call[name[fname].startswith, parameter[name[directory]]]]
|
keyword[def] identifier[is_inside] ( identifier[directory] , identifier[fname] ):
literal[string]
keyword[if] identifier[directory] == identifier[fname] :
keyword[return] keyword[True]
keyword[if] identifier[directory] == literal[string] :
keyword[return] keyword[True]
keyword[if] keyword[not] identifier[directory] . identifier[endswith] ( literal[string] ):
identifier[directory] += literal[string]
keyword[return] identifier[fname] . identifier[startswith] ( identifier[directory] )
|
def is_inside(directory, fname):
"""True if fname is inside directory.
The parameters should typically be passed to osutils.normpath first, so
that . and .. and repeated slashes are eliminated, and the separators
are canonical for the platform.
The empty string as a dir name is taken as top-of-tree and matches
everything.
"""
# XXX: Most callers of this can actually do something smarter by
# looking at the inventory
if directory == fname:
return True # depends on [control=['if'], data=[]]
if directory == b'':
return True # depends on [control=['if'], data=[]]
if not directory.endswith(b'/'):
directory += b'/' # depends on [control=['if'], data=[]]
return fname.startswith(directory)
|
def addPrivateKey(self, wif):
    """Store a WIF private key (keyed by its public key) in the wallet.

    :raises InvalidWifError: if the WIF cannot be parsed into a public key
    :raises KeyAlreadyInStoreException: if the public key is already stored
    """
    try:
        pub = self.publickey_from_wif(wif)
    except Exception:
        raise InvalidWifError("Invalid Key format!")
    pub_str = str(pub)
    if pub_str in self.store:
        raise KeyAlreadyInStoreException("Key already in the store")
    self.store.add(str(wif), pub_str)
|
def function[addPrivateKey, parameter[self, wif]]:
constant[ Add a private key to the wallet database
]
<ast.Try object at 0x7da1b0109d50>
if compare[call[name[str], parameter[name[pub]]] in name[self].store] begin[:]
<ast.Raise object at 0x7da1b010aa70>
call[name[self].store.add, parameter[call[name[str], parameter[name[wif]]], call[name[str], parameter[name[pub]]]]]
|
keyword[def] identifier[addPrivateKey] ( identifier[self] , identifier[wif] ):
literal[string]
keyword[try] :
identifier[pub] = identifier[self] . identifier[publickey_from_wif] ( identifier[wif] )
keyword[except] identifier[Exception] :
keyword[raise] identifier[InvalidWifError] ( literal[string] )
keyword[if] identifier[str] ( identifier[pub] ) keyword[in] identifier[self] . identifier[store] :
keyword[raise] identifier[KeyAlreadyInStoreException] ( literal[string] )
identifier[self] . identifier[store] . identifier[add] ( identifier[str] ( identifier[wif] ), identifier[str] ( identifier[pub] ))
|
def addPrivateKey(self, wif):
""" Add a private key to the wallet database
"""
try:
pub = self.publickey_from_wif(wif) # depends on [control=['try'], data=[]]
except Exception:
raise InvalidWifError('Invalid Key format!') # depends on [control=['except'], data=[]]
if str(pub) in self.store:
raise KeyAlreadyInStoreException('Key already in the store') # depends on [control=['if'], data=[]]
self.store.add(str(wif), str(pub))
|
def patch_ref(self, sha):
    """Force-update the fork's master branch ref to the given commit.

    :param sha: Sha to use for the branch
    :return: the new head sha on success, otherwise a ProxyError
    :rtype: str or self.ProxyError
    """
    uri = "{api}/repos/{origin}/git/refs/heads/{branch}".format(
        api=self.github_api_url,
        origin=self.origin,
        branch=self.master_fork
    )
    data = {"sha": sha, "force": True}
    reply = self.request("PATCH", uri, data=data)
    # Both success and failure bodies are JSON; decode once up front.
    dic = json.loads(reply.content.decode("utf-8"))
    if reply.status_code == 200:
        return dic["object"]["sha"]
    return self.ProxyError(
        reply.status_code,
        (dic, "message"),
        step="patch",
        context={"uri": uri, "data": data}
    )
|
def function[patch_ref, parameter[self, sha]]:
constant[ Patch reference on the origin master branch
:param sha: Sha to use for the branch
:return: Status of success
:rtype: str or self.ProxyError
]
variable[uri] assign[=] call[constant[{api}/repos/{origin}/git/refs/heads/{branch}].format, parameter[]]
variable[data] assign[=] dictionary[[<ast.Constant object at 0x7da1b287ee00>, <ast.Constant object at 0x7da1b287dcc0>], [<ast.Name object at 0x7da1b287ca30>, <ast.Constant object at 0x7da1b287e5c0>]]
variable[reply] assign[=] call[name[self].request, parameter[constant[PATCH], name[uri]]]
if compare[name[reply].status_code equal[==] constant[200]] begin[:]
variable[dic] assign[=] call[name[json].loads, parameter[call[name[reply].content.decode, parameter[constant[utf-8]]]]]
return[call[call[name[dic]][constant[object]]][constant[sha]]]
|
keyword[def] identifier[patch_ref] ( identifier[self] , identifier[sha] ):
literal[string]
identifier[uri] = literal[string] . identifier[format] (
identifier[api] = identifier[self] . identifier[github_api_url] ,
identifier[origin] = identifier[self] . identifier[origin] ,
identifier[branch] = identifier[self] . identifier[master_fork]
)
identifier[data] ={
literal[string] : identifier[sha] ,
literal[string] : keyword[True]
}
identifier[reply] = identifier[self] . identifier[request] (
literal[string] ,
identifier[uri] ,
identifier[data] = identifier[data]
)
keyword[if] identifier[reply] . identifier[status_code] == literal[int] :
identifier[dic] = identifier[json] . identifier[loads] ( identifier[reply] . identifier[content] . identifier[decode] ( literal[string] ))
keyword[return] identifier[dic] [ literal[string] ][ literal[string] ]
keyword[else] :
identifier[dic] = identifier[json] . identifier[loads] ( identifier[reply] . identifier[content] . identifier[decode] ( literal[string] ))
keyword[return] identifier[self] . identifier[ProxyError] (
identifier[reply] . identifier[status_code] ,
( identifier[dic] , literal[string] ),
identifier[step] = literal[string] ,
identifier[context] ={
literal[string] : identifier[uri] ,
literal[string] : identifier[data]
}
)
|
def patch_ref(self, sha):
""" Patch reference on the origin master branch
:param sha: Sha to use for the branch
:return: Status of success
:rtype: str or self.ProxyError
"""
uri = '{api}/repos/{origin}/git/refs/heads/{branch}'.format(api=self.github_api_url, origin=self.origin, branch=self.master_fork)
data = {'sha': sha, 'force': True}
reply = self.request('PATCH', uri, data=data)
if reply.status_code == 200:
dic = json.loads(reply.content.decode('utf-8'))
return dic['object']['sha'] # depends on [control=['if'], data=[]]
else:
dic = json.loads(reply.content.decode('utf-8'))
return self.ProxyError(reply.status_code, (dic, 'message'), step='patch', context={'uri': uri, 'data': data})
|
def load_rules(self, filename):
    """
    Load rules from YAML configuration in the given file.

    :param filename: Filename of rule YAML file
    :return: rules object parsed from the YAML document
    :raises IOError: if the file cannot be opened (after logging)
    :raises yaml.YAMLError: if the file is not valid YAML (after logging)
    """
    self.logger.debug('Reading rules from %s', filename)
    try:
        in_file = open(filename)
    except IOError:
        self.logger.error('Error opening {0}'.format(filename))
        raise
    # BUG FIX: the original never closed the file handle, leaking it on
    # both the success and the parse-error paths.
    with in_file:
        try:
            # NOTE(security): yaml.load without an explicit Loader can run
            # arbitrary constructors on untrusted input; prefer
            # yaml.safe_load if rule files are not fully trusted.
            y = yaml.load(in_file)
        except yaml.YAMLError as exc:
            if hasattr(exc, 'problem_mark'):
                self.logger.error('Error parsing rules{0}'.format(exc.problem_mark))
            else:
                self.logger.error('Error parsing rules in {0}'.format(in_file.name))
            raise
    return y
|
def function[load_rules, parameter[self, filename]]:
constant[
Load rules from YAML configuration in the given stream object
:param filename: Filename of rule YAML file
:return: rules object
]
call[name[self].logger.debug, parameter[constant[Reading rules from %s], name[filename]]]
<ast.Try object at 0x7da1b26af7f0>
variable[y] assign[=] constant[None]
<ast.Try object at 0x7da1b26ad240>
return[name[y]]
|
keyword[def] identifier[load_rules] ( identifier[self] , identifier[filename] ):
literal[string]
identifier[self] . identifier[logger] . identifier[debug] ( literal[string] , identifier[filename] )
keyword[try] :
identifier[in_file] = identifier[open] ( identifier[filename] )
keyword[except] identifier[IOError] :
identifier[self] . identifier[logger] . identifier[error] ( literal[string] . identifier[format] ( identifier[filename] ))
keyword[raise]
identifier[y] = keyword[None]
keyword[try] :
identifier[y] = identifier[yaml] . identifier[load] ( identifier[in_file] )
keyword[except] identifier[yaml] . identifier[YAMLError] keyword[as] identifier[exc] :
keyword[if] identifier[hasattr] ( identifier[exc] , literal[string] ):
identifier[self] . identifier[logger] . identifier[error] ( literal[string] . identifier[format] ( identifier[exc] . identifier[problem_mark] ))
keyword[else] :
identifier[self] . identifier[logger] . identifier[error] ( literal[string] . identifier[format] ( identifier[in_file] . identifier[name] ))
keyword[raise]
keyword[return] identifier[y]
|
def load_rules(self, filename):
"""
Load rules from YAML configuration in the given stream object
:param filename: Filename of rule YAML file
:return: rules object
"""
self.logger.debug('Reading rules from %s', filename)
try:
in_file = open(filename) # depends on [control=['try'], data=[]]
except IOError:
self.logger.error('Error opening {0}'.format(filename))
raise # depends on [control=['except'], data=[]]
y = None
try:
y = yaml.load(in_file) # depends on [control=['try'], data=[]]
except yaml.YAMLError as exc:
if hasattr(exc, 'problem_mark'):
self.logger.error('Error parsing rules{0}'.format(exc.problem_mark)) # depends on [control=['if'], data=[]]
else:
self.logger.error('Error parsing rules in {0}'.format(in_file.name))
raise # depends on [control=['except'], data=['exc']]
return y
|
def componentsintobranch(idf, branch, listofcomponents, fluid=None):
    """Insert a list of components into a branch.

    Assumes that the nodes of consecutive components connect to each other.

    :param idf: IDF object that owns the branch
    :param branch: BRANCH object to (re)fill
    :param listofcomponents: list of (component, compnode) pairs
    :param fluid: 'Air', 'Water' or ''; only needed if there are air and
        water nodes in the same object. If the fluid is Steam, use Water.
    :return: the refilled branch object
    """
    if fluid is None:
        fluid = ''
    # NOTE: the original also built an unused ``componentlist`` here; the
    # dead computation has been removed.
    # empty branch if it has existing components
    thebranchname = branch.Name
    thebranch = idf.removeextensibles('BRANCH', thebranchname)  # empty the branch
    # fill in the new components with the node names into this branch:
    # find the first extensible field and fill in the data in obj.
    e_index = idf.getextensibleindex('BRANCH', thebranchname)
    theobj = thebranch.obj
    modeleditor.extendlist(theobj, e_index)  # just being careful here
    for comp, compnode in listofcomponents:
        theobj.append(comp.key)
        theobj.append(comp.Name)
        inletnodename = getnodefieldname(comp, "Inlet_Node_Name", fluid=fluid,
                                         startswith=compnode)
        theobj.append(comp[inletnodename])
        outletnodename = getnodefieldname(comp, "Outlet_Node_Name",
                                          fluid=fluid, startswith=compnode)
        theobj.append(comp[outletnodename])
        theobj.append('')
    return thebranch
|
def function[componentsintobranch, parameter[idf, branch, listofcomponents, fluid]]:
constant[insert a list of components into a branch
fluid is only needed if there are air and water nodes in same object
fluid is Air or Water or ''.
if the fluid is Steam, use Water]
if compare[name[fluid] is constant[None]] begin[:]
variable[fluid] assign[=] constant[]
variable[componentlist] assign[=] <ast.ListComp object at 0x7da18f811990>
variable[thebranchname] assign[=] name[branch].Name
variable[thebranch] assign[=] call[name[idf].removeextensibles, parameter[constant[BRANCH], name[thebranchname]]]
variable[e_index] assign[=] call[name[idf].getextensibleindex, parameter[constant[BRANCH], name[thebranchname]]]
variable[theobj] assign[=] name[thebranch].obj
call[name[modeleditor].extendlist, parameter[name[theobj], name[e_index]]]
for taget[tuple[[<ast.Name object at 0x7da204962950>, <ast.Name object at 0x7da204963c40>]]] in starred[name[listofcomponents]] begin[:]
call[name[theobj].append, parameter[name[comp].key]]
call[name[theobj].append, parameter[name[comp].Name]]
variable[inletnodename] assign[=] call[name[getnodefieldname], parameter[name[comp], constant[Inlet_Node_Name]]]
call[name[theobj].append, parameter[call[name[comp]][name[inletnodename]]]]
variable[outletnodename] assign[=] call[name[getnodefieldname], parameter[name[comp], constant[Outlet_Node_Name]]]
call[name[theobj].append, parameter[call[name[comp]][name[outletnodename]]]]
call[name[theobj].append, parameter[constant[]]]
return[name[thebranch]]
|
keyword[def] identifier[componentsintobranch] ( identifier[idf] , identifier[branch] , identifier[listofcomponents] , identifier[fluid] = keyword[None] ):
literal[string]
keyword[if] identifier[fluid] keyword[is] keyword[None] :
identifier[fluid] = literal[string]
identifier[componentlist] =[ identifier[item] [ literal[int] ] keyword[for] identifier[item] keyword[in] identifier[listofcomponents] ]
identifier[thebranchname] = identifier[branch] . identifier[Name]
identifier[thebranch] = identifier[idf] . identifier[removeextensibles] ( literal[string] , identifier[thebranchname] )
identifier[e_index] = identifier[idf] . identifier[getextensibleindex] ( literal[string] , identifier[thebranchname] )
identifier[theobj] = identifier[thebranch] . identifier[obj]
identifier[modeleditor] . identifier[extendlist] ( identifier[theobj] , identifier[e_index] )
keyword[for] identifier[comp] , identifier[compnode] keyword[in] identifier[listofcomponents] :
identifier[theobj] . identifier[append] ( identifier[comp] . identifier[key] )
identifier[theobj] . identifier[append] ( identifier[comp] . identifier[Name] )
identifier[inletnodename] = identifier[getnodefieldname] ( identifier[comp] , literal[string] , identifier[fluid] = identifier[fluid] ,
identifier[startswith] = identifier[compnode] )
identifier[theobj] . identifier[append] ( identifier[comp] [ identifier[inletnodename] ])
identifier[outletnodename] = identifier[getnodefieldname] ( identifier[comp] , literal[string] ,
identifier[fluid] = identifier[fluid] , identifier[startswith] = identifier[compnode] )
identifier[theobj] . identifier[append] ( identifier[comp] [ identifier[outletnodename] ])
identifier[theobj] . identifier[append] ( literal[string] )
keyword[return] identifier[thebranch]
|
def componentsintobranch(idf, branch, listofcomponents, fluid=None):
"""insert a list of components into a branch
fluid is only needed if there are air and water nodes in same object
fluid is Air or Water or ''.
if the fluid is Steam, use Water"""
if fluid is None:
fluid = '' # depends on [control=['if'], data=['fluid']]
componentlist = [item[0] for item in listofcomponents]
# assumes that the nodes of the component connect to each other
# empty branch if it has existing components
thebranchname = branch.Name
thebranch = idf.removeextensibles('BRANCH', thebranchname) # empty the branch
# fill in the new components with the node names into this branch
# find the first extensible field and fill in the data in obj.
e_index = idf.getextensibleindex('BRANCH', thebranchname)
theobj = thebranch.obj
modeleditor.extendlist(theobj, e_index) # just being careful here
for (comp, compnode) in listofcomponents:
theobj.append(comp.key)
theobj.append(comp.Name)
inletnodename = getnodefieldname(comp, 'Inlet_Node_Name', fluid=fluid, startswith=compnode)
theobj.append(comp[inletnodename])
outletnodename = getnodefieldname(comp, 'Outlet_Node_Name', fluid=fluid, startswith=compnode)
theobj.append(comp[outletnodename])
theobj.append('') # depends on [control=['for'], data=[]]
return thebranch
|
def processpool_map(task, args, message, concurrency, batchsize=1, nargs=None):
    """Map ``task`` over ``args`` with a pool of worker processes.

    See http://stackoverflow.com/a/16071616

    :param task: callable run once per job; each job tuple is unpacked
        into its positional arguments
    :param args: iterable of argument tuples (scalars are tupleised)
    :param message: progress-bar label; a falsy value disables the bar
    :param concurrency: number of worker processes to spawn
    :param batchsize: number of jobs grouped into one queue item
    :param nargs: explicit job count for the progress bar when ``args``
        has no useful length (e.g. a generator)
    :return: flat list of task results, restored to original job order
    """
    njobs = get_njobs(nargs, args)
    show_progress = bool(message)
    batches = grouper(batchsize, tupleise(args))
    def batched_task(*batch):
        # Execute a whole batch of jobs in a single worker round-trip.
        return [task(*job) for job in batch]
    if show_progress:
        message += ' (PP:{}w:{}b)'.format(concurrency, batchsize)
        pbar = setup_progressbar(message, njobs, simple_progress=True)
        pbar.start()
    q_in = multiprocessing.Queue()  # Should I limit either queue size? Limiting in-queue
    q_out = multiprocessing.Queue()  # increases time taken to send jobs, makes pbar less useful
    # NOTE(review): ``fun`` is not defined in this block -- presumably a
    # module-level worker loop that pulls (index, batch) items from q_in
    # and pushes (index, results) onto q_out; confirm against the module.
    proc = [multiprocessing.Process(target=fun, args=(batched_task, q_in, q_out)) for _ in range(concurrency)]
    for p in proc:
        p.daemon = True  # workers die with the parent instead of hanging it
        p.start()
    sent = [q_in.put((i, x)) for (i, x) in enumerate(batches)]
    # One (None, None) sentinel per worker signals shutdown.
    [q_in.put((None, None)) for _ in range(concurrency)]
    res = []
    completed_count = 0
    # Collect exactly one (batch_index, batch_results) item per batch sent.
    for _ in range(len(sent)):
        result = get_from_queue(q_out)
        res.append(result)
        completed_count += len(result[1])
        if show_progress:
            pbar.update(completed_count)
    [p.join() for p in proc]
    if show_progress:
        pbar.finish()
    # Sort by batch index, then flatten the batches into one result list.
    return flatten_list([x for (i, x) in sorted(res)])
|
def function[processpool_map, parameter[task, args, message, concurrency, batchsize, nargs]]:
constant[
See http://stackoverflow.com/a/16071616
]
variable[njobs] assign[=] call[name[get_njobs], parameter[name[nargs], name[args]]]
variable[show_progress] assign[=] call[name[bool], parameter[name[message]]]
variable[batches] assign[=] call[name[grouper], parameter[name[batchsize], call[name[tupleise], parameter[name[args]]]]]
def function[batched_task, parameter[]]:
return[<ast.ListComp object at 0x7da20e9b3cd0>]
if name[show_progress] begin[:]
<ast.AugAssign object at 0x7da20e9b2f50>
variable[pbar] assign[=] call[name[setup_progressbar], parameter[name[message], name[njobs]]]
call[name[pbar].start, parameter[]]
variable[q_in] assign[=] call[name[multiprocessing].Queue, parameter[]]
variable[q_out] assign[=] call[name[multiprocessing].Queue, parameter[]]
variable[proc] assign[=] <ast.ListComp object at 0x7da20e9b1360>
for taget[name[p]] in starred[name[proc]] begin[:]
name[p].daemon assign[=] constant[True]
call[name[p].start, parameter[]]
variable[sent] assign[=] <ast.ListComp object at 0x7da20e9b00a0>
<ast.ListComp object at 0x7da20e9b1870>
variable[res] assign[=] list[[]]
variable[completed_count] assign[=] constant[0]
for taget[name[_]] in starred[call[name[range], parameter[call[name[len], parameter[name[sent]]]]]] begin[:]
variable[result] assign[=] call[name[get_from_queue], parameter[name[q_out]]]
call[name[res].append, parameter[name[result]]]
<ast.AugAssign object at 0x7da1b27bb550>
if name[show_progress] begin[:]
call[name[pbar].update, parameter[name[completed_count]]]
<ast.ListComp object at 0x7da18bcc8520>
if name[show_progress] begin[:]
call[name[pbar].finish, parameter[]]
return[call[name[flatten_list], parameter[<ast.ListComp object at 0x7da18bcca7a0>]]]
|
keyword[def] identifier[processpool_map] ( identifier[task] , identifier[args] , identifier[message] , identifier[concurrency] , identifier[batchsize] = literal[int] , identifier[nargs] = keyword[None] ):
literal[string]
identifier[njobs] = identifier[get_njobs] ( identifier[nargs] , identifier[args] )
identifier[show_progress] = identifier[bool] ( identifier[message] )
identifier[batches] = identifier[grouper] ( identifier[batchsize] , identifier[tupleise] ( identifier[args] ))
keyword[def] identifier[batched_task] (* identifier[batch] ):
keyword[return] [ identifier[task] (* identifier[job] ) keyword[for] identifier[job] keyword[in] identifier[batch] ]
keyword[if] identifier[show_progress] :
identifier[message] += literal[string] . identifier[format] ( identifier[concurrency] , identifier[batchsize] )
identifier[pbar] = identifier[setup_progressbar] ( identifier[message] , identifier[njobs] , identifier[simple_progress] = keyword[True] )
identifier[pbar] . identifier[start] ()
identifier[q_in] = identifier[multiprocessing] . identifier[Queue] ()
identifier[q_out] = identifier[multiprocessing] . identifier[Queue] ()
identifier[proc] =[ identifier[multiprocessing] . identifier[Process] ( identifier[target] = identifier[fun] , identifier[args] =( identifier[batched_task] , identifier[q_in] , identifier[q_out] )) keyword[for] identifier[_] keyword[in] identifier[range] ( identifier[concurrency] )]
keyword[for] identifier[p] keyword[in] identifier[proc] :
identifier[p] . identifier[daemon] = keyword[True]
identifier[p] . identifier[start] ()
identifier[sent] =[ identifier[q_in] . identifier[put] (( identifier[i] , identifier[x] )) keyword[for] ( identifier[i] , identifier[x] ) keyword[in] identifier[enumerate] ( identifier[batches] )]
[ identifier[q_in] . identifier[put] (( keyword[None] , keyword[None] )) keyword[for] identifier[_] keyword[in] identifier[range] ( identifier[concurrency] )]
identifier[res] =[]
identifier[completed_count] = literal[int]
keyword[for] identifier[_] keyword[in] identifier[range] ( identifier[len] ( identifier[sent] )):
identifier[result] = identifier[get_from_queue] ( identifier[q_out] )
identifier[res] . identifier[append] ( identifier[result] )
identifier[completed_count] += identifier[len] ( identifier[result] [ literal[int] ])
keyword[if] identifier[show_progress] :
identifier[pbar] . identifier[update] ( identifier[completed_count] )
[ identifier[p] . identifier[join] () keyword[for] identifier[p] keyword[in] identifier[proc] ]
keyword[if] identifier[show_progress] :
identifier[pbar] . identifier[finish] ()
keyword[return] identifier[flatten_list] ([ identifier[x] keyword[for] ( identifier[i] , identifier[x] ) keyword[in] identifier[sorted] ( identifier[res] )])
|
def processpool_map(task, args, message, concurrency, batchsize=1, nargs=None):
"""
See http://stackoverflow.com/a/16071616
"""
njobs = get_njobs(nargs, args)
show_progress = bool(message)
batches = grouper(batchsize, tupleise(args))
def batched_task(*batch):
return [task(*job) for job in batch]
if show_progress:
message += ' (PP:{}w:{}b)'.format(concurrency, batchsize)
pbar = setup_progressbar(message, njobs, simple_progress=True)
pbar.start() # depends on [control=['if'], data=[]]
q_in = multiprocessing.Queue() # Should I limit either queue size? Limiting in-queue
q_out = multiprocessing.Queue() # increases time taken to send jobs, makes pbar less useful
proc = [multiprocessing.Process(target=fun, args=(batched_task, q_in, q_out)) for _ in range(concurrency)]
for p in proc:
p.daemon = True
p.start() # depends on [control=['for'], data=['p']]
sent = [q_in.put((i, x)) for (i, x) in enumerate(batches)]
[q_in.put((None, None)) for _ in range(concurrency)]
res = []
completed_count = 0
for _ in range(len(sent)):
result = get_from_queue(q_out)
res.append(result)
completed_count += len(result[1])
if show_progress:
pbar.update(completed_count) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]]
[p.join() for p in proc]
if show_progress:
pbar.finish() # depends on [control=['if'], data=[]]
return flatten_list([x for (i, x) in sorted(res)])
|
def list(self, link_type, product, identifierType=None):
    """Fetch the products linked to the given product.

    :param link_type: type of link, one of 'cross_sell', 'up_sell',
        'related' or 'grouped'
    :param product: ID or SKU of product
    :param identifierType: Defines whether the product or SKU value is
        passed in the "product" parameter.
    :return: `list` of `dict`
    """
    # Forward the three positional arguments straight to the API call.
    api_args = [link_type, product, identifierType]
    return self.call('catalog_product_link.list', api_args)
|
def function[list, parameter[self, link_type, product, identifierType]]:
constant[
Retrieve list of linked products
:param link_type: type of link, one of 'cross_sell', 'up_sell',
'related' or 'grouped'
:param product: ID or SKU of product
:param identifierType: Defines whether the product or SKU value is
passed in the "product" parameter.
:return: `list` of `dict`
]
return[call[name[self].call, parameter[constant[catalog_product_link.list], list[[<ast.Name object at 0x7da1b0476dd0>, <ast.Name object at 0x7da1b0477ca0>, <ast.Name object at 0x7da1b0477220>]]]]]
|
keyword[def] identifier[list] ( identifier[self] , identifier[link_type] , identifier[product] , identifier[identifierType] = keyword[None] ):
literal[string]
keyword[return] identifier[self] . identifier[call] ( literal[string] ,
[ identifier[link_type] , identifier[product] , identifier[identifierType] ])
|
def list(self, link_type, product, identifierType=None):
"""
Retrieve list of linked products
:param link_type: type of link, one of 'cross_sell', 'up_sell',
'related' or 'grouped'
:param product: ID or SKU of product
:param identifierType: Defines whether the product or SKU value is
passed in the "product" parameter.
:return: `list` of `dict`
"""
return self.call('catalog_product_link.list', [link_type, product, identifierType])
|
def _slice(self, start, end):
    """Used internally to get a slice, without error checking."""
    # An empty slice is just a fresh, empty instance of the same type.
    if start == end:
        return self.__class__()
    absolute_start = start + self._offset
    absolute_end = end + self._offset
    # Translate bit positions into a byte range plus a bit offset into
    # the first byte.
    first_byte, bit_offset = divmod(absolute_start, 8)
    last_byte = (absolute_end - 1) // 8
    raw = self._datastore.getbyteslice(first_byte, last_byte + 1)
    sliced = self.__class__()
    sliced._setbytes_unsafe(raw, end - start, bit_offset)
    return sliced
|
def function[_slice, parameter[self, start, end]]:
constant[Used internally to get a slice, without error checking.]
if compare[name[end] equal[==] name[start]] begin[:]
return[call[name[self].__class__, parameter[]]]
variable[offset] assign[=] name[self]._offset
<ast.Tuple object at 0x7da1b1080280> assign[=] call[name[divmod], parameter[binary_operation[name[start] + name[offset]], constant[8]]]
variable[endbyte] assign[=] binary_operation[binary_operation[binary_operation[name[end] + name[offset]] - constant[1]] <ast.FloorDiv object at 0x7da2590d6bc0> constant[8]]
variable[bs] assign[=] call[name[self].__class__, parameter[]]
call[name[bs]._setbytes_unsafe, parameter[call[name[self]._datastore.getbyteslice, parameter[name[startbyte], binary_operation[name[endbyte] + constant[1]]]], binary_operation[name[end] - name[start]], name[newoffset]]]
return[name[bs]]
|
keyword[def] identifier[_slice] ( identifier[self] , identifier[start] , identifier[end] ):
literal[string]
keyword[if] identifier[end] == identifier[start] :
keyword[return] identifier[self] . identifier[__class__] ()
identifier[offset] = identifier[self] . identifier[_offset]
identifier[startbyte] , identifier[newoffset] = identifier[divmod] ( identifier[start] + identifier[offset] , literal[int] )
identifier[endbyte] =( identifier[end] + identifier[offset] - literal[int] )// literal[int]
identifier[bs] = identifier[self] . identifier[__class__] ()
identifier[bs] . identifier[_setbytes_unsafe] ( identifier[self] . identifier[_datastore] . identifier[getbyteslice] ( identifier[startbyte] , identifier[endbyte] + literal[int] ), identifier[end] - identifier[start] , identifier[newoffset] )
keyword[return] identifier[bs]
|
def _slice(self, start, end):
"""Used internally to get a slice, without error checking."""
if end == start:
return self.__class__() # depends on [control=['if'], data=[]]
offset = self._offset
(startbyte, newoffset) = divmod(start + offset, 8)
endbyte = (end + offset - 1) // 8
bs = self.__class__()
bs._setbytes_unsafe(self._datastore.getbyteslice(startbyte, endbyte + 1), end - start, newoffset)
return bs
|
def set_environment_variable(self, name, value):
    """
    Set the value of an environment variable.
    .. warning::
        The server may reject this request depending on its ``AcceptEnv``
        setting; such rejections will fail silently (which is common client
        practice for this particular request type). Make sure you
        understand your server's configuration before using!
    :param str name: name of the environment variable
    :param str value: value of the environment variable
    :raises:
        `.SSHException` -- if the request was rejected or the channel was
        closed
    """
    # Build the "env" channel request with want_reply=False, then send
    # the name/value pair to the remote side.
    msg = Message()
    msg.add_byte(cMSG_CHANNEL_REQUEST)
    msg.add_int(self.remote_chanid)
    msg.add_string("env")
    msg.add_boolean(False)
    msg.add_string(name)
    msg.add_string(value)
    self.transport._send_user_message(msg)
|
def function[set_environment_variable, parameter[self, name, value]]:
constant[
Set the value of an environment variable.
.. warning::
The server may reject this request depending on its ``AcceptEnv``
setting; such rejections will fail silently (which is common client
practice for this particular request type). Make sure you
understand your server's configuration before using!
:param str name: name of the environment variable
:param str value: value of the environment variable
:raises:
`.SSHException` -- if the request was rejected or the channel was
closed
]
variable[m] assign[=] call[name[Message], parameter[]]
call[name[m].add_byte, parameter[name[cMSG_CHANNEL_REQUEST]]]
call[name[m].add_int, parameter[name[self].remote_chanid]]
call[name[m].add_string, parameter[constant[env]]]
call[name[m].add_boolean, parameter[constant[False]]]
call[name[m].add_string, parameter[name[name]]]
call[name[m].add_string, parameter[name[value]]]
call[name[self].transport._send_user_message, parameter[name[m]]]
|
keyword[def] identifier[set_environment_variable] ( identifier[self] , identifier[name] , identifier[value] ):
literal[string]
identifier[m] = identifier[Message] ()
identifier[m] . identifier[add_byte] ( identifier[cMSG_CHANNEL_REQUEST] )
identifier[m] . identifier[add_int] ( identifier[self] . identifier[remote_chanid] )
identifier[m] . identifier[add_string] ( literal[string] )
identifier[m] . identifier[add_boolean] ( keyword[False] )
identifier[m] . identifier[add_string] ( identifier[name] )
identifier[m] . identifier[add_string] ( identifier[value] )
identifier[self] . identifier[transport] . identifier[_send_user_message] ( identifier[m] )
|
def set_environment_variable(self, name, value):
"""
Set the value of an environment variable.
.. warning::
The server may reject this request depending on its ``AcceptEnv``
setting; such rejections will fail silently (which is common client
practice for this particular request type). Make sure you
understand your server's configuration before using!
:param str name: name of the environment variable
:param str value: value of the environment variable
:raises:
`.SSHException` -- if the request was rejected or the channel was
closed
"""
m = Message()
m.add_byte(cMSG_CHANNEL_REQUEST)
m.add_int(self.remote_chanid)
m.add_string('env')
m.add_boolean(False)
m.add_string(name)
m.add_string(value)
self.transport._send_user_message(m)
|
def _alter_umask(self):
    """Temporarily alter umask to custom setting, if applicable"""
    if self.umask is None:
        # No custom umask configured; act as a no-op context.
        yield
        return
    previous = os.umask(self.umask)
    try:
        yield
    finally:
        # Always restore the process umask, even if the body raised.
        os.umask(previous)
|
def function[_alter_umask, parameter[self]]:
constant[Temporarily alter umask to custom setting, if applicable]
if compare[name[self].umask is constant[None]] begin[:]
<ast.Yield object at 0x7da18f58e920>
|
keyword[def] identifier[_alter_umask] ( identifier[self] ):
literal[string]
keyword[if] identifier[self] . identifier[umask] keyword[is] keyword[None] :
keyword[yield]
keyword[else] :
identifier[prev_umask] = identifier[os] . identifier[umask] ( identifier[self] . identifier[umask] )
keyword[try] :
keyword[yield]
keyword[finally] :
identifier[os] . identifier[umask] ( identifier[prev_umask] )
|
def _alter_umask(self):
"""Temporarily alter umask to custom setting, if applicable"""
if self.umask is None:
yield # nothing to do # depends on [control=['if'], data=[]]
else:
prev_umask = os.umask(self.umask)
try:
yield # depends on [control=['try'], data=[]]
finally:
os.umask(prev_umask)
|
def make_dataframe(result):
    """
    Turns the results of one of the data API calls into a pandas dataframe
    """
    import pandas as pd
    STAT_COLUMNS = ['time', 'min', 'mean', 'max', 'count']
    RAW_COLUMNS = ['time', 'value']
    # Some API responses wrap the per-UUID data in a 'timeseries' key.
    if isinstance(result, dict) and 'timeseries' in result:
        result = result['timeseries']
    frames = {}
    for uuid, rows in result.items():
        frame = pd.DataFrame(rows)
        # Five columns means the server returned statistical aggregates;
        # otherwise it is a plain (time, value) series.
        frame.columns = STAT_COLUMNS if len(frame.columns) == 5 else RAW_COLUMNS
        # Timestamps arrive as nanoseconds since the epoch.
        frame['time'] = pd.to_datetime(frame['time'], unit='ns')
        frame = frame.set_index(frame.pop('time'))
        frames[uuid] = frame
    return frames
|
def function[make_dataframe, parameter[result]]:
constant[
Turns the results of one of the data API calls into a pandas dataframe
]
import module[pandas] as alias[pd]
variable[ret] assign[=] dictionary[[], []]
if call[name[isinstance], parameter[name[result], name[dict]]] begin[:]
if compare[constant[timeseries] in name[result]] begin[:]
variable[result] assign[=] call[name[result]][constant[timeseries]]
for taget[tuple[[<ast.Name object at 0x7da2044c3460>, <ast.Name object at 0x7da2044c3b50>]]] in starred[call[name[result].items, parameter[]]] begin[:]
variable[df] assign[=] call[name[pd].DataFrame, parameter[name[data]]]
if compare[call[name[len], parameter[name[df].columns]] equal[==] constant[5]] begin[:]
name[df].columns assign[=] list[[<ast.Constant object at 0x7da2044c3e20>, <ast.Constant object at 0x7da2044c2230>, <ast.Constant object at 0x7da2044c0c70>, <ast.Constant object at 0x7da2044c1450>, <ast.Constant object at 0x7da2044c0700>]]
call[name[df]][constant[time]] assign[=] call[name[pd].to_datetime, parameter[call[name[df]][constant[time]]]]
variable[df] assign[=] call[name[df].set_index, parameter[call[name[df].pop, parameter[constant[time]]]]]
call[name[ret]][name[uuid]] assign[=] name[df]
return[name[ret]]
|
keyword[def] identifier[make_dataframe] ( identifier[result] ):
literal[string]
keyword[import] identifier[pandas] keyword[as] identifier[pd]
identifier[ret] ={}
keyword[if] identifier[isinstance] ( identifier[result] , identifier[dict] ):
keyword[if] literal[string] keyword[in] identifier[result] :
identifier[result] = identifier[result] [ literal[string] ]
keyword[for] identifier[uuid] , identifier[data] keyword[in] identifier[result] . identifier[items] ():
identifier[df] = identifier[pd] . identifier[DataFrame] ( identifier[data] )
keyword[if] identifier[len] ( identifier[df] . identifier[columns] )== literal[int] :
identifier[df] . identifier[columns] =[ literal[string] , literal[string] , literal[string] , literal[string] , literal[string] ]
keyword[else] :
identifier[df] . identifier[columns] =[ literal[string] , literal[string] ]
identifier[df] [ literal[string] ]= identifier[pd] . identifier[to_datetime] ( identifier[df] [ literal[string] ], identifier[unit] = literal[string] )
identifier[df] = identifier[df] . identifier[set_index] ( identifier[df] . identifier[pop] ( literal[string] ))
identifier[ret] [ identifier[uuid] ]= identifier[df]
keyword[return] identifier[ret]
|
def make_dataframe(result):
"""
Turns the results of one of the data API calls into a pandas dataframe
"""
import pandas as pd
ret = {}
if isinstance(result, dict):
if 'timeseries' in result:
result = result['timeseries'] # depends on [control=['if'], data=['result']] # depends on [control=['if'], data=[]]
for (uuid, data) in result.items():
df = pd.DataFrame(data)
if len(df.columns) == 5: # statistical data
df.columns = ['time', 'min', 'mean', 'max', 'count'] # depends on [control=['if'], data=[]]
else:
df.columns = ['time', 'value']
df['time'] = pd.to_datetime(df['time'], unit='ns')
df = df.set_index(df.pop('time'))
ret[uuid] = df # depends on [control=['for'], data=[]]
return ret
|
def create_new_cookbook(cookbook_name, cookbooks_home):
    """Create a new cookbook.
    :param cookbook_name: Name of the new cookbook.
    :param cookbooks_home: Target dir for new cookbook.
    """
    cookbooks_home = utils.normalize_path(cookbooks_home)
    if not os.path.exists(cookbooks_home):
        raise ValueError("Target cookbook dir %s does not exist."
                         % os.path.relpath(cookbooks_home))
    target_dir = os.path.join(cookbooks_home, cookbook_name)
    LOG.debug("Creating dir -> %s", target_dir)
    try:
        os.makedirs(target_dir)
    except OSError as err:
        # Re-raise anything other than "directory already exists".
        if err.errno != errno.EEXIST:
            raise
        LOG.info("Skipping existing directory %s", target_dir)
    return book.CookBook(os.path.join(cookbooks_home, cookbook_name))
|
def function[create_new_cookbook, parameter[cookbook_name, cookbooks_home]]:
constant[Create a new cookbook.
:param cookbook_name: Name of the new cookbook.
:param cookbooks_home: Target dir for new cookbook.
]
variable[cookbooks_home] assign[=] call[name[utils].normalize_path, parameter[name[cookbooks_home]]]
if <ast.UnaryOp object at 0x7da18bcc82e0> begin[:]
<ast.Raise object at 0x7da18bcca830>
variable[target_dir] assign[=] call[name[os].path.join, parameter[name[cookbooks_home], name[cookbook_name]]]
call[name[LOG].debug, parameter[constant[Creating dir -> %s], name[target_dir]]]
<ast.Try object at 0x7da20c991210>
variable[cookbook_path] assign[=] call[name[os].path.join, parameter[name[cookbooks_home], name[cookbook_name]]]
variable[cookbook] assign[=] call[name[book].CookBook, parameter[name[cookbook_path]]]
return[name[cookbook]]
|
keyword[def] identifier[create_new_cookbook] ( identifier[cookbook_name] , identifier[cookbooks_home] ):
literal[string]
identifier[cookbooks_home] = identifier[utils] . identifier[normalize_path] ( identifier[cookbooks_home] )
keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[exists] ( identifier[cookbooks_home] ):
keyword[raise] identifier[ValueError] ( literal[string]
% identifier[os] . identifier[path] . identifier[relpath] ( identifier[cookbooks_home] ))
identifier[target_dir] = identifier[os] . identifier[path] . identifier[join] ( identifier[cookbooks_home] , identifier[cookbook_name] )
identifier[LOG] . identifier[debug] ( literal[string] , identifier[target_dir] )
keyword[try] :
identifier[os] . identifier[makedirs] ( identifier[target_dir] )
keyword[except] identifier[OSError] keyword[as] identifier[err] :
keyword[if] identifier[err] . identifier[errno] != identifier[errno] . identifier[EEXIST] :
keyword[raise]
keyword[else] :
identifier[LOG] . identifier[info] ( literal[string] , identifier[target_dir] )
identifier[cookbook_path] = identifier[os] . identifier[path] . identifier[join] ( identifier[cookbooks_home] , identifier[cookbook_name] )
identifier[cookbook] = identifier[book] . identifier[CookBook] ( identifier[cookbook_path] )
keyword[return] identifier[cookbook]
|
def create_new_cookbook(cookbook_name, cookbooks_home):
"""Create a new cookbook.
:param cookbook_name: Name of the new cookbook.
:param cookbooks_home: Target dir for new cookbook.
"""
cookbooks_home = utils.normalize_path(cookbooks_home)
if not os.path.exists(cookbooks_home):
raise ValueError('Target cookbook dir %s does not exist.' % os.path.relpath(cookbooks_home)) # depends on [control=['if'], data=[]]
target_dir = os.path.join(cookbooks_home, cookbook_name)
LOG.debug('Creating dir -> %s', target_dir)
try:
os.makedirs(target_dir) # depends on [control=['try'], data=[]]
except OSError as err:
if err.errno != errno.EEXIST:
raise # depends on [control=['if'], data=[]]
else:
LOG.info('Skipping existing directory %s', target_dir) # depends on [control=['except'], data=['err']]
cookbook_path = os.path.join(cookbooks_home, cookbook_name)
cookbook = book.CookBook(cookbook_path)
return cookbook
|
def __register_driver(self, channel, webdriver):
    """Register webdriver to a channel.

    The driver is appended to the per-channel cleanup list and also
    installed as the channel's current (singleton) webdriver.

    Args:
        channel: Key identifying the channel the driver belongs to.
        webdriver: The webdriver instance to register.
    """
    # Track every driver created for this channel so it can be cleaned
    # up later.  ``dict.has_key`` was removed in Python 3 and would raise
    # AttributeError here; ``setdefault`` creates the list on first use.
    self.__registered_drivers.setdefault(channel, []).append(webdriver)
    # Set singleton instance for the channel
    self.__webdriver[channel] = webdriver
|
def function[__register_driver, parameter[self, channel, webdriver]]:
constant[Register webdriver to a channel.]
if <ast.UnaryOp object at 0x7da1b1107700> begin[:]
call[name[self].__registered_drivers][name[channel]] assign[=] list[[]]
call[call[name[self].__registered_drivers][name[channel]].append, parameter[name[webdriver]]]
call[name[self].__webdriver][name[channel]] assign[=] name[webdriver]
|
keyword[def] identifier[__register_driver] ( identifier[self] , identifier[channel] , identifier[webdriver] ):
literal[string]
keyword[if] keyword[not] identifier[self] . identifier[__registered_drivers] . identifier[has_key] ( identifier[channel] ):
identifier[self] . identifier[__registered_drivers] [ identifier[channel] ]=[]
identifier[self] . identifier[__registered_drivers] [ identifier[channel] ]. identifier[append] ( identifier[webdriver] )
identifier[self] . identifier[__webdriver] [ identifier[channel] ]= identifier[webdriver]
|
def __register_driver(self, channel, webdriver):
"""Register webdriver to a channel."""
# Add to list of webdrivers to cleanup.
if not self.__registered_drivers.has_key(channel):
self.__registered_drivers[channel] = [] # set to new empty array # depends on [control=['if'], data=[]]
self.__registered_drivers[channel].append(webdriver)
# Set singleton instance for the channel
self.__webdriver[channel] = webdriver
|
def previous_unwrittable_on_row(view, coords):
    """Return position of the previous (in row) letter that is unwrittable"""
    x, y = coords
    # Walk leftwards from the cell just before (x, y) down to column 0.
    for column in reversed(range(0, x)):
        if view[column, y] not in REWRITABLE_LETTERS:
            return column
    # Every letter to the left can be overwritten.
    return None
|
def function[previous_unwrittable_on_row, parameter[view, coords]]:
constant[Return position of the previous (in row) letter that is unwrittable]
<ast.Tuple object at 0x7da20c6e61d0> assign[=] name[coords]
variable[minx] assign[=] <ast.UnaryOp object at 0x7da20c6e6650>
for taget[name[offset]] in starred[call[name[range], parameter[binary_operation[name[x] - constant[1]], name[minx], <ast.UnaryOp object at 0x7da20c6e72e0>]]] begin[:]
variable[letter] assign[=] call[name[view]][tuple[[<ast.Name object at 0x7da20c6e6320>, <ast.Name object at 0x7da20c6e5d80>]]]
if compare[name[letter] <ast.NotIn object at 0x7da2590d7190> name[REWRITABLE_LETTERS]] begin[:]
return[name[offset]]
return[constant[None]]
|
keyword[def] identifier[previous_unwrittable_on_row] ( identifier[view] , identifier[coords] ):
literal[string]
identifier[x] , identifier[y] = identifier[coords]
identifier[minx] =- literal[int]
keyword[for] identifier[offset] keyword[in] identifier[range] ( identifier[x] - literal[int] , identifier[minx] ,- literal[int] ):
identifier[letter] = identifier[view] [ identifier[offset] , identifier[y] ]
keyword[if] identifier[letter] keyword[not] keyword[in] identifier[REWRITABLE_LETTERS] :
keyword[return] identifier[offset]
keyword[return] keyword[None]
|
def previous_unwrittable_on_row(view, coords):
"""Return position of the previous (in row) letter that is unwrittable"""
(x, y) = coords
minx = -1
for offset in range(x - 1, minx, -1):
letter = view[offset, y]
if letter not in REWRITABLE_LETTERS:
return offset # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['offset']]
return None
|
def measurement_key(
        val: Any,
        default: Any = RaiseTypeErrorIfNotProvided):
    """Get the measurement key for the given value.
    Args:
        val: The value which has the measurement key..
        default: Determines the fallback behavior when `val` doesn't have
            a measurement key. If `default` is not set, a TypeError is raised.
            If default is set to a value, that value is returned if the value
            does not have `_measurement_key_`.
    Returns:
        If `val` has a `_measurement_key_` method and its result is not
        `NotImplemented`, that result is returned. Otherwise, if a default
        value was specified, the default value is returned.
    Raises:
        TypeError: `val` doesn't have a _measurement_key_ method (or that method
            returned NotImplemented) and also no default value was specified.
    """
    getter = getattr(val, '_measurement_key_', None)
    if getter is not None:
        # The protocol method exists; honor its answer unless it opts out.
        result = getter()
        if result is not NotImplemented:
            return result
    if default is not RaiseTypeErrorIfNotProvided:
        return default
    # No usable key and no fallback: report which failure mode occurred.
    if getter is None:
        raise TypeError(
            "object of type '{}' has no _measurement_key_ method."
            .format(type(val)))
    raise TypeError("object of type '{}' does have a _measurement_key_ method, "
                    "but it returned NotImplemented.".format(type(val)))
|
def function[measurement_key, parameter[val, default]]:
constant[Get the measurement key for the given value.
Args:
val: The value which has the measurement key..
default: Determines the fallback behavior when `val` doesn't have
a measurement key. If `default` is not set, a TypeError is raised.
If default is set to a value, that value is returned if the value
does not have `_measurement_key_`.
Returns:
If `val` has a `_measurement_key_` method and its result is not
`NotImplemented`, that result is returned. Otherwise, if a default
value was specified, the default value is returned.
Raises:
TypeError: `val` doesn't have a _measurement_key_ method (or that method
returned NotImplemented) and also no default value was specified.
]
variable[getter] assign[=] call[name[getattr], parameter[name[val], constant[_measurement_key_], constant[None]]]
variable[result] assign[=] <ast.IfExp object at 0x7da1b1c190c0>
if compare[name[result] is_not name[NotImplemented]] begin[:]
return[name[result]]
if compare[name[default] is_not name[RaiseTypeErrorIfNotProvided]] begin[:]
return[name[default]]
if compare[name[getter] is constant[None]] begin[:]
<ast.Raise object at 0x7da1b1c189a0>
<ast.Raise object at 0x7da1b1c19f90>
|
keyword[def] identifier[measurement_key] (
identifier[val] : identifier[Any] ,
identifier[default] : identifier[Any] = identifier[RaiseTypeErrorIfNotProvided] ):
literal[string]
identifier[getter] = identifier[getattr] ( identifier[val] , literal[string] , keyword[None] )
identifier[result] = identifier[NotImplemented] keyword[if] identifier[getter] keyword[is] keyword[None] keyword[else] identifier[getter] ()
keyword[if] identifier[result] keyword[is] keyword[not] identifier[NotImplemented] :
keyword[return] identifier[result]
keyword[if] identifier[default] keyword[is] keyword[not] identifier[RaiseTypeErrorIfNotProvided] :
keyword[return] identifier[default]
keyword[if] identifier[getter] keyword[is] keyword[None] :
keyword[raise] identifier[TypeError] (
literal[string]
. identifier[format] ( identifier[type] ( identifier[val] )))
keyword[raise] identifier[TypeError] ( literal[string]
literal[string] . identifier[format] ( identifier[type] ( identifier[val] )))
|
def measurement_key(val: Any, default: Any=RaiseTypeErrorIfNotProvided):
"""Get the measurement key for the given value.
Args:
val: The value which has the measurement key..
default: Determines the fallback behavior when `val` doesn't have
a measurement key. If `default` is not set, a TypeError is raised.
If default is set to a value, that value is returned if the value
does not have `_measurement_key_`.
Returns:
If `val` has a `_measurement_key_` method and its result is not
`NotImplemented`, that result is returned. Otherwise, if a default
value was specified, the default value is returned.
Raises:
TypeError: `val` doesn't have a _measurement_key_ method (or that method
returned NotImplemented) and also no default value was specified.
"""
getter = getattr(val, '_measurement_key_', None)
result = NotImplemented if getter is None else getter()
if result is not NotImplemented:
return result # depends on [control=['if'], data=['result']]
if default is not RaiseTypeErrorIfNotProvided:
return default # depends on [control=['if'], data=['default']]
if getter is None:
raise TypeError("object of type '{}' has no _measurement_key_ method.".format(type(val))) # depends on [control=['if'], data=[]]
raise TypeError("object of type '{}' does have a _measurement_key_ method, but it returned NotImplemented.".format(type(val)))
|
def add_range_headers(self, range_header):
    """
    Adds several headers that are necessary for a streaming file
    response, in order for Safari to play audio files. Also
    sets the HTTP status_code to 206 (partial content).
    Args:
        range_header (str): Browser HTTP_RANGE request header.
    """
    self['Accept-Ranges'] = 'bytes'
    size = self.ranged_file.size
    try:
        ranges = self.ranged_file.parse_range_header(range_header, size)
    except ValueError:
        ranges = None
    # Ignore syntactically invalid headers and multipart byteranges;
    # only a single simple range is served.
    if ranges is None or len(ranges) != 1:
        return
    start, stop = ranges[0]
    if start >= size:
        # Requested range not satisfiable.
        self.status_code = 416
        return
    # Clamp the end of the range to the actual file size.
    stop = min(stop, size)
    self.ranged_file.start = start
    self.ranged_file.stop = stop
    self['Content-Range'] = 'bytes %d-%d/%d' % (start, stop - 1, size)
    self['Content-Length'] = stop - start
    self.status_code = 206
|
def function[add_range_headers, parameter[self, range_header]]:
constant[
Adds several headers that are necessary for a streaming file
response, in order for Safari to play audio files. Also
sets the HTTP status_code to 206 (partial content).
Args:
range_header (str): Browser HTTP_RANGE request header.
]
call[name[self]][constant[Accept-Ranges]] assign[=] constant[bytes]
variable[size] assign[=] name[self].ranged_file.size
<ast.Try object at 0x7da1b2531090>
if <ast.BoolOp object at 0x7da1b26a11b0> begin[:]
<ast.Tuple object at 0x7da1b26a3c10> assign[=] call[name[ranges]][constant[0]]
if compare[name[start] greater_or_equal[>=] name[size]] begin[:]
name[self].status_code assign[=] constant[416]
return[None]
if compare[name[stop] greater_or_equal[>=] name[size]] begin[:]
variable[stop] assign[=] name[size]
name[self].ranged_file.start assign[=] name[start]
name[self].ranged_file.stop assign[=] name[stop]
call[name[self]][constant[Content-Range]] assign[=] binary_operation[constant[bytes %d-%d/%d] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1b26a38e0>, <ast.BinOp object at 0x7da1b26a2ce0>, <ast.Name object at 0x7da1b26a0c40>]]]
call[name[self]][constant[Content-Length]] assign[=] binary_operation[name[stop] - name[start]]
name[self].status_code assign[=] constant[206]
|
keyword[def] identifier[add_range_headers] ( identifier[self] , identifier[range_header] ):
literal[string]
identifier[self] [ literal[string] ]= literal[string]
identifier[size] = identifier[self] . identifier[ranged_file] . identifier[size]
keyword[try] :
identifier[ranges] = identifier[self] . identifier[ranged_file] . identifier[parse_range_header] ( identifier[range_header] , identifier[size] )
keyword[except] identifier[ValueError] :
identifier[ranges] = keyword[None]
keyword[if] identifier[ranges] keyword[is] keyword[not] keyword[None] keyword[and] identifier[len] ( identifier[ranges] )== literal[int] :
identifier[start] , identifier[stop] = identifier[ranges] [ literal[int] ]
keyword[if] identifier[start] >= identifier[size] :
identifier[self] . identifier[status_code] = literal[int]
keyword[return]
keyword[if] identifier[stop] >= identifier[size] :
identifier[stop] = identifier[size]
identifier[self] . identifier[ranged_file] . identifier[start] = identifier[start]
identifier[self] . identifier[ranged_file] . identifier[stop] = identifier[stop]
identifier[self] [ literal[string] ]= literal[string] %( identifier[start] , identifier[stop] - literal[int] , identifier[size] )
identifier[self] [ literal[string] ]= identifier[stop] - identifier[start]
identifier[self] . identifier[status_code] = literal[int]
|
def add_range_headers(self, range_header):
"""
Adds several headers that are necessary for a streaming file
response, in order for Safari to play audio files. Also
sets the HTTP status_code to 206 (partial content).
Args:
range_header (str): Browser HTTP_RANGE request header.
"""
self['Accept-Ranges'] = 'bytes'
size = self.ranged_file.size
try:
ranges = self.ranged_file.parse_range_header(range_header, size) # depends on [control=['try'], data=[]]
except ValueError:
ranges = None # depends on [control=['except'], data=[]]
# Only handle syntactically valid headers, that are simple (no
# multipart byteranges).
if ranges is not None and len(ranges) == 1:
(start, stop) = ranges[0]
if start >= size:
# Requested range not satisfiable.
self.status_code = 416
return # depends on [control=['if'], data=[]]
if stop >= size:
stop = size # depends on [control=['if'], data=['stop', 'size']]
self.ranged_file.start = start
self.ranged_file.stop = stop
self['Content-Range'] = 'bytes %d-%d/%d' % (start, stop - 1, size)
self['Content-Length'] = stop - start
self.status_code = 206 # depends on [control=['if'], data=[]]
|
def roles(self):
    """Return a set with all roles granted to the user."""
    # Each entry in ``roleusers`` is a user/role association row.
    return {assoc.role for assoc in self.roleusers}
|
def function[roles, parameter[self]]:
constant[Return a set with all roles granted to the user.]
variable[roles] assign[=] list[[]]
for taget[name[ur]] in starred[name[self].roleusers] begin[:]
call[name[roles].append, parameter[name[ur].role]]
return[call[name[set], parameter[name[roles]]]]
|
keyword[def] identifier[roles] ( identifier[self] ):
literal[string]
identifier[roles] =[]
keyword[for] identifier[ur] keyword[in] identifier[self] . identifier[roleusers] :
identifier[roles] . identifier[append] ( identifier[ur] . identifier[role] )
keyword[return] identifier[set] ( identifier[roles] )
|
def roles(self):
"""Return a set with all roles granted to the user."""
roles = []
for ur in self.roleusers:
roles.append(ur.role) # depends on [control=['for'], data=['ur']]
return set(roles)
|
def stage_tc(self, owner, staging_data, variable):
    """Stage data using ThreatConnect API.

    .. code-block:: javascript

        [{
            "data": {
                "id": 116,
                "value": "adversary001-build-testing",
                "type": "Adversary",
                "ownerName": "qa-build",
                "dateAdded": "2017-08-16T18:35:07-04:00",
                "webLink": "https://app.tci.ninja/auth/adversary/adversary.xhtml?adversary=116"
            },
            "data_type": "redis",
            "variable": "#App:0822:adversary!TCEntity"
        }]

    Args:
        owner (str): The ThreatConnect owner name.
        staging_data (dict): A dict containing the ThreatConnect threat intel.
        variable (str): A variable name to write to Redis.
    """
    # parse resource_data
    resource_type = staging_data.pop('type')
    if resource_type in self.tcex.indicator_types or resource_type in self.tcex.group_types:
        # dict.pop() with a default replaces the three try/except KeyError
        # blocks: metadata keys are optional and default to empty lists.
        attributes = staging_data.pop('attribute', [])
        security_labels = staging_data.pop('security_label', [])
        tags = staging_data.pop('tag', [])

        resource = self.tcex.resource(resource_type)
        resource.http_method = 'POST'
        resource.owner = owner

        # special case for Email Group Type
        if resource_type == 'Email':
            resource.add_payload('option', 'createVictims')

        # what remains in staging_data after the pops is the resource body
        self.log.debug('body: {}'.format(staging_data))
        resource.body = json.dumps(staging_data)
        response = resource.request()
        if response.get('status') == 'Success':
            # add resource id
            if resource_type in self.tcex.indicator_types:
                # indicators: the id is the indicator summary value
                resource_id = resource.summary(response.get('data'))
                self.log.info(
                    '[stage] Creating resource {}:{}'.format(resource_type, resource_id)
                )
            elif resource_type in self.tcex.group_types:
                # groups: the id is the numeric API id
                self.log.info(
                    '[stage] Creating resource {}:{}'.format(
                        resource_type, response.get('data', {}).get('name')
                    )
                )
                resource_id = response.get('data', {}).get('id')
            self.log.debug('[stage] resource_id: {}'.format(resource_id))
            resource.resource_id(resource_id)

            # write a TCEntity for the new resource into Redis under `variable`
            entity = self.tcex.playbook.json_to_entity(
                response.get('data'), resource.value_fields, resource.name, resource.parent
            )
            self.log.debug('[stage] Creating Entity: {} ({})'.format(variable, entity[0]))
            self.stage_redis(variable, entity[0])

            # update metadata
            for attribute_data in attributes:
                self.stage_tc_create_attribute(
                    attribute_data.get('type'), attribute_data.get('value'), resource
                )
            for label_data in security_labels:
                self.stage_tc_create_security_label(label_data.get('name'), resource)
            for tag_data in tags:
                self.stage_tc_create_tag(tag_data.get('name'), resource)
    else:
        self.log.error('[stage] Unsupported resource type {}.'.format(resource_type))
|
def function[stage_tc, parameter[self, owner, staging_data, variable]]:
constant[Stage data using ThreatConnect API.
.. code-block:: javascript
[{
"data": {
"id": 116,
"value": "adversary001-build-testing",
"type": "Adversary",
"ownerName": "qa-build",
"dateAdded": "2017-08-16T18:35:07-04:00",
"webLink": "https://app.tci.ninja/auth/adversary/adversary.xhtml?adversary=116"
},
"data_type": "redis",
"variable": "#App:0822:adversary!TCEntity"
}]
Args:
owner (str): The ThreatConnect owner name.
staging_data (dict): A dict containing the ThreatConnect threat intel.
variable (str): A variable name to write to Redis.
]
variable[resource_type] assign[=] call[name[staging_data].pop, parameter[constant[type]]]
if <ast.BoolOp object at 0x7da2044c1000> begin[:]
<ast.Try object at 0x7da2044c0af0>
<ast.Try object at 0x7da2044c23b0>
<ast.Try object at 0x7da2044c1c30>
variable[resource] assign[=] call[name[self].tcex.resource, parameter[name[resource_type]]]
name[resource].http_method assign[=] constant[POST]
name[resource].owner assign[=] name[owner]
if compare[name[resource_type] equal[==] constant[Email]] begin[:]
call[name[resource].add_payload, parameter[constant[option], constant[createVictims]]]
call[name[self].log.debug, parameter[call[constant[body: {}].format, parameter[name[staging_data]]]]]
name[resource].body assign[=] call[name[json].dumps, parameter[name[staging_data]]]
variable[response] assign[=] call[name[resource].request, parameter[]]
if compare[call[name[response].get, parameter[constant[status]]] equal[==] constant[Success]] begin[:]
if compare[name[resource_type] in name[self].tcex.indicator_types] begin[:]
variable[resource_id] assign[=] call[name[resource].summary, parameter[call[name[response].get, parameter[constant[data]]]]]
call[name[self].log.info, parameter[call[constant[[stage] Creating resource {}:{}].format, parameter[name[resource_type], name[resource_id]]]]]
call[name[self].log.debug, parameter[call[constant[[stage] resource_id: {}].format, parameter[name[resource_id]]]]]
call[name[resource].resource_id, parameter[name[resource_id]]]
variable[entity] assign[=] call[name[self].tcex.playbook.json_to_entity, parameter[call[name[response].get, parameter[constant[data]]], name[resource].value_fields, name[resource].name, name[resource].parent]]
call[name[self].log.debug, parameter[call[constant[[stage] Creating Entity: {} ({})].format, parameter[name[variable], call[name[entity]][constant[0]]]]]]
call[name[self].stage_redis, parameter[name[variable], call[name[entity]][constant[0]]]]
for taget[name[attribute_data]] in starred[name[attributes]] begin[:]
call[name[self].stage_tc_create_attribute, parameter[call[name[attribute_data].get, parameter[constant[type]]], call[name[attribute_data].get, parameter[constant[value]]], name[resource]]]
for taget[name[label_data]] in starred[name[security_labels]] begin[:]
call[name[self].stage_tc_create_security_label, parameter[call[name[label_data].get, parameter[constant[name]]], name[resource]]]
for taget[name[tag_data]] in starred[name[tags]] begin[:]
call[name[self].stage_tc_create_tag, parameter[call[name[tag_data].get, parameter[constant[name]]], name[resource]]]
|
keyword[def] identifier[stage_tc] ( identifier[self] , identifier[owner] , identifier[staging_data] , identifier[variable] ):
literal[string]
identifier[resource_type] = identifier[staging_data] . identifier[pop] ( literal[string] )
keyword[if] identifier[resource_type] keyword[in] identifier[self] . identifier[tcex] . identifier[indicator_types] keyword[or] identifier[resource_type] keyword[in] identifier[self] . identifier[tcex] . identifier[group_types] :
keyword[try] :
identifier[attributes] = identifier[staging_data] . identifier[pop] ( literal[string] )
keyword[except] identifier[KeyError] :
identifier[attributes] =[]
keyword[try] :
identifier[security_labels] = identifier[staging_data] . identifier[pop] ( literal[string] )
keyword[except] identifier[KeyError] :
identifier[security_labels] =[]
keyword[try] :
identifier[tags] = identifier[staging_data] . identifier[pop] ( literal[string] )
keyword[except] identifier[KeyError] :
identifier[tags] =[]
identifier[resource] = identifier[self] . identifier[tcex] . identifier[resource] ( identifier[resource_type] )
identifier[resource] . identifier[http_method] = literal[string]
identifier[resource] . identifier[owner] = identifier[owner]
keyword[if] identifier[resource_type] == literal[string] :
identifier[resource] . identifier[add_payload] ( literal[string] , literal[string] )
identifier[self] . identifier[log] . identifier[debug] ( literal[string] . identifier[format] ( identifier[staging_data] ))
identifier[resource] . identifier[body] = identifier[json] . identifier[dumps] ( identifier[staging_data] )
identifier[response] = identifier[resource] . identifier[request] ()
keyword[if] identifier[response] . identifier[get] ( literal[string] )== literal[string] :
keyword[if] identifier[resource_type] keyword[in] identifier[self] . identifier[tcex] . identifier[indicator_types] :
identifier[resource_id] = identifier[resource] . identifier[summary] ( identifier[response] . identifier[get] ( literal[string] ))
identifier[self] . identifier[log] . identifier[info] (
literal[string] . identifier[format] ( identifier[resource_type] , identifier[resource_id] )
)
keyword[elif] identifier[resource_type] keyword[in] identifier[self] . identifier[tcex] . identifier[group_types] :
identifier[self] . identifier[log] . identifier[info] (
literal[string] . identifier[format] (
identifier[resource_type] , identifier[response] . identifier[get] ( literal[string] ,{}). identifier[get] ( literal[string] )
)
)
identifier[resource_id] = identifier[response] . identifier[get] ( literal[string] ,{}). identifier[get] ( literal[string] )
identifier[self] . identifier[log] . identifier[debug] ( literal[string] . identifier[format] ( identifier[resource_id] ))
identifier[resource] . identifier[resource_id] ( identifier[resource_id] )
identifier[entity] = identifier[self] . identifier[tcex] . identifier[playbook] . identifier[json_to_entity] (
identifier[response] . identifier[get] ( literal[string] ), identifier[resource] . identifier[value_fields] , identifier[resource] . identifier[name] , identifier[resource] . identifier[parent]
)
identifier[self] . identifier[log] . identifier[debug] ( literal[string] . identifier[format] ( identifier[variable] , identifier[entity] [ literal[int] ]))
identifier[self] . identifier[stage_redis] ( identifier[variable] , identifier[entity] [ literal[int] ])
keyword[for] identifier[attribute_data] keyword[in] identifier[attributes] :
identifier[self] . identifier[stage_tc_create_attribute] (
identifier[attribute_data] . identifier[get] ( literal[string] ), identifier[attribute_data] . identifier[get] ( literal[string] ), identifier[resource]
)
keyword[for] identifier[label_data] keyword[in] identifier[security_labels] :
identifier[self] . identifier[stage_tc_create_security_label] ( identifier[label_data] . identifier[get] ( literal[string] ), identifier[resource] )
keyword[for] identifier[tag_data] keyword[in] identifier[tags] :
identifier[self] . identifier[stage_tc_create_tag] ( identifier[tag_data] . identifier[get] ( literal[string] ), identifier[resource] )
keyword[else] :
identifier[self] . identifier[log] . identifier[error] ( literal[string] . identifier[format] ( identifier[resource_type] ))
|
def stage_tc(self, owner, staging_data, variable):
"""Stage data using ThreatConnect API.
.. code-block:: javascript
[{
"data": {
"id": 116,
"value": "adversary001-build-testing",
"type": "Adversary",
"ownerName": "qa-build",
"dateAdded": "2017-08-16T18:35:07-04:00",
"webLink": "https://app.tci.ninja/auth/adversary/adversary.xhtml?adversary=116"
},
"data_type": "redis",
"variable": "#App:0822:adversary!TCEntity"
}]
Args:
owner (str): The ThreatConnect owner name.
staging_data (dict): A dict containing the ThreatConnect threat intel.
variable (str): A variable name to write to Redis.
"""
# parse resource_data
resource_type = staging_data.pop('type')
if resource_type in self.tcex.indicator_types or resource_type in self.tcex.group_types:
try:
attributes = staging_data.pop('attribute') # depends on [control=['try'], data=[]]
except KeyError:
attributes = [] # depends on [control=['except'], data=[]]
try:
security_labels = staging_data.pop('security_label') # depends on [control=['try'], data=[]]
except KeyError:
security_labels = [] # depends on [control=['except'], data=[]]
try:
tags = staging_data.pop('tag') # depends on [control=['try'], data=[]]
except KeyError:
tags = [] # depends on [control=['except'], data=[]]
resource = self.tcex.resource(resource_type)
resource.http_method = 'POST'
resource.owner = owner
# special case for Email Group Type
if resource_type == 'Email':
resource.add_payload('option', 'createVictims') # depends on [control=['if'], data=[]]
self.log.debug('body: {}'.format(staging_data))
resource.body = json.dumps(staging_data)
response = resource.request()
if response.get('status') == 'Success':
# add resource id
if resource_type in self.tcex.indicator_types:
resource_id = resource.summary(response.get('data'))
self.log.info('[stage] Creating resource {}:{}'.format(resource_type, resource_id)) # depends on [control=['if'], data=['resource_type']]
elif resource_type in self.tcex.group_types:
self.log.info('[stage] Creating resource {}:{}'.format(resource_type, response.get('data', {}).get('name')))
resource_id = response.get('data', {}).get('id') # depends on [control=['if'], data=['resource_type']]
self.log.debug('[stage] resource_id: {}'.format(resource_id))
resource.resource_id(resource_id)
entity = self.tcex.playbook.json_to_entity(response.get('data'), resource.value_fields, resource.name, resource.parent)
self.log.debug('[stage] Creating Entity: {} ({})'.format(variable, entity[0]))
self.stage_redis(variable, entity[0])
# self.tcex.playbook.create_tc_entity(variable, entity[0])
# update metadata
for attribute_data in attributes:
self.stage_tc_create_attribute(attribute_data.get('type'), attribute_data.get('value'), resource) # depends on [control=['for'], data=['attribute_data']]
for label_data in security_labels:
self.stage_tc_create_security_label(label_data.get('name'), resource) # depends on [control=['for'], data=['label_data']]
for tag_data in tags:
self.stage_tc_create_tag(tag_data.get('name'), resource) # depends on [control=['for'], data=['tag_data']] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
else:
self.log.error('[stage] Unsupported resource type {}.'.format(resource_type))
|
def run(self):
    """Plugin entry point.

    Resolves the module compose, records it on the workflow, and forwards
    the resulting compose id as a build kwarg.
    """
    self.log.info("Resolving module compose")
    resolved = self._resolve_compose()
    set_compose_info(self.workflow, resolved)
    override_build_kwarg(self.workflow, 'compose_ids', [resolved.compose_id])
|
def function[run, parameter[self]]:
constant[
run the plugin
]
call[name[self].log.info, parameter[constant[Resolving module compose]]]
variable[compose_info] assign[=] call[name[self]._resolve_compose, parameter[]]
call[name[set_compose_info], parameter[name[self].workflow, name[compose_info]]]
call[name[override_build_kwarg], parameter[name[self].workflow, constant[compose_ids], list[[<ast.Attribute object at 0x7da1b26acb20>]]]]
|
keyword[def] identifier[run] ( identifier[self] ):
literal[string]
identifier[self] . identifier[log] . identifier[info] ( literal[string] )
identifier[compose_info] = identifier[self] . identifier[_resolve_compose] ()
identifier[set_compose_info] ( identifier[self] . identifier[workflow] , identifier[compose_info] )
identifier[override_build_kwarg] ( identifier[self] . identifier[workflow] , literal[string] ,[ identifier[compose_info] . identifier[compose_id] ])
|
def run(self):
"""
run the plugin
"""
self.log.info('Resolving module compose')
compose_info = self._resolve_compose()
set_compose_info(self.workflow, compose_info)
override_build_kwarg(self.workflow, 'compose_ids', [compose_info.compose_id])
|
def addPointVectors(self, vectors, name):
    """
    Attach a named 3-component vector field to the points of this actor's
    polydata and make it the active vector array.
    """
    pdata = self.polydata(False)
    npts = pdata.GetNumberOfPoints()
    # one vector per point is required; bail out otherwise
    if npts != len(vectors):
        colors.printc('~times addPointVectors Error: Number of vectors != nr. of points',
                      len(vectors), npts, c=1)
        exit()
    field = vtk.vtkDoubleArray()
    field.SetNumberOfComponents(3)
    field.SetName(name)
    for vec in vectors:
        field.InsertNextTuple(vec)
    point_data = pdata.GetPointData()
    point_data.AddArray(field)
    point_data.SetActiveVectors(name)
    return self
|
def function[addPointVectors, parameter[self, vectors, name]]:
constant[
Add a point vector field to the actor's polydata assigning it a name.
]
variable[poly] assign[=] call[name[self].polydata, parameter[constant[False]]]
if compare[call[name[len], parameter[name[vectors]]] not_equal[!=] call[name[poly].GetNumberOfPoints, parameter[]]] begin[:]
call[name[colors].printc, parameter[constant[~times addPointVectors Error: Number of vectors != nr. of points], call[name[len], parameter[name[vectors]]], call[name[poly].GetNumberOfPoints, parameter[]]]]
call[name[exit], parameter[]]
variable[arr] assign[=] call[name[vtk].vtkDoubleArray, parameter[]]
call[name[arr].SetNumberOfComponents, parameter[constant[3]]]
call[name[arr].SetName, parameter[name[name]]]
for taget[name[v]] in starred[name[vectors]] begin[:]
call[name[arr].InsertNextTuple, parameter[name[v]]]
call[call[name[poly].GetPointData, parameter[]].AddArray, parameter[name[arr]]]
call[call[name[poly].GetPointData, parameter[]].SetActiveVectors, parameter[name[name]]]
return[name[self]]
|
keyword[def] identifier[addPointVectors] ( identifier[self] , identifier[vectors] , identifier[name] ):
literal[string]
identifier[poly] = identifier[self] . identifier[polydata] ( keyword[False] )
keyword[if] identifier[len] ( identifier[vectors] )!= identifier[poly] . identifier[GetNumberOfPoints] ():
identifier[colors] . identifier[printc] ( literal[string] ,
identifier[len] ( identifier[vectors] ), identifier[poly] . identifier[GetNumberOfPoints] (), identifier[c] = literal[int] )
identifier[exit] ()
identifier[arr] = identifier[vtk] . identifier[vtkDoubleArray] ()
identifier[arr] . identifier[SetNumberOfComponents] ( literal[int] )
identifier[arr] . identifier[SetName] ( identifier[name] )
keyword[for] identifier[v] keyword[in] identifier[vectors] :
identifier[arr] . identifier[InsertNextTuple] ( identifier[v] )
identifier[poly] . identifier[GetPointData] (). identifier[AddArray] ( identifier[arr] )
identifier[poly] . identifier[GetPointData] (). identifier[SetActiveVectors] ( identifier[name] )
keyword[return] identifier[self]
|
def addPointVectors(self, vectors, name):
"""
Add a point vector field to the actor's polydata assigning it a name.
"""
poly = self.polydata(False)
if len(vectors) != poly.GetNumberOfPoints():
colors.printc('~times addPointVectors Error: Number of vectors != nr. of points', len(vectors), poly.GetNumberOfPoints(), c=1)
exit() # depends on [control=['if'], data=[]]
arr = vtk.vtkDoubleArray()
arr.SetNumberOfComponents(3)
arr.SetName(name)
for v in vectors:
arr.InsertNextTuple(v) # depends on [control=['for'], data=['v']]
poly.GetPointData().AddArray(arr)
poly.GetPointData().SetActiveVectors(name)
return self
|
def favorite_dashboard(self, id, **kwargs):  # noqa: E501
    """Mark a dashboard as favorite  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.favorite_dashboard(id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str id: (required)
    :return: ResponseContainer
             If the method is called asynchronously,
             returns the request thread.
    """
    # Only the response payload is wanted, not the full HTTP envelope.
    kwargs['_return_http_data_only'] = True
    # The helper itself returns either a thread (async_req) or the data,
    # so a single forwarding call covers both cases.
    return self.favorite_dashboard_with_http_info(id, **kwargs)  # noqa: E501
|
def function[favorite_dashboard, parameter[self, id]]:
constant[Mark a dashboard as favorite # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.favorite_dashboard(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: (required)
:return: ResponseContainer
If the method is called asynchronously,
returns the request thread.
]
call[name[kwargs]][constant[_return_http_data_only]] assign[=] constant[True]
if call[name[kwargs].get, parameter[constant[async_req]]] begin[:]
return[call[name[self].favorite_dashboard_with_http_info, parameter[name[id]]]]
|
keyword[def] identifier[favorite_dashboard] ( identifier[self] , identifier[id] ,** identifier[kwargs] ):
literal[string]
identifier[kwargs] [ literal[string] ]= keyword[True]
keyword[if] identifier[kwargs] . identifier[get] ( literal[string] ):
keyword[return] identifier[self] . identifier[favorite_dashboard_with_http_info] ( identifier[id] ,** identifier[kwargs] )
keyword[else] :
( identifier[data] )= identifier[self] . identifier[favorite_dashboard_with_http_info] ( identifier[id] ,** identifier[kwargs] )
keyword[return] identifier[data]
|
def favorite_dashboard(self, id, **kwargs): # noqa: E501
'Mark a dashboard as favorite # noqa: E501\n\n # noqa: E501\n This method makes a synchronous HTTP request by default. To make an\n asynchronous HTTP request, please pass async_req=True\n >>> thread = api.favorite_dashboard(id, async_req=True)\n >>> result = thread.get()\n\n :param async_req bool\n :param str id: (required)\n :return: ResponseContainer\n If the method is called asynchronously,\n returns the request thread.\n '
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.favorite_dashboard_with_http_info(id, **kwargs) # noqa: E501 # depends on [control=['if'], data=[]]
else:
data = self.favorite_dashboard_with_http_info(id, **kwargs) # noqa: E501
return data
|
def replace(html, replacements=None):
    """Apply each replacement object to the given HTML string."""
    if not replacements:
        # Nothing to apply; hand the input back untouched.
        return html
    fragment = HTMLFragment(html)
    for replacement in replacements:
        replacement.replace(fragment)
    return unicode(fragment)
|
def function[replace, parameter[html, replacements]]:
constant[Performs replacements on given HTML string.]
if <ast.UnaryOp object at 0x7da18fe911e0> begin[:]
return[name[html]]
variable[html] assign[=] call[name[HTMLFragment], parameter[name[html]]]
for taget[name[r]] in starred[name[replacements]] begin[:]
call[name[r].replace, parameter[name[html]]]
return[call[name[unicode], parameter[name[html]]]]
|
keyword[def] identifier[replace] ( identifier[html] , identifier[replacements] = keyword[None] ):
literal[string]
keyword[if] keyword[not] identifier[replacements] :
keyword[return] identifier[html]
identifier[html] = identifier[HTMLFragment] ( identifier[html] )
keyword[for] identifier[r] keyword[in] identifier[replacements] :
identifier[r] . identifier[replace] ( identifier[html] )
keyword[return] identifier[unicode] ( identifier[html] )
|
def replace(html, replacements=None):
"""Performs replacements on given HTML string."""
if not replacements:
return html # no replacements # depends on [control=['if'], data=[]]
html = HTMLFragment(html)
for r in replacements:
r.replace(html) # depends on [control=['for'], data=['r']]
return unicode(html)
|
def list_vault_ec2_certificate_configurations(self, mount_point='aws-ec2'):
    """GET /auth/<mount_point>/config/certificates?list=true
    :param mount_point:
    :type mount_point:
    :return:
    :rtype:
    """
    url = '/v1/auth/{0}/config/certificates'.format(mount_point)
    response = self._adapter.get(url, params={'list': True})
    return response.json()
|
def function[list_vault_ec2_certificate_configurations, parameter[self, mount_point]]:
constant[GET /auth/<mount_point>/config/certificates?list=true
:param mount_point:
:type mount_point:
:return:
:rtype:
]
variable[params] assign[=] dictionary[[<ast.Constant object at 0x7da18ede5ba0>], [<ast.Constant object at 0x7da18ede5cf0>]]
return[call[call[name[self]._adapter.get, parameter[call[constant[/v1/auth/{0}/config/certificates].format, parameter[name[mount_point]]]]].json, parameter[]]]
|
keyword[def] identifier[list_vault_ec2_certificate_configurations] ( identifier[self] , identifier[mount_point] = literal[string] ):
literal[string]
identifier[params] ={ literal[string] : keyword[True] }
keyword[return] identifier[self] . identifier[_adapter] . identifier[get] ( literal[string] . identifier[format] ( identifier[mount_point] ), identifier[params] = identifier[params] ). identifier[json] ()
|
def list_vault_ec2_certificate_configurations(self, mount_point='aws-ec2'):
"""GET /auth/<mount_point>/config/certificates?list=true
:param mount_point:
:type mount_point:
:return:
:rtype:
"""
params = {'list': True}
return self._adapter.get('/v1/auth/{0}/config/certificates'.format(mount_point), params=params).json()
|
def Read(self, expected_ids, read_data=True):
    """Read ADB messages and return FileSync packets.

    Args:
        expected_ids: Acceptable wire command IDs for this read.
        read_data: When True, also read the variable-length payload that
            follows the header.

    Returns:
        (command_id, header_fields) when read_data is False, otherwise
        (command_id, header_fields_without_size, data).

    Raises:
        usb_exceptions.AdbCommandFailureException: device replied FAIL.
        adb_protocol.InvalidResponseError: any other unexpected command ID.
    """
    # Push any queued outgoing packets before attempting to read the reply.
    if self.send_idx:
        self._Flush()
    # Read one filesync packet off the recv buffer.
    header_data = self._ReadBuffered(self.recv_header_len)
    header = struct.unpack(self.recv_header_format, header_data)
    # Header is (ID, ...).
    command_id = self.wire_to_id[header[0]]
    if command_id not in expected_ids:
        if command_id == b'FAIL':
            reason = ''
            if self.recv_buffer:
                # Remaining buffered bytes carry the device's failure message.
                reason = self.recv_buffer.decode('utf-8', errors='ignore')
            raise usb_exceptions.AdbCommandFailureException('Command failed: {}'.format(reason))
        raise adb_protocol.InvalidResponseError(
            'Expected one of %s, got %s' % (expected_ids, command_id))
    if not read_data:
        return command_id, header[1:]
    # Header is (ID, ..., size): the trailing field gives the payload length.
    size = header[-1]
    data = self._ReadBuffered(size)
    return command_id, header[1:-1], data
|
def function[Read, parameter[self, expected_ids, read_data]]:
constant[Read ADB messages and return FileSync packets.]
if name[self].send_idx begin[:]
call[name[self]._Flush, parameter[]]
variable[header_data] assign[=] call[name[self]._ReadBuffered, parameter[name[self].recv_header_len]]
variable[header] assign[=] call[name[struct].unpack, parameter[name[self].recv_header_format, name[header_data]]]
variable[command_id] assign[=] call[name[self].wire_to_id][call[name[header]][constant[0]]]
if compare[name[command_id] <ast.NotIn object at 0x7da2590d7190> name[expected_ids]] begin[:]
if compare[name[command_id] equal[==] constant[b'FAIL']] begin[:]
variable[reason] assign[=] constant[]
if name[self].recv_buffer begin[:]
variable[reason] assign[=] call[name[self].recv_buffer.decode, parameter[constant[utf-8]]]
<ast.Raise object at 0x7da1b19db7f0>
<ast.Raise object at 0x7da1b19d91e0>
if <ast.UnaryOp object at 0x7da1b19d9c60> begin[:]
return[tuple[[<ast.Name object at 0x7da1b19d95a0>, <ast.Subscript object at 0x7da1b19d9030>]]]
variable[size] assign[=] call[name[header]][<ast.UnaryOp object at 0x7da1b19d9570>]
variable[data] assign[=] call[name[self]._ReadBuffered, parameter[name[size]]]
return[tuple[[<ast.Name object at 0x7da1b1714dc0>, <ast.Subscript object at 0x7da1b1716830>, <ast.Name object at 0x7da1b17146d0>]]]
|
keyword[def] identifier[Read] ( identifier[self] , identifier[expected_ids] , identifier[read_data] = keyword[True] ):
literal[string]
keyword[if] identifier[self] . identifier[send_idx] :
identifier[self] . identifier[_Flush] ()
identifier[header_data] = identifier[self] . identifier[_ReadBuffered] ( identifier[self] . identifier[recv_header_len] )
identifier[header] = identifier[struct] . identifier[unpack] ( identifier[self] . identifier[recv_header_format] , identifier[header_data] )
identifier[command_id] = identifier[self] . identifier[wire_to_id] [ identifier[header] [ literal[int] ]]
keyword[if] identifier[command_id] keyword[not] keyword[in] identifier[expected_ids] :
keyword[if] identifier[command_id] == literal[string] :
identifier[reason] = literal[string]
keyword[if] identifier[self] . identifier[recv_buffer] :
identifier[reason] = identifier[self] . identifier[recv_buffer] . identifier[decode] ( literal[string] , identifier[errors] = literal[string] )
keyword[raise] identifier[usb_exceptions] . identifier[AdbCommandFailureException] ( literal[string] . identifier[format] ( identifier[reason] ))
keyword[raise] identifier[adb_protocol] . identifier[InvalidResponseError] (
literal[string] %( identifier[expected_ids] , identifier[command_id] ))
keyword[if] keyword[not] identifier[read_data] :
keyword[return] identifier[command_id] , identifier[header] [ literal[int] :]
identifier[size] = identifier[header] [- literal[int] ]
identifier[data] = identifier[self] . identifier[_ReadBuffered] ( identifier[size] )
keyword[return] identifier[command_id] , identifier[header] [ literal[int] :- literal[int] ], identifier[data]
|
def Read(self, expected_ids, read_data=True):
"""Read ADB messages and return FileSync packets."""
if self.send_idx:
self._Flush() # depends on [control=['if'], data=[]]
# Read one filesync packet off the recv buffer.
header_data = self._ReadBuffered(self.recv_header_len)
header = struct.unpack(self.recv_header_format, header_data)
# Header is (ID, ...).
command_id = self.wire_to_id[header[0]]
if command_id not in expected_ids:
if command_id == b'FAIL':
reason = ''
if self.recv_buffer:
reason = self.recv_buffer.decode('utf-8', errors='ignore') # depends on [control=['if'], data=[]]
raise usb_exceptions.AdbCommandFailureException('Command failed: {}'.format(reason)) # depends on [control=['if'], data=[]]
raise adb_protocol.InvalidResponseError('Expected one of %s, got %s' % (expected_ids, command_id)) # depends on [control=['if'], data=['command_id', 'expected_ids']]
if not read_data:
return (command_id, header[1:]) # depends on [control=['if'], data=[]]
# Header is (ID, ..., size).
size = header[-1]
data = self._ReadBuffered(size)
return (command_id, header[1:-1], data)
|
def get_default_settings(sub_scripts, script_order, script_execution_freq, iterator_type):
    """
    assigning the actual script settings depending on the iterator type
    this might be overwritten by classes that inherit form ScriptIterator
    Args:
        sub_scripts: dictionary with the subscripts
        script_order: execution order of subscripts
        script_execution_freq: execution frequency of subscripts
        iterator_type: either 'loop' or 'sweep'; anything else raises TypeError
    Returns:
        the default setting for the iterator (a list of Parameter objects)
    """
    def populate_sweep_param(scripts, parameter_list, trace=''):
        '''
        Recursively collect the names of all sweepable (numeric) parameters
        of the given scripts, prefixed with their script path.
        Args:
            scripts: a dict of {'class name': <class object>} pairs
        Returns: A list of all parameters of the input scripts
        '''
        def get_parameter_from_dict(trace, dic, parameter_list, valid_values=None):
            """
            appends keys in the dict to a list in the form trace.key.subkey.subsubkey...
            Args:
                trace: initial prefix (path through scripts and parameters to current location)
                dic: dictionary
                parameter_list: list to which append the parameters
                valid_values: valid values of dictionary values if None dic should be a dictionary
            Returns:
            """
            if valid_values is None and isinstance(dic, Parameter):
                valid_values = dic.valid_values
            for key, value in dic.items():
                if isinstance(value, dict):  # for nested parameters ex {point: {'x': int, 'y': int}}
                    parameter_list = get_parameter_from_dict(trace + '.' + key, value, parameter_list,
                                                             dic.valid_values[key])
                elif (valid_values[key] in (float, int)) or \
                        (isinstance(valid_values[key], list) and valid_values[key][0] in (float, int)):
                    # only numeric parameters (or numeric-valued lists) are sweepable
                    parameter_list.append(trace + '.' + key)
                else:  # once down to the form {key: value}
                    # in all other cases ignore parameter
                    print(('ignoring sweep parameter', key))
            return parameter_list
        for script_name in list(scripts.keys()):
            # imported here (not at module level) — presumably to avoid a
            # circular import with pylabcontrol.core; confirm before moving
            from pylabcontrol.core import ScriptIterator
            script_trace = trace
            if script_trace == '':
                script_trace = script_name
            else:
                script_trace = script_trace + '->' + script_name
            if issubclass(scripts[script_name], ScriptIterator):  # gets subscripts of ScriptIterator objects
                populate_sweep_param(vars(scripts[script_name])['_SCRIPTS'], parameter_list=parameter_list,
                                     trace=script_trace)
            else:
                # use inspect instead of vars to get _DEFAULT_SETTINGS also for classes that inherit _DEFAULT_SETTINGS from a superclass
                for setting in \
                        [elem[1] for elem in inspect.getmembers(scripts[script_name]) if elem[0] == '_DEFAULT_SETTINGS'][0]:
                    parameter_list = get_parameter_from_dict(script_trace, setting, parameter_list)
        return parameter_list
    # build the default Parameter list appropriate to the iterator type
    if iterator_type == 'loop':
        script_default_settings = [
            Parameter('script_order', script_order),
            Parameter('script_execution_freq', script_execution_freq),
            Parameter('num_loops', 0, int, 'times the subscripts will be executed'),
            Parameter('run_all_first', True, bool, 'Run all scripts with nonzero frequency in first pass')
        ]
    elif iterator_type == 'sweep':
        # the first sweepable parameter found becomes the default sweep_param
        sweep_params = populate_sweep_param(sub_scripts, [])
        script_default_settings = [
            Parameter('script_order', script_order),
            Parameter('script_execution_freq', script_execution_freq),
            Parameter('sweep_param', sweep_params[0], sweep_params, 'variable over which to sweep'),
            Parameter('sweep_range',
                      [Parameter('min_value', 0, float, 'min parameter value'),
                       Parameter('max_value', 0, float, 'max parameter value'),
                       Parameter('N/value_step', 0, float,
                                 'either number of steps or parameter value step, depending on mode')]),
            Parameter('stepping_mode', 'N', ['N', 'value_step'],
                      'Switch between number of steps and step amount'),
            Parameter('run_all_first', True, bool, 'Run all scripts with nonzero frequency in first pass')
        ]
    else:
        print(('unknown iterator type ' + iterator_type))
        raise TypeError('unknown iterator type ' + iterator_type)
    return script_default_settings
|
def function[get_default_settings, parameter[sub_scripts, script_order, script_execution_freq, iterator_type]]:
constant[
assigning the actual script settings depending on the iterator type
this might be overwritten by classes that inherit form ScriptIterator
Args:
sub_scripts: dictionary with the subscripts
script_order: execution order of subscripts
script_execution_freq: execution frequency of subscripts
Returns:
the default setting for the iterator
]
def function[populate_sweep_param, parameter[scripts, parameter_list, trace]]:
constant[
Args:
scripts: a dict of {'class name': <class object>} pairs
Returns: A list of all parameters of the input scripts
]
def function[get_parameter_from_dict, parameter[trace, dic, parameter_list, valid_values]]:
constant[
appends keys in the dict to a list in the form trace.key.subkey.subsubkey...
Args:
trace: initial prefix (path through scripts and parameters to current location)
dic: dictionary
parameter_list: list to which append the parameters
valid_values: valid values of dictionary values if None dic should be a dictionary
Returns:
]
if <ast.BoolOp object at 0x7da1b2449d20> begin[:]
variable[valid_values] assign[=] name[dic].valid_values
for taget[tuple[[<ast.Name object at 0x7da1b2449300>, <ast.Name object at 0x7da1b24489a0>]]] in starred[call[name[dic].items, parameter[]]] begin[:]
if call[name[isinstance], parameter[name[value], name[dict]]] begin[:]
variable[parameter_list] assign[=] call[name[get_parameter_from_dict], parameter[binary_operation[binary_operation[name[trace] + constant[.]] + name[key]], name[value], name[parameter_list], call[name[dic].valid_values][name[key]]]]
return[name[parameter_list]]
for taget[name[script_name]] in starred[call[name[list], parameter[call[name[scripts].keys, parameter[]]]]] begin[:]
from relative_module[pylabcontrol.core] import module[ScriptIterator]
variable[script_trace] assign[=] name[trace]
if compare[name[script_trace] equal[==] constant[]] begin[:]
variable[script_trace] assign[=] name[script_name]
if call[name[issubclass], parameter[call[name[scripts]][name[script_name]], name[ScriptIterator]]] begin[:]
call[name[populate_sweep_param], parameter[call[call[name[vars], parameter[call[name[scripts]][name[script_name]]]]][constant[_SCRIPTS]]]]
return[name[parameter_list]]
if compare[name[iterator_type] equal[==] constant[loop]] begin[:]
variable[script_default_settings] assign[=] list[[<ast.Call object at 0x7da1b2585690>, <ast.Call object at 0x7da1b25874c0>, <ast.Call object at 0x7da1b25867d0>, <ast.Call object at 0x7da1b2586c80>]]
return[name[script_default_settings]]
|
keyword[def] identifier[get_default_settings] ( identifier[sub_scripts] , identifier[script_order] , identifier[script_execution_freq] , identifier[iterator_type] ):
literal[string]
keyword[def] identifier[populate_sweep_param] ( identifier[scripts] , identifier[parameter_list] , identifier[trace] = literal[string] ):
literal[string]
keyword[def] identifier[get_parameter_from_dict] ( identifier[trace] , identifier[dic] , identifier[parameter_list] , identifier[valid_values] = keyword[None] ):
literal[string]
keyword[if] identifier[valid_values] keyword[is] keyword[None] keyword[and] identifier[isinstance] ( identifier[dic] , identifier[Parameter] ):
identifier[valid_values] = identifier[dic] . identifier[valid_values]
keyword[for] identifier[key] , identifier[value] keyword[in] identifier[dic] . identifier[items] ():
keyword[if] identifier[isinstance] ( identifier[value] , identifier[dict] ):
identifier[parameter_list] = identifier[get_parameter_from_dict] ( identifier[trace] + literal[string] + identifier[key] , identifier[value] , identifier[parameter_list] ,
identifier[dic] . identifier[valid_values] [ identifier[key] ])
keyword[elif] ( identifier[valid_values] [ identifier[key] ] keyword[in] ( identifier[float] , identifier[int] )) keyword[or] ( identifier[isinstance] ( identifier[valid_values] [ identifier[key] ], identifier[list] ) keyword[and] identifier[valid_values] [ identifier[key] ][ literal[int] ] keyword[in] ( identifier[float] , identifier[int] )):
identifier[parameter_list] . identifier[append] ( identifier[trace] + literal[string] + identifier[key] )
keyword[else] :
identifier[print] (( literal[string] , identifier[key] ))
keyword[return] identifier[parameter_list]
keyword[for] identifier[script_name] keyword[in] identifier[list] ( identifier[scripts] . identifier[keys] ()):
keyword[from] identifier[pylabcontrol] . identifier[core] keyword[import] identifier[ScriptIterator]
identifier[script_trace] = identifier[trace]
keyword[if] identifier[script_trace] == literal[string] :
identifier[script_trace] = identifier[script_name]
keyword[else] :
identifier[script_trace] = identifier[script_trace] + literal[string] + identifier[script_name]
keyword[if] identifier[issubclass] ( identifier[scripts] [ identifier[script_name] ], identifier[ScriptIterator] ):
identifier[populate_sweep_param] ( identifier[vars] ( identifier[scripts] [ identifier[script_name] ])[ literal[string] ], identifier[parameter_list] = identifier[parameter_list] ,
identifier[trace] = identifier[script_trace] )
keyword[else] :
keyword[for] identifier[setting] keyword[in] [ identifier[elem] [ literal[int] ] keyword[for] identifier[elem] keyword[in] identifier[inspect] . identifier[getmembers] ( identifier[scripts] [ identifier[script_name] ]) keyword[if] identifier[elem] [ literal[int] ]== literal[string] ][ literal[int] ]:
identifier[parameter_list] = identifier[get_parameter_from_dict] ( identifier[script_trace] , identifier[setting] , identifier[parameter_list] )
keyword[return] identifier[parameter_list]
keyword[if] identifier[iterator_type] == literal[string] :
identifier[script_default_settings] =[
identifier[Parameter] ( literal[string] , identifier[script_order] ),
identifier[Parameter] ( literal[string] , identifier[script_execution_freq] ),
identifier[Parameter] ( literal[string] , literal[int] , identifier[int] , literal[string] ),
identifier[Parameter] ( literal[string] , keyword[True] , identifier[bool] , literal[string] )
]
keyword[elif] identifier[iterator_type] == literal[string] :
identifier[sweep_params] = identifier[populate_sweep_param] ( identifier[sub_scripts] ,[])
identifier[script_default_settings] =[
identifier[Parameter] ( literal[string] , identifier[script_order] ),
identifier[Parameter] ( literal[string] , identifier[script_execution_freq] ),
identifier[Parameter] ( literal[string] , identifier[sweep_params] [ literal[int] ], identifier[sweep_params] , literal[string] ),
identifier[Parameter] ( literal[string] ,
[ identifier[Parameter] ( literal[string] , literal[int] , identifier[float] , literal[string] ),
identifier[Parameter] ( literal[string] , literal[int] , identifier[float] , literal[string] ),
identifier[Parameter] ( literal[string] , literal[int] , identifier[float] ,
literal[string] )]),
identifier[Parameter] ( literal[string] , literal[string] ,[ literal[string] , literal[string] ],
literal[string] ),
identifier[Parameter] ( literal[string] , keyword[True] , identifier[bool] , literal[string] )
]
keyword[else] :
identifier[print] (( literal[string] + identifier[iterator_type] ))
keyword[raise] identifier[TypeError] ( literal[string] + identifier[iterator_type] )
keyword[return] identifier[script_default_settings]
|
def get_default_settings(sub_scripts, script_order, script_execution_freq, iterator_type):
    """Build the default settings list for a script iterator.

    Assigns the actual script settings depending on the iterator type;
    this might be overwritten by classes that inherit from ScriptIterator.

    Args:
        sub_scripts: dictionary with the subscripts ({'class name': <class object>})
        script_order: execution order of subscripts
        script_execution_freq: execution frequency of subscripts
        iterator_type: 'loop' or 'sweep'; anything else raises TypeError

    Returns:
        list of Parameter objects: the default settings for the iterator
    """
    def populate_sweep_param(scripts, parameter_list, trace=''):
        """Collect the sweepable (numeric) parameters of all given scripts.

        Args:
            scripts: a dict of {'class name': <class object>} pairs
            parameter_list: list the parameter paths are appended to (mutated in place)
            trace: prefix recording the path through nested scripts so far

        Returns:
            A list of all parameters of the input scripts, each encoded as a
            path like 'script->subscript.setting.subsetting'
        """
        def get_parameter_from_dict(trace, dic, parameter_list, valid_values=None):
            """
            appends keys in the dict to a list in the form trace.key.subkey.subsubkey...
            Args:
                trace: initial prefix (path through scripts and parameters to current location)
                dic: dictionary (or Parameter) of settings to walk
                parameter_list: list to which append the parameters
                valid_values: valid values of dictionary values; if None, dic should be
                    a Parameter so its own valid_values can be used instead
            Returns:
                parameter_list with the numeric parameter paths appended
            """
            if valid_values is None and isinstance(dic, Parameter):
                valid_values = dic.valid_values # depends on [control=['if'], data=[]]
            for (key, value) in dic.items():
                if isinstance(value, dict): # for nested parameters ex {point: {'x': int, 'y': int}}
                    # recurse one level deeper, extending the dotted path
                    parameter_list = get_parameter_from_dict(trace + '.' + key, value, parameter_list, dic.valid_values[key]) # depends on [control=['if'], data=[]]
                elif valid_values[key] in (float, int) or (isinstance(valid_values[key], list) and valid_values[key][0] in (float, int)):
                    # numeric leaf value -> a parameter that can be swept over
                    parameter_list.append(trace + '.' + key) # depends on [control=['if'], data=[]]
                else: # once down to the form {key: value}
                    # in all other cases ignore parameter (non-numeric values cannot be swept)
                    print(('ignoring sweep parameter', key)) # depends on [control=['for'], data=[]]
            return parameter_list
        for script_name in list(scripts.keys()):
            # local import -- NOTE(review): presumably deferred to avoid a
            # circular import with pylabcontrol.core; confirm before hoisting
            from pylabcontrol.core import ScriptIterator
            script_trace = trace
            if script_trace == '':
                script_trace = script_name # depends on [control=['if'], data=['script_trace']]
            else:
                script_trace = script_trace + '->' + script_name
            if issubclass(scripts[script_name], ScriptIterator): # gets subscripts of ScriptIterator objects
                # parameter_list is extended in place by the recursive call,
                # so the return value does not need to be captured here
                populate_sweep_param(vars(scripts[script_name])['_SCRIPTS'], parameter_list=parameter_list, trace=script_trace) # depends on [control=['if'], data=[]]
            else:
                # use inspect instead of vars to get _DEFAULT_SETTINGS also for classes that inherit _DEFAULT_SETTINGS from a superclass
                for setting in [elem[1] for elem in inspect.getmembers(scripts[script_name]) if elem[0] == '_DEFAULT_SETTINGS'][0]:
                    parameter_list = get_parameter_from_dict(script_trace, setting, parameter_list) # depends on [control=['for'], data=['setting']] # depends on [control=['for'], data=['script_name']]
        return parameter_list
    if iterator_type == 'loop':
        script_default_settings = [Parameter('script_order', script_order), Parameter('script_execution_freq', script_execution_freq), Parameter('num_loops', 0, int, 'times the subscripts will be executed'), Parameter('run_all_first', True, bool, 'Run all scripts with nonzero frequency in first pass')] # depends on [control=['if'], data=[]]
    elif iterator_type == 'sweep':
        # the first sweepable parameter found becomes the default sweep_param
        sweep_params = populate_sweep_param(sub_scripts, [])
        script_default_settings = [Parameter('script_order', script_order), Parameter('script_execution_freq', script_execution_freq), Parameter('sweep_param', sweep_params[0], sweep_params, 'variable over which to sweep'), Parameter('sweep_range', [Parameter('min_value', 0, float, 'min parameter value'), Parameter('max_value', 0, float, 'max parameter value'), Parameter('N/value_step', 0, float, 'either number of steps or parameter value step, depending on mode')]), Parameter('stepping_mode', 'N', ['N', 'value_step'], 'Switch between number of steps and step amount'), Parameter('run_all_first', True, bool, 'Run all scripts with nonzero frequency in first pass')] # depends on [control=['if'], data=[]]
    else:
        print('unknown iterator type ' + iterator_type)
        raise TypeError('unknown iterator type ' + iterator_type)
    return script_default_settings
|
def move_right(self):
    """Make the drone move right."""
    # pcmd with the progressive flag set, the configured speed as the second
    # argument, and the remaining three arguments held at zero.
    command_args = (True, self.speed, 0, 0, 0)
    self.at(ardrone.at.pcmd, *command_args)
|
def function[move_right, parameter[self]]:
constant[Make the drone move right.]
call[name[self].at, parameter[name[ardrone].at.pcmd, constant[True], name[self].speed, constant[0], constant[0], constant[0]]]
|
keyword[def] identifier[move_right] ( identifier[self] ):
literal[string]
identifier[self] . identifier[at] ( identifier[ardrone] . identifier[at] . identifier[pcmd] , keyword[True] , identifier[self] . identifier[speed] , literal[int] , literal[int] , literal[int] )
|
def move_right(self):
"""Make the drone move right."""
self.at(ardrone.at.pcmd, True, self.speed, 0, 0, 0)
|
def nvmlDeviceGetBAR1MemoryInfo(handle):
    """Retrieve Total, Available and Used sizes of BAR1 memory.

    BAR1 is used to map the FB (device memory) so that it can be directly
    accessed by the CPU or by 3rd party devices (peer-to-peer on the PCIE
    bus). For Kepler or newer fully supported devices.

    Args:
        handle: the identifier of the target device.

    Returns:
        A c_nvmlBAR1Memory_t structure holding the BAR1 memory information.

    Raises:
        An NVML error (via _nvmlCheckReturn) when the library is
        uninitialized, the device or argument is invalid, the query is
        unsupported, the GPU has fallen off the bus, or on any other
        non-success code reported by the driver.
    """
    bar1_memory = c_nvmlBAR1Memory_t()
    query_fn = _nvmlGetFunctionPointer("nvmlDeviceGetBAR1MemoryInfo")
    status = query_fn(handle, byref(bar1_memory))
    _nvmlCheckReturn(status)
    return bytes_to_str(bar1_memory)
|
def function[nvmlDeviceGetBAR1MemoryInfo, parameter[handle]]:
constant[
/**
* Gets Total, Available and Used size of BAR1 memory.
*
* BAR1 is used to map the FB (device memory) so that it can be directly accessed by the CPU or by 3rd party
* devices (peer-to-peer on the PCIE bus).
*
* For Kepler &tm; or newer fully supported devices.
*
* @param device The identifier of the target device
* @param bar1Memory Reference in which BAR1 memory
* information is returned.
*
* @return
* - \ref NVML_SUCCESS if BAR1 memory is successfully retrieved
* - \ref NVML_ERROR_UNINITIALIZED if the library has not been successfully initialized
* - \ref NVML_ERROR_INVALID_ARGUMENT if \a device is invalid, \a bar1Memory is NULL
* - \ref NVML_ERROR_NOT_SUPPORTED if this query is not supported by the device
* - \ref NVML_ERROR_GPU_IS_LOST if the target GPU has fallen off the bus or is otherwise inaccessible
* - \ref NVML_ERROR_UNKNOWN on any unexpected error
*
*/
nvmlReturn_t DECLDIR nvmlDeviceGetBAR1MemoryInfo
]
variable[c_bar1_memory] assign[=] call[name[c_nvmlBAR1Memory_t], parameter[]]
variable[fn] assign[=] call[name[_nvmlGetFunctionPointer], parameter[constant[nvmlDeviceGetBAR1MemoryInfo]]]
variable[ret] assign[=] call[name[fn], parameter[name[handle], call[name[byref], parameter[name[c_bar1_memory]]]]]
call[name[_nvmlCheckReturn], parameter[name[ret]]]
return[call[name[bytes_to_str], parameter[name[c_bar1_memory]]]]
|
keyword[def] identifier[nvmlDeviceGetBAR1MemoryInfo] ( identifier[handle] ):
literal[string]
identifier[c_bar1_memory] = identifier[c_nvmlBAR1Memory_t] ()
identifier[fn] = identifier[_nvmlGetFunctionPointer] ( literal[string] )
identifier[ret] = identifier[fn] ( identifier[handle] , identifier[byref] ( identifier[c_bar1_memory] ))
identifier[_nvmlCheckReturn] ( identifier[ret] )
keyword[return] identifier[bytes_to_str] ( identifier[c_bar1_memory] )
|
def nvmlDeviceGetBAR1MemoryInfo(handle):
"""
/**
* Gets Total, Available and Used size of BAR1 memory.
*
* BAR1 is used to map the FB (device memory) so that it can be directly accessed by the CPU or by 3rd party
* devices (peer-to-peer on the PCIE bus).
*
* For Kepler &tm; or newer fully supported devices.
*
* @param device The identifier of the target device
* @param bar1Memory Reference in which BAR1 memory
* information is returned.
*
* @return
* - \\ref NVML_SUCCESS if BAR1 memory is successfully retrieved
* - \\ref NVML_ERROR_UNINITIALIZED if the library has not been successfully initialized
* - \\ref NVML_ERROR_INVALID_ARGUMENT if \\a device is invalid, \\a bar1Memory is NULL
* - \\ref NVML_ERROR_NOT_SUPPORTED if this query is not supported by the device
* - \\ref NVML_ERROR_GPU_IS_LOST if the target GPU has fallen off the bus or is otherwise inaccessible
* - \\ref NVML_ERROR_UNKNOWN on any unexpected error
*
*/
nvmlReturn_t DECLDIR nvmlDeviceGetBAR1MemoryInfo
"""
c_bar1_memory = c_nvmlBAR1Memory_t()
fn = _nvmlGetFunctionPointer('nvmlDeviceGetBAR1MemoryInfo')
ret = fn(handle, byref(c_bar1_memory))
_nvmlCheckReturn(ret)
return bytes_to_str(c_bar1_memory)
|
async def read(self, *_id):
    """Read data from database table.

    Accepts ids of entries.
    Returns list of results if success
    or dict with error code and explanation.
    read(*id) => [(result), (result)] (if success)
    read(*id) => [] (if missed)
    read() => {"error":400, "reason":"Missed required fields"}
    """
    if not _id:
        # No ids supplied -- report the error instead of querying.
        return {"error": 400,
                "reason": "Missed required fields"}
    result = []
    for entry_id in _id:
        document = await self.collection.find_one({"id": entry_id})
        if document is None:
            # id not present in the collection: skip it silently, which
            # preserves the documented "[] if missed" behaviour.  (The old
            # bare `except:` also hid genuine errors from find_one; those
            # now propagate to the caller.)
            continue
        # Strip the database-internal "_id" field from the returned document.
        result.append({key: value for key, value in document.items()
                       if key != "_id"})
    return result
|
<ast.AsyncFunctionDef object at 0x7da1b0aa51e0>
|
keyword[async] keyword[def] identifier[read] ( identifier[self] ,* identifier[_id] ):
literal[string]
keyword[if] keyword[not] identifier[_id] :
keyword[return] { literal[string] : literal[int] ,
literal[string] : literal[string] }
identifier[result] =[]
keyword[for] identifier[i] keyword[in] identifier[_id] :
identifier[document] = keyword[await] identifier[self] . identifier[collection] . identifier[find_one] ({ literal[string] : identifier[i] })
keyword[try] :
identifier[result] . identifier[append] ({ identifier[i] : identifier[document] [ identifier[i] ] keyword[for] identifier[i] keyword[in] identifier[document]
keyword[if] identifier[i] != literal[string] })
keyword[except] :
keyword[continue]
keyword[return] identifier[result]
|
async def read(self, *_id):
"""Read data from database table.
Accepts ids of entries.
Returns list of results if success
or string with error code and explanation.
read(*id) => [(result), (result)] (if success)
read(*id) => [] (if missed)
read() => {"error":400, "reason":"Missed required fields"}
"""
if not _id:
return {'error': 400, 'reason': 'Missed required fields'} # depends on [control=['if'], data=[]]
result = []
for i in _id:
document = await self.collection.find_one({'id': i})
try:
result.append({i: document[i] for i in document if i != '_id'}) # depends on [control=['try'], data=[]]
except:
continue # depends on [control=['except'], data=[]] # depends on [control=['for'], data=['i']]
return result
|
def _add_manifest_files(zip_file, dir_name, payload_info_list, tag_info_list):
    """Generate one manifest file per checksum algorithm used by the payload
    and add each of them to the zip archive as a tag file."""
    for algorithm in _get_checksum_algorithm_set(payload_info_list):
        manifest_tup = _gen_manifest_file_tup(payload_info_list, algorithm)
        _add_tag_file(zip_file, dir_name, tag_info_list, manifest_tup)
|
def function[_add_manifest_files, parameter[zip_file, dir_name, payload_info_list, tag_info_list]]:
constant[Generate the manifest files and add them to the zip.]
for taget[name[checksum_algorithm]] in starred[call[name[_get_checksum_algorithm_set], parameter[name[payload_info_list]]]] begin[:]
call[name[_add_tag_file], parameter[name[zip_file], name[dir_name], name[tag_info_list], call[name[_gen_manifest_file_tup], parameter[name[payload_info_list], name[checksum_algorithm]]]]]
|
keyword[def] identifier[_add_manifest_files] ( identifier[zip_file] , identifier[dir_name] , identifier[payload_info_list] , identifier[tag_info_list] ):
literal[string]
keyword[for] identifier[checksum_algorithm] keyword[in] identifier[_get_checksum_algorithm_set] ( identifier[payload_info_list] ):
identifier[_add_tag_file] (
identifier[zip_file] ,
identifier[dir_name] ,
identifier[tag_info_list] ,
identifier[_gen_manifest_file_tup] ( identifier[payload_info_list] , identifier[checksum_algorithm] ),
)
|
def _add_manifest_files(zip_file, dir_name, payload_info_list, tag_info_list):
"""Generate the manifest files and add them to the zip."""
for checksum_algorithm in _get_checksum_algorithm_set(payload_info_list):
_add_tag_file(zip_file, dir_name, tag_info_list, _gen_manifest_file_tup(payload_info_list, checksum_algorithm)) # depends on [control=['for'], data=['checksum_algorithm']]
|
def galactic2fk5(l, b):
    """
    Convert Galactic coordinates to FK5 ra/dec.

    Parameters
    ----------
    l, b : float
        Galactic longitude and latitude in radians.

    Returns
    -------
    ra, dec : float
        FK5 coordinates in radians.
    """
    galactic = SkyCoord(l, b, unit=(u.radian, u.radian), frame='galactic')
    fk5 = galactic.fk5
    return fk5.ra.radian, fk5.dec.radian
|
def function[galactic2fk5, parameter[l, b]]:
constant[
Convert galactic l/b to fk5 ra/dec
Parameters
----------
l, b : float
Galactic coordinates in radians.
Returns
-------
ra, dec : float
FK5 ecliptic coordinates in radians.
]
variable[a] assign[=] call[name[SkyCoord], parameter[name[l], name[b]]]
return[tuple[[<ast.Attribute object at 0x7da2054a5270>, <ast.Attribute object at 0x7da2054a6050>]]]
|
keyword[def] identifier[galactic2fk5] ( identifier[l] , identifier[b] ):
literal[string]
identifier[a] = identifier[SkyCoord] ( identifier[l] , identifier[b] , identifier[unit] =( identifier[u] . identifier[radian] , identifier[u] . identifier[radian] ), identifier[frame] = literal[string] )
keyword[return] identifier[a] . identifier[fk5] . identifier[ra] . identifier[radian] , identifier[a] . identifier[fk5] . identifier[dec] . identifier[radian]
|
def galactic2fk5(l, b):
"""
Convert galactic l/b to fk5 ra/dec
Parameters
----------
l, b : float
Galactic coordinates in radians.
Returns
-------
ra, dec : float
FK5 ecliptic coordinates in radians.
"""
a = SkyCoord(l, b, unit=(u.radian, u.radian), frame='galactic')
return (a.fk5.ra.radian, a.fk5.dec.radian)
|
def get_card(self):
    '''
    Get card this checklist is on.
    '''
    info = self.get_checklist_information()
    card_id = info.get('idCard', None)
    # No (or falsy) card id -> this checklist is not attached to a card.
    return self.client.get_card(card_id) if card_id else None
|
def function[get_card, parameter[self]]:
constant[
Get card this checklist is on.
]
variable[card_id] assign[=] call[call[name[self].get_checklist_information, parameter[]].get, parameter[constant[idCard], constant[None]]]
if name[card_id] begin[:]
return[call[name[self].client.get_card, parameter[name[card_id]]]]
|
keyword[def] identifier[get_card] ( identifier[self] ):
literal[string]
identifier[card_id] = identifier[self] . identifier[get_checklist_information] (). identifier[get] ( literal[string] , keyword[None] )
keyword[if] identifier[card_id] :
keyword[return] identifier[self] . identifier[client] . identifier[get_card] ( identifier[card_id] )
|
def get_card(self):
"""
Get card this checklist is on.
"""
card_id = self.get_checklist_information().get('idCard', None)
if card_id:
return self.client.get_card(card_id) # depends on [control=['if'], data=[]]
|
def solve_for(self, var=None):
        """Ensure that the child item stays (with a margin) within its parent.

        Each of the four parent edges is checked in turn; whenever the child
        protrudes past an edge beyond the allowed margin (plus EPSILON
        tolerance), the child is shifted back inside and, if it is larger
        than the available space, shrunk to fit -- but never below the
        child's declared minimum width/height.

        :param var: variable that triggered the solver (unused here; all
            four edges are always re-checked)
        """
        if not self.enable:
            return
        # margin is re-evaluated on every solve via the supplied method
        margin = self.margin_method()
        def parent_width():
            # horizontal extent of the parent: east edge minus west edge
            return self.parent_se[0].value - self.parent_nw[0].value
        def parent_height():
            # vertical extent of the parent: south edge minus north edge
            return self.parent_se[1].value - self.parent_nw[1].value
        def child_width():
            # current child width, clamped to the parent width minus both
            # margins, but never smaller than the child's minimum width
            child_width = self.child_se[0].value - self.child_nw[0].value
            if child_width > parent_width() - 2 * margin:
                child_width = parent_width() - 2 * margin
            return max(self.child.min_width, child_width)
        def child_height():
            # current child height, clamped analogously to child_width
            child_height = self.child_se[1].value - self.child_nw[1].value
            if child_height > parent_height() - 2 * margin:
                child_height = parent_height() - 2 * margin
            return max(self.child.min_height, child_height)
        updated = False
        # Left edge (west): pin the child's west edge inside, keep its width
        if self.parent_nw[0].value > self.child_nw[0].value - margin + EPSILON:
            width = child_width()
            _update(self.child_nw[0], self.parent_nw[0].value + margin)
            _update(self.child_se[0], self.child_nw[0].value + width)
            updated = True
        # Right edge (east): pin the child's east edge inside, keep its width
        elif self.parent_se[0].value < self.child_se[0].value + margin - EPSILON:
            width = child_width()
            _update(self.child_se[0], self.parent_se[0].value - margin)
            _update(self.child_nw[0], self.child_se[0].value - width)
            updated = True
        # Upper edge (north): pin the child's north edge inside, keep its height
        if self.parent_nw[1].value > self.child_nw[1].value - margin + EPSILON:
            height = child_height()
            _update(self.child_nw[1], self.parent_nw[1].value + margin)
            _update(self.child_se[1], self.child_nw[1].value + height)
            updated = True
        # Lower edge (south): pin the child's south edge inside, keep its height
        elif self.parent_se[1].value < self.child_se[1].value + margin - EPSILON:
            height = child_height()
            _update(self.child_se[1], self.parent_se[1].value - margin)
            _update(self.child_nw[1], self.child_se[1].value - height)
            updated = True
        # local import -- NOTE(review): presumably deferred to avoid a
        # circular import with the state module; confirm before hoisting
        from rafcon.gui.mygaphas.items.state import StateView
        if updated and isinstance(self.child, StateView):
            self.child.update_minimum_size_of_children()
|
def function[solve_for, parameter[self, var]]:
constant[Ensure that the children is within its parent
]
if <ast.UnaryOp object at 0x7da2044c0c70> begin[:]
return[None]
variable[margin] assign[=] call[name[self].margin_method, parameter[]]
def function[parent_width, parameter[]]:
return[binary_operation[call[name[self].parent_se][constant[0]].value - call[name[self].parent_nw][constant[0]].value]]
def function[parent_height, parameter[]]:
return[binary_operation[call[name[self].parent_se][constant[1]].value - call[name[self].parent_nw][constant[1]].value]]
def function[child_width, parameter[]]:
variable[child_width] assign[=] binary_operation[call[name[self].child_se][constant[0]].value - call[name[self].child_nw][constant[0]].value]
if compare[name[child_width] greater[>] binary_operation[call[name[parent_width], parameter[]] - binary_operation[constant[2] * name[margin]]]] begin[:]
variable[child_width] assign[=] binary_operation[call[name[parent_width], parameter[]] - binary_operation[constant[2] * name[margin]]]
return[call[name[max], parameter[name[self].child.min_width, name[child_width]]]]
def function[child_height, parameter[]]:
variable[child_height] assign[=] binary_operation[call[name[self].child_se][constant[1]].value - call[name[self].child_nw][constant[1]].value]
if compare[name[child_height] greater[>] binary_operation[call[name[parent_height], parameter[]] - binary_operation[constant[2] * name[margin]]]] begin[:]
variable[child_height] assign[=] binary_operation[call[name[parent_height], parameter[]] - binary_operation[constant[2] * name[margin]]]
return[call[name[max], parameter[name[self].child.min_height, name[child_height]]]]
variable[updated] assign[=] constant[False]
if compare[call[name[self].parent_nw][constant[0]].value greater[>] binary_operation[binary_operation[call[name[self].child_nw][constant[0]].value - name[margin]] + name[EPSILON]]] begin[:]
variable[width] assign[=] call[name[child_width], parameter[]]
call[name[_update], parameter[call[name[self].child_nw][constant[0]], binary_operation[call[name[self].parent_nw][constant[0]].value + name[margin]]]]
call[name[_update], parameter[call[name[self].child_se][constant[0]], binary_operation[call[name[self].child_nw][constant[0]].value + name[width]]]]
variable[updated] assign[=] constant[True]
if compare[call[name[self].parent_nw][constant[1]].value greater[>] binary_operation[binary_operation[call[name[self].child_nw][constant[1]].value - name[margin]] + name[EPSILON]]] begin[:]
variable[height] assign[=] call[name[child_height], parameter[]]
call[name[_update], parameter[call[name[self].child_nw][constant[1]], binary_operation[call[name[self].parent_nw][constant[1]].value + name[margin]]]]
call[name[_update], parameter[call[name[self].child_se][constant[1]], binary_operation[call[name[self].child_nw][constant[1]].value + name[height]]]]
variable[updated] assign[=] constant[True]
from relative_module[rafcon.gui.mygaphas.items.state] import module[StateView]
if <ast.BoolOp object at 0x7da1b192f100> begin[:]
call[name[self].child.update_minimum_size_of_children, parameter[]]
|
keyword[def] identifier[solve_for] ( identifier[self] , identifier[var] = keyword[None] ):
literal[string]
keyword[if] keyword[not] identifier[self] . identifier[enable] :
keyword[return]
identifier[margin] = identifier[self] . identifier[margin_method] ()
keyword[def] identifier[parent_width] ():
keyword[return] identifier[self] . identifier[parent_se] [ literal[int] ]. identifier[value] - identifier[self] . identifier[parent_nw] [ literal[int] ]. identifier[value]
keyword[def] identifier[parent_height] ():
keyword[return] identifier[self] . identifier[parent_se] [ literal[int] ]. identifier[value] - identifier[self] . identifier[parent_nw] [ literal[int] ]. identifier[value]
keyword[def] identifier[child_width] ():
identifier[child_width] = identifier[self] . identifier[child_se] [ literal[int] ]. identifier[value] - identifier[self] . identifier[child_nw] [ literal[int] ]. identifier[value]
keyword[if] identifier[child_width] > identifier[parent_width] ()- literal[int] * identifier[margin] :
identifier[child_width] = identifier[parent_width] ()- literal[int] * identifier[margin]
keyword[return] identifier[max] ( identifier[self] . identifier[child] . identifier[min_width] , identifier[child_width] )
keyword[def] identifier[child_height] ():
identifier[child_height] = identifier[self] . identifier[child_se] [ literal[int] ]. identifier[value] - identifier[self] . identifier[child_nw] [ literal[int] ]. identifier[value]
keyword[if] identifier[child_height] > identifier[parent_height] ()- literal[int] * identifier[margin] :
identifier[child_height] = identifier[parent_height] ()- literal[int] * identifier[margin]
keyword[return] identifier[max] ( identifier[self] . identifier[child] . identifier[min_height] , identifier[child_height] )
identifier[updated] = keyword[False]
keyword[if] identifier[self] . identifier[parent_nw] [ literal[int] ]. identifier[value] > identifier[self] . identifier[child_nw] [ literal[int] ]. identifier[value] - identifier[margin] + identifier[EPSILON] :
identifier[width] = identifier[child_width] ()
identifier[_update] ( identifier[self] . identifier[child_nw] [ literal[int] ], identifier[self] . identifier[parent_nw] [ literal[int] ]. identifier[value] + identifier[margin] )
identifier[_update] ( identifier[self] . identifier[child_se] [ literal[int] ], identifier[self] . identifier[child_nw] [ literal[int] ]. identifier[value] + identifier[width] )
identifier[updated] = keyword[True]
keyword[elif] identifier[self] . identifier[parent_se] [ literal[int] ]. identifier[value] < identifier[self] . identifier[child_se] [ literal[int] ]. identifier[value] + identifier[margin] - identifier[EPSILON] :
identifier[width] = identifier[child_width] ()
identifier[_update] ( identifier[self] . identifier[child_se] [ literal[int] ], identifier[self] . identifier[parent_se] [ literal[int] ]. identifier[value] - identifier[margin] )
identifier[_update] ( identifier[self] . identifier[child_nw] [ literal[int] ], identifier[self] . identifier[child_se] [ literal[int] ]. identifier[value] - identifier[width] )
identifier[updated] = keyword[True]
keyword[if] identifier[self] . identifier[parent_nw] [ literal[int] ]. identifier[value] > identifier[self] . identifier[child_nw] [ literal[int] ]. identifier[value] - identifier[margin] + identifier[EPSILON] :
identifier[height] = identifier[child_height] ()
identifier[_update] ( identifier[self] . identifier[child_nw] [ literal[int] ], identifier[self] . identifier[parent_nw] [ literal[int] ]. identifier[value] + identifier[margin] )
identifier[_update] ( identifier[self] . identifier[child_se] [ literal[int] ], identifier[self] . identifier[child_nw] [ literal[int] ]. identifier[value] + identifier[height] )
identifier[updated] = keyword[True]
keyword[elif] identifier[self] . identifier[parent_se] [ literal[int] ]. identifier[value] < identifier[self] . identifier[child_se] [ literal[int] ]. identifier[value] + identifier[margin] - identifier[EPSILON] :
identifier[height] = identifier[child_height] ()
identifier[_update] ( identifier[self] . identifier[child_se] [ literal[int] ], identifier[self] . identifier[parent_se] [ literal[int] ]. identifier[value] - identifier[margin] )
identifier[_update] ( identifier[self] . identifier[child_nw] [ literal[int] ], identifier[self] . identifier[child_se] [ literal[int] ]. identifier[value] - identifier[height] )
identifier[updated] = keyword[True]
keyword[from] identifier[rafcon] . identifier[gui] . identifier[mygaphas] . identifier[items] . identifier[state] keyword[import] identifier[StateView]
keyword[if] identifier[updated] keyword[and] identifier[isinstance] ( identifier[self] . identifier[child] , identifier[StateView] ):
identifier[self] . identifier[child] . identifier[update_minimum_size_of_children] ()
|
def solve_for(self, var=None):
"""Ensure that the children is within its parent
"""
if not self.enable:
return # depends on [control=['if'], data=[]]
margin = self.margin_method()
def parent_width():
return self.parent_se[0].value - self.parent_nw[0].value
def parent_height():
return self.parent_se[1].value - self.parent_nw[1].value
def child_width():
child_width = self.child_se[0].value - self.child_nw[0].value
if child_width > parent_width() - 2 * margin:
child_width = parent_width() - 2 * margin # depends on [control=['if'], data=['child_width']]
return max(self.child.min_width, child_width)
def child_height():
child_height = self.child_se[1].value - self.child_nw[1].value
if child_height > parent_height() - 2 * margin:
child_height = parent_height() - 2 * margin # depends on [control=['if'], data=['child_height']]
return max(self.child.min_height, child_height)
updated = False
# Left edge (west)
if self.parent_nw[0].value > self.child_nw[0].value - margin + EPSILON:
width = child_width()
_update(self.child_nw[0], self.parent_nw[0].value + margin)
_update(self.child_se[0], self.child_nw[0].value + width)
updated = True # depends on [control=['if'], data=[]]
# Right edge (east)
elif self.parent_se[0].value < self.child_se[0].value + margin - EPSILON:
width = child_width()
_update(self.child_se[0], self.parent_se[0].value - margin)
_update(self.child_nw[0], self.child_se[0].value - width)
updated = True # depends on [control=['if'], data=[]]
# Upper edge (north)
if self.parent_nw[1].value > self.child_nw[1].value - margin + EPSILON:
height = child_height()
_update(self.child_nw[1], self.parent_nw[1].value + margin)
_update(self.child_se[1], self.child_nw[1].value + height)
updated = True # depends on [control=['if'], data=[]]
# Lower edge (south)
elif self.parent_se[1].value < self.child_se[1].value + margin - EPSILON:
height = child_height()
_update(self.child_se[1], self.parent_se[1].value - margin)
_update(self.child_nw[1], self.child_se[1].value - height)
updated = True # depends on [control=['if'], data=[]]
from rafcon.gui.mygaphas.items.state import StateView
if updated and isinstance(self.child, StateView):
self.child.update_minimum_size_of_children() # depends on [control=['if'], data=[]]
|
def _get_nop_length(cls, insns):
    """
    Compute how many bytes of no-op instructions appear at the start of a block.

    :param insns: A list of capstone insn objects.
    :return: Total byte size of the leading run of nop instructions.
    :rtype: int
    """
    total = 0
    for insn in insns:
        if not cls._is_noop_insn(insn):
            # First real instruction terminates the leading nop run.
            break
        total += insn.size
    return total
|
def function[_get_nop_length, parameter[cls, insns]]:
constant[
Calculate the total size of leading nop instructions.
:param insns: A list of capstone insn objects.
:return: Number of bytes of leading nop instructions.
:rtype: int
]
variable[nop_length] assign[=] constant[0]
if <ast.BoolOp object at 0x7da18c4cf250> begin[:]
for taget[name[insn]] in starred[name[insns]] begin[:]
if call[name[cls]._is_noop_insn, parameter[name[insn]]] begin[:]
<ast.AugAssign object at 0x7da18c4ce620>
return[name[nop_length]]
|
keyword[def] identifier[_get_nop_length] ( identifier[cls] , identifier[insns] ):
literal[string]
identifier[nop_length] = literal[int]
keyword[if] identifier[insns] keyword[and] identifier[cls] . identifier[_is_noop_insn] ( identifier[insns] [ literal[int] ]):
keyword[for] identifier[insn] keyword[in] identifier[insns] :
keyword[if] identifier[cls] . identifier[_is_noop_insn] ( identifier[insn] ):
identifier[nop_length] += identifier[insn] . identifier[size]
keyword[else] :
keyword[break]
keyword[return] identifier[nop_length]
|
def _get_nop_length(cls, insns):
"""
Calculate the total size of leading nop instructions.
:param insns: A list of capstone insn objects.
:return: Number of bytes of leading nop instructions.
:rtype: int
"""
nop_length = 0
if insns and cls._is_noop_insn(insns[0]):
# see where those nop instructions terminate
for insn in insns:
if cls._is_noop_insn(insn):
nop_length += insn.size # depends on [control=['if'], data=[]]
else:
break # depends on [control=['for'], data=['insn']] # depends on [control=['if'], data=[]]
return nop_length
|
def __get_html(self, body=None):
    """
    Builds the full html document around the given body tag content.

    :param body: Body tag content; when omitted, an empty report skeleton is emitted.
    :type body: unicode
    :return: Html.
    :rtype: unicode
    """
    lines = ["<html>", "<head>"]
    # Inline each bundled javascript payload in its own script tag.
    for script in (self.__jquery_javascript,
                   self.__crittercism_javascript,
                   self.__reporter_javascript):
        lines.append("<script type=\"text/javascript\">")
        lines.append(script)
        lines.append("</script>")
    lines.extend(("<style type=\"text/css\">",
                  self.__style,
                  "</style>",
                  "</head>"))
    if body is None:
        # Default skeleton: an empty report container for the scripts to fill.
        lines.extend(("<body>", "<div id=\"report\">", "</div>", "</body>"))
    else:
        lines.append(body)
    lines.append("</html>")
    return "\n".join(lines)
|
def function[__get_html, parameter[self, body]]:
constant[
Returns the html content with given body tag content.
:param body: Body tag content.
:type body: unicode
:return: Html.
:rtype: unicode
]
variable[output] assign[=] list[[]]
call[name[output].append, parameter[constant[<html>]]]
call[name[output].append, parameter[constant[<head>]]]
for taget[name[javascript]] in starred[tuple[[<ast.Attribute object at 0x7da1b0852590>, <ast.Attribute object at 0x7da1b0852410>, <ast.Attribute object at 0x7da1b08530d0>]]] begin[:]
call[name[output].append, parameter[constant[<script type="text/javascript">]]]
call[name[output].append, parameter[name[javascript]]]
call[name[output].append, parameter[constant[</script>]]]
call[name[output].append, parameter[constant[<style type="text/css">]]]
call[name[output].append, parameter[name[self].__style]]
call[name[output].append, parameter[constant[</style>]]]
call[name[output].append, parameter[constant[</head>]]]
if compare[name[body] is_not constant[None]] begin[:]
call[name[output].append, parameter[name[body]]]
call[name[output].append, parameter[constant[</html>]]]
return[call[constant[
].join, parameter[name[output]]]]
|
keyword[def] identifier[__get_html] ( identifier[self] , identifier[body] = keyword[None] ):
literal[string]
identifier[output] =[]
identifier[output] . identifier[append] ( literal[string] )
identifier[output] . identifier[append] ( literal[string] )
keyword[for] identifier[javascript] keyword[in] ( identifier[self] . identifier[__jquery_javascript] ,
identifier[self] . identifier[__crittercism_javascript] ,
identifier[self] . identifier[__reporter_javascript] ):
identifier[output] . identifier[append] ( literal[string] )
identifier[output] . identifier[append] ( identifier[javascript] )
identifier[output] . identifier[append] ( literal[string] )
identifier[output] . identifier[append] ( literal[string] )
identifier[output] . identifier[append] ( identifier[self] . identifier[__style] )
identifier[output] . identifier[append] ( literal[string] )
identifier[output] . identifier[append] ( literal[string] )
keyword[if] identifier[body] keyword[is] keyword[not] keyword[None] :
identifier[output] . identifier[append] ( identifier[body] )
keyword[else] :
identifier[output] . identifier[append] ( literal[string] )
identifier[output] . identifier[append] ( literal[string] )
identifier[output] . identifier[append] ( literal[string] )
identifier[output] . identifier[append] ( literal[string] )
identifier[output] . identifier[append] ( literal[string] )
keyword[return] literal[string] . identifier[join] ( identifier[output] )
|
def __get_html(self, body=None):
"""
Returns the html content with given body tag content.
:param body: Body tag content.
:type body: unicode
:return: Html.
:rtype: unicode
"""
output = []
output.append('<html>')
output.append('<head>')
for javascript in (self.__jquery_javascript, self.__crittercism_javascript, self.__reporter_javascript):
output.append('<script type="text/javascript">')
output.append(javascript)
output.append('</script>') # depends on [control=['for'], data=['javascript']]
output.append('<style type="text/css">')
output.append(self.__style)
output.append('</style>')
output.append('</head>')
if body is not None:
output.append(body) # depends on [control=['if'], data=['body']]
else:
output.append('<body>')
output.append('<div id="report">')
output.append('</div>')
output.append('</body>')
output.append('</html>')
return '\n'.join(output)
|
def update_button_status(self):
    """Function to enable or disable the Ok button.

    The Ok button is enabled only while the displaced selector has a
    non-empty current field.
    """
    ok_button = self.button_box.button(QtWidgets.QDialogButtonBox.Ok)
    # An empty current field means there is nothing to run against yet.
    ok_button.setEnabled(len(self.displaced.currentField()) > 0)
|
def function[update_button_status, parameter[self]]:
constant[Function to enable or disable the Ok button.
]
if compare[call[name[len], parameter[call[name[self].displaced.currentField, parameter[]]]] greater[>] constant[0]] begin[:]
call[call[name[self].button_box.button, parameter[name[QtWidgets].QDialogButtonBox.Ok]].setEnabled, parameter[constant[True]]]
|
keyword[def] identifier[update_button_status] ( identifier[self] ):
literal[string]
keyword[if] identifier[len] ( identifier[self] . identifier[displaced] . identifier[currentField] ())> literal[int] :
identifier[self] . identifier[button_box] . identifier[button] (
identifier[QtWidgets] . identifier[QDialogButtonBox] . identifier[Ok] ). identifier[setEnabled] ( keyword[True] )
keyword[else] :
identifier[self] . identifier[button_box] . identifier[button] (
identifier[QtWidgets] . identifier[QDialogButtonBox] . identifier[Ok] ). identifier[setEnabled] ( keyword[False] )
|
def update_button_status(self):
"""Function to enable or disable the Ok button.
"""
# enable/disable ok button
if len(self.displaced.currentField()) > 0:
self.button_box.button(QtWidgets.QDialogButtonBox.Ok).setEnabled(True) # depends on [control=['if'], data=[]]
else:
self.button_box.button(QtWidgets.QDialogButtonBox.Ok).setEnabled(False)
|
def main(ctx, host, port, transport_type, timeout, ca_certs):
    """Connects to a Riemann server to send events or query the index
    By default, will attempt to contact Riemann on localhost:5555 over TCP. The
    RIEMANN_HOST and RIEMANN_PORT environment variables can be used to
    configure the host and port used. Command line parameters will override the
    environment variables.
    Use `-T none` to test commands without actually connecting to a server.
    """
    transport_mod = riemann_client.transport
    if transport_type == 'udp':
        # UDP is connectionless, so a socket timeout is meaningless here.
        if timeout is not None:
            ctx.fail('--timeout cannot be used with the UDP transport')
        transport = transport_mod.UDPTransport(host, port)
    elif transport_type == 'tls':
        # A CA bundle is required to verify the server's certificate.
        if ca_certs is None:
            ctx.fail('--ca-certs must be set when using the TLS transport')
        transport = transport_mod.TLSTransport(host, port, timeout, ca_certs)
    elif transport_type == 'tcp':
        transport = transport_mod.TCPTransport(host, port, timeout)
    elif transport_type == 'none':
        transport = transport_mod.BlankTransport()
    # Stash the chosen transport on the click context for subcommands.
    ctx.obj = transport
|
def function[main, parameter[ctx, host, port, transport_type, timeout, ca_certs]]:
constant[Connects to a Riemann server to send events or query the index
By default, will attempt to contact Riemann on localhost:5555 over TCP. The
RIEMANN_HOST and RIEMANN_PORT environment variables can be used to
configure the host and port used. Command line parameters will override the
environment variables.
Use `-T none` to test commands without actually connecting to a server.
]
if compare[name[transport_type] equal[==] constant[udp]] begin[:]
if compare[name[timeout] is_not constant[None]] begin[:]
call[name[ctx].fail, parameter[constant[--timeout cannot be used with the UDP transport]]]
variable[transport] assign[=] call[name[riemann_client].transport.UDPTransport, parameter[name[host], name[port]]]
name[ctx].obj assign[=] name[transport]
|
keyword[def] identifier[main] ( identifier[ctx] , identifier[host] , identifier[port] , identifier[transport_type] , identifier[timeout] , identifier[ca_certs] ):
literal[string]
keyword[if] identifier[transport_type] == literal[string] :
keyword[if] identifier[timeout] keyword[is] keyword[not] keyword[None] :
identifier[ctx] . identifier[fail] ( literal[string] )
identifier[transport] = identifier[riemann_client] . identifier[transport] . identifier[UDPTransport] ( identifier[host] , identifier[port] )
keyword[elif] identifier[transport_type] == literal[string] :
identifier[transport] = identifier[riemann_client] . identifier[transport] . identifier[TCPTransport] ( identifier[host] , identifier[port] , identifier[timeout] )
keyword[elif] identifier[transport_type] == literal[string] :
keyword[if] identifier[ca_certs] keyword[is] keyword[None] :
identifier[ctx] . identifier[fail] ( literal[string] )
identifier[transport] = identifier[riemann_client] . identifier[transport] . identifier[TLSTransport] (
identifier[host] , identifier[port] , identifier[timeout] , identifier[ca_certs] )
keyword[elif] identifier[transport_type] == literal[string] :
identifier[transport] = identifier[riemann_client] . identifier[transport] . identifier[BlankTransport] ()
identifier[ctx] . identifier[obj] = identifier[transport]
|
def main(ctx, host, port, transport_type, timeout, ca_certs):
"""Connects to a Riemann server to send events or query the index
By default, will attempt to contact Riemann on localhost:5555 over TCP. The
RIEMANN_HOST and RIEMANN_PORT environment variables can be used to
configure the host and port used. Command line parameters will override the
environment variables.
Use `-T none` to test commands without actually connecting to a server.
"""
if transport_type == 'udp':
if timeout is not None:
ctx.fail('--timeout cannot be used with the UDP transport') # depends on [control=['if'], data=[]]
transport = riemann_client.transport.UDPTransport(host, port) # depends on [control=['if'], data=[]]
elif transport_type == 'tcp':
transport = riemann_client.transport.TCPTransport(host, port, timeout) # depends on [control=['if'], data=[]]
elif transport_type == 'tls':
if ca_certs is None:
ctx.fail('--ca-certs must be set when using the TLS transport') # depends on [control=['if'], data=[]]
transport = riemann_client.transport.TLSTransport(host, port, timeout, ca_certs) # depends on [control=['if'], data=[]]
elif transport_type == 'none':
transport = riemann_client.transport.BlankTransport() # depends on [control=['if'], data=[]]
ctx.obj = transport
|
def snpflow(args):
    """
    %prog snpflow trimmed reference.fasta
    Run SNP calling pipeline until allele_counts are generated. This includes
    generation of native files, SNP_Het file. Speedup for fragmented genomes
    are also supported.
    """
    p = OptionParser(snpflow.__doc__)
    p.set_fastq_names()
    p.set_cpus()
    opts, args = p.parse_args(args)
    # Exactly two positional args are required: trimmed-reads dir and reference.
    if len(args) != 2:
        sys.exit(not p.print_help())
    trimmed, ref = args
    # Fragmented references (>= 1000 sequences) take the "supercat" speedup
    # path: contigs are concatenated into a single pseudo-genome for alignment
    # and mapped back to contig coordinates later via the .coords file.
    nseqs = len(Fasta(ref))
    supercat = nseqs >= 1000
    if supercat:
        logging.debug("Total seqs in ref: {0} (supercat={1})".
                      format(nseqs, supercat))
    reads, samples = scan_read_files(trimmed, opts.names)
    # Set up directory structure
    nativedir, countsdir = "native", "allele_counts"
    for d in (nativedir, countsdir):
        mkdir(d)
    # MakeManager accumulates (inputs, outputs, command) rules; the whole
    # workflow is emitted at the end via mm.write().
    mm = MakeManager()
    # Step 0 - index database
    db = op.join(*check_index(ref, supercat=supercat, go=False))
    cmd = "python -m jcvi.apps.gmap index {0}".format(ref)
    if supercat:
        cmd += " --supercat"
        # Coordinate map from supercat positions back to original contigs.
        coordsfile = db + ".coords"
        supercatfile = ref.rsplit(".", 1)[0] + ".supercat.fasta"
        mm.add(ref, (db, coordsfile), cmd)
    else:
        mm.add(ref, db, cmd)
    # Step 1 - GSNAP alignment and conversion to native file
    allnatives = []
    allsamstats = []
    gmapdb = supercatfile if supercat else ref
    for f in reads:
        prefix = get_prefix(f, ref)
        gsnapfile = op.join(nativedir, prefix + ".gsnap")
        nativefile = op.join(nativedir, prefix + ".unique.native")
        samstatsfile = op.join(nativedir, prefix + ".unique.sam.stats")
        cmd = "python -m jcvi.apps.gmap align {0} {1}".format(gmapdb, f)
        cmd += " --outdir={0} --native --cpus=1".format(nativedir)
        mm.add((f, db), nativefile, cmd)
        cmd = "python -m jcvi.apps.gmap bam {0} {1} --cpus=1".\
            format(gsnapfile, gmapdb)
        mm.add(nativefile, samstatsfile, cmd)
        allnatives.append(nativefile)
        allsamstats.append(samstatsfile)
    # Step 2 - call SNP discovery
    if supercat:
        # Convert native files from supercat coordinates back to original
        # contig coordinates, then run discovery via the generated speedup.sh.
        nativeconverted = nativedir + "-converted"
        mkdir(nativeconverted)
        allnativesc = [op.join(nativeconverted, op.basename(x))
                       for x in allnatives]
        cmd = "tGBS-Convert_Pseudo_Genome_NATIVE_Coordinates.pl"
        cmd += " -i {0}/*.native -o {1}".format(nativedir, nativeconverted)
        cmd += " -c {0}".format(coordsfile)
        cmds = ["rm -rf {0}".format(nativeconverted), cmd]
        mm.add(allnatives + [coordsfile], allnativesc, cmds)
        runfile = "speedup.sh"
        write_file(runfile, speedupsh.format(nativeconverted, opts.cpus))
        # Downstream steps read from the converted directory from here on.
        nativedir = nativeconverted
        allsnps = [
            op.join(nativedir, "{0}.SNPs_Het.txt".format(x)) for x in samples]
        mm.add(allnativesc, allsnps, "./{0}".format(runfile))
    else:
        # One SNP_Discovery run per sample, fed only that sample's natives.
        for s in samples:
            snpfile = op.join(nativedir, "{0}.SNPs_Het.txt".format(s))
            cmd = "SNP_Discovery-short.pl"
            cmd += " -native {0}/{1}.*unique.native".format(nativedir, s)
            cmd += " -o {0} -a 2 -ac 0.3 -c 0.8".format(snpfile)
            flist = [x for x in allnatives if op.basename(x).split(".")[
                0] == s]
            mm.add(flist, snpfile, cmd)
    # Step 3 - generate equal file
    allsnps = [op.join(nativedir, "{0}.SNPs_Het.txt".format(x))
               for x in samples]
    for s in samples:
        equalfile = op.join(nativedir, "{0}.equal".format(s))
        cmd = "extract_reference_alleles.pl"
        cmd += " --native {0}/{1}.*unique.native".format(nativedir, s)
        cmd += " --genotype {0}/{1}.SNPs_Het.txt".format(nativedir, s)
        cmd += " --allgenotypes {0}/*.SNPs_Het.txt".format(nativedir)
        cmd += " --fasta {0} --output {1}".format(ref, equalfile)
        mm.add(allsnps, equalfile, cmd)
    # Step 4 - generate snp matrix
    allequals = [op.join(nativedir, "{0}.equal".format(x)) for x in samples]
    matrix = "snps.matrix.txt"
    cmd = "generate_matrix.pl"
    cmd += " --tables {0}/*SNPs_Het.txt --equal {0}/*equal".format(nativedir)
    cmd += " --fasta {0} --output {1}".format(ref, matrix)
    mm.add(allsnps + allequals, matrix, cmd)
    # Step 5 - generate allele counts
    allcounts = []
    for s in samples:
        allele_counts = op.join(
            countsdir, "{0}.SNPs_Het.allele_counts".format(s))
        cmd = "count_reads_per_allele.pl -m snps.matrix.txt"
        cmd += " -s {0} --native {1}/{0}.*unique.native".format(s, nativedir)
        cmd += " -o {0}".format(allele_counts)
        mm.add(matrix, allele_counts, cmd)
        allcounts.append(allele_counts)
    # Step 6 - generate raw snps
    rawsnps = "Genotyping.H3.txt"
    # NOTE(review): hard-coded absolute script paths below tie this pipeline
    # to one specific host environment — consider making them configurable.
    cmd = "/home/shared/scripts/delin/SamplesGenotyping.pl --homo 3"
    cmd += " -pf allele_counts -f {0} --outfile {1}".format(countsdir, rawsnps)
    cmds = ["rm -f {0}".format(rawsnps), cmd]
    mm.add(allcounts, rawsnps, cmds)
    # Step 7 - generate alignment report
    sam_summary = "sam.summary"
    cmd = "/home/shared/scripts/eddyyeh/alignment_stats.pl"
    cmd += " -f {0} -o {1}".format(" ".join(allsamstats), sam_summary)
    mm.add(allsamstats, sam_summary, cmd)
    native_summary = "native.summary"
    cmd = "/home/shared/scripts/eddyyeh/alignment_stats.pl"
    cmd += " -n {0} -o {1}".format(" ".join(allnatives), native_summary)
    mm.add(allnatives, native_summary, cmd)
    # Emit the accumulated workflow (all rules added above).
    mm.write()
|
def function[snpflow, parameter[args]]:
constant[
%prog snpflow trimmed reference.fasta
Run SNP calling pipeline until allele_counts are generated. This includes
generation of native files, SNP_Het file. Speedup for fragmented genomes
are also supported.
]
variable[p] assign[=] call[name[OptionParser], parameter[name[snpflow].__doc__]]
call[name[p].set_fastq_names, parameter[]]
call[name[p].set_cpus, parameter[]]
<ast.Tuple object at 0x7da237d34f40> assign[=] call[name[p].parse_args, parameter[name[args]]]
if compare[call[name[len], parameter[name[args]]] not_equal[!=] constant[2]] begin[:]
call[name[sys].exit, parameter[<ast.UnaryOp object at 0x7da18bccb280>]]
<ast.Tuple object at 0x7da18bccabc0> assign[=] name[args]
variable[nseqs] assign[=] call[name[len], parameter[call[name[Fasta], parameter[name[ref]]]]]
variable[supercat] assign[=] compare[name[nseqs] greater_or_equal[>=] constant[1000]]
if name[supercat] begin[:]
call[name[logging].debug, parameter[call[constant[Total seqs in ref: {0} (supercat={1})].format, parameter[name[nseqs], name[supercat]]]]]
<ast.Tuple object at 0x7da1b2345360> assign[=] call[name[scan_read_files], parameter[name[trimmed], name[opts].names]]
<ast.Tuple object at 0x7da1b23444c0> assign[=] tuple[[<ast.Constant object at 0x7da1b23446a0>, <ast.Constant object at 0x7da1b2346b60>]]
for taget[name[d]] in starred[tuple[[<ast.Name object at 0x7da1b2346a40>, <ast.Name object at 0x7da1b23440a0>]]] begin[:]
call[name[mkdir], parameter[name[d]]]
variable[mm] assign[=] call[name[MakeManager], parameter[]]
variable[db] assign[=] call[name[op].join, parameter[<ast.Starred object at 0x7da1b2344d60>]]
variable[cmd] assign[=] call[constant[python -m jcvi.apps.gmap index {0}].format, parameter[name[ref]]]
if name[supercat] begin[:]
<ast.AugAssign object at 0x7da1b2344eb0>
variable[coordsfile] assign[=] binary_operation[name[db] + constant[.coords]]
variable[supercatfile] assign[=] binary_operation[call[call[name[ref].rsplit, parameter[constant[.], constant[1]]]][constant[0]] + constant[.supercat.fasta]]
call[name[mm].add, parameter[name[ref], tuple[[<ast.Name object at 0x7da1b2344430>, <ast.Name object at 0x7da1b2346c20>]], name[cmd]]]
variable[allnatives] assign[=] list[[]]
variable[allsamstats] assign[=] list[[]]
variable[gmapdb] assign[=] <ast.IfExp object at 0x7da1b2346260>
for taget[name[f]] in starred[name[reads]] begin[:]
variable[prefix] assign[=] call[name[get_prefix], parameter[name[f], name[ref]]]
variable[gsnapfile] assign[=] call[name[op].join, parameter[name[nativedir], binary_operation[name[prefix] + constant[.gsnap]]]]
variable[nativefile] assign[=] call[name[op].join, parameter[name[nativedir], binary_operation[name[prefix] + constant[.unique.native]]]]
variable[samstatsfile] assign[=] call[name[op].join, parameter[name[nativedir], binary_operation[name[prefix] + constant[.unique.sam.stats]]]]
variable[cmd] assign[=] call[constant[python -m jcvi.apps.gmap align {0} {1}].format, parameter[name[gmapdb], name[f]]]
<ast.AugAssign object at 0x7da1b2346ef0>
call[name[mm].add, parameter[tuple[[<ast.Name object at 0x7da1b2346170>, <ast.Name object at 0x7da1b2345ba0>]], name[nativefile], name[cmd]]]
variable[cmd] assign[=] call[constant[python -m jcvi.apps.gmap bam {0} {1} --cpus=1].format, parameter[name[gsnapfile], name[gmapdb]]]
call[name[mm].add, parameter[name[nativefile], name[samstatsfile], name[cmd]]]
call[name[allnatives].append, parameter[name[nativefile]]]
call[name[allsamstats].append, parameter[name[samstatsfile]]]
if name[supercat] begin[:]
variable[nativeconverted] assign[=] binary_operation[name[nativedir] + constant[-converted]]
call[name[mkdir], parameter[name[nativeconverted]]]
variable[allnativesc] assign[=] <ast.ListComp object at 0x7da1b23447f0>
variable[cmd] assign[=] constant[tGBS-Convert_Pseudo_Genome_NATIVE_Coordinates.pl]
<ast.AugAssign object at 0x7da1b2344a60>
<ast.AugAssign object at 0x7da1b2344730>
variable[cmds] assign[=] list[[<ast.Call object at 0x7da20c76f2b0>, <ast.Name object at 0x7da20c76ed70>]]
call[name[mm].add, parameter[binary_operation[name[allnatives] + list[[<ast.Name object at 0x7da20c76de10>]]], name[allnativesc], name[cmds]]]
variable[runfile] assign[=] constant[speedup.sh]
call[name[write_file], parameter[name[runfile], call[name[speedupsh].format, parameter[name[nativeconverted], name[opts].cpus]]]]
variable[nativedir] assign[=] name[nativeconverted]
variable[allsnps] assign[=] <ast.ListComp object at 0x7da20c76e830>
call[name[mm].add, parameter[name[allnativesc], name[allsnps], call[constant[./{0}].format, parameter[name[runfile]]]]]
variable[allsnps] assign[=] <ast.ListComp object at 0x7da20c76e950>
for taget[name[s]] in starred[name[samples]] begin[:]
variable[equalfile] assign[=] call[name[op].join, parameter[name[nativedir], call[constant[{0}.equal].format, parameter[name[s]]]]]
variable[cmd] assign[=] constant[extract_reference_alleles.pl]
<ast.AugAssign object at 0x7da20c76f040>
<ast.AugAssign object at 0x7da20c76d390>
<ast.AugAssign object at 0x7da20c76cfa0>
<ast.AugAssign object at 0x7da20c76ef50>
call[name[mm].add, parameter[name[allsnps], name[equalfile], name[cmd]]]
variable[allequals] assign[=] <ast.ListComp object at 0x7da20c76d000>
variable[matrix] assign[=] constant[snps.matrix.txt]
variable[cmd] assign[=] constant[generate_matrix.pl]
<ast.AugAssign object at 0x7da20c76fbe0>
<ast.AugAssign object at 0x7da18fe929b0>
call[name[mm].add, parameter[binary_operation[name[allsnps] + name[allequals]], name[matrix], name[cmd]]]
variable[allcounts] assign[=] list[[]]
for taget[name[s]] in starred[name[samples]] begin[:]
variable[allele_counts] assign[=] call[name[op].join, parameter[name[countsdir], call[constant[{0}.SNPs_Het.allele_counts].format, parameter[name[s]]]]]
variable[cmd] assign[=] constant[count_reads_per_allele.pl -m snps.matrix.txt]
<ast.AugAssign object at 0x7da18fe920b0>
<ast.AugAssign object at 0x7da18fe90460>
call[name[mm].add, parameter[name[matrix], name[allele_counts], name[cmd]]]
call[name[allcounts].append, parameter[name[allele_counts]]]
variable[rawsnps] assign[=] constant[Genotyping.H3.txt]
variable[cmd] assign[=] constant[/home/shared/scripts/delin/SamplesGenotyping.pl --homo 3]
<ast.AugAssign object at 0x7da18fe93bb0>
variable[cmds] assign[=] list[[<ast.Call object at 0x7da18fe921d0>, <ast.Name object at 0x7da18fe93d90>]]
call[name[mm].add, parameter[name[allcounts], name[rawsnps], name[cmds]]]
variable[sam_summary] assign[=] constant[sam.summary]
variable[cmd] assign[=] constant[/home/shared/scripts/eddyyeh/alignment_stats.pl]
<ast.AugAssign object at 0x7da18dc06ad0>
call[name[mm].add, parameter[name[allsamstats], name[sam_summary], name[cmd]]]
variable[native_summary] assign[=] constant[native.summary]
variable[cmd] assign[=] constant[/home/shared/scripts/eddyyeh/alignment_stats.pl]
<ast.AugAssign object at 0x7da18dc051e0>
call[name[mm].add, parameter[name[allnatives], name[native_summary], name[cmd]]]
call[name[mm].write, parameter[]]
|
keyword[def] identifier[snpflow] ( identifier[args] ):
literal[string]
identifier[p] = identifier[OptionParser] ( identifier[snpflow] . identifier[__doc__] )
identifier[p] . identifier[set_fastq_names] ()
identifier[p] . identifier[set_cpus] ()
identifier[opts] , identifier[args] = identifier[p] . identifier[parse_args] ( identifier[args] )
keyword[if] identifier[len] ( identifier[args] )!= literal[int] :
identifier[sys] . identifier[exit] ( keyword[not] identifier[p] . identifier[print_help] ())
identifier[trimmed] , identifier[ref] = identifier[args]
identifier[nseqs] = identifier[len] ( identifier[Fasta] ( identifier[ref] ))
identifier[supercat] = identifier[nseqs] >= literal[int]
keyword[if] identifier[supercat] :
identifier[logging] . identifier[debug] ( literal[string] .
identifier[format] ( identifier[nseqs] , identifier[supercat] ))
identifier[reads] , identifier[samples] = identifier[scan_read_files] ( identifier[trimmed] , identifier[opts] . identifier[names] )
identifier[nativedir] , identifier[countsdir] = literal[string] , literal[string]
keyword[for] identifier[d] keyword[in] ( identifier[nativedir] , identifier[countsdir] ):
identifier[mkdir] ( identifier[d] )
identifier[mm] = identifier[MakeManager] ()
identifier[db] = identifier[op] . identifier[join] (* identifier[check_index] ( identifier[ref] , identifier[supercat] = identifier[supercat] , identifier[go] = keyword[False] ))
identifier[cmd] = literal[string] . identifier[format] ( identifier[ref] )
keyword[if] identifier[supercat] :
identifier[cmd] += literal[string]
identifier[coordsfile] = identifier[db] + literal[string]
identifier[supercatfile] = identifier[ref] . identifier[rsplit] ( literal[string] , literal[int] )[ literal[int] ]+ literal[string]
identifier[mm] . identifier[add] ( identifier[ref] ,( identifier[db] , identifier[coordsfile] ), identifier[cmd] )
keyword[else] :
identifier[mm] . identifier[add] ( identifier[ref] , identifier[db] , identifier[cmd] )
identifier[allnatives] =[]
identifier[allsamstats] =[]
identifier[gmapdb] = identifier[supercatfile] keyword[if] identifier[supercat] keyword[else] identifier[ref]
keyword[for] identifier[f] keyword[in] identifier[reads] :
identifier[prefix] = identifier[get_prefix] ( identifier[f] , identifier[ref] )
identifier[gsnapfile] = identifier[op] . identifier[join] ( identifier[nativedir] , identifier[prefix] + literal[string] )
identifier[nativefile] = identifier[op] . identifier[join] ( identifier[nativedir] , identifier[prefix] + literal[string] )
identifier[samstatsfile] = identifier[op] . identifier[join] ( identifier[nativedir] , identifier[prefix] + literal[string] )
identifier[cmd] = literal[string] . identifier[format] ( identifier[gmapdb] , identifier[f] )
identifier[cmd] += literal[string] . identifier[format] ( identifier[nativedir] )
identifier[mm] . identifier[add] (( identifier[f] , identifier[db] ), identifier[nativefile] , identifier[cmd] )
identifier[cmd] = literal[string] . identifier[format] ( identifier[gsnapfile] , identifier[gmapdb] )
identifier[mm] . identifier[add] ( identifier[nativefile] , identifier[samstatsfile] , identifier[cmd] )
identifier[allnatives] . identifier[append] ( identifier[nativefile] )
identifier[allsamstats] . identifier[append] ( identifier[samstatsfile] )
keyword[if] identifier[supercat] :
identifier[nativeconverted] = identifier[nativedir] + literal[string]
identifier[mkdir] ( identifier[nativeconverted] )
identifier[allnativesc] =[ identifier[op] . identifier[join] ( identifier[nativeconverted] , identifier[op] . identifier[basename] ( identifier[x] ))
keyword[for] identifier[x] keyword[in] identifier[allnatives] ]
identifier[cmd] = literal[string]
identifier[cmd] += literal[string] . identifier[format] ( identifier[nativedir] , identifier[nativeconverted] )
identifier[cmd] += literal[string] . identifier[format] ( identifier[coordsfile] )
identifier[cmds] =[ literal[string] . identifier[format] ( identifier[nativeconverted] ), identifier[cmd] ]
identifier[mm] . identifier[add] ( identifier[allnatives] +[ identifier[coordsfile] ], identifier[allnativesc] , identifier[cmds] )
identifier[runfile] = literal[string]
identifier[write_file] ( identifier[runfile] , identifier[speedupsh] . identifier[format] ( identifier[nativeconverted] , identifier[opts] . identifier[cpus] ))
identifier[nativedir] = identifier[nativeconverted]
identifier[allsnps] =[
identifier[op] . identifier[join] ( identifier[nativedir] , literal[string] . identifier[format] ( identifier[x] )) keyword[for] identifier[x] keyword[in] identifier[samples] ]
identifier[mm] . identifier[add] ( identifier[allnativesc] , identifier[allsnps] , literal[string] . identifier[format] ( identifier[runfile] ))
keyword[else] :
keyword[for] identifier[s] keyword[in] identifier[samples] :
identifier[snpfile] = identifier[op] . identifier[join] ( identifier[nativedir] , literal[string] . identifier[format] ( identifier[s] ))
identifier[cmd] = literal[string]
identifier[cmd] += literal[string] . identifier[format] ( identifier[nativedir] , identifier[s] )
identifier[cmd] += literal[string] . identifier[format] ( identifier[snpfile] )
identifier[flist] =[ identifier[x] keyword[for] identifier[x] keyword[in] identifier[allnatives] keyword[if] identifier[op] . identifier[basename] ( identifier[x] ). identifier[split] ( literal[string] )[
literal[int] ]== identifier[s] ]
identifier[mm] . identifier[add] ( identifier[flist] , identifier[snpfile] , identifier[cmd] )
identifier[allsnps] =[ identifier[op] . identifier[join] ( identifier[nativedir] , literal[string] . identifier[format] ( identifier[x] ))
keyword[for] identifier[x] keyword[in] identifier[samples] ]
keyword[for] identifier[s] keyword[in] identifier[samples] :
identifier[equalfile] = identifier[op] . identifier[join] ( identifier[nativedir] , literal[string] . identifier[format] ( identifier[s] ))
identifier[cmd] = literal[string]
identifier[cmd] += literal[string] . identifier[format] ( identifier[nativedir] , identifier[s] )
identifier[cmd] += literal[string] . identifier[format] ( identifier[nativedir] , identifier[s] )
identifier[cmd] += literal[string] . identifier[format] ( identifier[nativedir] )
identifier[cmd] += literal[string] . identifier[format] ( identifier[ref] , identifier[equalfile] )
identifier[mm] . identifier[add] ( identifier[allsnps] , identifier[equalfile] , identifier[cmd] )
identifier[allequals] =[ identifier[op] . identifier[join] ( identifier[nativedir] , literal[string] . identifier[format] ( identifier[x] )) keyword[for] identifier[x] keyword[in] identifier[samples] ]
identifier[matrix] = literal[string]
identifier[cmd] = literal[string]
identifier[cmd] += literal[string] . identifier[format] ( identifier[nativedir] )
identifier[cmd] += literal[string] . identifier[format] ( identifier[ref] , identifier[matrix] )
identifier[mm] . identifier[add] ( identifier[allsnps] + identifier[allequals] , identifier[matrix] , identifier[cmd] )
identifier[allcounts] =[]
keyword[for] identifier[s] keyword[in] identifier[samples] :
identifier[allele_counts] = identifier[op] . identifier[join] (
identifier[countsdir] , literal[string] . identifier[format] ( identifier[s] ))
identifier[cmd] = literal[string]
identifier[cmd] += literal[string] . identifier[format] ( identifier[s] , identifier[nativedir] )
identifier[cmd] += literal[string] . identifier[format] ( identifier[allele_counts] )
identifier[mm] . identifier[add] ( identifier[matrix] , identifier[allele_counts] , identifier[cmd] )
identifier[allcounts] . identifier[append] ( identifier[allele_counts] )
identifier[rawsnps] = literal[string]
identifier[cmd] = literal[string]
identifier[cmd] += literal[string] . identifier[format] ( identifier[countsdir] , identifier[rawsnps] )
identifier[cmds] =[ literal[string] . identifier[format] ( identifier[rawsnps] ), identifier[cmd] ]
identifier[mm] . identifier[add] ( identifier[allcounts] , identifier[rawsnps] , identifier[cmds] )
identifier[sam_summary] = literal[string]
identifier[cmd] = literal[string]
identifier[cmd] += literal[string] . identifier[format] ( literal[string] . identifier[join] ( identifier[allsamstats] ), identifier[sam_summary] )
identifier[mm] . identifier[add] ( identifier[allsamstats] , identifier[sam_summary] , identifier[cmd] )
identifier[native_summary] = literal[string]
identifier[cmd] = literal[string]
identifier[cmd] += literal[string] . identifier[format] ( literal[string] . identifier[join] ( identifier[allnatives] ), identifier[native_summary] )
identifier[mm] . identifier[add] ( identifier[allnatives] , identifier[native_summary] , identifier[cmd] )
identifier[mm] . identifier[write] ()
|
def snpflow(args):
"""
%prog snpflow trimmed reference.fasta
Run SNP calling pipeline until allele_counts are generated. This includes
generation of native files, SNP_Het file. Speedup for fragmented genomes
are also supported.
"""
p = OptionParser(snpflow.__doc__)
p.set_fastq_names()
p.set_cpus()
(opts, args) = p.parse_args(args)
if len(args) != 2:
sys.exit(not p.print_help()) # depends on [control=['if'], data=[]]
(trimmed, ref) = args
nseqs = len(Fasta(ref))
supercat = nseqs >= 1000
if supercat:
logging.debug('Total seqs in ref: {0} (supercat={1})'.format(nseqs, supercat)) # depends on [control=['if'], data=[]]
(reads, samples) = scan_read_files(trimmed, opts.names)
# Set up directory structure
(nativedir, countsdir) = ('native', 'allele_counts')
for d in (nativedir, countsdir):
mkdir(d) # depends on [control=['for'], data=['d']]
mm = MakeManager()
# Step 0 - index database
db = op.join(*check_index(ref, supercat=supercat, go=False))
cmd = 'python -m jcvi.apps.gmap index {0}'.format(ref)
if supercat:
cmd += ' --supercat'
coordsfile = db + '.coords'
supercatfile = ref.rsplit('.', 1)[0] + '.supercat.fasta'
mm.add(ref, (db, coordsfile), cmd) # depends on [control=['if'], data=[]]
else:
mm.add(ref, db, cmd)
# Step 1 - GSNAP alignment and conversion to native file
allnatives = []
allsamstats = []
gmapdb = supercatfile if supercat else ref
for f in reads:
prefix = get_prefix(f, ref)
gsnapfile = op.join(nativedir, prefix + '.gsnap')
nativefile = op.join(nativedir, prefix + '.unique.native')
samstatsfile = op.join(nativedir, prefix + '.unique.sam.stats')
cmd = 'python -m jcvi.apps.gmap align {0} {1}'.format(gmapdb, f)
cmd += ' --outdir={0} --native --cpus=1'.format(nativedir)
mm.add((f, db), nativefile, cmd)
cmd = 'python -m jcvi.apps.gmap bam {0} {1} --cpus=1'.format(gsnapfile, gmapdb)
mm.add(nativefile, samstatsfile, cmd)
allnatives.append(nativefile)
allsamstats.append(samstatsfile) # depends on [control=['for'], data=['f']]
# Step 2 - call SNP discovery
if supercat:
nativeconverted = nativedir + '-converted'
mkdir(nativeconverted)
allnativesc = [op.join(nativeconverted, op.basename(x)) for x in allnatives]
cmd = 'tGBS-Convert_Pseudo_Genome_NATIVE_Coordinates.pl'
cmd += ' -i {0}/*.native -o {1}'.format(nativedir, nativeconverted)
cmd += ' -c {0}'.format(coordsfile)
cmds = ['rm -rf {0}'.format(nativeconverted), cmd]
mm.add(allnatives + [coordsfile], allnativesc, cmds)
runfile = 'speedup.sh'
write_file(runfile, speedupsh.format(nativeconverted, opts.cpus))
nativedir = nativeconverted
allsnps = [op.join(nativedir, '{0}.SNPs_Het.txt'.format(x)) for x in samples]
mm.add(allnativesc, allsnps, './{0}'.format(runfile)) # depends on [control=['if'], data=[]]
else:
for s in samples:
snpfile = op.join(nativedir, '{0}.SNPs_Het.txt'.format(s))
cmd = 'SNP_Discovery-short.pl'
cmd += ' -native {0}/{1}.*unique.native'.format(nativedir, s)
cmd += ' -o {0} -a 2 -ac 0.3 -c 0.8'.format(snpfile)
flist = [x for x in allnatives if op.basename(x).split('.')[0] == s]
mm.add(flist, snpfile, cmd) # depends on [control=['for'], data=['s']]
# Step 3 - generate equal file
allsnps = [op.join(nativedir, '{0}.SNPs_Het.txt'.format(x)) for x in samples]
for s in samples:
equalfile = op.join(nativedir, '{0}.equal'.format(s))
cmd = 'extract_reference_alleles.pl'
cmd += ' --native {0}/{1}.*unique.native'.format(nativedir, s)
cmd += ' --genotype {0}/{1}.SNPs_Het.txt'.format(nativedir, s)
cmd += ' --allgenotypes {0}/*.SNPs_Het.txt'.format(nativedir)
cmd += ' --fasta {0} --output {1}'.format(ref, equalfile)
mm.add(allsnps, equalfile, cmd) # depends on [control=['for'], data=['s']]
# Step 4 - generate snp matrix
allequals = [op.join(nativedir, '{0}.equal'.format(x)) for x in samples]
matrix = 'snps.matrix.txt'
cmd = 'generate_matrix.pl'
cmd += ' --tables {0}/*SNPs_Het.txt --equal {0}/*equal'.format(nativedir)
cmd += ' --fasta {0} --output {1}'.format(ref, matrix)
mm.add(allsnps + allequals, matrix, cmd)
# Step 5 - generate allele counts
allcounts = []
for s in samples:
allele_counts = op.join(countsdir, '{0}.SNPs_Het.allele_counts'.format(s))
cmd = 'count_reads_per_allele.pl -m snps.matrix.txt'
cmd += ' -s {0} --native {1}/{0}.*unique.native'.format(s, nativedir)
cmd += ' -o {0}'.format(allele_counts)
mm.add(matrix, allele_counts, cmd)
allcounts.append(allele_counts) # depends on [control=['for'], data=['s']]
# Step 6 - generate raw snps
rawsnps = 'Genotyping.H3.txt'
cmd = '/home/shared/scripts/delin/SamplesGenotyping.pl --homo 3'
cmd += ' -pf allele_counts -f {0} --outfile {1}'.format(countsdir, rawsnps)
cmds = ['rm -f {0}'.format(rawsnps), cmd]
mm.add(allcounts, rawsnps, cmds)
# Step 7 - generate alignment report
sam_summary = 'sam.summary'
cmd = '/home/shared/scripts/eddyyeh/alignment_stats.pl'
cmd += ' -f {0} -o {1}'.format(' '.join(allsamstats), sam_summary)
mm.add(allsamstats, sam_summary, cmd)
native_summary = 'native.summary'
cmd = '/home/shared/scripts/eddyyeh/alignment_stats.pl'
cmd += ' -n {0} -o {1}'.format(' '.join(allnatives), native_summary)
mm.add(allnatives, native_summary, cmd)
mm.write()
|
def get_userplaycount(self):
    """Returns the number of plays by a given username"""
    # Without a username there is nothing to query for.
    if not self.username:
        return
    request_params = self._get_params()
    request_params["username"] = self.username
    # Ask the web service for this entity's info, scoped to the user.
    response_doc = self._request(
        self.ws_prefix + ".getInfo", True, request_params)
    return _number(_extract(response_doc, "userplaycount"))
|
def function[get_userplaycount, parameter[self]]:
constant[Returns the number of plays by a given username]
if <ast.UnaryOp object at 0x7da1b0b4a0b0> begin[:]
return[None]
variable[params] assign[=] call[name[self]._get_params, parameter[]]
call[name[params]][constant[username]] assign[=] name[self].username
variable[doc] assign[=] call[name[self]._request, parameter[binary_operation[name[self].ws_prefix + constant[.getInfo]], constant[True], name[params]]]
return[call[name[_number], parameter[call[name[_extract], parameter[name[doc], constant[userplaycount]]]]]]
|
keyword[def] identifier[get_userplaycount] ( identifier[self] ):
literal[string]
keyword[if] keyword[not] identifier[self] . identifier[username] :
keyword[return]
identifier[params] = identifier[self] . identifier[_get_params] ()
identifier[params] [ literal[string] ]= identifier[self] . identifier[username]
identifier[doc] = identifier[self] . identifier[_request] ( identifier[self] . identifier[ws_prefix] + literal[string] , keyword[True] , identifier[params] )
keyword[return] identifier[_number] ( identifier[_extract] ( identifier[doc] , literal[string] ))
|
def get_userplaycount(self):
"""Returns the number of plays by a given username"""
if not self.username:
return # depends on [control=['if'], data=[]]
params = self._get_params()
params['username'] = self.username
doc = self._request(self.ws_prefix + '.getInfo', True, params)
return _number(_extract(doc, 'userplaycount'))
|
def show(self, username):
    """Return a specific user's info in LDIF format.

    :param username: the uid of the posixAccount entry to look up
    :return: the result of searching the LDAP client with the built filter
    """
    # Renamed from ``filter`` to avoid shadowing the builtin of that name.
    search_filter = ['(objectclass=posixAccount)', "(uid={})".format(username)]
    return self.client.search(search_filter)
|
def function[show, parameter[self, username]]:
constant[Return a specific user's info in LDIF format.]
variable[filter] assign[=] list[[<ast.Constant object at 0x7da1b20a86a0>, <ast.Call object at 0x7da1b20a8e50>]]
return[call[name[self].client.search, parameter[name[filter]]]]
|
keyword[def] identifier[show] ( identifier[self] , identifier[username] ):
literal[string]
identifier[filter] =[ literal[string] , literal[string] . identifier[format] ( identifier[username] )]
keyword[return] identifier[self] . identifier[client] . identifier[search] ( identifier[filter] )
|
def show(self, username):
"""Return a specific user's info in LDIF format."""
filter = ['(objectclass=posixAccount)', '(uid={})'.format(username)]
return self.client.search(filter)
|
def load_frontends(config, callback, internal_attributes):
    """
    Load all frontend modules specified in the config
    :type config: satosa.satosa_config.SATOSAConfig
    :type callback:
    (satosa.context.Context, satosa.internal.InternalData) -> satosa.response.Response
    :type internal_attributes: dict[string, dict[str, str | list[str]]]
    :rtype: Sequence[satosa.frontends.base.FrontendModule]
    :param config: The configuration of the satosa proxy
    :param callback: Function that will be called by the frontend after the authentication request
    has been processed.
    :return: A list of frontend modules
    """
    frontend_modules = _load_plugins(config.get("CUSTOM_PLUGIN_MODULE_PATHS"), config["FRONTEND_MODULES"],
                                     frontend_filter, config["BASE"], internal_attributes, callback)
    # Pass the list as a lazy %-style argument instead of eagerly formatting
    # the message with ``%`` — the formatting is deferred until the record is
    # actually emitted (standard logging best practice).
    logger.info("Setup frontends: %s", [frontend.name for frontend in frontend_modules])
    return frontend_modules
|
def function[load_frontends, parameter[config, callback, internal_attributes]]:
constant[
Load all frontend modules specified in the config
:type config: satosa.satosa_config.SATOSAConfig
:type callback:
(satosa.context.Context, satosa.internal.InternalData) -> satosa.response.Response
:type internal_attributes: dict[string, dict[str, str | list[str]]]
:rtype: Sequence[satosa.frontends.base.FrontendModule]
:param config: The configuration of the satosa proxy
:param callback: Function that will be called by the frontend after the authentication request
has been processed.
:return: A list of frontend modules
]
variable[frontend_modules] assign[=] call[name[_load_plugins], parameter[call[name[config].get, parameter[constant[CUSTOM_PLUGIN_MODULE_PATHS]]], call[name[config]][constant[FRONTEND_MODULES]], name[frontend_filter], call[name[config]][constant[BASE]], name[internal_attributes], name[callback]]]
call[name[logger].info, parameter[binary_operation[constant[Setup frontends: %s] <ast.Mod object at 0x7da2590d6920> <ast.ListComp object at 0x7da1b1633850>]]]
return[name[frontend_modules]]
|
keyword[def] identifier[load_frontends] ( identifier[config] , identifier[callback] , identifier[internal_attributes] ):
literal[string]
identifier[frontend_modules] = identifier[_load_plugins] ( identifier[config] . identifier[get] ( literal[string] ), identifier[config] [ literal[string] ],
identifier[frontend_filter] , identifier[config] [ literal[string] ], identifier[internal_attributes] , identifier[callback] )
identifier[logger] . identifier[info] ( literal[string] %[ identifier[frontend] . identifier[name] keyword[for] identifier[frontend] keyword[in] identifier[frontend_modules] ])
keyword[return] identifier[frontend_modules]
|
def load_frontends(config, callback, internal_attributes):
"""
Load all frontend modules specified in the config
:type config: satosa.satosa_config.SATOSAConfig
:type callback:
(satosa.context.Context, satosa.internal.InternalData) -> satosa.response.Response
:type internal_attributes: dict[string, dict[str, str | list[str]]]
:rtype: Sequence[satosa.frontends.base.FrontendModule]
:param config: The configuration of the satosa proxy
:param callback: Function that will be called by the frontend after the authentication request
has been processed.
:return: A list of frontend modules
"""
frontend_modules = _load_plugins(config.get('CUSTOM_PLUGIN_MODULE_PATHS'), config['FRONTEND_MODULES'], frontend_filter, config['BASE'], internal_attributes, callback)
logger.info('Setup frontends: %s' % [frontend.name for frontend in frontend_modules])
return frontend_modules
|
def _find_sub_controllers(self, remainder, request):
    '''
    Identifies the correct controller to route to by analyzing the
    request URI.

    ``remainder`` is the list of URL path segments not yet consumed by
    routing; ``request`` is the current request object, whose
    ``request.pecan['routing_args']`` holds positional args already
    extracted by earlier routing steps.
    Returns the result of ``lookup_controller`` when a sub-controller is
    found, otherwise falls through and returns None implicitly.
    '''
    # need either a get_one or get to parse args
    # (get_one is preferred — it is checked first)
    method = None
    for name in ('get_one', 'get'):
        if hasattr(self, name):
            method = name
            break
    if not method:
        return
    # get the args to figure out how much to chop off
    args = self._get_args_for_controller(getattr(self, method))
    # fixed_args = how many positional parameters remain to be filled from
    # the URL, after subtracting the routing args captured so far.
    fixed_args = len(args) - len(
        request.pecan.get('routing_args', [])
    )
    var_args = getargspec(getattr(self, method)).varargs
    # attempt to locate a sub-controller
    if var_args:
        # The handler accepts *args, so any path segment could either be a
        # routing arg or name a child controller: scan left to right and
        # dispatch to the first non-method child found.
        for i, item in enumerate(remainder):
            controller = self._lookup_child(item)
            if controller and not ismethod(controller):
                # Everything before the matched segment becomes routing args;
                # everything after it is passed on for further lookup.
                self._set_routing_args(request, remainder[:i])
                return lookup_controller(controller, remainder[i + 1:],
                                         request)
    elif fixed_args < len(remainder) and hasattr(
        self, remainder[fixed_args]
    ):
        # Fixed-arity handler: the segment right after the consumed args may
        # name a child controller attribute on this instance.
        controller = self._lookup_child(remainder[fixed_args])
        if not ismethod(controller):
            self._set_routing_args(request, remainder[:fixed_args])
            return lookup_controller(
                controller,
                remainder[fixed_args + 1:],
                request
            )
|
def function[_find_sub_controllers, parameter[self, remainder, request]]:
constant[
Identifies the correct controller to route to by analyzing the
request URI.
]
variable[method] assign[=] constant[None]
for taget[name[name]] in starred[tuple[[<ast.Constant object at 0x7da20c795f30>, <ast.Constant object at 0x7da20c7942b0>]]] begin[:]
if call[name[hasattr], parameter[name[self], name[name]]] begin[:]
variable[method] assign[=] name[name]
break
if <ast.UnaryOp object at 0x7da20c794580> begin[:]
return[None]
variable[args] assign[=] call[name[self]._get_args_for_controller, parameter[call[name[getattr], parameter[name[self], name[method]]]]]
variable[fixed_args] assign[=] binary_operation[call[name[len], parameter[name[args]]] - call[name[len], parameter[call[name[request].pecan.get, parameter[constant[routing_args], list[[]]]]]]]
variable[var_args] assign[=] call[name[getargspec], parameter[call[name[getattr], parameter[name[self], name[method]]]]].varargs
if name[var_args] begin[:]
for taget[tuple[[<ast.Name object at 0x7da20c7969e0>, <ast.Name object at 0x7da20c796440>]]] in starred[call[name[enumerate], parameter[name[remainder]]]] begin[:]
variable[controller] assign[=] call[name[self]._lookup_child, parameter[name[item]]]
if <ast.BoolOp object at 0x7da20c7961a0> begin[:]
call[name[self]._set_routing_args, parameter[name[request], call[name[remainder]][<ast.Slice object at 0x7da20c795930>]]]
return[call[name[lookup_controller], parameter[name[controller], call[name[remainder]][<ast.Slice object at 0x7da20c794070>], name[request]]]]
|
keyword[def] identifier[_find_sub_controllers] ( identifier[self] , identifier[remainder] , identifier[request] ):
literal[string]
identifier[method] = keyword[None]
keyword[for] identifier[name] keyword[in] ( literal[string] , literal[string] ):
keyword[if] identifier[hasattr] ( identifier[self] , identifier[name] ):
identifier[method] = identifier[name]
keyword[break]
keyword[if] keyword[not] identifier[method] :
keyword[return]
identifier[args] = identifier[self] . identifier[_get_args_for_controller] ( identifier[getattr] ( identifier[self] , identifier[method] ))
identifier[fixed_args] = identifier[len] ( identifier[args] )- identifier[len] (
identifier[request] . identifier[pecan] . identifier[get] ( literal[string] ,[])
)
identifier[var_args] = identifier[getargspec] ( identifier[getattr] ( identifier[self] , identifier[method] )). identifier[varargs]
keyword[if] identifier[var_args] :
keyword[for] identifier[i] , identifier[item] keyword[in] identifier[enumerate] ( identifier[remainder] ):
identifier[controller] = identifier[self] . identifier[_lookup_child] ( identifier[item] )
keyword[if] identifier[controller] keyword[and] keyword[not] identifier[ismethod] ( identifier[controller] ):
identifier[self] . identifier[_set_routing_args] ( identifier[request] , identifier[remainder] [: identifier[i] ])
keyword[return] identifier[lookup_controller] ( identifier[controller] , identifier[remainder] [ identifier[i] + literal[int] :],
identifier[request] )
keyword[elif] identifier[fixed_args] < identifier[len] ( identifier[remainder] ) keyword[and] identifier[hasattr] (
identifier[self] , identifier[remainder] [ identifier[fixed_args] ]
):
identifier[controller] = identifier[self] . identifier[_lookup_child] ( identifier[remainder] [ identifier[fixed_args] ])
keyword[if] keyword[not] identifier[ismethod] ( identifier[controller] ):
identifier[self] . identifier[_set_routing_args] ( identifier[request] , identifier[remainder] [: identifier[fixed_args] ])
keyword[return] identifier[lookup_controller] (
identifier[controller] ,
identifier[remainder] [ identifier[fixed_args] + literal[int] :],
identifier[request]
)
|
def _find_sub_controllers(self, remainder, request):
"""
Identifies the correct controller to route to by analyzing the
request URI.
"""
# need either a get_one or get to parse args
method = None
for name in ('get_one', 'get'):
if hasattr(self, name):
method = name
break # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['name']]
if not method:
return # depends on [control=['if'], data=[]]
# get the args to figure out how much to chop off
args = self._get_args_for_controller(getattr(self, method))
fixed_args = len(args) - len(request.pecan.get('routing_args', []))
var_args = getargspec(getattr(self, method)).varargs
# attempt to locate a sub-controller
if var_args:
for (i, item) in enumerate(remainder):
controller = self._lookup_child(item)
if controller and (not ismethod(controller)):
self._set_routing_args(request, remainder[:i])
return lookup_controller(controller, remainder[i + 1:], request) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]] # depends on [control=['if'], data=[]]
elif fixed_args < len(remainder) and hasattr(self, remainder[fixed_args]):
controller = self._lookup_child(remainder[fixed_args])
if not ismethod(controller):
self._set_routing_args(request, remainder[:fixed_args])
return lookup_controller(controller, remainder[fixed_args + 1:], request) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
|
def coords(self):
    """The (X, Y) coordinates of the tablet tool, in mm from
    the top left corner of the tablet in its current logical orientation
    and whether they have changed in this event.
    Use :meth:`transform_coords` for transforming the axes values into
    a different coordinate space.
    Note:
        On some devices, returned value may be negative or larger than
        the width of the device. See `Out-of-bounds motion events`_
        for more details.
    Returns:
        ((float, float), bool): The current values of the the axes and
        whether they have changed.
    """
    # Hoist the libinput handle/context lookups used by all four calls.
    lib = self._libinput
    handle = self._handle
    changed_x = lib.libinput_event_tablet_tool_x_has_changed(handle)
    changed_y = lib.libinput_event_tablet_tool_y_has_changed(handle)
    position = (
        lib.libinput_event_tablet_tool_get_x(handle),
        lib.libinput_event_tablet_tool_get_y(handle),
    )
    return position, changed_x or changed_y
|
def function[coords, parameter[self]]:
constant[The (X, Y) coordinates of the tablet tool, in mm from
the top left corner of the tablet in its current logical orientation
and whether they have changed in this event.
Use :meth:`transform_coords` for transforming the axes values into
a different coordinate space.
Note:
On some devices, returned value may be negative or larger than
the width of the device. See `Out-of-bounds motion events`_
for more details.
Returns:
((float, float), bool): The current values of the the axes and
whether they have changed.
]
variable[x_changed] assign[=] call[name[self]._libinput.libinput_event_tablet_tool_x_has_changed, parameter[name[self]._handle]]
variable[y_changed] assign[=] call[name[self]._libinput.libinput_event_tablet_tool_y_has_changed, parameter[name[self]._handle]]
variable[x] assign[=] call[name[self]._libinput.libinput_event_tablet_tool_get_x, parameter[name[self]._handle]]
variable[y] assign[=] call[name[self]._libinput.libinput_event_tablet_tool_get_y, parameter[name[self]._handle]]
return[tuple[[<ast.Tuple object at 0x7da204623070>, <ast.BoolOp object at 0x7da204622b30>]]]
|
keyword[def] identifier[coords] ( identifier[self] ):
literal[string]
identifier[x_changed] = identifier[self] . identifier[_libinput] . identifier[libinput_event_tablet_tool_x_has_changed] (
identifier[self] . identifier[_handle] )
identifier[y_changed] = identifier[self] . identifier[_libinput] . identifier[libinput_event_tablet_tool_y_has_changed] (
identifier[self] . identifier[_handle] )
identifier[x] = identifier[self] . identifier[_libinput] . identifier[libinput_event_tablet_tool_get_x] ( identifier[self] . identifier[_handle] )
identifier[y] = identifier[self] . identifier[_libinput] . identifier[libinput_event_tablet_tool_get_y] ( identifier[self] . identifier[_handle] )
keyword[return] ( identifier[x] , identifier[y] ), identifier[x_changed] keyword[or] identifier[y_changed]
|
def coords(self):
"""The (X, Y) coordinates of the tablet tool, in mm from
the top left corner of the tablet in its current logical orientation
and whether they have changed in this event.
Use :meth:`transform_coords` for transforming the axes values into
a different coordinate space.
Note:
On some devices, returned value may be negative or larger than
the width of the device. See `Out-of-bounds motion events`_
for more details.
Returns:
((float, float), bool): The current values of the the axes and
whether they have changed.
"""
x_changed = self._libinput.libinput_event_tablet_tool_x_has_changed(self._handle)
y_changed = self._libinput.libinput_event_tablet_tool_y_has_changed(self._handle)
x = self._libinput.libinput_event_tablet_tool_get_x(self._handle)
y = self._libinput.libinput_event_tablet_tool_get_y(self._handle)
return ((x, y), x_changed or y_changed)
|
def universal_read(fname):
    '''Will open and read a file with universal line endings, trying to decode whatever format it's in (e.g., utf8 or utf16)'''
    # Read raw bytes: chardet.detect expects undecoded data, and the old
    # 'rU' text mode (deprecated, removed in Python 3.11) decoded and
    # byte-mangled multi-byte encodings such as UTF-16 before detection.
    with open(fname, 'rb') as f:
        raw = f.read()
    enc_guess = chardet.detect(raw)
    # Fall back to UTF-8 when detection fails (chardet may return None).
    text = raw.decode(enc_guess['encoding'] or 'utf-8')
    # Normalize universal line endings at the text layer, which is what
    # the 'U' mode used to provide for single-byte encodings.
    return text.replace('\r\n', '\n').replace('\r', '\n')
|
def function[universal_read, parameter[fname]]:
constant[Will open and read a file with universal line endings, trying to decode whatever format it's in (e.g., utf8 or utf16)]
with call[name[open], parameter[name[fname], constant[rU]]] begin[:]
variable[data] assign[=] call[name[f].read, parameter[]]
variable[enc_guess] assign[=] call[name[chardet].detect, parameter[name[data]]]
return[call[name[data].decode, parameter[call[name[enc_guess]][constant[encoding]]]]]
|
keyword[def] identifier[universal_read] ( identifier[fname] ):
literal[string]
keyword[with] identifier[open] ( identifier[fname] , literal[string] ) keyword[as] identifier[f] :
identifier[data] = identifier[f] . identifier[read] ()
identifier[enc_guess] = identifier[chardet] . identifier[detect] ( identifier[data] )
keyword[return] identifier[data] . identifier[decode] ( identifier[enc_guess] [ literal[string] ])
|
def universal_read(fname):
"""Will open and read a file with universal line endings, trying to decode whatever format it's in (e.g., utf8 or utf16)"""
with open(fname, 'rU') as f:
data = f.read() # depends on [control=['with'], data=['f']]
enc_guess = chardet.detect(data)
return data.decode(enc_guess['encoding'])
|
def is_draft(request):
    """
    A request is considered to be in draft mode if:
    - it is for *any* admin resource, since the admin site deals only with
      draft objects and hides the published version from admin users
    - it is for *any* view in *any* app that deals only with draft objects
    - user is a member of the "Content Reviewer" group, since content
      reviewers' sole purpose is to review draft content and they need not
      see the published content
    - the user is a staff member and therefore can see draft versions of
      pages if they wish, and the 'preview' GET parameter flag is included
      to show the draft page is definitely wanted instead of a normal
      published page.
    - the 'preview' GET parameter flag is included with a valid HMAC for
      the requested URL, regardless of authenticated permissions.
    """
    # Requests that are unconditionally draft: admin, API, draft-only
    # views, and anything made by a content reviewer.
    unconditional_checks = (
        PublishingMiddleware.is_admin_request,
        PublishingMiddleware.is_api_request,
        PublishingMiddleware.is_draft_only_view,
        PublishingMiddleware.is_content_reviewer_user,
    )
    if any(check(request) for check in unconditional_checks):
        return True
    # Draft mode explicitly requested: honoured for staff users, or for
    # anyone presenting a valid draft-mode HMAC in the querystring.
    if PublishingMiddleware.is_draft_request(request):
        if PublishingMiddleware.is_staff_user(request):
            return True
        if verify_draft_url(request.get_full_path()):
            return True
    # Otherwise: published mode.
    return False
|
def function[is_draft, parameter[request]]:
constant[
A request is considered to be in draft mode if:
- it is for *any* admin resource, since the admin site deals only with
draft objects and hides the published version from admin users
- it is for *any* view in *any* app that deals only with draft objects
- user is a member of the "Content Reviewer" group, since content
reviewers' sole purpose is to review draft content and they need not
see the published content
- the user is a staff member and therefore can see draft versions of
pages if they wish, and the 'preview' GET parameter flag is included
to show the draft page is definitely wanted instead of a normal
published page.
- the 'preview' GET parameter flag is included with a valid HMAC for
the requested URL, regardless of authenticated permissions.
]
if call[name[PublishingMiddleware].is_admin_request, parameter[name[request]]] begin[:]
return[constant[True]]
if call[name[PublishingMiddleware].is_api_request, parameter[name[request]]] begin[:]
return[constant[True]]
if call[name[PublishingMiddleware].is_draft_only_view, parameter[name[request]]] begin[:]
return[constant[True]]
if call[name[PublishingMiddleware].is_content_reviewer_user, parameter[name[request]]] begin[:]
return[constant[True]]
if call[name[PublishingMiddleware].is_draft_request, parameter[name[request]]] begin[:]
if call[name[PublishingMiddleware].is_staff_user, parameter[name[request]]] begin[:]
return[constant[True]]
if call[name[verify_draft_url], parameter[call[name[request].get_full_path, parameter[]]]] begin[:]
return[constant[True]]
return[constant[False]]
|
keyword[def] identifier[is_draft] ( identifier[request] ):
literal[string]
keyword[if] identifier[PublishingMiddleware] . identifier[is_admin_request] ( identifier[request] ):
keyword[return] keyword[True]
keyword[if] identifier[PublishingMiddleware] . identifier[is_api_request] ( identifier[request] ):
keyword[return] keyword[True]
keyword[if] identifier[PublishingMiddleware] . identifier[is_draft_only_view] ( identifier[request] ):
keyword[return] keyword[True]
keyword[if] identifier[PublishingMiddleware] . identifier[is_content_reviewer_user] ( identifier[request] ):
keyword[return] keyword[True]
keyword[if] identifier[PublishingMiddleware] . identifier[is_draft_request] ( identifier[request] ):
keyword[if] identifier[PublishingMiddleware] . identifier[is_staff_user] ( identifier[request] ):
keyword[return] keyword[True]
keyword[if] identifier[verify_draft_url] ( identifier[request] . identifier[get_full_path] ()):
keyword[return] keyword[True]
keyword[return] keyword[False]
|
def is_draft(request):
"""
A request is considered to be in draft mode if:
- it is for *any* admin resource, since the admin site deals only with
draft objects and hides the published version from admin users
- it is for *any* view in *any* app that deals only with draft objects
- user is a member of the "Content Reviewer" group, since content
reviewers' sole purpose is to review draft content and they need not
see the published content
- the user is a staff member and therefore can see draft versions of
pages if they wish, and the 'preview' GET parameter flag is included
to show the draft page is definitely wanted instead of a normal
published page.
- the 'preview' GET parameter flag is included with a valid HMAC for
the requested URL, regardless of authenticated permissions.
"""
# Admin resource requested.
if PublishingMiddleware.is_admin_request(request):
return True # depends on [control=['if'], data=[]]
# API resource requested.
if PublishingMiddleware.is_api_request(request):
return True # depends on [control=['if'], data=[]]
# Draft-only view requested.
if PublishingMiddleware.is_draft_only_view(request):
return True # depends on [control=['if'], data=[]]
# Content reviewer made request.
if PublishingMiddleware.is_content_reviewer_user(request):
return True # depends on [control=['if'], data=[]]
# Draft mode requested.
if PublishingMiddleware.is_draft_request(request):
# User is staff.
if PublishingMiddleware.is_staff_user(request):
return True # depends on [control=['if'], data=[]]
# Request contains a valid draft mode HMAC in the querystring.
if verify_draft_url(request.get_full_path()):
return True # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
# Not draft mode.
return False
|
def process_form(self, instance, field, form, empty_marker=None,
                 emptyReturnsMarker=False):
    """Return a list of dictionaries fit for ARTemplate/Analyses field
    consumption.

    Side effect: also stores the per-service hidden/visible settings on
    ``instance`` via ``setAnalysisServicesSettings``.
    ``empty_marker`` and ``emptyReturnsMarker`` are part of the field
    widget signature but are unused here.
    """
    value = []
    # selected services
    service_uids = form.get("uids", [])
    # defined partitions
    partitions = form.get("Partition", [])
    # Form submits partitions wrapped in a one-element list; unwrap it,
    # defaulting to an empty mapping.
    partitions = partitions and partitions[0] or {}
    # hidden services
    hidden_services = form.get("Hidden", {})
    # get the service objects
    # NOTE(review): relies on Python 2 semantics — ``map`` must return a
    # list here for the ``services + dependencies`` concatenation below;
    # under Python 3 this would raise. TODO confirm runtime is Python 2.
    services = map(api.get_object_by_uid, service_uids)
    # get dependencies
    dependencies = map(lambda s: s.getServiceDependencies(), services)
    dependencies = list(itertools.chain.from_iterable(dependencies))
    # Merge dependencies and services (set() also de-duplicates)
    services = set(services + dependencies)
    # get the profile
    profile_uid = form.get("AnalysisProfile_uid")
    if profile_uid:
        profile = api.get_object_by_uid(profile_uid)
        # update the services with those from the profile
        services.update(profile.getService())
    as_settings = []
    for service in services:
        service_uid = api.get_uid(service)
        # Services without an explicit partition go to the first one.
        value.append({
            "service_uid": service_uid,
            "partition": partitions.get(service_uid, "part-1")
        })
        # Checkbox semantics: present-and-"on" means hidden.
        hidden = hidden_services.get(service_uid, "") == "on"
        as_settings.append({"uid": service_uid, "hidden": hidden})
    # set the analysis services settings
    instance.setAnalysisServicesSettings(as_settings)
    # This returns the value for the Analyses Schema Field
    return value, {}
|
def function[process_form, parameter[self, instance, field, form, empty_marker, emptyReturnsMarker]]:
constant[Return a list of dictionaries fit for ARTemplate/Analyses field
consumption.
]
variable[value] assign[=] list[[]]
variable[service_uids] assign[=] call[name[form].get, parameter[constant[uids], list[[]]]]
variable[partitions] assign[=] call[name[form].get, parameter[constant[Partition], list[[]]]]
variable[partitions] assign[=] <ast.BoolOp object at 0x7da2047e8d90>
variable[hidden_services] assign[=] call[name[form].get, parameter[constant[Hidden], dictionary[[], []]]]
variable[services] assign[=] call[name[map], parameter[name[api].get_object_by_uid, name[service_uids]]]
variable[dependencies] assign[=] call[name[map], parameter[<ast.Lambda object at 0x7da2047eaa40>, name[services]]]
variable[dependencies] assign[=] call[name[list], parameter[call[name[itertools].chain.from_iterable, parameter[name[dependencies]]]]]
variable[services] assign[=] call[name[set], parameter[binary_operation[name[services] + name[dependencies]]]]
variable[profile_uid] assign[=] call[name[form].get, parameter[constant[AnalysisProfile_uid]]]
if name[profile_uid] begin[:]
variable[profile] assign[=] call[name[api].get_object_by_uid, parameter[name[profile_uid]]]
call[name[services].update, parameter[call[name[profile].getService, parameter[]]]]
variable[as_settings] assign[=] list[[]]
for taget[name[service]] in starred[name[services]] begin[:]
variable[service_uid] assign[=] call[name[api].get_uid, parameter[name[service]]]
call[name[value].append, parameter[dictionary[[<ast.Constant object at 0x7da18f09db10>, <ast.Constant object at 0x7da18f09f9a0>], [<ast.Name object at 0x7da18f09ded0>, <ast.Call object at 0x7da18f09c5b0>]]]]
variable[hidden] assign[=] compare[call[name[hidden_services].get, parameter[name[service_uid], constant[]]] equal[==] constant[on]]
call[name[as_settings].append, parameter[dictionary[[<ast.Constant object at 0x7da18f09d540>, <ast.Constant object at 0x7da18f09f8e0>], [<ast.Name object at 0x7da18f09f700>, <ast.Name object at 0x7da18f09f5e0>]]]]
call[name[instance].setAnalysisServicesSettings, parameter[name[as_settings]]]
return[tuple[[<ast.Name object at 0x7da18f09e350>, <ast.Dict object at 0x7da18f09e080>]]]
|
keyword[def] identifier[process_form] ( identifier[self] , identifier[instance] , identifier[field] , identifier[form] , identifier[empty_marker] = keyword[None] ,
identifier[emptyReturnsMarker] = keyword[False] ):
literal[string]
identifier[value] =[]
identifier[service_uids] = identifier[form] . identifier[get] ( literal[string] ,[])
identifier[partitions] = identifier[form] . identifier[get] ( literal[string] ,[])
identifier[partitions] = identifier[partitions] keyword[and] identifier[partitions] [ literal[int] ] keyword[or] {}
identifier[hidden_services] = identifier[form] . identifier[get] ( literal[string] ,{})
identifier[services] = identifier[map] ( identifier[api] . identifier[get_object_by_uid] , identifier[service_uids] )
identifier[dependencies] = identifier[map] ( keyword[lambda] identifier[s] : identifier[s] . identifier[getServiceDependencies] (), identifier[services] )
identifier[dependencies] = identifier[list] ( identifier[itertools] . identifier[chain] . identifier[from_iterable] ( identifier[dependencies] ))
identifier[services] = identifier[set] ( identifier[services] + identifier[dependencies] )
identifier[profile_uid] = identifier[form] . identifier[get] ( literal[string] )
keyword[if] identifier[profile_uid] :
identifier[profile] = identifier[api] . identifier[get_object_by_uid] ( identifier[profile_uid] )
identifier[services] . identifier[update] ( identifier[profile] . identifier[getService] ())
identifier[as_settings] =[]
keyword[for] identifier[service] keyword[in] identifier[services] :
identifier[service_uid] = identifier[api] . identifier[get_uid] ( identifier[service] )
identifier[value] . identifier[append] ({
literal[string] : identifier[service_uid] ,
literal[string] : identifier[partitions] . identifier[get] ( identifier[service_uid] , literal[string] )
})
identifier[hidden] = identifier[hidden_services] . identifier[get] ( identifier[service_uid] , literal[string] )== literal[string]
identifier[as_settings] . identifier[append] ({ literal[string] : identifier[service_uid] , literal[string] : identifier[hidden] })
identifier[instance] . identifier[setAnalysisServicesSettings] ( identifier[as_settings] )
keyword[return] identifier[value] ,{}
|
def process_form(self, instance, field, form, empty_marker=None, emptyReturnsMarker=False):
"""Return a list of dictionaries fit for ARTemplate/Analyses field
consumption.
"""
value = []
# selected services
service_uids = form.get('uids', [])
# defined partitions
partitions = form.get('Partition', [])
partitions = partitions and partitions[0] or {}
# hidden services
hidden_services = form.get('Hidden', {})
# get the service objects
services = map(api.get_object_by_uid, service_uids)
# get dependencies
dependencies = map(lambda s: s.getServiceDependencies(), services)
dependencies = list(itertools.chain.from_iterable(dependencies))
# Merge dependencies and services
services = set(services + dependencies)
# get the profile
profile_uid = form.get('AnalysisProfile_uid')
if profile_uid:
profile = api.get_object_by_uid(profile_uid)
# update the services with those from the profile
services.update(profile.getService()) # depends on [control=['if'], data=[]]
as_settings = []
for service in services:
service_uid = api.get_uid(service)
value.append({'service_uid': service_uid, 'partition': partitions.get(service_uid, 'part-1')})
hidden = hidden_services.get(service_uid, '') == 'on'
as_settings.append({'uid': service_uid, 'hidden': hidden}) # depends on [control=['for'], data=['service']]
# set the analysis services settings
instance.setAnalysisServicesSettings(as_settings)
# This returns the value for the Analyses Schema Field
return (value, {})
|
def _process_genes(self, limit=None):
    """
    This method processes the KEGG gene IDs.
    The label for the gene is pulled as
    the first symbol in the list of gene symbols;
    the rest are added as synonyms.
    The long-form of the gene name is added as a definition.
    This is hardcoded to just processes human genes.
    Triples created:
    <gene_id> is a SO:gene
    <gene_id> rdfs:label <gene_name>
    :param limit: maximum number of rows to process (None = no limit,
        ignored in test mode)
    :return: None
    """
    LOG.info("Processing genes")
    if self.test_mode:
        graph = self.testgraph
    else:
        graph = self.graph
    model = Model(graph)
    line_counter = 0
    family = Family(graph)
    geno = Genotype(graph)
    raw = '/'.join((self.rawdir, self.files['hsa_genes']['file']))
    with open(raw, 'r', encoding="iso-8859-1") as csvfile:
        filereader = csv.reader(csvfile, delimiter='\t', quotechar='\"')
        for row in filereader:
            line_counter += 1
            (gene_id, gene_name) = row
            gene_id = 'KEGG-'+gene_id.strip()
            # the gene listing has a bunch of labels
            # that are delimited, as:
            # DST, BP240, BPA, BPAG1, CATX-15, CATX15, D6S1101, DMH, DT,
            # EBSB2, HSAN6, MACF2; dystonin; K10382 dystonin
            # it looks like the list is semicolon delimited
            # (symbol, name, gene_class)
            # where the symbol is a comma-delimited list
            # here, we split them up.
            # we will take the first abbreviation and make it the symbol
            # then take the rest as synonyms
            # BUG FIX: the pattern was 'r;' (a literal two-character
            # string, the raw prefix was inside the quotes), which never
            # split anything; split on ';' as intended.
            gene_stuff = re.split(r';', gene_name)
            symbollist = re.split(r',', gene_stuff[0])
            first_symbol = symbollist[0].strip()
            if gene_id not in self.label_hash:
                self.label_hash[gene_id] = first_symbol
            if self.test_mode and gene_id not in self.test_ids['genes']:
                continue
            # Add the gene as a class.
            geno.addGene(gene_id, first_symbol)
            # add the long name as the description
            if len(gene_stuff) > 1:
                description = gene_stuff[1].strip()
                model.addDefinition(gene_id, description)
            # Add the remaining symbols (excluding the first, which is
            # already the label) as synonyms.
            # BUG FIX: enumerate(symbollist, start=1) only changed the
            # counter start and still yielded every symbol, so the label
            # was also added as its own synonym; slice instead.
            for synonym in symbollist[1:]:
                model.addSynonym(gene_id, synonym.strip())
            if len(gene_stuff) > 2:
                ko_part = gene_stuff[2]
                # BUG FIX: the pattern had no capturing group, so
                # len(ko_match.groups()) == 1 was never true and the KO
                # membership was silently never added; capture the KO id.
                ko_match = re.search(r'(K\d+)', ko_part)
                if ko_match is not None and len(ko_match.groups()) == 1:
                    ko = 'KEGG-ko:'+ko_match.group(1)
                    family.addMemberOf(gene_id, ko)
            if not self.test_mode and limit is not None and line_counter > limit:
                break
    LOG.info("Done with genes")
    return
|
def function[_process_genes, parameter[self, limit]]:
constant[
This method processes the KEGG gene IDs.
The label for the gene is pulled as
the first symbol in the list of gene symbols;
the rest are added as synonyms.
The long-form of the gene name is added as a definition.
This is hardcoded to just processes human genes.
Triples created:
<gene_id> is a SO:gene
<gene_id> rdfs:label <gene_name>
:param limit:
:return:
]
call[name[LOG].info, parameter[constant[Processing genes]]]
if name[self].test_mode begin[:]
variable[graph] assign[=] name[self].testgraph
variable[model] assign[=] call[name[Model], parameter[name[graph]]]
variable[line_counter] assign[=] constant[0]
variable[family] assign[=] call[name[Family], parameter[name[graph]]]
variable[geno] assign[=] call[name[Genotype], parameter[name[graph]]]
variable[raw] assign[=] call[constant[/].join, parameter[tuple[[<ast.Attribute object at 0x7da18f58ca00>, <ast.Subscript object at 0x7da18f58cbb0>]]]]
with call[name[open], parameter[name[raw], constant[r]]] begin[:]
variable[filereader] assign[=] call[name[csv].reader, parameter[name[csvfile]]]
for taget[name[row]] in starred[name[filereader]] begin[:]
<ast.AugAssign object at 0x7da18f58c340>
<ast.Tuple object at 0x7da18f58c9a0> assign[=] name[row]
variable[gene_id] assign[=] binary_operation[constant[KEGG-] + call[name[gene_id].strip, parameter[]]]
variable[gene_stuff] assign[=] call[name[re].split, parameter[constant[r;], name[gene_name]]]
variable[symbollist] assign[=] call[name[re].split, parameter[constant[,], call[name[gene_stuff]][constant[0]]]]
variable[first_symbol] assign[=] call[call[name[symbollist]][constant[0]].strip, parameter[]]
if compare[name[gene_id] <ast.NotIn object at 0x7da2590d7190> name[self].label_hash] begin[:]
call[name[self].label_hash][name[gene_id]] assign[=] name[first_symbol]
if <ast.BoolOp object at 0x7da18f58c6a0> begin[:]
continue
call[name[geno].addGene, parameter[name[gene_id], name[first_symbol]]]
if compare[call[name[len], parameter[name[gene_stuff]]] greater[>] constant[1]] begin[:]
variable[description] assign[=] call[call[name[gene_stuff]][constant[1]].strip, parameter[]]
call[name[model].addDefinition, parameter[name[gene_id], name[description]]]
for taget[name[i]] in starred[call[name[enumerate], parameter[name[symbollist]]]] begin[:]
call[name[model].addSynonym, parameter[name[gene_id], call[call[name[i]][constant[1]].strip, parameter[]]]]
if compare[call[name[len], parameter[name[gene_stuff]]] greater[>] constant[2]] begin[:]
variable[ko_part] assign[=] call[name[gene_stuff]][constant[2]]
variable[ko_match] assign[=] call[name[re].search, parameter[constant[K\d+], name[ko_part]]]
if <ast.BoolOp object at 0x7da18f720280> begin[:]
variable[ko] assign[=] binary_operation[constant[KEGG-ko:] + call[name[ko_match].group, parameter[constant[1]]]]
call[name[family].addMemberOf, parameter[name[gene_id], name[ko]]]
if <ast.BoolOp object at 0x7da18f721ae0> begin[:]
break
call[name[LOG].info, parameter[constant[Done with genes]]]
return[None]
|
keyword[def] identifier[_process_genes] ( identifier[self] , identifier[limit] = keyword[None] ):
literal[string]
identifier[LOG] . identifier[info] ( literal[string] )
keyword[if] identifier[self] . identifier[test_mode] :
identifier[graph] = identifier[self] . identifier[testgraph]
keyword[else] :
identifier[graph] = identifier[self] . identifier[graph]
identifier[model] = identifier[Model] ( identifier[graph] )
identifier[line_counter] = literal[int]
identifier[family] = identifier[Family] ( identifier[graph] )
identifier[geno] = identifier[Genotype] ( identifier[graph] )
identifier[raw] = literal[string] . identifier[join] (( identifier[self] . identifier[rawdir] , identifier[self] . identifier[files] [ literal[string] ][ literal[string] ]))
keyword[with] identifier[open] ( identifier[raw] , literal[string] , identifier[encoding] = literal[string] ) keyword[as] identifier[csvfile] :
identifier[filereader] = identifier[csv] . identifier[reader] ( identifier[csvfile] , identifier[delimiter] = literal[string] , identifier[quotechar] = literal[string] )
keyword[for] identifier[row] keyword[in] identifier[filereader] :
identifier[line_counter] += literal[int]
( identifier[gene_id] , identifier[gene_name] )= identifier[row]
identifier[gene_id] = literal[string] + identifier[gene_id] . identifier[strip] ()
identifier[gene_stuff] = identifier[re] . identifier[split] ( literal[string] , identifier[gene_name] )
identifier[symbollist] = identifier[re] . identifier[split] ( literal[string] , identifier[gene_stuff] [ literal[int] ])
identifier[first_symbol] = identifier[symbollist] [ literal[int] ]. identifier[strip] ()
keyword[if] identifier[gene_id] keyword[not] keyword[in] identifier[self] . identifier[label_hash] :
identifier[self] . identifier[label_hash] [ identifier[gene_id] ]= identifier[first_symbol]
keyword[if] identifier[self] . identifier[test_mode] keyword[and] identifier[gene_id] keyword[not] keyword[in] identifier[self] . identifier[test_ids] [ literal[string] ]:
keyword[continue]
identifier[geno] . identifier[addGene] ( identifier[gene_id] , identifier[first_symbol] )
keyword[if] identifier[len] ( identifier[gene_stuff] )> literal[int] :
identifier[description] = identifier[gene_stuff] [ literal[int] ]. identifier[strip] ()
identifier[model] . identifier[addDefinition] ( identifier[gene_id] , identifier[description] )
keyword[for] identifier[i] keyword[in] identifier[enumerate] ( identifier[symbollist] , identifier[start] = literal[int] ):
identifier[model] . identifier[addSynonym] ( identifier[gene_id] , identifier[i] [ literal[int] ]. identifier[strip] ())
keyword[if] identifier[len] ( identifier[gene_stuff] )> literal[int] :
identifier[ko_part] = identifier[gene_stuff] [ literal[int] ]
identifier[ko_match] = identifier[re] . identifier[search] ( literal[string] , identifier[ko_part] )
keyword[if] identifier[ko_match] keyword[is] keyword[not] keyword[None] keyword[and] identifier[len] ( identifier[ko_match] . identifier[groups] ())== literal[int] :
identifier[ko] = literal[string] + identifier[ko_match] . identifier[group] ( literal[int] )
identifier[family] . identifier[addMemberOf] ( identifier[gene_id] , identifier[ko] )
keyword[if] keyword[not] identifier[self] . identifier[test_mode] keyword[and] identifier[limit] keyword[is] keyword[not] keyword[None] keyword[and] identifier[line_counter] > identifier[limit] :
keyword[break]
identifier[LOG] . identifier[info] ( literal[string] )
keyword[return]
|
def _process_genes(self, limit=None):
"""
This method processes the KEGG gene IDs.
The label for the gene is pulled as
the first symbol in the list of gene symbols;
the rest are added as synonyms.
The long-form of the gene name is added as a definition.
This is hardcoded to just processes human genes.
Triples created:
<gene_id> is a SO:gene
<gene_id> rdfs:label <gene_name>
:param limit:
:return:
"""
LOG.info('Processing genes')
if self.test_mode:
graph = self.testgraph # depends on [control=['if'], data=[]]
else:
graph = self.graph
model = Model(graph)
line_counter = 0
family = Family(graph)
geno = Genotype(graph)
raw = '/'.join((self.rawdir, self.files['hsa_genes']['file']))
with open(raw, 'r', encoding='iso-8859-1') as csvfile:
filereader = csv.reader(csvfile, delimiter='\t', quotechar='"')
for row in filereader:
line_counter += 1
(gene_id, gene_name) = row
gene_id = 'KEGG-' + gene_id.strip()
# the gene listing has a bunch of labels
# that are delimited, as:
# DST, BP240, BPA, BPAG1, CATX-15, CATX15, D6S1101, DMH, DT,
# EBSB2, HSAN6, MACF2; dystonin; K10382 dystonin
# it looks like the list is semicolon delimited
# (symbol, name, gene_class)
# where the symbol is a comma-delimited list
# here, we split them up.
# we will take the first abbreviation and make it the symbol
# then take the rest as synonyms
gene_stuff = re.split('r;', gene_name)
symbollist = re.split(',', gene_stuff[0])
first_symbol = symbollist[0].strip()
if gene_id not in self.label_hash:
self.label_hash[gene_id] = first_symbol # depends on [control=['if'], data=['gene_id']]
if self.test_mode and gene_id not in self.test_ids['genes']:
continue # depends on [control=['if'], data=[]]
# Add the gene as a class.
geno.addGene(gene_id, first_symbol)
# add the long name as the description
if len(gene_stuff) > 1:
description = gene_stuff[1].strip()
model.addDefinition(gene_id, description) # depends on [control=['if'], data=[]]
# add the rest of the symbols as synonyms
for i in enumerate(symbollist, start=1):
model.addSynonym(gene_id, i[1].strip()) # depends on [control=['for'], data=['i']]
if len(gene_stuff) > 2:
ko_part = gene_stuff[2]
ko_match = re.search('K\\d+', ko_part)
if ko_match is not None and len(ko_match.groups()) == 1:
ko = 'KEGG-ko:' + ko_match.group(1)
family.addMemberOf(gene_id, ko) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
if not self.test_mode and limit is not None and (line_counter > limit):
break # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['row']] # depends on [control=['with'], data=['csvfile']]
LOG.info('Done with genes')
return
|
def parse_auth_token_from_request(self, auth_header):
    """
    Parse and return the Hawk ``Authorization`` header if it is present
    and well-formed.

    Raises a ``falcon.HTTPUnauthorized`` exception with a descriptive
    message when the header is missing, has no scheme/parameters pair,
    or does not start with the expected scheme prefix.
    """
    # Guard: header must be present at all.
    if not auth_header:
        raise falcon.HTTPUnauthorized(
            description='Missing Authorization Header')

    # Guard: header must contain a scheme followed by parameters.
    parts = auth_header.split(' ', 1)
    if len(parts) < 2:
        raise falcon.HTTPUnauthorized(
            description='Invalid Authorization Header: Missing Scheme or Parameters')

    # Guard: the scheme must match the configured prefix (case-insensitive).
    if parts[0].lower() != self.auth_header_prefix.lower():
        raise falcon.HTTPUnauthorized(
            description='Invalid Authorization Header: '
                        'Must start with {0}'.format(self.auth_header_prefix))

    return auth_header
|
def function[parse_auth_token_from_request, parameter[self, auth_header]]:
constant[
Parses and returns the Hawk Authorization header if it is present and well-formed.
Raises `falcon.HTTPUnauthoried exception` with proper error message
]
if <ast.UnaryOp object at 0x7da18ede6f80> begin[:]
<ast.Raise object at 0x7da18ede5cf0>
<ast.Try object at 0x7da18ede6bf0>
if compare[call[name[auth_header_prefix].lower, parameter[]] not_equal[!=] call[name[self].auth_header_prefix.lower, parameter[]]] begin[:]
<ast.Raise object at 0x7da18ede6ad0>
return[name[auth_header]]
|
keyword[def] identifier[parse_auth_token_from_request] ( identifier[self] , identifier[auth_header] ):
literal[string]
keyword[if] keyword[not] identifier[auth_header] :
keyword[raise] identifier[falcon] . identifier[HTTPUnauthorized] (
identifier[description] = literal[string] )
keyword[try] :
identifier[auth_header_prefix] , identifier[_] = identifier[auth_header] . identifier[split] ( literal[string] , literal[int] )
keyword[except] identifier[ValueError] :
keyword[raise] identifier[falcon] . identifier[HTTPUnauthorized] (
identifier[description] = literal[string] )
keyword[if] identifier[auth_header_prefix] . identifier[lower] ()!= identifier[self] . identifier[auth_header_prefix] . identifier[lower] ():
keyword[raise] identifier[falcon] . identifier[HTTPUnauthorized] (
identifier[description] = literal[string]
literal[string] . identifier[format] ( identifier[self] . identifier[auth_header_prefix] ))
keyword[return] identifier[auth_header]
|
def parse_auth_token_from_request(self, auth_header):
"""
Parses and returns the Hawk Authorization header if it is present and well-formed.
Raises `falcon.HTTPUnauthoried exception` with proper error message
"""
if not auth_header:
raise falcon.HTTPUnauthorized(description='Missing Authorization Header') # depends on [control=['if'], data=[]]
try:
(auth_header_prefix, _) = auth_header.split(' ', 1) # depends on [control=['try'], data=[]]
except ValueError:
raise falcon.HTTPUnauthorized(description='Invalid Authorization Header: Missing Scheme or Parameters') # depends on [control=['except'], data=[]]
if auth_header_prefix.lower() != self.auth_header_prefix.lower():
raise falcon.HTTPUnauthorized(description='Invalid Authorization Header: Must start with {0}'.format(self.auth_header_prefix)) # depends on [control=['if'], data=[]]
return auth_header
|
def docx_text_from_xml(xml: str, config: TextProcessingConfig) -> str:
    """
    Convert the XML tree of a DOCX file to its string contents.

    Args:
        xml: raw XML text
        config: :class:`TextProcessingConfig` control object

    Returns:
        contents as a string
    """
    # Parse the raw XML and walk the resulting tree from depth 0.
    return docx_text_from_xml_node(ElementTree.fromstring(xml), 0, config)
|
def function[docx_text_from_xml, parameter[xml, config]]:
constant[
Converts an XML tree of a DOCX file to string contents.
Args:
xml: raw XML text
config: :class:`TextProcessingConfig` control object
Returns:
contents as a string
]
variable[root] assign[=] call[name[ElementTree].fromstring, parameter[name[xml]]]
return[call[name[docx_text_from_xml_node], parameter[name[root], constant[0], name[config]]]]
|
keyword[def] identifier[docx_text_from_xml] ( identifier[xml] : identifier[str] , identifier[config] : identifier[TextProcessingConfig] )-> identifier[str] :
literal[string]
identifier[root] = identifier[ElementTree] . identifier[fromstring] ( identifier[xml] )
keyword[return] identifier[docx_text_from_xml_node] ( identifier[root] , literal[int] , identifier[config] )
|
def docx_text_from_xml(xml: str, config: TextProcessingConfig) -> str:
"""
Converts an XML tree of a DOCX file to string contents.
Args:
xml: raw XML text
config: :class:`TextProcessingConfig` control object
Returns:
contents as a string
"""
root = ElementTree.fromstring(xml)
return docx_text_from_xml_node(root, 0, config)
|
def update_hmet_card_file(hmet_card_file_path, new_hmet_data_path):
    """This function updates the paths in the HMET card file to the new
    location of the HMET data. This is necessary because the file paths
    are absolute and will need to be updated if moved.
    Args:
        hmet_card_file_path(str): Location of the file used for the HMET_ASCII card.
        new_hmet_data_path(str): Location where the HMET ASCII files are currently.
    Example::
        new_hmet_data_path = "E:\\GSSHA\\new_hmet_directory"
        hmet_card_file_path = "E:\\GSSHA\\hmet_card_file.txt"
        update_hmet_card_file(hmet_card_file_path, new_hmet_data_path)
    """
    tmp_card_path = "{0}_tmp".format(hmet_card_file_path)
    # Start from a clean slate: delete any stale temp file, ignoring
    # "file does not exist" errors.
    try:
        remove(tmp_card_path)
    except OSError:
        pass
    copy(hmet_card_file_path, tmp_card_path)
    # Rewrite every entry so it points into the new HMET data directory,
    # forcing Windows-style CRLF line endings as GSSHA expects.
    with io_open(tmp_card_path, 'w', newline='\r\n') as new_list_file, \
            open(hmet_card_file_path) as original_list_file:
        for old_entry in original_list_file:
            updated_entry = path.join(new_hmet_data_path,
                                      path.basename(old_entry))
            new_list_file.write(u"{0}\n".format(updated_entry))
    # Replace the original card file with the rewritten one.
    try:
        remove(hmet_card_file_path)
    except OSError:
        pass
    rename(tmp_card_path, hmet_card_file_path)
|
def function[update_hmet_card_file, parameter[hmet_card_file_path, new_hmet_data_path]]:
constant[This function updates the paths in the HMET card file to the new
location of the HMET data. This is necessary because the file paths
are absolute and will need to be updated if moved.
Args:
hmet_card_file_path(str): Location of the file used for the HMET_ASCII card.
new_hmet_data_path(str): Location where the HMET ASCII files are currently.
Example::
new_hmet_data_path = "E:\GSSHA\new_hmet_directory"
hmet_card_file_path = "E:\GSSHA\hmet_card_file.txt"
update_hmet_card_file(hmet_card_file_path, new_hmet_data_path)
]
variable[hmet_card_file_path_temp] assign[=] call[constant[{0}_tmp].format, parameter[name[hmet_card_file_path]]]
<ast.Try object at 0x7da1b24ae020>
call[name[copy], parameter[name[hmet_card_file_path], name[hmet_card_file_path_temp]]]
with call[name[io_open], parameter[name[hmet_card_file_path_temp], constant[w]]] begin[:]
with call[name[open], parameter[name[hmet_card_file_path]]] begin[:]
for taget[name[date_path]] in starred[name[old_hmet_list_file]] begin[:]
call[name[out_hmet_list_file].write, parameter[call[constant[{0}
].format, parameter[call[name[path].join, parameter[name[new_hmet_data_path], call[name[path].basename, parameter[name[date_path]]]]]]]]]
<ast.Try object at 0x7da1b24ada50>
call[name[rename], parameter[name[hmet_card_file_path_temp], name[hmet_card_file_path]]]
|
keyword[def] identifier[update_hmet_card_file] ( identifier[hmet_card_file_path] , identifier[new_hmet_data_path] ):
literal[string]
identifier[hmet_card_file_path_temp] = literal[string] . identifier[format] ( identifier[hmet_card_file_path] )
keyword[try] :
identifier[remove] ( identifier[hmet_card_file_path_temp] )
keyword[except] identifier[OSError] :
keyword[pass]
identifier[copy] ( identifier[hmet_card_file_path] , identifier[hmet_card_file_path_temp] )
keyword[with] identifier[io_open] ( identifier[hmet_card_file_path_temp] , literal[string] , identifier[newline] = literal[string] ) keyword[as] identifier[out_hmet_list_file] :
keyword[with] identifier[open] ( identifier[hmet_card_file_path] ) keyword[as] identifier[old_hmet_list_file] :
keyword[for] identifier[date_path] keyword[in] identifier[old_hmet_list_file] :
identifier[out_hmet_list_file] . identifier[write] ( literal[string] . identifier[format] ( identifier[path] . identifier[join] ( identifier[new_hmet_data_path] ,
identifier[path] . identifier[basename] ( identifier[date_path] ))))
keyword[try] :
identifier[remove] ( identifier[hmet_card_file_path] )
keyword[except] identifier[OSError] :
keyword[pass]
identifier[rename] ( identifier[hmet_card_file_path_temp] , identifier[hmet_card_file_path] )
|
def update_hmet_card_file(hmet_card_file_path, new_hmet_data_path):
"""This function updates the paths in the HMET card file to the new
location of the HMET data. This is necessary because the file paths
are absolute and will need to be updated if moved.
Args:
hmet_card_file_path(str): Location of the file used for the HMET_ASCII card.
new_hmet_data_path(str): Location where the HMET ASCII files are currently.
Example::
new_hmet_data_path = "E:\\GSSHA\\new_hmet_directory"
hmet_card_file_path = "E:\\GSSHA\\hmet_card_file.txt"
update_hmet_card_file(hmet_card_file_path, new_hmet_data_path)
"""
hmet_card_file_path_temp = '{0}_tmp'.format(hmet_card_file_path)
try:
remove(hmet_card_file_path_temp) # depends on [control=['try'], data=[]]
except OSError:
pass # depends on [control=['except'], data=[]]
copy(hmet_card_file_path, hmet_card_file_path_temp)
with io_open(hmet_card_file_path_temp, 'w', newline='\r\n') as out_hmet_list_file:
with open(hmet_card_file_path) as old_hmet_list_file:
for date_path in old_hmet_list_file:
out_hmet_list_file.write(u'{0}\n'.format(path.join(new_hmet_data_path, path.basename(date_path)))) # depends on [control=['for'], data=['date_path']] # depends on [control=['with'], data=['old_hmet_list_file']] # depends on [control=['with'], data=['out_hmet_list_file']]
try:
remove(hmet_card_file_path) # depends on [control=['try'], data=[]]
except OSError:
pass # depends on [control=['except'], data=[]]
rename(hmet_card_file_path_temp, hmet_card_file_path)
|
def extend_distribution_substation_overvoltage(network, critical_stations):
    """
    Reinforce MV/LV substations due to voltage issues.
    A parallel standard transformer is installed.
    Parameters
    ----------
    network : :class:`~.grid.network.Network`
    critical_stations : :obj:`dict`
        Dictionary with :class:`~.grid.grids.LVGrid` as key and a
        :pandas:`pandas.DataFrame<dataframe>` with its critical station and
        maximum voltage deviation as value.
        Index of the dataframe is the :class:`~.grid.components.LVStation`
        with over-voltage issues. Columns are 'v_mag_pu' containing the
        maximum voltage deviation as float and 'time_index' containing the
        corresponding time step the over-voltage occured in as
        :pandas:`pandas.Timestamp<timestamp>`.
    Returns
    -------
    Dictionary with lists of added transformers.
    """
    # get parameters for standard transformer
    try:
        standard_transformer = network.equipment_data['lv_trafos'].loc[
            network.config['grid_expansion_standard_equipment'][
                'mv_lv_transformer']]
    except KeyError:
        print('Standard MV/LV transformer is not in equipment list.')
        # BUG FIX: without a standard transformer no reinforcement is
        # possible; previously execution fell through and crashed later
        # with a NameError on `standard_transformer` (or silently did
        # nothing when critical_stations was empty). Re-raise instead.
        raise
    transformers_changes = {'added': {}}
    for grid in critical_stations.keys():
        # get any transformer to get attributes for new transformer from
        station_transformer = grid.station.transformers[0]
        new_transformer = Transformer(
            id='LVStation_{}_transformer_{}'.format(
                str(grid.station.id), str(len(grid.station.transformers) + 1)),
            geom=station_transformer.geom,
            mv_grid=station_transformer.mv_grid,
            grid=station_transformer.grid,
            voltage_op=station_transformer.voltage_op,
            type=copy.deepcopy(standard_transformer))
        # add standard transformer to station and return value
        grid.station.add_transformer(new_transformer)
        transformers_changes['added'][grid.station] = [new_transformer]
    if transformers_changes['added']:
        # BUG FIX: the message said "overloading issues", but this
        # function (see docstring) reinforces due to over-voltage.
        logger.debug("==> {} LV station(s) has/have been reinforced ".format(
            str(len(transformers_changes['added']))) +
            "due to over-voltage issues.")
    return transformers_changes
|
def function[extend_distribution_substation_overvoltage, parameter[network, critical_stations]]:
constant[
Reinforce MV/LV substations due to voltage issues.
A parallel standard transformer is installed.
Parameters
----------
network : :class:`~.grid.network.Network`
critical_stations : :obj:`dict`
Dictionary with :class:`~.grid.grids.LVGrid` as key and a
:pandas:`pandas.DataFrame<dataframe>` with its critical station and
maximum voltage deviation as value.
Index of the dataframe is the :class:`~.grid.components.LVStation`
with over-voltage issues. Columns are 'v_mag_pu' containing the
maximum voltage deviation as float and 'time_index' containing the
corresponding time step the over-voltage occured in as
:pandas:`pandas.Timestamp<timestamp>`.
Returns
-------
Dictionary with lists of added transformers.
]
<ast.Try object at 0x7da1b02b3f10>
variable[transformers_changes] assign[=] dictionary[[<ast.Constant object at 0x7da1b02b3cd0>], [<ast.Dict object at 0x7da1b02b3d00>]]
for taget[name[grid]] in starred[call[name[critical_stations].keys, parameter[]]] begin[:]
variable[station_transformer] assign[=] call[name[grid].station.transformers][constant[0]]
variable[new_transformer] assign[=] call[name[Transformer], parameter[]]
call[name[grid].station.add_transformer, parameter[name[new_transformer]]]
call[call[name[transformers_changes]][constant[added]]][name[grid].station] assign[=] list[[<ast.Name object at 0x7da1b02b2b30>]]
if call[name[transformers_changes]][constant[added]] begin[:]
call[name[logger].debug, parameter[binary_operation[call[constant[==> {} LV station(s) has/have been reinforced ].format, parameter[call[name[str], parameter[call[name[len], parameter[call[name[transformers_changes]][constant[added]]]]]]]] + constant[due to overloading issues.]]]]
return[name[transformers_changes]]
|
keyword[def] identifier[extend_distribution_substation_overvoltage] ( identifier[network] , identifier[critical_stations] ):
literal[string]
keyword[try] :
identifier[standard_transformer] = identifier[network] . identifier[equipment_data] [ literal[string] ]. identifier[loc] [
identifier[network] . identifier[config] [ literal[string] ][
literal[string] ]]
keyword[except] identifier[KeyError] :
identifier[print] ( literal[string] )
identifier[transformers_changes] ={ literal[string] :{}}
keyword[for] identifier[grid] keyword[in] identifier[critical_stations] . identifier[keys] ():
identifier[station_transformer] = identifier[grid] . identifier[station] . identifier[transformers] [ literal[int] ]
identifier[new_transformer] = identifier[Transformer] (
identifier[id] = literal[string] . identifier[format] (
identifier[str] ( identifier[grid] . identifier[station] . identifier[id] ), identifier[str] ( identifier[len] ( identifier[grid] . identifier[station] . identifier[transformers] )+ literal[int] )),
identifier[geom] = identifier[station_transformer] . identifier[geom] ,
identifier[mv_grid] = identifier[station_transformer] . identifier[mv_grid] ,
identifier[grid] = identifier[station_transformer] . identifier[grid] ,
identifier[voltage_op] = identifier[station_transformer] . identifier[voltage_op] ,
identifier[type] = identifier[copy] . identifier[deepcopy] ( identifier[standard_transformer] ))
identifier[grid] . identifier[station] . identifier[add_transformer] ( identifier[new_transformer] )
identifier[transformers_changes] [ literal[string] ][ identifier[grid] . identifier[station] ]=[ identifier[new_transformer] ]
keyword[if] identifier[transformers_changes] [ literal[string] ]:
identifier[logger] . identifier[debug] ( literal[string] . identifier[format] (
identifier[str] ( identifier[len] ( identifier[transformers_changes] [ literal[string] ])))+
literal[string] )
keyword[return] identifier[transformers_changes]
|
def extend_distribution_substation_overvoltage(network, critical_stations):
    """
    Reinforce MV/LV substations due to voltage issues.
    A parallel standard transformer is installed.
    Parameters
    ----------
    network : :class:`~.grid.network.Network`
    critical_stations : :obj:`dict`
        Dictionary with :class:`~.grid.grids.LVGrid` as key and a
        :pandas:`pandas.DataFrame<dataframe>` with its critical station and
        maximum voltage deviation as value.
        Index of the dataframe is the :class:`~.grid.components.LVStation`
        with over-voltage issues. Columns are 'v_mag_pu' containing the
        maximum voltage deviation as float and 'time_index' containing the
        corresponding time step the over-voltage occured in as
        :pandas:`pandas.Timestamp<timestamp>`.
    Returns
    -------
    :obj:`dict`
        Dictionary with key 'added' mapping each reinforced station to a
        list containing the newly installed transformer.
    Raises
    ------
    KeyError
        If the standard MV/LV transformer is not found in the equipment data.
    """
    # get parameters for standard transformer
    try:
        standard_transformer = network.equipment_data['lv_trafos'].loc[
            network.config['grid_expansion_standard_equipment'][
                'mv_lv_transformer']]
    except KeyError:
        print('Standard MV/LV transformer is not in equipment list.')
        # Without the standard transformer the loop below would crash with an
        # unrelated NameError; re-raise so callers see the real config error.
        raise
    transformers_changes = {'added': {}}
    for grid in critical_stations:
        # use any existing transformer as template for location/grid attributes
        station_transformer = grid.station.transformers[0]
        new_transformer = Transformer(
            id='LVStation_{}_transformer_{}'.format(
                str(grid.station.id),
                str(len(grid.station.transformers) + 1)),
            geom=station_transformer.geom,
            mv_grid=station_transformer.mv_grid,
            grid=station_transformer.grid,
            voltage_op=station_transformer.voltage_op,
            type=copy.deepcopy(standard_transformer))
        # add standard transformer to station and record the change
        grid.station.add_transformer(new_transformer)
        transformers_changes['added'][grid.station] = [new_transformer]
    if transformers_changes['added']:
        # was "overloading issues" — this function handles voltage issues
        logger.debug(
            '==> {} LV station(s) has/have been reinforced '.format(
                str(len(transformers_changes['added']))) +
            'due to voltage issues.')
    return transformers_changes
|
def subscribe(self, callback):
    """Invoke `callback` for all distributions (including existing ones)"""
    # Idempotent: a callback that is already registered is neither added
    # again nor replayed.
    if callback not in self.callbacks:
        self.callbacks.append(callback)
        # Replay the current contents so a late subscriber misses nothing.
        for existing in self:
            callback(existing)
|
def function[subscribe, parameter[self, callback]]:
constant[Invoke `callback` for all distributions (including existing ones)]
if compare[name[callback] in name[self].callbacks] begin[:]
return[None]
call[name[self].callbacks.append, parameter[name[callback]]]
for taget[name[dist]] in starred[name[self]] begin[:]
call[name[callback], parameter[name[dist]]]
|
keyword[def] identifier[subscribe] ( identifier[self] , identifier[callback] ):
literal[string]
keyword[if] identifier[callback] keyword[in] identifier[self] . identifier[callbacks] :
keyword[return]
identifier[self] . identifier[callbacks] . identifier[append] ( identifier[callback] )
keyword[for] identifier[dist] keyword[in] identifier[self] :
identifier[callback] ( identifier[dist] )
|
def subscribe(self, callback):
    """Invoke `callback` for all distributions (including existing ones)"""
    # Idempotent: an already-registered callback is not added or replayed again.
    if callback in self.callbacks:
        return # depends on [control=['if'], data=[]]
    self.callbacks.append(callback)
    # Replay every existing distribution so late subscribers miss nothing.
    for dist in self:
        callback(dist) # depends on [control=['for'], data=['dist']]
|
def has_split(self, split_name):
    """ Checks whether or not the split with the given name exists.
    Parameters
    ----------
    split_name : str
        name of the split
    Returns
    -------
    bool
        ``True`` iff ``<split_dir>/<split_name>`` exists on disk.
    """
    # os.path.exists already yields the bool we need; no if/True/False dance.
    return os.path.exists(os.path.join(self.split_dir, split_name))
|
def function[has_split, parameter[self, split_name]]:
constant[ Checks whether or not the split with the given name exists.
Parameters
----------
split_name : str
name of the split
]
if call[name[os].path.exists, parameter[call[name[os].path.join, parameter[name[self].split_dir, name[split_name]]]]] begin[:]
return[constant[True]]
return[constant[False]]
|
keyword[def] identifier[has_split] ( identifier[self] , identifier[split_name] ):
literal[string]
keyword[if] identifier[os] . identifier[path] . identifier[exists] ( identifier[os] . identifier[path] . identifier[join] ( identifier[self] . identifier[split_dir] , identifier[split_name] )):
keyword[return] keyword[True]
keyword[return] keyword[False]
|
def has_split(self, split_name):
    """ Checks whether or not the split with the given name exists.
    Parameters
    ----------
    split_name : str
        name of the split
    """
    # A split is considered present iff <split_dir>/<split_name> exists on disk.
    if os.path.exists(os.path.join(self.split_dir, split_name)):
        return True # depends on [control=['if'], data=[]]
    return False
|
def offset(self, offset, allow_negative=False, min_begin_value=None, max_end_value=None):
    """
    Move this interval by the given shift ``offset``.
    The begin and end time points of the translated interval
    are ensured to be non-negative
    (i.e., they are maxed with ``0.000``),
    unless ``allow_negative`` is set to ``True``.
    :param offset: the shift to be applied
    :type offset: :class:`~aeneas.exacttiming.TimeValue`
    :param allow_negative: if ``True``, allow the translated interval to have negative extrema
    :type allow_negative: bool
    :param min_begin_value: if not ``None``, specify the minimum value for the begin of the translated interval
    :type min_begin_value: :class:`~aeneas.exacttiming.TimeValue`
    :param max_end_value: if not ``None``, specify the maximum value for the end of the translated interval
    :type max_end_value: :class:`~aeneas.exacttiming.TimeValue`
    :raises TypeError: if ``offset`` is not an instance of ``TimeValue``
    :rtype: :class:`~aeneas.exacttiming.TimeInterval`
    """
    if not isinstance(offset, TimeValue):
        raise TypeError(u"offset is not an instance of TimeValue")
    # Mutates self in place and returns it (fluent style).
    self.begin += offset
    self.end += offset
    if not allow_negative:
        # Clamp both extrema at zero so the interval stays non-negative.
        self.begin = max(self.begin, TimeValue("0.000"))
        self.end = max(self.end, TimeValue("0.000"))
    if (min_begin_value is not None) and (max_end_value is not None):
        # NOTE(review): both bounds must be supplied for either to apply;
        # begin is clamped into [min_begin_value, max_end_value] while end
        # is only capped from above — confirm this asymmetry is intended.
        self.begin = min(max(self.begin, min_begin_value), max_end_value)
        self.end = min(self.end, max_end_value)
    return self
|
def function[offset, parameter[self, offset, allow_negative, min_begin_value, max_end_value]]:
constant[
Move this interval by the given shift ``offset``.
The begin and end time points of the translated interval
are ensured to be non-negative
(i.e., they are maxed with ``0.000``),
unless ``allow_negative`` is set to ``True``.
:param offset: the shift to be applied
:type offset: :class:`~aeneas.exacttiming.TimeValue`
:param allow_negative: if ``True``, allow the translated interval to have negative extrema
:type allow_negative: bool
:param min_begin_value: if not ``None``, specify the minimum value for the begin of the translated interval
:type min_begin_value: :class:`~aeneas.exacttiming.TimeValue`
:param max_begin_value: if not ``None``, specify the maximum value for the end of the translated interval
:type max_begin_value: :class:`~aeneas.exacttiming.TimeValue`
:raises TypeError: if ``offset`` is not an instance of ``TimeValue``
:rtype: :class:`~aeneas.exacttiming.TimeInterval`
]
if <ast.UnaryOp object at 0x7da18bc717e0> begin[:]
<ast.Raise object at 0x7da18bc70b80>
<ast.AugAssign object at 0x7da18bc72ec0>
<ast.AugAssign object at 0x7da18bc71d80>
if <ast.UnaryOp object at 0x7da18bc71c90> begin[:]
name[self].begin assign[=] call[name[max], parameter[name[self].begin, call[name[TimeValue], parameter[constant[0.000]]]]]
name[self].end assign[=] call[name[max], parameter[name[self].end, call[name[TimeValue], parameter[constant[0.000]]]]]
if <ast.BoolOp object at 0x7da18bc71f00> begin[:]
name[self].begin assign[=] call[name[min], parameter[call[name[max], parameter[name[self].begin, name[min_begin_value]]], name[max_end_value]]]
name[self].end assign[=] call[name[min], parameter[name[self].end, name[max_end_value]]]
return[name[self]]
|
keyword[def] identifier[offset] ( identifier[self] , identifier[offset] , identifier[allow_negative] = keyword[False] , identifier[min_begin_value] = keyword[None] , identifier[max_end_value] = keyword[None] ):
literal[string]
keyword[if] keyword[not] identifier[isinstance] ( identifier[offset] , identifier[TimeValue] ):
keyword[raise] identifier[TypeError] ( literal[string] )
identifier[self] . identifier[begin] += identifier[offset]
identifier[self] . identifier[end] += identifier[offset]
keyword[if] keyword[not] identifier[allow_negative] :
identifier[self] . identifier[begin] = identifier[max] ( identifier[self] . identifier[begin] , identifier[TimeValue] ( literal[string] ))
identifier[self] . identifier[end] = identifier[max] ( identifier[self] . identifier[end] , identifier[TimeValue] ( literal[string] ))
keyword[if] ( identifier[min_begin_value] keyword[is] keyword[not] keyword[None] ) keyword[and] ( identifier[max_end_value] keyword[is] keyword[not] keyword[None] ):
identifier[self] . identifier[begin] = identifier[min] ( identifier[max] ( identifier[self] . identifier[begin] , identifier[min_begin_value] ), identifier[max_end_value] )
identifier[self] . identifier[end] = identifier[min] ( identifier[self] . identifier[end] , identifier[max_end_value] )
keyword[return] identifier[self]
|
def offset(self, offset, allow_negative=False, min_begin_value=None, max_end_value=None):
    """Move this interval by the given shift ``offset``.

    Begin/end of the translated interval are clamped at ``0.000`` unless
    ``allow_negative`` is ``True``; when both ``min_begin_value`` and
    ``max_end_value`` are given, the extrema are further clamped into that
    range. Mutates ``self`` and returns it.

    :param offset: the shift to be applied (``TimeValue``)
    :raises TypeError: if ``offset`` is not an instance of ``TimeValue``
    :rtype: ``TimeInterval``
    """
    if not isinstance(offset, TimeValue):
        raise TypeError(u'offset is not an instance of TimeValue') # depends on [control=['if'], data=[]]
    self.begin += offset
    self.end += offset
    if not allow_negative:
        # Clamp both extrema at zero so the interval stays non-negative.
        self.begin = max(self.begin, TimeValue('0.000'))
        self.end = max(self.end, TimeValue('0.000')) # depends on [control=['if'], data=[]]
    if min_begin_value is not None and max_end_value is not None:
        # NOTE(review): end is only capped from above here — confirm intended.
        self.begin = min(max(self.begin, min_begin_value), max_end_value)
        self.end = min(self.end, max_end_value) # depends on [control=['if'], data=[]]
    return self
|
def get_nameserver_detail_output_show_nameserver_nameserver_nodename(self, **kwargs):
    """Auto Generated Code.

    Build the XML request tree for ``get-nameserver-detail`` filtered by
    nameserver port-id and node-name, then hand it to the transport callback.

    :param kwargs: must contain ``nameserver_portid`` and
        ``nameserver_nodename``; may contain ``callback`` to override
        ``self._callback``.
    :raises KeyError: if a required kwarg is missing.
    :return: whatever the callback returns for the built request tree.
    """
    # The original built a throwaway ET.Element("config") that was
    # immediately rebound; the request root is get_nameserver_detail.
    config = ET.Element("get_nameserver_detail")
    output = ET.SubElement(config, "output")
    show_nameserver = ET.SubElement(output, "show-nameserver")
    nameserver_portid_key = ET.SubElement(show_nameserver, "nameserver-portid")
    nameserver_portid_key.text = kwargs.pop('nameserver_portid')
    nameserver_nodename = ET.SubElement(show_nameserver, "nameserver-nodename")
    nameserver_nodename.text = kwargs.pop('nameserver_nodename')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
|
def function[get_nameserver_detail_output_show_nameserver_nameserver_nodename, parameter[self]]:
constant[Auto Generated Code
]
variable[config] assign[=] call[name[ET].Element, parameter[constant[config]]]
variable[get_nameserver_detail] assign[=] call[name[ET].Element, parameter[constant[get_nameserver_detail]]]
variable[config] assign[=] name[get_nameserver_detail]
variable[output] assign[=] call[name[ET].SubElement, parameter[name[get_nameserver_detail], constant[output]]]
variable[show_nameserver] assign[=] call[name[ET].SubElement, parameter[name[output], constant[show-nameserver]]]
variable[nameserver_portid_key] assign[=] call[name[ET].SubElement, parameter[name[show_nameserver], constant[nameserver-portid]]]
name[nameserver_portid_key].text assign[=] call[name[kwargs].pop, parameter[constant[nameserver_portid]]]
variable[nameserver_nodename] assign[=] call[name[ET].SubElement, parameter[name[show_nameserver], constant[nameserver-nodename]]]
name[nameserver_nodename].text assign[=] call[name[kwargs].pop, parameter[constant[nameserver_nodename]]]
variable[callback] assign[=] call[name[kwargs].pop, parameter[constant[callback], name[self]._callback]]
return[call[name[callback], parameter[name[config]]]]
|
keyword[def] identifier[get_nameserver_detail_output_show_nameserver_nameserver_nodename] ( identifier[self] ,** identifier[kwargs] ):
literal[string]
identifier[config] = identifier[ET] . identifier[Element] ( literal[string] )
identifier[get_nameserver_detail] = identifier[ET] . identifier[Element] ( literal[string] )
identifier[config] = identifier[get_nameserver_detail]
identifier[output] = identifier[ET] . identifier[SubElement] ( identifier[get_nameserver_detail] , literal[string] )
identifier[show_nameserver] = identifier[ET] . identifier[SubElement] ( identifier[output] , literal[string] )
identifier[nameserver_portid_key] = identifier[ET] . identifier[SubElement] ( identifier[show_nameserver] , literal[string] )
identifier[nameserver_portid_key] . identifier[text] = identifier[kwargs] . identifier[pop] ( literal[string] )
identifier[nameserver_nodename] = identifier[ET] . identifier[SubElement] ( identifier[show_nameserver] , literal[string] )
identifier[nameserver_nodename] . identifier[text] = identifier[kwargs] . identifier[pop] ( literal[string] )
identifier[callback] = identifier[kwargs] . identifier[pop] ( literal[string] , identifier[self] . identifier[_callback] )
keyword[return] identifier[callback] ( identifier[config] )
|
def get_nameserver_detail_output_show_nameserver_nameserver_nodename(self, **kwargs):
    """Auto Generated Code.

    Builds the XML request tree for ``get-nameserver-detail`` filtered by
    nameserver port-id and node-name, then passes it to the transport
    callback (``callback`` kwarg, defaulting to ``self._callback``).
    """
    config = ET.Element('config')  # NOTE(review): dead value — rebound below
    get_nameserver_detail = ET.Element('get_nameserver_detail')
    config = get_nameserver_detail
    output = ET.SubElement(get_nameserver_detail, 'output')
    show_nameserver = ET.SubElement(output, 'show-nameserver')
    nameserver_portid_key = ET.SubElement(show_nameserver, 'nameserver-portid')
    # raises KeyError if the required kwarg is missing
    nameserver_portid_key.text = kwargs.pop('nameserver_portid')
    nameserver_nodename = ET.SubElement(show_nameserver, 'nameserver-nodename')
    nameserver_nodename.text = kwargs.pop('nameserver_nodename')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
|
def create_upload_specifications(ctx_cli_options, config):
    # type: (dict, dict) -> List[blobxfer.models.upload.Specification]
    """Create a list of Upload Specification objects from configuration
    :param dict ctx_cli_options: cli options
    :param dict config: config dict
    :rtype: list
    :return: list of Upload Specification objects
    """
    # '_action' presumably names the active sub-command whose option dict
    # applies here — TODO confirm against the cli context builder.
    cli_conf = ctx_cli_options[ctx_cli_options['_action']]
    cli_options = cli_conf['options']
    specs = []
    # one Specification is produced per entry in the 'upload' config list
    for conf in config['upload']:
        if 'options' in conf:
            conf_options = conf['options']
        else:
            conf_options = {}
        # create upload options
        # NOTE(review): _merge_setting presumably prefers cli values over
        # config-file values — confirm in its definition.
        mode = _merge_setting(
            cli_options, conf_options, 'mode', default='auto').lower()
        if mode == 'auto':
            mode = blobxfer.models.azure.StorageModes.Auto
        elif mode == 'append':
            mode = blobxfer.models.azure.StorageModes.Append
        elif mode == 'block':
            mode = blobxfer.models.azure.StorageModes.Block
        elif mode == 'file':
            mode = blobxfer.models.azure.StorageModes.File
        elif mode == 'page':
            mode = blobxfer.models.azure.StorageModes.Page
        else:
            raise ValueError('unknown mode: {}'.format(mode))
        # load RSA public key PEM if specified
        rpk = _merge_setting(cli_options, conf_options, 'rsa_public_key')
        if blobxfer.util.is_not_empty(rpk):
            rpk = blobxfer.operations.crypto.load_rsa_public_key_file(rpk)
        if rpk is None:
            # load RSA private key PEM file if specified; only the public
            # half is kept for the upload path
            rpk = _merge_setting(
                cli_options, conf_options, 'rsa_private_key')
            if blobxfer.util.is_not_empty(rpk):
                rpkp = _merge_setting(
                    cli_options, conf_options, 'rsa_private_key_passphrase')
                rpk = blobxfer.operations.crypto.load_rsa_private_key_file(
                    rpk, rpkp)
                rpk = rpk.public_key()
            else:
                rpk = None
        # create local source paths
        lsp = blobxfer.models.upload.LocalSourcePath()
        lsp.add_paths(conf['source'])
        # include/exclude filters merge from the entry itself, not 'options'
        incl = _merge_setting(cli_conf, conf, 'include', default=None)
        if blobxfer.util.is_not_empty(incl):
            lsp.add_includes(incl)
        excl = _merge_setting(cli_conf, conf, 'exclude', default=None)
        if blobxfer.util.is_not_empty(excl):
            lsp.add_excludes(excl)
        # create specification
        conf_sfp = conf_options.get('store_file_properties', {})
        cli_sfp = cli_options['store_file_properties']
        conf_vio = conf_options.get('vectored_io', {})
        cli_vio = cli_options['vectored_io']
        conf_sod = conf_options.get('skip_on', {})
        cli_sod = cli_options['skip_on']
        us = blobxfer.models.upload.Specification(
            upload_options=blobxfer.models.options.Upload(
                access_tier=_merge_setting(
                    cli_options, conf_options, 'access_tier', default=None),
                chunk_size_bytes=_merge_setting(
                    cli_options, conf_options, 'chunk_size_bytes',
                    default=0),
                delete_extraneous_destination=_merge_setting(
                    cli_options, conf_options,
                    'delete_extraneous_destination', default=False),
                mode=mode,
                one_shot_bytes=_merge_setting(
                    cli_options, conf_options, 'one_shot_bytes', default=0),
                overwrite=_merge_setting(
                    cli_options, conf_options, 'overwrite', default=True),
                recursive=_merge_setting(
                    cli_options, conf_options, 'recursive', default=True),
                rename=_merge_setting(
                    cli_options, conf_options, 'rename', default=False),
                rsa_public_key=rpk,
                store_file_properties=blobxfer.models.options.FileProperties(
                    attributes=_merge_setting(
                        cli_sfp, conf_sfp, 'attributes', default=False),
                    cache_control=_merge_setting(
                        cli_sfp, conf_sfp, 'cache_control', default=None),
                    lmt=None,
                    md5=_merge_setting(
                        cli_sfp, conf_sfp, 'md5', default=False),
                ),
                stdin_as_page_blob_size=_merge_setting(
                    cli_options, conf_options, 'stdin_as_page_blob_size',
                    default=0),
                strip_components=_merge_setting(
                    cli_options, conf_options, 'strip_components',
                    default=0),
                vectored_io=blobxfer.models.options.VectoredIo(
                    # 1073741824 bytes = 1 GiB default stripe chunk
                    stripe_chunk_size_bytes=_merge_setting(
                        cli_vio, conf_vio, 'stripe_chunk_size_bytes',
                        default=1073741824),
                    distribution_mode=blobxfer.
                    models.upload.VectoredIoDistributionMode(
                        _merge_setting(
                            cli_vio, conf_vio, 'distribution_mode',
                            default='disabled').lower()),
                ),
            ),
            skip_on_options=blobxfer.models.options.SkipOn(
                filesize_match=_merge_setting(
                    cli_sod, conf_sod, 'filesize_match', default=False),
                lmt_ge=_merge_setting(
                    cli_sod, conf_sod, 'lmt_ge', default=False),
                md5_match=_merge_setting(
                    cli_sod, conf_sod, 'md5_match', default=False),
            ),
            local_source_path=lsp,
        )
        # create remote destination paths
        # each destination entry must be a single {storage_account: path} pair
        for dst in conf['destination']:
            if len(dst) != 1:
                raise RuntimeError(
                    'invalid number of destination pairs specified per entry')
            sa = next(iter(dst))
            adp = blobxfer.operations.azure.DestinationPath()
            adp.add_path_with_storage_account(dst[sa], sa)
            us.add_azure_destination_path(adp)
        # append spec to list
        specs.append(us)
    return specs
|
def function[create_upload_specifications, parameter[ctx_cli_options, config]]:
constant[Create a list of Upload Specification objects from configuration
:param dict ctx_cli_options: cli options
:param dict config: config dict
:rtype: list
:return: list of Upload Specification objects
]
variable[cli_conf] assign[=] call[name[ctx_cli_options]][call[name[ctx_cli_options]][constant[_action]]]
variable[cli_options] assign[=] call[name[cli_conf]][constant[options]]
variable[specs] assign[=] list[[]]
for taget[name[conf]] in starred[call[name[config]][constant[upload]]] begin[:]
if compare[constant[options] in name[conf]] begin[:]
variable[conf_options] assign[=] call[name[conf]][constant[options]]
variable[mode] assign[=] call[call[name[_merge_setting], parameter[name[cli_options], name[conf_options], constant[mode]]].lower, parameter[]]
if compare[name[mode] equal[==] constant[auto]] begin[:]
variable[mode] assign[=] name[blobxfer].models.azure.StorageModes.Auto
variable[rpk] assign[=] call[name[_merge_setting], parameter[name[cli_options], name[conf_options], constant[rsa_public_key]]]
if call[name[blobxfer].util.is_not_empty, parameter[name[rpk]]] begin[:]
variable[rpk] assign[=] call[name[blobxfer].operations.crypto.load_rsa_public_key_file, parameter[name[rpk]]]
if compare[name[rpk] is constant[None]] begin[:]
variable[rpk] assign[=] call[name[_merge_setting], parameter[name[cli_options], name[conf_options], constant[rsa_private_key]]]
if call[name[blobxfer].util.is_not_empty, parameter[name[rpk]]] begin[:]
variable[rpkp] assign[=] call[name[_merge_setting], parameter[name[cli_options], name[conf_options], constant[rsa_private_key_passphrase]]]
variable[rpk] assign[=] call[name[blobxfer].operations.crypto.load_rsa_private_key_file, parameter[name[rpk], name[rpkp]]]
variable[rpk] assign[=] call[name[rpk].public_key, parameter[]]
variable[lsp] assign[=] call[name[blobxfer].models.upload.LocalSourcePath, parameter[]]
call[name[lsp].add_paths, parameter[call[name[conf]][constant[source]]]]
variable[incl] assign[=] call[name[_merge_setting], parameter[name[cli_conf], name[conf], constant[include]]]
if call[name[blobxfer].util.is_not_empty, parameter[name[incl]]] begin[:]
call[name[lsp].add_includes, parameter[name[incl]]]
variable[excl] assign[=] call[name[_merge_setting], parameter[name[cli_conf], name[conf], constant[exclude]]]
if call[name[blobxfer].util.is_not_empty, parameter[name[excl]]] begin[:]
call[name[lsp].add_excludes, parameter[name[excl]]]
variable[conf_sfp] assign[=] call[name[conf_options].get, parameter[constant[store_file_properties], dictionary[[], []]]]
variable[cli_sfp] assign[=] call[name[cli_options]][constant[store_file_properties]]
variable[conf_vio] assign[=] call[name[conf_options].get, parameter[constant[vectored_io], dictionary[[], []]]]
variable[cli_vio] assign[=] call[name[cli_options]][constant[vectored_io]]
variable[conf_sod] assign[=] call[name[conf_options].get, parameter[constant[skip_on], dictionary[[], []]]]
variable[cli_sod] assign[=] call[name[cli_options]][constant[skip_on]]
variable[us] assign[=] call[name[blobxfer].models.upload.Specification, parameter[]]
for taget[name[dst]] in starred[call[name[conf]][constant[destination]]] begin[:]
if compare[call[name[len], parameter[name[dst]]] not_equal[!=] constant[1]] begin[:]
<ast.Raise object at 0x7da207f9b910>
variable[sa] assign[=] call[name[next], parameter[call[name[iter], parameter[name[dst]]]]]
variable[adp] assign[=] call[name[blobxfer].operations.azure.DestinationPath, parameter[]]
call[name[adp].add_path_with_storage_account, parameter[call[name[dst]][name[sa]], name[sa]]]
call[name[us].add_azure_destination_path, parameter[name[adp]]]
call[name[specs].append, parameter[name[us]]]
return[name[specs]]
|
keyword[def] identifier[create_upload_specifications] ( identifier[ctx_cli_options] , identifier[config] ):
literal[string]
identifier[cli_conf] = identifier[ctx_cli_options] [ identifier[ctx_cli_options] [ literal[string] ]]
identifier[cli_options] = identifier[cli_conf] [ literal[string] ]
identifier[specs] =[]
keyword[for] identifier[conf] keyword[in] identifier[config] [ literal[string] ]:
keyword[if] literal[string] keyword[in] identifier[conf] :
identifier[conf_options] = identifier[conf] [ literal[string] ]
keyword[else] :
identifier[conf_options] ={}
identifier[mode] = identifier[_merge_setting] (
identifier[cli_options] , identifier[conf_options] , literal[string] , identifier[default] = literal[string] ). identifier[lower] ()
keyword[if] identifier[mode] == literal[string] :
identifier[mode] = identifier[blobxfer] . identifier[models] . identifier[azure] . identifier[StorageModes] . identifier[Auto]
keyword[elif] identifier[mode] == literal[string] :
identifier[mode] = identifier[blobxfer] . identifier[models] . identifier[azure] . identifier[StorageModes] . identifier[Append]
keyword[elif] identifier[mode] == literal[string] :
identifier[mode] = identifier[blobxfer] . identifier[models] . identifier[azure] . identifier[StorageModes] . identifier[Block]
keyword[elif] identifier[mode] == literal[string] :
identifier[mode] = identifier[blobxfer] . identifier[models] . identifier[azure] . identifier[StorageModes] . identifier[File]
keyword[elif] identifier[mode] == literal[string] :
identifier[mode] = identifier[blobxfer] . identifier[models] . identifier[azure] . identifier[StorageModes] . identifier[Page]
keyword[else] :
keyword[raise] identifier[ValueError] ( literal[string] . identifier[format] ( identifier[mode] ))
identifier[rpk] = identifier[_merge_setting] ( identifier[cli_options] , identifier[conf_options] , literal[string] )
keyword[if] identifier[blobxfer] . identifier[util] . identifier[is_not_empty] ( identifier[rpk] ):
identifier[rpk] = identifier[blobxfer] . identifier[operations] . identifier[crypto] . identifier[load_rsa_public_key_file] ( identifier[rpk] )
keyword[if] identifier[rpk] keyword[is] keyword[None] :
identifier[rpk] = identifier[_merge_setting] (
identifier[cli_options] , identifier[conf_options] , literal[string] )
keyword[if] identifier[blobxfer] . identifier[util] . identifier[is_not_empty] ( identifier[rpk] ):
identifier[rpkp] = identifier[_merge_setting] (
identifier[cli_options] , identifier[conf_options] , literal[string] )
identifier[rpk] = identifier[blobxfer] . identifier[operations] . identifier[crypto] . identifier[load_rsa_private_key_file] (
identifier[rpk] , identifier[rpkp] )
identifier[rpk] = identifier[rpk] . identifier[public_key] ()
keyword[else] :
identifier[rpk] = keyword[None]
identifier[lsp] = identifier[blobxfer] . identifier[models] . identifier[upload] . identifier[LocalSourcePath] ()
identifier[lsp] . identifier[add_paths] ( identifier[conf] [ literal[string] ])
identifier[incl] = identifier[_merge_setting] ( identifier[cli_conf] , identifier[conf] , literal[string] , identifier[default] = keyword[None] )
keyword[if] identifier[blobxfer] . identifier[util] . identifier[is_not_empty] ( identifier[incl] ):
identifier[lsp] . identifier[add_includes] ( identifier[incl] )
identifier[excl] = identifier[_merge_setting] ( identifier[cli_conf] , identifier[conf] , literal[string] , identifier[default] = keyword[None] )
keyword[if] identifier[blobxfer] . identifier[util] . identifier[is_not_empty] ( identifier[excl] ):
identifier[lsp] . identifier[add_excludes] ( identifier[excl] )
identifier[conf_sfp] = identifier[conf_options] . identifier[get] ( literal[string] ,{})
identifier[cli_sfp] = identifier[cli_options] [ literal[string] ]
identifier[conf_vio] = identifier[conf_options] . identifier[get] ( literal[string] ,{})
identifier[cli_vio] = identifier[cli_options] [ literal[string] ]
identifier[conf_sod] = identifier[conf_options] . identifier[get] ( literal[string] ,{})
identifier[cli_sod] = identifier[cli_options] [ literal[string] ]
identifier[us] = identifier[blobxfer] . identifier[models] . identifier[upload] . identifier[Specification] (
identifier[upload_options] = identifier[blobxfer] . identifier[models] . identifier[options] . identifier[Upload] (
identifier[access_tier] = identifier[_merge_setting] (
identifier[cli_options] , identifier[conf_options] , literal[string] , identifier[default] = keyword[None] ),
identifier[chunk_size_bytes] = identifier[_merge_setting] (
identifier[cli_options] , identifier[conf_options] , literal[string] ,
identifier[default] = literal[int] ),
identifier[delete_extraneous_destination] = identifier[_merge_setting] (
identifier[cli_options] , identifier[conf_options] ,
literal[string] , identifier[default] = keyword[False] ),
identifier[mode] = identifier[mode] ,
identifier[one_shot_bytes] = identifier[_merge_setting] (
identifier[cli_options] , identifier[conf_options] , literal[string] , identifier[default] = literal[int] ),
identifier[overwrite] = identifier[_merge_setting] (
identifier[cli_options] , identifier[conf_options] , literal[string] , identifier[default] = keyword[True] ),
identifier[recursive] = identifier[_merge_setting] (
identifier[cli_options] , identifier[conf_options] , literal[string] , identifier[default] = keyword[True] ),
identifier[rename] = identifier[_merge_setting] (
identifier[cli_options] , identifier[conf_options] , literal[string] , identifier[default] = keyword[False] ),
identifier[rsa_public_key] = identifier[rpk] ,
identifier[store_file_properties] = identifier[blobxfer] . identifier[models] . identifier[options] . identifier[FileProperties] (
identifier[attributes] = identifier[_merge_setting] (
identifier[cli_sfp] , identifier[conf_sfp] , literal[string] , identifier[default] = keyword[False] ),
identifier[cache_control] = identifier[_merge_setting] (
identifier[cli_sfp] , identifier[conf_sfp] , literal[string] , identifier[default] = keyword[None] ),
identifier[lmt] = keyword[None] ,
identifier[md5] = identifier[_merge_setting] (
identifier[cli_sfp] , identifier[conf_sfp] , literal[string] , identifier[default] = keyword[False] ),
),
identifier[stdin_as_page_blob_size] = identifier[_merge_setting] (
identifier[cli_options] , identifier[conf_options] , literal[string] ,
identifier[default] = literal[int] ),
identifier[strip_components] = identifier[_merge_setting] (
identifier[cli_options] , identifier[conf_options] , literal[string] ,
identifier[default] = literal[int] ),
identifier[vectored_io] = identifier[blobxfer] . identifier[models] . identifier[options] . identifier[VectoredIo] (
identifier[stripe_chunk_size_bytes] = identifier[_merge_setting] (
identifier[cli_vio] , identifier[conf_vio] , literal[string] ,
identifier[default] = literal[int] ),
identifier[distribution_mode] = identifier[blobxfer] .
identifier[models] . identifier[upload] . identifier[VectoredIoDistributionMode] (
identifier[_merge_setting] (
identifier[cli_vio] , identifier[conf_vio] , literal[string] ,
identifier[default] = literal[string] ). identifier[lower] ()),
),
),
identifier[skip_on_options] = identifier[blobxfer] . identifier[models] . identifier[options] . identifier[SkipOn] (
identifier[filesize_match] = identifier[_merge_setting] (
identifier[cli_sod] , identifier[conf_sod] , literal[string] , identifier[default] = keyword[False] ),
identifier[lmt_ge] = identifier[_merge_setting] (
identifier[cli_sod] , identifier[conf_sod] , literal[string] , identifier[default] = keyword[False] ),
identifier[md5_match] = identifier[_merge_setting] (
identifier[cli_sod] , identifier[conf_sod] , literal[string] , identifier[default] = keyword[False] ),
),
identifier[local_source_path] = identifier[lsp] ,
)
keyword[for] identifier[dst] keyword[in] identifier[conf] [ literal[string] ]:
keyword[if] identifier[len] ( identifier[dst] )!= literal[int] :
keyword[raise] identifier[RuntimeError] (
literal[string] )
identifier[sa] = identifier[next] ( identifier[iter] ( identifier[dst] ))
identifier[adp] = identifier[blobxfer] . identifier[operations] . identifier[azure] . identifier[DestinationPath] ()
identifier[adp] . identifier[add_path_with_storage_account] ( identifier[dst] [ identifier[sa] ], identifier[sa] )
identifier[us] . identifier[add_azure_destination_path] ( identifier[adp] )
identifier[specs] . identifier[append] ( identifier[us] )
keyword[return] identifier[specs]
|
def create_upload_specifications(ctx_cli_options, config):
# type: (dict, dict) -> List[blobxfer.models.upload.Specification]
'Create a list of Upload Specification objects from configuration\n :param dict ctx_cli_options: cli options\n :param dict config: config dict\n :rtype: list\n :return: list of Upload Specification objects\n '
cli_conf = ctx_cli_options[ctx_cli_options['_action']]
cli_options = cli_conf['options']
specs = []
for conf in config['upload']:
if 'options' in conf:
conf_options = conf['options'] # depends on [control=['if'], data=['conf']]
else:
conf_options = {}
# create upload options
mode = _merge_setting(cli_options, conf_options, 'mode', default='auto').lower()
if mode == 'auto':
mode = blobxfer.models.azure.StorageModes.Auto # depends on [control=['if'], data=['mode']]
elif mode == 'append':
mode = blobxfer.models.azure.StorageModes.Append # depends on [control=['if'], data=['mode']]
elif mode == 'block':
mode = blobxfer.models.azure.StorageModes.Block # depends on [control=['if'], data=['mode']]
elif mode == 'file':
mode = blobxfer.models.azure.StorageModes.File # depends on [control=['if'], data=['mode']]
elif mode == 'page':
mode = blobxfer.models.azure.StorageModes.Page # depends on [control=['if'], data=['mode']]
else:
raise ValueError('unknown mode: {}'.format(mode))
# load RSA public key PEM if specified
rpk = _merge_setting(cli_options, conf_options, 'rsa_public_key')
if blobxfer.util.is_not_empty(rpk):
rpk = blobxfer.operations.crypto.load_rsa_public_key_file(rpk) # depends on [control=['if'], data=[]]
if rpk is None:
# load RSA private key PEM file if specified
rpk = _merge_setting(cli_options, conf_options, 'rsa_private_key')
if blobxfer.util.is_not_empty(rpk):
rpkp = _merge_setting(cli_options, conf_options, 'rsa_private_key_passphrase')
rpk = blobxfer.operations.crypto.load_rsa_private_key_file(rpk, rpkp)
rpk = rpk.public_key() # depends on [control=['if'], data=[]]
else:
rpk = None # depends on [control=['if'], data=['rpk']]
# create local source paths
lsp = blobxfer.models.upload.LocalSourcePath()
lsp.add_paths(conf['source'])
incl = _merge_setting(cli_conf, conf, 'include', default=None)
if blobxfer.util.is_not_empty(incl):
lsp.add_includes(incl) # depends on [control=['if'], data=[]]
excl = _merge_setting(cli_conf, conf, 'exclude', default=None)
if blobxfer.util.is_not_empty(excl):
lsp.add_excludes(excl) # depends on [control=['if'], data=[]]
# create specification
conf_sfp = conf_options.get('store_file_properties', {})
cli_sfp = cli_options['store_file_properties']
conf_vio = conf_options.get('vectored_io', {})
cli_vio = cli_options['vectored_io']
conf_sod = conf_options.get('skip_on', {})
cli_sod = cli_options['skip_on']
us = blobxfer.models.upload.Specification(upload_options=blobxfer.models.options.Upload(access_tier=_merge_setting(cli_options, conf_options, 'access_tier', default=None), chunk_size_bytes=_merge_setting(cli_options, conf_options, 'chunk_size_bytes', default=0), delete_extraneous_destination=_merge_setting(cli_options, conf_options, 'delete_extraneous_destination', default=False), mode=mode, one_shot_bytes=_merge_setting(cli_options, conf_options, 'one_shot_bytes', default=0), overwrite=_merge_setting(cli_options, conf_options, 'overwrite', default=True), recursive=_merge_setting(cli_options, conf_options, 'recursive', default=True), rename=_merge_setting(cli_options, conf_options, 'rename', default=False), rsa_public_key=rpk, store_file_properties=blobxfer.models.options.FileProperties(attributes=_merge_setting(cli_sfp, conf_sfp, 'attributes', default=False), cache_control=_merge_setting(cli_sfp, conf_sfp, 'cache_control', default=None), lmt=None, md5=_merge_setting(cli_sfp, conf_sfp, 'md5', default=False)), stdin_as_page_blob_size=_merge_setting(cli_options, conf_options, 'stdin_as_page_blob_size', default=0), strip_components=_merge_setting(cli_options, conf_options, 'strip_components', default=0), vectored_io=blobxfer.models.options.VectoredIo(stripe_chunk_size_bytes=_merge_setting(cli_vio, conf_vio, 'stripe_chunk_size_bytes', default=1073741824), distribution_mode=blobxfer.models.upload.VectoredIoDistributionMode(_merge_setting(cli_vio, conf_vio, 'distribution_mode', default='disabled').lower()))), skip_on_options=blobxfer.models.options.SkipOn(filesize_match=_merge_setting(cli_sod, conf_sod, 'filesize_match', default=False), lmt_ge=_merge_setting(cli_sod, conf_sod, 'lmt_ge', default=False), md5_match=_merge_setting(cli_sod, conf_sod, 'md5_match', default=False)), local_source_path=lsp)
# create remote destination paths
for dst in conf['destination']:
if len(dst) != 1:
raise RuntimeError('invalid number of destination pairs specified per entry') # depends on [control=['if'], data=[]]
sa = next(iter(dst))
adp = blobxfer.operations.azure.DestinationPath()
adp.add_path_with_storage_account(dst[sa], sa)
us.add_azure_destination_path(adp) # depends on [control=['for'], data=['dst']]
# append spec to list
specs.append(us) # depends on [control=['for'], data=['conf']]
return specs
|
def nuclear_norm(data):
    r"""Nuclear norm

    This method computes the nuclear (or trace) norm of the input data.

    Parameters
    ----------
    data : np.ndarray
        Input data array (2D matrix)

    Returns
    -------
    float nuclear norm value

    Examples
    --------
    >>> from modopt.math.matrix import nuclear_norm
    >>> a = np.arange(9).reshape(3, 3)
    >>> nuclear_norm(a)
    15.49193338482967

    Notes
    -----
    Implements the following equation:

    .. math::
        \|\mathbf{A}\|_* = \sum_{i=1}^{\min\{m,n\}} \sigma_i (\mathbf{A})

    """
    # Only the singular values are needed for the nuclear norm, so skip
    # computing the U and V matrices entirely (compute_uv=False); this
    # yields identical results at a fraction of the cost.
    singular_values = np.linalg.svd(data, compute_uv=False)

    # Nuclear norm = sum of the singular values.
    return np.sum(singular_values)
|
def function[nuclear_norm, parameter[data]]:
constant[Nuclear norm
This method computes the nuclear (or trace) norm of the input data.
Parameters
----------
data : np.ndarray
Input data array
Returns
-------
float nuclear norm value
Examples
--------
>>> from modopt.math.matrix import nuclear_norm
>>> a = np.arange(9).reshape(3, 3)
>>> nuclear_norm(a)
15.49193338482967
Notes
-----
Implements the following equation:
.. math::
\|\mathbf{A}\|_* = \sum_{i=1}^{\min\{m,n\}} \sigma_i (\mathbf{A})
]
<ast.Tuple object at 0x7da1b0e9c790> assign[=] call[name[np].linalg.svd, parameter[name[data]]]
return[call[name[np].sum, parameter[name[s]]]]
|
keyword[def] identifier[nuclear_norm] ( identifier[data] ):
literal[string]
identifier[u] , identifier[s] , identifier[v] = identifier[np] . identifier[linalg] . identifier[svd] ( identifier[data] )
keyword[return] identifier[np] . identifier[sum] ( identifier[s] )
|
def nuclear_norm(data):
"""Nuclear norm
This method computes the nuclear (or trace) norm of the input data.
Parameters
----------
data : np.ndarray
Input data array
Returns
-------
float nuclear norm value
Examples
--------
>>> from modopt.math.matrix import nuclear_norm
>>> a = np.arange(9).reshape(3, 3)
>>> nuclear_norm(a)
15.49193338482967
Notes
-----
Implements the following equation:
.. math::
\\|\\mathbf{A}\\|_* = \\sum_{i=1}^{\\min\\{m,n\\}} \\sigma_i (\\mathbf{A})
"""
# Get SVD of the data.
(u, s, v) = np.linalg.svd(data)
# Return nuclear norm.
return np.sum(s)
|
def parse_makefile_aliases(filepath):
    '''
    Parse a makefile to find commands and substitute variables. Expects a
    makefile with only aliases and a line return between each command.
    Returns a dict, with a list of commands for each alias.

    :param filepath: path to the Makefile to parse
    :return: dict mapping each alias name to its list of shell commands,
        with references to other aliases fully expanded
    '''
    # -- Parsing the Makefile using ConfigParser
    # Adding a fake section to make the Makefile a valid Ini file
    # (ConfigParser rejects input that has no section header).
    ini_str = '[root]\n'
    with open(filepath, 'r') as fd:
        # '@make ' prefixes are stripped so recursive make invocations
        # read as plain alias references during substitution below.
        ini_str = ini_str + fd.read().replace('@make ', '')
    # NOTE(review): StringIO / ConfigParser are Python 2 module names
    # (io.StringIO / configparser under Python 3) -- confirm target runtime.
    ini_fp = StringIO.StringIO(ini_str)
    # Parse using ConfigParser
    config = ConfigParser.RawConfigParser()
    config.readfp(ini_fp)
    # Fetch the list of aliases (option names of the fake [root] section)
    aliases = config.options('root')
    # -- Extracting commands for each alias
    commands = {}
    for alias in aliases:
        # strip the first line return, and then split by any line return
        commands[alias] = config.get('root', alias).lstrip('\n').split('\n')
    # -- Commands substitution
    # Loop until all aliases are substituted by their commands:
    # Check each command of each alias, and if there is one command that is to
    # be substituted by an alias, try to do it right away. If this is not
    # possible because this alias itself points to other aliases , then stop
    # and put the current alias back in the queue to be processed again later.
    # Create the queue of aliases to process
    # (assumes dict.keys() returns a list, as in Python 2; the .pop(0)
    # below would fail on a Python 3 keys view -- verify target runtime)
    aliases_todo = commands.keys()
    # Create the dict that will hold the full commands
    commands_new = {}
    # Loop until we have processed all aliases
    # NOTE(review): a cycle of mutually-referencing aliases would make this
    # loop requeue forever -- presumably input makefiles are acyclic; confirm.
    while aliases_todo:
        # Pick the first alias in the queue
        alias = aliases_todo.pop(0)
        # Create a new entry in the resulting dict
        commands_new[alias] = []
        # For each command of this alias
        for cmd in commands[alias]:
            # Ignore self-referencing (alias points to itself)
            if cmd == alias:
                pass
            # Substitute full command
            elif cmd in aliases and cmd in commands_new:
                # Append all the commands referenced by the alias
                commands_new[alias].extend(commands_new[cmd])
            # Delay substituting another alias, waiting for the other alias to
            # be substituted first
            elif cmd in aliases and cmd not in commands_new:
                # Delete the current entry to avoid other aliases
                # to reference this one wrongly (as it is empty)
                del commands_new[alias]
                aliases_todo.append(alias)
                break
            # Full command (no aliases)
            else:
                commands_new[alias].append(cmd)
    commands = commands_new
    del commands_new
    # -- Prepending prefix to avoid conflicts with standard setup.py commands
    # for alias in commands.keys():
    #     commands['make_'+alias] = commands[alias]
    #     del commands[alias]
    return commands
|
def function[parse_makefile_aliases, parameter[filepath]]:
constant[
Parse a makefile to find commands and substitute variables. Expects a
makefile with only aliases and a line return between each command.
Returns a dict, with a list of commands for each alias.
]
variable[ini_str] assign[=] constant[[root]
]
with call[name[open], parameter[name[filepath], constant[r]]] begin[:]
variable[ini_str] assign[=] binary_operation[name[ini_str] + call[call[name[fd].read, parameter[]].replace, parameter[constant[@make ], constant[]]]]
variable[ini_fp] assign[=] call[name[StringIO].StringIO, parameter[name[ini_str]]]
variable[config] assign[=] call[name[ConfigParser].RawConfigParser, parameter[]]
call[name[config].readfp, parameter[name[ini_fp]]]
variable[aliases] assign[=] call[name[config].options, parameter[constant[root]]]
variable[commands] assign[=] dictionary[[], []]
for taget[name[alias]] in starred[name[aliases]] begin[:]
call[name[commands]][name[alias]] assign[=] call[call[call[name[config].get, parameter[constant[root], name[alias]]].lstrip, parameter[constant[
]]].split, parameter[constant[
]]]
variable[aliases_todo] assign[=] call[name[commands].keys, parameter[]]
variable[commands_new] assign[=] dictionary[[], []]
while name[aliases_todo] begin[:]
variable[alias] assign[=] call[name[aliases_todo].pop, parameter[constant[0]]]
call[name[commands_new]][name[alias]] assign[=] list[[]]
for taget[name[cmd]] in starred[call[name[commands]][name[alias]]] begin[:]
if compare[name[cmd] equal[==] name[alias]] begin[:]
pass
variable[commands] assign[=] name[commands_new]
<ast.Delete object at 0x7da18f00ebf0>
return[name[commands]]
|
keyword[def] identifier[parse_makefile_aliases] ( identifier[filepath] ):
literal[string]
identifier[ini_str] = literal[string]
keyword[with] identifier[open] ( identifier[filepath] , literal[string] ) keyword[as] identifier[fd] :
identifier[ini_str] = identifier[ini_str] + identifier[fd] . identifier[read] (). identifier[replace] ( literal[string] , literal[string] )
identifier[ini_fp] = identifier[StringIO] . identifier[StringIO] ( identifier[ini_str] )
identifier[config] = identifier[ConfigParser] . identifier[RawConfigParser] ()
identifier[config] . identifier[readfp] ( identifier[ini_fp] )
identifier[aliases] = identifier[config] . identifier[options] ( literal[string] )
identifier[commands] ={}
keyword[for] identifier[alias] keyword[in] identifier[aliases] :
identifier[commands] [ identifier[alias] ]= identifier[config] . identifier[get] ( literal[string] , identifier[alias] ). identifier[lstrip] ( literal[string] ). identifier[split] ( literal[string] )
identifier[aliases_todo] = identifier[commands] . identifier[keys] ()
identifier[commands_new] ={}
keyword[while] identifier[aliases_todo] :
identifier[alias] = identifier[aliases_todo] . identifier[pop] ( literal[int] )
identifier[commands_new] [ identifier[alias] ]=[]
keyword[for] identifier[cmd] keyword[in] identifier[commands] [ identifier[alias] ]:
keyword[if] identifier[cmd] == identifier[alias] :
keyword[pass]
keyword[elif] identifier[cmd] keyword[in] identifier[aliases] keyword[and] identifier[cmd] keyword[in] identifier[commands_new] :
identifier[commands_new] [ identifier[alias] ]. identifier[extend] ( identifier[commands_new] [ identifier[cmd] ])
keyword[elif] identifier[cmd] keyword[in] identifier[aliases] keyword[and] identifier[cmd] keyword[not] keyword[in] identifier[commands_new] :
keyword[del] identifier[commands_new] [ identifier[alias] ]
identifier[aliases_todo] . identifier[append] ( identifier[alias] )
keyword[break]
keyword[else] :
identifier[commands_new] [ identifier[alias] ]. identifier[append] ( identifier[cmd] )
identifier[commands] = identifier[commands_new]
keyword[del] identifier[commands_new]
keyword[return] identifier[commands]
|
def parse_makefile_aliases(filepath):
"""
Parse a makefile to find commands and substitute variables. Expects a
makefile with only aliases and a line return between each command.
Returns a dict, with a list of commands for each alias.
"""
# -- Parsing the Makefile using ConfigParser
# Adding a fake section to make the Makefile a valid Ini file
ini_str = '[root]\n'
with open(filepath, 'r') as fd:
ini_str = ini_str + fd.read().replace('@make ', '') # depends on [control=['with'], data=['fd']]
ini_fp = StringIO.StringIO(ini_str)
# Parse using ConfigParser
config = ConfigParser.RawConfigParser()
config.readfp(ini_fp)
# Fetch the list of aliases
aliases = config.options('root')
# -- Extracting commands for each alias
commands = {}
for alias in aliases:
# strip the first line return, and then split by any line return
commands[alias] = config.get('root', alias).lstrip('\n').split('\n') # depends on [control=['for'], data=['alias']]
# -- Commands substitution
# Loop until all aliases are substituted by their commands:
# Check each command of each alias, and if there is one command that is to
# be substituted by an alias, try to do it right away. If this is not
# possible because this alias itself points to other aliases , then stop
# and put the current alias back in the queue to be processed again later.
# Create the queue of aliases to process
aliases_todo = commands.keys()
# Create the dict that will hold the full commands
commands_new = {}
# Loop until we have processed all aliases
while aliases_todo:
# Pick the first alias in the queue
alias = aliases_todo.pop(0)
# Create a new entry in the resulting dict
commands_new[alias] = []
# For each command of this alias
for cmd in commands[alias]:
# Ignore self-referencing (alias points to itself)
if cmd == alias:
pass # depends on [control=['if'], data=[]]
# Substitute full command
elif cmd in aliases and cmd in commands_new:
# Append all the commands referenced by the alias
commands_new[alias].extend(commands_new[cmd]) # depends on [control=['if'], data=[]]
# Delay substituting another alias, waiting for the other alias to
# be substituted first
elif cmd in aliases and cmd not in commands_new:
# Delete the current entry to avoid other aliases
# to reference this one wrongly (as it is empty)
del commands_new[alias]
aliases_todo.append(alias)
break # depends on [control=['if'], data=[]]
else:
# Full command (no aliases)
commands_new[alias].append(cmd) # depends on [control=['for'], data=['cmd']] # depends on [control=['while'], data=[]]
commands = commands_new
del commands_new
# -- Prepending prefix to avoid conflicts with standard setup.py commands
# for alias in commands.keys():
# commands['make_'+alias] = commands[alias]
# del commands[alias]
return commands
|
def last_session_date(self):
    """Return date/time for start of last session data.

    Reads the timestamp stored under ``self.intervals[1]['ts']``
    (presumably a naive UTC timestamp with a trailing 'Z' -- confirm),
    parses it, and shifts it by the current local-vs-UTC offset.
    Returns ``None`` when no such entry exists.
    """
    try:
        raw_ts = self.intervals[1]['ts']
    except KeyError:
        # No interval data recorded yet.
        return None
    # Derive the local-time offset from a single epoch sample.
    epoch_now = time.time()
    utc_delta = datetime.fromtimestamp(epoch_now) - datetime.utcfromtimestamp(epoch_now)
    # Parse the naive timestamp, then localize it.
    return datetime.strptime(raw_ts, '%Y-%m-%dT%H:%M:%S.%fZ') + utc_delta
|
def function[last_session_date, parameter[self]]:
constant[Return date/time for start of last session data.]
<ast.Try object at 0x7da20c6c7550>
variable[date_f] assign[=] call[name[datetime].strptime, parameter[name[date], constant[%Y-%m-%dT%H:%M:%S.%fZ]]]
variable[now] assign[=] call[name[time].time, parameter[]]
variable[offset] assign[=] binary_operation[call[name[datetime].fromtimestamp, parameter[name[now]]] - call[name[datetime].utcfromtimestamp, parameter[name[now]]]]
return[binary_operation[name[date_f] + name[offset]]]
|
keyword[def] identifier[last_session_date] ( identifier[self] ):
literal[string]
keyword[try] :
identifier[date] = identifier[self] . identifier[intervals] [ literal[int] ][ literal[string] ]
keyword[except] identifier[KeyError] :
keyword[return] keyword[None]
identifier[date_f] = identifier[datetime] . identifier[strptime] ( identifier[date] , literal[string] )
identifier[now] = identifier[time] . identifier[time] ()
identifier[offset] = identifier[datetime] . identifier[fromtimestamp] ( identifier[now] )- identifier[datetime] . identifier[utcfromtimestamp] ( identifier[now] )
keyword[return] identifier[date_f] + identifier[offset]
|
def last_session_date(self):
"""Return date/time for start of last session data."""
try:
date = self.intervals[1]['ts'] # depends on [control=['try'], data=[]]
except KeyError:
return None # depends on [control=['except'], data=[]]
date_f = datetime.strptime(date, '%Y-%m-%dT%H:%M:%S.%fZ')
now = time.time()
offset = datetime.fromtimestamp(now) - datetime.utcfromtimestamp(now)
return date_f + offset
|
def tag_audio_file(audio_file, tracklisting):
    """
    Write ``tracklisting`` as a list into the lyrics tag of ``audio_file``
    if it is not already present.

    Returns True on success (or when no tagging was needed), False when
    the tag could not be saved.
    """
    # TODO: is IOError required now or would the mediafile exception cover it?
    try:
        save_tag_to_audio_file(audio_file, tracklisting)
    except (IOError, mediafile.UnreadableFileError):
        # Could not write the tag: report the offending file, signal failure.
        print("Unable to save tag to file:", audio_file)
        return False
    except TagNotNeededError:
        # Tag already present -- nothing to do, which counts as success.
        return True
    return True
|
def function[tag_audio_file, parameter[audio_file, tracklisting]]:
constant[
Adds tracklisting as list to lyrics tag of audio file if not present.
Returns True if successful or not needed, False if tagging fails.
]
<ast.Try object at 0x7da20e960bb0>
return[name[audio_tagging_successful]]
|
keyword[def] identifier[tag_audio_file] ( identifier[audio_file] , identifier[tracklisting] ):
literal[string]
keyword[try] :
identifier[save_tag_to_audio_file] ( identifier[audio_file] , identifier[tracklisting] )
keyword[except] ( identifier[IOError] , identifier[mediafile] . identifier[UnreadableFileError] ):
identifier[print] ( literal[string] , identifier[audio_file] )
identifier[audio_tagging_successful] = keyword[False]
keyword[except] identifier[TagNotNeededError] :
identifier[audio_tagging_successful] = keyword[True]
keyword[else] :
identifier[audio_tagging_successful] = keyword[True]
keyword[return] identifier[audio_tagging_successful]
|
def tag_audio_file(audio_file, tracklisting):
"""
Adds tracklisting as list to lyrics tag of audio file if not present.
Returns True if successful or not needed, False if tagging fails.
"""
try:
save_tag_to_audio_file(audio_file, tracklisting) # depends on [control=['try'], data=[]]
# TODO: is IOError required now or would the mediafile exception cover it?
except (IOError, mediafile.UnreadableFileError):
print('Unable to save tag to file:', audio_file)
audio_tagging_successful = False # depends on [control=['except'], data=[]]
except TagNotNeededError:
audio_tagging_successful = True # depends on [control=['except'], data=[]]
else:
audio_tagging_successful = True
return audio_tagging_successful
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.