code stringlengths 75 104k | code_sememe stringlengths 47 309k | token_type stringlengths 215 214k | code_dependency stringlengths 75 155k |
|---|---|---|---|
def _getslice(self, maps):
    """Determines how to slice the scratch for returning values."""
    # Values produced for the first declared input drive the decision.
    first_input = list(self.inputs)[0]
    values = maps[first_input]
    # Sequence-like results keep their full extent; scalars collapse to
    # a single element at index 0.
    if isinstance(values, (numpy.ndarray, list)):
        return slice(None, None)
    return 0
constant[Determines how to slice the scratch for returning values.]
variable[invals] assign[=] call[name[maps]][call[call[name[list], parameter[name[self].inputs]]][constant[0]]]
if <ast.UnaryOp object at 0x7da207f01630> begin[:]
variable[getslice] assign[=] constant[0]
return[name[getslice]] | keyword[def] identifier[_getslice] ( identifier[self] , identifier[maps] ):
literal[string]
identifier[invals] = identifier[maps] [ identifier[list] ( identifier[self] . identifier[inputs] )[ literal[int] ]]
keyword[if] keyword[not] identifier[isinstance] ( identifier[invals] ,( identifier[numpy] . identifier[ndarray] , identifier[list] )):
identifier[getslice] = literal[int]
keyword[else] :
identifier[getslice] = identifier[slice] ( keyword[None] , keyword[None] )
keyword[return] identifier[getslice] | def _getslice(self, maps):
"""Determines how to slice the scratch for returning values."""
invals = maps[list(self.inputs)[0]]
if not isinstance(invals, (numpy.ndarray, list)):
getslice = 0 # depends on [control=['if'], data=[]]
else:
getslice = slice(None, None)
return getslice |
def advance_recurring_todo(p_todo, p_offset=None, p_strict=False):
    """
    Given a Todo item, return a new instance of a Todo item with the dates
    shifted according to the recurrence rule.
    Strict means that the real due date is taken as a offset, not today or a
    future date to determine the offset.
    When the todo item has no due date, then the date is used passed by the
    caller (defaulting to today).
    When no recurrence tag is present, an exception is raised.
    """
    todo = Todo(p_todo.source())
    pattern = todo.tag_value('rec')

    if not pattern:
        raise NoRecurrenceException()

    if pattern.startswith('+'):
        # A leading '+' forces strict recurrence; drop the marker itself.
        p_strict = True
        pattern = pattern[1:]

    if p_strict:
        offset = p_todo.due_date() or p_offset or date.today()
    else:
        offset = p_offset or date.today()

    length = todo.length()
    new_due = relative_date_to_date(pattern, offset)

    if not new_due:
        raise NoRecurrenceException()

    # pylint: disable=E1103
    todo.set_tag(config().tag_due(), new_due.isoformat())

    # Preserve the task's duration by shifting the start date along
    # with the new due date.
    if todo.start_date():
        new_start = new_due - timedelta(length)
        todo.set_tag(config().tag_start(), new_start.isoformat())

    todo.set_creation_date(date.today())
    return todo
constant[
Given a Todo item, return a new instance of a Todo item with the dates
shifted according to the recurrence rule.
Strict means that the real due date is taken as a offset, not today or a
future date to determine the offset.
When the todo item has no due date, then the date is used passed by the
caller (defaulting to today).
When no recurrence tag is present, an exception is raised.
]
variable[todo] assign[=] call[name[Todo], parameter[call[name[p_todo].source, parameter[]]]]
variable[pattern] assign[=] call[name[todo].tag_value, parameter[constant[rec]]]
if <ast.UnaryOp object at 0x7da207f02b00> begin[:]
<ast.Raise object at 0x7da207f023e0>
if name[p_strict] begin[:]
variable[offset] assign[=] <ast.BoolOp object at 0x7da207f01690>
variable[length] assign[=] call[name[todo].length, parameter[]]
variable[new_due] assign[=] call[name[relative_date_to_date], parameter[name[pattern], name[offset]]]
if <ast.UnaryOp object at 0x7da207f013f0> begin[:]
<ast.Raise object at 0x7da207f00a60>
call[name[todo].set_tag, parameter[call[call[name[config], parameter[]].tag_due, parameter[]], call[name[new_due].isoformat, parameter[]]]]
if call[name[todo].start_date, parameter[]] begin[:]
variable[new_start] assign[=] binary_operation[name[new_due] - call[name[timedelta], parameter[name[length]]]]
call[name[todo].set_tag, parameter[call[call[name[config], parameter[]].tag_start, parameter[]], call[name[new_start].isoformat, parameter[]]]]
call[name[todo].set_creation_date, parameter[call[name[date].today, parameter[]]]]
return[name[todo]] | keyword[def] identifier[advance_recurring_todo] ( identifier[p_todo] , identifier[p_offset] = keyword[None] , identifier[p_strict] = keyword[False] ):
literal[string]
identifier[todo] = identifier[Todo] ( identifier[p_todo] . identifier[source] ())
identifier[pattern] = identifier[todo] . identifier[tag_value] ( literal[string] )
keyword[if] keyword[not] identifier[pattern] :
keyword[raise] identifier[NoRecurrenceException] ()
keyword[elif] identifier[pattern] . identifier[startswith] ( literal[string] ):
identifier[p_strict] = keyword[True]
identifier[pattern] = identifier[pattern] [ literal[int] :]
keyword[if] identifier[p_strict] :
identifier[offset] = identifier[p_todo] . identifier[due_date] () keyword[or] identifier[p_offset] keyword[or] identifier[date] . identifier[today] ()
keyword[else] :
identifier[offset] = identifier[p_offset] keyword[or] identifier[date] . identifier[today] ()
identifier[length] = identifier[todo] . identifier[length] ()
identifier[new_due] = identifier[relative_date_to_date] ( identifier[pattern] , identifier[offset] )
keyword[if] keyword[not] identifier[new_due] :
keyword[raise] identifier[NoRecurrenceException] ()
identifier[todo] . identifier[set_tag] ( identifier[config] (). identifier[tag_due] (), identifier[new_due] . identifier[isoformat] ())
keyword[if] identifier[todo] . identifier[start_date] ():
identifier[new_start] = identifier[new_due] - identifier[timedelta] ( identifier[length] )
identifier[todo] . identifier[set_tag] ( identifier[config] (). identifier[tag_start] (), identifier[new_start] . identifier[isoformat] ())
identifier[todo] . identifier[set_creation_date] ( identifier[date] . identifier[today] ())
keyword[return] identifier[todo] | def advance_recurring_todo(p_todo, p_offset=None, p_strict=False):
"""
Given a Todo item, return a new instance of a Todo item with the dates
shifted according to the recurrence rule.
Strict means that the real due date is taken as a offset, not today or a
future date to determine the offset.
When the todo item has no due date, then the date is used passed by the
caller (defaulting to today).
When no recurrence tag is present, an exception is raised.
"""
todo = Todo(p_todo.source())
pattern = todo.tag_value('rec')
if not pattern:
raise NoRecurrenceException() # depends on [control=['if'], data=[]]
elif pattern.startswith('+'):
p_strict = True
# strip off the +
pattern = pattern[1:] # depends on [control=['if'], data=[]]
if p_strict:
offset = p_todo.due_date() or p_offset or date.today() # depends on [control=['if'], data=[]]
else:
offset = p_offset or date.today()
length = todo.length()
new_due = relative_date_to_date(pattern, offset)
if not new_due:
raise NoRecurrenceException() # depends on [control=['if'], data=[]]
# pylint: disable=E1103
todo.set_tag(config().tag_due(), new_due.isoformat())
if todo.start_date():
new_start = new_due - timedelta(length)
todo.set_tag(config().tag_start(), new_start.isoformat()) # depends on [control=['if'], data=[]]
todo.set_creation_date(date.today())
return todo |
def is_class_file(filename):
    """
    checks whether the given file is a Java class file, by opening it
    and checking for the magic header
    """
    with open(filename, "rb") as stream:
        header = stream.read(len(JAVA_CLASS_MAGIC))
    # Under Python 2 read() yields a str; convert each character to its
    # byte value so the comparison below works on both major versions.
    if isinstance(header, str):
        header = map(ord, header)
    return tuple(header) == JAVA_CLASS_MAGIC
constant[
checks whether the given file is a Java class file, by opening it
and checking for the magic header
]
with call[name[open], parameter[name[filename], constant[rb]]] begin[:]
variable[c] assign[=] call[name[fd].read, parameter[call[name[len], parameter[name[JAVA_CLASS_MAGIC]]]]]
if call[name[isinstance], parameter[name[c], name[str]]] begin[:]
variable[c] assign[=] call[name[map], parameter[name[ord], name[c]]]
return[compare[call[name[tuple], parameter[name[c]]] equal[==] name[JAVA_CLASS_MAGIC]]] | keyword[def] identifier[is_class_file] ( identifier[filename] ):
literal[string]
keyword[with] identifier[open] ( identifier[filename] , literal[string] ) keyword[as] identifier[fd] :
identifier[c] = identifier[fd] . identifier[read] ( identifier[len] ( identifier[JAVA_CLASS_MAGIC] ))
keyword[if] identifier[isinstance] ( identifier[c] , identifier[str] ):
identifier[c] = identifier[map] ( identifier[ord] , identifier[c] )
keyword[return] identifier[tuple] ( identifier[c] )== identifier[JAVA_CLASS_MAGIC] | def is_class_file(filename):
"""
checks whether the given file is a Java class file, by opening it
and checking for the magic header
"""
with open(filename, 'rb') as fd:
c = fd.read(len(JAVA_CLASS_MAGIC))
if isinstance(c, str): # Python 2
c = map(ord, c) # depends on [control=['if'], data=[]]
return tuple(c) == JAVA_CLASS_MAGIC # depends on [control=['with'], data=['fd']] |
def enter_context(self, cm):
    """Enters the supplied context manager.
    If successful, also pushes its __exit__ method as a callback and
    returns the result of the __enter__ method.
    """
    # Special methods are looked up on the type, not the instance,
    # mirroring the semantics of the `with` statement.
    cm_cls = type(cm)
    exit_method = cm_cls.__exit__
    entered = cm_cls.__enter__(cm)
    self._push_cm_exit(cm, exit_method)
    return entered
constant[Enters the supplied context manager.
If successful, also pushes its __exit__ method as a callback and
returns the result of the __enter__ method.
]
variable[_cm_type] assign[=] call[name[type], parameter[name[cm]]]
variable[_exit] assign[=] name[_cm_type].__exit__
variable[result] assign[=] call[name[_cm_type].__enter__, parameter[name[cm]]]
call[name[self]._push_cm_exit, parameter[name[cm], name[_exit]]]
return[name[result]] | keyword[def] identifier[enter_context] ( identifier[self] , identifier[cm] ):
literal[string]
identifier[_cm_type] = identifier[type] ( identifier[cm] )
identifier[_exit] = identifier[_cm_type] . identifier[__exit__]
identifier[result] = identifier[_cm_type] . identifier[__enter__] ( identifier[cm] )
identifier[self] . identifier[_push_cm_exit] ( identifier[cm] , identifier[_exit] )
keyword[return] identifier[result] | def enter_context(self, cm):
"""Enters the supplied context manager.
If successful, also pushes its __exit__ method as a callback and
returns the result of the __enter__ method.
"""
# We look up the special methods on the type to match the with
# statement.
_cm_type = type(cm)
_exit = _cm_type.__exit__
result = _cm_type.__enter__(cm)
self._push_cm_exit(cm, _exit)
return result |
def set_matrix(self, matrix):
    """Modifies the current transformation matrix (CTM)
    by setting it equal to :obj:`matrix`.
    :param matrix:
        A transformation :class:`Matrix` from user space to device space.
    """
    # Hand both native pointers to cairo, then surface any cairo error
    # as a Python exception.
    context_pointer = self._pointer
    matrix_pointer = matrix._pointer
    cairo.cairo_set_matrix(context_pointer, matrix_pointer)
    self._check_status()
constant[Modifies the current transformation matrix (CTM)
by setting it equal to :obj:`matrix`.
:param matrix:
A transformation :class:`Matrix` from user space to device space.
]
call[name[cairo].cairo_set_matrix, parameter[name[self]._pointer, name[matrix]._pointer]]
call[name[self]._check_status, parameter[]] | keyword[def] identifier[set_matrix] ( identifier[self] , identifier[matrix] ):
literal[string]
identifier[cairo] . identifier[cairo_set_matrix] ( identifier[self] . identifier[_pointer] , identifier[matrix] . identifier[_pointer] )
identifier[self] . identifier[_check_status] () | def set_matrix(self, matrix):
"""Modifies the current transformation matrix (CTM)
by setting it equal to :obj:`matrix`.
:param matrix:
A transformation :class:`Matrix` from user space to device space.
"""
cairo.cairo_set_matrix(self._pointer, matrix._pointer)
self._check_status() |
def absent(
        name,
        region,
        user=None,
        opts=False):
    '''
    Remove the named SQS queue if it exists.
    name
        Name of the SQS queue.
    region
        Region to remove the queue from
    user
        Name of the user performing the SQS operations
    opts
        Include additional arguments and options to the aws command line
    '''
    ret = {'name': name,
           'result': True,
           'comment': '',
           'changes': {}}

    # Nothing to do when the queue is already gone.
    if not __salt__['aws_sqs.queue_exists'](name, region, opts, user):
        ret['comment'] = '{0} does not exist in {1}'.format(name, region)
        return ret

    # Dry run: report the pending removal without touching AWS.
    if __opts__['test']:
        ret['result'] = None
        ret['comment'] = 'AWS SQS queue {0} is set to be removed'.format(
            name)
        return ret

    removed = __salt__['aws_sqs.delete_queue'](name, region, opts, user)
    if removed['retcode'] == 0:
        ret['changes']['removed'] = removed['stdout']
    else:
        ret['result'] = False
        ret['comment'] = removed['stderr']
    return ret
constant[
Remove the named SQS queue if it exists.
name
Name of the SQS queue.
region
Region to remove the queue from
user
Name of the user performing the SQS operations
opts
Include additional arguments and options to the aws command line
]
variable[ret] assign[=] dictionary[[<ast.Constant object at 0x7da1b2135d50>, <ast.Constant object at 0x7da1b2136440>, <ast.Constant object at 0x7da1b1f82860>, <ast.Constant object at 0x7da1b1f834c0>], [<ast.Name object at 0x7da1b1f82410>, <ast.Constant object at 0x7da1b1f833d0>, <ast.Constant object at 0x7da1b1f820b0>, <ast.Dict object at 0x7da1b1f83250>]]
variable[does_exist] assign[=] call[call[name[__salt__]][constant[aws_sqs.queue_exists]], parameter[name[name], name[region], name[opts], name[user]]]
if name[does_exist] begin[:]
if call[name[__opts__]][constant[test]] begin[:]
call[name[ret]][constant[result]] assign[=] constant[None]
call[name[ret]][constant[comment]] assign[=] call[constant[AWS SQS queue {0} is set to be removed].format, parameter[name[name]]]
return[name[ret]]
variable[removed] assign[=] call[call[name[__salt__]][constant[aws_sqs.delete_queue]], parameter[name[name], name[region], name[opts], name[user]]]
if compare[call[name[removed]][constant[retcode]] equal[==] constant[0]] begin[:]
call[call[name[ret]][constant[changes]]][constant[removed]] assign[=] call[name[removed]][constant[stdout]]
return[name[ret]] | keyword[def] identifier[absent] (
identifier[name] ,
identifier[region] ,
identifier[user] = keyword[None] ,
identifier[opts] = keyword[False] ):
literal[string]
identifier[ret] ={ literal[string] : identifier[name] , literal[string] : keyword[True] , literal[string] : literal[string] , literal[string] :{}}
identifier[does_exist] = identifier[__salt__] [ literal[string] ]( identifier[name] , identifier[region] , identifier[opts] , identifier[user] )
keyword[if] identifier[does_exist] :
keyword[if] identifier[__opts__] [ literal[string] ]:
identifier[ret] [ literal[string] ]= keyword[None]
identifier[ret] [ literal[string] ]= literal[string] . identifier[format] (
identifier[name] )
keyword[return] identifier[ret]
identifier[removed] = identifier[__salt__] [ literal[string] ]( identifier[name] , identifier[region] , identifier[opts] , identifier[user] )
keyword[if] identifier[removed] [ literal[string] ]== literal[int] :
identifier[ret] [ literal[string] ][ literal[string] ]= identifier[removed] [ literal[string] ]
keyword[else] :
identifier[ret] [ literal[string] ]= keyword[False]
identifier[ret] [ literal[string] ]= identifier[removed] [ literal[string] ]
keyword[else] :
identifier[ret] [ literal[string] ]= literal[string] . identifier[format] ( identifier[name] , identifier[region] )
keyword[return] identifier[ret] | def absent(name, region, user=None, opts=False):
"""
Remove the named SQS queue if it exists.
name
Name of the SQS queue.
region
Region to remove the queue from
user
Name of the user performing the SQS operations
opts
Include additional arguments and options to the aws command line
"""
ret = {'name': name, 'result': True, 'comment': '', 'changes': {}}
does_exist = __salt__['aws_sqs.queue_exists'](name, region, opts, user)
if does_exist:
if __opts__['test']:
ret['result'] = None
ret['comment'] = 'AWS SQS queue {0} is set to be removed'.format(name)
return ret # depends on [control=['if'], data=[]]
removed = __salt__['aws_sqs.delete_queue'](name, region, opts, user)
if removed['retcode'] == 0:
ret['changes']['removed'] = removed['stdout'] # depends on [control=['if'], data=[]]
else:
ret['result'] = False
ret['comment'] = removed['stderr'] # depends on [control=['if'], data=[]]
else:
ret['comment'] = '{0} does not exist in {1}'.format(name, region)
return ret |
def add_cpds(self, *cpds):
    """
    Add linear Gaussian CPD (Conditional Probability Distribution)
    to the Bayesian Model.
    Parameters
    ----------
    cpds : instances of LinearGaussianCPD
        List of LinearGaussianCPDs which will be associated with the model

    A CPD for a variable that already has one replaces the existing
    CPD (with a warning); otherwise the CPD is appended.
    """
    for cpd in cpds:
        if not isinstance(cpd, LinearGaussianCPD):
            raise ValueError('Only LinearGaussianCPD can be added.')

        # Every variable the CPD mentions must be a node of the model.
        # A - (A & B) is the same as A - B.
        if set(cpd.variables) - set(cpd.variables).intersection(
                set(self.nodes())):
            raise ValueError('CPD defined on variable not in the model', cpd)

        for index, existing_cpd in enumerate(self.cpds):
            if existing_cpd.variable == cpd.variable:
                logging.warning("Replacing existing CPD for {var}".format(var=cpd.variable))
                self.cpds[index] = cpd
                break
        else:
            # No CPD for this variable yet; register a new one.
            self.cpds.append(cpd)
constant[
Add linear Gaussian CPD (Conditional Probability Distribution)
to the Bayesian Model.
Parameters
----------
cpds : instances of LinearGaussianCPD
List of LinearGaussianCPDs which will be associated with the model
Examples
--------
>>> from pgmpy.models import LinearGaussianBayesianNetwork
>>> from pgmpy.factors.continuous import LinearGaussianCPD
>>> model = LinearGaussianBayesianNetwork([('x1', 'x2'), ('x2', 'x3')])
>>> cpd1 = LinearGaussianCPD('x1', [1], 4)
>>> cpd2 = LinearGaussianCPD('x2', [-5, 0.5], 4, ['x1'])
>>> cpd3 = LinearGaussianCPD('x3', [4, -1], 3, ['x2'])
>>> model.add_cpds(cpd1, cpd2, cpd3)
>>> for cpd in model.cpds:
print(cpd)
P(x1) = N(1; 4)
P(x2| x1) = N(0.5*x1_mu); -5)
P(x3| x2) = N(-1*x2_mu); 4)
]
for taget[name[cpd]] in starred[name[cpds]] begin[:]
if <ast.UnaryOp object at 0x7da20c990fd0> begin[:]
<ast.Raise object at 0x7da20c991d50>
if binary_operation[call[name[set], parameter[name[cpd].variables]] - call[call[name[set], parameter[name[cpd].variables]].intersection, parameter[call[name[set], parameter[call[name[self].nodes, parameter[]]]]]]] begin[:]
<ast.Raise object at 0x7da20c9934f0>
for taget[name[prev_cpd_index]] in starred[call[name[range], parameter[call[name[len], parameter[name[self].cpds]]]]] begin[:]
if compare[call[name[self].cpds][name[prev_cpd_index]].variable equal[==] name[cpd].variable] begin[:]
call[name[logging].warning, parameter[call[constant[Replacing existing CPD for {var}].format, parameter[]]]]
call[name[self].cpds][name[prev_cpd_index]] assign[=] name[cpd]
break | keyword[def] identifier[add_cpds] ( identifier[self] ,* identifier[cpds] ):
literal[string]
keyword[for] identifier[cpd] keyword[in] identifier[cpds] :
keyword[if] keyword[not] identifier[isinstance] ( identifier[cpd] , identifier[LinearGaussianCPD] ):
keyword[raise] identifier[ValueError] ( literal[string] )
keyword[if] identifier[set] ( identifier[cpd] . identifier[variables] )- identifier[set] ( identifier[cpd] . identifier[variables] ). identifier[intersection] (
identifier[set] ( identifier[self] . identifier[nodes] ())):
keyword[raise] identifier[ValueError] ( literal[string] , identifier[cpd] )
keyword[for] identifier[prev_cpd_index] keyword[in] identifier[range] ( identifier[len] ( identifier[self] . identifier[cpds] )):
keyword[if] identifier[self] . identifier[cpds] [ identifier[prev_cpd_index] ]. identifier[variable] == identifier[cpd] . identifier[variable] :
identifier[logging] . identifier[warning] ( literal[string] . identifier[format] ( identifier[var] = identifier[cpd] . identifier[variable] ))
identifier[self] . identifier[cpds] [ identifier[prev_cpd_index] ]= identifier[cpd]
keyword[break]
keyword[else] :
identifier[self] . identifier[cpds] . identifier[append] ( identifier[cpd] ) | def add_cpds(self, *cpds):
"""
Add linear Gaussian CPD (Conditional Probability Distribution)
to the Bayesian Model.
Parameters
----------
cpds : instances of LinearGaussianCPD
List of LinearGaussianCPDs which will be associated with the model
Examples
--------
>>> from pgmpy.models import LinearGaussianBayesianNetwork
>>> from pgmpy.factors.continuous import LinearGaussianCPD
>>> model = LinearGaussianBayesianNetwork([('x1', 'x2'), ('x2', 'x3')])
>>> cpd1 = LinearGaussianCPD('x1', [1], 4)
>>> cpd2 = LinearGaussianCPD('x2', [-5, 0.5], 4, ['x1'])
>>> cpd3 = LinearGaussianCPD('x3', [4, -1], 3, ['x2'])
>>> model.add_cpds(cpd1, cpd2, cpd3)
>>> for cpd in model.cpds:
print(cpd)
P(x1) = N(1; 4)
P(x2| x1) = N(0.5*x1_mu); -5)
P(x3| x2) = N(-1*x2_mu); 4)
"""
for cpd in cpds:
if not isinstance(cpd, LinearGaussianCPD):
raise ValueError('Only LinearGaussianCPD can be added.') # depends on [control=['if'], data=[]]
if set(cpd.variables) - set(cpd.variables).intersection(set(self.nodes())):
raise ValueError('CPD defined on variable not in the model', cpd) # depends on [control=['if'], data=[]]
for prev_cpd_index in range(len(self.cpds)):
if self.cpds[prev_cpd_index].variable == cpd.variable:
logging.warning('Replacing existing CPD for {var}'.format(var=cpd.variable))
self.cpds[prev_cpd_index] = cpd
break # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['prev_cpd_index']]
else:
self.cpds.append(cpd) # depends on [control=['for'], data=['cpd']] |
def main():
    """
    Main method
    """
    # Set up a two-human game and report the outcome of one full play.
    print("Creating a new game...")
    white_player = Human(color.white)
    black_player = Human(color.black)
    result = Game(white_player, black_player).play()
    print("Result is ", result)
constant[
Main method
]
call[name[print], parameter[constant[Creating a new game...]]]
variable[new_game] assign[=] call[name[Game], parameter[call[name[Human], parameter[name[color].white]], call[name[Human], parameter[name[color].black]]]]
variable[result] assign[=] call[name[new_game].play, parameter[]]
call[name[print], parameter[constant[Result is ], name[result]]] | keyword[def] identifier[main] ():
literal[string]
identifier[print] ( literal[string] )
identifier[new_game] = identifier[Game] ( identifier[Human] ( identifier[color] . identifier[white] ), identifier[Human] ( identifier[color] . identifier[black] ))
identifier[result] = identifier[new_game] . identifier[play] ()
identifier[print] ( literal[string] , identifier[result] ) | def main():
"""
Main method
"""
print('Creating a new game...')
new_game = Game(Human(color.white), Human(color.black))
result = new_game.play()
print('Result is ', result) |
def get_level(self, level=2):
    """Get all nodes that are exactly this far away."""
    if level == 1:
        # Direct children are exactly one level away.
        yield from self.children.values()
    else:
        # Recurse one level down from every child.
        for child in self.children.values():
            yield from child.get_level(level - 1)
constant[Get all nodes that are exactly this far away.]
if compare[name[level] equal[==] constant[1]] begin[:]
for taget[name[child]] in starred[call[name[self].children.values, parameter[]]] begin[:]
<ast.Yield object at 0x7da18f810f70> | keyword[def] identifier[get_level] ( identifier[self] , identifier[level] = literal[int] ):
literal[string]
keyword[if] identifier[level] == literal[int] :
keyword[for] identifier[child] keyword[in] identifier[self] . identifier[children] . identifier[values] (): keyword[yield] identifier[child]
keyword[else] :
keyword[for] identifier[child] keyword[in] identifier[self] . identifier[children] . identifier[values] ():
keyword[for] identifier[node] keyword[in] identifier[child] . identifier[get_level] ( identifier[level] - literal[int] ): keyword[yield] identifier[node] | def get_level(self, level=2):
"""Get all nodes that are exactly this far away."""
if level == 1:
for child in self.children.values():
yield child # depends on [control=['for'], data=['child']] # depends on [control=['if'], data=[]]
else:
for child in self.children.values():
for node in child.get_level(level - 1):
yield node # depends on [control=['for'], data=['node']] # depends on [control=['for'], data=['child']] |
def dpod_port_id_port_id(self, **kwargs):
    """Auto Generated Code
    """
    # Build <config><dpod xmlns=...><port-id><port-id>VALUE.
    config = ET.Element("config")
    dpod = ET.SubElement(config, "dpod", xmlns="urn:brocade.com:mgmt:brocade-license")
    outer_port_id = ET.SubElement(dpod, "port-id")
    inner_port_id = ET.SubElement(outer_port_id, "port-id")
    inner_port_id.text = kwargs.pop('port_id')
    # Dispatch through the caller-supplied callback, defaulting to the
    # session's own callback.
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
constant[Auto Generated Code
]
variable[config] assign[=] call[name[ET].Element, parameter[constant[config]]]
variable[dpod] assign[=] call[name[ET].SubElement, parameter[name[config], constant[dpod]]]
variable[port_id] assign[=] call[name[ET].SubElement, parameter[name[dpod], constant[port-id]]]
variable[port_id] assign[=] call[name[ET].SubElement, parameter[name[port_id], constant[port-id]]]
name[port_id].text assign[=] call[name[kwargs].pop, parameter[constant[port_id]]]
variable[callback] assign[=] call[name[kwargs].pop, parameter[constant[callback], name[self]._callback]]
return[call[name[callback], parameter[name[config]]]] | keyword[def] identifier[dpod_port_id_port_id] ( identifier[self] ,** identifier[kwargs] ):
literal[string]
identifier[config] = identifier[ET] . identifier[Element] ( literal[string] )
identifier[dpod] = identifier[ET] . identifier[SubElement] ( identifier[config] , literal[string] , identifier[xmlns] = literal[string] )
identifier[port_id] = identifier[ET] . identifier[SubElement] ( identifier[dpod] , literal[string] )
identifier[port_id] = identifier[ET] . identifier[SubElement] ( identifier[port_id] , literal[string] )
identifier[port_id] . identifier[text] = identifier[kwargs] . identifier[pop] ( literal[string] )
identifier[callback] = identifier[kwargs] . identifier[pop] ( literal[string] , identifier[self] . identifier[_callback] )
keyword[return] identifier[callback] ( identifier[config] ) | def dpod_port_id_port_id(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element('config')
dpod = ET.SubElement(config, 'dpod', xmlns='urn:brocade.com:mgmt:brocade-license')
port_id = ET.SubElement(dpod, 'port-id')
port_id = ET.SubElement(port_id, 'port-id')
port_id.text = kwargs.pop('port_id')
callback = kwargs.pop('callback', self._callback)
return callback(config) |
def _sat(lexer, varname):
    """Return a DIMACS SAT."""
    # Header has the form: "p sat|satx|sate|satex <nvars>".
    _expect_token(lexer, {KW_p})
    fmt_token = _expect_token(lexer, {KW_sat, KW_satx, KW_sate, KW_satex})
    nvars_token = _expect_token(lexer, {IntegerToken})
    return _sat_formula(lexer, varname, fmt_token.value, nvars_token.value)
constant[Return a DIMACS SAT.]
call[name[_expect_token], parameter[name[lexer], <ast.Set object at 0x7da1b0e24250>]]
variable[fmt] assign[=] call[name[_expect_token], parameter[name[lexer], <ast.Set object at 0x7da1b0ed1ab0>]].value
variable[nvars] assign[=] call[name[_expect_token], parameter[name[lexer], <ast.Set object at 0x7da1b0ed16c0>]].value
return[call[name[_sat_formula], parameter[name[lexer], name[varname], name[fmt], name[nvars]]]] | keyword[def] identifier[_sat] ( identifier[lexer] , identifier[varname] ):
literal[string]
identifier[_expect_token] ( identifier[lexer] ,{ identifier[KW_p] })
identifier[fmt] = identifier[_expect_token] ( identifier[lexer] ,{ identifier[KW_sat] , identifier[KW_satx] , identifier[KW_sate] , identifier[KW_satex] }). identifier[value]
identifier[nvars] = identifier[_expect_token] ( identifier[lexer] ,{ identifier[IntegerToken] }). identifier[value]
keyword[return] identifier[_sat_formula] ( identifier[lexer] , identifier[varname] , identifier[fmt] , identifier[nvars] ) | def _sat(lexer, varname):
"""Return a DIMACS SAT."""
_expect_token(lexer, {KW_p})
fmt = _expect_token(lexer, {KW_sat, KW_satx, KW_sate, KW_satex}).value
nvars = _expect_token(lexer, {IntegerToken}).value
return _sat_formula(lexer, varname, fmt, nvars) |
def decensor(post_info: dict, site_url: str = DEFAULT_SITE) -> dict:
    "Decensor a post info dict from Danbooru API if needed."
    # Posts that already expose their md5 are not censored.
    if "md5" in post_info:
        return post_info
    return fill_missing_info(post_info, site_url)
constant[Decensor a post info dict from Danbooru API if needed.]
return[<ast.IfExp object at 0x7da18f09e0e0>] | keyword[def] identifier[decensor] ( identifier[post_info] : identifier[dict] , identifier[site_url] : identifier[str] = identifier[DEFAULT_SITE] )-> identifier[dict] :
literal[string]
keyword[return] identifier[post_info] keyword[if] literal[string] keyword[in] identifier[post_info] keyword[else] identifier[fill_missing_info] ( identifier[post_info] , identifier[site_url] ) | def decensor(post_info: dict, site_url: str=DEFAULT_SITE) -> dict:
"""Decensor a post info dict from Danbooru API if needed."""
return post_info if 'md5' in post_info else fill_missing_info(post_info, site_url) |
def dequeue(self, k):
    """Outputs *k* draws from the multinomial distribution.

    Draws are served from the pre-generated buffer ``self.A`` of
    capacity ``self.M``; ``self.j`` tracks the current read position.
    When the request runs past the end of the buffer, the tail is
    drained, ``self.enqueue()`` refills the buffer, and the remaining
    draws are taken from the new front.

    Raises ValueError if ``k`` exceeds the capacity ``self.M``.
    """
    if self.j + k <= self.M:
        # Fast path: the whole request fits in the remaining buffer.
        out = self.A[self.j:(self.j + k)]
        self.j += k
    elif k <= self.M:
        # Wrap-around path: stitch tail + refilled head together.
        out = np.empty(k, 'int')
        nextra = self.j + k - self.M
        out[:(k - nextra)] = self.A[self.j:]
        self.enqueue()
        out[(k - nextra):] = self.A[:nextra]
        self.j = nextra
    else:
        # BUG FIX: the original backslash-continued literal embedded a
        # newline and source indentation inside the error message.
        raise ValueError('MultinomialQueue: k must be <= M (the max '
                         'capacity of the queue)')
    return out
constant[Outputs *k* draws from the multinomial distribution.]
if compare[binary_operation[name[self].j + name[k]] less_or_equal[<=] name[self].M] begin[:]
variable[out] assign[=] call[name[self].A][<ast.Slice object at 0x7da18bccb3d0>]
<ast.AugAssign object at 0x7da18bccace0>
return[name[out]] | keyword[def] identifier[dequeue] ( identifier[self] , identifier[k] ):
literal[string]
keyword[if] identifier[self] . identifier[j] + identifier[k] <= identifier[self] . identifier[M] :
identifier[out] = identifier[self] . identifier[A] [ identifier[self] . identifier[j] :( identifier[self] . identifier[j] + identifier[k] )]
identifier[self] . identifier[j] += identifier[k]
keyword[elif] identifier[k] <= identifier[self] . identifier[M] :
identifier[out] = identifier[np] . identifier[empty] ( identifier[k] , literal[string] )
identifier[nextra] = identifier[self] . identifier[j] + identifier[k] - identifier[self] . identifier[M]
identifier[out] [:( identifier[k] - identifier[nextra] )]= identifier[self] . identifier[A] [ identifier[self] . identifier[j] :]
identifier[self] . identifier[enqueue] ()
identifier[out] [( identifier[k] - identifier[nextra] ):]= identifier[self] . identifier[A] [: identifier[nextra] ]
identifier[self] . identifier[j] = identifier[nextra]
keyword[else] :
keyword[raise] identifier[ValueError] ( literal[string] )
keyword[return] identifier[out] | def dequeue(self, k):
"""Outputs *k* draws from the multinomial distribution."""
if self.j + k <= self.M:
out = self.A[self.j:self.j + k]
self.j += k # depends on [control=['if'], data=[]]
elif k <= self.M:
out = np.empty(k, 'int')
nextra = self.j + k - self.M
out[:k - nextra] = self.A[self.j:]
self.enqueue()
out[k - nextra:] = self.A[:nextra]
self.j = nextra # depends on [control=['if'], data=['k']]
else:
raise ValueError('MultinomialQueue: k must be <= M (the max capacity of the queue)')
return out |
def Page_setDeviceOrientationOverride(self, alpha, beta, gamma):
"""
Function path: Page.setDeviceOrientationOverride
Domain: Page
Method name: setDeviceOrientationOverride
WARNING: This function is marked 'Experimental'!
Parameters:
Required arguments:
'alpha' (type: number) -> Mock alpha
'beta' (type: number) -> Mock beta
'gamma' (type: number) -> Mock gamma
No return value.
Description: Overrides the Device Orientation.
"""
assert isinstance(alpha, (float, int)
), "Argument 'alpha' must be of type '['float', 'int']'. Received type: '%s'" % type(
alpha)
assert isinstance(beta, (float, int)
), "Argument 'beta' must be of type '['float', 'int']'. Received type: '%s'" % type(
beta)
assert isinstance(gamma, (float, int)
), "Argument 'gamma' must be of type '['float', 'int']'. Received type: '%s'" % type(
gamma)
subdom_funcs = self.synchronous_command('Page.setDeviceOrientationOverride',
alpha=alpha, beta=beta, gamma=gamma)
return subdom_funcs | def function[Page_setDeviceOrientationOverride, parameter[self, alpha, beta, gamma]]:
constant[
Function path: Page.setDeviceOrientationOverride
Domain: Page
Method name: setDeviceOrientationOverride
WARNING: This function is marked 'Experimental'!
Parameters:
Required arguments:
'alpha' (type: number) -> Mock alpha
'beta' (type: number) -> Mock beta
'gamma' (type: number) -> Mock gamma
No return value.
Description: Overrides the Device Orientation.
]
assert[call[name[isinstance], parameter[name[alpha], tuple[[<ast.Name object at 0x7da1b1106ad0>, <ast.Name object at 0x7da1b1106dd0>]]]]]
assert[call[name[isinstance], parameter[name[beta], tuple[[<ast.Name object at 0x7da1b1106da0>, <ast.Name object at 0x7da1b1105e40>]]]]]
assert[call[name[isinstance], parameter[name[gamma], tuple[[<ast.Name object at 0x7da1b1106770>, <ast.Name object at 0x7da1b1105210>]]]]]
variable[subdom_funcs] assign[=] call[name[self].synchronous_command, parameter[constant[Page.setDeviceOrientationOverride]]]
return[name[subdom_funcs]] | keyword[def] identifier[Page_setDeviceOrientationOverride] ( identifier[self] , identifier[alpha] , identifier[beta] , identifier[gamma] ):
literal[string]
keyword[assert] identifier[isinstance] ( identifier[alpha] ,( identifier[float] , identifier[int] )
), literal[string] % identifier[type] (
identifier[alpha] )
keyword[assert] identifier[isinstance] ( identifier[beta] ,( identifier[float] , identifier[int] )
), literal[string] % identifier[type] (
identifier[beta] )
keyword[assert] identifier[isinstance] ( identifier[gamma] ,( identifier[float] , identifier[int] )
), literal[string] % identifier[type] (
identifier[gamma] )
identifier[subdom_funcs] = identifier[self] . identifier[synchronous_command] ( literal[string] ,
identifier[alpha] = identifier[alpha] , identifier[beta] = identifier[beta] , identifier[gamma] = identifier[gamma] )
keyword[return] identifier[subdom_funcs] | def Page_setDeviceOrientationOverride(self, alpha, beta, gamma):
"""
Function path: Page.setDeviceOrientationOverride
Domain: Page
Method name: setDeviceOrientationOverride
WARNING: This function is marked 'Experimental'!
Parameters:
Required arguments:
'alpha' (type: number) -> Mock alpha
'beta' (type: number) -> Mock beta
'gamma' (type: number) -> Mock gamma
No return value.
Description: Overrides the Device Orientation.
"""
assert isinstance(alpha, (float, int)), "Argument 'alpha' must be of type '['float', 'int']'. Received type: '%s'" % type(alpha)
assert isinstance(beta, (float, int)), "Argument 'beta' must be of type '['float', 'int']'. Received type: '%s'" % type(beta)
assert isinstance(gamma, (float, int)), "Argument 'gamma' must be of type '['float', 'int']'. Received type: '%s'" % type(gamma)
subdom_funcs = self.synchronous_command('Page.setDeviceOrientationOverride', alpha=alpha, beta=beta, gamma=gamma)
return subdom_funcs |
def OpenEnumerateInstancePaths(self, ClassName, namespace=None,
FilterQueryLanguage=None, FilterQuery=None,
OperationTimeout=None, ContinueOnError=None,
MaxObjectCount=None, **extra):
# pylint: disable=invalid-name
"""
Open an enumeration session to enumerate the instance paths of
instances of a class (including instances of its subclasses) in
a namespace.
*New in pywbem 0.9.*
This method performs the OpenEnumerateInstancePaths operation
(see :term:`DSP0200`). See :ref:`WBEM operations` for a list of all
methods performing such operations.
If the operation succeeds, this method returns status on the
enumeration session and optionally instance paths.
Otherwise, this method raises an exception.
Use the :meth:`~pywbem.WBEMConnection.PullInstancePaths` method to
retrieve the next set of instance paths or the
:meth:`~pywbem.WBEMConnection.CloseEnumeration` method to close the
enumeration session before it is exhausted.
Parameters:
ClassName (:term:`string` or :class:`~pywbem.CIMClassName`):
Name of the class to be enumerated (case independent).
If specified as a :class:`~pywbem.CIMClassName` object, its
`namespace` attribute will be used as a default namespace as
described for the `namespace` parameter, and its `host` attribute
will be ignored.
namespace (:term:`string`):
Name of the CIM namespace to be used (case independent).
Leading and trailing slash characters will be stripped. The lexical
case will be preserved.
If `None`, the namespace of the `ClassName` parameter will be used,
if specified as a :class:`~pywbem.CIMClassName` object. If that is
also `None`, the default namespace of the connection will be used.
FilterQueryLanguage (:term:`string`):
The name of the filter query language used for the `FilterQuery`
parameter. The DMTF-defined Filter Query Language (see
:term:`DSP0212`) is specified as "DMTF:FQL".
Not all WBEM servers support filtering for this operation because
it returns instance paths and the act of the server filtering
requires that it generate instances just for that purpose and then
discard them.
FilterQuery (:term:`string`):
The filter query in the query language defined by the
`FilterQueryLanguage` parameter.
OperationTimeout (:class:`~pywbem.Uint32`):
Minimum time in seconds the WBEM Server shall maintain an open
enumeration session after a previous Open or Pull request is
sent to the client. Once this timeout time has expired, the
WBEM server may close the enumeration session.
* If not `None`, this parameter is sent to the WBEM server as the
proposed timeout for the enumeration session. A value of 0
indicates that the server is expected to never time out. The
server may reject the proposed value, causing a
:class:`~pywbem.CIMError` to be raised with status code
:attr:`~pywbem.CIM_ERR_INVALID_OPERATION_TIMEOUT`.
* If `None`, this parameter is not passed to the WBEM server, and
causes the server-implemented default timeout to be used.
ContinueOnError (:class:`py:bool`):
Indicates to the WBEM server to continue sending responses
after an error response has been sent.
* If `True`, the server is to continue sending responses after
sending an error response. Not all servers support continuation
on error; a server that does not support it must send an error
response if `True` was specified, causing
:class:`~pywbem.CIMError` to be raised with status code
:attr:`~pywbem.CIM_ERR_CONTINUATION_ON_ERROR_NOT_SUPPORTED`.
* If `False`, the server is requested to close the enumeration after
sending an error response.
* If `None`, this parameter is not passed to the WBEM server, and
causes the server-implemented default behaviour to be used.
:term:`DSP0200` defines that the server-implemented default is
`False`.
MaxObjectCount (:class:`~pywbem.Uint32`)
Maximum number of instances the WBEM server may return
for this request.
* If positive, the WBEM server is to return no more than the
specified number of instances.
* If zero, the WBEM server is to return no instances. This may
be used by a client to leave the handling of any returned
instances to a loop of Pull operations.
* If `None`, this parameter is not passed to the WBEM server, and
causes the server-implemented default behaviour to be used.
:term:`DSP0200` defines that the server-implemented default is
to return zero instances.
**extra :
Additional keyword arguments are passed as additional operation
parameters to the WBEM server.
Note that :term:`DSP0200` does not define any additional parameters
for this operation.
Returns:
A :func:`~py:collections.namedtuple` object containing the following
named items:
* **paths** (:class:`py:list` of :class:`~pywbem.CIMInstanceName`):
Representations of the retrieved instance paths, with their
attributes set as follows:
* `classname`: Name of the creation class of the instance.
* `keybindings`: Keybindings of the instance.
* `namespace`: Name of the CIM namespace containing the instance.
* `host`: Host and optionally port of the WBEM server containing
the CIM namespace.
* **eos** (:class:`py:bool`):
Indicates whether the enumeration session is exhausted after
this operation:
- If `True`, the enumeration session is exhausted, and the
server has closed the enumeration session.
- If `False`, the enumeration session is not exhausted and the
`context` item is the context object for the next operation on
the enumeration session.
* **context** (:func:`py:tuple` of server_context, namespace):
A context object identifying the open enumeration session,
including its current enumeration state, and the namespace. This
object must be supplied with the next pull or close operation for
this enumeration session.
The tuple items are:
* server_context (:term:`string`):
Enumeration context string returned by the server if
the session is not exhausted, or `None` otherwise. This string
is opaque for the client.
* namespace (:term:`string`):
Name of the CIM namespace that was used for this operation.
NOTE: This inner tuple hides the need for a CIM namespace
on subsequent operations in the enumeration session. CIM
operations always require target namespace, but it never
makes sense to specify a different one in subsequent
operations on the same enumeration session.
Raises:
Exceptions described in :class:`~pywbem.WBEMConnection`.
Example::
max_object_count = 100
rslt_tuple = conn.OpenEnumerateInstancePaths(
'CIM_Blah', MaxObjectCount=max_object_count)
paths = rslt_tuple.paths
while not rslt_tuple.eos:
rslt_tuple = conn.PullInstancePaths(rslt_tupl.context,
max_object_count)
paths.extend(rslt_tupl.paths)
for path in paths:
print('path {0}'.format(path))
"""
exc = None
result_tuple = None
method_name = 'OpenEnumerateInstancePaths'
if self._operation_recorders:
self.operation_recorder_reset(pull_op=True)
self.operation_recorder_stage_pywbem_args(
method=method_name,
ClassName=ClassName,
namespace=namespace,
FilterQueryLanguage=FilterQueryLanguage,
FilterQuery=FilterQuery,
OperationTimeout=OperationTimeout,
ContinueOnError=ContinueOnError,
MaxObjectCount=MaxObjectCount,
**extra)
try:
stats = self.statistics.start_timer(method_name)
if namespace is None and isinstance(ClassName, CIMClassName):
namespace = ClassName.namespace
namespace = self._iparam_namespace_from_namespace(namespace)
classname = self._iparam_classname(ClassName, 'ClassName')
result = self._imethodcall(
method_name,
namespace,
ClassName=classname,
FilterQueryLanguage=FilterQueryLanguage,
FilterQuery=FilterQuery,
OperationTimeout=OperationTimeout,
ContinueOnError=ContinueOnError,
MaxObjectCount=MaxObjectCount,
has_out_params=True,
**extra)
result_tuple = pull_path_result_tuple(
*self._get_rslt_params(result, namespace))
return result_tuple
except (CIMXMLParseError, XMLParseError) as exce:
exce.request_data = self.last_raw_request
exce.response_data = self.last_raw_reply
exc = exce
raise
except Exception as exce:
exc = exce
raise
finally:
self._last_operation_time = stats.stop_timer(
self.last_request_len, self.last_reply_len,
self.last_server_response_time, exc)
if self._operation_recorders:
self.operation_recorder_stage_result(result_tuple, exc) | def function[OpenEnumerateInstancePaths, parameter[self, ClassName, namespace, FilterQueryLanguage, FilterQuery, OperationTimeout, ContinueOnError, MaxObjectCount]]:
constant[
Open an enumeration session to enumerate the instance paths of
instances of a class (including instances of its subclasses) in
a namespace.
*New in pywbem 0.9.*
This method performs the OpenEnumerateInstancePaths operation
(see :term:`DSP0200`). See :ref:`WBEM operations` for a list of all
methods performing such operations.
If the operation succeeds, this method returns status on the
enumeration session and optionally instance paths.
Otherwise, this method raises an exception.
Use the :meth:`~pywbem.WBEMConnection.PullInstancePaths` method to
retrieve the next set of instance paths or the
:meth:`~pywbem.WBEMConnection.CloseEnumeration` method to close the
enumeration session before it is exhausted.
Parameters:
ClassName (:term:`string` or :class:`~pywbem.CIMClassName`):
Name of the class to be enumerated (case independent).
If specified as a :class:`~pywbem.CIMClassName` object, its
`namespace` attribute will be used as a default namespace as
described for the `namespace` parameter, and its `host` attribute
will be ignored.
namespace (:term:`string`):
Name of the CIM namespace to be used (case independent).
Leading and trailing slash characters will be stripped. The lexical
case will be preserved.
If `None`, the namespace of the `ClassName` parameter will be used,
if specified as a :class:`~pywbem.CIMClassName` object. If that is
also `None`, the default namespace of the connection will be used.
FilterQueryLanguage (:term:`string`):
The name of the filter query language used for the `FilterQuery`
parameter. The DMTF-defined Filter Query Language (see
:term:`DSP0212`) is specified as "DMTF:FQL".
Not all WBEM servers support filtering for this operation because
it returns instance paths and the act of the server filtering
requires that it generate instances just for that purpose and then
discard them.
FilterQuery (:term:`string`):
The filter query in the query language defined by the
`FilterQueryLanguage` parameter.
OperationTimeout (:class:`~pywbem.Uint32`):
Minimum time in seconds the WBEM Server shall maintain an open
enumeration session after a previous Open or Pull request is
sent to the client. Once this timeout time has expired, the
WBEM server may close the enumeration session.
* If not `None`, this parameter is sent to the WBEM server as the
proposed timeout for the enumeration session. A value of 0
indicates that the server is expected to never time out. The
server may reject the proposed value, causing a
:class:`~pywbem.CIMError` to be raised with status code
:attr:`~pywbem.CIM_ERR_INVALID_OPERATION_TIMEOUT`.
* If `None`, this parameter is not passed to the WBEM server, and
causes the server-implemented default timeout to be used.
ContinueOnError (:class:`py:bool`):
Indicates to the WBEM server to continue sending responses
after an error response has been sent.
* If `True`, the server is to continue sending responses after
sending an error response. Not all servers support continuation
on error; a server that does not support it must send an error
response if `True` was specified, causing
:class:`~pywbem.CIMError` to be raised with status code
:attr:`~pywbem.CIM_ERR_CONTINUATION_ON_ERROR_NOT_SUPPORTED`.
* If `False`, the server is requested to close the enumeration after
sending an error response.
* If `None`, this parameter is not passed to the WBEM server, and
causes the server-implemented default behaviour to be used.
:term:`DSP0200` defines that the server-implemented default is
`False`.
MaxObjectCount (:class:`~pywbem.Uint32`)
Maximum number of instances the WBEM server may return
for this request.
* If positive, the WBEM server is to return no more than the
specified number of instances.
* If zero, the WBEM server is to return no instances. This may
be used by a client to leave the handling of any returned
instances to a loop of Pull operations.
* If `None`, this parameter is not passed to the WBEM server, and
causes the server-implemented default behaviour to be used.
:term:`DSP0200` defines that the server-implemented default is
to return zero instances.
**extra :
Additional keyword arguments are passed as additional operation
parameters to the WBEM server.
Note that :term:`DSP0200` does not define any additional parameters
for this operation.
Returns:
A :func:`~py:collections.namedtuple` object containing the following
named items:
* **paths** (:class:`py:list` of :class:`~pywbem.CIMInstanceName`):
Representations of the retrieved instance paths, with their
attributes set as follows:
* `classname`: Name of the creation class of the instance.
* `keybindings`: Keybindings of the instance.
* `namespace`: Name of the CIM namespace containing the instance.
* `host`: Host and optionally port of the WBEM server containing
the CIM namespace.
* **eos** (:class:`py:bool`):
Indicates whether the enumeration session is exhausted after
this operation:
- If `True`, the enumeration session is exhausted, and the
server has closed the enumeration session.
- If `False`, the enumeration session is not exhausted and the
`context` item is the context object for the next operation on
the enumeration session.
* **context** (:func:`py:tuple` of server_context, namespace):
A context object identifying the open enumeration session,
including its current enumeration state, and the namespace. This
object must be supplied with the next pull or close operation for
this enumeration session.
The tuple items are:
* server_context (:term:`string`):
Enumeration context string returned by the server if
the session is not exhausted, or `None` otherwise. This string
is opaque for the client.
* namespace (:term:`string`):
Name of the CIM namespace that was used for this operation.
NOTE: This inner tuple hides the need for a CIM namespace
on subsequent operations in the enumeration session. CIM
operations always require target namespace, but it never
makes sense to specify a different one in subsequent
operations on the same enumeration session.
Raises:
Exceptions described in :class:`~pywbem.WBEMConnection`.
Example::
max_object_count = 100
rslt_tuple = conn.OpenEnumerateInstancePaths(
'CIM_Blah', MaxObjectCount=max_object_count)
paths = rslt_tuple.paths
while not rslt_tuple.eos:
rslt_tuple = conn.PullInstancePaths(rslt_tupl.context,
max_object_count)
paths.extend(rslt_tupl.paths)
for path in paths:
print('path {0}'.format(path))
]
variable[exc] assign[=] constant[None]
variable[result_tuple] assign[=] constant[None]
variable[method_name] assign[=] constant[OpenEnumerateInstancePaths]
if name[self]._operation_recorders begin[:]
call[name[self].operation_recorder_reset, parameter[]]
call[name[self].operation_recorder_stage_pywbem_args, parameter[]]
<ast.Try object at 0x7da20e9b2c80> | keyword[def] identifier[OpenEnumerateInstancePaths] ( identifier[self] , identifier[ClassName] , identifier[namespace] = keyword[None] ,
identifier[FilterQueryLanguage] = keyword[None] , identifier[FilterQuery] = keyword[None] ,
identifier[OperationTimeout] = keyword[None] , identifier[ContinueOnError] = keyword[None] ,
identifier[MaxObjectCount] = keyword[None] ,** identifier[extra] ):
literal[string]
identifier[exc] = keyword[None]
identifier[result_tuple] = keyword[None]
identifier[method_name] = literal[string]
keyword[if] identifier[self] . identifier[_operation_recorders] :
identifier[self] . identifier[operation_recorder_reset] ( identifier[pull_op] = keyword[True] )
identifier[self] . identifier[operation_recorder_stage_pywbem_args] (
identifier[method] = identifier[method_name] ,
identifier[ClassName] = identifier[ClassName] ,
identifier[namespace] = identifier[namespace] ,
identifier[FilterQueryLanguage] = identifier[FilterQueryLanguage] ,
identifier[FilterQuery] = identifier[FilterQuery] ,
identifier[OperationTimeout] = identifier[OperationTimeout] ,
identifier[ContinueOnError] = identifier[ContinueOnError] ,
identifier[MaxObjectCount] = identifier[MaxObjectCount] ,
** identifier[extra] )
keyword[try] :
identifier[stats] = identifier[self] . identifier[statistics] . identifier[start_timer] ( identifier[method_name] )
keyword[if] identifier[namespace] keyword[is] keyword[None] keyword[and] identifier[isinstance] ( identifier[ClassName] , identifier[CIMClassName] ):
identifier[namespace] = identifier[ClassName] . identifier[namespace]
identifier[namespace] = identifier[self] . identifier[_iparam_namespace_from_namespace] ( identifier[namespace] )
identifier[classname] = identifier[self] . identifier[_iparam_classname] ( identifier[ClassName] , literal[string] )
identifier[result] = identifier[self] . identifier[_imethodcall] (
identifier[method_name] ,
identifier[namespace] ,
identifier[ClassName] = identifier[classname] ,
identifier[FilterQueryLanguage] = identifier[FilterQueryLanguage] ,
identifier[FilterQuery] = identifier[FilterQuery] ,
identifier[OperationTimeout] = identifier[OperationTimeout] ,
identifier[ContinueOnError] = identifier[ContinueOnError] ,
identifier[MaxObjectCount] = identifier[MaxObjectCount] ,
identifier[has_out_params] = keyword[True] ,
** identifier[extra] )
identifier[result_tuple] = identifier[pull_path_result_tuple] (
* identifier[self] . identifier[_get_rslt_params] ( identifier[result] , identifier[namespace] ))
keyword[return] identifier[result_tuple]
keyword[except] ( identifier[CIMXMLParseError] , identifier[XMLParseError] ) keyword[as] identifier[exce] :
identifier[exce] . identifier[request_data] = identifier[self] . identifier[last_raw_request]
identifier[exce] . identifier[response_data] = identifier[self] . identifier[last_raw_reply]
identifier[exc] = identifier[exce]
keyword[raise]
keyword[except] identifier[Exception] keyword[as] identifier[exce] :
identifier[exc] = identifier[exce]
keyword[raise]
keyword[finally] :
identifier[self] . identifier[_last_operation_time] = identifier[stats] . identifier[stop_timer] (
identifier[self] . identifier[last_request_len] , identifier[self] . identifier[last_reply_len] ,
identifier[self] . identifier[last_server_response_time] , identifier[exc] )
keyword[if] identifier[self] . identifier[_operation_recorders] :
identifier[self] . identifier[operation_recorder_stage_result] ( identifier[result_tuple] , identifier[exc] ) | def OpenEnumerateInstancePaths(self, ClassName, namespace=None, FilterQueryLanguage=None, FilterQuery=None, OperationTimeout=None, ContinueOnError=None, MaxObjectCount=None, **extra):
# pylint: disable=invalid-name
'\n Open an enumeration session to enumerate the instance paths of\n instances of a class (including instances of its subclasses) in\n a namespace.\n\n *New in pywbem 0.9.*\n\n This method performs the OpenEnumerateInstancePaths operation\n (see :term:`DSP0200`). See :ref:`WBEM operations` for a list of all\n methods performing such operations.\n\n If the operation succeeds, this method returns status on the\n enumeration session and optionally instance paths.\n Otherwise, this method raises an exception.\n\n Use the :meth:`~pywbem.WBEMConnection.PullInstancePaths` method to\n retrieve the next set of instance paths or the\n :meth:`~pywbem.WBEMConnection.CloseEnumeration` method to close the\n enumeration session before it is exhausted.\n\n Parameters:\n\n ClassName (:term:`string` or :class:`~pywbem.CIMClassName`):\n Name of the class to be enumerated (case independent).\n If specified as a :class:`~pywbem.CIMClassName` object, its\n `namespace` attribute will be used as a default namespace as\n described for the `namespace` parameter, and its `host` attribute\n will be ignored.\n\n namespace (:term:`string`):\n Name of the CIM namespace to be used (case independent).\n\n Leading and trailing slash characters will be stripped. The lexical\n case will be preserved.\n\n If `None`, the namespace of the `ClassName` parameter will be used,\n if specified as a :class:`~pywbem.CIMClassName` object. If that is\n also `None`, the default namespace of the connection will be used.\n\n FilterQueryLanguage (:term:`string`):\n The name of the filter query language used for the `FilterQuery`\n parameter. 
The DMTF-defined Filter Query Language (see\n :term:`DSP0212`) is specified as "DMTF:FQL".\n\n Not all WBEM servers support filtering for this operation because\n it returns instance paths and the act of the server filtering\n requires that it generate instances just for that purpose and then\n discard them.\n\n FilterQuery (:term:`string`):\n The filter query in the query language defined by the\n `FilterQueryLanguage` parameter.\n\n OperationTimeout (:class:`~pywbem.Uint32`):\n Minimum time in seconds the WBEM Server shall maintain an open\n enumeration session after a previous Open or Pull request is\n sent to the client. Once this timeout time has expired, the\n WBEM server may close the enumeration session.\n\n * If not `None`, this parameter is sent to the WBEM server as the\n proposed timeout for the enumeration session. A value of 0\n indicates that the server is expected to never time out. The\n server may reject the proposed value, causing a\n :class:`~pywbem.CIMError` to be raised with status code\n :attr:`~pywbem.CIM_ERR_INVALID_OPERATION_TIMEOUT`.\n * If `None`, this parameter is not passed to the WBEM server, and\n causes the server-implemented default timeout to be used.\n\n ContinueOnError (:class:`py:bool`):\n Indicates to the WBEM server to continue sending responses\n after an error response has been sent.\n\n * If `True`, the server is to continue sending responses after\n sending an error response. 
Not all servers support continuation\n on error; a server that does not support it must send an error\n response if `True` was specified, causing\n :class:`~pywbem.CIMError` to be raised with status code\n :attr:`~pywbem.CIM_ERR_CONTINUATION_ON_ERROR_NOT_SUPPORTED`.\n * If `False`, the server is requested to close the enumeration after\n sending an error response.\n * If `None`, this parameter is not passed to the WBEM server, and\n causes the server-implemented default behaviour to be used.\n :term:`DSP0200` defines that the server-implemented default is\n `False`.\n\n MaxObjectCount (:class:`~pywbem.Uint32`)\n Maximum number of instances the WBEM server may return\n for this request.\n\n * If positive, the WBEM server is to return no more than the\n specified number of instances.\n * If zero, the WBEM server is to return no instances. This may\n be used by a client to leave the handling of any returned\n instances to a loop of Pull operations.\n * If `None`, this parameter is not passed to the WBEM server, and\n causes the server-implemented default behaviour to be used.\n :term:`DSP0200` defines that the server-implemented default is\n to return zero instances.\n\n **extra :\n Additional keyword arguments are passed as additional operation\n parameters to the WBEM server.\n Note that :term:`DSP0200` does not define any additional parameters\n for this operation.\n\n Returns:\n\n A :func:`~py:collections.namedtuple` object containing the following\n named items:\n\n * **paths** (:class:`py:list` of :class:`~pywbem.CIMInstanceName`):\n Representations of the retrieved instance paths, with their\n attributes set as follows:\n\n * `classname`: Name of the creation class of the instance.\n * `keybindings`: Keybindings of the instance.\n * `namespace`: Name of the CIM namespace containing the instance.\n * `host`: Host and optionally port of the WBEM server containing\n the CIM namespace.\n\n * **eos** (:class:`py:bool`):\n Indicates whether the enumeration session is 
exhausted after\n this operation:\n\n - If `True`, the enumeration session is exhausted, and the\n server has closed the enumeration session.\n - If `False`, the enumeration session is not exhausted and the\n `context` item is the context object for the next operation on\n the enumeration session.\n\n * **context** (:func:`py:tuple` of server_context, namespace):\n A context object identifying the open enumeration session,\n including its current enumeration state, and the namespace. This\n object must be supplied with the next pull or close operation for\n this enumeration session.\n\n The tuple items are:\n\n * server_context (:term:`string`):\n Enumeration context string returned by the server if\n the session is not exhausted, or `None` otherwise. This string\n is opaque for the client.\n * namespace (:term:`string`):\n Name of the CIM namespace that was used for this operation.\n\n NOTE: This inner tuple hides the need for a CIM namespace\n on subsequent operations in the enumeration session. CIM\n operations always require target namespace, but it never\n makes sense to specify a different one in subsequent\n operations on the same enumeration session.\n\n Raises:\n\n Exceptions described in :class:`~pywbem.WBEMConnection`.\n\n Example::\n\n max_object_count = 100\n rslt_tuple = conn.OpenEnumerateInstancePaths(\n \'CIM_Blah\', MaxObjectCount=max_object_count)\n paths = rslt_tuple.paths\n while not rslt_tuple.eos:\n rslt_tuple = conn.PullInstancePaths(rslt_tupl.context,\n max_object_count)\n paths.extend(rslt_tupl.paths)\n for path in paths:\n print(\'path {0}\'.format(path))\n '
exc = None
result_tuple = None
method_name = 'OpenEnumerateInstancePaths'
if self._operation_recorders:
self.operation_recorder_reset(pull_op=True)
self.operation_recorder_stage_pywbem_args(method=method_name, ClassName=ClassName, namespace=namespace, FilterQueryLanguage=FilterQueryLanguage, FilterQuery=FilterQuery, OperationTimeout=OperationTimeout, ContinueOnError=ContinueOnError, MaxObjectCount=MaxObjectCount, **extra) # depends on [control=['if'], data=[]]
try:
stats = self.statistics.start_timer(method_name)
if namespace is None and isinstance(ClassName, CIMClassName):
namespace = ClassName.namespace # depends on [control=['if'], data=[]]
namespace = self._iparam_namespace_from_namespace(namespace)
classname = self._iparam_classname(ClassName, 'ClassName')
result = self._imethodcall(method_name, namespace, ClassName=classname, FilterQueryLanguage=FilterQueryLanguage, FilterQuery=FilterQuery, OperationTimeout=OperationTimeout, ContinueOnError=ContinueOnError, MaxObjectCount=MaxObjectCount, has_out_params=True, **extra)
result_tuple = pull_path_result_tuple(*self._get_rslt_params(result, namespace))
return result_tuple # depends on [control=['try'], data=[]]
except (CIMXMLParseError, XMLParseError) as exce:
exce.request_data = self.last_raw_request
exce.response_data = self.last_raw_reply
exc = exce
raise # depends on [control=['except'], data=['exce']]
except Exception as exce:
exc = exce
raise # depends on [control=['except'], data=['exce']]
finally:
self._last_operation_time = stats.stop_timer(self.last_request_len, self.last_reply_len, self.last_server_response_time, exc)
if self._operation_recorders:
self.operation_recorder_stage_result(result_tuple, exc) # depends on [control=['if'], data=[]] |
def ready(self, count):
"""Indicate you are ready to receive ``count`` messages."""
self.ready_count = count
self.send(nsq.ready(count)) | def function[ready, parameter[self, count]]:
constant[Indicate you are ready to receive ``count`` messages.]
name[self].ready_count assign[=] name[count]
call[name[self].send, parameter[call[name[nsq].ready, parameter[name[count]]]]] | keyword[def] identifier[ready] ( identifier[self] , identifier[count] ):
literal[string]
identifier[self] . identifier[ready_count] = identifier[count]
identifier[self] . identifier[send] ( identifier[nsq] . identifier[ready] ( identifier[count] )) | def ready(self, count):
"""Indicate you are ready to receive ``count`` messages."""
self.ready_count = count
self.send(nsq.ready(count)) |
def find_links_or_emphs(text, root_node):
    """Find links/images or emphasis from text.

    Scans ``text`` left to right, skipping backslash-escaped characters
    and inline code spans, collecting emphasis delimiters (``*``/``_``)
    and resolving links/images when a closing ``]`` is seen.

    :param text: the original text.
    :param root_node: a reference to the root node of the AST.
    :returns: an iterable of match objects.
    """
    delimiters_re = re.compile(r'(?:!?\[|\*+|_+)')
    # Inline code span: a backtick run, content, then a matching run
    # (backreference \1), with lookarounds preventing longer runs.
    code_pattern = re.compile(r'(?<!`)(`+)(?!`)([\s\S]+?)(?<!`)\1(?!`)')
    i = 0
    delimiters = []
    escape = False
    matches = []
    while i < len(text):
        if escape:
            # Previous char was a backslash: current char is literal.
            escape = False
            i += 1
            continue
        if text[i] == '\\':
            escape = True
            i += 1
            continue
        # Match the code-span pattern once and reuse the result (the
        # original matched twice: once to test, once for .end()).
        code_match = code_pattern.match(text, i)
        if code_match:
            i = code_match.end()
        elif text[i] == ']':
            node = look_for_image_or_link(text, delimiters, i, root_node,
                                          matches)
            if node:
                i = node.end()
                matches.append(node)
            else:
                i += 1
        else:
            m = delimiters_re.match(text, i)
            if m:
                delimiters.append(Delimiter(m, text))
                i = m.end()
            else:
                i += 1
    process_emphasis(text, delimiters, None, matches)
    return matches
constant[Fink links/images or emphasis from text.
:param text: the original text.
:param root_node: a reference to the root node of the AST.
:returns: an iterable of match object.
]
variable[delimiters_re] assign[=] call[name[re].compile, parameter[constant[(?:!?\[|\*+|_+)]]]
variable[i] assign[=] constant[0]
variable[delimiters] assign[=] list[[]]
variable[escape] assign[=] constant[False]
variable[matches] assign[=] list[[]]
variable[code_pattern] assign[=] call[name[re].compile, parameter[constant[(?<!`)(`+)(?!`)([\s\S]+?)(?<!`)\1(?!`)]]]
while compare[name[i] less[<] call[name[len], parameter[name[text]]]] begin[:]
if name[escape] begin[:]
variable[escape] assign[=] constant[False]
<ast.AugAssign object at 0x7da2041d95d0>
call[name[process_emphasis], parameter[name[text], name[delimiters], constant[None], name[matches]]]
return[name[matches]] | keyword[def] identifier[find_links_or_emphs] ( identifier[text] , identifier[root_node] ):
literal[string]
identifier[delimiters_re] = identifier[re] . identifier[compile] ( literal[string] )
identifier[i] = literal[int]
identifier[delimiters] =[]
identifier[escape] = keyword[False]
identifier[matches] =[]
identifier[code_pattern] = identifier[re] . identifier[compile] ( literal[string] )
keyword[while] identifier[i] < identifier[len] ( identifier[text] ):
keyword[if] identifier[escape] :
identifier[escape] = keyword[False]
identifier[i] += literal[int]
keyword[elif] identifier[text] [ identifier[i] ]== literal[string] :
identifier[escape] = keyword[True]
identifier[i] += literal[int]
keyword[elif] identifier[code_pattern] . identifier[match] ( identifier[text] , identifier[i] ):
identifier[i] = identifier[code_pattern] . identifier[match] ( identifier[text] , identifier[i] ). identifier[end] ()
keyword[elif] identifier[text] [ identifier[i] ]== literal[string] :
identifier[node] = identifier[look_for_image_or_link] ( identifier[text] , identifier[delimiters] , identifier[i] , identifier[root_node] , identifier[matches] )
keyword[if] identifier[node] :
identifier[i] = identifier[node] . identifier[end] ()
identifier[matches] . identifier[append] ( identifier[node] )
keyword[else] :
identifier[i] += literal[int]
keyword[else] :
identifier[m] = identifier[delimiters_re] . identifier[match] ( identifier[text] , identifier[i] )
keyword[if] identifier[m] :
identifier[delimiters] . identifier[append] ( identifier[Delimiter] ( identifier[m] , identifier[text] ))
identifier[i] = identifier[m] . identifier[end] ()
keyword[else] :
identifier[i] += literal[int]
identifier[process_emphasis] ( identifier[text] , identifier[delimiters] , keyword[None] , identifier[matches] )
keyword[return] identifier[matches] | def find_links_or_emphs(text, root_node):
"""Fink links/images or emphasis from text.
:param text: the original text.
:param root_node: a reference to the root node of the AST.
:returns: an iterable of match object.
"""
delimiters_re = re.compile('(?:!?\\[|\\*+|_+)')
i = 0
delimiters = []
escape = False
matches = []
code_pattern = re.compile('(?<!`)(`+)(?!`)([\\s\\S]+?)(?<!`)\\1(?!`)')
while i < len(text):
if escape:
escape = False
i += 1 # depends on [control=['if'], data=[]]
elif text[i] == '\\':
escape = True
i += 1 # depends on [control=['if'], data=[]]
elif code_pattern.match(text, i):
i = code_pattern.match(text, i).end() # depends on [control=['if'], data=[]]
elif text[i] == ']':
node = look_for_image_or_link(text, delimiters, i, root_node, matches)
if node:
i = node.end()
matches.append(node) # depends on [control=['if'], data=[]]
else:
i += 1 # depends on [control=['if'], data=[]]
else:
m = delimiters_re.match(text, i)
if m:
delimiters.append(Delimiter(m, text))
i = m.end() # depends on [control=['if'], data=[]]
else:
i += 1 # depends on [control=['while'], data=['i']]
process_emphasis(text, delimiters, None, matches)
return matches |
def account_distance(A1, A2):
    """Return the distance between two accounts. Here that is just the
    difference in sum(alpha)

    Args:
        A1 (Account): The first account.
        A2 (Account): The second account

    Returns:
        float: The distance between the two accounts.
    """
    # Generator expressions avoid materializing throwaway lists inside
    # sum(); both accounts are iterables of actions carrying .alpha.
    return (sum(action.alpha for action in A1) -
            sum(action.alpha for action in A2))
constant[Return the distance between two accounts. Here that is just the
difference in sum(alpha)
Args:
A1 (Account): The first account.
A2 (Account): The second account
Returns:
float: The distance between the two accounts.
]
return[binary_operation[call[name[sum], parameter[<ast.ListComp object at 0x7da18ede7d90>]] - call[name[sum], parameter[<ast.ListComp object at 0x7da18ede4dc0>]]]] | keyword[def] identifier[account_distance] ( identifier[A1] , identifier[A2] ):
literal[string]
keyword[return] ( identifier[sum] ([ identifier[action] . identifier[alpha] keyword[for] identifier[action] keyword[in] identifier[A1] ])-
identifier[sum] ([ identifier[action] . identifier[alpha] keyword[for] identifier[action] keyword[in] identifier[A2] ])) | def account_distance(A1, A2):
"""Return the distance between two accounts. Here that is just the
difference in sum(alpha)
Args:
A1 (Account): The first account.
A2 (Account): The second account
Returns:
float: The distance between the two accounts.
"""
return sum([action.alpha for action in A1]) - sum([action.alpha for action in A2]) |
def deployed(name, template=None, environment=None, params=None, poll=5,
             rollback=False, timeout=60, update=False, profile=None,
             **connection_args):
    '''
    Deploy stack with the specified properties
    name
        The name of the stack
    template
        File of template
    environment
        File of environment
    params
        Parameter dict used to create the stack
    poll
        Poll (in sec.) and report events until stack complete
    rollback
        Enable rollback on create failure
    timeout
        Stack creation timeout in minutes
    profile
        Profile to use
    .. versionadded:: 2017.7.5,2018.3.1
        The spelling mistake in parameter `enviroment` was corrected to `environment`.
        The misspelled version is still supported for backward compatibility, but will
        be removed in Salt Neon.
    '''
    # Backward compatibility: accept the misspelled 'enviroment' kwarg,
    # warning that it will be dropped in Salt Neon.
    if environment is None and 'enviroment' in connection_args:
        salt.utils.versions.warn_until('Neon', (
            "Please use the 'environment' parameter instead of the misspelled 'enviroment' "
            "parameter which will be removed in Salt Neon."
        ))
        environment = connection_args.pop('enviroment')
    log.debug('Deployed with (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s)',
              name, template, environment, params, poll, rollback,
              timeout, update, profile, connection_args)
    ret = {'name': None,
           'comment': '',
           'changes': {},
           'result': True}

    if not name:
        ret['result'] = False
        # Fixed typo in user-facing message ("ist" -> "is").
        ret['comment'] = 'Name is not valid'
        return ret

    # Fixed: the original `ret['name'] = name,` had a trailing comma,
    # which made the state's name field a 1-tuple instead of a string.
    ret['name'] = name

    existing_stack = __salt__['heat.show_stack'](name, profile=profile)
    if existing_stack['result'] and not update:
        # Stack already present and no update requested: nothing to do.
        ret['comment'] = 'Stack {0} is deployed'.format(name)
        return ret
    if existing_stack['result'] and update:
        if template:
            # Fetch the template source to a temp file so it can be
            # parsed and compared against the currently deployed one.
            template_tmp_file = salt.utils.files.mkstemp()
            tsfn, source_sum, comment_ = __salt__['file.get_managed'](
                name=template_tmp_file,
                template=None,
                source=template,
                source_hash=None,
                user=None,
                group=None,
                mode=None,
                saltenv='base',
                context=None,
                defaults=None,
                skip_verify=False,
                kwargs=None)

            template_manage_result = __salt__['file.manage_file'](
                name=template_tmp_file,
                sfn=tsfn,
                ret=None,
                source=template,
                source_sum=source_sum,
                user=None,
                group=None,
                mode=None,
                saltenv='base',
                backup=None,
                makedirs=True,
                template=None,
                show_changes=False,
                contents=None,
                dir_mode=None)
            # In test mode manage_file may report result=None; treat
            # that as success so the comparison below still runs.
            if (template_manage_result['result']) or \
                    ((__opts__['test']) and (template_manage_result['result'] is not False)):
                with salt.utils.files.fopen(template_tmp_file, 'r') as tfp_:
                    tpl = salt.utils.stringutils.to_unicode(tfp_.read())
                    salt.utils.files.safe_rm(template_tmp_file)
                    try:
                        # Normalize the template (YAML for HOT, JSON
                        # otherwise) so checksums compare canonically.
                        template_parse = _parse_template(tpl)
                        if 'heat_template_version' in template_parse:
                            template_new = salt.utils.yaml.safe_dump(template_parse)
                        else:
                            template_new = jsonutils.dumps(template_parse, indent=2, ensure_ascii=False)
                        salt.utils.files.safe_rm(template_tmp_file)
                    except ValueError as ex:
                        ret['result'] = False
                        ret['comment'] = 'Error parsing template {0}'.format(ex)
            else:
                ret['result'] = False
                ret['comment'] = 'Can not open template: {0} {1}'.format(template, comment_)
        else:
            ret['result'] = False
            ret['comment'] = 'Can not open template'
        if ret['result'] is True:
            # Retrieve the deployed stack's template for comparison.
            template_stack = __salt__['heat.template_stack'](name=name, profile=profile)
            if not template_stack['result']:
                ret['result'] = False
                ret['comment'] = template_stack['comment']
        if ret['result'] is False:
            return ret

        try:
            checksum_template = __salt__['hashutil.digest'](template_new)
            checksum_stack = __salt__['hashutil.digest'](template_stack['template'])
        except salt.exceptions.CommandExecutionError as cmdexc:
            ret['result'] = False
            ret['comment'] = '{0}'.format(cmdexc)

        if ret['result'] is True:
            if checksum_template == checksum_stack:
                if __opts__['test']:
                    ret['result'] = True
                    ret['comment'] = 'Stack {0} is deployed'.format(name)
                    return ret
                else:
                    # Identical template: nothing to update, so the
                    # state fails with an explanatory comment.
                    ret['result'] = False
                    ret['comment'] = 'Templates have same checksum: {0} {1}'\
                        .format(checksum_template, checksum_stack)
        if ret['result'] is False:
            return ret

        if __opts__['test']:
            stack = {
                'result': None,
                'comment': 'Stack {0} is set to be updated'.format(name)
            }
        else:
            stack = __salt__['heat.update_stack'](name=name,
                                                  template_file=template,
                                                  environment=environment,
                                                  parameters=params, poll=poll,
                                                  rollback=rollback,
                                                  timeout=timeout,
                                                  profile=profile)
            ret['changes']['stack_name'] = name
            ret['changes']['comment'] = 'Update stack'
    else:
        # Stack does not exist yet: create it (or pretend to in test mode).
        if __opts__['test']:
            stack = {
                'result': None,
                'comment': 'Stack {0} is set to be created'.format(name)
            }
        else:
            stack = __salt__['heat.create_stack'](name=name,
                                                  template_file=template,
                                                  environment=environment,
                                                  parameters=params, poll=poll,
                                                  rollback=rollback,
                                                  timeout=timeout,
                                                  profile=profile)
            ret['changes']['stack_name'] = name
            ret['changes']['comment'] = 'Create stack'
    ret['result'] = stack['result']
    ret['comment'] = stack['comment']

    return ret
constant[
Deploy stack with the specified properties
name
The name of the stack
template
File of template
environment
File of environment
params
Parameter dict used to create the stack
poll
Poll (in sec.) and report events until stack complete
rollback
Enable rollback on create failure
timeout
Stack creation timeout in minutes
profile
Profile to use
.. versionadded:: 2017.7.5,2018.3.1
The spelling mistake in parameter `enviroment` was corrected to `environment`.
The misspelled version is still supported for backward compatibility, but will
be removed in Salt Neon.
]
if <ast.BoolOp object at 0x7da1b1c2e230> begin[:]
call[name[salt].utils.versions.warn_until, parameter[constant[Neon], constant[Please use the 'environment' parameter instead of the misspelled 'enviroment' parameter which will be removed in Salt Neon.]]]
variable[environment] assign[=] call[name[connection_args].pop, parameter[constant[enviroment]]]
call[name[log].debug, parameter[constant[Deployed with (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s)], name[name], name[template], name[environment], name[params], name[poll], name[rollback], name[timeout], name[update], name[profile], name[connection_args]]]
variable[ret] assign[=] dictionary[[<ast.Constant object at 0x7da1b1c2da80>, <ast.Constant object at 0x7da1b1c2da50>, <ast.Constant object at 0x7da1b1c2da20>, <ast.Constant object at 0x7da1b1c2d9f0>], [<ast.Constant object at 0x7da1b1c2d9c0>, <ast.Constant object at 0x7da1b1c2d990>, <ast.Dict object at 0x7da1b1c2d960>, <ast.Constant object at 0x7da1b1c2d930>]]
if <ast.UnaryOp object at 0x7da1b1c2d8d0> begin[:]
call[name[ret]][constant[result]] assign[=] constant[False]
call[name[ret]][constant[comment]] assign[=] constant[Name ist not valid]
return[name[ret]]
call[name[ret]][constant[name]] assign[=] tuple[[<ast.Name object at 0x7da1b1c2d510>]]
variable[existing_stack] assign[=] call[call[name[__salt__]][constant[heat.show_stack]], parameter[name[name]]]
if <ast.BoolOp object at 0x7da1b1c2d300> begin[:]
call[name[ret]][constant[comment]] assign[=] call[constant[Stack {0} is deployed].format, parameter[name[name]]]
return[name[ret]]
if <ast.BoolOp object at 0x7da1b1c2cfa0> begin[:]
if name[template] begin[:]
variable[template_tmp_file] assign[=] call[name[salt].utils.files.mkstemp, parameter[]]
<ast.Tuple object at 0x7da1b1c2ccd0> assign[=] call[call[name[__salt__]][constant[file.get_managed]], parameter[]]
variable[template_manage_result] assign[=] call[call[name[__salt__]][constant[file.manage_file]], parameter[]]
if <ast.BoolOp object at 0x7da1b1f75cf0> begin[:]
with call[name[salt].utils.files.fopen, parameter[name[template_tmp_file], constant[r]]] begin[:]
variable[tpl] assign[=] call[name[salt].utils.stringutils.to_unicode, parameter[call[name[tfp_].read, parameter[]]]]
call[name[salt].utils.files.safe_rm, parameter[name[template_tmp_file]]]
<ast.Try object at 0x7da1b1f75b70>
if compare[call[name[ret]][constant[result]] is constant[True]] begin[:]
variable[template_stack] assign[=] call[call[name[__salt__]][constant[heat.template_stack]], parameter[]]
if <ast.UnaryOp object at 0x7da1b1f75d20> begin[:]
call[name[ret]][constant[result]] assign[=] constant[False]
call[name[ret]][constant[comment]] assign[=] call[name[template_stack]][constant[comment]]
if compare[call[name[ret]][constant[result]] is constant[False]] begin[:]
return[name[ret]]
<ast.Try object at 0x7da1b1f74040>
if compare[call[name[ret]][constant[result]] is constant[True]] begin[:]
if compare[name[checksum_template] equal[==] name[checksum_stack]] begin[:]
if call[name[__opts__]][constant[test]] begin[:]
call[name[ret]][constant[result]] assign[=] constant[True]
call[name[ret]][constant[comment]] assign[=] call[constant[Stack {0} is deployed].format, parameter[name[name]]]
return[name[ret]]
if compare[call[name[ret]][constant[result]] is constant[False]] begin[:]
return[name[ret]]
if call[name[__opts__]][constant[test]] begin[:]
variable[stack] assign[=] dictionary[[<ast.Constant object at 0x7da1b1c36ec0>, <ast.Constant object at 0x7da1b1c348b0>], [<ast.Constant object at 0x7da1b1c358d0>, <ast.Call object at 0x7da1b1c35f90>]]
call[name[ret]][constant[result]] assign[=] call[name[stack]][constant[result]]
call[name[ret]][constant[comment]] assign[=] call[name[stack]][constant[comment]]
return[name[ret]] | keyword[def] identifier[deployed] ( identifier[name] , identifier[template] = keyword[None] , identifier[environment] = keyword[None] , identifier[params] = keyword[None] , identifier[poll] = literal[int] ,
identifier[rollback] = keyword[False] , identifier[timeout] = literal[int] , identifier[update] = keyword[False] , identifier[profile] = keyword[None] ,
** identifier[connection_args] ):
literal[string]
keyword[if] identifier[environment] keyword[is] keyword[None] keyword[and] literal[string] keyword[in] identifier[connection_args] :
identifier[salt] . identifier[utils] . identifier[versions] . identifier[warn_until] ( literal[string] ,(
literal[string]
literal[string]
))
identifier[environment] = identifier[connection_args] . identifier[pop] ( literal[string] )
identifier[log] . identifier[debug] ( literal[string] ,
identifier[name] , identifier[template] , identifier[environment] , identifier[params] , identifier[poll] , identifier[rollback] ,
identifier[timeout] , identifier[update] , identifier[profile] , identifier[connection_args] )
identifier[ret] ={ literal[string] : keyword[None] ,
literal[string] : literal[string] ,
literal[string] :{},
literal[string] : keyword[True] }
keyword[if] keyword[not] identifier[name] :
identifier[ret] [ literal[string] ]= keyword[False]
identifier[ret] [ literal[string] ]= literal[string]
keyword[return] identifier[ret]
identifier[ret] [ literal[string] ]= identifier[name] ,
identifier[existing_stack] = identifier[__salt__] [ literal[string] ]( identifier[name] , identifier[profile] = identifier[profile] )
keyword[if] identifier[existing_stack] [ literal[string] ] keyword[and] keyword[not] identifier[update] :
identifier[ret] [ literal[string] ]= literal[string] . identifier[format] ( identifier[name] )
keyword[return] identifier[ret]
keyword[if] identifier[existing_stack] [ literal[string] ] keyword[and] identifier[update] :
keyword[if] identifier[template] :
identifier[template_tmp_file] = identifier[salt] . identifier[utils] . identifier[files] . identifier[mkstemp] ()
identifier[tsfn] , identifier[source_sum] , identifier[comment_] = identifier[__salt__] [ literal[string] ](
identifier[name] = identifier[template_tmp_file] ,
identifier[template] = keyword[None] ,
identifier[source] = identifier[template] ,
identifier[source_hash] = keyword[None] ,
identifier[user] = keyword[None] ,
identifier[group] = keyword[None] ,
identifier[mode] = keyword[None] ,
identifier[saltenv] = literal[string] ,
identifier[context] = keyword[None] ,
identifier[defaults] = keyword[None] ,
identifier[skip_verify] = keyword[False] ,
identifier[kwargs] = keyword[None] )
identifier[template_manage_result] = identifier[__salt__] [ literal[string] ](
identifier[name] = identifier[template_tmp_file] ,
identifier[sfn] = identifier[tsfn] ,
identifier[ret] = keyword[None] ,
identifier[source] = identifier[template] ,
identifier[source_sum] = identifier[source_sum] ,
identifier[user] = keyword[None] ,
identifier[group] = keyword[None] ,
identifier[mode] = keyword[None] ,
identifier[saltenv] = literal[string] ,
identifier[backup] = keyword[None] ,
identifier[makedirs] = keyword[True] ,
identifier[template] = keyword[None] ,
identifier[show_changes] = keyword[False] ,
identifier[contents] = keyword[None] ,
identifier[dir_mode] = keyword[None] )
keyword[if] ( identifier[template_manage_result] [ literal[string] ]) keyword[or] (( identifier[__opts__] [ literal[string] ]) keyword[and] ( identifier[template_manage_result] [ literal[string] ] keyword[is] keyword[not] keyword[False] )):
keyword[with] identifier[salt] . identifier[utils] . identifier[files] . identifier[fopen] ( identifier[template_tmp_file] , literal[string] ) keyword[as] identifier[tfp_] :
identifier[tpl] = identifier[salt] . identifier[utils] . identifier[stringutils] . identifier[to_unicode] ( identifier[tfp_] . identifier[read] ())
identifier[salt] . identifier[utils] . identifier[files] . identifier[safe_rm] ( identifier[template_tmp_file] )
keyword[try] :
identifier[template_parse] = identifier[_parse_template] ( identifier[tpl] )
keyword[if] literal[string] keyword[in] identifier[template_parse] :
identifier[template_new] = identifier[salt] . identifier[utils] . identifier[yaml] . identifier[safe_dump] ( identifier[template_parse] )
keyword[else] :
identifier[template_new] = identifier[jsonutils] . identifier[dumps] ( identifier[template_parse] , identifier[indent] = literal[int] , identifier[ensure_ascii] = keyword[False] )
identifier[salt] . identifier[utils] . identifier[files] . identifier[safe_rm] ( identifier[template_tmp_file] )
keyword[except] identifier[ValueError] keyword[as] identifier[ex] :
identifier[ret] [ literal[string] ]= keyword[False]
identifier[ret] [ literal[string] ]= literal[string] . identifier[format] ( identifier[ex] )
keyword[else] :
identifier[ret] [ literal[string] ]= keyword[False]
identifier[ret] [ literal[string] ]= literal[string] . identifier[format] ( identifier[template] , identifier[comment_] )
keyword[else] :
identifier[ret] [ literal[string] ]= keyword[False]
identifier[ret] [ literal[string] ]= literal[string]
keyword[if] identifier[ret] [ literal[string] ] keyword[is] keyword[True] :
identifier[template_stack] = identifier[__salt__] [ literal[string] ]( identifier[name] = identifier[name] , identifier[profile] = identifier[profile] )
keyword[if] keyword[not] identifier[template_stack] [ literal[string] ]:
identifier[ret] [ literal[string] ]= keyword[False]
identifier[ret] [ literal[string] ]= identifier[template_stack] [ literal[string] ]
keyword[if] identifier[ret] [ literal[string] ] keyword[is] keyword[False] :
keyword[return] identifier[ret]
keyword[try] :
identifier[checksum_template] = identifier[__salt__] [ literal[string] ]( identifier[template_new] )
identifier[checksum_stack] = identifier[__salt__] [ literal[string] ]( identifier[template_stack] [ literal[string] ])
keyword[except] identifier[salt] . identifier[exceptions] . identifier[CommandExecutionError] keyword[as] identifier[cmdexc] :
identifier[ret] [ literal[string] ]= keyword[False]
identifier[ret] [ literal[string] ]= literal[string] . identifier[format] ( identifier[cmdexc] )
keyword[if] identifier[ret] [ literal[string] ] keyword[is] keyword[True] :
keyword[if] identifier[checksum_template] == identifier[checksum_stack] :
keyword[if] identifier[__opts__] [ literal[string] ]:
identifier[ret] [ literal[string] ]= keyword[True]
identifier[ret] [ literal[string] ]= literal[string] . identifier[format] ( identifier[name] )
keyword[return] identifier[ret]
keyword[else] :
identifier[ret] [ literal[string] ]= keyword[False]
identifier[ret] [ literal[string] ]= literal[string] . identifier[format] ( identifier[checksum_template] , identifier[checksum_stack] )
keyword[if] identifier[ret] [ literal[string] ] keyword[is] keyword[False] :
keyword[return] identifier[ret]
keyword[if] identifier[__opts__] [ literal[string] ]:
identifier[stack] ={
literal[string] : keyword[None] ,
literal[string] : literal[string] . identifier[format] ( identifier[name] )
}
keyword[else] :
identifier[stack] = identifier[__salt__] [ literal[string] ]( identifier[name] = identifier[name] ,
identifier[template_file] = identifier[template] ,
identifier[environment] = identifier[environment] ,
identifier[parameters] = identifier[params] , identifier[poll] = identifier[poll] ,
identifier[rollback] = identifier[rollback] ,
identifier[timeout] = identifier[timeout] ,
identifier[profile] = identifier[profile] )
identifier[ret] [ literal[string] ][ literal[string] ]= identifier[name]
identifier[ret] [ literal[string] ][ literal[string] ]= literal[string]
keyword[else] :
keyword[if] identifier[__opts__] [ literal[string] ]:
identifier[stack] ={
literal[string] : keyword[None] ,
literal[string] : literal[string] . identifier[format] ( identifier[name] )
}
keyword[else] :
identifier[stack] = identifier[__salt__] [ literal[string] ]( identifier[name] = identifier[name] ,
identifier[template_file] = identifier[template] ,
identifier[environment] = identifier[environment] ,
identifier[parameters] = identifier[params] , identifier[poll] = identifier[poll] ,
identifier[rollback] = identifier[rollback] ,
identifier[timeout] = identifier[timeout] ,
identifier[profile] = identifier[profile] )
identifier[ret] [ literal[string] ][ literal[string] ]= identifier[name]
identifier[ret] [ literal[string] ][ literal[string] ]= literal[string]
identifier[ret] [ literal[string] ]= identifier[stack] [ literal[string] ]
identifier[ret] [ literal[string] ]= identifier[stack] [ literal[string] ]
keyword[return] identifier[ret] | def deployed(name, template=None, environment=None, params=None, poll=5, rollback=False, timeout=60, update=False, profile=None, **connection_args):
"""
Deploy stack with the specified properties
name
The name of the stack
template
File of template
environment
File of environment
params
Parameter dict used to create the stack
poll
Poll (in sec.) and report events until stack complete
rollback
Enable rollback on create failure
timeout
Stack creation timeout in minutes
profile
Profile to use
.. versionadded:: 2017.7.5,2018.3.1
The spelling mistake in parameter `enviroment` was corrected to `environment`.
The misspelled version is still supported for backward compatibility, but will
be removed in Salt Neon.
"""
if environment is None and 'enviroment' in connection_args:
salt.utils.versions.warn_until('Neon', "Please use the 'environment' parameter instead of the misspelled 'enviroment' parameter which will be removed in Salt Neon.")
environment = connection_args.pop('enviroment') # depends on [control=['if'], data=[]]
log.debug('Deployed with (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s)', name, template, environment, params, poll, rollback, timeout, update, profile, connection_args)
ret = {'name': None, 'comment': '', 'changes': {}, 'result': True}
if not name:
ret['result'] = False
ret['comment'] = 'Name ist not valid'
return ret # depends on [control=['if'], data=[]]
ret['name'] = (name,)
existing_stack = __salt__['heat.show_stack'](name, profile=profile)
if existing_stack['result'] and (not update):
ret['comment'] = 'Stack {0} is deployed'.format(name)
return ret # depends on [control=['if'], data=[]]
if existing_stack['result'] and update:
if template:
template_tmp_file = salt.utils.files.mkstemp()
(tsfn, source_sum, comment_) = __salt__['file.get_managed'](name=template_tmp_file, template=None, source=template, source_hash=None, user=None, group=None, mode=None, saltenv='base', context=None, defaults=None, skip_verify=False, kwargs=None)
template_manage_result = __salt__['file.manage_file'](name=template_tmp_file, sfn=tsfn, ret=None, source=template, source_sum=source_sum, user=None, group=None, mode=None, saltenv='base', backup=None, makedirs=True, template=None, show_changes=False, contents=None, dir_mode=None)
if template_manage_result['result'] or (__opts__['test'] and template_manage_result['result'] is not False):
with salt.utils.files.fopen(template_tmp_file, 'r') as tfp_:
tpl = salt.utils.stringutils.to_unicode(tfp_.read())
salt.utils.files.safe_rm(template_tmp_file)
try:
template_parse = _parse_template(tpl)
if 'heat_template_version' in template_parse:
template_new = salt.utils.yaml.safe_dump(template_parse) # depends on [control=['if'], data=['template_parse']]
else:
template_new = jsonutils.dumps(template_parse, indent=2, ensure_ascii=False)
salt.utils.files.safe_rm(template_tmp_file) # depends on [control=['try'], data=[]]
except ValueError as ex:
ret['result'] = False
ret['comment'] = 'Error parsing template {0}'.format(ex) # depends on [control=['except'], data=['ex']] # depends on [control=['with'], data=['tfp_']] # depends on [control=['if'], data=[]]
else:
ret['result'] = False
ret['comment'] = 'Can not open template: {0} {1}'.format(template, comment_) # depends on [control=['if'], data=[]]
else:
ret['result'] = False
ret['comment'] = 'Can not open template'
if ret['result'] is True:
template_stack = __salt__['heat.template_stack'](name=name, profile=profile)
if not template_stack['result']:
ret['result'] = False
ret['comment'] = template_stack['comment'] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
if ret['result'] is False:
return ret # depends on [control=['if'], data=[]]
try:
checksum_template = __salt__['hashutil.digest'](template_new)
checksum_stack = __salt__['hashutil.digest'](template_stack['template']) # depends on [control=['try'], data=[]]
except salt.exceptions.CommandExecutionError as cmdexc:
ret['result'] = False
ret['comment'] = '{0}'.format(cmdexc) # depends on [control=['except'], data=['cmdexc']]
if ret['result'] is True:
if checksum_template == checksum_stack:
if __opts__['test']:
ret['result'] = True
ret['comment'] = 'Stack {0} is deployed'.format(name)
return ret # depends on [control=['if'], data=[]]
else:
ret['result'] = False
ret['comment'] = 'Templates have same checksum: {0} {1}'.format(checksum_template, checksum_stack) # depends on [control=['if'], data=['checksum_template', 'checksum_stack']] # depends on [control=['if'], data=[]]
if ret['result'] is False:
return ret # depends on [control=['if'], data=[]]
if __opts__['test']:
stack = {'result': None, 'comment': 'Stack {0} is set to be updated'.format(name)} # depends on [control=['if'], data=[]]
else:
stack = __salt__['heat.update_stack'](name=name, template_file=template, environment=environment, parameters=params, poll=poll, rollback=rollback, timeout=timeout, profile=profile)
ret['changes']['stack_name'] = name
ret['changes']['comment'] = 'Update stack' # depends on [control=['if'], data=[]]
elif __opts__['test']:
stack = {'result': None, 'comment': 'Stack {0} is set to be created'.format(name)} # depends on [control=['if'], data=[]]
else:
stack = __salt__['heat.create_stack'](name=name, template_file=template, environment=environment, parameters=params, poll=poll, rollback=rollback, timeout=timeout, profile=profile)
ret['changes']['stack_name'] = name
ret['changes']['comment'] = 'Create stack'
ret['result'] = stack['result']
ret['comment'] = stack['comment']
return ret |
def _construct_replset(self, basedir, portstart, name, num_nodes,
                       arbiter, extra=''):
    """Build the command-line setup for one replica set.

    Works both for a standalone replica set and for a single shard of
    a sharded cluster. Registers every member in
    ``self.config_docs[name]`` and returns the connection string
    ``"<name>/<host1>,<host2>,..."`` for the set.

    :param basedir: base directory for the data/log paths
    :param portstart: first port number; node *i* listens on
        ``portstart + i``
    :param name: replica set name
    :param num_nodes: iterable of zero-based node numbers to create
    :param arbiter: if True, additionally launch one arbiter
    :param extra: extra command-line arguments passed to each mongod
    """
    members = []
    self.config_docs[name] = {'_id': name, 'members': members}
    hostname = self.args['hostname']
    for node in num_nodes:
        node_port = portstart + node
        node_dir = self._create_paths(basedir, '%s/rs%i' % (name, node + 1))
        self._construct_mongod(os.path.join(node_dir, 'db'),
                               os.path.join(node_dir, 'mongod.log'),
                               node_port, replset=name, extra=extra)
        member = {
            '_id': len(members),
            'host': '%s:%i' % (hostname, node_port),
        }
        if node == 0 and self.args['priority']:
            # First node gets increased priority.
            member['priority'] = 10
        if node >= 7:
            # Nodes beyond the first seven are made non-voting with
            # zero priority.
            member['votes'] = 0
            member['priority'] = 0
        members.append(member)
    # Optionally launch a single arbiter on the port after the last node.
    if arbiter:
        arb_port = portstart + self.args['nodes']
        arb_dir = self._create_paths(basedir, '%s/arb' % (name))
        self._construct_mongod(os.path.join(arb_dir, 'db'),
                               os.path.join(arb_dir, 'mongod.log'),
                               arb_port,
                               replset=name)
        members.append({'_id': len(members),
                        'host': '%s:%i' % (hostname, arb_port),
                        'arbiterOnly': True})
    return '%s/%s' % (name, ','.join(m['host'] for m in members))
constant[
Construct command line strings for a replicaset.
Handles single set or sharded cluster.
]
call[name[self].config_docs][name[name]] assign[=] dictionary[[<ast.Constant object at 0x7da1b16351b0>, <ast.Constant object at 0x7da1b1636020>], [<ast.Name object at 0x7da1b1635390>, <ast.List object at 0x7da1b1635330>]]
for taget[name[i]] in starred[name[num_nodes]] begin[:]
variable[datapath] assign[=] call[name[self]._create_paths, parameter[name[basedir], binary_operation[constant[%s/rs%i] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1b1634f40>, <ast.BinOp object at 0x7da1b1635c90>]]]]]
call[name[self]._construct_mongod, parameter[call[name[os].path.join, parameter[name[datapath], constant[db]]], call[name[os].path.join, parameter[name[datapath], constant[mongod.log]]], binary_operation[name[portstart] + name[i]]]]
variable[host] assign[=] binary_operation[constant[%s:%i] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Subscript object at 0x7da1b1635db0>, <ast.BinOp object at 0x7da1b1635e40>]]]
variable[member_config] assign[=] dictionary[[<ast.Constant object at 0x7da1b16350c0>, <ast.Constant object at 0x7da1b1635090>], [<ast.Call object at 0x7da1b1635d50>, <ast.Name object at 0x7da1b169b910>]]
if <ast.BoolOp object at 0x7da1b169b970> begin[:]
call[name[member_config]][constant[priority]] assign[=] constant[10]
if compare[name[i] greater_or_equal[>=] constant[7]] begin[:]
call[name[member_config]][constant[votes]] assign[=] constant[0]
call[name[member_config]][constant[priority]] assign[=] constant[0]
call[call[call[name[self].config_docs][name[name]]][constant[members]].append, parameter[name[member_config]]]
if name[arbiter] begin[:]
variable[datapath] assign[=] call[name[self]._create_paths, parameter[name[basedir], binary_operation[constant[%s/arb] <ast.Mod object at 0x7da2590d6920> name[name]]]]
call[name[self]._construct_mongod, parameter[call[name[os].path.join, parameter[name[datapath], constant[db]]], call[name[os].path.join, parameter[name[datapath], constant[mongod.log]]], binary_operation[name[portstart] + call[name[self].args][constant[nodes]]]]]
variable[host] assign[=] binary_operation[constant[%s:%i] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Subscript object at 0x7da1b17808b0>, <ast.BinOp object at 0x7da1b1780610>]]]
call[call[call[name[self].config_docs][name[name]]][constant[members]].append, parameter[dictionary[[<ast.Constant object at 0x7da1b1782710>, <ast.Constant object at 0x7da1b1783940>, <ast.Constant object at 0x7da1b1780a30>], [<ast.Call object at 0x7da1b17828c0>, <ast.Name object at 0x7da1b1783760>, <ast.Constant object at 0x7da1b1783040>]]]]
return[binary_operation[binary_operation[name[name] + constant[/]] + call[constant[,].join, parameter[<ast.ListComp object at 0x7da1b1783820>]]]] | keyword[def] identifier[_construct_replset] ( identifier[self] , identifier[basedir] , identifier[portstart] , identifier[name] , identifier[num_nodes] ,
identifier[arbiter] , identifier[extra] = literal[string] ):
literal[string]
identifier[self] . identifier[config_docs] [ identifier[name] ]={ literal[string] : identifier[name] , literal[string] :[]}
keyword[for] identifier[i] keyword[in] identifier[num_nodes] :
identifier[datapath] = identifier[self] . identifier[_create_paths] ( identifier[basedir] , literal[string] %( identifier[name] , identifier[i] + literal[int] ))
identifier[self] . identifier[_construct_mongod] ( identifier[os] . identifier[path] . identifier[join] ( identifier[datapath] , literal[string] ),
identifier[os] . identifier[path] . identifier[join] ( identifier[datapath] , literal[string] ),
identifier[portstart] + identifier[i] , identifier[replset] = identifier[name] , identifier[extra] = identifier[extra] )
identifier[host] = literal[string] %( identifier[self] . identifier[args] [ literal[string] ], identifier[portstart] + identifier[i] )
identifier[member_config] ={
literal[string] : identifier[len] ( identifier[self] . identifier[config_docs] [ identifier[name] ][ literal[string] ]),
literal[string] : identifier[host] ,
}
keyword[if] identifier[i] == literal[int] keyword[and] identifier[self] . identifier[args] [ literal[string] ]:
identifier[member_config] [ literal[string] ]= literal[int]
keyword[if] identifier[i] >= literal[int] :
identifier[member_config] [ literal[string] ]= literal[int]
identifier[member_config] [ literal[string] ]= literal[int]
identifier[self] . identifier[config_docs] [ identifier[name] ][ literal[string] ]. identifier[append] ( identifier[member_config] )
keyword[if] identifier[arbiter] :
identifier[datapath] = identifier[self] . identifier[_create_paths] ( identifier[basedir] , literal[string] %( identifier[name] ))
identifier[self] . identifier[_construct_mongod] ( identifier[os] . identifier[path] . identifier[join] ( identifier[datapath] , literal[string] ),
identifier[os] . identifier[path] . identifier[join] ( identifier[datapath] , literal[string] ),
identifier[portstart] + identifier[self] . identifier[args] [ literal[string] ],
identifier[replset] = identifier[name] )
identifier[host] = literal[string] %( identifier[self] . identifier[args] [ literal[string] ],
identifier[portstart] + identifier[self] . identifier[args] [ literal[string] ])
( identifier[self] . identifier[config_docs] [ identifier[name] ][ literal[string] ]
. identifier[append] ({ literal[string] : identifier[len] ( identifier[self] . identifier[config_docs] [ identifier[name] ][ literal[string] ]),
literal[string] : identifier[host] ,
literal[string] : keyword[True] }))
keyword[return] ( identifier[name] + literal[string] +
literal[string] . identifier[join] ([ identifier[c] [ literal[string] ]
keyword[for] identifier[c] keyword[in] identifier[self] . identifier[config_docs] [ identifier[name] ][ literal[string] ]])) | def _construct_replset(self, basedir, portstart, name, num_nodes, arbiter, extra=''):
"""
Construct command line strings for a replicaset.
Handles single set or sharded cluster.
"""
self.config_docs[name] = {'_id': name, 'members': []}
# Construct individual replica set nodes
for i in num_nodes:
datapath = self._create_paths(basedir, '%s/rs%i' % (name, i + 1))
self._construct_mongod(os.path.join(datapath, 'db'), os.path.join(datapath, 'mongod.log'), portstart + i, replset=name, extra=extra)
host = '%s:%i' % (self.args['hostname'], portstart + i)
member_config = {'_id': len(self.config_docs[name]['members']), 'host': host}
# First node gets increased priority.
if i == 0 and self.args['priority']:
member_config['priority'] = 10 # depends on [control=['if'], data=[]]
if i >= 7:
member_config['votes'] = 0
member_config['priority'] = 0 # depends on [control=['if'], data=[]]
self.config_docs[name]['members'].append(member_config) # depends on [control=['for'], data=['i']]
# launch arbiter if True
if arbiter:
datapath = self._create_paths(basedir, '%s/arb' % name)
self._construct_mongod(os.path.join(datapath, 'db'), os.path.join(datapath, 'mongod.log'), portstart + self.args['nodes'], replset=name)
host = '%s:%i' % (self.args['hostname'], portstart + self.args['nodes'])
self.config_docs[name]['members'].append({'_id': len(self.config_docs[name]['members']), 'host': host, 'arbiterOnly': True}) # depends on [control=['if'], data=[]]
return name + '/' + ','.join([c['host'] for c in self.config_docs[name]['members']]) |
def _update_limits_from_api(self):
    """
    Query ELB's DescribeAccountLimits API action, and update limits
    with the quotas returned. Updates ``self.limits``.

    Queries both the Classic ELB API (``self.conn``) and the ELBv2
    (ALB/NLB) API; as a side effect, connects ``self.conn2`` to the
    'elbv2' endpoint.
    """
    self.connect()
    logger.debug("Querying ELB DescribeAccountLimits for limits")
    # Classic ELB limits.
    self._update_limits_from_response(
        self.conn.describe_account_limits(),
        {
            'classic-load-balancers': 'Active load balancers',
            'classic-listeners': 'Listeners per load balancer',
            'classic-registered-instances':
                'Registered instances per load balancer'
        }
    )
    # connect to ELBv2 API as well
    self.conn2 = client('elbv2', **self._boto3_connection_kwargs)
    logger.debug("Connected to %s in region %s",
                 'elbv2', self.conn2._client_config.region_name)
    logger.debug("Querying ELBv2 (ALB) DescribeAccountLimits for limits")
    self._update_limits_from_response(
        self.conn2.describe_account_limits(),
        {
            'target-groups': 'Target groups',
            'listeners-per-application-load-balancer':
                'Listeners per application load balancer',
            'rules-per-application-load-balancer':
                'Rules per application load balancer',
            'network-load-balancers': 'Network load balancers',
            'listeners-per-network-load-balancer':
                'Listeners per network load balancer'
        }
    )
    logger.debug("Done setting limits from API")

def _update_limits_from_response(self, attribs, name_to_limits):
    """
    Apply the quotas from one DescribeAccountLimits response to
    ``self.limits``. (Shared by the ELB and ELBv2 queries above.)

    :param attribs: API response dict with a ``'Limits'`` list of
        ``{'Name': ..., 'Max': ...}`` entries
    :param name_to_limits: mapping of API limit names to the keys
        used in ``self.limits``
    """
    for attrib in attribs['Limits']:
        # A missing or zero 'Max' means the API reported no usable value.
        if int(attrib.get('Max', 0)) == 0:
            continue
        name = attrib.get('Name', 'unknown')
        # Ignore limit names we don't track.
        if name not in name_to_limits:
            continue
        self.limits[name_to_limits[name]]._set_api_limit(int(attrib['Max']))
constant[
Query ELB's DescribeAccountLimits API action, and update limits
with the quotas returned. Updates ``self.limits``.
]
call[name[self].connect, parameter[]]
call[name[logger].debug, parameter[constant[Querying ELB DescribeAccountLimits for limits]]]
variable[attribs] assign[=] call[name[self].conn.describe_account_limits, parameter[]]
variable[name_to_limits] assign[=] dictionary[[<ast.Constant object at 0x7da20c6a9810>, <ast.Constant object at 0x7da20c6a8160>, <ast.Constant object at 0x7da20c6abf10>], [<ast.Constant object at 0x7da20c6a8b80>, <ast.Constant object at 0x7da20c6a9f60>, <ast.Constant object at 0x7da20c6aacb0>]]
for taget[name[attrib]] in starred[call[name[attribs]][constant[Limits]]] begin[:]
if compare[call[name[int], parameter[call[name[attrib].get, parameter[constant[Max], constant[0]]]]] equal[==] constant[0]] begin[:]
continue
variable[name] assign[=] call[name[attrib].get, parameter[constant[Name], constant[unknown]]]
if compare[name[name] <ast.NotIn object at 0x7da2590d7190> name[name_to_limits]] begin[:]
continue
call[call[name[self].limits][call[name[name_to_limits]][name[name]]]._set_api_limit, parameter[call[name[int], parameter[call[name[attrib]][constant[Max]]]]]]
name[self].conn2 assign[=] call[name[client], parameter[constant[elbv2]]]
call[name[logger].debug, parameter[constant[Connected to %s in region %s], constant[elbv2], name[self].conn2._client_config.region_name]]
call[name[logger].debug, parameter[constant[Querying ELBv2 (ALB) DescribeAccountLimits for limits]]]
variable[attribs] assign[=] call[name[self].conn2.describe_account_limits, parameter[]]
variable[name_to_limits] assign[=] dictionary[[<ast.Constant object at 0x7da20c6a9b70>, <ast.Constant object at 0x7da20c6aae60>, <ast.Constant object at 0x7da20c6a8730>, <ast.Constant object at 0x7da20c6a8940>, <ast.Constant object at 0x7da20c6ab640>], [<ast.Constant object at 0x7da20c6a8130>, <ast.Constant object at 0x7da20c6abcd0>, <ast.Constant object at 0x7da20c6a8ee0>, <ast.Constant object at 0x7da20c6a9240>, <ast.Constant object at 0x7da20c6aadd0>]]
for taget[name[attrib]] in starred[call[name[attribs]][constant[Limits]]] begin[:]
if compare[call[name[int], parameter[call[name[attrib].get, parameter[constant[Max], constant[0]]]]] equal[==] constant[0]] begin[:]
continue
variable[name] assign[=] call[name[attrib].get, parameter[constant[Name], constant[unknown]]]
if compare[name[name] <ast.NotIn object at 0x7da2590d7190> name[name_to_limits]] begin[:]
continue
call[call[name[self].limits][call[name[name_to_limits]][name[name]]]._set_api_limit, parameter[call[name[int], parameter[call[name[attrib]][constant[Max]]]]]]
call[name[logger].debug, parameter[constant[Done setting limits from API]]] | keyword[def] identifier[_update_limits_from_api] ( identifier[self] ):
literal[string]
identifier[self] . identifier[connect] ()
identifier[logger] . identifier[debug] ( literal[string] )
identifier[attribs] = identifier[self] . identifier[conn] . identifier[describe_account_limits] ()
identifier[name_to_limits] ={
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] :
literal[string]
}
keyword[for] identifier[attrib] keyword[in] identifier[attribs] [ literal[string] ]:
keyword[if] identifier[int] ( identifier[attrib] . identifier[get] ( literal[string] , literal[int] ))== literal[int] :
keyword[continue]
identifier[name] = identifier[attrib] . identifier[get] ( literal[string] , literal[string] )
keyword[if] identifier[name] keyword[not] keyword[in] identifier[name_to_limits] :
keyword[continue]
identifier[self] . identifier[limits] [ identifier[name_to_limits] [ identifier[name] ]]. identifier[_set_api_limit] ( identifier[int] ( identifier[attrib] [ literal[string] ]))
identifier[self] . identifier[conn2] = identifier[client] ( literal[string] ,** identifier[self] . identifier[_boto3_connection_kwargs] )
identifier[logger] . identifier[debug] ( literal[string] ,
literal[string] , identifier[self] . identifier[conn2] . identifier[_client_config] . identifier[region_name] )
identifier[logger] . identifier[debug] ( literal[string] )
identifier[attribs] = identifier[self] . identifier[conn2] . identifier[describe_account_limits] ()
identifier[name_to_limits] ={
literal[string] : literal[string] ,
literal[string] :
literal[string] ,
literal[string] :
literal[string] ,
literal[string] : literal[string] ,
literal[string] :
literal[string]
}
keyword[for] identifier[attrib] keyword[in] identifier[attribs] [ literal[string] ]:
keyword[if] identifier[int] ( identifier[attrib] . identifier[get] ( literal[string] , literal[int] ))== literal[int] :
keyword[continue]
identifier[name] = identifier[attrib] . identifier[get] ( literal[string] , literal[string] )
keyword[if] identifier[name] keyword[not] keyword[in] identifier[name_to_limits] :
keyword[continue]
identifier[self] . identifier[limits] [ identifier[name_to_limits] [ identifier[name] ]]. identifier[_set_api_limit] ( identifier[int] ( identifier[attrib] [ literal[string] ]))
identifier[logger] . identifier[debug] ( literal[string] ) | def _update_limits_from_api(self):
"""
Query ELB's DescribeAccountLimits API action, and update limits
with the quotas returned. Updates ``self.limits``.
"""
self.connect()
logger.debug('Querying ELB DescribeAccountLimits for limits')
attribs = self.conn.describe_account_limits()
name_to_limits = {'classic-load-balancers': 'Active load balancers', 'classic-listeners': 'Listeners per load balancer', 'classic-registered-instances': 'Registered instances per load balancer'}
for attrib in attribs['Limits']:
if int(attrib.get('Max', 0)) == 0:
continue # depends on [control=['if'], data=[]]
name = attrib.get('Name', 'unknown')
if name not in name_to_limits:
continue # depends on [control=['if'], data=[]]
self.limits[name_to_limits[name]]._set_api_limit(int(attrib['Max'])) # depends on [control=['for'], data=['attrib']]
# connect to ELBv2 API as well
self.conn2 = client('elbv2', **self._boto3_connection_kwargs)
logger.debug('Connected to %s in region %s', 'elbv2', self.conn2._client_config.region_name)
logger.debug('Querying ELBv2 (ALB) DescribeAccountLimits for limits')
attribs = self.conn2.describe_account_limits()
name_to_limits = {'target-groups': 'Target groups', 'listeners-per-application-load-balancer': 'Listeners per application load balancer', 'rules-per-application-load-balancer': 'Rules per application load balancer', 'network-load-balancers': 'Network load balancers', 'listeners-per-network-load-balancer': 'Listeners per network load balancer'}
for attrib in attribs['Limits']:
if int(attrib.get('Max', 0)) == 0:
continue # depends on [control=['if'], data=[]]
name = attrib.get('Name', 'unknown')
if name not in name_to_limits:
continue # depends on [control=['if'], data=[]]
self.limits[name_to_limits[name]]._set_api_limit(int(attrib['Max'])) # depends on [control=['for'], data=['attrib']]
logger.debug('Done setting limits from API') |
def subsample(self):
    """
    Subsample 1000 reads from the baited files.

    Spawns ``self.cpus`` daemon worker threads running
    ``self.subsamplethreads``, then queues every sample with a usable
    assembly onto ``self.samplequeue`` and blocks until the queue is
    fully processed.
    """
    # Create the threads for the analysis
    logging.info('Subsampling FASTQ reads')
    for _ in range(self.cpus):
        threads = Thread(target=self.subsamplethreads, args=())
        # Daemon threads do not block interpreter exit. Assigning the
        # attribute replaces the deprecated Thread.setDaemon() call.
        threads.daemon = True
        threads.start()
    with progressbar(self.runmetadata.samples) as bar:
        for sample in bar:
            if sample.general.bestassemblyfile != 'NA':
                # Set the name of the subsampled FASTQ file
                sample[self.analysistype].subsampledfastq = \
                    os.path.splitext(sample[self.analysistype].baitedfastq)[0] + '_subsampled.fastq'
                # Set the system call. Note: this runs BBMap's
                # reformat.sh despite the legacy 'seqtkcall' attribute
                # name (kept for backward compatibility).
                sample[self.analysistype].seqtkcall = 'reformat.sh in={} out={} samplereadstarget=1000'\
                    .format(sample[self.analysistype].baitedfastq,
                            sample[self.analysistype].subsampledfastq)
                # Add the sample to the queue
                self.samplequeue.put(sample)
    self.samplequeue.join()
constant[
Subsample 1000 reads from the baited files
]
call[name[logging].info, parameter[constant[Subsampling FASTQ reads]]]
for taget[name[_]] in starred[call[name[range], parameter[name[self].cpus]]] begin[:]
variable[threads] assign[=] call[name[Thread], parameter[]]
call[name[threads].setDaemon, parameter[constant[True]]]
call[name[threads].start, parameter[]]
with call[name[progressbar], parameter[name[self].runmetadata.samples]] begin[:]
for taget[name[sample]] in starred[name[bar]] begin[:]
if compare[name[sample].general.bestassemblyfile not_equal[!=] constant[NA]] begin[:]
call[name[sample]][name[self].analysistype].subsampledfastq assign[=] binary_operation[call[call[name[os].path.splitext, parameter[call[name[sample]][name[self].analysistype].baitedfastq]]][constant[0]] + constant[_subsampled.fastq]]
call[name[sample]][name[self].analysistype].seqtkcall assign[=] call[constant[reformat.sh in={} out={} samplereadstarget=1000].format, parameter[call[name[sample]][name[self].analysistype].baitedfastq, call[name[sample]][name[self].analysistype].subsampledfastq]]
call[name[self].samplequeue.put, parameter[name[sample]]]
call[name[self].samplequeue.join, parameter[]] | keyword[def] identifier[subsample] ( identifier[self] ):
literal[string]
identifier[logging] . identifier[info] ( literal[string] )
keyword[for] identifier[_] keyword[in] identifier[range] ( identifier[self] . identifier[cpus] ):
identifier[threads] = identifier[Thread] ( identifier[target] = identifier[self] . identifier[subsamplethreads] , identifier[args] =())
identifier[threads] . identifier[setDaemon] ( keyword[True] )
identifier[threads] . identifier[start] ()
keyword[with] identifier[progressbar] ( identifier[self] . identifier[runmetadata] . identifier[samples] ) keyword[as] identifier[bar] :
keyword[for] identifier[sample] keyword[in] identifier[bar] :
keyword[if] identifier[sample] . identifier[general] . identifier[bestassemblyfile] != literal[string] :
identifier[sample] [ identifier[self] . identifier[analysistype] ]. identifier[subsampledfastq] = identifier[os] . identifier[path] . identifier[splitext] ( identifier[sample] [ identifier[self] . identifier[analysistype] ]. identifier[baitedfastq] )[ literal[int] ]+ literal[string]
identifier[sample] [ identifier[self] . identifier[analysistype] ]. identifier[seqtkcall] = literal[string] . identifier[format] ( identifier[sample] [ identifier[self] . identifier[analysistype] ]. identifier[baitedfastq] ,
identifier[sample] [ identifier[self] . identifier[analysistype] ]. identifier[subsampledfastq] )
identifier[self] . identifier[samplequeue] . identifier[put] ( identifier[sample] )
identifier[self] . identifier[samplequeue] . identifier[join] () | def subsample(self):
"""
Subsample 1000 reads from the baited files
"""
# Create the threads for the analysis
logging.info('Subsampling FASTQ reads')
for _ in range(self.cpus):
threads = Thread(target=self.subsamplethreads, args=())
threads.setDaemon(True)
threads.start() # depends on [control=['for'], data=[]]
with progressbar(self.runmetadata.samples) as bar:
for sample in bar:
if sample.general.bestassemblyfile != 'NA':
# Set the name of the subsampled FASTQ file
sample[self.analysistype].subsampledfastq = os.path.splitext(sample[self.analysistype].baitedfastq)[0] + '_subsampled.fastq'
# Set the system call
sample[self.analysistype].seqtkcall = 'reformat.sh in={} out={} samplereadstarget=1000'.format(sample[self.analysistype].baitedfastq, sample[self.analysistype].subsampledfastq)
# Add the sample to the queue
self.samplequeue.put(sample) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['sample']] # depends on [control=['with'], data=['bar']]
self.samplequeue.join() |
def upgrade():
    """Upgrade database.

    Rename every index, unique constraint and foreign-key constraint
    whose live-database name differs from the one generated by
    ``NAMING_CONVENTION``: each mismatched object is dropped and
    re-created under the conventional name. CHECK constraints are only
    warned about; primary keys are left untouched.
    """
    op.execute('COMMIT')  # See https://bitbucket.org/zzzeek/alembic/issue/123
    ctx = op.get_context()
    # Bind the *desired* schema (target metadata) to the live engine so
    # it can be compared against what reflection reports below.
    metadata = ctx.opts['target_metadata']
    metadata.naming_convention = NAMING_CONVENTION
    metadata.bind = ctx.connection.engine
    insp = Inspector.from_engine(ctx.connection.engine)
    for table_name in insp.get_table_names():
        # Skip tables that exist in the database but not in the metadata.
        if table_name not in metadata.tables:
            continue
        table = metadata.tables[table_name]
        # Index the reflected (live) indexes, unique constraints and
        # foreign keys by their column tuples, so each metadata object
        # can be matched to its live counterpart regardless of name.
        ixs = {}
        uqs = {}
        fks = {}
        for ix in insp.get_indexes(table_name):
            ixs[tuple(ix['column_names'])] = ix
        for uq in insp.get_unique_constraints(table_name):
            uqs[tuple(uq['column_names'])] = uq
        for fk in insp.get_foreign_keys(table_name):
            fks[(tuple(fk['constrained_columns']), fk['referred_table'])] = fk
        with op.batch_alter_table(
                table_name, naming_convention=NAMING_CONVENTION) as batch_op:
            for c in list(table.constraints) + list(table.indexes):
                key = None
                if isinstance(c, sa.schema.ForeignKeyConstraint):
                    # FKs are matched on (constrained columns, referred
                    # table). op.f() marks the new name as final, i.e.
                    # exempt from further naming-convention rewriting.
                    key = (tuple(c.column_keys), c.referred_table.name)
                    fk = fks.get(key)
                    if fk and c.name != fk['name']:
                        batch_op.drop_constraint(
                            fk['name'], type_='foreignkey')
                        batch_op.create_foreign_key(
                            op.f(c.name), fk['referred_table'],
                            fk['constrained_columns'],
                            fk['referred_columns'],
                            **fk['options']
                        )
                elif isinstance(c, sa.schema.UniqueConstraint):
                    key = tuple(c.columns.keys())
                    uq = uqs.get(key)
                    if uq and c.name != uq['name']:
                        batch_op.drop_constraint(uq['name'], type_='unique')
                        batch_op.create_unique_constraint(
                            op.f(c.name), uq['column_names'])
                elif isinstance(c, sa.schema.CheckConstraint):
                    # CHECK constraints cannot be renamed generically
                    # here, so only warn the operator.
                    util.warn('Update {0.table.name} CHECK {0.name} '
                              'manually'.format(c))
                elif isinstance(c, sa.schema.Index):
                    key = tuple(c.columns.keys())
                    ix = ixs.get(key)
                    if ix and c.name != ix['name']:
                        batch_op.drop_index(ix['name'])
                        batch_op.create_index(
                            op.f(c.name), ix['column_names'],
                            unique=ix['unique'],
                        )
                elif isinstance(c, sa.schema.PrimaryKeyConstraint) or \
                        c.name == '_unnamed_':
                    # NOTE we don't care about primary keys since they have
                    # specific syntax.
                    pass
                else:
                    raise RuntimeError('Missing {0!r}'.format(c))
constant[Upgrade database.]
call[name[op].execute, parameter[constant[COMMIT]]]
variable[ctx] assign[=] call[name[op].get_context, parameter[]]
variable[metadata] assign[=] call[name[ctx].opts][constant[target_metadata]]
name[metadata].naming_convention assign[=] name[NAMING_CONVENTION]
name[metadata].bind assign[=] name[ctx].connection.engine
variable[insp] assign[=] call[name[Inspector].from_engine, parameter[name[ctx].connection.engine]]
for taget[name[table_name]] in starred[call[name[insp].get_table_names, parameter[]]] begin[:]
if compare[name[table_name] <ast.NotIn object at 0x7da2590d7190> name[metadata].tables] begin[:]
continue
variable[table] assign[=] call[name[metadata].tables][name[table_name]]
variable[ixs] assign[=] dictionary[[], []]
variable[uqs] assign[=] dictionary[[], []]
variable[fks] assign[=] dictionary[[], []]
for taget[name[ix]] in starred[call[name[insp].get_indexes, parameter[name[table_name]]]] begin[:]
call[name[ixs]][call[name[tuple], parameter[call[name[ix]][constant[column_names]]]]] assign[=] name[ix]
for taget[name[uq]] in starred[call[name[insp].get_unique_constraints, parameter[name[table_name]]]] begin[:]
call[name[uqs]][call[name[tuple], parameter[call[name[uq]][constant[column_names]]]]] assign[=] name[uq]
for taget[name[fk]] in starred[call[name[insp].get_foreign_keys, parameter[name[table_name]]]] begin[:]
call[name[fks]][tuple[[<ast.Call object at 0x7da1b0e52b90>, <ast.Subscript object at 0x7da1b0e52aa0>]]] assign[=] name[fk]
with call[name[op].batch_alter_table, parameter[name[table_name]]] begin[:]
for taget[name[c]] in starred[binary_operation[call[name[list], parameter[name[table].constraints]] + call[name[list], parameter[name[table].indexes]]]] begin[:]
variable[key] assign[=] constant[None]
if call[name[isinstance], parameter[name[c], name[sa].schema.ForeignKeyConstraint]] begin[:]
variable[key] assign[=] tuple[[<ast.Call object at 0x7da1b0e52380>, <ast.Attribute object at 0x7da1b0e522c0>]]
variable[fk] assign[=] call[name[fks].get, parameter[name[key]]]
if <ast.BoolOp object at 0x7da1b0e520e0> begin[:]
call[name[batch_op].drop_constraint, parameter[call[name[fk]][constant[name]]]]
call[name[batch_op].create_foreign_key, parameter[call[name[op].f, parameter[name[c].name]], call[name[fk]][constant[referred_table]], call[name[fk]][constant[constrained_columns]], call[name[fk]][constant[referred_columns]]]] | keyword[def] identifier[upgrade] ():
literal[string]
identifier[op] . identifier[execute] ( literal[string] )
identifier[ctx] = identifier[op] . identifier[get_context] ()
identifier[metadata] = identifier[ctx] . identifier[opts] [ literal[string] ]
identifier[metadata] . identifier[naming_convention] = identifier[NAMING_CONVENTION]
identifier[metadata] . identifier[bind] = identifier[ctx] . identifier[connection] . identifier[engine]
identifier[insp] = identifier[Inspector] . identifier[from_engine] ( identifier[ctx] . identifier[connection] . identifier[engine] )
keyword[for] identifier[table_name] keyword[in] identifier[insp] . identifier[get_table_names] ():
keyword[if] identifier[table_name] keyword[not] keyword[in] identifier[metadata] . identifier[tables] :
keyword[continue]
identifier[table] = identifier[metadata] . identifier[tables] [ identifier[table_name] ]
identifier[ixs] ={}
identifier[uqs] ={}
identifier[fks] ={}
keyword[for] identifier[ix] keyword[in] identifier[insp] . identifier[get_indexes] ( identifier[table_name] ):
identifier[ixs] [ identifier[tuple] ( identifier[ix] [ literal[string] ])]= identifier[ix]
keyword[for] identifier[uq] keyword[in] identifier[insp] . identifier[get_unique_constraints] ( identifier[table_name] ):
identifier[uqs] [ identifier[tuple] ( identifier[uq] [ literal[string] ])]= identifier[uq]
keyword[for] identifier[fk] keyword[in] identifier[insp] . identifier[get_foreign_keys] ( identifier[table_name] ):
identifier[fks] [( identifier[tuple] ( identifier[fk] [ literal[string] ]), identifier[fk] [ literal[string] ])]= identifier[fk]
keyword[with] identifier[op] . identifier[batch_alter_table] (
identifier[table_name] , identifier[naming_convention] = identifier[NAMING_CONVENTION] ) keyword[as] identifier[batch_op] :
keyword[for] identifier[c] keyword[in] identifier[list] ( identifier[table] . identifier[constraints] )+ identifier[list] ( identifier[table] . identifier[indexes] ):
identifier[key] = keyword[None]
keyword[if] identifier[isinstance] ( identifier[c] , identifier[sa] . identifier[schema] . identifier[ForeignKeyConstraint] ):
identifier[key] =( identifier[tuple] ( identifier[c] . identifier[column_keys] ), identifier[c] . identifier[referred_table] . identifier[name] )
identifier[fk] = identifier[fks] . identifier[get] ( identifier[key] )
keyword[if] identifier[fk] keyword[and] identifier[c] . identifier[name] != identifier[fk] [ literal[string] ]:
identifier[batch_op] . identifier[drop_constraint] (
identifier[fk] [ literal[string] ], identifier[type_] = literal[string] )
identifier[batch_op] . identifier[create_foreign_key] (
identifier[op] . identifier[f] ( identifier[c] . identifier[name] ), identifier[fk] [ literal[string] ],
identifier[fk] [ literal[string] ],
identifier[fk] [ literal[string] ],
** identifier[fk] [ literal[string] ]
)
keyword[elif] identifier[isinstance] ( identifier[c] , identifier[sa] . identifier[schema] . identifier[UniqueConstraint] ):
identifier[key] = identifier[tuple] ( identifier[c] . identifier[columns] . identifier[keys] ())
identifier[uq] = identifier[uqs] . identifier[get] ( identifier[key] )
keyword[if] identifier[uq] keyword[and] identifier[c] . identifier[name] != identifier[uq] [ literal[string] ]:
identifier[batch_op] . identifier[drop_constraint] ( identifier[uq] [ literal[string] ], identifier[type_] = literal[string] )
identifier[batch_op] . identifier[create_unique_constraint] (
identifier[op] . identifier[f] ( identifier[c] . identifier[name] ), identifier[uq] [ literal[string] ])
keyword[elif] identifier[isinstance] ( identifier[c] , identifier[sa] . identifier[schema] . identifier[CheckConstraint] ):
identifier[util] . identifier[warn] ( literal[string]
literal[string] . identifier[format] ( identifier[c] ))
keyword[elif] identifier[isinstance] ( identifier[c] , identifier[sa] . identifier[schema] . identifier[Index] ):
identifier[key] = identifier[tuple] ( identifier[c] . identifier[columns] . identifier[keys] ())
identifier[ix] = identifier[ixs] . identifier[get] ( identifier[key] )
keyword[if] identifier[ix] keyword[and] identifier[c] . identifier[name] != identifier[ix] [ literal[string] ]:
identifier[batch_op] . identifier[drop_index] ( identifier[ix] [ literal[string] ])
identifier[batch_op] . identifier[create_index] (
identifier[op] . identifier[f] ( identifier[c] . identifier[name] ), identifier[ix] [ literal[string] ],
identifier[unique] = identifier[ix] [ literal[string] ],
)
keyword[elif] identifier[isinstance] ( identifier[c] , identifier[sa] . identifier[schema] . identifier[PrimaryKeyConstraint] ) keyword[or] identifier[c] . identifier[name] == literal[string] :
keyword[pass]
keyword[else] :
keyword[raise] identifier[RuntimeError] ( literal[string] . identifier[format] ( identifier[c] )) | def upgrade():
"""Upgrade database."""
op.execute('COMMIT') # See https://bitbucket.org/zzzeek/alembic/issue/123
ctx = op.get_context()
metadata = ctx.opts['target_metadata']
metadata.naming_convention = NAMING_CONVENTION
metadata.bind = ctx.connection.engine
insp = Inspector.from_engine(ctx.connection.engine)
for table_name in insp.get_table_names():
if table_name not in metadata.tables:
continue # depends on [control=['if'], data=[]]
table = metadata.tables[table_name]
ixs = {}
uqs = {}
fks = {}
for ix in insp.get_indexes(table_name):
ixs[tuple(ix['column_names'])] = ix # depends on [control=['for'], data=['ix']]
for uq in insp.get_unique_constraints(table_name):
uqs[tuple(uq['column_names'])] = uq # depends on [control=['for'], data=['uq']]
for fk in insp.get_foreign_keys(table_name):
fks[tuple(fk['constrained_columns']), fk['referred_table']] = fk # depends on [control=['for'], data=['fk']]
with op.batch_alter_table(table_name, naming_convention=NAMING_CONVENTION) as batch_op:
for c in list(table.constraints) + list(table.indexes):
key = None
if isinstance(c, sa.schema.ForeignKeyConstraint):
key = (tuple(c.column_keys), c.referred_table.name)
fk = fks.get(key)
if fk and c.name != fk['name']:
batch_op.drop_constraint(fk['name'], type_='foreignkey')
batch_op.create_foreign_key(op.f(c.name), fk['referred_table'], fk['constrained_columns'], fk['referred_columns'], **fk['options']) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
elif isinstance(c, sa.schema.UniqueConstraint):
key = tuple(c.columns.keys())
uq = uqs.get(key)
if uq and c.name != uq['name']:
batch_op.drop_constraint(uq['name'], type_='unique')
batch_op.create_unique_constraint(op.f(c.name), uq['column_names']) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
elif isinstance(c, sa.schema.CheckConstraint):
util.warn('Update {0.table.name} CHECK {0.name} manually'.format(c)) # depends on [control=['if'], data=[]]
elif isinstance(c, sa.schema.Index):
key = tuple(c.columns.keys())
ix = ixs.get(key)
if ix and c.name != ix['name']:
batch_op.drop_index(ix['name'])
batch_op.create_index(op.f(c.name), ix['column_names'], unique=ix['unique']) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
elif isinstance(c, sa.schema.PrimaryKeyConstraint) or c.name == '_unnamed_':
# NOTE we don't care about primary keys since they have
# specific syntax.
pass # depends on [control=['if'], data=[]]
else:
raise RuntimeError('Missing {0!r}'.format(c)) # depends on [control=['for'], data=['c']] # depends on [control=['with'], data=['batch_op']] # depends on [control=['for'], data=['table_name']] |
def chrome_setup_view(request):
    """Register a browser-side GCM token for the logged-in user.

    Requires a valid login session.  The "token" POST parameter is stored as
    the "gcm_token" field on the user's NotificationConfig.  Returns a small
    JSON payload indicating success or failure.
    """
    logger.debug(request.POST)

    token = None
    if request.method == "POST" and "token" in request.POST:
        token = request.POST.get("token")

    # Missing/empty token (or non-POST request) is rejected up front.
    if not token:
        return HttpResponse('{"error":"Invalid data."}', content_type="text/json")

    config, _ = NotificationConfig.objects.get_or_create(user=request.user)
    config.gcm_token = token
    config.save()
    return HttpResponse('{"success":"Now registered."}', content_type="text/json")
constant[Set up a browser-side GCM session.
This *requires* a valid login session. A "token" POST parameter is saved under the "gcm_token"
parameter in the logged in user's NotificationConfig.
]
call[name[logger].debug, parameter[name[request].POST]]
variable[token] assign[=] constant[None]
if compare[name[request].method equal[==] constant[POST]] begin[:]
if compare[constant[token] in name[request].POST] begin[:]
variable[token] assign[=] call[name[request].POST.get, parameter[constant[token]]]
if <ast.UnaryOp object at 0x7da20c6a9300> begin[:]
return[call[name[HttpResponse], parameter[constant[{"error":"Invalid data."}]]]]
<ast.Tuple object at 0x7da20c6a95d0> assign[=] call[name[NotificationConfig].objects.get_or_create, parameter[]]
name[ncfg].gcm_token assign[=] name[token]
call[name[ncfg].save, parameter[]]
return[call[name[HttpResponse], parameter[constant[{"success":"Now registered."}]]]] | keyword[def] identifier[chrome_setup_view] ( identifier[request] ):
literal[string]
identifier[logger] . identifier[debug] ( identifier[request] . identifier[POST] )
identifier[token] = keyword[None]
keyword[if] identifier[request] . identifier[method] == literal[string] :
keyword[if] literal[string] keyword[in] identifier[request] . identifier[POST] :
identifier[token] = identifier[request] . identifier[POST] . identifier[get] ( literal[string] )
keyword[if] keyword[not] identifier[token] :
keyword[return] identifier[HttpResponse] ( literal[string] , identifier[content_type] = literal[string] )
identifier[ncfg] , identifier[_] = identifier[NotificationConfig] . identifier[objects] . identifier[get_or_create] ( identifier[user] = identifier[request] . identifier[user] )
identifier[ncfg] . identifier[gcm_token] = identifier[token]
identifier[ncfg] . identifier[save] ()
keyword[return] identifier[HttpResponse] ( literal[string] , identifier[content_type] = literal[string] ) | def chrome_setup_view(request):
"""Set up a browser-side GCM session.
This *requires* a valid login session. A "token" POST parameter is saved under the "gcm_token"
parameter in the logged in user's NotificationConfig.
"""
logger.debug(request.POST)
token = None
if request.method == 'POST':
if 'token' in request.POST:
token = request.POST.get('token') # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
if not token:
return HttpResponse('{"error":"Invalid data."}', content_type='text/json') # depends on [control=['if'], data=[]]
(ncfg, _) = NotificationConfig.objects.get_or_create(user=request.user)
ncfg.gcm_token = token
ncfg.save()
return HttpResponse('{"success":"Now registered."}', content_type='text/json') |
def get_client(self, email=None, password=None, **__):
    """Return the stored google data client, or build a fresh one.

    A client already present on ``self.client`` is returned as-is;
    otherwise a new ``Auth`` client is constructed from the supplied
    credentials.  Extra keyword arguments are accepted and ignored.
    """
    cached = self.client
    return cached if cached is not None else Auth(email, password)
return Auth(email, password) | def function[get_client, parameter[self, email, password]]:
constant[Get the google data client.]
if compare[name[self].client is_not constant[None]] begin[:]
return[name[self].client]
return[call[name[Auth], parameter[name[email], name[password]]]] | keyword[def] identifier[get_client] ( identifier[self] , identifier[email] = keyword[None] , identifier[password] = keyword[None] ,** identifier[__] ):
literal[string]
keyword[if] identifier[self] . identifier[client] keyword[is] keyword[not] keyword[None] :
keyword[return] identifier[self] . identifier[client]
keyword[return] identifier[Auth] ( identifier[email] , identifier[password] ) | def get_client(self, email=None, password=None, **__):
"""Get the google data client."""
if self.client is not None:
return self.client # depends on [control=['if'], data=[]]
return Auth(email, password) |
def rewrite_single_shorthand_state_decl(data):  # pylint: disable=C0103
    '''
    Rewrite all state declarations that look like this::

        state_id_decl:
          state.func

    into::

        state_id_decl:
          state.func: []

    Mutates *data* in place and returns None.
    '''
    # dict.items() replaces the six.iteritems() Python-2 compatibility shim;
    # only existing values are replaced while iterating (no keys are added or
    # removed), which is safe on a live dict view.
    for sid, states in data.items():
        if isinstance(states, str):
            data[sid] = {states: []}
constant[
Rewrite all state declarations that look like this::
state_id_decl:
state.func
into::
state_id_decl:
state.func: []
]
for taget[tuple[[<ast.Name object at 0x7da20c7c9570>, <ast.Name object at 0x7da20c7c9750>]]] in starred[call[name[six].iteritems, parameter[name[data]]]] begin[:]
if call[name[isinstance], parameter[name[states], name[six].string_types]] begin[:]
call[name[data]][name[sid]] assign[=] dictionary[[<ast.Name object at 0x7da20c7caaa0>], [<ast.List object at 0x7da20c7c9e70>]] | keyword[def] identifier[rewrite_single_shorthand_state_decl] ( identifier[data] ):
literal[string]
keyword[for] identifier[sid] , identifier[states] keyword[in] identifier[six] . identifier[iteritems] ( identifier[data] ):
keyword[if] identifier[isinstance] ( identifier[states] , identifier[six] . identifier[string_types] ):
identifier[data] [ identifier[sid] ]={ identifier[states] :[]} | def rewrite_single_shorthand_state_decl(data): # pylint: disable=C0103
'\n Rewrite all state declarations that look like this::\n\n state_id_decl:\n state.func\n\n into::\n\n state_id_decl:\n state.func: []\n '
for (sid, states) in six.iteritems(data):
if isinstance(states, six.string_types):
data[sid] = {states: []} # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]] |
def filterfalse(coro, iterable, limit=0, loop=None):
    """
    Returns an ordered list of the values in ``iterable`` for which the
    asynchronous truth test ``coro`` evaluates to False.

    Asynchronous counterpart of the built-in ``itertools.filterfalse``:
    it delegates to ``filter`` with a falsiness assertion, so items are
    processed concurrently (bounded by ``limit``) while results keep
    their input order.

    This function is a coroutine and can be composed in a pipeline chain
    with the ``|`` operator.

    Arguments:
        coro (coroutine function): coroutine filter function to call
            accepting iterable values.
        iterable (iterable): an iterable collection yielding
            coroutines functions.
        limit (int): max filtering concurrency limit. Use ``0`` for no limit.
        loop (asyncio.BaseEventLoop): optional event loop to use.

    Raises:
        TypeError: if coro argument is not a coroutine function.

    Returns:
        list: ordered values that did not pass the filter.

    Usage::

        async def iseven(num):
            return num % 2 == 0

        await paco.filterfalse(iseven, [1, 2, 3, 4, 5])
        # => [1, 3, 5]
    """
    result = yield from filter(coro, iterable,
                               assert_fn=assert_false,
                               limit=limit, loop=loop)
    return result
constant[
Returns a list of all the values in coll which pass an asynchronous truth
test coroutine.
Operations are executed concurrently by default, but results
will be in order.
You can configure the concurrency via `limit` param.
This function is the asynchronous equivalent port Python built-in
`filterfalse()` function.
This function is a coroutine.
This function can be composed in a pipeline chain with ``|`` operator.
Arguments:
coro (coroutine function): coroutine filter function to call accepting
iterable values.
iterable (iterable): an iterable collection yielding
coroutines functions.
assert_fn (coroutinefunction): optional assertion function.
limit (int): max filtering concurrency limit. Use ``0`` for no limit.
loop (asyncio.BaseEventLoop): optional event loop to use.
Raises:
TypeError: if coro argument is not a coroutine function.
Returns:
filtered values (list): ordered list containing values that do not
passed the filter.
Usage::
async def iseven(num):
return num % 2 == 0
await paco.filterfalse(coro, [1, 2, 3, 4, 5])
# => [1, 3, 5]
]
return[<ast.YieldFrom object at 0x7da18bccbd60>] | keyword[def] identifier[filterfalse] ( identifier[coro] , identifier[iterable] , identifier[limit] = literal[int] , identifier[loop] = keyword[None] ):
literal[string]
keyword[return] ( keyword[yield] keyword[from] identifier[filter] ( identifier[coro] , identifier[iterable] ,
identifier[assert_fn] = identifier[assert_false] ,
identifier[limit] = identifier[limit] , identifier[loop] = identifier[loop] )) | def filterfalse(coro, iterable, limit=0, loop=None):
"""
Returns a list of all the values in coll which pass an asynchronous truth
test coroutine.
Operations are executed concurrently by default, but results
will be in order.
You can configure the concurrency via `limit` param.
This function is the asynchronous equivalent port Python built-in
`filterfalse()` function.
This function is a coroutine.
This function can be composed in a pipeline chain with ``|`` operator.
Arguments:
coro (coroutine function): coroutine filter function to call accepting
iterable values.
iterable (iterable): an iterable collection yielding
coroutines functions.
assert_fn (coroutinefunction): optional assertion function.
limit (int): max filtering concurrency limit. Use ``0`` for no limit.
loop (asyncio.BaseEventLoop): optional event loop to use.
Raises:
TypeError: if coro argument is not a coroutine function.
Returns:
filtered values (list): ordered list containing values that do not
passed the filter.
Usage::
async def iseven(num):
return num % 2 == 0
await paco.filterfalse(coro, [1, 2, 3, 4, 5])
# => [1, 3, 5]
"""
return (yield from filter(coro, iterable, assert_fn=assert_false, limit=limit, loop=loop)) |
def setLogFile(self, filepath):
    """Set the destination file for log output.

    Passing ``None`` directs log messages to STDOUT instead of a file.

    Raises:
        IOError: if the parent directory of *filepath* does not exist, or
            if *filepath* itself is a directory.
    """
    if filepath is None:
        self._log_file = None
        return

    resolved = os.path.abspath(filepath)
    # The parent directory must exist and the target must not be a directory.
    if not os.path.isdir(os.path.dirname(resolved)) or os.path.isdir(resolved):
        raise IOError("File not found: " + filepath)
    self._log_file = resolved
constant[ Defines the file to which output log messages should be sent.
Set to `None` to print to STDOUT instead.
]
if compare[name[filepath] is constant[None]] begin[:]
name[self]._log_file assign[=] constant[None]
return[None]
variable[parsed_path] assign[=] call[name[os].path.abspath, parameter[name[filepath]]]
if <ast.BoolOp object at 0x7da1b1113340> begin[:]
name[self]._log_file assign[=] name[parsed_path] | keyword[def] identifier[setLogFile] ( identifier[self] , identifier[filepath] ):
literal[string]
keyword[if] identifier[filepath] keyword[is] keyword[None] :
identifier[self] . identifier[_log_file] = keyword[None]
keyword[return]
identifier[parsed_path] = identifier[os] . identifier[path] . identifier[abspath] ( identifier[filepath] )
keyword[if] identifier[os] . identifier[path] . identifier[isdir] ( identifier[os] . identifier[path] . identifier[dirname] ( identifier[parsed_path] )) keyword[and] keyword[not] identifier[os] . identifier[path] . identifier[isdir] ( identifier[parsed_path] ):
identifier[self] . identifier[_log_file] = identifier[parsed_path]
keyword[else] :
keyword[raise] identifier[IOError] ( literal[string] + identifier[filepath] ) | def setLogFile(self, filepath):
""" Defines the file to which output log messages should be sent.
Set to `None` to print to STDOUT instead.
"""
if filepath is None:
self._log_file = None
return # depends on [control=['if'], data=[]]
parsed_path = os.path.abspath(filepath)
# Checks if the provided log filename is in a real directory, and that
# the filename itself is not a directory.
if os.path.isdir(os.path.dirname(parsed_path)) and (not os.path.isdir(parsed_path)):
self._log_file = parsed_path # depends on [control=['if'], data=[]]
else:
raise IOError('File not found: ' + filepath) |
def Gaussian(x, mu, sig):
    """Symbolic Gaussian probability density function.

    :param x: free variable.
    :param mu: mean of the distribution.
    :param sig: standard deviation of the distribution.
    :return: sympy.Expr for a Gaussian pdf.
    """
    variance = sig**2
    return sympy.exp(-(x - mu)**2 / (2 * variance)) / sympy.sqrt(2 * sympy.pi * variance)
constant[
Gaussian pdf.
:param x: free variable.
:param mu: mean of the distribution.
:param sig: standard deviation of the distribution.
:return: sympy.Expr for a Gaussian pdf.
]
return[binary_operation[call[name[sympy].exp, parameter[binary_operation[<ast.UnaryOp object at 0x7da1b13a9090> / binary_operation[constant[2] * binary_operation[name[sig] ** constant[2]]]]]] / call[name[sympy].sqrt, parameter[binary_operation[binary_operation[constant[2] * name[sympy].pi] * binary_operation[name[sig] ** constant[2]]]]]]] | keyword[def] identifier[Gaussian] ( identifier[x] , identifier[mu] , identifier[sig] ):
literal[string]
keyword[return] identifier[sympy] . identifier[exp] (-( identifier[x] - identifier[mu] )** literal[int] /( literal[int] * identifier[sig] ** literal[int] ))/ identifier[sympy] . identifier[sqrt] ( literal[int] * identifier[sympy] . identifier[pi] * identifier[sig] ** literal[int] ) | def Gaussian(x, mu, sig):
"""
Gaussian pdf.
:param x: free variable.
:param mu: mean of the distribution.
:param sig: standard deviation of the distribution.
:return: sympy.Expr for a Gaussian pdf.
"""
return sympy.exp(-(x - mu) ** 2 / (2 * sig ** 2)) / sympy.sqrt(2 * sympy.pi * sig ** 2) |
def create(self, list_id, data):
    """Create a new merge field on the given list.

    :param list_id: The unique id for the list.
    :type list_id: :py:class:`str`
    :param data: The request body parameters
    :type data: :py:class:`dict`
        data = {
            "name": string*,
            "type": string*
        }
    :raises KeyError: when ``data`` lacks a required "name" or "type" key.
    """
    self.list_id = list_id

    # Both fields are mandatory for the merge-field endpoint.
    for required in ('name', 'type'):
        if required not in data:
            raise KeyError('The list merge field must have a ' + required)

    response = self._mc_client._post(
        url=self._build_path(list_id, 'merge-fields'),
        data=data,
    )
    # Remember the id of the created field (None when the API gave nothing back).
    self.merge_id = response['merge_id'] if response is not None else None
    return response
constant[
Add a new merge field for a specific list.
:param list_id: The unique id for the list.
:type list_id: :py:class:`str`
:param data: The request body parameters
:type data: :py:class:`dict`
data = {
"name": string*,
"type": string*
}
]
name[self].list_id assign[=] name[list_id]
if compare[constant[name] <ast.NotIn object at 0x7da2590d7190> name[data]] begin[:]
<ast.Raise object at 0x7da1b01e69e0>
if compare[constant[type] <ast.NotIn object at 0x7da2590d7190> name[data]] begin[:]
<ast.Raise object at 0x7da1b01e7100>
variable[response] assign[=] call[name[self]._mc_client._post, parameter[]]
if compare[name[response] is_not constant[None]] begin[:]
name[self].merge_id assign[=] call[name[response]][constant[merge_id]]
return[name[response]] | keyword[def] identifier[create] ( identifier[self] , identifier[list_id] , identifier[data] ):
literal[string]
identifier[self] . identifier[list_id] = identifier[list_id]
keyword[if] literal[string] keyword[not] keyword[in] identifier[data] :
keyword[raise] identifier[KeyError] ( literal[string] )
keyword[if] literal[string] keyword[not] keyword[in] identifier[data] :
keyword[raise] identifier[KeyError] ( literal[string] )
identifier[response] = identifier[self] . identifier[_mc_client] . identifier[_post] ( identifier[url] = identifier[self] . identifier[_build_path] ( identifier[list_id] , literal[string] ), identifier[data] = identifier[data] )
keyword[if] identifier[response] keyword[is] keyword[not] keyword[None] :
identifier[self] . identifier[merge_id] = identifier[response] [ literal[string] ]
keyword[else] :
identifier[self] . identifier[merge_id] = keyword[None]
keyword[return] identifier[response] | def create(self, list_id, data):
"""
Add a new merge field for a specific list.
:param list_id: The unique id for the list.
:type list_id: :py:class:`str`
:param data: The request body parameters
:type data: :py:class:`dict`
data = {
"name": string*,
"type": string*
}
"""
self.list_id = list_id
if 'name' not in data:
raise KeyError('The list merge field must have a name') # depends on [control=['if'], data=[]]
if 'type' not in data:
raise KeyError('The list merge field must have a type') # depends on [control=['if'], data=[]]
response = self._mc_client._post(url=self._build_path(list_id, 'merge-fields'), data=data)
if response is not None:
self.merge_id = response['merge_id'] # depends on [control=['if'], data=['response']]
else:
self.merge_id = None
return response |
def _session_check(self):
    """Try to resume a previously saved authenticated session.

    Loads cookies from the pickled session file (if any) into the current
    requests session, then probes the test URL: if the response contains the
    marker string only seen on unauthenticated sessions, the saved session
    is considered stale.  On success the instance is flagged authenticated
    and its state refreshed.

    :return: True when the saved session is still valid, False otherwise.
    """
    if not os.path.exists(SESSION_FILE):
        self._log.debug("Session file does not exist")
        return False

    with open(SESSION_FILE, 'rb') as handle:
        jar = requests.utils.cookiejar_from_dict(pickle.load(handle))
        self._session.cookies = jar
        self._log.debug("Loaded cookies from session file")

    probe = self._session.get(url=self.TEST_URL, headers=self.HEADERS)
    if self.TEST_KEY not in str(probe.content):
        self._is_authenticated = True
        self._process_state()
        return True

    self._log.debug("Session file appears invalid")
    return False
constant[Attempt to authenticate the user through a session file.
This process is done to avoid having to authenticate the user every
single time. It uses a session file that is saved when a valid session
is captured and then reused. Because sessions can expire, we need to
test the session prior to calling the user authenticated. Right now
that is done with a test string found in an unauthenticated session.
This approach is not an ideal method, but it works.
]
if <ast.UnaryOp object at 0x7da1b100e860> begin[:]
call[name[self]._log.debug, parameter[constant[Session file does not exist]]]
return[constant[False]]
with call[name[open], parameter[name[SESSION_FILE], constant[rb]]] begin[:]
variable[cookies] assign[=] call[name[requests].utils.cookiejar_from_dict, parameter[call[name[pickle].load, parameter[name[f]]]]]
name[self]._session.cookies assign[=] name[cookies]
call[name[self]._log.debug, parameter[constant[Loaded cookies from session file]]]
variable[response] assign[=] call[name[self]._session.get, parameter[]]
if compare[name[self].TEST_KEY in call[name[str], parameter[name[response].content]]] begin[:]
call[name[self]._log.debug, parameter[constant[Session file appears invalid]]]
return[constant[False]]
name[self]._is_authenticated assign[=] constant[True]
call[name[self]._process_state, parameter[]]
return[constant[True]] | keyword[def] identifier[_session_check] ( identifier[self] ):
literal[string]
keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[exists] ( identifier[SESSION_FILE] ):
identifier[self] . identifier[_log] . identifier[debug] ( literal[string] )
keyword[return] keyword[False]
keyword[with] identifier[open] ( identifier[SESSION_FILE] , literal[string] ) keyword[as] identifier[f] :
identifier[cookies] = identifier[requests] . identifier[utils] . identifier[cookiejar_from_dict] ( identifier[pickle] . identifier[load] ( identifier[f] ))
identifier[self] . identifier[_session] . identifier[cookies] = identifier[cookies]
identifier[self] . identifier[_log] . identifier[debug] ( literal[string] )
identifier[response] = identifier[self] . identifier[_session] . identifier[get] ( identifier[url] = identifier[self] . identifier[TEST_URL] , identifier[headers] = identifier[self] . identifier[HEADERS] )
keyword[if] identifier[self] . identifier[TEST_KEY] keyword[in] identifier[str] ( identifier[response] . identifier[content] ):
identifier[self] . identifier[_log] . identifier[debug] ( literal[string] )
keyword[return] keyword[False]
identifier[self] . identifier[_is_authenticated] = keyword[True]
identifier[self] . identifier[_process_state] ()
keyword[return] keyword[True] | def _session_check(self):
"""Attempt to authenticate the user through a session file.
This process is done to avoid having to authenticate the user every
single time. It uses a session file that is saved when a valid session
is captured and then reused. Because sessions can expire, we need to
test the session prior to calling the user authenticated. Right now
that is done with a test string found in an unauthenticated session.
This approach is not an ideal method, but it works.
"""
if not os.path.exists(SESSION_FILE):
self._log.debug('Session file does not exist')
return False # depends on [control=['if'], data=[]]
with open(SESSION_FILE, 'rb') as f:
cookies = requests.utils.cookiejar_from_dict(pickle.load(f))
self._session.cookies = cookies
self._log.debug('Loaded cookies from session file') # depends on [control=['with'], data=['f']]
response = self._session.get(url=self.TEST_URL, headers=self.HEADERS)
if self.TEST_KEY in str(response.content):
self._log.debug('Session file appears invalid')
return False # depends on [control=['if'], data=[]]
self._is_authenticated = True
self._process_state()
return True |
def anti_alias(image):
    """
    Apply Anti-Alias filter to a binary image

    ANTsR function: N/A

    Arguments
    ---------
    image : ANTsImage
        binary image to which anti-aliasing will be applied

    Returns
    -------
    ANTsImage

    Example
    -------
    >>> import ants
    >>> img = ants.image_read(ants.get_data('r16'))
    >>> mask = ants.get_mask(img)
    >>> mask_aa = ants.anti_alias(mask)
    >>> ants.plot(mask)
    >>> ants.plot(mask_aa)
    """
    # The compiled filter expects an 'unsigned char' image; rescale and
    # convert first when necessary.
    if image.pixeltype != 'unsigned char':
        if image.max() > 255.:
            # NOTE(review): (image - max) / (max - min) maps intensities into
            # [-1, 0] rather than [0, 1]; presumably (image - min) was
            # intended — confirm against upstream ANTs before changing.
            image = (image - image.max()) / (image.max() - image.min())
        image = image.clone('unsigned char')

    # Dispatch to the dimension/pixeltype-specific compiled routine
    # (selected via the image's library suffix).
    libfn = utils.get_lib_fn('antiAlias%s' % image._libsuffix)
    new_ptr = libfn(image.pointer)
    # The result is wrapped as a float image with the same geometry.
    return iio.ANTsImage(pixeltype='float', dimension=image.dimension,
                         components=image.components, pointer=new_ptr)
constant[
Apply Anti-Alias filter to a binary image
ANTsR function: N/A
Arguments
---------
image : ANTsImage
binary image to which anti-aliasing will be applied
Returns
-------
ANTsImage
Example
-------
>>> import ants
>>> img = ants.image_read(ants.get_data('r16'))
>>> mask = ants.get_mask(img)
>>> mask_aa = ants.anti_alias(mask)
>>> ants.plot(mask)
>>> ants.plot(mask_aa)
]
if compare[name[image].pixeltype not_equal[!=] constant[unsigned char]] begin[:]
if compare[call[name[image].max, parameter[]] greater[>] constant[255.0]] begin[:]
variable[image] assign[=] binary_operation[binary_operation[name[image] - call[name[image].max, parameter[]]] / binary_operation[call[name[image].max, parameter[]] - call[name[image].min, parameter[]]]]
variable[image] assign[=] call[name[image].clone, parameter[constant[unsigned char]]]
variable[libfn] assign[=] call[name[utils].get_lib_fn, parameter[binary_operation[constant[antiAlias%s] <ast.Mod object at 0x7da2590d6920> name[image]._libsuffix]]]
variable[new_ptr] assign[=] call[name[libfn], parameter[name[image].pointer]]
return[call[name[iio].ANTsImage, parameter[]]] | keyword[def] identifier[anti_alias] ( identifier[image] ):
literal[string]
keyword[if] identifier[image] . identifier[pixeltype] != literal[string] :
keyword[if] identifier[image] . identifier[max] ()> literal[int] :
identifier[image] =( identifier[image] - identifier[image] . identifier[max] ())/( identifier[image] . identifier[max] ()- identifier[image] . identifier[min] ())
identifier[image] = identifier[image] . identifier[clone] ( literal[string] )
identifier[libfn] = identifier[utils] . identifier[get_lib_fn] ( literal[string] % identifier[image] . identifier[_libsuffix] )
identifier[new_ptr] = identifier[libfn] ( identifier[image] . identifier[pointer] )
keyword[return] identifier[iio] . identifier[ANTsImage] ( identifier[pixeltype] = literal[string] , identifier[dimension] = identifier[image] . identifier[dimension] ,
identifier[components] = identifier[image] . identifier[components] , identifier[pointer] = identifier[new_ptr] ) | def anti_alias(image):
"""
Apply Anti-Alias filter to a binary image
ANTsR function: N/A
Arguments
---------
image : ANTsImage
binary image to which anti-aliasing will be applied
Returns
-------
ANTsImage
Example
-------
>>> import ants
>>> img = ants.image_read(ants.get_data('r16'))
>>> mask = ants.get_mask(img)
>>> mask_aa = ants.anti_alias(mask)
>>> ants.plot(mask)
>>> ants.plot(mask_aa)
"""
if image.pixeltype != 'unsigned char':
if image.max() > 255.0:
image = (image - image.max()) / (image.max() - image.min()) # depends on [control=['if'], data=[]]
image = image.clone('unsigned char') # depends on [control=['if'], data=[]]
libfn = utils.get_lib_fn('antiAlias%s' % image._libsuffix)
new_ptr = libfn(image.pointer)
return iio.ANTsImage(pixeltype='float', dimension=image.dimension, components=image.components, pointer=new_ptr) |
def _unescape(s, uri=False):
"""
Iterative parser for string escapes.
"""
out = ''
while len(s) > 0:
c = s[0]
if c == '\\':
# Backslash escape
esc_c = s[1]
if esc_c in ('u', 'U'):
# Unicode escape
out += six.unichr(int(s[2:6], base=16))
s = s[6:]
continue
else:
if esc_c == 'b':
out += '\b'
elif esc_c == 'f':
out += '\f'
elif esc_c == 'n':
out += '\n'
elif esc_c == 'r':
out += '\r'
elif esc_c == 't':
out += '\t'
else:
if uri and (esc_c == '#'):
# \# is passed through with backslash.
out += '\\'
# Pass through
out += esc_c
s = s[2:]
continue
else:
out += c
s = s[1:]
return out | def function[_unescape, parameter[s, uri]]:
constant[
Iterative parser for string escapes.
]
variable[out] assign[=] constant[]
while compare[call[name[len], parameter[name[s]]] greater[>] constant[0]] begin[:]
variable[c] assign[=] call[name[s]][constant[0]]
if compare[name[c] equal[==] constant[\]] begin[:]
variable[esc_c] assign[=] call[name[s]][constant[1]]
if compare[name[esc_c] in tuple[[<ast.Constant object at 0x7da1afe72a70>, <ast.Constant object at 0x7da1afe71960>]]] begin[:]
<ast.AugAssign object at 0x7da1afe73be0>
variable[s] assign[=] call[name[s]][<ast.Slice object at 0x7da1afe70e20>]
continue
return[name[out]] | keyword[def] identifier[_unescape] ( identifier[s] , identifier[uri] = keyword[False] ):
literal[string]
identifier[out] = literal[string]
keyword[while] identifier[len] ( identifier[s] )> literal[int] :
identifier[c] = identifier[s] [ literal[int] ]
keyword[if] identifier[c] == literal[string] :
identifier[esc_c] = identifier[s] [ literal[int] ]
keyword[if] identifier[esc_c] keyword[in] ( literal[string] , literal[string] ):
identifier[out] += identifier[six] . identifier[unichr] ( identifier[int] ( identifier[s] [ literal[int] : literal[int] ], identifier[base] = literal[int] ))
identifier[s] = identifier[s] [ literal[int] :]
keyword[continue]
keyword[else] :
keyword[if] identifier[esc_c] == literal[string] :
identifier[out] += literal[string]
keyword[elif] identifier[esc_c] == literal[string] :
identifier[out] += literal[string]
keyword[elif] identifier[esc_c] == literal[string] :
identifier[out] += literal[string]
keyword[elif] identifier[esc_c] == literal[string] :
identifier[out] += literal[string]
keyword[elif] identifier[esc_c] == literal[string] :
identifier[out] += literal[string]
keyword[else] :
keyword[if] identifier[uri] keyword[and] ( identifier[esc_c] == literal[string] ):
identifier[out] += literal[string]
identifier[out] += identifier[esc_c]
identifier[s] = identifier[s] [ literal[int] :]
keyword[continue]
keyword[else] :
identifier[out] += identifier[c]
identifier[s] = identifier[s] [ literal[int] :]
keyword[return] identifier[out] | def _unescape(s, uri=False):
"""
Iterative parser for string escapes.
"""
out = ''
while len(s) > 0:
c = s[0]
if c == '\\':
# Backslash escape
esc_c = s[1]
if esc_c in ('u', 'U'):
# Unicode escape
out += six.unichr(int(s[2:6], base=16))
s = s[6:]
continue # depends on [control=['if'], data=[]]
else:
if esc_c == 'b':
out += '\x08' # depends on [control=['if'], data=[]]
elif esc_c == 'f':
out += '\x0c' # depends on [control=['if'], data=[]]
elif esc_c == 'n':
out += '\n' # depends on [control=['if'], data=[]]
elif esc_c == 'r':
out += '\r' # depends on [control=['if'], data=[]]
elif esc_c == 't':
out += '\t' # depends on [control=['if'], data=[]]
else:
if uri and esc_c == '#':
# \# is passed through with backslash.
out += '\\' # depends on [control=['if'], data=[]]
# Pass through
out += esc_c
s = s[2:]
continue # depends on [control=['if'], data=[]]
else:
out += c
s = s[1:] # depends on [control=['while'], data=[]]
return out |
def add_cell_code(self, cell_str, pos=None):
    """Add a Python code cell to the notebook.

    :param cell_str: cell content; surrounding whitespace is stripped.
    :param pos: optional index at which to insert the cell; when ``None``
        the cell is appended at the end.
    :return: None
    """
    stripped = cell_str.strip()
    logging.debug("add_cell_code: {}".format(stripped))
    new_cell = nbf.v4.new_code_cell(stripped)
    cells = self.nb['cells']
    if pos is None:
        cells.append(new_cell)
    else:
        cells.insert(pos, new_cell)
constant[
Add Python cell
:param cell_str: cell content
:return:
]
variable[cell_str] assign[=] call[name[cell_str].strip, parameter[]]
call[name[logging].debug, parameter[call[constant[add_cell_code: {}].format, parameter[name[cell_str]]]]]
variable[cell] assign[=] call[name[nbf].v4.new_code_cell, parameter[name[cell_str]]]
if compare[name[pos] is constant[None]] begin[:]
call[call[name[self].nb][constant[cells]].append, parameter[name[cell]]] | keyword[def] identifier[add_cell_code] ( identifier[self] , identifier[cell_str] , identifier[pos] = keyword[None] ):
literal[string]
identifier[cell_str] = identifier[cell_str] . identifier[strip] ()
identifier[logging] . identifier[debug] ( literal[string] . identifier[format] ( identifier[cell_str] ))
identifier[cell] = identifier[nbf] . identifier[v4] . identifier[new_code_cell] ( identifier[cell_str] )
keyword[if] identifier[pos] keyword[is] keyword[None] :
identifier[self] . identifier[nb] [ literal[string] ]. identifier[append] ( identifier[cell] )
keyword[else] :
identifier[self] . identifier[nb] [ literal[string] ]. identifier[insert] ( identifier[pos] , identifier[cell] ) | def add_cell_code(self, cell_str, pos=None):
"""
Add Python cell
:param cell_str: cell content
:return:
"""
cell_str = cell_str.strip()
logging.debug('add_cell_code: {}'.format(cell_str))
cell = nbf.v4.new_code_cell(cell_str)
if pos is None:
self.nb['cells'].append(cell) # depends on [control=['if'], data=[]]
else:
self.nb['cells'].insert(pos, cell) |
def _starttls(self):
    """
    Exchange a STARTTLS message with Riak to initiate secure communications.

    :return: True if Riak responds with a STARTTLS message code,
        False otherwise.
    """
    resp_code, _ = self._non_connect_send_recv(
        riak.pb.messages.MSG_CODE_START_TLS)
    # Return the comparison result directly instead of branching to
    # explicit True/False.
    return resp_code == riak.pb.messages.MSG_CODE_START_TLS
constant[
Exchange a STARTTLS message with Riak to initiate secure communications
return True is Riak responds with a STARTTLS response, False otherwise
]
<ast.Tuple object at 0x7da20c7c9a20> assign[=] call[name[self]._non_connect_send_recv, parameter[name[riak].pb.messages.MSG_CODE_START_TLS]]
if compare[name[resp_code] equal[==] name[riak].pb.messages.MSG_CODE_START_TLS] begin[:]
return[constant[True]] | keyword[def] identifier[_starttls] ( identifier[self] ):
literal[string]
identifier[resp_code] , identifier[_] = identifier[self] . identifier[_non_connect_send_recv] (
identifier[riak] . identifier[pb] . identifier[messages] . identifier[MSG_CODE_START_TLS] )
keyword[if] identifier[resp_code] == identifier[riak] . identifier[pb] . identifier[messages] . identifier[MSG_CODE_START_TLS] :
keyword[return] keyword[True]
keyword[else] :
keyword[return] keyword[False] | def _starttls(self):
"""
Exchange a STARTTLS message with Riak to initiate secure communications
return True is Riak responds with a STARTTLS response, False otherwise
"""
(resp_code, _) = self._non_connect_send_recv(riak.pb.messages.MSG_CODE_START_TLS)
if resp_code == riak.pb.messages.MSG_CODE_START_TLS:
return True # depends on [control=['if'], data=[]]
else:
return False |
def _insert_uow(self, freerun_entry, flow_request=None):
""" creates unit_of_work and inserts it into the DB
:raise DuplicateKeyError: if unit_of_work with given parameters already exists """
process_entry = context.process_context[freerun_entry.process_name]
arguments = process_entry.arguments
arguments.update(freerun_entry.arguments)
if flow_request:
schedulable_name = flow_request.schedulable_name
timeperiod = flow_request.timeperiod
start_timeperiod = flow_request.start_timeperiod
end_timeperiod = flow_request.end_timeperiod
arguments.update(flow_request.arguments)
else:
schedulable_name = freerun_entry.schedulable_name
timeperiod = time_helper.actual_timeperiod(QUALIFIER_REAL_TIME)
start_timeperiod = timeperiod
end_timeperiod = timeperiod
uow = UnitOfWork()
uow.process_name = schedulable_name
uow.timeperiod = timeperiod
uow.start_id = 0
uow.end_id = 0
uow.start_timeperiod = start_timeperiod
uow.end_timeperiod = end_timeperiod
uow.created_at = datetime.utcnow()
uow.submitted_at = datetime.utcnow()
uow.source = process_entry.source if hasattr(process_entry, 'source') else None
uow.sink = process_entry.sink if hasattr(process_entry, 'sink') else None
uow.state = unit_of_work.STATE_REQUESTED
uow.unit_of_work_type = unit_of_work.TYPE_FREERUN
uow.number_of_retries = 0
uow.arguments = arguments
uow.db_id = self.uow_dao.insert(uow)
msg = 'Created: UOW {0} for {1}@{2}.' \
.format(uow.db_id, freerun_entry.schedulable_name, timeperiod)
self._log_message(INFO, freerun_entry, msg)
return uow | def function[_insert_uow, parameter[self, freerun_entry, flow_request]]:
constant[ creates unit_of_work and inserts it into the DB
:raise DuplicateKeyError: if unit_of_work with given parameters already exists ]
variable[process_entry] assign[=] call[name[context].process_context][name[freerun_entry].process_name]
variable[arguments] assign[=] name[process_entry].arguments
call[name[arguments].update, parameter[name[freerun_entry].arguments]]
if name[flow_request] begin[:]
variable[schedulable_name] assign[=] name[flow_request].schedulable_name
variable[timeperiod] assign[=] name[flow_request].timeperiod
variable[start_timeperiod] assign[=] name[flow_request].start_timeperiod
variable[end_timeperiod] assign[=] name[flow_request].end_timeperiod
call[name[arguments].update, parameter[name[flow_request].arguments]]
variable[uow] assign[=] call[name[UnitOfWork], parameter[]]
name[uow].process_name assign[=] name[schedulable_name]
name[uow].timeperiod assign[=] name[timeperiod]
name[uow].start_id assign[=] constant[0]
name[uow].end_id assign[=] constant[0]
name[uow].start_timeperiod assign[=] name[start_timeperiod]
name[uow].end_timeperiod assign[=] name[end_timeperiod]
name[uow].created_at assign[=] call[name[datetime].utcnow, parameter[]]
name[uow].submitted_at assign[=] call[name[datetime].utcnow, parameter[]]
name[uow].source assign[=] <ast.IfExp object at 0x7da20c76ed40>
name[uow].sink assign[=] <ast.IfExp object at 0x7da20c76d750>
name[uow].state assign[=] name[unit_of_work].STATE_REQUESTED
name[uow].unit_of_work_type assign[=] name[unit_of_work].TYPE_FREERUN
name[uow].number_of_retries assign[=] constant[0]
name[uow].arguments assign[=] name[arguments]
name[uow].db_id assign[=] call[name[self].uow_dao.insert, parameter[name[uow]]]
variable[msg] assign[=] call[constant[Created: UOW {0} for {1}@{2}.].format, parameter[name[uow].db_id, name[freerun_entry].schedulable_name, name[timeperiod]]]
call[name[self]._log_message, parameter[name[INFO], name[freerun_entry], name[msg]]]
return[name[uow]] | keyword[def] identifier[_insert_uow] ( identifier[self] , identifier[freerun_entry] , identifier[flow_request] = keyword[None] ):
literal[string]
identifier[process_entry] = identifier[context] . identifier[process_context] [ identifier[freerun_entry] . identifier[process_name] ]
identifier[arguments] = identifier[process_entry] . identifier[arguments]
identifier[arguments] . identifier[update] ( identifier[freerun_entry] . identifier[arguments] )
keyword[if] identifier[flow_request] :
identifier[schedulable_name] = identifier[flow_request] . identifier[schedulable_name]
identifier[timeperiod] = identifier[flow_request] . identifier[timeperiod]
identifier[start_timeperiod] = identifier[flow_request] . identifier[start_timeperiod]
identifier[end_timeperiod] = identifier[flow_request] . identifier[end_timeperiod]
identifier[arguments] . identifier[update] ( identifier[flow_request] . identifier[arguments] )
keyword[else] :
identifier[schedulable_name] = identifier[freerun_entry] . identifier[schedulable_name]
identifier[timeperiod] = identifier[time_helper] . identifier[actual_timeperiod] ( identifier[QUALIFIER_REAL_TIME] )
identifier[start_timeperiod] = identifier[timeperiod]
identifier[end_timeperiod] = identifier[timeperiod]
identifier[uow] = identifier[UnitOfWork] ()
identifier[uow] . identifier[process_name] = identifier[schedulable_name]
identifier[uow] . identifier[timeperiod] = identifier[timeperiod]
identifier[uow] . identifier[start_id] = literal[int]
identifier[uow] . identifier[end_id] = literal[int]
identifier[uow] . identifier[start_timeperiod] = identifier[start_timeperiod]
identifier[uow] . identifier[end_timeperiod] = identifier[end_timeperiod]
identifier[uow] . identifier[created_at] = identifier[datetime] . identifier[utcnow] ()
identifier[uow] . identifier[submitted_at] = identifier[datetime] . identifier[utcnow] ()
identifier[uow] . identifier[source] = identifier[process_entry] . identifier[source] keyword[if] identifier[hasattr] ( identifier[process_entry] , literal[string] ) keyword[else] keyword[None]
identifier[uow] . identifier[sink] = identifier[process_entry] . identifier[sink] keyword[if] identifier[hasattr] ( identifier[process_entry] , literal[string] ) keyword[else] keyword[None]
identifier[uow] . identifier[state] = identifier[unit_of_work] . identifier[STATE_REQUESTED]
identifier[uow] . identifier[unit_of_work_type] = identifier[unit_of_work] . identifier[TYPE_FREERUN]
identifier[uow] . identifier[number_of_retries] = literal[int]
identifier[uow] . identifier[arguments] = identifier[arguments]
identifier[uow] . identifier[db_id] = identifier[self] . identifier[uow_dao] . identifier[insert] ( identifier[uow] )
identifier[msg] = literal[string] . identifier[format] ( identifier[uow] . identifier[db_id] , identifier[freerun_entry] . identifier[schedulable_name] , identifier[timeperiod] )
identifier[self] . identifier[_log_message] ( identifier[INFO] , identifier[freerun_entry] , identifier[msg] )
keyword[return] identifier[uow] | def _insert_uow(self, freerun_entry, flow_request=None):
""" creates unit_of_work and inserts it into the DB
:raise DuplicateKeyError: if unit_of_work with given parameters already exists """
process_entry = context.process_context[freerun_entry.process_name]
arguments = process_entry.arguments
arguments.update(freerun_entry.arguments)
if flow_request:
schedulable_name = flow_request.schedulable_name
timeperiod = flow_request.timeperiod
start_timeperiod = flow_request.start_timeperiod
end_timeperiod = flow_request.end_timeperiod
arguments.update(flow_request.arguments) # depends on [control=['if'], data=[]]
else:
schedulable_name = freerun_entry.schedulable_name
timeperiod = time_helper.actual_timeperiod(QUALIFIER_REAL_TIME)
start_timeperiod = timeperiod
end_timeperiod = timeperiod
uow = UnitOfWork()
uow.process_name = schedulable_name
uow.timeperiod = timeperiod
uow.start_id = 0
uow.end_id = 0
uow.start_timeperiod = start_timeperiod
uow.end_timeperiod = end_timeperiod
uow.created_at = datetime.utcnow()
uow.submitted_at = datetime.utcnow()
uow.source = process_entry.source if hasattr(process_entry, 'source') else None
uow.sink = process_entry.sink if hasattr(process_entry, 'sink') else None
uow.state = unit_of_work.STATE_REQUESTED
uow.unit_of_work_type = unit_of_work.TYPE_FREERUN
uow.number_of_retries = 0
uow.arguments = arguments
uow.db_id = self.uow_dao.insert(uow)
msg = 'Created: UOW {0} for {1}@{2}.'.format(uow.db_id, freerun_entry.schedulable_name, timeperiod)
self._log_message(INFO, freerun_entry, msg)
return uow |
def save(self):
"""
IPAddress can only change its PTR record. Saves the current state, PUT /ip_address/uuid.
"""
body = {'ip_address': {'ptr_record': self.ptr_record}}
data = self.cloud_manager.request('PUT', '/ip_address/' + self.address, body)
self._reset(**data['ip_address']) | def function[save, parameter[self]]:
constant[
IPAddress can only change its PTR record. Saves the current state, PUT /ip_address/uuid.
]
variable[body] assign[=] dictionary[[<ast.Constant object at 0x7da1b0ef60b0>], [<ast.Dict object at 0x7da1b0ef5de0>]]
variable[data] assign[=] call[name[self].cloud_manager.request, parameter[constant[PUT], binary_operation[constant[/ip_address/] + name[self].address], name[body]]]
call[name[self]._reset, parameter[]] | keyword[def] identifier[save] ( identifier[self] ):
literal[string]
identifier[body] ={ literal[string] :{ literal[string] : identifier[self] . identifier[ptr_record] }}
identifier[data] = identifier[self] . identifier[cloud_manager] . identifier[request] ( literal[string] , literal[string] + identifier[self] . identifier[address] , identifier[body] )
identifier[self] . identifier[_reset] (** identifier[data] [ literal[string] ]) | def save(self):
"""
IPAddress can only change its PTR record. Saves the current state, PUT /ip_address/uuid.
"""
body = {'ip_address': {'ptr_record': self.ptr_record}}
data = self.cloud_manager.request('PUT', '/ip_address/' + self.address, body)
self._reset(**data['ip_address']) |
def save_state(self, fname: str):
"""
Saves the current state of iterator to a file, so that iteration can be
continued. Note that the data is not saved, i.e. the iterator must be
initialized with the same parameters as in the first call.
:param fname: File name to save the information to.
"""
with open(fname, "wb") as fp:
pickle.dump(self.batch_indices, fp)
pickle.dump(self.curr_batch_index, fp)
np.save(fp, [a.asnumpy() for a in self.inverse_data_permutations])
np.save(fp, [a.asnumpy() for a in self.data_permutations]) | def function[save_state, parameter[self, fname]]:
constant[
Saves the current state of iterator to a file, so that iteration can be
continued. Note that the data is not saved, i.e. the iterator must be
initialized with the same parameters as in the first call.
:param fname: File name to save the information to.
]
with call[name[open], parameter[name[fname], constant[wb]]] begin[:]
call[name[pickle].dump, parameter[name[self].batch_indices, name[fp]]]
call[name[pickle].dump, parameter[name[self].curr_batch_index, name[fp]]]
call[name[np].save, parameter[name[fp], <ast.ListComp object at 0x7da18ede5150>]]
call[name[np].save, parameter[name[fp], <ast.ListComp object at 0x7da1b1d0df90>]] | keyword[def] identifier[save_state] ( identifier[self] , identifier[fname] : identifier[str] ):
literal[string]
keyword[with] identifier[open] ( identifier[fname] , literal[string] ) keyword[as] identifier[fp] :
identifier[pickle] . identifier[dump] ( identifier[self] . identifier[batch_indices] , identifier[fp] )
identifier[pickle] . identifier[dump] ( identifier[self] . identifier[curr_batch_index] , identifier[fp] )
identifier[np] . identifier[save] ( identifier[fp] ,[ identifier[a] . identifier[asnumpy] () keyword[for] identifier[a] keyword[in] identifier[self] . identifier[inverse_data_permutations] ])
identifier[np] . identifier[save] ( identifier[fp] ,[ identifier[a] . identifier[asnumpy] () keyword[for] identifier[a] keyword[in] identifier[self] . identifier[data_permutations] ]) | def save_state(self, fname: str):
"""
Saves the current state of iterator to a file, so that iteration can be
continued. Note that the data is not saved, i.e. the iterator must be
initialized with the same parameters as in the first call.
:param fname: File name to save the information to.
"""
with open(fname, 'wb') as fp:
pickle.dump(self.batch_indices, fp)
pickle.dump(self.curr_batch_index, fp)
np.save(fp, [a.asnumpy() for a in self.inverse_data_permutations])
np.save(fp, [a.asnumpy() for a in self.data_permutations]) # depends on [control=['with'], data=['fp']] |
def remove(self,obj):
"""
Remove an object from the relation
"""
relationship_table = self.params['relationship_table']
with self.obj.backend.transaction(implicit = True):
condition = and_(relationship_table.c[self.params['related_pk_field_name']] == obj.pk,
relationship_table.c[self.params['pk_field_name']] == self.obj.pk)
self.obj.backend.connection.execute(delete(relationship_table).where(condition))
self._queryset = None | def function[remove, parameter[self, obj]]:
constant[
Remove an object from the relation
]
variable[relationship_table] assign[=] call[name[self].params][constant[relationship_table]]
with call[name[self].obj.backend.transaction, parameter[]] begin[:]
variable[condition] assign[=] call[name[and_], parameter[compare[call[name[relationship_table].c][call[name[self].params][constant[related_pk_field_name]]] equal[==] name[obj].pk], compare[call[name[relationship_table].c][call[name[self].params][constant[pk_field_name]]] equal[==] name[self].obj.pk]]]
call[name[self].obj.backend.connection.execute, parameter[call[call[name[delete], parameter[name[relationship_table]]].where, parameter[name[condition]]]]]
name[self]._queryset assign[=] constant[None] | keyword[def] identifier[remove] ( identifier[self] , identifier[obj] ):
literal[string]
identifier[relationship_table] = identifier[self] . identifier[params] [ literal[string] ]
keyword[with] identifier[self] . identifier[obj] . identifier[backend] . identifier[transaction] ( identifier[implicit] = keyword[True] ):
identifier[condition] = identifier[and_] ( identifier[relationship_table] . identifier[c] [ identifier[self] . identifier[params] [ literal[string] ]]== identifier[obj] . identifier[pk] ,
identifier[relationship_table] . identifier[c] [ identifier[self] . identifier[params] [ literal[string] ]]== identifier[self] . identifier[obj] . identifier[pk] )
identifier[self] . identifier[obj] . identifier[backend] . identifier[connection] . identifier[execute] ( identifier[delete] ( identifier[relationship_table] ). identifier[where] ( identifier[condition] ))
identifier[self] . identifier[_queryset] = keyword[None] | def remove(self, obj):
"""
Remove an object from the relation
"""
relationship_table = self.params['relationship_table']
with self.obj.backend.transaction(implicit=True):
condition = and_(relationship_table.c[self.params['related_pk_field_name']] == obj.pk, relationship_table.c[self.params['pk_field_name']] == self.obj.pk)
self.obj.backend.connection.execute(delete(relationship_table).where(condition))
self._queryset = None # depends on [control=['with'], data=[]] |
def nice_report(self):
"""Return a nicely formatted original report."""
if not self.json:
return '[no CSP report data]'
try:
data = json.loads(self.json)
except ValueError:
return "Invalid CSP report: '{}'".format(self.json)
if 'csp-report' not in data:
return 'Invalid CSP report: ' + json.dumps(data, indent=4, sort_keys=True, separators=(',', ': '))
return json.dumps(data['csp-report'], indent=4, sort_keys=True, separators=(',', ': ')) | def function[nice_report, parameter[self]]:
constant[Return a nicely formatted original report.]
if <ast.UnaryOp object at 0x7da1b0c939a0> begin[:]
return[constant[[no CSP report data]]]
<ast.Try object at 0x7da1b0c93700>
if compare[constant[csp-report] <ast.NotIn object at 0x7da2590d7190> name[data]] begin[:]
return[binary_operation[constant[Invalid CSP report: ] + call[name[json].dumps, parameter[name[data]]]]]
return[call[name[json].dumps, parameter[call[name[data]][constant[csp-report]]]]] | keyword[def] identifier[nice_report] ( identifier[self] ):
literal[string]
keyword[if] keyword[not] identifier[self] . identifier[json] :
keyword[return] literal[string]
keyword[try] :
identifier[data] = identifier[json] . identifier[loads] ( identifier[self] . identifier[json] )
keyword[except] identifier[ValueError] :
keyword[return] literal[string] . identifier[format] ( identifier[self] . identifier[json] )
keyword[if] literal[string] keyword[not] keyword[in] identifier[data] :
keyword[return] literal[string] + identifier[json] . identifier[dumps] ( identifier[data] , identifier[indent] = literal[int] , identifier[sort_keys] = keyword[True] , identifier[separators] =( literal[string] , literal[string] ))
keyword[return] identifier[json] . identifier[dumps] ( identifier[data] [ literal[string] ], identifier[indent] = literal[int] , identifier[sort_keys] = keyword[True] , identifier[separators] =( literal[string] , literal[string] )) | def nice_report(self):
"""Return a nicely formatted original report."""
if not self.json:
return '[no CSP report data]' # depends on [control=['if'], data=[]]
try:
data = json.loads(self.json) # depends on [control=['try'], data=[]]
except ValueError:
return "Invalid CSP report: '{}'".format(self.json) # depends on [control=['except'], data=[]]
if 'csp-report' not in data:
return 'Invalid CSP report: ' + json.dumps(data, indent=4, sort_keys=True, separators=(',', ': ')) # depends on [control=['if'], data=['data']]
return json.dumps(data['csp-report'], indent=4, sort_keys=True, separators=(',', ': ')) |
def _draw_using_figure(self, figure, axs):
"""
Draw onto already created figure and axes
This is can be used to draw animation frames,
or inset plots. It is intended to be used
after the key plot has been drawn.
Parameters
----------
figure : ~matplotlib.figure.Figure
Matplotlib figure
axs : array_like
Array of Axes onto which to draw the plots
"""
self = deepcopy(self)
self._build()
self.theme = self.theme or theme_get()
self.figure = figure
self.axs = axs
try:
with mpl.rc_context():
self.theme.apply_rcparams()
self._setup_parameters()
self._draw_layers()
self._draw_facet_labels()
self._draw_legend()
self._apply_theme()
except Exception as err:
if self.figure is not None:
plt.close(self.figure)
raise err
return self | def function[_draw_using_figure, parameter[self, figure, axs]]:
constant[
Draw onto already created figure and axes
This is can be used to draw animation frames,
or inset plots. It is intended to be used
after the key plot has been drawn.
Parameters
----------
figure : ~matplotlib.figure.Figure
Matplotlib figure
axs : array_like
Array of Axes onto which to draw the plots
]
variable[self] assign[=] call[name[deepcopy], parameter[name[self]]]
call[name[self]._build, parameter[]]
name[self].theme assign[=] <ast.BoolOp object at 0x7da20c7966e0>
name[self].figure assign[=] name[figure]
name[self].axs assign[=] name[axs]
<ast.Try object at 0x7da20c796320>
return[name[self]] | keyword[def] identifier[_draw_using_figure] ( identifier[self] , identifier[figure] , identifier[axs] ):
literal[string]
identifier[self] = identifier[deepcopy] ( identifier[self] )
identifier[self] . identifier[_build] ()
identifier[self] . identifier[theme] = identifier[self] . identifier[theme] keyword[or] identifier[theme_get] ()
identifier[self] . identifier[figure] = identifier[figure]
identifier[self] . identifier[axs] = identifier[axs]
keyword[try] :
keyword[with] identifier[mpl] . identifier[rc_context] ():
identifier[self] . identifier[theme] . identifier[apply_rcparams] ()
identifier[self] . identifier[_setup_parameters] ()
identifier[self] . identifier[_draw_layers] ()
identifier[self] . identifier[_draw_facet_labels] ()
identifier[self] . identifier[_draw_legend] ()
identifier[self] . identifier[_apply_theme] ()
keyword[except] identifier[Exception] keyword[as] identifier[err] :
keyword[if] identifier[self] . identifier[figure] keyword[is] keyword[not] keyword[None] :
identifier[plt] . identifier[close] ( identifier[self] . identifier[figure] )
keyword[raise] identifier[err]
keyword[return] identifier[self] | def _draw_using_figure(self, figure, axs):
"""
Draw onto already created figure and axes
This is can be used to draw animation frames,
or inset plots. It is intended to be used
after the key plot has been drawn.
Parameters
----------
figure : ~matplotlib.figure.Figure
Matplotlib figure
axs : array_like
Array of Axes onto which to draw the plots
"""
self = deepcopy(self)
self._build()
self.theme = self.theme or theme_get()
self.figure = figure
self.axs = axs
try:
with mpl.rc_context():
self.theme.apply_rcparams()
self._setup_parameters()
self._draw_layers()
self._draw_facet_labels()
self._draw_legend()
self._apply_theme() # depends on [control=['with'], data=[]] # depends on [control=['try'], data=[]]
except Exception as err:
if self.figure is not None:
plt.close(self.figure) # depends on [control=['if'], data=[]]
raise err # depends on [control=['except'], data=['err']]
return self |
def warnExtractFromRegexpGroups(self, line, match):
"""
Extract file name, line number, and warning text as groups (1,2,3)
of warningPattern match."""
file = match.group(1)
lineNo = match.group(2)
if lineNo is not None:
lineNo = int(lineNo)
text = match.group(3)
return (file, lineNo, text) | def function[warnExtractFromRegexpGroups, parameter[self, line, match]]:
constant[
Extract file name, line number, and warning text as groups (1,2,3)
of warningPattern match.]
variable[file] assign[=] call[name[match].group, parameter[constant[1]]]
variable[lineNo] assign[=] call[name[match].group, parameter[constant[2]]]
if compare[name[lineNo] is_not constant[None]] begin[:]
variable[lineNo] assign[=] call[name[int], parameter[name[lineNo]]]
variable[text] assign[=] call[name[match].group, parameter[constant[3]]]
return[tuple[[<ast.Name object at 0x7da1b1c3d900>, <ast.Name object at 0x7da1b1c3c910>, <ast.Name object at 0x7da18c4ce350>]]] | keyword[def] identifier[warnExtractFromRegexpGroups] ( identifier[self] , identifier[line] , identifier[match] ):
literal[string]
identifier[file] = identifier[match] . identifier[group] ( literal[int] )
identifier[lineNo] = identifier[match] . identifier[group] ( literal[int] )
keyword[if] identifier[lineNo] keyword[is] keyword[not] keyword[None] :
identifier[lineNo] = identifier[int] ( identifier[lineNo] )
identifier[text] = identifier[match] . identifier[group] ( literal[int] )
keyword[return] ( identifier[file] , identifier[lineNo] , identifier[text] ) | def warnExtractFromRegexpGroups(self, line, match):
"""
Extract file name, line number, and warning text as groups (1,2,3)
of warningPattern match."""
file = match.group(1)
lineNo = match.group(2)
if lineNo is not None:
lineNo = int(lineNo) # depends on [control=['if'], data=['lineNo']]
text = match.group(3)
return (file, lineNo, text) |
def sliding_window(a, ws, ss=None, flatten=True):
from numpy.lib.stride_tricks import as_strided as ast
'''
Return a sliding window over a in any number of dimensions
Parameters:
a - an n-dimensional numpy array
ws - an int (a is 1D) or tuple (a is 2D or greater) representing the size
of each dimension of the window
ss - an int (a is 1D) or tuple (a is 2D or greater) representing the
amount to slide the window in each dimension. If not specified, it
defaults to ws.
flatten - if True, all slices are flattened, otherwise, there is an
extra dimension for each dimension of the input.
Returns
an array containing each n-dimensional window from a
'''
if None is ss:
# ss was not provided. the windows will not overlap in any direction.
ss = ws
ws = norm_shape(ws)
ss = norm_shape(ss)
# convert ws, ss, and a.shape to numpy arrays so that we can do math in every
# dimension at once.
ws = np.array(ws)
ss = np.array(ss)
shape = np.array(a.shape)
# ensure that ws, ss, and a.shape all have the same number of dimensions
ls = [len(shape),len(ws),len(ss)]
if 1 != len(set(ls)):
raise ValueError(\
'a.shape, ws and ss must all have the same length. They were %s' % str(ls))
# ensure that ws is smaller than a in every dimension
if np.any(ws > shape):
raise ValueError(\
'ws cannot be larger than a in any dimension.\
a.shape was %s and ws was %s' % (str(a.shape),str(ws)))
# how many slices will there be in each dimension?
newshape = norm_shape(((shape - ws) // ss) + 1)
# the shape of the strided array will be the number of slices in each dimension
# plus the shape of the window (tuple addition)
newshape += norm_shape(ws)
# the strides tuple will be the array's strides multiplied by step size, plus
# the array's strides (tuple addition)
newstrides = norm_shape(np.array(a.strides) * ss) + a.strides
strided = ast(a,shape = newshape,strides = newstrides)
if not flatten:
return strided
# Collapse strided so that it has one more dimension than the window. I.e.,
# the new array is a flat list of slices.
meat = len(ws) if ws.shape else 0
firstdim = (np.product(newshape[:-meat]),) if ws.shape else ()
dim = firstdim + (newshape[-meat:])
# remove any dimensions with size 1
dim = [i for i in dim if i != 1]
return strided.reshape(dim) | def function[sliding_window, parameter[a, ws, ss, flatten]]:
from relative_module[numpy.lib.stride_tricks] import module[as_strided]
constant[
Return a sliding window over a in any number of dimensions
Parameters:
a - an n-dimensional numpy array
ws - an int (a is 1D) or tuple (a is 2D or greater) representing the size
of each dimension of the window
ss - an int (a is 1D) or tuple (a is 2D or greater) representing the
amount to slide the window in each dimension. If not specified, it
defaults to ws.
flatten - if True, all slices are flattened, otherwise, there is an
extra dimension for each dimension of the input.
Returns
an array containing each n-dimensional window from a
]
if compare[constant[None] is name[ss]] begin[:]
variable[ss] assign[=] name[ws]
variable[ws] assign[=] call[name[norm_shape], parameter[name[ws]]]
variable[ss] assign[=] call[name[norm_shape], parameter[name[ss]]]
variable[ws] assign[=] call[name[np].array, parameter[name[ws]]]
variable[ss] assign[=] call[name[np].array, parameter[name[ss]]]
variable[shape] assign[=] call[name[np].array, parameter[name[a].shape]]
variable[ls] assign[=] list[[<ast.Call object at 0x7da1b0654340>, <ast.Call object at 0x7da1b06552d0>, <ast.Call object at 0x7da1b065bc40>]]
if compare[constant[1] not_equal[!=] call[name[len], parameter[call[name[set], parameter[name[ls]]]]]] begin[:]
<ast.Raise object at 0x7da1b0659390>
if call[name[np].any, parameter[compare[name[ws] greater[>] name[shape]]]] begin[:]
<ast.Raise object at 0x7da1b0658fa0>
variable[newshape] assign[=] call[name[norm_shape], parameter[binary_operation[binary_operation[binary_operation[name[shape] - name[ws]] <ast.FloorDiv object at 0x7da2590d6bc0> name[ss]] + constant[1]]]]
<ast.AugAssign object at 0x7da1b065a650>
variable[newstrides] assign[=] binary_operation[call[name[norm_shape], parameter[binary_operation[call[name[np].array, parameter[name[a].strides]] * name[ss]]]] + name[a].strides]
variable[strided] assign[=] call[name[ast], parameter[name[a]]]
if <ast.UnaryOp object at 0x7da1b065a4d0> begin[:]
return[name[strided]]
variable[meat] assign[=] <ast.IfExp object at 0x7da1b0659930>
variable[firstdim] assign[=] <ast.IfExp object at 0x7da1b065add0>
variable[dim] assign[=] binary_operation[name[firstdim] + call[name[newshape]][<ast.Slice object at 0x7da1b0658af0>]]
variable[dim] assign[=] <ast.ListComp object at 0x7da1b065a8c0>
return[call[name[strided].reshape, parameter[name[dim]]]] | keyword[def] identifier[sliding_window] ( identifier[a] , identifier[ws] , identifier[ss] = keyword[None] , identifier[flatten] = keyword[True] ):
keyword[from] identifier[numpy] . identifier[lib] . identifier[stride_tricks] keyword[import] identifier[as_strided] keyword[as] identifier[ast]
literal[string]
keyword[if] keyword[None] keyword[is] identifier[ss] :
identifier[ss] = identifier[ws]
identifier[ws] = identifier[norm_shape] ( identifier[ws] )
identifier[ss] = identifier[norm_shape] ( identifier[ss] )
identifier[ws] = identifier[np] . identifier[array] ( identifier[ws] )
identifier[ss] = identifier[np] . identifier[array] ( identifier[ss] )
identifier[shape] = identifier[np] . identifier[array] ( identifier[a] . identifier[shape] )
identifier[ls] =[ identifier[len] ( identifier[shape] ), identifier[len] ( identifier[ws] ), identifier[len] ( identifier[ss] )]
keyword[if] literal[int] != identifier[len] ( identifier[set] ( identifier[ls] )):
keyword[raise] identifier[ValueError] ( literal[string] % identifier[str] ( identifier[ls] ))
keyword[if] identifier[np] . identifier[any] ( identifier[ws] > identifier[shape] ):
keyword[raise] identifier[ValueError] ( literal[string] %( identifier[str] ( identifier[a] . identifier[shape] ), identifier[str] ( identifier[ws] )))
identifier[newshape] = identifier[norm_shape] ((( identifier[shape] - identifier[ws] )// identifier[ss] )+ literal[int] )
identifier[newshape] += identifier[norm_shape] ( identifier[ws] )
identifier[newstrides] = identifier[norm_shape] ( identifier[np] . identifier[array] ( identifier[a] . identifier[strides] )* identifier[ss] )+ identifier[a] . identifier[strides]
identifier[strided] = identifier[ast] ( identifier[a] , identifier[shape] = identifier[newshape] , identifier[strides] = identifier[newstrides] )
keyword[if] keyword[not] identifier[flatten] :
keyword[return] identifier[strided]
identifier[meat] = identifier[len] ( identifier[ws] ) keyword[if] identifier[ws] . identifier[shape] keyword[else] literal[int]
identifier[firstdim] =( identifier[np] . identifier[product] ( identifier[newshape] [:- identifier[meat] ]),) keyword[if] identifier[ws] . identifier[shape] keyword[else] ()
identifier[dim] = identifier[firstdim] +( identifier[newshape] [- identifier[meat] :])
identifier[dim] =[ identifier[i] keyword[for] identifier[i] keyword[in] identifier[dim] keyword[if] identifier[i] != literal[int] ]
keyword[return] identifier[strided] . identifier[reshape] ( identifier[dim] ) | def sliding_window(a, ws, ss=None, flatten=True):
from numpy.lib.stride_tricks import as_strided as ast
'\n Return a sliding window over a in any number of dimensions\n \n Parameters:\n a - an n-dimensional numpy array\n ws - an int (a is 1D) or tuple (a is 2D or greater) representing the size\n of each dimension of the window\n ss - an int (a is 1D) or tuple (a is 2D or greater) representing the\n amount to slide the window in each dimension. If not specified, it\n defaults to ws.\n flatten - if True, all slices are flattened, otherwise, there is an\n extra dimension for each dimension of the input.\n \n Returns\n an array containing each n-dimensional window from a\n '
if None is ss:
# ss was not provided. the windows will not overlap in any direction.
ss = ws # depends on [control=['if'], data=['ss']]
ws = norm_shape(ws)
ss = norm_shape(ss)
# convert ws, ss, and a.shape to numpy arrays so that we can do math in every
# dimension at once.
ws = np.array(ws)
ss = np.array(ss)
shape = np.array(a.shape)
# ensure that ws, ss, and a.shape all have the same number of dimensions
ls = [len(shape), len(ws), len(ss)]
if 1 != len(set(ls)):
raise ValueError('a.shape, ws and ss must all have the same length. They were %s' % str(ls)) # depends on [control=['if'], data=[]]
# ensure that ws is smaller than a in every dimension
if np.any(ws > shape):
raise ValueError('ws cannot be larger than a in any dimension. a.shape was %s and ws was %s' % (str(a.shape), str(ws))) # depends on [control=['if'], data=[]]
# how many slices will there be in each dimension?
newshape = norm_shape((shape - ws) // ss + 1)
# the shape of the strided array will be the number of slices in each dimension
# plus the shape of the window (tuple addition)
newshape += norm_shape(ws)
# the strides tuple will be the array's strides multiplied by step size, plus
# the array's strides (tuple addition)
newstrides = norm_shape(np.array(a.strides) * ss) + a.strides
strided = ast(a, shape=newshape, strides=newstrides)
if not flatten:
return strided # depends on [control=['if'], data=[]]
# Collapse strided so that it has one more dimension than the window. I.e.,
# the new array is a flat list of slices.
meat = len(ws) if ws.shape else 0
firstdim = (np.product(newshape[:-meat]),) if ws.shape else ()
dim = firstdim + newshape[-meat:]
# remove any dimensions with size 1
dim = [i for i in dim if i != 1]
return strided.reshape(dim) |
def run(self):
    """Parse-time hook for this directive.
    Builds a resource instance from the directive body and registers it
    in the site's resource mapping, keyed by docname so it can be looked
    up again after the docs are resolved. No docutils node is returned;
    the enclosing document itself serves as the node.
    """
    directive_name = self.name
    body_text = '\n'.join(self.content)
    # Ask the registry for the concrete class that handles this directive.
    factory = ResourceDirective.get_resource_class(directive_name)
    resource = factory(self.docname, directive_name, body_text)
    # Register the new resource with the site.
    self.resources[resource.docname] = resource
    return []
constant[ Run at parse time.
When the documents are initially being scanned, this method runs
and does two things: (a) creates an instance that is added to
the site's widgets, and (b) leaves behind a placeholder docutils
node that can later be processed after the docs are resolved.
The latter needs enough information to retrieve the former.
]
variable[rtype] assign[=] name[self].name
variable[resource_content] assign[=] call[constant[
].join, parameter[name[self].content]]
variable[resource_class] assign[=] call[name[ResourceDirective].get_resource_class, parameter[name[rtype]]]
variable[this_resource] assign[=] call[name[resource_class], parameter[name[self].docname, name[rtype], name[resource_content]]]
call[name[self].resources][name[this_resource].docname] assign[=] name[this_resource]
return[list[[]]] | keyword[def] identifier[run] ( identifier[self] ):
literal[string]
identifier[rtype] = identifier[self] . identifier[name]
identifier[resource_content] = literal[string] . identifier[join] ( identifier[self] . identifier[content] )
identifier[resource_class] = identifier[ResourceDirective] . identifier[get_resource_class] ( identifier[rtype] )
identifier[this_resource] = identifier[resource_class] ( identifier[self] . identifier[docname] , identifier[rtype] , identifier[resource_content] )
identifier[self] . identifier[resources] [ identifier[this_resource] . identifier[docname] ]= identifier[this_resource]
keyword[return] [] | def run(self):
""" Run at parse time.
When the documents are initially being scanned, this method runs
and does two things: (a) creates an instance that is added to
the site's widgets, and (b) leaves behind a placeholder docutils
node that can later be processed after the docs are resolved.
The latter needs enough information to retrieve the former.
"""
rtype = self.name
resource_content = '\n'.join(self.content)
resource_class = ResourceDirective.get_resource_class(rtype)
this_resource = resource_class(self.docname, rtype, resource_content)
# Add this resource to the site
self.resources[this_resource.docname] = this_resource
# Don't need to return a resource "node", the document is the node
return [] |
def textmetrics(self, txt, width=None, height=None, **kwargs):
    """Measure a string of text with the current font settings.
    Returns a (width, height) tuple, matching Nodebox behaviour.
    Rendering is disabled so only path generation runs; not the most
    efficient approach, but it produces accurate results.
    """
    # Build a throwaway Text object at the origin purely to read its
    # metrics; enableRendering=False skips the actual drawing step.
    probe = self.Text(txt, 0, 0, width, height, enableRendering=False, **kwargs)
    return probe.metrics
constant[Returns the width and height of a string of text as a tuple
(according to current font settings).
]
variable[txt] assign[=] call[name[self].Text, parameter[name[txt], constant[0], constant[0], name[width], name[height]]]
return[name[txt].metrics] | keyword[def] identifier[textmetrics] ( identifier[self] , identifier[txt] , identifier[width] = keyword[None] , identifier[height] = keyword[None] ,** identifier[kwargs] ):
literal[string]
identifier[txt] = identifier[self] . identifier[Text] ( identifier[txt] , literal[int] , literal[int] , identifier[width] , identifier[height] , identifier[enableRendering] = keyword[False] ,** identifier[kwargs] )
keyword[return] identifier[txt] . identifier[metrics] | def textmetrics(self, txt, width=None, height=None, **kwargs):
"""Returns the width and height of a string of text as a tuple
(according to current font settings).
"""
# for now only returns width and height (as per Nodebox behaviour)
# but maybe we could use the other data from cairo
# we send doRender=False to prevent the actual rendering process, only the path generation is enabled
# not the most efficient way, but it generates accurate results
txt = self.Text(txt, 0, 0, width, height, enableRendering=False, **kwargs)
return txt.metrics |
def delete_speaker(self, speaker_uri):
    """Remove a speaker from a collection.
    :param speaker_uri: the URI that references the speaker
    :type speaker_uri: String
    :rtype: Boolean
    :returns: True if the speaker was deleted
    :raises: APIError if the request was not successful
    """
    # Issue the HTTP DELETE and report whether the API accepted it.
    api_response = self.api_request(speaker_uri, method='DELETE')
    return self.__check_success(api_response)
return self.__check_success(response) | def function[delete_speaker, parameter[self, speaker_uri]]:
constant[Delete an speaker from a collection
:param speaker_uri: the URI that references the speaker
:type speaker_uri: String
:rtype: Boolean
:returns: True if the speaker was deleted
:raises: APIError if the request was not successful
]
variable[response] assign[=] call[name[self].api_request, parameter[name[speaker_uri]]]
return[call[name[self].__check_success, parameter[name[response]]]] | keyword[def] identifier[delete_speaker] ( identifier[self] , identifier[speaker_uri] ):
literal[string]
identifier[response] = identifier[self] . identifier[api_request] ( identifier[speaker_uri] , identifier[method] = literal[string] )
keyword[return] identifier[self] . identifier[__check_success] ( identifier[response] ) | def delete_speaker(self, speaker_uri):
"""Delete an speaker from a collection
:param speaker_uri: the URI that references the speaker
:type speaker_uri: String
:rtype: Boolean
:returns: True if the speaker was deleted
:raises: APIError if the request was not successful
"""
response = self.api_request(speaker_uri, method='DELETE')
return self.__check_success(response) |
def values_are_equivalent(self, val1, val2):
    """Check whether two values fall into the same bucket/range.
    Returns:
        True if both values map to the same lowest equivalent value.
    """
    lowest = self.get_lowest_equivalent_value
    return lowest(val1) == lowest(val2)
constant[Check whether 2 values are equivalent (meaning they
are in the same bucket/range)
Returns:
true if the 2 values are equivalent
]
return[compare[call[name[self].get_lowest_equivalent_value, parameter[name[val1]]] equal[==] call[name[self].get_lowest_equivalent_value, parameter[name[val2]]]]] | keyword[def] identifier[values_are_equivalent] ( identifier[self] , identifier[val1] , identifier[val2] ):
literal[string]
keyword[return] identifier[self] . identifier[get_lowest_equivalent_value] ( identifier[val1] )== identifier[self] . identifier[get_lowest_equivalent_value] ( identifier[val2] ) | def values_are_equivalent(self, val1, val2):
"""Check whether 2 values are equivalent (meaning they
are in the same bucket/range)
Returns:
true if the 2 values are equivalent
"""
return self.get_lowest_equivalent_value(val1) == self.get_lowest_equivalent_value(val2) |
def _get_value(self, entity):
"""Internal helper to get the value for this Property from an entity.
For a repeated Property this initializes the value to an empty
list if it is not set.
"""
if entity._projection:
if self._name not in entity._projection:
raise UnprojectedPropertyError(
'Property %s is not in the projection' % (self._name,))
return self._get_user_value(entity) | def function[_get_value, parameter[self, entity]]:
constant[Internal helper to get the value for this Property from an entity.
For a repeated Property this initializes the value to an empty
list if it is not set.
]
if name[entity]._projection begin[:]
if compare[name[self]._name <ast.NotIn object at 0x7da2590d7190> name[entity]._projection] begin[:]
<ast.Raise object at 0x7da1b23472b0>
return[call[name[self]._get_user_value, parameter[name[entity]]]] | keyword[def] identifier[_get_value] ( identifier[self] , identifier[entity] ):
literal[string]
keyword[if] identifier[entity] . identifier[_projection] :
keyword[if] identifier[self] . identifier[_name] keyword[not] keyword[in] identifier[entity] . identifier[_projection] :
keyword[raise] identifier[UnprojectedPropertyError] (
literal[string] %( identifier[self] . identifier[_name] ,))
keyword[return] identifier[self] . identifier[_get_user_value] ( identifier[entity] ) | def _get_value(self, entity):
"""Internal helper to get the value for this Property from an entity.
For a repeated Property this initializes the value to an empty
list if it is not set.
"""
if entity._projection:
if self._name not in entity._projection:
raise UnprojectedPropertyError('Property %s is not in the projection' % (self._name,)) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
return self._get_user_value(entity) |
def dv(self, orb):
    """Velocity increment expressed in the reference frame of the orbit.
    Args:
        orb (Orbit):
    Return:
        numpy.array: Velocity increment, length 3
    """
    cart = orb.copy(form="cartesian")
    # Rotation matrix from the maneuver frame into the orbit's frame.
    if self.frame == "QSW":
        rotation = to_qsw(cart).T
    elif self.frame == "TNW":
        rotation = to_tnw(cart).T
    else:
        rotation = np.identity(3)
    return rotation @ self._dv
return mat @ self._dv | def function[dv, parameter[self, orb]]:
constant[Computation of the velocity increment in the reference frame of the orbit
Args:
orb (Orbit):
Return:
numpy.array: Velocity increment, length 3
]
variable[orb] assign[=] call[name[orb].copy, parameter[]]
if compare[name[self].frame equal[==] constant[QSW]] begin[:]
variable[mat] assign[=] call[name[to_qsw], parameter[name[orb]]].T
return[binary_operation[name[mat] <ast.MatMult object at 0x7da2590d6860> name[self]._dv]] | keyword[def] identifier[dv] ( identifier[self] , identifier[orb] ):
literal[string]
identifier[orb] = identifier[orb] . identifier[copy] ( identifier[form] = literal[string] )
keyword[if] identifier[self] . identifier[frame] == literal[string] :
identifier[mat] = identifier[to_qsw] ( identifier[orb] ). identifier[T]
keyword[elif] identifier[self] . identifier[frame] == literal[string] :
identifier[mat] = identifier[to_tnw] ( identifier[orb] ). identifier[T]
keyword[else] :
identifier[mat] = identifier[np] . identifier[identity] ( literal[int] )
keyword[return] identifier[mat] @ identifier[self] . identifier[_dv] | def dv(self, orb):
"""Computation of the velocity increment in the reference frame of the orbit
Args:
orb (Orbit):
Return:
numpy.array: Velocity increment, length 3
"""
orb = orb.copy(form='cartesian')
if self.frame == 'QSW':
mat = to_qsw(orb).T # depends on [control=['if'], data=[]]
elif self.frame == 'TNW':
mat = to_tnw(orb).T # depends on [control=['if'], data=[]]
else:
mat = np.identity(3)
# velocity increment in the same reference frame as the orbit
return mat @ self._dv |
def get_version(self):
    """Get blink(1) firmware version.
    Sends the 'v' feature report to the device, waits briefly for the
    reply, then decodes two ASCII digit bytes into a version number.
    Returns:
        str: the firmware version (e.g. "102"), or '' if no device
        is attached.
    """
    # `is None` is the idiomatic identity test (was `== None`).
    if self.dev is None:
        return ''
    buf = [REPORT_ID, ord('v'), 0, 0, 0, 0, 0, 0, 0]
    self.write(buf)
    time.sleep(.05)  # give the device time to fill the report
    version_raw = self.read()
    # Bytes 3 and 4 of the reply are ASCII digits: hundreds + units.
    version = (version_raw[3] - ord('0')) * 100 + (version_raw[4] - ord('0'))
    return str(version)
return str(version) | def function[get_version, parameter[self]]:
constant[Get blink(1) firmware version
]
if compare[name[self].dev equal[==] constant[None]] begin[:]
return[constant[]]
variable[buf] assign[=] list[[<ast.Name object at 0x7da18f811690>, <ast.Call object at 0x7da18f813f40>, <ast.Constant object at 0x7da18f811210>, <ast.Constant object at 0x7da18f812020>, <ast.Constant object at 0x7da18f812710>, <ast.Constant object at 0x7da18f812590>, <ast.Constant object at 0x7da18f813e50>, <ast.Constant object at 0x7da18f812470>, <ast.Constant object at 0x7da18f813640>]]
call[name[self].write, parameter[name[buf]]]
call[name[time].sleep, parameter[constant[0.05]]]
variable[version_raw] assign[=] call[name[self].read, parameter[]]
variable[version] assign[=] binary_operation[binary_operation[binary_operation[call[name[version_raw]][constant[3]] - call[name[ord], parameter[constant[0]]]] * constant[100]] + binary_operation[call[name[version_raw]][constant[4]] - call[name[ord], parameter[constant[0]]]]]
return[call[name[str], parameter[name[version]]]] | keyword[def] identifier[get_version] ( identifier[self] ):
literal[string]
keyword[if] ( identifier[self] . identifier[dev] == keyword[None] ): keyword[return] literal[string]
identifier[buf] =[ identifier[REPORT_ID] , identifier[ord] ( literal[string] ), literal[int] , literal[int] , literal[int] , literal[int] , literal[int] , literal[int] , literal[int] ]
identifier[self] . identifier[write] ( identifier[buf] )
identifier[time] . identifier[sleep] ( literal[int] )
identifier[version_raw] = identifier[self] . identifier[read] ()
identifier[version] =( identifier[version_raw] [ literal[int] ]- identifier[ord] ( literal[string] ))* literal[int] +( identifier[version_raw] [ literal[int] ]- identifier[ord] ( literal[string] ))
keyword[return] identifier[str] ( identifier[version] ) | def get_version(self):
"""Get blink(1) firmware version
"""
if self.dev == None:
return '' # depends on [control=['if'], data=[]]
buf = [REPORT_ID, ord('v'), 0, 0, 0, 0, 0, 0, 0]
self.write(buf)
time.sleep(0.05)
version_raw = self.read()
version = (version_raw[3] - ord('0')) * 100 + (version_raw[4] - ord('0'))
return str(version) |
def manipulate_shown_data(f, input_axes="gca", output_axes=None, fxname=1, fyname=1, clear=1, pause=False, **kwargs):
    """
    Loops over the visible data on the specified axes and modifies it based on
    the function f(xdata, ydata), which must return new_xdata, new_ydata
    input_axes which axes to pull the data from ("gca" = current axes)
    output_axes which axes to dump the manipulated data (None for new figure)
    fxname the name of the function on x
    fyname the name of the function on y
    1 means "use f.__name__"
    0 or None means no change.
    otherwise specify a string
    clear if truthy, clear output_axes before plotting (only
    applies when output_axes is supplied; a new figure
    starts empty anyway)
    pause if truthy, redraw and wait for <enter> after each curve
    **kwargs are sent to axes.plot
    Only data within the current x-zoom of input_axes is processed.
    """
    # get the axes
    if input_axes == "gca": a1 = _pylab.gca()
    else: a1 = input_axes
    # get the xlimits
    xmin, xmax = a1.get_xlim()
    # get the name to stick on the x and y labels
    # NOTE: the integer 1 is a sentinel meaning "use f.__name__";
    # the check must stay `== 1` since fxname/fyname may be strings.
    if fxname==1: fxname = f.__name__
    if fyname==1: fyname = f.__name__
    # get the output axes
    if output_axes == None:
        _pylab.figure(a1.figure.number+1)
        a2 = _pylab.axes()
    else:
        a2 = output_axes
        if clear: a2.clear()
    # loop over the data
    for line in a1.get_lines():
        # if it's a line, do the manipulation
        if isinstance(line, _mpl.lines.Line2D):
            # get the data
            x, y = line.get_data()
            # trim the data according to the current zoom level
            x, y = _fun.trim_data(xmin, xmax, x, y)
            # do the manipulation
            new_x, new_y = f(x,y)
            # plot the new; underscores in labels are escaped since a
            # leading "_" would hide the line from the legend
            _s.plot.xy.data(new_x, new_y, clear=0, label=line.get_label().replace("_", "-"), axes=a2, **kwargs)
            # pause after each curve if we're supposed to
            if pause:
                _pylab.draw()
                input("<enter> ")
    # set the labels and title.
    if fxname in [0,None]: a2.set_xlabel(a1.get_xlabel())
    else: a2.set_xlabel(fxname+"("+a1.get_xlabel()+")")
    if fyname in [0,None]: a2.set_ylabel(a1.get_ylabel())
    else: a2.set_ylabel(fyname+"("+a1.get_ylabel()+")")
    _pylab.draw()
constant[
Loops over the visible data on the specified axes and modifies it based on
the function f(xdata, ydata), which must return new_xdata, new_ydata
input_axes which axes to pull the data from
output_axes which axes to dump the manipulated data (None for new figure)
fxname the name of the function on x
fyname the name of the function on y
1 means "use f.__name__"
0 or None means no change.
otherwise specify a string
**kwargs are sent to axes.plot
]
if compare[name[input_axes] equal[==] constant[gca]] begin[:]
variable[a1] assign[=] call[name[_pylab].gca, parameter[]]
<ast.Tuple object at 0x7da18eb550f0> assign[=] call[name[a1].get_xlim, parameter[]]
if compare[name[fxname] equal[==] constant[1]] begin[:]
variable[fxname] assign[=] name[f].__name__
if compare[name[fyname] equal[==] constant[1]] begin[:]
variable[fyname] assign[=] name[f].__name__
if compare[name[output_axes] equal[==] constant[None]] begin[:]
call[name[_pylab].figure, parameter[binary_operation[name[a1].figure.number + constant[1]]]]
variable[a2] assign[=] call[name[_pylab].axes, parameter[]]
if name[clear] begin[:]
call[name[a2].clear, parameter[]]
for taget[name[line]] in starred[call[name[a1].get_lines, parameter[]]] begin[:]
if call[name[isinstance], parameter[name[line], name[_mpl].lines.Line2D]] begin[:]
<ast.Tuple object at 0x7da18eb57520> assign[=] call[name[line].get_data, parameter[]]
<ast.Tuple object at 0x7da18eb56980> assign[=] call[name[_fun].trim_data, parameter[name[xmin], name[xmax], name[x], name[y]]]
<ast.Tuple object at 0x7da18eb57970> assign[=] call[name[f], parameter[name[x], name[y]]]
call[name[_s].plot.xy.data, parameter[name[new_x], name[new_y]]]
if name[pause] begin[:]
call[name[_pylab].draw, parameter[]]
call[name[input], parameter[constant[<enter> ]]]
if compare[name[fxname] in list[[<ast.Constant object at 0x7da18eb56e60>, <ast.Constant object at 0x7da18eb56f50>]]] begin[:]
call[name[a2].set_xlabel, parameter[call[name[a1].get_xlabel, parameter[]]]]
if compare[name[fyname] in list[[<ast.Constant object at 0x7da18eb55240>, <ast.Constant object at 0x7da18eb54670>]]] begin[:]
call[name[a2].set_ylabel, parameter[call[name[a1].get_ylabel, parameter[]]]]
call[name[_pylab].draw, parameter[]] | keyword[def] identifier[manipulate_shown_data] ( identifier[f] , identifier[input_axes] = literal[string] , identifier[output_axes] = keyword[None] , identifier[fxname] = literal[int] , identifier[fyname] = literal[int] , identifier[clear] = literal[int] , identifier[pause] = keyword[False] ,** identifier[kwargs] ):
literal[string]
keyword[if] identifier[input_axes] == literal[string] : identifier[a1] = identifier[_pylab] . identifier[gca] ()
keyword[else] : identifier[a1] = identifier[input_axes]
identifier[xmin] , identifier[xmax] = identifier[a1] . identifier[get_xlim] ()
keyword[if] identifier[fxname] == literal[int] : identifier[fxname] = identifier[f] . identifier[__name__]
keyword[if] identifier[fyname] == literal[int] : identifier[fyname] = identifier[f] . identifier[__name__]
keyword[if] identifier[output_axes] == keyword[None] :
identifier[_pylab] . identifier[figure] ( identifier[a1] . identifier[figure] . identifier[number] + literal[int] )
identifier[a2] = identifier[_pylab] . identifier[axes] ()
keyword[else] :
identifier[a2] = identifier[output_axes]
keyword[if] identifier[clear] : identifier[a2] . identifier[clear] ()
keyword[for] identifier[line] keyword[in] identifier[a1] . identifier[get_lines] ():
keyword[if] identifier[isinstance] ( identifier[line] , identifier[_mpl] . identifier[lines] . identifier[Line2D] ):
identifier[x] , identifier[y] = identifier[line] . identifier[get_data] ()
identifier[x] , identifier[y] = identifier[_fun] . identifier[trim_data] ( identifier[xmin] , identifier[xmax] , identifier[x] , identifier[y] )
identifier[new_x] , identifier[new_y] = identifier[f] ( identifier[x] , identifier[y] )
identifier[_s] . identifier[plot] . identifier[xy] . identifier[data] ( identifier[new_x] , identifier[new_y] , identifier[clear] = literal[int] , identifier[label] = identifier[line] . identifier[get_label] (). identifier[replace] ( literal[string] , literal[string] ), identifier[axes] = identifier[a2] ,** identifier[kwargs] )
keyword[if] identifier[pause] :
identifier[_pylab] . identifier[draw] ()
identifier[input] ( literal[string] )
keyword[if] identifier[fxname] keyword[in] [ literal[int] , keyword[None] ]: identifier[a2] . identifier[set_xlabel] ( identifier[a1] . identifier[get_xlabel] ())
keyword[else] : identifier[a2] . identifier[set_xlabel] ( identifier[fxname] + literal[string] + identifier[a1] . identifier[get_xlabel] ()+ literal[string] )
keyword[if] identifier[fyname] keyword[in] [ literal[int] , keyword[None] ]: identifier[a2] . identifier[set_ylabel] ( identifier[a1] . identifier[get_ylabel] ())
keyword[else] : identifier[a2] . identifier[set_ylabel] ( identifier[fyname] + literal[string] + identifier[a1] . identifier[get_ylabel] ()+ literal[string] )
identifier[_pylab] . identifier[draw] () | def manipulate_shown_data(f, input_axes='gca', output_axes=None, fxname=1, fyname=1, clear=1, pause=False, **kwargs):
"""
Loops over the visible data on the specified axes and modifies it based on
the function f(xdata, ydata), which must return new_xdata, new_ydata
input_axes which axes to pull the data from
output_axes which axes to dump the manipulated data (None for new figure)
fxname the name of the function on x
fyname the name of the function on y
1 means "use f.__name__"
0 or None means no change.
otherwise specify a string
**kwargs are sent to axes.plot
"""
# get the axes
if input_axes == 'gca':
a1 = _pylab.gca() # depends on [control=['if'], data=[]]
else:
a1 = input_axes
# get the xlimits
(xmin, xmax) = a1.get_xlim()
# get the name to stick on the x and y labels
if fxname == 1:
fxname = f.__name__ # depends on [control=['if'], data=['fxname']]
if fyname == 1:
fyname = f.__name__ # depends on [control=['if'], data=['fyname']]
# get the output axes
if output_axes == None:
_pylab.figure(a1.figure.number + 1)
a2 = _pylab.axes() # depends on [control=['if'], data=[]]
else:
a2 = output_axes
if clear:
a2.clear() # depends on [control=['if'], data=[]]
# loop over the data
for line in a1.get_lines():
# if it's a line, do the manipulation
if isinstance(line, _mpl.lines.Line2D):
# get the data
(x, y) = line.get_data()
# trim the data according to the current zoom level
(x, y) = _fun.trim_data(xmin, xmax, x, y)
# do the manipulation
(new_x, new_y) = f(x, y)
# plot the new
_s.plot.xy.data(new_x, new_y, clear=0, label=line.get_label().replace('_', '-'), axes=a2, **kwargs)
# pause after each curve if we're supposed to
if pause:
_pylab.draw()
input('<enter> ') # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['line']]
# set the labels and title.
if fxname in [0, None]:
a2.set_xlabel(a1.get_xlabel()) # depends on [control=['if'], data=[]]
else:
a2.set_xlabel(fxname + '(' + a1.get_xlabel() + ')')
if fyname in [0, None]:
a2.set_ylabel(a1.get_ylabel()) # depends on [control=['if'], data=[]]
else:
a2.set_ylabel(fyname + '(' + a1.get_ylabel() + ')')
_pylab.draw() |
def fromstring(cls, root_name, data):
    """Initialize S3Element from name and XML string data.
    :param root_name: Name for XML data. Used in XML errors.
    :param data: string data to be parsed.
    :return: Returns an S3Element.
    :raises InvalidXMLError: if `data` cannot be parsed as XML.
    """
    try:
        return cls(root_name, cElementTree.fromstring(data))
    except _ETREE_EXCEPTIONS as error:
        # Python 3 exceptions have no `.message` attribute (it would
        # raise AttributeError here); formatting the exception itself
        # yields its message on both Python 2 and 3.
        raise InvalidXMLError(
            '"{}" XML is not parsable. Message: {}'.format(
                root_name, error
            )
        )
constant[Initialize S3Element from name and XML string data.
:param name: Name for XML data. Used in XML errors.
:param data: string data to be parsed.
:return: Returns an S3Element.
]
<ast.Try object at 0x7da1b1d39750> | keyword[def] identifier[fromstring] ( identifier[cls] , identifier[root_name] , identifier[data] ):
literal[string]
keyword[try] :
keyword[return] identifier[cls] ( identifier[root_name] , identifier[cElementTree] . identifier[fromstring] ( identifier[data] ))
keyword[except] identifier[_ETREE_EXCEPTIONS] keyword[as] identifier[error] :
keyword[raise] identifier[InvalidXMLError] (
literal[string] . identifier[format] (
identifier[root_name] , identifier[error] . identifier[message]
)
) | def fromstring(cls, root_name, data):
"""Initialize S3Element from name and XML string data.
:param name: Name for XML data. Used in XML errors.
:param data: string data to be parsed.
:return: Returns an S3Element.
"""
try:
return cls(root_name, cElementTree.fromstring(data)) # depends on [control=['try'], data=[]]
except _ETREE_EXCEPTIONS as error:
raise InvalidXMLError('"{}" XML is not parsable. Message: {}'.format(root_name, error.message)) # depends on [control=['except'], data=['error']] |
def inverse_transform(self, X):
"""Scale back the data to the original representation
Parameters
----------
X : array-like
The data used to scale along the specified axis.
This implementation was copied and modified from Scikit-Learn.
See License information here:
https://github.com/scikit-learn/scikit-learn/blob/master/README.rst
"""
check_is_fitted(self, "center_", "scale_")
# if sparse.issparse(X):
# if self.with_scaling:
# inplace_column_scale(X, self.scale_)
# else:
if self.with_scaling:
X *= self.scale_
if self.with_centering:
X += self.center_
return X | def function[inverse_transform, parameter[self, X]]:
constant[Scale back the data to the original representation
Parameters
----------
X : array-like
The data used to scale along the specified axis.
This implementation was copied and modified from Scikit-Learn.
See License information here:
https://github.com/scikit-learn/scikit-learn/blob/master/README.rst
]
call[name[check_is_fitted], parameter[name[self], constant[center_], constant[scale_]]]
if name[self].with_scaling begin[:]
<ast.AugAssign object at 0x7da1b1917c70>
if name[self].with_centering begin[:]
<ast.AugAssign object at 0x7da1b1916560>
return[name[X]] | keyword[def] identifier[inverse_transform] ( identifier[self] , identifier[X] ):
literal[string]
identifier[check_is_fitted] ( identifier[self] , literal[string] , literal[string] )
keyword[if] identifier[self] . identifier[with_scaling] :
identifier[X] *= identifier[self] . identifier[scale_]
keyword[if] identifier[self] . identifier[with_centering] :
identifier[X] += identifier[self] . identifier[center_]
keyword[return] identifier[X] | def inverse_transform(self, X):
"""Scale back the data to the original representation
Parameters
----------
X : array-like
The data used to scale along the specified axis.
This implementation was copied and modified from Scikit-Learn.
See License information here:
https://github.com/scikit-learn/scikit-learn/blob/master/README.rst
"""
check_is_fitted(self, 'center_', 'scale_')
# if sparse.issparse(X):
# if self.with_scaling:
# inplace_column_scale(X, self.scale_)
# else:
if self.with_scaling:
X *= self.scale_ # depends on [control=['if'], data=[]]
if self.with_centering:
X += self.center_ # depends on [control=['if'], data=[]]
return X |
def group_by(self, fields, items=None):
    """ Returns a dict of lists of items, grouped by the given fields.
    ``fields`` can be a string (one field) or an iterable of field names.
    """
    if items is None:
        items = self.items()
    # A string concatenates with '' and becomes a single attrgetter;
    # any other iterable raises TypeError and falls back to a tuple key.
    try:
        keyfunc = operator.attrgetter(fields + '')
    except TypeError:
        names = tuple(fields)
        def keyfunc(obj):
            'Helper to return group key tuple'
            return tuple(getattr(obj, name) for name in names)
    grouped = defaultdict(list)
    for obj in items:
        grouped[keyfunc(obj)].append(obj)
    return grouped
constant[ Returns a dict of lists of items, grouped by the given fields.
``fields`` can be a string (one field) or an iterable of field names.
]
variable[result] assign[=] call[name[defaultdict], parameter[name[list]]]
if compare[name[items] is constant[None]] begin[:]
variable[items] assign[=] call[name[self].items, parameter[]]
<ast.Try object at 0x7da2044c0100>
for taget[name[item]] in starred[name[items]] begin[:]
call[call[name[result]][call[name[key], parameter[name[item]]]].append, parameter[name[item]]]
return[name[result]] | keyword[def] identifier[group_by] ( identifier[self] , identifier[fields] , identifier[items] = keyword[None] ):
literal[string]
identifier[result] = identifier[defaultdict] ( identifier[list] )
keyword[if] identifier[items] keyword[is] keyword[None] :
identifier[items] = identifier[self] . identifier[items] ()
keyword[try] :
identifier[key] = identifier[operator] . identifier[attrgetter] ( identifier[fields] + literal[string] )
keyword[except] identifier[TypeError] :
keyword[def] identifier[key] ( identifier[obj] , identifier[names] = identifier[tuple] ( identifier[fields] )):
literal[string]
keyword[return] identifier[tuple] ( identifier[getattr] ( identifier[obj] , identifier[x] ) keyword[for] identifier[x] keyword[in] identifier[names] )
keyword[for] identifier[item] keyword[in] identifier[items] :
identifier[result] [ identifier[key] ( identifier[item] )]. identifier[append] ( identifier[item] )
keyword[return] identifier[result] | def group_by(self, fields, items=None):
""" Returns a dict of lists of items, grouped by the given fields.
``fields`` can be a string (one field) or an iterable of field names.
"""
result = defaultdict(list)
if items is None:
items = self.items() # depends on [control=['if'], data=['items']]
try:
key = operator.attrgetter(fields + '') # depends on [control=['try'], data=[]]
except TypeError:
def key(obj, names=tuple(fields)):
"""Helper to return group key tuple"""
return tuple((getattr(obj, x) for x in names)) # depends on [control=['except'], data=[]]
for item in items:
result[key(item)].append(item) # depends on [control=['for'], data=['item']]
return result |
def generate_cot(context, parent_path=None):
    """Format and sign the cot body, and write to disk.
    Args:
        context (scriptworker.context.Context): the scriptworker context.
        parent_path (str, optional): directory to write the chain of trust
            artifacts to. Defaults to ``artifact_dir/public/`` when falsy.
    Returns:
        str: the contents of the chain of trust artifact.
    Raises:
        ScriptWorkerException: on schema error.
    """
    cot = generate_cot_body(context)
    schema_path = context.config['cot_schema_path']
    schema = load_json_or_yaml(
        schema_path, is_path=True,
        exception=ScriptWorkerException,
        message="Can't read schema file {}: %(exc)s".format(schema_path)
    )
    # Validate before serializing so schema errors surface early.
    validate_json_schema(cot, schema, name="chain of trust")
    serialized = format_json(cot)
    if not parent_path:
        parent_path = os.path.join(context.config['artifact_dir'], 'public')
    unsigned_path = os.path.join(parent_path, 'chain-of-trust.json')
    write_to_file(unsigned_path, serialized)
    if context.config['sign_chain_of_trust']:
        signature_path = '{}.sig'.format(unsigned_path)
        private_key = ed25519_private_key_from_file(context.config['ed25519_private_key_path'])
        signature = private_key.sign(serialized.encode('utf-8'))
        write_to_file(signature_path, signature, file_type='binary')
    return serialized
return body | def function[generate_cot, parameter[context, parent_path]]:
constant[Format and sign the cot body, and write to disk.
Args:
context (scriptworker.context.Context): the scriptworker context.
parent_path (str, optional): The directory to write the chain of trust
artifacts to. If None, this is ``artifact_dir/public/``.
Defaults to None.
Returns:
str: the contents of the chain of trust artifact.
Raises:
ScriptWorkerException: on schema error.
]
variable[body] assign[=] call[name[generate_cot_body], parameter[name[context]]]
variable[schema] assign[=] call[name[load_json_or_yaml], parameter[call[name[context].config][constant[cot_schema_path]]]]
call[name[validate_json_schema], parameter[name[body], name[schema]]]
variable[body] assign[=] call[name[format_json], parameter[name[body]]]
variable[parent_path] assign[=] <ast.BoolOp object at 0x7da2045671f0>
variable[unsigned_path] assign[=] call[name[os].path.join, parameter[name[parent_path], constant[chain-of-trust.json]]]
call[name[write_to_file], parameter[name[unsigned_path], name[body]]]
if call[name[context].config][constant[sign_chain_of_trust]] begin[:]
variable[ed25519_signature_path] assign[=] call[constant[{}.sig].format, parameter[name[unsigned_path]]]
variable[ed25519_private_key] assign[=] call[name[ed25519_private_key_from_file], parameter[call[name[context].config][constant[ed25519_private_key_path]]]]
variable[ed25519_signature] assign[=] call[name[ed25519_private_key].sign, parameter[call[name[body].encode, parameter[constant[utf-8]]]]]
call[name[write_to_file], parameter[name[ed25519_signature_path], name[ed25519_signature]]]
return[name[body]] | keyword[def] identifier[generate_cot] ( identifier[context] , identifier[parent_path] = keyword[None] ):
literal[string]
identifier[body] = identifier[generate_cot_body] ( identifier[context] )
identifier[schema] = identifier[load_json_or_yaml] (
identifier[context] . identifier[config] [ literal[string] ], identifier[is_path] = keyword[True] ,
identifier[exception] = identifier[ScriptWorkerException] ,
identifier[message] = literal[string] . identifier[format] ( identifier[context] . identifier[config] [ literal[string] ])
)
identifier[validate_json_schema] ( identifier[body] , identifier[schema] , identifier[name] = literal[string] )
identifier[body] = identifier[format_json] ( identifier[body] )
identifier[parent_path] = identifier[parent_path] keyword[or] identifier[os] . identifier[path] . identifier[join] ( identifier[context] . identifier[config] [ literal[string] ], literal[string] )
identifier[unsigned_path] = identifier[os] . identifier[path] . identifier[join] ( identifier[parent_path] , literal[string] )
identifier[write_to_file] ( identifier[unsigned_path] , identifier[body] )
keyword[if] identifier[context] . identifier[config] [ literal[string] ]:
identifier[ed25519_signature_path] = literal[string] . identifier[format] ( identifier[unsigned_path] )
identifier[ed25519_private_key] = identifier[ed25519_private_key_from_file] ( identifier[context] . identifier[config] [ literal[string] ])
identifier[ed25519_signature] = identifier[ed25519_private_key] . identifier[sign] ( identifier[body] . identifier[encode] ( literal[string] ))
identifier[write_to_file] ( identifier[ed25519_signature_path] , identifier[ed25519_signature] , identifier[file_type] = literal[string] )
keyword[return] identifier[body] | def generate_cot(context, parent_path=None):
"""Format and sign the cot body, and write to disk.
Args:
context (scriptworker.context.Context): the scriptworker context.
parent_path (str, optional): The directory to write the chain of trust
artifacts to. If None, this is ``artifact_dir/public/``.
Defaults to None.
Returns:
str: the contents of the chain of trust artifact.
Raises:
ScriptWorkerException: on schema error.
"""
body = generate_cot_body(context)
schema = load_json_or_yaml(context.config['cot_schema_path'], is_path=True, exception=ScriptWorkerException, message="Can't read schema file {}: %(exc)s".format(context.config['cot_schema_path']))
validate_json_schema(body, schema, name='chain of trust')
body = format_json(body)
parent_path = parent_path or os.path.join(context.config['artifact_dir'], 'public')
unsigned_path = os.path.join(parent_path, 'chain-of-trust.json')
write_to_file(unsigned_path, body)
if context.config['sign_chain_of_trust']:
ed25519_signature_path = '{}.sig'.format(unsigned_path)
ed25519_private_key = ed25519_private_key_from_file(context.config['ed25519_private_key_path'])
ed25519_signature = ed25519_private_key.sign(body.encode('utf-8'))
write_to_file(ed25519_signature_path, ed25519_signature, file_type='binary') # depends on [control=['if'], data=[]]
return body |
def clean(self, *args, **kwargs):
"""
Potentially, these fields should validate against context-based
queries.
If a context variable has been transmitted to the field, it's being
used to 'reset' the queryset and make sure the chosen item fits to
the user context.
"""
self.transmit_agnocomplete_context()
return super(AgnocompleteMixin, self).clean(*args, **kwargs) | def function[clean, parameter[self]]:
constant[
Potentially, these fields should validate against context-based
queries.
If a context variable has been transmitted to the field, it's being
used to 'reset' the queryset and make sure the chosen item fits to
the user context.
]
call[name[self].transmit_agnocomplete_context, parameter[]]
return[call[call[name[super], parameter[name[AgnocompleteMixin], name[self]]].clean, parameter[<ast.Starred object at 0x7da18f58ceb0>]]] | keyword[def] identifier[clean] ( identifier[self] ,* identifier[args] ,** identifier[kwargs] ):
literal[string]
identifier[self] . identifier[transmit_agnocomplete_context] ()
keyword[return] identifier[super] ( identifier[AgnocompleteMixin] , identifier[self] ). identifier[clean] (* identifier[args] ,** identifier[kwargs] ) | def clean(self, *args, **kwargs):
"""
Potentially, these fields should validate against context-based
queries.
If a context variable has been transmitted to the field, it's being
used to 'reset' the queryset and make sure the chosen item fits to
the user context.
"""
self.transmit_agnocomplete_context()
return super(AgnocompleteMixin, self).clean(*args, **kwargs) |
def fastq(args):
"""
%prog fastq fastqfile
Convert reads formatted as FASTQ file, and convert to CA frg file.
"""
from jcvi.formats.fastq import guessoffset
p = OptionParser(fastq.__doc__)
p.add_option("--outtie", dest="outtie", default=False, action="store_true",
help="Are these outie reads? [default: %default]")
p.set_phred()
p.set_size()
opts, args = p.parse_args(args)
if len(args) < 1:
sys.exit(p.print_help())
fastqfiles = [get_abs_path(x) for x in args]
size = opts.size
outtie = opts.outtie
if size > 1000 and (not outtie):
logging.debug("[warn] long insert size {0} but not outtie".format(size))
mated = (size != 0)
libname = op.basename(args[0]).split(".")[0]
libname = libname.replace("_1_sequence", "")
frgfile = libname + ".frg"
mean, sv = get_mean_sv(opts.size)
cmd = "fastqToCA"
cmd += " -libraryname {0} ".format(libname)
fastqs = " ".join("-reads {0}".format(x) for x in fastqfiles)
if mated:
assert len(args) in (1, 2), "you need one or two fastq files for mated library"
fastqs = "-mates {0}".format(",".join(fastqfiles))
cmd += "-insertsize {0} {1} ".format(mean, sv)
cmd += fastqs
offset = int(opts.phred) if opts.phred else guessoffset([fastqfiles[0]])
illumina = (offset == 64)
if illumina:
cmd += " -type illumina"
if outtie:
cmd += " -outtie"
sh(cmd, outfile=frgfile) | def function[fastq, parameter[args]]:
constant[
%prog fastq fastqfile
Convert reads formatted as FASTQ file, and convert to CA frg file.
]
from relative_module[jcvi.formats.fastq] import module[guessoffset]
variable[p] assign[=] call[name[OptionParser], parameter[name[fastq].__doc__]]
call[name[p].add_option, parameter[constant[--outtie]]]
call[name[p].set_phred, parameter[]]
call[name[p].set_size, parameter[]]
<ast.Tuple object at 0x7da18dc9ae30> assign[=] call[name[p].parse_args, parameter[name[args]]]
if compare[call[name[len], parameter[name[args]]] less[<] constant[1]] begin[:]
call[name[sys].exit, parameter[call[name[p].print_help, parameter[]]]]
variable[fastqfiles] assign[=] <ast.ListComp object at 0x7da18dc98730>
variable[size] assign[=] name[opts].size
variable[outtie] assign[=] name[opts].outtie
if <ast.BoolOp object at 0x7da18dc9bb50> begin[:]
call[name[logging].debug, parameter[call[constant[[warn] long insert size {0} but not outtie].format, parameter[name[size]]]]]
variable[mated] assign[=] compare[name[size] not_equal[!=] constant[0]]
variable[libname] assign[=] call[call[call[name[op].basename, parameter[call[name[args]][constant[0]]]].split, parameter[constant[.]]]][constant[0]]
variable[libname] assign[=] call[name[libname].replace, parameter[constant[_1_sequence], constant[]]]
variable[frgfile] assign[=] binary_operation[name[libname] + constant[.frg]]
<ast.Tuple object at 0x7da18dc9a620> assign[=] call[name[get_mean_sv], parameter[name[opts].size]]
variable[cmd] assign[=] constant[fastqToCA]
<ast.AugAssign object at 0x7da18dc9a3b0>
variable[fastqs] assign[=] call[constant[ ].join, parameter[<ast.GeneratorExp object at 0x7da18dc98b80>]]
if name[mated] begin[:]
assert[compare[call[name[len], parameter[name[args]]] in tuple[[<ast.Constant object at 0x7da18dc98460>, <ast.Constant object at 0x7da18dc9be50>]]]]
variable[fastqs] assign[=] call[constant[-mates {0}].format, parameter[call[constant[,].join, parameter[name[fastqfiles]]]]]
<ast.AugAssign object at 0x7da18dc9b6d0>
<ast.AugAssign object at 0x7da18dc9b760>
variable[offset] assign[=] <ast.IfExp object at 0x7da18dc99750>
variable[illumina] assign[=] compare[name[offset] equal[==] constant[64]]
if name[illumina] begin[:]
<ast.AugAssign object at 0x7da18dc9bc10>
if name[outtie] begin[:]
<ast.AugAssign object at 0x7da18dc986a0>
call[name[sh], parameter[name[cmd]]] | keyword[def] identifier[fastq] ( identifier[args] ):
literal[string]
keyword[from] identifier[jcvi] . identifier[formats] . identifier[fastq] keyword[import] identifier[guessoffset]
identifier[p] = identifier[OptionParser] ( identifier[fastq] . identifier[__doc__] )
identifier[p] . identifier[add_option] ( literal[string] , identifier[dest] = literal[string] , identifier[default] = keyword[False] , identifier[action] = literal[string] ,
identifier[help] = literal[string] )
identifier[p] . identifier[set_phred] ()
identifier[p] . identifier[set_size] ()
identifier[opts] , identifier[args] = identifier[p] . identifier[parse_args] ( identifier[args] )
keyword[if] identifier[len] ( identifier[args] )< literal[int] :
identifier[sys] . identifier[exit] ( identifier[p] . identifier[print_help] ())
identifier[fastqfiles] =[ identifier[get_abs_path] ( identifier[x] ) keyword[for] identifier[x] keyword[in] identifier[args] ]
identifier[size] = identifier[opts] . identifier[size]
identifier[outtie] = identifier[opts] . identifier[outtie]
keyword[if] identifier[size] > literal[int] keyword[and] ( keyword[not] identifier[outtie] ):
identifier[logging] . identifier[debug] ( literal[string] . identifier[format] ( identifier[size] ))
identifier[mated] =( identifier[size] != literal[int] )
identifier[libname] = identifier[op] . identifier[basename] ( identifier[args] [ literal[int] ]). identifier[split] ( literal[string] )[ literal[int] ]
identifier[libname] = identifier[libname] . identifier[replace] ( literal[string] , literal[string] )
identifier[frgfile] = identifier[libname] + literal[string]
identifier[mean] , identifier[sv] = identifier[get_mean_sv] ( identifier[opts] . identifier[size] )
identifier[cmd] = literal[string]
identifier[cmd] += literal[string] . identifier[format] ( identifier[libname] )
identifier[fastqs] = literal[string] . identifier[join] ( literal[string] . identifier[format] ( identifier[x] ) keyword[for] identifier[x] keyword[in] identifier[fastqfiles] )
keyword[if] identifier[mated] :
keyword[assert] identifier[len] ( identifier[args] ) keyword[in] ( literal[int] , literal[int] ), literal[string]
identifier[fastqs] = literal[string] . identifier[format] ( literal[string] . identifier[join] ( identifier[fastqfiles] ))
identifier[cmd] += literal[string] . identifier[format] ( identifier[mean] , identifier[sv] )
identifier[cmd] += identifier[fastqs]
identifier[offset] = identifier[int] ( identifier[opts] . identifier[phred] ) keyword[if] identifier[opts] . identifier[phred] keyword[else] identifier[guessoffset] ([ identifier[fastqfiles] [ literal[int] ]])
identifier[illumina] =( identifier[offset] == literal[int] )
keyword[if] identifier[illumina] :
identifier[cmd] += literal[string]
keyword[if] identifier[outtie] :
identifier[cmd] += literal[string]
identifier[sh] ( identifier[cmd] , identifier[outfile] = identifier[frgfile] ) | def fastq(args):
"""
%prog fastq fastqfile
Convert reads formatted as FASTQ file, and convert to CA frg file.
"""
from jcvi.formats.fastq import guessoffset
p = OptionParser(fastq.__doc__)
p.add_option('--outtie', dest='outtie', default=False, action='store_true', help='Are these outie reads? [default: %default]')
p.set_phred()
p.set_size()
(opts, args) = p.parse_args(args)
if len(args) < 1:
sys.exit(p.print_help()) # depends on [control=['if'], data=[]]
fastqfiles = [get_abs_path(x) for x in args]
size = opts.size
outtie = opts.outtie
if size > 1000 and (not outtie):
logging.debug('[warn] long insert size {0} but not outtie'.format(size)) # depends on [control=['if'], data=[]]
mated = size != 0
libname = op.basename(args[0]).split('.')[0]
libname = libname.replace('_1_sequence', '')
frgfile = libname + '.frg'
(mean, sv) = get_mean_sv(opts.size)
cmd = 'fastqToCA'
cmd += ' -libraryname {0} '.format(libname)
fastqs = ' '.join(('-reads {0}'.format(x) for x in fastqfiles))
if mated:
assert len(args) in (1, 2), 'you need one or two fastq files for mated library'
fastqs = '-mates {0}'.format(','.join(fastqfiles))
cmd += '-insertsize {0} {1} '.format(mean, sv) # depends on [control=['if'], data=[]]
cmd += fastqs
offset = int(opts.phred) if opts.phred else guessoffset([fastqfiles[0]])
illumina = offset == 64
if illumina:
cmd += ' -type illumina' # depends on [control=['if'], data=[]]
if outtie:
cmd += ' -outtie' # depends on [control=['if'], data=[]]
sh(cmd, outfile=frgfile) |
def _extract_params(request_dict, param_list, param_fallback=False):
''' Extract pddb parameters from request '''
if not param_list or not request_dict:
return dict()
query = dict()
for param in param_list:
# Retrieve all items in the form of {param: value} and
# convert {param__key: value} into {param: {key: value}}
for query_key, query_value in request_dict.items():
if param == query_key:
query[param] = query_value
else:
query_key_parts = query_key.split('__', 1)
if param == query_key_parts[0]:
query[param] = {query_key_parts[1]: query_value}
# Convert special string "__null__" into Python None
nullifier = lambda d: {k:(nullifier(v) if isinstance(v, dict) else # pylint: disable=used-before-assignment
(None if v == '__null__' else v)) for k, v in d.items()}
# When fallback is enabled and no parameter matched, assume query refers to first parameter
if param_fallback and all([param_key not in query.keys() for param_key in param_list]):
query = {param_list[0]: dict(request_dict)}
# Return a dictionary with only the requested parameters
return {k:v for k, v in nullifier(query).items() if k in param_list} | def function[_extract_params, parameter[request_dict, param_list, param_fallback]]:
constant[ Extract pddb parameters from request ]
if <ast.BoolOp object at 0x7da1b141d6f0> begin[:]
return[call[name[dict], parameter[]]]
variable[query] assign[=] call[name[dict], parameter[]]
for taget[name[param]] in starred[name[param_list]] begin[:]
for taget[tuple[[<ast.Name object at 0x7da1b141c040>, <ast.Name object at 0x7da1b141d9f0>]]] in starred[call[name[request_dict].items, parameter[]]] begin[:]
if compare[name[param] equal[==] name[query_key]] begin[:]
call[name[query]][name[param]] assign[=] name[query_value]
variable[nullifier] assign[=] <ast.Lambda object at 0x7da1b141c190>
if <ast.BoolOp object at 0x7da1b141f940> begin[:]
variable[query] assign[=] dictionary[[<ast.Subscript object at 0x7da1b141de40>], [<ast.Call object at 0x7da1b141cca0>]]
return[<ast.DictComp object at 0x7da1b141e0e0>] | keyword[def] identifier[_extract_params] ( identifier[request_dict] , identifier[param_list] , identifier[param_fallback] = keyword[False] ):
literal[string]
keyword[if] keyword[not] identifier[param_list] keyword[or] keyword[not] identifier[request_dict] :
keyword[return] identifier[dict] ()
identifier[query] = identifier[dict] ()
keyword[for] identifier[param] keyword[in] identifier[param_list] :
keyword[for] identifier[query_key] , identifier[query_value] keyword[in] identifier[request_dict] . identifier[items] ():
keyword[if] identifier[param] == identifier[query_key] :
identifier[query] [ identifier[param] ]= identifier[query_value]
keyword[else] :
identifier[query_key_parts] = identifier[query_key] . identifier[split] ( literal[string] , literal[int] )
keyword[if] identifier[param] == identifier[query_key_parts] [ literal[int] ]:
identifier[query] [ identifier[param] ]={ identifier[query_key_parts] [ literal[int] ]: identifier[query_value] }
identifier[nullifier] = keyword[lambda] identifier[d] :{ identifier[k] :( identifier[nullifier] ( identifier[v] ) keyword[if] identifier[isinstance] ( identifier[v] , identifier[dict] ) keyword[else]
( keyword[None] keyword[if] identifier[v] == literal[string] keyword[else] identifier[v] )) keyword[for] identifier[k] , identifier[v] keyword[in] identifier[d] . identifier[items] ()}
keyword[if] identifier[param_fallback] keyword[and] identifier[all] ([ identifier[param_key] keyword[not] keyword[in] identifier[query] . identifier[keys] () keyword[for] identifier[param_key] keyword[in] identifier[param_list] ]):
identifier[query] ={ identifier[param_list] [ literal[int] ]: identifier[dict] ( identifier[request_dict] )}
keyword[return] { identifier[k] : identifier[v] keyword[for] identifier[k] , identifier[v] keyword[in] identifier[nullifier] ( identifier[query] ). identifier[items] () keyword[if] identifier[k] keyword[in] identifier[param_list] } | def _extract_params(request_dict, param_list, param_fallback=False):
""" Extract pddb parameters from request """
if not param_list or not request_dict:
return dict() # depends on [control=['if'], data=[]]
query = dict()
for param in param_list:
# Retrieve all items in the form of {param: value} and
# convert {param__key: value} into {param: {key: value}}
for (query_key, query_value) in request_dict.items():
if param == query_key:
query[param] = query_value # depends on [control=['if'], data=['param']]
else:
query_key_parts = query_key.split('__', 1)
if param == query_key_parts[0]:
query[param] = {query_key_parts[1]: query_value} # depends on [control=['if'], data=['param']] # depends on [control=['for'], data=[]] # depends on [control=['for'], data=['param']]
# Convert special string "__null__" into Python None
# pylint: disable=used-before-assignment
nullifier = lambda d: {k: nullifier(v) if isinstance(v, dict) else None if v == '__null__' else v for (k, v) in d.items()}
# When fallback is enabled and no parameter matched, assume query refers to first parameter
if param_fallback and all([param_key not in query.keys() for param_key in param_list]):
query = {param_list[0]: dict(request_dict)} # depends on [control=['if'], data=[]]
# Return a dictionary with only the requested parameters
return {k: v for (k, v) in nullifier(query).items() if k in param_list} |
def find_mecab_dictionary(names):
"""
Find a MeCab dictionary with a given name. The dictionary has to be
installed separately -- see wordfreq's README for instructions.
"""
suggested_pkg = names[0]
paths = [
os.path.expanduser('~/.local/lib/mecab/dic'),
'/var/lib/mecab/dic',
'/var/local/lib/mecab/dic',
'/usr/lib/mecab/dic',
'/usr/local/lib/mecab/dic',
'/usr/lib/x86_64-linux-gnu/mecab/dic',
]
full_paths = [os.path.join(path, name) for path in paths for name in names]
checked_paths = [path for path in full_paths if len(path) <= MAX_PATH_LENGTH]
for path in checked_paths:
if os.path.exists(path):
return path
error_lines = [
"Couldn't find the MeCab dictionary named %r." % suggested_pkg,
"You should download or use your system's package manager to install",
"the %r package." % suggested_pkg,
"",
"We looked in the following locations:"
] + ["\t%s" % path for path in checked_paths]
skipped_paths = [path for path in full_paths if len(path) > MAX_PATH_LENGTH]
if skipped_paths:
error_lines += [
"We had to skip these paths that are too long for MeCab to find:",
] + ["\t%s" % path for path in skipped_paths]
raise OSError('\n'.join(error_lines)) | def function[find_mecab_dictionary, parameter[names]]:
constant[
Find a MeCab dictionary with a given name. The dictionary has to be
installed separately -- see wordfreq's README for instructions.
]
variable[suggested_pkg] assign[=] call[name[names]][constant[0]]
variable[paths] assign[=] list[[<ast.Call object at 0x7da2044c0220>, <ast.Constant object at 0x7da2044c3ca0>, <ast.Constant object at 0x7da2044c2200>, <ast.Constant object at 0x7da2044c2530>, <ast.Constant object at 0x7da2044c2bf0>, <ast.Constant object at 0x7da2044c04f0>]]
variable[full_paths] assign[=] <ast.ListComp object at 0x7da2044c1480>
variable[checked_paths] assign[=] <ast.ListComp object at 0x7da2044c09a0>
for taget[name[path]] in starred[name[checked_paths]] begin[:]
if call[name[os].path.exists, parameter[name[path]]] begin[:]
return[name[path]]
variable[error_lines] assign[=] binary_operation[list[[<ast.BinOp object at 0x7da2044c1660>, <ast.Constant object at 0x7da2044c3130>, <ast.BinOp object at 0x7da2044c2140>, <ast.Constant object at 0x7da18f720f40>, <ast.Constant object at 0x7da18f721270>]] + <ast.ListComp object at 0x7da18f721900>]
variable[skipped_paths] assign[=] <ast.ListComp object at 0x7da18f723610>
if name[skipped_paths] begin[:]
<ast.AugAssign object at 0x7da18f721390>
<ast.Raise object at 0x7da18f09dc60> | keyword[def] identifier[find_mecab_dictionary] ( identifier[names] ):
literal[string]
identifier[suggested_pkg] = identifier[names] [ literal[int] ]
identifier[paths] =[
identifier[os] . identifier[path] . identifier[expanduser] ( literal[string] ),
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ,
]
identifier[full_paths] =[ identifier[os] . identifier[path] . identifier[join] ( identifier[path] , identifier[name] ) keyword[for] identifier[path] keyword[in] identifier[paths] keyword[for] identifier[name] keyword[in] identifier[names] ]
identifier[checked_paths] =[ identifier[path] keyword[for] identifier[path] keyword[in] identifier[full_paths] keyword[if] identifier[len] ( identifier[path] )<= identifier[MAX_PATH_LENGTH] ]
keyword[for] identifier[path] keyword[in] identifier[checked_paths] :
keyword[if] identifier[os] . identifier[path] . identifier[exists] ( identifier[path] ):
keyword[return] identifier[path]
identifier[error_lines] =[
literal[string] % identifier[suggested_pkg] ,
literal[string] ,
literal[string] % identifier[suggested_pkg] ,
literal[string] ,
literal[string]
]+[ literal[string] % identifier[path] keyword[for] identifier[path] keyword[in] identifier[checked_paths] ]
identifier[skipped_paths] =[ identifier[path] keyword[for] identifier[path] keyword[in] identifier[full_paths] keyword[if] identifier[len] ( identifier[path] )> identifier[MAX_PATH_LENGTH] ]
keyword[if] identifier[skipped_paths] :
identifier[error_lines] +=[
literal[string] ,
]+[ literal[string] % identifier[path] keyword[for] identifier[path] keyword[in] identifier[skipped_paths] ]
keyword[raise] identifier[OSError] ( literal[string] . identifier[join] ( identifier[error_lines] )) | def find_mecab_dictionary(names):
"""
Find a MeCab dictionary with a given name. The dictionary has to be
installed separately -- see wordfreq's README for instructions.
"""
suggested_pkg = names[0]
paths = [os.path.expanduser('~/.local/lib/mecab/dic'), '/var/lib/mecab/dic', '/var/local/lib/mecab/dic', '/usr/lib/mecab/dic', '/usr/local/lib/mecab/dic', '/usr/lib/x86_64-linux-gnu/mecab/dic']
full_paths = [os.path.join(path, name) for path in paths for name in names]
checked_paths = [path for path in full_paths if len(path) <= MAX_PATH_LENGTH]
for path in checked_paths:
if os.path.exists(path):
return path # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['path']]
error_lines = ["Couldn't find the MeCab dictionary named %r." % suggested_pkg, "You should download or use your system's package manager to install", 'the %r package.' % suggested_pkg, '', 'We looked in the following locations:'] + ['\t%s' % path for path in checked_paths]
skipped_paths = [path for path in full_paths if len(path) > MAX_PATH_LENGTH]
if skipped_paths:
error_lines += ['We had to skip these paths that are too long for MeCab to find:'] + ['\t%s' % path for path in skipped_paths] # depends on [control=['if'], data=[]]
raise OSError('\n'.join(error_lines)) |
def tinygps_to_degdec(lat, lng):
"""Converts TinyGPS formats (Decimal Degrees to e-5) to Degrees Decimal."""
x = float(lat[:-5] + '.' + lat[-5:])
y = float(lng[:-5] + '.' + lng[-5:])
return x, y | def function[tinygps_to_degdec, parameter[lat, lng]]:
constant[Converts TinyGPS formats (Decimal Degrees to e-5) to Degrees Decimal.]
variable[x] assign[=] call[name[float], parameter[binary_operation[binary_operation[call[name[lat]][<ast.Slice object at 0x7da1b14e5f00>] + constant[.]] + call[name[lat]][<ast.Slice object at 0x7da1b14e60e0>]]]]
variable[y] assign[=] call[name[float], parameter[binary_operation[binary_operation[call[name[lng]][<ast.Slice object at 0x7da1b14e74f0>] + constant[.]] + call[name[lng]][<ast.Slice object at 0x7da1b14e6950>]]]]
return[tuple[[<ast.Name object at 0x7da1b14e5450>, <ast.Name object at 0x7da1b14e5780>]]] | keyword[def] identifier[tinygps_to_degdec] ( identifier[lat] , identifier[lng] ):
literal[string]
identifier[x] = identifier[float] ( identifier[lat] [:- literal[int] ]+ literal[string] + identifier[lat] [- literal[int] :])
identifier[y] = identifier[float] ( identifier[lng] [:- literal[int] ]+ literal[string] + identifier[lng] [- literal[int] :])
keyword[return] identifier[x] , identifier[y] | def tinygps_to_degdec(lat, lng):
"""Converts TinyGPS formats (Decimal Degrees to e-5) to Degrees Decimal."""
x = float(lat[:-5] + '.' + lat[-5:])
y = float(lng[:-5] + '.' + lng[-5:])
return (x, y) |
def tags_published():
"""
Return the published tags.
"""
from tagging.models import Tag
from zinnia.models.entry import Entry
tags_entry_published = Tag.objects.usage_for_queryset(
Entry.published.all())
# Need to do that until the issue #44 of django-tagging is fixed
return Tag.objects.filter(name__in=[t.name for t in tags_entry_published]) | def function[tags_published, parameter[]]:
constant[
Return the published tags.
]
from relative_module[tagging.models] import module[Tag]
from relative_module[zinnia.models.entry] import module[Entry]
variable[tags_entry_published] assign[=] call[name[Tag].objects.usage_for_queryset, parameter[call[name[Entry].published.all, parameter[]]]]
return[call[name[Tag].objects.filter, parameter[]]] | keyword[def] identifier[tags_published] ():
literal[string]
keyword[from] identifier[tagging] . identifier[models] keyword[import] identifier[Tag]
keyword[from] identifier[zinnia] . identifier[models] . identifier[entry] keyword[import] identifier[Entry]
identifier[tags_entry_published] = identifier[Tag] . identifier[objects] . identifier[usage_for_queryset] (
identifier[Entry] . identifier[published] . identifier[all] ())
keyword[return] identifier[Tag] . identifier[objects] . identifier[filter] ( identifier[name__in] =[ identifier[t] . identifier[name] keyword[for] identifier[t] keyword[in] identifier[tags_entry_published] ]) | def tags_published():
"""
Return the published tags.
"""
from tagging.models import Tag
from zinnia.models.entry import Entry
tags_entry_published = Tag.objects.usage_for_queryset(Entry.published.all())
# Need to do that until the issue #44 of django-tagging is fixed
return Tag.objects.filter(name__in=[t.name for t in tags_entry_published]) |
def db_snapshot_append(cls, cur, block_id, consensus_hash, ops_hash, timestamp):
"""
Append hash info for the last block processed, and the time at which it was done.
Meant to be executed as part of a transaction.
Return True on success
Raise an exception on invalid block number
Abort on db error
"""
query = 'INSERT INTO snapshots (block_id,consensus_hash,ops_hash,timestamp) VALUES (?,?,?,?);'
args = (block_id,consensus_hash,ops_hash,timestamp)
cls.db_query_execute(cur, query, args)
return True | def function[db_snapshot_append, parameter[cls, cur, block_id, consensus_hash, ops_hash, timestamp]]:
constant[
Append hash info for the last block processed, and the time at which it was done.
Meant to be executed as part of a transaction.
Return True on success
Raise an exception on invalid block number
Abort on db error
]
variable[query] assign[=] constant[INSERT INTO snapshots (block_id,consensus_hash,ops_hash,timestamp) VALUES (?,?,?,?);]
variable[args] assign[=] tuple[[<ast.Name object at 0x7da18ede72b0>, <ast.Name object at 0x7da18ede44c0>, <ast.Name object at 0x7da18ede6f20>, <ast.Name object at 0x7da18ede7970>]]
call[name[cls].db_query_execute, parameter[name[cur], name[query], name[args]]]
return[constant[True]] | keyword[def] identifier[db_snapshot_append] ( identifier[cls] , identifier[cur] , identifier[block_id] , identifier[consensus_hash] , identifier[ops_hash] , identifier[timestamp] ):
literal[string]
identifier[query] = literal[string]
identifier[args] =( identifier[block_id] , identifier[consensus_hash] , identifier[ops_hash] , identifier[timestamp] )
identifier[cls] . identifier[db_query_execute] ( identifier[cur] , identifier[query] , identifier[args] )
keyword[return] keyword[True] | def db_snapshot_append(cls, cur, block_id, consensus_hash, ops_hash, timestamp):
"""
Append hash info for the last block processed, and the time at which it was done.
Meant to be executed as part of a transaction.
Return True on success
Raise an exception on invalid block number
Abort on db error
"""
query = 'INSERT INTO snapshots (block_id,consensus_hash,ops_hash,timestamp) VALUES (?,?,?,?);'
args = (block_id, consensus_hash, ops_hash, timestamp)
cls.db_query_execute(cur, query, args)
return True |
def authenticate_identify(self, api_token, override=True):
"""Set credentials for Identify authentication.
Args:
api_token (str): Token issued to your Application through the Gem
Developer Console.
override (boolean): Replace existing Application credentials.
"""
if (self.context.has_auth_params('Gem-Identify') and not override):
raise OverrideError('Gem-Identify')
if (not api_token or
not self.context.authorize('Gem-Identify', api_token=api_token)):
raise AuthUsageError(self.context, 'Gem-Identify')
return True | def function[authenticate_identify, parameter[self, api_token, override]]:
constant[Set credentials for Identify authentication.
Args:
api_token (str): Token issued to your Application through the Gem
Developer Console.
override (boolean): Replace existing Application credentials.
]
if <ast.BoolOp object at 0x7da1b14d1ea0> begin[:]
<ast.Raise object at 0x7da1b14d3100>
if <ast.BoolOp object at 0x7da1b14d3070> begin[:]
<ast.Raise object at 0x7da1b14d0fa0>
return[constant[True]] | keyword[def] identifier[authenticate_identify] ( identifier[self] , identifier[api_token] , identifier[override] = keyword[True] ):
literal[string]
keyword[if] ( identifier[self] . identifier[context] . identifier[has_auth_params] ( literal[string] ) keyword[and] keyword[not] identifier[override] ):
keyword[raise] identifier[OverrideError] ( literal[string] )
keyword[if] ( keyword[not] identifier[api_token] keyword[or]
keyword[not] identifier[self] . identifier[context] . identifier[authorize] ( literal[string] , identifier[api_token] = identifier[api_token] )):
keyword[raise] identifier[AuthUsageError] ( identifier[self] . identifier[context] , literal[string] )
keyword[return] keyword[True] | def authenticate_identify(self, api_token, override=True):
"""Set credentials for Identify authentication.
Args:
api_token (str): Token issued to your Application through the Gem
Developer Console.
override (boolean): Replace existing Application credentials.
"""
if self.context.has_auth_params('Gem-Identify') and (not override):
raise OverrideError('Gem-Identify') # depends on [control=['if'], data=[]]
if not api_token or not self.context.authorize('Gem-Identify', api_token=api_token):
raise AuthUsageError(self.context, 'Gem-Identify') # depends on [control=['if'], data=[]]
return True |
def scenario_risk(riskinputs, riskmodel, param, monitor):
"""
Core function for a scenario computation.
:param riskinput:
a of :class:`openquake.risklib.riskinput.RiskInput` object
:param riskmodel:
a :class:`openquake.risklib.riskinput.CompositeRiskModel` instance
:param param:
dictionary of extra parameters
:param monitor:
:class:`openquake.baselib.performance.Monitor` instance
:returns:
a dictionary {
'agg': array of shape (E, L, R, 2),
'avg': list of tuples (lt_idx, rlz_idx, asset_ordinal, statistics)
}
where E is the number of simulated events, L the number of loss types,
R the number of realizations and statistics is an array of shape
(n, R, 4), with n the number of assets in the current riskinput object
"""
E = param['E']
L = len(riskmodel.loss_types)
result = dict(agg=numpy.zeros((E, L), F32), avg=[],
all_losses=AccumDict(accum={}))
for ri in riskinputs:
for out in riskmodel.gen_outputs(ri, monitor, param['epspath']):
r = out.rlzi
weight = param['weights'][r]
slc = param['event_slice'](r)
for l, loss_type in enumerate(riskmodel.loss_types):
losses = out[loss_type]
if numpy.product(losses.shape) == 0: # happens for all NaNs
continue
stats = numpy.zeros(len(ri.assets), stat_dt) # mean, stddev
for a, asset in enumerate(ri.assets):
stats['mean'][a] = losses[a].mean()
stats['stddev'][a] = losses[a].std(ddof=1)
result['avg'].append((l, r, asset['ordinal'], stats[a]))
agglosses = losses.sum(axis=0) # shape num_gmfs
result['agg'][slc, l] += agglosses * weight
if param['asset_loss_table']:
aids = ri.assets['ordinal']
result['all_losses'][l, r] += AccumDict(zip(aids, losses))
return result | def function[scenario_risk, parameter[riskinputs, riskmodel, param, monitor]]:
constant[
Core function for a scenario computation.
:param riskinput:
a of :class:`openquake.risklib.riskinput.RiskInput` object
:param riskmodel:
a :class:`openquake.risklib.riskinput.CompositeRiskModel` instance
:param param:
dictionary of extra parameters
:param monitor:
:class:`openquake.baselib.performance.Monitor` instance
:returns:
a dictionary {
'agg': array of shape (E, L, R, 2),
'avg': list of tuples (lt_idx, rlz_idx, asset_ordinal, statistics)
}
where E is the number of simulated events, L the number of loss types,
R the number of realizations and statistics is an array of shape
(n, R, 4), with n the number of assets in the current riskinput object
]
variable[E] assign[=] call[name[param]][constant[E]]
variable[L] assign[=] call[name[len], parameter[name[riskmodel].loss_types]]
variable[result] assign[=] call[name[dict], parameter[]]
for taget[name[ri]] in starred[name[riskinputs]] begin[:]
for taget[name[out]] in starred[call[name[riskmodel].gen_outputs, parameter[name[ri], name[monitor], call[name[param]][constant[epspath]]]]] begin[:]
variable[r] assign[=] name[out].rlzi
variable[weight] assign[=] call[call[name[param]][constant[weights]]][name[r]]
variable[slc] assign[=] call[call[name[param]][constant[event_slice]], parameter[name[r]]]
for taget[tuple[[<ast.Name object at 0x7da18f8132e0>, <ast.Name object at 0x7da18f813a00>]]] in starred[call[name[enumerate], parameter[name[riskmodel].loss_types]]] begin[:]
variable[losses] assign[=] call[name[out]][name[loss_type]]
if compare[call[name[numpy].product, parameter[name[losses].shape]] equal[==] constant[0]] begin[:]
continue
variable[stats] assign[=] call[name[numpy].zeros, parameter[call[name[len], parameter[name[ri].assets]], name[stat_dt]]]
for taget[tuple[[<ast.Name object at 0x7da18f8127d0>, <ast.Name object at 0x7da18f813730>]]] in starred[call[name[enumerate], parameter[name[ri].assets]]] begin[:]
call[call[name[stats]][constant[mean]]][name[a]] assign[=] call[call[name[losses]][name[a]].mean, parameter[]]
call[call[name[stats]][constant[stddev]]][name[a]] assign[=] call[call[name[losses]][name[a]].std, parameter[]]
call[call[name[result]][constant[avg]].append, parameter[tuple[[<ast.Name object at 0x7da207f9b0a0>, <ast.Name object at 0x7da207f98730>, <ast.Subscript object at 0x7da207f998a0>, <ast.Subscript object at 0x7da207f98e20>]]]]
variable[agglosses] assign[=] call[name[losses].sum, parameter[]]
<ast.AugAssign object at 0x7da207f99330>
if call[name[param]][constant[asset_loss_table]] begin[:]
variable[aids] assign[=] call[name[ri].assets][constant[ordinal]]
<ast.AugAssign object at 0x7da207f99480>
return[name[result]] | keyword[def] identifier[scenario_risk] ( identifier[riskinputs] , identifier[riskmodel] , identifier[param] , identifier[monitor] ):
literal[string]
identifier[E] = identifier[param] [ literal[string] ]
identifier[L] = identifier[len] ( identifier[riskmodel] . identifier[loss_types] )
identifier[result] = identifier[dict] ( identifier[agg] = identifier[numpy] . identifier[zeros] (( identifier[E] , identifier[L] ), identifier[F32] ), identifier[avg] =[],
identifier[all_losses] = identifier[AccumDict] ( identifier[accum] ={}))
keyword[for] identifier[ri] keyword[in] identifier[riskinputs] :
keyword[for] identifier[out] keyword[in] identifier[riskmodel] . identifier[gen_outputs] ( identifier[ri] , identifier[monitor] , identifier[param] [ literal[string] ]):
identifier[r] = identifier[out] . identifier[rlzi]
identifier[weight] = identifier[param] [ literal[string] ][ identifier[r] ]
identifier[slc] = identifier[param] [ literal[string] ]( identifier[r] )
keyword[for] identifier[l] , identifier[loss_type] keyword[in] identifier[enumerate] ( identifier[riskmodel] . identifier[loss_types] ):
identifier[losses] = identifier[out] [ identifier[loss_type] ]
keyword[if] identifier[numpy] . identifier[product] ( identifier[losses] . identifier[shape] )== literal[int] :
keyword[continue]
identifier[stats] = identifier[numpy] . identifier[zeros] ( identifier[len] ( identifier[ri] . identifier[assets] ), identifier[stat_dt] )
keyword[for] identifier[a] , identifier[asset] keyword[in] identifier[enumerate] ( identifier[ri] . identifier[assets] ):
identifier[stats] [ literal[string] ][ identifier[a] ]= identifier[losses] [ identifier[a] ]. identifier[mean] ()
identifier[stats] [ literal[string] ][ identifier[a] ]= identifier[losses] [ identifier[a] ]. identifier[std] ( identifier[ddof] = literal[int] )
identifier[result] [ literal[string] ]. identifier[append] (( identifier[l] , identifier[r] , identifier[asset] [ literal[string] ], identifier[stats] [ identifier[a] ]))
identifier[agglosses] = identifier[losses] . identifier[sum] ( identifier[axis] = literal[int] )
identifier[result] [ literal[string] ][ identifier[slc] , identifier[l] ]+= identifier[agglosses] * identifier[weight]
keyword[if] identifier[param] [ literal[string] ]:
identifier[aids] = identifier[ri] . identifier[assets] [ literal[string] ]
identifier[result] [ literal[string] ][ identifier[l] , identifier[r] ]+= identifier[AccumDict] ( identifier[zip] ( identifier[aids] , identifier[losses] ))
keyword[return] identifier[result] | def scenario_risk(riskinputs, riskmodel, param, monitor):
"""
Core function for a scenario computation.
:param riskinput:
a of :class:`openquake.risklib.riskinput.RiskInput` object
:param riskmodel:
a :class:`openquake.risklib.riskinput.CompositeRiskModel` instance
:param param:
dictionary of extra parameters
:param monitor:
:class:`openquake.baselib.performance.Monitor` instance
:returns:
a dictionary {
'agg': array of shape (E, L, R, 2),
'avg': list of tuples (lt_idx, rlz_idx, asset_ordinal, statistics)
}
where E is the number of simulated events, L the number of loss types,
R the number of realizations and statistics is an array of shape
(n, R, 4), with n the number of assets in the current riskinput object
"""
E = param['E']
L = len(riskmodel.loss_types)
result = dict(agg=numpy.zeros((E, L), F32), avg=[], all_losses=AccumDict(accum={}))
for ri in riskinputs:
for out in riskmodel.gen_outputs(ri, monitor, param['epspath']):
r = out.rlzi
weight = param['weights'][r]
slc = param['event_slice'](r)
for (l, loss_type) in enumerate(riskmodel.loss_types):
losses = out[loss_type]
if numpy.product(losses.shape) == 0: # happens for all NaNs
continue # depends on [control=['if'], data=[]]
stats = numpy.zeros(len(ri.assets), stat_dt) # mean, stddev
for (a, asset) in enumerate(ri.assets):
stats['mean'][a] = losses[a].mean()
stats['stddev'][a] = losses[a].std(ddof=1)
result['avg'].append((l, r, asset['ordinal'], stats[a])) # depends on [control=['for'], data=[]]
agglosses = losses.sum(axis=0) # shape num_gmfs
result['agg'][slc, l] += agglosses * weight
if param['asset_loss_table']:
aids = ri.assets['ordinal']
result['all_losses'][l, r] += AccumDict(zip(aids, losses)) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]] # depends on [control=['for'], data=['out']] # depends on [control=['for'], data=['ri']]
return result |
def env_valid(env):
  """
  Check whether an environment name is one of the configured environments.

  Args:
    env: the env to check

  Returns:
    True if the env is valid

  Raises:
    ValueError with message if the env is not valid
  """
  if env in EFConfig.ENV_LIST:
    return True
  raise ValueError("unknown env: {}; env must be one of: ".format(env) + ", ".join(EFConfig.ENV_LIST))
constant[
Given an env, determine if it's valid
Args:
env: the env to check
Returns:
True if the env is valid
Raises:
ValueError with message if the env is not valid
]
if compare[name[env] <ast.NotIn object at 0x7da2590d7190> name[EFConfig].ENV_LIST] begin[:]
<ast.Raise object at 0x7da1b1b01990>
return[constant[True]] | keyword[def] identifier[env_valid] ( identifier[env] ):
literal[string]
keyword[if] identifier[env] keyword[not] keyword[in] identifier[EFConfig] . identifier[ENV_LIST] :
keyword[raise] identifier[ValueError] ( literal[string] . identifier[format] ( identifier[env] )+ literal[string] . identifier[join] ( identifier[EFConfig] . identifier[ENV_LIST] ))
keyword[return] keyword[True] | def env_valid(env):
"""
Given an env, determine if it's valid
Args:
env: the env to check
Returns:
True if the env is valid
Raises:
ValueError with message if the env is not valid
"""
if env not in EFConfig.ENV_LIST:
raise ValueError('unknown env: {}; env must be one of: '.format(env) + ', '.join(EFConfig.ENV_LIST)) # depends on [control=['if'], data=['env']]
return True |
def index(self, strictindex):
        """
        Return a chunk in a sequence referenced by index.
        """
        # Translate the strict index into the underlying ruamel index first,
        # then select the corresponding child of the current pointer.
        ruamel_key = self.ruamelindex(strictindex)
        return self._select(self._pointer.index(ruamel_key))
constant[
Return a chunk in a sequence referenced by index.
]
return[call[name[self]._select, parameter[call[name[self]._pointer.index, parameter[call[name[self].ruamelindex, parameter[name[strictindex]]]]]]]] | keyword[def] identifier[index] ( identifier[self] , identifier[strictindex] ):
literal[string]
keyword[return] identifier[self] . identifier[_select] ( identifier[self] . identifier[_pointer] . identifier[index] ( identifier[self] . identifier[ruamelindex] ( identifier[strictindex] ))) | def index(self, strictindex):
"""
Return a chunk in a sequence referenced by index.
"""
return self._select(self._pointer.index(self.ruamelindex(strictindex))) |
def parseVersionParts(text, seps=vseps):
    '''
    Extract a list of major/minor/patch integer parts from a string.

    Args:
        text (str): String to parse.
        seps (tuple): A tuple or list of separators to use when parsing the version string.

    Examples:
        Parse a simple version string into a major and minor parts::

            parts = parseVersionParts('1.2')

        Parse a complex version string into a major and minor parts::

            parts = parseVersionParts('wowsoft_1.2')

        Parse a simple version string into a major, minor and patch parts. Parts after the "3." are dropped from the
        results::

            parts = parseVersionParts('1.2.3.4.5')

    Notes:
        This attempts to brute force out integers from the version string by stripping any leading ascii letters and
        part separators, and then regexing out numeric parts optionally followed by part separators. It will stop at
        the first mixed-character part encountered. For example, "1.2-3a" would only parse out the "1" and "2" from
        the string.

    Returns:
        dict: A dictionary containing up to three keys ('major', 'minor' and 'patch'), or None if no numeric
        parts could be extracted from the string.
    '''
    # Join seps together so they can be used inside a regex character class.
    seps = ''.join(seps)
    # Strip whitespace
    text = text.strip()
    # Strip off leading chars
    text = text.lstrip(string.ascii_letters)
    # Strip off any leading separator which may be present
    text = text.lstrip(seps)
    # A run of digits followed by either a run of separators or end-of-string.
    pattern = r'^(\d+)([{}]+|$)'.format(regex.escape(seps))
    parts = []
    off = 0
    while True:
        m = regex.search(pattern, text[off:])
        if not m:
            break
        off += m.end()
        parts.append(int(m.group(1)))
    if not parts:
        return None
    # Anything beyond the first three parts is silently dropped by zip().
    keys = ('major', 'minor', 'patch')
    return dict(zip(keys, parts))
constant[
Extract a list of major/minor/version integer strings from a string.
Args:
text (str): String to parse
seps (tuple): A tuple or list of separators to use when parsing the version string.
Examples:
Parse a simple version string into a major and minor parts::
parts = parseVersionParts('1.2')
Parse a complex version string into a major and minor parts::
parts = parseVersionParts('wowsoft_1.2')
Parse a simple version string into a major, minor and patch parts. Parts after the "3." are dropped from the
results::
parts = parseVersionParts('1.2.3.4.5')
Notes:
This attempts to brute force out integers from the version string by stripping any leading ascii letters and
part separators, and then regexing out numeric parts optionally followed by part separators. It will stop at
the first mixed-character part encountered. For example, "1.2-3a" would only parse out the "1" and "2" from
the string.
Returns:
dict: Either a empty dictionary or dictionary containing up to three keys, 'major', 'minor' and 'patch'.
]
variable[seps] assign[=] call[constant[].join, parameter[name[seps]]]
variable[text] assign[=] call[name[text].strip, parameter[]]
variable[text] assign[=] call[name[text].lstrip, parameter[name[string].ascii_letters]]
variable[text] assign[=] call[name[text].lstrip, parameter[name[seps]]]
variable[pattern] assign[=] call[constant[^(\d+)([{}]+|$)].format, parameter[call[name[regex].escape, parameter[name[seps]]]]]
variable[parts] assign[=] list[[]]
variable[ret] assign[=] dictionary[[], []]
variable[off] assign[=] constant[0]
while constant[True] begin[:]
variable[m] assign[=] call[name[regex].search, parameter[name[pattern], call[name[text]][<ast.Slice object at 0x7da1b230b610>]]]
if <ast.UnaryOp object at 0x7da1b230ae60> begin[:]
break
<ast.AugAssign object at 0x7da1b2309c90>
<ast.Tuple object at 0x7da1b230aa10> assign[=] call[name[m].groups, parameter[]]
call[name[parts].append, parameter[call[name[int], parameter[name[p]]]]]
if <ast.UnaryOp object at 0x7da1b2309e70> begin[:]
return[constant[None]]
variable[keys] assign[=] tuple[[<ast.Constant object at 0x7da1b230a020>, <ast.Constant object at 0x7da1b230ba90>, <ast.Constant object at 0x7da1b230a0e0>]]
call[name[ret].update, parameter[call[name[zip], parameter[name[keys], name[parts]]]]]
return[name[ret]] | keyword[def] identifier[parseVersionParts] ( identifier[text] , identifier[seps] = identifier[vseps] ):
literal[string]
identifier[seps] = literal[string] . identifier[join] ( identifier[seps] )
identifier[text] = identifier[text] . identifier[strip] ()
identifier[text] = identifier[text] . identifier[lstrip] ( identifier[string] . identifier[ascii_letters] )
identifier[text] = identifier[text] . identifier[lstrip] ( identifier[seps] )
identifier[pattern] = literal[string] . identifier[format] ( identifier[regex] . identifier[escape] ( identifier[seps] ))
identifier[parts] =[]
identifier[ret] ={}
identifier[off] = literal[int]
keyword[while] keyword[True] :
identifier[m] = identifier[regex] . identifier[search] ( identifier[pattern] , identifier[text] [ identifier[off] :])
keyword[if] keyword[not] identifier[m] :
keyword[break]
identifier[off] += identifier[m] . identifier[end] ()
identifier[p] , identifier[s] = identifier[m] . identifier[groups] ()
identifier[parts] . identifier[append] ( identifier[int] ( identifier[p] ))
keyword[if] keyword[not] identifier[parts] :
keyword[return] keyword[None]
identifier[keys] =( literal[string] , literal[string] , literal[string] )
identifier[ret] . identifier[update] ( identifier[zip] ( identifier[keys] , identifier[parts] ))
keyword[return] identifier[ret] | def parseVersionParts(text, seps=vseps):
"""
Extract a list of major/minor/version integer strings from a string.
Args:
text (str): String to parse
seps (tuple): A tuple or list of separators to use when parsing the version string.
Examples:
Parse a simple version string into a major and minor parts::
parts = parseVersionParts('1.2')
Parse a complex version string into a major and minor parts::
parts = parseVersionParts('wowsoft_1.2')
Parse a simple version string into a major, minor and patch parts. Parts after the "3." are dropped from the
results::
parts = parseVersionParts('1.2.3.4.5')
Notes:
This attempts to brute force out integers from the version string by stripping any leading ascii letters and
part separators, and then regexing out numeric parts optionally followed by part separators. It will stop at
the first mixed-character part encountered. For example, "1.2-3a" would only parse out the "1" and "2" from
the string.
Returns:
dict: Either a empty dictionary or dictionary containing up to three keys, 'major', 'minor' and 'patch'.
"""
# Join seps together
seps = ''.join(seps)
# Strip whitespace
text = text.strip()
# Strip off leading chars
text = text.lstrip(string.ascii_letters)
# Strip off any leading separator which may be present
text = text.lstrip(seps)
pattern = '^(\\d+)([{}]+|$)'.format(regex.escape(seps))
parts = []
ret = {}
off = 0
while True:
m = regex.search(pattern, text[off:])
if not m:
break # depends on [control=['if'], data=[]]
off += m.end()
(p, s) = m.groups()
parts.append(int(p)) # depends on [control=['while'], data=[]]
if not parts:
return None # depends on [control=['if'], data=[]]
keys = ('major', 'minor', 'patch')
ret.update(zip(keys, parts))
return ret |
def repeat(self, repeats, *args, **kwargs):
    """
    Repeat elements of an array.

    See Also
    --------
    numpy.ndarray.repeat
    """
    # Reject any extra numpy-compat arguments before doing the work.
    nv.validate_repeat(args, kwargs)
    cls = type(self)
    return cls(self._data.repeat(repeats).view('i8'), dtype=self.dtype)
constant[
Repeat elements of an array.
See Also
--------
numpy.ndarray.repeat
]
call[name[nv].validate_repeat, parameter[name[args], name[kwargs]]]
variable[values] assign[=] call[name[self]._data.repeat, parameter[name[repeats]]]
return[call[call[name[type], parameter[name[self]]], parameter[call[name[values].view, parameter[constant[i8]]]]]] | keyword[def] identifier[repeat] ( identifier[self] , identifier[repeats] ,* identifier[args] ,** identifier[kwargs] ):
literal[string]
identifier[nv] . identifier[validate_repeat] ( identifier[args] , identifier[kwargs] )
identifier[values] = identifier[self] . identifier[_data] . identifier[repeat] ( identifier[repeats] )
keyword[return] identifier[type] ( identifier[self] )( identifier[values] . identifier[view] ( literal[string] ), identifier[dtype] = identifier[self] . identifier[dtype] ) | def repeat(self, repeats, *args, **kwargs):
"""
Repeat elements of an array.
See Also
--------
numpy.ndarray.repeat
"""
nv.validate_repeat(args, kwargs)
values = self._data.repeat(repeats)
return type(self)(values.view('i8'), dtype=self.dtype) |
def change_custom_contact_var(self, contact, varname, varvalue):
        """Change custom contact variable

        Format of the line that triggers function call::

        CHANGE_CUSTOM_CONTACT_VAR;<contact_name>;<varname>;<varvalue>

        :param contact: contact to edit
        :type contact: alignak.objects.contact.Contact
        :param varname: variable name to change
        :type varname: str
        :param varvalue: variable new value
        :type varvalue: str
        :return: None
        """
        # Custom variable names are stored upper-cased; unknown names are ignored.
        key = varname.upper()
        if key not in contact.customs:
            return
        contact.modified_attributes |= DICT_MODATTR["MODATTR_CUSTOM_VARIABLE"].value
        contact.customs[key] = varvalue
        self.send_an_element(contact.get_update_status_brok())
constant[Change custom contact variable
Format of the line that triggers function call::
CHANGE_CUSTOM_CONTACT_VAR;<contact_name>;<varname>;<varvalue>
:param contact: contact to edit
:type contact: alignak.objects.contact.Contact
:param varname: variable name to change
:type varname: str
:param varvalue: variable new value
:type varvalue: str
:return: None
]
if compare[call[name[varname].upper, parameter[]] in name[contact].customs] begin[:]
<ast.AugAssign object at 0x7da2045641c0>
call[name[contact].customs][call[name[varname].upper, parameter[]]] assign[=] name[varvalue]
call[name[self].send_an_element, parameter[call[name[contact].get_update_status_brok, parameter[]]]] | keyword[def] identifier[change_custom_contact_var] ( identifier[self] , identifier[contact] , identifier[varname] , identifier[varvalue] ):
literal[string]
keyword[if] identifier[varname] . identifier[upper] () keyword[in] identifier[contact] . identifier[customs] :
identifier[contact] . identifier[modified_attributes] |= identifier[DICT_MODATTR] [ literal[string] ]. identifier[value]
identifier[contact] . identifier[customs] [ identifier[varname] . identifier[upper] ()]= identifier[varvalue]
identifier[self] . identifier[send_an_element] ( identifier[contact] . identifier[get_update_status_brok] ()) | def change_custom_contact_var(self, contact, varname, varvalue):
"""Change custom contact variable
Format of the line that triggers function call::
CHANGE_CUSTOM_CONTACT_VAR;<contact_name>;<varname>;<varvalue>
:param contact: contact to edit
:type contact: alignak.objects.contact.Contact
:param varname: variable name to change
:type varname: str
:param varvalue: variable new value
:type varvalue: str
:return: None
"""
if varname.upper() in contact.customs:
contact.modified_attributes |= DICT_MODATTR['MODATTR_CUSTOM_VARIABLE'].value
contact.customs[varname.upper()] = varvalue
self.send_an_element(contact.get_update_status_brok()) # depends on [control=['if'], data=[]] |
def formatMessage(self, record: logging.LogRecord) -> str:
    """Convert the already filled log record to a string."""
    if record.levelno <= logging.DEBUG:
        # Debug records use the stock format rendered in dim gray.
        fmt = "\033[0;37m" + logging.BASIC_FORMAT + "\033[0m"
    else:
        level_color = "0"
        text_color = "0"
        if record.levelno <= logging.INFO:
            level_color = "1;36"
            # Highlight "positive" info messages in green.
            if self.GREEN_RE.search(record.message.lower()):
                text_color = "1;32"
        elif record.levelno <= logging.WARNING:
            level_color = "1;33"
        elif record.levelno <= logging.CRITICAL:
            level_color = "1;31"
        fmt = ("\033[" + level_color
               + "m%(levelname)s\033[0m:%(rthread)s:%(name)s:\033["
               + text_color + "m%(message)s\033[0m")
    record.rthread = reduce_thread_id(record.thread)
    return (_fest + fmt) % record.__dict__
constant[Convert the already filled log record to a string.]
variable[level_color] assign[=] constant[0]
variable[text_color] assign[=] constant[0]
variable[fmt] assign[=] constant[]
if compare[name[record].levelno less_or_equal[<=] name[logging].DEBUG] begin[:]
variable[fmt] assign[=] binary_operation[binary_operation[constant[[0;37m] + name[logging].BASIC_FORMAT] + constant[[0m]]
if <ast.UnaryOp object at 0x7da1b0c20490> begin[:]
variable[fmt] assign[=] binary_operation[binary_operation[binary_operation[binary_operation[constant[[] + name[level_color]] + constant[m%(levelname)s[0m:%(rthread)s:%(name)s:[]] + name[text_color]] + constant[m%(message)s[0m]]
variable[fmt] assign[=] binary_operation[name[_fest] + name[fmt]]
name[record].rthread assign[=] call[name[reduce_thread_id], parameter[name[record].thread]]
return[binary_operation[name[fmt] <ast.Mod object at 0x7da2590d6920> name[record].__dict__]] | keyword[def] identifier[formatMessage] ( identifier[self] , identifier[record] : identifier[logging] . identifier[LogRecord] )-> identifier[str] :
literal[string]
identifier[level_color] = literal[string]
identifier[text_color] = literal[string]
identifier[fmt] = literal[string]
keyword[if] identifier[record] . identifier[levelno] <= identifier[logging] . identifier[DEBUG] :
identifier[fmt] = literal[string] + identifier[logging] . identifier[BASIC_FORMAT] + literal[string]
keyword[elif] identifier[record] . identifier[levelno] <= identifier[logging] . identifier[INFO] :
identifier[level_color] = literal[string]
identifier[lmsg] = identifier[record] . identifier[message] . identifier[lower] ()
keyword[if] identifier[self] . identifier[GREEN_RE] . identifier[search] ( identifier[lmsg] ):
identifier[text_color] = literal[string]
keyword[elif] identifier[record] . identifier[levelno] <= identifier[logging] . identifier[WARNING] :
identifier[level_color] = literal[string]
keyword[elif] identifier[record] . identifier[levelno] <= identifier[logging] . identifier[CRITICAL] :
identifier[level_color] = literal[string]
keyword[if] keyword[not] identifier[fmt] :
identifier[fmt] = literal[string] + identifier[level_color] + literal[string] + identifier[text_color] + literal[string]
identifier[fmt] = identifier[_fest] + identifier[fmt]
identifier[record] . identifier[rthread] = identifier[reduce_thread_id] ( identifier[record] . identifier[thread] )
keyword[return] identifier[fmt] % identifier[record] . identifier[__dict__] | def formatMessage(self, record: logging.LogRecord) -> str:
"""Convert the already filled log record to a string."""
level_color = '0'
text_color = '0'
fmt = ''
if record.levelno <= logging.DEBUG:
fmt = '\x1b[0;37m' + logging.BASIC_FORMAT + '\x1b[0m' # depends on [control=['if'], data=[]]
elif record.levelno <= logging.INFO:
level_color = '1;36'
lmsg = record.message.lower()
if self.GREEN_RE.search(lmsg):
text_color = '1;32' # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
elif record.levelno <= logging.WARNING:
level_color = '1;33' # depends on [control=['if'], data=[]]
elif record.levelno <= logging.CRITICAL:
level_color = '1;31' # depends on [control=['if'], data=[]]
if not fmt:
fmt = '\x1b[' + level_color + 'm%(levelname)s\x1b[0m:%(rthread)s:%(name)s:\x1b[' + text_color + 'm%(message)s\x1b[0m' # depends on [control=['if'], data=[]]
fmt = _fest + fmt
record.rthread = reduce_thread_id(record.thread)
return fmt % record.__dict__ |
def set_args(args):
    """Set computed command arguments.

    Parameters
    ----------
    args : `argparse.Namespace`
        Command arguments.

    Raises
    ------
    ValueError
        If output file is stdout and progress bars are enabled.
    """
    # Refuse to combine stdout output with progress bars.  getattr() keeps the
    # check limited to missing attributes, unlike the previous try/except
    # AttributeError, which would also have swallowed AttributeErrors raised
    # by unrelated code.
    if getattr(args, 'output', None) is sys.stdout and getattr(args, 'progress', False):
        raise ValueError('args.output is stdout and args.progress')

    # Determine the output format from the first available of
    # args.type / args.state / args.output.
    if hasattr(args, 'type'):
        fname = '.' + args.type
    elif hasattr(args, 'state'):
        fname = args.state
    elif hasattr(args, 'output'):
        fname = args.output
    else:
        fname = '.json'
    if fname is None or fname.endswith('.json') or fname.endswith('.json.bz2'):
        args.type = JSON
    else:
        args.type = SQLITE

    # Load generator settings from the optional settings file, if provided.
    settings = {}
    settings_file = getattr(args, 'settings', None)
    if settings_file is not None:
        try:
            settings = json.load(settings_file)
        finally:
            # Close the file even if it does not contain valid JSON.
            settings_file.close()
    args.settings = settings
constant[Set computed command arguments.
Parameters
----------
args : `argparse.Namespace`
Command arguments.
base : `iterable` of `type`
Generator mixins.
Raises
------
ValueError
If output file is stdout and progress bars are enabled.
]
<ast.Try object at 0x7da20cabc400>
<ast.Try object at 0x7da20cabd7e0>
if <ast.BoolOp object at 0x7da20cabf820> begin[:]
name[args].type assign[=] name[JSON]
variable[settings] assign[=] dictionary[[], []]
<ast.Try object at 0x7da18bcc9270>
name[args].settings assign[=] name[settings] | keyword[def] identifier[set_args] ( identifier[args] ):
literal[string]
keyword[try] :
keyword[if] identifier[args] . identifier[output] keyword[is] identifier[sys] . identifier[stdout] keyword[and] identifier[args] . identifier[progress] :
keyword[raise] identifier[ValueError] ( literal[string] )
keyword[except] identifier[AttributeError] :
keyword[pass]
keyword[try] :
identifier[fname] = literal[string] + identifier[args] . identifier[type]
keyword[except] identifier[AttributeError] :
keyword[try] :
identifier[fname] = identifier[args] . identifier[state]
keyword[except] identifier[AttributeError] :
keyword[try] :
identifier[fname] = identifier[args] . identifier[output]
keyword[except] identifier[AttributeError] :
identifier[fname] = literal[string]
keyword[if] identifier[fname] keyword[is] keyword[None] keyword[or] identifier[fname] . identifier[endswith] ( literal[string] ) keyword[or] identifier[fname] . identifier[endswith] ( literal[string] ):
identifier[args] . identifier[type] = identifier[JSON]
keyword[else] :
identifier[args] . identifier[type] = identifier[SQLITE]
identifier[settings] ={}
keyword[try] :
keyword[if] identifier[args] . identifier[settings] keyword[is] keyword[not] keyword[None] :
identifier[settings] = identifier[json] . identifier[load] ( identifier[args] . identifier[settings] )
identifier[args] . identifier[settings] . identifier[close] ()
keyword[except] identifier[AttributeError] :
keyword[pass]
identifier[args] . identifier[settings] = identifier[settings] | def set_args(args):
"""Set computed command arguments.
Parameters
----------
args : `argparse.Namespace`
Command arguments.
base : `iterable` of `type`
Generator mixins.
Raises
------
ValueError
If output file is stdout and progress bars are enabled.
"""
try:
if args.output is sys.stdout and args.progress:
raise ValueError('args.output is stdout and args.progress') # depends on [control=['if'], data=[]] # depends on [control=['try'], data=[]]
except AttributeError:
pass # depends on [control=['except'], data=[]]
try:
fname = '.' + args.type # depends on [control=['try'], data=[]]
except AttributeError:
try:
fname = args.state # depends on [control=['try'], data=[]]
except AttributeError:
try:
fname = args.output # depends on [control=['try'], data=[]]
except AttributeError:
fname = '.json' # depends on [control=['except'], data=[]] # depends on [control=['except'], data=[]] # depends on [control=['except'], data=[]]
if fname is None or fname.endswith('.json') or fname.endswith('.json.bz2'):
args.type = JSON # depends on [control=['if'], data=[]]
else:
args.type = SQLITE
settings = {}
try:
if args.settings is not None:
settings = json.load(args.settings)
args.settings.close() # depends on [control=['if'], data=[]] # depends on [control=['try'], data=[]]
except AttributeError:
pass # depends on [control=['except'], data=[]]
args.settings = settings |
def _remove_prioritization(in_file, data, out_dir=None):
    """Remove tumor-only prioritization and return non-filtered calls.

    Writes a ``<base>-germline.vcf`` next to the input (or into `out_dir`
    when given), adding a `Somatic` FILTER definition to the header and
    passing each record through `_update_prioritization_filters` before
    writing it.  Skips the work if an up-to-date plain or bgzipped output
    already exists.
    """
    out_file = "%s-germline.vcf" % utils.splitext_plus(in_file)[0]
    if out_dir:
        out_file = os.path.join(out_dir, os.path.basename(out_file))
    if not utils.file_uptodate(out_file, in_file) and not utils.file_uptodate(out_file + ".gz", in_file):
        with file_transaction(data, out_file) as tx_out_file:
            reader = cyvcf2.VCF(str(in_file))
            reader.add_filter_to_header({'ID': 'Somatic', 'Description': 'Variant called as Somatic'})
            with contextlib.closing(cyvcf2.Writer(tx_out_file, reader)) as writer:
                for rec in reader:
                    writer.write_record(_update_prioritization_filters(rec))
    return out_file
constant[Remove tumor-only prioritization and return non-filtered calls.
]
variable[out_file] assign[=] binary_operation[constant[%s-germline.vcf] <ast.Mod object at 0x7da2590d6920> call[call[name[utils].splitext_plus, parameter[name[in_file]]]][constant[0]]]
if name[out_dir] begin[:]
variable[out_file] assign[=] call[name[os].path.join, parameter[name[out_dir], call[name[os].path.basename, parameter[name[out_file]]]]]
if <ast.BoolOp object at 0x7da1b18d1420> begin[:]
with call[name[file_transaction], parameter[name[data], name[out_file]]] begin[:]
variable[reader] assign[=] call[name[cyvcf2].VCF, parameter[call[name[str], parameter[name[in_file]]]]]
call[name[reader].add_filter_to_header, parameter[dictionary[[<ast.Constant object at 0x7da1b1713400>, <ast.Constant object at 0x7da1b17104c0>], [<ast.Constant object at 0x7da1b17136d0>, <ast.Constant object at 0x7da1b1710e50>]]]]
with call[name[contextlib].closing, parameter[call[name[cyvcf2].Writer, parameter[name[tx_out_file], name[reader]]]]] begin[:]
for taget[name[rec]] in starred[name[reader]] begin[:]
variable[rec] assign[=] call[name[_update_prioritization_filters], parameter[name[rec]]]
call[name[writer].write_record, parameter[name[rec]]]
return[name[out_file]] | keyword[def] identifier[_remove_prioritization] ( identifier[in_file] , identifier[data] , identifier[out_dir] = keyword[None] ):
literal[string]
identifier[out_file] = literal[string] % identifier[utils] . identifier[splitext_plus] ( identifier[in_file] )[ literal[int] ]
keyword[if] identifier[out_dir] :
identifier[out_file] = identifier[os] . identifier[path] . identifier[join] ( identifier[out_dir] , identifier[os] . identifier[path] . identifier[basename] ( identifier[out_file] ))
keyword[if] keyword[not] identifier[utils] . identifier[file_uptodate] ( identifier[out_file] , identifier[in_file] ) keyword[and] keyword[not] identifier[utils] . identifier[file_uptodate] ( identifier[out_file] + literal[string] , identifier[in_file] ):
keyword[with] identifier[file_transaction] ( identifier[data] , identifier[out_file] ) keyword[as] identifier[tx_out_file] :
identifier[reader] = identifier[cyvcf2] . identifier[VCF] ( identifier[str] ( identifier[in_file] ))
identifier[reader] . identifier[add_filter_to_header] ({ literal[string] : literal[string] , literal[string] : literal[string] })
keyword[with] identifier[contextlib] . identifier[closing] ( identifier[cyvcf2] . identifier[Writer] ( identifier[tx_out_file] , identifier[reader] )) keyword[as] identifier[writer] :
keyword[for] identifier[rec] keyword[in] identifier[reader] :
identifier[rec] = identifier[_update_prioritization_filters] ( identifier[rec] )
identifier[writer] . identifier[write_record] ( identifier[rec] )
keyword[return] identifier[out_file] | def _remove_prioritization(in_file, data, out_dir=None):
"""Remove tumor-only prioritization and return non-filtered calls.
"""
out_file = '%s-germline.vcf' % utils.splitext_plus(in_file)[0]
if out_dir:
out_file = os.path.join(out_dir, os.path.basename(out_file)) # depends on [control=['if'], data=[]]
if not utils.file_uptodate(out_file, in_file) and (not utils.file_uptodate(out_file + '.gz', in_file)):
with file_transaction(data, out_file) as tx_out_file:
reader = cyvcf2.VCF(str(in_file))
reader.add_filter_to_header({'ID': 'Somatic', 'Description': 'Variant called as Somatic'})
# with open(tx_out_file, "w") as out_handle:
# out_handle.write(reader.raw_header)
with contextlib.closing(cyvcf2.Writer(tx_out_file, reader)) as writer:
for rec in reader:
rec = _update_prioritization_filters(rec)
# out_handle.write(str(rec))
writer.write_record(rec) # depends on [control=['for'], data=['rec']] # depends on [control=['with'], data=['writer']] # depends on [control=['with'], data=['tx_out_file']] # depends on [control=['if'], data=[]]
return out_file |
def _get_consecutive_and_overlapping_fronts(onset_fronts, offset_fronts, onset_front_id, offset_front_id):
"""
Gets an onset_front and an offset_front such that they both occupy at least some of the same
frequency channels, then returns the portion of each that overlaps with the other.
"""
# Get the onset front of interest
onset_front = _get_front_idxs_from_id(onset_fronts, onset_front_id)
# Get the offset front of interest
offset_front = _get_front_idxs_from_id(offset_fronts, offset_front_id)
# Keep trying consecutive portions of this onset front until we find a consecutive portion
# that overlaps with part of the offset front
consecutive_portions_of_onset_front = [c for c in _get_consecutive_portions_of_front(onset_front)]
for consecutive_portion_of_onset_front in consecutive_portions_of_onset_front:
# Only get the segment of this front that overlaps in frequencies with the onset front of interest
onset_front_frequency_indexes = [f for f, _ in consecutive_portion_of_onset_front]
overlapping_offset_front = [(f, s) for f, s in offset_front if f in onset_front_frequency_indexes]
# Only get as much of this overlapping portion as is actually consecutive
for consecutive_portion_of_offset_front in _get_consecutive_portions_of_front(overlapping_offset_front):
if consecutive_portion_of_offset_front:
# Just return the first one we get - if we get any it means we found a portion of overlap
return consecutive_portion_of_onset_front, consecutive_portion_of_offset_front
return [], [] | def function[_get_consecutive_and_overlapping_fronts, parameter[onset_fronts, offset_fronts, onset_front_id, offset_front_id]]:
constant[
Gets an onset_front and an offset_front such that they both occupy at least some of the same
frequency channels, then returns the portion of each that overlaps with the other.
]
variable[onset_front] assign[=] call[name[_get_front_idxs_from_id], parameter[name[onset_fronts], name[onset_front_id]]]
variable[offset_front] assign[=] call[name[_get_front_idxs_from_id], parameter[name[offset_fronts], name[offset_front_id]]]
variable[consecutive_portions_of_onset_front] assign[=] <ast.ListComp object at 0x7da1b03952d0>
for taget[name[consecutive_portion_of_onset_front]] in starred[name[consecutive_portions_of_onset_front]] begin[:]
variable[onset_front_frequency_indexes] assign[=] <ast.ListComp object at 0x7da1b03941f0>
variable[overlapping_offset_front] assign[=] <ast.ListComp object at 0x7da1b0395f30>
for taget[name[consecutive_portion_of_offset_front]] in starred[call[name[_get_consecutive_portions_of_front], parameter[name[overlapping_offset_front]]]] begin[:]
if name[consecutive_portion_of_offset_front] begin[:]
return[tuple[[<ast.Name object at 0x7da1b0394070>, <ast.Name object at 0x7da1b0395db0>]]]
return[tuple[[<ast.List object at 0x7da1b0394910>, <ast.List object at 0x7da1b0394dc0>]]] | keyword[def] identifier[_get_consecutive_and_overlapping_fronts] ( identifier[onset_fronts] , identifier[offset_fronts] , identifier[onset_front_id] , identifier[offset_front_id] ):
literal[string]
identifier[onset_front] = identifier[_get_front_idxs_from_id] ( identifier[onset_fronts] , identifier[onset_front_id] )
identifier[offset_front] = identifier[_get_front_idxs_from_id] ( identifier[offset_fronts] , identifier[offset_front_id] )
identifier[consecutive_portions_of_onset_front] =[ identifier[c] keyword[for] identifier[c] keyword[in] identifier[_get_consecutive_portions_of_front] ( identifier[onset_front] )]
keyword[for] identifier[consecutive_portion_of_onset_front] keyword[in] identifier[consecutive_portions_of_onset_front] :
identifier[onset_front_frequency_indexes] =[ identifier[f] keyword[for] identifier[f] , identifier[_] keyword[in] identifier[consecutive_portion_of_onset_front] ]
identifier[overlapping_offset_front] =[( identifier[f] , identifier[s] ) keyword[for] identifier[f] , identifier[s] keyword[in] identifier[offset_front] keyword[if] identifier[f] keyword[in] identifier[onset_front_frequency_indexes] ]
keyword[for] identifier[consecutive_portion_of_offset_front] keyword[in] identifier[_get_consecutive_portions_of_front] ( identifier[overlapping_offset_front] ):
keyword[if] identifier[consecutive_portion_of_offset_front] :
keyword[return] identifier[consecutive_portion_of_onset_front] , identifier[consecutive_portion_of_offset_front]
keyword[return] [],[] | def _get_consecutive_and_overlapping_fronts(onset_fronts, offset_fronts, onset_front_id, offset_front_id):
"""
Gets an onset_front and an offset_front such that they both occupy at least some of the same
frequency channels, then returns the portion of each that overlaps with the other.
"""
# Get the onset front of interest
onset_front = _get_front_idxs_from_id(onset_fronts, onset_front_id)
# Get the offset front of interest
offset_front = _get_front_idxs_from_id(offset_fronts, offset_front_id)
# Keep trying consecutive portions of this onset front until we find a consecutive portion
# that overlaps with part of the offset front
consecutive_portions_of_onset_front = [c for c in _get_consecutive_portions_of_front(onset_front)]
for consecutive_portion_of_onset_front in consecutive_portions_of_onset_front:
# Only get the segment of this front that overlaps in frequencies with the onset front of interest
onset_front_frequency_indexes = [f for (f, _) in consecutive_portion_of_onset_front]
overlapping_offset_front = [(f, s) for (f, s) in offset_front if f in onset_front_frequency_indexes]
# Only get as much of this overlapping portion as is actually consecutive
for consecutive_portion_of_offset_front in _get_consecutive_portions_of_front(overlapping_offset_front):
if consecutive_portion_of_offset_front:
# Just return the first one we get - if we get any it means we found a portion of overlap
return (consecutive_portion_of_onset_front, consecutive_portion_of_offset_front) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['consecutive_portion_of_offset_front']] # depends on [control=['for'], data=['consecutive_portion_of_onset_front']]
return ([], []) |
def joinpath(cls, first, *others):
"""
Join first to zero or more :class:`Path` components,
adding a separator character (:samp:`{first}.module.sep`)
if needed. Returns a new instance of
:samp:`{first}._next_class`.
.. seealso:: :func:`os.path.join`
"""
if not isinstance(first, cls):
first = cls(first)
return first._next_class(first.module.join(first, *others)) | def function[joinpath, parameter[cls, first]]:
constant[
Join first to zero or more :class:`Path` components,
adding a separator character (:samp:`{first}.module.sep`)
if needed. Returns a new instance of
:samp:`{first}._next_class`.
.. seealso:: :func:`os.path.join`
]
if <ast.UnaryOp object at 0x7da18f09c9a0> begin[:]
variable[first] assign[=] call[name[cls], parameter[name[first]]]
return[call[name[first]._next_class, parameter[call[name[first].module.join, parameter[name[first], <ast.Starred object at 0x7da1b08e4340>]]]]] | keyword[def] identifier[joinpath] ( identifier[cls] , identifier[first] ,* identifier[others] ):
literal[string]
keyword[if] keyword[not] identifier[isinstance] ( identifier[first] , identifier[cls] ):
identifier[first] = identifier[cls] ( identifier[first] )
keyword[return] identifier[first] . identifier[_next_class] ( identifier[first] . identifier[module] . identifier[join] ( identifier[first] ,* identifier[others] )) | def joinpath(cls, first, *others):
"""
Join first to zero or more :class:`Path` components,
adding a separator character (:samp:`{first}.module.sep`)
if needed. Returns a new instance of
:samp:`{first}._next_class`.
.. seealso:: :func:`os.path.join`
"""
if not isinstance(first, cls):
first = cls(first) # depends on [control=['if'], data=[]]
return first._next_class(first.module.join(first, *others)) |
def remove(self, filename):
"""
Remove file from device
"""
output = self.shell('rm', filename)
# any output means rm failed.
return False if output else True | def function[remove, parameter[self, filename]]:
constant[
Remove file from device
]
variable[output] assign[=] call[name[self].shell, parameter[constant[rm], name[filename]]]
return[<ast.IfExp object at 0x7da2045645b0>] | keyword[def] identifier[remove] ( identifier[self] , identifier[filename] ):
literal[string]
identifier[output] = identifier[self] . identifier[shell] ( literal[string] , identifier[filename] )
keyword[return] keyword[False] keyword[if] identifier[output] keyword[else] keyword[True] | def remove(self, filename):
"""
Remove file from device
"""
output = self.shell('rm', filename)
# any output means rm failed.
return False if output else True |
def drop(self, columns):
"""Drop 1 or more columns. Any column which does not exist in the DataFrame is skipped, i.e. not removed,
without raising an exception.
Unlike Pandas' drop, this is currently restricted to dropping columns.
Parameters
----------
columns : str or list of str
Column name or list of column names to drop.
Returns
-------
DataFrame
A new DataFrame without these columns.
"""
if isinstance(columns, str):
new_data = OrderedDict()
if columns not in self._gather_column_names():
raise KeyError('Key {} not found'.format(columns))
for column_name in self:
if column_name != columns:
new_data[column_name] = self._data[column_name]
return DataFrame(new_data, self.index)
elif isinstance(columns, list):
check_inner_types(columns, str)
df = self
for column in columns:
df = df.drop(column)
return df
else:
raise TypeError('Expected columns as a str or a list of str') | def function[drop, parameter[self, columns]]:
constant[Drop 1 or more columns. Any column which does not exist in the DataFrame is skipped, i.e. not removed,
without raising an exception.
Unlike Pandas' drop, this is currently restricted to dropping columns.
Parameters
----------
columns : str or list of str
Column name or list of column names to drop.
Returns
-------
DataFrame
A new DataFrame without these columns.
]
if call[name[isinstance], parameter[name[columns], name[str]]] begin[:]
variable[new_data] assign[=] call[name[OrderedDict], parameter[]]
if compare[name[columns] <ast.NotIn object at 0x7da2590d7190> call[name[self]._gather_column_names, parameter[]]] begin[:]
<ast.Raise object at 0x7da207f00880>
for taget[name[column_name]] in starred[name[self]] begin[:]
if compare[name[column_name] not_equal[!=] name[columns]] begin[:]
call[name[new_data]][name[column_name]] assign[=] call[name[self]._data][name[column_name]]
return[call[name[DataFrame], parameter[name[new_data], name[self].index]]] | keyword[def] identifier[drop] ( identifier[self] , identifier[columns] ):
literal[string]
keyword[if] identifier[isinstance] ( identifier[columns] , identifier[str] ):
identifier[new_data] = identifier[OrderedDict] ()
keyword[if] identifier[columns] keyword[not] keyword[in] identifier[self] . identifier[_gather_column_names] ():
keyword[raise] identifier[KeyError] ( literal[string] . identifier[format] ( identifier[columns] ))
keyword[for] identifier[column_name] keyword[in] identifier[self] :
keyword[if] identifier[column_name] != identifier[columns] :
identifier[new_data] [ identifier[column_name] ]= identifier[self] . identifier[_data] [ identifier[column_name] ]
keyword[return] identifier[DataFrame] ( identifier[new_data] , identifier[self] . identifier[index] )
keyword[elif] identifier[isinstance] ( identifier[columns] , identifier[list] ):
identifier[check_inner_types] ( identifier[columns] , identifier[str] )
identifier[df] = identifier[self]
keyword[for] identifier[column] keyword[in] identifier[columns] :
identifier[df] = identifier[df] . identifier[drop] ( identifier[column] )
keyword[return] identifier[df]
keyword[else] :
keyword[raise] identifier[TypeError] ( literal[string] ) | def drop(self, columns):
"""Drop 1 or more columns. Any column which does not exist in the DataFrame is skipped, i.e. not removed,
without raising an exception.
Unlike Pandas' drop, this is currently restricted to dropping columns.
Parameters
----------
columns : str or list of str
Column name or list of column names to drop.
Returns
-------
DataFrame
A new DataFrame without these columns.
"""
if isinstance(columns, str):
new_data = OrderedDict()
if columns not in self._gather_column_names():
raise KeyError('Key {} not found'.format(columns)) # depends on [control=['if'], data=['columns']]
for column_name in self:
if column_name != columns:
new_data[column_name] = self._data[column_name] # depends on [control=['if'], data=['column_name']] # depends on [control=['for'], data=['column_name']]
return DataFrame(new_data, self.index) # depends on [control=['if'], data=[]]
elif isinstance(columns, list):
check_inner_types(columns, str)
df = self
for column in columns:
df = df.drop(column) # depends on [control=['for'], data=['column']]
return df # depends on [control=['if'], data=[]]
else:
raise TypeError('Expected columns as a str or a list of str') |
def _proxy(self):
"""
Generate an instance context for the instance, the context is capable of
performing various actions. All instance actions are proxied to the context
:returns: TriggerContext for this TriggerInstance
:rtype: twilio.rest.api.v2010.account.usage.trigger.TriggerContext
"""
if self._context is None:
self._context = TriggerContext(
self._version,
account_sid=self._solution['account_sid'],
sid=self._solution['sid'],
)
return self._context | def function[_proxy, parameter[self]]:
constant[
Generate an instance context for the instance, the context is capable of
performing various actions. All instance actions are proxied to the context
:returns: TriggerContext for this TriggerInstance
:rtype: twilio.rest.api.v2010.account.usage.trigger.TriggerContext
]
if compare[name[self]._context is constant[None]] begin[:]
name[self]._context assign[=] call[name[TriggerContext], parameter[name[self]._version]]
return[name[self]._context] | keyword[def] identifier[_proxy] ( identifier[self] ):
literal[string]
keyword[if] identifier[self] . identifier[_context] keyword[is] keyword[None] :
identifier[self] . identifier[_context] = identifier[TriggerContext] (
identifier[self] . identifier[_version] ,
identifier[account_sid] = identifier[self] . identifier[_solution] [ literal[string] ],
identifier[sid] = identifier[self] . identifier[_solution] [ literal[string] ],
)
keyword[return] identifier[self] . identifier[_context] | def _proxy(self):
"""
Generate an instance context for the instance, the context is capable of
performing various actions. All instance actions are proxied to the context
:returns: TriggerContext for this TriggerInstance
:rtype: twilio.rest.api.v2010.account.usage.trigger.TriggerContext
"""
if self._context is None:
self._context = TriggerContext(self._version, account_sid=self._solution['account_sid'], sid=self._solution['sid']) # depends on [control=['if'], data=[]]
return self._context |
def import_obj(file_name, **kwargs):
""" Reads .obj files and generates faces.
Keyword Arguments:
* ``callback``: reference to the function that processes the faces for customized output
The structure of the callback function is shown below:
.. code-block:: python
def my_callback_function(face_list):
# "face_list" will be a list of elements.Face class instances
# The function should return a list
return list()
:param file_name: file name
:type file_name: str
:return: output of the callback function (default is a list of faces)
:rtype: list
"""
def default_callback(face_list):
return face_list
# Keyword arguments
callback_func = kwargs.get('callback', default_callback)
# Read and process the input file
content = exch.read_file(file_name)
content_arr = content.split("\n")
# Initialize variables
on_face = False
vertices = []
triangles = []
faces = []
# Index values
vert_idx = 1
tri_idx = 1
face_idx = 1
# Loop through the data
for carr in content_arr:
carr = carr.strip()
data = carr.split(" ")
data = [d.strip() for d in data]
if data[0] == "v":
if on_face:
on_face = not on_face
face = elements.Face(*triangles, id=face_idx)
faces.append(face)
face_idx += 1
vertices[:] = []
triangles[:] = []
vert_idx = 1
tri_idx = 1
vertex = elements.Vertex(*data[1:], id=vert_idx)
vertices.append(vertex)
vert_idx += 1
if data[0] == "f":
on_face = True
triangle = elements.Triangle(*[vertices[int(fidx) - 1] for fidx in data[1:]], id=tri_idx)
triangles.append(triangle)
tri_idx += 1
# Process he final face
if triangles:
face = elements.Face(*triangles, id=face_idx)
faces.append(face)
# Return the output of the callback function
return callback_func(faces) | def function[import_obj, parameter[file_name]]:
constant[ Reads .obj files and generates faces.
Keyword Arguments:
* ``callback``: reference to the function that processes the faces for customized output
The structure of the callback function is shown below:
.. code-block:: python
def my_callback_function(face_list):
# "face_list" will be a list of elements.Face class instances
# The function should return a list
return list()
:param file_name: file name
:type file_name: str
:return: output of the callback function (default is a list of faces)
:rtype: list
]
def function[default_callback, parameter[face_list]]:
return[name[face_list]]
variable[callback_func] assign[=] call[name[kwargs].get, parameter[constant[callback], name[default_callback]]]
variable[content] assign[=] call[name[exch].read_file, parameter[name[file_name]]]
variable[content_arr] assign[=] call[name[content].split, parameter[constant[
]]]
variable[on_face] assign[=] constant[False]
variable[vertices] assign[=] list[[]]
variable[triangles] assign[=] list[[]]
variable[faces] assign[=] list[[]]
variable[vert_idx] assign[=] constant[1]
variable[tri_idx] assign[=] constant[1]
variable[face_idx] assign[=] constant[1]
for taget[name[carr]] in starred[name[content_arr]] begin[:]
variable[carr] assign[=] call[name[carr].strip, parameter[]]
variable[data] assign[=] call[name[carr].split, parameter[constant[ ]]]
variable[data] assign[=] <ast.ListComp object at 0x7da1b16a5c30>
if compare[call[name[data]][constant[0]] equal[==] constant[v]] begin[:]
if name[on_face] begin[:]
variable[on_face] assign[=] <ast.UnaryOp object at 0x7da1b1646bf0>
variable[face] assign[=] call[name[elements].Face, parameter[<ast.Starred object at 0x7da1b16462c0>]]
call[name[faces].append, parameter[name[face]]]
<ast.AugAssign object at 0x7da1b1646650>
call[name[vertices]][<ast.Slice object at 0x7da1b1647f10>] assign[=] list[[]]
call[name[triangles]][<ast.Slice object at 0x7da1b1645570>] assign[=] list[[]]
variable[vert_idx] assign[=] constant[1]
variable[tri_idx] assign[=] constant[1]
variable[vertex] assign[=] call[name[elements].Vertex, parameter[<ast.Starred object at 0x7da1b16a7d60>]]
call[name[vertices].append, parameter[name[vertex]]]
<ast.AugAssign object at 0x7da1b16a5c60>
if compare[call[name[data]][constant[0]] equal[==] constant[f]] begin[:]
variable[on_face] assign[=] constant[True]
variable[triangle] assign[=] call[name[elements].Triangle, parameter[<ast.Starred object at 0x7da1b16a7c40>]]
call[name[triangles].append, parameter[name[triangle]]]
<ast.AugAssign object at 0x7da1b16a7160>
if name[triangles] begin[:]
variable[face] assign[=] call[name[elements].Face, parameter[<ast.Starred object at 0x7da1b16a6ce0>]]
call[name[faces].append, parameter[name[face]]]
return[call[name[callback_func], parameter[name[faces]]]] | keyword[def] identifier[import_obj] ( identifier[file_name] ,** identifier[kwargs] ):
literal[string]
keyword[def] identifier[default_callback] ( identifier[face_list] ):
keyword[return] identifier[face_list]
identifier[callback_func] = identifier[kwargs] . identifier[get] ( literal[string] , identifier[default_callback] )
identifier[content] = identifier[exch] . identifier[read_file] ( identifier[file_name] )
identifier[content_arr] = identifier[content] . identifier[split] ( literal[string] )
identifier[on_face] = keyword[False]
identifier[vertices] =[]
identifier[triangles] =[]
identifier[faces] =[]
identifier[vert_idx] = literal[int]
identifier[tri_idx] = literal[int]
identifier[face_idx] = literal[int]
keyword[for] identifier[carr] keyword[in] identifier[content_arr] :
identifier[carr] = identifier[carr] . identifier[strip] ()
identifier[data] = identifier[carr] . identifier[split] ( literal[string] )
identifier[data] =[ identifier[d] . identifier[strip] () keyword[for] identifier[d] keyword[in] identifier[data] ]
keyword[if] identifier[data] [ literal[int] ]== literal[string] :
keyword[if] identifier[on_face] :
identifier[on_face] = keyword[not] identifier[on_face]
identifier[face] = identifier[elements] . identifier[Face] (* identifier[triangles] , identifier[id] = identifier[face_idx] )
identifier[faces] . identifier[append] ( identifier[face] )
identifier[face_idx] += literal[int]
identifier[vertices] [:]=[]
identifier[triangles] [:]=[]
identifier[vert_idx] = literal[int]
identifier[tri_idx] = literal[int]
identifier[vertex] = identifier[elements] . identifier[Vertex] (* identifier[data] [ literal[int] :], identifier[id] = identifier[vert_idx] )
identifier[vertices] . identifier[append] ( identifier[vertex] )
identifier[vert_idx] += literal[int]
keyword[if] identifier[data] [ literal[int] ]== literal[string] :
identifier[on_face] = keyword[True]
identifier[triangle] = identifier[elements] . identifier[Triangle] (*[ identifier[vertices] [ identifier[int] ( identifier[fidx] )- literal[int] ] keyword[for] identifier[fidx] keyword[in] identifier[data] [ literal[int] :]], identifier[id] = identifier[tri_idx] )
identifier[triangles] . identifier[append] ( identifier[triangle] )
identifier[tri_idx] += literal[int]
keyword[if] identifier[triangles] :
identifier[face] = identifier[elements] . identifier[Face] (* identifier[triangles] , identifier[id] = identifier[face_idx] )
identifier[faces] . identifier[append] ( identifier[face] )
keyword[return] identifier[callback_func] ( identifier[faces] ) | def import_obj(file_name, **kwargs):
""" Reads .obj files and generates faces.
Keyword Arguments:
* ``callback``: reference to the function that processes the faces for customized output
The structure of the callback function is shown below:
.. code-block:: python
def my_callback_function(face_list):
# "face_list" will be a list of elements.Face class instances
# The function should return a list
return list()
:param file_name: file name
:type file_name: str
:return: output of the callback function (default is a list of faces)
:rtype: list
"""
def default_callback(face_list):
return face_list
# Keyword arguments
callback_func = kwargs.get('callback', default_callback)
# Read and process the input file
content = exch.read_file(file_name)
content_arr = content.split('\n')
# Initialize variables
on_face = False
vertices = []
triangles = []
faces = []
# Index values
vert_idx = 1
tri_idx = 1
face_idx = 1
# Loop through the data
for carr in content_arr:
carr = carr.strip()
data = carr.split(' ')
data = [d.strip() for d in data]
if data[0] == 'v':
if on_face:
on_face = not on_face
face = elements.Face(*triangles, id=face_idx)
faces.append(face)
face_idx += 1
vertices[:] = []
triangles[:] = []
vert_idx = 1
tri_idx = 1 # depends on [control=['if'], data=[]]
vertex = elements.Vertex(*data[1:], id=vert_idx)
vertices.append(vertex)
vert_idx += 1 # depends on [control=['if'], data=[]]
if data[0] == 'f':
on_face = True
triangle = elements.Triangle(*[vertices[int(fidx) - 1] for fidx in data[1:]], id=tri_idx)
triangles.append(triangle)
tri_idx += 1 # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['carr']]
# Process he final face
if triangles:
face = elements.Face(*triangles, id=face_idx)
faces.append(face) # depends on [control=['if'], data=[]]
# Return the output of the callback function
return callback_func(faces) |
def bin_priority(op,left,right):
"I don't know how to handle order of operations in the LR grammar, so here it is"
# note: recursion limits protect this from infinite looping. I'm serious. (i.e. it will crash rather than hanging)
if isinstance(left,BinX) and left.op < op: return bin_priority(left.op,left.left,bin_priority(op,left.right,right))
elif isinstance(left,UnX) and left.op < op: return un_priority(left.op,BinX(op,left.val,right)) # note: obviously, no need to do this when right is a UnX
elif isinstance(right,BinX) and right.op < op: return bin_priority(right.op,bin_priority(op,left,right.left),right.right)
else: return BinX(op,left,right) | def function[bin_priority, parameter[op, left, right]]:
constant[I don't know how to handle order of operations in the LR grammar, so here it is]
if <ast.BoolOp object at 0x7da1b14b31f0> begin[:]
return[call[name[bin_priority], parameter[name[left].op, name[left].left, call[name[bin_priority], parameter[name[op], name[left].right, name[right]]]]]] | keyword[def] identifier[bin_priority] ( identifier[op] , identifier[left] , identifier[right] ):
literal[string]
keyword[if] identifier[isinstance] ( identifier[left] , identifier[BinX] ) keyword[and] identifier[left] . identifier[op] < identifier[op] : keyword[return] identifier[bin_priority] ( identifier[left] . identifier[op] , identifier[left] . identifier[left] , identifier[bin_priority] ( identifier[op] , identifier[left] . identifier[right] , identifier[right] ))
keyword[elif] identifier[isinstance] ( identifier[left] , identifier[UnX] ) keyword[and] identifier[left] . identifier[op] < identifier[op] : keyword[return] identifier[un_priority] ( identifier[left] . identifier[op] , identifier[BinX] ( identifier[op] , identifier[left] . identifier[val] , identifier[right] ))
keyword[elif] identifier[isinstance] ( identifier[right] , identifier[BinX] ) keyword[and] identifier[right] . identifier[op] < identifier[op] : keyword[return] identifier[bin_priority] ( identifier[right] . identifier[op] , identifier[bin_priority] ( identifier[op] , identifier[left] , identifier[right] . identifier[left] ), identifier[right] . identifier[right] )
keyword[else] : keyword[return] identifier[BinX] ( identifier[op] , identifier[left] , identifier[right] ) | def bin_priority(op, left, right):
"""I don't know how to handle order of operations in the LR grammar, so here it is"""
# note: recursion limits protect this from infinite looping. I'm serious. (i.e. it will crash rather than hanging)
if isinstance(left, BinX) and left.op < op:
return bin_priority(left.op, left.left, bin_priority(op, left.right, right)) # depends on [control=['if'], data=[]]
elif isinstance(left, UnX) and left.op < op:
return un_priority(left.op, BinX(op, left.val, right)) # note: obviously, no need to do this when right is a UnX # depends on [control=['if'], data=[]]
elif isinstance(right, BinX) and right.op < op:
return bin_priority(right.op, bin_priority(op, left, right.left), right.right) # depends on [control=['if'], data=[]]
else:
return BinX(op, left, right) |
def action(cls, view):
"""Register admin view action."""
name = "%s:%s" % (cls.name, view.__name__)
path = "%s/%s" % (cls.url, view.__name__)
cls.actions.append((view.__doc__, path))
return cls.register(path, name=name)(view) | def function[action, parameter[cls, view]]:
constant[Register admin view action.]
variable[name] assign[=] binary_operation[constant[%s:%s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Attribute object at 0x7da20e954400>, <ast.Attribute object at 0x7da20e957940>]]]
variable[path] assign[=] binary_operation[constant[%s/%s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Attribute object at 0x7da20e955a80>, <ast.Attribute object at 0x7da20e9552a0>]]]
call[name[cls].actions.append, parameter[tuple[[<ast.Attribute object at 0x7da20e956920>, <ast.Name object at 0x7da20e956ec0>]]]]
return[call[call[name[cls].register, parameter[name[path]]], parameter[name[view]]]] | keyword[def] identifier[action] ( identifier[cls] , identifier[view] ):
literal[string]
identifier[name] = literal[string] %( identifier[cls] . identifier[name] , identifier[view] . identifier[__name__] )
identifier[path] = literal[string] %( identifier[cls] . identifier[url] , identifier[view] . identifier[__name__] )
identifier[cls] . identifier[actions] . identifier[append] (( identifier[view] . identifier[__doc__] , identifier[path] ))
keyword[return] identifier[cls] . identifier[register] ( identifier[path] , identifier[name] = identifier[name] )( identifier[view] ) | def action(cls, view):
"""Register admin view action."""
name = '%s:%s' % (cls.name, view.__name__)
path = '%s/%s' % (cls.url, view.__name__)
cls.actions.append((view.__doc__, path))
return cls.register(path, name=name)(view) |
def set_backlight(self, backlight):
"""Enable or disable the backlight. If PWM is not enabled (default), a
non-zero backlight value will turn on the backlight and a zero value will
turn it off. If PWM is enabled, backlight can be any value from 0.0 to
1.0, with 1.0 being full intensity backlight.
"""
if self._backlight is not None:
if self._pwm_enabled:
self._pwm.set_duty_cycle(self._backlight, self._pwm_duty_cycle(backlight))
else:
self._gpio.output(self._backlight, self._blpol if backlight else not self._blpol) | def function[set_backlight, parameter[self, backlight]]:
constant[Enable or disable the backlight. If PWM is not enabled (default), a
non-zero backlight value will turn on the backlight and a zero value will
turn it off. If PWM is enabled, backlight can be any value from 0.0 to
1.0, with 1.0 being full intensity backlight.
]
if compare[name[self]._backlight is_not constant[None]] begin[:]
if name[self]._pwm_enabled begin[:]
call[name[self]._pwm.set_duty_cycle, parameter[name[self]._backlight, call[name[self]._pwm_duty_cycle, parameter[name[backlight]]]]] | keyword[def] identifier[set_backlight] ( identifier[self] , identifier[backlight] ):
literal[string]
keyword[if] identifier[self] . identifier[_backlight] keyword[is] keyword[not] keyword[None] :
keyword[if] identifier[self] . identifier[_pwm_enabled] :
identifier[self] . identifier[_pwm] . identifier[set_duty_cycle] ( identifier[self] . identifier[_backlight] , identifier[self] . identifier[_pwm_duty_cycle] ( identifier[backlight] ))
keyword[else] :
identifier[self] . identifier[_gpio] . identifier[output] ( identifier[self] . identifier[_backlight] , identifier[self] . identifier[_blpol] keyword[if] identifier[backlight] keyword[else] keyword[not] identifier[self] . identifier[_blpol] ) | def set_backlight(self, backlight):
"""Enable or disable the backlight. If PWM is not enabled (default), a
non-zero backlight value will turn on the backlight and a zero value will
turn it off. If PWM is enabled, backlight can be any value from 0.0 to
1.0, with 1.0 being full intensity backlight.
"""
if self._backlight is not None:
if self._pwm_enabled:
self._pwm.set_duty_cycle(self._backlight, self._pwm_duty_cycle(backlight)) # depends on [control=['if'], data=[]]
else:
self._gpio.output(self._backlight, self._blpol if backlight else not self._blpol) # depends on [control=['if'], data=[]] |
def build_request(self):
"""Build a prepared request object."""
clientheaders = {}
if (self.parent_url and
self.parent_url.lower().startswith(HTTP_SCHEMAS)):
clientheaders["Referer"] = self.parent_url
kwargs = dict(
method='GET',
url=self.url,
headers=clientheaders,
)
if self.auth:
kwargs['auth'] = self.auth
log.debug(LOG_CHECK, "Prepare request with %s", kwargs)
request = requests.Request(**kwargs)
return self.session.prepare_request(request) | def function[build_request, parameter[self]]:
constant[Build a prepared request object.]
variable[clientheaders] assign[=] dictionary[[], []]
if <ast.BoolOp object at 0x7da18f723be0> begin[:]
call[name[clientheaders]][constant[Referer]] assign[=] name[self].parent_url
variable[kwargs] assign[=] call[name[dict], parameter[]]
if name[self].auth begin[:]
call[name[kwargs]][constant[auth]] assign[=] name[self].auth
call[name[log].debug, parameter[name[LOG_CHECK], constant[Prepare request with %s], name[kwargs]]]
variable[request] assign[=] call[name[requests].Request, parameter[]]
return[call[name[self].session.prepare_request, parameter[name[request]]]] | keyword[def] identifier[build_request] ( identifier[self] ):
literal[string]
identifier[clientheaders] ={}
keyword[if] ( identifier[self] . identifier[parent_url] keyword[and]
identifier[self] . identifier[parent_url] . identifier[lower] (). identifier[startswith] ( identifier[HTTP_SCHEMAS] )):
identifier[clientheaders] [ literal[string] ]= identifier[self] . identifier[parent_url]
identifier[kwargs] = identifier[dict] (
identifier[method] = literal[string] ,
identifier[url] = identifier[self] . identifier[url] ,
identifier[headers] = identifier[clientheaders] ,
)
keyword[if] identifier[self] . identifier[auth] :
identifier[kwargs] [ literal[string] ]= identifier[self] . identifier[auth]
identifier[log] . identifier[debug] ( identifier[LOG_CHECK] , literal[string] , identifier[kwargs] )
identifier[request] = identifier[requests] . identifier[Request] (** identifier[kwargs] )
keyword[return] identifier[self] . identifier[session] . identifier[prepare_request] ( identifier[request] ) | def build_request(self):
"""Build a prepared request object."""
clientheaders = {}
if self.parent_url and self.parent_url.lower().startswith(HTTP_SCHEMAS):
clientheaders['Referer'] = self.parent_url # depends on [control=['if'], data=[]]
kwargs = dict(method='GET', url=self.url, headers=clientheaders)
if self.auth:
kwargs['auth'] = self.auth # depends on [control=['if'], data=[]]
log.debug(LOG_CHECK, 'Prepare request with %s', kwargs)
request = requests.Request(**kwargs)
return self.session.prepare_request(request) |
def from_json(cls, data, json_schema_class=None):
    """Deserialize *data*, defaulting to this class's own JSON schema.

    Overrides the inherited ``from_json`` so that calls routed through this
    class fall back to ``cls.json_schema`` whenever no explicit
    ``json_schema_class`` is supplied by the caller.

    :param data: the JSON payload to deserialize
    :param json_schema_class: optional schema class; when ``None`` the
        class-level default schema instance is used instead
    :returns: whatever the parent class's ``from_json`` returns
    """
    if json_schema_class is None:
        schema = cls.json_schema
    else:
        schema = json_schema_class()
    return super(InfinityVertex, cls).from_json(
        data=data, json_schema_class=schema.__class__)
constant[ This class overwrites the from_json method, thus making sure that if `from_json` is called from this class instance, it will provide its JSON schema as a default one]
variable[schema] assign[=] <ast.IfExp object at 0x7da1b0b72680>
return[call[call[name[super], parameter[name[InfinityVertex], name[cls]]].from_json, parameter[]]] | keyword[def] identifier[from_json] ( identifier[cls] , identifier[data] , identifier[json_schema_class] = keyword[None] ):
literal[string]
identifier[schema] = identifier[cls] . identifier[json_schema] keyword[if] identifier[json_schema_class] keyword[is] keyword[None] keyword[else] identifier[json_schema_class] ()
keyword[return] identifier[super] ( identifier[InfinityVertex] , identifier[cls] ). identifier[from_json] ( identifier[data] = identifier[data] , identifier[json_schema_class] = identifier[schema] . identifier[__class__] ) | def from_json(cls, data, json_schema_class=None):
""" This class overwrites the from_json method, thus making sure that if `from_json` is called from this class instance, it will provide its JSON schema as a default one"""
schema = cls.json_schema if json_schema_class is None else json_schema_class()
return super(InfinityVertex, cls).from_json(data=data, json_schema_class=schema.__class__) |
def swipe_left(self, width: int = 1080, length: int = 1920) -> None:
    """Perform a leftward swipe gesture across the screen.

    The swipe starts at 80% of the screen width and ends at 20%, staying
    at the vertical midpoint throughout.

    :param width: screen width in pixels (default 1080)
    :param length: screen height in pixels (default 1920)
    """
    x_start = 0.8 * width
    x_end = 0.2 * width
    y_mid = 0.5 * length
    self.swipe(x_start, y_mid, x_end, y_mid)
constant[Swipe left.]
call[name[self].swipe, parameter[binary_operation[constant[0.8] * name[width]], binary_operation[constant[0.5] * name[length]], binary_operation[constant[0.2] * name[width]], binary_operation[constant[0.5] * name[length]]]] | keyword[def] identifier[swipe_left] ( identifier[self] , identifier[width] : identifier[int] = literal[int] , identifier[length] : identifier[int] = literal[int] )-> keyword[None] :
literal[string]
identifier[self] . identifier[swipe] ( literal[int] * identifier[width] , literal[int] * identifier[length] , literal[int] * identifier[width] , literal[int] * identifier[length] ) | def swipe_left(self, width: int=1080, length: int=1920) -> None:
"""Swipe left."""
self.swipe(0.8 * width, 0.5 * length, 0.2 * width, 0.5 * length) |
def PlotTransit(compact = False, ldplot = True, plottitle = "",
                xlim = None, binned = True, **kwargs):
  '''
  Plots a light curve described by `kwargs`.

  Produces a two-panel figure: the top panel shows the transit light curve,
  the bottom panel shows the sky-projected orbit. Optional insets display
  the limb-darkening profile, a top view of the orbit, and (in non-compact
  mode) a table of the transit parameters.

  :param bool compact: Display the compact version of the plot? Default `False`
  :param bool ldplot: Display the limb darkening inset? Default `True`
  :param str plottitle: The title of the plot. Default `""`
  :param float xlim: The half-width of the orbit plot in stellar radii. Default is to \
auto adjust this
  :param bool binned: Bin the light curve model to the exposure time? Default `True`
  :param kwargs: Any keyword arguments to be passed to :py:func:`pysyzygy.transit.Transit`

  :returns fig: The :py:mod:`matplotlib` figure object

  '''
  # Plotting
  fig = pl.figure(figsize = (12,8))
  fig.subplots_adjust(hspace=0.3)
  ax1, ax2 = pl.subplot(211), pl.subplot(212)
  if not compact:
    # Leave room on the right for the insets and the parameter table
    fig.subplots_adjust(right = 0.7)
  # t0 is handled here (as a plotting offset), not passed to Transit
  t0 = kwargs.pop('t0', 0.)
  trn = Transit(**kwargs)
  try:
    trn.Compute()
    notransit = False
  except Exception as e:
    # A non-transiting configuration is tolerated; the orbit panels are
    # still drawn.  Any other error is re-raised.
    if str(e) == "Object does not transit the star.":
      notransit = True
    else: raise Exception(e)
  time = trn.arrays.time + t0
  if not notransit:
    if binned:
      trn.Bin()
      flux = trn.arrays.bflx
    else:
      flux = trn.arrays.flux
    time = np.concatenate(([-1.e5], time, [1.e5]))                              # Add baseline on each side
    flux = np.concatenate(([1.], flux, [1.]))
    ax1.plot(time, flux, '-', color='DarkBlue')
    rng = np.max(flux) - np.min(flux)
    if rng > 0:
      ax1.set_ylim(np.min(flux) - 0.1*rng, np.max(flux) + 0.1*rng)
      # Locate the first/last points where the flux dips below unity and
      # zoom the x-axis onto that transit window
      left = np.argmax(flux < (1. - 1.e-8))
      right = np.argmax(flux[left:] > (1. - 1.e-8)) + left
      rng = time[right] - time[left]
      ax1.set_xlim(time[left] - rng, time[right] + rng)
  ax1.set_xlabel('Time (Days)', fontweight='bold')
  ax1.set_ylabel('Normalized Flux', fontweight='bold')
  # Adjust these for full-orbit plotting
  maxpts = kwargs.get('maxpts', 10000); kwargs.update({'maxpts': maxpts})
  per = kwargs.get('per', 10.); kwargs.update({'per': per})
  kwargs.update({'fullorbit': True})
  kwargs.update({'exppts': 30})
  kwargs.update({'exptime': 50 * per / maxpts})
  trn = Transit(**kwargs)
  try:
    trn.Compute()
  except Exception as e:
    if str(e) == "Object does not transit the star.":
      pass
    else: raise Exception(e)
  # Sky-projected motion
  x = trn.arrays.x
  y = trn.arrays.y
  z = trn.arrays.z
  inc = (np.arccos(trn.transit.bcirc/trn.transit.aRs)*180./np.pi)               # Orbital inclination
  # Mask the star
  # (NaN out orbit samples that fall behind the stellar disk, z > 0)
  for j in range(len(x)):
    if (x[j]**2 + y[j]**2) < 1. and (z[j] > 0):
      x[j] = np.nan
      y[j] = np.nan
  # The star
  # Build the limb-darkened disk as concentric circles shaded by I(r)
  r = np.linspace(0,1,100)
  Ir = I(r,trn.limbdark)/I(0,trn.limbdark)
  for ri,Iri in zip(r[::-1],Ir[::-1]):
    star = pl.Circle((0, 0), ri, color=str(0.95*Iri), alpha=1.)
    ax2.add_artist(star)
  # Inset: Limb darkening
  if ldplot:
    if compact:
      inset1 = pl.axes([0.145, 0.32, .09, .1])
    else:
      inset1 = fig.add_axes([0.725,0.3,0.2,0.15])
    inset1.plot(r,Ir,'k-')
    pl.setp(inset1, xlim=(-0.1,1.1), ylim=(-0.1,1.1), xticks=[0,1], yticks=[0,1])
    for tick in inset1.xaxis.get_major_ticks() + inset1.yaxis.get_major_ticks():
      tick.label.set_fontsize(8)
    inset1.set_ylabel(r'I/I$_0$', fontsize=8, labelpad=-8)
    inset1.set_xlabel(r'r/R$_\star$', fontsize=8, labelpad=-8)
    inset1.set_title('Limb Darkening', fontweight='bold', fontsize=9)
  # Inset: Top view of orbit
  if compact:
    inset2 = pl.axes([0.135, 0.115, .1, .1])
  else:
    inset2 = fig.add_axes([0.725,0.1,0.2,0.15])
  pl.setp(inset2, xticks=[], yticks=[])
  trn.transit.bcirc = trn.transit.aRs                                           # This ensures we are face-on
  try:
    trn.Compute()
  except Exception as e:
    if str(e) == "Object does not transit the star.":
      pass
    else: raise Exception(e)
  xp = trn.arrays.x
  yp = trn.arrays.y
  inset2.plot(xp, yp, '-', color='DarkBlue', alpha=0.5)
  # Draw some invisible dots at the corners to set the window size
  xmin, xmax, ymin, ymax = np.nanmin(xp), np.nanmax(xp), np.nanmin(yp), np.nanmax(yp)
  xrng = xmax - xmin
  yrng = ymax - ymin
  xmin -= 0.1*xrng; xmax += 0.1*xrng;
  ymin -= 0.1*yrng; ymax += 0.1*yrng;
  inset2.scatter([xmin,xmin,xmax,xmax], [ymin,ymax,ymin,ymax], alpha = 0.)
  # Plot the star
  for ri,Iri in zip(r[::-10],Ir[::-10]):
    star = pl.Circle((0, 0), ri, color=str(0.95*Iri), alpha=1.)
    inset2.add_artist(star)
  # Plot the planet
  # Walk past samples with y > 0 (far side) so the planet is drawn at the
  # near-side crossing; rejected samples are NaN'd out of xp in place.
  ycenter = yp[np.where(np.abs(xp) == np.nanmin(np.abs(xp)))][0]
  while ycenter > 0:
    xp[np.where(np.abs(xp) == np.nanmin(np.abs(xp)))] = np.nan
    ycenter = yp[np.where(np.abs(xp) == np.nanmin(np.abs(xp)))][0]
  planet = pl.Circle((0, ycenter), trn.transit.RpRs, color='DarkBlue', alpha=1.)
  inset2.add_artist(planet)
  inset2.set_title('Top View', fontweight='bold', fontsize=9)
  inset2.set_aspect('equal','datalim')
  # The orbit itself
  # (suppress NaN comparison warnings from the masked samples; thin the
  # line for long periods)
  with np.errstate(invalid='ignore'):
    ax2.plot(x, y, '-', color='DarkBlue', lw = 1. if per < 30. else
             max(1. - (per - 30.) / 100., 0.3) )
  # The planet
  # Same near-side selection as above, on the main-panel arrays
  with np.errstate(invalid = 'ignore'):
    ycenter = y[np.where(np.abs(x) == np.nanmin(np.abs(x)))][0]
    while ycenter > 0:
      x[np.where(np.abs(x) == np.nanmin(np.abs(x)))] = np.nan
      ycenter = y[np.where(np.abs(x) == np.nanmin(np.abs(x)))][0]
  planet = pl.Circle((0, ycenter), trn.transit.RpRs, color='DarkBlue', alpha=1.)
  ax2.add_artist(planet)
  # Force aspect
  if xlim is None:
    xlim = 1.1 * max(np.nanmax(x), np.nanmax(-x))
  ax2.set_ylim(-xlim/3.2,xlim/3.2)
  ax2.set_xlim(-xlim,xlim)
  ax2.set_xlabel(r'X (R$_\star$)', fontweight='bold')
  ax2.set_ylabel(r'Y (R$_\star$)', fontweight='bold')
  ax1.set_title(plottitle, fontsize=12)
  if not compact:
    rect = 0.725,0.55,0.2,0.35
    ax3 = fig.add_axes(rect)
    ax3.xaxis.set_visible(False)
    ax3.yaxis.set_visible(False)
    # Table of parameters
    ltable = [ r'$P:$',
               r'$e:$',
               r'$i:$',
               r'$\omega:$',
               r'$\rho_\star:$',
               r'$M_p:$',
               r'$R_p:$',
               r'$q_1:$',
               r'$q_2:$']
    rtable = [ r'$%.4f\ \mathrm{days}$' % trn.transit.per,
               r'$%.5f$' % trn.transit.ecc,
               r'$%.4f^\circ$' % inc,
               r'$%.3f^\circ$' % (trn.transit.w*180./np.pi),
               r'$%.5f\ \mathrm{g/cm^3}$' % trn.transit.rhos,
               r'$%.5f\ M_\star$' % trn.transit.MpMs,
               r'$%.5f\ R_\star$' % trn.transit.RpRs,
               r'$%.5f$' % trn.limbdark.q1,
               r'$%.5f$' % trn.limbdark.q2]
    yt = 0.875
    for l,r in zip(ltable, rtable):
      ax3.annotate(l, xy=(0.25, yt), xycoords="axes fraction", ha='right', fontsize=16)
      ax3.annotate(r, xy=(0.35, yt), xycoords="axes fraction", fontsize=16)
      yt -= 0.1
  return fig
constant[
Plots a light curve described by `kwargs`
:param bool compact: Display the compact version of the plot? Default `False`
:param bool ldplot: Displat the limb darkening inset? Default `True`
:param str plottitle: The title of the plot. Default `""`
:param float xlim: The half-width of the orbit plot in stellar radii. Default is to auto adjust this
:param bool binned: Bin the light curve model to the exposure time? Default `True`
:param kwargs: Any keyword arguments to be passed to :py:func:`pysyzygy.transit.Transit`
:returns fig: The :py:mod:`matplotlib` figure object
]
variable[fig] assign[=] call[name[pl].figure, parameter[]]
call[name[fig].subplots_adjust, parameter[]]
<ast.Tuple object at 0x7da1b0145390> assign[=] tuple[[<ast.Call object at 0x7da1b01452d0>, <ast.Call object at 0x7da1b0145210>]]
if <ast.UnaryOp object at 0x7da1b0145120> begin[:]
call[name[fig].subplots_adjust, parameter[]]
variable[t0] assign[=] call[name[kwargs].pop, parameter[constant[t0], constant[0.0]]]
variable[trn] assign[=] call[name[Transit], parameter[]]
<ast.Try object at 0x7da1b0144d00>
variable[time] assign[=] binary_operation[name[trn].arrays.time + name[t0]]
if <ast.UnaryOp object at 0x7da1b0144700> begin[:]
if name[binned] begin[:]
call[name[trn].Bin, parameter[]]
variable[flux] assign[=] name[trn].arrays.bflx
variable[time] assign[=] call[name[np].concatenate, parameter[tuple[[<ast.List object at 0x7da1b0144220>, <ast.Name object at 0x7da1b0144190>, <ast.List object at 0x7da1b0144160>]]]]
variable[flux] assign[=] call[name[np].concatenate, parameter[tuple[[<ast.List object at 0x7da1b011bfa0>, <ast.Name object at 0x7da1b011bf40>, <ast.List object at 0x7da1b011bf10>]]]]
call[name[ax1].plot, parameter[name[time], name[flux], constant[-]]]
variable[rng] assign[=] binary_operation[call[name[np].max, parameter[name[flux]]] - call[name[np].min, parameter[name[flux]]]]
if compare[name[rng] greater[>] constant[0]] begin[:]
call[name[ax1].set_ylim, parameter[binary_operation[call[name[np].min, parameter[name[flux]]] - binary_operation[constant[0.1] * name[rng]]], binary_operation[call[name[np].max, parameter[name[flux]]] + binary_operation[constant[0.1] * name[rng]]]]]
variable[left] assign[=] call[name[np].argmax, parameter[compare[name[flux] less[<] binary_operation[constant[1.0] - constant[1e-08]]]]]
variable[right] assign[=] binary_operation[call[name[np].argmax, parameter[compare[call[name[flux]][<ast.Slice object at 0x7da1b011b250>] greater[>] binary_operation[constant[1.0] - constant[1e-08]]]]] + name[left]]
variable[rng] assign[=] binary_operation[call[name[time]][name[right]] - call[name[time]][name[left]]]
call[name[ax1].set_xlim, parameter[binary_operation[call[name[time]][name[left]] - name[rng]], binary_operation[call[name[time]][name[right]] + name[rng]]]]
call[name[ax1].set_xlabel, parameter[constant[Time (Days)]]]
call[name[ax1].set_ylabel, parameter[constant[Normalized Flux]]]
variable[maxpts] assign[=] call[name[kwargs].get, parameter[constant[maxpts], constant[10000]]]
call[name[kwargs].update, parameter[dictionary[[<ast.Constant object at 0x7da1b011a770>], [<ast.Name object at 0x7da1b011a740>]]]]
variable[per] assign[=] call[name[kwargs].get, parameter[constant[per], constant[10.0]]]
call[name[kwargs].update, parameter[dictionary[[<ast.Constant object at 0x7da1b011a4a0>], [<ast.Name object at 0x7da1b011a470>]]]]
call[name[kwargs].update, parameter[dictionary[[<ast.Constant object at 0x7da1b011a320>], [<ast.Constant object at 0x7da1b011a2f0>]]]]
call[name[kwargs].update, parameter[dictionary[[<ast.Constant object at 0x7da1b011a1a0>], [<ast.Constant object at 0x7da1b011a170>]]]]
call[name[kwargs].update, parameter[dictionary[[<ast.Constant object at 0x7da1b011a020>], [<ast.BinOp object at 0x7da1b0119ff0>]]]]
variable[trn] assign[=] call[name[Transit], parameter[]]
<ast.Try object at 0x7da1b0119db0>
variable[x] assign[=] name[trn].arrays.x
variable[y] assign[=] name[trn].arrays.y
variable[z] assign[=] name[trn].arrays.z
variable[inc] assign[=] binary_operation[binary_operation[call[name[np].arccos, parameter[binary_operation[name[trn].transit.bcirc / name[trn].transit.aRs]]] * constant[180.0]] / name[np].pi]
for taget[name[j]] in starred[call[name[range], parameter[call[name[len], parameter[name[x]]]]]] begin[:]
if <ast.BoolOp object at 0x7da1b01ab3d0> begin[:]
call[name[x]][name[j]] assign[=] name[np].nan
call[name[y]][name[j]] assign[=] name[np].nan
variable[r] assign[=] call[name[np].linspace, parameter[constant[0], constant[1], constant[100]]]
variable[Ir] assign[=] binary_operation[call[name[I], parameter[name[r], name[trn].limbdark]] / call[name[I], parameter[constant[0], name[trn].limbdark]]]
for taget[tuple[[<ast.Name object at 0x7da1b01aa9b0>, <ast.Name object at 0x7da1b01aa980>]]] in starred[call[name[zip], parameter[call[name[r]][<ast.Slice object at 0x7da1b01aa890>], call[name[Ir]][<ast.Slice object at 0x7da1b01aa7a0>]]]] begin[:]
variable[star] assign[=] call[name[pl].Circle, parameter[tuple[[<ast.Constant object at 0x7da1b01aa5f0>, <ast.Constant object at 0x7da1b01aa5c0>]], name[ri]]]
call[name[ax2].add_artist, parameter[name[star]]]
if name[ldplot] begin[:]
if name[compact] begin[:]
variable[inset1] assign[=] call[name[pl].axes, parameter[list[[<ast.Constant object at 0x7da1b01aa0e0>, <ast.Constant object at 0x7da1b01aa0b0>, <ast.Constant object at 0x7da1b01aa080>, <ast.Constant object at 0x7da1b01aa050>]]]]
call[name[inset1].plot, parameter[name[r], name[Ir], constant[k-]]]
call[name[pl].setp, parameter[name[inset1]]]
for taget[name[tick]] in starred[binary_operation[call[name[inset1].xaxis.get_major_ticks, parameter[]] + call[name[inset1].yaxis.get_major_ticks, parameter[]]]] begin[:]
call[name[tick].label.set_fontsize, parameter[constant[8]]]
call[name[inset1].set_ylabel, parameter[constant[I/I$_0$]]]
call[name[inset1].set_xlabel, parameter[constant[r/R$_\star$]]]
call[name[inset1].set_title, parameter[constant[Limb Darkening]]]
if name[compact] begin[:]
variable[inset2] assign[=] call[name[pl].axes, parameter[list[[<ast.Constant object at 0x7da1b01a8d60>, <ast.Constant object at 0x7da1b01a8d30>, <ast.Constant object at 0x7da1b01a8d00>, <ast.Constant object at 0x7da1b01a8cd0>]]]]
call[name[pl].setp, parameter[name[inset2]]]
name[trn].transit.bcirc assign[=] name[trn].transit.aRs
<ast.Try object at 0x7da1b01a8790>
variable[xp] assign[=] name[trn].arrays.x
variable[yp] assign[=] name[trn].arrays.y
call[name[inset2].plot, parameter[name[xp], name[yp], constant[-]]]
<ast.Tuple object at 0x7da1b01d6470> assign[=] tuple[[<ast.Call object at 0x7da1b01d5e70>, <ast.Call object at 0x7da1b01d6020>, <ast.Call object at 0x7da1b01d4040>, <ast.Call object at 0x7da1b01d4160>]]
variable[xrng] assign[=] binary_operation[name[xmax] - name[xmin]]
variable[yrng] assign[=] binary_operation[name[ymax] - name[ymin]]
<ast.AugAssign object at 0x7da1b01d5f00>
<ast.AugAssign object at 0x7da1b01d4c10>
<ast.AugAssign object at 0x7da1b01d5db0>
<ast.AugAssign object at 0x7da1b01d4c40>
call[name[inset2].scatter, parameter[list[[<ast.Name object at 0x7da1b01d4220>, <ast.Name object at 0x7da1b01d5c60>, <ast.Name object at 0x7da1b01d4250>, <ast.Name object at 0x7da1b01d5c30>]], list[[<ast.Name object at 0x7da1b01d5c00>, <ast.Name object at 0x7da1b01d5ba0>, <ast.Name object at 0x7da1b01d5fc0>, <ast.Name object at 0x7da1b01d5f90>]]]]
for taget[tuple[[<ast.Name object at 0x7da1b01d4a60>, <ast.Name object at 0x7da1b01d4ca0>]]] in starred[call[name[zip], parameter[call[name[r]][<ast.Slice object at 0x7da1b01d4cd0>], call[name[Ir]][<ast.Slice object at 0x7da1b01d5b70>]]]] begin[:]
variable[star] assign[=] call[name[pl].Circle, parameter[tuple[[<ast.Constant object at 0x7da1b01d6830>, <ast.Constant object at 0x7da1b01d6860>]], name[ri]]]
call[name[inset2].add_artist, parameter[name[star]]]
variable[ycenter] assign[=] call[call[name[yp]][call[name[np].where, parameter[compare[call[name[np].abs, parameter[name[xp]]] equal[==] call[name[np].nanmin, parameter[call[name[np].abs, parameter[name[xp]]]]]]]]]][constant[0]]
while compare[name[ycenter] greater[>] constant[0]] begin[:]
call[name[xp]][call[name[np].where, parameter[compare[call[name[np].abs, parameter[name[xp]]] equal[==] call[name[np].nanmin, parameter[call[name[np].abs, parameter[name[xp]]]]]]]]] assign[=] name[np].nan
variable[ycenter] assign[=] call[call[name[yp]][call[name[np].where, parameter[compare[call[name[np].abs, parameter[name[xp]]] equal[==] call[name[np].nanmin, parameter[call[name[np].abs, parameter[name[xp]]]]]]]]]][constant[0]]
variable[planet] assign[=] call[name[pl].Circle, parameter[tuple[[<ast.Constant object at 0x7da1b03b8190>, <ast.Name object at 0x7da1b03b8130>]], name[trn].transit.RpRs]]
call[name[inset2].add_artist, parameter[name[planet]]]
call[name[inset2].set_title, parameter[constant[Top View]]]
call[name[inset2].set_aspect, parameter[constant[equal], constant[datalim]]]
with call[name[np].errstate, parameter[]] begin[:]
call[name[ax2].plot, parameter[name[x], name[y], constant[-]]]
with call[name[np].errstate, parameter[]] begin[:]
variable[ycenter] assign[=] call[call[name[y]][call[name[np].where, parameter[compare[call[name[np].abs, parameter[name[x]]] equal[==] call[name[np].nanmin, parameter[call[name[np].abs, parameter[name[x]]]]]]]]]][constant[0]]
while compare[name[ycenter] greater[>] constant[0]] begin[:]
call[name[x]][call[name[np].where, parameter[compare[call[name[np].abs, parameter[name[x]]] equal[==] call[name[np].nanmin, parameter[call[name[np].abs, parameter[name[x]]]]]]]]] assign[=] name[np].nan
variable[ycenter] assign[=] call[call[name[y]][call[name[np].where, parameter[compare[call[name[np].abs, parameter[name[x]]] equal[==] call[name[np].nanmin, parameter[call[name[np].abs, parameter[name[x]]]]]]]]]][constant[0]]
variable[planet] assign[=] call[name[pl].Circle, parameter[tuple[[<ast.Constant object at 0x7da1b0290100>, <ast.Name object at 0x7da1b0293e80>]], name[trn].transit.RpRs]]
call[name[ax2].add_artist, parameter[name[planet]]]
if compare[name[xlim] is constant[None]] begin[:]
variable[xlim] assign[=] binary_operation[constant[1.1] * call[name[max], parameter[call[name[np].nanmax, parameter[name[x]]], call[name[np].nanmax, parameter[<ast.UnaryOp object at 0x7da1b0292050>]]]]]
call[name[ax2].set_ylim, parameter[binary_operation[<ast.UnaryOp object at 0x7da1b0291060> / constant[3.2]], binary_operation[name[xlim] / constant[3.2]]]]
call[name[ax2].set_xlim, parameter[<ast.UnaryOp object at 0x7da1b0292a40>, name[xlim]]]
call[name[ax2].set_xlabel, parameter[constant[X (R$_\star$)]]]
call[name[ax2].set_ylabel, parameter[constant[Y (R$_\star$)]]]
call[name[ax1].set_title, parameter[name[plottitle]]]
if <ast.UnaryOp object at 0x7da1b0290f10> begin[:]
variable[rect] assign[=] tuple[[<ast.Constant object at 0x7da1b01d4670>, <ast.Constant object at 0x7da1b01d46a0>, <ast.Constant object at 0x7da1b01d77c0>, <ast.Constant object at 0x7da1b01d6ec0>]]
variable[ax3] assign[=] call[name[fig].add_axes, parameter[name[rect]]]
call[name[ax3].xaxis.set_visible, parameter[constant[False]]]
call[name[ax3].yaxis.set_visible, parameter[constant[False]]]
variable[ltable] assign[=] list[[<ast.Constant object at 0x7da1b01d7d00>, <ast.Constant object at 0x7da1b01d7eb0>, <ast.Constant object at 0x7da1b01d7df0>, <ast.Constant object at 0x7da1b01d4eb0>, <ast.Constant object at 0x7da1b01d5e40>, <ast.Constant object at 0x7da1b01d75b0>, <ast.Constant object at 0x7da1b01d6dd0>, <ast.Constant object at 0x7da1b01d7280>, <ast.Constant object at 0x7da1b01d4340>]]
variable[rtable] assign[=] list[[<ast.BinOp object at 0x7da1b01d56c0>, <ast.BinOp object at 0x7da1b01d5090>, <ast.BinOp object at 0x7da1b01d78e0>, <ast.BinOp object at 0x7da1b03713f0>, <ast.BinOp object at 0x7da1b0372b30>, <ast.BinOp object at 0x7da1b0373c40>, <ast.BinOp object at 0x7da1b03729b0>, <ast.BinOp object at 0x7da1b0370be0>, <ast.BinOp object at 0x7da1b0371d80>]]
variable[yt] assign[=] constant[0.875]
for taget[tuple[[<ast.Name object at 0x7da1b0373d60>, <ast.Name object at 0x7da1b0373ca0>]]] in starred[call[name[zip], parameter[name[ltable], name[rtable]]]] begin[:]
call[name[ax3].annotate, parameter[name[l]]]
call[name[ax3].annotate, parameter[name[r]]]
<ast.AugAssign object at 0x7da1b0241a20>
return[name[fig]] | keyword[def] identifier[PlotTransit] ( identifier[compact] = keyword[False] , identifier[ldplot] = keyword[True] , identifier[plottitle] = literal[string] ,
identifier[xlim] = keyword[None] , identifier[binned] = keyword[True] ,** identifier[kwargs] ):
literal[string]
identifier[fig] = identifier[pl] . identifier[figure] ( identifier[figsize] =( literal[int] , literal[int] ))
identifier[fig] . identifier[subplots_adjust] ( identifier[hspace] = literal[int] )
identifier[ax1] , identifier[ax2] = identifier[pl] . identifier[subplot] ( literal[int] ), identifier[pl] . identifier[subplot] ( literal[int] )
keyword[if] keyword[not] identifier[compact] :
identifier[fig] . identifier[subplots_adjust] ( identifier[right] = literal[int] )
identifier[t0] = identifier[kwargs] . identifier[pop] ( literal[string] , literal[int] )
identifier[trn] = identifier[Transit] (** identifier[kwargs] )
keyword[try] :
identifier[trn] . identifier[Compute] ()
identifier[notransit] = keyword[False]
keyword[except] identifier[Exception] keyword[as] identifier[e] :
keyword[if] identifier[str] ( identifier[e] )== literal[string] :
identifier[notransit] = keyword[True]
keyword[else] : keyword[raise] identifier[Exception] ( identifier[e] )
identifier[time] = identifier[trn] . identifier[arrays] . identifier[time] + identifier[t0]
keyword[if] keyword[not] identifier[notransit] :
keyword[if] identifier[binned] :
identifier[trn] . identifier[Bin] ()
identifier[flux] = identifier[trn] . identifier[arrays] . identifier[bflx]
keyword[else] :
identifier[flux] = identifier[trn] . identifier[arrays] . identifier[flux]
identifier[time] = identifier[np] . identifier[concatenate] (([- literal[int] ], identifier[time] ,[ literal[int] ]))
identifier[flux] = identifier[np] . identifier[concatenate] (([ literal[int] ], identifier[flux] ,[ literal[int] ]))
identifier[ax1] . identifier[plot] ( identifier[time] , identifier[flux] , literal[string] , identifier[color] = literal[string] )
identifier[rng] = identifier[np] . identifier[max] ( identifier[flux] )- identifier[np] . identifier[min] ( identifier[flux] )
keyword[if] identifier[rng] > literal[int] :
identifier[ax1] . identifier[set_ylim] ( identifier[np] . identifier[min] ( identifier[flux] )- literal[int] * identifier[rng] , identifier[np] . identifier[max] ( identifier[flux] )+ literal[int] * identifier[rng] )
identifier[left] = identifier[np] . identifier[argmax] ( identifier[flux] <( literal[int] - literal[int] ))
identifier[right] = identifier[np] . identifier[argmax] ( identifier[flux] [ identifier[left] :]>( literal[int] - literal[int] ))+ identifier[left]
identifier[rng] = identifier[time] [ identifier[right] ]- identifier[time] [ identifier[left] ]
identifier[ax1] . identifier[set_xlim] ( identifier[time] [ identifier[left] ]- identifier[rng] , identifier[time] [ identifier[right] ]+ identifier[rng] )
identifier[ax1] . identifier[set_xlabel] ( literal[string] , identifier[fontweight] = literal[string] )
identifier[ax1] . identifier[set_ylabel] ( literal[string] , identifier[fontweight] = literal[string] )
identifier[maxpts] = identifier[kwargs] . identifier[get] ( literal[string] , literal[int] ); identifier[kwargs] . identifier[update] ({ literal[string] : identifier[maxpts] })
identifier[per] = identifier[kwargs] . identifier[get] ( literal[string] , literal[int] ); identifier[kwargs] . identifier[update] ({ literal[string] : identifier[per] })
identifier[kwargs] . identifier[update] ({ literal[string] : keyword[True] })
identifier[kwargs] . identifier[update] ({ literal[string] : literal[int] })
identifier[kwargs] . identifier[update] ({ literal[string] : literal[int] * identifier[per] / identifier[maxpts] })
identifier[trn] = identifier[Transit] (** identifier[kwargs] )
keyword[try] :
identifier[trn] . identifier[Compute] ()
keyword[except] identifier[Exception] keyword[as] identifier[e] :
keyword[if] identifier[str] ( identifier[e] )== literal[string] :
keyword[pass]
keyword[else] : keyword[raise] identifier[Exception] ( identifier[e] )
identifier[x] = identifier[trn] . identifier[arrays] . identifier[x]
identifier[y] = identifier[trn] . identifier[arrays] . identifier[y]
identifier[z] = identifier[trn] . identifier[arrays] . identifier[z]
identifier[inc] =( identifier[np] . identifier[arccos] ( identifier[trn] . identifier[transit] . identifier[bcirc] / identifier[trn] . identifier[transit] . identifier[aRs] )* literal[int] / identifier[np] . identifier[pi] )
keyword[for] identifier[j] keyword[in] identifier[range] ( identifier[len] ( identifier[x] )):
keyword[if] ( identifier[x] [ identifier[j] ]** literal[int] + identifier[y] [ identifier[j] ]** literal[int] )< literal[int] keyword[and] ( identifier[z] [ identifier[j] ]> literal[int] ):
identifier[x] [ identifier[j] ]= identifier[np] . identifier[nan]
identifier[y] [ identifier[j] ]= identifier[np] . identifier[nan]
identifier[r] = identifier[np] . identifier[linspace] ( literal[int] , literal[int] , literal[int] )
identifier[Ir] = identifier[I] ( identifier[r] , identifier[trn] . identifier[limbdark] )/ identifier[I] ( literal[int] , identifier[trn] . identifier[limbdark] )
keyword[for] identifier[ri] , identifier[Iri] keyword[in] identifier[zip] ( identifier[r] [::- literal[int] ], identifier[Ir] [::- literal[int] ]):
identifier[star] = identifier[pl] . identifier[Circle] (( literal[int] , literal[int] ), identifier[ri] , identifier[color] = identifier[str] ( literal[int] * identifier[Iri] ), identifier[alpha] = literal[int] )
identifier[ax2] . identifier[add_artist] ( identifier[star] )
keyword[if] identifier[ldplot] :
keyword[if] identifier[compact] :
identifier[inset1] = identifier[pl] . identifier[axes] ([ literal[int] , literal[int] , literal[int] , literal[int] ])
keyword[else] :
identifier[inset1] = identifier[fig] . identifier[add_axes] ([ literal[int] , literal[int] , literal[int] , literal[int] ])
identifier[inset1] . identifier[plot] ( identifier[r] , identifier[Ir] , literal[string] )
identifier[pl] . identifier[setp] ( identifier[inset1] , identifier[xlim] =(- literal[int] , literal[int] ), identifier[ylim] =(- literal[int] , literal[int] ), identifier[xticks] =[ literal[int] , literal[int] ], identifier[yticks] =[ literal[int] , literal[int] ])
keyword[for] identifier[tick] keyword[in] identifier[inset1] . identifier[xaxis] . identifier[get_major_ticks] ()+ identifier[inset1] . identifier[yaxis] . identifier[get_major_ticks] ():
identifier[tick] . identifier[label] . identifier[set_fontsize] ( literal[int] )
identifier[inset1] . identifier[set_ylabel] ( literal[string] , identifier[fontsize] = literal[int] , identifier[labelpad] =- literal[int] )
identifier[inset1] . identifier[set_xlabel] ( literal[string] , identifier[fontsize] = literal[int] , identifier[labelpad] =- literal[int] )
identifier[inset1] . identifier[set_title] ( literal[string] , identifier[fontweight] = literal[string] , identifier[fontsize] = literal[int] )
keyword[if] identifier[compact] :
identifier[inset2] = identifier[pl] . identifier[axes] ([ literal[int] , literal[int] , literal[int] , literal[int] ])
keyword[else] :
identifier[inset2] = identifier[fig] . identifier[add_axes] ([ literal[int] , literal[int] , literal[int] , literal[int] ])
identifier[pl] . identifier[setp] ( identifier[inset2] , identifier[xticks] =[], identifier[yticks] =[])
identifier[trn] . identifier[transit] . identifier[bcirc] = identifier[trn] . identifier[transit] . identifier[aRs]
keyword[try] :
identifier[trn] . identifier[Compute] ()
keyword[except] identifier[Exception] keyword[as] identifier[e] :
keyword[if] identifier[str] ( identifier[e] )== literal[string] :
keyword[pass]
keyword[else] : keyword[raise] identifier[Exception] ( identifier[e] )
identifier[xp] = identifier[trn] . identifier[arrays] . identifier[x]
identifier[yp] = identifier[trn] . identifier[arrays] . identifier[y]
identifier[inset2] . identifier[plot] ( identifier[xp] , identifier[yp] , literal[string] , identifier[color] = literal[string] , identifier[alpha] = literal[int] )
identifier[xmin] , identifier[xmax] , identifier[ymin] , identifier[ymax] = identifier[np] . identifier[nanmin] ( identifier[xp] ), identifier[np] . identifier[nanmax] ( identifier[xp] ), identifier[np] . identifier[nanmin] ( identifier[yp] ), identifier[np] . identifier[nanmax] ( identifier[yp] )
identifier[xrng] = identifier[xmax] - identifier[xmin]
identifier[yrng] = identifier[ymax] - identifier[ymin]
identifier[xmin] -= literal[int] * identifier[xrng] ; identifier[xmax] += literal[int] * identifier[xrng] ;
identifier[ymin] -= literal[int] * identifier[yrng] ; identifier[ymax] += literal[int] * identifier[yrng] ;
identifier[inset2] . identifier[scatter] ([ identifier[xmin] , identifier[xmin] , identifier[xmax] , identifier[xmax] ],[ identifier[ymin] , identifier[ymax] , identifier[ymin] , identifier[ymax] ], identifier[alpha] = literal[int] )
keyword[for] identifier[ri] , identifier[Iri] keyword[in] identifier[zip] ( identifier[r] [::- literal[int] ], identifier[Ir] [::- literal[int] ]):
identifier[star] = identifier[pl] . identifier[Circle] (( literal[int] , literal[int] ), identifier[ri] , identifier[color] = identifier[str] ( literal[int] * identifier[Iri] ), identifier[alpha] = literal[int] )
identifier[inset2] . identifier[add_artist] ( identifier[star] )
identifier[ycenter] = identifier[yp] [ identifier[np] . identifier[where] ( identifier[np] . identifier[abs] ( identifier[xp] )== identifier[np] . identifier[nanmin] ( identifier[np] . identifier[abs] ( identifier[xp] )))][ literal[int] ]
keyword[while] identifier[ycenter] > literal[int] :
identifier[xp] [ identifier[np] . identifier[where] ( identifier[np] . identifier[abs] ( identifier[xp] )== identifier[np] . identifier[nanmin] ( identifier[np] . identifier[abs] ( identifier[xp] )))]= identifier[np] . identifier[nan]
identifier[ycenter] = identifier[yp] [ identifier[np] . identifier[where] ( identifier[np] . identifier[abs] ( identifier[xp] )== identifier[np] . identifier[nanmin] ( identifier[np] . identifier[abs] ( identifier[xp] )))][ literal[int] ]
identifier[planet] = identifier[pl] . identifier[Circle] (( literal[int] , identifier[ycenter] ), identifier[trn] . identifier[transit] . identifier[RpRs] , identifier[color] = literal[string] , identifier[alpha] = literal[int] )
identifier[inset2] . identifier[add_artist] ( identifier[planet] )
identifier[inset2] . identifier[set_title] ( literal[string] , identifier[fontweight] = literal[string] , identifier[fontsize] = literal[int] )
identifier[inset2] . identifier[set_aspect] ( literal[string] , literal[string] )
keyword[with] identifier[np] . identifier[errstate] ( identifier[invalid] = literal[string] ):
identifier[ax2] . identifier[plot] ( identifier[x] , identifier[y] , literal[string] , identifier[color] = literal[string] , identifier[lw] = literal[int] keyword[if] identifier[per] < literal[int] keyword[else]
identifier[max] ( literal[int] -( identifier[per] - literal[int] )/ literal[int] , literal[int] ))
keyword[with] identifier[np] . identifier[errstate] ( identifier[invalid] = literal[string] ):
identifier[ycenter] = identifier[y] [ identifier[np] . identifier[where] ( identifier[np] . identifier[abs] ( identifier[x] )== identifier[np] . identifier[nanmin] ( identifier[np] . identifier[abs] ( identifier[x] )))][ literal[int] ]
keyword[while] identifier[ycenter] > literal[int] :
identifier[x] [ identifier[np] . identifier[where] ( identifier[np] . identifier[abs] ( identifier[x] )== identifier[np] . identifier[nanmin] ( identifier[np] . identifier[abs] ( identifier[x] )))]= identifier[np] . identifier[nan]
identifier[ycenter] = identifier[y] [ identifier[np] . identifier[where] ( identifier[np] . identifier[abs] ( identifier[x] )== identifier[np] . identifier[nanmin] ( identifier[np] . identifier[abs] ( identifier[x] )))][ literal[int] ]
identifier[planet] = identifier[pl] . identifier[Circle] (( literal[int] , identifier[ycenter] ), identifier[trn] . identifier[transit] . identifier[RpRs] , identifier[color] = literal[string] , identifier[alpha] = literal[int] )
identifier[ax2] . identifier[add_artist] ( identifier[planet] )
keyword[if] identifier[xlim] keyword[is] keyword[None] :
identifier[xlim] = literal[int] * identifier[max] ( identifier[np] . identifier[nanmax] ( identifier[x] ), identifier[np] . identifier[nanmax] (- identifier[x] ))
identifier[ax2] . identifier[set_ylim] (- identifier[xlim] / literal[int] , identifier[xlim] / literal[int] )
identifier[ax2] . identifier[set_xlim] (- identifier[xlim] , identifier[xlim] )
identifier[ax2] . identifier[set_xlabel] ( literal[string] , identifier[fontweight] = literal[string] )
identifier[ax2] . identifier[set_ylabel] ( literal[string] , identifier[fontweight] = literal[string] )
identifier[ax1] . identifier[set_title] ( identifier[plottitle] , identifier[fontsize] = literal[int] )
keyword[if] keyword[not] identifier[compact] :
identifier[rect] = literal[int] , literal[int] , literal[int] , literal[int]
identifier[ax3] = identifier[fig] . identifier[add_axes] ( identifier[rect] )
identifier[ax3] . identifier[xaxis] . identifier[set_visible] ( keyword[False] )
identifier[ax3] . identifier[yaxis] . identifier[set_visible] ( keyword[False] )
identifier[ltable] =[ literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ]
identifier[rtable] =[ literal[string] % identifier[trn] . identifier[transit] . identifier[per] ,
literal[string] % identifier[trn] . identifier[transit] . identifier[ecc] ,
literal[string] % identifier[inc] ,
literal[string] %( identifier[trn] . identifier[transit] . identifier[w] * literal[int] / identifier[np] . identifier[pi] ),
literal[string] % identifier[trn] . identifier[transit] . identifier[rhos] ,
literal[string] % identifier[trn] . identifier[transit] . identifier[MpMs] ,
literal[string] % identifier[trn] . identifier[transit] . identifier[RpRs] ,
literal[string] % identifier[trn] . identifier[limbdark] . identifier[q1] ,
literal[string] % identifier[trn] . identifier[limbdark] . identifier[q2] ]
identifier[yt] = literal[int]
keyword[for] identifier[l] , identifier[r] keyword[in] identifier[zip] ( identifier[ltable] , identifier[rtable] ):
identifier[ax3] . identifier[annotate] ( identifier[l] , identifier[xy] =( literal[int] , identifier[yt] ), identifier[xycoords] = literal[string] , identifier[ha] = literal[string] , identifier[fontsize] = literal[int] )
identifier[ax3] . identifier[annotate] ( identifier[r] , identifier[xy] =( literal[int] , identifier[yt] ), identifier[xycoords] = literal[string] , identifier[fontsize] = literal[int] )
identifier[yt] -= literal[int]
keyword[return] identifier[fig] | def PlotTransit(compact=False, ldplot=True, plottitle='', xlim=None, binned=True, **kwargs):
"""
Plots a light curve described by `kwargs`
:param bool compact: Display the compact version of the plot? Default `False`
:param bool ldplot: Displat the limb darkening inset? Default `True`
:param str plottitle: The title of the plot. Default `""`
:param float xlim: The half-width of the orbit plot in stellar radii. Default is to auto adjust this
:param bool binned: Bin the light curve model to the exposure time? Default `True`
:param kwargs: Any keyword arguments to be passed to :py:func:`pysyzygy.transit.Transit`
:returns fig: The :py:mod:`matplotlib` figure object
"""
# Plotting
fig = pl.figure(figsize=(12, 8))
fig.subplots_adjust(hspace=0.3)
(ax1, ax2) = (pl.subplot(211), pl.subplot(212))
if not compact:
fig.subplots_adjust(right=0.7) # depends on [control=['if'], data=[]]
t0 = kwargs.pop('t0', 0.0)
trn = Transit(**kwargs)
try:
trn.Compute()
notransit = False # depends on [control=['try'], data=[]]
except Exception as e:
if str(e) == 'Object does not transit the star.':
notransit = True # depends on [control=['if'], data=[]]
else:
raise Exception(e) # depends on [control=['except'], data=['e']]
time = trn.arrays.time + t0
if not notransit:
if binned:
trn.Bin()
flux = trn.arrays.bflx # depends on [control=['if'], data=[]]
else:
flux = trn.arrays.flux
time = np.concatenate(([-100000.0], time, [100000.0])) # Add baseline on each side
flux = np.concatenate(([1.0], flux, [1.0]))
ax1.plot(time, flux, '-', color='DarkBlue')
rng = np.max(flux) - np.min(flux)
if rng > 0:
ax1.set_ylim(np.min(flux) - 0.1 * rng, np.max(flux) + 0.1 * rng)
left = np.argmax(flux < 1.0 - 1e-08)
right = np.argmax(flux[left:] > 1.0 - 1e-08) + left
rng = time[right] - time[left]
ax1.set_xlim(time[left] - rng, time[right] + rng) # depends on [control=['if'], data=['rng']] # depends on [control=['if'], data=[]]
ax1.set_xlabel('Time (Days)', fontweight='bold')
ax1.set_ylabel('Normalized Flux', fontweight='bold')
# Adjust these for full-orbit plotting
maxpts = kwargs.get('maxpts', 10000)
kwargs.update({'maxpts': maxpts})
per = kwargs.get('per', 10.0)
kwargs.update({'per': per})
kwargs.update({'fullorbit': True})
kwargs.update({'exppts': 30})
kwargs.update({'exptime': 50 * per / maxpts})
trn = Transit(**kwargs)
try:
trn.Compute() # depends on [control=['try'], data=[]]
except Exception as e:
if str(e) == 'Object does not transit the star.':
pass # depends on [control=['if'], data=[]]
else:
raise Exception(e) # depends on [control=['except'], data=['e']]
# Sky-projected motion
x = trn.arrays.x
y = trn.arrays.y
z = trn.arrays.z
inc = np.arccos(trn.transit.bcirc / trn.transit.aRs) * 180.0 / np.pi # Orbital inclination
# Mask the star
for j in range(len(x)):
if x[j] ** 2 + y[j] ** 2 < 1.0 and z[j] > 0:
x[j] = np.nan
y[j] = np.nan # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['j']]
# The star
r = np.linspace(0, 1, 100)
Ir = I(r, trn.limbdark) / I(0, trn.limbdark)
for (ri, Iri) in zip(r[::-1], Ir[::-1]):
star = pl.Circle((0, 0), ri, color=str(0.95 * Iri), alpha=1.0)
ax2.add_artist(star) # depends on [control=['for'], data=[]]
# Inset: Limb darkening
if ldplot:
if compact:
inset1 = pl.axes([0.145, 0.32, 0.09, 0.1]) # depends on [control=['if'], data=[]]
else:
inset1 = fig.add_axes([0.725, 0.3, 0.2, 0.15])
inset1.plot(r, Ir, 'k-')
pl.setp(inset1, xlim=(-0.1, 1.1), ylim=(-0.1, 1.1), xticks=[0, 1], yticks=[0, 1])
for tick in inset1.xaxis.get_major_ticks() + inset1.yaxis.get_major_ticks():
tick.label.set_fontsize(8) # depends on [control=['for'], data=['tick']]
inset1.set_ylabel('I/I$_0$', fontsize=8, labelpad=-8)
inset1.set_xlabel('r/R$_\\star$', fontsize=8, labelpad=-8)
inset1.set_title('Limb Darkening', fontweight='bold', fontsize=9) # depends on [control=['if'], data=[]]
# Inset: Top view of orbit
if compact:
inset2 = pl.axes([0.135, 0.115, 0.1, 0.1]) # depends on [control=['if'], data=[]]
else:
inset2 = fig.add_axes([0.725, 0.1, 0.2, 0.15])
pl.setp(inset2, xticks=[], yticks=[])
trn.transit.bcirc = trn.transit.aRs # This ensures we are face-on
try:
trn.Compute() # depends on [control=['try'], data=[]]
except Exception as e:
if str(e) == 'Object does not transit the star.':
pass # depends on [control=['if'], data=[]]
else:
raise Exception(e) # depends on [control=['except'], data=['e']]
xp = trn.arrays.x
yp = trn.arrays.y
inset2.plot(xp, yp, '-', color='DarkBlue', alpha=0.5)
# Draw some invisible dots at the corners to set the window size
(xmin, xmax, ymin, ymax) = (np.nanmin(xp), np.nanmax(xp), np.nanmin(yp), np.nanmax(yp))
xrng = xmax - xmin
yrng = ymax - ymin
xmin -= 0.1 * xrng
xmax += 0.1 * xrng
ymin -= 0.1 * yrng
ymax += 0.1 * yrng
inset2.scatter([xmin, xmin, xmax, xmax], [ymin, ymax, ymin, ymax], alpha=0.0)
# Plot the star
for (ri, Iri) in zip(r[::-10], Ir[::-10]):
star = pl.Circle((0, 0), ri, color=str(0.95 * Iri), alpha=1.0)
inset2.add_artist(star) # depends on [control=['for'], data=[]]
# Plot the planet
ycenter = yp[np.where(np.abs(xp) == np.nanmin(np.abs(xp)))][0]
while ycenter > 0:
xp[np.where(np.abs(xp) == np.nanmin(np.abs(xp)))] = np.nan
ycenter = yp[np.where(np.abs(xp) == np.nanmin(np.abs(xp)))][0] # depends on [control=['while'], data=['ycenter']]
planet = pl.Circle((0, ycenter), trn.transit.RpRs, color='DarkBlue', alpha=1.0)
inset2.add_artist(planet)
inset2.set_title('Top View', fontweight='bold', fontsize=9)
inset2.set_aspect('equal', 'datalim')
# The orbit itself
with np.errstate(invalid='ignore'):
ax2.plot(x, y, '-', color='DarkBlue', lw=1.0 if per < 30.0 else max(1.0 - (per - 30.0) / 100.0, 0.3)) # depends on [control=['with'], data=[]]
# The planet
with np.errstate(invalid='ignore'):
ycenter = y[np.where(np.abs(x) == np.nanmin(np.abs(x)))][0] # depends on [control=['with'], data=[]]
while ycenter > 0:
x[np.where(np.abs(x) == np.nanmin(np.abs(x)))] = np.nan
ycenter = y[np.where(np.abs(x) == np.nanmin(np.abs(x)))][0] # depends on [control=['while'], data=['ycenter']]
planet = pl.Circle((0, ycenter), trn.transit.RpRs, color='DarkBlue', alpha=1.0)
ax2.add_artist(planet)
# Force aspect
if xlim is None:
xlim = 1.1 * max(np.nanmax(x), np.nanmax(-x)) # depends on [control=['if'], data=['xlim']]
ax2.set_ylim(-xlim / 3.2, xlim / 3.2)
ax2.set_xlim(-xlim, xlim)
ax2.set_xlabel('X (R$_\\star$)', fontweight='bold')
ax2.set_ylabel('Y (R$_\\star$)', fontweight='bold')
ax1.set_title(plottitle, fontsize=12)
if not compact:
rect = (0.725, 0.55, 0.2, 0.35)
ax3 = fig.add_axes(rect)
ax3.xaxis.set_visible(False)
ax3.yaxis.set_visible(False)
# Table of parameters
ltable = ['$P:$', '$e:$', '$i:$', '$\\omega:$', '$\\rho_\\star:$', '$M_p:$', '$R_p:$', '$q_1:$', '$q_2:$']
rtable = ['$%.4f\\ \\mathrm{days}$' % trn.transit.per, '$%.5f$' % trn.transit.ecc, '$%.4f^\\circ$' % inc, '$%.3f^\\circ$' % (trn.transit.w * 180.0 / np.pi), '$%.5f\\ \\mathrm{g/cm^3}$' % trn.transit.rhos, '$%.5f\\ M_\\star$' % trn.transit.MpMs, '$%.5f\\ R_\\star$' % trn.transit.RpRs, '$%.5f$' % trn.limbdark.q1, '$%.5f$' % trn.limbdark.q2]
yt = 0.875
for (l, r) in zip(ltable, rtable):
ax3.annotate(l, xy=(0.25, yt), xycoords='axes fraction', ha='right', fontsize=16)
ax3.annotate(r, xy=(0.35, yt), xycoords='axes fraction', fontsize=16)
yt -= 0.1 # depends on [control=['for'], data=[]] # depends on [control=['if'], data=[]]
return fig |
def post(self, url, body=None, **kwargs):
    """Send a POST request.

    :param str url: Sub URL for the request. You MUST not specify neither base url nor api version prefix.
    :param dict body: (optional) Dictionary of body attributes that will be wrapped with envelope and json encoded.
    :param dict **kwargs: (optional) Other parameters which are directly passed to :func:`requests.request`.
    :return: Tuple of three elements: (http status code, headers, response - either parsed json or plain text)
    :rtype: tuple
    """
    # Delegate to the generic dispatcher with the HTTP verb fixed to POST.
    response = self.request('post', url, body=body, **kwargs)
    return response
constant[
Send a POST request.
:param str url: Sub URL for the request. You MUST not specify neither base url nor api version prefix.
:param dict body: (optional) Dictionary of body attributes that will be wrapped with envelope and json encoded.
:param dict **kwargs: (optional) Other parameters which are directly passed to :func:`requests.request`.
:return: Tuple of three elements: (http status code, headers, response - either parsed json or plain text)
:rtype: tuple
]
return[call[name[self].request, parameter[constant[post], name[url]]]] | keyword[def] identifier[post] ( identifier[self] , identifier[url] , identifier[body] = keyword[None] ,** identifier[kwargs] ):
literal[string]
keyword[return] identifier[self] . identifier[request] ( literal[string] , identifier[url] , identifier[body] = identifier[body] ,** identifier[kwargs] ) | def post(self, url, body=None, **kwargs):
"""
Send a POST request.
:param str url: Sub URL for the request. You MUST not specify neither base url nor api version prefix.
:param dict body: (optional) Dictionary of body attributes that will be wrapped with envelope and json encoded.
:param dict **kwargs: (optional) Other parameters which are directly passed to :func:`requests.request`.
:return: Tuple of three elements: (http status code, headers, response - either parsed json or plain text)
:rtype: tuple
"""
return self.request('post', url, body=body, **kwargs) |
# Translation table swapping the case of ASCII letters only (non-ASCII
# characters are deliberately left untouched). Built once at import time.
_CASE_SWAP = str.maketrans(
    string.ascii_lowercase + string.ascii_uppercase,
    string.ascii_uppercase + string.ascii_lowercase,
)


def invert_shift(chars):
    """Return *chars* with the case of every ASCII letter swapped.

    Uses a precomputed :meth:`str.translate` table: a single C-level pass
    instead of a Python loop with quadratic ``+=`` string concatenation.

    >>> invert_shift("a")
    'A'
    >>> invert_shift("A")
    'a'
    >>> invert_shift("123 foo 456 BAR #!")
    '123 FOO 456 bar #!'
    """
    return chars.translate(_CASE_SWAP)
constant[
>>> invert_shift("a")
'A'
>>> invert_shift("A")
'a'
>>> invert_shift("123 foo 456 BAR #!")
'123 FOO 456 bar #!'
]
variable[result] assign[=] constant[]
for taget[name[char]] in starred[name[chars]] begin[:]
if compare[name[char] in name[string].ascii_lowercase] begin[:]
variable[char] assign[=] call[name[char].upper, parameter[]]
<ast.AugAssign object at 0x7da1b142a440>
return[name[result]] | keyword[def] identifier[invert_shift] ( identifier[chars] ):
literal[string]
identifier[result] = literal[string]
keyword[for] identifier[char] keyword[in] identifier[chars] :
keyword[if] identifier[char] keyword[in] identifier[string] . identifier[ascii_lowercase] :
identifier[char] = identifier[char] . identifier[upper] ()
keyword[elif] identifier[char] keyword[in] identifier[string] . identifier[ascii_uppercase] :
identifier[char] = identifier[char] . identifier[lower] ()
identifier[result] += identifier[char]
keyword[return] identifier[result] | def invert_shift(chars):
"""
>>> invert_shift("a")
'A'
>>> invert_shift("A")
'a'
>>> invert_shift("123 foo 456 BAR #!")
'123 FOO 456 bar #!'
"""
result = ''
for char in chars:
if char in string.ascii_lowercase:
# log.critical("auto shift lowercase char %s to UPPERCASE", repr(char))
char = char.upper() # depends on [control=['if'], data=['char']]
elif char in string.ascii_uppercase:
# log.critical("auto shift UPPERCASE char %s to lowercase", repr(char))
char = char.lower() # depends on [control=['if'], data=['char']]
result += char # depends on [control=['for'], data=['char']]
return result |
def from_api_repr(cls, api_repr):
    """Deserialize a ``TimePartitioning`` from its API dict representation.

    The returned instance keeps a reference to ``api_repr`` itself (no
    copy), so when the instance is stored as a property of another
    object, changes made through the owner remain visible here.

    Args:
        api_repr (Mapping[str, str]):
            The serialized representation of the TimePartitioning, such
            as what is output by :meth:`to_api_repr`.

    Returns:
        google.cloud.bigquery.table.TimePartitioning:
            The ``TimePartitioning`` object.
    """
    partitioning = cls(api_repr["type"])
    # Share (not copy) the dict: mutations at the higher level must be
    # reflected by this instance's properties.
    partitioning._properties = api_repr
    return partitioning
constant[Return a :class:`TimePartitioning` object deserialized from a dict.
This method creates a new ``TimePartitioning`` instance that points to
the ``api_repr`` parameter as its internal properties dict. This means
that when a ``TimePartitioning`` instance is stored as a property of
another object, any changes made at the higher level will also appear
here::
>>> time_partitioning = TimePartitioning()
>>> table.time_partitioning = time_partitioning
>>> table.time_partitioning.field = 'timecolumn'
>>> time_partitioning.field
'timecolumn'
Args:
api_repr (Mapping[str, str]):
The serialized representation of the TimePartitioning, such as
what is output by :meth:`to_api_repr`.
Returns:
google.cloud.bigquery.table.TimePartitioning:
The ``TimePartitioning`` object.
]
variable[instance] assign[=] call[name[cls], parameter[call[name[api_repr]][constant[type]]]]
name[instance]._properties assign[=] name[api_repr]
return[name[instance]] | keyword[def] identifier[from_api_repr] ( identifier[cls] , identifier[api_repr] ):
literal[string]
identifier[instance] = identifier[cls] ( identifier[api_repr] [ literal[string] ])
identifier[instance] . identifier[_properties] = identifier[api_repr]
keyword[return] identifier[instance] | def from_api_repr(cls, api_repr):
"""Return a :class:`TimePartitioning` object deserialized from a dict.
This method creates a new ``TimePartitioning`` instance that points to
the ``api_repr`` parameter as its internal properties dict. This means
that when a ``TimePartitioning`` instance is stored as a property of
another object, any changes made at the higher level will also appear
here::
>>> time_partitioning = TimePartitioning()
>>> table.time_partitioning = time_partitioning
>>> table.time_partitioning.field = 'timecolumn'
>>> time_partitioning.field
'timecolumn'
Args:
api_repr (Mapping[str, str]):
The serialized representation of the TimePartitioning, such as
what is output by :meth:`to_api_repr`.
Returns:
google.cloud.bigquery.table.TimePartitioning:
The ``TimePartitioning`` object.
"""
instance = cls(api_repr['type'])
instance._properties = api_repr
return instance |
def guess_filename(filename):
    """Guess filename"""
    # Exact path already exists -- nothing to guess.
    if osp.isfile(filename):
        return filename
    if not filename.endswith('.py'):
        filename += '.py'
    # Search the working directory (or home) followed by sys.path,
    # trying the bare name plus the usual Python suffixes, in order.
    for directory in [getcwd_or_home()] + sys.path:
        base = osp.join(directory, filename)
        for candidate in (base, base + '.py', base + '.pyw'):
            if osp.isfile(candidate):
                return candidate
    # Not found: fall back to the (possibly .py-suffixed) input name.
    return filename
constant[Guess filename]
if call[name[osp].isfile, parameter[name[filename]]] begin[:]
return[name[filename]]
if <ast.UnaryOp object at 0x7da18dc06c20> begin[:]
<ast.AugAssign object at 0x7da18dc05900>
for taget[name[path]] in starred[binary_operation[list[[<ast.Call object at 0x7da18dc062f0>]] + name[sys].path]] begin[:]
variable[fname] assign[=] call[name[osp].join, parameter[name[path], name[filename]]]
if call[name[osp].isfile, parameter[name[fname]]] begin[:]
return[name[fname]]
return[name[filename]] | keyword[def] identifier[guess_filename] ( identifier[filename] ):
literal[string]
keyword[if] identifier[osp] . identifier[isfile] ( identifier[filename] ):
keyword[return] identifier[filename]
keyword[if] keyword[not] identifier[filename] . identifier[endswith] ( literal[string] ):
identifier[filename] += literal[string]
keyword[for] identifier[path] keyword[in] [ identifier[getcwd_or_home] ()]+ identifier[sys] . identifier[path] :
identifier[fname] = identifier[osp] . identifier[join] ( identifier[path] , identifier[filename] )
keyword[if] identifier[osp] . identifier[isfile] ( identifier[fname] ):
keyword[return] identifier[fname]
keyword[elif] identifier[osp] . identifier[isfile] ( identifier[fname] + literal[string] ):
keyword[return] identifier[fname] + literal[string]
keyword[elif] identifier[osp] . identifier[isfile] ( identifier[fname] + literal[string] ):
keyword[return] identifier[fname] + literal[string]
keyword[return] identifier[filename] | def guess_filename(filename):
"""Guess filename"""
if osp.isfile(filename):
return filename # depends on [control=['if'], data=[]]
if not filename.endswith('.py'):
filename += '.py' # depends on [control=['if'], data=[]]
for path in [getcwd_or_home()] + sys.path:
fname = osp.join(path, filename)
if osp.isfile(fname):
return fname # depends on [control=['if'], data=[]]
elif osp.isfile(fname + '.py'):
return fname + '.py' # depends on [control=['if'], data=[]]
elif osp.isfile(fname + '.pyw'):
return fname + '.pyw' # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['path']]
return filename |
def hash(self):
    """Hash value based on file name and content"""
    # Lazily computed and memoised on first access.
    if self._hash is None:
        # Only the first ~1 MB (20 blocks of 64 KiB) of the file is hashed.
        content_digest = hashfile(self.path, blocksize=65536, count=20)
        self._hash = hashobj([self.path.name, content_digest])
    return self._hash
constant[Hash value based on file name and content]
if compare[name[self]._hash is constant[None]] begin[:]
variable[tohash] assign[=] list[[<ast.Attribute object at 0x7da1b198f1c0>]]
call[name[tohash].append, parameter[call[name[hashfile], parameter[name[self].path]]]]
name[self]._hash assign[=] call[name[hashobj], parameter[name[tohash]]]
return[name[self]._hash] | keyword[def] identifier[hash] ( identifier[self] ):
literal[string]
keyword[if] identifier[self] . identifier[_hash] keyword[is] keyword[None] :
identifier[tohash] =[ identifier[self] . identifier[path] . identifier[name] ]
identifier[tohash] . identifier[append] ( identifier[hashfile] ( identifier[self] . identifier[path] , identifier[blocksize] = literal[int] , identifier[count] = literal[int] ))
identifier[self] . identifier[_hash] = identifier[hashobj] ( identifier[tohash] )
keyword[return] identifier[self] . identifier[_hash] | def hash(self):
"""Hash value based on file name and content"""
if self._hash is None:
tohash = [self.path.name]
# Hash a maximum of ~1MB of the hdf5 file
tohash.append(hashfile(self.path, blocksize=65536, count=20))
self._hash = hashobj(tohash) # depends on [control=['if'], data=[]]
return self._hash |
def score_(self):
    """
    The concordance score (also known as the c-index) of the fit. The c-index is a generalization of the ROC AUC
    to survival data, including censorships.
    For this purpose, the ``score_`` is a measure of the predictive accuracy of the fitted model
    onto the training dataset.
    """
    # pylint: disable=access-member-before-definition
    if hasattr(self, "_predicted_median"):
        # First access after fitting: compute the c-index once, then drop
        # the cached predictions so later accesses reuse the stored score.
        self._concordance_score_ = concordance_index(
            self.durations, self._predicted_median, self.event_observed
        )
        del self._predicted_median
    return self._concordance_score_
constant[
The concordance score (also known as the c-index) of the fit. The c-index is a generalization of the ROC AUC
to survival data, including censorships.
For this purpose, the ``score_`` is a measure of the predictive accuracy of the fitted model
onto the training dataset.
]
if call[name[hasattr], parameter[name[self], constant[_predicted_median]]] begin[:]
name[self]._concordance_score_ assign[=] call[name[concordance_index], parameter[name[self].durations, name[self]._predicted_median, name[self].event_observed]]
<ast.Delete object at 0x7da18f00d0f0>
return[name[self]._concordance_score_]
return[name[self]._concordance_score_] | keyword[def] identifier[score_] ( identifier[self] ):
literal[string]
keyword[if] identifier[hasattr] ( identifier[self] , literal[string] ):
identifier[self] . identifier[_concordance_score_] = identifier[concordance_index] ( identifier[self] . identifier[durations] , identifier[self] . identifier[_predicted_median] , identifier[self] . identifier[event_observed] )
keyword[del] identifier[self] . identifier[_predicted_median]
keyword[return] identifier[self] . identifier[_concordance_score_]
keyword[return] identifier[self] . identifier[_concordance_score_] | def score_(self):
"""
The concordance score (also known as the c-index) of the fit. The c-index is a generalization of the ROC AUC
to survival data, including censorships.
For this purpose, the ``score_`` is a measure of the predictive accuracy of the fitted model
onto the training dataset.
"""
# pylint: disable=access-member-before-definition
if hasattr(self, '_predicted_median'):
self._concordance_score_ = concordance_index(self.durations, self._predicted_median, self.event_observed)
del self._predicted_median
return self._concordance_score_ # depends on [control=['if'], data=[]]
return self._concordance_score_ |
def _fonts2pys(self):
    """Writes the fonts used in the spreadsheet to the pys file.

    Each used font is stored as one tab-separated line containing the
    font name and the base64-encoded content of its font file.
    """
    # Get mapping from fonts to fontfiles, restricted to fonts actually used
    system_fonts = font_manager.findSystemFonts()
    font_name2font_file = {}
    for sys_font in system_fonts:
        font_name = font_manager.FontProperties(fname=sys_font).get_name()
        if font_name in self.fonts_used:
            font_name2font_file[font_name] = sys_font
    for font_name, font_file in font_name2font_file.items():
        # Font files are binary (e.g. TTF): read as bytes so the data is
        # not mangled by newline translation (Windows) and so b64encode
        # receives bytes on Python 3.
        with open(font_file, "rb") as fontfile:
            font_data = fontfile.read()
        ascii_font_data = base64.b64encode(font_data).decode("ascii")
        # Store font in pys file
        font_line_list = [font_name, ascii_font_data]
        self.pys_file.write(u"\t".join(font_line_list) + u"\n")
constant[Writes fonts to pys file]
variable[system_fonts] assign[=] call[name[font_manager].findSystemFonts, parameter[]]
variable[font_name2font_file] assign[=] dictionary[[], []]
for taget[name[sys_font]] in starred[name[system_fonts]] begin[:]
variable[font_name] assign[=] call[call[name[font_manager].FontProperties, parameter[]].get_name, parameter[]]
if compare[name[font_name] in name[self].fonts_used] begin[:]
call[name[font_name2font_file]][name[font_name]] assign[=] name[sys_font]
for taget[name[font_name]] in starred[name[font_name2font_file]] begin[:]
with call[name[open], parameter[call[name[font_name2font_file]][name[font_name]]]] begin[:]
variable[font_data] assign[=] call[name[fontfile].read, parameter[]]
variable[ascii_font_data] assign[=] call[name[base64].b64encode, parameter[name[font_data]]]
variable[font_line_list] assign[=] list[[<ast.Name object at 0x7da1b15189a0>, <ast.Name object at 0x7da1b16d02e0>]]
call[name[self].pys_file.write, parameter[binary_operation[call[constant[ ].join, parameter[name[font_line_list]]] + constant[
]]]] | keyword[def] identifier[_fonts2pys] ( identifier[self] ):
literal[string]
identifier[system_fonts] = identifier[font_manager] . identifier[findSystemFonts] ()
identifier[font_name2font_file] ={}
keyword[for] identifier[sys_font] keyword[in] identifier[system_fonts] :
identifier[font_name] = identifier[font_manager] . identifier[FontProperties] ( identifier[fname] = identifier[sys_font] ). identifier[get_name] ()
keyword[if] identifier[font_name] keyword[in] identifier[self] . identifier[fonts_used] :
identifier[font_name2font_file] [ identifier[font_name] ]= identifier[sys_font]
keyword[for] identifier[font_name] keyword[in] identifier[font_name2font_file] :
keyword[with] identifier[open] ( identifier[font_name2font_file] [ identifier[font_name] ]) keyword[as] identifier[fontfile] :
identifier[font_data] = identifier[fontfile] . identifier[read] ()
identifier[ascii_font_data] = identifier[base64] . identifier[b64encode] ( identifier[font_data] )
identifier[font_line_list] =[ identifier[font_name] , identifier[ascii_font_data] ]
identifier[self] . identifier[pys_file] . identifier[write] ( literal[string] . identifier[join] ( identifier[font_line_list] )+ literal[string] ) | def _fonts2pys(self):
"""Writes fonts to pys file"""
# Get mapping from fonts to fontfiles
system_fonts = font_manager.findSystemFonts()
font_name2font_file = {}
for sys_font in system_fonts:
font_name = font_manager.FontProperties(fname=sys_font).get_name()
if font_name in self.fonts_used:
font_name2font_file[font_name] = sys_font # depends on [control=['if'], data=['font_name']] # depends on [control=['for'], data=['sys_font']]
# Only include fonts that have been used in the attributes
for font_name in font_name2font_file:
# Serialize font
with open(font_name2font_file[font_name]) as fontfile:
font_data = fontfile.read()
ascii_font_data = base64.b64encode(font_data) # depends on [control=['with'], data=['fontfile']]
# Store font in pys file
font_line_list = [font_name, ascii_font_data]
self.pys_file.write(u'\t'.join(font_line_list) + u'\n') # depends on [control=['for'], data=['font_name']] |
def get_accounts(self, owner_id=None, member_id=None, properties=None):
    """GetAccounts.
    Get a list of accounts for a specific owner or a specific member.
    :param str owner_id: ID for the owner of the accounts.
    :param str member_id: ID for a member of the accounts.
    :param str properties:
    :rtype: [Account]
    """
    query_parameters = {}
    # Serialize only the filters the caller actually supplied, in a
    # fixed order: (query key, parameter name, value).
    optional_filters = (
        ('ownerId', 'owner_id', owner_id),
        ('memberId', 'member_id', member_id),
        ('properties', 'properties', properties),
    )
    for query_key, param_name, value in optional_filters:
        if value is not None:
            query_parameters[query_key] = self._serialize.query(param_name, value, 'str')
    response = self._send(http_method='GET',
                          location_id='229a6a53-b428-4ffb-a835-e8f36b5b4b1e',
                          version='5.0',
                          query_parameters=query_parameters)
    return self._deserialize('[Account]', self._unwrap_collection(response))
constant[GetAccounts.
Get a list of accounts for a specific owner or a specific member.
:param str owner_id: ID for the owner of the accounts.
:param str member_id: ID for a member of the accounts.
:param str properties:
:rtype: [Account]
]
variable[query_parameters] assign[=] dictionary[[], []]
if compare[name[owner_id] is_not constant[None]] begin[:]
call[name[query_parameters]][constant[ownerId]] assign[=] call[name[self]._serialize.query, parameter[constant[owner_id], name[owner_id], constant[str]]]
if compare[name[member_id] is_not constant[None]] begin[:]
call[name[query_parameters]][constant[memberId]] assign[=] call[name[self]._serialize.query, parameter[constant[member_id], name[member_id], constant[str]]]
if compare[name[properties] is_not constant[None]] begin[:]
call[name[query_parameters]][constant[properties]] assign[=] call[name[self]._serialize.query, parameter[constant[properties], name[properties], constant[str]]]
variable[response] assign[=] call[name[self]._send, parameter[]]
return[call[name[self]._deserialize, parameter[constant[[Account]], call[name[self]._unwrap_collection, parameter[name[response]]]]]] | keyword[def] identifier[get_accounts] ( identifier[self] , identifier[owner_id] = keyword[None] , identifier[member_id] = keyword[None] , identifier[properties] = keyword[None] ):
literal[string]
identifier[query_parameters] ={}
keyword[if] identifier[owner_id] keyword[is] keyword[not] keyword[None] :
identifier[query_parameters] [ literal[string] ]= identifier[self] . identifier[_serialize] . identifier[query] ( literal[string] , identifier[owner_id] , literal[string] )
keyword[if] identifier[member_id] keyword[is] keyword[not] keyword[None] :
identifier[query_parameters] [ literal[string] ]= identifier[self] . identifier[_serialize] . identifier[query] ( literal[string] , identifier[member_id] , literal[string] )
keyword[if] identifier[properties] keyword[is] keyword[not] keyword[None] :
identifier[query_parameters] [ literal[string] ]= identifier[self] . identifier[_serialize] . identifier[query] ( literal[string] , identifier[properties] , literal[string] )
identifier[response] = identifier[self] . identifier[_send] ( identifier[http_method] = literal[string] ,
identifier[location_id] = literal[string] ,
identifier[version] = literal[string] ,
identifier[query_parameters] = identifier[query_parameters] )
keyword[return] identifier[self] . identifier[_deserialize] ( literal[string] , identifier[self] . identifier[_unwrap_collection] ( identifier[response] )) | def get_accounts(self, owner_id=None, member_id=None, properties=None):
"""GetAccounts.
Get a list of accounts for a specific owner or a specific member.
:param str owner_id: ID for the owner of the accounts.
:param str member_id: ID for a member of the accounts.
:param str properties:
:rtype: [Account]
"""
query_parameters = {}
if owner_id is not None:
query_parameters['ownerId'] = self._serialize.query('owner_id', owner_id, 'str') # depends on [control=['if'], data=['owner_id']]
if member_id is not None:
query_parameters['memberId'] = self._serialize.query('member_id', member_id, 'str') # depends on [control=['if'], data=['member_id']]
if properties is not None:
query_parameters['properties'] = self._serialize.query('properties', properties, 'str') # depends on [control=['if'], data=['properties']]
response = self._send(http_method='GET', location_id='229a6a53-b428-4ffb-a835-e8f36b5b4b1e', version='5.0', query_parameters=query_parameters)
return self._deserialize('[Account]', self._unwrap_collection(response)) |
def mark_locations(h,section,locs,markspec='or',**kwargs):
    """
    Marks one or more locations on along a section. Could be used to
    mark the location of a recording or electrical stimulation.
    Args:
        h = hocObject to interface with neuron
        section = reference to section
        locs = float between 0 and 1, or a sequence/array of such floats
            (normalized position along the section path)
        markspec = matplotlib marker/format spec for the plotted points
        **kwargs = additional keyword arguments forwarded to plt.plot
    Returns:
        line = reference to plotted markers
    """
    # get list of cartesian coordinates specifying section path
    xyz = get_section_path(h,section)
    (r,theta,phi) = sequential_spherical(xyz)
    # cumulative arc length along the path, starting at 0
    rcum = np.append(0,np.cumsum(r))
    # Accept a scalar or any sequence of normalized positions; this
    # generalizes the old float/np.float64/list special-casing.
    locs = np.atleast_1d(np.asarray(locs, dtype=float))
    # convert locs into lengths from the beginning of the path
    lengths = locs*rcum[-1]
    # find cartesian coordinates for markers
    xyz_marks = np.array([find_coord(targ_length, xyz, rcum, theta, phi)
                          for targ_length in lengths])
    # plot markers (3D scatter along the section path)
    line, = plt.plot(xyz_marks[:,0], xyz_marks[:,1],
                     xyz_marks[:,2], markspec, **kwargs)
    return line
constant[
Marks one or more locations on along a section. Could be used to
mark the location of a recording or electrical stimulation.
Args:
h = hocObject to interface with neuron
section = reference to section
locs = float between 0 and 1, or array of floats
optional arguments specify details of marker
Returns:
line = reference to plotted markers
]
variable[xyz] assign[=] call[name[get_section_path], parameter[name[h], name[section]]]
<ast.Tuple object at 0x7da18f00c2b0> assign[=] call[name[sequential_spherical], parameter[name[xyz]]]
variable[rcum] assign[=] call[name[np].append, parameter[constant[0], call[name[np].cumsum, parameter[name[r]]]]]
if <ast.BoolOp object at 0x7da18f00d870> begin[:]
variable[locs] assign[=] call[name[np].array, parameter[list[[<ast.Name object at 0x7da18f00efe0>]]]]
if compare[call[name[type], parameter[name[locs]]] is name[list]] begin[:]
variable[locs] assign[=] call[name[np].array, parameter[name[locs]]]
variable[lengths] assign[=] binary_operation[name[locs] * call[name[rcum]][<ast.UnaryOp object at 0x7da18f00d390>]]
variable[xyz_marks] assign[=] list[[]]
for taget[name[targ_length]] in starred[name[lengths]] begin[:]
call[name[xyz_marks].append, parameter[call[name[find_coord], parameter[name[targ_length], name[xyz], name[rcum], name[theta], name[phi]]]]]
variable[xyz_marks] assign[=] call[name[np].array, parameter[name[xyz_marks]]]
<ast.Tuple object at 0x7da1b1111ff0> assign[=] call[name[plt].plot, parameter[call[name[xyz_marks]][tuple[[<ast.Slice object at 0x7da20c7946a0>, <ast.Constant object at 0x7da20c796530>]]], call[name[xyz_marks]][tuple[[<ast.Slice object at 0x7da1b23443a0>, <ast.Constant object at 0x7da1b2345e10>]]], call[name[xyz_marks]][tuple[[<ast.Slice object at 0x7da1b23446a0>, <ast.Constant object at 0x7da1b2345570>]]], name[markspec]]]
return[name[line]] | keyword[def] identifier[mark_locations] ( identifier[h] , identifier[section] , identifier[locs] , identifier[markspec] = literal[string] ,** identifier[kwargs] ):
literal[string]
identifier[xyz] = identifier[get_section_path] ( identifier[h] , identifier[section] )
( identifier[r] , identifier[theta] , identifier[phi] )= identifier[sequential_spherical] ( identifier[xyz] )
identifier[rcum] = identifier[np] . identifier[append] ( literal[int] , identifier[np] . identifier[cumsum] ( identifier[r] ))
keyword[if] identifier[type] ( identifier[locs] ) keyword[is] identifier[float] keyword[or] identifier[type] ( identifier[locs] ) keyword[is] identifier[np] . identifier[float64] :
identifier[locs] = identifier[np] . identifier[array] ([ identifier[locs] ])
keyword[if] identifier[type] ( identifier[locs] ) keyword[is] identifier[list] :
identifier[locs] = identifier[np] . identifier[array] ( identifier[locs] )
identifier[lengths] = identifier[locs] * identifier[rcum] [- literal[int] ]
identifier[xyz_marks] =[]
keyword[for] identifier[targ_length] keyword[in] identifier[lengths] :
identifier[xyz_marks] . identifier[append] ( identifier[find_coord] ( identifier[targ_length] , identifier[xyz] , identifier[rcum] , identifier[theta] , identifier[phi] ))
identifier[xyz_marks] = identifier[np] . identifier[array] ( identifier[xyz_marks] )
identifier[line] ,= identifier[plt] . identifier[plot] ( identifier[xyz_marks] [:, literal[int] ], identifier[xyz_marks] [:, literal[int] ], identifier[xyz_marks] [:, literal[int] ], identifier[markspec] ,** identifier[kwargs] )
keyword[return] identifier[line] | def mark_locations(h, section, locs, markspec='or', **kwargs):
"""
Marks one or more locations on along a section. Could be used to
mark the location of a recording or electrical stimulation.
Args:
h = hocObject to interface with neuron
section = reference to section
locs = float between 0 and 1, or array of floats
optional arguments specify details of marker
Returns:
line = reference to plotted markers
"""
# get list of cartesian coordinates specifying section path
xyz = get_section_path(h, section)
(r, theta, phi) = sequential_spherical(xyz)
rcum = np.append(0, np.cumsum(r))
# convert locs into lengths from the beginning of the path
if type(locs) is float or type(locs) is np.float64:
locs = np.array([locs]) # depends on [control=['if'], data=[]]
if type(locs) is list:
locs = np.array(locs) # depends on [control=['if'], data=[]]
lengths = locs * rcum[-1]
# find cartesian coordinates for markers
xyz_marks = []
for targ_length in lengths:
xyz_marks.append(find_coord(targ_length, xyz, rcum, theta, phi)) # depends on [control=['for'], data=['targ_length']]
xyz_marks = np.array(xyz_marks)
# plot markers
(line,) = plt.plot(xyz_marks[:, 0], xyz_marks[:, 1], xyz_marks[:, 2], markspec, **kwargs)
return line |
def _config_parser(cls, repo, parent_commit, read_only):
    """:return: Config Parser constrained to our submodule in read or write mode
    :raise IOError: If the .gitmodules file cannot be found, either locally or in the repository
        at the given parent commit. Otherwise the exception would be delayed until the first
        access of the config parser"""
    # Figure out whether HEAD currently points at the requested parent commit.
    head_is_parent = True
    if parent_commit is not None:
        try:
            head_is_parent = repo.head.commit == parent_commit
        except ValueError:
            # Probably an empty repository: HEAD does not point to a valid ref yet.
            pass
    # end handle parent_commit

    use_working_tree = not repo.bare and head_is_parent
    if use_working_tree:
        # Read the .gitmodules file straight from the working tree.
        fp_module = osp.join(repo.working_tree_dir, cls.k_modules_file)
    else:
        # Fall back to the blob stored in the parent commit's tree.
        assert parent_commit is not None, "need valid parent_commit in bare repositories"
        try:
            fp_module = cls._sio_modules(parent_commit)
        except KeyError:
            raise IOError("Could not find %s file in the tree of parent commit %s" %
                          (cls.k_modules_file, parent_commit))
        # END handle exceptions
    # END handle non-bare working tree

    if not (read_only or use_working_tree):
        # Blobs taken from history cannot be written back.
        raise ValueError("Cannot write blobs of 'historical' submodule configurations")
    # END handle writes of historical submodules
    return SubmoduleConfigParser(fp_module, read_only=read_only)
constant[:return: Config Parser constrained to our submodule in read or write mode
:raise IOError: If the .gitmodules file cannot be found, either locally or in the repository
at the given parent commit. Otherwise the exception would be delayed until the first
access of the config parser]
variable[parent_matches_head] assign[=] constant[True]
if compare[name[parent_commit] is_not constant[None]] begin[:]
<ast.Try object at 0x7da18c4cfa00>
if <ast.BoolOp object at 0x7da18c4cd300> begin[:]
variable[fp_module] assign[=] call[name[osp].join, parameter[name[repo].working_tree_dir, name[cls].k_modules_file]]
if <ast.BoolOp object at 0x7da18bccbca0> begin[:]
<ast.Raise object at 0x7da18bccadd0>
return[call[name[SubmoduleConfigParser], parameter[name[fp_module]]]] | keyword[def] identifier[_config_parser] ( identifier[cls] , identifier[repo] , identifier[parent_commit] , identifier[read_only] ):
literal[string]
identifier[parent_matches_head] = keyword[True]
keyword[if] identifier[parent_commit] keyword[is] keyword[not] keyword[None] :
keyword[try] :
identifier[parent_matches_head] = identifier[repo] . identifier[head] . identifier[commit] == identifier[parent_commit]
keyword[except] identifier[ValueError] :
keyword[pass]
keyword[if] keyword[not] identifier[repo] . identifier[bare] keyword[and] identifier[parent_matches_head] :
identifier[fp_module] = identifier[osp] . identifier[join] ( identifier[repo] . identifier[working_tree_dir] , identifier[cls] . identifier[k_modules_file] )
keyword[else] :
keyword[assert] identifier[parent_commit] keyword[is] keyword[not] keyword[None] , literal[string]
keyword[try] :
identifier[fp_module] = identifier[cls] . identifier[_sio_modules] ( identifier[parent_commit] )
keyword[except] identifier[KeyError] :
keyword[raise] identifier[IOError] ( literal[string] %
( identifier[cls] . identifier[k_modules_file] , identifier[parent_commit] ))
keyword[if] keyword[not] identifier[read_only] keyword[and] ( identifier[repo] . identifier[bare] keyword[or] keyword[not] identifier[parent_matches_head] ):
keyword[raise] identifier[ValueError] ( literal[string] )
keyword[return] identifier[SubmoduleConfigParser] ( identifier[fp_module] , identifier[read_only] = identifier[read_only] ) | def _config_parser(cls, repo, parent_commit, read_only):
""":return: Config Parser constrained to our submodule in read or write mode
:raise IOError: If the .gitmodules file cannot be found, either locally or in the repository
at the given parent commit. Otherwise the exception would be delayed until the first
access of the config parser"""
parent_matches_head = True
if parent_commit is not None:
try:
parent_matches_head = repo.head.commit == parent_commit # depends on [control=['try'], data=[]]
except ValueError:
# We are most likely in an empty repository, so the HEAD doesn't point to a valid ref
pass # depends on [control=['except'], data=[]] # depends on [control=['if'], data=['parent_commit']]
# end handle parent_commit
if not repo.bare and parent_matches_head:
fp_module = osp.join(repo.working_tree_dir, cls.k_modules_file) # depends on [control=['if'], data=[]]
else:
assert parent_commit is not None, 'need valid parent_commit in bare repositories'
try:
fp_module = cls._sio_modules(parent_commit) # depends on [control=['try'], data=[]]
except KeyError:
raise IOError('Could not find %s file in the tree of parent commit %s' % (cls.k_modules_file, parent_commit)) # depends on [control=['except'], data=[]]
# END handle exceptions
# END handle non-bare working tree
if not read_only and (repo.bare or not parent_matches_head):
raise ValueError("Cannot write blobs of 'historical' submodule configurations") # depends on [control=['if'], data=[]]
# END handle writes of historical submodules
return SubmoduleConfigParser(fp_module, read_only=read_only) |
def __liftover_coordinates_size_match(self, intersecting_region):
    """
    Lift a region that overlaps the genomic occurrence of the retrotransposon
    to consensus sequence coordinates using just the coordinates (not the full)
    alignment, when they have the same length. This is an internal helper
    method. The above length constraint must be true, otherwise an assertion
    is failed.

    :param intersecting_region: a region that intersects this occurrence.
    :return: GenomicInterval representing the region after lifting to consensus
    :raise RetrotransposonError: if the consensus match strand is neither
        '+' nor '-'.
    :note: no checks are made for whether the interval really intersects!!
    """
    # should always pass, but check anyway...
    consensus_match_length = self.consensus_end - self.consensus_start
    assert(consensus_match_length - len(self) == 0)
    # NOTE: compare strands with '==', not 'is'; identity of equal string
    # literals is an implementation detail and must not be relied upon.
    if self.consensus_match_strand == '+':
        s = max(intersecting_region.start - self.start, 0) +\
            self.consensus_start
        e = min(max(intersecting_region.end - self.start, 0) +
                self.consensus_start, self.consensus_len)
        g = GenomicInterval(self.repeat_name(), s, e, intersecting_region.name,
                            intersecting_region.score, self.strand)
        return g
    elif self.consensus_match_strand == '-':
        # On the reverse strand the interval is mirrored around the match end.
        e = (self.consensus_end -
             max(intersecting_region.start - self.start, 0))
        s = (self.consensus_end -
             min(max(intersecting_region.end - self.start, 0), len(self)))
        g = GenomicInterval(self.repeat_name(), s, e, intersecting_region.name,
                            intersecting_region.score, self.strand)
        return g
    else:
        raise RetrotransposonError("couldn't determine strand of " +
                                   "retrotransposon occurrance " + str(self))
constant[
Lift a region that overlaps the genomic occurrence of the retrotransposon
to consensus sequence coordinates using just the coordinates (not the full)
alignment, when they have the same length. This is an internal helper
method. The above length constraint must be true, otherwise an assertion
is failed.
:param intersect_region: a region that intersects this occurrence.
:return: GenomicInterval representing the region after lifting to consensus
:note: no checks are made for whether the interval really intersects!!
]
variable[consensus_match_length] assign[=] binary_operation[name[self].consensus_end - name[self].consensus_start]
assert[compare[binary_operation[name[consensus_match_length] - call[name[len], parameter[name[self]]]] equal[==] constant[0]]]
if compare[name[self].consensus_match_strand is constant[+]] begin[:]
variable[s] assign[=] binary_operation[call[name[max], parameter[binary_operation[name[intersecting_region].start - name[self].start], constant[0]]] + name[self].consensus_start]
variable[e] assign[=] call[name[min], parameter[binary_operation[call[name[max], parameter[binary_operation[name[intersecting_region].end - name[self].start], constant[0]]] + name[self].consensus_start], name[self].consensus_len]]
variable[g] assign[=] call[name[GenomicInterval], parameter[call[name[self].repeat_name, parameter[]], name[s], name[e], name[intersecting_region].name, name[intersecting_region].score, name[self].strand]]
return[name[g]] | keyword[def] identifier[__liftover_coordinates_size_match] ( identifier[self] , identifier[intersecting_region] ):
literal[string]
identifier[consensus_match_length] = identifier[self] . identifier[consensus_end] - identifier[self] . identifier[consensus_start]
keyword[assert] ( identifier[consensus_match_length] - identifier[len] ( identifier[self] )== literal[int] )
keyword[if] identifier[self] . identifier[consensus_match_strand] keyword[is] literal[string] :
identifier[s] = identifier[max] ( identifier[intersecting_region] . identifier[start] - identifier[self] . identifier[start] , literal[int] )+ identifier[self] . identifier[consensus_start]
identifier[e] = identifier[min] ( identifier[max] ( identifier[intersecting_region] . identifier[end] - identifier[self] . identifier[start] , literal[int] )+
identifier[self] . identifier[consensus_start] , identifier[self] . identifier[consensus_len] )
identifier[g] = identifier[GenomicInterval] ( identifier[self] . identifier[repeat_name] (), identifier[s] , identifier[e] , identifier[intersecting_region] . identifier[name] ,
identifier[intersecting_region] . identifier[score] , identifier[self] . identifier[strand] )
keyword[return] identifier[g]
keyword[elif] identifier[self] . identifier[consensus_match_strand] keyword[is] literal[string] :
identifier[e] =( identifier[self] . identifier[consensus_end] -
identifier[max] ( identifier[intersecting_region] . identifier[start] - identifier[self] . identifier[start] , literal[int] ))
identifier[s] =( identifier[self] . identifier[consensus_end] -
identifier[min] ( identifier[max] ( identifier[intersecting_region] . identifier[end] - identifier[self] . identifier[start] , literal[int] ), identifier[len] ( identifier[self] )))
identifier[g] = identifier[GenomicInterval] ( identifier[self] . identifier[repeat_name] (), identifier[s] , identifier[e] , identifier[intersecting_region] . identifier[name] ,
identifier[intersecting_region] . identifier[score] , identifier[self] . identifier[strand] )
keyword[return] identifier[g]
keyword[else] :
keyword[raise] identifier[RetrotransposonError] ( literal[string] +
literal[string] + identifier[str] ( identifier[self] )) | def __liftover_coordinates_size_match(self, intersecting_region):
"""
Lift a region that overlaps the genomic occurrence of the retrotransposon
to consensus sequence coordinates using just the coordinates (not the full)
alignment, when they have the same length. This is an internal helper
method. The above length constraint must be true, otherwise an assertion
is failed.
:param intersect_region: a region that intersects this occurrence.
:return: GenomicInterval representing the region after lifting to consensus
:note: no checks are made for whether the interval really intersects!!
"""
# should always pass, but check anyway...
consensus_match_length = self.consensus_end - self.consensus_start
assert consensus_match_length - len(self) == 0
if self.consensus_match_strand is '+':
s = max(intersecting_region.start - self.start, 0) + self.consensus_start
e = min(max(intersecting_region.end - self.start, 0) + self.consensus_start, self.consensus_len)
g = GenomicInterval(self.repeat_name(), s, e, intersecting_region.name, intersecting_region.score, self.strand)
return g # depends on [control=['if'], data=[]]
elif self.consensus_match_strand is '-':
e = self.consensus_end - max(intersecting_region.start - self.start, 0)
s = self.consensus_end - min(max(intersecting_region.end - self.start, 0), len(self))
g = GenomicInterval(self.repeat_name(), s, e, intersecting_region.name, intersecting_region.score, self.strand)
return g # depends on [control=['if'], data=[]]
else:
raise RetrotransposonError("couldn't determine strand of " + 'retrotransposon occurrance ' + str(self)) |
def html_path(builder, pagename=None):
    """Calculate the relative path to the Slides for pagename."""
    # Resolve the target document once: explicit pagename wins, otherwise
    # fall back to the document currently being built.
    docname = pagename or builder.current_docname
    slide_target = os.path.join(builder.app.config.slide_html_relative_path,
                                docname)
    return builder.get_relative_uri(docname, slide_target)
constant[Calculate the relative path to the Slides for pagename.]
return[call[name[builder].get_relative_uri, parameter[<ast.BoolOp object at 0x7da1b0338280>, call[name[os].path.join, parameter[name[builder].app.config.slide_html_relative_path, <ast.BoolOp object at 0x7da1b033a860>]]]]] | keyword[def] identifier[html_path] ( identifier[builder] , identifier[pagename] = keyword[None] ):
literal[string]
keyword[return] identifier[builder] . identifier[get_relative_uri] (
identifier[pagename] keyword[or] identifier[builder] . identifier[current_docname] ,
identifier[os] . identifier[path] . identifier[join] (
identifier[builder] . identifier[app] . identifier[config] . identifier[slide_html_relative_path] ,
identifier[pagename] keyword[or] identifier[builder] . identifier[current_docname] ,
)) | def html_path(builder, pagename=None):
"""Calculate the relative path to the Slides for pagename."""
return builder.get_relative_uri(pagename or builder.current_docname, os.path.join(builder.app.config.slide_html_relative_path, pagename or builder.current_docname)) |
def is_link_inline(cls, tag, attribute):
    '''Return whether the link is likely to be inline object.'''
    # Consult the per-tag attribute flag table when an entry exists.
    tag_attrs = cls.TAG_ATTRIBUTES.get(tag)
    if tag_attrs is not None and attribute in tag_attrs:
        return tag_attrs[attribute] & cls.ATTR_INLINE
    # Otherwise anything except a plain hyperlink is treated as inline.
    return attribute != 'href'
constant[Return whether the link is likely to be inline object.]
if <ast.BoolOp object at 0x7da2054a56c0> begin[:]
variable[attr_flags] assign[=] call[call[name[cls].TAG_ATTRIBUTES][name[tag]]][name[attribute]]
return[binary_operation[name[attr_flags] <ast.BitAnd object at 0x7da2590d6b60> name[cls].ATTR_INLINE]]
return[compare[name[attribute] not_equal[!=] constant[href]]] | keyword[def] identifier[is_link_inline] ( identifier[cls] , identifier[tag] , identifier[attribute] ):
literal[string]
keyword[if] identifier[tag] keyword[in] identifier[cls] . identifier[TAG_ATTRIBUTES] keyword[and] identifier[attribute] keyword[in] identifier[cls] . identifier[TAG_ATTRIBUTES] [ identifier[tag] ]:
identifier[attr_flags] = identifier[cls] . identifier[TAG_ATTRIBUTES] [ identifier[tag] ][ identifier[attribute] ]
keyword[return] identifier[attr_flags] & identifier[cls] . identifier[ATTR_INLINE]
keyword[return] identifier[attribute] != literal[string] | def is_link_inline(cls, tag, attribute):
"""Return whether the link is likely to be inline object."""
if tag in cls.TAG_ATTRIBUTES and attribute in cls.TAG_ATTRIBUTES[tag]:
attr_flags = cls.TAG_ATTRIBUTES[tag][attribute]
return attr_flags & cls.ATTR_INLINE # depends on [control=['if'], data=[]]
return attribute != 'href' |
def _initEphemerals(self):
    """
    Initialize attributes that are not saved with the checkpoint.
    """
    self._firstComputeCall = True
    # Scratch results recomputed on demand; never checkpointed.
    self._accuracy = None
    self._protoScores = None
    self._categoryDistances = None
    # Fresh nearest-neighbor classifier built from the configured parameters.
    self._knn = knn_classifier.KNNClassifier(**self.knnParams)
    # Ensure the optional ephemeral slots exist, defaulting any missing
    # one to None without clobbering values that are already present.
    for attr_name in ('_partitions', '_useAuxiliary', '_doSphering',
                      '_scanInfo', '_protoScores'):
        if not hasattr(self, attr_name):
            setattr(self, attr_name, None)
constant[
Initialize attributes that are not saved with the checkpoint.
]
name[self]._firstComputeCall assign[=] constant[True]
name[self]._accuracy assign[=] constant[None]
name[self]._protoScores assign[=] constant[None]
name[self]._categoryDistances assign[=] constant[None]
name[self]._knn assign[=] call[name[knn_classifier].KNNClassifier, parameter[]]
for taget[name[x]] in starred[tuple[[<ast.Constant object at 0x7da18f723370>, <ast.Constant object at 0x7da18f722e00>, <ast.Constant object at 0x7da18f722b00>, <ast.Constant object at 0x7da18f7209a0>, <ast.Constant object at 0x7da18f7230d0>]]] begin[:]
if <ast.UnaryOp object at 0x7da18f721300> begin[:]
call[name[setattr], parameter[name[self], name[x], constant[None]]] | keyword[def] identifier[_initEphemerals] ( identifier[self] ):
literal[string]
identifier[self] . identifier[_firstComputeCall] = keyword[True]
identifier[self] . identifier[_accuracy] = keyword[None]
identifier[self] . identifier[_protoScores] = keyword[None]
identifier[self] . identifier[_categoryDistances] = keyword[None]
identifier[self] . identifier[_knn] = identifier[knn_classifier] . identifier[KNNClassifier] (** identifier[self] . identifier[knnParams] )
keyword[for] identifier[x] keyword[in] ( literal[string] , literal[string] , literal[string] ,
literal[string] , literal[string] ):
keyword[if] keyword[not] identifier[hasattr] ( identifier[self] , identifier[x] ):
identifier[setattr] ( identifier[self] , identifier[x] , keyword[None] ) | def _initEphemerals(self):
"""
Initialize attributes that are not saved with the checkpoint.
"""
self._firstComputeCall = True
self._accuracy = None
self._protoScores = None
self._categoryDistances = None
self._knn = knn_classifier.KNNClassifier(**self.knnParams)
for x in ('_partitions', '_useAuxiliary', '_doSphering', '_scanInfo', '_protoScores'):
if not hasattr(self, x):
setattr(self, x, None) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['x']] |
def feed_backend_arthur(backend_name, backend_params):
    """ Feed Ocean with backend data collected from arthur redis queue

    :param backend_name: name of the perceval backend whose items to yield
    :param backend_params: CLI-style parameters used to initialize the backend
    :raises RuntimeError: if no connector is registered for `backend_name`
    :returns: generator yielding the queued items tagged for this backend
    """
    # Always get pending items from arthur for all data sources
    feed_arthur()

    logger.debug("Items available for %s", arthur_items.keys())

    # Get only the items for the backend; look the connector up once
    # instead of twice as before.
    connector = get_connector_from_name(backend_name)
    if not connector:
        raise RuntimeError("Unknown backend %s" % backend_name)
    klass = connector[3]  # BackendCmd for the connector

    backend_cmd = init_backend(klass(*backend_params))

    tag = backend_cmd.backend.tag
    logger.debug("Getting items for %s.", tag)

    if tag in arthur_items:
        logger.debug("Found items for %s.", tag)
        for item in arthur_items[tag]:
            yield item
constant[ Feed Ocean with backend data collected from arthur redis queue]
call[name[feed_arthur], parameter[]]
call[name[logger].debug, parameter[constant[Items available for %s], call[name[arthur_items].keys, parameter[]]]]
if <ast.UnaryOp object at 0x7da18bcca050> begin[:]
<ast.Raise object at 0x7da18bcc93f0>
variable[connector] assign[=] call[name[get_connector_from_name], parameter[name[backend_name]]]
variable[klass] assign[=] call[name[connector]][constant[3]]
variable[backend_cmd] assign[=] call[name[init_backend], parameter[call[name[klass], parameter[<ast.Starred object at 0x7da18bcca800>]]]]
variable[tag] assign[=] name[backend_cmd].backend.tag
call[name[logger].debug, parameter[constant[Getting items for %s.], name[tag]]]
if compare[name[tag] in name[arthur_items]] begin[:]
call[name[logger].debug, parameter[constant[Found items for %s.], name[tag]]]
for taget[name[item]] in starred[call[name[arthur_items]][name[tag]]] begin[:]
<ast.Yield object at 0x7da18bcc9930> | keyword[def] identifier[feed_backend_arthur] ( identifier[backend_name] , identifier[backend_params] ):
literal[string]
identifier[feed_arthur] ()
identifier[logger] . identifier[debug] ( literal[string] , identifier[arthur_items] . identifier[keys] ())
keyword[if] keyword[not] identifier[get_connector_from_name] ( identifier[backend_name] ):
keyword[raise] identifier[RuntimeError] ( literal[string] % identifier[backend_name] )
identifier[connector] = identifier[get_connector_from_name] ( identifier[backend_name] )
identifier[klass] = identifier[connector] [ literal[int] ]
identifier[backend_cmd] = identifier[init_backend] ( identifier[klass] (* identifier[backend_params] ))
identifier[tag] = identifier[backend_cmd] . identifier[backend] . identifier[tag]
identifier[logger] . identifier[debug] ( literal[string] , identifier[tag] )
keyword[if] identifier[tag] keyword[in] identifier[arthur_items] :
identifier[logger] . identifier[debug] ( literal[string] , identifier[tag] )
keyword[for] identifier[item] keyword[in] identifier[arthur_items] [ identifier[tag] ]:
keyword[yield] identifier[item] | def feed_backend_arthur(backend_name, backend_params):
""" Feed Ocean with backend data collected from arthur redis queue"""
# Always get pending items from arthur for all data sources
feed_arthur()
logger.debug('Items available for %s', arthur_items.keys())
# Get only the items for the backend
if not get_connector_from_name(backend_name):
raise RuntimeError('Unknown backend %s' % backend_name) # depends on [control=['if'], data=[]]
connector = get_connector_from_name(backend_name)
klass = connector[3] # BackendCmd for the connector
backend_cmd = init_backend(klass(*backend_params))
tag = backend_cmd.backend.tag
logger.debug('Getting items for %s.', tag)
if tag in arthur_items:
logger.debug('Found items for %s.', tag)
for item in arthur_items[tag]:
yield item # depends on [control=['for'], data=['item']] # depends on [control=['if'], data=['tag', 'arthur_items']] |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.