code stringlengths 75 104k | code_sememe stringlengths 47 309k | token_type stringlengths 215 214k | code_dependency stringlengths 75 155k |
|---|---|---|---|
def _code_support(self, language, caller):
"""Helper callback."""
code = caller()
# remove the leading whitespace so the whole block can be indented more easily with flow of page
lines = code.splitlines()
first_nonempty_line_index = 0
while not lines[first_nonempty_line_index]:
first_nonempty_line_index += 1
len_to_trim = len(lines[first_nonempty_line_index]) - len(
lines[first_nonempty_line_index].lstrip()
)
lines = [x[len_to_trim:] for x in lines]
code = "\n".join(lines)
if language:
lexer = get_lexer_by_name(language, stripall=True)
else:
lexer = guess_lexer(code)
highlighted = highlight(code, lexer, HtmlFormatter())
return highlighted | def function[_code_support, parameter[self, language, caller]]:
constant[Helper callback.]
variable[code] assign[=] call[name[caller], parameter[]]
variable[lines] assign[=] call[name[code].splitlines, parameter[]]
variable[first_nonempty_line_index] assign[=] constant[0]
while <ast.UnaryOp object at 0x7da2044c1150> begin[:]
<ast.AugAssign object at 0x7da2044c0b50>
variable[len_to_trim] assign[=] binary_operation[call[name[len], parameter[call[name[lines]][name[first_nonempty_line_index]]]] - call[name[len], parameter[call[call[name[lines]][name[first_nonempty_line_index]].lstrip, parameter[]]]]]
variable[lines] assign[=] <ast.ListComp object at 0x7da20c6ab640>
variable[code] assign[=] call[constant[
].join, parameter[name[lines]]]
if name[language] begin[:]
variable[lexer] assign[=] call[name[get_lexer_by_name], parameter[name[language]]]
variable[highlighted] assign[=] call[name[highlight], parameter[name[code], name[lexer], call[name[HtmlFormatter], parameter[]]]]
return[name[highlighted]] | keyword[def] identifier[_code_support] ( identifier[self] , identifier[language] , identifier[caller] ):
literal[string]
identifier[code] = identifier[caller] ()
identifier[lines] = identifier[code] . identifier[splitlines] ()
identifier[first_nonempty_line_index] = literal[int]
keyword[while] keyword[not] identifier[lines] [ identifier[first_nonempty_line_index] ]:
identifier[first_nonempty_line_index] += literal[int]
identifier[len_to_trim] = identifier[len] ( identifier[lines] [ identifier[first_nonempty_line_index] ])- identifier[len] (
identifier[lines] [ identifier[first_nonempty_line_index] ]. identifier[lstrip] ()
)
identifier[lines] =[ identifier[x] [ identifier[len_to_trim] :] keyword[for] identifier[x] keyword[in] identifier[lines] ]
identifier[code] = literal[string] . identifier[join] ( identifier[lines] )
keyword[if] identifier[language] :
identifier[lexer] = identifier[get_lexer_by_name] ( identifier[language] , identifier[stripall] = keyword[True] )
keyword[else] :
identifier[lexer] = identifier[guess_lexer] ( identifier[code] )
identifier[highlighted] = identifier[highlight] ( identifier[code] , identifier[lexer] , identifier[HtmlFormatter] ())
keyword[return] identifier[highlighted] | def _code_support(self, language, caller):
"""Helper callback."""
code = caller()
# remove the leading whitespace so the whole block can be indented more easily with flow of page
lines = code.splitlines()
first_nonempty_line_index = 0
while not lines[first_nonempty_line_index]:
first_nonempty_line_index += 1 # depends on [control=['while'], data=[]]
len_to_trim = len(lines[first_nonempty_line_index]) - len(lines[first_nonempty_line_index].lstrip())
lines = [x[len_to_trim:] for x in lines]
code = '\n'.join(lines)
if language:
lexer = get_lexer_by_name(language, stripall=True) # depends on [control=['if'], data=[]]
else:
lexer = guess_lexer(code)
highlighted = highlight(code, lexer, HtmlFormatter())
return highlighted |
def random_2in4sat(num_variables, num_clauses, vartype=dimod.BINARY, satisfiable=True):
"""Random two-in-four (2-in-4) constraint satisfaction problem.
Args:
num_variables (integer): Number of variables (at least four).
num_clauses (integer): Number of constraints that together constitute the
constraint satisfaction problem.
vartype (Vartype, optional, default='BINARY'): Variable type. Accepted
input values:
* Vartype.SPIN, 'SPIN', {-1, 1}
* Vartype.BINARY, 'BINARY', {0, 1}
satisfiable (bool, optional, default=True): True if the CSP can be satisfied.
Returns:
CSP (:obj:`.ConstraintSatisfactionProblem`): CSP that is satisfied when its variables
are assigned values that satisfy a two-in-four satisfiability problem.
Examples:
This example creates a CSP with 6 variables and two random constraints and checks
whether a particular assignment of variables satisifies it.
>>> import dwavebinarycsp
>>> import dwavebinarycsp.factories as sat
>>> csp = sat.random_2in4sat(6, 2)
>>> csp.constraints # doctest: +SKIP
[Constraint.from_configurations(frozenset({(1, 0, 1, 0), (1, 0, 0, 1), (1, 1, 1, 1), (0, 1, 1, 0), (0, 0, 0, 0),
(0, 1, 0, 1)}), (2, 4, 0, 1), Vartype.BINARY, name='2-in-4'),
Constraint.from_configurations(frozenset({(1, 0, 1, 1), (1, 1, 0, 1), (1, 1, 1, 0), (0, 0, 0, 1),
(0, 1, 0, 0), (0, 0, 1, 0)}), (1, 2, 4, 5), Vartype.BINARY, name='2-in-4')]
>>> csp.check({0: 1, 1: 0, 2: 1, 3: 1, 4: 0, 5: 0}) # doctest: +SKIP
True
"""
if num_variables < 4:
raise ValueError("a 2in4 problem needs at least 4 variables")
if num_clauses > 16 * _nchoosek(num_variables, 4): # 16 different negation patterns
raise ValueError("too many clauses")
# also checks the vartype argument
csp = ConstraintSatisfactionProblem(vartype)
variables = list(range(num_variables))
constraints = set()
if satisfiable:
values = tuple(vartype.value)
planted_solution = {v: choice(values) for v in variables}
configurations = [(0, 0, 1, 1), (0, 1, 0, 1), (1, 0, 0, 1),
(0, 1, 1, 0), (1, 0, 1, 0), (1, 1, 0, 0)]
while len(constraints) < num_clauses:
# sort the variables because constraints are hashed on configurations/variables
# because 2-in-4 sat is symmetric, we would not get a hash conflict for different
# variable orders
constraint_variables = sorted(sample(variables, 4))
# pick (uniformly) a configuration and determine which variables we need to negate to
# match the chosen configuration
config = choice(configurations)
pos = tuple(v for idx, v in enumerate(constraint_variables) if config[idx] == (planted_solution[v] > 0))
neg = tuple(v for idx, v in enumerate(constraint_variables) if config[idx] != (planted_solution[v] > 0))
const = sat2in4(pos=pos, neg=neg, vartype=vartype)
assert const.check(planted_solution)
constraints.add(const)
else:
while len(constraints) < num_clauses:
# sort the variables because constraints are hashed on configurations/variables
# because 2-in-4 sat is symmetric, we would not get a hash conflict for different
# variable orders
constraint_variables = sorted(sample(variables, 4))
# randomly determine negations
pos = tuple(v for v in constraint_variables if random() > .5)
neg = tuple(v for v in constraint_variables if v not in pos)
const = sat2in4(pos=pos, neg=neg, vartype=vartype)
constraints.add(const)
for const in constraints:
csp.add_constraint(const)
# in case any variables didn't make it in
for v in variables:
csp.add_variable(v)
return csp | def function[random_2in4sat, parameter[num_variables, num_clauses, vartype, satisfiable]]:
constant[Random two-in-four (2-in-4) constraint satisfaction problem.
Args:
num_variables (integer): Number of variables (at least four).
num_clauses (integer): Number of constraints that together constitute the
constraint satisfaction problem.
vartype (Vartype, optional, default='BINARY'): Variable type. Accepted
input values:
* Vartype.SPIN, 'SPIN', {-1, 1}
* Vartype.BINARY, 'BINARY', {0, 1}
satisfiable (bool, optional, default=True): True if the CSP can be satisfied.
Returns:
CSP (:obj:`.ConstraintSatisfactionProblem`): CSP that is satisfied when its variables
are assigned values that satisfy a two-in-four satisfiability problem.
Examples:
This example creates a CSP with 6 variables and two random constraints and checks
whether a particular assignment of variables satisifies it.
>>> import dwavebinarycsp
>>> import dwavebinarycsp.factories as sat
>>> csp = sat.random_2in4sat(6, 2)
>>> csp.constraints # doctest: +SKIP
[Constraint.from_configurations(frozenset({(1, 0, 1, 0), (1, 0, 0, 1), (1, 1, 1, 1), (0, 1, 1, 0), (0, 0, 0, 0),
(0, 1, 0, 1)}), (2, 4, 0, 1), Vartype.BINARY, name='2-in-4'),
Constraint.from_configurations(frozenset({(1, 0, 1, 1), (1, 1, 0, 1), (1, 1, 1, 0), (0, 0, 0, 1),
(0, 1, 0, 0), (0, 0, 1, 0)}), (1, 2, 4, 5), Vartype.BINARY, name='2-in-4')]
>>> csp.check({0: 1, 1: 0, 2: 1, 3: 1, 4: 0, 5: 0}) # doctest: +SKIP
True
]
if compare[name[num_variables] less[<] constant[4]] begin[:]
<ast.Raise object at 0x7da1b00cfca0>
if compare[name[num_clauses] greater[>] binary_operation[constant[16] * call[name[_nchoosek], parameter[name[num_variables], constant[4]]]]] begin[:]
<ast.Raise object at 0x7da1b00cfa30>
variable[csp] assign[=] call[name[ConstraintSatisfactionProblem], parameter[name[vartype]]]
variable[variables] assign[=] call[name[list], parameter[call[name[range], parameter[name[num_variables]]]]]
variable[constraints] assign[=] call[name[set], parameter[]]
if name[satisfiable] begin[:]
variable[values] assign[=] call[name[tuple], parameter[name[vartype].value]]
variable[planted_solution] assign[=] <ast.DictComp object at 0x7da1b00cf490>
variable[configurations] assign[=] list[[<ast.Tuple object at 0x7da1b00cf220>, <ast.Tuple object at 0x7da1b00cf130>, <ast.Tuple object at 0x7da1b00cf040>, <ast.Tuple object at 0x7da1b00cef50>, <ast.Tuple object at 0x7da1b00cee60>, <ast.Tuple object at 0x7da1b00ced70>]]
while compare[call[name[len], parameter[name[constraints]]] less[<] name[num_clauses]] begin[:]
variable[constraint_variables] assign[=] call[name[sorted], parameter[call[name[sample], parameter[name[variables], constant[4]]]]]
variable[config] assign[=] call[name[choice], parameter[name[configurations]]]
variable[pos] assign[=] call[name[tuple], parameter[<ast.GeneratorExp object at 0x7da1b00ce290>]]
variable[neg] assign[=] call[name[tuple], parameter[<ast.GeneratorExp object at 0x7da1b00cde40>]]
variable[const] assign[=] call[name[sat2in4], parameter[]]
assert[call[name[const].check, parameter[name[planted_solution]]]]
call[name[constraints].add, parameter[name[const]]]
for taget[name[const]] in starred[name[constraints]] begin[:]
call[name[csp].add_constraint, parameter[name[const]]]
for taget[name[v]] in starred[name[variables]] begin[:]
call[name[csp].add_variable, parameter[name[v]]]
return[name[csp]] | keyword[def] identifier[random_2in4sat] ( identifier[num_variables] , identifier[num_clauses] , identifier[vartype] = identifier[dimod] . identifier[BINARY] , identifier[satisfiable] = keyword[True] ):
literal[string]
keyword[if] identifier[num_variables] < literal[int] :
keyword[raise] identifier[ValueError] ( literal[string] )
keyword[if] identifier[num_clauses] > literal[int] * identifier[_nchoosek] ( identifier[num_variables] , literal[int] ):
keyword[raise] identifier[ValueError] ( literal[string] )
identifier[csp] = identifier[ConstraintSatisfactionProblem] ( identifier[vartype] )
identifier[variables] = identifier[list] ( identifier[range] ( identifier[num_variables] ))
identifier[constraints] = identifier[set] ()
keyword[if] identifier[satisfiable] :
identifier[values] = identifier[tuple] ( identifier[vartype] . identifier[value] )
identifier[planted_solution] ={ identifier[v] : identifier[choice] ( identifier[values] ) keyword[for] identifier[v] keyword[in] identifier[variables] }
identifier[configurations] =[( literal[int] , literal[int] , literal[int] , literal[int] ),( literal[int] , literal[int] , literal[int] , literal[int] ),( literal[int] , literal[int] , literal[int] , literal[int] ),
( literal[int] , literal[int] , literal[int] , literal[int] ),( literal[int] , literal[int] , literal[int] , literal[int] ),( literal[int] , literal[int] , literal[int] , literal[int] )]
keyword[while] identifier[len] ( identifier[constraints] )< identifier[num_clauses] :
identifier[constraint_variables] = identifier[sorted] ( identifier[sample] ( identifier[variables] , literal[int] ))
identifier[config] = identifier[choice] ( identifier[configurations] )
identifier[pos] = identifier[tuple] ( identifier[v] keyword[for] identifier[idx] , identifier[v] keyword[in] identifier[enumerate] ( identifier[constraint_variables] ) keyword[if] identifier[config] [ identifier[idx] ]==( identifier[planted_solution] [ identifier[v] ]> literal[int] ))
identifier[neg] = identifier[tuple] ( identifier[v] keyword[for] identifier[idx] , identifier[v] keyword[in] identifier[enumerate] ( identifier[constraint_variables] ) keyword[if] identifier[config] [ identifier[idx] ]!=( identifier[planted_solution] [ identifier[v] ]> literal[int] ))
identifier[const] = identifier[sat2in4] ( identifier[pos] = identifier[pos] , identifier[neg] = identifier[neg] , identifier[vartype] = identifier[vartype] )
keyword[assert] identifier[const] . identifier[check] ( identifier[planted_solution] )
identifier[constraints] . identifier[add] ( identifier[const] )
keyword[else] :
keyword[while] identifier[len] ( identifier[constraints] )< identifier[num_clauses] :
identifier[constraint_variables] = identifier[sorted] ( identifier[sample] ( identifier[variables] , literal[int] ))
identifier[pos] = identifier[tuple] ( identifier[v] keyword[for] identifier[v] keyword[in] identifier[constraint_variables] keyword[if] identifier[random] ()> literal[int] )
identifier[neg] = identifier[tuple] ( identifier[v] keyword[for] identifier[v] keyword[in] identifier[constraint_variables] keyword[if] identifier[v] keyword[not] keyword[in] identifier[pos] )
identifier[const] = identifier[sat2in4] ( identifier[pos] = identifier[pos] , identifier[neg] = identifier[neg] , identifier[vartype] = identifier[vartype] )
identifier[constraints] . identifier[add] ( identifier[const] )
keyword[for] identifier[const] keyword[in] identifier[constraints] :
identifier[csp] . identifier[add_constraint] ( identifier[const] )
keyword[for] identifier[v] keyword[in] identifier[variables] :
identifier[csp] . identifier[add_variable] ( identifier[v] )
keyword[return] identifier[csp] | def random_2in4sat(num_variables, num_clauses, vartype=dimod.BINARY, satisfiable=True):
"""Random two-in-four (2-in-4) constraint satisfaction problem.
Args:
num_variables (integer): Number of variables (at least four).
num_clauses (integer): Number of constraints that together constitute the
constraint satisfaction problem.
vartype (Vartype, optional, default='BINARY'): Variable type. Accepted
input values:
* Vartype.SPIN, 'SPIN', {-1, 1}
* Vartype.BINARY, 'BINARY', {0, 1}
satisfiable (bool, optional, default=True): True if the CSP can be satisfied.
Returns:
CSP (:obj:`.ConstraintSatisfactionProblem`): CSP that is satisfied when its variables
are assigned values that satisfy a two-in-four satisfiability problem.
Examples:
This example creates a CSP with 6 variables and two random constraints and checks
whether a particular assignment of variables satisifies it.
>>> import dwavebinarycsp
>>> import dwavebinarycsp.factories as sat
>>> csp = sat.random_2in4sat(6, 2)
>>> csp.constraints # doctest: +SKIP
[Constraint.from_configurations(frozenset({(1, 0, 1, 0), (1, 0, 0, 1), (1, 1, 1, 1), (0, 1, 1, 0), (0, 0, 0, 0),
(0, 1, 0, 1)}), (2, 4, 0, 1), Vartype.BINARY, name='2-in-4'),
Constraint.from_configurations(frozenset({(1, 0, 1, 1), (1, 1, 0, 1), (1, 1, 1, 0), (0, 0, 0, 1),
(0, 1, 0, 0), (0, 0, 1, 0)}), (1, 2, 4, 5), Vartype.BINARY, name='2-in-4')]
>>> csp.check({0: 1, 1: 0, 2: 1, 3: 1, 4: 0, 5: 0}) # doctest: +SKIP
True
"""
if num_variables < 4:
raise ValueError('a 2in4 problem needs at least 4 variables') # depends on [control=['if'], data=[]]
if num_clauses > 16 * _nchoosek(num_variables, 4): # 16 different negation patterns
raise ValueError('too many clauses') # depends on [control=['if'], data=[]]
# also checks the vartype argument
csp = ConstraintSatisfactionProblem(vartype)
variables = list(range(num_variables))
constraints = set()
if satisfiable:
values = tuple(vartype.value)
planted_solution = {v: choice(values) for v in variables}
configurations = [(0, 0, 1, 1), (0, 1, 0, 1), (1, 0, 0, 1), (0, 1, 1, 0), (1, 0, 1, 0), (1, 1, 0, 0)]
while len(constraints) < num_clauses:
# sort the variables because constraints are hashed on configurations/variables
# because 2-in-4 sat is symmetric, we would not get a hash conflict for different
# variable orders
constraint_variables = sorted(sample(variables, 4))
# pick (uniformly) a configuration and determine which variables we need to negate to
# match the chosen configuration
config = choice(configurations)
pos = tuple((v for (idx, v) in enumerate(constraint_variables) if config[idx] == (planted_solution[v] > 0)))
neg = tuple((v for (idx, v) in enumerate(constraint_variables) if config[idx] != (planted_solution[v] > 0)))
const = sat2in4(pos=pos, neg=neg, vartype=vartype)
assert const.check(planted_solution)
constraints.add(const) # depends on [control=['while'], data=[]] # depends on [control=['if'], data=[]]
else:
while len(constraints) < num_clauses:
# sort the variables because constraints are hashed on configurations/variables
# because 2-in-4 sat is symmetric, we would not get a hash conflict for different
# variable orders
constraint_variables = sorted(sample(variables, 4))
# randomly determine negations
pos = tuple((v for v in constraint_variables if random() > 0.5))
neg = tuple((v for v in constraint_variables if v not in pos))
const = sat2in4(pos=pos, neg=neg, vartype=vartype)
constraints.add(const) # depends on [control=['while'], data=[]]
for const in constraints:
csp.add_constraint(const) # depends on [control=['for'], data=['const']]
# in case any variables didn't make it in
for v in variables:
csp.add_variable(v) # depends on [control=['for'], data=['v']]
return csp |
def calculate_leapdays(init_date, final_date):
"""Currently unsupported, it only works for differences in years."""
leap_days = (final_date.year - 1) // 4 - (init_date.year - 1) // 4
leap_days -= (final_date.year - 1) // 100 - (init_date.year - 1) // 100
leap_days += (final_date.year - 1) // 400 - (init_date.year - 1) // 400
# TODO: Internal date correction (e.g. init_date is 1-March or later)
return datetime.timedelta(days=leap_days) | def function[calculate_leapdays, parameter[init_date, final_date]]:
constant[Currently unsupported, it only works for differences in years.]
variable[leap_days] assign[=] binary_operation[binary_operation[binary_operation[name[final_date].year - constant[1]] <ast.FloorDiv object at 0x7da2590d6bc0> constant[4]] - binary_operation[binary_operation[name[init_date].year - constant[1]] <ast.FloorDiv object at 0x7da2590d6bc0> constant[4]]]
<ast.AugAssign object at 0x7da1b0335360>
<ast.AugAssign object at 0x7da1b03357b0>
return[call[name[datetime].timedelta, parameter[]]] | keyword[def] identifier[calculate_leapdays] ( identifier[init_date] , identifier[final_date] ):
literal[string]
identifier[leap_days] =( identifier[final_date] . identifier[year] - literal[int] )// literal[int] -( identifier[init_date] . identifier[year] - literal[int] )// literal[int]
identifier[leap_days] -=( identifier[final_date] . identifier[year] - literal[int] )// literal[int] -( identifier[init_date] . identifier[year] - literal[int] )// literal[int]
identifier[leap_days] +=( identifier[final_date] . identifier[year] - literal[int] )// literal[int] -( identifier[init_date] . identifier[year] - literal[int] )// literal[int]
keyword[return] identifier[datetime] . identifier[timedelta] ( identifier[days] = identifier[leap_days] ) | def calculate_leapdays(init_date, final_date):
"""Currently unsupported, it only works for differences in years."""
leap_days = (final_date.year - 1) // 4 - (init_date.year - 1) // 4
leap_days -= (final_date.year - 1) // 100 - (init_date.year - 1) // 100
leap_days += (final_date.year - 1) // 400 - (init_date.year - 1) // 400
# TODO: Internal date correction (e.g. init_date is 1-March or later)
return datetime.timedelta(days=leap_days) |
def set_emergency_params(
self, workers_step=None, idle_cycles_max=None, queue_size=None, queue_nonzero_delay=None):
"""Sets busyness algorithm emergency workers related params.
Emergency workers could be spawned depending upon uWSGI backlog state.
.. note:: These options are Linux only.
:param int workers_step: Number of emergency workers to spawn. Default: 1.
:param int idle_cycles_max: Idle cycles to reach before stopping an emergency worker. Default: 3.
:param int queue_size: Listen queue (backlog) max size to spawn an emergency worker. Default: 33.
:param int queue_nonzero_delay: If the request listen queue is > 0 for more than given amount of seconds
new emergency workers will be spawned. Default: 60.
"""
self._set('cheaper-busyness-backlog-step', workers_step)
self._set('cheaper-busyness-backlog-multiplier', idle_cycles_max)
self._set('cheaper-busyness-backlog-alert', queue_size)
self._set('cheaper-busyness-backlog-nonzero', queue_nonzero_delay)
return self | def function[set_emergency_params, parameter[self, workers_step, idle_cycles_max, queue_size, queue_nonzero_delay]]:
constant[Sets busyness algorithm emergency workers related params.
Emergency workers could be spawned depending upon uWSGI backlog state.
.. note:: These options are Linux only.
:param int workers_step: Number of emergency workers to spawn. Default: 1.
:param int idle_cycles_max: Idle cycles to reach before stopping an emergency worker. Default: 3.
:param int queue_size: Listen queue (backlog) max size to spawn an emergency worker. Default: 33.
:param int queue_nonzero_delay: If the request listen queue is > 0 for more than given amount of seconds
new emergency workers will be spawned. Default: 60.
]
call[name[self]._set, parameter[constant[cheaper-busyness-backlog-step], name[workers_step]]]
call[name[self]._set, parameter[constant[cheaper-busyness-backlog-multiplier], name[idle_cycles_max]]]
call[name[self]._set, parameter[constant[cheaper-busyness-backlog-alert], name[queue_size]]]
call[name[self]._set, parameter[constant[cheaper-busyness-backlog-nonzero], name[queue_nonzero_delay]]]
return[name[self]] | keyword[def] identifier[set_emergency_params] (
identifier[self] , identifier[workers_step] = keyword[None] , identifier[idle_cycles_max] = keyword[None] , identifier[queue_size] = keyword[None] , identifier[queue_nonzero_delay] = keyword[None] ):
literal[string]
identifier[self] . identifier[_set] ( literal[string] , identifier[workers_step] )
identifier[self] . identifier[_set] ( literal[string] , identifier[idle_cycles_max] )
identifier[self] . identifier[_set] ( literal[string] , identifier[queue_size] )
identifier[self] . identifier[_set] ( literal[string] , identifier[queue_nonzero_delay] )
keyword[return] identifier[self] | def set_emergency_params(self, workers_step=None, idle_cycles_max=None, queue_size=None, queue_nonzero_delay=None):
"""Sets busyness algorithm emergency workers related params.
Emergency workers could be spawned depending upon uWSGI backlog state.
.. note:: These options are Linux only.
:param int workers_step: Number of emergency workers to spawn. Default: 1.
:param int idle_cycles_max: Idle cycles to reach before stopping an emergency worker. Default: 3.
:param int queue_size: Listen queue (backlog) max size to spawn an emergency worker. Default: 33.
:param int queue_nonzero_delay: If the request listen queue is > 0 for more than given amount of seconds
new emergency workers will be spawned. Default: 60.
"""
self._set('cheaper-busyness-backlog-step', workers_step)
self._set('cheaper-busyness-backlog-multiplier', idle_cycles_max)
self._set('cheaper-busyness-backlog-alert', queue_size)
self._set('cheaper-busyness-backlog-nonzero', queue_nonzero_delay)
return self |
def enable_gui(gui=None, app=None):
"""Switch amongst GUI input hooks by name.
This is just a utility wrapper around the methods of the InputHookManager
object.
Parameters
----------
gui : optional, string or None
If None (or 'none'), clears input hook, otherwise it must be one
of the recognized GUI names (see ``GUI_*`` constants in module).
app : optional, existing application object.
For toolkits that have the concept of a global app, you can supply an
existing one. If not given, the toolkit will be probed for one, and if
none is found, a new one will be created. Note that GTK does not have
this concept, and passing an app if ``gui=="GTK"`` will raise an error.
Returns
-------
The output of the underlying gui switch routine, typically the actual
PyOS_InputHook wrapper object or the GUI toolkit app created, if there was
one.
"""
if get_return_control_callback() is None:
raise ValueError("A return_control_callback must be supplied as a reference before a gui can be enabled")
guis = {GUI_NONE: clear_inputhook,
GUI_OSX: enable_mac,
GUI_TK: enable_tk,
GUI_GTK: enable_gtk,
GUI_WX: enable_wx,
GUI_QT: enable_qt,
GUI_QT4: enable_qt4,
GUI_QT5: enable_qt5,
GUI_GLUT: enable_glut,
GUI_PYGLET: enable_pyglet,
GUI_GTK3: enable_gtk3,
}
try:
gui_hook = guis[gui]
except KeyError:
if gui is None or gui == '':
gui_hook = clear_inputhook
else:
e = "Invalid GUI request %r, valid ones are:%s" % (gui, guis.keys())
raise ValueError(e)
return gui_hook(app) | def function[enable_gui, parameter[gui, app]]:
constant[Switch amongst GUI input hooks by name.
This is just a utility wrapper around the methods of the InputHookManager
object.
Parameters
----------
gui : optional, string or None
If None (or 'none'), clears input hook, otherwise it must be one
of the recognized GUI names (see ``GUI_*`` constants in module).
app : optional, existing application object.
For toolkits that have the concept of a global app, you can supply an
existing one. If not given, the toolkit will be probed for one, and if
none is found, a new one will be created. Note that GTK does not have
this concept, and passing an app if ``gui=="GTK"`` will raise an error.
Returns
-------
The output of the underlying gui switch routine, typically the actual
PyOS_InputHook wrapper object or the GUI toolkit app created, if there was
one.
]
if compare[call[name[get_return_control_callback], parameter[]] is constant[None]] begin[:]
<ast.Raise object at 0x7da20c6c47f0>
variable[guis] assign[=] dictionary[[<ast.Name object at 0x7da20c6c4df0>, <ast.Name object at 0x7da20c6c5d80>, <ast.Name object at 0x7da20c6c7220>, <ast.Name object at 0x7da20c6c47c0>, <ast.Name object at 0x7da20c6c46a0>, <ast.Name object at 0x7da20c6c5f30>, <ast.Name object at 0x7da20c6c5f90>, <ast.Name object at 0x7da20c6c49d0>, <ast.Name object at 0x7da20c6c5690>, <ast.Name object at 0x7da20c6c6470>, <ast.Name object at 0x7da20c6c6dd0>], [<ast.Name object at 0x7da20c6c4d60>, <ast.Name object at 0x7da20c6c7e20>, <ast.Name object at 0x7da20c6c6260>, <ast.Name object at 0x7da20c6c50c0>, <ast.Name object at 0x7da20c6c5570>, <ast.Name object at 0x7da20c6c4070>, <ast.Name object at 0x7da20c6c7430>, <ast.Name object at 0x7da20c6c7fa0>, <ast.Name object at 0x7da20c6c7010>, <ast.Name object at 0x7da20c6c5c00>, <ast.Name object at 0x7da20c6c51e0>]]
<ast.Try object at 0x7da20c6c40a0>
return[call[name[gui_hook], parameter[name[app]]]] | keyword[def] identifier[enable_gui] ( identifier[gui] = keyword[None] , identifier[app] = keyword[None] ):
literal[string]
keyword[if] identifier[get_return_control_callback] () keyword[is] keyword[None] :
keyword[raise] identifier[ValueError] ( literal[string] )
identifier[guis] ={ identifier[GUI_NONE] : identifier[clear_inputhook] ,
identifier[GUI_OSX] : identifier[enable_mac] ,
identifier[GUI_TK] : identifier[enable_tk] ,
identifier[GUI_GTK] : identifier[enable_gtk] ,
identifier[GUI_WX] : identifier[enable_wx] ,
identifier[GUI_QT] : identifier[enable_qt] ,
identifier[GUI_QT4] : identifier[enable_qt4] ,
identifier[GUI_QT5] : identifier[enable_qt5] ,
identifier[GUI_GLUT] : identifier[enable_glut] ,
identifier[GUI_PYGLET] : identifier[enable_pyglet] ,
identifier[GUI_GTK3] : identifier[enable_gtk3] ,
}
keyword[try] :
identifier[gui_hook] = identifier[guis] [ identifier[gui] ]
keyword[except] identifier[KeyError] :
keyword[if] identifier[gui] keyword[is] keyword[None] keyword[or] identifier[gui] == literal[string] :
identifier[gui_hook] = identifier[clear_inputhook]
keyword[else] :
identifier[e] = literal[string] %( identifier[gui] , identifier[guis] . identifier[keys] ())
keyword[raise] identifier[ValueError] ( identifier[e] )
keyword[return] identifier[gui_hook] ( identifier[app] ) | def enable_gui(gui=None, app=None):
"""Switch amongst GUI input hooks by name.
This is just a utility wrapper around the methods of the InputHookManager
object.
Parameters
----------
gui : optional, string or None
If None (or 'none'), clears input hook, otherwise it must be one
of the recognized GUI names (see ``GUI_*`` constants in module).
app : optional, existing application object.
For toolkits that have the concept of a global app, you can supply an
existing one. If not given, the toolkit will be probed for one, and if
none is found, a new one will be created. Note that GTK does not have
this concept, and passing an app if ``gui=="GTK"`` will raise an error.
Returns
-------
The output of the underlying gui switch routine, typically the actual
PyOS_InputHook wrapper object or the GUI toolkit app created, if there was
one.
"""
if get_return_control_callback() is None:
raise ValueError('A return_control_callback must be supplied as a reference before a gui can be enabled') # depends on [control=['if'], data=[]]
guis = {GUI_NONE: clear_inputhook, GUI_OSX: enable_mac, GUI_TK: enable_tk, GUI_GTK: enable_gtk, GUI_WX: enable_wx, GUI_QT: enable_qt, GUI_QT4: enable_qt4, GUI_QT5: enable_qt5, GUI_GLUT: enable_glut, GUI_PYGLET: enable_pyglet, GUI_GTK3: enable_gtk3}
try:
gui_hook = guis[gui] # depends on [control=['try'], data=[]]
except KeyError:
if gui is None or gui == '':
gui_hook = clear_inputhook # depends on [control=['if'], data=[]]
else:
e = 'Invalid GUI request %r, valid ones are:%s' % (gui, guis.keys())
raise ValueError(e) # depends on [control=['except'], data=[]]
return gui_hook(app) |
def p_postfix_expr(self, p):
"""postfix_expr : left_hand_side_expr
| left_hand_side_expr PLUSPLUS
| left_hand_side_expr MINUSMINUS
"""
if len(p) == 2:
p[0] = p[1]
else:
p[0] = ast.UnaryOp(op=p[2], value=p[1], postfix=True) | def function[p_postfix_expr, parameter[self, p]]:
constant[postfix_expr : left_hand_side_expr
| left_hand_side_expr PLUSPLUS
| left_hand_side_expr MINUSMINUS
]
if compare[call[name[len], parameter[name[p]]] equal[==] constant[2]] begin[:]
call[name[p]][constant[0]] assign[=] call[name[p]][constant[1]] | keyword[def] identifier[p_postfix_expr] ( identifier[self] , identifier[p] ):
literal[string]
keyword[if] identifier[len] ( identifier[p] )== literal[int] :
identifier[p] [ literal[int] ]= identifier[p] [ literal[int] ]
keyword[else] :
identifier[p] [ literal[int] ]= identifier[ast] . identifier[UnaryOp] ( identifier[op] = identifier[p] [ literal[int] ], identifier[value] = identifier[p] [ literal[int] ], identifier[postfix] = keyword[True] ) | def p_postfix_expr(self, p):
"""postfix_expr : left_hand_side_expr
| left_hand_side_expr PLUSPLUS
| left_hand_side_expr MINUSMINUS
"""
if len(p) == 2:
p[0] = p[1] # depends on [control=['if'], data=[]]
else:
p[0] = ast.UnaryOp(op=p[2], value=p[1], postfix=True) |
def storage_uri_for_key(key):
    """Build a StorageUri naming the bucket and object of the given key.

    :type key: :class:`boto.s3.key.Key` or subclass
    :param key: URI naming bucket + optional object.
    :raises InvalidUriError: if ``key`` is not a ``boto.s3.key.Key``.
    """
    if not isinstance(key, boto.s3.key.Key):
        raise InvalidUriError('Requested key (%s) is not a subclass of '
                              'boto.s3.key.Key' % str(type(key)))
    # The URI scheme is the storage provider's name (e.g. "s3", "gs").
    provider = key.bucket.connection.provider.get_provider_name()
    return storage_uri('%s://%s/%s' % (provider, key.bucket.name, key.name))
constant[Returns a StorageUri for the given key.
:type key: :class:`boto.s3.key.Key` or subclass
:param key: URI naming bucket + optional object.
]
if <ast.UnaryOp object at 0x7da1b2651fc0> begin[:]
<ast.Raise object at 0x7da1b26530d0>
variable[prov_name] assign[=] call[name[key].bucket.connection.provider.get_provider_name, parameter[]]
variable[uri_str] assign[=] binary_operation[constant[%s://%s/%s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1b26529b0>, <ast.Attribute object at 0x7da1b2651a20>, <ast.Attribute object at 0x7da1b2652320>]]]
return[call[name[storage_uri], parameter[name[uri_str]]]] | keyword[def] identifier[storage_uri_for_key] ( identifier[key] ):
literal[string]
keyword[if] keyword[not] identifier[isinstance] ( identifier[key] , identifier[boto] . identifier[s3] . identifier[key] . identifier[Key] ):
keyword[raise] identifier[InvalidUriError] ( literal[string]
literal[string] % identifier[str] ( identifier[type] ( identifier[key] )))
identifier[prov_name] = identifier[key] . identifier[bucket] . identifier[connection] . identifier[provider] . identifier[get_provider_name] ()
identifier[uri_str] = literal[string] %( identifier[prov_name] , identifier[key] . identifier[bucket] . identifier[name] , identifier[key] . identifier[name] )
keyword[return] identifier[storage_uri] ( identifier[uri_str] ) | def storage_uri_for_key(key):
"""Returns a StorageUri for the given key.
:type key: :class:`boto.s3.key.Key` or subclass
:param key: URI naming bucket + optional object.
"""
if not isinstance(key, boto.s3.key.Key):
raise InvalidUriError('Requested key (%s) is not a subclass of boto.s3.key.Key' % str(type(key))) # depends on [control=['if'], data=[]]
prov_name = key.bucket.connection.provider.get_provider_name()
uri_str = '%s://%s/%s' % (prov_name, key.bucket.name, key.name)
return storage_uri(uri_str) |
def loads(s, encoding=None, cls=None, object_hook=None, parse_float=None,
          parse_int=None, parse_constant=None, object_pairs_hook=None, **kw):
    r"""Deserialize ``s`` (a ``str`` or ``unicode`` instance containing a DSON
    document) to a Python object.

    If ``s`` is a ``str`` encoded with an ASCII-based encoding other than
    utf-8 (e.g. latin-1), pass that ``encoding`` name. Encodings that are not
    ASCII based (such as UCS-2) must be decoded to ``unicode`` by the caller.

    ``object_hook`` is called with the result of every object literal decode
    (a ``dict``); its return value is used instead of the ``dict``. This can
    implement custom decoders (e.g. DSON-RPC class hinting).

    ``object_pairs_hook`` is called with an ordered list of key/value pairs
    for every object literal, useful for order-preserving decoders such as
    ``collections.OrderedDict``. It takes priority over ``object_hook``.

    ``parse_float`` / ``parse_int`` are called with the string of every DSON
    float/int to be decoded; the defaults are equivalent to ``float(num_str)``
    and ``int(num_str)``.

    ``parse_constant``, if specified, is called with one of the following
    strings: -Infinity, Infinity, NaN, null, true, false; it may raise to
    reject invalid DSON numbers.

    To use a custom ``DSONDecoder`` subclass, specify it with the ``cls``
    kwarg; otherwise ``DSONDecoder`` is used.
    """
    hooks = (
        ('object_hook', object_hook),
        ('object_pairs_hook', object_pairs_hook),
        ('parse_float', parse_float),
        ('parse_int', parse_int),
        ('parse_constant', parse_constant),
    )
    # Fast path: nothing customised at all -> reuse the shared decoder.
    if (cls is None and encoding is None and not kw
            and all(hook is None for _, hook in hooks)):
        return _default_decoder.decode(s)
    # Forward every explicitly supplied hook to the decoder constructor.
    for hook_name, hook in hooks:
        if hook is not None:
            kw[hook_name] = hook
    decoder_cls = DSONDecoder if cls is None else cls
    return decoder_cls(encoding=encoding, **kw).decode(s)
constant[Deserialize ``s`` (a ``str`` or ``unicode`` instance containing a DSON
document) to a Python object.
If ``s`` is a ``str`` instance and is encoded with an ASCII based encoding
other than utf-8 (e.g. latin-1) then an appropriate ``encoding`` name
must be specified. Encodings that are not ASCII based (such as UCS-2)
are not allowed and should be decoded to ``unicode`` first.
``object_hook`` is an optional function that will be called with the
result of any object literal decode (a ``dict``). The return value of
``object_hook`` will be used instead of the ``dict``. This feature
can be used to implement custom decoders (e.g. DSON-RPC class hinting).
``object_pairs_hook`` is an optional function that will be called with the
result of any object literal decoded with an ordered list of pairs. The
return value of ``object_pairs_hook`` will be used instead of the ``dict``.
This feature can be used to implement custom decoders that rely on the
order that the key and value pairs are decoded (for example,
collections.OrderedDict will remember the order of insertion). If
``object_hook`` is also defined, the ``object_pairs_hook`` takes priority.
``parse_float``, if specified, will be called with the string
of every DSON float to be decoded. By default this is equivalent to
float(num_str). This can be used to use another datatype or parser
for DSON floats (e.g. decimal.Decimal).
``parse_int``, if specified, will be called with the string
of every DSON int to be decoded. By default this is equivalent to
int(num_str). This can be used to use another datatype or parser
for DSON integers (e.g. float).
``parse_constant``, if specified, will be called with one of the
following strings: -Infinity, Infinity, NaN, null, true, false.
This can be used to raise an exception if invalid DSON numbers
are encountered.
To use a custom ``DSONDecoder`` subclass, specify it with the ``cls``
kwarg; otherwise ``DSONDecoder`` is used.
]
if <ast.BoolOp object at 0x7da18f7207f0> begin[:]
return[call[name[_default_decoder].decode, parameter[name[s]]]]
if compare[name[cls] is constant[None]] begin[:]
variable[cls] assign[=] name[DSONDecoder]
if compare[name[object_hook] is_not constant[None]] begin[:]
call[name[kw]][constant[object_hook]] assign[=] name[object_hook]
if compare[name[object_pairs_hook] is_not constant[None]] begin[:]
call[name[kw]][constant[object_pairs_hook]] assign[=] name[object_pairs_hook]
if compare[name[parse_float] is_not constant[None]] begin[:]
call[name[kw]][constant[parse_float]] assign[=] name[parse_float]
if compare[name[parse_int] is_not constant[None]] begin[:]
call[name[kw]][constant[parse_int]] assign[=] name[parse_int]
if compare[name[parse_constant] is_not constant[None]] begin[:]
call[name[kw]][constant[parse_constant]] assign[=] name[parse_constant]
return[call[call[name[cls], parameter[]].decode, parameter[name[s]]]] | keyword[def] identifier[loads] ( identifier[s] , identifier[encoding] = keyword[None] , identifier[cls] = keyword[None] , identifier[object_hook] = keyword[None] , identifier[parse_float] = keyword[None] ,
identifier[parse_int] = keyword[None] , identifier[parse_constant] = keyword[None] , identifier[object_pairs_hook] = keyword[None] ,** identifier[kw] ):
literal[string]
keyword[if] ( identifier[cls] keyword[is] keyword[None] keyword[and] identifier[encoding] keyword[is] keyword[None] keyword[and] identifier[object_hook] keyword[is] keyword[None] keyword[and]
identifier[parse_int] keyword[is] keyword[None] keyword[and] identifier[parse_float] keyword[is] keyword[None] keyword[and]
identifier[parse_constant] keyword[is] keyword[None] keyword[and] identifier[object_pairs_hook] keyword[is] keyword[None] keyword[and] keyword[not] identifier[kw] ):
keyword[return] identifier[_default_decoder] . identifier[decode] ( identifier[s] )
keyword[if] identifier[cls] keyword[is] keyword[None] :
identifier[cls] = identifier[DSONDecoder]
keyword[if] identifier[object_hook] keyword[is] keyword[not] keyword[None] :
identifier[kw] [ literal[string] ]= identifier[object_hook]
keyword[if] identifier[object_pairs_hook] keyword[is] keyword[not] keyword[None] :
identifier[kw] [ literal[string] ]= identifier[object_pairs_hook]
keyword[if] identifier[parse_float] keyword[is] keyword[not] keyword[None] :
identifier[kw] [ literal[string] ]= identifier[parse_float]
keyword[if] identifier[parse_int] keyword[is] keyword[not] keyword[None] :
identifier[kw] [ literal[string] ]= identifier[parse_int]
keyword[if] identifier[parse_constant] keyword[is] keyword[not] keyword[None] :
identifier[kw] [ literal[string] ]= identifier[parse_constant]
keyword[return] identifier[cls] ( identifier[encoding] = identifier[encoding] ,** identifier[kw] ). identifier[decode] ( identifier[s] ) | def loads(s, encoding=None, cls=None, object_hook=None, parse_float=None, parse_int=None, parse_constant=None, object_pairs_hook=None, **kw):
"""Deserialize ``s`` (a ``str`` or ``unicode`` instance containing a DSON
document) to a Python object.
If ``s`` is a ``str`` instance and is encoded with an ASCII based encoding
other than utf-8 (e.g. latin-1) then an appropriate ``encoding`` name
must be specified. Encodings that are not ASCII based (such as UCS-2)
are not allowed and should be decoded to ``unicode`` first.
``object_hook`` is an optional function that will be called with the
result of any object literal decode (a ``dict``). The return value of
``object_hook`` will be used instead of the ``dict``. This feature
can be used to implement custom decoders (e.g. DSON-RPC class hinting).
``object_pairs_hook`` is an optional function that will be called with the
result of any object literal decoded with an ordered list of pairs. The
return value of ``object_pairs_hook`` will be used instead of the ``dict``.
This feature can be used to implement custom decoders that rely on the
order that the key and value pairs are decoded (for example,
collections.OrderedDict will remember the order of insertion). If
``object_hook`` is also defined, the ``object_pairs_hook`` takes priority.
``parse_float``, if specified, will be called with the string
of every DSON float to be decoded. By default this is equivalent to
float(num_str). This can be used to use another datatype or parser
for DSON floats (e.g. decimal.Decimal).
``parse_int``, if specified, will be called with the string
of every DSON int to be decoded. By default this is equivalent to
int(num_str). This can be used to use another datatype or parser
for DSON integers (e.g. float).
``parse_constant``, if specified, will be called with one of the
following strings: -Infinity, Infinity, NaN, null, true, false.
This can be used to raise an exception if invalid DSON numbers
are encountered.
To use a custom ``DSONDecoder`` subclass, specify it with the ``cls``
kwarg; otherwise ``DSONDecoder`` is used.
"""
if cls is None and encoding is None and (object_hook is None) and (parse_int is None) and (parse_float is None) and (parse_constant is None) and (object_pairs_hook is None) and (not kw):
return _default_decoder.decode(s) # depends on [control=['if'], data=[]]
if cls is None:
cls = DSONDecoder # depends on [control=['if'], data=['cls']]
if object_hook is not None:
kw['object_hook'] = object_hook # depends on [control=['if'], data=['object_hook']]
if object_pairs_hook is not None:
kw['object_pairs_hook'] = object_pairs_hook # depends on [control=['if'], data=['object_pairs_hook']]
if parse_float is not None:
kw['parse_float'] = parse_float # depends on [control=['if'], data=['parse_float']]
if parse_int is not None:
kw['parse_int'] = parse_int # depends on [control=['if'], data=['parse_int']]
if parse_constant is not None:
kw['parse_constant'] = parse_constant # depends on [control=['if'], data=['parse_constant']]
return cls(encoding=encoding, **kw).decode(s) |
def _process_module(self, name, contents, parent, match, filepath=None):
    """Processes a regex match for a module to create a CodeElement.

    :arg name: name of the module extracted by the caller's regex.
    :arg contents: the source text of the module body.
    :arg parent: code element that will own the resulting Module.
    :arg match: regex match object; its ``string`` attribute is scanned for
      public declarations and stored as the module's refstring.
    :arg filepath: optional path of the file the module came from; stored
      lower-cased on the result when given.
    :returns: the fully populated :class:`Module` instance.
    """
    #First, get hold of the name and contents of the module so that we can process the other
    #parts of the module.
    modifiers = []
    #We need to check for the private keyword before any type or contains declarations
    if self.RE_PRIV.search(contents):
        modifiers.append("private")
    #The only other modifier for modules ought to be implicit none.
    #Use a raw string so "\s" is a regex escape rather than an invalid
    #string escape (SyntaxWarning on Python 3.12+).
    if re.search(r"implicit\s+none", contents):
        modifiers.append("implicit none")
    #Next, parse out the dependencies of the module on other modules
    dependencies = self._parse_use(contents)
    publics, pubstart = self._process_publics(match.string)
    #We can now create the CodeElement
    result = Module(name, modifiers, dependencies, publics, contents, parent)
    if filepath is not None:
        result.filepath = filepath.lower()
    result.start = match.start()
    result.end = match.end()
    result.refstring = match.string
    result.set_public_start(pubstart)
    if self.RE_PRECOMP.search(contents):
        result.precompile = True
    self.xparser.parse(result)
    self.tparser.parse(result)
    #It is possible for the module to have members, parse those
    self._parse_members(contents, result)
    self.iparser.parse(result)
    #Now we can update the docstrings for the types. They rely on data
    #extracted during parse_members() which is why they have to run
    #separately over here.
    for t in result.types:
        self.tparser.update_docs(result.types[t], result)
    return result
constant[Processes a regex match for a module to create a CodeElement.]
variable[modifiers] assign[=] list[[]]
if call[name[self].RE_PRIV.search, parameter[name[contents]]] begin[:]
call[name[modifiers].append, parameter[constant[private]]]
if call[name[re].search, parameter[constant[implicit\s+none], name[contents]]] begin[:]
call[name[modifiers].append, parameter[constant[implicit none]]]
variable[dependencies] assign[=] call[name[self]._parse_use, parameter[name[contents]]]
<ast.Tuple object at 0x7da20e954b80> assign[=] call[name[self]._process_publics, parameter[name[match].string]]
variable[result] assign[=] call[name[Module], parameter[name[name], name[modifiers], name[dependencies], name[publics], name[contents], name[parent]]]
if compare[name[filepath] is_not constant[None]] begin[:]
name[result].filepath assign[=] call[name[filepath].lower, parameter[]]
name[result].start assign[=] call[name[match].start, parameter[]]
name[result].end assign[=] call[name[match].end, parameter[]]
name[result].refstring assign[=] name[match].string
call[name[result].set_public_start, parameter[name[pubstart]]]
if call[name[self].RE_PRECOMP.search, parameter[name[contents]]] begin[:]
name[result].precompile assign[=] constant[True]
call[name[self].xparser.parse, parameter[name[result]]]
call[name[self].tparser.parse, parameter[name[result]]]
call[name[self]._parse_members, parameter[name[contents], name[result]]]
call[name[self].iparser.parse, parameter[name[result]]]
for taget[name[t]] in starred[name[result].types] begin[:]
call[name[self].tparser.update_docs, parameter[call[name[result].types][name[t]], name[result]]]
return[name[result]] | keyword[def] identifier[_process_module] ( identifier[self] , identifier[name] , identifier[contents] , identifier[parent] , identifier[match] , identifier[filepath] = keyword[None] ):
literal[string]
identifier[modifiers] =[]
keyword[if] identifier[self] . identifier[RE_PRIV] . identifier[search] ( identifier[contents] ):
identifier[modifiers] . identifier[append] ( literal[string] )
keyword[if] identifier[re] . identifier[search] ( literal[string] , identifier[contents] ):
identifier[modifiers] . identifier[append] ( literal[string] )
identifier[dependencies] = identifier[self] . identifier[_parse_use] ( identifier[contents] )
identifier[publics] , identifier[pubstart] = identifier[self] . identifier[_process_publics] ( identifier[match] . identifier[string] )
identifier[result] = identifier[Module] ( identifier[name] , identifier[modifiers] , identifier[dependencies] , identifier[publics] , identifier[contents] , identifier[parent] )
keyword[if] identifier[filepath] keyword[is] keyword[not] keyword[None] :
identifier[result] . identifier[filepath] = identifier[filepath] . identifier[lower] ()
identifier[result] . identifier[start] = identifier[match] . identifier[start] ()
identifier[result] . identifier[end] = identifier[match] . identifier[end] ()
identifier[result] . identifier[refstring] = identifier[match] . identifier[string]
identifier[result] . identifier[set_public_start] ( identifier[pubstart] )
keyword[if] identifier[self] . identifier[RE_PRECOMP] . identifier[search] ( identifier[contents] ):
identifier[result] . identifier[precompile] = keyword[True]
identifier[self] . identifier[xparser] . identifier[parse] ( identifier[result] )
identifier[self] . identifier[tparser] . identifier[parse] ( identifier[result] )
identifier[self] . identifier[_parse_members] ( identifier[contents] , identifier[result] )
identifier[self] . identifier[iparser] . identifier[parse] ( identifier[result] )
keyword[for] identifier[t] keyword[in] identifier[result] . identifier[types] :
identifier[self] . identifier[tparser] . identifier[update_docs] ( identifier[result] . identifier[types] [ identifier[t] ], identifier[result] )
keyword[return] identifier[result] | def _process_module(self, name, contents, parent, match, filepath=None):
"""Processes a regex match for a module to create a CodeElement."""
#First, get hold of the name and contents of the module so that we can process the other
#parts of the module.
modifiers = []
#We need to check for the private keyword before any type or contains declarations
if self.RE_PRIV.search(contents):
modifiers.append('private') # depends on [control=['if'], data=[]]
#The only other modifier for modules ought to be implicit none
if re.search('implicit\\s+none', contents):
modifiers.append('implicit none') # depends on [control=['if'], data=[]]
#Next, parse out the dependencies of the module on other modules
dependencies = self._parse_use(contents)
(publics, pubstart) = self._process_publics(match.string)
#We can now create the CodeElement
result = Module(name, modifiers, dependencies, publics, contents, parent)
if filepath is not None:
result.filepath = filepath.lower() # depends on [control=['if'], data=['filepath']]
result.start = match.start()
result.end = match.end()
result.refstring = match.string
result.set_public_start(pubstart)
if self.RE_PRECOMP.search(contents):
result.precompile = True # depends on [control=['if'], data=[]]
self.xparser.parse(result)
self.tparser.parse(result)
#It is possible for the module to have members, parse those
self._parse_members(contents, result)
self.iparser.parse(result)
#Now we can update the docstrings for the types. They rely on data
#extracted during parse_members() which is why they have to run
#separately over here.
for t in result.types:
self.tparser.update_docs(result.types[t], result) # depends on [control=['for'], data=['t']]
return result |
def start_discovery(add_callback=None, remove_callback=None):
    """
    Start discovering chromecasts on the network.

    Discovery runs on a separate thread; whenever a chromecast is found, the
    callback is invoked with the discovered chromecast's zeroconf name — the
    dictionary key under which its metadata appears in ``listener.services``.

    Returns a ``(CastListener, ServiceBrowser)`` pair. The CastListener holds
    the information for discovered chromecasts; pass the ServiceBrowser to
    the stop_discovery method to stop discovering. If the browser could not
    be started, ``False`` is returned in its place.
    """
    listener = CastListener(add_callback, remove_callback)
    browser = False
    try:
        browser = zeroconf.ServiceBrowser(
            zeroconf.Zeroconf(), "_googlecast._tcp.local.", listener)
    except (zeroconf.BadTypeInNameException,
            NotImplementedError,
            OSError,
            socket.error,
            zeroconf.NonUniqueNameException):
        # Discovery could not be started; the caller gets False instead of
        # a ServiceBrowser and can decide how to proceed.
        pass
    return listener, browser
constant[
Start discovering chromecasts on the network.
This method will start discovering chromecasts on a separate thread. When
a chromecast is discovered, the callback will be called with the
discovered chromecast's zeroconf name. This is the dictionary key to find
the chromecast metadata in listener.services.
This method returns the CastListener object and the zeroconf ServiceBrowser
object. The CastListener object will contain information for the discovered
chromecasts. To stop discovery, call the stop_discovery method with the
ServiceBrowser object.
]
variable[listener] assign[=] call[name[CastListener], parameter[name[add_callback], name[remove_callback]]]
variable[service_browser] assign[=] constant[False]
<ast.Try object at 0x7da20c7c9150>
return[tuple[[<ast.Name object at 0x7da20c7cbac0>, <ast.Name object at 0x7da20c7cb220>]]] | keyword[def] identifier[start_discovery] ( identifier[add_callback] = keyword[None] , identifier[remove_callback] = keyword[None] ):
literal[string]
identifier[listener] = identifier[CastListener] ( identifier[add_callback] , identifier[remove_callback] )
identifier[service_browser] = keyword[False]
keyword[try] :
identifier[service_browser] = identifier[zeroconf] . identifier[ServiceBrowser] ( identifier[zeroconf] . identifier[Zeroconf] (),
literal[string] ,
identifier[listener] )
keyword[except] ( identifier[zeroconf] . identifier[BadTypeInNameException] ,
identifier[NotImplementedError] ,
identifier[OSError] ,
identifier[socket] . identifier[error] ,
identifier[zeroconf] . identifier[NonUniqueNameException] ):
keyword[pass]
keyword[return] identifier[listener] , identifier[service_browser] | def start_discovery(add_callback=None, remove_callback=None):
"""
Start discovering chromecasts on the network.
This method will start discovering chromecasts on a separate thread. When
a chromecast is discovered, the callback will be called with the
discovered chromecast's zeroconf name. This is the dictionary key to find
the chromecast metadata in listener.services.
This method returns the CastListener object and the zeroconf ServiceBrowser
object. The CastListener object will contain information for the discovered
chromecasts. To stop discovery, call the stop_discovery method with the
ServiceBrowser object.
"""
listener = CastListener(add_callback, remove_callback)
service_browser = False
try:
service_browser = zeroconf.ServiceBrowser(zeroconf.Zeroconf(), '_googlecast._tcp.local.', listener) # depends on [control=['try'], data=[]]
except (zeroconf.BadTypeInNameException, NotImplementedError, OSError, socket.error, zeroconf.NonUniqueNameException):
pass # depends on [control=['except'], data=[]]
return (listener, service_browser) |
def get(self, pk):
    """Return the stored object whose primary key equals ``pk``.

    Performs a linear scan over every ``self.query_class`` instance in the
    store — override for efficiency. Returns ``None`` when nothing matches.
    """
    for candidate in self.store.get(self.query_class):
        # Coerce the lookup value to this item's own pk column type before
        # comparing (kept inside the loop: col_type may vary per item).
        pk = candidate.properties[candidate.pk].col_type(pk)
        if getattr(candidate, candidate.pk) == pk:
            return candidate
constant[
Returns the object for the key
Override it for efficiency.
]
for taget[name[item]] in starred[call[name[self].store.get, parameter[name[self].query_class]]] begin[:]
variable[pk] assign[=] call[call[name[item].properties][name[item].pk].col_type, parameter[name[pk]]]
if compare[call[name[getattr], parameter[name[item], name[item].pk]] equal[==] name[pk]] begin[:]
return[name[item]] | keyword[def] identifier[get] ( identifier[self] , identifier[pk] ):
literal[string]
keyword[for] identifier[item] keyword[in] identifier[self] . identifier[store] . identifier[get] ( identifier[self] . identifier[query_class] ):
identifier[pk] = identifier[item] . identifier[properties] [ identifier[item] . identifier[pk] ]. identifier[col_type] ( identifier[pk] )
keyword[if] identifier[getattr] ( identifier[item] , identifier[item] . identifier[pk] )== identifier[pk] :
keyword[return] identifier[item] | def get(self, pk):
"""
Returns the object for the key
Override it for efficiency.
"""
for item in self.store.get(self.query_class):
# coverts pk value to correct type
pk = item.properties[item.pk].col_type(pk)
if getattr(item, item.pk) == pk:
return item # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['item']] |
def get_suffix_tree(self, suffix_tree_id):
    """Return the suffix tree entity for ``suffix_tree_id``, wired up with
    the node and edge repos it (at least at the moment) needs.
    """
    tree = self.suffix_tree_repo[suffix_tree_id]
    assert isinstance(tree, GeneralizedSuffixTree)
    # Attach the repositories the entity expects to find on itself.
    tree._node_repo = self.node_repo
    tree._node_child_collection_repo = self.node_child_collection_repo
    tree._edge_repo = self.edge_repo
    tree._stringid_collection_repo = self.stringid_collection_repo
    return tree
constant[Returns a suffix tree entity, equipped with node and edge repos it (at least at the moment) needs.
]
variable[suffix_tree] assign[=] call[name[self].suffix_tree_repo][name[suffix_tree_id]]
assert[call[name[isinstance], parameter[name[suffix_tree], name[GeneralizedSuffixTree]]]]
name[suffix_tree]._node_repo assign[=] name[self].node_repo
name[suffix_tree]._node_child_collection_repo assign[=] name[self].node_child_collection_repo
name[suffix_tree]._edge_repo assign[=] name[self].edge_repo
name[suffix_tree]._stringid_collection_repo assign[=] name[self].stringid_collection_repo
return[name[suffix_tree]] | keyword[def] identifier[get_suffix_tree] ( identifier[self] , identifier[suffix_tree_id] ):
literal[string]
identifier[suffix_tree] = identifier[self] . identifier[suffix_tree_repo] [ identifier[suffix_tree_id] ]
keyword[assert] identifier[isinstance] ( identifier[suffix_tree] , identifier[GeneralizedSuffixTree] )
identifier[suffix_tree] . identifier[_node_repo] = identifier[self] . identifier[node_repo]
identifier[suffix_tree] . identifier[_node_child_collection_repo] = identifier[self] . identifier[node_child_collection_repo]
identifier[suffix_tree] . identifier[_edge_repo] = identifier[self] . identifier[edge_repo]
identifier[suffix_tree] . identifier[_stringid_collection_repo] = identifier[self] . identifier[stringid_collection_repo]
keyword[return] identifier[suffix_tree] | def get_suffix_tree(self, suffix_tree_id):
"""Returns a suffix tree entity, equipped with node and edge repos it (at least at the moment) needs.
"""
suffix_tree = self.suffix_tree_repo[suffix_tree_id]
assert isinstance(suffix_tree, GeneralizedSuffixTree)
suffix_tree._node_repo = self.node_repo
suffix_tree._node_child_collection_repo = self.node_child_collection_repo
suffix_tree._edge_repo = self.edge_repo
suffix_tree._stringid_collection_repo = self.stringid_collection_repo
return suffix_tree |
def get(self, key, default=None):
    """
    Get an element of the collection.

    :param key: The index of the element
    :type key: mixed
    :param default: The default value to return when ``key`` is out of range
    :type default: mixed
    :rtype: mixed
    """
    try:
        item = self.items[key]
    except IndexError:
        # Out-of-range index: fall back to the default, resolved via value().
        return value(default)
    return item
constant[
Get an element of the collection.
:param key: The index of the element
:type key: mixed
:param default: The default value to return
:type default: mixed
:rtype: mixed
]
<ast.Try object at 0x7da1b055ecb0> | keyword[def] identifier[get] ( identifier[self] , identifier[key] , identifier[default] = keyword[None] ):
literal[string]
keyword[try] :
keyword[return] identifier[self] . identifier[items] [ identifier[key] ]
keyword[except] identifier[IndexError] :
keyword[return] identifier[value] ( identifier[default] ) | def get(self, key, default=None):
"""
Get an element of the collection.
:param key: The index of the element
:type key: mixed
:param default: The default value to return
:type default: mixed
:rtype: mixed
"""
try:
return self.items[key] # depends on [control=['try'], data=[]]
except IndexError:
return value(default) # depends on [control=['except'], data=[]] |
def match(mode_lst: list, obj: 'object that has __destruct__ method'):
    """
    Structurally match ``obj`` against the pattern ``mode_lst``, binding
    ``P`` placeholders in place.

    >>> from Redy.ADT.Core import match, data, P
    >>> from Redy.ADT.traits import ConsInd, Discrete
    >>> @data
    >>> class List(ConsInd, Discrete):
    >>>     Nil : ...
    >>>     Cons: lambda head, tail: ...
    >>> lst = List.Cons(2, List.Cons(1, List.Nil))
    >>> mode_lst = P[List.Cons, P, P[List.Cons, 1]]
    >>> if match(mode_lst, lst):
    >>>     assert mode_lst == [List.Cons, 2, [List.Cons, 1]]
    """
    # noinspection PyUnresolvedReferences
    try:
        # noinspection PyUnresolvedReferences
        structure = obj.__destruct__()
    except AttributeError:
        # Objects without __destruct__ can never match a pattern.
        return False
    if len(mode_lst) > len(structure):
        # Pattern asks for more components than the object provides.
        return False
    for idx, pattern in enumerate(mode_lst):
        # noinspection PyUnresolvedReferences
        elem = obj[idx]
        if isinstance(pattern, PatternList):
            # Nested pattern: recurse into the component.
            if not match(pattern, elem):
                return False
        elif pattern is P:
            # Placeholder: capture the matched element in place.
            # noinspection PyUnresolvedReferences
            mode_lst[idx] = elem
        elif pattern is any:
            # Wildcard: accept anything without binding.
            continue
        elif pattern != elem:
            return False
    return True
constant[
>>> from Redy.ADT.Core import match, data, P
>>> from Redy.ADT.traits import ConsInd, Discrete
>>> @data
>>> class List(ConsInd, Discrete):
>>> # ConsInd(index following constructing)
>>> # |-> Ind;
>>> # Discrete
>>> # |-> Im(Immutable), Eq
>>> Nil : ...
>>> Cons: lambda head, tail: ...
>>> lst = List.Cons(2, List.Cons(1, List.Nil))
>>> mode_lst = P[List.Cons, P, P[List.Cons, 1]]
>>> if match(mode_lst, lst):
>>> assert mode_lst == [List.Cons, 2, [List.Cons, 1]]
]
<ast.Try object at 0x7da18eb55060>
variable[n] assign[=] call[name[len], parameter[name[mode_lst]]]
if compare[name[n] greater[>] call[name[len], parameter[name[structure]]]] begin[:]
return[constant[False]]
for taget[name[i]] in starred[call[name[range], parameter[name[n]]]] begin[:]
variable[mode] assign[=] call[name[mode_lst]][name[i]]
variable[elem] assign[=] call[name[obj]][name[i]]
if call[name[isinstance], parameter[name[mode], name[PatternList]]] begin[:]
if <ast.UnaryOp object at 0x7da18eb55030> begin[:]
return[constant[False]]
return[constant[True]] | keyword[def] identifier[match] ( identifier[mode_lst] : identifier[list] , identifier[obj] : literal[string] ):
literal[string]
keyword[try] :
identifier[structure] = identifier[obj] . identifier[__destruct__] ()
keyword[except] identifier[AttributeError] :
keyword[return] keyword[False]
identifier[n] = identifier[len] ( identifier[mode_lst] )
keyword[if] identifier[n] > identifier[len] ( identifier[structure] ):
keyword[return] keyword[False]
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[n] ):
identifier[mode] = identifier[mode_lst] [ identifier[i] ]
identifier[elem] = identifier[obj] [ identifier[i] ]
keyword[if] identifier[isinstance] ( identifier[mode] , identifier[PatternList] ):
keyword[if] keyword[not] identifier[match] ( identifier[mode] , identifier[elem] ):
keyword[return] keyword[False]
keyword[elif] identifier[mode] keyword[is] identifier[P] :
identifier[mode_lst] [ identifier[i] ]= identifier[elem]
keyword[elif] identifier[mode] keyword[is] identifier[any] :
keyword[pass]
keyword[elif] identifier[mode] != identifier[elem] :
keyword[return] keyword[False]
keyword[return] keyword[True] | def match(mode_lst: list, obj: 'object that has __destruct__ method'):
"""
>>> from Redy.ADT.Core import match, data, P
>>> from Redy.ADT.traits import ConsInd, Discrete
>>> @data
>>> class List(ConsInd, Discrete):
>>> # ConsInd(index following constructing)
>>> # |-> Ind;
>>> # Discrete
>>> # |-> Im(Immutable), Eq
>>> Nil : ...
>>> Cons: lambda head, tail: ...
>>> lst = List.Cons(2, List.Cons(1, List.Nil))
>>> mode_lst = P[List.Cons, P, P[List.Cons, 1]]
>>> if match(mode_lst, lst):
>>> assert mode_lst == [List.Cons, 2, [List.Cons, 1]]
"""
# noinspection PyUnresolvedReferences
try:
# noinspection PyUnresolvedReferences
structure = obj.__destruct__() # depends on [control=['try'], data=[]]
except AttributeError:
return False # depends on [control=['except'], data=[]]
n = len(mode_lst)
if n > len(structure):
return False # depends on [control=['if'], data=[]]
for i in range(n):
mode = mode_lst[i]
# noinspection PyUnresolvedReferences
elem = obj[i]
if isinstance(mode, PatternList):
if not match(mode, elem):
return False # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
elif mode is P:
# noinspection PyUnresolvedReferences
mode_lst[i] = elem # depends on [control=['if'], data=[]]
elif mode is any:
pass # depends on [control=['if'], data=[]]
elif mode != elem:
return False # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['i']]
return True |
def register(self, perm_func=None, model=None, allow_staff=None, allow_superuser=None,
             allow_anonymous=None, unauthenticated_handler=None, request_types=None, name=None,
             replace=False, _return_entry=False):
    """Register permission function & return the original function.

    This is typically used as a decorator::

        permissions = PermissionsRegistry()

        @permissions.register
        def can_do_something(user):
            ...

    :param perm_func: the permission predicate; ``None`` when the
        decorator is invoked *with* arguments (handled below by
        returning a one-argument closure)
    :param model: optional model the permission is associated with
    :param allow_staff: staff users pass automatically; falls back to
        the registry-wide default when ``None``
    :param allow_superuser: superusers pass automatically; registry
        fallback when ``None``
    :param allow_anonymous: whether anonymous users are even evaluated;
        registry fallback when ``None``
    :param unauthenticated_handler: handler for unauthenticated
        requests; registry fallback when ``None``
    :param request_types: request types accepted by the view decorator;
        registry fallback when ``None``
    :param name: registry key; defaults to ``perm_func.__name__``
    :param replace: allow overwriting an already-registered name
    :param _return_entry: internal use only -- return the registry
        :class:`.Entry` instead of the wrapped permission function
    :raises PermissionsError: if the reserved name ``register`` is used
    :raises DuplicatePermissionError: if ``name`` is already registered
        and ``replace`` is false
    """
    # Per-permission options fall back to the registry-wide defaults
    # configured on this PermissionsRegistry instance.
    allow_staff = _default(allow_staff, self._allow_staff)
    allow_superuser = _default(allow_superuser, self._allow_superuser)
    allow_anonymous = _default(allow_anonymous, self._allow_anonymous)
    unauthenticated_handler = _default(unauthenticated_handler, self._unauthenticated_handler)
    request_types = _default(request_types, self._request_types)
    if perm_func is None:
        # Decorator was called with keyword arguments, e.g.
        # ``@permissions.register(model=Widget)``; return a closure that
        # re-enters this method with the resolved options plus the
        # decorated function.
        return (
            lambda perm_func_:
            self.register(
                perm_func_, model, allow_staff, allow_superuser, allow_anonymous,
                unauthenticated_handler, request_types, name, replace, _return_entry)
        )
    name = _default(name, perm_func.__name__)
    if name == 'register':
        # Reserved: would shadow this method on the registry.
        raise PermissionsError('register cannot be used as a permission name')
    elif name in self._registry and not replace:
        raise DuplicatePermissionError(name)
    view_decorator = self._make_view_decorator(
        name, perm_func, model, allow_staff, allow_superuser, allow_anonymous,
        unauthenticated_handler, request_types)
    # Record everything about this permission in the registry; the final
    # ``set()`` is the Entry's last positional field (its meaning is not
    # visible from this block).
    entry = Entry(
        name, perm_func, view_decorator, model, allow_staff, allow_superuser, allow_anonymous,
        unauthenticated_handler, request_types, set())
    self._registry[name] = entry

    @wraps(perm_func)
    def wrapped_func(user, instance=NO_VALUE):
        # No user at all can never pass a permission check.
        if user is None:
            return False
        # Reject anonymous users unless explicitly allowed.
        # NOTE(review): ``is_anonymous`` is *called* here; in Django >= 1.10
        # it is a property, so confirm which Django versions are supported.
        if not allow_anonymous and user.is_anonymous():
            return False
        # Defer the real check so the staff/superuser shortcuts below can
        # short-circuit without invoking ``perm_func`` at all.
        test = lambda: perm_func(user) if instance is NO_VALUE else perm_func(user, instance)
        return (
            allow_staff and user.is_staff or
            allow_superuser and user.is_superuser or
            test()
        )

    # Expose the permission as a template filter too.
    # NOTE(review): ``register`` here resolves to a module-level object
    # (presumably a Django template ``Library``), not this method -- verify.
    register.filter(name, wrapped_func)
    log.debug('Registered permission: {0}'.format(name))
    return entry if _return_entry else wrapped_func
constant[Register permission function & return the original function.
This is typically used as a decorator::
permissions = PermissionsRegistry()
@permissions.register
def can_do_something(user):
...
For internal use only: you can pass ``_return_entry=True`` to
have the registry :class:`.Entry` returned instead of
``perm_func``.
]
variable[allow_staff] assign[=] call[name[_default], parameter[name[allow_staff], name[self]._allow_staff]]
variable[allow_superuser] assign[=] call[name[_default], parameter[name[allow_superuser], name[self]._allow_superuser]]
variable[allow_anonymous] assign[=] call[name[_default], parameter[name[allow_anonymous], name[self]._allow_anonymous]]
variable[unauthenticated_handler] assign[=] call[name[_default], parameter[name[unauthenticated_handler], name[self]._unauthenticated_handler]]
variable[request_types] assign[=] call[name[_default], parameter[name[request_types], name[self]._request_types]]
if compare[name[perm_func] is constant[None]] begin[:]
return[<ast.Lambda object at 0x7da18f00cbb0>]
variable[name] assign[=] call[name[_default], parameter[name[name], name[perm_func].__name__]]
if compare[name[name] equal[==] constant[register]] begin[:]
<ast.Raise object at 0x7da18f00fb50>
variable[view_decorator] assign[=] call[name[self]._make_view_decorator, parameter[name[name], name[perm_func], name[model], name[allow_staff], name[allow_superuser], name[allow_anonymous], name[unauthenticated_handler], name[request_types]]]
variable[entry] assign[=] call[name[Entry], parameter[name[name], name[perm_func], name[view_decorator], name[model], name[allow_staff], name[allow_superuser], name[allow_anonymous], name[unauthenticated_handler], name[request_types], call[name[set], parameter[]]]]
call[name[self]._registry][name[name]] assign[=] name[entry]
def function[wrapped_func, parameter[user, instance]]:
if compare[name[user] is constant[None]] begin[:]
return[constant[False]]
if <ast.BoolOp object at 0x7da1b16a82b0> begin[:]
return[constant[False]]
variable[test] assign[=] <ast.Lambda object at 0x7da1b16abdf0>
return[<ast.BoolOp object at 0x7da1b16aba90>]
call[name[register].filter, parameter[name[name], name[wrapped_func]]]
call[name[log].debug, parameter[call[constant[Registered permission: {0}].format, parameter[name[name]]]]]
return[<ast.IfExp object at 0x7da1b16a8b50>] | keyword[def] identifier[register] ( identifier[self] , identifier[perm_func] = keyword[None] , identifier[model] = keyword[None] , identifier[allow_staff] = keyword[None] , identifier[allow_superuser] = keyword[None] ,
identifier[allow_anonymous] = keyword[None] , identifier[unauthenticated_handler] = keyword[None] , identifier[request_types] = keyword[None] , identifier[name] = keyword[None] ,
identifier[replace] = keyword[False] , identifier[_return_entry] = keyword[False] ):
literal[string]
identifier[allow_staff] = identifier[_default] ( identifier[allow_staff] , identifier[self] . identifier[_allow_staff] )
identifier[allow_superuser] = identifier[_default] ( identifier[allow_superuser] , identifier[self] . identifier[_allow_superuser] )
identifier[allow_anonymous] = identifier[_default] ( identifier[allow_anonymous] , identifier[self] . identifier[_allow_anonymous] )
identifier[unauthenticated_handler] = identifier[_default] ( identifier[unauthenticated_handler] , identifier[self] . identifier[_unauthenticated_handler] )
identifier[request_types] = identifier[_default] ( identifier[request_types] , identifier[self] . identifier[_request_types] )
keyword[if] identifier[perm_func] keyword[is] keyword[None] :
keyword[return] (
keyword[lambda] identifier[perm_func_] :
identifier[self] . identifier[register] (
identifier[perm_func_] , identifier[model] , identifier[allow_staff] , identifier[allow_superuser] , identifier[allow_anonymous] ,
identifier[unauthenticated_handler] , identifier[request_types] , identifier[name] , identifier[replace] , identifier[_return_entry] )
)
identifier[name] = identifier[_default] ( identifier[name] , identifier[perm_func] . identifier[__name__] )
keyword[if] identifier[name] == literal[string] :
keyword[raise] identifier[PermissionsError] ( literal[string] )
keyword[elif] identifier[name] keyword[in] identifier[self] . identifier[_registry] keyword[and] keyword[not] identifier[replace] :
keyword[raise] identifier[DuplicatePermissionError] ( identifier[name] )
identifier[view_decorator] = identifier[self] . identifier[_make_view_decorator] (
identifier[name] , identifier[perm_func] , identifier[model] , identifier[allow_staff] , identifier[allow_superuser] , identifier[allow_anonymous] ,
identifier[unauthenticated_handler] , identifier[request_types] )
identifier[entry] = identifier[Entry] (
identifier[name] , identifier[perm_func] , identifier[view_decorator] , identifier[model] , identifier[allow_staff] , identifier[allow_superuser] , identifier[allow_anonymous] ,
identifier[unauthenticated_handler] , identifier[request_types] , identifier[set] ())
identifier[self] . identifier[_registry] [ identifier[name] ]= identifier[entry]
@ identifier[wraps] ( identifier[perm_func] )
keyword[def] identifier[wrapped_func] ( identifier[user] , identifier[instance] = identifier[NO_VALUE] ):
keyword[if] identifier[user] keyword[is] keyword[None] :
keyword[return] keyword[False]
keyword[if] keyword[not] identifier[allow_anonymous] keyword[and] identifier[user] . identifier[is_anonymous] ():
keyword[return] keyword[False]
identifier[test] = keyword[lambda] : identifier[perm_func] ( identifier[user] ) keyword[if] identifier[instance] keyword[is] identifier[NO_VALUE] keyword[else] identifier[perm_func] ( identifier[user] , identifier[instance] )
keyword[return] (
identifier[allow_staff] keyword[and] identifier[user] . identifier[is_staff] keyword[or]
identifier[allow_superuser] keyword[and] identifier[user] . identifier[is_superuser] keyword[or]
identifier[test] ()
)
identifier[register] . identifier[filter] ( identifier[name] , identifier[wrapped_func] )
identifier[log] . identifier[debug] ( literal[string] . identifier[format] ( identifier[name] ))
keyword[return] identifier[entry] keyword[if] identifier[_return_entry] keyword[else] identifier[wrapped_func] | def register(self, perm_func=None, model=None, allow_staff=None, allow_superuser=None, allow_anonymous=None, unauthenticated_handler=None, request_types=None, name=None, replace=False, _return_entry=False):
"""Register permission function & return the original function.
This is typically used as a decorator::
permissions = PermissionsRegistry()
@permissions.register
def can_do_something(user):
...
For internal use only: you can pass ``_return_entry=True`` to
have the registry :class:`.Entry` returned instead of
``perm_func``.
"""
allow_staff = _default(allow_staff, self._allow_staff)
allow_superuser = _default(allow_superuser, self._allow_superuser)
allow_anonymous = _default(allow_anonymous, self._allow_anonymous)
unauthenticated_handler = _default(unauthenticated_handler, self._unauthenticated_handler)
request_types = _default(request_types, self._request_types)
if perm_func is None:
return lambda perm_func_: self.register(perm_func_, model, allow_staff, allow_superuser, allow_anonymous, unauthenticated_handler, request_types, name, replace, _return_entry) # depends on [control=['if'], data=[]]
name = _default(name, perm_func.__name__)
if name == 'register':
raise PermissionsError('register cannot be used as a permission name') # depends on [control=['if'], data=[]]
elif name in self._registry and (not replace):
raise DuplicatePermissionError(name) # depends on [control=['if'], data=[]]
view_decorator = self._make_view_decorator(name, perm_func, model, allow_staff, allow_superuser, allow_anonymous, unauthenticated_handler, request_types)
entry = Entry(name, perm_func, view_decorator, model, allow_staff, allow_superuser, allow_anonymous, unauthenticated_handler, request_types, set())
self._registry[name] = entry
@wraps(perm_func)
def wrapped_func(user, instance=NO_VALUE):
if user is None:
return False # depends on [control=['if'], data=[]]
if not allow_anonymous and user.is_anonymous():
return False # depends on [control=['if'], data=[]]
test = lambda : perm_func(user) if instance is NO_VALUE else perm_func(user, instance)
return allow_staff and user.is_staff or (allow_superuser and user.is_superuser) or test()
register.filter(name, wrapped_func)
log.debug('Registered permission: {0}'.format(name))
return entry if _return_entry else wrapped_func |
def verify_precompiled_checksums(self, precompiled_path: Path) -> None:
    """Verify that our source checksums agree with a precompiled file.

    :param precompiled_path: path of the precompiled contracts file
    :raises ContractSourceManagerVerificationError: if any per-contract
        checksum is missing or different, or the overall checksum differs
    """
    # Load the checksums recorded in the precompiled file.
    precompiled = ContractManager(precompiled_path)
    assert self.contracts_checksums is not None  # silence mypy
    # Every contract we track must appear in the precompiled file with an
    # identical source-code checksum.
    for contract_name, source_checksum in self.contracts_checksums.items():
        assert precompiled.contracts_checksums is not None  # silence mypy
        if contract_name not in precompiled.contracts_checksums:
            raise ContractSourceManagerVerificationError(
                f'No checksum for {contract_name}',
            )
        recorded_checksum = precompiled.contracts_checksums[contract_name]
        if recorded_checksum != source_checksum:
            raise ContractSourceManagerVerificationError(
                f'checksum of {contract_name} does not match '
                f'{recorded_checksum} != {source_checksum}',
            )
    # Finally, the checksum over the whole source set must agree too.
    if self.overall_checksum != precompiled.overall_checksum:
        raise ContractSourceManagerVerificationError(
            f'overall checksum does not match '
            f'{self.overall_checksum} != {precompiled.overall_checksum}',
        )
constant[ Compare source code checksums with those from a precompiled file. ]
variable[contracts_precompiled] assign[=] call[name[ContractManager], parameter[name[precompiled_path]]]
assert[compare[name[self].contracts_checksums is_not constant[None]]]
for taget[tuple[[<ast.Name object at 0x7da1b088d900>, <ast.Name object at 0x7da1b088d720>]]] in starred[call[name[self].contracts_checksums.items, parameter[]]] begin[:]
<ast.Try object at 0x7da1b088f760>
if compare[name[precompiled_checksum] not_equal[!=] name[checksum]] begin[:]
<ast.Raise object at 0x7da1b26ae110>
if compare[name[self].overall_checksum not_equal[!=] name[contracts_precompiled].overall_checksum] begin[:]
<ast.Raise object at 0x7da20e9b1150> | keyword[def] identifier[verify_precompiled_checksums] ( identifier[self] , identifier[precompiled_path] : identifier[Path] )-> keyword[None] :
literal[string]
identifier[contracts_precompiled] = identifier[ContractManager] ( identifier[precompiled_path] )
keyword[assert] identifier[self] . identifier[contracts_checksums] keyword[is] keyword[not] keyword[None]
keyword[for] identifier[contract] , identifier[checksum] keyword[in] identifier[self] . identifier[contracts_checksums] . identifier[items] ():
keyword[try] :
keyword[assert] identifier[contracts_precompiled] . identifier[contracts_checksums] keyword[is] keyword[not] keyword[None]
identifier[precompiled_checksum] = identifier[contracts_precompiled] . identifier[contracts_checksums] [ identifier[contract] ]
keyword[except] identifier[KeyError] :
keyword[raise] identifier[ContractSourceManagerVerificationError] (
literal[string] ,
)
keyword[if] identifier[precompiled_checksum] != identifier[checksum] :
keyword[raise] identifier[ContractSourceManagerVerificationError] (
literal[string] ,
)
keyword[if] identifier[self] . identifier[overall_checksum] != identifier[contracts_precompiled] . identifier[overall_checksum] :
keyword[raise] identifier[ContractSourceManagerVerificationError] (
literal[string]
literal[string] ,
) | def verify_precompiled_checksums(self, precompiled_path: Path) -> None:
""" Compare source code checksums with those from a precompiled file. """
# We get the precompiled file data
contracts_precompiled = ContractManager(precompiled_path)
# Silence mypy
assert self.contracts_checksums is not None
# Compare each contract source code checksum with the one from the precompiled file
for (contract, checksum) in self.contracts_checksums.items():
try:
# Silence mypy
assert contracts_precompiled.contracts_checksums is not None
precompiled_checksum = contracts_precompiled.contracts_checksums[contract] # depends on [control=['try'], data=[]]
except KeyError:
raise ContractSourceManagerVerificationError(f'No checksum for {contract}') # depends on [control=['except'], data=[]]
if precompiled_checksum != checksum:
raise ContractSourceManagerVerificationError(f'checksum of {contract} does not match {precompiled_checksum} != {checksum}') # depends on [control=['if'], data=['precompiled_checksum', 'checksum']] # depends on [control=['for'], data=[]]
# Compare the overall source code checksum with the one from the precompiled file
if self.overall_checksum != contracts_precompiled.overall_checksum:
raise ContractSourceManagerVerificationError(f'overall checksum does not match {self.overall_checksum} != {contracts_precompiled.overall_checksum}') # depends on [control=['if'], data=[]] |
def __PrintAdditionalImports(self, imports):
    """Print additional imports needed for protorpc.

    Non-Google imports are printed first, Google imports second; each
    non-empty group is followed by a blank line.
    """
    # Partition into Google vs. everything else in a single pass.
    google_imports = []
    other_imports = []
    for import_line in imports:
        bucket = google_imports if 'google' in import_line else other_imports
        bucket.append(import_line)
    # Note: If we ever were going to add imports from this package, we'd
    # need to sort those out and put them at the end.
    for group in (other_imports, google_imports):
        if not group:
            continue
        for import_line in sorted(group):
            self.__printer(import_line)
        self.__printer()
constant[Print additional imports needed for protorpc.]
variable[google_imports] assign[=] <ast.ListComp object at 0x7da1b0718b20>
variable[other_imports] assign[=] <ast.ListComp object at 0x7da1b0719e10>
if name[other_imports] begin[:]
for taget[name[import_]] in starred[call[name[sorted], parameter[name[other_imports]]]] begin[:]
call[name[self].__printer, parameter[name[import_]]]
call[name[self].__printer, parameter[]]
if name[google_imports] begin[:]
for taget[name[import_]] in starred[call[name[sorted], parameter[name[google_imports]]]] begin[:]
call[name[self].__printer, parameter[name[import_]]]
call[name[self].__printer, parameter[]] | keyword[def] identifier[__PrintAdditionalImports] ( identifier[self] , identifier[imports] ):
literal[string]
identifier[google_imports] =[ identifier[x] keyword[for] identifier[x] keyword[in] identifier[imports] keyword[if] literal[string] keyword[in] identifier[x] ]
identifier[other_imports] =[ identifier[x] keyword[for] identifier[x] keyword[in] identifier[imports] keyword[if] literal[string] keyword[not] keyword[in] identifier[x] ]
keyword[if] identifier[other_imports] :
keyword[for] identifier[import_] keyword[in] identifier[sorted] ( identifier[other_imports] ):
identifier[self] . identifier[__printer] ( identifier[import_] )
identifier[self] . identifier[__printer] ()
keyword[if] identifier[google_imports] :
keyword[for] identifier[import_] keyword[in] identifier[sorted] ( identifier[google_imports] ):
identifier[self] . identifier[__printer] ( identifier[import_] )
identifier[self] . identifier[__printer] () | def __PrintAdditionalImports(self, imports):
"""Print additional imports needed for protorpc."""
google_imports = [x for x in imports if 'google' in x]
other_imports = [x for x in imports if 'google' not in x]
if other_imports:
for import_ in sorted(other_imports):
self.__printer(import_) # depends on [control=['for'], data=['import_']]
self.__printer() # depends on [control=['if'], data=[]]
# Note: If we ever were going to add imports from this package, we'd
# need to sort those out and put them at the end.
if google_imports:
for import_ in sorted(google_imports):
self.__printer(import_) # depends on [control=['for'], data=['import_']]
self.__printer() # depends on [control=['if'], data=[]] |
def signed_number(number, precision=2):
    """Format *number* with an explicit ``+`` sign when it is positive.

    Negative numbers keep the ``-`` produced by normal float formatting;
    zero is rendered without any sign.

    :param number: the value to format
    :param precision: number of digits after the decimal point
    :returns: the formatted string, e.g. ``'+1.50'``
    """
    sign = '+' if number > 0 else ''
    return f'{sign}{number:.{precision}f}'
constant[
Return the given number as a string with a sign in front of it, ie. `+` if the number is positive, `-` otherwise.
]
variable[prefix] assign[=] <ast.IfExp object at 0x7da207f99ff0>
variable[number_str] assign[=] call[constant[{}{:.{precision}f}].format, parameter[name[prefix], name[number]]]
return[name[number_str]] | keyword[def] identifier[signed_number] ( identifier[number] , identifier[precision] = literal[int] ):
literal[string]
identifier[prefix] = literal[string] keyword[if] identifier[number] <= literal[int] keyword[else] literal[string]
identifier[number_str] = literal[string] . identifier[format] ( identifier[prefix] , identifier[number] , identifier[precision] = identifier[precision] )
keyword[return] identifier[number_str] | def signed_number(number, precision=2):
"""
Return the given number as a string with a sign in front of it, ie. `+` if the number is positive, `-` otherwise.
"""
prefix = '' if number <= 0 else '+'
number_str = '{}{:.{precision}f}'.format(prefix, number, precision=precision)
return number_str |
def get_asset_details(self, **params):
    """Fetch withdraw/deposit details for every asset.

    https://github.com/binance-exchange/binance-official-api-docs/blob/master/wapi-api.md#asset-detail-user_data

    :param recvWindow: the number of milliseconds the request is valid for
    :type recvWindow: int
    :returns: API response dict, e.g.::

        {
            "success": true,
            "assetDetail": {
                "CTR": {
                    "minWithdrawAmount": "70.00000000",
                    "depositStatus": false,
                    "withdrawFee": 35,
                    "withdrawStatus": true,
                    "depositTip": "Delisted, Deposit Suspended"
                }
            }
        }

    :raises: BinanceWithdrawException
    """
    response = self._request_withdraw_api('get', 'assetDetail.html', True, data=params)
    if response['success']:
        return response
    raise BinanceWithdrawException(response['msg'])
constant[Fetch details on assets.
https://github.com/binance-exchange/binance-official-api-docs/blob/master/wapi-api.md#asset-detail-user_data
:param recvWindow: the number of milliseconds the request is valid for
:type recvWindow: int
:returns: API response
.. code-block:: python
{
"success": true,
"assetDetail": {
"CTR": {
"minWithdrawAmount": "70.00000000", //min withdraw amount
"depositStatus": false,//deposit status
"withdrawFee": 35, // withdraw fee
"withdrawStatus": true, //withdraw status
"depositTip": "Delisted, Deposit Suspended" //reason
},
"SKY": {
"minWithdrawAmount": "0.02000000",
"depositStatus": true,
"withdrawFee": 0.01,
"withdrawStatus": true
}
}
}
:raises: BinanceWithdrawException
]
variable[res] assign[=] call[name[self]._request_withdraw_api, parameter[constant[get], constant[assetDetail.html], constant[True]]]
if <ast.UnaryOp object at 0x7da18c4ceb90> begin[:]
<ast.Raise object at 0x7da18c4cde70>
return[name[res]] | keyword[def] identifier[get_asset_details] ( identifier[self] ,** identifier[params] ):
literal[string]
identifier[res] = identifier[self] . identifier[_request_withdraw_api] ( literal[string] , literal[string] , keyword[True] , identifier[data] = identifier[params] )
keyword[if] keyword[not] identifier[res] [ literal[string] ]:
keyword[raise] identifier[BinanceWithdrawException] ( identifier[res] [ literal[string] ])
keyword[return] identifier[res] | def get_asset_details(self, **params):
"""Fetch details on assets.
https://github.com/binance-exchange/binance-official-api-docs/blob/master/wapi-api.md#asset-detail-user_data
:param recvWindow: the number of milliseconds the request is valid for
:type recvWindow: int
:returns: API response
.. code-block:: python
{
"success": true,
"assetDetail": {
"CTR": {
"minWithdrawAmount": "70.00000000", //min withdraw amount
"depositStatus": false,//deposit status
"withdrawFee": 35, // withdraw fee
"withdrawStatus": true, //withdraw status
"depositTip": "Delisted, Deposit Suspended" //reason
},
"SKY": {
"minWithdrawAmount": "0.02000000",
"depositStatus": true,
"withdrawFee": 0.01,
"withdrawStatus": true
}
}
}
:raises: BinanceWithdrawException
"""
res = self._request_withdraw_api('get', 'assetDetail.html', True, data=params)
if not res['success']:
raise BinanceWithdrawException(res['msg']) # depends on [control=['if'], data=[]]
return res |
def get_random_filename(content_type=None):
    """Return a pseudo-random, Planet-looking filename (a basename).

    :param content_type: optional MIME type used to choose the extension;
        an unknown or missing type yields no extension
    :returns: a filename such as ``planet-V8ELYxy5.tif``
    :rtype: str
    """
    # Unknown/empty content types fall back to no extension at all.
    suffix = mimetypes.guess_extension(content_type or '') or ''
    alphabet = string.ascii_letters + '0123456789'
    # sample() draws 8 distinct characters from the alphabet.
    token = ''.join(random.sample(alphabet, 8))
    return 'planet-{}{}'.format(token, suffix)
constant[Get a pseudo-random, Planet-looking filename.
>>> from planet.api import utils
>>> print(utils.get_random_filename()) #doctest:+SKIP
planet-61FPnh7K
>>> print(utils.get_random_filename('image/tiff')) #doctest:+SKIP
planet-V8ELYxy5.tif
>>>
:returns: a filename (i.e. ``basename``)
:rtype: str
]
variable[extension] assign[=] <ast.BoolOp object at 0x7da18eb54520>
variable[characters] assign[=] binary_operation[name[string].ascii_letters + constant[0123456789]]
variable[letters] assign[=] call[constant[].join, parameter[call[name[random].sample, parameter[name[characters], constant[8]]]]]
variable[name] assign[=] call[constant[planet-{}{}].format, parameter[name[letters], name[extension]]]
return[name[name]] | keyword[def] identifier[get_random_filename] ( identifier[content_type] = keyword[None] ):
literal[string]
identifier[extension] = identifier[mimetypes] . identifier[guess_extension] ( identifier[content_type] keyword[or] literal[string] ) keyword[or] literal[string]
identifier[characters] = identifier[string] . identifier[ascii_letters] + literal[string]
identifier[letters] = literal[string] . identifier[join] ( identifier[random] . identifier[sample] ( identifier[characters] , literal[int] ))
identifier[name] = literal[string] . identifier[format] ( identifier[letters] , identifier[extension] )
keyword[return] identifier[name] | def get_random_filename(content_type=None):
"""Get a pseudo-random, Planet-looking filename.
>>> from planet.api import utils
>>> print(utils.get_random_filename()) #doctest:+SKIP
planet-61FPnh7K
>>> print(utils.get_random_filename('image/tiff')) #doctest:+SKIP
planet-V8ELYxy5.tif
>>>
:returns: a filename (i.e. ``basename``)
:rtype: str
"""
extension = mimetypes.guess_extension(content_type or '') or ''
characters = string.ascii_letters + '0123456789'
letters = ''.join(random.sample(characters, 8))
name = 'planet-{}{}'.format(letters, extension)
return name |
def authentication_url(self):
    """Redirect your users to here to authenticate them."""
    query = urlencode({
        'client_id': self.client_id,
        'response_type': self.type,
        'redirect_uri': self.callback_url,
    })
    return AUTHENTICATION_URL + "?" + query
constant[Redirect your users to here to authenticate them.]
variable[params] assign[=] dictionary[[<ast.Constant object at 0x7da20c6c4ca0>, <ast.Constant object at 0x7da20c6c5cc0>, <ast.Constant object at 0x7da20c6c70d0>], [<ast.Attribute object at 0x7da1b07fe6e0>, <ast.Attribute object at 0x7da1b07ffee0>, <ast.Attribute object at 0x7da1b07ffcd0>]]
return[binary_operation[binary_operation[name[AUTHENTICATION_URL] + constant[?]] + call[name[urlencode], parameter[name[params]]]]] | keyword[def] identifier[authentication_url] ( identifier[self] ):
literal[string]
identifier[params] ={
literal[string] : identifier[self] . identifier[client_id] ,
literal[string] : identifier[self] . identifier[type] ,
literal[string] : identifier[self] . identifier[callback_url]
}
keyword[return] identifier[AUTHENTICATION_URL] + literal[string] + identifier[urlencode] ( identifier[params] ) | def authentication_url(self):
"""Redirect your users to here to authenticate them."""
params = {'client_id': self.client_id, 'response_type': self.type, 'redirect_uri': self.callback_url}
return AUTHENTICATION_URL + '?' + urlencode(params) |
def run_supernova(ctx, executable, debug, quiet, environment, command, conf,
                  echo, dashboard):
    """
    You can use supernova with many OpenStack clients and avoid the pain of
    managing multiple sets of environment variables. Getting started is easy
    and there's some documentation that can help:
    http://supernova.readthedocs.org/
    The first step is to get your environment variables packed into a
    configuration file, usually in ~/.supernova. The docs (linked above) have
    some good examples that you can fill in via copy/paste.
    Once you have a configuration ready to go, replace 'prod' below with one
    of your configured environments and try some of these commands:
    supernova prod list (Lists instances via novaclient)
    supernova prod image-list (Lists images via novaclient)
    supernova prod boot ... (Boots an instance via novaclient)
    Have questions, bugs, or comments? Head on over to Github and open an
    issue or submit a pull request!
    https://github.com/major/supernova
    """
    # Retrieve our credentials from the configuration file.
    try:
        nova_creds = config.run_config(config_file_override=conf)
    except Exception as e:
        msg = ("\n There's an error in your configuration file:\n\n"
               " {0}\n").format(e)
        click.echo(msg)
        ctx.exit(1)
    # Warn the user if there are potentially conflicting environment variables
    # already set in the user's environment.
    utils.check_environment_presets()
    # Is our environment argument a single environment or a supernova group?
    if utils.is_valid_group(environment, nova_creds):
        envs = utils.get_envs_in_group(environment, nova_creds)
    elif ',' in environment:
        # A comma-separated list may mix plain environment names with
        # group names; groups are expanded in place.
        envs = []
        for env in environment.split(','):
            if utils.is_valid_group(env, nova_creds):
                envs.extend(utils.get_envs_in_group(env, nova_creds))
            else:
                envs.append(env)
    elif environment.startswith('/') and environment.endswith('/'):
        # A /regex/ argument selects every configured environment whose
        # name matches the pattern between the slashes.
        envs = [nova_env for nova_env in nova_creds.keys()
                if re.search(environment[1:-1], nova_env)]
    else:
        envs = [environment]
    # These are arguments for supernova and not the executable that supernova
    # will eventually call.
    supernova_args = {
        'debug': debug,
        'executable': executable,
        'quiet': quiet,
        'echo': echo,
        'dashboard': dashboard,
    }
    # If the user specified a single environment, we need to verify that the
    # environment actually exists in their configuration file.
    if len(envs) == 1 and not utils.is_valid_environment(envs[0], nova_creds):
        msg = ("\nCouldn't find an environment called '{0}' in your "
               "configuration file.\nTry supernova --list to see all "
               "configured environments.\n".format(envs[0]))
        click.echo(msg)
        ctx.exit(1)
    if supernova_args['echo']:
        # --echo prints one environment's credentials as KEY=value lines;
        # echoing a whole group would interleave conflicting variables.
        if len(envs) > 1:
            msg = ("\nCan't echo a group of environments.\nSpecify a single "
                   "environment when using --echo.")
            click.echo(msg)
            ctx.exit(1)
        env = credentials.prep_shell_environment(envs[0], nova_creds)
        for k in env:
            click.echo('{0}={1}'.format(k, env[k]))
        ctx.exit(0)
    if supernova_args['dashboard']:
        # --dashboard opens the configured web dashboard URL for a single
        # environment in the user's browser.
        if len(envs) > 1:
            msg = ("\nCan't open dashboard for a group of environments.\n"
                   "Specify a single environment when using --dashboard.")
            click.echo(msg)
            ctx.exit(1)
        url = nova_creds[envs[0]].get('SUPERNOVA_DASHBOARD_URL')
        if url is None:
            msg = ("\nNo SUPERNOVA_DASHBOARD_URL specified "
                   "for environment: %s" % envs[0])
            click.echo(msg)
            ctx.exit(1)
        webbrowser.open(url)
        ctx.exit(0)
    if len(command) == 0:
        # BUG FIX: the original called .format(envs[0]) on a message with no
        # placeholder, silently discarding the argument; the sentence was
        # also missing its period.
        msg = ("\nMissing arguments to pass to executable. Run supernova "
               "--help for examples.\n")
        click.echo(msg)
        ctx.exit(1)
    nova_args = list(command)
    # Loop through the single environment (if the user specified one) or all
    # of the environments in a supernova group (if the user specified a group).
    for env in envs:
        supernova_args['nova_env'] = env
        returncode = supernova.run_command(nova_creds, nova_args,
                                           supernova_args)
    # NOTE(major): The return code here is the one that comes back from the
    # OS_EXECUTABLE that supernova runs (by default, 'nova'). When using
    # supernova groups, the return code is the one returned by the executable
    # for the last environment in the group.
    #
    # It's not ideal, but it's all I can think of for now. ;)
    sys.exit(returncode)
constant[
You can use supernova with many OpenStack clients and avoid the pain of
managing multiple sets of environment variables. Getting started is easy
and there's some documentation that can help:
http://supernova.readthedocs.org/
The first step is to get your environment variables packed into a
configuration file, usually in ~/.supernova. The docs (linked above) have
some good examples that you can fill in via copy/paste.
Once you have a configuration ready to go, replace 'prod' below with one
of your configured environments and try some of these commands:
supernova prod list (Lists instances via novaclient)
supernova prod image-list (Lists images via novaclient)
supernova prod boot ... (Boots an instance via novaclient)
Have questions, bugs, or comments? Head on over to Github and open an
issue or submit a pull request!
https://github.com/major/supernova
]
<ast.Try object at 0x7da1b283add0>
call[name[utils].check_environment_presets, parameter[]]
if call[name[utils].is_valid_group, parameter[name[environment], name[nova_creds]]] begin[:]
variable[envs] assign[=] call[name[utils].get_envs_in_group, parameter[name[environment], name[nova_creds]]]
variable[supernova_args] assign[=] dictionary[[<ast.Constant object at 0x7da1b2838c40>, <ast.Constant object at 0x7da1b283a2c0>, <ast.Constant object at 0x7da1b283a170>, <ast.Constant object at 0x7da1b2839150>, <ast.Constant object at 0x7da1b283a140>], [<ast.Name object at 0x7da1b2839390>, <ast.Name object at 0x7da1b283a2f0>, <ast.Name object at 0x7da1b283a410>, <ast.Name object at 0x7da1b28399c0>, <ast.Name object at 0x7da1b283a380>]]
if <ast.BoolOp object at 0x7da1b2839480> begin[:]
variable[msg] assign[=] call[constant[
Couldn't find an environment called '{0}' in your configuration file.
Try supernova --list to see all configured environments.
].format, parameter[call[name[envs]][constant[0]]]]
call[name[click].echo, parameter[name[msg]]]
call[name[ctx].exit, parameter[constant[1]]]
if call[name[supernova_args]][constant[echo]] begin[:]
if compare[call[name[len], parameter[name[envs]]] greater[>] constant[1]] begin[:]
variable[msg] assign[=] constant[
Can't echo a group of environments.
Specify a single environment when using --echo.]
call[name[click].echo, parameter[name[msg]]]
call[name[ctx].exit, parameter[constant[1]]]
variable[env] assign[=] call[name[credentials].prep_shell_environment, parameter[call[name[envs]][constant[0]], name[nova_creds]]]
for taget[name[k]] in starred[name[env]] begin[:]
call[name[click].echo, parameter[call[constant[{0}={1}].format, parameter[name[k], call[name[env]][name[k]]]]]]
call[name[ctx].exit, parameter[constant[0]]]
if call[name[supernova_args]][constant[dashboard]] begin[:]
if compare[call[name[len], parameter[name[envs]]] greater[>] constant[1]] begin[:]
variable[msg] assign[=] constant[
Can't open dashboard for a group of environments.
Specify a single environment when using --dashboard.]
call[name[click].echo, parameter[name[msg]]]
call[name[ctx].exit, parameter[constant[1]]]
variable[url] assign[=] call[call[name[nova_creds]][call[name[envs]][constant[0]]].get, parameter[constant[SUPERNOVA_DASHBOARD_URL]]]
if compare[name[url] is constant[None]] begin[:]
variable[msg] assign[=] binary_operation[constant[
No SUPERNOVA_DASHBOARD_URL specified for environment: %s] <ast.Mod object at 0x7da2590d6920> call[name[envs]][constant[0]]]
call[name[click].echo, parameter[name[msg]]]
call[name[ctx].exit, parameter[constant[1]]]
call[name[webbrowser].open, parameter[name[url]]]
call[name[ctx].exit, parameter[constant[0]]]
if compare[call[name[len], parameter[name[command]]] equal[==] constant[0]] begin[:]
variable[msg] assign[=] call[constant[
Missing arguments to pass to executable Run supernova --help for examples.
].format, parameter[call[name[envs]][constant[0]]]]
call[name[click].echo, parameter[name[msg]]]
call[name[ctx].exit, parameter[constant[1]]]
variable[nova_args] assign[=] call[name[list], parameter[name[command]]]
for taget[name[env]] in starred[name[envs]] begin[:]
call[name[supernova_args]][constant[nova_env]] assign[=] name[env]
variable[returncode] assign[=] call[name[supernova].run_command, parameter[name[nova_creds], name[nova_args], name[supernova_args]]]
call[name[sys].exit, parameter[name[returncode]]] | keyword[def] identifier[run_supernova] ( identifier[ctx] , identifier[executable] , identifier[debug] , identifier[quiet] , identifier[environment] , identifier[command] , identifier[conf] ,
identifier[echo] , identifier[dashboard] ):
literal[string]
keyword[try] :
identifier[nova_creds] = identifier[config] . identifier[run_config] ( identifier[config_file_override] = identifier[conf] )
keyword[except] identifier[Exception] keyword[as] identifier[e] :
identifier[msg] =( literal[string]
literal[string] ). identifier[format] ( identifier[e] )
identifier[click] . identifier[echo] ( identifier[msg] )
identifier[ctx] . identifier[exit] ( literal[int] )
identifier[utils] . identifier[check_environment_presets] ()
keyword[if] identifier[utils] . identifier[is_valid_group] ( identifier[environment] , identifier[nova_creds] ):
identifier[envs] = identifier[utils] . identifier[get_envs_in_group] ( identifier[environment] , identifier[nova_creds] )
keyword[elif] literal[string] keyword[in] identifier[environment] :
identifier[envs] =[]
keyword[for] identifier[env] keyword[in] identifier[environment] . identifier[split] ( literal[string] ):
keyword[if] identifier[utils] . identifier[is_valid_group] ( identifier[env] , identifier[nova_creds] ):
identifier[envs] . identifier[extend] ( identifier[utils] . identifier[get_envs_in_group] ( identifier[env] , identifier[nova_creds] ))
keyword[else] :
identifier[envs] . identifier[append] ( identifier[env] )
keyword[elif] identifier[environment] . identifier[startswith] ( literal[string] ) keyword[and] identifier[environment] . identifier[endswith] ( literal[string] ):
identifier[envs] =[ identifier[nova_env] keyword[for] identifier[nova_env] keyword[in] identifier[nova_creds] . identifier[keys] ()
keyword[if] identifier[re] . identifier[search] ( identifier[environment] [ literal[int] :- literal[int] ], identifier[nova_env] )]
keyword[else] :
identifier[envs] =[ identifier[environment] ]
identifier[supernova_args] ={
literal[string] : identifier[debug] ,
literal[string] : identifier[executable] ,
literal[string] : identifier[quiet] ,
literal[string] : identifier[echo] ,
literal[string] : identifier[dashboard] ,
}
keyword[if] identifier[len] ( identifier[envs] )== literal[int] keyword[and] keyword[not] identifier[utils] . identifier[is_valid_environment] ( identifier[envs] [ literal[int] ], identifier[nova_creds] ):
identifier[msg] =( literal[string]
literal[string]
literal[string] . identifier[format] ( identifier[envs] [ literal[int] ]))
identifier[click] . identifier[echo] ( identifier[msg] )
identifier[ctx] . identifier[exit] ( literal[int] )
keyword[if] identifier[supernova_args] [ literal[string] ]:
keyword[if] identifier[len] ( identifier[envs] )> literal[int] :
identifier[msg] =( literal[string]
literal[string] )
identifier[click] . identifier[echo] ( identifier[msg] )
identifier[ctx] . identifier[exit] ( literal[int] )
identifier[env] = identifier[credentials] . identifier[prep_shell_environment] ( identifier[envs] [ literal[int] ], identifier[nova_creds] )
keyword[for] identifier[k] keyword[in] identifier[env] :
identifier[click] . identifier[echo] ( literal[string] . identifier[format] ( identifier[k] , identifier[env] [ identifier[k] ]))
identifier[ctx] . identifier[exit] ( literal[int] )
keyword[if] identifier[supernova_args] [ literal[string] ]:
keyword[if] identifier[len] ( identifier[envs] )> literal[int] :
identifier[msg] =( literal[string]
literal[string] )
identifier[click] . identifier[echo] ( identifier[msg] )
identifier[ctx] . identifier[exit] ( literal[int] )
identifier[url] = identifier[nova_creds] [ identifier[envs] [ literal[int] ]]. identifier[get] ( literal[string] )
keyword[if] identifier[url] keyword[is] keyword[None] :
identifier[msg] =( literal[string]
literal[string] % identifier[envs] [ literal[int] ])
identifier[click] . identifier[echo] ( identifier[msg] )
identifier[ctx] . identifier[exit] ( literal[int] )
identifier[webbrowser] . identifier[open] ( identifier[url] )
identifier[ctx] . identifier[exit] ( literal[int] )
keyword[if] identifier[len] ( identifier[command] )== literal[int] :
identifier[msg] =( literal[string]
literal[string] . identifier[format] ( identifier[envs] [ literal[int] ]))
identifier[click] . identifier[echo] ( identifier[msg] )
identifier[ctx] . identifier[exit] ( literal[int] )
identifier[nova_args] = identifier[list] ( identifier[command] )
keyword[for] identifier[env] keyword[in] identifier[envs] :
identifier[supernova_args] [ literal[string] ]= identifier[env]
identifier[returncode] = identifier[supernova] . identifier[run_command] ( identifier[nova_creds] , identifier[nova_args] ,
identifier[supernova_args] )
identifier[sys] . identifier[exit] ( identifier[returncode] ) | def run_supernova(ctx, executable, debug, quiet, environment, command, conf, echo, dashboard):
"""
You can use supernova with many OpenStack clients and avoid the pain of
managing multiple sets of environment variables. Getting started is easy
and there's some documentation that can help:
http://supernova.readthedocs.org/
The first step is to get your environment variables packed into a
configuration file, usually in ~/.supernova. The docs (linked above) have
some good examples that you can fill in via copy/paste.
Once you have a configuration ready to go, replace 'prod' below with one
of your configured environments and try some of these commands:
supernova prod list (Lists instances via novaclient)
supernova prod image-list (Lists images via novaclient)
supernova prod boot ... (Boots an instance via novaclient)
Have questions, bugs, or comments? Head on over to Github and open an
issue or submit a pull request!
https://github.com/major/supernova
"""
# Retrieve our credentials from the configuration file
try:
nova_creds = config.run_config(config_file_override=conf) # depends on [control=['try'], data=[]]
except Exception as e:
msg = "\n There's an error in your configuration file:\n\n {0}\n".format(e)
click.echo(msg)
ctx.exit(1) # depends on [control=['except'], data=['e']]
# Warn the user if there are potentially conflicting environment variables
# already set in the user's environment.
utils.check_environment_presets()
# Is our environment argument a single environment or a supernova group?
if utils.is_valid_group(environment, nova_creds):
envs = utils.get_envs_in_group(environment, nova_creds) # depends on [control=['if'], data=[]]
elif ',' in environment:
envs = []
for env in environment.split(','):
if utils.is_valid_group(env, nova_creds):
envs.extend(utils.get_envs_in_group(env, nova_creds)) # depends on [control=['if'], data=[]]
else:
envs.append(env) # depends on [control=['for'], data=['env']] # depends on [control=['if'], data=['environment']]
elif environment.startswith('/') and environment.endswith('/'):
envs = [nova_env for nova_env in nova_creds.keys() if re.search(environment[1:-1], nova_env)] # depends on [control=['if'], data=[]]
else:
envs = [environment]
# These are arguments for supernova and not the executable that supernova
# will eventually call.
supernova_args = {'debug': debug, 'executable': executable, 'quiet': quiet, 'echo': echo, 'dashboard': dashboard}
# If the user specified a single environment, we need to verify that the
# environment actually exists in their configuration file.
if len(envs) == 1 and (not utils.is_valid_environment(envs[0], nova_creds)):
msg = "\nCouldn't find an environment called '{0}' in your configuration file.\nTry supernova --list to see all configured environments.\n".format(envs[0])
click.echo(msg)
ctx.exit(1) # depends on [control=['if'], data=[]]
if supernova_args['echo']:
if len(envs) > 1:
msg = "\nCan't echo a group of environments.\nSpecify a single environment when using --echo."
click.echo(msg)
ctx.exit(1) # depends on [control=['if'], data=[]]
env = credentials.prep_shell_environment(envs[0], nova_creds)
for k in env:
click.echo('{0}={1}'.format(k, env[k])) # depends on [control=['for'], data=['k']]
ctx.exit(0) # depends on [control=['if'], data=[]]
if supernova_args['dashboard']:
if len(envs) > 1:
msg = "\nCan't open dashboard for a group of environments.\nSpecify a single environment when using --dashboard."
click.echo(msg)
ctx.exit(1) # depends on [control=['if'], data=[]]
url = nova_creds[envs[0]].get('SUPERNOVA_DASHBOARD_URL')
if url is None:
msg = '\nNo SUPERNOVA_DASHBOARD_URL specified for environment: %s' % envs[0]
click.echo(msg)
ctx.exit(1) # depends on [control=['if'], data=[]]
webbrowser.open(url)
ctx.exit(0) # depends on [control=['if'], data=[]]
if len(command) == 0:
msg = '\nMissing arguments to pass to executable Run supernova --help for examples.\n'.format(envs[0])
click.echo(msg)
ctx.exit(1) # depends on [control=['if'], data=[]]
nova_args = list(command)
# Loop through the single environment (if the user specified one) or all
# of the environments in a supernova group (if the user specified a group).
for env in envs:
supernova_args['nova_env'] = env
returncode = supernova.run_command(nova_creds, nova_args, supernova_args) # depends on [control=['for'], data=['env']]
# NOTE(major): The return code here is the one that comes back from the
# OS_EXECUTABLE that supernova runs (by default, 'nova'). When using
# supernova groups, the return code is the one returned by the executable
# for the last environment in the group.
#
# It's not ideal, but it's all I can think of for now. ;)
sys.exit(returncode) |
def do_version():
    """Return version details of the running server api"""
    # Assemble a Version model describing the currently running server.
    version_info = ApiPool.ping.model.Version(
        name=ApiPool().current_server_name,
        version=ApiPool().current_server_api.get_version(),
        container=get_container_version(),
    )
    log.info("/version: {0}".format(pprint.pformat(version_info)))
    return version_info
constant[Return version details of the running server api]
variable[v] assign[=] call[name[ApiPool].ping.model.Version, parameter[]]
call[name[log].info, parameter[binary_operation[constant[/version: ] + call[name[pprint].pformat, parameter[name[v]]]]]]
return[name[v]] | keyword[def] identifier[do_version] ():
literal[string]
identifier[v] = identifier[ApiPool] . identifier[ping] . identifier[model] . identifier[Version] (
identifier[name] = identifier[ApiPool] (). identifier[current_server_name] ,
identifier[version] = identifier[ApiPool] (). identifier[current_server_api] . identifier[get_version] (),
identifier[container] = identifier[get_container_version] (),
)
identifier[log] . identifier[info] ( literal[string] + identifier[pprint] . identifier[pformat] ( identifier[v] ))
keyword[return] identifier[v] | def do_version():
"""Return version details of the running server api"""
v = ApiPool.ping.model.Version(name=ApiPool().current_server_name, version=ApiPool().current_server_api.get_version(), container=get_container_version())
log.info('/version: ' + pprint.pformat(v))
return v |
def get_normalized_parameters(self):
    """Return a string that contains the parameters that must be signed.

    Builds the normalized OAuth 1.0 parameter string: every request
    parameter except ``oauth_signature``, merged with any query-string
    parameters found in ``self.url``, sorted, then URL-encoded with the
    spec-mandated ``%20`` / ``~`` adjustments.
    """
    items = []
    for key, value in self.items():
        if key == 'oauth_signature':
            continue
        # 1.0a/9.1.1 states that kvp must be sorted by key, then by value,
        # so we unpack sequence values into multiple items for sorting.
        if isinstance(value, STRING_TYPES):
            items.append((to_utf8_if_string(key), to_utf8(value)))
        else:
            # EAFP: try to treat the value as a sequence; a TypeError
            # means it is a scalar and is kept as a single item.
            try:
                value = list(value)
            except TypeError as e:
                # Only tolerate the specific "not iterable" failure;
                # anything else would be a real bug.
                assert 'is not iterable' in str(e)
                items.append((to_utf8_if_string(key), to_utf8_if_string(value)))
            else:
                # One (key, item) pair per sequence element so each sorts
                # independently.
                items.extend((to_utf8_if_string(key), to_utf8_if_string(item)) for item in value)
    # Include any query string parameters from the provided URL
    # (index 4 of the urlparse result is the query component).
    query = urlparse(self.url)[4]
    url_items = self._split_url_string(query).items()
    url_items = [(to_utf8(k), to_utf8_optional_iterator(v)) for k, v in url_items if k != 'oauth_signature' ]
    items.extend(url_items)
    items.sort()
    encoded_str = urlencode(items, True)
    # Encode signature parameters per Oauth Core 1.0 protocol
    # spec draft 7, section 3.6
    # (http://tools.ietf.org/html/draft-hammer-oauth-07#section-3.6)
    # Spaces must be encoded with "%20" instead of "+"
    return encoded_str.replace('+', '%20').replace('%7E', '~')
constant[Return a string that contains the parameters that must be signed.]
variable[items] assign[=] list[[]]
for taget[tuple[[<ast.Name object at 0x7da1b1ddddb0>, <ast.Name object at 0x7da1b1ddfdc0>]]] in starred[call[name[self].items, parameter[]]] begin[:]
if compare[name[key] equal[==] constant[oauth_signature]] begin[:]
continue
if call[name[isinstance], parameter[name[value], name[STRING_TYPES]]] begin[:]
call[name[items].append, parameter[tuple[[<ast.Call object at 0x7da1b1ddc2e0>, <ast.Call object at 0x7da1b1dde8f0>]]]]
variable[query] assign[=] call[call[name[urlparse], parameter[name[self].url]]][constant[4]]
variable[url_items] assign[=] call[call[name[self]._split_url_string, parameter[name[query]]].items, parameter[]]
variable[url_items] assign[=] <ast.ListComp object at 0x7da1b1ddea40>
call[name[items].extend, parameter[name[url_items]]]
call[name[items].sort, parameter[]]
variable[encoded_str] assign[=] call[name[urlencode], parameter[name[items], constant[True]]]
return[call[call[name[encoded_str].replace, parameter[constant[+], constant[%20]]].replace, parameter[constant[%7E], constant[~]]]] | keyword[def] identifier[get_normalized_parameters] ( identifier[self] ):
literal[string]
identifier[items] =[]
keyword[for] identifier[key] , identifier[value] keyword[in] identifier[self] . identifier[items] ():
keyword[if] identifier[key] == literal[string] :
keyword[continue]
keyword[if] identifier[isinstance] ( identifier[value] , identifier[STRING_TYPES] ):
identifier[items] . identifier[append] (( identifier[to_utf8_if_string] ( identifier[key] ), identifier[to_utf8] ( identifier[value] )))
keyword[else] :
keyword[try] :
identifier[value] = identifier[list] ( identifier[value] )
keyword[except] identifier[TypeError] keyword[as] identifier[e] :
keyword[assert] literal[string] keyword[in] identifier[str] ( identifier[e] )
identifier[items] . identifier[append] (( identifier[to_utf8_if_string] ( identifier[key] ), identifier[to_utf8_if_string] ( identifier[value] )))
keyword[else] :
identifier[items] . identifier[extend] (( identifier[to_utf8_if_string] ( identifier[key] ), identifier[to_utf8_if_string] ( identifier[item] )) keyword[for] identifier[item] keyword[in] identifier[value] )
identifier[query] = identifier[urlparse] ( identifier[self] . identifier[url] )[ literal[int] ]
identifier[url_items] = identifier[self] . identifier[_split_url_string] ( identifier[query] ). identifier[items] ()
identifier[url_items] =[( identifier[to_utf8] ( identifier[k] ), identifier[to_utf8_optional_iterator] ( identifier[v] )) keyword[for] identifier[k] , identifier[v] keyword[in] identifier[url_items] keyword[if] identifier[k] != literal[string] ]
identifier[items] . identifier[extend] ( identifier[url_items] )
identifier[items] . identifier[sort] ()
identifier[encoded_str] = identifier[urlencode] ( identifier[items] , keyword[True] )
keyword[return] identifier[encoded_str] . identifier[replace] ( literal[string] , literal[string] ). identifier[replace] ( literal[string] , literal[string] ) | def get_normalized_parameters(self):
"""Return a string that contains the parameters that must be signed."""
items = []
for (key, value) in self.items():
if key == 'oauth_signature':
continue # depends on [control=['if'], data=[]]
# 1.0a/9.1.1 states that kvp must be sorted by key, then by value,
# so we unpack sequence values into multiple items for sorting.
if isinstance(value, STRING_TYPES):
items.append((to_utf8_if_string(key), to_utf8(value))) # depends on [control=['if'], data=[]]
else:
try:
value = list(value) # depends on [control=['try'], data=[]]
except TypeError as e:
assert 'is not iterable' in str(e)
items.append((to_utf8_if_string(key), to_utf8_if_string(value))) # depends on [control=['except'], data=['e']]
else:
items.extend(((to_utf8_if_string(key), to_utf8_if_string(item)) for item in value)) # depends on [control=['for'], data=[]]
# Include any query string parameters from the provided URL
query = urlparse(self.url)[4]
url_items = self._split_url_string(query).items()
url_items = [(to_utf8(k), to_utf8_optional_iterator(v)) for (k, v) in url_items if k != 'oauth_signature']
items.extend(url_items)
items.sort()
encoded_str = urlencode(items, True)
# Encode signature parameters per Oauth Core 1.0 protocol
# spec draft 7, section 3.6
# (http://tools.ietf.org/html/draft-hammer-oauth-07#section-3.6)
# Spaces must be encoded with "%20" instead of "+"
return encoded_str.replace('+', '%20').replace('%7E', '~') |
def distribute_payoff(self, match_set):
    """Distribute the payoff received in response to the selected
    action of the given match set among the rules in the action set
    which deserve credit for recommending the action. The match_set
    argument is the MatchSet instance which suggested the selected
    action and earned the payoff.
    Usage:
        match_set = model.match(situation)
        match_set.select_action()
        match_set.payoff = reward
        model.algorithm.distribute_payoff(match_set)
    Arguments:
        match_set: A MatchSet instance for which the accumulated payoff
            needs to be distributed among its classifier rules.
    Return: None
    """
    assert isinstance(match_set, MatchSet)
    assert match_set.algorithm is self
    assert match_set.selected_action is not None

    reward = float(match_set.payoff)
    action_set = match_set[match_set.selected_action]
    total_numerosity = sum(rule.numerosity for rule in action_set)

    # Update each participating rule's running estimates. Note the
    # ordering: the error update deliberately uses the freshly updated
    # average reward, matching the standard credit-assignment recipe.
    for rule in action_set:
        rule.experience += 1
        # Fall back to 1/experience while the rule is young, then settle
        # on the configured learning rate.
        rate = max(self.learning_rate, 1 / rule.experience)
        rule.average_reward += rate * (reward - rule.average_reward)
        rule.error += rate * (abs(reward - rule.average_reward) - rule.error)
        rule.action_set_size += rate * (total_numerosity - rule.action_set_size)

    # Recompute fitness values from the updated estimates.
    self._update_fitness(action_set)

    # Optionally condense the action set by subsuming redundant rules.
    if self.do_action_set_subsumption:
        self._action_set_subsumption(action_set)
constant[Distribute the payoff received in response to the selected
action of the given match set among the rules in the action set
which deserve credit for recommending the action. The match_set
argument is the MatchSet instance which suggested the selected
action and earned the payoff.
Usage:
match_set = model.match(situation)
match_set.select_action()
match_set.payoff = reward
model.algorithm.distribute_payoff(match_set)
Arguments:
match_set: A MatchSet instance for which the accumulated payoff
needs to be distributed among its classifier rules.
Return: None
]
assert[call[name[isinstance], parameter[name[match_set], name[MatchSet]]]]
assert[compare[name[match_set].algorithm is name[self]]]
assert[compare[name[match_set].selected_action is_not constant[None]]]
variable[payoff] assign[=] call[name[float], parameter[name[match_set].payoff]]
variable[action_set] assign[=] call[name[match_set]][name[match_set].selected_action]
variable[action_set_size] assign[=] call[name[sum], parameter[<ast.GeneratorExp object at 0x7da1b0fd7520>]]
for taget[name[rule]] in starred[name[action_set]] begin[:]
<ast.AugAssign object at 0x7da1b0fd7e80>
variable[update_rate] assign[=] call[name[max], parameter[name[self].learning_rate, binary_operation[constant[1] / name[rule].experience]]]
<ast.AugAssign object at 0x7da1b0fd6e00>
<ast.AugAssign object at 0x7da1b0fd6770>
<ast.AugAssign object at 0x7da1b0fd6d10>
call[name[self]._update_fitness, parameter[name[action_set]]]
if name[self].do_action_set_subsumption begin[:]
call[name[self]._action_set_subsumption, parameter[name[action_set]]] | keyword[def] identifier[distribute_payoff] ( identifier[self] , identifier[match_set] ):
literal[string]
keyword[assert] identifier[isinstance] ( identifier[match_set] , identifier[MatchSet] )
keyword[assert] identifier[match_set] . identifier[algorithm] keyword[is] identifier[self]
keyword[assert] identifier[match_set] . identifier[selected_action] keyword[is] keyword[not] keyword[None]
identifier[payoff] = identifier[float] ( identifier[match_set] . identifier[payoff] )
identifier[action_set] = identifier[match_set] [ identifier[match_set] . identifier[selected_action] ]
identifier[action_set_size] = identifier[sum] ( identifier[rule] . identifier[numerosity] keyword[for] identifier[rule] keyword[in] identifier[action_set] )
keyword[for] identifier[rule] keyword[in] identifier[action_set] :
identifier[rule] . identifier[experience] += literal[int]
identifier[update_rate] = identifier[max] ( identifier[self] . identifier[learning_rate] , literal[int] / identifier[rule] . identifier[experience] )
identifier[rule] . identifier[average_reward] +=(
( identifier[payoff] - identifier[rule] . identifier[average_reward] )*
identifier[update_rate]
)
identifier[rule] . identifier[error] +=(
( identifier[abs] ( identifier[payoff] - identifier[rule] . identifier[average_reward] )- identifier[rule] . identifier[error] )*
identifier[update_rate]
)
identifier[rule] . identifier[action_set_size] +=(
( identifier[action_set_size] - identifier[rule] . identifier[action_set_size] )*
identifier[update_rate]
)
identifier[self] . identifier[_update_fitness] ( identifier[action_set] )
keyword[if] identifier[self] . identifier[do_action_set_subsumption] :
identifier[self] . identifier[_action_set_subsumption] ( identifier[action_set] ) | def distribute_payoff(self, match_set):
"""Distribute the payoff received in response to the selected
action of the given match set among the rules in the action set
which deserve credit for recommending the action. The match_set
argument is the MatchSet instance which suggested the selected
action and earned the payoff.
Usage:
match_set = model.match(situation)
match_set.select_action()
match_set.payoff = reward
model.algorithm.distribute_payoff(match_set)
Arguments:
match_set: A MatchSet instance for which the accumulated payoff
needs to be distributed among its classifier rules.
Return: None
"""
assert isinstance(match_set, MatchSet)
assert match_set.algorithm is self
assert match_set.selected_action is not None
payoff = float(match_set.payoff)
action_set = match_set[match_set.selected_action]
action_set_size = sum((rule.numerosity for rule in action_set))
# Update the average reward, error, and action set size of each
# rule participating in the action set.
for rule in action_set:
rule.experience += 1
update_rate = max(self.learning_rate, 1 / rule.experience)
rule.average_reward += (payoff - rule.average_reward) * update_rate
rule.error += (abs(payoff - rule.average_reward) - rule.error) * update_rate
rule.action_set_size += (action_set_size - rule.action_set_size) * update_rate # depends on [control=['for'], data=['rule']]
# Update the fitness of the rules.
self._update_fitness(action_set)
# If the parameters so indicate, perform action set subsumption.
if self.do_action_set_subsumption:
self._action_set_subsumption(action_set) # depends on [control=['if'], data=[]] |
def find_repositories_by_walking_and_following_symlinks(path):
    """Walk a tree and return a sequence of (directory, dotdir) pairs.

    Follows symlinks while walking, and guards against symlink loops by
    remembering the (device, inode) pair of every directory visited and
    pruning any subdirectory that resolves to one already seen.
    """
    repos = []

    # This is for detecting symlink loops and escaping them. This is similar to
    # http://stackoverflow.com/questions/36977259/avoiding-infinite-recursion-with-os-walk/36977656#36977656
    def inode(path):
        # (st_dev, st_ino) uniquely identifies a file on POSIX systems,
        # regardless of which symlinked path we reached it through.
        stats = os.stat(path)
        return stats.st_dev, stats.st_ino

    # Seed with the root so a symlink pointing back at it is also caught.
    seen_inodes = {inode(path)}
    for dirpath, dirnames, filenames in os.walk(path, followlinks=True):
        inodes = [inode(os.path.join(dirpath, p)) for p in dirnames]
        # Prune in place (slice assignment) so os.walk honors the pruning;
        # `i not in seen_inodes` replaces the unidiomatic `not i in ...`.
        dirnames[:] = [p for p, i in zip(dirnames, inodes)
                       if i not in seen_inodes]
        seen_inodes.update(inodes)
        for dotdir in set(dirnames) & DOTDIRS:
            repos.append((dirpath, dotdir))
    return repos
constant[Walk a tree and return a sequence of (directory, dotdir) pairs.]
variable[repos] assign[=] list[[]]
def function[inode, parameter[path]]:
variable[stats] assign[=] call[name[os].stat, parameter[name[path]]]
return[tuple[[<ast.Attribute object at 0x7da1b26a05b0>, <ast.Attribute object at 0x7da1b26a1900>]]]
variable[seen_inodes] assign[=] <ast.Set object at 0x7da1b26a1930>
for taget[tuple[[<ast.Name object at 0x7da1b26a08b0>, <ast.Name object at 0x7da1b26a19f0>, <ast.Name object at 0x7da1b26a3190>]]] in starred[call[name[os].walk, parameter[name[path]]]] begin[:]
variable[inodes] assign[=] <ast.ListComp object at 0x7da1b26a0c70>
call[name[dirnames]][<ast.Slice object at 0x7da1b26a2440>] assign[=] <ast.ListComp object at 0x7da1b26a3250>
call[name[seen_inodes].update, parameter[name[inodes]]]
for taget[name[dotdir]] in starred[binary_operation[call[name[set], parameter[name[dirnames]]] <ast.BitAnd object at 0x7da2590d6b60> name[DOTDIRS]]] begin[:]
call[name[repos].append, parameter[tuple[[<ast.Name object at 0x7da1b26a3d30>, <ast.Name object at 0x7da1b26a3f70>]]]]
return[name[repos]] | keyword[def] identifier[find_repositories_by_walking_and_following_symlinks] ( identifier[path] ):
literal[string]
identifier[repos] =[]
keyword[def] identifier[inode] ( identifier[path] ):
identifier[stats] = identifier[os] . identifier[stat] ( identifier[path] )
keyword[return] identifier[stats] . identifier[st_dev] , identifier[stats] . identifier[st_ino]
identifier[seen_inodes] ={ identifier[inode] ( identifier[path] )}
keyword[for] identifier[dirpath] , identifier[dirnames] , identifier[filenames] keyword[in] identifier[os] . identifier[walk] ( identifier[path] , identifier[followlinks] = keyword[True] ):
identifier[inodes] =[ identifier[inode] ( identifier[os] . identifier[path] . identifier[join] ( identifier[dirpath] , identifier[p] )) keyword[for] identifier[p] keyword[in] identifier[dirnames] ]
identifier[dirnames] [:]=[ identifier[p] keyword[for] identifier[p] , identifier[i] keyword[in] identifier[zip] ( identifier[dirnames] , identifier[inodes] )
keyword[if] keyword[not] identifier[i] keyword[in] identifier[seen_inodes] ]
identifier[seen_inodes] . identifier[update] ( identifier[inodes] )
keyword[for] identifier[dotdir] keyword[in] identifier[set] ( identifier[dirnames] )& identifier[DOTDIRS] :
identifier[repos] . identifier[append] (( identifier[dirpath] , identifier[dotdir] ))
keyword[return] identifier[repos] | def find_repositories_by_walking_and_following_symlinks(path):
"""Walk a tree and return a sequence of (directory, dotdir) pairs."""
repos = []
# This is for detecting symlink loops and escaping them. This is similar to
# http://stackoverflow.com/questions/36977259/avoiding-infinite-recursion-with-os-walk/36977656#36977656
def inode(path):
stats = os.stat(path)
return (stats.st_dev, stats.st_ino)
seen_inodes = {inode(path)}
for (dirpath, dirnames, filenames) in os.walk(path, followlinks=True):
inodes = [inode(os.path.join(dirpath, p)) for p in dirnames]
dirnames[:] = [p for (p, i) in zip(dirnames, inodes) if not i in seen_inodes]
seen_inodes.update(inodes)
for dotdir in set(dirnames) & DOTDIRS:
repos.append((dirpath, dotdir)) # depends on [control=['for'], data=['dotdir']] # depends on [control=['for'], data=[]]
return repos |
def save(host=None, port=None, db=None, password=None):
    '''
    Synchronously save the dataset to disk
    CLI Example:
    .. code-block:: bash
        salt '*' redis.save
    '''
    # Connect with the given (or configured) parameters and issue the
    # blocking SAVE command against that server.
    return _connect(host, port, db, password).save()
constant[
Synchronously save the dataset to disk
CLI Example:
.. code-block:: bash
salt '*' redis.save
]
variable[server] assign[=] call[name[_connect], parameter[name[host], name[port], name[db], name[password]]]
return[call[name[server].save, parameter[]]] | keyword[def] identifier[save] ( identifier[host] = keyword[None] , identifier[port] = keyword[None] , identifier[db] = keyword[None] , identifier[password] = keyword[None] ):
literal[string]
identifier[server] = identifier[_connect] ( identifier[host] , identifier[port] , identifier[db] , identifier[password] )
keyword[return] identifier[server] . identifier[save] () | def save(host=None, port=None, db=None, password=None):
"""
Synchronously save the dataset to disk
CLI Example:
.. code-block:: bash
salt '*' redis.save
"""
server = _connect(host, port, db, password)
return server.save() |
def get_action_environment(op, name):
    """Return the 'environment' entry of the named action of *op*.

    Returns None when the action cannot be found (or when the found
    action defines no environment).
    """
    action = _get_action_by_name(op, name)
    return action.get('environment') if action else None
constant[Return the environment for the operation.]
variable[action] assign[=] call[name[_get_action_by_name], parameter[name[op], name[name]]]
if name[action] begin[:]
return[call[name[action].get, parameter[constant[environment]]]] | keyword[def] identifier[get_action_environment] ( identifier[op] , identifier[name] ):
literal[string]
identifier[action] = identifier[_get_action_by_name] ( identifier[op] , identifier[name] )
keyword[if] identifier[action] :
keyword[return] identifier[action] . identifier[get] ( literal[string] ) | def get_action_environment(op, name):
"""Return the environment for the operation."""
action = _get_action_by_name(op, name)
if action:
return action.get('environment') # depends on [control=['if'], data=[]] |
def read_md5(self, hex=False):
    """Compute the MD5 digest of this file's contents.

    hex - when true, return the hexadecimal digest string instead of
    the raw digest bytes.

    The entire file is streamed through the hash in 8 KiB chunks, so
    arbitrarily large files can be hashed without loading them whole.
    """
    stream = self.open('rb')
    try:
        digest = hashlib.md5()
        # iter() with a b'' sentinel stops at end-of-file, exactly like
        # the classic read-until-empty loop.
        for chunk in iter(lambda: stream.read(8192), b''):
            digest.update(chunk)
    finally:
        stream.close()
    return digest.hexdigest() if hex else digest.digest()
constant[ Calculate the md5 hash for this file.
hex - Return the digest as hex string.
This reads through the entire file.
]
variable[f] assign[=] call[name[self].open, parameter[constant[rb]]]
<ast.Try object at 0x7da1b247f7f0>
if name[hex] begin[:]
return[call[name[m].hexdigest, parameter[]]] | keyword[def] identifier[read_md5] ( identifier[self] , identifier[hex] = keyword[False] ):
literal[string]
identifier[f] = identifier[self] . identifier[open] ( literal[string] )
keyword[try] :
identifier[m] = identifier[hashlib] . identifier[md5] ()
keyword[while] keyword[True] :
identifier[d] = identifier[f] . identifier[read] ( literal[int] )
keyword[if] keyword[not] identifier[d] :
keyword[break]
identifier[m] . identifier[update] ( identifier[d] )
keyword[finally] :
identifier[f] . identifier[close] ()
keyword[if] identifier[hex] :
keyword[return] identifier[m] . identifier[hexdigest] ()
keyword[else] :
keyword[return] identifier[m] . identifier[digest] () | def read_md5(self, hex=False):
""" Calculate the md5 hash for this file.
hex - Return the digest as hex string.
This reads through the entire file.
"""
f = self.open('rb')
try:
m = hashlib.md5()
while True:
d = f.read(8192)
if not d:
break # depends on [control=['if'], data=[]]
m.update(d) # depends on [control=['while'], data=[]] # depends on [control=['try'], data=[]]
finally:
f.close()
if hex:
return m.hexdigest() # depends on [control=['if'], data=[]]
else:
return m.digest() |
def get_stamp(ra, decl,
              survey='DSS2 Red',
              scaling='Linear',
              sizepix=300,
              forcefetch=False,
              cachedir='~/.astrobase/stamp-cache',
              timeout=10.0,
              retry_failed=True,
              verbose=True,
              jitter=5.0):
    '''This gets a FITS cutout from the NASA GSFC SkyView service.
    This downloads stamps in FITS format from the NASA SkyView service:
    https://skyview.gsfc.nasa.gov/current/cgi/query.pl
    Parameters
    ----------
    ra,decl : float
        These are decimal equatorial coordinates for the cutout center.
    survey : str
        The survey name to get the stamp from. This is one of the
        values in the 'SkyView Surveys' option boxes on the SkyView
        webpage. Currently, we've only tested using 'DSS2 Red' as the value for
        this kwarg, but the other ones should work in principle.
    scaling : str
        This is the pixel value scaling function to use.
    sizepix : int
        The width and height of the cutout are specified by this value.
    forcefetch : bool
        If True, will disregard any existing cached copies of the stamp already
        downloaded corresponding to the requested center coordinates and
        redownload the FITS from the SkyView service.
    cachedir : str
        This is the path to the astrobase cache directory. All downloaded FITS
        stamps are stored here as .fits.gz files so we can immediately respond
        with the cached copy when a request is made for a coordinate center
        that's already been downloaded.
    timeout : float
        Sets the timeout in seconds to wait for a response from the NASA SkyView
        service.
    retry_failed : bool
        If the initial request to SkyView fails, and this is True, will retry
        until it succeeds.
    verbose : bool
        If True, indicates progress.
    jitter : float
        This is used to control the scale of the random wait in seconds before
        starting the query. Useful in parallelized situations.
    Returns
    -------
    dict
        A dict of the following form is returned::
            {
                'params':{input ra, decl and kwargs used},
                'provenance':'cached' or 'new download',
                'fitsfile':FITS file to which the cutout was saved on disk
            }
    '''
    # parse the given params into the correct format for the form
    formposition = ['%.4f, %.4f' % (ra, decl)]
    formscaling = [scaling]
    formparams = copy.deepcopy(SKYVIEW_PARAMS)
    formparams['Position'] = formposition
    formparams['survey'][0] = survey
    formparams['scaling'] = formscaling
    formparams['pixels'] = ['%s' % sizepix]
    # see if the cachedir exists
    if '~' in cachedir:
        cachedir = os.path.expanduser(cachedir)
    if not os.path.exists(cachedir):
        os.makedirs(cachedir)
    # figure out if we can get this image from the cache
    cachekey = '%s-%s-%s-%s' % (formposition[0], survey, scaling, sizepix)
    cachekey = hashlib.sha256(cachekey.encode()).hexdigest()
    cachefname = os.path.join(cachedir, '%s.fits.gz' % cachekey)
    provenance = 'cache'
    # this is to handle older cached stamps that didn't include the sizepix
    # parameter
    if sizepix == 300:
        oldcachekey = '%s-%s-%s' % (formposition[0], survey, scaling)
        oldcachekey = hashlib.sha256(oldcachekey.encode()).hexdigest()
        oldcachefname = os.path.join(cachedir, '%s.fits.gz' % oldcachekey)
        if os.path.exists(oldcachefname):
            cachefname = oldcachefname
    # if this exists in the cache and we're not refetching, get the frame
    if forcefetch or (not os.path.exists(cachefname)):
        provenance = 'new download'
        # FIX: random.randint requires integer bounds; jitter defaults to a
        # float (5.0), which random.randint rejects on Python 3.12+ (it was
        # deprecated in 3.10). Truncate to int, keeping at least 1 so the
        # bounds stay valid for small jitter values.
        time.sleep(random.randint(1, max(1, int(jitter))))
        # fire the request
        try:
            if verbose:
                LOGINFO('submitting stamp request for %s, %s, %s, %s' % (
                    formposition[0],
                    survey,
                    scaling,
                    sizepix)
                )
            req = requests.get(SKYVIEW_URL, params=formparams, timeout=timeout)
            req.raise_for_status()
            # get the text of the response, this includes the locations of the
            # generated FITS on the server
            resp = req.text
            # find the URLS of the FITS
            fitsurls = FITS_REGEX.findall(resp)
            # download the URLs
            if fitsurls:
                for fitsurl in fitsurls:
                    fullfitsurl = urljoin(FITS_BASEURL, fitsurl)
                    if verbose:
                        LOGINFO('getting %s' % fullfitsurl)
                    fitsreq = requests.get(fullfitsurl, timeout=timeout)
                    with gzip.open(cachefname,'wb') as outfd:
                        outfd.write(fitsreq.content)
            else:
                LOGERROR('no FITS URLs found in query results for %s' %
                         formposition)
                return None
        except requests.exceptions.HTTPError as e:
            LOGEXCEPTION('SkyView stamp request for '
                         'coordinates %s failed' % repr(formposition))
            return None
        except requests.exceptions.Timeout as e:
            LOGERROR('SkyView stamp request for '
                     'coordinates %s did not complete within %s seconds' %
                     (repr(formposition), timeout))
            return None
        except Exception as e:
            LOGEXCEPTION('SkyView stamp request for '
                         'coordinates %s failed' % repr(formposition))
            return None
    #
    # DONE WITH FETCHING STUFF
    #
    # make sure the returned file is OK
    try:
        stampfits = pyfits.open(cachefname)
        stampfits.close()
        retdict = {
            'params':{'ra':ra,
                      'decl':decl,
                      'survey':survey,
                      'scaling':scaling,
                      'sizepix':sizepix},
            'provenance':provenance,
            'fitsfile':cachefname
        }
        return retdict
    except Exception as e:
        LOGERROR('could not open cached FITS from Skyview download: %r' %
                 {'ra':ra,
                  'decl':decl,
                  'survey':survey,
                  'scaling': scaling,
                  'sizepix': sizepix})
        if retry_failed:
            # The cached file is corrupt or incomplete: force a re-fetch.
            # NOTE(review): this recursion has no depth limit while the
            # service keeps returning bad files, per the documented
            # "retry until it succeeds" behavior.
            return get_stamp(ra, decl,
                             survey=survey,
                             scaling=scaling,
                             sizepix=sizepix,
                             forcefetch=True,
                             cachedir=cachedir,
                             timeout=timeout,
                             verbose=verbose)
        else:
            return None
constant[This gets a FITS cutout from the NASA GSFC SkyView service.
This downloads stamps in FITS format from the NASA SkyView service:
https://skyview.gsfc.nasa.gov/current/cgi/query.pl
Parameters
----------
ra,decl : float
These are decimal equatorial coordinates for the cutout center.
survey : str
The survey name to get the stamp from. This is one of the
values in the 'SkyView Surveys' option boxes on the SkyView
webpage. Currently, we've only tested using 'DSS2 Red' as the value for
this kwarg, but the other ones should work in principle.
scaling : str
This is the pixel value scaling function to use.
sizepix : int
The width and height of the cutout are specified by this value.
forcefetch : bool
If True, will disregard any existing cached copies of the stamp already
downloaded corresponding to the requested center coordinates and
redownload the FITS from the SkyView service.
cachedir : str
This is the path to the astrobase cache directory. All downloaded FITS
stamps are stored here as .fits.gz files so we can immediately respond
with the cached copy when a request is made for a coordinate center
that's already been downloaded.
timeout : float
Sets the timeout in seconds to wait for a response from the NASA SkyView
service.
retry_failed : bool
If the initial request to SkyView fails, and this is True, will retry
until it succeeds.
verbose : bool
If True, indicates progress.
jitter : float
This is used to control the scale of the random wait in seconds before
starting the query. Useful in parallelized situations.
Returns
-------
dict
A dict of the following form is returned::
{
'params':{input ra, decl and kwargs used},
'provenance':'cached' or 'new download',
'fitsfile':FITS file to which the cutout was saved on disk
}
]
variable[formposition] assign[=] list[[<ast.BinOp object at 0x7da1b00fd810>]]
variable[formscaling] assign[=] list[[<ast.Name object at 0x7da1b00fdcc0>]]
variable[formparams] assign[=] call[name[copy].deepcopy, parameter[name[SKYVIEW_PARAMS]]]
call[name[formparams]][constant[Position]] assign[=] name[formposition]
call[call[name[formparams]][constant[survey]]][constant[0]] assign[=] name[survey]
call[name[formparams]][constant[scaling]] assign[=] name[formscaling]
call[name[formparams]][constant[pixels]] assign[=] list[[<ast.BinOp object at 0x7da1b00fca60>]]
if compare[constant[~] in name[cachedir]] begin[:]
variable[cachedir] assign[=] call[name[os].path.expanduser, parameter[name[cachedir]]]
if <ast.UnaryOp object at 0x7da1b00fc520> begin[:]
call[name[os].makedirs, parameter[name[cachedir]]]
variable[cachekey] assign[=] binary_operation[constant[%s-%s-%s-%s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Subscript object at 0x7da1b00fc850>, <ast.Name object at 0x7da1b00fcc10>, <ast.Name object at 0x7da1b00fce80>, <ast.Name object at 0x7da1b00fce50>]]]
variable[cachekey] assign[=] call[call[name[hashlib].sha256, parameter[call[name[cachekey].encode, parameter[]]]].hexdigest, parameter[]]
variable[cachefname] assign[=] call[name[os].path.join, parameter[name[cachedir], binary_operation[constant[%s.fits.gz] <ast.Mod object at 0x7da2590d6920> name[cachekey]]]]
variable[provenance] assign[=] constant[cache]
if compare[name[sizepix] equal[==] constant[300]] begin[:]
variable[oldcachekey] assign[=] binary_operation[constant[%s-%s-%s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Subscript object at 0x7da1b00fd330>, <ast.Name object at 0x7da1b00fd2a0>, <ast.Name object at 0x7da1b00fd210>]]]
variable[oldcachekey] assign[=] call[call[name[hashlib].sha256, parameter[call[name[oldcachekey].encode, parameter[]]]].hexdigest, parameter[]]
variable[oldcachefname] assign[=] call[name[os].path.join, parameter[name[cachedir], binary_operation[constant[%s.fits.gz] <ast.Mod object at 0x7da2590d6920> name[oldcachekey]]]]
if call[name[os].path.exists, parameter[name[oldcachefname]]] begin[:]
variable[cachefname] assign[=] name[oldcachefname]
if <ast.BoolOp object at 0x7da1b00fef80> begin[:]
variable[provenance] assign[=] constant[new download]
call[name[time].sleep, parameter[call[name[random].randint, parameter[constant[1], name[jitter]]]]]
<ast.Try object at 0x7da1b00e7580>
<ast.Try object at 0x7da1b00e6410> | keyword[def] identifier[get_stamp] ( identifier[ra] , identifier[decl] ,
identifier[survey] = literal[string] ,
identifier[scaling] = literal[string] ,
identifier[sizepix] = literal[int] ,
identifier[forcefetch] = keyword[False] ,
identifier[cachedir] = literal[string] ,
identifier[timeout] = literal[int] ,
identifier[retry_failed] = keyword[True] ,
identifier[verbose] = keyword[True] ,
identifier[jitter] = literal[int] ):
literal[string]
identifier[formposition] =[ literal[string] %( identifier[ra] , identifier[decl] )]
identifier[formscaling] =[ identifier[scaling] ]
identifier[formparams] = identifier[copy] . identifier[deepcopy] ( identifier[SKYVIEW_PARAMS] )
identifier[formparams] [ literal[string] ]= identifier[formposition]
identifier[formparams] [ literal[string] ][ literal[int] ]= identifier[survey]
identifier[formparams] [ literal[string] ]= identifier[formscaling]
identifier[formparams] [ literal[string] ]=[ literal[string] % identifier[sizepix] ]
keyword[if] literal[string] keyword[in] identifier[cachedir] :
identifier[cachedir] = identifier[os] . identifier[path] . identifier[expanduser] ( identifier[cachedir] )
keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[exists] ( identifier[cachedir] ):
identifier[os] . identifier[makedirs] ( identifier[cachedir] )
identifier[cachekey] = literal[string] %( identifier[formposition] [ literal[int] ], identifier[survey] , identifier[scaling] , identifier[sizepix] )
identifier[cachekey] = identifier[hashlib] . identifier[sha256] ( identifier[cachekey] . identifier[encode] ()). identifier[hexdigest] ()
identifier[cachefname] = identifier[os] . identifier[path] . identifier[join] ( identifier[cachedir] , literal[string] % identifier[cachekey] )
identifier[provenance] = literal[string]
keyword[if] identifier[sizepix] == literal[int] :
identifier[oldcachekey] = literal[string] %( identifier[formposition] [ literal[int] ], identifier[survey] , identifier[scaling] )
identifier[oldcachekey] = identifier[hashlib] . identifier[sha256] ( identifier[oldcachekey] . identifier[encode] ()). identifier[hexdigest] ()
identifier[oldcachefname] = identifier[os] . identifier[path] . identifier[join] ( identifier[cachedir] , literal[string] % identifier[oldcachekey] )
keyword[if] identifier[os] . identifier[path] . identifier[exists] ( identifier[oldcachefname] ):
identifier[cachefname] = identifier[oldcachefname]
keyword[if] identifier[forcefetch] keyword[or] ( keyword[not] identifier[os] . identifier[path] . identifier[exists] ( identifier[cachefname] )):
identifier[provenance] = literal[string]
identifier[time] . identifier[sleep] ( identifier[random] . identifier[randint] ( literal[int] , identifier[jitter] ))
keyword[try] :
keyword[if] identifier[verbose] :
identifier[LOGINFO] ( literal[string] %(
identifier[formposition] [ literal[int] ],
identifier[survey] ,
identifier[scaling] ,
identifier[sizepix] )
)
identifier[req] = identifier[requests] . identifier[get] ( identifier[SKYVIEW_URL] , identifier[params] = identifier[formparams] , identifier[timeout] = identifier[timeout] )
identifier[req] . identifier[raise_for_status] ()
identifier[resp] = identifier[req] . identifier[text]
identifier[fitsurls] = identifier[FITS_REGEX] . identifier[findall] ( identifier[resp] )
keyword[if] identifier[fitsurls] :
keyword[for] identifier[fitsurl] keyword[in] identifier[fitsurls] :
identifier[fullfitsurl] = identifier[urljoin] ( identifier[FITS_BASEURL] , identifier[fitsurl] )
keyword[if] identifier[verbose] :
identifier[LOGINFO] ( literal[string] % identifier[fullfitsurl] )
identifier[fitsreq] = identifier[requests] . identifier[get] ( identifier[fullfitsurl] , identifier[timeout] = identifier[timeout] )
keyword[with] identifier[gzip] . identifier[open] ( identifier[cachefname] , literal[string] ) keyword[as] identifier[outfd] :
identifier[outfd] . identifier[write] ( identifier[fitsreq] . identifier[content] )
keyword[else] :
identifier[LOGERROR] ( literal[string] %
identifier[formposition] )
keyword[return] keyword[None]
keyword[except] identifier[requests] . identifier[exceptions] . identifier[HTTPError] keyword[as] identifier[e] :
identifier[LOGEXCEPTION] ( literal[string]
literal[string] % identifier[repr] ( identifier[formposition] ))
keyword[return] keyword[None]
keyword[except] identifier[requests] . identifier[exceptions] . identifier[Timeout] keyword[as] identifier[e] :
identifier[LOGERROR] ( literal[string]
literal[string] %
( identifier[repr] ( identifier[formposition] ), identifier[timeout] ))
keyword[return] keyword[None]
keyword[except] identifier[Exception] keyword[as] identifier[e] :
identifier[LOGEXCEPTION] ( literal[string]
literal[string] % identifier[repr] ( identifier[formposition] ))
keyword[return] keyword[None]
keyword[try] :
identifier[stampfits] = identifier[pyfits] . identifier[open] ( identifier[cachefname] )
identifier[stampfits] . identifier[close] ()
identifier[retdict] ={
literal[string] :{ literal[string] : identifier[ra] ,
literal[string] : identifier[decl] ,
literal[string] : identifier[survey] ,
literal[string] : identifier[scaling] ,
literal[string] : identifier[sizepix] },
literal[string] : identifier[provenance] ,
literal[string] : identifier[cachefname]
}
keyword[return] identifier[retdict]
keyword[except] identifier[Exception] keyword[as] identifier[e] :
identifier[LOGERROR] ( literal[string] %
{ literal[string] : identifier[ra] ,
literal[string] : identifier[decl] ,
literal[string] : identifier[survey] ,
literal[string] : identifier[scaling] ,
literal[string] : identifier[sizepix] })
keyword[if] identifier[retry_failed] :
keyword[return] identifier[get_stamp] ( identifier[ra] , identifier[decl] ,
identifier[survey] = identifier[survey] ,
identifier[scaling] = identifier[scaling] ,
identifier[sizepix] = identifier[sizepix] ,
identifier[forcefetch] = keyword[True] ,
identifier[cachedir] = identifier[cachedir] ,
identifier[timeout] = identifier[timeout] ,
identifier[verbose] = identifier[verbose] )
keyword[else] :
keyword[return] keyword[None] | def get_stamp(ra, decl, survey='DSS2 Red', scaling='Linear', sizepix=300, forcefetch=False, cachedir='~/.astrobase/stamp-cache', timeout=10.0, retry_failed=True, verbose=True, jitter=5.0):
"""This gets a FITS cutout from the NASA GSFC SkyView service.
This downloads stamps in FITS format from the NASA SkyView service:
https://skyview.gsfc.nasa.gov/current/cgi/query.pl
Parameters
----------
ra,decl : float
These are decimal equatorial coordinates for the cutout center.
survey : str
The survey name to get the stamp from. This is one of the
values in the 'SkyView Surveys' option boxes on the SkyView
webpage. Currently, we've only tested using 'DSS2 Red' as the value for
this kwarg, but the other ones should work in principle.
scaling : str
This is the pixel value scaling function to use.
sizepix : int
The width and height of the cutout are specified by this value.
forcefetch : bool
If True, will disregard any existing cached copies of the stamp already
downloaded corresponding to the requested center coordinates and
redownload the FITS from the SkyView service.
cachedir : str
This is the path to the astrobase cache directory. All downloaded FITS
stamps are stored here as .fits.gz files so we can immediately respond
with the cached copy when a request is made for a coordinate center
that's already been downloaded.
timeout : float
Sets the timeout in seconds to wait for a response from the NASA SkyView
service.
retry_failed : bool
If the initial request to SkyView fails, and this is True, will retry
until it succeeds.
verbose : bool
If True, indicates progress.
jitter : float
This is used to control the scale of the random wait in seconds before
starting the query. Useful in parallelized situations.
Returns
-------
dict
A dict of the following form is returned::
{
'params':{input ra, decl and kwargs used},
'provenance':'cached' or 'new download',
'fitsfile':FITS file to which the cutout was saved on disk
}
"""
# parse the given params into the correct format for the form
formposition = ['%.4f, %.4f' % (ra, decl)]
formscaling = [scaling]
formparams = copy.deepcopy(SKYVIEW_PARAMS)
formparams['Position'] = formposition
formparams['survey'][0] = survey
formparams['scaling'] = formscaling
formparams['pixels'] = ['%s' % sizepix]
# see if the cachedir exists
if '~' in cachedir:
cachedir = os.path.expanduser(cachedir) # depends on [control=['if'], data=['cachedir']]
if not os.path.exists(cachedir):
os.makedirs(cachedir) # depends on [control=['if'], data=[]]
# figure out if we can get this image from the cache
cachekey = '%s-%s-%s-%s' % (formposition[0], survey, scaling, sizepix)
cachekey = hashlib.sha256(cachekey.encode()).hexdigest()
cachefname = os.path.join(cachedir, '%s.fits.gz' % cachekey)
provenance = 'cache'
# this is to handle older cached stamps that didn't include the sizepix
# parameter
if sizepix == 300:
oldcachekey = '%s-%s-%s' % (formposition[0], survey, scaling)
oldcachekey = hashlib.sha256(oldcachekey.encode()).hexdigest()
oldcachefname = os.path.join(cachedir, '%s.fits.gz' % oldcachekey)
if os.path.exists(oldcachefname):
cachefname = oldcachefname # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
# if this exists in the cache and we're not refetching, get the frame
if forcefetch or not os.path.exists(cachefname):
provenance = 'new download'
time.sleep(random.randint(1, jitter))
# fire the request
try:
if verbose:
LOGINFO('submitting stamp request for %s, %s, %s, %s' % (formposition[0], survey, scaling, sizepix)) # depends on [control=['if'], data=[]]
req = requests.get(SKYVIEW_URL, params=formparams, timeout=timeout)
req.raise_for_status()
# get the text of the response, this includes the locations of the
# generated FITS on the server
resp = req.text
# find the URLS of the FITS
fitsurls = FITS_REGEX.findall(resp)
# download the URLs
if fitsurls:
for fitsurl in fitsurls:
fullfitsurl = urljoin(FITS_BASEURL, fitsurl)
if verbose:
LOGINFO('getting %s' % fullfitsurl) # depends on [control=['if'], data=[]]
fitsreq = requests.get(fullfitsurl, timeout=timeout)
with gzip.open(cachefname, 'wb') as outfd:
outfd.write(fitsreq.content) # depends on [control=['with'], data=['outfd']] # depends on [control=['for'], data=['fitsurl']] # depends on [control=['if'], data=[]]
else:
LOGERROR('no FITS URLs found in query results for %s' % formposition)
return None # depends on [control=['try'], data=[]]
except requests.exceptions.HTTPError as e:
LOGEXCEPTION('SkyView stamp request for coordinates %s failed' % repr(formposition))
return None # depends on [control=['except'], data=[]]
except requests.exceptions.Timeout as e:
LOGERROR('SkyView stamp request for coordinates %s did not complete within %s seconds' % (repr(formposition), timeout))
return None # depends on [control=['except'], data=[]]
except Exception as e:
LOGEXCEPTION('SkyView stamp request for coordinates %s failed' % repr(formposition))
return None # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]]
#
# DONE WITH FETCHING STUFF
#
# make sure the returned file is OK
try:
stampfits = pyfits.open(cachefname)
stampfits.close()
retdict = {'params': {'ra': ra, 'decl': decl, 'survey': survey, 'scaling': scaling, 'sizepix': sizepix}, 'provenance': provenance, 'fitsfile': cachefname}
return retdict # depends on [control=['try'], data=[]]
except Exception as e:
LOGERROR('could not open cached FITS from Skyview download: %r' % {'ra': ra, 'decl': decl, 'survey': survey, 'scaling': scaling, 'sizepix': sizepix})
if retry_failed:
return get_stamp(ra, decl, survey=survey, scaling=scaling, sizepix=sizepix, forcefetch=True, cachedir=cachedir, timeout=timeout, verbose=verbose) # depends on [control=['if'], data=[]]
else:
return None # depends on [control=['except'], data=[]] |
def add_summary_stats_to_table(table_in, table_out, colnames):
    """Append summary-statistic columns for *colnames* to an output table.

    Parameters
    ----------
    table_in : `astropy.table.Table`
        Table with the input data.
    table_out : `astropy.table.Table`
        Table the new columns are appended to.
    colnames : list
        Names of the input columns to summarize.

    For every input column and every statistic returned by
    `collect_summary_stats`, a column named ``<col>_<stat>`` (sharing the
    input column's dtype and unit) is added to *table_out*.
    """
    for name in colnames:
        src = table_in[name]
        summary = collect_summary_stats(src.data)
        for stat_name, stat_value in summary.items():
            new_col = Column(data=np.vstack([stat_value]),
                             name="%s_%s" % (name, stat_name),
                             dtype=src.dtype,
                             shape=stat_value.shape,
                             unit=src.unit)
            table_out.add_column(new_col)
constant[Collect summary statisitics from an input table and add them to an output table
Parameters
----------
table_in : `astropy.table.Table`
Table with the input data.
table_out : `astropy.table.Table`
Table with the output data.
colnames : list
List of the column names to get summary statistics for.
]
for taget[name[col]] in starred[name[colnames]] begin[:]
variable[col_in] assign[=] call[name[table_in]][name[col]]
variable[stats] assign[=] call[name[collect_summary_stats], parameter[name[col_in].data]]
for taget[tuple[[<ast.Name object at 0x7da2047e9a20>, <ast.Name object at 0x7da2047e8f40>]]] in starred[call[name[stats].items, parameter[]]] begin[:]
variable[out_name] assign[=] binary_operation[constant[%s_%s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da20c7c81f0>, <ast.Name object at 0x7da20c7c8f10>]]]
variable[col_out] assign[=] call[name[Column], parameter[]]
call[name[table_out].add_column, parameter[name[col_out]]] | keyword[def] identifier[add_summary_stats_to_table] ( identifier[table_in] , identifier[table_out] , identifier[colnames] ):
literal[string]
keyword[for] identifier[col] keyword[in] identifier[colnames] :
identifier[col_in] = identifier[table_in] [ identifier[col] ]
identifier[stats] = identifier[collect_summary_stats] ( identifier[col_in] . identifier[data] )
keyword[for] identifier[k] , identifier[v] keyword[in] identifier[stats] . identifier[items] ():
identifier[out_name] = literal[string] %( identifier[col] , identifier[k] )
identifier[col_out] = identifier[Column] ( identifier[data] = identifier[np] . identifier[vstack] (
[ identifier[v] ]), identifier[name] = identifier[out_name] , identifier[dtype] = identifier[col_in] . identifier[dtype] , identifier[shape] = identifier[v] . identifier[shape] , identifier[unit] = identifier[col_in] . identifier[unit] )
identifier[table_out] . identifier[add_column] ( identifier[col_out] ) | def add_summary_stats_to_table(table_in, table_out, colnames):
"""Collect summary statisitics from an input table and add them to an output table
Parameters
----------
table_in : `astropy.table.Table`
Table with the input data.
table_out : `astropy.table.Table`
Table with the output data.
colnames : list
List of the column names to get summary statistics for.
"""
for col in colnames:
col_in = table_in[col]
stats = collect_summary_stats(col_in.data)
for (k, v) in stats.items():
out_name = '%s_%s' % (col, k)
col_out = Column(data=np.vstack([v]), name=out_name, dtype=col_in.dtype, shape=v.shape, unit=col_in.unit)
table_out.add_column(col_out) # depends on [control=['for'], data=[]] # depends on [control=['for'], data=['col']] |
def pnum_to_group(mesh_shape, group_dims, pnum):
  """Group number for grouped allreduce.

  The group is identified by the processor coordinates along the
  dimensions that are *not* reduced over.

  Args:
    mesh_shape: a Shape
    group_dims: a list of integers (the dimensions reduced over)
    pnum: an integer
  Returns:
    an integer
  """
  full_coord = pnum_to_processor_coordinates(mesh_shape, pnum)
  # Keep only the axes outside the reduction group.
  kept_dims = []
  for axis, dim_size in enumerate(mesh_shape):
    if axis not in group_dims:
      kept_dims.append(dim_size)
  kept_coord = []
  for axis, c in enumerate(full_coord):
    if axis not in group_dims:
      kept_coord.append(c)
  return processor_coordinates_to_pnum(Shape(kept_dims), kept_coord)
constant[Group number for grouped allreduce.
Args:
mesh_shape: a Shape
group_dims: a list of integers (the dimensions reduced over)
pnum: an integer
Returns:
an integer
]
variable[coord] assign[=] call[name[pnum_to_processor_coordinates], parameter[name[mesh_shape], name[pnum]]]
variable[remaining_shape] assign[=] call[name[Shape], parameter[<ast.ListComp object at 0x7da20c6c7400>]]
variable[remaining_coord] assign[=] <ast.ListComp object at 0x7da20c6c4c70>
return[call[name[processor_coordinates_to_pnum], parameter[name[remaining_shape], name[remaining_coord]]]] | keyword[def] identifier[pnum_to_group] ( identifier[mesh_shape] , identifier[group_dims] , identifier[pnum] ):
literal[string]
identifier[coord] = identifier[pnum_to_processor_coordinates] ( identifier[mesh_shape] , identifier[pnum] )
identifier[remaining_shape] = identifier[Shape] (
[ identifier[d] keyword[for] identifier[i] , identifier[d] keyword[in] identifier[enumerate] ( identifier[mesh_shape] ) keyword[if] identifier[i] keyword[not] keyword[in] identifier[group_dims] ])
identifier[remaining_coord] =[ identifier[d] keyword[for] identifier[i] , identifier[d] keyword[in] identifier[enumerate] ( identifier[coord] ) keyword[if] identifier[i] keyword[not] keyword[in] identifier[group_dims] ]
keyword[return] identifier[processor_coordinates_to_pnum] ( identifier[remaining_shape] , identifier[remaining_coord] ) | def pnum_to_group(mesh_shape, group_dims, pnum):
"""Group number for grouped allreduce.
Args:
mesh_shape: a Shape
group_dims: a list of integers (the dimensions reduced over)
pnum: an integer
Returns:
an integer
"""
coord = pnum_to_processor_coordinates(mesh_shape, pnum)
remaining_shape = Shape([d for (i, d) in enumerate(mesh_shape) if i not in group_dims])
remaining_coord = [d for (i, d) in enumerate(coord) if i not in group_dims]
return processor_coordinates_to_pnum(remaining_shape, remaining_coord) |
def extract_src_loss_table(dstore, loss_type):
    """
    Extract the source loss table for a given loss type, ordered in
    decreasing order. Example:
    http://127.0.0.1:8800/v1/calc/30/extract/src_loss_table/structural
    """
    oq = dstore['oqparam']
    # Column index of the requested loss type in the loss table.
    li = oq.lti[loss_type]
    source_ids = dstore['source_info']['source_id']
    # Per-rupture (source index, source-group id) pairs.
    idxs = dstore['ruptures'].value[['srcidx', 'grp_id']]
    # Per-rupture losses for the requested loss type only.
    losses = dstore['rup_loss_table'][:, li]
    # Structured accumulator: one (grp_id, total loss) record per source.
    slt = numpy.zeros(len(source_ids), [('grp_id', U32), (loss_type, F32)])
    for loss, (srcidx, grp_id) in zip(losses, idxs):
        # Sum rupture losses into the owning source's record.
        slt[srcidx][loss_type] += loss
        slt[srcidx]['grp_id'] = grp_id
    # Prepend the source_id column, then sort ascending by loss ...
    slt = util.compose_arrays(source_ids, slt, 'source_id')
    slt.sort(order=loss_type)
    # ... and return in decreasing-loss order.
    return slt[::-1]
constant[
Extract the source loss table for a give loss type, ordered in decreasing
order. Example:
http://127.0.0.1:8800/v1/calc/30/extract/src_loss_table/structural
]
variable[oq] assign[=] call[name[dstore]][constant[oqparam]]
variable[li] assign[=] call[name[oq].lti][name[loss_type]]
variable[source_ids] assign[=] call[call[name[dstore]][constant[source_info]]][constant[source_id]]
variable[idxs] assign[=] call[call[name[dstore]][constant[ruptures]].value][list[[<ast.Constant object at 0x7da18ede5f00>, <ast.Constant object at 0x7da18ede50f0>]]]
variable[losses] assign[=] call[call[name[dstore]][constant[rup_loss_table]]][tuple[[<ast.Slice object at 0x7da18ede6ec0>, <ast.Name object at 0x7da18ede48e0>]]]
variable[slt] assign[=] call[name[numpy].zeros, parameter[call[name[len], parameter[name[source_ids]]], list[[<ast.Tuple object at 0x7da18ede5bd0>, <ast.Tuple object at 0x7da18ede6aa0>]]]]
for taget[tuple[[<ast.Name object at 0x7da18ede6590>, <ast.Tuple object at 0x7da18ede5420>]]] in starred[call[name[zip], parameter[name[losses], name[idxs]]]] begin[:]
<ast.AugAssign object at 0x7da18ede6d40>
call[call[name[slt]][name[srcidx]]][constant[grp_id]] assign[=] name[grp_id]
variable[slt] assign[=] call[name[util].compose_arrays, parameter[name[source_ids], name[slt], constant[source_id]]]
call[name[slt].sort, parameter[]]
return[call[name[slt]][<ast.Slice object at 0x7da18f58d9c0>]] | keyword[def] identifier[extract_src_loss_table] ( identifier[dstore] , identifier[loss_type] ):
literal[string]
identifier[oq] = identifier[dstore] [ literal[string] ]
identifier[li] = identifier[oq] . identifier[lti] [ identifier[loss_type] ]
identifier[source_ids] = identifier[dstore] [ literal[string] ][ literal[string] ]
identifier[idxs] = identifier[dstore] [ literal[string] ]. identifier[value] [[ literal[string] , literal[string] ]]
identifier[losses] = identifier[dstore] [ literal[string] ][:, identifier[li] ]
identifier[slt] = identifier[numpy] . identifier[zeros] ( identifier[len] ( identifier[source_ids] ),[( literal[string] , identifier[U32] ),( identifier[loss_type] , identifier[F32] )])
keyword[for] identifier[loss] ,( identifier[srcidx] , identifier[grp_id] ) keyword[in] identifier[zip] ( identifier[losses] , identifier[idxs] ):
identifier[slt] [ identifier[srcidx] ][ identifier[loss_type] ]+= identifier[loss]
identifier[slt] [ identifier[srcidx] ][ literal[string] ]= identifier[grp_id]
identifier[slt] = identifier[util] . identifier[compose_arrays] ( identifier[source_ids] , identifier[slt] , literal[string] )
identifier[slt] . identifier[sort] ( identifier[order] = identifier[loss_type] )
keyword[return] identifier[slt] [::- literal[int] ] | def extract_src_loss_table(dstore, loss_type):
"""
Extract the source loss table for a give loss type, ordered in decreasing
order. Example:
http://127.0.0.1:8800/v1/calc/30/extract/src_loss_table/structural
"""
oq = dstore['oqparam']
li = oq.lti[loss_type]
source_ids = dstore['source_info']['source_id']
idxs = dstore['ruptures'].value[['srcidx', 'grp_id']]
losses = dstore['rup_loss_table'][:, li]
slt = numpy.zeros(len(source_ids), [('grp_id', U32), (loss_type, F32)])
for (loss, (srcidx, grp_id)) in zip(losses, idxs):
slt[srcidx][loss_type] += loss
slt[srcidx]['grp_id'] = grp_id # depends on [control=['for'], data=[]]
slt = util.compose_arrays(source_ids, slt, 'source_id')
slt.sort(order=loss_type)
return slt[::-1] |
def _gen_find_command(coll, spec, projection, skip, limit, batch_size,
                      options, read_concern=DEFAULT_READ_CONCERN,
                      collation=None):
    """Generate a find command document.

    Translates a legacy ``$``-modifier query into the modern ``find``
    command shape, then appends the optional cursor settings.
    """
    cmd = SON([('find', coll)])
    if '$query' in spec:
        # Map legacy modifier names onto command fields; keys without a
        # known mapping are kept as-is.
        cmd.update([(_MODIFIERS.get(key, key), val)
                    for key, val in spec.items()])
        # These modifiers have no equivalent field in the find command.
        cmd.pop('$explain', None)
        cmd.pop('$readPreference', None)
    else:
        cmd['filter'] = spec

    if projection:
        cmd['projection'] = projection
    if skip:
        cmd['skip'] = skip
    if limit:
        cmd['limit'] = abs(limit)
        if limit < 0:
            # A negative limit requests a single batch only.
            cmd['singleBatch'] = True
    if batch_size:
        cmd['batchSize'] = batch_size
    if read_concern.level:
        cmd['readConcern'] = read_concern.document
    if collation:
        cmd['collation'] = collation
    if options:
        # Expand the query-flags bitmask into boolean command fields.
        cmd.update([(opt, True)
                    for opt, val in _OPTIONS.items()
                    if options & val])
    return cmd
constant[Generate a find command document.]
variable[cmd] assign[=] call[name[SON], parameter[list[[<ast.Tuple object at 0x7da18ede76d0>]]]]
if compare[constant[$query] in name[spec]] begin[:]
call[name[cmd].update, parameter[<ast.ListComp object at 0x7da18ede78e0>]]
if compare[constant[$explain] in name[cmd]] begin[:]
call[name[cmd].pop, parameter[constant[$explain]]]
if compare[constant[$readPreference] in name[cmd]] begin[:]
call[name[cmd].pop, parameter[constant[$readPreference]]]
if name[projection] begin[:]
call[name[cmd]][constant[projection]] assign[=] name[projection]
if name[skip] begin[:]
call[name[cmd]][constant[skip]] assign[=] name[skip]
if name[limit] begin[:]
call[name[cmd]][constant[limit]] assign[=] call[name[abs], parameter[name[limit]]]
if compare[name[limit] less[<] constant[0]] begin[:]
call[name[cmd]][constant[singleBatch]] assign[=] constant[True]
if name[batch_size] begin[:]
call[name[cmd]][constant[batchSize]] assign[=] name[batch_size]
if name[read_concern].level begin[:]
call[name[cmd]][constant[readConcern]] assign[=] name[read_concern].document
if name[collation] begin[:]
call[name[cmd]][constant[collation]] assign[=] name[collation]
if name[options] begin[:]
call[name[cmd].update, parameter[<ast.ListComp object at 0x7da18f811600>]]
return[name[cmd]] | keyword[def] identifier[_gen_find_command] ( identifier[coll] , identifier[spec] , identifier[projection] , identifier[skip] , identifier[limit] , identifier[batch_size] ,
identifier[options] , identifier[read_concern] = identifier[DEFAULT_READ_CONCERN] ,
identifier[collation] = keyword[None] ):
literal[string]
identifier[cmd] = identifier[SON] ([( literal[string] , identifier[coll] )])
keyword[if] literal[string] keyword[in] identifier[spec] :
identifier[cmd] . identifier[update] ([( identifier[_MODIFIERS] [ identifier[key] ], identifier[val] ) keyword[if] identifier[key] keyword[in] identifier[_MODIFIERS] keyword[else] ( identifier[key] , identifier[val] )
keyword[for] identifier[key] , identifier[val] keyword[in] identifier[spec] . identifier[items] ()])
keyword[if] literal[string] keyword[in] identifier[cmd] :
identifier[cmd] . identifier[pop] ( literal[string] )
keyword[if] literal[string] keyword[in] identifier[cmd] :
identifier[cmd] . identifier[pop] ( literal[string] )
keyword[else] :
identifier[cmd] [ literal[string] ]= identifier[spec]
keyword[if] identifier[projection] :
identifier[cmd] [ literal[string] ]= identifier[projection]
keyword[if] identifier[skip] :
identifier[cmd] [ literal[string] ]= identifier[skip]
keyword[if] identifier[limit] :
identifier[cmd] [ literal[string] ]= identifier[abs] ( identifier[limit] )
keyword[if] identifier[limit] < literal[int] :
identifier[cmd] [ literal[string] ]= keyword[True]
keyword[if] identifier[batch_size] :
identifier[cmd] [ literal[string] ]= identifier[batch_size]
keyword[if] identifier[read_concern] . identifier[level] :
identifier[cmd] [ literal[string] ]= identifier[read_concern] . identifier[document]
keyword[if] identifier[collation] :
identifier[cmd] [ literal[string] ]= identifier[collation]
keyword[if] identifier[options] :
identifier[cmd] . identifier[update] ([( identifier[opt] , keyword[True] )
keyword[for] identifier[opt] , identifier[val] keyword[in] identifier[_OPTIONS] . identifier[items] ()
keyword[if] identifier[options] & identifier[val] ])
keyword[return] identifier[cmd] | def _gen_find_command(coll, spec, projection, skip, limit, batch_size, options, read_concern=DEFAULT_READ_CONCERN, collation=None):
"""Generate a find command document."""
cmd = SON([('find', coll)])
if '$query' in spec:
cmd.update([(_MODIFIERS[key], val) if key in _MODIFIERS else (key, val) for (key, val) in spec.items()])
if '$explain' in cmd:
cmd.pop('$explain') # depends on [control=['if'], data=['cmd']]
if '$readPreference' in cmd:
cmd.pop('$readPreference') # depends on [control=['if'], data=['cmd']] # depends on [control=['if'], data=['spec']]
else:
cmd['filter'] = spec
if projection:
cmd['projection'] = projection # depends on [control=['if'], data=[]]
if skip:
cmd['skip'] = skip # depends on [control=['if'], data=[]]
if limit:
cmd['limit'] = abs(limit)
if limit < 0:
cmd['singleBatch'] = True # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
if batch_size:
cmd['batchSize'] = batch_size # depends on [control=['if'], data=[]]
if read_concern.level:
cmd['readConcern'] = read_concern.document # depends on [control=['if'], data=[]]
if collation:
cmd['collation'] = collation # depends on [control=['if'], data=[]]
if options:
cmd.update([(opt, True) for (opt, val) in _OPTIONS.items() if options & val]) # depends on [control=['if'], data=[]]
return cmd |
def get_content_commit_date(extensions, acceptance_callback=None,
                            root_dir='.'):
    """Get the datetime for the most recent commit to a project that
    affected certain types of content.
    Parameters
    ----------
    extensions : sequence of 'str'
        Extensions of files to consider in getting the most recent commit
        date. For example, ``('rst', 'svg', 'png')`` are content extensions
        for a Sphinx project. **Extension comparision is case sensitive.** add
        uppercase variants to match uppercase extensions.
    acceptance_callback : callable
        Callable function whose sole argument is a file path, and returns
        `True` or `False` depending on whether the file's commit date should
        be considered or not. This callback is only run on files that are
        included by ``extensions``. Thus this callback is a way to exclude
        specific files that would otherwise be included by their extension.
    root_dir : 'str`, optional
        Only content contained within this root directory is considered.
        This directory must be, or be contained by, a Git repository. This is
        the current working directory by default.
    Returns
    -------
    commit_date : `datetime.datetime`
        Datetime of the most recent content commit.
    Raises
    ------
    RuntimeError
        Raised if no content files are found.
    """
    logger = logging.getLogger(__name__)

    def _null_callback(_):
        # Default acceptance: every file found by extension counts.
        return True

    if acceptance_callback is None:
        acceptance_callback = _null_callback

    # Resolve the root once and cache a single repo object for all queries.
    root_dir = os.path.abspath(root_dir)
    repo = git.repo.base.Repo(path=root_dir, search_parent_directories=True)

    # Iterate over all files with all file extensions, looking for the
    # newest commit datetime. The per-extension iterators are chained
    # lazily; nothing is materialized up front.
    newest_datetime = None
    content_paths = itertools.chain.from_iterable(
        _iter_filepaths_with_extension(ext, root_dir=root_dir)
        for ext in extensions)
    for content_path in content_paths:
        content_path = os.path.abspath(os.path.join(root_dir, content_path))
        if not acceptance_callback(content_path):
            continue
        logger.debug('Found content path %r', content_path)
        try:
            commit_datetime = read_git_commit_timestamp_for_file(
                content_path, repo=repo)
            logger.debug('Commit timestamp of %r is %s',
                         content_path, commit_datetime)
        except IOError:
            # Fixed message typo: "Count not" -> "Could not".
            logger.warning(
                'Could not get commit for %r, skipping',
                content_path)
            continue
        if newest_datetime is None or commit_datetime > newest_datetime:
            # Seed initial newest_datetime
            # or set a newer newest_datetime
            newest_datetime = commit_datetime
            logger.debug('Newest commit timestamp is %s', newest_datetime)

    logger.debug('Final commit timestamp is %s', newest_datetime)
    if newest_datetime is None:
        raise RuntimeError('No content files found in {}'.format(root_dir))
    return newest_datetime
constant[Get the datetime for the most recent commit to a project that
affected certain types of content.
Parameters
----------
extensions : sequence of 'str'
Extensions of files to consider in getting the most recent commit
date. For example, ``('rst', 'svg', 'png')`` are content extensions
for a Sphinx project. **Extension comparision is case sensitive.** add
uppercase variants to match uppercase extensions.
acceptance_callback : callable
Callable function whose sole argument is a file path, and returns
`True` or `False` depending on whether the file's commit date should
be considered or not. This callback is only run on files that are
included by ``extensions``. Thus this callback is a way to exclude
specific files that would otherwise be included by their extension.
root_dir : 'str`, optional
Only content contained within this root directory is considered.
This directory must be, or be contained by, a Git repository. This is
the current working directory by default.
Returns
-------
commit_date : `datetime.datetime`
Datetime of the most recent content commit.
Raises
------
RuntimeError
Raised if no content files are found.
]
variable[logger] assign[=] call[name[logging].getLogger, parameter[name[__name__]]]
def function[_null_callback, parameter[_]]:
return[constant[True]]
if compare[name[acceptance_callback] is constant[None]] begin[:]
variable[acceptance_callback] assign[=] name[_null_callback]
variable[root_dir] assign[=] call[name[os].path.abspath, parameter[name[root_dir]]]
variable[repo] assign[=] call[name[git].repo.base.Repo, parameter[]]
variable[newest_datetime] assign[=] constant[None]
variable[iters] assign[=] <ast.ListComp object at 0x7da1b004ef50>
for taget[name[content_path]] in starred[call[name[itertools].chain, parameter[<ast.Starred object at 0x7da20c6a9150>]]] begin[:]
variable[content_path] assign[=] call[name[os].path.abspath, parameter[call[name[os].path.join, parameter[name[root_dir], name[content_path]]]]]
if call[name[acceptance_callback], parameter[name[content_path]]] begin[:]
call[name[logger].debug, parameter[constant[Found content path %r], name[content_path]]]
<ast.Try object at 0x7da20c6a9390>
if <ast.BoolOp object at 0x7da1b004f280> begin[:]
variable[newest_datetime] assign[=] name[commit_datetime]
call[name[logger].debug, parameter[constant[Newest commit timestamp is %s], name[newest_datetime]]]
call[name[logger].debug, parameter[constant[Final commit timestamp is %s], name[newest_datetime]]]
if compare[name[newest_datetime] is constant[None]] begin[:]
<ast.Raise object at 0x7da1b004dc90>
return[name[newest_datetime]] | keyword[def] identifier[get_content_commit_date] ( identifier[extensions] , identifier[acceptance_callback] = keyword[None] ,
identifier[root_dir] = literal[string] ):
literal[string]
identifier[logger] = identifier[logging] . identifier[getLogger] ( identifier[__name__] )
keyword[def] identifier[_null_callback] ( identifier[_] ):
keyword[return] keyword[True]
keyword[if] identifier[acceptance_callback] keyword[is] keyword[None] :
identifier[acceptance_callback] = identifier[_null_callback]
identifier[root_dir] = identifier[os] . identifier[path] . identifier[abspath] ( identifier[root_dir] )
identifier[repo] = identifier[git] . identifier[repo] . identifier[base] . identifier[Repo] ( identifier[path] = identifier[root_dir] , identifier[search_parent_directories] = keyword[True] )
identifier[newest_datetime] = keyword[None]
identifier[iters] =[ identifier[_iter_filepaths_with_extension] ( identifier[ext] , identifier[root_dir] = identifier[root_dir] )
keyword[for] identifier[ext] keyword[in] identifier[extensions] ]
keyword[for] identifier[content_path] keyword[in] identifier[itertools] . identifier[chain] (* identifier[iters] ):
identifier[content_path] = identifier[os] . identifier[path] . identifier[abspath] ( identifier[os] . identifier[path] . identifier[join] ( identifier[root_dir] , identifier[content_path] ))
keyword[if] identifier[acceptance_callback] ( identifier[content_path] ):
identifier[logger] . identifier[debug] ( literal[string] , identifier[content_path] )
keyword[try] :
identifier[commit_datetime] = identifier[read_git_commit_timestamp_for_file] (
identifier[content_path] , identifier[repo] = identifier[repo] )
identifier[logger] . identifier[debug] ( literal[string] ,
identifier[content_path] , identifier[commit_datetime] )
keyword[except] identifier[IOError] :
identifier[logger] . identifier[warning] (
literal[string] ,
identifier[content_path] )
keyword[continue]
keyword[if] keyword[not] identifier[newest_datetime] keyword[or] identifier[commit_datetime] > identifier[newest_datetime] :
identifier[newest_datetime] = identifier[commit_datetime]
identifier[logger] . identifier[debug] ( literal[string] , identifier[newest_datetime] )
identifier[logger] . identifier[debug] ( literal[string] , identifier[newest_datetime] )
keyword[if] identifier[newest_datetime] keyword[is] keyword[None] :
keyword[raise] identifier[RuntimeError] ( literal[string] . identifier[format] ( identifier[root_dir] ))
keyword[return] identifier[newest_datetime] | def get_content_commit_date(extensions, acceptance_callback=None, root_dir='.'):
"""Get the datetime for the most recent commit to a project that
affected certain types of content.
Parameters
----------
extensions : sequence of 'str'
Extensions of files to consider in getting the most recent commit
date. For example, ``('rst', 'svg', 'png')`` are content extensions
for a Sphinx project. **Extension comparision is case sensitive.** add
uppercase variants to match uppercase extensions.
acceptance_callback : callable
Callable function whose sole argument is a file path, and returns
`True` or `False` depending on whether the file's commit date should
be considered or not. This callback is only run on files that are
included by ``extensions``. Thus this callback is a way to exclude
specific files that would otherwise be included by their extension.
root_dir : 'str`, optional
Only content contained within this root directory is considered.
This directory must be, or be contained by, a Git repository. This is
the current working directory by default.
Returns
-------
commit_date : `datetime.datetime`
Datetime of the most recent content commit.
Raises
------
RuntimeError
Raised if no content files are found.
"""
logger = logging.getLogger(__name__)
def _null_callback(_):
return True
if acceptance_callback is None:
acceptance_callback = _null_callback # depends on [control=['if'], data=['acceptance_callback']]
# Cache the repo object for each query
root_dir = os.path.abspath(root_dir)
repo = git.repo.base.Repo(path=root_dir, search_parent_directories=True)
# Iterate over all files with all file extensions, looking for the
# newest commit datetime.
newest_datetime = None
iters = [_iter_filepaths_with_extension(ext, root_dir=root_dir) for ext in extensions]
for content_path in itertools.chain(*iters):
content_path = os.path.abspath(os.path.join(root_dir, content_path))
if acceptance_callback(content_path):
logger.debug('Found content path %r', content_path)
try:
commit_datetime = read_git_commit_timestamp_for_file(content_path, repo=repo)
logger.debug('Commit timestamp of %r is %s', content_path, commit_datetime) # depends on [control=['try'], data=[]]
except IOError:
logger.warning('Count not get commit for %r, skipping', content_path)
continue # depends on [control=['except'], data=[]]
if not newest_datetime or commit_datetime > newest_datetime:
# Seed initial newest_datetime
# or set a newer newest_datetime
newest_datetime = commit_datetime
logger.debug('Newest commit timestamp is %s', newest_datetime) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
logger.debug('Final commit timestamp is %s', newest_datetime) # depends on [control=['for'], data=['content_path']]
if newest_datetime is None:
raise RuntimeError('No content files found in {}'.format(root_dir)) # depends on [control=['if'], data=[]]
return newest_datetime |
def linkfetch(self):
    """Fetch links: open the URL, attach request headers, and crawl
    the returned handle (if any) for URLs."""
    request, handle = self.open()
    self._add_headers(request)
    if not handle:
        return
    self._get_crawled_urls(handle, request)
self._get_crawled_urls(handle, request) | def function[linkfetch, parameter[self]]:
constant["
Public method to call the internal methods
]
<ast.Tuple object at 0x7da1b0954700> assign[=] call[name[self].open, parameter[]]
call[name[self]._add_headers, parameter[name[request]]]
if name[handle] begin[:]
call[name[self]._get_crawled_urls, parameter[name[handle], name[request]]] | keyword[def] identifier[linkfetch] ( identifier[self] ):
literal[string]
identifier[request] , identifier[handle] = identifier[self] . identifier[open] ()
identifier[self] . identifier[_add_headers] ( identifier[request] )
keyword[if] identifier[handle] :
identifier[self] . identifier[_get_crawled_urls] ( identifier[handle] , identifier[request] ) | def linkfetch(self):
""""
Public method to call the internal methods
"""
(request, handle) = self.open()
self._add_headers(request)
if handle:
self._get_crawled_urls(handle, request) # depends on [control=['if'], data=[]] |
def feedkeys(self, keys, options='', escape_csi=True):
    """Push `keys` to Nvim user input buffer.

    Options can be a string with the following character flags:
    - 'm': Remap keys. This is default.
    - 'n': Do not remap keys.
    - 't': Handle keys as if typed; otherwise they are handled as if coming
      from a mapping. This matters for undo, opening folds, etc.
    """
    args = (keys, options, escape_csi)
    return self.request('nvim_feedkeys', *args)
constant[Push `keys` to Nvim user input buffer.
Options can be a string with the following character flags:
- 'm': Remap keys. This is default.
- 'n': Do not remap keys.
- 't': Handle keys as if typed; otherwise they are handled as if coming
from a mapping. This matters for undo, opening folds, etc.
]
return[call[name[self].request, parameter[constant[nvim_feedkeys], name[keys], name[options], name[escape_csi]]]] | keyword[def] identifier[feedkeys] ( identifier[self] , identifier[keys] , identifier[options] = literal[string] , identifier[escape_csi] = keyword[True] ):
literal[string]
keyword[return] identifier[self] . identifier[request] ( literal[string] , identifier[keys] , identifier[options] , identifier[escape_csi] ) | def feedkeys(self, keys, options='', escape_csi=True):
"""Push `keys` to Nvim user input buffer.
Options can be a string with the following character flags:
- 'm': Remap keys. This is default.
- 'n': Do not remap keys.
- 't': Handle keys as if typed; otherwise they are handled as if coming
from a mapping. This matters for undo, opening folds, etc.
"""
return self.request('nvim_feedkeys', keys, options, escape_csi) |
def track_pageview(self, name, url, duration=0, properties=None, measurements=None):
    """Send information about the page viewed in the application (a web page for instance).
    Args:
        name (str). the name of the page that was viewed.\n
        url (str). the URL of the page that was viewed.\n
        duration (int). the duration of the page view in milliseconds. (defaults to: 0)\n
        properties (dict). the set of custom properties the client wants attached to this data item. (defaults to: None)\n
        measurements (dict). the set of custom measurements the client wants to attach to this data item. (defaults to: None)
    """
    page_view = channel.contracts.PageViewData()
    # Fall back to the null-constant placeholder when no name is given.
    page_view.name = name if name else NULL_CONSTANT_STRING
    page_view.url = url
    page_view.duration = duration
    if properties:
        page_view.properties = properties
    if measurements:
        page_view.measurements = measurements
    self.track(page_view, self._context)
constant[Send information about the page viewed in the application (a web page for instance).
Args:
name (str). the name of the page that was viewed.
url (str). the URL of the page that was viewed.
duration (int). the duration of the page view in milliseconds. (defaults to: 0)
properties (dict). the set of custom properties the client wants attached to this data item. (defaults to: None)
measurements (dict). the set of custom measurements the client wants to attach to this data item. (defaults to: None)
]
variable[data] assign[=] call[name[channel].contracts.PageViewData, parameter[]]
name[data].name assign[=] <ast.BoolOp object at 0x7da1b1080c40>
name[data].url assign[=] name[url]
name[data].duration assign[=] name[duration]
if name[properties] begin[:]
name[data].properties assign[=] name[properties]
if name[measurements] begin[:]
name[data].measurements assign[=] name[measurements]
call[name[self].track, parameter[name[data], name[self]._context]] | keyword[def] identifier[track_pageview] ( identifier[self] , identifier[name] , identifier[url] , identifier[duration] = literal[int] , identifier[properties] = keyword[None] , identifier[measurements] = keyword[None] ):
literal[string]
identifier[data] = identifier[channel] . identifier[contracts] . identifier[PageViewData] ()
identifier[data] . identifier[name] = identifier[name] keyword[or] identifier[NULL_CONSTANT_STRING]
identifier[data] . identifier[url] = identifier[url]
identifier[data] . identifier[duration] = identifier[duration]
keyword[if] identifier[properties] :
identifier[data] . identifier[properties] = identifier[properties]
keyword[if] identifier[measurements] :
identifier[data] . identifier[measurements] = identifier[measurements]
identifier[self] . identifier[track] ( identifier[data] , identifier[self] . identifier[_context] ) | def track_pageview(self, name, url, duration=0, properties=None, measurements=None):
"""Send information about the page viewed in the application (a web page for instance).
Args:
name (str). the name of the page that was viewed.
url (str). the URL of the page that was viewed.
duration (int). the duration of the page view in milliseconds. (defaults to: 0)
properties (dict). the set of custom properties the client wants attached to this data item. (defaults to: None)
measurements (dict). the set of custom measurements the client wants to attach to this data item. (defaults to: None)
"""
data = channel.contracts.PageViewData()
data.name = name or NULL_CONSTANT_STRING
data.url = url
data.duration = duration
if properties:
data.properties = properties # depends on [control=['if'], data=[]]
if measurements:
data.measurements = measurements # depends on [control=['if'], data=[]]
self.track(data, self._context) |
def from_xdr(cls, xdr):
    """Create a new :class:`TransactionEnvelope` from an XDR string.
    :param xdr: The XDR string that represents a transaction
        envelope.
    :type xdr: bytes, str
    :return: A new :class:`TransactionEnvelope` carrying the decoded
        transaction and its signatures.
    """
    xdr_decoded = base64.b64decode(xdr)
    # Use a distinct name for the unpacker so it is not shadowed by the
    # resulting envelope (the original reused ``te`` for both).
    unpacker = Xdr.StellarXDRUnpacker(xdr_decoded)
    te_xdr_object = unpacker.unpack_TransactionEnvelope()
    tx = Transaction.from_xdr_object(te_xdr_object.tx)
    return TransactionEnvelope(tx, signatures=te_xdr_object.signatures)
constant[Create a new :class:`TransactionEnvelope` from an XDR string.
:param xdr: The XDR string that represents a transaction
envelope.
:type xdr: bytes, str
]
variable[xdr_decoded] assign[=] call[name[base64].b64decode, parameter[name[xdr]]]
variable[te] assign[=] call[name[Xdr].StellarXDRUnpacker, parameter[name[xdr_decoded]]]
variable[te_xdr_object] assign[=] call[name[te].unpack_TransactionEnvelope, parameter[]]
variable[signatures] assign[=] name[te_xdr_object].signatures
variable[tx_xdr_object] assign[=] name[te_xdr_object].tx
variable[tx] assign[=] call[name[Transaction].from_xdr_object, parameter[name[tx_xdr_object]]]
variable[te] assign[=] call[name[TransactionEnvelope], parameter[name[tx]]]
return[name[te]] | keyword[def] identifier[from_xdr] ( identifier[cls] , identifier[xdr] ):
literal[string]
identifier[xdr_decoded] = identifier[base64] . identifier[b64decode] ( identifier[xdr] )
identifier[te] = identifier[Xdr] . identifier[StellarXDRUnpacker] ( identifier[xdr_decoded] )
identifier[te_xdr_object] = identifier[te] . identifier[unpack_TransactionEnvelope] ()
identifier[signatures] = identifier[te_xdr_object] . identifier[signatures]
identifier[tx_xdr_object] = identifier[te_xdr_object] . identifier[tx]
identifier[tx] = identifier[Transaction] . identifier[from_xdr_object] ( identifier[tx_xdr_object] )
identifier[te] = identifier[TransactionEnvelope] ( identifier[tx] , identifier[signatures] = identifier[signatures] )
keyword[return] identifier[te] | def from_xdr(cls, xdr):
"""Create a new :class:`TransactionEnvelope` from an XDR string.
:param xdr: The XDR string that represents a transaction
envelope.
:type xdr: bytes, str
"""
xdr_decoded = base64.b64decode(xdr)
te = Xdr.StellarXDRUnpacker(xdr_decoded)
te_xdr_object = te.unpack_TransactionEnvelope()
signatures = te_xdr_object.signatures
tx_xdr_object = te_xdr_object.tx
tx = Transaction.from_xdr_object(tx_xdr_object)
te = TransactionEnvelope(tx, signatures=signatures)
# te = TransactionEnvelope(
# tx, {'signatures': signatures, 'network_id': 'PUBLIC'})
return te |
def get_final_path(path):
    r"""
    For a given path, determine the ultimate location of that path.
    Useful for resolving symlink targets.
    This functions wraps the GetFinalPathNameByHandle from the Windows
    SDK.
    Note, this function fails if a handle cannot be obtained (such as
    for C:\Pagefile.sys on a stock windows system). Consider using
    trace_symlink_target instead.

    :param path: path whose final (resolved) location is wanted
    :return: the resolved path as a unicode string
    :raises WindowsError: if no handle can be obtained for *path*
    """
    desired_access = api.NULL
    share_mode = (
        api.FILE_SHARE_READ | api.FILE_SHARE_WRITE | api.FILE_SHARE_DELETE
    )
    security_attributes = api.LPSECURITY_ATTRIBUTES()  # NULL pointer
    hFile = api.CreateFile(
        path,
        desired_access,
        share_mode,
        security_attributes,
        api.OPEN_EXISTING,
        api.FILE_FLAG_BACKUP_SEMANTICS,
        api.NULL,
    )
    if hFile == api.INVALID_HANDLE_VALUE:
        raise WindowsError()
    # Ensure the handle is always released, even if one of the
    # GetFinalPathNameByHandle calls below fails (the original code
    # leaked the handle in that case).
    try:
        # First call with an empty buffer returns the required size.
        buf_size = api.GetFinalPathNameByHandle(
            hFile, LPWSTR(), 0, api.VOLUME_NAME_DOS)
        handle_nonzero_success(buf_size)
        buf = create_unicode_buffer(buf_size)
        result_length = api.GetFinalPathNameByHandle(
            hFile, buf, len(buf), api.VOLUME_NAME_DOS)
        # Sanity check: a second truncation should be impossible since we
        # sized the buffer from the first call.
        assert result_length < len(buf)
        handle_nonzero_success(result_length)
    finally:
        handle_nonzero_success(api.CloseHandle(hFile))
    return buf[:result_length]
constant[
For a given path, determine the ultimate location of that path.
Useful for resolving symlink targets.
This functions wraps the GetFinalPathNameByHandle from the Windows
SDK.
Note, this function fails if a handle cannot be obtained (such as
for C:\Pagefile.sys on a stock windows system). Consider using
trace_symlink_target instead.
]
variable[desired_access] assign[=] name[api].NULL
variable[share_mode] assign[=] binary_operation[binary_operation[name[api].FILE_SHARE_READ <ast.BitOr object at 0x7da2590d6aa0> name[api].FILE_SHARE_WRITE] <ast.BitOr object at 0x7da2590d6aa0> name[api].FILE_SHARE_DELETE]
variable[security_attributes] assign[=] call[name[api].LPSECURITY_ATTRIBUTES, parameter[]]
variable[hFile] assign[=] call[name[api].CreateFile, parameter[name[path], name[desired_access], name[share_mode], name[security_attributes], name[api].OPEN_EXISTING, name[api].FILE_FLAG_BACKUP_SEMANTICS, name[api].NULL]]
if compare[name[hFile] equal[==] name[api].INVALID_HANDLE_VALUE] begin[:]
<ast.Raise object at 0x7da1b2535ed0>
variable[buf_size] assign[=] call[name[api].GetFinalPathNameByHandle, parameter[name[hFile], call[name[LPWSTR], parameter[]], constant[0], name[api].VOLUME_NAME_DOS]]
call[name[handle_nonzero_success], parameter[name[buf_size]]]
variable[buf] assign[=] call[name[create_unicode_buffer], parameter[name[buf_size]]]
variable[result_length] assign[=] call[name[api].GetFinalPathNameByHandle, parameter[name[hFile], name[buf], call[name[len], parameter[name[buf]]], name[api].VOLUME_NAME_DOS]]
assert[compare[name[result_length] less[<] call[name[len], parameter[name[buf]]]]]
call[name[handle_nonzero_success], parameter[name[result_length]]]
call[name[handle_nonzero_success], parameter[call[name[api].CloseHandle, parameter[name[hFile]]]]]
return[call[name[buf]][<ast.Slice object at 0x7da1b253d480>]] | keyword[def] identifier[get_final_path] ( identifier[path] ):
literal[string]
identifier[desired_access] = identifier[api] . identifier[NULL]
identifier[share_mode] =(
identifier[api] . identifier[FILE_SHARE_READ] | identifier[api] . identifier[FILE_SHARE_WRITE] | identifier[api] . identifier[FILE_SHARE_DELETE]
)
identifier[security_attributes] = identifier[api] . identifier[LPSECURITY_ATTRIBUTES] ()
identifier[hFile] = identifier[api] . identifier[CreateFile] (
identifier[path] ,
identifier[desired_access] ,
identifier[share_mode] ,
identifier[security_attributes] ,
identifier[api] . identifier[OPEN_EXISTING] ,
identifier[api] . identifier[FILE_FLAG_BACKUP_SEMANTICS] ,
identifier[api] . identifier[NULL] ,
)
keyword[if] identifier[hFile] == identifier[api] . identifier[INVALID_HANDLE_VALUE] :
keyword[raise] identifier[WindowsError] ()
identifier[buf_size] = identifier[api] . identifier[GetFinalPathNameByHandle] (
identifier[hFile] , identifier[LPWSTR] (), literal[int] , identifier[api] . identifier[VOLUME_NAME_DOS] )
identifier[handle_nonzero_success] ( identifier[buf_size] )
identifier[buf] = identifier[create_unicode_buffer] ( identifier[buf_size] )
identifier[result_length] = identifier[api] . identifier[GetFinalPathNameByHandle] (
identifier[hFile] , identifier[buf] , identifier[len] ( identifier[buf] ), identifier[api] . identifier[VOLUME_NAME_DOS] )
keyword[assert] identifier[result_length] < identifier[len] ( identifier[buf] )
identifier[handle_nonzero_success] ( identifier[result_length] )
identifier[handle_nonzero_success] ( identifier[api] . identifier[CloseHandle] ( identifier[hFile] ))
keyword[return] identifier[buf] [: identifier[result_length] ] | def get_final_path(path):
"""
For a given path, determine the ultimate location of that path.
Useful for resolving symlink targets.
This functions wraps the GetFinalPathNameByHandle from the Windows
SDK.
Note, this function fails if a handle cannot be obtained (such as
for C:\\Pagefile.sys on a stock windows system). Consider using
trace_symlink_target instead.
"""
desired_access = api.NULL
share_mode = api.FILE_SHARE_READ | api.FILE_SHARE_WRITE | api.FILE_SHARE_DELETE
security_attributes = api.LPSECURITY_ATTRIBUTES() # NULL pointer
hFile = api.CreateFile(path, desired_access, share_mode, security_attributes, api.OPEN_EXISTING, api.FILE_FLAG_BACKUP_SEMANTICS, api.NULL)
if hFile == api.INVALID_HANDLE_VALUE:
raise WindowsError() # depends on [control=['if'], data=[]]
buf_size = api.GetFinalPathNameByHandle(hFile, LPWSTR(), 0, api.VOLUME_NAME_DOS)
handle_nonzero_success(buf_size)
buf = create_unicode_buffer(buf_size)
result_length = api.GetFinalPathNameByHandle(hFile, buf, len(buf), api.VOLUME_NAME_DOS)
assert result_length < len(buf)
handle_nonzero_success(result_length)
handle_nonzero_success(api.CloseHandle(hFile))
return buf[:result_length] |
def set(self, name, value, **kw):
    """Set the attribute to the given value.

    The keyword arguments represent the other attribute values
    to integrate constraints to other values.

    :return: True when the value was stored, False when the portal
        has no such attribute.
    """
    # Writing portal settings requires the ManagePortal permission.
    if not getSecurityManager().checkPermission(
            permissions.ManagePortal, self.context):
        raise Unauthorized("Not allowed to modify the Plone portal")
    # Only names the portal object already exposes may be written.
    if not hasattr(self.context, name):
        return False
    self.context[name] = value
    return True
constant[Set the attribute to the given value.
The keyword arguments represent the other attribute values
to integrate constraints to other values.
]
variable[sm] assign[=] call[name[getSecurityManager], parameter[]]
variable[permission] assign[=] name[permissions].ManagePortal
if <ast.UnaryOp object at 0x7da1b2555090> begin[:]
<ast.Raise object at 0x7da1b2554dc0>
if <ast.UnaryOp object at 0x7da1b2554d90> begin[:]
return[constant[False]]
call[name[self].context][name[name]] assign[=] name[value]
return[constant[True]] | keyword[def] identifier[set] ( identifier[self] , identifier[name] , identifier[value] ,** identifier[kw] ):
literal[string]
identifier[sm] = identifier[getSecurityManager] ()
identifier[permission] = identifier[permissions] . identifier[ManagePortal]
keyword[if] keyword[not] identifier[sm] . identifier[checkPermission] ( identifier[permission] , identifier[self] . identifier[context] ):
keyword[raise] identifier[Unauthorized] ( literal[string] )
keyword[if] keyword[not] identifier[hasattr] ( identifier[self] . identifier[context] , identifier[name] ):
keyword[return] keyword[False]
identifier[self] . identifier[context] [ identifier[name] ]= identifier[value]
keyword[return] keyword[True] | def set(self, name, value, **kw):
"""Set the attribute to the given value.
The keyword arguments represent the other attribute values
to integrate constraints to other values.
"""
# check write permission
sm = getSecurityManager()
permission = permissions.ManagePortal
if not sm.checkPermission(permission, self.context):
raise Unauthorized('Not allowed to modify the Plone portal') # depends on [control=['if'], data=[]]
# set the attribute
if not hasattr(self.context, name):
return False # depends on [control=['if'], data=[]]
self.context[name] = value
return True |
def increment_step_and_update_last_reward(self):
    """
    Increment the trainer's step count and push the mean cumulative
    reward (when any has been recorded) to the policy.
    """
    rewards = self.stats['Environment/Cumulative Reward']
    if rewards:
        # Only report a reward once at least one episode has completed.
        self.policy.update_reward(np.mean(rewards))
    self.policy.increment_step()
    self.step = self.policy.get_current_step()
constant[
Increment the step count of the trainer and Updates the last reward
]
if compare[call[name[len], parameter[call[name[self].stats][constant[Environment/Cumulative Reward]]]] greater[>] constant[0]] begin[:]
variable[mean_reward] assign[=] call[name[np].mean, parameter[call[name[self].stats][constant[Environment/Cumulative Reward]]]]
call[name[self].policy.update_reward, parameter[name[mean_reward]]]
call[name[self].policy.increment_step, parameter[]]
name[self].step assign[=] call[name[self].policy.get_current_step, parameter[]] | keyword[def] identifier[increment_step_and_update_last_reward] ( identifier[self] ):
literal[string]
keyword[if] identifier[len] ( identifier[self] . identifier[stats] [ literal[string] ])> literal[int] :
identifier[mean_reward] = identifier[np] . identifier[mean] ( identifier[self] . identifier[stats] [ literal[string] ])
identifier[self] . identifier[policy] . identifier[update_reward] ( identifier[mean_reward] )
identifier[self] . identifier[policy] . identifier[increment_step] ()
identifier[self] . identifier[step] = identifier[self] . identifier[policy] . identifier[get_current_step] () | def increment_step_and_update_last_reward(self):
"""
Increment the step count of the trainer and Updates the last reward
"""
if len(self.stats['Environment/Cumulative Reward']) > 0:
mean_reward = np.mean(self.stats['Environment/Cumulative Reward'])
self.policy.update_reward(mean_reward) # depends on [control=['if'], data=[]]
self.policy.increment_step()
self.step = self.policy.get_current_step() |
def timestamp_to_local_time_str(
        timestamp, timezone_name, fmt="yyyy-MM-dd HH:mm:ss"):
    """Convert epoch timestamp to a localized datetime string.

    Arguments
    ---------
    timestamp : int
        The timestamp to convert.
    timezone_name : datetime.timezone
        The timezone of the desired local time.
    fmt : str
        The format of the output string.

    Returns
    -------
    str
        The localized datetime string.
    """
    # Localize first, then render with the requested pattern.
    localized = timestamp_to_local_time(timestamp, timezone_name)
    return localized.format_datetime(fmt)
constant[Convert epoch timestamp to a localized datetime string.
Arguments
---------
timestamp : int
The timestamp to convert.
timezone_name : datetime.timezone
The timezone of the desired local time.
fmt : str
The format of the output string.
Returns
-------
str
The localized datetime string.
]
variable[localized_d] assign[=] call[name[timestamp_to_local_time], parameter[name[timestamp], name[timezone_name]]]
variable[localized_datetime_str] assign[=] call[name[localized_d].format_datetime, parameter[name[fmt]]]
return[name[localized_datetime_str]] | keyword[def] identifier[timestamp_to_local_time_str] (
identifier[timestamp] , identifier[timezone_name] , identifier[fmt] = literal[string] ):
literal[string]
identifier[localized_d] = identifier[timestamp_to_local_time] ( identifier[timestamp] , identifier[timezone_name] )
identifier[localized_datetime_str] = identifier[localized_d] . identifier[format_datetime] ( identifier[fmt] )
keyword[return] identifier[localized_datetime_str] | def timestamp_to_local_time_str(timestamp, timezone_name, fmt='yyyy-MM-dd HH:mm:ss'):
"""Convert epoch timestamp to a localized datetime string.
Arguments
---------
timestamp : int
The timestamp to convert.
timezone_name : datetime.timezone
The timezone of the desired local time.
fmt : str
The format of the output string.
Returns
-------
str
The localized datetime string.
"""
localized_d = timestamp_to_local_time(timestamp, timezone_name)
localized_datetime_str = localized_d.format_datetime(fmt)
return localized_datetime_str |
def to_funset(self, lname="clamping", cname="clamped"):
    """
    Converts the list of clampings to a set of `gringo.Fun`_ instances

    Parameters
    ----------
    lname : str
        Predicate name for the clamping id

    cname : str
        Predicate name for the clamped variable

    Returns
    -------
    set
        Representation of all clampings as a set of `gringo.Fun`_ instances

    .. _gringo.Fun: http://potassco.sourceforge.net/gringo.html#Fun
    """
    funs = set()
    for idx, clamping in enumerate(self):
        # One fact identifying the clamping, plus its clamped variables.
        funs.add(gringo.Fun(lname, [idx]))
        funs |= clamping.to_funset(idx, cname)
    return funs
constant[
Converts the list of clampings to a set of `gringo.Fun`_ instances
Parameters
----------
lname : str
Predicate name for the clamping id
cname : str
Predicate name for the clamped variable
Returns
-------
set
Representation of all clampings as a set of `gringo.Fun`_ instances
.. _gringo.Fun: http://potassco.sourceforge.net/gringo.html#Fun
]
variable[fs] assign[=] call[name[set], parameter[]]
for taget[tuple[[<ast.Name object at 0x7da18f09ed70>, <ast.Name object at 0x7da18f09cdf0>]]] in starred[call[name[enumerate], parameter[name[self]]]] begin[:]
call[name[fs].add, parameter[call[name[gringo].Fun, parameter[name[lname], list[[<ast.Name object at 0x7da18f09c6a0>]]]]]]
variable[fs] assign[=] call[name[fs].union, parameter[call[name[clamping].to_funset, parameter[name[i], name[cname]]]]]
return[name[fs]] | keyword[def] identifier[to_funset] ( identifier[self] , identifier[lname] = literal[string] , identifier[cname] = literal[string] ):
literal[string]
identifier[fs] = identifier[set] ()
keyword[for] identifier[i] , identifier[clamping] keyword[in] identifier[enumerate] ( identifier[self] ):
identifier[fs] . identifier[add] ( identifier[gringo] . identifier[Fun] ( identifier[lname] ,[ identifier[i] ]))
identifier[fs] = identifier[fs] . identifier[union] ( identifier[clamping] . identifier[to_funset] ( identifier[i] , identifier[cname] ))
keyword[return] identifier[fs] | def to_funset(self, lname='clamping', cname='clamped'):
"""
Converts the list of clampings to a set of `gringo.Fun`_ instances
Parameters
----------
lname : str
Predicate name for the clamping id
cname : str
Predicate name for the clamped variable
Returns
-------
set
Representation of all clampings as a set of `gringo.Fun`_ instances
.. _gringo.Fun: http://potassco.sourceforge.net/gringo.html#Fun
"""
fs = set()
for (i, clamping) in enumerate(self):
fs.add(gringo.Fun(lname, [i]))
fs = fs.union(clamping.to_funset(i, cname)) # depends on [control=['for'], data=[]]
return fs |
def gimbal_control_encode(self, target_system, target_component, demanded_rate_x, demanded_rate_y, demanded_rate_z):
    '''
    Control message for rate gimbal

    target_system             : System ID (uint8_t)
    target_component          : Component ID (uint8_t)
    demanded_rate_x           : Demanded angular rate X (rad/s) (float)
    demanded_rate_y           : Demanded angular rate Y (rad/s) (float)
    demanded_rate_z           : Demanded angular rate Z (rad/s) (float)

    '''
    # Thin wrapper: build the MAVLink message object from the raw fields.
    msg = MAVLink_gimbal_control_message(
        target_system,
        target_component,
        demanded_rate_x,
        demanded_rate_y,
        demanded_rate_z,
    )
    return msg
constant[
Control message for rate gimbal
target_system : System ID (uint8_t)
target_component : Component ID (uint8_t)
demanded_rate_x : Demanded angular rate X (rad/s) (float)
demanded_rate_y : Demanded angular rate Y (rad/s) (float)
demanded_rate_z : Demanded angular rate Z (rad/s) (float)
]
return[call[name[MAVLink_gimbal_control_message], parameter[name[target_system], name[target_component], name[demanded_rate_x], name[demanded_rate_y], name[demanded_rate_z]]]] | keyword[def] identifier[gimbal_control_encode] ( identifier[self] , identifier[target_system] , identifier[target_component] , identifier[demanded_rate_x] , identifier[demanded_rate_y] , identifier[demanded_rate_z] ):
literal[string]
keyword[return] identifier[MAVLink_gimbal_control_message] ( identifier[target_system] , identifier[target_component] , identifier[demanded_rate_x] , identifier[demanded_rate_y] , identifier[demanded_rate_z] ) | def gimbal_control_encode(self, target_system, target_component, demanded_rate_x, demanded_rate_y, demanded_rate_z):
"""
Control message for rate gimbal
target_system : System ID (uint8_t)
target_component : Component ID (uint8_t)
demanded_rate_x : Demanded angular rate X (rad/s) (float)
demanded_rate_y : Demanded angular rate Y (rad/s) (float)
demanded_rate_z : Demanded angular rate Z (rad/s) (float)
"""
return MAVLink_gimbal_control_message(target_system, target_component, demanded_rate_x, demanded_rate_y, demanded_rate_z) |
def is_velar(c, lang):
    """
    Return True when character ``c`` is a velar for language ``lang``.

    :param c: the character to classify
    :param lang: language identifier understood by ``get_offset``
    :return: True if the character's offset lies inside ``VELAR_RANGE``
        (inclusive on both ends), else False
    """
    offset = get_offset(c, lang)
    # Chained comparison is the idiomatic inclusive-range test.
    return VELAR_RANGE[0] <= offset <= VELAR_RANGE[1]
constant[
Is the character a velar
]
variable[o] assign[=] call[name[get_offset], parameter[name[c], name[lang]]]
return[<ast.BoolOp object at 0x7da1b26ae710>] | keyword[def] identifier[is_velar] ( identifier[c] , identifier[lang] ):
literal[string]
identifier[o] = identifier[get_offset] ( identifier[c] , identifier[lang] )
keyword[return] ( identifier[o] >= identifier[VELAR_RANGE] [ literal[int] ] keyword[and] identifier[o] <= identifier[VELAR_RANGE] [ literal[int] ]) | def is_velar(c, lang):
"""
Is the character a velar
"""
o = get_offset(c, lang)
return o >= VELAR_RANGE[0] and o <= VELAR_RANGE[1] |
def last_sleep_breakdown(self):
    """Return durations of sleep stages for last complete session.

    :return: dict mapping stage name ('awake', 'light', 'deep', 'rem')
        to its total duration, or None when no complete session data
        is available.
    """
    try:
        stages = self.intervals[1]['stages']
    except (KeyError, IndexError):
        # Either fewer than two intervals are recorded or the interval
        # carries no 'stages' key — there is no complete session yet.
        # (The original caught only KeyError and crashed on short lists.)
        return None
    breakdown = {'awake': 0, 'light': 0, 'deep': 0, 'rem': 0}
    for stage in stages:
        name = stage['stage']
        # Unknown stage names are ignored, matching the original chain.
        if name in breakdown:
            breakdown[name] += stage['duration']
    return breakdown
constant[Return durations of sleep stages for last complete session.]
<ast.Try object at 0x7da20cabed70>
variable[breakdown] assign[=] dictionary[[<ast.Constant object at 0x7da20cabc6d0>, <ast.Constant object at 0x7da20cabf790>, <ast.Constant object at 0x7da20cabd480>, <ast.Constant object at 0x7da20cabd7b0>], [<ast.Constant object at 0x7da20cabec50>, <ast.Constant object at 0x7da20cabce80>, <ast.Constant object at 0x7da20cabc2e0>, <ast.Constant object at 0x7da20cabc310>]]
for taget[name[stage]] in starred[name[stages]] begin[:]
if compare[call[name[stage]][constant[stage]] equal[==] constant[awake]] begin[:]
<ast.AugAssign object at 0x7da18bc70e20>
return[name[breakdown]] | keyword[def] identifier[last_sleep_breakdown] ( identifier[self] ):
literal[string]
keyword[try] :
identifier[stages] = identifier[self] . identifier[intervals] [ literal[int] ][ literal[string] ]
keyword[except] identifier[KeyError] :
keyword[return] keyword[None]
identifier[breakdown] ={ literal[string] : literal[int] , literal[string] : literal[int] , literal[string] : literal[int] , literal[string] : literal[int] }
keyword[for] identifier[stage] keyword[in] identifier[stages] :
keyword[if] identifier[stage] [ literal[string] ]== literal[string] :
identifier[breakdown] [ literal[string] ]+= identifier[stage] [ literal[string] ]
keyword[elif] identifier[stage] [ literal[string] ]== literal[string] :
identifier[breakdown] [ literal[string] ]+= identifier[stage] [ literal[string] ]
keyword[elif] identifier[stage] [ literal[string] ]== literal[string] :
identifier[breakdown] [ literal[string] ]+= identifier[stage] [ literal[string] ]
keyword[elif] identifier[stage] [ literal[string] ]== literal[string] :
identifier[breakdown] [ literal[string] ]+= identifier[stage] [ literal[string] ]
keyword[return] identifier[breakdown] | def last_sleep_breakdown(self):
"""Return durations of sleep stages for last complete session."""
try:
stages = self.intervals[1]['stages'] # depends on [control=['try'], data=[]]
except KeyError:
return None # depends on [control=['except'], data=[]]
breakdown = {'awake': 0, 'light': 0, 'deep': 0, 'rem': 0}
for stage in stages:
if stage['stage'] == 'awake':
breakdown['awake'] += stage['duration'] # depends on [control=['if'], data=[]]
elif stage['stage'] == 'light':
breakdown['light'] += stage['duration'] # depends on [control=['if'], data=[]]
elif stage['stage'] == 'deep':
breakdown['deep'] += stage['duration'] # depends on [control=['if'], data=[]]
elif stage['stage'] == 'rem':
breakdown['rem'] += stage['duration'] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['stage']]
return breakdown |
def diff_with_models(self):
    """
    Return a dict stating the differences between current state of models
    and the configuration itself.

    Maps each DB table name to the set of its columns that have no entry
    in ``self.strategy``.

    TODO: Detect fields that are in conf, but not in models
    """
    missing_from_conf = defaultdict(set)
    for model in get_models():
        tables = get_db_tables_and_columns_of_model(model)
        for table_name, columns in tables.items():
            model_strategy = self.strategy.get(table_name)
            # Collect columns absent from (or lacking) the table's strategy.
            uncovered = (
                col for col in columns
                if not model_strategy or col not in model_strategy
            )
            for col in uncovered:
                missing_from_conf[table_name].add(col)
    return missing_from_conf
constant[
Return a dict stating the differences between current state of models
and the configuration itself.
TODO: Detect fields that are in conf, but not in models
]
variable[missing_from_conf] assign[=] call[name[defaultdict], parameter[name[set]]]
for taget[name[model]] in starred[call[name[get_models], parameter[]]] begin[:]
variable[db_tables_and_columns] assign[=] call[name[get_db_tables_and_columns_of_model], parameter[name[model]]]
for taget[tuple[[<ast.Name object at 0x7da1b0c443d0>, <ast.Name object at 0x7da1b0c47d00>]]] in starred[call[name[db_tables_and_columns].items, parameter[]]] begin[:]
variable[model_strategy] assign[=] call[name[self].strategy.get, parameter[name[table_name]]]
for taget[name[column]] in starred[name[columns]] begin[:]
if <ast.BoolOp object at 0x7da1b0c4ded0> begin[:]
call[call[name[missing_from_conf]][name[table_name]].add, parameter[name[column]]]
return[name[missing_from_conf]] | keyword[def] identifier[diff_with_models] ( identifier[self] ):
literal[string]
identifier[missing_from_conf] = identifier[defaultdict] ( identifier[set] )
keyword[for] identifier[model] keyword[in] identifier[get_models] ():
identifier[db_tables_and_columns] = identifier[get_db_tables_and_columns_of_model] ( identifier[model] )
keyword[for] ( identifier[table_name] , identifier[columns] ) keyword[in] identifier[db_tables_and_columns] . identifier[items] ():
identifier[model_strategy] = identifier[self] . identifier[strategy] . identifier[get] ( identifier[table_name] )
keyword[for] identifier[column] keyword[in] identifier[columns] :
keyword[if] keyword[not] identifier[model_strategy] keyword[or] identifier[column] keyword[not] keyword[in] identifier[model_strategy] :
identifier[missing_from_conf] [ identifier[table_name] ]. identifier[add] ( identifier[column] )
keyword[return] identifier[missing_from_conf] | def diff_with_models(self):
"""
Return a dict stating the differences between current state of models
and the configuration itself.
TODO: Detect fields that are in conf, but not in models
"""
missing_from_conf = defaultdict(set)
for model in get_models():
db_tables_and_columns = get_db_tables_and_columns_of_model(model)
for (table_name, columns) in db_tables_and_columns.items():
model_strategy = self.strategy.get(table_name)
for column in columns:
if not model_strategy or column not in model_strategy:
missing_from_conf[table_name].add(column) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['column']] # depends on [control=['for'], data=[]] # depends on [control=['for'], data=['model']]
return missing_from_conf |
def connectionMade(self):
    """Callback handler from twisted when establishing a new connection.

    Resolves (or registers) the peer's Address object on the NodeLeader,
    records host/port bookkeeping, updates the leader's connection
    counters, and — for incoming clients — starts the protocol by
    sending our version message.
    """
    self.endpoint = self.transport.getPeer()
    # get the reference to the Address object in NodeLeader so we can manipulate it properly.
    tmp_addr = Address(f"{self.endpoint.host}:{self.endpoint.port}")
    try:
        # Reuse the existing Address instance if this peer is already known.
        known_idx = self.leader.KNOWN_ADDRS.index(tmp_addr)
        self.address = self.leader.KNOWN_ADDRS[known_idx]
    except ValueError:
        # Not found.
        self.leader.AddKnownAddress(tmp_addr)
        self.address = tmp_addr
    # NOTE(review): .address is rewritten even for a reused known entry —
    # presumably to normalize the "host:port" form; confirm intent.
    self.address.address = "%s:%s" % (self.endpoint.host, self.endpoint.port)
    self.host = self.endpoint.host
    self.port = int(self.endpoint.port)
    # Leader bookkeeping: this peer is now connected and no longer queued,
    # so the in-flight connection counter goes down by one.
    self.leader.AddConnectedPeer(self)
    self.leader.RemoveFromQueue(self.address)
    self.leader.peers_connecting -= 1
    logger.debug(f"{self.address} connection established")
    if self.incoming_client:
        # start protocol
        self.SendVersion()
constant[Callback handler from twisted when establishing a new connection.]
name[self].endpoint assign[=] call[name[self].transport.getPeer, parameter[]]
variable[tmp_addr] assign[=] call[name[Address], parameter[<ast.JoinedStr object at 0x7da18bcc8940>]]
<ast.Try object at 0x7da18bcc89d0>
name[self].address.address assign[=] binary_operation[constant[%s:%s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Attribute object at 0x7da204621120>, <ast.Attribute object at 0x7da1b1df9240>]]]
name[self].host assign[=] name[self].endpoint.host
name[self].port assign[=] call[name[int], parameter[name[self].endpoint.port]]
call[name[self].leader.AddConnectedPeer, parameter[name[self]]]
call[name[self].leader.RemoveFromQueue, parameter[name[self].address]]
<ast.AugAssign object at 0x7da1b1df9e70>
call[name[logger].debug, parameter[<ast.JoinedStr object at 0x7da1b1df94b0>]]
if name[self].incoming_client begin[:]
call[name[self].SendVersion, parameter[]] | keyword[def] identifier[connectionMade] ( identifier[self] ):
literal[string]
identifier[self] . identifier[endpoint] = identifier[self] . identifier[transport] . identifier[getPeer] ()
identifier[tmp_addr] = identifier[Address] ( literal[string] )
keyword[try] :
identifier[known_idx] = identifier[self] . identifier[leader] . identifier[KNOWN_ADDRS] . identifier[index] ( identifier[tmp_addr] )
identifier[self] . identifier[address] = identifier[self] . identifier[leader] . identifier[KNOWN_ADDRS] [ identifier[known_idx] ]
keyword[except] identifier[ValueError] :
identifier[self] . identifier[leader] . identifier[AddKnownAddress] ( identifier[tmp_addr] )
identifier[self] . identifier[address] = identifier[tmp_addr]
identifier[self] . identifier[address] . identifier[address] = literal[string] %( identifier[self] . identifier[endpoint] . identifier[host] , identifier[self] . identifier[endpoint] . identifier[port] )
identifier[self] . identifier[host] = identifier[self] . identifier[endpoint] . identifier[host]
identifier[self] . identifier[port] = identifier[int] ( identifier[self] . identifier[endpoint] . identifier[port] )
identifier[self] . identifier[leader] . identifier[AddConnectedPeer] ( identifier[self] )
identifier[self] . identifier[leader] . identifier[RemoveFromQueue] ( identifier[self] . identifier[address] )
identifier[self] . identifier[leader] . identifier[peers_connecting] -= literal[int]
identifier[logger] . identifier[debug] ( literal[string] )
keyword[if] identifier[self] . identifier[incoming_client] :
identifier[self] . identifier[SendVersion] () | def connectionMade(self):
"""Callback handler from twisted when establishing a new connection."""
self.endpoint = self.transport.getPeer()
# get the reference to the Address object in NodeLeader so we can manipulate it properly.
tmp_addr = Address(f'{self.endpoint.host}:{self.endpoint.port}')
try:
known_idx = self.leader.KNOWN_ADDRS.index(tmp_addr)
self.address = self.leader.KNOWN_ADDRS[known_idx] # depends on [control=['try'], data=[]]
except ValueError:
# Not found.
self.leader.AddKnownAddress(tmp_addr)
self.address = tmp_addr # depends on [control=['except'], data=[]]
self.address.address = '%s:%s' % (self.endpoint.host, self.endpoint.port)
self.host = self.endpoint.host
self.port = int(self.endpoint.port)
self.leader.AddConnectedPeer(self)
self.leader.RemoveFromQueue(self.address)
self.leader.peers_connecting -= 1
logger.debug(f'{self.address} connection established')
if self.incoming_client:
# start protocol
self.SendVersion() # depends on [control=['if'], data=[]] |
def execute_executable(nova_args, env_vars):
    """
    Executes the executable given by the user.

    Hey, I know this method has a silly name, but I write the code here and
    I'm silly.

    :param nova_args: argument list handed to ``subprocess.Popen``
    :param env_vars: environment mapping for the child process
    :return: the finished ``subprocess.Popen`` object (``returncode`` set)
    """
    process = subprocess.Popen(nova_args,
                               stdout=sys.stdout,
                               stderr=subprocess.PIPE,
                               env=env_vars)
    # communicate() drains the stderr pipe while waiting. The original
    # wait() call could deadlock once the child filled the stderr pipe
    # buffer (see the subprocess docs' warning about wait() with PIPE).
    # Note: after this the process's stderr stream has been consumed.
    process.communicate()
    return process
constant[
Executes the executable given by the user.
Hey, I know this method has a silly name, but I write the code here and
I'm silly.
]
variable[process] assign[=] call[name[subprocess].Popen, parameter[name[nova_args]]]
call[name[process].wait, parameter[]]
return[name[process]] | keyword[def] identifier[execute_executable] ( identifier[nova_args] , identifier[env_vars] ):
literal[string]
identifier[process] = identifier[subprocess] . identifier[Popen] ( identifier[nova_args] ,
identifier[stdout] = identifier[sys] . identifier[stdout] ,
identifier[stderr] = identifier[subprocess] . identifier[PIPE] ,
identifier[env] = identifier[env_vars] )
identifier[process] . identifier[wait] ()
keyword[return] identifier[process] | def execute_executable(nova_args, env_vars):
"""
Executes the executable given by the user.
Hey, I know this method has a silly name, but I write the code here and
I'm silly.
"""
process = subprocess.Popen(nova_args, stdout=sys.stdout, stderr=subprocess.PIPE, env=env_vars)
process.wait()
return process |
def switch_fingerprint_method(self, old=False):
    """
    Switches main fingerprinting method.

    :param old: if True old fingerprinting method will be used.
    :return:
    """
    # Pick the legacy moduli-based method or the default dlog-based one.
    self.has_fingerprint = (
        self.has_fingerprint_moduli if old else self.has_fingerprint_dlog
    )
constant[
Switches main fingerprinting method.
:param old: if True old fingerprinting method will be used.
:return:
]
if name[old] begin[:]
name[self].has_fingerprint assign[=] name[self].has_fingerprint_moduli | keyword[def] identifier[switch_fingerprint_method] ( identifier[self] , identifier[old] = keyword[False] ):
literal[string]
keyword[if] identifier[old] :
identifier[self] . identifier[has_fingerprint] = identifier[self] . identifier[has_fingerprint_moduli]
keyword[else] :
identifier[self] . identifier[has_fingerprint] = identifier[self] . identifier[has_fingerprint_dlog] | def switch_fingerprint_method(self, old=False):
"""
Switches main fingerprinting method.
:param old: if True old fingerprinting method will be used.
:return:
"""
if old:
self.has_fingerprint = self.has_fingerprint_moduli # depends on [control=['if'], data=[]]
else:
self.has_fingerprint = self.has_fingerprint_dlog |
def list_tags(self, pattern: typing.Optional[str] = None) -> typing.List[str]:
    """
    Returns list of tags, optionally matching "pattern"
    :param pattern: optional substring to filter results (plain substring
        match, not a glob or regular expression)
    :type pattern: str
    :return: existing tags
    :rtype: list of str
    """
    # Convert the repo's tag objects to their string names.
    tags: typing.List[str] = [str(tag) for tag in self.repo.tags]
    if not pattern:
        LOGGER.debug('tags found in repo: %s', tags)
        return tags
    LOGGER.debug('filtering tags with pattern: %s', pattern)
    # Keep only tags that contain the pattern as a substring.
    filtered_tags: typing.List[str] = [tag for tag in tags if pattern in tag]
    LOGGER.debug('filtered tags: %s', filtered_tags)
    return filtered_tags | def function[list_tags, parameter[self, pattern]]:
constant[
Returns list of tags, optionally matching "pattern"
:param pattern: optional pattern to filter results
:type pattern: str
:return: existing tags
:rtype: list of str
]
<ast.AnnAssign object at 0x7da2054a6e90>
if <ast.UnaryOp object at 0x7da2054a4640> begin[:]
call[name[LOGGER].debug, parameter[constant[tags found in repo: %s], name[tags]]]
return[name[tags]]
call[name[LOGGER].debug, parameter[constant[filtering tags with pattern: %s], name[pattern]]]
<ast.AnnAssign object at 0x7da20e963280>
call[name[LOGGER].debug, parameter[constant[filtered tags: %s], name[filtered_tags]]]
return[name[filtered_tags]] | keyword[def] identifier[list_tags] ( identifier[self] , identifier[pattern] : identifier[str] = keyword[None] )-> identifier[typing] . identifier[List] [ identifier[str] ]:
literal[string]
identifier[tags] : identifier[typing] . identifier[List] [ identifier[str] ]=[ identifier[str] ( identifier[tag] ) keyword[for] identifier[tag] keyword[in] identifier[self] . identifier[repo] . identifier[tags] ]
keyword[if] keyword[not] identifier[pattern] :
identifier[LOGGER] . identifier[debug] ( literal[string] , identifier[tags] )
keyword[return] identifier[tags]
identifier[LOGGER] . identifier[debug] ( literal[string] , identifier[pattern] )
identifier[filtered_tags] : identifier[typing] . identifier[List] [ identifier[str] ]=[ identifier[tag] keyword[for] identifier[tag] keyword[in] identifier[tags] keyword[if] identifier[pattern] keyword[in] identifier[tag] ]
identifier[LOGGER] . identifier[debug] ( literal[string] , identifier[filtered_tags] )
keyword[return] identifier[filtered_tags] | def list_tags(self, pattern: str=None) -> typing.List[str]:
"""
Returns list of tags, optionally matching "pattern"
:param pattern: optional pattern to filter results
:type pattern: str
:return: existing tags
:rtype: list of str
"""
tags: typing.List[str] = [str(tag) for tag in self.repo.tags]
if not pattern:
LOGGER.debug('tags found in repo: %s', tags)
return tags # depends on [control=['if'], data=[]]
LOGGER.debug('filtering tags with pattern: %s', pattern)
filtered_tags: typing.List[str] = [tag for tag in tags if pattern in tag]
LOGGER.debug('filtered tags: %s', filtered_tags)
return filtered_tags |
def make_application_private(application_id, sar_client=None):
    """
    Set the application to be private.
    :param application_id: The Amazon Resource Name (ARN) of the application
    :type application_id: str
    :param sar_client: The boto3 client used to access SAR; a default
        'serverlessrepo' client is created when omitted
    :type sar_client: boto3.client
    :raises ValueError: if application_id is empty or None
    """
    if not application_id:
        raise ValueError('Require application id to make the app private')
    if not sar_client:
        # Fall back to a default Serverless Application Repository client.
        sar_client = boto3.client('serverlessrepo')
    # An empty Statements list removes all sharing permissions, which makes
    # the application private (see the docstring above).
    sar_client.put_application_policy(
        ApplicationId=application_id,
        Statements=[]
    ) | def function[make_application_private, parameter[application_id, sar_client]]:
constant[
Set the application to be private.
:param application_id: The Amazon Resource Name (ARN) of the application
:type application_id: str
:param sar_client: The boto3 client used to access SAR
:type sar_client: boto3.client
:raises ValueError
]
if <ast.UnaryOp object at 0x7da1b1232b00> begin[:]
<ast.Raise object at 0x7da1b1232ad0>
if <ast.UnaryOp object at 0x7da1b1232a40> begin[:]
variable[sar_client] assign[=] call[name[boto3].client, parameter[constant[serverlessrepo]]]
call[name[sar_client].put_application_policy, parameter[]] | keyword[def] identifier[make_application_private] ( identifier[application_id] , identifier[sar_client] = keyword[None] ):
literal[string]
keyword[if] keyword[not] identifier[application_id] :
keyword[raise] identifier[ValueError] ( literal[string] )
keyword[if] keyword[not] identifier[sar_client] :
identifier[sar_client] = identifier[boto3] . identifier[client] ( literal[string] )
identifier[sar_client] . identifier[put_application_policy] (
identifier[ApplicationId] = identifier[application_id] ,
identifier[Statements] =[]
) | def make_application_private(application_id, sar_client=None):
"""
Set the application to be private.
:param application_id: The Amazon Resource Name (ARN) of the application
:type application_id: str
:param sar_client: The boto3 client used to access SAR
:type sar_client: boto3.client
:raises ValueError
"""
if not application_id:
raise ValueError('Require application id to make the app private') # depends on [control=['if'], data=[]]
if not sar_client:
sar_client = boto3.client('serverlessrepo') # depends on [control=['if'], data=[]]
sar_client.put_application_policy(ApplicationId=application_id, Statements=[]) |
def register(self, parent=None):
    '''Record the availability of this worker and get a unique identifier.
    This sets :attr:`worker_id` and calls :meth:`heartbeat`. This
    cannot be called multiple times without calling
    :meth:`unregister` in between.
    :param parent: optional parent object stored on :attr:`parent`
    :return: the newly assigned worker id
    '''
    # worker_id doubles as the "already registered" flag; a truthy value
    # means register() was already called without a matching unregister().
    if self.worker_id:
        raise ProgrammerError('Worker.register cannot be called again without first calling unregister; it is not idempotent')
    self.parent = parent
    self.worker_id = nice_identifier()
    # Propagate our new id to the task master so work is attributed to us.
    self.task_master.worker_id = self.worker_id
    # Record initial liveness immediately after registration.
    self.heartbeat()
    return self.worker_id | def function[register, parameter[self, parent]]:
constant[Record the availability of this worker and get a unique identifer.
This sets :attr:`worker_id` and calls :meth:`heartbeat`. This
cannot be called multiple times without calling
:meth:`unregister` in between.
]
if name[self].worker_id begin[:]
<ast.Raise object at 0x7da1b14e6b00>
name[self].parent assign[=] name[parent]
name[self].worker_id assign[=] call[name[nice_identifier], parameter[]]
name[self].task_master.worker_id assign[=] name[self].worker_id
call[name[self].heartbeat, parameter[]]
return[name[self].worker_id] | keyword[def] identifier[register] ( identifier[self] , identifier[parent] = keyword[None] ):
literal[string]
keyword[if] identifier[self] . identifier[worker_id] :
keyword[raise] identifier[ProgrammerError] ( literal[string] )
identifier[self] . identifier[parent] = identifier[parent]
identifier[self] . identifier[worker_id] = identifier[nice_identifier] ()
identifier[self] . identifier[task_master] . identifier[worker_id] = identifier[self] . identifier[worker_id]
identifier[self] . identifier[heartbeat] ()
keyword[return] identifier[self] . identifier[worker_id] | def register(self, parent=None):
"""Record the availability of this worker and get a unique identifer.
This sets :attr:`worker_id` and calls :meth:`heartbeat`. This
cannot be called multiple times without calling
:meth:`unregister` in between.
"""
if self.worker_id:
raise ProgrammerError('Worker.register cannot be called again without first calling unregister; it is not idempotent') # depends on [control=['if'], data=[]]
self.parent = parent
self.worker_id = nice_identifier()
self.task_master.worker_id = self.worker_id
self.heartbeat()
return self.worker_id |
def shot_open_callback(self, *args, **kwargs):
    """Callback for the shot open button
    Opens the shot currently selected in the browser, emitting the
    before/after open signals around the actual open call. Does nothing
    when no shot is selected; logs and shows a status-bar message when the
    selected shot's path does not exist on disk.
    :returns: None
    :rtype: None
    :raises: None
    """
    # Current selection in column 1 of the browser; falsy means nothing selected.
    tf = self.browser.get_current_selection(1)
    if not tf:
        return
    if not os.path.exists(tf.path):
        msg = 'The selected shot does not exist: %s' % tf.path
        log.error(msg)
        self.statusbar.showMessage(msg)
        return
    # Emit the before/after signals around the actual open.
    js = JukeboxSignals.get()
    js.before_open_shot.emit(tf)
    self.open_shot(tf)
    js.after_open_shot.emit(tf) | def function[shot_open_callback, parameter[self]]:
constant[Callback for the shot open button
:returns: None
:rtype: None
:raises: None
]
variable[tf] assign[=] call[name[self].browser.get_current_selection, parameter[constant[1]]]
if <ast.UnaryOp object at 0x7da1b144c880> begin[:]
return[None]
if <ast.UnaryOp object at 0x7da1b144c820> begin[:]
variable[msg] assign[=] binary_operation[constant[The selected shot does not exist: %s] <ast.Mod object at 0x7da2590d6920> name[tf].path]
call[name[log].error, parameter[name[msg]]]
call[name[self].statusbar.showMessage, parameter[name[msg]]]
return[None]
variable[js] assign[=] call[name[JukeboxSignals].get, parameter[]]
call[name[js].before_open_shot.emit, parameter[name[tf]]]
call[name[self].open_shot, parameter[name[tf]]]
call[name[js].after_open_shot.emit, parameter[name[tf]]] | keyword[def] identifier[shot_open_callback] ( identifier[self] ,* identifier[args] ,** identifier[kwargs] ):
literal[string]
identifier[tf] = identifier[self] . identifier[browser] . identifier[get_current_selection] ( literal[int] )
keyword[if] keyword[not] identifier[tf] :
keyword[return]
keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[exists] ( identifier[tf] . identifier[path] ):
identifier[msg] = literal[string] % identifier[tf] . identifier[path]
identifier[log] . identifier[error] ( identifier[msg] )
identifier[self] . identifier[statusbar] . identifier[showMessage] ( identifier[msg] )
keyword[return]
identifier[js] = identifier[JukeboxSignals] . identifier[get] ()
identifier[js] . identifier[before_open_shot] . identifier[emit] ( identifier[tf] )
identifier[self] . identifier[open_shot] ( identifier[tf] )
identifier[js] . identifier[after_open_shot] . identifier[emit] ( identifier[tf] ) | def shot_open_callback(self, *args, **kwargs):
"""Callback for the shot open button
:returns: None
:rtype: None
:raises: None
"""
tf = self.browser.get_current_selection(1)
if not tf:
return # depends on [control=['if'], data=[]]
if not os.path.exists(tf.path):
msg = 'The selected shot does not exist: %s' % tf.path
log.error(msg)
self.statusbar.showMessage(msg)
return # depends on [control=['if'], data=[]]
js = JukeboxSignals.get()
js.before_open_shot.emit(tf)
self.open_shot(tf)
js.after_open_shot.emit(tf) |
def calc_local_indices(shape, num_partitions, coordinate):
    """ calculate local indices, return start and stop index per dimension per process for local data field
    :param shape: global shape of data
    :param num_partitions: number of partition for each dimension (from MPI.Compute_dims())
    :param coordinate: cartesian coordinate descriptor (from CARTESIAN_COMMUNICATOR.Get_coords(rank))
    :return: 3-tuple ``(start_index, stop_index, shape)`` where each element
        is itself a tuple with one entry per dimension: the start indices,
        the (exclusive) stop indices, and the local shape of this process's
        partition
    """
    dimension = len(shape)
    # check matching of cartesian communicator and shape
    assert dimension == len(num_partitions)
    decomposed_shapes = []
    # build shape list for every dimension
    for idx in range(dimension):
        # Base partition size from floor division; the remainder is spread
        # one element at a time over the first partitions below.
        local_shape = shape[idx] // num_partitions[idx]
        temp_shape_list = []
        for _ in range(num_partitions[idx]):
            temp_shape_list.append(local_shape)
        # expand local partitions to match global shape
        for j in range(shape[idx] % num_partitions[idx]):
            temp_shape_list[j] += 1
        # decomposed_shapes[dimension][partition]
        decomposed_shapes.append(temp_shape_list)
    # calculate indices for partitions
    indices = []
    # TODO: redefine calculation -> first select and calculate
    for i in range(dimension):
        temp_index_list = []
        start_idx = 0
        end_idx = 0
        for j in range(num_partitions[i]):
            # Half-open [start_idx, end_idx) range for partition j of dim i;
            # each partition starts where the previous one stopped.
            end_idx = end_idx + decomposed_shapes[i][j]
            temp_index_list.append([start_idx, end_idx])
            start_idx = end_idx
        indices.append(temp_index_list)
    start_index = []
    stop_index = []
    # NOTE: ``shape`` is rebound below to the *local* shape; the global
    # shape argument is no longer needed past this point.
    shape = []
    # select partition, start and stop index
    for idx in range(dimension):
        start_index.append(indices[idx][coordinate[idx]][0])
        stop_index.append(indices[idx][coordinate[idx]][1])
        shape.append(decomposed_shapes[idx][coordinate[idx]])
    shape = tuple(shape)
    start_index = tuple(start_index)
    stop_index = tuple(stop_index)
    return start_index, stop_index, shape | def function[calc_local_indices, parameter[shape, num_partitions, coordinate]]:
constant[ calculate local indices, return start and stop index per dimension per process for local data field
:param shape: global shape of data
:param num_partitions: number of partition for each dimension (from MPI.Compute_dims())
:param coordinate: cartesian coordinate descriptor (from CARTESIAN_COMMUNICATOR.Get_coords(rank))
:return: tuple of start/stop index per dimension ((start_x, stop_x), (start_y, stop_y), ...)
]
variable[dimension] assign[=] call[name[len], parameter[name[shape]]]
assert[compare[name[dimension] equal[==] call[name[len], parameter[name[num_partitions]]]]]
variable[decomposed_shapes] assign[=] list[[]]
for taget[name[idx]] in starred[call[name[range], parameter[name[dimension]]]] begin[:]
variable[local_shape] assign[=] binary_operation[call[name[shape]][name[idx]] <ast.FloorDiv object at 0x7da2590d6bc0> call[name[num_partitions]][name[idx]]]
variable[temp_shape_list] assign[=] list[[]]
for taget[name[_]] in starred[call[name[range], parameter[call[name[num_partitions]][name[idx]]]]] begin[:]
call[name[temp_shape_list].append, parameter[name[local_shape]]]
for taget[name[j]] in starred[call[name[range], parameter[binary_operation[call[name[shape]][name[idx]] <ast.Mod object at 0x7da2590d6920> call[name[num_partitions]][name[idx]]]]]] begin[:]
<ast.AugAssign object at 0x7da1b28c4df0>
call[name[decomposed_shapes].append, parameter[name[temp_shape_list]]]
variable[indices] assign[=] list[[]]
for taget[name[i]] in starred[call[name[range], parameter[name[dimension]]]] begin[:]
variable[temp_index_list] assign[=] list[[]]
variable[start_idx] assign[=] constant[0]
variable[end_idx] assign[=] constant[0]
for taget[name[j]] in starred[call[name[range], parameter[call[name[num_partitions]][name[i]]]]] begin[:]
variable[end_idx] assign[=] binary_operation[name[end_idx] + call[call[name[decomposed_shapes]][name[i]]][name[j]]]
call[name[temp_index_list].append, parameter[list[[<ast.Name object at 0x7da1b28c57e0>, <ast.Name object at 0x7da1b28c7b50>]]]]
variable[start_idx] assign[=] name[end_idx]
call[name[indices].append, parameter[name[temp_index_list]]]
variable[start_index] assign[=] list[[]]
variable[stop_index] assign[=] list[[]]
variable[shape] assign[=] list[[]]
for taget[name[idx]] in starred[call[name[range], parameter[name[dimension]]]] begin[:]
call[name[start_index].append, parameter[call[call[call[name[indices]][name[idx]]][call[name[coordinate]][name[idx]]]][constant[0]]]]
call[name[stop_index].append, parameter[call[call[call[name[indices]][name[idx]]][call[name[coordinate]][name[idx]]]][constant[1]]]]
call[name[shape].append, parameter[call[call[name[decomposed_shapes]][name[idx]]][call[name[coordinate]][name[idx]]]]]
variable[shape] assign[=] call[name[tuple], parameter[name[shape]]]
variable[start_index] assign[=] call[name[tuple], parameter[name[start_index]]]
variable[stop_index] assign[=] call[name[tuple], parameter[name[stop_index]]]
return[tuple[[<ast.Name object at 0x7da1b287c1c0>, <ast.Name object at 0x7da1b287dc60>, <ast.Name object at 0x7da1b287c790>]]] | keyword[def] identifier[calc_local_indices] ( identifier[shape] , identifier[num_partitions] , identifier[coordinate] ):
literal[string]
identifier[dimension] = identifier[len] ( identifier[shape] )
keyword[assert] identifier[dimension] == identifier[len] ( identifier[num_partitions] )
identifier[decomposed_shapes] =[]
keyword[for] identifier[idx] keyword[in] identifier[range] ( identifier[dimension] ):
identifier[local_shape] = identifier[shape] [ identifier[idx] ]// identifier[num_partitions] [ identifier[idx] ]
identifier[temp_shape_list] =[]
keyword[for] identifier[_] keyword[in] identifier[range] ( identifier[num_partitions] [ identifier[idx] ]):
identifier[temp_shape_list] . identifier[append] ( identifier[local_shape] )
keyword[for] identifier[j] keyword[in] identifier[range] ( identifier[shape] [ identifier[idx] ]% identifier[num_partitions] [ identifier[idx] ]):
identifier[temp_shape_list] [ identifier[j] ]+= literal[int]
identifier[decomposed_shapes] . identifier[append] ( identifier[temp_shape_list] )
identifier[indices] =[]
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[dimension] ):
identifier[temp_index_list] =[]
identifier[start_idx] = literal[int]
identifier[end_idx] = literal[int]
keyword[for] identifier[j] keyword[in] identifier[range] ( identifier[num_partitions] [ identifier[i] ]):
identifier[end_idx] = identifier[end_idx] + identifier[decomposed_shapes] [ identifier[i] ][ identifier[j] ]
identifier[temp_index_list] . identifier[append] ([ identifier[start_idx] , identifier[end_idx] ])
identifier[start_idx] = identifier[end_idx]
identifier[indices] . identifier[append] ( identifier[temp_index_list] )
identifier[start_index] =[]
identifier[stop_index] =[]
identifier[shape] =[]
keyword[for] identifier[idx] keyword[in] identifier[range] ( identifier[dimension] ):
identifier[start_index] . identifier[append] ( identifier[indices] [ identifier[idx] ][ identifier[coordinate] [ identifier[idx] ]][ literal[int] ])
identifier[stop_index] . identifier[append] ( identifier[indices] [ identifier[idx] ][ identifier[coordinate] [ identifier[idx] ]][ literal[int] ])
identifier[shape] . identifier[append] ( identifier[decomposed_shapes] [ identifier[idx] ][ identifier[coordinate] [ identifier[idx] ]])
identifier[shape] = identifier[tuple] ( identifier[shape] )
identifier[start_index] = identifier[tuple] ( identifier[start_index] )
identifier[stop_index] = identifier[tuple] ( identifier[stop_index] )
keyword[return] identifier[start_index] , identifier[stop_index] , identifier[shape] | def calc_local_indices(shape, num_partitions, coordinate):
""" calculate local indices, return start and stop index per dimension per process for local data field
:param shape: global shape of data
:param num_partitions: number of partition for each dimension (from MPI.Compute_dims())
:param coordinate: cartesian coordinate descriptor (from CARTESIAN_COMMUNICATOR.Get_coords(rank))
:return: tuple of start/stop index per dimension ((start_x, stop_x), (start_y, stop_y), ...)
"""
dimension = len(shape)
# check matching of cartesian communicator and shape
assert dimension == len(num_partitions)
decomposed_shapes = []
# build shape list for every dimension
for idx in range(dimension):
local_shape = shape[idx] // num_partitions[idx]
temp_shape_list = []
for _ in range(num_partitions[idx]):
temp_shape_list.append(local_shape) # depends on [control=['for'], data=[]]
# expand local partitions to match global shape
for j in range(shape[idx] % num_partitions[idx]):
temp_shape_list[j] += 1 # depends on [control=['for'], data=['j']]
# decomposed_shapes[dimension][partition]
decomposed_shapes.append(temp_shape_list) # depends on [control=['for'], data=['idx']]
# calculate indices for partitions
indices = []
# TODO: redefine calculation -> first select and calculate
for i in range(dimension):
temp_index_list = []
start_idx = 0
end_idx = 0
for j in range(num_partitions[i]):
end_idx = end_idx + decomposed_shapes[i][j]
temp_index_list.append([start_idx, end_idx])
start_idx = end_idx # depends on [control=['for'], data=['j']]
indices.append(temp_index_list) # depends on [control=['for'], data=['i']]
start_index = []
stop_index = []
shape = []
# select partition, start and stop index
for idx in range(dimension):
start_index.append(indices[idx][coordinate[idx]][0])
stop_index.append(indices[idx][coordinate[idx]][1])
shape.append(decomposed_shapes[idx][coordinate[idx]]) # depends on [control=['for'], data=['idx']]
shape = tuple(shape)
start_index = tuple(start_index)
stop_index = tuple(stop_index)
return (start_index, stop_index, shape) |
def open_remote_file(dataset_key, file_name, profile='default',
                     mode='w', **kwargs):
    """Open a remote file object that can be used to write to or read from
    a file in a data.world dataset
    :param dataset_key: Dataset identifier, in the form of owner/id
    :type dataset_key: str
    :param file_name: The name of the file to open
    :type file_name: str
    :param mode: the mode for the file - must be 'w', 'wb', 'r', or 'rb' -
        indicating read/write ('r'/'w') and optionally "binary"
        handling of the file data. (Default value = 'w')
    :type mode: str, optional
    :param chunk_size: size of chunked bytes to return when reading streamed
        bytes in 'rb' mode
    :type chunk_size: int, optional
    :param decode_unicode: whether to decode textual responses as unicode when
        returning streamed lines in 'r' mode
    :type decode_unicode: bool, optional
    :param profile: (Default value = 'default')
    :param **kwargs: forwarded both to client construction and to the
        client's ``open_remote_file`` call (see note at the return below)
    Examples
    --------
    >>> import datadotworld as dw
    >>>
    >>> # write a text file
    >>> with dw.open_remote_file('username/test-dataset',
    ...                          'test.txt') as w:
    ...   w.write("this is a test.")
    >>>
    >>> # write a jsonlines file
    >>> import json
    >>> with dw.open_remote_file('username/test-dataset',
    ...                          'test.jsonl') as w:
    ...   json.dump({'foo':42, 'bar':"A"}, w)
    ...   w.write("\\n")
    ...   json.dump({'foo':13, 'bar':"B"}, w)
    ...   w.write("\\n")
    >>>
    >>> # write a csv file
    >>> import csv
    >>> with dw.open_remote_file('username/test-dataset',
    ...                          'test.csv') as w:
    ...   csvw = csv.DictWriter(w, fieldnames=['foo', 'bar'])
    ...   csvw.writeheader()
    ...   csvw.writerow({'foo':42, 'bar':"A"})
    ...   csvw.writerow({'foo':13, 'bar':"B"})
    >>>
    >>> # write a pandas dataframe as a csv file
    >>> import pandas as pd
    >>> df = pd.DataFrame({'foo':[1,2,3,4],'bar':['a','b','c','d']})
    >>> with dw.open_remote_file('username/test-dataset',
    ...                          'dataframe.csv') as w:
    ...   df.to_csv(w, index=False)
    >>>
    >>> # write a binary file
    >>> with dw.open_remote_file('username/test-dataset',
    >>>                          'test.txt', mode='wb') as w:
    ...   w.write(bytes([100,97,116,97,46,119,111,114,108,100]))
    >>>
    >>> # read a text file
    >>> with dw.open_remote_file('username/test-dataset',
    ...                          'test.txt', mode='r') as r:
    ...   print(r.read())
    >>>
    >>> # read a csv file
    >>> with dw.open_remote_file('username/test-dataset',
    ...                          'test.csv', mode='r') as r:
    ...   csvr = csv.DictReader(r)
    ...   for row in csvr:
    ...      print(row['column a'], row['column b'])
    >>>
    >>> # read a binary file
    >>> with dw.open_remote_file('username/test-dataset',
    ...                          'test', mode='rb') as r:
    ...   bytes = r.read()
    """
    # Resolve the client for the given profile and delegate. Note that
    # ``kwargs`` is intentionally passed twice: once to _get_instance()
    # and once to the client's open_remote_file().
    return _get_instance(profile, **kwargs).open_remote_file(
        dataset_key, file_name,
        mode=mode, **kwargs) | def function[open_remote_file, parameter[dataset_key, file_name, profile, mode]]:
constant[Open a remote file object that can be used to write to or read from
a file in a data.world dataset
:param dataset_key: Dataset identifier, in the form of owner/id
:type dataset_key: str
:param file_name: The name of the file to open
:type file_name: str
:param mode: the mode for the file - must be 'w', 'wb', 'r', or 'rb' -
indicating read/write ('r'/'w') and optionally "binary"
handling of the file data. (Default value = 'w')
:type mode: str, optional
:param chunk_size: size of chunked bytes to return when reading streamed
bytes in 'rb' mode
:type chunk_size: int, optional
:param decode_unicode: whether to decode textual responses as unicode when
returning streamed lines in 'r' mode
:type decode_unicode: bool, optional
:param profile: (Default value = 'default')
:param **kwargs:
Examples
--------
>>> import datadotworld as dw
>>>
>>> # write a text file
>>> with dw.open_remote_file('username/test-dataset',
... 'test.txt') as w:
... w.write("this is a test.")
>>>
>>> # write a jsonlines file
>>> import json
>>> with dw.open_remote_file('username/test-dataset',
... 'test.jsonl') as w:
... json.dump({'foo':42, 'bar':"A"}, w)
... w.write("\n")
... json.dump({'foo':13, 'bar':"B"}, w)
... w.write("\n")
>>>
>>> # write a csv file
>>> import csv
>>> with dw.open_remote_file('username/test-dataset',
... 'test.csv') as w:
... csvw = csv.DictWriter(w, fieldnames=['foo', 'bar'])
... csvw.writeheader()
... csvw.writerow({'foo':42, 'bar':"A"})
... csvw.writerow({'foo':13, 'bar':"B"})
>>>
>>> # write a pandas dataframe as a csv file
>>> import pandas as pd
>>> df = pd.DataFrame({'foo':[1,2,3,4],'bar':['a','b','c','d']})
>>> with dw.open_remote_file('username/test-dataset',
... 'dataframe.csv') as w:
... df.to_csv(w, index=False)
>>>
>>> # write a binary file
>>> with dw.open_remote_file('username/test-dataset',
>>> 'test.txt', mode='wb') as w:
... w.write(bytes([100,97,116,97,46,119,111,114,108,100]))
>>>
>>> # read a text file
>>> with dw.open_remote_file('username/test-dataset',
... 'test.txt', mode='r') as r:
... print(r.read())
>>>
>>> # read a csv file
>>> with dw.open_remote_file('username/test-dataset',
... 'test.csv', mode='r') as r:
... csvr = csv.DictReader(r)
... for row in csvr:
... print(row['column a'], row['column b'])
>>>
>>> # read a binary file
>>> with dw.open_remote_file('username/test-dataset',
... 'test', mode='rb') as r:
... bytes = r.read()
]
return[call[call[name[_get_instance], parameter[name[profile]]].open_remote_file, parameter[name[dataset_key], name[file_name]]]] | keyword[def] identifier[open_remote_file] ( identifier[dataset_key] , identifier[file_name] , identifier[profile] = literal[string] ,
identifier[mode] = literal[string] ,** identifier[kwargs] ):
literal[string]
keyword[return] identifier[_get_instance] ( identifier[profile] ,** identifier[kwargs] ). identifier[open_remote_file] (
identifier[dataset_key] , identifier[file_name] ,
identifier[mode] = identifier[mode] ,** identifier[kwargs] ) | def open_remote_file(dataset_key, file_name, profile='default', mode='w', **kwargs):
"""Open a remote file object that can be used to write to or read from
a file in a data.world dataset
:param dataset_key: Dataset identifier, in the form of owner/id
:type dataset_key: str
:param file_name: The name of the file to open
:type file_name: str
:param mode: the mode for the file - must be 'w', 'wb', 'r', or 'rb' -
indicating read/write ('r'/'w') and optionally "binary"
handling of the file data. (Default value = 'w')
:type mode: str, optional
:param chunk_size: size of chunked bytes to return when reading streamed
bytes in 'rb' mode
:type chunk_size: int, optional
:param decode_unicode: whether to decode textual responses as unicode when
returning streamed lines in 'r' mode
:type decode_unicode: bool, optional
:param profile: (Default value = 'default')
:param **kwargs:
Examples
--------
>>> import datadotworld as dw
>>>
>>> # write a text file
>>> with dw.open_remote_file('username/test-dataset',
... 'test.txt') as w:
... w.write("this is a test.")
>>>
>>> # write a jsonlines file
>>> import json
>>> with dw.open_remote_file('username/test-dataset',
... 'test.jsonl') as w:
... json.dump({'foo':42, 'bar':"A"}, w)
... w.write("\\n")
... json.dump({'foo':13, 'bar':"B"}, w)
... w.write("\\n")
>>>
>>> # write a csv file
>>> import csv
>>> with dw.open_remote_file('username/test-dataset',
... 'test.csv') as w:
... csvw = csv.DictWriter(w, fieldnames=['foo', 'bar'])
... csvw.writeheader()
... csvw.writerow({'foo':42, 'bar':"A"})
... csvw.writerow({'foo':13, 'bar':"B"})
>>>
>>> # write a pandas dataframe as a csv file
>>> import pandas as pd
>>> df = pd.DataFrame({'foo':[1,2,3,4],'bar':['a','b','c','d']})
>>> with dw.open_remote_file('username/test-dataset',
... 'dataframe.csv') as w:
... df.to_csv(w, index=False)
>>>
>>> # write a binary file
>>> with dw.open_remote_file('username/test-dataset',
>>> 'test.txt', mode='wb') as w:
... w.write(bytes([100,97,116,97,46,119,111,114,108,100]))
>>>
>>> # read a text file
>>> with dw.open_remote_file('username/test-dataset',
... 'test.txt', mode='r') as r:
... print(r.read())
>>>
>>> # read a csv file
>>> with dw.open_remote_file('username/test-dataset',
... 'test.csv', mode='r') as r:
... csvr = csv.DictReader(r)
... for row in csvr:
... print(row['column a'], row['column b'])
>>>
>>> # read a binary file
>>> with dw.open_remote_file('username/test-dataset',
... 'test', mode='rb') as r:
... bytes = r.read()
"""
return _get_instance(profile, **kwargs).open_remote_file(dataset_key, file_name, mode=mode, **kwargs) |
def removeSingleCachedFile(self, fileStoreID):
    """
    Removes a single file described by the fileStoreID from the cache forcibly.

    Holds the cache-state lock for the whole operation, verifies the file's
    link count is exactly at the expected threshold (i.e. no other job is
    using it and the bookkeeping is consistent), deletes it, and updates the
    cached-size accounting.

    :param fileStoreID: id of the global file whose cached copy is removed
    :return: None
    """
    with self._CacheState.open(self) as cacheInfo:
        # Translate the job-store file id into its path inside the cache.
        cachedFile = self.encodedFileID(fileStoreID)
        cachedFileStats = os.stat(cachedFile)
        # We know the file exists because this function was called in the if block. So we
        # have to ensure nothing has changed since then.
        assert cachedFileStats.st_nlink <= self.nlinkThreshold, \
            'Attempting to delete a global file that is in use by another job.'
        assert cachedFileStats.st_nlink >= self.nlinkThreshold, \
            'A global file has too FEW links at deletion time. Our link threshold is incorrect!'
        # Remove the file size from the cached file size if the jobstore is not fileJobStore
        # and then delete the file
        os.remove(cachedFile)
        # NOTE(review): per the comment above, nlinkThreshold == 2 appears to
        # mark the file-based job store, whose bytes are not accounted in
        # cacheInfo.cached -- confirm against the class's setup code.
        if self.nlinkThreshold != 2:
            cacheInfo.cached -= cachedFileStats.st_size
        if not cacheInfo.isBalanced():
            self.logToMaster('CACHE: The cache was not balanced on removing single file',
                             logging.WARN)
        self.logToMaster('CACHE: Successfully removed file with ID \'%s\'.' % fileStoreID)
        return None | def function[removeSingleCachedFile, parameter[self, fileStoreID]]:
constant[
Removes a single file described by the fileStoreID from the cache forcibly.
]
with call[name[self]._CacheState.open, parameter[name[self]]] begin[:]
variable[cachedFile] assign[=] call[name[self].encodedFileID, parameter[name[fileStoreID]]]
variable[cachedFileStats] assign[=] call[name[os].stat, parameter[name[cachedFile]]]
assert[compare[name[cachedFileStats].st_nlink less_or_equal[<=] name[self].nlinkThreshold]]
assert[compare[name[cachedFileStats].st_nlink greater_or_equal[>=] name[self].nlinkThreshold]]
call[name[os].remove, parameter[name[cachedFile]]]
if compare[name[self].nlinkThreshold not_equal[!=] constant[2]] begin[:]
<ast.AugAssign object at 0x7da20c6a8e20>
if <ast.UnaryOp object at 0x7da20c6aa290> begin[:]
call[name[self].logToMaster, parameter[constant[CACHE: The cache was not balanced on removing single file], name[logging].WARN]]
call[name[self].logToMaster, parameter[binary_operation[constant[CACHE: Successfully removed file with ID '%s'.] <ast.Mod object at 0x7da2590d6920> name[fileStoreID]]]]
return[constant[None]] | keyword[def] identifier[removeSingleCachedFile] ( identifier[self] , identifier[fileStoreID] ):
literal[string]
keyword[with] identifier[self] . identifier[_CacheState] . identifier[open] ( identifier[self] ) keyword[as] identifier[cacheInfo] :
identifier[cachedFile] = identifier[self] . identifier[encodedFileID] ( identifier[fileStoreID] )
identifier[cachedFileStats] = identifier[os] . identifier[stat] ( identifier[cachedFile] )
keyword[assert] identifier[cachedFileStats] . identifier[st_nlink] <= identifier[self] . identifier[nlinkThreshold] , literal[string]
keyword[assert] identifier[cachedFileStats] . identifier[st_nlink] >= identifier[self] . identifier[nlinkThreshold] , literal[string]
identifier[os] . identifier[remove] ( identifier[cachedFile] )
keyword[if] identifier[self] . identifier[nlinkThreshold] != literal[int] :
identifier[cacheInfo] . identifier[cached] -= identifier[cachedFileStats] . identifier[st_size]
keyword[if] keyword[not] identifier[cacheInfo] . identifier[isBalanced] ():
identifier[self] . identifier[logToMaster] ( literal[string] ,
identifier[logging] . identifier[WARN] )
identifier[self] . identifier[logToMaster] ( literal[string] % identifier[fileStoreID] )
keyword[return] keyword[None] | def removeSingleCachedFile(self, fileStoreID):
"""
Removes a single file described by the fileStoreID from the cache forcibly.
"""
with self._CacheState.open(self) as cacheInfo:
cachedFile = self.encodedFileID(fileStoreID)
cachedFileStats = os.stat(cachedFile)
# We know the file exists because this function was called in the if block. So we
# have to ensure nothing has changed since then.
assert cachedFileStats.st_nlink <= self.nlinkThreshold, 'Attempting to delete a global file that is in use by another job.'
assert cachedFileStats.st_nlink >= self.nlinkThreshold, 'A global file has too FEW links at deletion time. Our link threshold is incorrect!'
# Remove the file size from the cached file size if the jobstore is not fileJobStore
# and then delete the file
os.remove(cachedFile)
if self.nlinkThreshold != 2:
cacheInfo.cached -= cachedFileStats.st_size # depends on [control=['if'], data=[]]
if not cacheInfo.isBalanced():
self.logToMaster('CACHE: The cache was not balanced on removing single file', logging.WARN) # depends on [control=['if'], data=[]]
self.logToMaster("CACHE: Successfully removed file with ID '%s'." % fileStoreID) # depends on [control=['with'], data=['cacheInfo']]
return None |
def cfg_intf(self, protocol_interface, phy_interface=None):
"""Called by application to add an interface to the list. """
self.intf_list.append(protocol_interface)
self.cfg_lldp_interface(protocol_interface, phy_interface) | def function[cfg_intf, parameter[self, protocol_interface, phy_interface]]:
constant[Called by application to add an interface to the list. ]
call[name[self].intf_list.append, parameter[name[protocol_interface]]]
call[name[self].cfg_lldp_interface, parameter[name[protocol_interface], name[phy_interface]]] | keyword[def] identifier[cfg_intf] ( identifier[self] , identifier[protocol_interface] , identifier[phy_interface] = keyword[None] ):
literal[string]
identifier[self] . identifier[intf_list] . identifier[append] ( identifier[protocol_interface] )
identifier[self] . identifier[cfg_lldp_interface] ( identifier[protocol_interface] , identifier[phy_interface] ) | def cfg_intf(self, protocol_interface, phy_interface=None):
"""Called by application to add an interface to the list. """
self.intf_list.append(protocol_interface)
self.cfg_lldp_interface(protocol_interface, phy_interface) |
def get_header(self, patch_dir=None):
""" Returns bytes """
lines = []
if patch_dir:
file = patch_dir + File(self.get_name())
name = file.get_name()
else:
name = self.get_name()
with open(name, "rb") as f:
for line in f:
if line.startswith(b"---") or line.startswith(b"Index:"):
break
lines.append(line)
return b"".join(lines) | def function[get_header, parameter[self, patch_dir]]:
constant[ Returns bytes ]
variable[lines] assign[=] list[[]]
if name[patch_dir] begin[:]
variable[file] assign[=] binary_operation[name[patch_dir] + call[name[File], parameter[call[name[self].get_name, parameter[]]]]]
variable[name] assign[=] call[name[file].get_name, parameter[]]
with call[name[open], parameter[name[name], constant[rb]]] begin[:]
for taget[name[line]] in starred[name[f]] begin[:]
if <ast.BoolOp object at 0x7da1b033d2a0> begin[:]
break
call[name[lines].append, parameter[name[line]]]
return[call[constant[b''].join, parameter[name[lines]]]] | keyword[def] identifier[get_header] ( identifier[self] , identifier[patch_dir] = keyword[None] ):
literal[string]
identifier[lines] =[]
keyword[if] identifier[patch_dir] :
identifier[file] = identifier[patch_dir] + identifier[File] ( identifier[self] . identifier[get_name] ())
identifier[name] = identifier[file] . identifier[get_name] ()
keyword[else] :
identifier[name] = identifier[self] . identifier[get_name] ()
keyword[with] identifier[open] ( identifier[name] , literal[string] ) keyword[as] identifier[f] :
keyword[for] identifier[line] keyword[in] identifier[f] :
keyword[if] identifier[line] . identifier[startswith] ( literal[string] ) keyword[or] identifier[line] . identifier[startswith] ( literal[string] ):
keyword[break]
identifier[lines] . identifier[append] ( identifier[line] )
keyword[return] literal[string] . identifier[join] ( identifier[lines] ) | def get_header(self, patch_dir=None):
""" Returns bytes """
lines = []
if patch_dir:
file = patch_dir + File(self.get_name())
name = file.get_name() # depends on [control=['if'], data=[]]
else:
name = self.get_name()
with open(name, 'rb') as f:
for line in f:
if line.startswith(b'---') or line.startswith(b'Index:'):
break # depends on [control=['if'], data=[]]
lines.append(line) # depends on [control=['for'], data=['line']] # depends on [control=['with'], data=['f']]
return b''.join(lines) |
def bellman_operator(self, v, Tv=None, sigma=None):
"""
The Bellman operator, which computes and returns the updated
value function `Tv` for a value function `v`.
Parameters
----------
v : array_like(float, ndim=1)
Value function vector, of length n.
Tv : ndarray(float, ndim=1), optional(default=None)
Optional output array for Tv.
sigma : ndarray(int, ndim=1), optional(default=None)
If not None, the v-greedy policy vector is stored in this
array. Must be of length n.
Returns
-------
Tv : ndarray(float, ndim=1)
Updated value function vector, of length n.
"""
vals = self.R + self.beta * self.Q.dot(v) # Shape: (L,) or (n, m)
if Tv is None:
Tv = np.empty(self.num_states)
self.s_wise_max(vals, out=Tv, out_argmax=sigma)
return Tv | def function[bellman_operator, parameter[self, v, Tv, sigma]]:
constant[
The Bellman operator, which computes and returns the updated
value function `Tv` for a value function `v`.
Parameters
----------
v : array_like(float, ndim=1)
Value function vector, of length n.
Tv : ndarray(float, ndim=1), optional(default=None)
Optional output array for Tv.
sigma : ndarray(int, ndim=1), optional(default=None)
If not None, the v-greedy policy vector is stored in this
array. Must be of length n.
Returns
-------
Tv : ndarray(float, ndim=1)
Updated value function vector, of length n.
]
variable[vals] assign[=] binary_operation[name[self].R + binary_operation[name[self].beta * call[name[self].Q.dot, parameter[name[v]]]]]
if compare[name[Tv] is constant[None]] begin[:]
variable[Tv] assign[=] call[name[np].empty, parameter[name[self].num_states]]
call[name[self].s_wise_max, parameter[name[vals]]]
return[name[Tv]] | keyword[def] identifier[bellman_operator] ( identifier[self] , identifier[v] , identifier[Tv] = keyword[None] , identifier[sigma] = keyword[None] ):
literal[string]
identifier[vals] = identifier[self] . identifier[R] + identifier[self] . identifier[beta] * identifier[self] . identifier[Q] . identifier[dot] ( identifier[v] )
keyword[if] identifier[Tv] keyword[is] keyword[None] :
identifier[Tv] = identifier[np] . identifier[empty] ( identifier[self] . identifier[num_states] )
identifier[self] . identifier[s_wise_max] ( identifier[vals] , identifier[out] = identifier[Tv] , identifier[out_argmax] = identifier[sigma] )
keyword[return] identifier[Tv] | def bellman_operator(self, v, Tv=None, sigma=None):
"""
The Bellman operator, which computes and returns the updated
value function `Tv` for a value function `v`.
Parameters
----------
v : array_like(float, ndim=1)
Value function vector, of length n.
Tv : ndarray(float, ndim=1), optional(default=None)
Optional output array for Tv.
sigma : ndarray(int, ndim=1), optional(default=None)
If not None, the v-greedy policy vector is stored in this
array. Must be of length n.
Returns
-------
Tv : ndarray(float, ndim=1)
Updated value function vector, of length n.
"""
vals = self.R + self.beta * self.Q.dot(v) # Shape: (L,) or (n, m)
if Tv is None:
Tv = np.empty(self.num_states) # depends on [control=['if'], data=['Tv']]
self.s_wise_max(vals, out=Tv, out_argmax=sigma)
return Tv |
def get_api_keys(request):
"""Return the list of API keys."""
with db_connect() as db_conn:
with db_conn.cursor() as cursor:
cursor.execute("""\
SELECT row_to_json(combined_rows) FROM (
SELECT id, key, name, groups FROM api_keys
) AS combined_rows""")
api_keys = [x[0] for x in cursor.fetchall()]
return api_keys | def function[get_api_keys, parameter[request]]:
constant[Return the list of API keys.]
with call[name[db_connect], parameter[]] begin[:]
with call[name[db_conn].cursor, parameter[]] begin[:]
call[name[cursor].execute, parameter[constant[SELECT row_to_json(combined_rows) FROM (
SELECT id, key, name, groups FROM api_keys
) AS combined_rows]]]
variable[api_keys] assign[=] <ast.ListComp object at 0x7da1aff6d6f0>
return[name[api_keys]] | keyword[def] identifier[get_api_keys] ( identifier[request] ):
literal[string]
keyword[with] identifier[db_connect] () keyword[as] identifier[db_conn] :
keyword[with] identifier[db_conn] . identifier[cursor] () keyword[as] identifier[cursor] :
identifier[cursor] . identifier[execute] ( literal[string] )
identifier[api_keys] =[ identifier[x] [ literal[int] ] keyword[for] identifier[x] keyword[in] identifier[cursor] . identifier[fetchall] ()]
keyword[return] identifier[api_keys] | def get_api_keys(request):
"""Return the list of API keys."""
with db_connect() as db_conn:
with db_conn.cursor() as cursor:
cursor.execute('SELECT row_to_json(combined_rows) FROM (\n SELECT id, key, name, groups FROM api_keys\n) AS combined_rows')
api_keys = [x[0] for x in cursor.fetchall()] # depends on [control=['with'], data=['cursor']] # depends on [control=['with'], data=['db_conn']]
return api_keys |
def set_flavor(self, node, flavor):
"""Set a flavor to a given ironic node.
:param uuid: the ironic node UUID
:param flavor: the flavor name
"""
command = (
'ironic node-update {uuid} add '
'properties/capabilities=profile:{flavor},boot_option:local').format(
uuid=node.uuid, flavor=flavor)
node.flavor = flavor
self.add_environment_file(user='stack', filename='stackrc')
self.run(command, user='stack') | def function[set_flavor, parameter[self, node, flavor]]:
constant[Set a flavor to a given ironic node.
:param uuid: the ironic node UUID
:param flavor: the flavor name
]
variable[command] assign[=] call[constant[ironic node-update {uuid} add properties/capabilities=profile:{flavor},boot_option:local].format, parameter[]]
name[node].flavor assign[=] name[flavor]
call[name[self].add_environment_file, parameter[]]
call[name[self].run, parameter[name[command]]] | keyword[def] identifier[set_flavor] ( identifier[self] , identifier[node] , identifier[flavor] ):
literal[string]
identifier[command] =(
literal[string]
literal[string] ). identifier[format] (
identifier[uuid] = identifier[node] . identifier[uuid] , identifier[flavor] = identifier[flavor] )
identifier[node] . identifier[flavor] = identifier[flavor]
identifier[self] . identifier[add_environment_file] ( identifier[user] = literal[string] , identifier[filename] = literal[string] )
identifier[self] . identifier[run] ( identifier[command] , identifier[user] = literal[string] ) | def set_flavor(self, node, flavor):
"""Set a flavor to a given ironic node.
:param uuid: the ironic node UUID
:param flavor: the flavor name
"""
command = 'ironic node-update {uuid} add properties/capabilities=profile:{flavor},boot_option:local'.format(uuid=node.uuid, flavor=flavor)
node.flavor = flavor
self.add_environment_file(user='stack', filename='stackrc')
self.run(command, user='stack') |
def populate_extra_files():
"""
Creates a list of non-python data files to include in package distribution
"""
out = ['cauldron/settings.json']
for entry in glob.iglob('cauldron/resources/examples/**/*', recursive=True):
out.append(entry)
for entry in glob.iglob('cauldron/resources/templates/**/*', recursive=True):
out.append(entry)
for entry in glob.iglob('cauldron/resources/web/**/*', recursive=True):
out.append(entry)
return out | def function[populate_extra_files, parameter[]]:
constant[
Creates a list of non-python data files to include in package distribution
]
variable[out] assign[=] list[[<ast.Constant object at 0x7da1b1b6ab00>]]
for taget[name[entry]] in starred[call[name[glob].iglob, parameter[constant[cauldron/resources/examples/**/*]]]] begin[:]
call[name[out].append, parameter[name[entry]]]
for taget[name[entry]] in starred[call[name[glob].iglob, parameter[constant[cauldron/resources/templates/**/*]]]] begin[:]
call[name[out].append, parameter[name[entry]]]
for taget[name[entry]] in starred[call[name[glob].iglob, parameter[constant[cauldron/resources/web/**/*]]]] begin[:]
call[name[out].append, parameter[name[entry]]]
return[name[out]] | keyword[def] identifier[populate_extra_files] ():
literal[string]
identifier[out] =[ literal[string] ]
keyword[for] identifier[entry] keyword[in] identifier[glob] . identifier[iglob] ( literal[string] , identifier[recursive] = keyword[True] ):
identifier[out] . identifier[append] ( identifier[entry] )
keyword[for] identifier[entry] keyword[in] identifier[glob] . identifier[iglob] ( literal[string] , identifier[recursive] = keyword[True] ):
identifier[out] . identifier[append] ( identifier[entry] )
keyword[for] identifier[entry] keyword[in] identifier[glob] . identifier[iglob] ( literal[string] , identifier[recursive] = keyword[True] ):
identifier[out] . identifier[append] ( identifier[entry] )
keyword[return] identifier[out] | def populate_extra_files():
"""
Creates a list of non-python data files to include in package distribution
"""
out = ['cauldron/settings.json']
for entry in glob.iglob('cauldron/resources/examples/**/*', recursive=True):
out.append(entry) # depends on [control=['for'], data=['entry']]
for entry in glob.iglob('cauldron/resources/templates/**/*', recursive=True):
out.append(entry) # depends on [control=['for'], data=['entry']]
for entry in glob.iglob('cauldron/resources/web/**/*', recursive=True):
out.append(entry) # depends on [control=['for'], data=['entry']]
return out |
def previousElementSibling(self):
"""Finds the first closest previous sibling of the node which
is an element node. Note the handling of entities
references is different than in the W3C DOM element
traversal spec since we don't have back reference from
entities content to entities references. """
ret = libxml2mod.xmlPreviousElementSibling(self._o)
if ret is None:return None
__tmp = xmlNode(_obj=ret)
return __tmp | def function[previousElementSibling, parameter[self]]:
constant[Finds the first closest previous sibling of the node which
is an element node. Note the handling of entities
references is different than in the W3C DOM element
traversal spec since we don't have back reference from
entities content to entities references. ]
variable[ret] assign[=] call[name[libxml2mod].xmlPreviousElementSibling, parameter[name[self]._o]]
if compare[name[ret] is constant[None]] begin[:]
return[constant[None]]
variable[__tmp] assign[=] call[name[xmlNode], parameter[]]
return[name[__tmp]] | keyword[def] identifier[previousElementSibling] ( identifier[self] ):
literal[string]
identifier[ret] = identifier[libxml2mod] . identifier[xmlPreviousElementSibling] ( identifier[self] . identifier[_o] )
keyword[if] identifier[ret] keyword[is] keyword[None] : keyword[return] keyword[None]
identifier[__tmp] = identifier[xmlNode] ( identifier[_obj] = identifier[ret] )
keyword[return] identifier[__tmp] | def previousElementSibling(self):
"""Finds the first closest previous sibling of the node which
is an element node. Note the handling of entities
references is different than in the W3C DOM element
traversal spec since we don't have back reference from
entities content to entities references. """
ret = libxml2mod.xmlPreviousElementSibling(self._o)
if ret is None:
return None # depends on [control=['if'], data=[]]
__tmp = xmlNode(_obj=ret)
return __tmp |
def user_agent_detail(self, **kwargs):
"""Get the user agent detail.
Args:
**kwargs: Extra options to send to the server (e.g. sudo)
Raises:
GitlabAuthenticationError: If authentication is not correct
GitlabGetError: If the server cannot perform the request
"""
path = '%s/%s/user_agent_detail' % (self.manager.path, self.get_id())
return self.manager.gitlab.http_get(path, **kwargs) | def function[user_agent_detail, parameter[self]]:
constant[Get the user agent detail.
Args:
**kwargs: Extra options to send to the server (e.g. sudo)
Raises:
GitlabAuthenticationError: If authentication is not correct
GitlabGetError: If the server cannot perform the request
]
variable[path] assign[=] binary_operation[constant[%s/%s/user_agent_detail] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Attribute object at 0x7da2043441c0>, <ast.Call object at 0x7da204345180>]]]
return[call[name[self].manager.gitlab.http_get, parameter[name[path]]]] | keyword[def] identifier[user_agent_detail] ( identifier[self] ,** identifier[kwargs] ):
literal[string]
identifier[path] = literal[string] %( identifier[self] . identifier[manager] . identifier[path] , identifier[self] . identifier[get_id] ())
keyword[return] identifier[self] . identifier[manager] . identifier[gitlab] . identifier[http_get] ( identifier[path] ,** identifier[kwargs] ) | def user_agent_detail(self, **kwargs):
"""Get the user agent detail.
Args:
**kwargs: Extra options to send to the server (e.g. sudo)
Raises:
GitlabAuthenticationError: If authentication is not correct
GitlabGetError: If the server cannot perform the request
"""
path = '%s/%s/user_agent_detail' % (self.manager.path, self.get_id())
return self.manager.gitlab.http_get(path, **kwargs) |
def wrap_lons(lons, base, period):
"""
Wrap longitude values into the range between base and base+period.
"""
lons = lons.astype(np.float64)
return ((lons - base + period * 2) % period) + base | def function[wrap_lons, parameter[lons, base, period]]:
constant[
Wrap longitude values into the range between base and base+period.
]
variable[lons] assign[=] call[name[lons].astype, parameter[name[np].float64]]
return[binary_operation[binary_operation[binary_operation[binary_operation[name[lons] - name[base]] + binary_operation[name[period] * constant[2]]] <ast.Mod object at 0x7da2590d6920> name[period]] + name[base]]] | keyword[def] identifier[wrap_lons] ( identifier[lons] , identifier[base] , identifier[period] ):
literal[string]
identifier[lons] = identifier[lons] . identifier[astype] ( identifier[np] . identifier[float64] )
keyword[return] (( identifier[lons] - identifier[base] + identifier[period] * literal[int] )% identifier[period] )+ identifier[base] | def wrap_lons(lons, base, period):
"""
Wrap longitude values into the range between base and base+period.
"""
lons = lons.astype(np.float64)
return (lons - base + period * 2) % period + base |
def chat(self):
"""
Access the Chat Twilio Domain
:returns: Chat Twilio Domain
:rtype: twilio.rest.chat.Chat
"""
if self._chat is None:
from twilio.rest.chat import Chat
self._chat = Chat(self)
return self._chat | def function[chat, parameter[self]]:
constant[
Access the Chat Twilio Domain
:returns: Chat Twilio Domain
:rtype: twilio.rest.chat.Chat
]
if compare[name[self]._chat is constant[None]] begin[:]
from relative_module[twilio.rest.chat] import module[Chat]
name[self]._chat assign[=] call[name[Chat], parameter[name[self]]]
return[name[self]._chat] | keyword[def] identifier[chat] ( identifier[self] ):
literal[string]
keyword[if] identifier[self] . identifier[_chat] keyword[is] keyword[None] :
keyword[from] identifier[twilio] . identifier[rest] . identifier[chat] keyword[import] identifier[Chat]
identifier[self] . identifier[_chat] = identifier[Chat] ( identifier[self] )
keyword[return] identifier[self] . identifier[_chat] | def chat(self):
"""
Access the Chat Twilio Domain
:returns: Chat Twilio Domain
:rtype: twilio.rest.chat.Chat
"""
if self._chat is None:
from twilio.rest.chat import Chat
self._chat = Chat(self) # depends on [control=['if'], data=[]]
return self._chat |
def is_duplicate_edge(data, data_other):
"""
Check if two edge data dictionaries are the same based on OSM ID and
geometry.
Parameters
----------
data : dict
the first edge's data
data_other : dict
the second edge's data
Returns
-------
is_dupe : bool
"""
is_dupe = False
# if either edge's OSM ID contains multiple values (due to simplification), we want
# to compare as sets so they are order-invariant, otherwise uv does not match vu
osmid = set(data['osmid']) if isinstance(data['osmid'], list) else data['osmid']
osmid_other = set(data_other['osmid']) if isinstance(data_other['osmid'], list) else data_other['osmid']
if osmid == osmid_other:
# if they contain the same OSM ID or set of OSM IDs (due to simplification)
if ('geometry' in data) and ('geometry' in data_other):
# if both edges have a geometry attribute
if is_same_geometry(data['geometry'], data_other['geometry']):
# if their edge geometries have the same coordinates
is_dupe = True
elif ('geometry' in data) and ('geometry' in data_other):
# if neither edge has a geometry attribute
is_dupe = True
else:
# if one edge has geometry attribute but the other doesn't, keep it
pass
return is_dupe | def function[is_duplicate_edge, parameter[data, data_other]]:
constant[
Check if two edge data dictionaries are the same based on OSM ID and
geometry.
Parameters
----------
data : dict
the first edge's data
data_other : dict
the second edge's data
Returns
-------
is_dupe : bool
]
variable[is_dupe] assign[=] constant[False]
variable[osmid] assign[=] <ast.IfExp object at 0x7da1b1b7c7c0>
variable[osmid_other] assign[=] <ast.IfExp object at 0x7da1b1b7fa60>
if compare[name[osmid] equal[==] name[osmid_other]] begin[:]
if <ast.BoolOp object at 0x7da1b1b7cb80> begin[:]
if call[name[is_same_geometry], parameter[call[name[data]][constant[geometry]], call[name[data_other]][constant[geometry]]]] begin[:]
variable[is_dupe] assign[=] constant[True]
return[name[is_dupe]] | keyword[def] identifier[is_duplicate_edge] ( identifier[data] , identifier[data_other] ):
literal[string]
identifier[is_dupe] = keyword[False]
identifier[osmid] = identifier[set] ( identifier[data] [ literal[string] ]) keyword[if] identifier[isinstance] ( identifier[data] [ literal[string] ], identifier[list] ) keyword[else] identifier[data] [ literal[string] ]
identifier[osmid_other] = identifier[set] ( identifier[data_other] [ literal[string] ]) keyword[if] identifier[isinstance] ( identifier[data_other] [ literal[string] ], identifier[list] ) keyword[else] identifier[data_other] [ literal[string] ]
keyword[if] identifier[osmid] == identifier[osmid_other] :
keyword[if] ( literal[string] keyword[in] identifier[data] ) keyword[and] ( literal[string] keyword[in] identifier[data_other] ):
keyword[if] identifier[is_same_geometry] ( identifier[data] [ literal[string] ], identifier[data_other] [ literal[string] ]):
identifier[is_dupe] = keyword[True]
keyword[elif] ( literal[string] keyword[in] identifier[data] ) keyword[and] ( literal[string] keyword[in] identifier[data_other] ):
identifier[is_dupe] = keyword[True]
keyword[else] :
keyword[pass]
keyword[return] identifier[is_dupe] | def is_duplicate_edge(data, data_other):
"""
Check if two edge data dictionaries are the same based on OSM ID and
geometry.
Parameters
----------
data : dict
the first edge's data
data_other : dict
the second edge's data
Returns
-------
is_dupe : bool
"""
is_dupe = False
# if either edge's OSM ID contains multiple values (due to simplification), we want
# to compare as sets so they are order-invariant, otherwise uv does not match vu
osmid = set(data['osmid']) if isinstance(data['osmid'], list) else data['osmid']
osmid_other = set(data_other['osmid']) if isinstance(data_other['osmid'], list) else data_other['osmid']
if osmid == osmid_other:
# if they contain the same OSM ID or set of OSM IDs (due to simplification)
if 'geometry' in data and 'geometry' in data_other:
# if both edges have a geometry attribute
if is_same_geometry(data['geometry'], data_other['geometry']):
# if their edge geometries have the same coordinates
is_dupe = True # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
elif 'geometry' in data and 'geometry' in data_other:
# if neither edge has a geometry attribute
is_dupe = True # depends on [control=['if'], data=[]]
else:
# if one edge has geometry attribute but the other doesn't, keep it
pass # depends on [control=['if'], data=[]]
return is_dupe |
def getActiveJobsForClientInfo(self, clientInfo, fields=[]):
""" Fetch jobIDs for jobs in the table with optional fields given a
specific clientInfo """
# Form the sequence of field name strings that will go into the
# request
dbFields = [self._jobs.pubToDBNameDict[x] for x in fields]
dbFieldsStr = ','.join(['job_id'] + dbFields)
with ConnectionFactory.get() as conn:
query = 'SELECT %s FROM %s ' \
'WHERE client_info = %%s ' \
' AND status != %%s' % (dbFieldsStr, self.jobsTableName)
conn.cursor.execute(query, [clientInfo, self.STATUS_COMPLETED])
rows = conn.cursor.fetchall()
return rows | def function[getActiveJobsForClientInfo, parameter[self, clientInfo, fields]]:
constant[ Fetch jobIDs for jobs in the table with optional fields given a
specific clientInfo ]
variable[dbFields] assign[=] <ast.ListComp object at 0x7da20c6c42e0>
variable[dbFieldsStr] assign[=] call[constant[,].join, parameter[binary_operation[list[[<ast.Constant object at 0x7da20c6c4c10>]] + name[dbFields]]]]
with call[name[ConnectionFactory].get, parameter[]] begin[:]
variable[query] assign[=] binary_operation[constant[SELECT %s FROM %s WHERE client_info = %%s AND status != %%s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da18bc73430>, <ast.Attribute object at 0x7da18bc71510>]]]
call[name[conn].cursor.execute, parameter[name[query], list[[<ast.Name object at 0x7da18bc73850>, <ast.Attribute object at 0x7da18bc70fa0>]]]]
variable[rows] assign[=] call[name[conn].cursor.fetchall, parameter[]]
return[name[rows]] | keyword[def] identifier[getActiveJobsForClientInfo] ( identifier[self] , identifier[clientInfo] , identifier[fields] =[]):
literal[string]
identifier[dbFields] =[ identifier[self] . identifier[_jobs] . identifier[pubToDBNameDict] [ identifier[x] ] keyword[for] identifier[x] keyword[in] identifier[fields] ]
identifier[dbFieldsStr] = literal[string] . identifier[join] ([ literal[string] ]+ identifier[dbFields] )
keyword[with] identifier[ConnectionFactory] . identifier[get] () keyword[as] identifier[conn] :
identifier[query] = literal[string] literal[string] literal[string] %( identifier[dbFieldsStr] , identifier[self] . identifier[jobsTableName] )
identifier[conn] . identifier[cursor] . identifier[execute] ( identifier[query] ,[ identifier[clientInfo] , identifier[self] . identifier[STATUS_COMPLETED] ])
identifier[rows] = identifier[conn] . identifier[cursor] . identifier[fetchall] ()
keyword[return] identifier[rows] | def getActiveJobsForClientInfo(self, clientInfo, fields=[]):
""" Fetch jobIDs for jobs in the table with optional fields given a
specific clientInfo """
# Form the sequence of field name strings that will go into the
# request
dbFields = [self._jobs.pubToDBNameDict[x] for x in fields]
dbFieldsStr = ','.join(['job_id'] + dbFields)
with ConnectionFactory.get() as conn:
query = 'SELECT %s FROM %s WHERE client_info = %%s AND status != %%s' % (dbFieldsStr, self.jobsTableName)
conn.cursor.execute(query, [clientInfo, self.STATUS_COMPLETED])
rows = conn.cursor.fetchall() # depends on [control=['with'], data=['conn']]
return rows |
def get_oauth_data(self, code, client_id, client_secret, state):
''' Get Oauth data from HelloSign
Args:
code (str): Code returned by HelloSign for our callback url
client_id (str): Client id of the associated app
client_secret (str): Secret token of the associated app
Returns:
A HSAccessTokenAuth object
'''
request = self._get_request()
response = request.post(self.OAUTH_TOKEN_URL, {
"state": state,
"code": code,
"grant_type": "authorization_code",
"client_id": client_id,
"client_secret": client_secret
})
return HSAccessTokenAuth.from_response(response) | def function[get_oauth_data, parameter[self, code, client_id, client_secret, state]]:
constant[ Get Oauth data from HelloSign
Args:
code (str): Code returned by HelloSign for our callback url
client_id (str): Client id of the associated app
client_secret (str): Secret token of the associated app
Returns:
A HSAccessTokenAuth object
]
variable[request] assign[=] call[name[self]._get_request, parameter[]]
variable[response] assign[=] call[name[request].post, parameter[name[self].OAUTH_TOKEN_URL, dictionary[[<ast.Constant object at 0x7da1b0c17430>, <ast.Constant object at 0x7da1b0c15030>, <ast.Constant object at 0x7da1b0c15bd0>, <ast.Constant object at 0x7da1b0c17310>, <ast.Constant object at 0x7da1b0c15990>], [<ast.Name object at 0x7da1b0c16d10>, <ast.Name object at 0x7da1b0c16590>, <ast.Constant object at 0x7da1b0c148b0>, <ast.Name object at 0x7da1b0c14160>, <ast.Name object at 0x7da1b0c15a50>]]]]
return[call[name[HSAccessTokenAuth].from_response, parameter[name[response]]]] | keyword[def] identifier[get_oauth_data] ( identifier[self] , identifier[code] , identifier[client_id] , identifier[client_secret] , identifier[state] ):
literal[string]
identifier[request] = identifier[self] . identifier[_get_request] ()
identifier[response] = identifier[request] . identifier[post] ( identifier[self] . identifier[OAUTH_TOKEN_URL] ,{
literal[string] : identifier[state] ,
literal[string] : identifier[code] ,
literal[string] : literal[string] ,
literal[string] : identifier[client_id] ,
literal[string] : identifier[client_secret]
})
keyword[return] identifier[HSAccessTokenAuth] . identifier[from_response] ( identifier[response] ) | def get_oauth_data(self, code, client_id, client_secret, state):
""" Get Oauth data from HelloSign
Args:
code (str): Code returned by HelloSign for our callback url
client_id (str): Client id of the associated app
client_secret (str): Secret token of the associated app
Returns:
A HSAccessTokenAuth object
"""
request = self._get_request()
response = request.post(self.OAUTH_TOKEN_URL, {'state': state, 'code': code, 'grant_type': 'authorization_code', 'client_id': client_id, 'client_secret': client_secret})
return HSAccessTokenAuth.from_response(response) |
def _format_exception_only(self, etype, value):
"""Format the exception part of a traceback.
The arguments are the exception type and value such as given by
sys.exc_info()[:2]. The return value is a list of strings, each ending
in a newline. Normally, the list contains a single string; however,
for SyntaxError exceptions, it contains several lines that (when
printed) display detailed information about where the syntax error
occurred. The message indicating which exception occurred is the
always last string in the list.
Also lifted nearly verbatim from traceback.py
"""
have_filedata = False
Colors = self.Colors
list = []
stype = Colors.excName + etype.__name__ + Colors.Normal
if value is None:
# Not sure if this can still happen in Python 2.6 and above
list.append( str(stype) + '\n')
else:
if etype is SyntaxError:
have_filedata = True
#print 'filename is',filename # dbg
if not value.filename: value.filename = "<string>"
list.append('%s File %s"%s"%s, line %s%d%s\n' % \
(Colors.normalEm,
Colors.filenameEm, value.filename, Colors.normalEm,
Colors.linenoEm, value.lineno, Colors.Normal ))
if value.text is not None:
i = 0
while i < len(value.text) and value.text[i].isspace():
i += 1
list.append('%s %s%s\n' % (Colors.line,
value.text.strip(),
Colors.Normal))
if value.offset is not None:
s = ' '
for c in value.text[i:value.offset-1]:
if c.isspace():
s += c
else:
s += ' '
list.append('%s%s^%s\n' % (Colors.caret, s,
Colors.Normal) )
try:
s = value.msg
except Exception:
s = self._some_str(value)
if s:
list.append('%s%s:%s %s\n' % (str(stype), Colors.excName,
Colors.Normal, s))
else:
list.append('%s\n' % str(stype))
# sync with user hooks
if have_filedata:
ipinst = ipapi.get()
if ipinst is not None:
ipinst.hooks.synchronize_with_editor(value.filename, value.lineno, 0)
return list | def function[_format_exception_only, parameter[self, etype, value]]:
constant[Format the exception part of a traceback.
The arguments are the exception type and value such as given by
sys.exc_info()[:2]. The return value is a list of strings, each ending
in a newline. Normally, the list contains a single string; however,
for SyntaxError exceptions, it contains several lines that (when
printed) display detailed information about where the syntax error
occurred. The message indicating which exception occurred is the
always last string in the list.
Also lifted nearly verbatim from traceback.py
]
variable[have_filedata] assign[=] constant[False]
variable[Colors] assign[=] name[self].Colors
variable[list] assign[=] list[[]]
variable[stype] assign[=] binary_operation[binary_operation[name[Colors].excName + name[etype].__name__] + name[Colors].Normal]
if compare[name[value] is constant[None]] begin[:]
call[name[list].append, parameter[binary_operation[call[name[str], parameter[name[stype]]] + constant[
]]]]
if name[have_filedata] begin[:]
variable[ipinst] assign[=] call[name[ipapi].get, parameter[]]
if compare[name[ipinst] is_not constant[None]] begin[:]
call[name[ipinst].hooks.synchronize_with_editor, parameter[name[value].filename, name[value].lineno, constant[0]]]
return[name[list]] | keyword[def] identifier[_format_exception_only] ( identifier[self] , identifier[etype] , identifier[value] ):
literal[string]
identifier[have_filedata] = keyword[False]
identifier[Colors] = identifier[self] . identifier[Colors]
identifier[list] =[]
identifier[stype] = identifier[Colors] . identifier[excName] + identifier[etype] . identifier[__name__] + identifier[Colors] . identifier[Normal]
keyword[if] identifier[value] keyword[is] keyword[None] :
identifier[list] . identifier[append] ( identifier[str] ( identifier[stype] )+ literal[string] )
keyword[else] :
keyword[if] identifier[etype] keyword[is] identifier[SyntaxError] :
identifier[have_filedata] = keyword[True]
keyword[if] keyword[not] identifier[value] . identifier[filename] : identifier[value] . identifier[filename] = literal[string]
identifier[list] . identifier[append] ( literal[string] %( identifier[Colors] . identifier[normalEm] ,
identifier[Colors] . identifier[filenameEm] , identifier[value] . identifier[filename] , identifier[Colors] . identifier[normalEm] ,
identifier[Colors] . identifier[linenoEm] , identifier[value] . identifier[lineno] , identifier[Colors] . identifier[Normal] ))
keyword[if] identifier[value] . identifier[text] keyword[is] keyword[not] keyword[None] :
identifier[i] = literal[int]
keyword[while] identifier[i] < identifier[len] ( identifier[value] . identifier[text] ) keyword[and] identifier[value] . identifier[text] [ identifier[i] ]. identifier[isspace] ():
identifier[i] += literal[int]
identifier[list] . identifier[append] ( literal[string] %( identifier[Colors] . identifier[line] ,
identifier[value] . identifier[text] . identifier[strip] (),
identifier[Colors] . identifier[Normal] ))
keyword[if] identifier[value] . identifier[offset] keyword[is] keyword[not] keyword[None] :
identifier[s] = literal[string]
keyword[for] identifier[c] keyword[in] identifier[value] . identifier[text] [ identifier[i] : identifier[value] . identifier[offset] - literal[int] ]:
keyword[if] identifier[c] . identifier[isspace] ():
identifier[s] += identifier[c]
keyword[else] :
identifier[s] += literal[string]
identifier[list] . identifier[append] ( literal[string] %( identifier[Colors] . identifier[caret] , identifier[s] ,
identifier[Colors] . identifier[Normal] ))
keyword[try] :
identifier[s] = identifier[value] . identifier[msg]
keyword[except] identifier[Exception] :
identifier[s] = identifier[self] . identifier[_some_str] ( identifier[value] )
keyword[if] identifier[s] :
identifier[list] . identifier[append] ( literal[string] %( identifier[str] ( identifier[stype] ), identifier[Colors] . identifier[excName] ,
identifier[Colors] . identifier[Normal] , identifier[s] ))
keyword[else] :
identifier[list] . identifier[append] ( literal[string] % identifier[str] ( identifier[stype] ))
keyword[if] identifier[have_filedata] :
identifier[ipinst] = identifier[ipapi] . identifier[get] ()
keyword[if] identifier[ipinst] keyword[is] keyword[not] keyword[None] :
identifier[ipinst] . identifier[hooks] . identifier[synchronize_with_editor] ( identifier[value] . identifier[filename] , identifier[value] . identifier[lineno] , literal[int] )
keyword[return] identifier[list] | def _format_exception_only(self, etype, value):
"""Format the exception part of a traceback.
The arguments are the exception type and value such as given by
sys.exc_info()[:2]. The return value is a list of strings, each ending
in a newline. Normally, the list contains a single string; however,
for SyntaxError exceptions, it contains several lines that (when
printed) display detailed information about where the syntax error
occurred. The message indicating which exception occurred is the
always last string in the list.
Also lifted nearly verbatim from traceback.py
"""
have_filedata = False
Colors = self.Colors
list = []
stype = Colors.excName + etype.__name__ + Colors.Normal
if value is None:
# Not sure if this can still happen in Python 2.6 and above
list.append(str(stype) + '\n') # depends on [control=['if'], data=[]]
else:
if etype is SyntaxError:
have_filedata = True
#print 'filename is',filename # dbg
if not value.filename:
value.filename = '<string>' # depends on [control=['if'], data=[]]
list.append('%s File %s"%s"%s, line %s%d%s\n' % (Colors.normalEm, Colors.filenameEm, value.filename, Colors.normalEm, Colors.linenoEm, value.lineno, Colors.Normal))
if value.text is not None:
i = 0
while i < len(value.text) and value.text[i].isspace():
i += 1 # depends on [control=['while'], data=[]]
list.append('%s %s%s\n' % (Colors.line, value.text.strip(), Colors.Normal))
if value.offset is not None:
s = ' '
for c in value.text[i:value.offset - 1]:
if c.isspace():
s += c # depends on [control=['if'], data=[]]
else:
s += ' ' # depends on [control=['for'], data=['c']]
list.append('%s%s^%s\n' % (Colors.caret, s, Colors.Normal)) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
try:
s = value.msg # depends on [control=['try'], data=[]]
except Exception:
s = self._some_str(value) # depends on [control=['except'], data=[]]
if s:
list.append('%s%s:%s %s\n' % (str(stype), Colors.excName, Colors.Normal, s)) # depends on [control=['if'], data=[]]
else:
list.append('%s\n' % str(stype))
# sync with user hooks
if have_filedata:
ipinst = ipapi.get()
if ipinst is not None:
ipinst.hooks.synchronize_with_editor(value.filename, value.lineno, 0) # depends on [control=['if'], data=['ipinst']] # depends on [control=['if'], data=[]]
return list |
def std(self, ddof=1, *args, **kwargs):
"""
Compute standard deviation of groups, excluding missing values.
For multiple groupings, the result index will be a MultiIndex.
Parameters
----------
ddof : integer, default 1
degrees of freedom
"""
# TODO: implement at Cython level?
nv.validate_groupby_func('std', args, kwargs)
return np.sqrt(self.var(ddof=ddof, **kwargs)) | def function[std, parameter[self, ddof]]:
constant[
Compute standard deviation of groups, excluding missing values.
For multiple groupings, the result index will be a MultiIndex.
Parameters
----------
ddof : integer, default 1
degrees of freedom
]
call[name[nv].validate_groupby_func, parameter[constant[std], name[args], name[kwargs]]]
return[call[name[np].sqrt, parameter[call[name[self].var, parameter[]]]]] | keyword[def] identifier[std] ( identifier[self] , identifier[ddof] = literal[int] ,* identifier[args] ,** identifier[kwargs] ):
literal[string]
identifier[nv] . identifier[validate_groupby_func] ( literal[string] , identifier[args] , identifier[kwargs] )
keyword[return] identifier[np] . identifier[sqrt] ( identifier[self] . identifier[var] ( identifier[ddof] = identifier[ddof] ,** identifier[kwargs] )) | def std(self, ddof=1, *args, **kwargs):
"""
Compute standard deviation of groups, excluding missing values.
For multiple groupings, the result index will be a MultiIndex.
Parameters
----------
ddof : integer, default 1
degrees of freedom
"""
# TODO: implement at Cython level?
nv.validate_groupby_func('std', args, kwargs)
return np.sqrt(self.var(ddof=ddof, **kwargs)) |
def saveFiles(fileName1, fileName2, songs, artist, album, trackNum):
"""Save songs to files"""
songs[0].export(fileName1, format=detectFormat(fileName1), tags={'artist': artist, 'album': album, 'track': trackNum})
songs[1].export(fileName2, format=detectFormat(fileName2), tags={'artist': artist, 'album': album, 'track': str(int(trackNum) + 1)}) | def function[saveFiles, parameter[fileName1, fileName2, songs, artist, album, trackNum]]:
constant[Save songs to files]
call[call[name[songs]][constant[0]].export, parameter[name[fileName1]]]
call[call[name[songs]][constant[1]].export, parameter[name[fileName2]]] | keyword[def] identifier[saveFiles] ( identifier[fileName1] , identifier[fileName2] , identifier[songs] , identifier[artist] , identifier[album] , identifier[trackNum] ):
literal[string]
identifier[songs] [ literal[int] ]. identifier[export] ( identifier[fileName1] , identifier[format] = identifier[detectFormat] ( identifier[fileName1] ), identifier[tags] ={ literal[string] : identifier[artist] , literal[string] : identifier[album] , literal[string] : identifier[trackNum] })
identifier[songs] [ literal[int] ]. identifier[export] ( identifier[fileName2] , identifier[format] = identifier[detectFormat] ( identifier[fileName2] ), identifier[tags] ={ literal[string] : identifier[artist] , literal[string] : identifier[album] , literal[string] : identifier[str] ( identifier[int] ( identifier[trackNum] )+ literal[int] )}) | def saveFiles(fileName1, fileName2, songs, artist, album, trackNum):
"""Save songs to files"""
songs[0].export(fileName1, format=detectFormat(fileName1), tags={'artist': artist, 'album': album, 'track': trackNum})
songs[1].export(fileName2, format=detectFormat(fileName2), tags={'artist': artist, 'album': album, 'track': str(int(trackNum) + 1)}) |
def _parse_type(value, type_func):
"""
Attempt to cast *value* into *type_func*, returning *default* if it fails.
"""
default = type_func(0)
if value is None:
return default
try:
return type_func(value)
except ValueError:
return default | def function[_parse_type, parameter[value, type_func]]:
constant[
Attempt to cast *value* into *type_func*, returning *default* if it fails.
]
variable[default] assign[=] call[name[type_func], parameter[constant[0]]]
if compare[name[value] is constant[None]] begin[:]
return[name[default]]
<ast.Try object at 0x7da20c993130> | keyword[def] identifier[_parse_type] ( identifier[value] , identifier[type_func] ):
literal[string]
identifier[default] = identifier[type_func] ( literal[int] )
keyword[if] identifier[value] keyword[is] keyword[None] :
keyword[return] identifier[default]
keyword[try] :
keyword[return] identifier[type_func] ( identifier[value] )
keyword[except] identifier[ValueError] :
keyword[return] identifier[default] | def _parse_type(value, type_func):
"""
Attempt to cast *value* into *type_func*, returning *default* if it fails.
"""
default = type_func(0)
if value is None:
return default # depends on [control=['if'], data=[]]
try:
return type_func(value) # depends on [control=['try'], data=[]]
except ValueError:
return default # depends on [control=['except'], data=[]] |
def keyring_auth(username=None, region=None, authenticate=True):
"""
Use the password stored within the keyring to authenticate. If a username
is supplied, that name is used; otherwise, the keyring_username value
from the config file is used.
If there is no username defined, or if the keyring module is not installed,
or there is no password set for the given username, the appropriate errors
will be raised.
If the region is passed, it will authenticate against the proper endpoint
for that region, and set the default region for connections.
"""
if not keyring:
# Module not installed
raise exc.KeyringModuleNotInstalled("The 'keyring' Python module is "
"not installed on this system.")
if username is None:
username = settings.get("keyring_username")
if not username:
raise exc.KeyringUsernameMissing("No username specified for keyring "
"authentication.")
password = keyring.get_password("pyrax", username)
if password is None:
raise exc.KeyringPasswordNotFound("No password was found for the "
"username '%s'." % username)
set_credentials(username, password, region=region,
authenticate=authenticate) | def function[keyring_auth, parameter[username, region, authenticate]]:
constant[
Use the password stored within the keyring to authenticate. If a username
is supplied, that name is used; otherwise, the keyring_username value
from the config file is used.
If there is no username defined, or if the keyring module is not installed,
or there is no password set for the given username, the appropriate errors
will be raised.
If the region is passed, it will authenticate against the proper endpoint
for that region, and set the default region for connections.
]
if <ast.UnaryOp object at 0x7da1b0528460> begin[:]
<ast.Raise object at 0x7da1b0528550>
if compare[name[username] is constant[None]] begin[:]
variable[username] assign[=] call[name[settings].get, parameter[constant[keyring_username]]]
if <ast.UnaryOp object at 0x7da1b052b7f0> begin[:]
<ast.Raise object at 0x7da1b052aa40>
variable[password] assign[=] call[name[keyring].get_password, parameter[constant[pyrax], name[username]]]
if compare[name[password] is constant[None]] begin[:]
<ast.Raise object at 0x7da1b052a620>
call[name[set_credentials], parameter[name[username], name[password]]] | keyword[def] identifier[keyring_auth] ( identifier[username] = keyword[None] , identifier[region] = keyword[None] , identifier[authenticate] = keyword[True] ):
literal[string]
keyword[if] keyword[not] identifier[keyring] :
keyword[raise] identifier[exc] . identifier[KeyringModuleNotInstalled] ( literal[string]
literal[string] )
keyword[if] identifier[username] keyword[is] keyword[None] :
identifier[username] = identifier[settings] . identifier[get] ( literal[string] )
keyword[if] keyword[not] identifier[username] :
keyword[raise] identifier[exc] . identifier[KeyringUsernameMissing] ( literal[string]
literal[string] )
identifier[password] = identifier[keyring] . identifier[get_password] ( literal[string] , identifier[username] )
keyword[if] identifier[password] keyword[is] keyword[None] :
keyword[raise] identifier[exc] . identifier[KeyringPasswordNotFound] ( literal[string]
literal[string] % identifier[username] )
identifier[set_credentials] ( identifier[username] , identifier[password] , identifier[region] = identifier[region] ,
identifier[authenticate] = identifier[authenticate] ) | def keyring_auth(username=None, region=None, authenticate=True):
"""
Use the password stored within the keyring to authenticate. If a username
is supplied, that name is used; otherwise, the keyring_username value
from the config file is used.
If there is no username defined, or if the keyring module is not installed,
or there is no password set for the given username, the appropriate errors
will be raised.
If the region is passed, it will authenticate against the proper endpoint
for that region, and set the default region for connections.
"""
if not keyring:
# Module not installed
raise exc.KeyringModuleNotInstalled("The 'keyring' Python module is not installed on this system.") # depends on [control=['if'], data=[]]
if username is None:
username = settings.get('keyring_username') # depends on [control=['if'], data=['username']]
if not username:
raise exc.KeyringUsernameMissing('No username specified for keyring authentication.') # depends on [control=['if'], data=[]]
password = keyring.get_password('pyrax', username)
if password is None:
raise exc.KeyringPasswordNotFound("No password was found for the username '%s'." % username) # depends on [control=['if'], data=[]]
set_credentials(username, password, region=region, authenticate=authenticate) |
def check_w3_errors (url_data, xml, w3type):
"""Add warnings for W3C HTML or CSS errors in xml format.
w3type is either "W3C HTML" or "W3C CSS"."""
dom = parseString(xml)
for error in dom.getElementsByTagName('m:error'):
warnmsg = _("%(w3type)s validation error at line %(line)s col %(column)s: %(msg)s")
attrs = {
"w3type": w3type,
"line": getXmlText(error, "m:line"),
"column": getXmlText(error, "m:col"),
"msg": getXmlText(error, "m:message"),
}
url_data.add_warning(warnmsg % attrs) | def function[check_w3_errors, parameter[url_data, xml, w3type]]:
constant[Add warnings for W3C HTML or CSS errors in xml format.
w3type is either "W3C HTML" or "W3C CSS".]
variable[dom] assign[=] call[name[parseString], parameter[name[xml]]]
for taget[name[error]] in starred[call[name[dom].getElementsByTagName, parameter[constant[m:error]]]] begin[:]
variable[warnmsg] assign[=] call[name[_], parameter[constant[%(w3type)s validation error at line %(line)s col %(column)s: %(msg)s]]]
variable[attrs] assign[=] dictionary[[<ast.Constant object at 0x7da18eb57190>, <ast.Constant object at 0x7da18eb56200>, <ast.Constant object at 0x7da18eb54f70>, <ast.Constant object at 0x7da18eb56da0>], [<ast.Name object at 0x7da18eb57070>, <ast.Call object at 0x7da18eb573d0>, <ast.Call object at 0x7da18eb56590>, <ast.Call object at 0x7da18eb560e0>]]
call[name[url_data].add_warning, parameter[binary_operation[name[warnmsg] <ast.Mod object at 0x7da2590d6920> name[attrs]]]] | keyword[def] identifier[check_w3_errors] ( identifier[url_data] , identifier[xml] , identifier[w3type] ):
literal[string]
identifier[dom] = identifier[parseString] ( identifier[xml] )
keyword[for] identifier[error] keyword[in] identifier[dom] . identifier[getElementsByTagName] ( literal[string] ):
identifier[warnmsg] = identifier[_] ( literal[string] )
identifier[attrs] ={
literal[string] : identifier[w3type] ,
literal[string] : identifier[getXmlText] ( identifier[error] , literal[string] ),
literal[string] : identifier[getXmlText] ( identifier[error] , literal[string] ),
literal[string] : identifier[getXmlText] ( identifier[error] , literal[string] ),
}
identifier[url_data] . identifier[add_warning] ( identifier[warnmsg] % identifier[attrs] ) | def check_w3_errors(url_data, xml, w3type):
"""Add warnings for W3C HTML or CSS errors in xml format.
w3type is either "W3C HTML" or "W3C CSS"."""
dom = parseString(xml)
for error in dom.getElementsByTagName('m:error'):
warnmsg = _('%(w3type)s validation error at line %(line)s col %(column)s: %(msg)s')
attrs = {'w3type': w3type, 'line': getXmlText(error, 'm:line'), 'column': getXmlText(error, 'm:col'), 'msg': getXmlText(error, 'm:message')}
url_data.add_warning(warnmsg % attrs) # depends on [control=['for'], data=['error']] |
def deproject(self, depth_image):
"""Deprojects a DepthImage into a PointCloud.
Parameters
----------
depth_image : :obj:`DepthImage`
The 2D depth image to projet into a point cloud.
Returns
-------
:obj:`autolab_core.PointCloud`
A 3D point cloud created from the depth image.
Raises
------
ValueError
If depth_image is not a valid DepthImage in the same reference frame
as the camera.
"""
# check valid input
if not isinstance(depth_image, DepthImage):
raise ValueError('Must provide DepthImage object for projection')
if depth_image.frame != self._frame:
raise ValueError('Cannot deproject points in frame %s from camera with frame %s' %(depth_image.frame, self._frame))
# create homogeneous pixels
row_indices = np.arange(depth_image.height)
col_indices = np.arange(depth_image.width)
pixel_grid = np.meshgrid(col_indices, row_indices)
pixels = np.c_[pixel_grid[0].flatten(), pixel_grid[1].flatten()].T
depth_data = depth_image.data.flatten()
pixels_homog = np.r_[pixels, depth_data.reshape(1, depth_data.shape[0])]
# deproject
points_3d = np.linalg.inv(self.S).dot(pixels_homog - np.tile(self.t.reshape(3,1), [1, pixels_homog.shape[1]]))
return PointCloud(data=points_3d, frame=self._frame) | def function[deproject, parameter[self, depth_image]]:
constant[Deprojects a DepthImage into a PointCloud.
Parameters
----------
depth_image : :obj:`DepthImage`
The 2D depth image to projet into a point cloud.
Returns
-------
:obj:`autolab_core.PointCloud`
A 3D point cloud created from the depth image.
Raises
------
ValueError
If depth_image is not a valid DepthImage in the same reference frame
as the camera.
]
if <ast.UnaryOp object at 0x7da204961e40> begin[:]
<ast.Raise object at 0x7da204963c70>
if compare[name[depth_image].frame not_equal[!=] name[self]._frame] begin[:]
<ast.Raise object at 0x7da204960e50>
variable[row_indices] assign[=] call[name[np].arange, parameter[name[depth_image].height]]
variable[col_indices] assign[=] call[name[np].arange, parameter[name[depth_image].width]]
variable[pixel_grid] assign[=] call[name[np].meshgrid, parameter[name[col_indices], name[row_indices]]]
variable[pixels] assign[=] call[name[np].c_][tuple[[<ast.Call object at 0x7da18ede5330>, <ast.Call object at 0x7da18ede4760>]]].T
variable[depth_data] assign[=] call[name[depth_image].data.flatten, parameter[]]
variable[pixels_homog] assign[=] call[name[np].r_][tuple[[<ast.Name object at 0x7da18ede6740>, <ast.Call object at 0x7da18ede7eb0>]]]
variable[points_3d] assign[=] call[call[name[np].linalg.inv, parameter[name[self].S]].dot, parameter[binary_operation[name[pixels_homog] - call[name[np].tile, parameter[call[name[self].t.reshape, parameter[constant[3], constant[1]]], list[[<ast.Constant object at 0x7da18ede4ca0>, <ast.Subscript object at 0x7da18ede7460>]]]]]]]
return[call[name[PointCloud], parameter[]]] | keyword[def] identifier[deproject] ( identifier[self] , identifier[depth_image] ):
literal[string]
keyword[if] keyword[not] identifier[isinstance] ( identifier[depth_image] , identifier[DepthImage] ):
keyword[raise] identifier[ValueError] ( literal[string] )
keyword[if] identifier[depth_image] . identifier[frame] != identifier[self] . identifier[_frame] :
keyword[raise] identifier[ValueError] ( literal[string] %( identifier[depth_image] . identifier[frame] , identifier[self] . identifier[_frame] ))
identifier[row_indices] = identifier[np] . identifier[arange] ( identifier[depth_image] . identifier[height] )
identifier[col_indices] = identifier[np] . identifier[arange] ( identifier[depth_image] . identifier[width] )
identifier[pixel_grid] = identifier[np] . identifier[meshgrid] ( identifier[col_indices] , identifier[row_indices] )
identifier[pixels] = identifier[np] . identifier[c_] [ identifier[pixel_grid] [ literal[int] ]. identifier[flatten] (), identifier[pixel_grid] [ literal[int] ]. identifier[flatten] ()]. identifier[T]
identifier[depth_data] = identifier[depth_image] . identifier[data] . identifier[flatten] ()
identifier[pixels_homog] = identifier[np] . identifier[r_] [ identifier[pixels] , identifier[depth_data] . identifier[reshape] ( literal[int] , identifier[depth_data] . identifier[shape] [ literal[int] ])]
identifier[points_3d] = identifier[np] . identifier[linalg] . identifier[inv] ( identifier[self] . identifier[S] ). identifier[dot] ( identifier[pixels_homog] - identifier[np] . identifier[tile] ( identifier[self] . identifier[t] . identifier[reshape] ( literal[int] , literal[int] ),[ literal[int] , identifier[pixels_homog] . identifier[shape] [ literal[int] ]]))
keyword[return] identifier[PointCloud] ( identifier[data] = identifier[points_3d] , identifier[frame] = identifier[self] . identifier[_frame] ) | def deproject(self, depth_image):
"""Deprojects a DepthImage into a PointCloud.
Parameters
----------
depth_image : :obj:`DepthImage`
The 2D depth image to projet into a point cloud.
Returns
-------
:obj:`autolab_core.PointCloud`
A 3D point cloud created from the depth image.
Raises
------
ValueError
If depth_image is not a valid DepthImage in the same reference frame
as the camera.
"""
# check valid input
if not isinstance(depth_image, DepthImage):
raise ValueError('Must provide DepthImage object for projection') # depends on [control=['if'], data=[]]
if depth_image.frame != self._frame:
raise ValueError('Cannot deproject points in frame %s from camera with frame %s' % (depth_image.frame, self._frame)) # depends on [control=['if'], data=[]]
# create homogeneous pixels
row_indices = np.arange(depth_image.height)
col_indices = np.arange(depth_image.width)
pixel_grid = np.meshgrid(col_indices, row_indices)
pixels = np.c_[pixel_grid[0].flatten(), pixel_grid[1].flatten()].T
depth_data = depth_image.data.flatten()
pixels_homog = np.r_[pixels, depth_data.reshape(1, depth_data.shape[0])]
# deproject
points_3d = np.linalg.inv(self.S).dot(pixels_homog - np.tile(self.t.reshape(3, 1), [1, pixels_homog.shape[1]]))
return PointCloud(data=points_3d, frame=self._frame) |
def _load_ssh_auth_post_yosemite(mac_username):
"""Starting with Yosemite, launchd was rearchitected and now only one
launchd process runs for all users. This allows us to much more easily
impersonate a user through launchd and extract the environment
variables from their running processes."""
user_id = subprocess.check_output(['id', '-u', mac_username])
ssh_auth_sock = subprocess.check_output(['launchctl', 'asuser', user_id, 'launchctl', 'getenv', 'SSH_AUTH_SOCK']).rstrip()
_set_ssh_auth_sock(ssh_auth_sock) | def function[_load_ssh_auth_post_yosemite, parameter[mac_username]]:
constant[Starting with Yosemite, launchd was rearchitected and now only one
launchd process runs for all users. This allows us to much more easily
impersonate a user through launchd and extract the environment
variables from their running processes.]
variable[user_id] assign[=] call[name[subprocess].check_output, parameter[list[[<ast.Constant object at 0x7da20e961c30>, <ast.Constant object at 0x7da20e963070>, <ast.Name object at 0x7da20e962050>]]]]
variable[ssh_auth_sock] assign[=] call[call[name[subprocess].check_output, parameter[list[[<ast.Constant object at 0x7da20e960be0>, <ast.Constant object at 0x7da20e9629b0>, <ast.Name object at 0x7da20e962ef0>, <ast.Constant object at 0x7da20e963b50>, <ast.Constant object at 0x7da20e961cc0>, <ast.Constant object at 0x7da20e9631f0>]]]].rstrip, parameter[]]
call[name[_set_ssh_auth_sock], parameter[name[ssh_auth_sock]]] | keyword[def] identifier[_load_ssh_auth_post_yosemite] ( identifier[mac_username] ):
literal[string]
identifier[user_id] = identifier[subprocess] . identifier[check_output] ([ literal[string] , literal[string] , identifier[mac_username] ])
identifier[ssh_auth_sock] = identifier[subprocess] . identifier[check_output] ([ literal[string] , literal[string] , identifier[user_id] , literal[string] , literal[string] , literal[string] ]). identifier[rstrip] ()
identifier[_set_ssh_auth_sock] ( identifier[ssh_auth_sock] ) | def _load_ssh_auth_post_yosemite(mac_username):
"""Starting with Yosemite, launchd was rearchitected and now only one
launchd process runs for all users. This allows us to much more easily
impersonate a user through launchd and extract the environment
variables from their running processes."""
user_id = subprocess.check_output(['id', '-u', mac_username])
ssh_auth_sock = subprocess.check_output(['launchctl', 'asuser', user_id, 'launchctl', 'getenv', 'SSH_AUTH_SOCK']).rstrip()
_set_ssh_auth_sock(ssh_auth_sock) |
def domains(self):
"""The domains for this app."""
return self._h._get_resources(
resource=('apps', self.name, 'domains'),
obj=Domain, app=self
) | def function[domains, parameter[self]]:
constant[The domains for this app.]
return[call[name[self]._h._get_resources, parameter[]]] | keyword[def] identifier[domains] ( identifier[self] ):
literal[string]
keyword[return] identifier[self] . identifier[_h] . identifier[_get_resources] (
identifier[resource] =( literal[string] , identifier[self] . identifier[name] , literal[string] ),
identifier[obj] = identifier[Domain] , identifier[app] = identifier[self]
) | def domains(self):
"""The domains for this app."""
return self._h._get_resources(resource=('apps', self.name, 'domains'), obj=Domain, app=self) |
def data_cutout_ma(self):
"""
A 2D `~numpy.ma.MaskedArray` cutout from the data.
The mask is `True` for pixels outside of the source segment
(labeled region of interest), masked pixels from the ``mask``
input, or any non-finite ``data`` values (e.g. NaN or inf).
"""
return np.ma.masked_array(self._data[self._slice],
mask=self._total_mask) | def function[data_cutout_ma, parameter[self]]:
constant[
A 2D `~numpy.ma.MaskedArray` cutout from the data.
The mask is `True` for pixels outside of the source segment
(labeled region of interest), masked pixels from the ``mask``
input, or any non-finite ``data`` values (e.g. NaN or inf).
]
return[call[name[np].ma.masked_array, parameter[call[name[self]._data][name[self]._slice]]]] | keyword[def] identifier[data_cutout_ma] ( identifier[self] ):
literal[string]
keyword[return] identifier[np] . identifier[ma] . identifier[masked_array] ( identifier[self] . identifier[_data] [ identifier[self] . identifier[_slice] ],
identifier[mask] = identifier[self] . identifier[_total_mask] ) | def data_cutout_ma(self):
"""
A 2D `~numpy.ma.MaskedArray` cutout from the data.
The mask is `True` for pixels outside of the source segment
(labeled region of interest), masked pixels from the ``mask``
input, or any non-finite ``data`` values (e.g. NaN or inf).
"""
return np.ma.masked_array(self._data[self._slice], mask=self._total_mask) |
def _convert_data_block_and_headers_to_df(self, stream):
"""
stream : Streamlike object
A Streamlike object (nominally StringIO) containing the data to be
extracted
ch : dict
Column headers to use for the output pd.DataFrame
Returns
-------
:obj:`pd.DataFrame`
Dataframe with processed datablock
"""
df = pd.read_csv(
stream,
skip_blank_lines=True,
delim_whitespace=True,
header=None,
index_col=0,
)
if isinstance(df.index, pd.core.indexes.numeric.Float64Index):
df.index = df.index.to_series().round(3)
# reset the columns to be 0..n instead of starting at 1
df.columns = list(range(len(df.columns)))
return df | def function[_convert_data_block_and_headers_to_df, parameter[self, stream]]:
constant[
stream : Streamlike object
A Streamlike object (nominally StringIO) containing the data to be
extracted
ch : dict
Column headers to use for the output pd.DataFrame
Returns
-------
:obj:`pd.DataFrame`
Dataframe with processed datablock
]
variable[df] assign[=] call[name[pd].read_csv, parameter[name[stream]]]
if call[name[isinstance], parameter[name[df].index, name[pd].core.indexes.numeric.Float64Index]] begin[:]
name[df].index assign[=] call[call[name[df].index.to_series, parameter[]].round, parameter[constant[3]]]
name[df].columns assign[=] call[name[list], parameter[call[name[range], parameter[call[name[len], parameter[name[df].columns]]]]]]
return[name[df]] | keyword[def] identifier[_convert_data_block_and_headers_to_df] ( identifier[self] , identifier[stream] ):
literal[string]
identifier[df] = identifier[pd] . identifier[read_csv] (
identifier[stream] ,
identifier[skip_blank_lines] = keyword[True] ,
identifier[delim_whitespace] = keyword[True] ,
identifier[header] = keyword[None] ,
identifier[index_col] = literal[int] ,
)
keyword[if] identifier[isinstance] ( identifier[df] . identifier[index] , identifier[pd] . identifier[core] . identifier[indexes] . identifier[numeric] . identifier[Float64Index] ):
identifier[df] . identifier[index] = identifier[df] . identifier[index] . identifier[to_series] (). identifier[round] ( literal[int] )
identifier[df] . identifier[columns] = identifier[list] ( identifier[range] ( identifier[len] ( identifier[df] . identifier[columns] )))
keyword[return] identifier[df] | def _convert_data_block_and_headers_to_df(self, stream):
"""
stream : Streamlike object
A Streamlike object (nominally StringIO) containing the data to be
extracted
ch : dict
Column headers to use for the output pd.DataFrame
Returns
-------
:obj:`pd.DataFrame`
Dataframe with processed datablock
"""
df = pd.read_csv(stream, skip_blank_lines=True, delim_whitespace=True, header=None, index_col=0)
if isinstance(df.index, pd.core.indexes.numeric.Float64Index):
df.index = df.index.to_series().round(3) # depends on [control=['if'], data=[]]
# reset the columns to be 0..n instead of starting at 1
df.columns = list(range(len(df.columns)))
return df |
def create(
self, resource_group_name, node_name, session, user_name=None, password=None, retention_period=None, credential_data_format=None, encryption_certificate_thumbprint=None, custom_headers=None, raw=False, polling=True, **operation_config):
"""Creates a session for a node.
:param resource_group_name: The resource group name uniquely
identifies the resource group within the user subscriptionId.
:type resource_group_name: str
:param node_name: The node name (256 characters maximum).
:type node_name: str
:param session: The sessionId from the user.
:type session: str
:param user_name: Encrypted User name to be used to connect to node.
:type user_name: str
:param password: Encrypted Password associated with user name.
:type password: str
:param retention_period: Session retention period. Possible values
include: 'Session', 'Persistent'
:type retention_period: str or
~azure.mgmt.servermanager.models.RetentionPeriod
:param credential_data_format: Credential data format. Possible values
include: 'RsaEncrypted'
:type credential_data_format: str or
~azure.mgmt.servermanager.models.CredentialDataFormat
:param encryption_certificate_thumbprint: Encryption certificate
thumbprint.
:type encryption_certificate_thumbprint: str
:param dict custom_headers: headers that will be added to the request
:param bool raw: The poller return type is ClientRawResponse, the
direct response alongside the deserialized response
:param polling: True for ARMPolling, False for no polling, or a
polling object for personal polling strategy
:return: An instance of LROPoller that returns SessionResource or
ClientRawResponse<SessionResource> if raw==True
:rtype:
~msrestazure.azure_operation.AzureOperationPoller[~azure.mgmt.servermanager.models.SessionResource]
or
~msrestazure.azure_operation.AzureOperationPoller[~msrest.pipeline.ClientRawResponse[~azure.mgmt.servermanager.models.SessionResource]]
:raises:
:class:`ErrorException<azure.mgmt.servermanager.models.ErrorException>`
"""
raw_result = self._create_initial(
resource_group_name=resource_group_name,
node_name=node_name,
session=session,
user_name=user_name,
password=password,
retention_period=retention_period,
credential_data_format=credential_data_format,
encryption_certificate_thumbprint=encryption_certificate_thumbprint,
custom_headers=custom_headers,
raw=True,
**operation_config
)
def get_long_running_output(response):
deserialized = self._deserialize('SessionResource', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
lro_delay = operation_config.get(
'long_running_operation_timeout',
self.config.long_running_operation_timeout)
if polling is True: polling_method = ARMPolling(lro_delay, **operation_config)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
return LROPoller(self._client, raw_result, get_long_running_output, polling_method) | def function[create, parameter[self, resource_group_name, node_name, session, user_name, password, retention_period, credential_data_format, encryption_certificate_thumbprint, custom_headers, raw, polling]]:
constant[Creates a session for a node.
:param resource_group_name: The resource group name uniquely
identifies the resource group within the user subscriptionId.
:type resource_group_name: str
:param node_name: The node name (256 characters maximum).
:type node_name: str
:param session: The sessionId from the user.
:type session: str
:param user_name: Encrypted User name to be used to connect to node.
:type user_name: str
:param password: Encrypted Password associated with user name.
:type password: str
:param retention_period: Session retention period. Possible values
include: 'Session', 'Persistent'
:type retention_period: str or
~azure.mgmt.servermanager.models.RetentionPeriod
:param credential_data_format: Credential data format. Possible values
include: 'RsaEncrypted'
:type credential_data_format: str or
~azure.mgmt.servermanager.models.CredentialDataFormat
:param encryption_certificate_thumbprint: Encryption certificate
thumbprint.
:type encryption_certificate_thumbprint: str
:param dict custom_headers: headers that will be added to the request
:param bool raw: The poller return type is ClientRawResponse, the
direct response alongside the deserialized response
:param polling: True for ARMPolling, False for no polling, or a
polling object for personal polling strategy
:return: An instance of LROPoller that returns SessionResource or
ClientRawResponse<SessionResource> if raw==True
:rtype:
~msrestazure.azure_operation.AzureOperationPoller[~azure.mgmt.servermanager.models.SessionResource]
or
~msrestazure.azure_operation.AzureOperationPoller[~msrest.pipeline.ClientRawResponse[~azure.mgmt.servermanager.models.SessionResource]]
:raises:
:class:`ErrorException<azure.mgmt.servermanager.models.ErrorException>`
]
variable[raw_result] assign[=] call[name[self]._create_initial, parameter[]]
def function[get_long_running_output, parameter[response]]:
variable[deserialized] assign[=] call[name[self]._deserialize, parameter[constant[SessionResource], name[response]]]
if name[raw] begin[:]
variable[client_raw_response] assign[=] call[name[ClientRawResponse], parameter[name[deserialized], name[response]]]
return[name[client_raw_response]]
return[name[deserialized]]
variable[lro_delay] assign[=] call[name[operation_config].get, parameter[constant[long_running_operation_timeout], name[self].config.long_running_operation_timeout]]
if compare[name[polling] is constant[True]] begin[:]
variable[polling_method] assign[=] call[name[ARMPolling], parameter[name[lro_delay]]]
return[call[name[LROPoller], parameter[name[self]._client, name[raw_result], name[get_long_running_output], name[polling_method]]]] | keyword[def] identifier[create] (
identifier[self] , identifier[resource_group_name] , identifier[node_name] , identifier[session] , identifier[user_name] = keyword[None] , identifier[password] = keyword[None] , identifier[retention_period] = keyword[None] , identifier[credential_data_format] = keyword[None] , identifier[encryption_certificate_thumbprint] = keyword[None] , identifier[custom_headers] = keyword[None] , identifier[raw] = keyword[False] , identifier[polling] = keyword[True] ,** identifier[operation_config] ):
literal[string]
identifier[raw_result] = identifier[self] . identifier[_create_initial] (
identifier[resource_group_name] = identifier[resource_group_name] ,
identifier[node_name] = identifier[node_name] ,
identifier[session] = identifier[session] ,
identifier[user_name] = identifier[user_name] ,
identifier[password] = identifier[password] ,
identifier[retention_period] = identifier[retention_period] ,
identifier[credential_data_format] = identifier[credential_data_format] ,
identifier[encryption_certificate_thumbprint] = identifier[encryption_certificate_thumbprint] ,
identifier[custom_headers] = identifier[custom_headers] ,
identifier[raw] = keyword[True] ,
** identifier[operation_config]
)
keyword[def] identifier[get_long_running_output] ( identifier[response] ):
identifier[deserialized] = identifier[self] . identifier[_deserialize] ( literal[string] , identifier[response] )
keyword[if] identifier[raw] :
identifier[client_raw_response] = identifier[ClientRawResponse] ( identifier[deserialized] , identifier[response] )
keyword[return] identifier[client_raw_response]
keyword[return] identifier[deserialized]
identifier[lro_delay] = identifier[operation_config] . identifier[get] (
literal[string] ,
identifier[self] . identifier[config] . identifier[long_running_operation_timeout] )
keyword[if] identifier[polling] keyword[is] keyword[True] : identifier[polling_method] = identifier[ARMPolling] ( identifier[lro_delay] ,** identifier[operation_config] )
keyword[elif] identifier[polling] keyword[is] keyword[False] : identifier[polling_method] = identifier[NoPolling] ()
keyword[else] : identifier[polling_method] = identifier[polling]
keyword[return] identifier[LROPoller] ( identifier[self] . identifier[_client] , identifier[raw_result] , identifier[get_long_running_output] , identifier[polling_method] ) | def create(self, resource_group_name, node_name, session, user_name=None, password=None, retention_period=None, credential_data_format=None, encryption_certificate_thumbprint=None, custom_headers=None, raw=False, polling=True, **operation_config):
"""Creates a session for a node.
:param resource_group_name: The resource group name uniquely
identifies the resource group within the user subscriptionId.
:type resource_group_name: str
:param node_name: The node name (256 characters maximum).
:type node_name: str
:param session: The sessionId from the user.
:type session: str
:param user_name: Encrypted User name to be used to connect to node.
:type user_name: str
:param password: Encrypted Password associated with user name.
:type password: str
:param retention_period: Session retention period. Possible values
include: 'Session', 'Persistent'
:type retention_period: str or
~azure.mgmt.servermanager.models.RetentionPeriod
:param credential_data_format: Credential data format. Possible values
include: 'RsaEncrypted'
:type credential_data_format: str or
~azure.mgmt.servermanager.models.CredentialDataFormat
:param encryption_certificate_thumbprint: Encryption certificate
thumbprint.
:type encryption_certificate_thumbprint: str
:param dict custom_headers: headers that will be added to the request
:param bool raw: The poller return type is ClientRawResponse, the
direct response alongside the deserialized response
:param polling: True for ARMPolling, False for no polling, or a
polling object for personal polling strategy
:return: An instance of LROPoller that returns SessionResource or
ClientRawResponse<SessionResource> if raw==True
:rtype:
~msrestazure.azure_operation.AzureOperationPoller[~azure.mgmt.servermanager.models.SessionResource]
or
~msrestazure.azure_operation.AzureOperationPoller[~msrest.pipeline.ClientRawResponse[~azure.mgmt.servermanager.models.SessionResource]]
:raises:
:class:`ErrorException<azure.mgmt.servermanager.models.ErrorException>`
"""
raw_result = self._create_initial(resource_group_name=resource_group_name, node_name=node_name, session=session, user_name=user_name, password=password, retention_period=retention_period, credential_data_format=credential_data_format, encryption_certificate_thumbprint=encryption_certificate_thumbprint, custom_headers=custom_headers, raw=True, **operation_config)
def get_long_running_output(response):
deserialized = self._deserialize('SessionResource', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response # depends on [control=['if'], data=[]]
return deserialized
lro_delay = operation_config.get('long_running_operation_timeout', self.config.long_running_operation_timeout)
if polling is True:
polling_method = ARMPolling(lro_delay, **operation_config) # depends on [control=['if'], data=[]]
elif polling is False:
polling_method = NoPolling() # depends on [control=['if'], data=[]]
else:
polling_method = polling
return LROPoller(self._client, raw_result, get_long_running_output, polling_method) |
def loop(self):
"""
loop : 'for' init; ctrl; inc block
"""
self.eat(TokenTypes.FOR_LOOP)
init = NoOp()
if self.cur_token.type != TokenTypes.SEMI_COLON:
init = self.assign_statement()
else:
self.eat(TokenTypes.SEMI_COLON)
ctrl = NoOp()
if self.cur_token.type != TokenTypes.SEMI_COLON:
ctrl = self.expression()
self.eat(TokenTypes.SEMI_COLON)
inc = NoOp()
if self.cur_token.type != TokenTypes.LBRACE:
inc = self.assign_statement()
block = self.block()
return ForLoop(init, ctrl, inc, block) | def function[loop, parameter[self]]:
constant[
loop : 'for' init; ctrl; inc block
]
call[name[self].eat, parameter[name[TokenTypes].FOR_LOOP]]
variable[init] assign[=] call[name[NoOp], parameter[]]
if compare[name[self].cur_token.type not_equal[!=] name[TokenTypes].SEMI_COLON] begin[:]
variable[init] assign[=] call[name[self].assign_statement, parameter[]]
variable[ctrl] assign[=] call[name[NoOp], parameter[]]
if compare[name[self].cur_token.type not_equal[!=] name[TokenTypes].SEMI_COLON] begin[:]
variable[ctrl] assign[=] call[name[self].expression, parameter[]]
call[name[self].eat, parameter[name[TokenTypes].SEMI_COLON]]
variable[inc] assign[=] call[name[NoOp], parameter[]]
if compare[name[self].cur_token.type not_equal[!=] name[TokenTypes].LBRACE] begin[:]
variable[inc] assign[=] call[name[self].assign_statement, parameter[]]
variable[block] assign[=] call[name[self].block, parameter[]]
return[call[name[ForLoop], parameter[name[init], name[ctrl], name[inc], name[block]]]] | keyword[def] identifier[loop] ( identifier[self] ):
literal[string]
identifier[self] . identifier[eat] ( identifier[TokenTypes] . identifier[FOR_LOOP] )
identifier[init] = identifier[NoOp] ()
keyword[if] identifier[self] . identifier[cur_token] . identifier[type] != identifier[TokenTypes] . identifier[SEMI_COLON] :
identifier[init] = identifier[self] . identifier[assign_statement] ()
keyword[else] :
identifier[self] . identifier[eat] ( identifier[TokenTypes] . identifier[SEMI_COLON] )
identifier[ctrl] = identifier[NoOp] ()
keyword[if] identifier[self] . identifier[cur_token] . identifier[type] != identifier[TokenTypes] . identifier[SEMI_COLON] :
identifier[ctrl] = identifier[self] . identifier[expression] ()
identifier[self] . identifier[eat] ( identifier[TokenTypes] . identifier[SEMI_COLON] )
identifier[inc] = identifier[NoOp] ()
keyword[if] identifier[self] . identifier[cur_token] . identifier[type] != identifier[TokenTypes] . identifier[LBRACE] :
identifier[inc] = identifier[self] . identifier[assign_statement] ()
identifier[block] = identifier[self] . identifier[block] ()
keyword[return] identifier[ForLoop] ( identifier[init] , identifier[ctrl] , identifier[inc] , identifier[block] ) | def loop(self):
"""
loop : 'for' init; ctrl; inc block
"""
self.eat(TokenTypes.FOR_LOOP)
init = NoOp()
if self.cur_token.type != TokenTypes.SEMI_COLON:
init = self.assign_statement() # depends on [control=['if'], data=[]]
else:
self.eat(TokenTypes.SEMI_COLON)
ctrl = NoOp()
if self.cur_token.type != TokenTypes.SEMI_COLON:
ctrl = self.expression() # depends on [control=['if'], data=[]]
self.eat(TokenTypes.SEMI_COLON)
inc = NoOp()
if self.cur_token.type != TokenTypes.LBRACE:
inc = self.assign_statement() # depends on [control=['if'], data=[]]
block = self.block()
return ForLoop(init, ctrl, inc, block) |
def save(self, mode=0o600):
"""
Serialize the config data to the user home directory.
:param mode: The octal Unix mode (permissions) for the config file.
"""
if self._parent is not None:
self._parent.save(mode=mode)
else:
config_dir = os.path.dirname(os.path.abspath(self.config_files[-1]))
try:
os.makedirs(config_dir)
except OSError as e:
if not (e.errno == errno.EEXIST and os.path.isdir(config_dir)):
raise
with open(self.config_files[-1], "wb" if sys.version_info < (3, 0) else "w") as fh:
self._dump(fh)
os.chmod(self.config_files[-1], mode)
self._logger.debug("Saved config to %s", self.config_files[-1]) | def function[save, parameter[self, mode]]:
constant[
Serialize the config data to the user home directory.
:param mode: The octal Unix mode (permissions) for the config file.
]
if compare[name[self]._parent is_not constant[None]] begin[:]
call[name[self]._parent.save, parameter[]] | keyword[def] identifier[save] ( identifier[self] , identifier[mode] = literal[int] ):
literal[string]
keyword[if] identifier[self] . identifier[_parent] keyword[is] keyword[not] keyword[None] :
identifier[self] . identifier[_parent] . identifier[save] ( identifier[mode] = identifier[mode] )
keyword[else] :
identifier[config_dir] = identifier[os] . identifier[path] . identifier[dirname] ( identifier[os] . identifier[path] . identifier[abspath] ( identifier[self] . identifier[config_files] [- literal[int] ]))
keyword[try] :
identifier[os] . identifier[makedirs] ( identifier[config_dir] )
keyword[except] identifier[OSError] keyword[as] identifier[e] :
keyword[if] keyword[not] ( identifier[e] . identifier[errno] == identifier[errno] . identifier[EEXIST] keyword[and] identifier[os] . identifier[path] . identifier[isdir] ( identifier[config_dir] )):
keyword[raise]
keyword[with] identifier[open] ( identifier[self] . identifier[config_files] [- literal[int] ], literal[string] keyword[if] identifier[sys] . identifier[version_info] <( literal[int] , literal[int] ) keyword[else] literal[string] ) keyword[as] identifier[fh] :
identifier[self] . identifier[_dump] ( identifier[fh] )
identifier[os] . identifier[chmod] ( identifier[self] . identifier[config_files] [- literal[int] ], identifier[mode] )
identifier[self] . identifier[_logger] . identifier[debug] ( literal[string] , identifier[self] . identifier[config_files] [- literal[int] ]) | def save(self, mode=384):
"""
Serialize the config data to the user home directory.
:param mode: The octal Unix mode (permissions) for the config file.
"""
if self._parent is not None:
self._parent.save(mode=mode) # depends on [control=['if'], data=[]]
else:
config_dir = os.path.dirname(os.path.abspath(self.config_files[-1]))
try:
os.makedirs(config_dir) # depends on [control=['try'], data=[]]
except OSError as e:
if not (e.errno == errno.EEXIST and os.path.isdir(config_dir)):
raise # depends on [control=['if'], data=[]] # depends on [control=['except'], data=['e']]
with open(self.config_files[-1], 'wb' if sys.version_info < (3, 0) else 'w') as fh:
self._dump(fh) # depends on [control=['with'], data=['fh']]
os.chmod(self.config_files[-1], mode)
self._logger.debug('Saved config to %s', self.config_files[-1]) |
def select_as_dataframe(self, table_name, columns=None, where=None, extra=None):
"""
Get data in the database and return fetched data as a
:py:class:`pandas.Dataframe` instance.
:param str table_name: |arg_select_table_name|
:param list columns: |arg_select_as_xx_columns|
:param str where: |arg_select_where|
:param str extra: |arg_select_extra|
:return: Table data as a :py:class:`pandas.Dataframe` instance.
:rtype: pandas.DataFrame
:raises simplesqlite.NullDatabaseConnectionError:
|raises_check_connection|
:raises simplesqlite.TableNotFoundError:
|raises_verify_table_existence|
:raises simplesqlite.OperationalError: |raises_operational_error|
:Example:
:ref:`example-select-as-dataframe`
.. note::
``pandas`` package required to execute this method.
"""
import pandas
if columns is None:
columns = self.fetch_attr_names(table_name)
result = self.select(
select=AttrList(columns), table_name=table_name, where=where, extra=extra
)
if result is None:
return pandas.DataFrame()
return pandas.DataFrame(result.fetchall(), columns=columns) | def function[select_as_dataframe, parameter[self, table_name, columns, where, extra]]:
constant[
Get data in the database and return fetched data as a
:py:class:`pandas.Dataframe` instance.
:param str table_name: |arg_select_table_name|
:param list columns: |arg_select_as_xx_columns|
:param str where: |arg_select_where|
:param str extra: |arg_select_extra|
:return: Table data as a :py:class:`pandas.Dataframe` instance.
:rtype: pandas.DataFrame
:raises simplesqlite.NullDatabaseConnectionError:
|raises_check_connection|
:raises simplesqlite.TableNotFoundError:
|raises_verify_table_existence|
:raises simplesqlite.OperationalError: |raises_operational_error|
:Example:
:ref:`example-select-as-dataframe`
.. note::
``pandas`` package required to execute this method.
]
import module[pandas]
if compare[name[columns] is constant[None]] begin[:]
variable[columns] assign[=] call[name[self].fetch_attr_names, parameter[name[table_name]]]
variable[result] assign[=] call[name[self].select, parameter[]]
if compare[name[result] is constant[None]] begin[:]
return[call[name[pandas].DataFrame, parameter[]]]
return[call[name[pandas].DataFrame, parameter[call[name[result].fetchall, parameter[]]]]] | keyword[def] identifier[select_as_dataframe] ( identifier[self] , identifier[table_name] , identifier[columns] = keyword[None] , identifier[where] = keyword[None] , identifier[extra] = keyword[None] ):
literal[string]
keyword[import] identifier[pandas]
keyword[if] identifier[columns] keyword[is] keyword[None] :
identifier[columns] = identifier[self] . identifier[fetch_attr_names] ( identifier[table_name] )
identifier[result] = identifier[self] . identifier[select] (
identifier[select] = identifier[AttrList] ( identifier[columns] ), identifier[table_name] = identifier[table_name] , identifier[where] = identifier[where] , identifier[extra] = identifier[extra]
)
keyword[if] identifier[result] keyword[is] keyword[None] :
keyword[return] identifier[pandas] . identifier[DataFrame] ()
keyword[return] identifier[pandas] . identifier[DataFrame] ( identifier[result] . identifier[fetchall] (), identifier[columns] = identifier[columns] ) | def select_as_dataframe(self, table_name, columns=None, where=None, extra=None):
"""
Get data in the database and return fetched data as a
:py:class:`pandas.Dataframe` instance.
:param str table_name: |arg_select_table_name|
:param list columns: |arg_select_as_xx_columns|
:param str where: |arg_select_where|
:param str extra: |arg_select_extra|
:return: Table data as a :py:class:`pandas.Dataframe` instance.
:rtype: pandas.DataFrame
:raises simplesqlite.NullDatabaseConnectionError:
|raises_check_connection|
:raises simplesqlite.TableNotFoundError:
|raises_verify_table_existence|
:raises simplesqlite.OperationalError: |raises_operational_error|
:Example:
:ref:`example-select-as-dataframe`
.. note::
``pandas`` package required to execute this method.
"""
import pandas
if columns is None:
columns = self.fetch_attr_names(table_name) # depends on [control=['if'], data=['columns']]
result = self.select(select=AttrList(columns), table_name=table_name, where=where, extra=extra)
if result is None:
return pandas.DataFrame() # depends on [control=['if'], data=[]]
return pandas.DataFrame(result.fetchall(), columns=columns) |
def write_amendment(self, amendment_id, file_content, branch, author):
"""Given an amendment_id, temporary filename of content, branch and auth_info
Deprecated but needed until we merge api local-dep to master...
"""
gh_user = branch.split('_amendment_')[0]
msg = "Update Amendment '%s' via OpenTree API" % amendment_id
return self.write_document(gh_user,
amendment_id,
file_content,
branch, author,
commit_msg=msg) | def function[write_amendment, parameter[self, amendment_id, file_content, branch, author]]:
constant[Given an amendment_id, temporary filename of content, branch and auth_info
Deprecated but needed until we merge api local-dep to master...
]
variable[gh_user] assign[=] call[call[name[branch].split, parameter[constant[_amendment_]]]][constant[0]]
variable[msg] assign[=] binary_operation[constant[Update Amendment '%s' via OpenTree API] <ast.Mod object at 0x7da2590d6920> name[amendment_id]]
return[call[name[self].write_document, parameter[name[gh_user], name[amendment_id], name[file_content], name[branch], name[author]]]] | keyword[def] identifier[write_amendment] ( identifier[self] , identifier[amendment_id] , identifier[file_content] , identifier[branch] , identifier[author] ):
literal[string]
identifier[gh_user] = identifier[branch] . identifier[split] ( literal[string] )[ literal[int] ]
identifier[msg] = literal[string] % identifier[amendment_id]
keyword[return] identifier[self] . identifier[write_document] ( identifier[gh_user] ,
identifier[amendment_id] ,
identifier[file_content] ,
identifier[branch] , identifier[author] ,
identifier[commit_msg] = identifier[msg] ) | def write_amendment(self, amendment_id, file_content, branch, author):
"""Given an amendment_id, temporary filename of content, branch and auth_info
Deprecated but needed until we merge api local-dep to master...
"""
gh_user = branch.split('_amendment_')[0]
msg = "Update Amendment '%s' via OpenTree API" % amendment_id
return self.write_document(gh_user, amendment_id, file_content, branch, author, commit_msg=msg) |
def _execute(self, *args):
""" Execute a given taskwarrior command with arguments
Returns a 2-tuple of stdout and stderr (respectively).
"""
command = (
[
'task',
'rc:%s' % self.config_filename,
]
+ self.get_configuration_override_args()
+ [six.text_type(arg) for arg in args]
)
# subprocess is expecting bytestrings only, so nuke unicode if present
# and remove control characters
for i in range(len(command)):
if isinstance(command[i], six.text_type):
command[i] = (
taskw.utils.clean_ctrl_chars(command[i].encode('utf-8')))
try:
proc = subprocess.Popen(
command,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
)
stdout, stderr = proc.communicate()
except OSError as e:
if e.errno == errno.ENOENT:
raise OSError("Unable to find the 'task' command-line tool.")
raise
if proc.returncode != 0:
raise TaskwarriorError(command, stderr, stdout, proc.returncode)
# We should get bytes from the outside world. Turn those into unicode
# as soon as we can.
# Everything going into and coming out of taskwarrior *should* be
# utf-8, but there are weird edge cases where something totally unusual
# made it in.. so we need to be able to handle (or at least try to
# handle) whatever. Kitchen tries its best.
try:
stdout = stdout.decode(self.config.get('encoding', 'utf-8'))
except UnicodeDecodeError as e:
stdout = kitchen.text.converters.to_unicode(stdout)
try:
stderr = stderr.decode(self.config.get('encoding', 'utf-8'))
except UnicodeDecodeError as e:
stderr = kitchen.text.converters.to_unicode(stderr)
# strip any crazy terminal escape characters like bells, backspaces,
# and form feeds
for c in ('\a', '\b', '\f', ''):
stdout = stdout.replace(c, '?')
stderr = stderr.replace(c, '?')
return stdout, stderr | def function[_execute, parameter[self]]:
constant[ Execute a given taskwarrior command with arguments
Returns a 2-tuple of stdout and stderr (respectively).
]
variable[command] assign[=] binary_operation[binary_operation[list[[<ast.Constant object at 0x7da18f58e7d0>, <ast.BinOp object at 0x7da18f58e410>]] + call[name[self].get_configuration_override_args, parameter[]]] + <ast.ListComp object at 0x7da18f58ec20>]
for taget[name[i]] in starred[call[name[range], parameter[call[name[len], parameter[name[command]]]]]] begin[:]
if call[name[isinstance], parameter[call[name[command]][name[i]], name[six].text_type]] begin[:]
call[name[command]][name[i]] assign[=] call[name[taskw].utils.clean_ctrl_chars, parameter[call[call[name[command]][name[i]].encode, parameter[constant[utf-8]]]]]
<ast.Try object at 0x7da18f58dfc0>
if compare[name[proc].returncode not_equal[!=] constant[0]] begin[:]
<ast.Raise object at 0x7da18f58dc30>
<ast.Try object at 0x7da18f58f850>
<ast.Try object at 0x7da18dc04250>
for taget[name[c]] in starred[tuple[[<ast.Constant object at 0x7da18dc049a0>, <ast.Constant object at 0x7da18dc05bd0>, <ast.Constant object at 0x7da18dc04ca0>, <ast.Constant object at 0x7da18dc05390>]]] begin[:]
variable[stdout] assign[=] call[name[stdout].replace, parameter[name[c], constant[?]]]
variable[stderr] assign[=] call[name[stderr].replace, parameter[name[c], constant[?]]]
return[tuple[[<ast.Name object at 0x7da18dc04190>, <ast.Name object at 0x7da18dc047f0>]]] | keyword[def] identifier[_execute] ( identifier[self] ,* identifier[args] ):
literal[string]
identifier[command] =(
[
literal[string] ,
literal[string] % identifier[self] . identifier[config_filename] ,
]
+ identifier[self] . identifier[get_configuration_override_args] ()
+[ identifier[six] . identifier[text_type] ( identifier[arg] ) keyword[for] identifier[arg] keyword[in] identifier[args] ]
)
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[len] ( identifier[command] )):
keyword[if] identifier[isinstance] ( identifier[command] [ identifier[i] ], identifier[six] . identifier[text_type] ):
identifier[command] [ identifier[i] ]=(
identifier[taskw] . identifier[utils] . identifier[clean_ctrl_chars] ( identifier[command] [ identifier[i] ]. identifier[encode] ( literal[string] )))
keyword[try] :
identifier[proc] = identifier[subprocess] . identifier[Popen] (
identifier[command] ,
identifier[stdout] = identifier[subprocess] . identifier[PIPE] ,
identifier[stderr] = identifier[subprocess] . identifier[PIPE] ,
)
identifier[stdout] , identifier[stderr] = identifier[proc] . identifier[communicate] ()
keyword[except] identifier[OSError] keyword[as] identifier[e] :
keyword[if] identifier[e] . identifier[errno] == identifier[errno] . identifier[ENOENT] :
keyword[raise] identifier[OSError] ( literal[string] )
keyword[raise]
keyword[if] identifier[proc] . identifier[returncode] != literal[int] :
keyword[raise] identifier[TaskwarriorError] ( identifier[command] , identifier[stderr] , identifier[stdout] , identifier[proc] . identifier[returncode] )
keyword[try] :
identifier[stdout] = identifier[stdout] . identifier[decode] ( identifier[self] . identifier[config] . identifier[get] ( literal[string] , literal[string] ))
keyword[except] identifier[UnicodeDecodeError] keyword[as] identifier[e] :
identifier[stdout] = identifier[kitchen] . identifier[text] . identifier[converters] . identifier[to_unicode] ( identifier[stdout] )
keyword[try] :
identifier[stderr] = identifier[stderr] . identifier[decode] ( identifier[self] . identifier[config] . identifier[get] ( literal[string] , literal[string] ))
keyword[except] identifier[UnicodeDecodeError] keyword[as] identifier[e] :
identifier[stderr] = identifier[kitchen] . identifier[text] . identifier[converters] . identifier[to_unicode] ( identifier[stderr] )
keyword[for] identifier[c] keyword[in] ( literal[string] , literal[string] , literal[string] , literal[string] ):
identifier[stdout] = identifier[stdout] . identifier[replace] ( identifier[c] , literal[string] )
identifier[stderr] = identifier[stderr] . identifier[replace] ( identifier[c] , literal[string] )
keyword[return] identifier[stdout] , identifier[stderr] | def _execute(self, *args):
""" Execute a given taskwarrior command with arguments
Returns a 2-tuple of stdout and stderr (respectively).
"""
command = ['task', 'rc:%s' % self.config_filename] + self.get_configuration_override_args() + [six.text_type(arg) for arg in args]
# subprocess is expecting bytestrings only, so nuke unicode if present
# and remove control characters
for i in range(len(command)):
if isinstance(command[i], six.text_type):
command[i] = taskw.utils.clean_ctrl_chars(command[i].encode('utf-8')) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['i']]
try:
proc = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
(stdout, stderr) = proc.communicate() # depends on [control=['try'], data=[]]
except OSError as e:
if e.errno == errno.ENOENT:
raise OSError("Unable to find the 'task' command-line tool.") # depends on [control=['if'], data=[]]
raise # depends on [control=['except'], data=['e']]
if proc.returncode != 0:
raise TaskwarriorError(command, stderr, stdout, proc.returncode) # depends on [control=['if'], data=[]]
# We should get bytes from the outside world. Turn those into unicode
# as soon as we can.
# Everything going into and coming out of taskwarrior *should* be
# utf-8, but there are weird edge cases where something totally unusual
# made it in.. so we need to be able to handle (or at least try to
# handle) whatever. Kitchen tries its best.
try:
stdout = stdout.decode(self.config.get('encoding', 'utf-8')) # depends on [control=['try'], data=[]]
except UnicodeDecodeError as e:
stdout = kitchen.text.converters.to_unicode(stdout) # depends on [control=['except'], data=[]]
try:
stderr = stderr.decode(self.config.get('encoding', 'utf-8')) # depends on [control=['try'], data=[]]
except UnicodeDecodeError as e:
stderr = kitchen.text.converters.to_unicode(stderr) # depends on [control=['except'], data=[]]
# strip any crazy terminal escape characters like bells, backspaces,
# and form feeds
for c in ('\x07', '\x08', '\x0c', '\x1b'):
stdout = stdout.replace(c, '?')
stderr = stderr.replace(c, '?') # depends on [control=['for'], data=['c']]
return (stdout, stderr) |
def insert(self, i, item_weight):
"""
Insert an item with the given weight in the sequence
"""
item, weight = item_weight
self._seq.insert(i, item)
self.weight += weight | def function[insert, parameter[self, i, item_weight]]:
constant[
Insert an item with the given weight in the sequence
]
<ast.Tuple object at 0x7da2054a5b70> assign[=] name[item_weight]
call[name[self]._seq.insert, parameter[name[i], name[item]]]
<ast.AugAssign object at 0x7da2054a40a0> | keyword[def] identifier[insert] ( identifier[self] , identifier[i] , identifier[item_weight] ):
literal[string]
identifier[item] , identifier[weight] = identifier[item_weight]
identifier[self] . identifier[_seq] . identifier[insert] ( identifier[i] , identifier[item] )
identifier[self] . identifier[weight] += identifier[weight] | def insert(self, i, item_weight):
"""
Insert an item with the given weight in the sequence
"""
(item, weight) = item_weight
self._seq.insert(i, item)
self.weight += weight |
def parse(input):
'''
parse input to datetime
'''
if isinstance(input, datetime):
return input
if isinstance(input, date):
return date_to_datetime(input)
if isinstance(input, time):
return time_to_datetime(input)
if isinstance(input, (int, float)):
return timestamp_to_datetime(input)
if isinstance(input, (str)):
return string_to_data_time(input)
return None | def function[parse, parameter[input]]:
constant[
parse input to datetime
]
if call[name[isinstance], parameter[name[input], name[datetime]]] begin[:]
return[name[input]]
if call[name[isinstance], parameter[name[input], name[date]]] begin[:]
return[call[name[date_to_datetime], parameter[name[input]]]]
if call[name[isinstance], parameter[name[input], name[time]]] begin[:]
return[call[name[time_to_datetime], parameter[name[input]]]]
if call[name[isinstance], parameter[name[input], tuple[[<ast.Name object at 0x7da18eb56740>, <ast.Name object at 0x7da18eb57af0>]]]] begin[:]
return[call[name[timestamp_to_datetime], parameter[name[input]]]]
if call[name[isinstance], parameter[name[input], name[str]]] begin[:]
return[call[name[string_to_data_time], parameter[name[input]]]]
return[constant[None]] | keyword[def] identifier[parse] ( identifier[input] ):
literal[string]
keyword[if] identifier[isinstance] ( identifier[input] , identifier[datetime] ):
keyword[return] identifier[input]
keyword[if] identifier[isinstance] ( identifier[input] , identifier[date] ):
keyword[return] identifier[date_to_datetime] ( identifier[input] )
keyword[if] identifier[isinstance] ( identifier[input] , identifier[time] ):
keyword[return] identifier[time_to_datetime] ( identifier[input] )
keyword[if] identifier[isinstance] ( identifier[input] ,( identifier[int] , identifier[float] )):
keyword[return] identifier[timestamp_to_datetime] ( identifier[input] )
keyword[if] identifier[isinstance] ( identifier[input] ,( identifier[str] )):
keyword[return] identifier[string_to_data_time] ( identifier[input] )
keyword[return] keyword[None] | def parse(input):
"""
parse input to datetime
"""
if isinstance(input, datetime):
return input # depends on [control=['if'], data=[]]
if isinstance(input, date):
return date_to_datetime(input) # depends on [control=['if'], data=[]]
if isinstance(input, time):
return time_to_datetime(input) # depends on [control=['if'], data=[]]
if isinstance(input, (int, float)):
return timestamp_to_datetime(input) # depends on [control=['if'], data=[]]
if isinstance(input, str):
return string_to_data_time(input) # depends on [control=['if'], data=[]]
return None |
def get_interface_detail_output_interface_line_protocol_exception_info(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_interface_detail = ET.Element("get_interface_detail")
config = get_interface_detail
output = ET.SubElement(get_interface_detail, "output")
interface = ET.SubElement(output, "interface")
interface_type_key = ET.SubElement(interface, "interface-type")
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, "interface-name")
interface_name_key.text = kwargs.pop('interface_name')
line_protocol_exception_info = ET.SubElement(interface, "line-protocol-exception-info")
line_protocol_exception_info.text = kwargs.pop('line_protocol_exception_info')
callback = kwargs.pop('callback', self._callback)
return callback(config) | def function[get_interface_detail_output_interface_line_protocol_exception_info, parameter[self]]:
constant[Auto Generated Code
]
variable[config] assign[=] call[name[ET].Element, parameter[constant[config]]]
variable[get_interface_detail] assign[=] call[name[ET].Element, parameter[constant[get_interface_detail]]]
variable[config] assign[=] name[get_interface_detail]
variable[output] assign[=] call[name[ET].SubElement, parameter[name[get_interface_detail], constant[output]]]
variable[interface] assign[=] call[name[ET].SubElement, parameter[name[output], constant[interface]]]
variable[interface_type_key] assign[=] call[name[ET].SubElement, parameter[name[interface], constant[interface-type]]]
name[interface_type_key].text assign[=] call[name[kwargs].pop, parameter[constant[interface_type]]]
variable[interface_name_key] assign[=] call[name[ET].SubElement, parameter[name[interface], constant[interface-name]]]
name[interface_name_key].text assign[=] call[name[kwargs].pop, parameter[constant[interface_name]]]
variable[line_protocol_exception_info] assign[=] call[name[ET].SubElement, parameter[name[interface], constant[line-protocol-exception-info]]]
name[line_protocol_exception_info].text assign[=] call[name[kwargs].pop, parameter[constant[line_protocol_exception_info]]]
variable[callback] assign[=] call[name[kwargs].pop, parameter[constant[callback], name[self]._callback]]
return[call[name[callback], parameter[name[config]]]] | keyword[def] identifier[get_interface_detail_output_interface_line_protocol_exception_info] ( identifier[self] ,** identifier[kwargs] ):
literal[string]
identifier[config] = identifier[ET] . identifier[Element] ( literal[string] )
identifier[get_interface_detail] = identifier[ET] . identifier[Element] ( literal[string] )
identifier[config] = identifier[get_interface_detail]
identifier[output] = identifier[ET] . identifier[SubElement] ( identifier[get_interface_detail] , literal[string] )
identifier[interface] = identifier[ET] . identifier[SubElement] ( identifier[output] , literal[string] )
identifier[interface_type_key] = identifier[ET] . identifier[SubElement] ( identifier[interface] , literal[string] )
identifier[interface_type_key] . identifier[text] = identifier[kwargs] . identifier[pop] ( literal[string] )
identifier[interface_name_key] = identifier[ET] . identifier[SubElement] ( identifier[interface] , literal[string] )
identifier[interface_name_key] . identifier[text] = identifier[kwargs] . identifier[pop] ( literal[string] )
identifier[line_protocol_exception_info] = identifier[ET] . identifier[SubElement] ( identifier[interface] , literal[string] )
identifier[line_protocol_exception_info] . identifier[text] = identifier[kwargs] . identifier[pop] ( literal[string] )
identifier[callback] = identifier[kwargs] . identifier[pop] ( literal[string] , identifier[self] . identifier[_callback] )
keyword[return] identifier[callback] ( identifier[config] ) | def get_interface_detail_output_interface_line_protocol_exception_info(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element('config')
get_interface_detail = ET.Element('get_interface_detail')
config = get_interface_detail
output = ET.SubElement(get_interface_detail, 'output')
interface = ET.SubElement(output, 'interface')
interface_type_key = ET.SubElement(interface, 'interface-type')
interface_type_key.text = kwargs.pop('interface_type')
interface_name_key = ET.SubElement(interface, 'interface-name')
interface_name_key.text = kwargs.pop('interface_name')
line_protocol_exception_info = ET.SubElement(interface, 'line-protocol-exception-info')
line_protocol_exception_info.text = kwargs.pop('line_protocol_exception_info')
callback = kwargs.pop('callback', self._callback)
return callback(config) |
def has_active_condition(self, condition, instances):
    """
    Evaluate ``condition`` against every instance, plus the non-instance
    default ``None``, and report whether the conditional is met.

    Returns False as soon as any executable instance evaluates to False,
    True when at least one evaluated to True (and none to False), and
    None when no instance produced a definitive result.
    """
    saw_true = False
    for candidate in list(instances) + [None]:
        # Skip instances this switch cannot evaluate at all.
        if not self.can_execute(candidate):
            continue
        outcome = self.is_active(candidate, condition)
        if outcome is False:
            # Any explicit False wins immediately.
            return False
        if outcome is True:
            saw_true = True
    return True if saw_true else None
constant[
Given a list of instances, and the condition active for
this switch, returns a boolean representing if the
conditional is met, including a non-instance default.
]
variable[return_value] assign[=] constant[None]
for taget[name[instance]] in starred[binary_operation[name[instances] + list[[<ast.Constant object at 0x7da1b0b45930>]]]] begin[:]
if <ast.UnaryOp object at 0x7da1b0b44940> begin[:]
continue
variable[result] assign[=] call[name[self].is_active, parameter[name[instance], name[condition]]]
if compare[name[result] is constant[False]] begin[:]
return[constant[False]]
return[name[return_value]] | keyword[def] identifier[has_active_condition] ( identifier[self] , identifier[condition] , identifier[instances] ):
literal[string]
identifier[return_value] = keyword[None]
keyword[for] identifier[instance] keyword[in] identifier[instances] +[ keyword[None] ]:
keyword[if] keyword[not] identifier[self] . identifier[can_execute] ( identifier[instance] ):
keyword[continue]
identifier[result] = identifier[self] . identifier[is_active] ( identifier[instance] , identifier[condition] )
keyword[if] identifier[result] keyword[is] keyword[False] :
keyword[return] keyword[False]
keyword[elif] identifier[result] keyword[is] keyword[True] :
identifier[return_value] = keyword[True]
keyword[return] identifier[return_value] | def has_active_condition(self, condition, instances):
"""
Given a list of instances, and the condition active for
this switch, returns a boolean representing if the
conditional is met, including a non-instance default.
"""
return_value = None
for instance in instances + [None]:
if not self.can_execute(instance):
continue # depends on [control=['if'], data=[]]
result = self.is_active(instance, condition)
if result is False:
return False # depends on [control=['if'], data=[]]
elif result is True:
return_value = True # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['instance']]
return return_value |
def generate_hexagonal_lattice(maxv1, minv1, maxv2, minv2, mindist):
    """
    This function generates a 2-dimensional lattice of points using a
    hexagonal lattice.

    Parameters
    -----------
    maxv1 : float
        Largest value in the 1st dimension to cover
    minv1 : float
        Smallest value in the 1st dimension to cover
    maxv2 : float
        Largest value in the 2nd dimension to cover
    minv2 : float
        Smallest value in the 2nd dimension to cover
    mindist : float
        Maximum allowed mismatch between a point in the parameter space and
        the generated bank of points.

    Returns
    --------
    v1s : numpy.array
        Array of positions in the first dimension
    v2s : numpy.array
        Array of positions in the second dimension

    Raises
    ------
    ValueError
        If minv1 > maxv1 or minv2 > maxv2.
    """
    if minv1 > maxv1:
        raise ValueError("Invalid input to function.")
    if minv2 > maxv2:
        raise ValueError("Invalid input to function.")
    # Horizontal spacing between points in a row; the second row of each
    # pair is offset by half this spacing for the hexagonal packing.
    row_step = (3 * mindist) ** 0.5
    # First row of points along v1 at v2 = minv2.
    v1s = [minv1]
    v2s = [minv2]
    init_line = [[minv1, minv2]]
    tmpv1 = minv1
    while tmpv1 < maxv1:
        tmpv1 = tmpv1 + row_step
        init_line.append([tmpv1, minv2])
        v1s.append(tmpv1)
        v2s.append(minv2)
    init_line = numpy.array(init_line)
    # Second row: shifted half a step in v1 and 1.5 * sqrt(mindist) in v2.
    # (ndarray.copy() replaces the original copy.deepcopy on an array.)
    init_line2 = init_line.copy()
    init_line2[:, 0] += 0.5 * row_step
    init_line2[:, 1] += 1.5 * mindist ** 0.5
    v1s.extend(init_line2[:, 0])
    v2s.extend(init_line2[:, 1])
    # Repeat the pair of rows upward until the v2 range is covered.
    # (Was `xrange` loops with per-element appends — Python-2-only;
    # range/extend is equivalent and runs on Python 3.)
    tmpv2_1 = init_line[0, 1]
    tmpv2_2 = init_line2[0, 1]
    while tmpv2_1 < maxv2 and tmpv2_2 < maxv2:
        tmpv2_1 = tmpv2_1 + 3.0 * mindist ** 0.5
        tmpv2_2 = tmpv2_2 + 3.0 * mindist ** 0.5
        init_line[:, 1] = tmpv2_1
        init_line2[:, 1] = tmpv2_2
        v1s.extend(init_line[:, 0])
        v2s.extend(init_line[:, 1])
        v1s.extend(init_line2[:, 0])
        v2s.extend(init_line2[:, 1])
    return numpy.array(v1s), numpy.array(v2s)
constant[
This function generates a 2-dimensional lattice of points using a hexagonal
lattice.
Parameters
-----------
maxv1 : float
Largest value in the 1st dimension to cover
minv1 : float
Smallest value in the 1st dimension to cover
maxv2 : float
Largest value in the 2nd dimension to cover
minv2 : float
Smallest value in the 2nd dimension to cover
mindist : float
Maximum allowed mismatch between a point in the parameter space and the
generated bank of points.
Returns
--------
v1s : numpy.array
Array of positions in the first dimension
v2s : numpy.array
Array of positions in the second dimension
]
if compare[name[minv1] greater[>] name[maxv1]] begin[:]
<ast.Raise object at 0x7da20e961c30>
if compare[name[minv2] greater[>] name[maxv2]] begin[:]
<ast.Raise object at 0x7da20e962ef0>
variable[v1s] assign[=] list[[<ast.Name object at 0x7da20e961b70>]]
variable[v2s] assign[=] list[[<ast.Name object at 0x7da20e961270>]]
variable[initPoint] assign[=] list[[<ast.Name object at 0x7da20e963f70>, <ast.Name object at 0x7da20e960fa0>]]
variable[initLine] assign[=] list[[<ast.Name object at 0x7da20e960070>]]
variable[tmpv1] assign[=] name[minv1]
while compare[name[tmpv1] less[<] name[maxv1]] begin[:]
variable[tmpv1] assign[=] binary_operation[name[tmpv1] + binary_operation[binary_operation[constant[3] * name[mindist]] ** constant[0.5]]]
call[name[initLine].append, parameter[list[[<ast.Name object at 0x7da20e963a30>, <ast.Name object at 0x7da20e9612a0>]]]]
call[name[v1s].append, parameter[name[tmpv1]]]
call[name[v2s].append, parameter[name[minv2]]]
variable[initLine] assign[=] call[name[numpy].array, parameter[name[initLine]]]
variable[initLine2] assign[=] call[name[copy].deepcopy, parameter[name[initLine]]]
<ast.AugAssign object at 0x7da20e9610f0>
<ast.AugAssign object at 0x7da2044c2bc0>
for taget[name[i]] in starred[call[name[xrange], parameter[call[name[len], parameter[name[initLine2]]]]]] begin[:]
call[name[v1s].append, parameter[call[name[initLine2]][tuple[[<ast.Name object at 0x7da2044c2680>, <ast.Constant object at 0x7da2044c0490>]]]]]
call[name[v2s].append, parameter[call[name[initLine2]][tuple[[<ast.Name object at 0x7da20c6aa380>, <ast.Constant object at 0x7da20c6aac20>]]]]]
variable[tmpv2_1] assign[=] call[name[initLine]][tuple[[<ast.Constant object at 0x7da20c6a82b0>, <ast.Constant object at 0x7da20c6a9180>]]]
variable[tmpv2_2] assign[=] call[name[initLine2]][tuple[[<ast.Constant object at 0x7da20c6aa620>, <ast.Constant object at 0x7da20c6a8ca0>]]]
while <ast.BoolOp object at 0x7da20c6a8f70> begin[:]
variable[tmpv2_1] assign[=] binary_operation[name[tmpv2_1] + binary_operation[constant[3.0] * binary_operation[name[mindist] ** constant[0.5]]]]
variable[tmpv2_2] assign[=] binary_operation[name[tmpv2_2] + binary_operation[constant[3.0] * binary_operation[name[mindist] ** constant[0.5]]]]
call[name[initLine]][tuple[[<ast.Slice object at 0x7da20c6a9000>, <ast.Constant object at 0x7da20c6a8b50>]]] assign[=] name[tmpv2_1]
call[name[initLine2]][tuple[[<ast.Slice object at 0x7da20c6a9720>, <ast.Constant object at 0x7da20c6aac80>]]] assign[=] name[tmpv2_2]
for taget[name[i]] in starred[call[name[xrange], parameter[call[name[len], parameter[name[initLine]]]]]] begin[:]
call[name[v1s].append, parameter[call[name[initLine]][tuple[[<ast.Name object at 0x7da20c6ab070>, <ast.Constant object at 0x7da20c6a8eb0>]]]]]
call[name[v2s].append, parameter[call[name[initLine]][tuple[[<ast.Name object at 0x7da20c76eb60>, <ast.Constant object at 0x7da20c76d780>]]]]]
for taget[name[i]] in starred[call[name[xrange], parameter[call[name[len], parameter[name[initLine2]]]]]] begin[:]
call[name[v1s].append, parameter[call[name[initLine2]][tuple[[<ast.Name object at 0x7da20e962230>, <ast.Constant object at 0x7da20e9604f0>]]]]]
call[name[v2s].append, parameter[call[name[initLine2]][tuple[[<ast.Name object at 0x7da20e963370>, <ast.Constant object at 0x7da20e960130>]]]]]
variable[v1s] assign[=] call[name[numpy].array, parameter[name[v1s]]]
variable[v2s] assign[=] call[name[numpy].array, parameter[name[v2s]]]
return[tuple[[<ast.Name object at 0x7da2041da890>, <ast.Name object at 0x7da20e954580>]]] | keyword[def] identifier[generate_hexagonal_lattice] ( identifier[maxv1] , identifier[minv1] , identifier[maxv2] , identifier[minv2] , identifier[mindist] ):
literal[string]
keyword[if] identifier[minv1] > identifier[maxv1] :
keyword[raise] identifier[ValueError] ( literal[string] )
keyword[if] identifier[minv2] > identifier[maxv2] :
keyword[raise] identifier[ValueError] ( literal[string] )
identifier[v1s] =[ identifier[minv1] ]
identifier[v2s] =[ identifier[minv2] ]
identifier[initPoint] =[ identifier[minv1] , identifier[minv2] ]
identifier[initLine] =[ identifier[initPoint] ]
identifier[tmpv1] = identifier[minv1]
keyword[while] ( identifier[tmpv1] < identifier[maxv1] ):
identifier[tmpv1] = identifier[tmpv1] +( literal[int] * identifier[mindist] )**( literal[int] )
identifier[initLine] . identifier[append] ([ identifier[tmpv1] , identifier[minv2] ])
identifier[v1s] . identifier[append] ( identifier[tmpv1] )
identifier[v2s] . identifier[append] ( identifier[minv2] )
identifier[initLine] = identifier[numpy] . identifier[array] ( identifier[initLine] )
identifier[initLine2] = identifier[copy] . identifier[deepcopy] ( identifier[initLine] )
identifier[initLine2] [:, literal[int] ]+= literal[int] *( literal[int] * identifier[mindist] )** literal[int]
identifier[initLine2] [:, literal[int] ]+= literal[int] *( identifier[mindist] )** literal[int]
keyword[for] identifier[i] keyword[in] identifier[xrange] ( identifier[len] ( identifier[initLine2] )):
identifier[v1s] . identifier[append] ( identifier[initLine2] [ identifier[i] , literal[int] ])
identifier[v2s] . identifier[append] ( identifier[initLine2] [ identifier[i] , literal[int] ])
identifier[tmpv2_1] = identifier[initLine] [ literal[int] , literal[int] ]
identifier[tmpv2_2] = identifier[initLine2] [ literal[int] , literal[int] ]
keyword[while] identifier[tmpv2_1] < identifier[maxv2] keyword[and] identifier[tmpv2_2] < identifier[maxv2] :
identifier[tmpv2_1] = identifier[tmpv2_1] + literal[int] *( identifier[mindist] )** literal[int]
identifier[tmpv2_2] = identifier[tmpv2_2] + literal[int] *( identifier[mindist] )** literal[int]
identifier[initLine] [:, literal[int] ]= identifier[tmpv2_1]
identifier[initLine2] [:, literal[int] ]= identifier[tmpv2_2]
keyword[for] identifier[i] keyword[in] identifier[xrange] ( identifier[len] ( identifier[initLine] )):
identifier[v1s] . identifier[append] ( identifier[initLine] [ identifier[i] , literal[int] ])
identifier[v2s] . identifier[append] ( identifier[initLine] [ identifier[i] , literal[int] ])
keyword[for] identifier[i] keyword[in] identifier[xrange] ( identifier[len] ( identifier[initLine2] )):
identifier[v1s] . identifier[append] ( identifier[initLine2] [ identifier[i] , literal[int] ])
identifier[v2s] . identifier[append] ( identifier[initLine2] [ identifier[i] , literal[int] ])
identifier[v1s] = identifier[numpy] . identifier[array] ( identifier[v1s] )
identifier[v2s] = identifier[numpy] . identifier[array] ( identifier[v2s] )
keyword[return] identifier[v1s] , identifier[v2s] | def generate_hexagonal_lattice(maxv1, minv1, maxv2, minv2, mindist):
"""
This function generates a 2-dimensional lattice of points using a hexagonal
lattice.
Parameters
-----------
maxv1 : float
Largest value in the 1st dimension to cover
minv1 : float
Smallest value in the 1st dimension to cover
maxv2 : float
Largest value in the 2nd dimension to cover
minv2 : float
Smallest value in the 2nd dimension to cover
mindist : float
Maximum allowed mismatch between a point in the parameter space and the
generated bank of points.
Returns
--------
v1s : numpy.array
Array of positions in the first dimension
v2s : numpy.array
Array of positions in the second dimension
"""
if minv1 > maxv1:
raise ValueError('Invalid input to function.') # depends on [control=['if'], data=[]]
if minv2 > maxv2:
raise ValueError('Invalid input to function.') # depends on [control=['if'], data=[]]
# Place first point
v1s = [minv1]
v2s = [minv2]
initPoint = [minv1, minv2]
# Place first line
initLine = [initPoint]
tmpv1 = minv1
while tmpv1 < maxv1:
tmpv1 = tmpv1 + (3 * mindist) ** 0.5
initLine.append([tmpv1, minv2])
v1s.append(tmpv1)
v2s.append(minv2) # depends on [control=['while'], data=['tmpv1']]
initLine = numpy.array(initLine)
initLine2 = copy.deepcopy(initLine)
initLine2[:, 0] += 0.5 * (3 * mindist) ** 0.5
initLine2[:, 1] += 1.5 * mindist ** 0.5
for i in xrange(len(initLine2)):
v1s.append(initLine2[i, 0])
v2s.append(initLine2[i, 1]) # depends on [control=['for'], data=['i']]
tmpv2_1 = initLine[0, 1]
tmpv2_2 = initLine2[0, 1]
while tmpv2_1 < maxv2 and tmpv2_2 < maxv2:
tmpv2_1 = tmpv2_1 + 3.0 * mindist ** 0.5
tmpv2_2 = tmpv2_2 + 3.0 * mindist ** 0.5
initLine[:, 1] = tmpv2_1
initLine2[:, 1] = tmpv2_2
for i in xrange(len(initLine)):
v1s.append(initLine[i, 0])
v2s.append(initLine[i, 1]) # depends on [control=['for'], data=['i']]
for i in xrange(len(initLine2)):
v1s.append(initLine2[i, 0])
v2s.append(initLine2[i, 1]) # depends on [control=['for'], data=['i']] # depends on [control=['while'], data=[]]
v1s = numpy.array(v1s)
v2s = numpy.array(v2s)
return (v1s, v2s) |
def get_init_container(self,
                       init_command,
                       init_args,
                       env_vars,
                       context_mounts,
                       persistence_outputs,
                       persistence_data):
    """Pod init container for setting outputs path."""
    # Normalize the caller-supplied env vars and append the job-info
    # config-map entry carrying this pod's labels as JSON.
    job_info_var = get_env_var(name=constants.CONFIG_MAP_JOB_INFO_KEY_NAME,
                               value=json.dumps(self.labels))
    env_vars = to_list(env_vars, check_none=True) + [job_info_var]
    return client.V1Container(
        name=self.init_container_name,
        image=self.init_docker_image,
        image_pull_policy=self.init_docker_image_pull_policy,
        command=init_command,
        args=init_args,
        env=env_vars,
        volume_mounts=context_mounts)
constant[Pod init container for setting outputs path.]
variable[env_vars] assign[=] call[name[to_list], parameter[name[env_vars]]]
<ast.AugAssign object at 0x7da18ede5840>
return[call[name[client].V1Container, parameter[]]] | keyword[def] identifier[get_init_container] ( identifier[self] ,
identifier[init_command] ,
identifier[init_args] ,
identifier[env_vars] ,
identifier[context_mounts] ,
identifier[persistence_outputs] ,
identifier[persistence_data] ):
literal[string]
identifier[env_vars] = identifier[to_list] ( identifier[env_vars] , identifier[check_none] = keyword[True] )
identifier[env_vars] +=[
identifier[get_env_var] ( identifier[name] = identifier[constants] . identifier[CONFIG_MAP_JOB_INFO_KEY_NAME] ,
identifier[value] = identifier[json] . identifier[dumps] ( identifier[self] . identifier[labels] )),
]
keyword[return] identifier[client] . identifier[V1Container] (
identifier[name] = identifier[self] . identifier[init_container_name] ,
identifier[image] = identifier[self] . identifier[init_docker_image] ,
identifier[image_pull_policy] = identifier[self] . identifier[init_docker_image_pull_policy] ,
identifier[command] = identifier[init_command] ,
identifier[env] = identifier[env_vars] ,
identifier[args] = identifier[init_args] ,
identifier[volume_mounts] = identifier[context_mounts] ) | def get_init_container(self, init_command, init_args, env_vars, context_mounts, persistence_outputs, persistence_data):
"""Pod init container for setting outputs path."""
env_vars = to_list(env_vars, check_none=True)
env_vars += [get_env_var(name=constants.CONFIG_MAP_JOB_INFO_KEY_NAME, value=json.dumps(self.labels))]
return client.V1Container(name=self.init_container_name, image=self.init_docker_image, image_pull_policy=self.init_docker_image_pull_policy, command=init_command, env=env_vars, args=init_args, volume_mounts=context_mounts) |
def extract_uasts(self):
    """
    Returns a new DataFrame with the parsed UAST data of any blob added to
    its row.
    >>> blobs_df.extract_uasts
    :rtype: UASTsDataFrame
    """
    # Delegate UAST extraction to the underlying engine dataframe, then
    # re-wrap the result with the same session and implicits.
    extracted = self._engine_dataframe.extractUASTs()
    return UASTsDataFrame(extracted, self._session, self._implicits)
constant[
Returns a new DataFrame with the parsed UAST data of any blob added to
its row.
>>> blobs_df.extract_uasts
:rtype: UASTsDataFrame
]
return[call[name[UASTsDataFrame], parameter[call[name[self]._engine_dataframe.extractUASTs, parameter[]], name[self]._session, name[self]._implicits]]] | keyword[def] identifier[extract_uasts] ( identifier[self] ):
literal[string]
keyword[return] identifier[UASTsDataFrame] ( identifier[self] . identifier[_engine_dataframe] . identifier[extractUASTs] (),
identifier[self] . identifier[_session] , identifier[self] . identifier[_implicits] ) | def extract_uasts(self):
"""
Returns a new DataFrame with the parsed UAST data of any blob added to
its row.
>>> blobs_df.extract_uasts
:rtype: UASTsDataFrame
"""
return UASTsDataFrame(self._engine_dataframe.extractUASTs(), self._session, self._implicits) |
def get_object_by_record(record):
    """Find an object by a given record

    Inspects the record to locate an object, trying in order: the "uid"
    key, the "path" key, and finally "parent_path" + "id" joined into a
    path. Logs a warning and returns None when nothing matches.

    :param record: A dictionary representation of an object
    :type record: dict
    :returns: Found Object or None
    :rtype: object
    """
    # nothing to do here
    if not record:
        return None
    if record.get("uid"):
        return get_object_by_uid(record["uid"])
    if record.get("path"):
        return get_object_by_path(record["path"])
    if record.get("parent_path") and record.get("id"):
        path = "/".join([record["parent_path"], record["id"]])
        return get_object_by_path(path)
    # Logger.warn is a deprecated alias; use warning() with lazy %-style
    # arguments so formatting only happens when the record is emitted.
    logger.warning("get_object_by_record::No object found! record='%r'", record)
    return None
return None | def function[get_object_by_record, parameter[record]]:
constant[Find an object by a given record
Inspects request the record to locate an object
:param record: A dictionary representation of an object
:type record: dict
:returns: Found Object or None
:rtype: object
]
if <ast.UnaryOp object at 0x7da1b2617c70> begin[:]
return[constant[None]]
if call[name[record].get, parameter[constant[uid]]] begin[:]
return[call[name[get_object_by_uid], parameter[call[name[record]][constant[uid]]]]]
if call[name[record].get, parameter[constant[path]]] begin[:]
return[call[name[get_object_by_path], parameter[call[name[record]][constant[path]]]]]
if <ast.BoolOp object at 0x7da1b2616530> begin[:]
variable[path] assign[=] call[constant[/].join, parameter[list[[<ast.Subscript object at 0x7da1b2652230>, <ast.Subscript object at 0x7da1b2651360>]]]]
return[call[name[get_object_by_path], parameter[name[path]]]]
call[name[logger].warn, parameter[binary_operation[constant[get_object_by_record::No object found! record='%r'] <ast.Mod object at 0x7da2590d6920> name[record]]]]
return[constant[None]] | keyword[def] identifier[get_object_by_record] ( identifier[record] ):
literal[string]
keyword[if] keyword[not] identifier[record] :
keyword[return] keyword[None]
keyword[if] identifier[record] . identifier[get] ( literal[string] ):
keyword[return] identifier[get_object_by_uid] ( identifier[record] [ literal[string] ])
keyword[if] identifier[record] . identifier[get] ( literal[string] ):
keyword[return] identifier[get_object_by_path] ( identifier[record] [ literal[string] ])
keyword[if] identifier[record] . identifier[get] ( literal[string] ) keyword[and] identifier[record] . identifier[get] ( literal[string] ):
identifier[path] = literal[string] . identifier[join] ([ identifier[record] [ literal[string] ], identifier[record] [ literal[string] ]])
keyword[return] identifier[get_object_by_path] ( identifier[path] )
identifier[logger] . identifier[warn] ( literal[string] % identifier[record] )
keyword[return] keyword[None] | def get_object_by_record(record):
"""Find an object by a given record
Inspects request the record to locate an object
:param record: A dictionary representation of an object
:type record: dict
:returns: Found Object or None
:rtype: object
"""
# nothing to do here
if not record:
return None # depends on [control=['if'], data=[]]
if record.get('uid'):
return get_object_by_uid(record['uid']) # depends on [control=['if'], data=[]]
if record.get('path'):
return get_object_by_path(record['path']) # depends on [control=['if'], data=[]]
if record.get('parent_path') and record.get('id'):
path = '/'.join([record['parent_path'], record['id']])
return get_object_by_path(path) # depends on [control=['if'], data=[]]
logger.warn("get_object_by_record::No object found! record='%r'" % record)
return None |
def get_item(exchange_id, format=u"Default"):
    """
    Requests a calendar item from the store.

    exchange_id is the id for this event in the Exchange store — either a
    single id or a list of ids.

    format controls how much data you get back from Exchange. Full docs are here, but acceptible values
    are IdOnly, Default, and AllProperties.

    http://msdn.microsoft.com/en-us/library/aa564509(v=exchg.140).aspx

    <m:GetItem xmlns:m="http://schemas.microsoft.com/exchange/services/2006/messages"
               xmlns:t="http://schemas.microsoft.com/exchange/services/2006/types">
      <m:ItemShape>
        <t:BaseShape>{format}</t:BaseShape>
      </m:ItemShape>
      <m:ItemIds>
        <t:ItemId Id="{exchange_id}"/>
      </m:ItemIds>
    </m:GetItem>
    """
    # isinstance (not `type(x) == list`) so list subclasses are accepted
    # too; a non-list id is wrapped as a single-element request.
    if isinstance(exchange_id, list):
        elements = [T.ItemId(Id=item) for item in exchange_id]
    else:
        elements = [T.ItemId(Id=exchange_id)]

    root = M.GetItem(
        M.ItemShape(
            T.BaseShape(format)
        ),
        M.ItemIds(
            *elements
        )
    )
    return root
return root | def function[get_item, parameter[exchange_id, format]]:
constant[
Requests a calendar item from the store.
exchange_id is the id for this event in the Exchange store.
format controls how much data you get back from Exchange. Full docs are here, but acceptible values
are IdOnly, Default, and AllProperties.
http://msdn.microsoft.com/en-us/library/aa564509(v=exchg.140).aspx
<m:GetItem xmlns:m="http://schemas.microsoft.com/exchange/services/2006/messages"
xmlns:t="http://schemas.microsoft.com/exchange/services/2006/types">
<m:ItemShape>
<t:BaseShape>{format}</t:BaseShape>
</m:ItemShape>
<m:ItemIds>
<t:ItemId Id="{exchange_id}"/>
</m:ItemIds>
</m:GetItem>
]
variable[elements] assign[=] call[name[list], parameter[]]
if compare[call[name[type], parameter[name[exchange_id]]] equal[==] name[list]] begin[:]
for taget[name[item]] in starred[name[exchange_id]] begin[:]
call[name[elements].append, parameter[call[name[T].ItemId, parameter[]]]]
variable[root] assign[=] call[name[M].GetItem, parameter[call[name[M].ItemShape, parameter[call[name[T].BaseShape, parameter[name[format]]]]], call[name[M].ItemIds, parameter[<ast.Starred object at 0x7da1b281b6a0>]]]]
return[name[root]] | keyword[def] identifier[get_item] ( identifier[exchange_id] , identifier[format] = literal[string] ):
literal[string]
identifier[elements] = identifier[list] ()
keyword[if] identifier[type] ( identifier[exchange_id] )== identifier[list] :
keyword[for] identifier[item] keyword[in] identifier[exchange_id] :
identifier[elements] . identifier[append] ( identifier[T] . identifier[ItemId] ( identifier[Id] = identifier[item] ))
keyword[else] :
identifier[elements] =[ identifier[T] . identifier[ItemId] ( identifier[Id] = identifier[exchange_id] )]
identifier[root] = identifier[M] . identifier[GetItem] (
identifier[M] . identifier[ItemShape] (
identifier[T] . identifier[BaseShape] ( identifier[format] )
),
identifier[M] . identifier[ItemIds] (
* identifier[elements]
)
)
keyword[return] identifier[root] | def get_item(exchange_id, format=u'Default'):
"""
Requests a calendar item from the store.
exchange_id is the id for this event in the Exchange store.
format controls how much data you get back from Exchange. Full docs are here, but acceptible values
are IdOnly, Default, and AllProperties.
http://msdn.microsoft.com/en-us/library/aa564509(v=exchg.140).aspx
<m:GetItem xmlns:m="http://schemas.microsoft.com/exchange/services/2006/messages"
xmlns:t="http://schemas.microsoft.com/exchange/services/2006/types">
<m:ItemShape>
<t:BaseShape>{format}</t:BaseShape>
</m:ItemShape>
<m:ItemIds>
<t:ItemId Id="{exchange_id}"/>
</m:ItemIds>
</m:GetItem>
"""
elements = list()
if type(exchange_id) == list:
for item in exchange_id:
elements.append(T.ItemId(Id=item)) # depends on [control=['for'], data=['item']] # depends on [control=['if'], data=[]]
else:
elements = [T.ItemId(Id=exchange_id)]
root = M.GetItem(M.ItemShape(T.BaseShape(format)), M.ItemIds(*elements))
return root |
def save_xml(self, doc, element):
    '''Save this message_sending object into an xml.dom.Element object.'''
    # Emit one <targets> child per target condition, each tagged with the
    # rtsExt:condition_ext xsi:type and filled in by the condition itself.
    for target_cond in self._targets:
        target_el = doc.createElementNS(RTS_NS, RTS_NS_S + 'targets')
        target_el.setAttributeNS(XSI_NS, XSI_NS_S + 'type',
                                 'rtsExt:condition_ext')
        target_cond.save_xml(doc, target_el)
        element.appendChild(target_el)
constant[Save this message_sending object into an xml.dom.Element object.]
for taget[name[cond]] in starred[name[self]._targets] begin[:]
variable[new_element] assign[=] call[name[doc].createElementNS, parameter[name[RTS_NS], binary_operation[name[RTS_NS_S] + constant[targets]]]]
call[name[new_element].setAttributeNS, parameter[name[XSI_NS], binary_operation[name[XSI_NS_S] + constant[type]], constant[rtsExt:condition_ext]]]
call[name[cond].save_xml, parameter[name[doc], name[new_element]]]
call[name[element].appendChild, parameter[name[new_element]]] | keyword[def] identifier[save_xml] ( identifier[self] , identifier[doc] , identifier[element] ):
literal[string]
keyword[for] identifier[cond] keyword[in] identifier[self] . identifier[_targets] :
identifier[new_element] = identifier[doc] . identifier[createElementNS] ( identifier[RTS_NS] , identifier[RTS_NS_S] + literal[string] )
identifier[new_element] . identifier[setAttributeNS] ( identifier[XSI_NS] , identifier[XSI_NS_S] + literal[string] , literal[string] )
identifier[cond] . identifier[save_xml] ( identifier[doc] , identifier[new_element] )
identifier[element] . identifier[appendChild] ( identifier[new_element] ) | def save_xml(self, doc, element):
"""Save this message_sending object into an xml.dom.Element object."""
for cond in self._targets:
new_element = doc.createElementNS(RTS_NS, RTS_NS_S + 'targets')
new_element.setAttributeNS(XSI_NS, XSI_NS_S + 'type', 'rtsExt:condition_ext')
cond.save_xml(doc, new_element)
element.appendChild(new_element) # depends on [control=['for'], data=['cond']] |
def to_even_columns(data, headers=None):
    """
    Nicely format the 2-dimensional list into evenly spaced columns.

    :param data: 2-dimensional list (rows of string cells).
    :param headers: optional flat list of column-name strings; when given,
        a header row and a dashed separator are prepended.
    :return: the formatted text, one line per row (each line newline-terminated);
        the empty string when there is nothing to format.
    """
    result = ''
    # Collect every cell width up front so the column width accounts for both
    # data cells and (whole) header words.  The original code measured headers
    # character-by-character, so a long header could never widen the columns.
    widths = [len(word) for row in data for word in row]
    if headers:
        widths.extend(len(word) for word in headers)
    if not widths:
        # Nothing to lay out; avoid max() on an empty sequence.
        return result
    col_width = max(widths) + 2  # padding
    if headers:
        result += "".join(word.ljust(col_width) for word in headers) + "\n"
        result += '-' * col_width * len(headers) + "\n"
    for row in data:
        result += "".join(word.ljust(col_width) for word in row) + "\n"
    return result
constant[
Nicely format the 2-dimensional list into evenly spaced columns
]
variable[result] assign[=] constant[]
variable[col_width] assign[=] binary_operation[call[name[max], parameter[<ast.GeneratorExp object at 0x7da18f00caf0>]] + constant[2]]
if name[headers] begin[:]
variable[header_width] assign[=] binary_operation[call[name[max], parameter[<ast.GeneratorExp object at 0x7da18eb56470>]] + constant[2]]
if compare[name[header_width] greater[>] name[col_width]] begin[:]
variable[col_width] assign[=] name[header_width]
<ast.AugAssign object at 0x7da204961d50>
<ast.AugAssign object at 0x7da204962110>
for taget[name[row]] in starred[name[data]] begin[:]
<ast.AugAssign object at 0x7da204962b60>
return[name[result]] | keyword[def] identifier[to_even_columns] ( identifier[data] , identifier[headers] = keyword[None] ):
literal[string]
identifier[result] = literal[string]
identifier[col_width] = identifier[max] ( identifier[len] ( identifier[word] ) keyword[for] identifier[row] keyword[in] identifier[data] keyword[for] identifier[word] keyword[in] identifier[row] )+ literal[int]
keyword[if] identifier[headers] :
identifier[header_width] = identifier[max] ( identifier[len] ( identifier[word] ) keyword[for] identifier[row] keyword[in] identifier[headers] keyword[for] identifier[word] keyword[in] identifier[row] )+ literal[int]
keyword[if] identifier[header_width] > identifier[col_width] :
identifier[col_width] = identifier[header_width]
identifier[result] += literal[string] . identifier[join] ( identifier[word] . identifier[ljust] ( identifier[col_width] ) keyword[for] identifier[word] keyword[in] identifier[headers] )+ literal[string]
identifier[result] += literal[string] * identifier[col_width] * identifier[len] ( identifier[headers] )+ literal[string]
keyword[for] identifier[row] keyword[in] identifier[data] :
identifier[result] += literal[string] . identifier[join] ( identifier[word] . identifier[ljust] ( identifier[col_width] ) keyword[for] identifier[word] keyword[in] identifier[row] )+ literal[string]
keyword[return] identifier[result] | def to_even_columns(data, headers=None):
"""
Nicely format the 2-dimensional list into evenly spaced columns
"""
result = ''
col_width = max((len(word) for row in data for word in row)) + 2 # padding
if headers:
header_width = max((len(word) for row in headers for word in row)) + 2
if header_width > col_width:
col_width = header_width # depends on [control=['if'], data=['header_width', 'col_width']]
result += ''.join((word.ljust(col_width) for word in headers)) + '\n'
result += '-' * col_width * len(headers) + '\n' # depends on [control=['if'], data=[]]
for row in data:
result += ''.join((word.ljust(col_width) for word in row)) + '\n' # depends on [control=['for'], data=['row']]
return result |
def traverse_to_chron_var(temp_sheet):
    """
    Traverse down to the row that has the first variable
    :param obj temp_sheet:
    :return int:
    """
    row = 0
    last_row = temp_sheet.nrows - 1
    # Scan column 0 until the 'Parameter' marker; the first variable sits
    # on the row immediately after it.
    while row < last_row:
        marker_found = 'Parameter' in temp_sheet.cell_value(row, 0)
        row += 1
        if marker_found:
            break
    logger_excel.info("traverse_to_chron_var: row:{}".format(row))
    return row
constant[
Traverse down to the row that has the first variable
:param obj temp_sheet:
:return int:
]
variable[row] assign[=] constant[0]
while compare[name[row] less[<] binary_operation[name[temp_sheet].nrows - constant[1]]] begin[:]
if compare[constant[Parameter] in call[name[temp_sheet].cell_value, parameter[name[row], constant[0]]]] begin[:]
<ast.AugAssign object at 0x7da2044c0400>
break
<ast.AugAssign object at 0x7da2044c1f60>
call[name[logger_excel].info, parameter[call[constant[traverse_to_chron_var: row:{}].format, parameter[name[row]]]]]
return[name[row]] | keyword[def] identifier[traverse_to_chron_var] ( identifier[temp_sheet] ):
literal[string]
identifier[row] = literal[int]
keyword[while] identifier[row] < identifier[temp_sheet] . identifier[nrows] - literal[int] :
keyword[if] literal[string] keyword[in] identifier[temp_sheet] . identifier[cell_value] ( identifier[row] , literal[int] ):
identifier[row] += literal[int]
keyword[break]
identifier[row] += literal[int]
identifier[logger_excel] . identifier[info] ( literal[string] . identifier[format] ( identifier[row] ))
keyword[return] identifier[row] | def traverse_to_chron_var(temp_sheet):
"""
Traverse down to the row that has the first variable
:param obj temp_sheet:
:return int:
"""
row = 0
while row < temp_sheet.nrows - 1:
if 'Parameter' in temp_sheet.cell_value(row, 0):
row += 1
break # depends on [control=['if'], data=[]]
row += 1 # depends on [control=['while'], data=['row']]
logger_excel.info('traverse_to_chron_var: row:{}'.format(row))
return row |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.