| code (string, 75-104k chars) | code_sememe (string, 47-309k chars) | token_type (string, 215-214k chars) | code_dependency (string, 75-155k chars) |
|---|---|---|---|
def repeat_biweekly(self):
"""
    This function is unique because it creates an empty defaultdict,
adds in the event occurrences by creating an instance of Repeater,
then returns the defaultdict, likely to be merged into the 'main'
defaultdict (the one holding all event occurrences for this month).
"""
mycount = defaultdict(list)
d = self.event.l_start_date
while d.year != self.year or d.month != self.month:
d += timedelta(days=14)
r = self.__class__(
mycount, self.year, self.month, d.day, self.event.end_repeat,
self.event, num=self.num, count_first=True
)
r.repeat()
if self.event.is_chunk() and r.count:
r.day = min(r.count)
r.repeat_chunk(self.event.start_end_diff)
return r.count
|
def function[repeat_biweekly, parameter[self]]:
constant[
    This function is unique because it creates an empty defaultdict,
adds in the event occurrences by creating an instance of Repeater,
then returns the defaultdict, likely to be merged into the 'main'
defaultdict (the one holding all event occurrences for this month).
]
variable[mycount] assign[=] call[name[defaultdict], parameter[name[list]]]
variable[d] assign[=] name[self].event.l_start_date
while <ast.BoolOp object at 0x7da204623d30> begin[:]
<ast.AugAssign object at 0x7da204621000>
variable[r] assign[=] call[name[self].__class__, parameter[name[mycount], name[self].year, name[self].month, name[d].day, name[self].event.end_repeat, name[self].event]]
call[name[r].repeat, parameter[]]
if <ast.BoolOp object at 0x7da204622530> begin[:]
name[r].day assign[=] call[name[min], parameter[name[r].count]]
call[name[r].repeat_chunk, parameter[name[self].event.start_end_diff]]
return[name[r].count]
|
keyword[def] identifier[repeat_biweekly] ( identifier[self] ):
literal[string]
identifier[mycount] = identifier[defaultdict] ( identifier[list] )
identifier[d] = identifier[self] . identifier[event] . identifier[l_start_date]
keyword[while] identifier[d] . identifier[year] != identifier[self] . identifier[year] keyword[or] identifier[d] . identifier[month] != identifier[self] . identifier[month] :
identifier[d] += identifier[timedelta] ( identifier[days] = literal[int] )
identifier[r] = identifier[self] . identifier[__class__] (
identifier[mycount] , identifier[self] . identifier[year] , identifier[self] . identifier[month] , identifier[d] . identifier[day] , identifier[self] . identifier[event] . identifier[end_repeat] ,
identifier[self] . identifier[event] , identifier[num] = identifier[self] . identifier[num] , identifier[count_first] = keyword[True]
)
identifier[r] . identifier[repeat] ()
keyword[if] identifier[self] . identifier[event] . identifier[is_chunk] () keyword[and] identifier[r] . identifier[count] :
identifier[r] . identifier[day] = identifier[min] ( identifier[r] . identifier[count] )
identifier[r] . identifier[repeat_chunk] ( identifier[self] . identifier[event] . identifier[start_end_diff] )
keyword[return] identifier[r] . identifier[count]
|
def repeat_biweekly(self):
"""
    This function is unique because it creates an empty defaultdict,
adds in the event occurrences by creating an instance of Repeater,
then returns the defaultdict, likely to be merged into the 'main'
defaultdict (the one holding all event occurrences for this month).
"""
mycount = defaultdict(list)
d = self.event.l_start_date
while d.year != self.year or d.month != self.month:
d += timedelta(days=14) # depends on [control=['while'], data=[]]
r = self.__class__(mycount, self.year, self.month, d.day, self.event.end_repeat, self.event, num=self.num, count_first=True)
r.repeat()
if self.event.is_chunk() and r.count:
r.day = min(r.count)
r.repeat_chunk(self.event.start_end_diff) # depends on [control=['if'], data=[]]
return r.count
|
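The stepping loop above is easier to follow in isolation. Below is a minimal, self-contained sketch of just the date arithmetic; `first_biweekly_day` is a hypothetical helper name, and the `Repeater` machinery is deliberately not reproduced:

```python
# Standalone sketch of the 14-day stepping logic in repeat_biweekly.
from datetime import date, timedelta

def first_biweekly_day(start: date, year: int, month: int) -> int:
    """Advance 14 days at a time until the date lands in (year, month)."""
    d = start
    while d.year != year or d.month != month:
        d += timedelta(days=14)
    return d.day

# An event starting 2023-01-02 first recurs in March 2023 on the 13th:
print(first_biweekly_day(date(2023, 1, 2), 2023, 3))  # -> 13
```

As in the original, the loop assumes the target month does not precede `start`; otherwise it would never terminate.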
def add_from_lists(self, data_list=None, fun_list=None, dsp_list=None):
"""
Add multiple function and data nodes to dispatcher.
:param data_list:
It is a list of data node kwargs to be loaded.
:type data_list: list[dict], optional
:param fun_list:
It is a list of function node kwargs to be loaded.
:type fun_list: list[dict], optional
:param dsp_list:
It is a list of sub-dispatcher node kwargs to be loaded.
:type dsp_list: list[dict], optional
:returns:
- Data node ids.
- Function node ids.
- Sub-dispatcher node ids.
:rtype: (list[str], list[str], list[str])
.. seealso:: :func:`add_data`, :func:`add_func`, :func:`add_function`,
:func:`add_dispatcher`
**--------------------------------------------------------------------**
**Example**:
.. testsetup::
>>> dsp = Dispatcher(name='Dispatcher')
Define a data list::
>>> data_list = [
... {'data_id': 'a'},
... {'data_id': 'b'},
... {'data_id': 'c'},
... ]
    Define a function list::
>>> def func(a, b):
... return a + b
...
>>> fun_list = [
... {'function': func, 'inputs': ['a', 'b'], 'outputs': ['c']}
... ]
    Define a sub-dispatcher list::
>>> sub_dsp = Dispatcher(name='Sub-dispatcher')
>>> sub_dsp.add_function(function=func, inputs=['e', 'f'],
... outputs=['g'])
'func'
>>>
>>> dsp_list = [
... {'dsp_id': 'Sub', 'dsp': sub_dsp,
... 'inputs': {'a': 'e', 'b': 'f'}, 'outputs': {'g': 'c'}},
... ]
Add function and data nodes to dispatcher::
>>> dsp.add_from_lists(data_list, fun_list, dsp_list)
(['a', 'b', 'c'], ['func'], ['Sub'])
"""
if data_list: # Add data nodes.
data_ids = [self.add_data(**v) for v in data_list] # Data ids.
else:
data_ids = []
if fun_list: # Add function nodes.
fun_ids = [self.add_function(**v) for v in fun_list] # Func ids.
else:
fun_ids = []
if dsp_list: # Add dispatcher nodes.
dsp_ids = [self.add_dispatcher(**v) for v in dsp_list] # Dsp ids.
else:
dsp_ids = []
# Return data, function, and sub-dispatcher node ids.
return data_ids, fun_ids, dsp_ids
|
def function[add_from_lists, parameter[self, data_list, fun_list, dsp_list]]:
constant[
Add multiple function and data nodes to dispatcher.
:param data_list:
It is a list of data node kwargs to be loaded.
:type data_list: list[dict], optional
:param fun_list:
It is a list of function node kwargs to be loaded.
:type fun_list: list[dict], optional
:param dsp_list:
It is a list of sub-dispatcher node kwargs to be loaded.
:type dsp_list: list[dict], optional
:returns:
- Data node ids.
- Function node ids.
- Sub-dispatcher node ids.
:rtype: (list[str], list[str], list[str])
.. seealso:: :func:`add_data`, :func:`add_func`, :func:`add_function`,
:func:`add_dispatcher`
**--------------------------------------------------------------------**
**Example**:
.. testsetup::
>>> dsp = Dispatcher(name='Dispatcher')
Define a data list::
>>> data_list = [
... {'data_id': 'a'},
... {'data_id': 'b'},
... {'data_id': 'c'},
... ]
    Define a function list::
>>> def func(a, b):
... return a + b
...
>>> fun_list = [
... {'function': func, 'inputs': ['a', 'b'], 'outputs': ['c']}
... ]
    Define a sub-dispatcher list::
>>> sub_dsp = Dispatcher(name='Sub-dispatcher')
>>> sub_dsp.add_function(function=func, inputs=['e', 'f'],
... outputs=['g'])
'func'
>>>
>>> dsp_list = [
... {'dsp_id': 'Sub', 'dsp': sub_dsp,
... 'inputs': {'a': 'e', 'b': 'f'}, 'outputs': {'g': 'c'}},
... ]
Add function and data nodes to dispatcher::
>>> dsp.add_from_lists(data_list, fun_list, dsp_list)
(['a', 'b', 'c'], ['func'], ['Sub'])
]
if name[data_list] begin[:]
variable[data_ids] assign[=] <ast.ListComp object at 0x7da20c992b30>
if name[fun_list] begin[:]
variable[fun_ids] assign[=] <ast.ListComp object at 0x7da1b24afe80>
if name[dsp_list] begin[:]
variable[dsp_ids] assign[=] <ast.ListComp object at 0x7da1b24ad300>
return[tuple[[<ast.Name object at 0x7da1b24ae0b0>, <ast.Name object at 0x7da1b24acbb0>, <ast.Name object at 0x7da1b24ac250>]]]
|
keyword[def] identifier[add_from_lists] ( identifier[self] , identifier[data_list] = keyword[None] , identifier[fun_list] = keyword[None] , identifier[dsp_list] = keyword[None] ):
literal[string]
keyword[if] identifier[data_list] :
identifier[data_ids] =[ identifier[self] . identifier[add_data] (** identifier[v] ) keyword[for] identifier[v] keyword[in] identifier[data_list] ]
keyword[else] :
identifier[data_ids] =[]
keyword[if] identifier[fun_list] :
identifier[fun_ids] =[ identifier[self] . identifier[add_function] (** identifier[v] ) keyword[for] identifier[v] keyword[in] identifier[fun_list] ]
keyword[else] :
identifier[fun_ids] =[]
keyword[if] identifier[dsp_list] :
identifier[dsp_ids] =[ identifier[self] . identifier[add_dispatcher] (** identifier[v] ) keyword[for] identifier[v] keyword[in] identifier[dsp_list] ]
keyword[else] :
identifier[dsp_ids] =[]
keyword[return] identifier[data_ids] , identifier[fun_ids] , identifier[dsp_ids]
|
def add_from_lists(self, data_list=None, fun_list=None, dsp_list=None):
"""
Add multiple function and data nodes to dispatcher.
:param data_list:
It is a list of data node kwargs to be loaded.
:type data_list: list[dict], optional
:param fun_list:
It is a list of function node kwargs to be loaded.
:type fun_list: list[dict], optional
:param dsp_list:
It is a list of sub-dispatcher node kwargs to be loaded.
:type dsp_list: list[dict], optional
:returns:
- Data node ids.
- Function node ids.
- Sub-dispatcher node ids.
:rtype: (list[str], list[str], list[str])
.. seealso:: :func:`add_data`, :func:`add_func`, :func:`add_function`,
:func:`add_dispatcher`
**--------------------------------------------------------------------**
**Example**:
.. testsetup::
>>> dsp = Dispatcher(name='Dispatcher')
Define a data list::
>>> data_list = [
... {'data_id': 'a'},
... {'data_id': 'b'},
... {'data_id': 'c'},
... ]
    Define a function list::
>>> def func(a, b):
... return a + b
...
>>> fun_list = [
... {'function': func, 'inputs': ['a', 'b'], 'outputs': ['c']}
... ]
    Define a sub-dispatcher list::
>>> sub_dsp = Dispatcher(name='Sub-dispatcher')
>>> sub_dsp.add_function(function=func, inputs=['e', 'f'],
... outputs=['g'])
'func'
>>>
>>> dsp_list = [
... {'dsp_id': 'Sub', 'dsp': sub_dsp,
... 'inputs': {'a': 'e', 'b': 'f'}, 'outputs': {'g': 'c'}},
... ]
Add function and data nodes to dispatcher::
>>> dsp.add_from_lists(data_list, fun_list, dsp_list)
(['a', 'b', 'c'], ['func'], ['Sub'])
"""
if data_list: # Add data nodes.
data_ids = [self.add_data(**v) for v in data_list] # Data ids. # depends on [control=['if'], data=[]]
else:
data_ids = []
if fun_list: # Add function nodes.
fun_ids = [self.add_function(**v) for v in fun_list] # Func ids. # depends on [control=['if'], data=[]]
else:
fun_ids = []
if dsp_list: # Add dispatcher nodes.
dsp_ids = [self.add_dispatcher(**v) for v in dsp_list] # Dsp ids. # depends on [control=['if'], data=[]]
else:
dsp_ids = []
# Return data, function, and sub-dispatcher node ids.
return (data_ids, fun_ids, dsp_ids)
|
def get_comments(self):
"""
:calls: `GET /gists/:gist_id/comments <http://developer.github.com/v3/gists/comments>`_
:rtype: :class:`github.PaginatedList.PaginatedList` of :class:`github.GistComment.GistComment`
"""
return github.PaginatedList.PaginatedList(
github.GistComment.GistComment,
self._requester,
self.url + "/comments",
None
)
|
def function[get_comments, parameter[self]]:
constant[
:calls: `GET /gists/:gist_id/comments <http://developer.github.com/v3/gists/comments>`_
:rtype: :class:`github.PaginatedList.PaginatedList` of :class:`github.GistComment.GistComment`
]
return[call[name[github].PaginatedList.PaginatedList, parameter[name[github].GistComment.GistComment, name[self]._requester, binary_operation[name[self].url + constant[/comments]], constant[None]]]]
|
keyword[def] identifier[get_comments] ( identifier[self] ):
literal[string]
keyword[return] identifier[github] . identifier[PaginatedList] . identifier[PaginatedList] (
identifier[github] . identifier[GistComment] . identifier[GistComment] ,
identifier[self] . identifier[_requester] ,
identifier[self] . identifier[url] + literal[string] ,
keyword[None]
)
|
def get_comments(self):
"""
:calls: `GET /gists/:gist_id/comments <http://developer.github.com/v3/gists/comments>`_
:rtype: :class:`github.PaginatedList.PaginatedList` of :class:`github.GistComment.GistComment`
"""
return github.PaginatedList.PaginatedList(github.GistComment.GistComment, self._requester, self.url + '/comments', None)
|
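For context, a hedged usage sketch of `get_comments` through PyGithub; the token and gist id below are placeholders, not values from the source:

```python
# Hypothetical PyGithub session; "<token>" and the gist id are placeholders.
from github import Github

g = Github("<token>")
gist = g.get_gist("aa5a315d61ae9438b18d")
for comment in gist.get_comments():  # the PaginatedList fetches pages lazily
    print(comment.body)
```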
def add_pool(self, auth, attr):
""" Create a pool according to `attr`.
* `auth` [BaseAuth]
AAA options.
* `attr` [pool_attr]
A dict containing the attributes the new pool should have.
Returns a dict describing the pool which was added.
This is the documentation of the internal backend function. It's
    exposed over XML-RPC; please also see the XML-RPC documentation for
:py:func:`nipap.xmlrpc.NipapXMLRPC.add_pool` for full
understanding.
"""
self._logger.debug("add_pool called; attrs: %s" % unicode(attr))
# sanity check - do we have all attributes?
req_attr = ['name', 'description', 'default_type']
self._check_pool_attr(attr, req_attr)
insert, params = self._sql_expand_insert(attr)
sql = "INSERT INTO ip_net_pool " + insert
self._execute(sql, params)
pool_id = self._lastrowid()
pool = self.list_pool(auth, { 'id': pool_id })[0]
# write to audit table
audit_params = {
'pool_id': pool['id'],
'pool_name': pool['name'],
'username': auth.username,
'authenticated_as': auth.authenticated_as,
'full_name': auth.full_name,
'authoritative_source': auth.authoritative_source,
'description': 'Added pool %s with attr: %s' % (pool['name'], unicode(attr))
}
sql, params = self._sql_expand_insert(audit_params)
self._execute('INSERT INTO ip_net_log %s' % sql, params)
return pool
|
def function[add_pool, parameter[self, auth, attr]]:
constant[ Create a pool according to `attr`.
* `auth` [BaseAuth]
AAA options.
* `attr` [pool_attr]
A dict containing the attributes the new pool should have.
Returns a dict describing the pool which was added.
This is the documentation of the internal backend function. It's
    exposed over XML-RPC; please also see the XML-RPC documentation for
:py:func:`nipap.xmlrpc.NipapXMLRPC.add_pool` for full
understanding.
]
call[name[self]._logger.debug, parameter[binary_operation[constant[add_pool called; attrs: %s] <ast.Mod object at 0x7da2590d6920> call[name[unicode], parameter[name[attr]]]]]]
variable[req_attr] assign[=] list[[<ast.Constant object at 0x7da204620190>, <ast.Constant object at 0x7da204620be0>, <ast.Constant object at 0x7da204621ff0>]]
call[name[self]._check_pool_attr, parameter[name[attr], name[req_attr]]]
<ast.Tuple object at 0x7da204622110> assign[=] call[name[self]._sql_expand_insert, parameter[name[attr]]]
variable[sql] assign[=] binary_operation[constant[INSERT INTO ip_net_pool ] + name[insert]]
call[name[self]._execute, parameter[name[sql], name[params]]]
variable[pool_id] assign[=] call[name[self]._lastrowid, parameter[]]
variable[pool] assign[=] call[call[name[self].list_pool, parameter[name[auth], dictionary[[<ast.Constant object at 0x7da2046232e0>], [<ast.Name object at 0x7da204622620>]]]]][constant[0]]
variable[audit_params] assign[=] dictionary[[<ast.Constant object at 0x7da204623cd0>, <ast.Constant object at 0x7da2046225f0>, <ast.Constant object at 0x7da204622740>, <ast.Constant object at 0x7da204620640>, <ast.Constant object at 0x7da2046212a0>, <ast.Constant object at 0x7da204620280>, <ast.Constant object at 0x7da2046226b0>], [<ast.Subscript object at 0x7da204623c10>, <ast.Subscript object at 0x7da204622410>, <ast.Attribute object at 0x7da204623e80>, <ast.Attribute object at 0x7da204622050>, <ast.Attribute object at 0x7da204620b50>, <ast.Attribute object at 0x7da204621450>, <ast.BinOp object at 0x7da204621d80>]]
<ast.Tuple object at 0x7da204622590> assign[=] call[name[self]._sql_expand_insert, parameter[name[audit_params]]]
call[name[self]._execute, parameter[binary_operation[constant[INSERT INTO ip_net_log %s] <ast.Mod object at 0x7da2590d6920> name[sql]], name[params]]]
return[name[pool]]
|
keyword[def] identifier[add_pool] ( identifier[self] , identifier[auth] , identifier[attr] ):
literal[string]
identifier[self] . identifier[_logger] . identifier[debug] ( literal[string] % identifier[unicode] ( identifier[attr] ))
identifier[req_attr] =[ literal[string] , literal[string] , literal[string] ]
identifier[self] . identifier[_check_pool_attr] ( identifier[attr] , identifier[req_attr] )
identifier[insert] , identifier[params] = identifier[self] . identifier[_sql_expand_insert] ( identifier[attr] )
identifier[sql] = literal[string] + identifier[insert]
identifier[self] . identifier[_execute] ( identifier[sql] , identifier[params] )
identifier[pool_id] = identifier[self] . identifier[_lastrowid] ()
identifier[pool] = identifier[self] . identifier[list_pool] ( identifier[auth] ,{ literal[string] : identifier[pool_id] })[ literal[int] ]
identifier[audit_params] ={
literal[string] : identifier[pool] [ literal[string] ],
literal[string] : identifier[pool] [ literal[string] ],
literal[string] : identifier[auth] . identifier[username] ,
literal[string] : identifier[auth] . identifier[authenticated_as] ,
literal[string] : identifier[auth] . identifier[full_name] ,
literal[string] : identifier[auth] . identifier[authoritative_source] ,
literal[string] : literal[string] %( identifier[pool] [ literal[string] ], identifier[unicode] ( identifier[attr] ))
}
identifier[sql] , identifier[params] = identifier[self] . identifier[_sql_expand_insert] ( identifier[audit_params] )
identifier[self] . identifier[_execute] ( literal[string] % identifier[sql] , identifier[params] )
keyword[return] identifier[pool]
|
def add_pool(self, auth, attr):
""" Create a pool according to `attr`.
* `auth` [BaseAuth]
AAA options.
* `attr` [pool_attr]
A dict containing the attributes the new pool should have.
Returns a dict describing the pool which was added.
This is the documentation of the internal backend function. It's
    exposed over XML-RPC; please also see the XML-RPC documentation for
:py:func:`nipap.xmlrpc.NipapXMLRPC.add_pool` for full
understanding.
"""
self._logger.debug('add_pool called; attrs: %s' % unicode(attr))
# sanity check - do we have all attributes?
req_attr = ['name', 'description', 'default_type']
self._check_pool_attr(attr, req_attr)
(insert, params) = self._sql_expand_insert(attr)
sql = 'INSERT INTO ip_net_pool ' + insert
self._execute(sql, params)
pool_id = self._lastrowid()
pool = self.list_pool(auth, {'id': pool_id})[0]
# write to audit table
audit_params = {'pool_id': pool['id'], 'pool_name': pool['name'], 'username': auth.username, 'authenticated_as': auth.authenticated_as, 'full_name': auth.full_name, 'authoritative_source': auth.authoritative_source, 'description': 'Added pool %s with attr: %s' % (pool['name'], unicode(attr))}
(sql, params) = self._sql_expand_insert(audit_params)
self._execute('INSERT INTO ip_net_log %s' % sql, params)
return pool
|
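`_sql_expand_insert` is not shown in this row. A plausible reconstruction of the contract its call sites imply (a dynamic column list with `pyformat` placeholders, the paramstyle psycopg2 uses) might look like the sketch below; this is an illustration of the idea, not NIPAP's actual helper:

```python
# Illustrative stand-in for _sql_expand_insert: expand a dict of attributes
# into "(cols...) VALUES (%(col)s, ...)" plus the matching params dict.
def sql_expand_insert(attr):
    cols = sorted(attr)
    insert = ("(" + ", ".join(cols) + ") VALUES (" +
              ", ".join("%%(%s)s" % c for c in cols) + ")")
    return insert, attr

insert, params = sql_expand_insert(
    {'name': 'p1', 'description': 'demo', 'default_type': 'regular'})
print("INSERT INTO ip_net_pool " + insert)
# INSERT INTO ip_net_pool (default_type, description, name)
#     VALUES (%(default_type)s, %(description)s, %(name)s)
```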
def rebuild(self):
"""
Rebuilds the scene based on the current settings.
:param start | <QDate>
end | <QDate>
"""
gantt = self.ganttWidget()
scale = gantt.timescale()
rect = self.sceneRect()
header = gantt.treeWidget().header()
# define the rendering options
options = {}
options['start'] = gantt.dateStart()
options['end'] = gantt.dateEnd()
options['cell_width'] = gantt.cellWidth()
options['cell_height'] = gantt.cellHeight()
options['rect'] = rect
options['height'] = rect.height()
options['header_height'] = header.height()
if not header.isVisible():
options['header_height'] = 0
opt = XGanttRenderOptions(**options)
# rebuild the minute timescale
if scale in (gantt.Timescale.Minute, gantt.Timescale.Hour):
opt.start = gantt.dateTimeStart()
opt.end = gantt.dateTimeEnd()
self.rebuildHour(opt)
# rebuild the day timescale
elif scale == gantt.Timescale.Day:
self.rebuildDay(opt)
# rebuild the week timescale
elif scale == gantt.Timescale.Week:
self.rebuildWeek(opt)
self.rebuildTiles()
|
def function[rebuild, parameter[self]]:
constant[
Rebuilds the scene based on the current settings.
:param start | <QDate>
end | <QDate>
]
variable[gantt] assign[=] call[name[self].ganttWidget, parameter[]]
variable[scale] assign[=] call[name[gantt].timescale, parameter[]]
variable[rect] assign[=] call[name[self].sceneRect, parameter[]]
variable[header] assign[=] call[call[name[gantt].treeWidget, parameter[]].header, parameter[]]
variable[options] assign[=] dictionary[[], []]
call[name[options]][constant[start]] assign[=] call[name[gantt].dateStart, parameter[]]
call[name[options]][constant[end]] assign[=] call[name[gantt].dateEnd, parameter[]]
call[name[options]][constant[cell_width]] assign[=] call[name[gantt].cellWidth, parameter[]]
call[name[options]][constant[cell_height]] assign[=] call[name[gantt].cellHeight, parameter[]]
call[name[options]][constant[rect]] assign[=] name[rect]
call[name[options]][constant[height]] assign[=] call[name[rect].height, parameter[]]
call[name[options]][constant[header_height]] assign[=] call[name[header].height, parameter[]]
if <ast.UnaryOp object at 0x7da1b253a5c0> begin[:]
call[name[options]][constant[header_height]] assign[=] constant[0]
variable[opt] assign[=] call[name[XGanttRenderOptions], parameter[]]
if compare[name[scale] in tuple[[<ast.Attribute object at 0x7da1b253bb20>, <ast.Attribute object at 0x7da1b25390f0>]]] begin[:]
name[opt].start assign[=] call[name[gantt].dateTimeStart, parameter[]]
name[opt].end assign[=] call[name[gantt].dateTimeEnd, parameter[]]
call[name[self].rebuildHour, parameter[name[opt]]]
call[name[self].rebuildTiles, parameter[]]
|
keyword[def] identifier[rebuild] ( identifier[self] ):
literal[string]
identifier[gantt] = identifier[self] . identifier[ganttWidget] ()
identifier[scale] = identifier[gantt] . identifier[timescale] ()
identifier[rect] = identifier[self] . identifier[sceneRect] ()
identifier[header] = identifier[gantt] . identifier[treeWidget] (). identifier[header] ()
identifier[options] ={}
identifier[options] [ literal[string] ]= identifier[gantt] . identifier[dateStart] ()
identifier[options] [ literal[string] ]= identifier[gantt] . identifier[dateEnd] ()
identifier[options] [ literal[string] ]= identifier[gantt] . identifier[cellWidth] ()
identifier[options] [ literal[string] ]= identifier[gantt] . identifier[cellHeight] ()
identifier[options] [ literal[string] ]= identifier[rect]
identifier[options] [ literal[string] ]= identifier[rect] . identifier[height] ()
identifier[options] [ literal[string] ]= identifier[header] . identifier[height] ()
keyword[if] keyword[not] identifier[header] . identifier[isVisible] ():
identifier[options] [ literal[string] ]= literal[int]
identifier[opt] = identifier[XGanttRenderOptions] (** identifier[options] )
keyword[if] identifier[scale] keyword[in] ( identifier[gantt] . identifier[Timescale] . identifier[Minute] , identifier[gantt] . identifier[Timescale] . identifier[Hour] ):
identifier[opt] . identifier[start] = identifier[gantt] . identifier[dateTimeStart] ()
identifier[opt] . identifier[end] = identifier[gantt] . identifier[dateTimeEnd] ()
identifier[self] . identifier[rebuildHour] ( identifier[opt] )
keyword[elif] identifier[scale] == identifier[gantt] . identifier[Timescale] . identifier[Day] :
identifier[self] . identifier[rebuildDay] ( identifier[opt] )
keyword[elif] identifier[scale] == identifier[gantt] . identifier[Timescale] . identifier[Week] :
identifier[self] . identifier[rebuildWeek] ( identifier[opt] )
identifier[self] . identifier[rebuildTiles] ()
|
def rebuild(self):
"""
Rebuilds the scene based on the current settings.
:param start | <QDate>
end | <QDate>
"""
gantt = self.ganttWidget()
scale = gantt.timescale()
rect = self.sceneRect()
header = gantt.treeWidget().header() # define the rendering options
options = {}
options['start'] = gantt.dateStart()
options['end'] = gantt.dateEnd()
options['cell_width'] = gantt.cellWidth()
options['cell_height'] = gantt.cellHeight()
options['rect'] = rect
options['height'] = rect.height()
options['header_height'] = header.height()
if not header.isVisible():
options['header_height'] = 0 # depends on [control=['if'], data=[]]
opt = XGanttRenderOptions(**options) # rebuild the minute timescale
if scale in (gantt.Timescale.Minute, gantt.Timescale.Hour):
opt.start = gantt.dateTimeStart()
opt.end = gantt.dateTimeEnd()
self.rebuildHour(opt) # depends on [control=['if'], data=[]] # rebuild the day timescale
elif scale == gantt.Timescale.Day:
self.rebuildDay(opt) # depends on [control=['if'], data=[]] # rebuild the week timescale
elif scale == gantt.Timescale.Week:
self.rebuildWeek(opt) # depends on [control=['if'], data=[]]
self.rebuildTiles()
|
def _make_value_pb(value):
"""Helper for :func:`_make_list_value_pbs`.
:type value: scalar value
:param value: value to convert
:rtype: :class:`~google.protobuf.struct_pb2.Value`
:returns: value protobufs
:raises ValueError: if value is not of a known scalar type.
"""
if value is None:
return Value(null_value="NULL_VALUE")
if isinstance(value, (list, tuple)):
return Value(list_value=_make_list_value_pb(value))
if isinstance(value, bool):
return Value(bool_value=value)
if isinstance(value, six.integer_types):
return Value(string_value=str(value))
if isinstance(value, float):
if math.isnan(value):
return Value(string_value="NaN")
if math.isinf(value):
if value > 0:
return Value(string_value="Infinity")
else:
return Value(string_value="-Infinity")
return Value(number_value=value)
if isinstance(value, datetime_helpers.DatetimeWithNanoseconds):
return Value(string_value=value.rfc3339())
if isinstance(value, datetime.datetime):
return Value(string_value=_datetime_to_rfc3339(value))
if isinstance(value, datetime.date):
return Value(string_value=value.isoformat())
if isinstance(value, six.binary_type):
value = _try_to_coerce_bytes(value)
return Value(string_value=value)
if isinstance(value, six.text_type):
return Value(string_value=value)
if isinstance(value, ListValue):
return Value(list_value=value)
raise ValueError("Unknown type: %s" % (value,))
|
def function[_make_value_pb, parameter[value]]:
constant[Helper for :func:`_make_list_value_pbs`.
:type value: scalar value
:param value: value to convert
:rtype: :class:`~google.protobuf.struct_pb2.Value`
:returns: value protobufs
:raises ValueError: if value is not of a known scalar type.
]
if compare[name[value] is constant[None]] begin[:]
return[call[name[Value], parameter[]]]
if call[name[isinstance], parameter[name[value], tuple[[<ast.Name object at 0x7da204564100>, <ast.Name object at 0x7da2045643d0>]]]] begin[:]
return[call[name[Value], parameter[]]]
if call[name[isinstance], parameter[name[value], name[bool]]] begin[:]
return[call[name[Value], parameter[]]]
if call[name[isinstance], parameter[name[value], name[six].integer_types]] begin[:]
return[call[name[Value], parameter[]]]
if call[name[isinstance], parameter[name[value], name[float]]] begin[:]
if call[name[math].isnan, parameter[name[value]]] begin[:]
return[call[name[Value], parameter[]]]
if call[name[math].isinf, parameter[name[value]]] begin[:]
if compare[name[value] greater[>] constant[0]] begin[:]
return[call[name[Value], parameter[]]]
return[call[name[Value], parameter[]]]
if call[name[isinstance], parameter[name[value], name[datetime_helpers].DatetimeWithNanoseconds]] begin[:]
return[call[name[Value], parameter[]]]
if call[name[isinstance], parameter[name[value], name[datetime].datetime]] begin[:]
return[call[name[Value], parameter[]]]
if call[name[isinstance], parameter[name[value], name[datetime].date]] begin[:]
return[call[name[Value], parameter[]]]
if call[name[isinstance], parameter[name[value], name[six].binary_type]] begin[:]
variable[value] assign[=] call[name[_try_to_coerce_bytes], parameter[name[value]]]
return[call[name[Value], parameter[]]]
if call[name[isinstance], parameter[name[value], name[six].text_type]] begin[:]
return[call[name[Value], parameter[]]]
if call[name[isinstance], parameter[name[value], name[ListValue]]] begin[:]
return[call[name[Value], parameter[]]]
<ast.Raise object at 0x7da18dc99180>
|
keyword[def] identifier[_make_value_pb] ( identifier[value] ):
literal[string]
keyword[if] identifier[value] keyword[is] keyword[None] :
keyword[return] identifier[Value] ( identifier[null_value] = literal[string] )
keyword[if] identifier[isinstance] ( identifier[value] ,( identifier[list] , identifier[tuple] )):
keyword[return] identifier[Value] ( identifier[list_value] = identifier[_make_list_value_pb] ( identifier[value] ))
keyword[if] identifier[isinstance] ( identifier[value] , identifier[bool] ):
keyword[return] identifier[Value] ( identifier[bool_value] = identifier[value] )
keyword[if] identifier[isinstance] ( identifier[value] , identifier[six] . identifier[integer_types] ):
keyword[return] identifier[Value] ( identifier[string_value] = identifier[str] ( identifier[value] ))
keyword[if] identifier[isinstance] ( identifier[value] , identifier[float] ):
keyword[if] identifier[math] . identifier[isnan] ( identifier[value] ):
keyword[return] identifier[Value] ( identifier[string_value] = literal[string] )
keyword[if] identifier[math] . identifier[isinf] ( identifier[value] ):
keyword[if] identifier[value] > literal[int] :
keyword[return] identifier[Value] ( identifier[string_value] = literal[string] )
keyword[else] :
keyword[return] identifier[Value] ( identifier[string_value] = literal[string] )
keyword[return] identifier[Value] ( identifier[number_value] = identifier[value] )
keyword[if] identifier[isinstance] ( identifier[value] , identifier[datetime_helpers] . identifier[DatetimeWithNanoseconds] ):
keyword[return] identifier[Value] ( identifier[string_value] = identifier[value] . identifier[rfc3339] ())
keyword[if] identifier[isinstance] ( identifier[value] , identifier[datetime] . identifier[datetime] ):
keyword[return] identifier[Value] ( identifier[string_value] = identifier[_datetime_to_rfc3339] ( identifier[value] ))
keyword[if] identifier[isinstance] ( identifier[value] , identifier[datetime] . identifier[date] ):
keyword[return] identifier[Value] ( identifier[string_value] = identifier[value] . identifier[isoformat] ())
keyword[if] identifier[isinstance] ( identifier[value] , identifier[six] . identifier[binary_type] ):
identifier[value] = identifier[_try_to_coerce_bytes] ( identifier[value] )
keyword[return] identifier[Value] ( identifier[string_value] = identifier[value] )
keyword[if] identifier[isinstance] ( identifier[value] , identifier[six] . identifier[text_type] ):
keyword[return] identifier[Value] ( identifier[string_value] = identifier[value] )
keyword[if] identifier[isinstance] ( identifier[value] , identifier[ListValue] ):
keyword[return] identifier[Value] ( identifier[list_value] = identifier[value] )
keyword[raise] identifier[ValueError] ( literal[string] %( identifier[value] ,))
|
def _make_value_pb(value):
"""Helper for :func:`_make_list_value_pbs`.
:type value: scalar value
:param value: value to convert
:rtype: :class:`~google.protobuf.struct_pb2.Value`
:returns: value protobufs
:raises ValueError: if value is not of a known scalar type.
"""
if value is None:
return Value(null_value='NULL_VALUE') # depends on [control=['if'], data=[]]
if isinstance(value, (list, tuple)):
return Value(list_value=_make_list_value_pb(value)) # depends on [control=['if'], data=[]]
if isinstance(value, bool):
return Value(bool_value=value) # depends on [control=['if'], data=[]]
if isinstance(value, six.integer_types):
return Value(string_value=str(value)) # depends on [control=['if'], data=[]]
if isinstance(value, float):
if math.isnan(value):
return Value(string_value='NaN') # depends on [control=['if'], data=[]]
if math.isinf(value):
if value > 0:
return Value(string_value='Infinity') # depends on [control=['if'], data=[]]
else:
return Value(string_value='-Infinity') # depends on [control=['if'], data=[]]
return Value(number_value=value) # depends on [control=['if'], data=[]]
if isinstance(value, datetime_helpers.DatetimeWithNanoseconds):
return Value(string_value=value.rfc3339()) # depends on [control=['if'], data=[]]
if isinstance(value, datetime.datetime):
return Value(string_value=_datetime_to_rfc3339(value)) # depends on [control=['if'], data=[]]
if isinstance(value, datetime.date):
return Value(string_value=value.isoformat()) # depends on [control=['if'], data=[]]
if isinstance(value, six.binary_type):
value = _try_to_coerce_bytes(value)
return Value(string_value=value) # depends on [control=['if'], data=[]]
if isinstance(value, six.text_type):
return Value(string_value=value) # depends on [control=['if'], data=[]]
if isinstance(value, ListValue):
return Value(list_value=value) # depends on [control=['if'], data=[]]
raise ValueError('Unknown type: %s' % (value,))
|
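A short sketch exercising the mapping above, assuming `_make_value_pb` and its module-level imports (`six`, `math`, protobuf's `Value`) are in scope. Note the design choice visible in the code: integers are serialized as `string_value` because protobuf's `number_value` is a double and cannot represent every 64-bit integer exactly.

```python
# Assumes _make_value_pb and its imports (six, math, struct_pb2.Value) exist.
print(_make_value_pb(None))          # null_value: NULL_VALUE
print(_make_value_pb(42))            # string_value: "42" (int64-safe)
print(_make_value_pb(1.5))           # number_value: 1.5
print(_make_value_pb(float("inf")))  # string_value: "Infinity"
```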
def ping_external_urls_handler(sender, **kwargs):
"""
    Ping external URLs when an entry is saved.
"""
entry = kwargs['instance']
if entry.is_visible and settings.SAVE_PING_EXTERNAL_URLS:
ExternalUrlsPinger(entry)
|
def function[ping_external_urls_handler, parameter[sender]]:
constant[
    Ping external URLs when an entry is saved.
]
variable[entry] assign[=] call[name[kwargs]][constant[instance]]
if <ast.BoolOp object at 0x7da1b222e650> begin[:]
call[name[ExternalUrlsPinger], parameter[name[entry]]]
|
keyword[def] identifier[ping_external_urls_handler] ( identifier[sender] ,** identifier[kwargs] ):
literal[string]
identifier[entry] = identifier[kwargs] [ literal[string] ]
keyword[if] identifier[entry] . identifier[is_visible] keyword[and] identifier[settings] . identifier[SAVE_PING_EXTERNAL_URLS] :
identifier[ExternalUrlsPinger] ( identifier[entry] )
|
def ping_external_urls_handler(sender, **kwargs):
"""
    Ping external URLs when an entry is saved.
"""
entry = kwargs['instance']
if entry.is_visible and settings.SAVE_PING_EXTERNAL_URLS:
ExternalUrlsPinger(entry) # depends on [control=['if'], data=[]]
|
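A hedged wiring sketch for the handler, assuming Django's `post_save` signal and an `Entry` model; both names are inferred, not confirmed by this row:

```python
# Hypothetical signal hookup; Entry is an assumed model name.
from django.db.models.signals import post_save

post_save.connect(ping_external_urls_handler, sender=Entry)
```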
def _std_tuple_of(var=None, std=None, interval=None):
"""
    Convenience function for plotting. Given one of var, standard
deviation, or interval, return the std. Any of the three can be an
iterable list.
Examples
--------
    >>> _std_tuple_of(var=[1, 4, 9])
    array([1., 2., 3.])
"""
if std is not None:
if np.isscalar(std):
std = (std,)
return std
if interval is not None:
if np.isscalar(interval):
interval = (interval,)
return norm.interval(interval)[1]
if var is None:
raise ValueError("no inputs were provided")
if np.isscalar(var):
var = (var,)
return np.sqrt(var)
|
def function[_std_tuple_of, parameter[var, std, interval]]:
constant[
    Convenience function for plotting. Given one of var, standard
deviation, or interval, return the std. Any of the three can be an
iterable list.
Examples
--------
    >>> _std_tuple_of(var=[1, 4, 9])
    array([1., 2., 3.])
]
if compare[name[std] is_not constant[None]] begin[:]
if call[name[np].isscalar, parameter[name[std]]] begin[:]
variable[std] assign[=] tuple[[<ast.Name object at 0x7da1b21c76a0>]]
return[name[std]]
if compare[name[interval] is_not constant[None]] begin[:]
if call[name[np].isscalar, parameter[name[interval]]] begin[:]
variable[interval] assign[=] tuple[[<ast.Name object at 0x7da1b21c75e0>]]
return[call[call[name[norm].interval, parameter[name[interval]]]][constant[1]]]
if compare[name[var] is constant[None]] begin[:]
<ast.Raise object at 0x7da1b21c72b0>
if call[name[np].isscalar, parameter[name[var]]] begin[:]
variable[var] assign[=] tuple[[<ast.Name object at 0x7da1b21c7370>]]
return[call[name[np].sqrt, parameter[name[var]]]]
|
keyword[def] identifier[_std_tuple_of] ( identifier[var] = keyword[None] , identifier[std] = keyword[None] , identifier[interval] = keyword[None] ):
literal[string]
keyword[if] identifier[std] keyword[is] keyword[not] keyword[None] :
keyword[if] identifier[np] . identifier[isscalar] ( identifier[std] ):
identifier[std] =( identifier[std] ,)
keyword[return] identifier[std]
keyword[if] identifier[interval] keyword[is] keyword[not] keyword[None] :
keyword[if] identifier[np] . identifier[isscalar] ( identifier[interval] ):
identifier[interval] =( identifier[interval] ,)
keyword[return] identifier[norm] . identifier[interval] ( identifier[interval] )[ literal[int] ]
keyword[if] identifier[var] keyword[is] keyword[None] :
keyword[raise] identifier[ValueError] ( literal[string] )
keyword[if] identifier[np] . identifier[isscalar] ( identifier[var] ):
identifier[var] =( identifier[var] ,)
keyword[return] identifier[np] . identifier[sqrt] ( identifier[var] )
|
def _std_tuple_of(var=None, std=None, interval=None):
"""
    Convenience function for plotting. Given one of var, standard
deviation, or interval, return the std. Any of the three can be an
iterable list.
Examples
--------
    >>> _std_tuple_of(var=[1, 4, 9])
    array([1., 2., 3.])
"""
if std is not None:
if np.isscalar(std):
std = (std,) # depends on [control=['if'], data=[]]
return std # depends on [control=['if'], data=['std']]
if interval is not None:
if np.isscalar(interval):
interval = (interval,) # depends on [control=['if'], data=[]]
return norm.interval(interval)[1] # depends on [control=['if'], data=['interval']]
if var is None:
raise ValueError('no inputs were provided') # depends on [control=['if'], data=[]]
if np.isscalar(var):
var = (var,) # depends on [control=['if'], data=[]]
return np.sqrt(var)
|
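A short sketch exercising all three input modes, assuming the imports the function body relies on (`numpy as np`, `scipy.stats.norm` as `norm`):

```python
import numpy as np
from scipy.stats import norm  # the names _std_tuple_of's body relies on

print(_std_tuple_of(var=[1, 4, 9]))    # array([1., 2., 3.])
print(_std_tuple_of(std=2.0))          # (2.0,)
print(_std_tuple_of(interval=0.6827))  # upper bound of the central 68.27% interval, ~1.0
```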
def _set_subject(self, subject):
""" sets the subject value for the class instance
Args:
subject(dict, Uri, str): the subject for the class instance
"""
# if not subject:
# self.subject =
def test_uri(value):
""" test to see if the value is a uri or bnode
Returns: Uri or Bnode """
# .__wrapped__
if not isinstance(value, (Uri, BlankNode)):
try:
if value.startswith("_:"):
return BlankNode(value)
else:
return Uri(value)
except:
return BlankNode()
else:
return value
if isinstance(subject, dict):
self.subject = test_uri(subject['s'])
if isinstance(subject['o'], list):
for item in subject['o']:
self.add_property(subject['p'],
item)
else:
self.add_property(subject['p'],
subject['o'])
else:
self.subject = test_uri(subject)
|
def function[_set_subject, parameter[self, subject]]:
constant[ sets the subject value for the class instance
Args:
subject(dict, Uri, str): the subject for the class instance
]
def function[test_uri, parameter[value]]:
constant[ test to see if the value is a uri or bnode
Returns: Uri or Bnode ]
if <ast.UnaryOp object at 0x7da20e74bb80> begin[:]
<ast.Try object at 0x7da20e74bfd0>
if call[name[isinstance], parameter[name[subject], name[dict]]] begin[:]
name[self].subject assign[=] call[name[test_uri], parameter[call[name[subject]][constant[s]]]]
if call[name[isinstance], parameter[call[name[subject]][constant[o]], name[list]]] begin[:]
for taget[name[item]] in starred[call[name[subject]][constant[o]]] begin[:]
call[name[self].add_property, parameter[call[name[subject]][constant[p]], name[item]]]
|
keyword[def] identifier[_set_subject] ( identifier[self] , identifier[subject] ):
literal[string]
keyword[def] identifier[test_uri] ( identifier[value] ):
literal[string]
keyword[if] keyword[not] identifier[isinstance] ( identifier[value] ,( identifier[Uri] , identifier[BlankNode] )):
keyword[try] :
keyword[if] identifier[value] . identifier[startswith] ( literal[string] ):
keyword[return] identifier[BlankNode] ( identifier[value] )
keyword[else] :
keyword[return] identifier[Uri] ( identifier[value] )
keyword[except] :
keyword[return] identifier[BlankNode] ()
keyword[else] :
keyword[return] identifier[value]
keyword[if] identifier[isinstance] ( identifier[subject] , identifier[dict] ):
identifier[self] . identifier[subject] = identifier[test_uri] ( identifier[subject] [ literal[string] ])
keyword[if] identifier[isinstance] ( identifier[subject] [ literal[string] ], identifier[list] ):
keyword[for] identifier[item] keyword[in] identifier[subject] [ literal[string] ]:
identifier[self] . identifier[add_property] ( identifier[subject] [ literal[string] ],
identifier[item] )
keyword[else] :
identifier[self] . identifier[add_property] ( identifier[subject] [ literal[string] ],
identifier[subject] [ literal[string] ])
keyword[else] :
identifier[self] . identifier[subject] = identifier[test_uri] ( identifier[subject] )
|
def _set_subject(self, subject):
""" sets the subject value for the class instance
Args:
subject(dict, Uri, str): the subject for the class instance
"""
# if not subject:
# self.subject =
def test_uri(value):
""" test to see if the value is a uri or bnode
Returns: Uri or Bnode """
# .__wrapped__
if not isinstance(value, (Uri, BlankNode)):
try:
if value.startswith('_:'):
return BlankNode(value) # depends on [control=['if'], data=[]]
else:
return Uri(value) # depends on [control=['try'], data=[]]
except:
return BlankNode() # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]]
else:
return value
if isinstance(subject, dict):
self.subject = test_uri(subject['s'])
if isinstance(subject['o'], list):
for item in subject['o']:
self.add_property(subject['p'], item) # depends on [control=['for'], data=['item']] # depends on [control=['if'], data=[]]
else:
self.add_property(subject['p'], subject['o']) # depends on [control=['if'], data=[]]
else:
self.subject = test_uri(subject)
|
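For illustration, the two call shapes `_set_subject` accepts, per its docstring; `obj` stands for an instance of the enclosing class, and the URIs are placeholders:

```python
# Hypothetical calls; obj is an instance of the enclosing class.
obj._set_subject({'s': 'http://example.org/s',   # subject
                  'p': 'http://example.org/p',   # predicate
                  'o': ['v1', 'v2']})            # each value becomes a property
obj._set_subject('_:b0')                   # bare blank-node id -> BlankNode
obj._set_subject('http://example.org/s')   # plain string -> Uri
```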
def user_name(with_num=False):
"""Return a random user name.
    Basically it's the lowercased result of
:py:func:`~forgery_py.forgery.name.first_name()` with a number appended
if `with_num`.
"""
result = first_name()
if with_num:
result += str(random.randint(63, 94))
return result.lower()
|
def function[user_name, parameter[with_num]]:
constant[Return a random user name.
    Basically it's the lowercased result of
:py:func:`~forgery_py.forgery.name.first_name()` with a number appended
if `with_num`.
]
variable[result] assign[=] call[name[first_name], parameter[]]
if name[with_num] begin[:]
<ast.AugAssign object at 0x7da1aff6f160>
return[call[name[result].lower, parameter[]]]
|
keyword[def] identifier[user_name] ( identifier[with_num] = keyword[False] ):
literal[string]
identifier[result] = identifier[first_name] ()
keyword[if] identifier[with_num] :
identifier[result] += identifier[str] ( identifier[random] . identifier[randint] ( literal[int] , literal[int] ))
keyword[return] identifier[result] . identifier[lower] ()
|
def user_name(with_num=False):
"""Return a random user name.
    Basically it's the lowercased result of
:py:func:`~forgery_py.forgery.name.first_name()` with a number appended
if `with_num`.
"""
result = first_name()
if with_num:
result += str(random.randint(63, 94)) # depends on [control=['if'], data=[]]
return result.lower()
|
def get_metadata(address=None, tx_hash=None, block_hash=None, api_key=None, private=True, coin_symbol='btc'):
'''
Get metadata using blockcypher's API.
This is data on blockcypher's servers and not embedded into the bitcoin (or other) blockchain.
'''
assert is_valid_coin_symbol(coin_symbol), coin_symbol
assert api_key or not private, 'Cannot see private metadata without an API key'
kwarg = get_valid_metadata_identifier(
coin_symbol=coin_symbol,
address=address,
tx_hash=tx_hash,
block_hash=block_hash,
)
url = make_url(coin_symbol, meta=True, **kwarg)
params = {'token': api_key} if api_key else {'private': 'true'}
r = requests.get(url, params=params, verify=True, timeout=TIMEOUT_IN_SECONDS)
response_dict = get_valid_json(r)
return response_dict
|
def function[get_metadata, parameter[address, tx_hash, block_hash, api_key, private, coin_symbol]]:
constant[
Get metadata using blockcypher's API.
This is data on blockcypher's servers and not embedded into the bitcoin (or other) blockchain.
]
assert[call[name[is_valid_coin_symbol], parameter[name[coin_symbol]]]]
assert[<ast.BoolOp object at 0x7da18f09d360>]
variable[kwarg] assign[=] call[name[get_valid_metadata_identifier], parameter[]]
variable[url] assign[=] call[name[make_url], parameter[name[coin_symbol]]]
variable[params] assign[=] <ast.IfExp object at 0x7da18f09ff10>
variable[r] assign[=] call[name[requests].get, parameter[name[url]]]
variable[response_dict] assign[=] call[name[get_valid_json], parameter[name[r]]]
return[name[response_dict]]
|
keyword[def] identifier[get_metadata] ( identifier[address] = keyword[None] , identifier[tx_hash] = keyword[None] , identifier[block_hash] = keyword[None] , identifier[api_key] = keyword[None] , identifier[private] = keyword[True] , identifier[coin_symbol] = literal[string] ):
literal[string]
keyword[assert] identifier[is_valid_coin_symbol] ( identifier[coin_symbol] ), identifier[coin_symbol]
keyword[assert] identifier[api_key] keyword[or] keyword[not] identifier[private] , literal[string]
identifier[kwarg] = identifier[get_valid_metadata_identifier] (
identifier[coin_symbol] = identifier[coin_symbol] ,
identifier[address] = identifier[address] ,
identifier[tx_hash] = identifier[tx_hash] ,
identifier[block_hash] = identifier[block_hash] ,
)
identifier[url] = identifier[make_url] ( identifier[coin_symbol] , identifier[meta] = keyword[True] ,** identifier[kwarg] )
identifier[params] ={ literal[string] : identifier[api_key] } keyword[if] identifier[api_key] keyword[else] { literal[string] : literal[string] }
identifier[r] = identifier[requests] . identifier[get] ( identifier[url] , identifier[params] = identifier[params] , identifier[verify] = keyword[True] , identifier[timeout] = identifier[TIMEOUT_IN_SECONDS] )
identifier[response_dict] = identifier[get_valid_json] ( identifier[r] )
keyword[return] identifier[response_dict]
|
def get_metadata(address=None, tx_hash=None, block_hash=None, api_key=None, private=True, coin_symbol='btc'):
"""
Get metadata using blockcypher's API.
This is data on blockcypher's servers and not embedded into the bitcoin (or other) blockchain.
"""
assert is_valid_coin_symbol(coin_symbol), coin_symbol
assert api_key or not private, 'Cannot see private metadata without an API key'
kwarg = get_valid_metadata_identifier(coin_symbol=coin_symbol, address=address, tx_hash=tx_hash, block_hash=block_hash)
url = make_url(coin_symbol, meta=True, **kwarg)
params = {'token': api_key} if api_key else {'private': 'true'}
r = requests.get(url, params=params, verify=True, timeout=TIMEOUT_IN_SECONDS)
response_dict = get_valid_json(r)
return response_dict
|
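A hedged call sketch for `get_metadata`; the hash and token are placeholders, and presumably `get_valid_metadata_identifier` enforces that exactly one of `address`, `tx_hash`, or `block_hash` is supplied:

```python
# Placeholders throughout; a BlockCypher API token is required for private data.
meta = get_metadata(
    tx_hash="<tx-hash>",
    api_key="<blockcypher-token>",
    private=True,
    coin_symbol="btc",
)
print(meta)
```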
def relay_events_from(self, originator, event_type, *more_event_types):
"""
Configure this handler to re-dispatch events from another handler.
    This method configures this handler to dispatch an event of type
*event_type* whenever *originator* dispatches events of the same type
or any of the types in *more_event_types*. Any arguments passed to the
original event are copied to the new event.
    This method is meant to be useful for creating composite widgets that
want to present a simple API by making it seem like the events being
generated by their children are actually coming from them. See the
`/composing_widgets` tutorial for an example.
"""
handlers = {
        # Bind each event type eagerly via a keyword-only default so every
        # handler dispatches its own type, not the loop variable's last value.
        event_type: lambda *args, _event_type=event_type, **kwargs: \
            self.dispatch_event(_event_type, *args, **kwargs)
for event_type in (event_type,) + more_event_types
}
originator.set_handlers(**handlers)
|
def function[relay_events_from, parameter[self, originator, event_type]]:
constant[
Configure this handler to re-dispatch events from another handler.
    This method configures this handler to dispatch an event of type
*event_type* whenever *originator* dispatches events of the same type
or any of the types in *more_event_types*. Any arguments passed to the
original event are copied to the new event.
    This method is meant to be useful for creating composite widgets that
want to present a simple API by making it seem like the events being
generated by their children are actually coming from them. See the
`/composing_widgets` tutorial for an example.
]
variable[handlers] assign[=] <ast.DictComp object at 0x7da18dc9a6b0>
call[name[originator].set_handlers, parameter[]]
|
keyword[def] identifier[relay_events_from] ( identifier[self] , identifier[originator] , identifier[event_type] ,* identifier[more_event_types] ):
literal[string]
identifier[handlers] ={
identifier[event_type] : keyword[lambda] * identifier[args] , identifier[_event_type] = identifier[event_type] ,** identifier[kwargs] : identifier[self] . identifier[dispatch_event] ( identifier[_event_type] ,* identifier[args] ,** identifier[kwargs] )
keyword[for] identifier[event_type] keyword[in] ( identifier[event_type] ,)+ identifier[more_event_types]
}
identifier[originator] . identifier[set_handlers] (** identifier[handlers] )
|
def relay_events_from(self, originator, event_type, *more_event_types):
"""
Configure this handler to re-dispatch events from another handler.
    This method configures this handler to dispatch an event of type
*event_type* whenever *originator* dispatches events of the same type
or any of the types in *more_event_types*. Any arguments passed to the
original event are copied to the new event.
    This method is meant to be useful for creating composite widgets that
want to present a simple API by making it seem like the events being
generated by their children are actually coming from them. See the
`/composing_widgets` tutorial for an example.
"""
    handlers = {event_type: lambda *args, _event_type=event_type, **kwargs: self.dispatch_event(_event_type, *args, **kwargs) for event_type in (event_type,) + more_event_types}
originator.set_handlers(**handlers)
|
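A minimal sketch of the relay pattern on top of pyglet's `EventDispatcher`, which supplies `dispatch_event`, `set_handler`, and `set_handlers`; the class and event names are invented for illustration:

```python
import pyglet

class Child(pyglet.event.EventDispatcher):
    pass
Child.register_event_type('on_value_changed')

class Parent(pyglet.event.EventDispatcher):
    relay_events_from = relay_events_from  # reuse the method defined above
Parent.register_event_type('on_value_changed')

parent, child = Parent(), Child()
parent.relay_events_from(child, 'on_value_changed')
parent.set_handler('on_value_changed', lambda v: print('relayed:', v))
child.dispatch_event('on_value_changed', 42)  # prints: relayed: 42
```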
def sample_hslice(args):
"""
Return a new live point proposed by "Hamiltonian" Slice Sampling
using a series of random trajectories away from an existing live point.
Each trajectory is based on the provided axes and samples are determined
by moving forwards/backwards in time until the trajectory hits an edge
and approximately reflecting off the boundaries.
Once a series of reflections has been established, we propose a new live
point by slice sampling across the entire path.
Parameters
----------
u : `~numpy.ndarray` with shape (npdim,)
Position of the initial sample. **This is a copy of an existing live
point.**
loglstar : float
Ln(likelihood) bound.
axes : `~numpy.ndarray` with shape (ndim, ndim)
Axes used to propose new slice directions.
scale : float
Value used to scale the provided axes.
prior_transform : function
        Function transforming a sample from the unit cube to the parameter
space of interest according to the prior.
loglikelihood : function
Function returning ln(likelihood) given parameters as a 1-d `~numpy`
array of length `ndim`.
kwargs : dict
A dictionary of additional method-specific parameters.
Returns
-------
u : `~numpy.ndarray` with shape (npdim,)
Position of the final proposed point within the unit cube.
v : `~numpy.ndarray` with shape (ndim,)
Position of the final proposed point in the target parameter space.
logl : float
Ln(likelihood) of the final proposed point.
nc : int
Number of function calls used to generate the sample.
blob : dict
Collection of ancillary quantities used to tune :data:`scale`.
"""
# Unzipping.
(u, loglstar, axes, scale,
prior_transform, loglikelihood, kwargs) = args
rstate = np.random
# Periodicity.
nonperiodic = kwargs.get('nonperiodic', None)
# Setup.
n = len(u)
slices = kwargs.get('slices', 5) # number of slices
grad = kwargs.get('grad', None) # gradient of log-likelihood
max_move = kwargs.get('max_move', 100) # limit for `ncall`
compute_jac = kwargs.get('compute_jac', False) # whether Jacobian needed
jitter = 0.25 # 25% jitter
nc = 0
nmove = 0
nreflect = 0
ncontract = 0
# Slice sampling loop.
for it in range(slices):
# Define the left, "inner", and right "nodes" for a given chord.
        # We will slice sample along these chords.
nodes_l, nodes_m, nodes_r = [], [], []
# Propose a direction on the unit n-sphere.
drhat = rstate.randn(n)
drhat /= linalg.norm(drhat)
# Transform and scale based on past tuning.
axis = np.dot(axes, drhat) * scale * 0.01
# Create starting window.
vel = np.array(axis) # current velocity
u_l = u - rstate.uniform(1. - jitter, 1. + jitter) * vel
u_r = u + rstate.uniform(1. - jitter, 1. + jitter) * vel
nodes_l.append(np.array(u_l))
nodes_m.append(np.array(u))
nodes_r.append(np.array(u_r))
# Progress "right" (i.e. "forwards" in time).
reverse, reflect = False, False
u_r = np.array(u)
ncall = 0
while ncall <= max_move:
# Iterate until we can bracket the edge of the distribution.
nodes_l.append(np.array(u_r))
u_out, u_in = None, []
while True:
# Step forward.
u_r += rstate.uniform(1. - jitter, 1. + jitter) * vel
# Evaluate point.
if unitcheck(u_r, nonperiodic):
v_r = prior_transform(np.array(u_r))
logl_r = loglikelihood(np.array(v_r))
nc += 1
ncall += 1
nmove += 1
else:
logl_r = -np.inf
# Check if we satisfy the log-likelihood constraint
# (i.e. are "in" or "out" of bounds).
if logl_r < loglstar:
if reflect:
# If we are out of bounds and just reflected, we
# reverse direction and terminate immediately.
reverse = True
nodes_l.pop() # remove since chord does not exist
break
else:
# If we're already in bounds, then we're safe.
u_out = np.array(u_r)
logl_out = logl_r
# Check if we could compute gradients assuming we
# terminated with the current `u_out`.
if np.isfinite(logl_out):
reverse = False
else:
reverse = True
else:
reflect = False
u_in.append(np.array(u_r))
# Check if we've bracketed the edge.
if u_out is not None:
break
# Define the rest of our chord.
if len(nodes_l) == len(nodes_r) + 1:
try:
u_in = u_in[rstate.choice(len(u_in))] # pick point randomly
                except:
                    u_in = np.array(u)
nodes_m.append(np.array(u_in))
nodes_r.append(np.array(u_out))
# Check if we have turned around.
if reverse:
break
# Reflect off the boundary.
u_r, logl_r = u_out, logl_out
if grad is None:
# If the gradient is not provided, we will attempt to
# approximate it numerically using 2nd-order methods.
h = np.zeros(n)
for i in range(n):
u_r_l, u_r_r = np.array(u_r), np.array(u_r)
# right side
u_r_r[i] += 1e-10
if unitcheck(u_r_r, nonperiodic):
v_r_r = prior_transform(np.array(u_r_r))
logl_r_r = loglikelihood(np.array(v_r_r))
else:
logl_r_r = -np.inf
reverse = True # can't compute gradient
nc += 1
# left side
u_r_l[i] -= 1e-10
if unitcheck(u_r_l, nonperiodic):
v_r_l = prior_transform(np.array(u_r_l))
logl_r_l = loglikelihood(np.array(v_r_l))
else:
logl_r_l = -np.inf
reverse = True # can't compute gradient
if reverse:
break # give up because we have to turn around
nc += 1
# compute dlnl/du
h[i] = (logl_r_r - logl_r_l) / 2e-10
else:
# If the gradient is provided, evaluate it.
h = grad(v_r)
if compute_jac:
jac = []
# Evaluate and apply Jacobian dv/du if gradient
# is defined as d(lnL)/dv instead of d(lnL)/du.
for i in range(n):
u_r_l, u_r_r = np.array(u_r), np.array(u_r)
# right side
u_r_r[i] += 1e-10
if unitcheck(u_r_r, nonperiodic):
v_r_r = prior_transform(np.array(u_r_r))
else:
reverse = True # can't compute Jacobian
v_r_r = np.array(v_r) # assume no movement
# left side
u_r_l[i] -= 1e-10
if unitcheck(u_r_l, nonperiodic):
v_r_l = prior_transform(np.array(u_r_l))
else:
reverse = True # can't compute Jacobian
                            v_r_l = np.array(v_r)  # assume no movement
if reverse:
break # give up because we have to turn around
jac.append((v_r_r - v_r_l) / 2e-10)
jac = np.array(jac)
h = np.dot(jac, h) # apply Jacobian
nc += 1
# Compute specular reflection off boundary.
vel_ref = vel - 2 * h * np.dot(vel, h) / linalg.norm(h)**2
dotprod = np.dot(vel_ref, vel)
dotprod /= linalg.norm(vel_ref) * linalg.norm(vel)
# Check angle of reflection.
if dotprod < -0.99:
                # The reflected velocity points almost straight back along
                # the incoming direction, so treat this as a reversal.
reverse = True
break
else:
# If the reflection angle is sufficiently large, we
# proceed as normal to the new position.
vel = vel_ref
u_out = None
reflect = True
nreflect += 1
# Progress "left" (i.e. "backwards" in time).
reverse, reflect = False, False
vel = -np.array(axis) # current velocity
u_l = np.array(u)
ncall = 0
while ncall <= max_move:
# Iterate until we can bracket the edge of the distribution.
# Use a doubling approach to try and locate the bounds faster.
nodes_r.append(np.array(u_l))
u_out, u_in = None, []
while True:
# Step forward.
u_l += rstate.uniform(1. - jitter, 1. + jitter) * vel
# Evaluate point.
if unitcheck(u_l, nonperiodic):
v_l = prior_transform(np.array(u_l))
logl_l = loglikelihood(np.array(v_l))
nc += 1
ncall += 1
nmove += 1
else:
logl_l = -np.inf
# Check if we satisfy the log-likelihood constraint
# (i.e. are "in" or "out" of bounds).
if logl_l < loglstar:
if reflect:
# If we are out of bounds and just reflected, we
# reverse direction and terminate immediately.
reverse = True
nodes_r.pop() # remove since chord does not exist
break
else:
# If we're already in bounds, then we're safe.
u_out = np.array(u_l)
logl_out = logl_l
# Check if we could compute gradients assuming we
# terminated with the current `u_out`.
if np.isfinite(logl_out):
reverse = False
else:
reverse = True
else:
reflect = False
u_in.append(np.array(u_l))
# Check if we've bracketed the edge.
if u_out is not None:
break
# Define the rest of our chord.
if len(nodes_r) == len(nodes_l) + 1:
try:
u_in = u_in[rstate.choice(len(u_in))] # pick point randomly
                except:
                    u_in = np.array(u)
nodes_m.append(np.array(u_in))
nodes_l.append(np.array(u_out))
# Check if we have turned around.
if reverse:
break
# Reflect off the boundary.
u_l, logl_l = u_out, logl_out
if grad is None:
# If the gradient is not provided, we will attempt to
# approximate it numerically using 2nd-order methods.
h = np.zeros(n)
for i in range(n):
u_l_l, u_l_r = np.array(u_l), np.array(u_l)
# right side
u_l_r[i] += 1e-10
if unitcheck(u_l_r, nonperiodic):
v_l_r = prior_transform(np.array(u_l_r))
logl_l_r = loglikelihood(np.array(v_l_r))
else:
logl_l_r = -np.inf
reverse = True # can't compute gradient
nc += 1
# left side
u_l_l[i] -= 1e-10
if unitcheck(u_l_l, nonperiodic):
v_l_l = prior_transform(np.array(u_l_l))
logl_l_l = loglikelihood(np.array(v_l_l))
else:
logl_l_l = -np.inf
reverse = True # can't compute gradient
if reverse:
break # give up because we have to turn around
nc += 1
# compute dlnl/du
h[i] = (logl_l_r - logl_l_l) / 2e-10
else:
# If the gradient is provided, evaluate it.
h = grad(v_l)
if compute_jac:
jac = []
# Evaluate and apply Jacobian dv/du if gradient
# is defined as d(lnL)/dv instead of d(lnL)/du.
for i in range(n):
u_l_l, u_l_r = np.array(u_l), np.array(u_l)
# right side
u_l_r[i] += 1e-10
if unitcheck(u_l_r, nonperiodic):
v_l_r = prior_transform(np.array(u_l_r))
else:
reverse = True # can't compute Jacobian
v_l_r = np.array(v_l) # assume no movement
# left side
u_l_l[i] -= 1e-10
if unitcheck(u_l_l, nonperiodic):
v_l_l = prior_transform(np.array(u_l_l))
else:
reverse = True # can't compute Jacobian
                            v_l_l = np.array(v_l)  # assume no movement
if reverse:
break # give up because we have to turn around
jac.append((v_l_r - v_l_l) / 2e-10)
jac = np.array(jac)
h = np.dot(jac, h) # apply Jacobian
nc += 1
# Compute specular reflection off boundary.
vel_ref = vel - 2 * h * np.dot(vel, h) / linalg.norm(h)**2
dotprod = np.dot(vel_ref, vel)
dotprod /= linalg.norm(vel_ref) * linalg.norm(vel)
# Check angle of reflection.
if dotprod < -0.99:
                # The reflected velocity is nearly antiparallel to the
                # incoming velocity, so this is effectively a reversal.
reverse = True
break
else:
# If the reflection angle is sufficiently large, we
# proceed as normal to the new position.
vel = vel_ref
u_out = None
reflect = True
nreflect += 1
# Initialize lengths of chords.
if len(nodes_l) > 1:
# remove initial fallback chord
nodes_l.pop(0)
nodes_m.pop(0)
nodes_r.pop(0)
nodes_l, nodes_m, nodes_r = (np.array(nodes_l), np.array(nodes_m),
np.array(nodes_r))
Nchords = len(nodes_l)
axlen = np.zeros(Nchords, dtype='float')
for i, (nl, nm, nr) in enumerate(zip(nodes_l, nodes_m, nodes_r)):
axlen[i] = linalg.norm(nr - nl)
# Slice sample from all chords simultaneously. This is equivalent to
# slice sampling in *time* along our trajectory.
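        # Chords are selected with probability proportional to their
        # length, so proposals are uniform over the concatenated path.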
while True:
# Select chord.
axprob = axlen / np.sum(axlen)
idx = rstate.choice(Nchords, p=axprob)
# Define chord.
u_l, u_m, u_r = nodes_l[idx], nodes_m[idx], nodes_r[idx]
u_hat = u_r - u_l
rprop = rstate.rand()
u_prop = u_l + rprop * u_hat # scale from left
if unitcheck(u_prop, nonperiodic):
v_prop = prior_transform(np.array(u_prop))
logl_prop = loglikelihood(np.array(v_prop))
else:
logl_prop = -np.inf
nc += 1
ncontract += 1
# If we succeed, move to the new position.
if logl_prop >= loglstar:
u = u_prop
break
# If we fail, check if the new point is to the left/right of
# the point interior to the bounds (`u_m`) and update
# the bounds accordingly.
else:
s = np.dot(u_prop - u_m, u_hat) # check sign (+/-)
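                # s > 0: `u_prop` lies between `u_m` and `u_r`;
                # s < 0: it lies between `u_l` and `u_m`.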
if s < 0: # left
nodes_l[idx] = u_prop
axlen[idx] *= 1 - rprop
elif s > 0: # right
nodes_r[idx] = u_prop
axlen[idx] *= rprop
else:
raise RuntimeError("Slice sampler has failed to find "
"a valid point. Some useful "
"output quantities:\n"
"u: {0}\n"
"u_left: {1}\n"
"u_right: {2}\n"
"u_hat: {3}\n"
"u_prop: {4}\n"
"loglstar: {5}\n"
"logl_prop: {6}."
.format(u, u_l, u_r, u_hat, u_prop,
loglstar, logl_prop))
blob = {'nmove': nmove, 'nreflect': nreflect, 'ncontract': ncontract}
return u_prop, v_prop, logl_prop, nc, blob
|
def function[sample_hslice, parameter[args]]:
constant[
Return a new live point proposed by "Hamiltonian" Slice Sampling
using a series of random trajectories away from an existing live point.
Each trajectory is based on the provided axes and samples are determined
by moving forwards/backwards in time until the trajectory hits an edge
and approximately reflecting off the boundaries.
Once a series of reflections has been established, we propose a new live
point by slice sampling across the entire path.
Parameters
----------
u : `~numpy.ndarray` with shape (npdim,)
Position of the initial sample. **This is a copy of an existing live
point.**
loglstar : float
Ln(likelihood) bound.
axes : `~numpy.ndarray` with shape (ndim, ndim)
Axes used to propose new slice directions.
scale : float
Value used to scale the provided axes.
prior_transform : function
        Function transforming a sample from the unit cube to the parameter
space of interest according to the prior.
loglikelihood : function
Function returning ln(likelihood) given parameters as a 1-d `~numpy`
array of length `ndim`.
kwargs : dict
A dictionary of additional method-specific parameters.
Returns
-------
u : `~numpy.ndarray` with shape (npdim,)
Position of the final proposed point within the unit cube.
v : `~numpy.ndarray` with shape (ndim,)
Position of the final proposed point in the target parameter space.
logl : float
Ln(likelihood) of the final proposed point.
nc : int
Number of function calls used to generate the sample.
blob : dict
Collection of ancillary quantities used to tune :data:`scale`.
]
<ast.Tuple object at 0x7da1b1e9be50> assign[=] name[args]
variable[rstate] assign[=] name[np].random
variable[nonperiodic] assign[=] call[name[kwargs].get, parameter[constant[nonperiodic], constant[None]]]
variable[n] assign[=] call[name[len], parameter[name[u]]]
variable[slices] assign[=] call[name[kwargs].get, parameter[constant[slices], constant[5]]]
variable[grad] assign[=] call[name[kwargs].get, parameter[constant[grad], constant[None]]]
variable[max_move] assign[=] call[name[kwargs].get, parameter[constant[max_move], constant[100]]]
variable[compute_jac] assign[=] call[name[kwargs].get, parameter[constant[compute_jac], constant[False]]]
variable[jitter] assign[=] constant[0.25]
variable[nc] assign[=] constant[0]
variable[nmove] assign[=] constant[0]
variable[nreflect] assign[=] constant[0]
variable[ncontract] assign[=] constant[0]
for taget[name[it]] in starred[call[name[range], parameter[name[slices]]]] begin[:]
<ast.Tuple object at 0x7da1b1e9b070> assign[=] tuple[[<ast.List object at 0x7da1b1e9af80>, <ast.List object at 0x7da1b1e9af50>, <ast.List object at 0x7da1b1e9af20>]]
variable[drhat] assign[=] call[name[rstate].randn, parameter[name[n]]]
<ast.AugAssign object at 0x7da1b1e9add0>
variable[axis] assign[=] binary_operation[binary_operation[call[name[np].dot, parameter[name[axes], name[drhat]]] * name[scale]] * constant[0.01]]
variable[vel] assign[=] call[name[np].array, parameter[name[axis]]]
variable[u_l] assign[=] binary_operation[name[u] - binary_operation[call[name[rstate].uniform, parameter[binary_operation[constant[1.0] - name[jitter]], binary_operation[constant[1.0] + name[jitter]]]] * name[vel]]]
variable[u_r] assign[=] binary_operation[name[u] + binary_operation[call[name[rstate].uniform, parameter[binary_operation[constant[1.0] - name[jitter]], binary_operation[constant[1.0] + name[jitter]]]] * name[vel]]]
call[name[nodes_l].append, parameter[call[name[np].array, parameter[name[u_l]]]]]
call[name[nodes_m].append, parameter[call[name[np].array, parameter[name[u]]]]]
call[name[nodes_r].append, parameter[call[name[np].array, parameter[name[u_r]]]]]
<ast.Tuple object at 0x7da1b1e99ea0> assign[=] tuple[[<ast.Constant object at 0x7da1b1e99de0>, <ast.Constant object at 0x7da1b1e99db0>]]
variable[u_r] assign[=] call[name[np].array, parameter[name[u]]]
variable[ncall] assign[=] constant[0]
while compare[name[ncall] less_or_equal[<=] name[max_move]] begin[:]
call[name[nodes_l].append, parameter[call[name[np].array, parameter[name[u_r]]]]]
<ast.Tuple object at 0x7da1b1eccdf0> assign[=] tuple[[<ast.Constant object at 0x7da1b1eccd30>, <ast.List object at 0x7da1b1eccd00>]]
while constant[True] begin[:]
<ast.AugAssign object at 0x7da1b1eccc70>
if call[name[unitcheck], parameter[name[u_r], name[nonperiodic]]] begin[:]
variable[v_r] assign[=] call[name[prior_transform], parameter[call[name[np].array, parameter[name[u_r]]]]]
variable[logl_r] assign[=] call[name[loglikelihood], parameter[call[name[np].array, parameter[name[v_r]]]]]
<ast.AugAssign object at 0x7da1b1ecc5e0>
<ast.AugAssign object at 0x7da1b1ecc550>
<ast.AugAssign object at 0x7da1b1ecc4c0>
if compare[name[logl_r] less[<] name[loglstar]] begin[:]
if name[reflect] begin[:]
variable[reverse] assign[=] constant[True]
call[name[nodes_l].pop, parameter[]]
break
if call[name[np].isfinite, parameter[name[logl_out]]] begin[:]
variable[reverse] assign[=] constant[False]
if compare[name[u_out] is_not constant[None]] begin[:]
break
if compare[call[name[len], parameter[name[nodes_l]]] equal[==] binary_operation[call[name[len], parameter[name[nodes_r]]] + constant[1]]] begin[:]
<ast.Try object at 0x7da1b1e8c730>
call[name[nodes_m].append, parameter[call[name[np].array, parameter[name[u_in]]]]]
call[name[nodes_r].append, parameter[call[name[np].array, parameter[name[u_out]]]]]
if name[reverse] begin[:]
break
<ast.Tuple object at 0x7da1b1e8f880> assign[=] tuple[[<ast.Name object at 0x7da1b1e8f4f0>, <ast.Name object at 0x7da1b1e8f550>]]
if compare[name[grad] is constant[None]] begin[:]
variable[h] assign[=] call[name[np].zeros, parameter[name[n]]]
for taget[name[i]] in starred[call[name[range], parameter[name[n]]]] begin[:]
<ast.Tuple object at 0x7da1b1e8f7f0> assign[=] tuple[[<ast.Call object at 0x7da1b1e8f280>, <ast.Call object at 0x7da1b1e8f370>]]
<ast.AugAssign object at 0x7da1b1e8f2b0>
if call[name[unitcheck], parameter[name[u_r_r], name[nonperiodic]]] begin[:]
variable[v_r_r] assign[=] call[name[prior_transform], parameter[call[name[np].array, parameter[name[u_r_r]]]]]
variable[logl_r_r] assign[=] call[name[loglikelihood], parameter[call[name[np].array, parameter[name[v_r_r]]]]]
<ast.AugAssign object at 0x7da1b1e8ebc0>
<ast.AugAssign object at 0x7da1b1e8eb30>
if call[name[unitcheck], parameter[name[u_r_l], name[nonperiodic]]] begin[:]
variable[v_r_l] assign[=] call[name[prior_transform], parameter[call[name[np].array, parameter[name[u_r_l]]]]]
variable[logl_r_l] assign[=] call[name[loglikelihood], parameter[call[name[np].array, parameter[name[v_r_l]]]]]
if name[reverse] begin[:]
break
<ast.AugAssign object at 0x7da1b1e8e410>
call[name[h]][name[i]] assign[=] binary_operation[binary_operation[name[logl_r_r] - name[logl_r_l]] / constant[2e-10]]
variable[vel_ref] assign[=] binary_operation[name[vel] - binary_operation[binary_operation[binary_operation[constant[2] * name[h]] * call[name[np].dot, parameter[name[vel], name[h]]]] / binary_operation[call[name[linalg].norm, parameter[name[h]]] ** constant[2]]]]
variable[dotprod] assign[=] call[name[np].dot, parameter[name[vel_ref], name[vel]]]
<ast.AugAssign object at 0x7da1b1eb83a0>
if compare[name[dotprod] less[<] <ast.UnaryOp object at 0x7da1b1eb8640>] begin[:]
variable[reverse] assign[=] constant[True]
break
<ast.Tuple object at 0x7da1b1eb8a00> assign[=] tuple[[<ast.Constant object at 0x7da1b1eb8ac0>, <ast.Constant object at 0x7da1b1eb8af0>]]
variable[vel] assign[=] <ast.UnaryOp object at 0x7da1b1eb8b80>
variable[u_l] assign[=] call[name[np].array, parameter[name[u]]]
variable[ncall] assign[=] constant[0]
while compare[name[ncall] less_or_equal[<=] name[max_move]] begin[:]
call[name[nodes_r].append, parameter[call[name[np].array, parameter[name[u_l]]]]]
<ast.Tuple object at 0x7da1b1eb90c0> assign[=] tuple[[<ast.Constant object at 0x7da1b1eb9180>, <ast.List object at 0x7da1b1eb91b0>]]
while constant[True] begin[:]
<ast.AugAssign object at 0x7da1b1eb9240>
if call[name[unitcheck], parameter[name[u_l], name[nonperiodic]]] begin[:]
variable[v_l] assign[=] call[name[prior_transform], parameter[call[name[np].array, parameter[name[u_l]]]]]
variable[logl_l] assign[=] call[name[loglikelihood], parameter[call[name[np].array, parameter[name[v_l]]]]]
<ast.AugAssign object at 0x7da1b1eb9720>
<ast.AugAssign object at 0x7da1b1eb9690>
<ast.AugAssign object at 0x7da1b1eb9600>
if compare[name[logl_l] less[<] name[loglstar]] begin[:]
if name[reflect] begin[:]
variable[reverse] assign[=] constant[True]
call[name[nodes_r].pop, parameter[]]
break
if call[name[np].isfinite, parameter[name[logl_out]]] begin[:]
variable[reverse] assign[=] constant[False]
if compare[name[u_out] is_not constant[None]] begin[:]
break
if compare[call[name[len], parameter[name[nodes_r]]] equal[==] binary_operation[call[name[len], parameter[name[nodes_l]]] + constant[1]]] begin[:]
<ast.Try object at 0x7da1b1d624d0>
call[name[nodes_m].append, parameter[call[name[np].array, parameter[name[u_in]]]]]
call[name[nodes_l].append, parameter[call[name[np].array, parameter[name[u_out]]]]]
if name[reverse] begin[:]
break
<ast.Tuple object at 0x7da1b1d60940> assign[=] tuple[[<ast.Name object at 0x7da1b1d62aa0>, <ast.Name object at 0x7da1b1d614b0>]]
if compare[name[grad] is constant[None]] begin[:]
variable[h] assign[=] call[name[np].zeros, parameter[name[n]]]
for taget[name[i]] in starred[call[name[range], parameter[name[n]]]] begin[:]
<ast.Tuple object at 0x7da1b1e0d4b0> assign[=] tuple[[<ast.Call object at 0x7da1b1e0c460>, <ast.Call object at 0x7da1b1e0cdf0>]]
<ast.AugAssign object at 0x7da1b1e0f1f0>
if call[name[unitcheck], parameter[name[u_l_r], name[nonperiodic]]] begin[:]
variable[v_l_r] assign[=] call[name[prior_transform], parameter[call[name[np].array, parameter[name[u_l_r]]]]]
variable[logl_l_r] assign[=] call[name[loglikelihood], parameter[call[name[np].array, parameter[name[v_l_r]]]]]
<ast.AugAssign object at 0x7da1b1e0d300>
<ast.AugAssign object at 0x7da1b1e0d330>
if call[name[unitcheck], parameter[name[u_l_l], name[nonperiodic]]] begin[:]
variable[v_l_l] assign[=] call[name[prior_transform], parameter[call[name[np].array, parameter[name[u_l_l]]]]]
variable[logl_l_l] assign[=] call[name[loglikelihood], parameter[call[name[np].array, parameter[name[v_l_l]]]]]
if name[reverse] begin[:]
break
<ast.AugAssign object at 0x7da1b1e0e380>
call[name[h]][name[i]] assign[=] binary_operation[binary_operation[name[logl_l_r] - name[logl_l_l]] / constant[2e-10]]
variable[vel_ref] assign[=] binary_operation[name[vel] - binary_operation[binary_operation[binary_operation[constant[2] * name[h]] * call[name[np].dot, parameter[name[vel], name[h]]]] / binary_operation[call[name[linalg].norm, parameter[name[h]]] ** constant[2]]]]
variable[dotprod] assign[=] call[name[np].dot, parameter[name[vel_ref], name[vel]]]
<ast.AugAssign object at 0x7da1b1e0c850>
if compare[name[dotprod] less[<] <ast.UnaryOp object at 0x7da1b1e0cbe0>] begin[:]
variable[reverse] assign[=] constant[True]
break
if compare[call[name[len], parameter[name[nodes_l]]] greater[>] constant[1]] begin[:]
call[name[nodes_l].pop, parameter[constant[0]]]
call[name[nodes_m].pop, parameter[constant[0]]]
call[name[nodes_r].pop, parameter[constant[0]]]
<ast.Tuple object at 0x7da1b1e0f460> assign[=] tuple[[<ast.Call object at 0x7da1b1e0f370>, <ast.Call object at 0x7da1b1e0d690>, <ast.Call object at 0x7da1b1e0f0d0>]]
variable[Nchords] assign[=] call[name[len], parameter[name[nodes_l]]]
variable[axlen] assign[=] call[name[np].zeros, parameter[name[Nchords]]]
for taget[tuple[[<ast.Name object at 0x7da1b1e0c190>, <ast.Tuple object at 0x7da1b1e0ded0>]]] in starred[call[name[enumerate], parameter[call[name[zip], parameter[name[nodes_l], name[nodes_m], name[nodes_r]]]]]] begin[:]
call[name[axlen]][name[i]] assign[=] call[name[linalg].norm, parameter[binary_operation[name[nr] - name[nl]]]]
while constant[True] begin[:]
variable[axprob] assign[=] binary_operation[name[axlen] / call[name[np].sum, parameter[name[axlen]]]]
variable[idx] assign[=] call[name[rstate].choice, parameter[name[Nchords]]]
<ast.Tuple object at 0x7da1b1d46bc0> assign[=] tuple[[<ast.Subscript object at 0x7da1b1d45270>, <ast.Subscript object at 0x7da1b1d46d40>, <ast.Subscript object at 0x7da1b1d449a0>]]
variable[u_hat] assign[=] binary_operation[name[u_r] - name[u_l]]
variable[rprop] assign[=] call[name[rstate].rand, parameter[]]
variable[u_prop] assign[=] binary_operation[name[u_l] + binary_operation[name[rprop] * name[u_hat]]]
if call[name[unitcheck], parameter[name[u_prop], name[nonperiodic]]] begin[:]
variable[v_prop] assign[=] call[name[prior_transform], parameter[call[name[np].array, parameter[name[u_prop]]]]]
variable[logl_prop] assign[=] call[name[loglikelihood], parameter[call[name[np].array, parameter[name[v_prop]]]]]
<ast.AugAssign object at 0x7da1b1d44850>
<ast.AugAssign object at 0x7da1b1d44c70>
if compare[name[logl_prop] greater_or_equal[>=] name[loglstar]] begin[:]
variable[u] assign[=] name[u_prop]
break
variable[blob] assign[=] dictionary[[<ast.Constant object at 0x7da1b1d46aa0>, <ast.Constant object at 0x7da1b1d46380>, <ast.Constant object at 0x7da1b1d46200>], [<ast.Name object at 0x7da1b1d47010>, <ast.Name object at 0x7da1b1d46ef0>, <ast.Name object at 0x7da1b1d46ce0>]]
return[tuple[[<ast.Name object at 0x7da1b1d45630>, <ast.Name object at 0x7da1b1d44b80>, <ast.Name object at 0x7da1b1d47100>, <ast.Name object at 0x7da1b1d46e60>, <ast.Name object at 0x7da1b1d46cb0>]]]
|
keyword[def] identifier[sample_hslice] ( identifier[args] ):
literal[string]
( identifier[u] , identifier[loglstar] , identifier[axes] , identifier[scale] ,
identifier[prior_transform] , identifier[loglikelihood] , identifier[kwargs] )= identifier[args]
identifier[rstate] = identifier[np] . identifier[random]
identifier[nonperiodic] = identifier[kwargs] . identifier[get] ( literal[string] , keyword[None] )
identifier[n] = identifier[len] ( identifier[u] )
identifier[slices] = identifier[kwargs] . identifier[get] ( literal[string] , literal[int] )
identifier[grad] = identifier[kwargs] . identifier[get] ( literal[string] , keyword[None] )
identifier[max_move] = identifier[kwargs] . identifier[get] ( literal[string] , literal[int] )
identifier[compute_jac] = identifier[kwargs] . identifier[get] ( literal[string] , keyword[False] )
identifier[jitter] = literal[int]
identifier[nc] = literal[int]
identifier[nmove] = literal[int]
identifier[nreflect] = literal[int]
identifier[ncontract] = literal[int]
keyword[for] identifier[it] keyword[in] identifier[range] ( identifier[slices] ):
identifier[nodes_l] , identifier[nodes_m] , identifier[nodes_r] =[],[],[]
identifier[drhat] = identifier[rstate] . identifier[randn] ( identifier[n] )
identifier[drhat] /= identifier[linalg] . identifier[norm] ( identifier[drhat] )
identifier[axis] = identifier[np] . identifier[dot] ( identifier[axes] , identifier[drhat] )* identifier[scale] * literal[int]
identifier[vel] = identifier[np] . identifier[array] ( identifier[axis] )
identifier[u_l] = identifier[u] - identifier[rstate] . identifier[uniform] ( literal[int] - identifier[jitter] , literal[int] + identifier[jitter] )* identifier[vel]
identifier[u_r] = identifier[u] + identifier[rstate] . identifier[uniform] ( literal[int] - identifier[jitter] , literal[int] + identifier[jitter] )* identifier[vel]
identifier[nodes_l] . identifier[append] ( identifier[np] . identifier[array] ( identifier[u_l] ))
identifier[nodes_m] . identifier[append] ( identifier[np] . identifier[array] ( identifier[u] ))
identifier[nodes_r] . identifier[append] ( identifier[np] . identifier[array] ( identifier[u_r] ))
identifier[reverse] , identifier[reflect] = keyword[False] , keyword[False]
identifier[u_r] = identifier[np] . identifier[array] ( identifier[u] )
identifier[ncall] = literal[int]
keyword[while] identifier[ncall] <= identifier[max_move] :
identifier[nodes_l] . identifier[append] ( identifier[np] . identifier[array] ( identifier[u_r] ))
identifier[u_out] , identifier[u_in] = keyword[None] ,[]
keyword[while] keyword[True] :
identifier[u_r] += identifier[rstate] . identifier[uniform] ( literal[int] - identifier[jitter] , literal[int] + identifier[jitter] )* identifier[vel]
keyword[if] identifier[unitcheck] ( identifier[u_r] , identifier[nonperiodic] ):
identifier[v_r] = identifier[prior_transform] ( identifier[np] . identifier[array] ( identifier[u_r] ))
identifier[logl_r] = identifier[loglikelihood] ( identifier[np] . identifier[array] ( identifier[v_r] ))
identifier[nc] += literal[int]
identifier[ncall] += literal[int]
identifier[nmove] += literal[int]
keyword[else] :
identifier[logl_r] =- identifier[np] . identifier[inf]
keyword[if] identifier[logl_r] < identifier[loglstar] :
keyword[if] identifier[reflect] :
identifier[reverse] = keyword[True]
identifier[nodes_l] . identifier[pop] ()
keyword[break]
keyword[else] :
identifier[u_out] = identifier[np] . identifier[array] ( identifier[u_r] )
identifier[logl_out] = identifier[logl_r]
keyword[if] identifier[np] . identifier[isfinite] ( identifier[logl_out] ):
identifier[reverse] = keyword[False]
keyword[else] :
identifier[reverse] = keyword[True]
keyword[else] :
identifier[reflect] = keyword[False]
identifier[u_in] . identifier[append] ( identifier[np] . identifier[array] ( identifier[u_r] ))
keyword[if] identifier[u_out] keyword[is] keyword[not] keyword[None] :
keyword[break]
keyword[if] identifier[len] ( identifier[nodes_l] )== identifier[len] ( identifier[nodes_r] )+ literal[int] :
keyword[try] :
identifier[u_in] = identifier[u_in] [ identifier[rstate] . identifier[choice] ( identifier[len] ( identifier[u_in] ))]
keyword[except] :
identifier[u_in] = identifier[np] . identifier[array] ( identifier[u] )
keyword[pass]
identifier[nodes_m] . identifier[append] ( identifier[np] . identifier[array] ( identifier[u_in] ))
identifier[nodes_r] . identifier[append] ( identifier[np] . identifier[array] ( identifier[u_out] ))
keyword[if] identifier[reverse] :
keyword[break]
identifier[u_r] , identifier[logl_r] = identifier[u_out] , identifier[logl_out]
keyword[if] identifier[grad] keyword[is] keyword[None] :
identifier[h] = identifier[np] . identifier[zeros] ( identifier[n] )
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[n] ):
identifier[u_r_l] , identifier[u_r_r] = identifier[np] . identifier[array] ( identifier[u_r] ), identifier[np] . identifier[array] ( identifier[u_r] )
identifier[u_r_r] [ identifier[i] ]+= literal[int]
keyword[if] identifier[unitcheck] ( identifier[u_r_r] , identifier[nonperiodic] ):
identifier[v_r_r] = identifier[prior_transform] ( identifier[np] . identifier[array] ( identifier[u_r_r] ))
identifier[logl_r_r] = identifier[loglikelihood] ( identifier[np] . identifier[array] ( identifier[v_r_r] ))
keyword[else] :
identifier[logl_r_r] =- identifier[np] . identifier[inf]
identifier[reverse] = keyword[True]
identifier[nc] += literal[int]
identifier[u_r_l] [ identifier[i] ]-= literal[int]
keyword[if] identifier[unitcheck] ( identifier[u_r_l] , identifier[nonperiodic] ):
identifier[v_r_l] = identifier[prior_transform] ( identifier[np] . identifier[array] ( identifier[u_r_l] ))
identifier[logl_r_l] = identifier[loglikelihood] ( identifier[np] . identifier[array] ( identifier[v_r_l] ))
keyword[else] :
identifier[logl_r_l] =- identifier[np] . identifier[inf]
identifier[reverse] = keyword[True]
keyword[if] identifier[reverse] :
keyword[break]
identifier[nc] += literal[int]
identifier[h] [ identifier[i] ]=( identifier[logl_r_r] - identifier[logl_r_l] )/ literal[int]
keyword[else] :
identifier[h] = identifier[grad] ( identifier[v_r] )
keyword[if] identifier[compute_jac] :
identifier[jac] =[]
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[n] ):
identifier[u_r_l] , identifier[u_r_r] = identifier[np] . identifier[array] ( identifier[u_r] ), identifier[np] . identifier[array] ( identifier[u_r] )
identifier[u_r_r] [ identifier[i] ]+= literal[int]
keyword[if] identifier[unitcheck] ( identifier[u_r_r] , identifier[nonperiodic] ):
identifier[v_r_r] = identifier[prior_transform] ( identifier[np] . identifier[array] ( identifier[u_r_r] ))
keyword[else] :
identifier[reverse] = keyword[True]
identifier[v_r_r] = identifier[np] . identifier[array] ( identifier[v_r] )
identifier[u_r_l] [ identifier[i] ]-= literal[int]
keyword[if] identifier[unitcheck] ( identifier[u_r_l] , identifier[nonperiodic] ):
identifier[v_r_l] = identifier[prior_transform] ( identifier[np] . identifier[array] ( identifier[u_r_l] ))
keyword[else] :
identifier[reverse] = keyword[True]
identifier[v_r_l] = identifier[np] . identifier[array] ( identifier[v_r] )
keyword[if] identifier[reverse] :
keyword[break]
identifier[jac] . identifier[append] (( identifier[v_r_r] - identifier[v_r_l] )/ literal[int] )
identifier[jac] = identifier[np] . identifier[array] ( identifier[jac] )
identifier[h] = identifier[np] . identifier[dot] ( identifier[jac] , identifier[h] )
identifier[nc] += literal[int]
identifier[vel_ref] = identifier[vel] - literal[int] * identifier[h] * identifier[np] . identifier[dot] ( identifier[vel] , identifier[h] )/ identifier[linalg] . identifier[norm] ( identifier[h] )** literal[int]
identifier[dotprod] = identifier[np] . identifier[dot] ( identifier[vel_ref] , identifier[vel] )
identifier[dotprod] /= identifier[linalg] . identifier[norm] ( identifier[vel_ref] )* identifier[linalg] . identifier[norm] ( identifier[vel] )
keyword[if] identifier[dotprod] <- literal[int] :
identifier[reverse] = keyword[True]
keyword[break]
keyword[else] :
identifier[vel] = identifier[vel_ref]
identifier[u_out] = keyword[None]
identifier[reflect] = keyword[True]
identifier[nreflect] += literal[int]
identifier[reverse] , identifier[reflect] = keyword[False] , keyword[False]
identifier[vel] =- identifier[np] . identifier[array] ( identifier[axis] )
identifier[u_l] = identifier[np] . identifier[array] ( identifier[u] )
identifier[ncall] = literal[int]
keyword[while] identifier[ncall] <= identifier[max_move] :
identifier[nodes_r] . identifier[append] ( identifier[np] . identifier[array] ( identifier[u_l] ))
identifier[u_out] , identifier[u_in] = keyword[None] ,[]
keyword[while] keyword[True] :
identifier[u_l] += identifier[rstate] . identifier[uniform] ( literal[int] - identifier[jitter] , literal[int] + identifier[jitter] )* identifier[vel]
keyword[if] identifier[unitcheck] ( identifier[u_l] , identifier[nonperiodic] ):
identifier[v_l] = identifier[prior_transform] ( identifier[np] . identifier[array] ( identifier[u_l] ))
identifier[logl_l] = identifier[loglikelihood] ( identifier[np] . identifier[array] ( identifier[v_l] ))
identifier[nc] += literal[int]
identifier[ncall] += literal[int]
identifier[nmove] += literal[int]
keyword[else] :
identifier[logl_l] =- identifier[np] . identifier[inf]
keyword[if] identifier[logl_l] < identifier[loglstar] :
keyword[if] identifier[reflect] :
identifier[reverse] = keyword[True]
identifier[nodes_r] . identifier[pop] ()
keyword[break]
keyword[else] :
identifier[u_out] = identifier[np] . identifier[array] ( identifier[u_l] )
identifier[logl_out] = identifier[logl_l]
keyword[if] identifier[np] . identifier[isfinite] ( identifier[logl_out] ):
identifier[reverse] = keyword[False]
keyword[else] :
identifier[reverse] = keyword[True]
keyword[else] :
identifier[reflect] = keyword[False]
identifier[u_in] . identifier[append] ( identifier[np] . identifier[array] ( identifier[u_l] ))
keyword[if] identifier[u_out] keyword[is] keyword[not] keyword[None] :
keyword[break]
keyword[if] identifier[len] ( identifier[nodes_r] )== identifier[len] ( identifier[nodes_l] )+ literal[int] :
keyword[try] :
identifier[u_in] = identifier[u_in] [ identifier[rstate] . identifier[choice] ( identifier[len] ( identifier[u_in] ))]
keyword[except] :
identifier[u_in] = identifier[np] . identifier[array] ( identifier[u] )
keyword[pass]
identifier[nodes_m] . identifier[append] ( identifier[np] . identifier[array] ( identifier[u_in] ))
identifier[nodes_l] . identifier[append] ( identifier[np] . identifier[array] ( identifier[u_out] ))
keyword[if] identifier[reverse] :
keyword[break]
identifier[u_l] , identifier[logl_l] = identifier[u_out] , identifier[logl_out]
keyword[if] identifier[grad] keyword[is] keyword[None] :
identifier[h] = identifier[np] . identifier[zeros] ( identifier[n] )
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[n] ):
identifier[u_l_l] , identifier[u_l_r] = identifier[np] . identifier[array] ( identifier[u_l] ), identifier[np] . identifier[array] ( identifier[u_l] )
identifier[u_l_r] [ identifier[i] ]+= literal[int]
keyword[if] identifier[unitcheck] ( identifier[u_l_r] , identifier[nonperiodic] ):
identifier[v_l_r] = identifier[prior_transform] ( identifier[np] . identifier[array] ( identifier[u_l_r] ))
identifier[logl_l_r] = identifier[loglikelihood] ( identifier[np] . identifier[array] ( identifier[v_l_r] ))
keyword[else] :
identifier[logl_l_r] =- identifier[np] . identifier[inf]
identifier[reverse] = keyword[True]
identifier[nc] += literal[int]
identifier[u_l_l] [ identifier[i] ]-= literal[int]
keyword[if] identifier[unitcheck] ( identifier[u_l_l] , identifier[nonperiodic] ):
identifier[v_l_l] = identifier[prior_transform] ( identifier[np] . identifier[array] ( identifier[u_l_l] ))
identifier[logl_l_l] = identifier[loglikelihood] ( identifier[np] . identifier[array] ( identifier[v_l_l] ))
keyword[else] :
identifier[logl_l_l] =- identifier[np] . identifier[inf]
identifier[reverse] = keyword[True]
keyword[if] identifier[reverse] :
keyword[break]
identifier[nc] += literal[int]
identifier[h] [ identifier[i] ]=( identifier[logl_l_r] - identifier[logl_l_l] )/ literal[int]
keyword[else] :
identifier[h] = identifier[grad] ( identifier[v_l] )
keyword[if] identifier[compute_jac] :
identifier[jac] =[]
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[n] ):
identifier[u_l_l] , identifier[u_l_r] = identifier[np] . identifier[array] ( identifier[u_l] ), identifier[np] . identifier[array] ( identifier[u_l] )
identifier[u_l_r] [ identifier[i] ]+= literal[int]
keyword[if] identifier[unitcheck] ( identifier[u_l_r] , identifier[nonperiodic] ):
identifier[v_l_r] = identifier[prior_transform] ( identifier[np] . identifier[array] ( identifier[u_l_r] ))
keyword[else] :
identifier[reverse] = keyword[True]
identifier[v_l_r] = identifier[np] . identifier[array] ( identifier[v_l] )
identifier[u_l_l] [ identifier[i] ]-= literal[int]
keyword[if] identifier[unitcheck] ( identifier[u_l_l] , identifier[nonperiodic] ):
identifier[v_l_l] = identifier[prior_transform] ( identifier[np] . identifier[array] ( identifier[u_l_l] ))
keyword[else] :
identifier[reverse] = keyword[True]
identifier[v_l_l] = identifier[np] . identifier[array] ( identifier[v_l] )
keyword[if] identifier[reverse] :
keyword[break]
identifier[jac] . identifier[append] (( identifier[v_l_r] - identifier[v_l_l] )/ literal[int] )
identifier[jac] = identifier[np] . identifier[array] ( identifier[jac] )
identifier[h] = identifier[np] . identifier[dot] ( identifier[jac] , identifier[h] )
identifier[nc] += literal[int]
identifier[vel_ref] = identifier[vel] - literal[int] * identifier[h] * identifier[np] . identifier[dot] ( identifier[vel] , identifier[h] )/ identifier[linalg] . identifier[norm] ( identifier[h] )** literal[int]
identifier[dotprod] = identifier[np] . identifier[dot] ( identifier[vel_ref] , identifier[vel] )
identifier[dotprod] /= identifier[linalg] . identifier[norm] ( identifier[vel_ref] )* identifier[linalg] . identifier[norm] ( identifier[vel] )
keyword[if] identifier[dotprod] <- literal[int] :
identifier[reverse] = keyword[True]
keyword[break]
keyword[else] :
identifier[vel] = identifier[vel_ref]
identifier[u_out] = keyword[None]
identifier[reflect] = keyword[True]
identifier[nreflect] += literal[int]
keyword[if] identifier[len] ( identifier[nodes_l] )> literal[int] :
identifier[nodes_l] . identifier[pop] ( literal[int] )
identifier[nodes_m] . identifier[pop] ( literal[int] )
identifier[nodes_r] . identifier[pop] ( literal[int] )
identifier[nodes_l] , identifier[nodes_m] , identifier[nodes_r] =( identifier[np] . identifier[array] ( identifier[nodes_l] ), identifier[np] . identifier[array] ( identifier[nodes_m] ),
identifier[np] . identifier[array] ( identifier[nodes_r] ))
identifier[Nchords] = identifier[len] ( identifier[nodes_l] )
identifier[axlen] = identifier[np] . identifier[zeros] ( identifier[Nchords] , identifier[dtype] = literal[string] )
keyword[for] identifier[i] ,( identifier[nl] , identifier[nm] , identifier[nr] ) keyword[in] identifier[enumerate] ( identifier[zip] ( identifier[nodes_l] , identifier[nodes_m] , identifier[nodes_r] )):
identifier[axlen] [ identifier[i] ]= identifier[linalg] . identifier[norm] ( identifier[nr] - identifier[nl] )
keyword[while] keyword[True] :
identifier[axprob] = identifier[axlen] / identifier[np] . identifier[sum] ( identifier[axlen] )
identifier[idx] = identifier[rstate] . identifier[choice] ( identifier[Nchords] , identifier[p] = identifier[axprob] )
identifier[u_l] , identifier[u_m] , identifier[u_r] = identifier[nodes_l] [ identifier[idx] ], identifier[nodes_m] [ identifier[idx] ], identifier[nodes_r] [ identifier[idx] ]
identifier[u_hat] = identifier[u_r] - identifier[u_l]
identifier[rprop] = identifier[rstate] . identifier[rand] ()
identifier[u_prop] = identifier[u_l] + identifier[rprop] * identifier[u_hat]
keyword[if] identifier[unitcheck] ( identifier[u_prop] , identifier[nonperiodic] ):
identifier[v_prop] = identifier[prior_transform] ( identifier[np] . identifier[array] ( identifier[u_prop] ))
identifier[logl_prop] = identifier[loglikelihood] ( identifier[np] . identifier[array] ( identifier[v_prop] ))
keyword[else] :
identifier[logl_prop] =- identifier[np] . identifier[inf]
identifier[nc] += literal[int]
identifier[ncontract] += literal[int]
keyword[if] identifier[logl_prop] >= identifier[loglstar] :
identifier[u] = identifier[u_prop]
keyword[break]
keyword[else] :
identifier[s] = identifier[np] . identifier[dot] ( identifier[u_prop] - identifier[u_m] , identifier[u_hat] )
keyword[if] identifier[s] < literal[int] :
identifier[nodes_l] [ identifier[idx] ]= identifier[u_prop]
identifier[axlen] [ identifier[idx] ]*= literal[int] - identifier[rprop]
keyword[elif] identifier[s] > literal[int] :
identifier[nodes_r] [ identifier[idx] ]= identifier[u_prop]
identifier[axlen] [ identifier[idx] ]*= identifier[rprop]
keyword[else] :
keyword[raise] identifier[RuntimeError] ( literal[string]
literal[string]
literal[string]
literal[string]
literal[string]
literal[string]
literal[string]
literal[string]
literal[string]
literal[string]
. identifier[format] ( identifier[u] , identifier[u_l] , identifier[u_r] , identifier[u_hat] , identifier[u_prop] ,
identifier[loglstar] , identifier[logl_prop] ))
identifier[blob] ={ literal[string] : identifier[nmove] , literal[string] : identifier[nreflect] , literal[string] : identifier[ncontract] }
keyword[return] identifier[u_prop] , identifier[v_prop] , identifier[logl_prop] , identifier[nc] , identifier[blob]
|
def sample_hslice(args):
"""
Return a new live point proposed by "Hamiltonian" Slice Sampling
using a series of random trajectories away from an existing live point.
Each trajectory is based on the provided axes and samples are determined
by moving forwards/backwards in time until the trajectory hits an edge
and approximately reflecting off the boundaries.
Once a series of reflections has been established, we propose a new live
point by slice sampling across the entire path.
Parameters
----------
u : `~numpy.ndarray` with shape (npdim,)
Position of the initial sample. **This is a copy of an existing live
point.**
loglstar : float
Ln(likelihood) bound.
axes : `~numpy.ndarray` with shape (ndim, ndim)
Axes used to propose new slice directions.
scale : float
Value used to scale the provided axes.
prior_transform : function
        Function transforming a sample from the unit cube to the parameter
space of interest according to the prior.
loglikelihood : function
Function returning ln(likelihood) given parameters as a 1-d `~numpy`
array of length `ndim`.
kwargs : dict
A dictionary of additional method-specific parameters.
Returns
-------
u : `~numpy.ndarray` with shape (npdim,)
Position of the final proposed point within the unit cube.
v : `~numpy.ndarray` with shape (ndim,)
Position of the final proposed point in the target parameter space.
logl : float
Ln(likelihood) of the final proposed point.
nc : int
Number of function calls used to generate the sample.
blob : dict
Collection of ancillary quantities used to tune :data:`scale`.
"""
# Unzipping.
(u, loglstar, axes, scale, prior_transform, loglikelihood, kwargs) = args
rstate = np.random
# Periodicity.
nonperiodic = kwargs.get('nonperiodic', None)
# Setup.
n = len(u)
slices = kwargs.get('slices', 5) # number of slices
grad = kwargs.get('grad', None) # gradient of log-likelihood
max_move = kwargs.get('max_move', 100) # limit for `ncall`
compute_jac = kwargs.get('compute_jac', False) # whether Jacobian needed
jitter = 0.25 # 25% jitter
nc = 0
nmove = 0
nreflect = 0
ncontract = 0
# Slice sampling loop.
for it in range(slices):
# Define the left, "inner", and right "nodes" for a given chord.
        # We will slice sample along these chords.
(nodes_l, nodes_m, nodes_r) = ([], [], [])
# Propose a direction on the unit n-sphere.
drhat = rstate.randn(n)
drhat /= linalg.norm(drhat)
# Transform and scale based on past tuning.
axis = np.dot(axes, drhat) * scale * 0.01
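        # The 0.01 factor keeps the base step at 1% of the tuned axis length.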
# Create starting window.
vel = np.array(axis) # current velocity
u_l = u - rstate.uniform(1.0 - jitter, 1.0 + jitter) * vel
u_r = u + rstate.uniform(1.0 - jitter, 1.0 + jitter) * vel
nodes_l.append(np.array(u_l))
nodes_m.append(np.array(u))
nodes_r.append(np.array(u_r))
# Progress "right" (i.e. "forwards" in time).
(reverse, reflect) = (False, False)
u_r = np.array(u)
ncall = 0
while ncall <= max_move:
# Iterate until we can bracket the edge of the distribution.
nodes_l.append(np.array(u_r))
(u_out, u_in) = (None, [])
while True:
# Step forward.
u_r += rstate.uniform(1.0 - jitter, 1.0 + jitter) * vel
# Evaluate point.
if unitcheck(u_r, nonperiodic):
v_r = prior_transform(np.array(u_r))
logl_r = loglikelihood(np.array(v_r))
nc += 1
ncall += 1
nmove += 1 # depends on [control=['if'], data=[]]
else:
logl_r = -np.inf
# Check if we satisfy the log-likelihood constraint
# (i.e. are "in" or "out" of bounds).
if logl_r < loglstar:
if reflect:
# If we are out of bounds and just reflected, we
# reverse direction and terminate immediately.
reverse = True
nodes_l.pop() # remove since chord does not exist
break # depends on [control=['if'], data=[]]
else:
# If we're already in bounds, then we're safe.
u_out = np.array(u_r)
logl_out = logl_r
# Check if we could compute gradients assuming we
# terminated with the current `u_out`.
if np.isfinite(logl_out):
reverse = False # depends on [control=['if'], data=[]]
else:
reverse = True # depends on [control=['if'], data=['logl_r']]
else:
reflect = False
u_in.append(np.array(u_r))
# Check if we've bracketed the edge.
if u_out is not None:
break # depends on [control=['if'], data=[]] # depends on [control=['while'], data=[]]
# Define the rest of our chord.
if len(nodes_l) == len(nodes_r) + 1:
try:
u_in = u_in[rstate.choice(len(u_in))] # pick point randomly # depends on [control=['try'], data=[]]
                except ValueError:
                    # `u_in` is empty; fall back to the original point.
                    u_in = np.array(u) # depends on [control=['except'], data=[]]
nodes_m.append(np.array(u_in))
nodes_r.append(np.array(u_out)) # depends on [control=['if'], data=[]]
# Check if we have turned around.
if reverse:
break # depends on [control=['if'], data=[]]
# Reflect off the boundary.
(u_r, logl_r) = (u_out, logl_out)
if grad is None:
# If the gradient is not provided, we will attempt to
# approximate it numerically using 2nd-order methods.
h = np.zeros(n)
for i in range(n):
(u_r_l, u_r_r) = (np.array(u_r), np.array(u_r))
# right side
u_r_r[i] += 1e-10
if unitcheck(u_r_r, nonperiodic):
v_r_r = prior_transform(np.array(u_r_r))
logl_r_r = loglikelihood(np.array(v_r_r)) # depends on [control=['if'], data=[]]
else:
logl_r_r = -np.inf
reverse = True # can't compute gradient
nc += 1
# left side
u_r_l[i] -= 1e-10
if unitcheck(u_r_l, nonperiodic):
v_r_l = prior_transform(np.array(u_r_l))
logl_r_l = loglikelihood(np.array(v_r_l)) # depends on [control=['if'], data=[]]
else:
logl_r_l = -np.inf
reverse = True # can't compute gradient
if reverse:
break # give up because we have to turn around # depends on [control=['if'], data=[]]
nc += 1
# compute dlnl/du
h[i] = (logl_r_r - logl_r_l) / 2e-10 # depends on [control=['for'], data=['i']] # depends on [control=['if'], data=[]]
else:
# If the gradient is provided, evaluate it.
h = grad(v_r)
if compute_jac:
jac = []
# Evaluate and apply Jacobian dv/du if gradient
# is defined as d(lnL)/dv instead of d(lnL)/du.
for i in range(n):
(u_r_l, u_r_r) = (np.array(u_r), np.array(u_r))
# right side
u_r_r[i] += 1e-10
if unitcheck(u_r_r, nonperiodic):
v_r_r = prior_transform(np.array(u_r_r)) # depends on [control=['if'], data=[]]
else:
reverse = True # can't compute Jacobian
v_r_r = np.array(v_r) # assume no movement
# left side
u_r_l[i] -= 1e-10
if unitcheck(u_r_l, nonperiodic):
v_r_l = prior_transform(np.array(u_r_l)) # depends on [control=['if'], data=[]]
else:
reverse = True # can't compute Jacobian
                            v_r_l = np.array(v_r)  # assume no movement
if reverse:
break # give up because we have to turn around # depends on [control=['if'], data=[]]
jac.append((v_r_r - v_r_l) / 2e-10) # depends on [control=['for'], data=['i']]
jac = np.array(jac)
h = np.dot(jac, h) # apply Jacobian # depends on [control=['if'], data=[]]
nc += 1
# Compute specular reflection off boundary.
vel_ref = vel - 2 * h * np.dot(vel, h) / linalg.norm(h) ** 2
dotprod = np.dot(vel_ref, vel)
dotprod /= linalg.norm(vel_ref) * linalg.norm(vel)
# Check angle of reflection.
if dotprod < -0.99:
                # The reflected velocity is nearly antiparallel to the
                # incoming velocity, so this is effectively a reversal.
reverse = True
break # depends on [control=['if'], data=[]]
else:
# If the reflection angle is sufficiently large, we
# proceed as normal to the new position.
vel = vel_ref
u_out = None
reflect = True
nreflect += 1 # depends on [control=['while'], data=['ncall']]
# Progress "left" (i.e. "backwards" in time).
(reverse, reflect) = (False, False)
vel = -np.array(axis) # current velocity
u_l = np.array(u)
ncall = 0
while ncall <= max_move:
# Iterate until we can bracket the edge of the distribution.
            # Steps are jittered multiples of the current velocity.
nodes_r.append(np.array(u_l))
(u_out, u_in) = (None, [])
while True:
# Step forward.
u_l += rstate.uniform(1.0 - jitter, 1.0 + jitter) * vel
# Evaluate point.
if unitcheck(u_l, nonperiodic):
v_l = prior_transform(np.array(u_l))
logl_l = loglikelihood(np.array(v_l))
nc += 1
ncall += 1
nmove += 1 # depends on [control=['if'], data=[]]
else:
logl_l = -np.inf
# Check if we satisfy the log-likelihood constraint
# (i.e. are "in" or "out" of bounds).
if logl_l < loglstar:
if reflect:
# If we are out of bounds and just reflected, we
# reverse direction and terminate immediately.
reverse = True
nodes_r.pop() # remove since chord does not exist
break # depends on [control=['if'], data=[]]
else:
# If we're already in bounds, then we're safe.
u_out = np.array(u_l)
logl_out = logl_l
# Check if we could compute gradients assuming we
# terminated with the current `u_out`.
if np.isfinite(logl_out):
reverse = False # depends on [control=['if'], data=[]]
else:
reverse = True # depends on [control=['if'], data=['logl_l']]
else:
reflect = False
u_in.append(np.array(u_l))
# Check if we've bracketed the edge.
if u_out is not None:
break # depends on [control=['if'], data=[]] # depends on [control=['while'], data=[]]
# Define the rest of our chord.
if len(nodes_r) == len(nodes_l) + 1:
try:
u_in = u_in[rstate.choice(len(u_in))] # pick point randomly # depends on [control=['try'], data=[]]
                except ValueError:
                    # `u_in` is empty; fall back to the original point.
                    u_in = np.array(u) # depends on [control=['except'], data=[]]
nodes_m.append(np.array(u_in))
nodes_l.append(np.array(u_out)) # depends on [control=['if'], data=[]]
# Check if we have turned around.
if reverse:
break # depends on [control=['if'], data=[]]
# Reflect off the boundary.
(u_l, logl_l) = (u_out, logl_out)
if grad is None:
# If the gradient is not provided, we will attempt to
# approximate it numerically using 2nd-order methods.
h = np.zeros(n)
for i in range(n):
(u_l_l, u_l_r) = (np.array(u_l), np.array(u_l))
# right side
u_l_r[i] += 1e-10
if unitcheck(u_l_r, nonperiodic):
v_l_r = prior_transform(np.array(u_l_r))
logl_l_r = loglikelihood(np.array(v_l_r)) # depends on [control=['if'], data=[]]
else:
logl_l_r = -np.inf
reverse = True # can't compute gradient
nc += 1
# left side
u_l_l[i] -= 1e-10
if unitcheck(u_l_l, nonperiodic):
v_l_l = prior_transform(np.array(u_l_l))
logl_l_l = loglikelihood(np.array(v_l_l)) # depends on [control=['if'], data=[]]
else:
logl_l_l = -np.inf
reverse = True # can't compute gradient
if reverse:
break # give up because we have to turn around # depends on [control=['if'], data=[]]
nc += 1
# compute dlnl/du
h[i] = (logl_l_r - logl_l_l) / 2e-10 # depends on [control=['for'], data=['i']] # depends on [control=['if'], data=[]]
else:
# If the gradient is provided, evaluate it.
h = grad(v_l)
if compute_jac:
jac = []
# Evaluate and apply Jacobian dv/du if gradient
# is defined as d(lnL)/dv instead of d(lnL)/du.
for i in range(n):
(u_l_l, u_l_r) = (np.array(u_l), np.array(u_l))
# right side
u_l_r[i] += 1e-10
if unitcheck(u_l_r, nonperiodic):
v_l_r = prior_transform(np.array(u_l_r)) # depends on [control=['if'], data=[]]
else:
reverse = True # can't compute Jacobian
v_l_r = np.array(v_l) # assume no movement
# left side
u_l_l[i] -= 1e-10
if unitcheck(u_l_l, nonperiodic):
v_l_l = prior_transform(np.array(u_l_l)) # depends on [control=['if'], data=[]]
else:
reverse = True # can't compute Jacobian
                            v_l_l = np.array(v_l)  # assume no movement
if reverse:
break # give up because we have to turn around # depends on [control=['if'], data=[]]
jac.append((v_l_r - v_l_l) / 2e-10) # depends on [control=['for'], data=['i']]
jac = np.array(jac)
h = np.dot(jac, h) # apply Jacobian # depends on [control=['if'], data=[]]
nc += 1
# Compute specular reflection off boundary.
vel_ref = vel - 2 * h * np.dot(vel, h) / linalg.norm(h) ** 2
dotprod = np.dot(vel_ref, vel)
dotprod /= linalg.norm(vel_ref) * linalg.norm(vel)
# Check angle of reflection.
if dotprod < -0.99:
                # The reflected velocity is nearly antiparallel to the
                # incoming velocity, so this is effectively a reversal.
reverse = True
break # depends on [control=['if'], data=[]]
else:
# If the reflection angle is sufficiently large, we
# proceed as normal to the new position.
vel = vel_ref
u_out = None
reflect = True
nreflect += 1 # depends on [control=['while'], data=['ncall']]
# Initialize lengths of chords.
if len(nodes_l) > 1:
# remove initial fallback chord
nodes_l.pop(0)
nodes_m.pop(0)
nodes_r.pop(0) # depends on [control=['if'], data=[]]
(nodes_l, nodes_m, nodes_r) = (np.array(nodes_l), np.array(nodes_m), np.array(nodes_r))
Nchords = len(nodes_l)
axlen = np.zeros(Nchords, dtype='float')
for (i, (nl, nm, nr)) in enumerate(zip(nodes_l, nodes_m, nodes_r)):
axlen[i] = linalg.norm(nr - nl) # depends on [control=['for'], data=[]]
# Slice sample from all chords simultaneously. This is equivalent to
# slice sampling in *time* along our trajectory.
while True:
# Select chord.
axprob = axlen / np.sum(axlen)
idx = rstate.choice(Nchords, p=axprob)
# Define chord.
(u_l, u_m, u_r) = (nodes_l[idx], nodes_m[idx], nodes_r[idx])
u_hat = u_r - u_l
rprop = rstate.rand()
u_prop = u_l + rprop * u_hat # scale from left
if unitcheck(u_prop, nonperiodic):
v_prop = prior_transform(np.array(u_prop))
logl_prop = loglikelihood(np.array(v_prop)) # depends on [control=['if'], data=[]]
else:
logl_prop = -np.inf
nc += 1
ncontract += 1
# If we succeed, move to the new position.
if logl_prop >= loglstar:
u = u_prop
break # depends on [control=['if'], data=[]]
else:
# If we fail, check if the new point is to the left/right of
# the point interior to the bounds (`u_m`) and update
# the bounds accordingly.
s = np.dot(u_prop - u_m, u_hat) # check sign (+/-)
if s < 0: # left
nodes_l[idx] = u_prop
axlen[idx] *= 1 - rprop # depends on [control=['if'], data=[]]
elif s > 0: # right
nodes_r[idx] = u_prop
axlen[idx] *= rprop # depends on [control=['if'], data=[]]
else:
raise RuntimeError('Slice sampler has failed to find a valid point. Some useful output quantities:\nu: {0}\nu_left: {1}\nu_right: {2}\nu_hat: {3}\nu_prop: {4}\nloglstar: {5}\nlogl_prop: {6}.'.format(u, u_l, u_r, u_hat, u_prop, loglstar, logl_prop)) # depends on [control=['while'], data=[]] # depends on [control=['for'], data=[]]
blob = {'nmove': nmove, 'nreflect': nreflect, 'ncontract': ncontract}
return (u_prop, v_prop, logl_prop, nc, blob)
|
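The reflection step in `sample_hslice` pairs a central-difference gradient
estimate with a Householder reflection of the velocity. Below is a minimal
self-contained sketch of just that step; `logl` is a hypothetical
standard-Gaussian log-likelihood standing in for the sampler's
`loglikelihood`/`prior_transform` machinery.

import numpy as np
from numpy import linalg

def logl(u):
    # Hypothetical stand-in log-likelihood (standard Gaussian).
    return -0.5 * np.dot(u, u)

def reflect_velocity(u, vel, eps=1e-10):
    # Estimate h = dlogl/du with 2nd-order central differences,
    # mirroring the numerical branch of the sampler.
    n = len(u)
    h = np.zeros(n)
    for i in range(n):
        u_r, u_l = np.array(u), np.array(u)
        u_r[i] += eps
        u_l[i] -= eps
        h[i] = (logl(u_r) - logl(u_l)) / (2 * eps)
    # Specular (Householder) reflection of `vel` about the normal `h`.
    vel_ref = vel - 2 * h * np.dot(vel, h) / linalg.norm(h)**2
    # Cosine between incoming and reflected velocities; values near -1
    # mean the "reflection" is effectively a reversal.
    dotprod = np.dot(vel_ref, vel) / (linalg.norm(vel_ref) * linalg.norm(vel))
    return vel_ref, dotprod

vel_ref, dotprod = reflect_velocity(np.array([0.5, 0.2]), np.array([0.1, 0.0]))
print(vel_ref, dotprod)  # the reflected velocity keeps the same norm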
def tail(self, n=10):
"""
Get an SArray that contains the last n elements in the SArray.
Parameters
----------
n : int
The number of elements to fetch
Returns
-------
out : SArray
A new SArray which contains the last n rows of the current SArray.
"""
with cython_context():
return SArray(_proxy=self.__proxy__.tail(n))
|
def function[tail, parameter[self, n]]:
constant[
Get an SArray that contains the last n elements in the SArray.
Parameters
----------
n : int
The number of elements to fetch
Returns
-------
out : SArray
A new SArray which contains the last n rows of the current SArray.
]
with call[name[cython_context], parameter[]] begin[:]
return[call[name[SArray], parameter[]]]
|
keyword[def] identifier[tail] ( identifier[self] , identifier[n] = literal[int] ):
literal[string]
keyword[with] identifier[cython_context] ():
keyword[return] identifier[SArray] ( identifier[_proxy] = identifier[self] . identifier[__proxy__] . identifier[tail] ( identifier[n] ))
|
def tail(self, n=10):
"""
Get an SArray that contains the last n elements in the SArray.
Parameters
----------
n : int
The number of elements to fetch
Returns
-------
out : SArray
A new SArray which contains the last n rows of the current SArray.
"""
with cython_context():
return SArray(_proxy=self.__proxy__.tail(n)) # depends on [control=['with'], data=[]]
|
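For context, a hypothetical usage of `tail` (this matches the SFrame-style
`SArray` API; the class itself is defined elsewhere in the surrounding
package, so the snippet assumes it is importable):

sa = SArray(range(100))
last_five = sa.tail(5)   # SArray([95, 96, 97, 98, 99])
last_ten = sa.tail()     # the default fetches the last 10 elements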
def is_edge_not_excluding_vertices(self, other):
"""Returns False iif any edge excludes all vertices of other."""
c_a = cos(self.angle)
s_a = sin(self.angle)
# Get min and max of other.
other_x_min, other_x_max, other_y_min, other_y_max = other.get_bbox()
self_x_diff = 0.5 * self.width
self_y_diff = 0.5 * self.height
if c_a > 0:
if s_a > 0:
return \
c_a * other_x_max + s_a * other_y_max < -self_x_diff or \
c_a * other_x_min + s_a * other_y_min > self_x_diff or \
c_a * other_y_max - s_a * other_x_min < -self_y_diff or \
c_a * other_y_min - s_a * other_x_max > self_y_diff
else: # s_a <= 0.0
return \
c_a * other_x_max + s_a * other_y_min < -self_x_diff or \
c_a * other_x_min + s_a * other_y_max > self_x_diff or \
c_a * other_y_max - s_a * other_x_max < -self_y_diff or \
c_a * other_y_min - s_a * other_x_min > self_y_diff
else: # c_a <= 0.0
if s_a > 0:
return \
c_a * other_x_min + s_a * other_y_max < -self_x_diff or \
c_a * other_x_max + s_a * other_y_min > self_x_diff or \
c_a * other_y_min - s_a * other_x_min < -self_y_diff or \
c_a * other_y_max - s_a * other_x_max > self_y_diff
else: # s_a <= 0.0
return \
c_a * other_x_min + s_a * other_y_min < -self_x_diff or \
c_a * other_x_max + s_a * other_y_max > self_x_diff or \
c_a * other_y_min - s_a * other_x_max < -self_y_diff or \
c_a * other_y_max - s_a * other_x_min > self_y_diff
|
def function[is_edge_not_excluding_vertices, parameter[self, other]]:
constant[Returns False iif any edge excludes all vertices of other.]
variable[c_a] assign[=] call[name[cos], parameter[name[self].angle]]
variable[s_a] assign[=] call[name[sin], parameter[name[self].angle]]
<ast.Tuple object at 0x7da1b1617790> assign[=] call[name[other].get_bbox, parameter[]]
variable[self_x_diff] assign[=] binary_operation[constant[0.5] * name[self].width]
variable[self_y_diff] assign[=] binary_operation[constant[0.5] * name[self].height]
if compare[name[c_a] greater[>] constant[0]] begin[:]
if compare[name[s_a] greater[>] constant[0]] begin[:]
return[<ast.BoolOp object at 0x7da1b1615b40>]
|
keyword[def] identifier[is_edge_not_excluding_vertices] ( identifier[self] , identifier[other] ):
literal[string]
identifier[c_a] = identifier[cos] ( identifier[self] . identifier[angle] )
identifier[s_a] = identifier[sin] ( identifier[self] . identifier[angle] )
identifier[other_x_min] , identifier[other_x_max] , identifier[other_y_min] , identifier[other_y_max] = identifier[other] . identifier[get_bbox] ()
identifier[self_x_diff] = literal[int] * identifier[self] . identifier[width]
identifier[self_y_diff] = literal[int] * identifier[self] . identifier[height]
keyword[if] identifier[c_a] > literal[int] :
keyword[if] identifier[s_a] > literal[int] :
keyword[return] identifier[c_a] * identifier[other_x_max] + identifier[s_a] * identifier[other_y_max] <- identifier[self_x_diff] keyword[or] identifier[c_a] * identifier[other_x_min] + identifier[s_a] * identifier[other_y_min] > identifier[self_x_diff] keyword[or] identifier[c_a] * identifier[other_y_max] - identifier[s_a] * identifier[other_x_min] <- identifier[self_y_diff] keyword[or] identifier[c_a] * identifier[other_y_min] - identifier[s_a] * identifier[other_x_max] > identifier[self_y_diff]
keyword[else] :
keyword[return] identifier[c_a] * identifier[other_x_max] + identifier[s_a] * identifier[other_y_min] <- identifier[self_x_diff] keyword[or] identifier[c_a] * identifier[other_x_min] + identifier[s_a] * identifier[other_y_max] > identifier[self_x_diff] keyword[or] identifier[c_a] * identifier[other_y_max] - identifier[s_a] * identifier[other_x_max] <- identifier[self_y_diff] keyword[or] identifier[c_a] * identifier[other_y_min] - identifier[s_a] * identifier[other_x_min] > identifier[self_y_diff]
keyword[else] :
keyword[if] identifier[s_a] > literal[int] :
keyword[return] identifier[c_a] * identifier[other_x_min] + identifier[s_a] * identifier[other_y_max] <- identifier[self_x_diff] keyword[or] identifier[c_a] * identifier[other_x_max] + identifier[s_a] * identifier[other_y_min] > identifier[self_x_diff] keyword[or] identifier[c_a] * identifier[other_y_min] - identifier[s_a] * identifier[other_x_min] <- identifier[self_y_diff] keyword[or] identifier[c_a] * identifier[other_y_max] - identifier[s_a] * identifier[other_x_max] > identifier[self_y_diff]
keyword[else] :
keyword[return] identifier[c_a] * identifier[other_x_min] + identifier[s_a] * identifier[other_y_min] <- identifier[self_x_diff] keyword[or] identifier[c_a] * identifier[other_x_max] + identifier[s_a] * identifier[other_y_max] > identifier[self_x_diff] keyword[or] identifier[c_a] * identifier[other_y_min] - identifier[s_a] * identifier[other_x_max] <- identifier[self_y_diff] keyword[or] identifier[c_a] * identifier[other_y_max] - identifier[s_a] * identifier[other_x_min] > identifier[self_y_diff]
|
def is_edge_not_excluding_vertices(self, other):
"""Returns False iif any edge excludes all vertices of other."""
c_a = cos(self.angle)
s_a = sin(self.angle)
# Get min and max of other.
(other_x_min, other_x_max, other_y_min, other_y_max) = other.get_bbox()
self_x_diff = 0.5 * self.width
self_y_diff = 0.5 * self.height
if c_a > 0:
if s_a > 0:
return c_a * other_x_max + s_a * other_y_max < -self_x_diff or c_a * other_x_min + s_a * other_y_min > self_x_diff or c_a * other_y_max - s_a * other_x_min < -self_y_diff or (c_a * other_y_min - s_a * other_x_max > self_y_diff) # depends on [control=['if'], data=['s_a']]
else: # s_a <= 0.0
return c_a * other_x_max + s_a * other_y_min < -self_x_diff or c_a * other_x_min + s_a * other_y_max > self_x_diff or c_a * other_y_max - s_a * other_x_max < -self_y_diff or (c_a * other_y_min - s_a * other_x_min > self_y_diff) # depends on [control=['if'], data=['c_a']] # c_a <= 0.0
elif s_a > 0:
return c_a * other_x_min + s_a * other_y_max < -self_x_diff or c_a * other_x_max + s_a * other_y_min > self_x_diff or c_a * other_y_min - s_a * other_x_min < -self_y_diff or (c_a * other_y_max - s_a * other_x_max > self_y_diff) # depends on [control=['if'], data=['s_a']]
else: # s_a <= 0.0
return c_a * other_x_min + s_a * other_y_min < -self_x_diff or c_a * other_x_max + s_a * other_y_max > self_x_diff or c_a * other_y_min - s_a * other_x_max < -self_y_diff or (c_a * other_y_max - s_a * other_x_min > self_y_diff)
|
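The method above is a separating-edge test: it projects the corners of
`other`'s bounding box onto this rectangle's two edge normals,
(cos a, sin a) and (-sin a, cos a), and reports separation when an extreme
projection falls entirely outside an edge; the four sign cases merely pick
out which bbox corner attains each extreme. A compact standalone sketch of
the same projection idea, with a hypothetical origin-centered rectangle
given by (angle, width, height):

from math import cos, sin

def edge_excludes(angle, width, height, bbox):
    # bbox = (x_min, x_max, y_min, y_max) of the other shape.
    c, s = cos(angle), sin(angle)
    hx, hy = 0.5 * width, 0.5 * height
    x_min, x_max, y_min, y_max = bbox
    corners = [(x, y) for x in (x_min, x_max) for y in (y_min, y_max)]
    # Projections of all corners onto the two edge normals.
    px = [c * x + s * y for x, y in corners]
    py = [c * y - s * x for x, y in corners]
    return (max(px) < -hx or min(px) > hx or
            max(py) < -hy or min(py) > hy)

print(edge_excludes(0.0, 2.0, 1.0, (2.0, 3.0, -0.25, 0.25)))  # True: beyond the +x edge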
def get_urlclass_from (scheme, assume_local_file=False):
"""Return checker class for given URL scheme. If the scheme
cannot be matched and assume_local_file is True, assume a local file.
"""
if scheme in ("http", "https"):
klass = httpurl.HttpUrl
elif scheme == "ftp":
klass = ftpurl.FtpUrl
elif scheme == "file":
klass = fileurl.FileUrl
elif scheme == "telnet":
klass = telneturl.TelnetUrl
elif scheme == "mailto":
klass = mailtourl.MailtoUrl
elif scheme in ("nntp", "news", "snews"):
klass = nntpurl.NntpUrl
elif scheme == "dns":
klass = dnsurl.DnsUrl
elif scheme == "itms-services":
klass = itmsservicesurl.ItmsServicesUrl
elif scheme and unknownurl.is_unknown_scheme(scheme):
klass = unknownurl.UnknownUrl
elif assume_local_file:
klass = fileurl.FileUrl
else:
klass = unknownurl.UnknownUrl
return klass
|
def function[get_urlclass_from, parameter[scheme, assume_local_file]]:
constant[Return checker class for given URL scheme. If the scheme
cannot be matched and assume_local_file is True, assume a local file.
]
if compare[name[scheme] in tuple[[<ast.Constant object at 0x7da2047eba30>, <ast.Constant object at 0x7da2047ea530>]]] begin[:]
variable[klass] assign[=] name[httpurl].HttpUrl
return[name[klass]]
|
keyword[def] identifier[get_urlclass_from] ( identifier[scheme] , identifier[assume_local_file] = keyword[False] ):
literal[string]
keyword[if] identifier[scheme] keyword[in] ( literal[string] , literal[string] ):
identifier[klass] = identifier[httpurl] . identifier[HttpUrl]
keyword[elif] identifier[scheme] == literal[string] :
identifier[klass] = identifier[ftpurl] . identifier[FtpUrl]
keyword[elif] identifier[scheme] == literal[string] :
identifier[klass] = identifier[fileurl] . identifier[FileUrl]
keyword[elif] identifier[scheme] == literal[string] :
identifier[klass] = identifier[telneturl] . identifier[TelnetUrl]
keyword[elif] identifier[scheme] == literal[string] :
identifier[klass] = identifier[mailtourl] . identifier[MailtoUrl]
keyword[elif] identifier[scheme] keyword[in] ( literal[string] , literal[string] , literal[string] ):
identifier[klass] = identifier[nntpurl] . identifier[NntpUrl]
keyword[elif] identifier[scheme] == literal[string] :
identifier[klass] = identifier[dnsurl] . identifier[DnsUrl]
keyword[elif] identifier[scheme] == literal[string] :
identifier[klass] = identifier[itmsservicesurl] . identifier[ItmsServicesUrl]
keyword[elif] identifier[scheme] keyword[and] identifier[unknownurl] . identifier[is_unknown_scheme] ( identifier[scheme] ):
identifier[klass] = identifier[unknownurl] . identifier[UnknownUrl]
keyword[elif] identifier[assume_local_file] :
identifier[klass] = identifier[fileurl] . identifier[FileUrl]
keyword[else] :
identifier[klass] = identifier[unknownurl] . identifier[UnknownUrl]
keyword[return] identifier[klass]
|
def get_urlclass_from(scheme, assume_local_file=False):
"""Return checker class for given URL scheme. If the scheme
cannot be matched and assume_local_file is True, assume a local file.
"""
if scheme in ('http', 'https'):
klass = httpurl.HttpUrl # depends on [control=['if'], data=[]]
elif scheme == 'ftp':
klass = ftpurl.FtpUrl # depends on [control=['if'], data=[]]
elif scheme == 'file':
klass = fileurl.FileUrl # depends on [control=['if'], data=[]]
elif scheme == 'telnet':
klass = telneturl.TelnetUrl # depends on [control=['if'], data=[]]
elif scheme == 'mailto':
klass = mailtourl.MailtoUrl # depends on [control=['if'], data=[]]
elif scheme in ('nntp', 'news', 'snews'):
klass = nntpurl.NntpUrl # depends on [control=['if'], data=[]]
elif scheme == 'dns':
klass = dnsurl.DnsUrl # depends on [control=['if'], data=[]]
elif scheme == 'itms-services':
klass = itmsservicesurl.ItmsServicesUrl # depends on [control=['if'], data=[]]
elif scheme and unknownurl.is_unknown_scheme(scheme):
klass = unknownurl.UnknownUrl # depends on [control=['if'], data=[]]
elif assume_local_file:
klass = fileurl.FileUrl # depends on [control=['if'], data=[]]
else:
klass = unknownurl.UnknownUrl
return klass
|
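A hedged usage sketch for get_urlclass_from; the checker modules (httpurl, fileurl, unknownurl, ...) come from the surrounding package and are assumed to be imported:

klass = get_urlclass_from("https")                     # httpurl.HttpUrl
klass = get_urlclass_from("", assume_local_file=True)  # fileurl.FileUrl
klass = get_urlclass_from("gopher")                    # unknownurl.UnknownUrl,
                                                       # if the scheme is unknown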
def send_message(self,
subject_or_message: Optional[Union[Message, str]] = None,
to: Optional[Union[str, List[str]]] = None,
**kwargs):
"""
Send an email using the send function as configured by
:attr:`~flask_unchained.bundles.mail.config.Config.MAIL_SEND_FN`.
:param subject_or_message: The subject line, or an instance of
:class:`flask_mail.Message`.
:param to: The message recipient(s).
:param kwargs: Extra values to pass on to :class:`~flask_mail.Message`
"""
to = to or kwargs.pop('recipients', [])
return self.send(subject_or_message, to, **kwargs)
|
def function[send_message, parameter[self, subject_or_message, to]]:
constant[
Send an email using the send function as configured by
:attr:`~flask_unchained.bundles.mail.config.Config.MAIL_SEND_FN`.
:param subject_or_message: The subject line, or an instance of
:class:`flask_mail.Message`.
:param to: The message recipient(s).
:param kwargs: Extra values to pass on to :class:`~flask_mail.Message`
]
variable[to] assign[=] <ast.BoolOp object at 0x7da18bccae60>
return[call[name[self].send, parameter[name[subject_or_message], name[to]]]]
|
keyword[def] identifier[send_message] ( identifier[self] ,
identifier[subject_or_message] : identifier[Optional] [ identifier[Union] [ identifier[Message] , identifier[str] ]]= keyword[None] ,
identifier[to] : identifier[Optional] [ identifier[Union] [ identifier[str] , identifier[List] [ identifier[str] ]]]= keyword[None] ,
** identifier[kwargs] ):
literal[string]
identifier[to] = identifier[to] keyword[or] identifier[kwargs] . identifier[pop] ( literal[string] ,[])
keyword[return] identifier[self] . identifier[send] ( identifier[subject_or_message] , identifier[to] ,** identifier[kwargs] )
|
def send_message(self, subject_or_message: Optional[Union[Message, str]]=None, to: Optional[Union[str, List[str]]]=None, **kwargs):
"""
Send an email using the send function as configured by
:attr:`~flask_unchained.bundles.mail.config.Config.MAIL_SEND_FN`.
:param subject_or_message: The subject line, or an instance of
:class:`flask_mail.Message`.
:param to: The message recipient(s).
:param kwargs: Extra values to pass on to :class:`~flask_mail.Message`
"""
to = to or kwargs.pop('recipients', [])
return self.send(subject_or_message, to, **kwargs)
|
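A hedged usage sketch for send_message; the extension instance name ``mail`` is an assumption, and extra keyword arguments such as ``body`` are forwarded to flask_mail.Message:

mail.send_message('Welcome!', to='user@example.com',
                  body='Thanks for signing up.')
# legacy flask_mail style also works, since 'recipients' is popped into `to`:
mail.send_message('Welcome!', recipients=['user@example.com'],
                  body='Thanks for signing up.')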
def _init_az_api(self):
"""
Initialise client objects for talking to the Azure API.
This is in a separate function so it can be called by both ``__init__``
and ``__setstate__``.
"""
with self.__lock:
if self._resource_client is None:
log.debug("Making Azure `ServicePrincipalcredentials` object"
" with tenant=%r, client_id=%r, secret=%r ...",
self.tenant_id, self.client_id,
('<redacted>' if self.secret else None))
credentials = ServicePrincipalCredentials(
tenant=self.tenant_id,
client_id=self.client_id,
secret=self.secret,
)
log.debug("Initializing Azure `ComputeManagementclient` ...")
self._compute_client = ComputeManagementClient(credentials, self.subscription_id)
log.debug("Initializing Azure `NetworkManagementclient` ...")
self._network_client = NetworkManagementClient(credentials, self.subscription_id)
log.debug("Initializing Azure `ResourceManagementclient` ...")
self._resource_client = ResourceManagementClient(credentials, self.subscription_id)
log.info("Azure API clients initialized.")
|
def function[_init_az_api, parameter[self]]:
constant[
Initialise client objects for talking to the Azure API.
This is in a separate function so it can be called by both ``__init__``
and ``__setstate__``.
]
with name[self].__lock begin[:]
if compare[name[self]._resource_client is constant[None]] begin[:]
call[name[log].debug, parameter[constant[Making Azure `ServicePrincipalCredentials` object with tenant=%r, client_id=%r, secret=%r ...], name[self].tenant_id, name[self].client_id, <ast.IfExp object at 0x7da1b0744a60>]]
variable[credentials] assign[=] call[name[ServicePrincipalCredentials], parameter[]]
call[name[log].debug, parameter[constant[Initializing Azure `ComputeManagementClient` ...]]]
name[self]._compute_client assign[=] call[name[ComputeManagementClient], parameter[name[credentials], name[self].subscription_id]]
call[name[log].debug, parameter[constant[Initializing Azure `NetworkManagementClient` ...]]]
name[self]._network_client assign[=] call[name[NetworkManagementClient], parameter[name[credentials], name[self].subscription_id]]
call[name[log].debug, parameter[constant[Initializing Azure `ResourceManagementClient` ...]]]
name[self]._resource_client assign[=] call[name[ResourceManagementClient], parameter[name[credentials], name[self].subscription_id]]
call[name[log].info, parameter[constant[Azure API clients initialized.]]]
|
keyword[def] identifier[_init_az_api] ( identifier[self] ):
literal[string]
keyword[with] identifier[self] . identifier[__lock] :
keyword[if] identifier[self] . identifier[_resource_client] keyword[is] keyword[None] :
identifier[log] . identifier[debug] ( literal[string]
literal[string] ,
identifier[self] . identifier[tenant_id] , identifier[self] . identifier[client_id] ,
( literal[string] keyword[if] identifier[self] . identifier[secret] keyword[else] keyword[None] ))
identifier[credentials] = identifier[ServicePrincipalCredentials] (
identifier[tenant] = identifier[self] . identifier[tenant_id] ,
identifier[client_id] = identifier[self] . identifier[client_id] ,
identifier[secret] = identifier[self] . identifier[secret] ,
)
identifier[log] . identifier[debug] ( literal[string] )
identifier[self] . identifier[_compute_client] = identifier[ComputeManagementClient] ( identifier[credentials] , identifier[self] . identifier[subscription_id] )
identifier[log] . identifier[debug] ( literal[string] )
identifier[self] . identifier[_network_client] = identifier[NetworkManagementClient] ( identifier[credentials] , identifier[self] . identifier[subscription_id] )
identifier[log] . identifier[debug] ( literal[string] )
identifier[self] . identifier[_resource_client] = identifier[ResourceManagementClient] ( identifier[credentials] , identifier[self] . identifier[subscription_id] )
identifier[log] . identifier[info] ( literal[string] )
|
def _init_az_api(self):
"""
Initialise client objects for talking to the Azure API.
This is in a separate function so it can be called by both ``__init__``
and ``__setstate__``.
"""
with self.__lock:
if self._resource_client is None:
log.debug('Making Azure `ServicePrincipalCredentials` object with tenant=%r, client_id=%r, secret=%r ...', self.tenant_id, self.client_id, '<redacted>' if self.secret else None)
credentials = ServicePrincipalCredentials(tenant=self.tenant_id, client_id=self.client_id, secret=self.secret)
log.debug('Initializing Azure `ComputeManagementClient` ...')
self._compute_client = ComputeManagementClient(credentials, self.subscription_id)
log.debug('Initializing Azure `NetworkManagementClient` ...')
self._network_client = NetworkManagementClient(credentials, self.subscription_id)
log.debug('Initializing Azure `ResourceManagementClient` ...')
self._resource_client = ResourceManagementClient(credentials, self.subscription_id)
log.info('Azure API clients initialized.') # depends on [control=['if'], data=[]] # depends on [control=['with'], data=[]]
|
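The None-check under the lock makes the initialisation idempotent, which is why the method can be shared by ``__init__`` and ``__setstate__`` (after unpickling) and called from concurrent threads. A minimal sketch of the same lazy-init pattern, with a stand-in for the real API client:

import threading

class LazyClient:
    def __init__(self):
        self._lock = threading.Lock()
        self._client = None

    def _init_client(self):
        with self._lock:
            if self._client is None:
                # expensive construction runs at most once
                self._client = object()
        return self._client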
def toggle_fold_trigger(self, block):
"""
Toggle a fold trigger block (expand or collapse it).
:param block: The QTextBlock to expand/collapse
"""
if not TextBlockHelper.is_fold_trigger(block):
return
region = FoldScope(block)
if region.collapsed:
region.unfold()
if self._mouse_over_line is not None:
self._add_scope_decorations(
region._trigger, *region.get_range())
else:
region.fold()
self._clear_scope_decos()
self._refresh_editor_and_scrollbars()
self.trigger_state_changed.emit(region._trigger, region.collapsed)
|
def function[toggle_fold_trigger, parameter[self, block]]:
constant[
Toggle a fold trigger block (expand or collapse it).
:param block: The QTextBlock to expand/collapse
]
if <ast.UnaryOp object at 0x7da18dc9be50> begin[:]
return[None]
variable[region] assign[=] call[name[FoldScope], parameter[name[block]]]
if name[region].collapsed begin[:]
call[name[region].unfold, parameter[]]
if compare[name[self]._mouse_over_line is_not constant[None]] begin[:]
call[name[self]._add_scope_decorations, parameter[name[region]._trigger, <ast.Starred object at 0x7da18dc98b80>]]
call[name[self]._refresh_editor_and_scrollbars, parameter[]]
call[name[self].trigger_state_changed.emit, parameter[name[region]._trigger, name[region].collapsed]]
|
keyword[def] identifier[toggle_fold_trigger] ( identifier[self] , identifier[block] ):
literal[string]
keyword[if] keyword[not] identifier[TextBlockHelper] . identifier[is_fold_trigger] ( identifier[block] ):
keyword[return]
identifier[region] = identifier[FoldScope] ( identifier[block] )
keyword[if] identifier[region] . identifier[collapsed] :
identifier[region] . identifier[unfold] ()
keyword[if] identifier[self] . identifier[_mouse_over_line] keyword[is] keyword[not] keyword[None] :
identifier[self] . identifier[_add_scope_decorations] (
identifier[region] . identifier[_trigger] ,* identifier[region] . identifier[get_range] ())
keyword[else] :
identifier[region] . identifier[fold] ()
identifier[self] . identifier[_clear_scope_decos] ()
identifier[self] . identifier[_refresh_editor_and_scrollbars] ()
identifier[self] . identifier[trigger_state_changed] . identifier[emit] ( identifier[region] . identifier[_trigger] , identifier[region] . identifier[collapsed] )
|
def toggle_fold_trigger(self, block):
"""
Toggle a fold trigger block (expand or collapse it).
:param block: The QTextBlock to expand/collapse
"""
if not TextBlockHelper.is_fold_trigger(block):
return # depends on [control=['if'], data=[]]
region = FoldScope(block)
if region.collapsed:
region.unfold()
if self._mouse_over_line is not None:
self._add_scope_decorations(region._trigger, *region.get_range()) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
else:
region.fold()
self._clear_scope_decos()
self._refresh_editor_and_scrollbars()
self.trigger_state_changed.emit(region._trigger, region.collapsed)
|
def reflect_well(value, bounds):
"""Given some boundaries, reflects the value until it falls within both
boundaries. This is done iteratively, reflecting left off of the
`bounds.max`, then right off of the `bounds.min`, etc.
Parameters
----------
value : float
The value to apply the reflected boundaries to.
bounds : Bounds instance
Boundaries to reflect between. Both `bounds.min` and `bounds.max` must
be instances of `ReflectedBound`, otherwise an AttributeError is
raised.
Returns
-------
float
The value after being reflected between the two bounds.
"""
while value not in bounds:
value = bounds._max.reflect_left(value)
value = bounds._min.reflect_right(value)
return value
|
def function[reflect_well, parameter[value, bounds]]:
constant[Given some boundaries, reflects the value until it falls within both
boundaries. This is done iteratively, reflecting left off of the
`bounds.max`, then right off of the `bounds.min`, etc.
Parameters
----------
value : float
The value to apply the reflected boundaries to.
bounds : Bounds instance
Boundaries to reflect between. Both `bounds.min` and `bounds.max` must
be instances of `ReflectedBound`, otherwise an AttributeError is
raised.
Returns
-------
float
The value after being reflected between the two bounds.
]
while compare[name[value] <ast.NotIn object at 0x7da2590d7190> name[bounds]] begin[:]
variable[value] assign[=] call[name[bounds]._max.reflect_left, parameter[name[value]]]
variable[value] assign[=] call[name[bounds]._min.reflect_right, parameter[name[value]]]
return[name[value]]
|
keyword[def] identifier[reflect_well] ( identifier[value] , identifier[bounds] ):
literal[string]
keyword[while] identifier[value] keyword[not] keyword[in] identifier[bounds] :
identifier[value] = identifier[bounds] . identifier[_max] . identifier[reflect_left] ( identifier[value] )
identifier[value] = identifier[bounds] . identifier[_min] . identifier[reflect_right] ( identifier[value] )
keyword[return] identifier[value]
|
def reflect_well(value, bounds):
"""Given some boundaries, reflects the value until it falls within both
boundaries. This is done iteratively, reflecting left off of the
`bounds.max`, then right off of the `bounds.min`, etc.
Parameters
----------
value : float
The value to apply the reflected boundaries to.
bounds : Bounds instance
Boundaries to reflect between. Both `bounds.min` and `bounds.max` must
be instances of `ReflectedBound`, otherwise an AttributeError is
raised.
Returns
-------
float
The value after being reflected between the two bounds.
"""
while value not in bounds:
value = bounds._max.reflect_left(value)
value = bounds._min.reflect_right(value) # depends on [control=['while'], data=['value', 'bounds']]
return value
|
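A worked standalone sketch of the same iterative reflection, with plain scalar bounds in place of the ReflectedBound objects (assumes hi > lo so the loop terminates):

def reflect_well_scalar(value, lo, hi):
    # Reflect left off the upper bound, then right off the lower bound,
    # repeating until the value lands inside [lo, hi].
    while not (lo <= value <= hi):
        if value > hi:
            value = 2 * hi - value
        if value < lo:
            value = 2 * lo - value
    return value

assert reflect_well_scalar(13.0, 0.0, 10.0) == 7.0    # 13 -> 10 - 3 = 7
assert reflect_well_scalar(-25.0, 0.0, 10.0) == 5.0   # -25 -> 25 -> -5 -> 5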
def contains(self, rect):
"""
Tests if another rectangle is contained by this one
Arguments:
rect (Rectangle): The other rectangle
Returns:
bool: True if it is contained, False otherwise
"""
return (rect.y >= self.y and \
rect.x >= self.x and \
rect.y+rect.height <= self.y+self.height and \
rect.x+rect.width <= self.x+self.width)
|
def function[contains, parameter[self, rect]]:
constant[
Tests if another rectangle is contained by this one
Arguments:
rect (Rectangle): The other rectangle
Returns:
bool: True if it is contained, False otherwise
]
return[<ast.BoolOp object at 0x7da1b0bae230>]
|
keyword[def] identifier[contains] ( identifier[self] , identifier[rect] ):
literal[string]
keyword[return] ( identifier[rect] . identifier[y] >= identifier[self] . identifier[y] keyword[and] identifier[rect] . identifier[x] >= identifier[self] . identifier[x] keyword[and] identifier[rect] . identifier[y] + identifier[rect] . identifier[height] <= identifier[self] . identifier[y] + identifier[self] . identifier[height] keyword[and] identifier[rect] . identifier[x] + identifier[rect] . identifier[width] <= identifier[self] . identifier[x] + identifier[self] . identifier[width] )
|
def contains(self, rect):
"""
Tests if another rectangle is contained by this one
Arguments:
rect (Rectangle): The other rectangle
Returns:
bool: True if it is contained, False otherwise
"""
return rect.y >= self.y and rect.x >= self.x and (rect.y + rect.height <= self.y + self.height) and (rect.x + rect.width <= self.x + self.width)
|
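A hedged usage sketch; the Rectangle(x, y, width, height) constructor is an assumption:

outer = Rectangle(0, 0, 10, 10)
inner = Rectangle(2, 2, 5, 5)
assert outer.contains(inner)       # 2..7 lies inside 0..10 on both axes
assert not inner.contains(outer)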
def create_network_interface(name, subnet_id=None, subnet_name=None,
private_ip_address=None, description=None,
groups=None, region=None, key=None, keyid=None,
profile=None):
'''
Create an Elastic Network Interface.
.. versionadded:: 2016.3.0
CLI Example:
.. code-block:: bash
salt myminion boto_ec2.create_network_interface my_eni subnet-12345 description=my_eni groups=['my_group']
'''
if not salt.utils.data.exactly_one((subnet_id, subnet_name)):
raise SaltInvocationError('One (but not both) of subnet_id or '
'subnet_name must be provided.')
if subnet_name:
resource = __salt__['boto_vpc.get_resource_id']('subnet', subnet_name,
region=region, key=key,
keyid=keyid,
profile=profile)
if 'id' not in resource:
log.warning('Couldn\'t resolve subnet name %s.', subnet_name)
return False
subnet_id = resource['id']
conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile)
r = {}
result = _get_network_interface(conn, name)
if 'result' in result:
r['error'] = {'message': 'An ENI with this Name tag already exists.'}
return r
vpc_id = __salt__['boto_vpc.get_subnet_association'](
[subnet_id], region=region, key=key, keyid=keyid, profile=profile
)
vpc_id = vpc_id.get('vpc_id')
if not vpc_id:
msg = 'subnet_id {0} does not map to a valid vpc id.'.format(subnet_id)
r['error'] = {'message': msg}
return r
_groups = __salt__['boto_secgroup.convert_to_group_ids'](
groups, vpc_id=vpc_id, region=region, key=key,
keyid=keyid, profile=profile
)
try:
eni = conn.create_network_interface(
subnet_id,
private_ip_address=private_ip_address,
description=description,
groups=_groups
)
eni.add_tag('Name', name)
except boto.exception.EC2ResponseError as e:
r['error'] = __utils__['boto.get_error'](e)
return r
r['result'] = _describe_network_interface(eni)
return r
|
def function[create_network_interface, parameter[name, subnet_id, subnet_name, private_ip_address, description, groups, region, key, keyid, profile]]:
constant[
Create an Elastic Network Interface.
.. versionadded:: 2016.3.0
CLI Example:
.. code-block:: bash
salt myminion boto_ec2.create_network_interface my_eni subnet-12345 description=my_eni groups=['my_group']
]
if <ast.UnaryOp object at 0x7da1b210ace0> begin[:]
<ast.Raise object at 0x7da1b210bd30>
if name[subnet_name] begin[:]
variable[resource] assign[=] call[call[name[__salt__]][constant[boto_vpc.get_resource_id]], parameter[constant[subnet], name[subnet_name]]]
if compare[constant[id] <ast.NotIn object at 0x7da2590d7190> name[resource]] begin[:]
call[name[log].warning, parameter[constant[Couldn't resolve subnet name %s.], name[subnet_name]]]
return[constant[False]]
variable[subnet_id] assign[=] call[name[resource]][constant[id]]
variable[conn] assign[=] call[name[_get_conn], parameter[]]
variable[r] assign[=] dictionary[[], []]
variable[result] assign[=] call[name[_get_network_interface], parameter[name[conn], name[name]]]
if compare[constant[result] in name[result]] begin[:]
call[name[r]][constant[error]] assign[=] dictionary[[<ast.Constant object at 0x7da1b2108520>], [<ast.Constant object at 0x7da1b2108a90>]]
return[name[r]]
variable[vpc_id] assign[=] call[call[name[__salt__]][constant[boto_vpc.get_subnet_association]], parameter[list[[<ast.Name object at 0x7da1b2108b20>]]]]
variable[vpc_id] assign[=] call[name[vpc_id].get, parameter[constant[vpc_id]]]
if <ast.UnaryOp object at 0x7da1b21089d0> begin[:]
variable[msg] assign[=] call[constant[subnet_id {0} does not map to a valid vpc id.].format, parameter[name[subnet_id]]]
call[name[r]][constant[error]] assign[=] dictionary[[<ast.Constant object at 0x7da1b20bb0d0>], [<ast.Name object at 0x7da1b20b9ea0>]]
return[name[r]]
variable[_groups] assign[=] call[call[name[__salt__]][constant[boto_secgroup.convert_to_group_ids]], parameter[name[groups]]]
<ast.Try object at 0x7da1b20b8f40>
call[name[r]][constant[result]] assign[=] call[name[_describe_network_interface], parameter[name[eni]]]
return[name[r]]
|
keyword[def] identifier[create_network_interface] ( identifier[name] , identifier[subnet_id] = keyword[None] , identifier[subnet_name] = keyword[None] ,
identifier[private_ip_address] = keyword[None] , identifier[description] = keyword[None] ,
identifier[groups] = keyword[None] , identifier[region] = keyword[None] , identifier[key] = keyword[None] , identifier[keyid] = keyword[None] ,
identifier[profile] = keyword[None] ):
literal[string]
keyword[if] keyword[not] identifier[salt] . identifier[utils] . identifier[data] . identifier[exactly_one] (( identifier[subnet_id] , identifier[subnet_name] )):
keyword[raise] identifier[SaltInvocationError] ( literal[string]
literal[string] )
keyword[if] identifier[subnet_name] :
identifier[resource] = identifier[__salt__] [ literal[string] ]( literal[string] , identifier[subnet_name] ,
identifier[region] = identifier[region] , identifier[key] = identifier[key] ,
identifier[keyid] = identifier[keyid] ,
identifier[profile] = identifier[profile] )
keyword[if] literal[string] keyword[not] keyword[in] identifier[resource] :
identifier[log] . identifier[warning] ( literal[string] , identifier[subnet_name] )
keyword[return] keyword[False]
identifier[subnet_id] = identifier[resource] [ literal[string] ]
identifier[conn] = identifier[_get_conn] ( identifier[region] = identifier[region] , identifier[key] = identifier[key] , identifier[keyid] = identifier[keyid] , identifier[profile] = identifier[profile] )
identifier[r] ={}
identifier[result] = identifier[_get_network_interface] ( identifier[conn] , identifier[name] )
keyword[if] literal[string] keyword[in] identifier[result] :
identifier[r] [ literal[string] ]={ literal[string] : literal[string] }
keyword[return] identifier[r]
identifier[vpc_id] = identifier[__salt__] [ literal[string] ](
[ identifier[subnet_id] ], identifier[region] = identifier[region] , identifier[key] = identifier[key] , identifier[keyid] = identifier[keyid] , identifier[profile] = identifier[profile]
)
identifier[vpc_id] = identifier[vpc_id] . identifier[get] ( literal[string] )
keyword[if] keyword[not] identifier[vpc_id] :
identifier[msg] = literal[string] . identifier[format] ( identifier[subnet_id] )
identifier[r] [ literal[string] ]={ literal[string] : identifier[msg] }
keyword[return] identifier[r]
identifier[_groups] = identifier[__salt__] [ literal[string] ](
identifier[groups] , identifier[vpc_id] = identifier[vpc_id] , identifier[region] = identifier[region] , identifier[key] = identifier[key] ,
identifier[keyid] = identifier[keyid] , identifier[profile] = identifier[profile]
)
keyword[try] :
identifier[eni] = identifier[conn] . identifier[create_network_interface] (
identifier[subnet_id] ,
identifier[private_ip_address] = identifier[private_ip_address] ,
identifier[description] = identifier[description] ,
identifier[groups] = identifier[_groups]
)
identifier[eni] . identifier[add_tag] ( literal[string] , identifier[name] )
keyword[except] identifier[boto] . identifier[exception] . identifier[EC2ResponseError] keyword[as] identifier[e] :
identifier[r] [ literal[string] ]= identifier[__utils__] [ literal[string] ]( identifier[e] )
keyword[return] identifier[r]
identifier[r] [ literal[string] ]= identifier[_describe_network_interface] ( identifier[eni] )
keyword[return] identifier[r]
|
def create_network_interface(name, subnet_id=None, subnet_name=None, private_ip_address=None, description=None, groups=None, region=None, key=None, keyid=None, profile=None):
"""
Create an Elastic Network Interface.
.. versionadded:: 2016.3.0
CLI Example:
.. code-block:: bash
salt myminion boto_ec2.create_network_interface my_eni subnet-12345 description=my_eni groups=['my_group']
"""
if not salt.utils.data.exactly_one((subnet_id, subnet_name)):
raise SaltInvocationError('One (but not both) of subnet_id or subnet_name must be provided.') # depends on [control=['if'], data=[]]
if subnet_name:
resource = __salt__['boto_vpc.get_resource_id']('subnet', subnet_name, region=region, key=key, keyid=keyid, profile=profile)
if 'id' not in resource:
log.warning("Couldn't resolve subnet name %s.", subnet_name)
return False # depends on [control=['if'], data=[]]
subnet_id = resource['id'] # depends on [control=['if'], data=[]]
conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile)
r = {}
result = _get_network_interface(conn, name)
if 'result' in result:
r['error'] = {'message': 'An ENI with this Name tag already exists.'}
return r # depends on [control=['if'], data=[]]
vpc_id = __salt__['boto_vpc.get_subnet_association']([subnet_id], region=region, key=key, keyid=keyid, profile=profile)
vpc_id = vpc_id.get('vpc_id')
if not vpc_id:
msg = 'subnet_id {0} does not map to a valid vpc id.'.format(subnet_id)
r['error'] = {'message': msg}
return r # depends on [control=['if'], data=[]]
_groups = __salt__['boto_secgroup.convert_to_group_ids'](groups, vpc_id=vpc_id, region=region, key=key, keyid=keyid, profile=profile)
try:
eni = conn.create_network_interface(subnet_id, private_ip_address=private_ip_address, description=description, groups=_groups)
eni.add_tag('Name', name) # depends on [control=['try'], data=[]]
except boto.exception.EC2ResponseError as e:
r['error'] = __utils__['boto.get_error'](e)
return r # depends on [control=['except'], data=['e']]
r['result'] = _describe_network_interface(eni)
return r
|
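A hedged sketch of consuming the return shape from another Salt execution module (``__salt__`` and ``log`` exist inside such a module); note that failures are reported under an 'error' key rather than raised:

ret = __salt__['boto_ec2.create_network_interface'](
    'my_eni', subnet_name='my_subnet',
    description='demo', groups=['my_group'])
if 'error' in ret:
    log.error('ENI creation failed: %s', ret['error'])
else:
    eni_info = ret['result']   # dict from _describe_network_interface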
def POST_AUTH(self, courseid): # pylint: disable=arguments-differ
""" GET request """
course, __ = self.get_course_and_check_rights(courseid, allow_all_staff=False)
user_input = web.input(tasks=[], aggregations=[], users=[])
if "submission" in user_input:
# Replay a unique submission
submission = self.database.submissions.find_one({"_id": ObjectId(user_input.submission)})
if submission is None:
raise web.notfound()
web.header('Content-Type', 'application/json')
self.submission_manager.replay_job(course.get_task(submission["taskid"]), submission)
return json.dumps({"status": "waiting"})
else:
# Replay several submissions, check input
tasks = course.get_tasks()
error = False
msg = _("Selected submissions were set for replay.")
for i in user_input.tasks:
if i not in tasks.keys():
msg = _("Task with id {} does not exist !").format(i)
error = True
if not error:
# Load submissions
submissions, __ = self.get_selected_submissions(course, user_input.filter_type, user_input.tasks, user_input.users, user_input.aggregations, user_input.type)
for submission in submissions:
self.submission_manager.replay_job(tasks[submission["taskid"]], submission)
return self.show_page(course, web.input(), msg, error)
|
def function[POST_AUTH, parameter[self, courseid]]:
constant[ POST request ]
<ast.Tuple object at 0x7da18f58ec50> assign[=] call[name[self].get_course_and_check_rights, parameter[name[courseid]]]
variable[user_input] assign[=] call[name[web].input, parameter[]]
if compare[constant[submission] in name[user_input]] begin[:]
variable[submission] assign[=] call[name[self].database.submissions.find_one, parameter[dictionary[[<ast.Constant object at 0x7da18f58e6e0>], [<ast.Call object at 0x7da18f58da80>]]]]
if compare[name[submission] is constant[None]] begin[:]
<ast.Raise object at 0x7da18f58e380>
call[name[web].header, parameter[constant[Content-Type], constant[application/json]]]
call[name[self].submission_manager.replay_job, parameter[call[name[course].get_task, parameter[call[name[submission]][constant[taskid]]]], name[submission]]]
return[call[name[json].dumps, parameter[dictionary[[<ast.Constant object at 0x7da18f58cbb0>], [<ast.Constant object at 0x7da18f58dcf0>]]]]]
|
keyword[def] identifier[POST_AUTH] ( identifier[self] , identifier[courseid] ):
literal[string]
identifier[course] , identifier[__] = identifier[self] . identifier[get_course_and_check_rights] ( identifier[courseid] , identifier[allow_all_staff] = keyword[False] )
identifier[user_input] = identifier[web] . identifier[input] ( identifier[tasks] =[], identifier[aggregations] =[], identifier[users] =[])
keyword[if] literal[string] keyword[in] identifier[user_input] :
identifier[submission] = identifier[self] . identifier[database] . identifier[submissions] . identifier[find_one] ({ literal[string] : identifier[ObjectId] ( identifier[user_input] . identifier[submission] )})
keyword[if] identifier[submission] keyword[is] keyword[None] :
keyword[raise] identifier[web] . identifier[notfound] ()
identifier[web] . identifier[header] ( literal[string] , literal[string] )
identifier[self] . identifier[submission_manager] . identifier[replay_job] ( identifier[course] . identifier[get_task] ( identifier[submission] [ literal[string] ]), identifier[submission] )
keyword[return] identifier[json] . identifier[dumps] ({ literal[string] : literal[string] })
keyword[else] :
identifier[tasks] = identifier[course] . identifier[get_tasks] ()
identifier[error] = keyword[False]
identifier[msg] = identifier[_] ( literal[string] )
keyword[for] identifier[i] keyword[in] identifier[user_input] . identifier[tasks] :
keyword[if] identifier[i] keyword[not] keyword[in] identifier[tasks] . identifier[keys] ():
identifier[msg] = identifier[_] ( literal[string] ). identifier[format] ( identifier[i] )
identifier[error] = keyword[True]
keyword[if] keyword[not] identifier[error] :
identifier[submissions] , identifier[__] = identifier[self] . identifier[get_selected_submissions] ( identifier[course] , identifier[user_input] . identifier[filter_type] , identifier[user_input] . identifier[tasks] , identifier[user_input] . identifier[users] , identifier[user_input] . identifier[aggregations] , identifier[user_input] . identifier[type] )
keyword[for] identifier[submission] keyword[in] identifier[submissions] :
identifier[self] . identifier[submission_manager] . identifier[replay_job] ( identifier[tasks] [ identifier[submission] [ literal[string] ]], identifier[submission] )
keyword[return] identifier[self] . identifier[show_page] ( identifier[course] , identifier[web] . identifier[input] (), identifier[msg] , identifier[error] )
|
def POST_AUTH(self, courseid): # pylint: disable=arguments-differ
' POST request '
(course, __) = self.get_course_and_check_rights(courseid, allow_all_staff=False)
user_input = web.input(tasks=[], aggregations=[], users=[])
if 'submission' in user_input:
# Replay a unique submission
submission = self.database.submissions.find_one({'_id': ObjectId(user_input.submission)})
if submission is None:
raise web.notfound() # depends on [control=['if'], data=[]]
web.header('Content-Type', 'application/json')
self.submission_manager.replay_job(course.get_task(submission['taskid']), submission)
return json.dumps({'status': 'waiting'}) # depends on [control=['if'], data=['user_input']]
else:
# Replay several submissions, check input
tasks = course.get_tasks()
error = False
msg = _('Selected submissions were set for replay.')
for i in user_input.tasks:
if i not in tasks.keys():
msg = _('Task with id {} does not exist !').format(i)
error = True # depends on [control=['if'], data=['i']] # depends on [control=['for'], data=['i']]
if not error:
# Load submissions
(submissions, __) = self.get_selected_submissions(course, user_input.filter_type, user_input.tasks, user_input.users, user_input.aggregations, user_input.type)
for submission in submissions:
self.submission_manager.replay_job(tasks[submission['taskid']], submission) # depends on [control=['for'], data=['submission']] # depends on [control=['if'], data=[]]
return self.show_page(course, web.input(), msg, error)
|
def ghissue_role(name, rawtext, text, lineno, inliner, options={}, content=[]):
"""Link to a GitHub issue.
Returns 2 part tuple containing list of nodes to insert into the
document and a list of system messages. Both are allowed to be
empty.
:param name: The role name used in the document.
:param rawtext: The entire markup snippet, with role.
:param text: The text marked with the role.
:param lineno: The line number where rawtext appears in the input.
:param inliner: The inliner instance that called us.
:param options: Directive options for customization.
:param content: The directive content for customization.
"""
try:
issue_num = int(text)
if issue_num <= 0:
raise ValueError
except ValueError:
msg = inliner.reporter.error(
'GitHub issue number must be a number greater than or equal to 1; '
'"%s" is invalid.' % text, line=lineno)
prb = inliner.problematic(rawtext, rawtext, msg)
return [prb], [msg]
app = inliner.document.settings.env.app
#app.info('issue %r' % text)
if 'pull' in name.lower():
category = 'pull'
elif 'issue' in name.lower():
category = 'issues'
else:
msg = inliner.reporter.error(
'GitHub roles include "ghpull" and "ghissue", '
'"%s" is invalid.' % name, line=lineno)
prb = inliner.problematic(rawtext, rawtext, msg)
return [prb], [msg]
node = make_link_node(rawtext, app, category, str(issue_num), options)
return [node], []
|
def function[ghissue_role, parameter[name, rawtext, text, lineno, inliner, options, content]]:
constant[Link to a GitHub issue.
Returns 2 part tuple containing list of nodes to insert into the
document and a list of system messages. Both are allowed to be
empty.
:param name: The role name used in the document.
:param rawtext: The entire markup snippet, with role.
:param text: The text marked with the role.
:param lineno: The line number where rawtext appears in the input.
:param inliner: The inliner instance that called us.
:param options: Directive options for customization.
:param content: The directive content for customization.
]
<ast.Try object at 0x7da18f58eb00>
variable[app] assign[=] name[inliner].document.settings.env.app
if compare[constant[pull] in call[name[name].lower, parameter[]]] begin[:]
variable[category] assign[=] constant[pull]
variable[node] assign[=] call[name[make_link_node], parameter[name[rawtext], name[app], name[category], call[name[str], parameter[name[issue_num]]], name[options]]]
return[tuple[[<ast.List object at 0x7da18f811db0>, <ast.List object at 0x7da18f812410>]]]
|
keyword[def] identifier[ghissue_role] ( identifier[name] , identifier[rawtext] , identifier[text] , identifier[lineno] , identifier[inliner] , identifier[options] ={}, identifier[content] =[]):
literal[string]
keyword[try] :
identifier[issue_num] = identifier[int] ( identifier[text] )
keyword[if] identifier[issue_num] <= literal[int] :
keyword[raise] identifier[ValueError]
keyword[except] identifier[ValueError] :
identifier[msg] = identifier[inliner] . identifier[reporter] . identifier[error] (
literal[string]
literal[string] % identifier[text] , identifier[line] = identifier[lineno] )
identifier[prb] = identifier[inliner] . identifier[problematic] ( identifier[rawtext] , identifier[rawtext] , identifier[msg] )
keyword[return] [ identifier[prb] ],[ identifier[msg] ]
identifier[app] = identifier[inliner] . identifier[document] . identifier[settings] . identifier[env] . identifier[app]
keyword[if] literal[string] keyword[in] identifier[name] . identifier[lower] ():
identifier[category] = literal[string]
keyword[elif] literal[string] keyword[in] identifier[name] . identifier[lower] ():
identifier[category] = literal[string]
keyword[else] :
identifier[msg] = identifier[inliner] . identifier[reporter] . identifier[error] (
literal[string]
literal[string] % identifier[name] , identifier[line] = identifier[lineno] )
identifier[prb] = identifier[inliner] . identifier[problematic] ( identifier[rawtext] , identifier[rawtext] , identifier[msg] )
keyword[return] [ identifier[prb] ],[ identifier[msg] ]
identifier[node] = identifier[make_link_node] ( identifier[rawtext] , identifier[app] , identifier[category] , identifier[str] ( identifier[issue_num] ), identifier[options] )
keyword[return] [ identifier[node] ],[]
|
def ghissue_role(name, rawtext, text, lineno, inliner, options={}, content=[]):
"""Link to a GitHub issue.
Returns 2 part tuple containing list of nodes to insert into the
document and a list of system messages. Both are allowed to be
empty.
:param name: The role name used in the document.
:param rawtext: The entire markup snippet, with role.
:param text: The text marked with the role.
:param lineno: The line number where rawtext appears in the input.
:param inliner: The inliner instance that called us.
:param options: Directive options for customization.
:param content: The directive content for customization.
"""
try:
issue_num = int(text)
if issue_num <= 0:
raise ValueError # depends on [control=['if'], data=[]] # depends on [control=['try'], data=[]]
except ValueError:
msg = inliner.reporter.error('GitHub issue number must be a number greater than or equal to 1; "%s" is invalid.' % text, line=lineno)
prb = inliner.problematic(rawtext, rawtext, msg)
return ([prb], [msg]) # depends on [control=['except'], data=[]]
app = inliner.document.settings.env.app
#app.info('issue %r' % text)
if 'pull' in name.lower():
category = 'pull' # depends on [control=['if'], data=[]]
elif 'issue' in name.lower():
category = 'issues' # depends on [control=['if'], data=[]]
else:
msg = inliner.reporter.error('GitHub roles include "ghpull" and "ghissue", "%s" is invalid.' % name, line=lineno)
prb = inliner.problematic(rawtext, rawtext, msg)
return ([prb], [msg])
node = make_link_node(rawtext, app, category, str(issue_num), options)
return ([node], [])
|
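A registration sketch for a Sphinx extension module using this role; ``app.add_role`` is standard Sphinx API, while the project URL consumed by make_link_node is assumed to come from the app config:

def setup(app):
    # reST usage:  :ghissue:`42`   :ghpull:`117`
    app.add_role('ghissue', ghissue_role)
    app.add_role('ghpull', ghissue_role)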
async def is_try_or_pull_request(context, task):
"""Determine if a task is a try or a pull-request-like task (restricted privs).
Checks are the ones done in ``is_try`` and ``is_pull_request``
Args:
context (scriptworker.context.Context): the scriptworker context.
task (dict): the task definition to check.
Returns:
bool: True if it's a pull-request or a try task
"""
if is_github_task(task):
return await is_pull_request(context, task)
else:
return is_try(task, context.config['source_env_prefix'])
|
<ast.AsyncFunctionDef object at 0x7da1b0e9f670>
|
keyword[async] keyword[def] identifier[is_try_or_pull_request] ( identifier[context] , identifier[task] ):
literal[string]
keyword[if] identifier[is_github_task] ( identifier[task] ):
keyword[return] keyword[await] identifier[is_pull_request] ( identifier[context] , identifier[task] )
keyword[else] :
keyword[return] identifier[is_try] ( identifier[task] , identifier[context] . identifier[config] [ literal[string] ])
|
async def is_try_or_pull_request(context, task):
"""Determine if a task is a try or a pull-request-like task (restricted privs).
Checks are the ones done in ``is_try`` and ``is_pull_request``
Args:
context (scriptworker.context.Context): the scriptworker context.
task (dict): the task definition to check.
Returns:
bool: True if it's a pull-request or a try task
"""
if is_github_task(task):
return await is_pull_request(context, task) # depends on [control=['if'], data=[]]
else:
return is_try(task, context.config['source_env_prefix'])
|
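A hedged usage sketch inside an async scriptworker flow:

async def check_privs(context, task):
    # try pushes and pull requests run with restricted privileges
    if await is_try_or_pull_request(context, task):
        return 'restricted'
    return 'trusted'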
def get_install_requires():
"""
parse requirements.txt, ignore links, exclude comments
"""
requirements = []
for line in open('requirements.txt').readlines():
# skip to next iteration if comment or empty line
if line.startswith('#') or line == '' or line.startswith('http') or line.startswith('git'):
continue
# add line to requirements
requirements.append(line)
return requirements
|
def function[get_install_requires, parameter[]]:
constant[
parse requirements.txt, ignore links, exclude comments
]
variable[requirements] assign[=] list[[]]
for taget[name[line]] in starred[call[call[name[open], parameter[constant[requirements.txt]]].readlines, parameter[]]] begin[:]
if <ast.BoolOp object at 0x7da1b1e8e590> begin[:]
continue
call[name[requirements].append, parameter[name[line]]]
return[name[requirements]]
|
keyword[def] identifier[get_install_requires] ():
literal[string]
identifier[requirements] =[]
keyword[for] identifier[line] keyword[in] identifier[open] ( literal[string] ). identifier[readlines] ():
keyword[if] identifier[line] . identifier[startswith] ( literal[string] ) keyword[or] identifier[line] == literal[string] keyword[or] identifier[line] . identifier[startswith] ( literal[string] ) keyword[or] identifier[line] . identifier[startswith] ( literal[string] ):
keyword[continue]
identifier[requirements] . identifier[append] ( identifier[line] )
keyword[return] identifier[requirements]
|
def get_install_requires():
"""
parse requirements.txt, ignore links, exclude comments
"""
requirements = []
for line in open('requirements.txt').readlines():
# skip to next iteration if comment or empty line
if line.startswith('#') or line == '' or line.startswith('http') or line.startswith('git'):
continue # depends on [control=['if'], data=[]]
# add line to requirements
requirements.append(line) # depends on [control=['for'], data=['line']]
return requirements
|
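Note that the ``line == ''`` guard above never fires, because ``readlines()`` keeps each trailing newline, and the collected entries also keep theirs. A stripped variant, as a sketch:

def get_install_requires_stripped():
    requirements = []
    with open('requirements.txt') as f:
        for line in f.read().splitlines():
            line = line.strip()
            # skip blanks, comments, and direct links
            if not line or line.startswith(('#', 'http', 'git')):
                continue
            requirements.append(line)
    return requirements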
def _join_path(self, hive, subkey):
"""
Joins the hive and key to make a Registry path.
@type hive: int
@param hive: Registry hive handle.
The hive handle must be one of the following integer constants:
- L{win32.HKEY_CLASSES_ROOT}
- L{win32.HKEY_CURRENT_USER}
- L{win32.HKEY_LOCAL_MACHINE}
- L{win32.HKEY_USERS}
- L{win32.HKEY_PERFORMANCE_DATA}
- L{win32.HKEY_CURRENT_CONFIG}
@type subkey: str
@param subkey: Subkey path.
@rtype: str
@return: Registry path.
"""
path = self._hives_by_value[hive]
if subkey:
path = path + '\\' + subkey
return path
|
def function[_join_path, parameter[self, hive, subkey]]:
constant[
Joins the hive and key to make a Registry path.
@type hive: int
@param hive: Registry hive handle.
The hive handle must be one of the following integer constants:
- L{win32.HKEY_CLASSES_ROOT}
- L{win32.HKEY_CURRENT_USER}
- L{win32.HKEY_LOCAL_MACHINE}
- L{win32.HKEY_USERS}
- L{win32.HKEY_PERFORMANCE_DATA}
- L{win32.HKEY_CURRENT_CONFIG}
@type subkey: str
@param subkey: Subkey path.
@rtype: str
@return: Registry path.
]
variable[path] assign[=] call[name[self]._hives_by_value][name[hive]]
if name[subkey] begin[:]
variable[path] assign[=] binary_operation[binary_operation[name[path] + constant[\]] + name[subkey]]
return[name[path]]
|
keyword[def] identifier[_join_path] ( identifier[self] , identifier[hive] , identifier[subkey] ):
literal[string]
identifier[path] = identifier[self] . identifier[_hives_by_value] [ identifier[hive] ]
keyword[if] identifier[subkey] :
identifier[path] = identifier[path] + literal[string] + identifier[subkey]
keyword[return] identifier[path]
|
def _join_path(self, hive, subkey):
"""
Joins the hive and key to make a Registry path.
@type hive: int
@param hive: Registry hive handle.
The hive handle must be one of the following integer constants:
- L{win32.HKEY_CLASSES_ROOT}
- L{win32.HKEY_CURRENT_USER}
- L{win32.HKEY_LOCAL_MACHINE}
- L{win32.HKEY_USERS}
- L{win32.HKEY_PERFORMANCE_DATA}
- L{win32.HKEY_CURRENT_CONFIG}
@type subkey: str
@param subkey: Subkey path.
@rtype: str
@return: Registry path.
"""
path = self._hives_by_value[hive]
if subkey:
path = path + '\\' + subkey # depends on [control=['if'], data=[]]
return path
|
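A hedged usage sketch, where ``reg`` stands for the containing registry object and ``_hives_by_value`` is assumed to map each hive handle back to its constant name:

path = reg._join_path(win32.HKEY_LOCAL_MACHINE, 'Software\\Microsoft')
# -> 'HKEY_LOCAL_MACHINE\\Software\\Microsoft'
path = reg._join_path(win32.HKEY_CURRENT_USER, '')
# -> 'HKEY_CURRENT_USER' (an empty subkey adds no separator)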
def to_json(self):
"""Converts track to a JSON serializable format
Returns:
Map with the name, and segments of the track.
"""
return {
'name': self.name,
'segments': [segment.to_json() for segment in self.segments],
'meta': self.meta
}
|
def function[to_json, parameter[self]]:
constant[Converts track to a JSON serializable format
Returns:
Map with the name, and segments of the track.
]
return[dictionary[[<ast.Constant object at 0x7da1b053b310>, <ast.Constant object at 0x7da1b0538130>, <ast.Constant object at 0x7da1b053a1a0>], [<ast.Attribute object at 0x7da1b05386d0>, <ast.ListComp object at 0x7da1b0409ba0>, <ast.Attribute object at 0x7da1b040b130>]]]
|
keyword[def] identifier[to_json] ( identifier[self] ):
literal[string]
keyword[return] {
literal[string] : identifier[self] . identifier[name] ,
literal[string] :[ identifier[segment] . identifier[to_json] () keyword[for] identifier[segment] keyword[in] identifier[self] . identifier[segments] ],
literal[string] : identifier[self] . identifier[meta]
}
|
def to_json(self):
"""Converts track to a JSON serializable format
Returns:
Map with the name, and segments of the track.
"""
return {'name': self.name, 'segments': [segment.to_json() for segment in self.segments], 'meta': self.meta}
|
def iter_batches(iterable, batch_size):
'''
Given a sequence or iterable, yield batches from that iterable until it
runs out. Note that this function returns a generator, and also each
batch will be a generator.
:param iterable: The sequence or iterable to split into batches
:param int batch_size: The number of elements of `iterable` to iterate over
in each batch
>>> batches = iter_batches('abcdefghijkl', batch_size=5)
>>> list(next(batches))
['a', 'b', 'c', 'd', 'e']
>>> list(next(batches))
['f', 'g', 'h', 'i', 'j']
>>> list(next(batches))
['k', 'l']
>>> list(next(batches))
Traceback (most recent call last):
...
StopIteration
Warning: It is important to iterate completely over each batch before
requesting the next, or batch sizes will be truncated to 1. For example,
making a list of all batches before asking for the contents of each
will not work:
>>> batches = list(iter_batches('abcdefghijkl', batch_size=5))
>>> len(batches)
12
>>> list(batches[0])
['a']
However, making a list of each individual batch as it is received will
produce expected behavior (as shown in the first example).
'''
# http://stackoverflow.com/a/8290514/4481448
sourceiter = iter(iterable)
while True:
batchiter = islice(sourceiter, batch_size)
yield chain([batchiter.next()], batchiter)
|
def function[iter_batches, parameter[iterable, batch_size]]:
constant[
Given a sequence or iterable, yield batches from that iterable until it
runs out. Note that this function returns a generator, and also each
batch will be a generator.
:param iterable: The sequence or iterable to split into batches
:param int batch_size: The number of elements of `iterable` to iterate over
in each batch
>>> batches = iter_batches('abcdefghijkl', batch_size=5)
>>> list(next(batches))
['a', 'b', 'c', 'd', 'e']
>>> list(next(batches))
['f', 'g', 'h', 'i', 'j']
>>> list(next(batches))
['k', 'l']
>>> list(next(batches))
Traceback (most recent call last):
...
StopIteration
Warning: It is important to iterate completely over each batch before
requesting the next, or batch sizes will be truncated to 1. For example,
making a list of all batches before asking for the contents of each
will not work:
>>> batches = list(iter_batches('abcdefghijkl', batch_size=5))
>>> len(batches)
12
>>> list(batches[0])
['a']
However, making a list of each individual batch as it is received will
produce expected behavior (as shown in the first example).
]
variable[sourceiter] assign[=] call[name[iter], parameter[name[iterable]]]
while constant[True] begin[:]
variable[batchiter] assign[=] call[name[islice], parameter[name[sourceiter], name[batch_size]]]
<ast.Yield object at 0x7da1b100eb30>
|
keyword[def] identifier[iter_batches] ( identifier[iterable] , identifier[batch_size] ):
literal[string]
identifier[sourceiter] = identifier[iter] ( identifier[iterable] )
keyword[while] keyword[True] :
identifier[batchiter] = identifier[islice] ( identifier[sourceiter] , identifier[batch_size] )
keyword[yield] identifier[chain] ([ identifier[batchiter] . identifier[next] ()], identifier[batchiter] )
|
def iter_batches(iterable, batch_size):
"""
Given a sequence or iterable, yield batches from that iterable until it
runs out. Note that this function returns a generator, and also each
batch will be a generator.
:param iterable: The sequence or iterable to split into batches
:param int batch_size: The number of elements of `iterable` to iterate over
in each batch
>>> batches = iter_batches('abcdefghijkl', batch_size=5)
>>> list(next(batches))
['a', 'b', 'c', 'd', 'e']
>>> list(next(batches))
['f', 'g', 'h', 'i', 'j']
>>> list(next(batches))
['k', 'l']
>>> list(next(batches))
Traceback (most recent call last):
...
StopIteration
Warning: It is important to iterate completely over each batch before
requesting the next, or batch sizes will be truncated to 1. For example,
making a list of all batches before asking for the contents of each
will not work:
>>> batches = list(iter_batches('abcdefghijkl', batch_size=5))
>>> len(batches)
12
>>> list(batches[0])
['a']
However, making a list of each individual batch as it is received will
produce expected behavior (as shown in the first example).
"""
# http://stackoverflow.com/a/8290514/4481448
sourceiter = iter(iterable)
while True:
batchiter = islice(sourceiter, batch_size)
yield chain([batchiter.next()], batchiter) # depends on [control=['while'], data=[]]
|
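The code above is Python 2 (``batchiter.next()``), and under Python 3's PEP 479 the terminating StopIteration would surface as a RuntimeError instead of ending the generator. A Python 3 variant, as a sketch:

from itertools import chain, islice

def iter_batches_py3(iterable, batch_size):
    sourceiter = iter(iterable)
    while True:
        batchiter = islice(sourceiter, batch_size)
        try:
            first = next(batchiter)   # probe: is there another batch?
        except StopIteration:
            return                    # source exhausted: end cleanly
        yield chain([first], batchiter)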
def __generate_indexed_requirements(dut_count, basekeys, requirements):
"""
Generate indexed requirements from general requirements.
:param dut_count: Amount of duts
:param basekeys: base keys as dict
:param requirements: requirements
:return: Indexed requirements as a list.
"""
dut_requirements = []
for i in range(1, dut_count + 1):
dut_requirement = ResourceRequirements(basekeys.copy())
if i in requirements["duts"]:
for k in requirements["duts"][i]:
dut_requirement.set(k, requirements["duts"][i][k])
elif str(i) in requirements["duts"]:
i = str(i)
for k in requirements["duts"][i]:
dut_requirement.set(k, requirements["duts"][i][k])
dut_requirements.append(dut_requirement)
return dut_requirements
|
def function[__generate_indexed_requirements, parameter[dut_count, basekeys, requirements]]:
constant[
Generate indexed requirements from general requirements.
:param dut_count: Amount of duts
:param basekeys: base keys as dict
:param requirements: requirements
:return: Indexed requirements as a list.
]
variable[dut_requirements] assign[=] list[[]]
for taget[name[i]] in starred[call[name[range], parameter[constant[1], binary_operation[name[dut_count] + constant[1]]]]] begin[:]
variable[dut_requirement] assign[=] call[name[ResourceRequirements], parameter[call[name[basekeys].copy, parameter[]]]]
if compare[name[i] in call[name[requirements]][constant[duts]]] begin[:]
for taget[name[k]] in starred[call[call[name[requirements]][constant[duts]]][name[i]]] begin[:]
call[name[dut_requirement].set, parameter[name[k], call[call[call[name[requirements]][constant[duts]]][name[i]]][name[k]]]]
call[name[dut_requirements].append, parameter[name[dut_requirement]]]
return[name[dut_requirements]]
|
keyword[def] identifier[__generate_indexed_requirements] ( identifier[dut_count] , identifier[basekeys] , identifier[requirements] ):
literal[string]
identifier[dut_requirements] =[]
keyword[for] identifier[i] keyword[in] identifier[range] ( literal[int] , identifier[dut_count] + literal[int] ):
identifier[dut_requirement] = identifier[ResourceRequirements] ( identifier[basekeys] . identifier[copy] ())
keyword[if] identifier[i] keyword[in] identifier[requirements] [ literal[string] ]:
keyword[for] identifier[k] keyword[in] identifier[requirements] [ literal[string] ][ identifier[i] ]:
identifier[dut_requirement] . identifier[set] ( identifier[k] , identifier[requirements] [ literal[string] ][ identifier[i] ][ identifier[k] ])
keyword[elif] identifier[str] ( identifier[i] ) keyword[in] identifier[requirements] [ literal[string] ]:
identifier[i] = identifier[str] ( identifier[i] )
keyword[for] identifier[k] keyword[in] identifier[requirements] [ literal[string] ][ identifier[i] ]:
identifier[dut_requirement] . identifier[set] ( identifier[k] , identifier[requirements] [ literal[string] ][ identifier[i] ][ identifier[k] ])
identifier[dut_requirements] . identifier[append] ( identifier[dut_requirement] )
keyword[return] identifier[dut_requirements]
|
def __generate_indexed_requirements(dut_count, basekeys, requirements):
"""
Generate indexed requirements from general requirements.
:param dut_count: Amount of duts
:param basekeys: base keys as dict
:param requirements: requirements
:return: Indexed requirements as a list.
"""
dut_requirements = []
for i in range(1, dut_count + 1):
dut_requirement = ResourceRequirements(basekeys.copy())
if i in requirements['duts']:
for k in requirements['duts'][i]:
dut_requirement.set(k, requirements['duts'][i][k]) # depends on [control=['for'], data=['k']] # depends on [control=['if'], data=['i']]
elif str(i) in requirements['duts']:
i = str(i)
for k in requirements['duts'][i]:
dut_requirement.set(k, requirements['duts'][i][k]) # depends on [control=['for'], data=['k']] # depends on [control=['if'], data=[]]
dut_requirements.append(dut_requirement) # depends on [control=['for'], data=['i']]
return dut_requirements
|
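A hedged sketch of the expected ``requirements`` shape; dut indices may arrive as int or str keys, which is why the function checks both forms:

requirements = {
    "duts": {
        1:   {"platform_name": "K64F"},   # int-keyed entry
        "2": {"nick": "dut2"},            # str-keyed entry, equally accepted
    }
}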
def authenticate(self, personal_number, **kwargs):
"""Request an authentication order. The :py:meth:`collect` method
is used to query the status of the order.
:param personal_number: The Swedish personal number
in format YYYYMMDDXXXX.
:type personal_number: str
:return: The OrderResponse parsed to a dictionary.
:rtype: dict
:raises BankIDError: raises a subclass of this error
when error has been returned from server.
"""
if "requirementAlternatives" in kwargs:
warnings.warn(
"Requirement Alternatives " "option is not tested.", BankIDWarning
)
try:
out = self.client.service.Authenticate(
personalNumber=personal_number, **kwargs
)
except Error as e:
raise get_error_class(e, "Could not complete Authenticate order.")
return self._dictify(out)
|
def function[authenticate, parameter[self, personal_number]]:
constant[Request an authentication order. The :py:meth:`collect` method
is used to query the status of the order.
:param personal_number: The Swedish personal number
in format YYYYMMDDXXXX.
:type personal_number: str
:return: The OrderResponse parsed to a dictionary.
:rtype: dict
:raises BankIDError: raises a subclass of this error
when error has been returned from server.
]
if compare[constant[requirementAlternatives] in name[kwargs]] begin[:]
call[name[warnings].warn, parameter[constant[Requirement Alternatives option is not tested.], name[BankIDWarning]]]
<ast.Try object at 0x7da20e961c30>
return[call[name[self]._dictify, parameter[name[out]]]]
|
keyword[def] identifier[authenticate] ( identifier[self] , identifier[personal_number] ,** identifier[kwargs] ):
literal[string]
keyword[if] literal[string] keyword[in] identifier[kwargs] :
identifier[warnings] . identifier[warn] (
literal[string] literal[string] , identifier[BankIDWarning]
)
keyword[try] :
identifier[out] = identifier[self] . identifier[client] . identifier[service] . identifier[Authenticate] (
identifier[personalNumber] = identifier[personal_number] ,** identifier[kwargs]
)
keyword[except] identifier[Error] keyword[as] identifier[e] :
keyword[raise] identifier[get_error_class] ( identifier[e] , literal[string] )
keyword[return] identifier[self] . identifier[_dictify] ( identifier[out] )
|
def authenticate(self, personal_number, **kwargs):
"""Request an authentication order. The :py:meth:`collect` method
is used to query the status of the order.
:param personal_number: The Swedish personal number
in format YYYYMMDDXXXX.
:type personal_number: str
:return: The OrderResponse parsed to a dictionary.
:rtype: dict
:raises BankIDError: raises a subclass of this error
when error has been returned from server.
"""
if 'requirementAlternatives' in kwargs:
warnings.warn('Requirement Alternatives option is not tested.', BankIDWarning) # depends on [control=['if'], data=[]]
try:
out = self.client.service.Authenticate(personalNumber=personal_number, **kwargs) # depends on [control=['try'], data=[]]
except Error as e:
raise get_error_class(e, 'Could not complete Authenticate order.') # depends on [control=['except'], data=['e']]
return self._dictify(out)
|
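A hedged usage sketch against a pybankid-style client; the ``orderRef`` field name is an assumption taken from the BankID OrderResponse:

def start_auth(client, personal_number):
    # returns the order reference to poll with client.collect(...)
    order = client.authenticate(personal_number=personal_number)
    return order['orderRef']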
def datafile_from_hash(hash_, prefix, path):
"""Return pathlib.Path for a data-file with given hash and prefix.
"""
pattern = '%s_%s*.h*' % (prefix, hash_)
datafiles = list(path.glob(pattern))
if len(datafiles) == 0:
raise NoMatchError('No matches for "%s"' % pattern)
if len(datafiles) > 1:
raise MultipleMatchesError('More than 1 match for "%s"' % pattern)
return datafiles[0]
|
def function[datafile_from_hash, parameter[hash_, prefix, path]]:
constant[Return pathlib.Path for a data-file with given hash and prefix.
]
variable[pattern] assign[=] binary_operation[constant[%s_%s*.h*] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da18f7221a0>, <ast.Name object at 0x7da18f7223e0>]]]
variable[datafiles] assign[=] call[name[list], parameter[call[name[path].glob, parameter[name[pattern]]]]]
if compare[call[name[len], parameter[name[datafiles]]] equal[==] constant[0]] begin[:]
<ast.Raise object at 0x7da18f720b80>
if compare[call[name[len], parameter[name[datafiles]]] greater[>] constant[1]] begin[:]
<ast.Raise object at 0x7da207f008b0>
return[call[name[datafiles]][constant[0]]]
|
keyword[def] identifier[datafile_from_hash] ( identifier[hash_] , identifier[prefix] , identifier[path] ):
literal[string]
identifier[pattern] = literal[string] %( identifier[prefix] , identifier[hash_] )
identifier[datafiles] = identifier[list] ( identifier[path] . identifier[glob] ( identifier[pattern] ))
keyword[if] identifier[len] ( identifier[datafiles] )== literal[int] :
keyword[raise] identifier[NoMatchError] ( literal[string] % identifier[pattern] )
keyword[if] identifier[len] ( identifier[datafiles] )> literal[int] :
keyword[raise] identifier[MultipleMatchesError] ( literal[string] % identifier[pattern] )
keyword[return] identifier[datafiles] [ literal[int] ]
|
def datafile_from_hash(hash_, prefix, path):
"""Return pathlib.Path for a data-file with given hash and prefix.
"""
pattern = '%s_%s*.h*' % (prefix, hash_)
datafiles = list(path.glob(pattern))
if len(datafiles) == 0:
raise NoMatchError('No matches for "%s"' % pattern) # depends on [control=['if'], data=[]]
if len(datafiles) > 1:
raise MultipleMatchesError('More than 1 match for "%s"' % pattern) # depends on [control=['if'], data=[]]
return datafiles[0]
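
Assuming the function above is importable, a runnable check against a temporary directory (the hash and prefix are invented for illustration):

import pathlib, tempfile

with tempfile.TemporaryDirectory() as tmp:
    path = pathlib.Path(tmp)
    (path / 'spec_abc123_run1.h5').touch()
    print(datafile_from_hash('abc123', 'spec', path))  # .../spec_abc123_run1.h5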
|
def input_file(filename):
"""
Run all checks on a Python source file.
"""
if excluded(filename) or not filename_match(filename):
return {}
if options.verbose:
message('checking ' + filename)
options.counters['files'] = options.counters.get('files', 0) + 1
errors = Checker(filename).check_all()
if options.testsuite and not errors:
message("%s: %s" % (filename, "no errors found"))
return errors
|
def function[input_file, parameter[filename]]:
constant[
Run all checks on a Python source file.
]
if <ast.BoolOp object at 0x7da1b0a48670> begin[:]
return[dictionary[[], []]]
if name[options].verbose begin[:]
call[name[message], parameter[binary_operation[constant[checking ] + name[filename]]]]
call[name[options].counters][constant[files]] assign[=] binary_operation[call[name[options].counters.get, parameter[constant[files], constant[0]]] + constant[1]]
variable[errors] assign[=] call[call[name[Checker], parameter[name[filename]]].check_all, parameter[]]
if <ast.BoolOp object at 0x7da1b0a4b2b0> begin[:]
call[name[message], parameter[binary_operation[constant[%s: %s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1b0a48310>, <ast.Constant object at 0x7da1b0a482b0>]]]]]
return[name[errors]]
|
keyword[def] identifier[input_file] ( identifier[filename] ):
literal[string]
keyword[if] identifier[excluded] ( identifier[filename] ) keyword[or] keyword[not] identifier[filename_match] ( identifier[filename] ):
keyword[return] {}
keyword[if] identifier[options] . identifier[verbose] :
identifier[message] ( literal[string] + identifier[filename] )
identifier[options] . identifier[counters] [ literal[string] ]= identifier[options] . identifier[counters] . identifier[get] ( literal[string] , literal[int] )+ literal[int]
identifier[errors] = identifier[Checker] ( identifier[filename] ). identifier[check_all] ()
keyword[if] identifier[options] . identifier[testsuite] keyword[and] keyword[not] identifier[errors] :
identifier[message] ( literal[string] %( identifier[filename] , literal[string] ))
keyword[return] identifier[errors]
|
def input_file(filename):
"""
Run all checks on a Python source file.
"""
if excluded(filename) or not filename_match(filename):
return {} # depends on [control=['if'], data=[]]
if options.verbose:
message('checking ' + filename) # depends on [control=['if'], data=[]]
options.counters['files'] = options.counters.get('files', 0) + 1
errors = Checker(filename).check_all()
if options.testsuite and (not errors):
message('%s: %s' % (filename, 'no errors found')) # depends on [control=['if'], data=[]]
return errors
|
def get_cgi_parameter_file(form: cgi.FieldStorage,
key: str) -> Optional[bytes]:
"""
Extracts a file's contents from a "file" input in a CGI form, or None
if no such file was uploaded.
"""
(filename, filecontents) = get_cgi_parameter_filename_and_file(form, key)
return filecontents
|
def function[get_cgi_parameter_file, parameter[form, key]]:
constant[
Extracts a file's contents from a "file" input in a CGI form, or None
if no such file was uploaded.
]
<ast.Tuple object at 0x7da18c4cd0f0> assign[=] call[name[get_cgi_parameter_filename_and_file], parameter[name[form], name[key]]]
return[name[filecontents]]
|
keyword[def] identifier[get_cgi_parameter_file] ( identifier[form] : identifier[cgi] . identifier[FieldStorage] ,
identifier[key] : identifier[str] )-> identifier[Optional] [ identifier[bytes] ]:
literal[string]
( identifier[filename] , identifier[filecontents] )= identifier[get_cgi_parameter_filename_and_file] ( identifier[form] , identifier[key] )
keyword[return] identifier[filecontents]
|
def get_cgi_parameter_file(form: cgi.FieldStorage, key: str) -> Optional[bytes]:
"""
Extracts a file's contents from a "file" input in a CGI form, or None
if no such file was uploaded.
"""
(filename, filecontents) = get_cgi_parameter_filename_and_file(form, key)
return filecontents
|
def delete_course_completion(self, user_id, payload): # pylint: disable=unused-argument
"""
Delete a completion status previously sent to the Degreed Completion Status endpoint
Args:
user_id: Unused.
payload: JSON encoded object (serialized from DegreedLearnerDataTransmissionAudit)
containing the required completion status fields for deletion per Degreed documentation.
Returns:
A tuple containing the status code and the body of the response.
Raises:
HTTPError: if we received a failure response code from Degreed
"""
return self._delete(
urljoin(
self.enterprise_configuration.degreed_base_url,
self.global_degreed_config.completion_status_api_path
),
payload,
self.COMPLETION_PROVIDER_SCOPE
)
|
def function[delete_course_completion, parameter[self, user_id, payload]]:
constant[
Delete a completion status previously sent to the Degreed Completion Status endpoint
Args:
user_id: Unused.
payload: JSON encoded object (serialized from DegreedLearnerDataTransmissionAudit)
containing the required completion status fields for deletion per Degreed documentation.
Returns:
A tuple containing the status code and the body of the response.
Raises:
HTTPError: if we received a failure response code from Degreed
]
return[call[name[self]._delete, parameter[call[name[urljoin], parameter[name[self].enterprise_configuration.degreed_base_url, name[self].global_degreed_config.completion_status_api_path]], name[payload], name[self].COMPLETION_PROVIDER_SCOPE]]]
|
keyword[def] identifier[delete_course_completion] ( identifier[self] , identifier[user_id] , identifier[payload] ):
literal[string]
keyword[return] identifier[self] . identifier[_delete] (
identifier[urljoin] (
identifier[self] . identifier[enterprise_configuration] . identifier[degreed_base_url] ,
identifier[self] . identifier[global_degreed_config] . identifier[completion_status_api_path]
),
identifier[payload] ,
identifier[self] . identifier[COMPLETION_PROVIDER_SCOPE]
)
|
def delete_course_completion(self, user_id, payload): # pylint: disable=unused-argument
    """
    Delete a completion status previously sent to the Degreed Completion Status endpoint

    Args:
        user_id: Unused.
        payload: JSON encoded object (serialized from DegreedLearnerDataTransmissionAudit)
            containing the required completion status fields for deletion per Degreed documentation.

    Returns:
        A tuple containing the status code and the body of the response.
    Raises:
        HTTPError: if we received a failure response code from Degreed
    """
return self._delete(urljoin(self.enterprise_configuration.degreed_base_url, self.global_degreed_config.completion_status_api_path), payload, self.COMPLETION_PROVIDER_SCOPE)
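
The deletion URL is assembled with urljoin, whose trailing-slash behaviour is easy to trip over; a standard-library demonstration (hostname and path are invented, not the real Degreed endpoints):

from urllib.parse import urljoin

print(urljoin('https://degreed.example.com/api/', 'v1/completion-status'))
# https://degreed.example.com/api/v1/completion-status
print(urljoin('https://degreed.example.com/api', 'v1/completion-status'))
# https://degreed.example.com/v1/completion-status - last base segment replaced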
|
def complete(self):
"""
Consider a transcript complete if it has start and stop codons and
a coding sequence whose length is divisible by 3
"""
return (
self.contains_start_codon and
self.contains_stop_codon and
self.coding_sequence is not None and
len(self.coding_sequence) % 3 == 0
)
|
def function[complete, parameter[self]]:
constant[
Consider a transcript complete if it has start and stop codons and
a coding sequence whose length is divisible by 3
]
return[<ast.BoolOp object at 0x7da1b08e7bb0>]
|
keyword[def] identifier[complete] ( identifier[self] ):
literal[string]
keyword[return] (
identifier[self] . identifier[contains_start_codon] keyword[and]
identifier[self] . identifier[contains_stop_codon] keyword[and]
identifier[self] . identifier[coding_sequence] keyword[is] keyword[not] keyword[None] keyword[and]
identifier[len] ( identifier[self] . identifier[coding_sequence] )% literal[int] == literal[int]
)
|
def complete(self):
"""
Consider a transcript complete if it has start and stop codons and
a coding sequence whose length is divisible by 3
"""
return self.contains_start_codon and self.contains_stop_codon and (self.coding_sequence is not None) and (len(self.coding_sequence) % 3 == 0)
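
A self-contained sanity check of the completeness rule; the attribute names mirror the transcript object above, the sequence is fabricated:

class _FakeTranscript:
    contains_start_codon = True
    contains_stop_codon = True
    coding_sequence = "ATG" + "AAA" * 10 + "TAA"  # 36 bases, divisible by 3

    def complete(self):
        return (self.contains_start_codon and self.contains_stop_codon
                and self.coding_sequence is not None
                and len(self.coding_sequence) % 3 == 0)

print(_FakeTranscript().complete())  # True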
|
def get_comments_for_reference_on_date(self, reference_id, from_, to):
"""Pass through to provider CommentLookupSession.get_comments_for_reference_on_date"""
# Implemented from azosid template for -
# osid.relationship.RelationshipLookupSession.get_relationships_for_source_on_date_template
if self._can('lookup'):
return self._provider_session.get_comments_for_reference_on_date(reference_id, from_, to)
self._check_lookup_conditions() # raises PermissionDenied
query = self._query_session.get_comment_query()
query.match_source_id(reference_id, match=True)
query.match_date(from_, to, match=True)
return self._try_harder(query)
|
def function[get_comments_for_reference_on_date, parameter[self, reference_id, from_, to]]:
constant[Pass through to provider CommentLookupSession.get_comments_for_reference_on_date]
if call[name[self]._can, parameter[constant[lookup]]] begin[:]
return[call[name[self]._provider_session.get_comments_for_reference_on_date, parameter[name[reference_id], name[from_], name[to]]]]
call[name[self]._check_lookup_conditions, parameter[]]
variable[query] assign[=] call[name[self]._query_session.get_comment_query, parameter[]]
call[name[query].match_source_id, parameter[name[reference_id]]]
call[name[query].match_date, parameter[name[from_], name[to]]]
return[call[name[self]._try_harder, parameter[name[query]]]]
|
keyword[def] identifier[get_comments_for_reference_on_date] ( identifier[self] , identifier[reference_id] , identifier[from_] , identifier[to] ):
literal[string]
keyword[if] identifier[self] . identifier[_can] ( literal[string] ):
keyword[return] identifier[self] . identifier[_provider_session] . identifier[get_comments_for_reference_on_date] ( identifier[reference_id] , identifier[from_] , identifier[to] )
identifier[self] . identifier[_check_lookup_conditions] ()
identifier[query] = identifier[self] . identifier[_query_session] . identifier[get_comment_query] ()
identifier[query] . identifier[match_source_id] ( identifier[reference_id] , identifier[match] = keyword[True] )
identifier[query] . identifier[match_date] ( identifier[from_] , identifier[to] , identifier[match] = keyword[True] )
keyword[return] identifier[self] . identifier[_try_harder] ( identifier[query] )
|
def get_comments_for_reference_on_date(self, reference_id, from_, to):
"""Pass through to provider CommentLookupSession.get_comments_for_reference_on_date"""
# Implemented from azosid template for -
# osid.relationship.RelationshipLookupSession.get_relationships_for_source_on_date_template
if self._can('lookup'):
return self._provider_session.get_comments_for_reference_on_date(reference_id, from_, to) # depends on [control=['if'], data=[]]
self._check_lookup_conditions() # raises PermissionDenied
query = self._query_session.get_comment_query()
query.match_source_id(reference_id, match=True)
query.match_date(from_, to, match=True)
return self._try_harder(query)
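
The method above follows a fast-path / fallback shape: answer from the provider session when 'lookup' is authorized, otherwise rebuild the result through a filtered query. The control flow in miniature (every collaborator here is a stub, not the real osid session API):

def lookup_on_date(session, reference_id, from_, to):
    if session.can('lookup'):
        return session.provider_get(reference_id, from_, to)  # fast path
    session.check_lookup_conditions()   # may raise PermissionDenied
    query = session.new_comment_query()
    query.match_source_id(reference_id, match=True)
    query.match_date(from_, to, match=True)
    return session.try_harder(query)    # slower, permission-filtered fallback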
|
def render_to_response(self, context, **response_kwargs):
"""
Returns a PDF response with a template rendered with the given context.
"""
filename = response_kwargs.pop('filename', None)
cmd_options = response_kwargs.pop('cmd_options', None)
if issubclass(self.response_class, PDFTemplateResponse):
if filename is None:
filename = self.get_filename()
if cmd_options is None:
cmd_options = self.get_cmd_options()
return super(PDFTemplateView, self).render_to_response(
context=context, filename=filename,
show_content_in_browser=self.show_content_in_browser,
header_template=self.header_template,
footer_template=self.footer_template,
cmd_options=cmd_options,
cover_template=self.cover_template,
**response_kwargs
)
else:
return super(PDFTemplateView, self).render_to_response(
context=context,
**response_kwargs
)
|
def function[render_to_response, parameter[self, context]]:
constant[
Returns a PDF response with a template rendered with the given context.
]
variable[filename] assign[=] call[name[response_kwargs].pop, parameter[constant[filename], constant[None]]]
variable[cmd_options] assign[=] call[name[response_kwargs].pop, parameter[constant[cmd_options], constant[None]]]
if call[name[issubclass], parameter[name[self].response_class, name[PDFTemplateResponse]]] begin[:]
if compare[name[filename] is constant[None]] begin[:]
variable[filename] assign[=] call[name[self].get_filename, parameter[]]
if compare[name[cmd_options] is constant[None]] begin[:]
variable[cmd_options] assign[=] call[name[self].get_cmd_options, parameter[]]
return[call[call[name[super], parameter[name[PDFTemplateView], name[self]]].render_to_response, parameter[]]]
|
keyword[def] identifier[render_to_response] ( identifier[self] , identifier[context] ,** identifier[response_kwargs] ):
literal[string]
identifier[filename] = identifier[response_kwargs] . identifier[pop] ( literal[string] , keyword[None] )
identifier[cmd_options] = identifier[response_kwargs] . identifier[pop] ( literal[string] , keyword[None] )
keyword[if] identifier[issubclass] ( identifier[self] . identifier[response_class] , identifier[PDFTemplateResponse] ):
keyword[if] identifier[filename] keyword[is] keyword[None] :
identifier[filename] = identifier[self] . identifier[get_filename] ()
keyword[if] identifier[cmd_options] keyword[is] keyword[None] :
identifier[cmd_options] = identifier[self] . identifier[get_cmd_options] ()
keyword[return] identifier[super] ( identifier[PDFTemplateView] , identifier[self] ). identifier[render_to_response] (
identifier[context] = identifier[context] , identifier[filename] = identifier[filename] ,
identifier[show_content_in_browser] = identifier[self] . identifier[show_content_in_browser] ,
identifier[header_template] = identifier[self] . identifier[header_template] ,
identifier[footer_template] = identifier[self] . identifier[footer_template] ,
identifier[cmd_options] = identifier[cmd_options] ,
identifier[cover_template] = identifier[self] . identifier[cover_template] ,
** identifier[response_kwargs]
)
keyword[else] :
keyword[return] identifier[super] ( identifier[PDFTemplateView] , identifier[self] ). identifier[render_to_response] (
identifier[context] = identifier[context] ,
** identifier[response_kwargs]
)
|
def render_to_response(self, context, **response_kwargs):
"""
Returns a PDF response with a template rendered with the given context.
"""
filename = response_kwargs.pop('filename', None)
cmd_options = response_kwargs.pop('cmd_options', None)
if issubclass(self.response_class, PDFTemplateResponse):
if filename is None:
filename = self.get_filename() # depends on [control=['if'], data=['filename']]
if cmd_options is None:
cmd_options = self.get_cmd_options() # depends on [control=['if'], data=['cmd_options']]
return super(PDFTemplateView, self).render_to_response(context=context, filename=filename, show_content_in_browser=self.show_content_in_browser, header_template=self.header_template, footer_template=self.footer_template, cmd_options=cmd_options, cover_template=self.cover_template, **response_kwargs) # depends on [control=['if'], data=[]]
else:
return super(PDFTemplateView, self).render_to_response(context=context, **response_kwargs)
|
def get_items_of_credit_note_per_page(self, credit_note_id, per_page=1000, page=1):
"""
Get items of credit note per page
:param credit_note_id: the credit note id
:param per_page: How many objects per page. Default: 1000
:param page: Which page. Default: 1
:return: list
"""
return self._get_resource_per_page(
resource=CREDIT_NOTE_ITEMS,
per_page=per_page,
page=page,
params={'credit_note_id': credit_note_id},
)
|
def function[get_items_of_credit_note_per_page, parameter[self, credit_note_id, per_page, page]]:
constant[
Get items of credit note per page
:param credit_note_id: the credit note id
:param per_page: How many objects per page. Default: 1000
:param page: Which page. Default: 1
:return: list
]
return[call[name[self]._get_resource_per_page, parameter[]]]
|
keyword[def] identifier[get_items_of_credit_note_per_page] ( identifier[self] , identifier[credit_note_id] , identifier[per_page] = literal[int] , identifier[page] = literal[int] ):
literal[string]
keyword[return] identifier[self] . identifier[_get_resource_per_page] (
identifier[resource] = identifier[CREDIT_NOTE_ITEMS] ,
identifier[per_page] = identifier[per_page] ,
identifier[page] = identifier[page] ,
identifier[params] ={ literal[string] : identifier[credit_note_id] },
)
|
def get_items_of_credit_note_per_page(self, credit_note_id, per_page=1000, page=1):
"""
Get items of credit note per page
:param credit_note_id: the credit note id
:param per_page: How many objects per page. Default: 1000
:param page: Which page. Default: 1
:return: list
"""
return self._get_resource_per_page(resource=CREDIT_NOTE_ITEMS, per_page=per_page, page=page, params={'credit_note_id': credit_note_id})
|
def _verify_cas1(ticket, service):
"""
Verifies CAS 1.0 authentication ticket.
    :param ticket: the service ticket to validate
    :param service: the service URL the ticket was issued for
Returns username on success and None on failure.
"""
params = {'ticket': ticket, 'service': service}
url = (urljoin(settings.CAS_SERVER_URL, 'validate') + '?' +
urlencode(params))
page = urlopen(url)
try:
verified = page.readline().strip()
if verified == 'yes':
return page.readline().strip()
else:
return None
finally:
page.close()
|
def function[_verify_cas1, parameter[ticket, service]]:
constant[
Verifies CAS 1.0 authentication ticket.
    :param ticket: the service ticket to validate
    :param service: the service URL the ticket was issued for
Returns username on success and None on failure.
]
variable[params] assign[=] dictionary[[<ast.Constant object at 0x7da20e9563e0>, <ast.Constant object at 0x7da20e956740>], [<ast.Name object at 0x7da20e957ca0>, <ast.Name object at 0x7da20e957d00>]]
variable[url] assign[=] binary_operation[binary_operation[call[name[urljoin], parameter[name[settings].CAS_SERVER_URL, constant[validate]]] + constant[?]] + call[name[urlencode], parameter[name[params]]]]
variable[page] assign[=] call[name[urlopen], parameter[name[url]]]
<ast.Try object at 0x7da20e956950>
|
keyword[def] identifier[_verify_cas1] ( identifier[ticket] , identifier[service] ):
literal[string]
identifier[params] ={ literal[string] : identifier[ticket] , literal[string] : identifier[service] }
identifier[url] =( identifier[urljoin] ( identifier[settings] . identifier[CAS_SERVER_URL] , literal[string] )+ literal[string] +
identifier[urlencode] ( identifier[params] ))
identifier[page] = identifier[urlopen] ( identifier[url] )
keyword[try] :
identifier[verified] = identifier[page] . identifier[readline] (). identifier[strip] ()
keyword[if] identifier[verified] == literal[string] :
keyword[return] identifier[page] . identifier[readline] (). identifier[strip] ()
keyword[else] :
keyword[return] keyword[None]
keyword[finally] :
identifier[page] . identifier[close] ()
|
def _verify_cas1(ticket, service):
"""
Verifies CAS 1.0 authentication ticket.
    :param ticket: the service ticket to validate
    :param service: the service URL the ticket was issued for
Returns username on success and None on failure.
"""
params = {'ticket': ticket, 'service': service}
url = urljoin(settings.CAS_SERVER_URL, 'validate') + '?' + urlencode(params)
page = urlopen(url)
try:
verified = page.readline().strip()
if verified == 'yes':
return page.readline().strip() # depends on [control=['if'], data=[]]
else:
return None # depends on [control=['try'], data=[]]
finally:
page.close()
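
One portability caveat for the function above: under Python 3, urlopen yields bytes, so page.readline().strip() can never equal the str 'yes' and the validation silently fails. A runnable illustration of the mismatch and the decode fix:

line = b'yes\n'                               # what readline() returns on Python 3
print(line.strip() == 'yes')                  # False - bytes compared to str
print(line.strip().decode('ascii') == 'yes')  # True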
|
def __focus(self, item):
"""Called when focus item has changed"""
cols = self.__get_display_columns()
for col in cols:
            self.__event_info = (col, item)
self.event_generate('<<TreeviewInplaceEdit>>')
if col in self._inplace_widgets:
w = self._inplace_widgets[col]
                w.bind('<Key-Tab>',
                       lambda e, w=w: w.tk_focusNext().focus_set())
                w.bind('<Shift-Key-Tab>',
                       lambda e, w=w: w.tk_focusPrev().focus_set())
|
def function[__focus, parameter[self, item]]:
constant[Called when focus item has changed]
variable[cols] assign[=] call[name[self].__get_display_columns, parameter[]]
for taget[name[col]] in starred[name[cols]] begin[:]
name[self].__event_info assign[=] tuple[[<ast.Name object at 0x7da18dc9b7f0>, <ast.Name object at 0x7da18dc9b2e0>]]
call[name[self].event_generate, parameter[constant[<<TreeviewInplaceEdit>>]]]
if compare[name[col] in name[self]._inplace_widgets] begin[:]
variable[w] assign[=] call[name[self]._inplace_widgets][name[col]]
call[name[w].bind, parameter[constant[<Key-Tab>], <ast.Lambda object at 0x7da1b175e2c0>]]
call[name[w].bind, parameter[constant[<Shift-Key-Tab>], <ast.Lambda object at 0x7da1b175cb80>]]
|
keyword[def] identifier[__focus] ( identifier[self] , identifier[item] ):
literal[string]
identifier[cols] = identifier[self] . identifier[__get_display_columns] ()
keyword[for] identifier[col] keyword[in] identifier[cols] :
identifier[self] . identifier[__event_info] =( identifier[col] , identifier[item] )
identifier[self] . identifier[event_generate] ( literal[string] )
keyword[if] identifier[col] keyword[in] identifier[self] . identifier[_inplace_widgets] :
identifier[w] = identifier[self] . identifier[_inplace_widgets] [ identifier[col] ]
identifier[w] . identifier[bind] ( literal[string] ,
keyword[lambda] identifier[e] : identifier[w] . identifier[tk_focusNext] (). identifier[focus_set] ())
identifier[w] . identifier[bind] ( literal[string] ,
keyword[lambda] identifier[e] : identifier[w] . identifier[tk_focusPrev] (). identifier[focus_set] ())
|
def __focus(self, item):
"""Called when focus item has changed"""
cols = self.__get_display_columns()
for col in cols:
self.__event_info = (col, item)
self.event_generate('<<TreeviewInplaceEdit>>')
if col in self._inplace_widgets:
w = self._inplace_widgets[col]
            w.bind('<Key-Tab>', lambda e, w=w: w.tk_focusNext().focus_set())
            w.bind('<Shift-Key-Tab>', lambda e, w=w: w.tk_focusPrev().focus_set()) # depends on [control=['if'], data=['col']] # depends on [control=['for'], data=['col']]
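
The w=w default arguments above matter because plain lambdas close over the loop variable late: without them, every binding would act on the last widget of the loop. A self-contained demonstration of the pitfall and the fix:

callbacks = [lambda: name for name in ('a', 'b', 'c')]
print([cb() for cb in callbacks])    # ['c', 'c', 'c'] - all see the last value

callbacks = [lambda name=name: name for name in ('a', 'b', 'c')]
print([cb() for cb in callbacks])    # ['a', 'b', 'c'] - defaults bind per iteration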
|
def cross_origin(app, *args, **kwargs):
"""
    This function is the decorator used to wrap a Sanic route.
In the simplest case, simply use the default parameters to allow all
origins in what is the most permissive configuration. If this method
modifies state or performs authentication which may be brute-forced, you
should add some degree of protection, such as Cross Site Forgery
Request protection.
:param origins:
The origin, or list of origins to allow requests from.
The origin(s) may be regular expressions, case-sensitive strings,
or else an asterisk
Default : '*'
:type origins: list, string or regex
:param methods:
The method or list of methods which the allowed origins are allowed to
access for non-simple requests.
Default : [GET, HEAD, POST, OPTIONS, PUT, PATCH, DELETE]
:type methods: list or string
:param expose_headers:
The header or list which are safe to expose to the API of a CORS API
specification.
Default : None
:type expose_headers: list or string
:param allow_headers:
The header or list of header field names which can be used when this
resource is accessed by allowed origins. The header(s) may be regular
expressions, case-sensitive strings, or else an asterisk.
Default : '*', allow all headers
:type allow_headers: list, string or regex
:param supports_credentials:
Allows users to make authenticated requests. If true, injects the
`Access-Control-Allow-Credentials` header in responses. This allows
cookies and credentials to be submitted across domains.
        :note: This option cannot be used in conjunction with a '*' origin
Default : False
:type supports_credentials: bool
:param max_age:
        The maximum time for which this CORS request may be cached. This value
is set as the `Access-Control-Max-Age` header.
Default : None
:type max_age: timedelta, integer, string or None
:param send_wildcard: If True, and the origins parameter is `*`, a wildcard
`Access-Control-Allow-Origin` header is sent, rather than the
request's `Origin` header.
Default : False
:type send_wildcard: bool
:param vary_header:
If True, the header Vary: Origin will be returned as per the W3
implementation guidelines.
Setting this header when the `Access-Control-Allow-Origin` is
dynamically generated (e.g. when there is more than one allowed
        origin, and an Origin other than '*' is returned) informs CDNs and other
caches that the CORS headers are dynamic, and cannot be cached.
If False, the Vary header will never be injected or altered.
Default : True
:type vary_header: bool
:param automatic_options:
Only applies to the `cross_origin` decorator. If True, Sanic-CORS will
override Sanic's default OPTIONS handling to return CORS headers for
OPTIONS requests.
Default : True
:type automatic_options: bool
"""
_options = kwargs
_real_decorator = cors.decorate(app, *args, run_middleware=False, with_context=False, **kwargs)
def wrapped_decorator(f):
spf = SanicPluginsFramework(app) # get the singleton from the app
try:
plugin = spf.register_plugin(cors, skip_reg=True)
except ValueError as e:
# this is normal, if this plugin has been registered previously
assert e.args and len(e.args) > 1
plugin = e.args[1]
context = cors.get_context_from_spf(spf)
log = context.log
log(logging.DEBUG, "Enabled {:s} for cross_origin using options: {}".format(str(f), str(_options)))
return _real_decorator(f)
return wrapped_decorator
|
def function[cross_origin, parameter[app]]:
constant[
    This function is the decorator used to wrap a Sanic route.
In the simplest case, simply use the default parameters to allow all
origins in what is the most permissive configuration. If this method
modifies state or performs authentication which may be brute-forced, you
should add some degree of protection, such as Cross Site Forgery
Request protection.
:param origins:
The origin, or list of origins to allow requests from.
The origin(s) may be regular expressions, case-sensitive strings,
or else an asterisk
Default : '*'
:type origins: list, string or regex
:param methods:
The method or list of methods which the allowed origins are allowed to
access for non-simple requests.
Default : [GET, HEAD, POST, OPTIONS, PUT, PATCH, DELETE]
:type methods: list or string
:param expose_headers:
The header or list which are safe to expose to the API of a CORS API
specification.
Default : None
:type expose_headers: list or string
:param allow_headers:
The header or list of header field names which can be used when this
resource is accessed by allowed origins. The header(s) may be regular
expressions, case-sensitive strings, or else an asterisk.
Default : '*', allow all headers
:type allow_headers: list, string or regex
:param supports_credentials:
Allows users to make authenticated requests. If true, injects the
`Access-Control-Allow-Credentials` header in responses. This allows
cookies and credentials to be submitted across domains.
        :note: This option cannot be used in conjunction with a '*' origin
Default : False
:type supports_credentials: bool
:param max_age:
        The maximum time for which this CORS request may be cached. This value
is set as the `Access-Control-Max-Age` header.
Default : None
:type max_age: timedelta, integer, string or None
:param send_wildcard: If True, and the origins parameter is `*`, a wildcard
`Access-Control-Allow-Origin` header is sent, rather than the
request's `Origin` header.
Default : False
:type send_wildcard: bool
:param vary_header:
If True, the header Vary: Origin will be returned as per the W3
implementation guidelines.
Setting this header when the `Access-Control-Allow-Origin` is
dynamically generated (e.g. when there is more than one allowed
        origin, and an Origin other than '*' is returned) informs CDNs and other
caches that the CORS headers are dynamic, and cannot be cached.
If False, the Vary header will never be injected or altered.
Default : True
:type vary_header: bool
:param automatic_options:
Only applies to the `cross_origin` decorator. If True, Sanic-CORS will
override Sanic's default OPTIONS handling to return CORS headers for
OPTIONS requests.
Default : True
:type automatic_options: bool
]
variable[_options] assign[=] name[kwargs]
variable[_real_decorator] assign[=] call[name[cors].decorate, parameter[name[app], <ast.Starred object at 0x7da1b05305b0>]]
def function[wrapped_decorator, parameter[f]]:
variable[spf] assign[=] call[name[SanicPluginsFramework], parameter[name[app]]]
<ast.Try object at 0x7da1b0531750>
variable[context] assign[=] call[name[cors].get_context_from_spf, parameter[name[spf]]]
variable[log] assign[=] name[context].log
call[name[log], parameter[name[logging].DEBUG, call[constant[Enabled {:s} for cross_origin using options: {}].format, parameter[call[name[str], parameter[name[f]]], call[name[str], parameter[name[_options]]]]]]]
return[call[name[_real_decorator], parameter[name[f]]]]
return[name[wrapped_decorator]]
|
keyword[def] identifier[cross_origin] ( identifier[app] ,* identifier[args] ,** identifier[kwargs] ):
literal[string]
identifier[_options] = identifier[kwargs]
identifier[_real_decorator] = identifier[cors] . identifier[decorate] ( identifier[app] ,* identifier[args] , identifier[run_middleware] = keyword[False] , identifier[with_context] = keyword[False] ,** identifier[kwargs] )
keyword[def] identifier[wrapped_decorator] ( identifier[f] ):
identifier[spf] = identifier[SanicPluginsFramework] ( identifier[app] )
keyword[try] :
identifier[plugin] = identifier[spf] . identifier[register_plugin] ( identifier[cors] , identifier[skip_reg] = keyword[True] )
keyword[except] identifier[ValueError] keyword[as] identifier[e] :
keyword[assert] identifier[e] . identifier[args] keyword[and] identifier[len] ( identifier[e] . identifier[args] )> literal[int]
identifier[plugin] = identifier[e] . identifier[args] [ literal[int] ]
identifier[context] = identifier[cors] . identifier[get_context_from_spf] ( identifier[spf] )
identifier[log] = identifier[context] . identifier[log]
identifier[log] ( identifier[logging] . identifier[DEBUG] , literal[string] . identifier[format] ( identifier[str] ( identifier[f] ), identifier[str] ( identifier[_options] )))
keyword[return] identifier[_real_decorator] ( identifier[f] )
keyword[return] identifier[wrapped_decorator]
|
def cross_origin(app, *args, **kwargs):
"""
    This function is the decorator used to wrap a Sanic route.
In the simplest case, simply use the default parameters to allow all
origins in what is the most permissive configuration. If this method
modifies state or performs authentication which may be brute-forced, you
should add some degree of protection, such as Cross Site Forgery
Request protection.
:param origins:
The origin, or list of origins to allow requests from.
The origin(s) may be regular expressions, case-sensitive strings,
or else an asterisk
Default : '*'
:type origins: list, string or regex
:param methods:
The method or list of methods which the allowed origins are allowed to
access for non-simple requests.
Default : [GET, HEAD, POST, OPTIONS, PUT, PATCH, DELETE]
:type methods: list or string
:param expose_headers:
The header or list which are safe to expose to the API of a CORS API
specification.
Default : None
:type expose_headers: list or string
:param allow_headers:
The header or list of header field names which can be used when this
resource is accessed by allowed origins. The header(s) may be regular
expressions, case-sensitive strings, or else an asterisk.
Default : '*', allow all headers
:type allow_headers: list, string or regex
:param supports_credentials:
Allows users to make authenticated requests. If true, injects the
`Access-Control-Allow-Credentials` header in responses. This allows
cookies and credentials to be submitted across domains.
        :note: This option cannot be used in conjunction with a '*' origin
Default : False
:type supports_credentials: bool
:param max_age:
        The maximum time for which this CORS request may be cached. This value
is set as the `Access-Control-Max-Age` header.
Default : None
:type max_age: timedelta, integer, string or None
:param send_wildcard: If True, and the origins parameter is `*`, a wildcard
`Access-Control-Allow-Origin` header is sent, rather than the
request's `Origin` header.
Default : False
:type send_wildcard: bool
:param vary_header:
If True, the header Vary: Origin will be returned as per the W3
implementation guidelines.
Setting this header when the `Access-Control-Allow-Origin` is
dynamically generated (e.g. when there is more than one allowed
        origin, and an Origin other than '*' is returned) informs CDNs and other
caches that the CORS headers are dynamic, and cannot be cached.
If False, the Vary header will never be injected or altered.
Default : True
:type vary_header: bool
:param automatic_options:
Only applies to the `cross_origin` decorator. If True, Sanic-CORS will
override Sanic's default OPTIONS handling to return CORS headers for
OPTIONS requests.
Default : True
:type automatic_options: bool
"""
_options = kwargs
_real_decorator = cors.decorate(app, *args, run_middleware=False, with_context=False, **kwargs)
def wrapped_decorator(f):
spf = SanicPluginsFramework(app) # get the singleton from the app
try:
plugin = spf.register_plugin(cors, skip_reg=True) # depends on [control=['try'], data=[]]
except ValueError as e:
# this is normal, if this plugin has been registered previously
assert e.args and len(e.args) > 1
plugin = e.args[1] # depends on [control=['except'], data=['e']]
context = cors.get_context_from_spf(spf)
log = context.log
log(logging.DEBUG, 'Enabled {:s} for cross_origin using options: {}'.format(str(f), str(_options)))
return _real_decorator(f)
return wrapped_decorator
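
A usage sketch consistent with the signature above (app name, route, and origin are invented; consult the Sanic-CORS documentation for the authoritative decorator order):

from sanic import Sanic
from sanic.response import text
from sanic_cors import cross_origin

app = Sanic("demo")

@app.route("/api/ping")
@cross_origin(app, origins="https://example.com", supports_credentials=True)
def ping(request):
    return text("pong")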
|
def get_rand_Japprox(s, params, num_inds=1000, include_cost=False, **kwargs):
"""
Calculates a random approximation to J by returning J only at a
set of random pixel/voxel locations.
Parameters
----------
s : :class:`peri.states.State`
The state to calculate J for.
params : List
The list of parameter names to calculate the gradient of.
num_inds : Int, optional.
The number of pix/voxels at which to calculate the random
approximation to J. Default is 1000.
include_cost : Bool, optional
Set to True to append a finite-difference measure of the full
cost gradient onto the returned J.
Other Parameters
----------------
All kwargs parameters get passed to s.gradmodel only.
Returns
-------
J : numpy.ndarray
[d, num_inds] array of J, at the given indices.
return_inds : numpy.ndarray or slice
[num_inds] element array or slice(0, None) of the model
indices at which J was evaluated.
"""
start_time = time.time()
tot_pix = s.residuals.size
if num_inds < tot_pix:
inds = np.random.choice(tot_pix, size=num_inds, replace=False)
slicer = None
return_inds = np.sort(inds)
else:
inds = None
return_inds = slice(0, None)
slicer = [slice(0, None)]*len(s.residuals.shape)
if include_cost:
        Jact, ge = s.gradmodel_e(params=params, inds=inds, slicer=slicer, flat=False,
**kwargs)
Jact *= -1
J = [Jact, ge]
else:
J = -s.gradmodel(params=params, inds=inds, slicer=slicer, flat=False,
**kwargs)
CLOG.debug('J:\t%f' % (time.time()-start_time))
return J, return_inds
|
def function[get_rand_Japprox, parameter[s, params, num_inds, include_cost]]:
constant[
Calculates a random approximation to J by returning J only at a
set of random pixel/voxel locations.
Parameters
----------
s : :class:`peri.states.State`
The state to calculate J for.
params : List
The list of parameter names to calculate the gradient of.
num_inds : Int, optional.
The number of pix/voxels at which to calculate the random
approximation to J. Default is 1000.
include_cost : Bool, optional
Set to True to append a finite-difference measure of the full
cost gradient onto the returned J.
Other Parameters
----------------
All kwargs parameters get passed to s.gradmodel only.
Returns
-------
J : numpy.ndarray
[d, num_inds] array of J, at the given indices.
return_inds : numpy.ndarray or slice
[num_inds] element array or slice(0, None) of the model
indices at which J was evaluated.
]
variable[start_time] assign[=] call[name[time].time, parameter[]]
variable[tot_pix] assign[=] name[s].residuals.size
if compare[name[num_inds] less[<] name[tot_pix]] begin[:]
variable[inds] assign[=] call[name[np].random.choice, parameter[name[tot_pix]]]
variable[slicer] assign[=] constant[None]
variable[return_inds] assign[=] call[name[np].sort, parameter[name[inds]]]
if name[include_cost] begin[:]
<ast.Tuple object at 0x7da18bc70df0> assign[=] call[name[s].gradmodel_e, parameter[]]
<ast.AugAssign object at 0x7da18bc73fa0>
variable[J] assign[=] list[[<ast.Name object at 0x7da18bc71c60>, <ast.Name object at 0x7da18bc72f20>]]
call[name[CLOG].debug, parameter[binary_operation[constant[J: %f] <ast.Mod object at 0x7da2590d6920> binary_operation[call[name[time].time, parameter[]] - name[start_time]]]]]
return[tuple[[<ast.Name object at 0x7da18bc72050>, <ast.Name object at 0x7da18bc70f70>]]]
|
keyword[def] identifier[get_rand_Japprox] ( identifier[s] , identifier[params] , identifier[num_inds] = literal[int] , identifier[include_cost] = keyword[False] ,** identifier[kwargs] ):
literal[string]
identifier[start_time] = identifier[time] . identifier[time] ()
identifier[tot_pix] = identifier[s] . identifier[residuals] . identifier[size]
keyword[if] identifier[num_inds] < identifier[tot_pix] :
identifier[inds] = identifier[np] . identifier[random] . identifier[choice] ( identifier[tot_pix] , identifier[size] = identifier[num_inds] , identifier[replace] = keyword[False] )
identifier[slicer] = keyword[None]
identifier[return_inds] = identifier[np] . identifier[sort] ( identifier[inds] )
keyword[else] :
identifier[inds] = keyword[None]
identifier[return_inds] = identifier[slice] ( literal[int] , keyword[None] )
identifier[slicer] =[ identifier[slice] ( literal[int] , keyword[None] )]* identifier[len] ( identifier[s] . identifier[residuals] . identifier[shape] )
keyword[if] identifier[include_cost] :
identifier[Jact] , identifier[ge] = identifier[s] . identifier[gradmodel_e] ( identifier[params] = identifier[params] , identifier[inds] = identifier[inds] , identifier[slicer] = identifier[slicer] , identifier[flat] = keyword[False] ,
** identifier[kwargs] )
identifier[Jact] *=- literal[int]
identifier[J] =[ identifier[Jact] , identifier[ge] ]
keyword[else] :
identifier[J] =- identifier[s] . identifier[gradmodel] ( identifier[params] = identifier[params] , identifier[inds] = identifier[inds] , identifier[slicer] = identifier[slicer] , identifier[flat] = keyword[False] ,
** identifier[kwargs] )
identifier[CLOG] . identifier[debug] ( literal[string] %( identifier[time] . identifier[time] ()- identifier[start_time] ))
keyword[return] identifier[J] , identifier[return_inds]
|
def get_rand_Japprox(s, params, num_inds=1000, include_cost=False, **kwargs):
"""
Calculates a random approximation to J by returning J only at a
set of random pixel/voxel locations.
Parameters
----------
s : :class:`peri.states.State`
The state to calculate J for.
params : List
The list of parameter names to calculate the gradient of.
num_inds : Int, optional.
The number of pix/voxels at which to calculate the random
approximation to J. Default is 1000.
include_cost : Bool, optional
Set to True to append a finite-difference measure of the full
cost gradient onto the returned J.
Other Parameters
----------------
All kwargs parameters get passed to s.gradmodel only.
Returns
-------
J : numpy.ndarray
[d, num_inds] array of J, at the given indices.
return_inds : numpy.ndarray or slice
[num_inds] element array or slice(0, None) of the model
indices at which J was evaluated.
"""
start_time = time.time()
tot_pix = s.residuals.size
if num_inds < tot_pix:
inds = np.random.choice(tot_pix, size=num_inds, replace=False)
slicer = None
return_inds = np.sort(inds) # depends on [control=['if'], data=['num_inds', 'tot_pix']]
else:
inds = None
return_inds = slice(0, None)
slicer = [slice(0, None)] * len(s.residuals.shape)
if include_cost:
(Jact, ge) = s.gradmodel_e(params=params, inds=inds, slicer=slicer, flat=False, **kwargs)
Jact *= -1
J = [Jact, ge] # depends on [control=['if'], data=[]]
else:
J = -s.gradmodel(params=params, inds=inds, slicer=slicer, flat=False, **kwargs)
CLOG.debug('J:\t%f' % (time.time() - start_time))
return (J, return_inds)
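
The subsampling above trades a full residual evaluation for num_inds randomly chosen entries; the index bookkeeping in isolation (sizes invented):

import numpy as np

tot_pix, num_inds = 10000, 1000
inds = np.random.choice(tot_pix, size=num_inds, replace=False)
return_inds = np.sort(inds)  # sorted model indices, as returned above
assert return_inds.size == num_inds
assert np.unique(return_inds).size == num_inds  # replace=False guarantees uniqueness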
|
def image(self, name, x=None, y=None, w=0, h=0, type='', link=''):
"Put an image on the page"
        if name not in self.images:
#First use of image, get info
if(type==''):
pos=name.rfind('.')
                if(pos < 0): # rfind returns -1 when no '.' is found
self.error('image file has no extension and no type was specified: '+name)
type=substr(name,pos+1)
type=type.lower()
if(type=='jpg' or type=='jpeg'):
info=self._parsejpg(name)
elif(type=='png'):
info=self._parsepng(name)
else:
                #Allow for additional formats:
                #the file may not carry the correct extension,
                #but the header may still identify a supported format
succeed_parsing = False
#try all the parsing functions
parsing_functions = [self._parsejpg,self._parsepng,self._parsegif]
for pf in parsing_functions:
try:
info = pf(name)
succeed_parsing = True
                        break
except:
pass
                #last resort
if not succeed_parsing:
mtd='_parse'+type
if not hasattr(self,mtd):
self.error('Unsupported image type: '+type)
info=getattr(self, mtd)(name)
mtd='_parse'+type
if not hasattr(self,mtd):
self.error('Unsupported image type: '+type)
info=getattr(self, mtd)(name)
info['i']=len(self.images)+1
self.images[name]=info
else:
info=self.images[name]
#Automatic width and height calculation if needed
if(w==0 and h==0):
#Put image at 72 dpi
w=info['w']/self.k
h=info['h']/self.k
elif(w==0):
w=h*info['w']/info['h']
elif(h==0):
h=w*info['h']/info['w']
# Flowing mode
if y is None:
if (self.y + h > self.page_break_trigger and not self.in_footer and self.accept_page_break()):
#Automatic page break
x = self.x
self.add_page(self.cur_orientation)
self.x = x
y = self.y
self.y += h
if x is None:
x = self.x
self._out(sprintf('q %.2f 0 0 %.2f %.2f %.2f cm /I%d Do Q',w*self.k,h*self.k,x*self.k,(self.h-(y+h))*self.k,info['i']))
if(link):
self.link(x,y,w,h,link)
|
def function[image, parameter[self, name, x, y, w, h, type, link]]:
constant[Put an image on the page]
if <ast.UnaryOp object at 0x7da1b1de3bb0> begin[:]
if compare[name[type] equal[==] constant[]] begin[:]
variable[pos] assign[=] call[name[name].rfind, parameter[constant[.]]]
if <ast.UnaryOp object at 0x7da1b1de3880> begin[:]
call[name[self].error, parameter[binary_operation[constant[image file has no extension and no type was specified: ] + name[name]]]]
variable[type] assign[=] call[name[substr], parameter[name[name], binary_operation[name[pos] + constant[1]]]]
variable[type] assign[=] call[name[type].lower, parameter[]]
if <ast.BoolOp object at 0x7da1b1de3400> begin[:]
variable[info] assign[=] call[name[self]._parsejpg, parameter[name[name]]]
call[name[info]][constant[i]] assign[=] binary_operation[call[name[len], parameter[name[self].images]] + constant[1]]
call[name[self].images][name[name]] assign[=] name[info]
if <ast.BoolOp object at 0x7da1b1de1b40> begin[:]
variable[w] assign[=] binary_operation[call[name[info]][constant[w]] / name[self].k]
variable[h] assign[=] binary_operation[call[name[info]][constant[h]] / name[self].k]
if compare[name[y] is constant[None]] begin[:]
if <ast.BoolOp object at 0x7da1b1de1060> begin[:]
variable[x] assign[=] name[self].x
call[name[self].add_page, parameter[name[self].cur_orientation]]
name[self].x assign[=] name[x]
variable[y] assign[=] name[self].y
<ast.AugAssign object at 0x7da1b1d0fb80>
if compare[name[x] is constant[None]] begin[:]
variable[x] assign[=] name[self].x
call[name[self]._out, parameter[call[name[sprintf], parameter[constant[q %.2f 0 0 %.2f %.2f %.2f cm /I%d Do Q], binary_operation[name[w] * name[self].k], binary_operation[name[h] * name[self].k], binary_operation[name[x] * name[self].k], binary_operation[binary_operation[name[self].h - binary_operation[name[y] + name[h]]] * name[self].k], call[name[info]][constant[i]]]]]]
if name[link] begin[:]
call[name[self].link, parameter[name[x], name[y], name[w], name[h], name[link]]]
|
keyword[def] identifier[image] ( identifier[self] , identifier[name] , identifier[x] = keyword[None] , identifier[y] = keyword[None] , identifier[w] = literal[int] , identifier[h] = literal[int] , identifier[type] = literal[string] , identifier[link] = literal[string] ):
literal[string]
keyword[if] keyword[not] identifier[name] keyword[in] identifier[self] . identifier[images] :
keyword[if] ( identifier[type] == literal[string] ):
identifier[pos] = identifier[name] . identifier[rfind] ( literal[string] )
keyword[if] ( keyword[not] identifier[pos] ):
identifier[self] . identifier[error] ( literal[string] + identifier[name] )
identifier[type] = identifier[substr] ( identifier[name] , identifier[pos] + literal[int] )
identifier[type] = identifier[type] . identifier[lower] ()
keyword[if] ( identifier[type] == literal[string] keyword[or] identifier[type] == literal[string] ):
identifier[info] = identifier[self] . identifier[_parsejpg] ( identifier[name] )
keyword[elif] ( identifier[type] == literal[string] ):
identifier[info] = identifier[self] . identifier[_parsepng] ( identifier[name] )
keyword[else] :
identifier[succeed_parsing] = keyword[False]
identifier[parsing_functions] =[ identifier[self] . identifier[_parsejpg] , identifier[self] . identifier[_parsepng] , identifier[self] . identifier[_parsegif] ]
keyword[for] identifier[pf] keyword[in] identifier[parsing_functions] :
keyword[try] :
identifier[info] = identifier[pf] ( identifier[name] )
identifier[succeed_parsing] = keyword[True]
keyword[break] ;
keyword[except] :
keyword[pass]
keyword[if] keyword[not] identifier[succeed_parsing] :
identifier[mtd] = literal[string] + identifier[type]
keyword[if] keyword[not] identifier[hasattr] ( identifier[self] , identifier[mtd] ):
identifier[self] . identifier[error] ( literal[string] + identifier[type] )
identifier[info] = identifier[getattr] ( identifier[self] , identifier[mtd] )( identifier[name] )
identifier[mtd] = literal[string] + identifier[type]
keyword[if] keyword[not] identifier[hasattr] ( identifier[self] , identifier[mtd] ):
identifier[self] . identifier[error] ( literal[string] + identifier[type] )
identifier[info] = identifier[getattr] ( identifier[self] , identifier[mtd] )( identifier[name] )
identifier[info] [ literal[string] ]= identifier[len] ( identifier[self] . identifier[images] )+ literal[int]
identifier[self] . identifier[images] [ identifier[name] ]= identifier[info]
keyword[else] :
identifier[info] = identifier[self] . identifier[images] [ identifier[name] ]
keyword[if] ( identifier[w] == literal[int] keyword[and] identifier[h] == literal[int] ):
identifier[w] = identifier[info] [ literal[string] ]/ identifier[self] . identifier[k]
identifier[h] = identifier[info] [ literal[string] ]/ identifier[self] . identifier[k]
keyword[elif] ( identifier[w] == literal[int] ):
identifier[w] = identifier[h] * identifier[info] [ literal[string] ]/ identifier[info] [ literal[string] ]
keyword[elif] ( identifier[h] == literal[int] ):
identifier[h] = identifier[w] * identifier[info] [ literal[string] ]/ identifier[info] [ literal[string] ]
keyword[if] identifier[y] keyword[is] keyword[None] :
keyword[if] ( identifier[self] . identifier[y] + identifier[h] > identifier[self] . identifier[page_break_trigger] keyword[and] keyword[not] identifier[self] . identifier[in_footer] keyword[and] identifier[self] . identifier[accept_page_break] ()):
identifier[x] = identifier[self] . identifier[x]
identifier[self] . identifier[add_page] ( identifier[self] . identifier[cur_orientation] )
identifier[self] . identifier[x] = identifier[x]
identifier[y] = identifier[self] . identifier[y]
identifier[self] . identifier[y] += identifier[h]
keyword[if] identifier[x] keyword[is] keyword[None] :
identifier[x] = identifier[self] . identifier[x]
identifier[self] . identifier[_out] ( identifier[sprintf] ( literal[string] , identifier[w] * identifier[self] . identifier[k] , identifier[h] * identifier[self] . identifier[k] , identifier[x] * identifier[self] . identifier[k] ,( identifier[self] . identifier[h] -( identifier[y] + identifier[h] ))* identifier[self] . identifier[k] , identifier[info] [ literal[string] ]))
keyword[if] ( identifier[link] ):
identifier[self] . identifier[link] ( identifier[x] , identifier[y] , identifier[w] , identifier[h] , identifier[link] )
|
def image(self, name, x=None, y=None, w=0, h=0, type='', link=''):
"""Put an image on the page"""
    if name not in self.images:
#First use of image, get info
if type == '':
pos = name.rfind('.')
            if pos < 0: # rfind returns -1 when no '.' is found
self.error('image file has no extension and no type was specified: ' + name) # depends on [control=['if'], data=[]]
type = substr(name, pos + 1) # depends on [control=['if'], data=['type']]
type = type.lower()
if type == 'jpg' or type == 'jpeg':
info = self._parsejpg(name) # depends on [control=['if'], data=[]]
elif type == 'png':
info = self._parsepng(name) # depends on [control=['if'], data=[]]
else:
            #Allow for additional formats:
            #the file may not carry the correct extension,
            #but the header may still identify a supported format
succeed_parsing = False
#try all the parsing functions
parsing_functions = [self._parsejpg, self._parsepng, self._parsegif]
for pf in parsing_functions:
try:
info = pf(name)
succeed_parsing = True
break # depends on [control=['try'], data=[]]
except:
pass # depends on [control=['except'], data=[]] # depends on [control=['for'], data=['pf']]
            #last resort
if not succeed_parsing:
mtd = '_parse' + type
if not hasattr(self, mtd):
self.error('Unsupported image type: ' + type) # depends on [control=['if'], data=[]]
info = getattr(self, mtd)(name) # depends on [control=['if'], data=[]]
mtd = '_parse' + type
if not hasattr(self, mtd):
self.error('Unsupported image type: ' + type) # depends on [control=['if'], data=[]]
info = getattr(self, mtd)(name)
info['i'] = len(self.images) + 1
self.images[name] = info # depends on [control=['if'], data=[]]
else:
info = self.images[name]
#Automatic width and height calculation if needed
if w == 0 and h == 0:
#Put image at 72 dpi
w = info['w'] / self.k
h = info['h'] / self.k # depends on [control=['if'], data=[]]
elif w == 0:
w = h * info['w'] / info['h'] # depends on [control=['if'], data=['w']]
elif h == 0:
h = w * info['h'] / info['w'] # depends on [control=['if'], data=['h']]
# Flowing mode
if y is None:
if self.y + h > self.page_break_trigger and (not self.in_footer) and self.accept_page_break():
#Automatic page break
x = self.x
self.add_page(self.cur_orientation)
self.x = x # depends on [control=['if'], data=[]]
y = self.y
self.y += h # depends on [control=['if'], data=['y']]
if x is None:
x = self.x # depends on [control=['if'], data=['x']]
self._out(sprintf('q %.2f 0 0 %.2f %.2f %.2f cm /I%d Do Q', w * self.k, h * self.k, x * self.k, (self.h - (y + h)) * self.k, info['i']))
if link:
self.link(x, y, w, h, link) # depends on [control=['if'], data=[]]
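
The automatic sizing above preserves the image's aspect ratio whenever exactly one of w, h is given; the arithmetic in isolation (pixel dimensions and the scale factor k are invented for the example):

def fit(w, h, img_w, img_h, k=72 / 25.4):
    if w == 0 and h == 0:      # neither given: place the image at 72 dpi
        return img_w / k, img_h / k
    if w == 0:                 # height given: derive width from the ratio
        return h * img_w / img_h, h
    if h == 0:                 # width given: derive height from the ratio
        return w, w * img_h / img_w
    return w, h

print(fit(0, 50, img_w=800, img_h=600))  # (66.66..., 50) - 4:3 preserved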
|
def predict_proba(self, data, ntree_limit=None, validate_features=True):
"""
Predict the probability of each `data` example being of a given class.
.. note:: This function is not thread safe
For each booster object, predict can only be called from one thread.
    If you want to run prediction using multiple threads, call ``xgb.copy()`` to make copies
    of the model object and then call predict.
Parameters
----------
data : DMatrix
The dmatrix storing the input.
ntree_limit : int
Limit number of trees in the prediction; defaults to best_ntree_limit if defined
(i.e. it has been trained with early stopping), otherwise 0 (use all trees).
validate_features : bool
When this is True, validate that the Booster's and data's feature_names are identical.
Otherwise, it is assumed that the feature_names are the same.
Returns
-------
prediction : numpy array
a numpy array with the probability of each data example being of a given class.
"""
test_dmatrix = DMatrix(data, missing=self.missing, nthread=self.n_jobs)
if ntree_limit is None:
ntree_limit = getattr(self, "best_ntree_limit", 0)
class_probs = self.get_booster().predict(test_dmatrix,
ntree_limit=ntree_limit,
validate_features=validate_features)
if self.objective == "multi:softprob":
return class_probs
classone_probs = class_probs
classzero_probs = 1.0 - classone_probs
return np.vstack((classzero_probs, classone_probs)).transpose()
|
def function[predict_proba, parameter[self, data, ntree_limit, validate_features]]:
constant[
Predict the probability of each `data` example being of a given class.
.. note:: This function is not thread safe
For each booster object, predict can only be called from one thread.
    If you want to run prediction using multiple threads, call ``xgb.copy()`` to make copies
    of the model object and then call predict.
Parameters
----------
data : DMatrix
The dmatrix storing the input.
ntree_limit : int
Limit number of trees in the prediction; defaults to best_ntree_limit if defined
(i.e. it has been trained with early stopping), otherwise 0 (use all trees).
validate_features : bool
When this is True, validate that the Booster's and data's feature_names are identical.
Otherwise, it is assumed that the feature_names are the same.
Returns
-------
prediction : numpy array
a numpy array with the probability of each data example being of a given class.
]
variable[test_dmatrix] assign[=] call[name[DMatrix], parameter[name[data]]]
if compare[name[ntree_limit] is constant[None]] begin[:]
variable[ntree_limit] assign[=] call[name[getattr], parameter[name[self], constant[best_ntree_limit], constant[0]]]
variable[class_probs] assign[=] call[call[name[self].get_booster, parameter[]].predict, parameter[name[test_dmatrix]]]
if compare[name[self].objective equal[==] constant[multi:softprob]] begin[:]
return[name[class_probs]]
variable[classone_probs] assign[=] name[class_probs]
variable[classzero_probs] assign[=] binary_operation[constant[1.0] - name[classone_probs]]
return[call[call[name[np].vstack, parameter[tuple[[<ast.Name object at 0x7da1b209dcc0>, <ast.Name object at 0x7da1b209dc90>]]]].transpose, parameter[]]]
|
keyword[def] identifier[predict_proba] ( identifier[self] , identifier[data] , identifier[ntree_limit] = keyword[None] , identifier[validate_features] = keyword[True] ):
literal[string]
identifier[test_dmatrix] = identifier[DMatrix] ( identifier[data] , identifier[missing] = identifier[self] . identifier[missing] , identifier[nthread] = identifier[self] . identifier[n_jobs] )
keyword[if] identifier[ntree_limit] keyword[is] keyword[None] :
identifier[ntree_limit] = identifier[getattr] ( identifier[self] , literal[string] , literal[int] )
identifier[class_probs] = identifier[self] . identifier[get_booster] (). identifier[predict] ( identifier[test_dmatrix] ,
identifier[ntree_limit] = identifier[ntree_limit] ,
identifier[validate_features] = identifier[validate_features] )
keyword[if] identifier[self] . identifier[objective] == literal[string] :
keyword[return] identifier[class_probs]
identifier[classone_probs] = identifier[class_probs]
identifier[classzero_probs] = literal[int] - identifier[classone_probs]
keyword[return] identifier[np] . identifier[vstack] (( identifier[classzero_probs] , identifier[classone_probs] )). identifier[transpose] ()
|
def predict_proba(self, data, ntree_limit=None, validate_features=True):
"""
Predict the probability of each `data` example being of a given class.
.. note:: This function is not thread safe
For each booster object, predict can only be called from one thread.
    If you want to run prediction using multiple threads, call ``xgb.copy()`` to make copies
    of the model object and then call predict.
Parameters
----------
data : DMatrix
The dmatrix storing the input.
ntree_limit : int
Limit number of trees in the prediction; defaults to best_ntree_limit if defined
(i.e. it has been trained with early stopping), otherwise 0 (use all trees).
validate_features : bool
When this is True, validate that the Booster's and data's feature_names are identical.
Otherwise, it is assumed that the feature_names are the same.
Returns
-------
prediction : numpy array
a numpy array with the probability of each data example being of a given class.
"""
test_dmatrix = DMatrix(data, missing=self.missing, nthread=self.n_jobs)
if ntree_limit is None:
ntree_limit = getattr(self, 'best_ntree_limit', 0) # depends on [control=['if'], data=['ntree_limit']]
class_probs = self.get_booster().predict(test_dmatrix, ntree_limit=ntree_limit, validate_features=validate_features)
if self.objective == 'multi:softprob':
return class_probs # depends on [control=['if'], data=[]]
classone_probs = class_probs
classzero_probs = 1.0 - classone_probs
return np.vstack((classzero_probs, classone_probs)).transpose()
|
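A minimal usage sketch for the predict_proba entry above, assuming the xgboost scikit-learn wrapper (XGBClassifier) and hypothetical toy data; for the binary case the two returned columns are P(class 0) and P(class 1):
import numpy as np
from xgboost import XGBClassifier

X = np.random.rand(20, 4)                      # hypothetical features
y = np.random.randint(0, 2, size=20)           # hypothetical binary labels
clf = XGBClassifier(n_estimators=5).fit(X, y)
proba = clf.predict_proba(X)                   # shape (20, 2)
assert np.allclose(proba.sum(axis=1), 1.0)     # each row is a probability pair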
def add_months(months, timestamp=datetime.datetime.utcnow()):
    """Add a number of months to a timestamp"""
    # NOTE: the default argument is evaluated once, at import time, so calls
    # that omit `timestamp` all reuse the module-load moment; pass an explicit
    # timestamp (or refactor to a None default) for true "now" semantics.
month = timestamp.month
new_month = month + months
years = 0
while new_month < 1:
new_month += 12
years -= 1
while new_month > 12:
new_month -= 12
years += 1
# month = timestamp.month
year = timestamp.year + years
try:
return datetime.datetime(year, new_month, timestamp.day, timestamp.hour, timestamp.minute, timestamp.second)
except ValueError:
# This means that the day exceeds the last day of the month, i.e. it is 30th March, and we are finding the day
# 1 month ago, and it is trying to return 30th February
if months > 0:
# We are adding, so use the first day of the next month
new_month += 1
if new_month > 12:
new_month -= 12
year += 1
return datetime.datetime(year, new_month, 1, timestamp.hour, timestamp.minute, timestamp.second)
else:
# We are subtracting - use the last day of the same month
new_day = calendar.monthrange(year, new_month)[1]
return datetime.datetime(year, new_month, new_day, timestamp.hour, timestamp.minute, timestamp.second)
|
def function[add_months, parameter[months, timestamp]]:
constant[Add a number of months to a timestamp]
variable[month] assign[=] name[timestamp].month
variable[new_month] assign[=] binary_operation[name[month] + name[months]]
variable[years] assign[=] constant[0]
while compare[name[new_month] less[<] constant[1]] begin[:]
<ast.AugAssign object at 0x7da1b26ad2a0>
<ast.AugAssign object at 0x7da1b26ac580>
while compare[name[new_month] greater[>] constant[12]] begin[:]
<ast.AugAssign object at 0x7da1b26afd00>
<ast.AugAssign object at 0x7da1b26aea40>
variable[year] assign[=] binary_operation[name[timestamp].year + name[years]]
<ast.Try object at 0x7da1b26afa90>
|
keyword[def] identifier[add_months] ( identifier[months] , identifier[timestamp] = identifier[datetime] . identifier[datetime] . identifier[utcnow] ()):
literal[string]
identifier[month] = identifier[timestamp] . identifier[month]
identifier[new_month] = identifier[month] + identifier[months]
identifier[years] = literal[int]
keyword[while] identifier[new_month] < literal[int] :
identifier[new_month] += literal[int]
identifier[years] -= literal[int]
keyword[while] identifier[new_month] > literal[int] :
identifier[new_month] -= literal[int]
identifier[years] += literal[int]
identifier[year] = identifier[timestamp] . identifier[year] + identifier[years]
keyword[try] :
keyword[return] identifier[datetime] . identifier[datetime] ( identifier[year] , identifier[new_month] , identifier[timestamp] . identifier[day] , identifier[timestamp] . identifier[hour] , identifier[timestamp] . identifier[minute] , identifier[timestamp] . identifier[second] )
keyword[except] identifier[ValueError] :
keyword[if] identifier[months] > literal[int] :
identifier[new_month] += literal[int]
keyword[if] identifier[new_month] > literal[int] :
identifier[new_month] -= literal[int]
identifier[year] += literal[int]
keyword[return] identifier[datetime] . identifier[datetime] ( identifier[year] , identifier[new_month] , literal[int] , identifier[timestamp] . identifier[hour] , identifier[timestamp] . identifier[minute] , identifier[timestamp] . identifier[second] )
keyword[else] :
identifier[new_day] = identifier[calendar] . identifier[monthrange] ( identifier[year] , identifier[new_month] )[ literal[int] ]
keyword[return] identifier[datetime] . identifier[datetime] ( identifier[year] , identifier[new_month] , identifier[new_day] , identifier[timestamp] . identifier[hour] , identifier[timestamp] . identifier[minute] , identifier[timestamp] . identifier[second] )
|
def add_months(months, timestamp=datetime.datetime.utcnow()):
"""Add a number of months to a timestamp"""
month = timestamp.month
new_month = month + months
years = 0
while new_month < 1:
new_month += 12
years -= 1 # depends on [control=['while'], data=['new_month']]
while new_month > 12:
new_month -= 12
years += 1 # depends on [control=['while'], data=['new_month']]
# month = timestamp.month
year = timestamp.year + years
try:
return datetime.datetime(year, new_month, timestamp.day, timestamp.hour, timestamp.minute, timestamp.second) # depends on [control=['try'], data=[]]
except ValueError:
# This means that the day exceeds the last day of the month, i.e. it is 30th March, and we are finding the day
# 1 month ago, and it is trying to return 30th February
if months > 0:
# We are adding, so use the first day of the next month
new_month += 1
if new_month > 12:
new_month -= 12
year += 1 # depends on [control=['if'], data=['new_month']]
return datetime.datetime(year, new_month, 1, timestamp.hour, timestamp.minute, timestamp.second) # depends on [control=['if'], data=[]]
else:
# We are subtracting - use the last day of the same month
new_day = calendar.monthrange(year, new_month)[1]
return datetime.datetime(year, new_month, new_day, timestamp.hour, timestamp.minute, timestamp.second) # depends on [control=['except'], data=[]]
|
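A quick, standalone demonstration of the day-clamping behavior described in the except-branch comments above (assuming add_months and its datetime/calendar imports are in scope):
import datetime

# Adding: Jan 31 + 1 month would be Feb 31, so the function rolls
# forward to the first day of the following month.
print(add_months(1, datetime.datetime(2023, 1, 31, 12, 0, 0)))   # 2023-03-01 12:00:00

# Subtracting: Mar 30 - 1 month would be Feb 30, so the function
# clamps to the last day of February instead.
print(add_months(-1, datetime.datetime(2023, 3, 30, 12, 0, 0)))  # 2023-02-28 12:00:00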
def get_wegsegment_by_id(self, id):
'''
Retrieve a `wegsegment` by the Id.
:param integer id: the Id of the `wegsegment`
:rtype: :class:`Wegsegment`
'''
def creator():
res = crab_gateway_request(
self.client,
'GetWegsegmentByIdentificatorWegsegment', id
)
            if res is None:
raise GatewayResourceNotFoundException()
return Wegsegment(
res.IdentificatorWegsegment,
res.StatusWegsegment,
res.GeometriemethodeWegsegment,
res.Geometrie,
Metadata(
res.BeginDatum,
res.BeginTijd,
self.get_bewerking(res.BeginBewerking),
self.get_organisatie(res.BeginOrganisatie)
)
)
if self.caches['short'].is_configured:
key = 'GetWegsegmentByIdentificatorWegsegment#%s' % (id)
wegsegment = self.caches['short'].get_or_create(key, creator)
else:
wegsegment = creator()
wegsegment.set_gateway(self)
return wegsegment
|
def function[get_wegsegment_by_id, parameter[self, id]]:
constant[
Retrieve a `wegsegment` by the Id.
:param integer id: the Id of the `wegsegment`
:rtype: :class:`Wegsegment`
]
def function[creator, parameter[]]:
variable[res] assign[=] call[name[crab_gateway_request], parameter[name[self].client, constant[GetWegsegmentByIdentificatorWegsegment], name[id]]]
if compare[name[res] equal[==] constant[None]] begin[:]
<ast.Raise object at 0x7da1b0aedba0>
return[call[name[Wegsegment], parameter[name[res].IdentificatorWegsegment, name[res].StatusWegsegment, name[res].GeometriemethodeWegsegment, name[res].Geometrie, call[name[Metadata], parameter[name[res].BeginDatum, name[res].BeginTijd, call[name[self].get_bewerking, parameter[name[res].BeginBewerking]], call[name[self].get_organisatie, parameter[name[res].BeginOrganisatie]]]]]]]
if call[name[self].caches][constant[short]].is_configured begin[:]
variable[key] assign[=] binary_operation[constant[GetWegsegmentByIdentificatorWegsegment#%s] <ast.Mod object at 0x7da2590d6920> name[id]]
variable[wegsegment] assign[=] call[call[name[self].caches][constant[short]].get_or_create, parameter[name[key], name[creator]]]
call[name[wegsegment].set_gateway, parameter[name[self]]]
return[name[wegsegment]]
|
keyword[def] identifier[get_wegsegment_by_id] ( identifier[self] , identifier[id] ):
literal[string]
keyword[def] identifier[creator] ():
identifier[res] = identifier[crab_gateway_request] (
identifier[self] . identifier[client] ,
literal[string] , identifier[id]
)
keyword[if] identifier[res] == keyword[None] :
keyword[raise] identifier[GatewayResourceNotFoundException] ()
keyword[return] identifier[Wegsegment] (
identifier[res] . identifier[IdentificatorWegsegment] ,
identifier[res] . identifier[StatusWegsegment] ,
identifier[res] . identifier[GeometriemethodeWegsegment] ,
identifier[res] . identifier[Geometrie] ,
identifier[Metadata] (
identifier[res] . identifier[BeginDatum] ,
identifier[res] . identifier[BeginTijd] ,
identifier[self] . identifier[get_bewerking] ( identifier[res] . identifier[BeginBewerking] ),
identifier[self] . identifier[get_organisatie] ( identifier[res] . identifier[BeginOrganisatie] )
)
)
keyword[if] identifier[self] . identifier[caches] [ literal[string] ]. identifier[is_configured] :
identifier[key] = literal[string] %( identifier[id] )
identifier[wegsegment] = identifier[self] . identifier[caches] [ literal[string] ]. identifier[get_or_create] ( identifier[key] , identifier[creator] )
keyword[else] :
identifier[wegsegment] = identifier[creator] ()
identifier[wegsegment] . identifier[set_gateway] ( identifier[self] )
keyword[return] identifier[wegsegment]
|
def get_wegsegment_by_id(self, id):
"""
Retrieve a `wegsegment` by the Id.
:param integer id: the Id of the `wegsegment`
:rtype: :class:`Wegsegment`
"""
def creator():
res = crab_gateway_request(self.client, 'GetWegsegmentByIdentificatorWegsegment', id)
        if res is None:
raise GatewayResourceNotFoundException() # depends on [control=['if'], data=[]]
return Wegsegment(res.IdentificatorWegsegment, res.StatusWegsegment, res.GeometriemethodeWegsegment, res.Geometrie, Metadata(res.BeginDatum, res.BeginTijd, self.get_bewerking(res.BeginBewerking), self.get_organisatie(res.BeginOrganisatie)))
if self.caches['short'].is_configured:
key = 'GetWegsegmentByIdentificatorWegsegment#%s' % id
wegsegment = self.caches['short'].get_or_create(key, creator) # depends on [control=['if'], data=[]]
else:
wegsegment = creator()
wegsegment.set_gateway(self)
return wegsegment
|
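A hedged usage sketch for the gateway method above, assuming it lives on crabpy's CrabGateway (consistent with the crab_gateway_request/Wegsegment names) and using a hypothetical segment id; the client needs network access to the CRAB service:
from crabpy.client import crab_factory
from crabpy.gateway.crab import CrabGateway

g = CrabGateway(crab_factory())
segment = g.get_wegsegment_by_id(112734)  # hypothetical id
print(segment.id)                         # the gateway is set, so lazy fields can resolve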
def rebuild( self ):
"""
Rebuilds the user interface buttons for this widget.
"""
self.setUpdatesEnabled(False)
# sync up the toolbuttons with our actions
actions = self._actionGroup.actions()
btns = self.findChildren(QToolButton)
horiz = self.direction() in (QBoxLayout.LeftToRight,
QBoxLayout.RightToLeft)
# remove unnecessary buttons
if len(actions) < len(btns):
rem_btns = btns[len(actions)-1:]
btns = btns[:len(actions)]
for btn in rem_btns:
btn.close()
btn.setParent(None)
btn.deleteLater()
# create new buttons
elif len(btns) < len(actions):
for i in range(len(btns), len(actions)):
btn = QToolButton(self)
btn.setAutoFillBackground(True)
btns.append(btn)
self.layout().addWidget(btn)
btn.clicked.connect(self.emitClicked)
# determine coloring options
palette = self.palette()
checked = palette.color(palette.Highlight)
checked_fg = palette.color(palette.HighlightedText)
unchecked = palette.color(palette.Button)
unchecked_fg = palette.color(palette.ButtonText)
border = palette.color(palette.Mid)
# define the stylesheet options
options = {}
options['top_left_radius'] = 0
options['top_right_radius'] = 0
options['bot_left_radius'] = 0
options['bot_right_radius'] = 0
options['border_color'] = border.name()
options['checked_fg'] = checked_fg.name()
options['checked_bg'] = checked.name()
options['checked_bg_alt'] = checked.darker(120).name()
options['unchecked_fg'] = unchecked_fg.name()
options['unchecked_bg'] = unchecked.name()
options['unchecked_bg_alt'] = unchecked.darker(120).name()
options['padding_top'] = 1
options['padding_bottom'] = 1
options['padding_left'] = 1
options['padding_right'] = 1
if horiz:
options['x1'] = 0
options['y1'] = 0
options['x2'] = 0
options['y2'] = 1
else:
options['x1'] = 0
options['y1'] = 0
options['x2'] = 1
options['y2'] = 1
# sync up the actions and buttons
count = len(actions)
palette = self.palette()
font = self.font()
for i, action in enumerate(actions):
btn = btns[i]
# assign the action for this button
if btn.defaultAction() != action:
# clear out any existing actions
for act in btn.actions():
btn.removeAction(act)
# assign the given action
btn.setDefaultAction(action)
options['top_left_radius'] = 1
options['bot_left_radius'] = 1
options['top_right_radius'] = 1
options['bot_right_radius'] = 1
if horiz:
options['padding_left'] = self._padding
options['padding_right'] = self._padding
else:
options['padding_top'] = self._padding
options['padding_bottom'] = self._padding
if not i:
if horiz:
options['top_left_radius'] = self.cornerRadius()
options['bot_left_radius'] = self.cornerRadius()
options['padding_left'] += self.cornerRadius() / 3.0
else:
options['top_left_radius'] = self.cornerRadius()
options['top_right_radius'] = self.cornerRadius()
options['padding_top'] += self.cornerRadius() / 3.0
if i == count - 1:
if horiz:
options['top_right_radius'] = self.cornerRadius()
options['bot_right_radius'] = self.cornerRadius()
options['padding_right'] += self.cornerRadius() / 3.0
else:
options['bot_left_radius'] = self.cornerRadius()
options['bot_right_radius'] = self.cornerRadius()
options['padding_bottom'] += self.cornerRadius() / 3.0
btn.setFont(font)
btn.setPalette(palette)
btn.setStyleSheet(TOOLBUTTON_STYLE % options)
if horiz:
btn.setSizePolicy(QSizePolicy.Expanding, QSizePolicy.Preferred)
else:
btn.setSizePolicy(QSizePolicy.Preferred, QSizePolicy.Expanding)
self.setUpdatesEnabled(True)
|
def function[rebuild, parameter[self]]:
constant[
Rebuilds the user interface buttons for this widget.
]
call[name[self].setUpdatesEnabled, parameter[constant[False]]]
variable[actions] assign[=] call[name[self]._actionGroup.actions, parameter[]]
variable[btns] assign[=] call[name[self].findChildren, parameter[name[QToolButton]]]
variable[horiz] assign[=] compare[call[name[self].direction, parameter[]] in tuple[[<ast.Attribute object at 0x7da1b253a4d0>, <ast.Attribute object at 0x7da1b253b1c0>]]]
if compare[call[name[len], parameter[name[actions]]] less[<] call[name[len], parameter[name[btns]]]] begin[:]
variable[rem_btns] assign[=] call[name[btns]][<ast.Slice object at 0x7da18f00cf70>]
variable[btns] assign[=] call[name[btns]][<ast.Slice object at 0x7da18f00f8b0>]
for taget[name[btn]] in starred[name[rem_btns]] begin[:]
call[name[btn].close, parameter[]]
call[name[btn].setParent, parameter[constant[None]]]
call[name[btn].deleteLater, parameter[]]
variable[palette] assign[=] call[name[self].palette, parameter[]]
variable[checked] assign[=] call[name[palette].color, parameter[name[palette].Highlight]]
variable[checked_fg] assign[=] call[name[palette].color, parameter[name[palette].HighlightedText]]
variable[unchecked] assign[=] call[name[palette].color, parameter[name[palette].Button]]
variable[unchecked_fg] assign[=] call[name[palette].color, parameter[name[palette].ButtonText]]
variable[border] assign[=] call[name[palette].color, parameter[name[palette].Mid]]
variable[options] assign[=] dictionary[[], []]
call[name[options]][constant[top_left_radius]] assign[=] constant[0]
call[name[options]][constant[top_right_radius]] assign[=] constant[0]
call[name[options]][constant[bot_left_radius]] assign[=] constant[0]
call[name[options]][constant[bot_right_radius]] assign[=] constant[0]
call[name[options]][constant[border_color]] assign[=] call[name[border].name, parameter[]]
call[name[options]][constant[checked_fg]] assign[=] call[name[checked_fg].name, parameter[]]
call[name[options]][constant[checked_bg]] assign[=] call[name[checked].name, parameter[]]
call[name[options]][constant[checked_bg_alt]] assign[=] call[call[name[checked].darker, parameter[constant[120]]].name, parameter[]]
call[name[options]][constant[unchecked_fg]] assign[=] call[name[unchecked_fg].name, parameter[]]
call[name[options]][constant[unchecked_bg]] assign[=] call[name[unchecked].name, parameter[]]
call[name[options]][constant[unchecked_bg_alt]] assign[=] call[call[name[unchecked].darker, parameter[constant[120]]].name, parameter[]]
call[name[options]][constant[padding_top]] assign[=] constant[1]
call[name[options]][constant[padding_bottom]] assign[=] constant[1]
call[name[options]][constant[padding_left]] assign[=] constant[1]
call[name[options]][constant[padding_right]] assign[=] constant[1]
if name[horiz] begin[:]
call[name[options]][constant[x1]] assign[=] constant[0]
call[name[options]][constant[y1]] assign[=] constant[0]
call[name[options]][constant[x2]] assign[=] constant[0]
call[name[options]][constant[y2]] assign[=] constant[1]
variable[count] assign[=] call[name[len], parameter[name[actions]]]
variable[palette] assign[=] call[name[self].palette, parameter[]]
variable[font] assign[=] call[name[self].font, parameter[]]
for taget[tuple[[<ast.Name object at 0x7da20c794af0>, <ast.Name object at 0x7da20c7942e0>]]] in starred[call[name[enumerate], parameter[name[actions]]]] begin[:]
variable[btn] assign[=] call[name[btns]][name[i]]
if compare[call[name[btn].defaultAction, parameter[]] not_equal[!=] name[action]] begin[:]
for taget[name[act]] in starred[call[name[btn].actions, parameter[]]] begin[:]
call[name[btn].removeAction, parameter[name[act]]]
call[name[btn].setDefaultAction, parameter[name[action]]]
call[name[options]][constant[top_left_radius]] assign[=] constant[1]
call[name[options]][constant[bot_left_radius]] assign[=] constant[1]
call[name[options]][constant[top_right_radius]] assign[=] constant[1]
call[name[options]][constant[bot_right_radius]] assign[=] constant[1]
if name[horiz] begin[:]
call[name[options]][constant[padding_left]] assign[=] name[self]._padding
call[name[options]][constant[padding_right]] assign[=] name[self]._padding
if <ast.UnaryOp object at 0x7da20c795750> begin[:]
if name[horiz] begin[:]
call[name[options]][constant[top_left_radius]] assign[=] call[name[self].cornerRadius, parameter[]]
call[name[options]][constant[bot_left_radius]] assign[=] call[name[self].cornerRadius, parameter[]]
<ast.AugAssign object at 0x7da20c794250>
if compare[name[i] equal[==] binary_operation[name[count] - constant[1]]] begin[:]
if name[horiz] begin[:]
call[name[options]][constant[top_right_radius]] assign[=] call[name[self].cornerRadius, parameter[]]
call[name[options]][constant[bot_right_radius]] assign[=] call[name[self].cornerRadius, parameter[]]
<ast.AugAssign object at 0x7da204346950>
call[name[btn].setFont, parameter[name[font]]]
call[name[btn].setPalette, parameter[name[palette]]]
call[name[btn].setStyleSheet, parameter[binary_operation[name[TOOLBUTTON_STYLE] <ast.Mod object at 0x7da2590d6920> name[options]]]]
if name[horiz] begin[:]
call[name[btn].setSizePolicy, parameter[name[QSizePolicy].Expanding, name[QSizePolicy].Preferred]]
call[name[self].setUpdatesEnabled, parameter[constant[True]]]
|
keyword[def] identifier[rebuild] ( identifier[self] ):
literal[string]
identifier[self] . identifier[setUpdatesEnabled] ( keyword[False] )
identifier[actions] = identifier[self] . identifier[_actionGroup] . identifier[actions] ()
identifier[btns] = identifier[self] . identifier[findChildren] ( identifier[QToolButton] )
identifier[horiz] = identifier[self] . identifier[direction] () keyword[in] ( identifier[QBoxLayout] . identifier[LeftToRight] ,
identifier[QBoxLayout] . identifier[RightToLeft] )
keyword[if] identifier[len] ( identifier[actions] )< identifier[len] ( identifier[btns] ):
identifier[rem_btns] = identifier[btns] [ identifier[len] ( identifier[actions] )- literal[int] :]
identifier[btns] = identifier[btns] [: identifier[len] ( identifier[actions] )]
keyword[for] identifier[btn] keyword[in] identifier[rem_btns] :
identifier[btn] . identifier[close] ()
identifier[btn] . identifier[setParent] ( keyword[None] )
identifier[btn] . identifier[deleteLater] ()
keyword[elif] identifier[len] ( identifier[btns] )< identifier[len] ( identifier[actions] ):
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[len] ( identifier[btns] ), identifier[len] ( identifier[actions] )):
identifier[btn] = identifier[QToolButton] ( identifier[self] )
identifier[btn] . identifier[setAutoFillBackground] ( keyword[True] )
identifier[btns] . identifier[append] ( identifier[btn] )
identifier[self] . identifier[layout] (). identifier[addWidget] ( identifier[btn] )
identifier[btn] . identifier[clicked] . identifier[connect] ( identifier[self] . identifier[emitClicked] )
identifier[palette] = identifier[self] . identifier[palette] ()
identifier[checked] = identifier[palette] . identifier[color] ( identifier[palette] . identifier[Highlight] )
identifier[checked_fg] = identifier[palette] . identifier[color] ( identifier[palette] . identifier[HighlightedText] )
identifier[unchecked] = identifier[palette] . identifier[color] ( identifier[palette] . identifier[Button] )
identifier[unchecked_fg] = identifier[palette] . identifier[color] ( identifier[palette] . identifier[ButtonText] )
identifier[border] = identifier[palette] . identifier[color] ( identifier[palette] . identifier[Mid] )
identifier[options] ={}
identifier[options] [ literal[string] ]= literal[int]
identifier[options] [ literal[string] ]= literal[int]
identifier[options] [ literal[string] ]= literal[int]
identifier[options] [ literal[string] ]= literal[int]
identifier[options] [ literal[string] ]= identifier[border] . identifier[name] ()
identifier[options] [ literal[string] ]= identifier[checked_fg] . identifier[name] ()
identifier[options] [ literal[string] ]= identifier[checked] . identifier[name] ()
identifier[options] [ literal[string] ]= identifier[checked] . identifier[darker] ( literal[int] ). identifier[name] ()
identifier[options] [ literal[string] ]= identifier[unchecked_fg] . identifier[name] ()
identifier[options] [ literal[string] ]= identifier[unchecked] . identifier[name] ()
identifier[options] [ literal[string] ]= identifier[unchecked] . identifier[darker] ( literal[int] ). identifier[name] ()
identifier[options] [ literal[string] ]= literal[int]
identifier[options] [ literal[string] ]= literal[int]
identifier[options] [ literal[string] ]= literal[int]
identifier[options] [ literal[string] ]= literal[int]
keyword[if] identifier[horiz] :
identifier[options] [ literal[string] ]= literal[int]
identifier[options] [ literal[string] ]= literal[int]
identifier[options] [ literal[string] ]= literal[int]
identifier[options] [ literal[string] ]= literal[int]
keyword[else] :
identifier[options] [ literal[string] ]= literal[int]
identifier[options] [ literal[string] ]= literal[int]
identifier[options] [ literal[string] ]= literal[int]
identifier[options] [ literal[string] ]= literal[int]
identifier[count] = identifier[len] ( identifier[actions] )
identifier[palette] = identifier[self] . identifier[palette] ()
identifier[font] = identifier[self] . identifier[font] ()
keyword[for] identifier[i] , identifier[action] keyword[in] identifier[enumerate] ( identifier[actions] ):
identifier[btn] = identifier[btns] [ identifier[i] ]
keyword[if] identifier[btn] . identifier[defaultAction] ()!= identifier[action] :
keyword[for] identifier[act] keyword[in] identifier[btn] . identifier[actions] ():
identifier[btn] . identifier[removeAction] ( identifier[act] )
identifier[btn] . identifier[setDefaultAction] ( identifier[action] )
identifier[options] [ literal[string] ]= literal[int]
identifier[options] [ literal[string] ]= literal[int]
identifier[options] [ literal[string] ]= literal[int]
identifier[options] [ literal[string] ]= literal[int]
keyword[if] identifier[horiz] :
identifier[options] [ literal[string] ]= identifier[self] . identifier[_padding]
identifier[options] [ literal[string] ]= identifier[self] . identifier[_padding]
keyword[else] :
identifier[options] [ literal[string] ]= identifier[self] . identifier[_padding]
identifier[options] [ literal[string] ]= identifier[self] . identifier[_padding]
keyword[if] keyword[not] identifier[i] :
keyword[if] identifier[horiz] :
identifier[options] [ literal[string] ]= identifier[self] . identifier[cornerRadius] ()
identifier[options] [ literal[string] ]= identifier[self] . identifier[cornerRadius] ()
identifier[options] [ literal[string] ]+= identifier[self] . identifier[cornerRadius] ()/ literal[int]
keyword[else] :
identifier[options] [ literal[string] ]= identifier[self] . identifier[cornerRadius] ()
identifier[options] [ literal[string] ]= identifier[self] . identifier[cornerRadius] ()
identifier[options] [ literal[string] ]+= identifier[self] . identifier[cornerRadius] ()/ literal[int]
keyword[if] identifier[i] == identifier[count] - literal[int] :
keyword[if] identifier[horiz] :
identifier[options] [ literal[string] ]= identifier[self] . identifier[cornerRadius] ()
identifier[options] [ literal[string] ]= identifier[self] . identifier[cornerRadius] ()
identifier[options] [ literal[string] ]+= identifier[self] . identifier[cornerRadius] ()/ literal[int]
keyword[else] :
identifier[options] [ literal[string] ]= identifier[self] . identifier[cornerRadius] ()
identifier[options] [ literal[string] ]= identifier[self] . identifier[cornerRadius] ()
identifier[options] [ literal[string] ]+= identifier[self] . identifier[cornerRadius] ()/ literal[int]
identifier[btn] . identifier[setFont] ( identifier[font] )
identifier[btn] . identifier[setPalette] ( identifier[palette] )
identifier[btn] . identifier[setStyleSheet] ( identifier[TOOLBUTTON_STYLE] % identifier[options] )
keyword[if] identifier[horiz] :
identifier[btn] . identifier[setSizePolicy] ( identifier[QSizePolicy] . identifier[Expanding] , identifier[QSizePolicy] . identifier[Preferred] )
keyword[else] :
identifier[btn] . identifier[setSizePolicy] ( identifier[QSizePolicy] . identifier[Preferred] , identifier[QSizePolicy] . identifier[Expanding] )
identifier[self] . identifier[setUpdatesEnabled] ( keyword[True] )
|
def rebuild(self):
"""
Rebuilds the user interface buttons for this widget.
"""
self.setUpdatesEnabled(False) # sync up the toolbuttons with our actions
actions = self._actionGroup.actions()
btns = self.findChildren(QToolButton)
horiz = self.direction() in (QBoxLayout.LeftToRight, QBoxLayout.RightToLeft) # remove unnecessary buttons
if len(actions) < len(btns):
rem_btns = btns[len(actions) - 1:]
btns = btns[:len(actions)]
for btn in rem_btns:
btn.close()
btn.setParent(None)
btn.deleteLater() # depends on [control=['for'], data=['btn']] # depends on [control=['if'], data=[]] # create new buttons
elif len(btns) < len(actions):
for i in range(len(btns), len(actions)):
btn = QToolButton(self)
btn.setAutoFillBackground(True)
btns.append(btn)
self.layout().addWidget(btn)
btn.clicked.connect(self.emitClicked) # depends on [control=['for'], data=[]] # depends on [control=['if'], data=[]] # determine coloring options
palette = self.palette()
checked = palette.color(palette.Highlight)
checked_fg = palette.color(palette.HighlightedText)
unchecked = palette.color(palette.Button)
unchecked_fg = palette.color(palette.ButtonText)
border = palette.color(palette.Mid) # define the stylesheet options
options = {}
options['top_left_radius'] = 0
options['top_right_radius'] = 0
options['bot_left_radius'] = 0
options['bot_right_radius'] = 0
options['border_color'] = border.name()
options['checked_fg'] = checked_fg.name()
options['checked_bg'] = checked.name()
options['checked_bg_alt'] = checked.darker(120).name()
options['unchecked_fg'] = unchecked_fg.name()
options['unchecked_bg'] = unchecked.name()
options['unchecked_bg_alt'] = unchecked.darker(120).name()
options['padding_top'] = 1
options['padding_bottom'] = 1
options['padding_left'] = 1
options['padding_right'] = 1
if horiz:
options['x1'] = 0
options['y1'] = 0
options['x2'] = 0
options['y2'] = 1 # depends on [control=['if'], data=[]]
else:
options['x1'] = 0
options['y1'] = 0
options['x2'] = 1
options['y2'] = 1 # sync up the actions and buttons
count = len(actions)
palette = self.palette()
font = self.font()
for (i, action) in enumerate(actions):
btn = btns[i] # assign the action for this button
if btn.defaultAction() != action: # clear out any existing actions
for act in btn.actions():
btn.removeAction(act) # depends on [control=['for'], data=['act']] # assign the given action
btn.setDefaultAction(action) # depends on [control=['if'], data=['action']]
options['top_left_radius'] = 1
options['bot_left_radius'] = 1
options['top_right_radius'] = 1
options['bot_right_radius'] = 1
if horiz:
options['padding_left'] = self._padding
options['padding_right'] = self._padding # depends on [control=['if'], data=[]]
else:
options['padding_top'] = self._padding
options['padding_bottom'] = self._padding
if not i:
if horiz:
options['top_left_radius'] = self.cornerRadius()
options['bot_left_radius'] = self.cornerRadius()
options['padding_left'] += self.cornerRadius() / 3.0 # depends on [control=['if'], data=[]]
else:
options['top_left_radius'] = self.cornerRadius()
options['top_right_radius'] = self.cornerRadius()
options['padding_top'] += self.cornerRadius() / 3.0 # depends on [control=['if'], data=[]]
if i == count - 1:
if horiz:
options['top_right_radius'] = self.cornerRadius()
options['bot_right_radius'] = self.cornerRadius()
options['padding_right'] += self.cornerRadius() / 3.0 # depends on [control=['if'], data=[]]
else:
options['bot_left_radius'] = self.cornerRadius()
options['bot_right_radius'] = self.cornerRadius()
options['padding_bottom'] += self.cornerRadius() / 3.0 # depends on [control=['if'], data=[]]
btn.setFont(font)
btn.setPalette(palette)
btn.setStyleSheet(TOOLBUTTON_STYLE % options)
if horiz:
btn.setSizePolicy(QSizePolicy.Expanding, QSizePolicy.Preferred) # depends on [control=['if'], data=[]]
else:
btn.setSizePolicy(QSizePolicy.Preferred, QSizePolicy.Expanding) # depends on [control=['for'], data=[]]
self.setUpdatesEnabled(True)
|
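The stylesheet assembly in rebuild relies on %-formatting with named placeholders; a tiny standalone sketch of that mechanism (the real TOOLBUTTON_STYLE is defined elsewhere, so this template is a hypothetical stand-in using the same option keys):
DEMO_STYLE = ('QToolButton { border: 1px solid %(border_color)s; '
              'border-top-left-radius: %(top_left_radius)spx; '
              'color: %(unchecked_fg)s; }')
options = {'border_color': '#808080', 'top_left_radius': 4, 'unchecked_fg': '#202020'}
print(DEMO_STYLE % options)  # fully substituted Qt stylesheet string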
def _dpi(self, resolution_tag):
"""
Return the dpi value calculated for *resolution_tag*, which can be
either TIFF_TAG.X_RESOLUTION or TIFF_TAG.Y_RESOLUTION. The
calculation is based on the values of both that tag and the
TIFF_TAG.RESOLUTION_UNIT tag in this parser's |_IfdEntries| instance.
"""
ifd_entries = self._ifd_entries
if resolution_tag not in ifd_entries:
return 72
# resolution unit defaults to inches (2)
resolution_unit = (
ifd_entries[TIFF_TAG.RESOLUTION_UNIT]
if TIFF_TAG.RESOLUTION_UNIT in ifd_entries else 2
)
if resolution_unit == 1: # aspect ratio only
return 72
# resolution_unit == 2 for inches, 3 for centimeters
units_per_inch = 1 if resolution_unit == 2 else 2.54
dots_per_unit = ifd_entries[resolution_tag]
return int(round(dots_per_unit * units_per_inch))
|
def function[_dpi, parameter[self, resolution_tag]]:
constant[
Return the dpi value calculated for *resolution_tag*, which can be
either TIFF_TAG.X_RESOLUTION or TIFF_TAG.Y_RESOLUTION. The
calculation is based on the values of both that tag and the
TIFF_TAG.RESOLUTION_UNIT tag in this parser's |_IfdEntries| instance.
]
variable[ifd_entries] assign[=] name[self]._ifd_entries
if compare[name[resolution_tag] <ast.NotIn object at 0x7da2590d7190> name[ifd_entries]] begin[:]
return[constant[72]]
variable[resolution_unit] assign[=] <ast.IfExp object at 0x7da1b1cb1c60>
if compare[name[resolution_unit] equal[==] constant[1]] begin[:]
return[constant[72]]
variable[units_per_inch] assign[=] <ast.IfExp object at 0x7da1b1cb0940>
variable[dots_per_unit] assign[=] call[name[ifd_entries]][name[resolution_tag]]
return[call[name[int], parameter[call[name[round], parameter[binary_operation[name[dots_per_unit] * name[units_per_inch]]]]]]]
|
keyword[def] identifier[_dpi] ( identifier[self] , identifier[resolution_tag] ):
literal[string]
identifier[ifd_entries] = identifier[self] . identifier[_ifd_entries]
keyword[if] identifier[resolution_tag] keyword[not] keyword[in] identifier[ifd_entries] :
keyword[return] literal[int]
identifier[resolution_unit] =(
identifier[ifd_entries] [ identifier[TIFF_TAG] . identifier[RESOLUTION_UNIT] ]
keyword[if] identifier[TIFF_TAG] . identifier[RESOLUTION_UNIT] keyword[in] identifier[ifd_entries] keyword[else] literal[int]
)
keyword[if] identifier[resolution_unit] == literal[int] :
keyword[return] literal[int]
identifier[units_per_inch] = literal[int] keyword[if] identifier[resolution_unit] == literal[int] keyword[else] literal[int]
identifier[dots_per_unit] = identifier[ifd_entries] [ identifier[resolution_tag] ]
keyword[return] identifier[int] ( identifier[round] ( identifier[dots_per_unit] * identifier[units_per_inch] ))
|
def _dpi(self, resolution_tag):
"""
Return the dpi value calculated for *resolution_tag*, which can be
either TIFF_TAG.X_RESOLUTION or TIFF_TAG.Y_RESOLUTION. The
calculation is based on the values of both that tag and the
TIFF_TAG.RESOLUTION_UNIT tag in this parser's |_IfdEntries| instance.
"""
ifd_entries = self._ifd_entries
if resolution_tag not in ifd_entries:
return 72 # depends on [control=['if'], data=[]]
# resolution unit defaults to inches (2)
resolution_unit = ifd_entries[TIFF_TAG.RESOLUTION_UNIT] if TIFF_TAG.RESOLUTION_UNIT in ifd_entries else 2
if resolution_unit == 1: # aspect ratio only
return 72 # depends on [control=['if'], data=[]]
# resolution_unit == 2 for inches, 3 for centimeters
units_per_inch = 1 if resolution_unit == 2 else 2.54
dots_per_unit = ifd_entries[resolution_tag]
return int(round(dots_per_unit * units_per_inch))
|
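A worked instance of the arithmetic in _dpi: a resolution value of 118.11 dots per centimeter (RESOLUTION_UNIT == 3) converts to 300 dpi, since one inch is 2.54 cm (values hypothetical):
dots_per_unit = 118.11   # hypothetical X_RESOLUTION value, dots per centimeter
units_per_inch = 2.54    # resolution_unit == 3 means centimeters
print(int(round(dots_per_unit * units_per_inch)))  # 300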
def read(path, encoding="UTF-8"):
"""Read and return content from file *path*"""
with OPEN_FUNC(path, 'rb') as _file:
cont = _file.read()
return cont.decode(encoding)
|
def function[read, parameter[path, encoding]]:
constant[Read and return content from file *path*]
with call[name[OPEN_FUNC], parameter[name[path], constant[rb]]] begin[:]
variable[cont] assign[=] call[name[_file].read, parameter[]]
return[call[name[cont].decode, parameter[name[encoding]]]]
|
keyword[def] identifier[read] ( identifier[path] , identifier[encoding] = literal[string] ):
literal[string]
keyword[with] identifier[OPEN_FUNC] ( identifier[path] , literal[string] ) keyword[as] identifier[_file] :
identifier[cont] = identifier[_file] . identifier[read] ()
keyword[return] identifier[cont] . identifier[decode] ( identifier[encoding] )
|
def read(path, encoding='UTF-8'):
"""Read and return content from file *path*"""
with OPEN_FUNC(path, 'rb') as _file:
cont = _file.read()
return cont.decode(encoding) # depends on [control=['with'], data=['_file']]
|
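A standalone check of the decode behavior above, substituting the builtin open for OPEN_FUNC (an assumption; the module may bind OPEN_FUNC to a compressed-file opener such as gzip.open) and a hypothetical path:
path = '/tmp/read_demo.txt'  # hypothetical
with open(path, 'wb') as f:
    f.write(u'h\xe9llo'.encode('UTF-8'))
with open(path, 'rb') as f:
    assert f.read().decode('UTF-8') == u'h\xe9llo'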
def start_view_change(self, proposed_view_no: int, continue_vc=False):
"""
Trigger the view change process.
:param proposed_view_no: the new view number after view change.
"""
# TODO: consider moving this to pool manager
# TODO: view change is a special case, which can have different
# implementations - we need to make this logic pluggable
if self.pre_vc_strategy and (not continue_vc):
self.pre_view_change_in_progress = True
self.pre_vc_strategy.prepare_view_change(proposed_view_no)
return
elif self.pre_vc_strategy:
self.pre_vc_strategy.on_strategy_complete()
self.previous_view_no = self.view_no
self.view_no = proposed_view_no
self.pre_view_change_in_progress = False
self.view_change_in_progress = True
self.previous_master_primary = self.provider.current_primary_name()
self.set_defaults()
self._process_vcd_for_future_view()
self.initInsChngThrottling()
self.provider.notify_view_change_start()
self.provider.start_catchup()
|
def function[start_view_change, parameter[self, proposed_view_no, continue_vc]]:
constant[
Trigger the view change process.
:param proposed_view_no: the new view number after view change.
]
if <ast.BoolOp object at 0x7da2047e97b0> begin[:]
name[self].pre_view_change_in_progress assign[=] constant[True]
call[name[self].pre_vc_strategy.prepare_view_change, parameter[name[proposed_view_no]]]
return[None]
name[self].previous_view_no assign[=] name[self].view_no
name[self].view_no assign[=] name[proposed_view_no]
name[self].pre_view_change_in_progress assign[=] constant[False]
name[self].view_change_in_progress assign[=] constant[True]
name[self].previous_master_primary assign[=] call[name[self].provider.current_primary_name, parameter[]]
call[name[self].set_defaults, parameter[]]
call[name[self]._process_vcd_for_future_view, parameter[]]
call[name[self].initInsChngThrottling, parameter[]]
call[name[self].provider.notify_view_change_start, parameter[]]
call[name[self].provider.start_catchup, parameter[]]
|
keyword[def] identifier[start_view_change] ( identifier[self] , identifier[proposed_view_no] : identifier[int] , identifier[continue_vc] = keyword[False] ):
literal[string]
keyword[if] identifier[self] . identifier[pre_vc_strategy] keyword[and] ( keyword[not] identifier[continue_vc] ):
identifier[self] . identifier[pre_view_change_in_progress] = keyword[True]
identifier[self] . identifier[pre_vc_strategy] . identifier[prepare_view_change] ( identifier[proposed_view_no] )
keyword[return]
keyword[elif] identifier[self] . identifier[pre_vc_strategy] :
identifier[self] . identifier[pre_vc_strategy] . identifier[on_strategy_complete] ()
identifier[self] . identifier[previous_view_no] = identifier[self] . identifier[view_no]
identifier[self] . identifier[view_no] = identifier[proposed_view_no]
identifier[self] . identifier[pre_view_change_in_progress] = keyword[False]
identifier[self] . identifier[view_change_in_progress] = keyword[True]
identifier[self] . identifier[previous_master_primary] = identifier[self] . identifier[provider] . identifier[current_primary_name] ()
identifier[self] . identifier[set_defaults] ()
identifier[self] . identifier[_process_vcd_for_future_view] ()
identifier[self] . identifier[initInsChngThrottling] ()
identifier[self] . identifier[provider] . identifier[notify_view_change_start] ()
identifier[self] . identifier[provider] . identifier[start_catchup] ()
|
def start_view_change(self, proposed_view_no: int, continue_vc=False):
"""
Trigger the view change process.
:param proposed_view_no: the new view number after view change.
"""
# TODO: consider moving this to pool manager
# TODO: view change is a special case, which can have different
# implementations - we need to make this logic pluggable
if self.pre_vc_strategy and (not continue_vc):
self.pre_view_change_in_progress = True
self.pre_vc_strategy.prepare_view_change(proposed_view_no)
return # depends on [control=['if'], data=[]]
elif self.pre_vc_strategy:
self.pre_vc_strategy.on_strategy_complete() # depends on [control=['if'], data=[]]
self.previous_view_no = self.view_no
self.view_no = proposed_view_no
self.pre_view_change_in_progress = False
self.view_change_in_progress = True
self.previous_master_primary = self.provider.current_primary_name()
self.set_defaults()
self._process_vcd_for_future_view()
self.initInsChngThrottling()
self.provider.notify_view_change_start()
self.provider.start_catchup()
|
def _ensure_running(name, no_start=False, path=None):
'''
If the container is not currently running, start it. This function returns
the state that the container was in before changing
path
path to the container parent directory
default: /var/lib/lxc (system)
.. versionadded:: 2015.8.0
'''
_ensure_exists(name, path=path)
pre = state(name, path=path)
if pre == 'running':
# This will be a no-op but running the function will give us a pretty
# return dict.
return start(name, path=path)
elif pre == 'stopped':
if no_start:
raise CommandExecutionError(
'Container \'{0}\' is not running'.format(name)
)
return start(name, path=path)
elif pre == 'frozen':
if no_start:
raise CommandExecutionError(
'Container \'{0}\' is not running'.format(name)
)
return unfreeze(name, path=path)
|
def function[_ensure_running, parameter[name, no_start, path]]:
constant[
If the container is not currently running, start it. This function returns
the state that the container was in before changing
path
path to the container parent directory
default: /var/lib/lxc (system)
.. versionadded:: 2015.8.0
]
call[name[_ensure_exists], parameter[name[name]]]
variable[pre] assign[=] call[name[state], parameter[name[name]]]
if compare[name[pre] equal[==] constant[running]] begin[:]
return[call[name[start], parameter[name[name]]]]
|
keyword[def] identifier[_ensure_running] ( identifier[name] , identifier[no_start] = keyword[False] , identifier[path] = keyword[None] ):
literal[string]
identifier[_ensure_exists] ( identifier[name] , identifier[path] = identifier[path] )
identifier[pre] = identifier[state] ( identifier[name] , identifier[path] = identifier[path] )
keyword[if] identifier[pre] == literal[string] :
keyword[return] identifier[start] ( identifier[name] , identifier[path] = identifier[path] )
keyword[elif] identifier[pre] == literal[string] :
keyword[if] identifier[no_start] :
keyword[raise] identifier[CommandExecutionError] (
literal[string] . identifier[format] ( identifier[name] )
)
keyword[return] identifier[start] ( identifier[name] , identifier[path] = identifier[path] )
keyword[elif] identifier[pre] == literal[string] :
keyword[if] identifier[no_start] :
keyword[raise] identifier[CommandExecutionError] (
literal[string] . identifier[format] ( identifier[name] )
)
keyword[return] identifier[unfreeze] ( identifier[name] , identifier[path] = identifier[path] )
|
def _ensure_running(name, no_start=False, path=None):
"""
If the container is not currently running, start it. This function returns
the state that the container was in before changing
path
path to the container parent directory
default: /var/lib/lxc (system)
.. versionadded:: 2015.8.0
"""
_ensure_exists(name, path=path)
pre = state(name, path=path)
if pre == 'running':
# This will be a no-op but running the function will give us a pretty
# return dict.
return start(name, path=path) # depends on [control=['if'], data=[]]
elif pre == 'stopped':
if no_start:
raise CommandExecutionError("Container '{0}' is not running".format(name)) # depends on [control=['if'], data=[]]
return start(name, path=path) # depends on [control=['if'], data=[]]
elif pre == 'frozen':
if no_start:
raise CommandExecutionError("Container '{0}' is not running".format(name)) # depends on [control=['if'], data=[]]
return unfreeze(name, path=path) # depends on [control=['if'], data=[]]
|
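The state dispatch in _ensure_running is easiest to exercise with stubbed neighbors; in this sketch state/start/unfreeze/_ensure_exists are hypothetical stand-ins defined in the same namespace, not the real salt lxc module functions:
def _ensure_exists(name, path=None):
    pass  # stub: pretend the container exists

def state(name, path=None):
    return 'frozen'  # stub: report a frozen container

def start(name, path=None):
    return {'state': {'old': 'frozen', 'new': 'running'}}  # stub

def unfreeze(name, path=None):
    return {'state': {'old': 'frozen', 'new': 'running'}}  # stub

# With pre == 'frozen' and the default no_start=False, the helper unfreezes:
print(_ensure_running('web01'))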
def convert_images_to_pil(self, images, dither, nq=0, images_info=None):
""" convert_images_to_pil(images, nq=0)
Convert images to Paletted PIL images, which can then be
        written to a single animated GIF.
"""
# Convert to PIL images
images2 = []
for im in images:
if isinstance(im, Image.Image):
images2.append(im)
elif np and isinstance(im, np.ndarray):
if im.ndim == 3 and im.shape[2] == 3:
im = Image.fromarray(im, 'RGB')
elif im.ndim == 3 and im.shape[2] == 4:
# im = Image.fromarray(im[:,:,:3],'RGB')
self.transparency = True
im = Image.fromarray(im[:, :, :4], 'RGBA')
elif im.ndim == 2:
im = Image.fromarray(im, 'L')
images2.append(im)
# Convert to paletted PIL images
images, images2 = images2, []
if nq >= 1:
# NeuQuant algorithm
for im in images:
im = im.convert("RGBA") # NQ assumes RGBA
nq_instance = NeuQuant(im, int(nq)) # Learn colors from image
if dither:
im = im.convert("RGB").quantize(palette=nq_instance.palette_image(), colors=255)
else:
im = nq_instance.quantize(im, colors=255) # Use to quantize the image itself
self.transparency = True # since NQ assumes transparency
if self.transparency:
alpha = im.split()[3]
mask = Image.eval(alpha, lambda a: 255 if a <= 128 else 0)
im.paste(255, mask=mask)
images2.append(im)
else:
# for index,im in enumerate(images):
for i in range(len(images)):
im = images[i].convert('RGB').convert('P', palette=Image.ADAPTIVE, dither=dither, colors=255)
if self.transparency:
alpha = images[i].split()[3]
mask = Image.eval(alpha, lambda a: 255 if a <= 128 else 0)
im.paste(255, mask=mask)
images2.append(im)
# Done
return images2
|
def function[convert_images_to_pil, parameter[self, images, dither, nq, images_info]]:
constant[ convert_images_to_pil(images, nq=0)
Convert images to Paletted PIL images, which can then be
        written to a single animated GIF.
]
variable[images2] assign[=] list[[]]
for taget[name[im]] in starred[name[images]] begin[:]
if call[name[isinstance], parameter[name[im], name[Image].Image]] begin[:]
call[name[images2].append, parameter[name[im]]]
<ast.Tuple object at 0x7da20e962dd0> assign[=] tuple[[<ast.Name object at 0x7da20e9604c0>, <ast.List object at 0x7da20e9604f0>]]
if compare[name[nq] greater_or_equal[>=] constant[1]] begin[:]
for taget[name[im]] in starred[name[images]] begin[:]
variable[im] assign[=] call[name[im].convert, parameter[constant[RGBA]]]
variable[nq_instance] assign[=] call[name[NeuQuant], parameter[name[im], call[name[int], parameter[name[nq]]]]]
if name[dither] begin[:]
variable[im] assign[=] call[call[name[im].convert, parameter[constant[RGB]]].quantize, parameter[]]
name[self].transparency assign[=] constant[True]
if name[self].transparency begin[:]
variable[alpha] assign[=] call[call[name[im].split, parameter[]]][constant[3]]
variable[mask] assign[=] call[name[Image].eval, parameter[name[alpha], <ast.Lambda object at 0x7da1b13354b0>]]
call[name[im].paste, parameter[constant[255]]]
call[name[images2].append, parameter[name[im]]]
return[name[images2]]
|
keyword[def] identifier[convert_images_to_pil] ( identifier[self] , identifier[images] , identifier[dither] , identifier[nq] = literal[int] , identifier[images_info] = keyword[None] ):
literal[string]
identifier[images2] =[]
keyword[for] identifier[im] keyword[in] identifier[images] :
keyword[if] identifier[isinstance] ( identifier[im] , identifier[Image] . identifier[Image] ):
identifier[images2] . identifier[append] ( identifier[im] )
keyword[elif] identifier[np] keyword[and] identifier[isinstance] ( identifier[im] , identifier[np] . identifier[ndarray] ):
keyword[if] identifier[im] . identifier[ndim] == literal[int] keyword[and] identifier[im] . identifier[shape] [ literal[int] ]== literal[int] :
identifier[im] = identifier[Image] . identifier[fromarray] ( identifier[im] , literal[string] )
keyword[elif] identifier[im] . identifier[ndim] == literal[int] keyword[and] identifier[im] . identifier[shape] [ literal[int] ]== literal[int] :
identifier[self] . identifier[transparency] = keyword[True]
identifier[im] = identifier[Image] . identifier[fromarray] ( identifier[im] [:,:,: literal[int] ], literal[string] )
keyword[elif] identifier[im] . identifier[ndim] == literal[int] :
identifier[im] = identifier[Image] . identifier[fromarray] ( identifier[im] , literal[string] )
identifier[images2] . identifier[append] ( identifier[im] )
identifier[images] , identifier[images2] = identifier[images2] ,[]
keyword[if] identifier[nq] >= literal[int] :
keyword[for] identifier[im] keyword[in] identifier[images] :
identifier[im] = identifier[im] . identifier[convert] ( literal[string] )
identifier[nq_instance] = identifier[NeuQuant] ( identifier[im] , identifier[int] ( identifier[nq] ))
keyword[if] identifier[dither] :
identifier[im] = identifier[im] . identifier[convert] ( literal[string] ). identifier[quantize] ( identifier[palette] = identifier[nq_instance] . identifier[palette_image] (), identifier[colors] = literal[int] )
keyword[else] :
identifier[im] = identifier[nq_instance] . identifier[quantize] ( identifier[im] , identifier[colors] = literal[int] )
identifier[self] . identifier[transparency] = keyword[True]
keyword[if] identifier[self] . identifier[transparency] :
identifier[alpha] = identifier[im] . identifier[split] ()[ literal[int] ]
identifier[mask] = identifier[Image] . identifier[eval] ( identifier[alpha] , keyword[lambda] identifier[a] : literal[int] keyword[if] identifier[a] <= literal[int] keyword[else] literal[int] )
identifier[im] . identifier[paste] ( literal[int] , identifier[mask] = identifier[mask] )
identifier[images2] . identifier[append] ( identifier[im] )
keyword[else] :
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[len] ( identifier[images] )):
identifier[im] = identifier[images] [ identifier[i] ]. identifier[convert] ( literal[string] ). identifier[convert] ( literal[string] , identifier[palette] = identifier[Image] . identifier[ADAPTIVE] , identifier[dither] = identifier[dither] , identifier[colors] = literal[int] )
keyword[if] identifier[self] . identifier[transparency] :
identifier[alpha] = identifier[images] [ identifier[i] ]. identifier[split] ()[ literal[int] ]
identifier[mask] = identifier[Image] . identifier[eval] ( identifier[alpha] , keyword[lambda] identifier[a] : literal[int] keyword[if] identifier[a] <= literal[int] keyword[else] literal[int] )
identifier[im] . identifier[paste] ( literal[int] , identifier[mask] = identifier[mask] )
identifier[images2] . identifier[append] ( identifier[im] )
keyword[return] identifier[images2]
|
def convert_images_to_pil(self, images, dither, nq=0, images_info=None):
""" convert_images_to_pil(images, nq=0)
Convert images to Paletted PIL images, which can then be
        written to a single animated GIF.
"""
# Convert to PIL images
images2 = []
for im in images:
if isinstance(im, Image.Image):
images2.append(im) # depends on [control=['if'], data=[]]
elif np and isinstance(im, np.ndarray):
if im.ndim == 3 and im.shape[2] == 3:
im = Image.fromarray(im, 'RGB') # depends on [control=['if'], data=[]]
elif im.ndim == 3 and im.shape[2] == 4:
# im = Image.fromarray(im[:,:,:3],'RGB')
self.transparency = True
im = Image.fromarray(im[:, :, :4], 'RGBA') # depends on [control=['if'], data=[]]
elif im.ndim == 2:
im = Image.fromarray(im, 'L') # depends on [control=['if'], data=[]]
images2.append(im) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['im']]
# Convert to paletted PIL images
(images, images2) = (images2, [])
if nq >= 1:
# NeuQuant algorithm
for im in images:
im = im.convert('RGBA') # NQ assumes RGBA
nq_instance = NeuQuant(im, int(nq)) # Learn colors from image
if dither:
im = im.convert('RGB').quantize(palette=nq_instance.palette_image(), colors=255) # depends on [control=['if'], data=[]]
else:
im = nq_instance.quantize(im, colors=255) # Use to quantize the image itself
self.transparency = True # since NQ assumes transparency
if self.transparency:
alpha = im.split()[3]
mask = Image.eval(alpha, lambda a: 255 if a <= 128 else 0)
im.paste(255, mask=mask) # depends on [control=['if'], data=[]]
images2.append(im) # depends on [control=['for'], data=['im']] # depends on [control=['if'], data=['nq']]
else:
# for index,im in enumerate(images):
for i in range(len(images)):
im = images[i].convert('RGB').convert('P', palette=Image.ADAPTIVE, dither=dither, colors=255)
if self.transparency:
alpha = images[i].split()[3]
mask = Image.eval(alpha, lambda a: 255 if a <= 128 else 0)
im.paste(255, mask=mask) # depends on [control=['if'], data=[]]
images2.append(im) # depends on [control=['for'], data=['i']]
# Done
return images2
|
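The non-NeuQuant branch above boils down to PIL's adaptive-palette conversion; a minimal standalone version of that step with a toy numpy array:
import numpy as np
from PIL import Image

arr = (np.random.rand(8, 8, 3) * 255).astype('uint8')  # toy RGB image
im = Image.fromarray(arr, 'RGB')
pal = im.convert('P', palette=Image.ADAPTIVE, colors=255)
assert pal.mode == 'P'  # paletted frame, suitable for GIF output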
def clean(inst):
"""Routine to return FPMU data cleaned to the specified level
Parameters
-----------
inst : (pysat.Instrument)
Instrument class object, whose attribute clean_level is used to return
the desired level of data selectivity.
Returns
--------
Void : (NoneType)
data in inst is modified in-place.
Notes
--------
No cleaning currently available for FPMU
"""
inst.data.replace(-999., np.nan, inplace=True) # Te
    inst.data.replace(-9.9999998e+30, np.nan, inplace=True)  # Ni
return None
|
def function[clean, parameter[inst]]:
constant[Routine to return FPMU data cleaned to the specified level
Parameters
-----------
inst : (pysat.Instrument)
Instrument class object, whose attribute clean_level is used to return
the desired level of data selectivity.
Returns
--------
Void : (NoneType)
data in inst is modified in-place.
Notes
--------
No cleaning currently available for FPMU
]
call[name[inst].data.replace, parameter[<ast.UnaryOp object at 0x7da1b1153430>, name[np].nan]]
call[name[inst].data.replace, parameter[<ast.UnaryOp object at 0x7da1b11509d0>, name[np].nan]]
return[constant[None]]
|
keyword[def] identifier[clean] ( identifier[inst] ):
literal[string]
identifier[inst] . identifier[data] . identifier[replace] (- literal[int] , identifier[np] . identifier[nan] , identifier[inplace] = keyword[True] )
identifier[inst] . identifier[data] . identifier[replace] (- literal[int] , identifier[np] . identifier[nan] , identifier[inplace] = keyword[True] )
keyword[return] keyword[None]
|
def clean(inst):
"""Routine to return FPMU data cleaned to the specified level
Parameters
-----------
inst : (pysat.Instrument)
Instrument class object, whose attribute clean_level is used to return
the desired level of data selectivity.
Returns
--------
Void : (NoneType)
data in inst is modified in-place.
Notes
--------
No cleaning currently available for FPMU
"""
inst.data.replace(-999.0, np.nan, inplace=True) # Te
inst.data.replace(-9.9999998e+30, np.nan, inplace=True) #Ni
return None
|
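The two replace calls in clean are plain pandas operations; a standalone sketch assuming inst.data is a pandas DataFrame (as in pysat), with hypothetical Te/Ni columns:
import numpy as np
import pandas as pd

data = pd.DataFrame({'Te': [1200.0, -999.0], 'Ni': [1e5, -9.9999998e+30]})
data.replace(-999.0, np.nan, inplace=True)          # electron temperature fill value
data.replace(-9.9999998e+30, np.nan, inplace=True)  # ion density fill value
print(data.isna().sum().to_dict())                  # {'Te': 1, 'Ni': 1}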
def _serialize(self, value, ct):
"""Auto-serialization of the value based upon the content-type value.
:param any value: The value to serialize
        :param ietfparse.datastructures.ContentType ct: The content type to serialize
:rtype: str
:raises: ValueError
"""
key = '{}/{}'.format(ct.content_type, ct.content_subtype)
if key not in self._SERIALIZATION_MAP:
raise ValueError('Unsupported content-type: {}'.format(key))
elif not self._SERIALIZATION_MAP[key].get('enabled', True):
self.logger.debug('%s is not enabled in the serialization map',
key)
raise ValueError('Disabled content-type: {}'.format(key))
return self._maybe_invoke_serialization(
self._maybe_decode(value, ct.parameters.get('charset', 'utf-8')),
'dump', key)
|
def function[_serialize, parameter[self, value, ct]]:
constant[Auto-serialization of the value based upon the content-type value.
:param any value: The value to serialize
        :param ietfparse.datastructures.ContentType ct: The content type to serialize
:rtype: str
:raises: ValueError
]
variable[key] assign[=] call[constant[{}/{}].format, parameter[name[ct].content_type, name[ct].content_subtype]]
if compare[name[key] <ast.NotIn object at 0x7da2590d7190> name[self]._SERIALIZATION_MAP] begin[:]
<ast.Raise object at 0x7da18ede4310>
return[call[name[self]._maybe_invoke_serialization, parameter[call[name[self]._maybe_decode, parameter[name[value], call[name[ct].parameters.get, parameter[constant[charset], constant[utf-8]]]]], constant[dump], name[key]]]]
|
keyword[def] identifier[_serialize] ( identifier[self] , identifier[value] , identifier[ct] ):
literal[string]
identifier[key] = literal[string] . identifier[format] ( identifier[ct] . identifier[content_type] , identifier[ct] . identifier[content_subtype] )
keyword[if] identifier[key] keyword[not] keyword[in] identifier[self] . identifier[_SERIALIZATION_MAP] :
keyword[raise] identifier[ValueError] ( literal[string] . identifier[format] ( identifier[key] ))
keyword[elif] keyword[not] identifier[self] . identifier[_SERIALIZATION_MAP] [ identifier[key] ]. identifier[get] ( literal[string] , keyword[True] ):
identifier[self] . identifier[logger] . identifier[debug] ( literal[string] ,
identifier[key] )
keyword[raise] identifier[ValueError] ( literal[string] . identifier[format] ( identifier[key] ))
keyword[return] identifier[self] . identifier[_maybe_invoke_serialization] (
identifier[self] . identifier[_maybe_decode] ( identifier[value] , identifier[ct] . identifier[parameters] . identifier[get] ( literal[string] , literal[string] )),
literal[string] , identifier[key] )
|
def _serialize(self, value, ct):
"""Auto-serialization of the value based upon the content-type value.
:param any value: The value to serialize
        :param ietfparse.datastructures.ContentType ct: The content type to serialize
:rtype: str
:raises: ValueError
"""
key = '{}/{}'.format(ct.content_type, ct.content_subtype)
if key not in self._SERIALIZATION_MAP:
raise ValueError('Unsupported content-type: {}'.format(key)) # depends on [control=['if'], data=['key']]
elif not self._SERIALIZATION_MAP[key].get('enabled', True):
self.logger.debug('%s is not enabled in the serialization map', key)
raise ValueError('Disabled content-type: {}'.format(key)) # depends on [control=['if'], data=[]]
return self._maybe_invoke_serialization(self._maybe_decode(value, ct.parameters.get('charset', 'utf-8')), 'dump', key)
|
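A standalone look at the map-key construction in _serialize, assuming ietfparse's header parser (its parsed ContentType exposes content_type, content_subtype and parameters, matching the attribute access above):
from ietfparse import headers

ct = headers.parse_content_type('application/json; charset=utf-8')
key = '{}/{}'.format(ct.content_type, ct.content_subtype)
assert key == 'application/json'
print(ct.parameters.get('charset', 'utf-8'))  # 'utf-8'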
def generate_docker_bashscript_file(temp_dir, docker_dir, globs, cmd, job_name):
'''
Creates a bashscript to inject into a docker container for the job.
This script wraps the job command(s) given in a bash script, hard links the
outputs and returns an "rc" file containing the exit code. All of this is
done in an effort to parallel the Broad's cromwell engine, which is the
native WDL runner. As they've chosen to write and then run a bashscript for
every command, so shall we.
:param temp_dir: The current directory outside of docker to deposit the
bashscript into, which will be the bind mount that docker
loads files from into its own containerized filesystem.
This is usually the tempDir created by this individual job
using 'tempDir = job.fileStore.getLocalTempDir()'.
:param docker_dir: The working directory inside of the docker container
which is bind mounted to 'temp_dir'. By default this is
'data'.
:param globs: A list of expected output files to retrieve as glob patterns
that will be returned as hard links to the current working
directory.
:param cmd: A bash command to be written into the bash script and run.
:param job_name: The job's name, only used to write in a file name
identifying the script as written for that job.
Will be used to call the script later.
:return: Nothing, but it writes and deposits a bash script in temp_dir
intended to be run inside of a docker container for this job.
'''
wdl_copyright = heredoc_wdl(''' \n
# Borrowed/rewritten from the Broad's Cromwell implementation. As
# that is under a BSD-ish license, I include here the license off
# of their GitHub repo. Thank you Broadies!
# Copyright (c) 2015, Broad Institute, Inc.
# All rights reserved.
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
# * Neither the name Broad Institute, Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE
# make a temp directory w/identifier
''')
prefix_dict = {"docker_dir": docker_dir,
"cmd": cmd}
bashfile_prefix = heredoc_wdl('''
tmpDir=$(mktemp -d /{docker_dir}/execution/tmp.XXXXXX)
chmod 777 $tmpDir
# set destination for java to deposit all of its files
export _JAVA_OPTIONS=-Djava.io.tmpdir=$tmpDir
export TMPDIR=$tmpDir
(
cd /{docker_dir}/execution
{cmd}
)
# gather the input command return code
echo $? > "$tmpDir/rc.tmp"
''', prefix_dict)
bashfile_string = '#!/bin/bash' + wdl_copyright + bashfile_prefix
begin_globbing_string = heredoc_wdl('''
(
mkdir "$tmpDir/globs"
''')
bashfile_string = bashfile_string + begin_globbing_string
for glob_input in globs:
add_this_glob = \
'( ln -L ' + glob_input + \
' "$tmpDir/globs" 2> /dev/null ) || ( ln ' + glob_input + \
' "$tmpDir/globs" )\n'
bashfile_string = bashfile_string + add_this_glob
bashfile_suffix = heredoc_wdl('''
)
# flush RAM to disk
sync
mv "$tmpDir/rc.tmp" "$tmpDir/rc"
chmod -R 777 $tmpDir
''')
bashfile_string = bashfile_string + bashfile_suffix
with open(os.path.join(temp_dir, job_name + '_script.sh'), 'w') as bashfile:
bashfile.write(bashfile_string)
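# Usage sketch for the function above, assuming heredoc_wdl is importable from
# the surrounding module; the docker_dir, globs, command, and job name are
# illustrative values only.
import tempfile

def example_write_script():
    temp_dir = tempfile.mkdtemp()
    generate_docker_bashscript_file(
        temp_dir=temp_dir,
        docker_dir='data',                   # bind-mounted as /data in the container
        globs=['*.bam', '*.bai'],            # outputs hard-linked into $tmpDir/globs
        cmd='samtools sort input.bam -o sorted.bam',
        job_name='sort_job')
    # the container would then run: bash /data/sort_job_script.sh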
|
def function[generate_docker_bashscript_file, parameter[temp_dir, docker_dir, globs, cmd, job_name]]:
constant[
Creates a bashscript to inject into a docker container for the job.
This script wraps the job command(s) given in a bash script, hard links the
outputs and returns an "rc" file containing the exit code. All of this is
done in an effort to parallel the Broad's cromwell engine, which is the
native WDL runner. As they've chosen to write and then run a bashscript for
every command, so shall we.
:param temp_dir: The current directory outside of docker to deposit the
bashscript into, which will be the bind mount that docker
loads files from into its own containerized filesystem.
This is usually the tempDir created by this individual job
using 'tempDir = job.fileStore.getLocalTempDir()'.
:param docker_dir: The working directory inside of the docker container
which is bind mounted to 'temp_dir'. By default this is
'data'.
:param globs: A list of expected output files to retrieve as glob patterns
that will be returned as hard links to the current working
directory.
:param cmd: A bash command to be written into the bash script and run.
:param job_name: The job's name, only used to write in a file name
identifying the script as written for that job.
Will be used to call the script later.
:return: Nothing, but it writes and deposits a bash script in temp_dir
intended to be run inside of a docker container for this job.
]
variable[wdl_copyright] assign[=] call[name[heredoc_wdl], parameter[constant[
# Borrowed/rewritten from the Broad's Cromwell implementation. As
# that is under a BSD-ish license, I include here the license off
# of their GitHub repo. Thank you Broadies!
# Copyright (c) 2015, Broad Institute, Inc.
# All rights reserved.
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
# * Neither the name Broad Institute, Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE
# make a temp directory w/identifier
]]]
variable[prefix_dict] assign[=] dictionary[[<ast.Constant object at 0x7da18dc05450>, <ast.Constant object at 0x7da18dc05ae0>], [<ast.Name object at 0x7da18dc04e20>, <ast.Name object at 0x7da18dc06320>]]
variable[bashfile_prefix] assign[=] call[name[heredoc_wdl], parameter[constant[
tmpDir=$(mktemp -d /{docker_dir}/execution/tmp.XXXXXX)
chmod 777 $tmpDir
# set destination for java to deposit all of its files
export _JAVA_OPTIONS=-Djava.io.tmpdir=$tmpDir
export TMPDIR=$tmpDir
(
cd /{docker_dir}/execution
{cmd}
)
# gather the input command return code
echo $? > "$tmpDir/rc.tmp"
], name[prefix_dict]]]
variable[bashfile_string] assign[=] binary_operation[binary_operation[constant[#!/bin/bash] + name[wdl_copyright]] + name[bashfile_prefix]]
variable[begin_globbing_string] assign[=] call[name[heredoc_wdl], parameter[constant[
(
mkdir "$tmpDir/globs"
]]]
variable[bashfile_string] assign[=] binary_operation[name[bashfile_string] + name[begin_globbing_string]]
for taget[name[glob_input]] in starred[name[globs]] begin[:]
variable[add_this_glob] assign[=] binary_operation[binary_operation[binary_operation[binary_operation[constant[( ln -L ] + name[glob_input]] + constant[ "$tmpDir/globs" 2> /dev/null ) || ( ln ]] + name[glob_input]] + constant[ "$tmpDir/globs" )
]]
variable[bashfile_string] assign[=] binary_operation[name[bashfile_string] + name[add_this_glob]]
variable[bashfile_suffix] assign[=] call[name[heredoc_wdl], parameter[constant[
)
# flush RAM to disk
sync
mv "$tmpDir/rc.tmp" "$tmpDir/rc"
chmod -R 777 $tmpDir
]]]
variable[bashfile_string] assign[=] binary_operation[name[bashfile_string] + name[bashfile_suffix]]
with call[name[open], parameter[call[name[os].path.join, parameter[name[temp_dir], binary_operation[name[job_name] + constant[_script.sh]]]], constant[w]]] begin[:]
call[name[bashfile].write, parameter[name[bashfile_string]]]
|
keyword[def] identifier[generate_docker_bashscript_file] ( identifier[temp_dir] , identifier[docker_dir] , identifier[globs] , identifier[cmd] , identifier[job_name] ):
literal[string]
identifier[wdl_copyright] = identifier[heredoc_wdl] ( literal[string] )
identifier[prefix_dict] ={ literal[string] : identifier[docker_dir] ,
literal[string] : identifier[cmd] }
identifier[bashfile_prefix] = identifier[heredoc_wdl] ( literal[string] , identifier[prefix_dict] )
identifier[bashfile_string] = literal[string] + identifier[wdl_copyright] + identifier[bashfile_prefix]
identifier[begin_globbing_string] = identifier[heredoc_wdl] ( literal[string] )
identifier[bashfile_string] = identifier[bashfile_string] + identifier[begin_globbing_string]
keyword[for] identifier[glob_input] keyword[in] identifier[globs] :
identifier[add_this_glob] = literal[string] + identifier[glob_input] + literal[string] + identifier[glob_input] + literal[string]
identifier[bashfile_string] = identifier[bashfile_string] + identifier[add_this_glob]
identifier[bashfile_suffix] = identifier[heredoc_wdl] ( literal[string] )
identifier[bashfile_string] = identifier[bashfile_string] + identifier[bashfile_suffix]
keyword[with] identifier[open] ( identifier[os] . identifier[path] . identifier[join] ( identifier[temp_dir] , identifier[job_name] + literal[string] ), literal[string] ) keyword[as] identifier[bashfile] :
identifier[bashfile] . identifier[write] ( identifier[bashfile_string] )
|
def generate_docker_bashscript_file(temp_dir, docker_dir, globs, cmd, job_name):
"""
Creates a bashscript to inject into a docker container for the job.
This script wraps the job command(s) given in a bash script, hard links the
outputs and returns an "rc" file containing the exit code. All of this is
done in an effort to parallel the Broad's cromwell engine, which is the
native WDL runner. As they've chosen to write and then run a bashscript for
every command, so shall we.
:param temp_dir: The current directory outside of docker to deposit the
bashscript into, which will be the bind mount that docker
loads files from into its own containerized filesystem.
This is usually the tempDir created by this individual job
using 'tempDir = job.fileStore.getLocalTempDir()'.
:param docker_dir: The working directory inside of the docker container
which is bind mounted to 'temp_dir'. By default this is
'data'.
:param globs: A list of expected output files to retrieve as glob patterns
that will be returned as hard links to the current working
directory.
:param cmd: A bash command to be written into the bash script and run.
:param job_name: The job's name, only used to write in a file name
identifying the script as written for that job.
Will be used to call the script later.
:return: Nothing, but it writes and deposits a bash script in temp_dir
intended to be run inside of a docker container for this job.
"""
wdl_copyright = heredoc_wdl(' \n\n # Borrowed/rewritten from the Broad\'s Cromwell implementation. As \n # that is under a BSD-ish license, I include here the license off \n # of their GitHub repo. Thank you Broadies!\n\n # Copyright (c) 2015, Broad Institute, Inc.\n # All rights reserved.\n\n # Redistribution and use in source and binary forms, with or without\n # modification, are permitted provided that the following conditions are met:\n\n # * Redistributions of source code must retain the above copyright notice, this\n # list of conditions and the following disclaimer.\n\n # * Redistributions in binary form must reproduce the above copyright notice,\n # this list of conditions and the following disclaimer in the documentation\n # and/or other materials provided with the distribution.\n\n # * Neither the name Broad Institute, Inc. nor the names of its\n # contributors may be used to endorse or promote products derived from\n # this software without specific prior written permission.\n\n # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"\n # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE\n # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE\n # DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE\n # FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL\n # DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR\n # SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER\n # CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,\n # OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE\n # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE\n\n # make a temp directory w/identifier\n ')
prefix_dict = {'docker_dir': docker_dir, 'cmd': cmd}
bashfile_prefix = heredoc_wdl('\n tmpDir=$(mktemp -d /{docker_dir}/execution/tmp.XXXXXX)\n chmod 777 $tmpDir\n # set destination for java to deposit all of its files\n export _JAVA_OPTIONS=-Djava.io.tmpdir=$tmpDir\n export TMPDIR=$tmpDir\n\n (\n cd /{docker_dir}/execution\n {cmd}\n )\n\n # gather the input command return code\n echo $? > "$tmpDir/rc.tmp"\n\n ', prefix_dict)
bashfile_string = '#!/bin/bash' + wdl_copyright + bashfile_prefix
begin_globbing_string = heredoc_wdl('\n (\n mkdir "$tmpDir/globs"\n ')
bashfile_string = bashfile_string + begin_globbing_string
for glob_input in globs:
add_this_glob = '( ln -L ' + glob_input + ' "$tmpDir/globs" 2> /dev/null ) || ( ln ' + glob_input + ' "$tmpDir/globs" )\n'
bashfile_string = bashfile_string + add_this_glob # depends on [control=['for'], data=['glob_input']]
bashfile_suffix = heredoc_wdl('\n )\n\n # flush RAM to disk\n sync\n\n mv "$tmpDir/rc.tmp" "$tmpDir/rc"\n chmod -R 777 $tmpDir\n ')
bashfile_string = bashfile_string + bashfile_suffix
with open(os.path.join(temp_dir, job_name + '_script.sh'), 'w') as bashfile:
bashfile.write(bashfile_string) # depends on [control=['with'], data=['bashfile']]
|
def _page_to_text(page):
"""Extract the text from a page.
Args:
page: a unicode string
Returns:
a unicode string
"""
# text start tag looks like "<text ..otherstuff>"
start_pos = page.find(u"<text")
assert start_pos != -1
end_tag_pos = page.find(u">", start_pos)
assert end_tag_pos != -1
end_tag_pos += len(u">")
end_pos = page.find(u"</text>")
if end_pos == -1:
return u""
return page[end_tag_pos:end_pos]
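# Quick check of the tag-slicing above; the sample page strings are made up.
sample = u'<page><text xml:space="preserve">Hello wiki</text></page>'
assert _page_to_text(sample) == u'Hello wiki'
# a page with no closing </text> yields the empty string
assert _page_to_text(u'<page><text >truncated') == u''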
|
def function[_page_to_text, parameter[page]]:
constant[Extract the text from a page.
Args:
page: a unicode string
Returns:
a unicode string
]
variable[start_pos] assign[=] call[name[page].find, parameter[constant[<text]]]
assert[compare[name[start_pos] not_equal[!=] <ast.UnaryOp object at 0x7da1b2099630>]]
variable[end_tag_pos] assign[=] call[name[page].find, parameter[constant[>], name[start_pos]]]
assert[compare[name[end_tag_pos] not_equal[!=] <ast.UnaryOp object at 0x7da1b203d9f0>]]
<ast.AugAssign object at 0x7da1b203cfa0>
variable[end_pos] assign[=] call[name[page].find, parameter[constant[</text>]]]
if compare[name[end_pos] equal[==] <ast.UnaryOp object at 0x7da1b203fc70>] begin[:]
return[constant[]]
return[call[name[page]][<ast.Slice object at 0x7da1b203e260>]]
|
keyword[def] identifier[_page_to_text] ( identifier[page] ):
literal[string]
identifier[start_pos] = identifier[page] . identifier[find] ( literal[string] )
keyword[assert] identifier[start_pos] !=- literal[int]
identifier[end_tag_pos] = identifier[page] . identifier[find] ( literal[string] , identifier[start_pos] )
keyword[assert] identifier[end_tag_pos] !=- literal[int]
identifier[end_tag_pos] += identifier[len] ( literal[string] )
identifier[end_pos] = identifier[page] . identifier[find] ( literal[string] )
keyword[if] identifier[end_pos] ==- literal[int] :
keyword[return] literal[string]
keyword[return] identifier[page] [ identifier[end_tag_pos] : identifier[end_pos] ]
|
def _page_to_text(page):
"""Extract the text from a page.
Args:
page: a unicode string
Returns:
a unicode string
"""
# text start tag looks like "<text ..otherstuff>"
start_pos = page.find(u'<text')
assert start_pos != -1
end_tag_pos = page.find(u'>', start_pos)
assert end_tag_pos != -1
end_tag_pos += len(u'>')
end_pos = page.find(u'</text>')
if end_pos == -1:
return u'' # depends on [control=['if'], data=[]]
return page[end_tag_pos:end_pos]
|
def _register_bench_extension(self, plugin_name, plugin_instance):
"""
Register a bench extension.
:param plugin_name: Plugin name
:param plugin_instance: PluginBase
:return: Nothing
"""
for attr in plugin_instance.get_bench_api().keys():
if hasattr(self.bench, attr):
raise PluginException("Attribute {} already exists in bench! Unable to add "
"plugin {}.".format(attr, plugin_name))
setattr(self.bench, attr, plugin_instance.get_bench_api().get(attr))
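# Hedged sketch of the plugin contract assumed above: get_bench_api() returns
# a dict whose keys become attributes on the bench object. Bench and
# PluginDemo are illustrative stand-ins, not the real classes.
class Bench(object):
    pass

class PluginDemo(object):
    def get_bench_api(self):
        return {'ping': lambda: 'pong'}

bench = Bench()
for attr, impl in PluginDemo().get_bench_api().items():
    setattr(bench, attr, impl)    # would raise PluginException on a name clash
assert bench.ping() == 'pong'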
|
def function[_register_bench_extension, parameter[self, plugin_name, plugin_instance]]:
constant[
Register a bench extension.
:param plugin_name: Plugin name
:param plugin_instance: PluginBase
:return: Nothing
]
for taget[name[attr]] in starred[call[call[name[plugin_instance].get_bench_api, parameter[]].keys, parameter[]]] begin[:]
if call[name[hasattr], parameter[name[self].bench, name[attr]]] begin[:]
<ast.Raise object at 0x7da1b0ebcc10>
call[name[setattr], parameter[name[self].bench, name[attr], call[call[name[plugin_instance].get_bench_api, parameter[]].get, parameter[name[attr]]]]]
|
keyword[def] identifier[_register_bench_extension] ( identifier[self] , identifier[plugin_name] , identifier[plugin_instance] ):
literal[string]
keyword[for] identifier[attr] keyword[in] identifier[plugin_instance] . identifier[get_bench_api] (). identifier[keys] ():
keyword[if] identifier[hasattr] ( identifier[self] . identifier[bench] , identifier[attr] ):
keyword[raise] identifier[PluginException] ( literal[string]
literal[string] . identifier[format] ( identifier[attr] , identifier[plugin_name] ))
identifier[setattr] ( identifier[self] . identifier[bench] , identifier[attr] , identifier[plugin_instance] . identifier[get_bench_api] (). identifier[get] ( identifier[attr] ))
|
def _register_bench_extension(self, plugin_name, plugin_instance):
"""
Register a bench extension.
:param plugin_name: Plugin name
:param plugin_instance: PluginBase
:return: Nothing
"""
for attr in plugin_instance.get_bench_api().keys():
if hasattr(self.bench, attr):
raise PluginException('Attribute {} already exists in bench! Unable to add plugin {}.'.format(attr, plugin_name)) # depends on [control=['if'], data=[]]
setattr(self.bench, attr, plugin_instance.get_bench_api().get(attr)) # depends on [control=['for'], data=['attr']]
|
def shear(cls, x_angle=0, y_angle=0):
"""Create a shear transform along one or both axes.
:param x_angle: Angle in degrees to shear along the x-axis.
:type x_angle: float
:param y_angle: Angle in degrees to shear along the y-axis.
:type y_angle: float
:rtype: Affine
"""
sx = math.tan(math.radians(x_angle))
sy = math.tan(math.radians(y_angle))
return tuple.__new__(cls, (1.0, sy, 0.0, sx, 1.0, 0.0, 0.0, 0.0, 1.0))
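# Numeric check of the construction above: reading the 9-tuple row-major gives
# ((1, sy, 0), (sx, 1, 0), (0, 0, 1)), and tan(45 degrees) is 1, so a
# 45-degree x-shear puts 1.0 at index 3. A sketch of the arithmetic only, not
# tied to any particular Affine API.
import math
sx = math.tan(math.radians(45.0))
sy = math.tan(math.radians(0.0))
matrix = (1.0, sy, 0.0, sx, 1.0, 0.0, 0.0, 0.0, 1.0)
assert abs(matrix[3] - 1.0) < 1e-9 and matrix[1] == 0.0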
|
def function[shear, parameter[cls, x_angle, y_angle]]:
constant[Create a shear transform along one or both axes.
:param x_angle: Angle in degrees to shear along the x-axis.
:type x_angle: float
:param y_angle: Angle in degrees to shear along the y-axis.
:type y_angle: float
:rtype: Affine
]
variable[sx] assign[=] call[name[math].tan, parameter[call[name[math].radians, parameter[name[x_angle]]]]]
variable[sy] assign[=] call[name[math].tan, parameter[call[name[math].radians, parameter[name[y_angle]]]]]
return[call[name[tuple].__new__, parameter[name[cls], tuple[[<ast.Constant object at 0x7da1b03391b0>, <ast.Name object at 0x7da1b0339180>, <ast.Constant object at 0x7da1b0339210>, <ast.Name object at 0x7da1b0339270>, <ast.Constant object at 0x7da1b0339240>, <ast.Constant object at 0x7da1b03391e0>, <ast.Constant object at 0x7da1b0339150>, <ast.Constant object at 0x7da1b03392d0>, <ast.Constant object at 0x7da1b0338bb0>]]]]]
|
keyword[def] identifier[shear] ( identifier[cls] , identifier[x_angle] = literal[int] , identifier[y_angle] = literal[int] ):
literal[string]
identifier[sx] = identifier[math] . identifier[tan] ( identifier[math] . identifier[radians] ( identifier[x_angle] ))
identifier[sy] = identifier[math] . identifier[tan] ( identifier[math] . identifier[radians] ( identifier[y_angle] ))
keyword[return] identifier[tuple] . identifier[__new__] ( identifier[cls] ,( literal[int] , identifier[sy] , literal[int] , identifier[sx] , literal[int] , literal[int] , literal[int] , literal[int] , literal[int] ))
|
def shear(cls, x_angle=0, y_angle=0):
"""Create a shear transform along one or both axes.
:param x_angle: Angle in degrees to shear along the x-axis.
:type x_angle: float
:param y_angle: Angle in degrees to shear along the y-axis.
:type y_angle: float
:rtype: Affine
"""
sx = math.tan(math.radians(x_angle))
sy = math.tan(math.radians(y_angle))
return tuple.__new__(cls, (1.0, sy, 0.0, sx, 1.0, 0.0, 0.0, 0.0, 1.0))
|
def get_hex(self, signed=True):
"""
Given all the data the user has given so far, make the hex using pybitcointools
"""
total_ins_satoshi = self.total_input_satoshis()
if total_ins_satoshi == 0:
raise ValueError("Can't make transaction, there are zero inputs")
    # Note: there can be zero outs (sweep or coalesce transactions)
total_outs_satoshi = sum([x['value'] for x in self.outs])
if not self.fee_satoshi:
self.fee() # use default of $0.02
change_satoshi = total_ins_satoshi - (total_outs_satoshi + self.fee_satoshi)
if change_satoshi < 0:
raise ValueError(
"Input amount (%s) must be more than all output amounts (%s) plus fees (%s). You need more %s."
% (total_ins_satoshi, total_outs_satoshi, self.fee_satoshi, self.crypto.upper())
)
ins = [x['input'] for x in self.ins]
if change_satoshi > 0:
if self.verbose:
print("Adding change address of %s satoshis to %s" % (change_satoshi, self.change_address))
change = [{'value': change_satoshi, 'address': self.change_address}]
else:
change = [] # no change ?!
if self.verbose: print("Inputs == Outputs, no change address needed.")
tx = mktx(ins, self.outs + change)
if signed:
for i, input_data in enumerate(self.ins):
if not input_data['private_key']:
raise Exception("Can't sign transaction, missing private key for input %s" % i)
tx = sign(tx, i, input_data['private_key'])
return tx
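# Worked example of the change arithmetic above with made-up satoshi amounts:
# a 100,000-satoshi input minus a 90,000-satoshi output and a 2,000-satoshi
# fee leaves 8,000 satoshis for the change address; a negative result raises.
total_ins_satoshi = 100000
total_outs_satoshi = 90000
fee_satoshi = 2000
change_satoshi = total_ins_satoshi - (total_outs_satoshi + fee_satoshi)
assert change_satoshi == 8000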
|
def function[get_hex, parameter[self, signed]]:
constant[
Given all the data the user has given so far, make the hex using pybitcointools
]
variable[total_ins_satoshi] assign[=] call[name[self].total_input_satoshis, parameter[]]
if compare[name[total_ins_satoshi] equal[==] constant[0]] begin[:]
<ast.Raise object at 0x7da1b11bdd80>
variable[total_outs_satoshi] assign[=] call[name[sum], parameter[<ast.ListComp object at 0x7da1b11bfdf0>]]
if <ast.UnaryOp object at 0x7da1b11be830> begin[:]
call[name[self].fee, parameter[]]
variable[change_satoshi] assign[=] binary_operation[name[total_ins_satoshi] - binary_operation[name[total_outs_satoshi] + name[self].fee_satoshi]]
if compare[name[change_satoshi] less[<] constant[0]] begin[:]
<ast.Raise object at 0x7da1b11bd390>
variable[ins] assign[=] <ast.ListComp object at 0x7da1b11bdfc0>
if compare[name[change_satoshi] greater[>] constant[0]] begin[:]
if name[self].verbose begin[:]
call[name[print], parameter[binary_operation[constant[Adding change address of %s satoshis to %s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1b11bc3d0>, <ast.Attribute object at 0x7da1b11bd840>]]]]]
variable[change] assign[=] list[[<ast.Dict object at 0x7da1b11bf1f0>]]
variable[tx] assign[=] call[name[mktx], parameter[name[ins], binary_operation[name[self].outs + name[change]]]]
if name[signed] begin[:]
for taget[tuple[[<ast.Name object at 0x7da1b11bf550>, <ast.Name object at 0x7da1b11bc850>]]] in starred[call[name[enumerate], parameter[name[self].ins]]] begin[:]
if <ast.UnaryOp object at 0x7da1b11bcf70> begin[:]
<ast.Raise object at 0x7da1b11bcfa0>
variable[tx] assign[=] call[name[sign], parameter[name[tx], name[i], call[name[input_data]][constant[private_key]]]]
return[name[tx]]
|
keyword[def] identifier[get_hex] ( identifier[self] , identifier[signed] = keyword[True] ):
literal[string]
identifier[total_ins_satoshi] = identifier[self] . identifier[total_input_satoshis] ()
keyword[if] identifier[total_ins_satoshi] == literal[int] :
keyword[raise] identifier[ValueError] ( literal[string] )
identifier[total_outs_satoshi] = identifier[sum] ([ identifier[x] [ literal[string] ] keyword[for] identifier[x] keyword[in] identifier[self] . identifier[outs] ])
keyword[if] keyword[not] identifier[self] . identifier[fee_satoshi] :
identifier[self] . identifier[fee] ()
identifier[change_satoshi] = identifier[total_ins_satoshi] -( identifier[total_outs_satoshi] + identifier[self] . identifier[fee_satoshi] )
keyword[if] identifier[change_satoshi] < literal[int] :
keyword[raise] identifier[ValueError] (
literal[string]
%( identifier[total_ins_satoshi] , identifier[total_outs_satoshi] , identifier[self] . identifier[fee_satoshi] , identifier[self] . identifier[crypto] . identifier[upper] ())
)
identifier[ins] =[ identifier[x] [ literal[string] ] keyword[for] identifier[x] keyword[in] identifier[self] . identifier[ins] ]
keyword[if] identifier[change_satoshi] > literal[int] :
keyword[if] identifier[self] . identifier[verbose] :
identifier[print] ( literal[string] %( identifier[change_satoshi] , identifier[self] . identifier[change_address] ))
identifier[change] =[{ literal[string] : identifier[change_satoshi] , literal[string] : identifier[self] . identifier[change_address] }]
keyword[else] :
identifier[change] =[]
keyword[if] identifier[self] . identifier[verbose] : identifier[print] ( literal[string] )
identifier[tx] = identifier[mktx] ( identifier[ins] , identifier[self] . identifier[outs] + identifier[change] )
keyword[if] identifier[signed] :
keyword[for] identifier[i] , identifier[input_data] keyword[in] identifier[enumerate] ( identifier[self] . identifier[ins] ):
keyword[if] keyword[not] identifier[input_data] [ literal[string] ]:
keyword[raise] identifier[Exception] ( literal[string] % identifier[i] )
identifier[tx] = identifier[sign] ( identifier[tx] , identifier[i] , identifier[input_data] [ literal[string] ])
keyword[return] identifier[tx]
|
def get_hex(self, signed=True):
"""
Given all the data the user has given so far, make the hex using pybitcointools
"""
total_ins_satoshi = self.total_input_satoshis()
if total_ins_satoshi == 0:
raise ValueError("Can't make transaction, there are zero inputs") # depends on [control=['if'], data=[]]
    # Note: there can be zero outs (sweep or coalesce transactions)
total_outs_satoshi = sum([x['value'] for x in self.outs])
if not self.fee_satoshi:
self.fee() # use default of $0.02 # depends on [control=['if'], data=[]]
change_satoshi = total_ins_satoshi - (total_outs_satoshi + self.fee_satoshi)
if change_satoshi < 0:
raise ValueError('Input amount (%s) must be more than all output amounts (%s) plus fees (%s). You need more %s.' % (total_ins_satoshi, total_outs_satoshi, self.fee_satoshi, self.crypto.upper())) # depends on [control=['if'], data=[]]
ins = [x['input'] for x in self.ins]
if change_satoshi > 0:
if self.verbose:
print('Adding change address of %s satoshis to %s' % (change_satoshi, self.change_address)) # depends on [control=['if'], data=[]]
change = [{'value': change_satoshi, 'address': self.change_address}] # depends on [control=['if'], data=['change_satoshi']]
else:
change = [] # no change ?!
if self.verbose:
print('Inputs == Outputs, no change address needed.') # depends on [control=['if'], data=[]]
tx = mktx(ins, self.outs + change)
if signed:
for (i, input_data) in enumerate(self.ins):
if not input_data['private_key']:
raise Exception("Can't sign transaction, missing private key for input %s" % i) # depends on [control=['if'], data=[]]
tx = sign(tx, i, input_data['private_key']) # depends on [control=['for'], data=[]] # depends on [control=['if'], data=[]]
return tx
|
def ajax_only(view_func):
"""Required the view is only accessed via AJAX."""
@wraps(view_func, assigned=available_attrs(view_func))
def _wrapped_view(request, *args, **kwargs):
if request.is_ajax():
return view_func(request, *args, **kwargs)
else:
return http.HttpResponseBadRequest()
return _wrapped_view
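# Hedged usage sketch for the decorator above in a Django view module; the
# view name and payload are illustrative.
#
#     from django.http import JsonResponse
#
#     @ajax_only
#     def search(request):
#         return JsonResponse({'results': []})
#
# Requests without the XMLHttpRequest marker (so request.is_ajax() is False)
# get an http.HttpResponseBadRequest instead of reaching the view body.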
|
def function[ajax_only, parameter[view_func]]:
    constant[Require that the view is only accessed via AJAX.]
def function[_wrapped_view, parameter[request]]:
if call[name[request].is_ajax, parameter[]] begin[:]
return[call[name[view_func], parameter[name[request], <ast.Starred object at 0x7da2041d8f10>]]]
return[name[_wrapped_view]]
|
keyword[def] identifier[ajax_only] ( identifier[view_func] ):
literal[string]
@ identifier[wraps] ( identifier[view_func] , identifier[assigned] = identifier[available_attrs] ( identifier[view_func] ))
keyword[def] identifier[_wrapped_view] ( identifier[request] ,* identifier[args] ,** identifier[kwargs] ):
keyword[if] identifier[request] . identifier[is_ajax] ():
keyword[return] identifier[view_func] ( identifier[request] ,* identifier[args] ,** identifier[kwargs] )
keyword[else] :
keyword[return] identifier[http] . identifier[HttpResponseBadRequest] ()
keyword[return] identifier[_wrapped_view]
|
def ajax_only(view_func):
"""Required the view is only accessed via AJAX."""
@wraps(view_func, assigned=available_attrs(view_func))
def _wrapped_view(request, *args, **kwargs):
if request.is_ajax():
return view_func(request, *args, **kwargs) # depends on [control=['if'], data=[]]
else:
return http.HttpResponseBadRequest()
return _wrapped_view
|
def heuristic_crossover(random, candidates, args):
"""Return the offspring of heuristic crossover on the candidates.
It performs heuristic crossover (HX), which is similar to the
update rule used in particle swarm optimization. This function
also makes use of the bounder function as specified in the EC's
``evolve`` method.
.. note::
This function assumes that candidates can be pickled (for hashing
as keys to a dictionary).
.. Arguments:
random -- the random number generator object
candidates -- the candidate solutions
args -- a dictionary of keyword arguments
Optional keyword arguments in args:
- *crossover_rate* -- the rate at which crossover is performed
(default 1.0)
"""
crossover_rate = args.setdefault('crossover_rate', 1.0)
bounder = args['_ec'].bounder
if len(candidates) % 2 == 1:
candidates = candidates[:-1]
# Since we don't have fitness information in the candidates, we need
# to make a dictionary containing the candidate and its corresponding
# individual in the population.
population = list(args['_ec'].population)
lookup = dict(zip([pickle.dumps(p.candidate, 1) for p in population], population))
moms = candidates[::2]
dads = candidates[1::2]
children = []
for mom, dad in zip(moms, dads):
if random.random() < crossover_rate:
bro = copy.copy(dad)
sis = copy.copy(mom)
mom_is_better = lookup[pickle.dumps(mom, 1)] > lookup[pickle.dumps(dad, 1)]
for i, (m, d) in enumerate(zip(mom, dad)):
negpos = 1 if mom_is_better else -1
val = d if mom_is_better else m
bro[i] = val + random.random() * negpos * (m - d)
sis[i] = val + random.random() * negpos * (m - d)
bro = bounder(bro, args)
sis = bounder(sis, args)
children.append(bro)
children.append(sis)
else:
children.append(mom)
children.append(dad)
return children
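# Worked instance of the per-gene HX update above, with made-up numbers. When
# mom is the fitter parent, mom_is_better is True, so negpos = 1 and val = d:
# the child gene is d + r * (m - d) for uniform r in [0, 1), i.e. a point on
# the segment from the worse parent's gene toward the better parent's.
import random as _rng
m, d = 4.0, 2.0          # mom's and dad's values for one gene
r = _rng.random()
child_gene = d + r * 1 * (m - d)
assert 2.0 <= child_gene < 4.0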
|
def function[heuristic_crossover, parameter[random, candidates, args]]:
constant[Return the offspring of heuristic crossover on the candidates.
It performs heuristic crossover (HX), which is similar to the
update rule used in particle swarm optimization. This function
also makes use of the bounder function as specified in the EC's
``evolve`` method.
.. note::
This function assumes that candidates can be pickled (for hashing
as keys to a dictionary).
.. Arguments:
random -- the random number generator object
candidates -- the candidate solutions
args -- a dictionary of keyword arguments
Optional keyword arguments in args:
- *crossover_rate* -- the rate at which crossover is performed
(default 1.0)
]
variable[crossover_rate] assign[=] call[name[args].setdefault, parameter[constant[crossover_rate], constant[1.0]]]
variable[bounder] assign[=] call[name[args]][constant[_ec]].bounder
if compare[binary_operation[call[name[len], parameter[name[candidates]]] <ast.Mod object at 0x7da2590d6920> constant[2]] equal[==] constant[1]] begin[:]
variable[candidates] assign[=] call[name[candidates]][<ast.Slice object at 0x7da18eb55ae0>]
variable[population] assign[=] call[name[list], parameter[call[name[args]][constant[_ec]].population]]
variable[lookup] assign[=] call[name[dict], parameter[call[name[zip], parameter[<ast.ListComp object at 0x7da18eb55e70>, name[population]]]]]
variable[moms] assign[=] call[name[candidates]][<ast.Slice object at 0x7da18eb547c0>]
variable[dads] assign[=] call[name[candidates]][<ast.Slice object at 0x7da18eb56230>]
variable[children] assign[=] list[[]]
for taget[tuple[[<ast.Name object at 0x7da18eb56560>, <ast.Name object at 0x7da18eb54d60>]]] in starred[call[name[zip], parameter[name[moms], name[dads]]]] begin[:]
if compare[call[name[random].random, parameter[]] less[<] name[crossover_rate]] begin[:]
variable[bro] assign[=] call[name[copy].copy, parameter[name[dad]]]
variable[sis] assign[=] call[name[copy].copy, parameter[name[mom]]]
variable[mom_is_better] assign[=] compare[call[name[lookup]][call[name[pickle].dumps, parameter[name[mom], constant[1]]]] greater[>] call[name[lookup]][call[name[pickle].dumps, parameter[name[dad], constant[1]]]]]
for taget[tuple[[<ast.Name object at 0x7da1b13e0eb0>, <ast.Tuple object at 0x7da1b13e1cc0>]]] in starred[call[name[enumerate], parameter[call[name[zip], parameter[name[mom], name[dad]]]]]] begin[:]
variable[negpos] assign[=] <ast.IfExp object at 0x7da1b13e1810>
variable[val] assign[=] <ast.IfExp object at 0x7da1b13e3af0>
call[name[bro]][name[i]] assign[=] binary_operation[name[val] + binary_operation[binary_operation[call[name[random].random, parameter[]] * name[negpos]] * binary_operation[name[m] - name[d]]]]
call[name[sis]][name[i]] assign[=] binary_operation[name[val] + binary_operation[binary_operation[call[name[random].random, parameter[]] * name[negpos]] * binary_operation[name[m] - name[d]]]]
variable[bro] assign[=] call[name[bounder], parameter[name[bro], name[args]]]
variable[sis] assign[=] call[name[bounder], parameter[name[sis], name[args]]]
call[name[children].append, parameter[name[bro]]]
call[name[children].append, parameter[name[sis]]]
return[name[children]]
|
keyword[def] identifier[heuristic_crossover] ( identifier[random] , identifier[candidates] , identifier[args] ):
literal[string]
identifier[crossover_rate] = identifier[args] . identifier[setdefault] ( literal[string] , literal[int] )
identifier[bounder] = identifier[args] [ literal[string] ]. identifier[bounder]
keyword[if] identifier[len] ( identifier[candidates] )% literal[int] == literal[int] :
identifier[candidates] = identifier[candidates] [:- literal[int] ]
identifier[population] = identifier[list] ( identifier[args] [ literal[string] ]. identifier[population] )
identifier[lookup] = identifier[dict] ( identifier[zip] ([ identifier[pickle] . identifier[dumps] ( identifier[p] . identifier[candidate] , literal[int] ) keyword[for] identifier[p] keyword[in] identifier[population] ], identifier[population] ))
identifier[moms] = identifier[candidates] [:: literal[int] ]
identifier[dads] = identifier[candidates] [ literal[int] :: literal[int] ]
identifier[children] =[]
keyword[for] identifier[mom] , identifier[dad] keyword[in] identifier[zip] ( identifier[moms] , identifier[dads] ):
keyword[if] identifier[random] . identifier[random] ()< identifier[crossover_rate] :
identifier[bro] = identifier[copy] . identifier[copy] ( identifier[dad] )
identifier[sis] = identifier[copy] . identifier[copy] ( identifier[mom] )
identifier[mom_is_better] = identifier[lookup] [ identifier[pickle] . identifier[dumps] ( identifier[mom] , literal[int] )]> identifier[lookup] [ identifier[pickle] . identifier[dumps] ( identifier[dad] , literal[int] )]
keyword[for] identifier[i] ,( identifier[m] , identifier[d] ) keyword[in] identifier[enumerate] ( identifier[zip] ( identifier[mom] , identifier[dad] )):
identifier[negpos] = literal[int] keyword[if] identifier[mom_is_better] keyword[else] - literal[int]
identifier[val] = identifier[d] keyword[if] identifier[mom_is_better] keyword[else] identifier[m]
identifier[bro] [ identifier[i] ]= identifier[val] + identifier[random] . identifier[random] ()* identifier[negpos] *( identifier[m] - identifier[d] )
identifier[sis] [ identifier[i] ]= identifier[val] + identifier[random] . identifier[random] ()* identifier[negpos] *( identifier[m] - identifier[d] )
identifier[bro] = identifier[bounder] ( identifier[bro] , identifier[args] )
identifier[sis] = identifier[bounder] ( identifier[sis] , identifier[args] )
identifier[children] . identifier[append] ( identifier[bro] )
identifier[children] . identifier[append] ( identifier[sis] )
keyword[else] :
identifier[children] . identifier[append] ( identifier[mom] )
identifier[children] . identifier[append] ( identifier[dad] )
keyword[return] identifier[children]
|
def heuristic_crossover(random, candidates, args):
"""Return the offspring of heuristic crossover on the candidates.
It performs heuristic crossover (HX), which is similar to the
update rule used in particle swarm optimization. This function
also makes use of the bounder function as specified in the EC's
``evolve`` method.
.. note::
This function assumes that candidates can be pickled (for hashing
as keys to a dictionary).
.. Arguments:
random -- the random number generator object
candidates -- the candidate solutions
args -- a dictionary of keyword arguments
Optional keyword arguments in args:
- *crossover_rate* -- the rate at which crossover is performed
(default 1.0)
"""
crossover_rate = args.setdefault('crossover_rate', 1.0)
bounder = args['_ec'].bounder
if len(candidates) % 2 == 1:
candidates = candidates[:-1] # depends on [control=['if'], data=[]] # Since we don't have fitness information in the candidates, we need
# to make a dictionary containing the candidate and its corresponding
# individual in the population.
population = list(args['_ec'].population)
lookup = dict(zip([pickle.dumps(p.candidate, 1) for p in population], population))
moms = candidates[::2]
dads = candidates[1::2]
children = []
for (mom, dad) in zip(moms, dads):
if random.random() < crossover_rate:
bro = copy.copy(dad)
sis = copy.copy(mom)
mom_is_better = lookup[pickle.dumps(mom, 1)] > lookup[pickle.dumps(dad, 1)]
for (i, (m, d)) in enumerate(zip(mom, dad)):
negpos = 1 if mom_is_better else -1
val = d if mom_is_better else m
bro[i] = val + random.random() * negpos * (m - d)
sis[i] = val + random.random() * negpos * (m - d) # depends on [control=['for'], data=[]]
bro = bounder(bro, args)
sis = bounder(sis, args)
children.append(bro)
children.append(sis) # depends on [control=['if'], data=[]]
else:
children.append(mom)
children.append(dad) # depends on [control=['for'], data=[]]
return children
|
def _loadServices(self):
"""
Load module services.
:return: <void>
"""
servicesPath = os.path.join(self.path, "service")
if not os.path.isdir(servicesPath):
return
self._scanDirectoryForServices(servicesPath)
|
def function[_loadServices, parameter[self]]:
constant[
Load module services.
:return: <void>
]
variable[servicesPath] assign[=] call[name[os].path.join, parameter[name[self].path, constant[service]]]
if <ast.UnaryOp object at 0x7da18bc701c0> begin[:]
return[None]
call[name[self]._scanDirectoryForServices, parameter[name[servicesPath]]]
|
keyword[def] identifier[_loadServices] ( identifier[self] ):
literal[string]
identifier[servicesPath] = identifier[os] . identifier[path] . identifier[join] ( identifier[self] . identifier[path] , literal[string] )
keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[isdir] ( identifier[servicesPath] ):
keyword[return]
identifier[self] . identifier[_scanDirectoryForServices] ( identifier[servicesPath] )
|
def _loadServices(self):
"""
Load module services.
:return: <void>
"""
servicesPath = os.path.join(self.path, 'service')
if not os.path.isdir(servicesPath):
return # depends on [control=['if'], data=[]]
self._scanDirectoryForServices(servicesPath)
|
def stop_ppp_link(self):
'''stop the link'''
if self.ppp_fd == -1:
return
try:
self.mpself.select_extra.pop(self.ppp_fd)
os.close(self.ppp_fd)
os.waitpid(self.pid, 0)
except Exception:
pass
self.pid = -1
self.ppp_fd = -1
print("stopped ppp link")
|
def function[stop_ppp_link, parameter[self]]:
constant[stop the link]
if compare[name[self].ppp_fd equal[==] <ast.UnaryOp object at 0x7da18ede4730>] begin[:]
return[None]
<ast.Try object at 0x7da18ede4f70>
name[self].pid assign[=] <ast.UnaryOp object at 0x7da18ede6920>
name[self].ppp_fd assign[=] <ast.UnaryOp object at 0x7da1b2347910>
call[name[print], parameter[constant[stopped ppp link]]]
|
keyword[def] identifier[stop_ppp_link] ( identifier[self] ):
literal[string]
keyword[if] identifier[self] . identifier[ppp_fd] ==- literal[int] :
keyword[return]
keyword[try] :
identifier[self] . identifier[mpself] . identifier[select_extra] . identifier[pop] ( identifier[self] . identifier[ppp_fd] )
identifier[os] . identifier[close] ( identifier[self] . identifier[ppp_fd] )
identifier[os] . identifier[waitpid] ( identifier[self] . identifier[pid] , literal[int] )
keyword[except] identifier[Exception] :
keyword[pass]
identifier[self] . identifier[pid] =- literal[int]
identifier[self] . identifier[ppp_fd] =- literal[int]
identifier[print] ( literal[string] )
|
def stop_ppp_link(self):
"""stop the link"""
if self.ppp_fd == -1:
return # depends on [control=['if'], data=[]]
try:
self.mpself.select_extra.pop(self.ppp_fd)
os.close(self.ppp_fd)
os.waitpid(self.pid, 0) # depends on [control=['try'], data=[]]
except Exception:
pass # depends on [control=['except'], data=[]]
self.pid = -1
self.ppp_fd = -1
print('stopped ppp link')
|
def from_inline(cls: Type[RevocationType], version: int, currency: str, inline: str) -> RevocationType:
"""
Return Revocation document instance from inline string
Only self.pubkey is populated.
You must populate self.identity with an Identity instance to use raw/sign/signed_raw methods
:param version: Version number
:param currency: Name of the currency
:param inline: Inline document
:return:
"""
cert_data = Revocation.re_inline.match(inline)
if cert_data is None:
raise MalformedDocumentError("Revokation")
pubkey = cert_data.group(1)
signature = cert_data.group(2)
return cls(version, currency, pubkey, signature)
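# Hedged usage sketch; the inline string below is a placeholder shaped like
# "PUBKEY:SIGNATURE", which is the form re_inline is presumed to match, with
# made-up, shortened base58/base64 values.
#
#     rev = Revocation.from_inline(version=2, currency="g1",
#                                  inline="7Aqw...Gva:YB0H...A==")
#     rev.pubkey     # -> "7Aqw...Gva"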
|
def function[from_inline, parameter[cls, version, currency, inline]]:
constant[
Return Revocation document instance from inline string
Only self.pubkey is populated.
You must populate self.identity with an Identity instance to use raw/sign/signed_raw methods
:param version: Version number
:param currency: Name of the currency
:param inline: Inline document
:return:
]
variable[cert_data] assign[=] call[name[Revocation].re_inline.match, parameter[name[inline]]]
if compare[name[cert_data] is constant[None]] begin[:]
<ast.Raise object at 0x7da2054a6230>
variable[pubkey] assign[=] call[name[cert_data].group, parameter[constant[1]]]
variable[signature] assign[=] call[name[cert_data].group, parameter[constant[2]]]
return[call[name[cls], parameter[name[version], name[currency], name[pubkey], name[signature]]]]
|
keyword[def] identifier[from_inline] ( identifier[cls] : identifier[Type] [ identifier[RevocationType] ], identifier[version] : identifier[int] , identifier[currency] : identifier[str] , identifier[inline] : identifier[str] )-> identifier[RevocationType] :
literal[string]
identifier[cert_data] = identifier[Revocation] . identifier[re_inline] . identifier[match] ( identifier[inline] )
keyword[if] identifier[cert_data] keyword[is] keyword[None] :
keyword[raise] identifier[MalformedDocumentError] ( literal[string] )
identifier[pubkey] = identifier[cert_data] . identifier[group] ( literal[int] )
identifier[signature] = identifier[cert_data] . identifier[group] ( literal[int] )
keyword[return] identifier[cls] ( identifier[version] , identifier[currency] , identifier[pubkey] , identifier[signature] )
|
def from_inline(cls: Type[RevocationType], version: int, currency: str, inline: str) -> RevocationType:
"""
Return Revocation document instance from inline string
Only self.pubkey is populated.
You must populate self.identity with an Identity instance to use raw/sign/signed_raw methods
:param version: Version number
:param currency: Name of the currency
:param inline: Inline document
:return:
"""
cert_data = Revocation.re_inline.match(inline)
if cert_data is None:
        raise MalformedDocumentError('Revocation') # depends on [control=['if'], data=[]]
pubkey = cert_data.group(1)
signature = cert_data.group(2)
return cls(version, currency, pubkey, signature)
|
def require_oauth(self, realm=None, require_resource_owner=True,
require_verifier=False, require_realm=False):
"""Mark the view function f as a protected resource"""
def decorator(f):
@wraps(f)
def verify_request(*args, **kwargs):
"""Verify OAuth params before running view function f"""
try:
if request.form:
body = request.form.to_dict()
else:
body = request.data.decode("utf-8")
verify_result = self.verify_request(request.url.decode("utf-8"),
http_method=request.method.decode("utf-8"),
body=body,
headers=request.headers,
require_resource_owner=require_resource_owner,
require_verifier=require_verifier,
require_realm=require_realm or bool(realm),
required_realm=realm)
valid, oauth_request = verify_result
if valid:
request.oauth = self.collect_request_parameters(request)
# Request tokens are only valid when a verifier is too
token = {}
if require_verifier:
token[u'request_token'] = request.oauth.resource_owner_key
else:
token[u'access_token'] = request.oauth.resource_owner_key
# All nonce/timestamp pairs must be stored to prevent
# replay attacks, they may be connected to a specific
# client and token to decrease collision probability.
self.save_timestamp_and_nonce(request.oauth.client_key,
request.oauth.timestamp, request.oauth.nonce,
**token)
# By this point, the request is fully authorized
return f(*args, **kwargs)
else:
                    # Unauthorized requests should not disclose their cause
raise Unauthorized()
except ValueError as err:
# Caused by missing of or badly formatted parameters
raise BadRequest(err.message)
return verify_request
return decorator
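# Hedged usage sketch for the decorator factory above inside a Flask app; the
# provider class, route, and realm are illustrative.
#
#     provider = MyProvider(app)   # subclass supplying verify_request,
#                                  # save_timestamp_and_nonce, and friends
#
#     @app.route('/photos')
#     @provider.require_oauth(realm='photos')
#     def photos():
#         return 'hello %s' % request.oauth.client_key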
|
def function[require_oauth, parameter[self, realm, require_resource_owner, require_verifier, require_realm]]:
constant[Mark the view function f as a protected resource]
def function[decorator, parameter[f]]:
def function[verify_request, parameter[]]:
constant[Verify OAuth params before running view function f]
<ast.Try object at 0x7da18eb54910>
return[name[verify_request]]
return[name[decorator]]
|
keyword[def] identifier[require_oauth] ( identifier[self] , identifier[realm] = keyword[None] , identifier[require_resource_owner] = keyword[True] ,
identifier[require_verifier] = keyword[False] , identifier[require_realm] = keyword[False] ):
literal[string]
keyword[def] identifier[decorator] ( identifier[f] ):
@ identifier[wraps] ( identifier[f] )
keyword[def] identifier[verify_request] (* identifier[args] ,** identifier[kwargs] ):
literal[string]
keyword[try] :
keyword[if] identifier[request] . identifier[form] :
identifier[body] = identifier[request] . identifier[form] . identifier[to_dict] ()
keyword[else] :
identifier[body] = identifier[request] . identifier[data] . identifier[decode] ( literal[string] )
identifier[verify_result] = identifier[self] . identifier[verify_request] ( identifier[request] . identifier[url] . identifier[decode] ( literal[string] ),
identifier[http_method] = identifier[request] . identifier[method] . identifier[decode] ( literal[string] ),
identifier[body] = identifier[body] ,
identifier[headers] = identifier[request] . identifier[headers] ,
identifier[require_resource_owner] = identifier[require_resource_owner] ,
identifier[require_verifier] = identifier[require_verifier] ,
identifier[require_realm] = identifier[require_realm] keyword[or] identifier[bool] ( identifier[realm] ),
identifier[required_realm] = identifier[realm] )
identifier[valid] , identifier[oauth_request] = identifier[verify_result]
keyword[if] identifier[valid] :
identifier[request] . identifier[oauth] = identifier[self] . identifier[collect_request_parameters] ( identifier[request] )
identifier[token] ={}
keyword[if] identifier[require_verifier] :
identifier[token] [ literal[string] ]= identifier[request] . identifier[oauth] . identifier[resource_owner_key]
keyword[else] :
identifier[token] [ literal[string] ]= identifier[request] . identifier[oauth] . identifier[resource_owner_key]
identifier[self] . identifier[save_timestamp_and_nonce] ( identifier[request] . identifier[oauth] . identifier[client_key] ,
identifier[request] . identifier[oauth] . identifier[timestamp] , identifier[request] . identifier[oauth] . identifier[nonce] ,
** identifier[token] )
keyword[return] identifier[f] (* identifier[args] ,** identifier[kwargs] )
keyword[else] :
keyword[raise] identifier[Unauthorized] ()
keyword[except] identifier[ValueError] keyword[as] identifier[err] :
keyword[raise] identifier[BadRequest] ( identifier[err] . identifier[message] )
keyword[return] identifier[verify_request]
keyword[return] identifier[decorator]
|
def require_oauth(self, realm=None, require_resource_owner=True, require_verifier=False, require_realm=False):
"""Mark the view function f as a protected resource"""
def decorator(f):
@wraps(f)
def verify_request(*args, **kwargs):
"""Verify OAuth params before running view function f"""
try:
if request.form:
body = request.form.to_dict() # depends on [control=['if'], data=[]]
else:
body = request.data.decode('utf-8')
verify_result = self.verify_request(request.url.decode('utf-8'), http_method=request.method.decode('utf-8'), body=body, headers=request.headers, require_resource_owner=require_resource_owner, require_verifier=require_verifier, require_realm=require_realm or bool(realm), required_realm=realm)
(valid, oauth_request) = verify_result
if valid:
request.oauth = self.collect_request_parameters(request)
# Request tokens are only valid when a verifier is too
token = {}
if require_verifier:
token[u'request_token'] = request.oauth.resource_owner_key # depends on [control=['if'], data=[]]
else:
token[u'access_token'] = request.oauth.resource_owner_key
# All nonce/timestamp pairs must be stored to prevent
# replay attacks, they may be connected to a specific
# client and token to decrease collision probability.
self.save_timestamp_and_nonce(request.oauth.client_key, request.oauth.timestamp, request.oauth.nonce, **token)
# By this point, the request is fully authorized
return f(*args, **kwargs) # depends on [control=['if'], data=[]]
else:
                    # Unauthorized requests should not disclose their cause
raise Unauthorized() # depends on [control=['try'], data=[]]
except ValueError as err:
# Caused by missing of or badly formatted parameters
raise BadRequest(err.message) # depends on [control=['except'], data=['err']]
return verify_request
return decorator
|
def sep_dist_floc(ConcAluminum, ConcClay, coag, material,
DIM_FRACTAL, DiamTarget):
"""Return separation distance as a function of floc size."""
return (material.Diameter
* (np.pi/(6
* frac_vol_floc_initial(ConcAluminum, ConcClay,
coag, material)
))**(1/3)
* (DiamTarget / material.Diameter)**(DIM_FRACTAL / 3)
)
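# The return expression above, transcribed (D = material.Diameter, phi_0 the
# initial floc volume fraction from frac_vol_floc_initial, d = DiamTarget,
# D_F = DIM_FRACTAL):
#
#     s = D * (pi / (6 * phi_0))**(1/3) * (d / D)**(D_F / 3)
#
# or in LaTeX:
#     s = D \left( \frac{\pi}{6\,\phi_0} \right)^{1/3}
#           \left( \frac{d}{D} \right)^{D_F/3}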
|
def function[sep_dist_floc, parameter[ConcAluminum, ConcClay, coag, material, DIM_FRACTAL, DiamTarget]]:
constant[Return separation distance as a function of floc size.]
return[binary_operation[binary_operation[name[material].Diameter * binary_operation[binary_operation[name[np].pi / binary_operation[constant[6] * call[name[frac_vol_floc_initial], parameter[name[ConcAluminum], name[ConcClay], name[coag], name[material]]]]] ** binary_operation[constant[1] / constant[3]]]] * binary_operation[binary_operation[name[DiamTarget] / name[material].Diameter] ** binary_operation[name[DIM_FRACTAL] / constant[3]]]]]
|
keyword[def] identifier[sep_dist_floc] ( identifier[ConcAluminum] , identifier[ConcClay] , identifier[coag] , identifier[material] ,
identifier[DIM_FRACTAL] , identifier[DiamTarget] ):
literal[string]
keyword[return] ( identifier[material] . identifier[Diameter]
*( identifier[np] . identifier[pi] /( literal[int]
* identifier[frac_vol_floc_initial] ( identifier[ConcAluminum] , identifier[ConcClay] ,
identifier[coag] , identifier[material] )
))**( literal[int] / literal[int] )
*( identifier[DiamTarget] / identifier[material] . identifier[Diameter] )**( identifier[DIM_FRACTAL] / literal[int] )
)
|
def sep_dist_floc(ConcAluminum, ConcClay, coag, material, DIM_FRACTAL, DiamTarget):
"""Return separation distance as a function of floc size."""
return material.Diameter * (np.pi / (6 * frac_vol_floc_initial(ConcAluminum, ConcClay, coag, material))) ** (1 / 3) * (DiamTarget / material.Diameter) ** (DIM_FRACTAL / 3)
|
def _MI_setitem(self, args, value):
'Separate __setitem__ function of MIMapping'
indices = self.indices
N = len(indices)
empty = N == 0
if empty: # init the dict
index1, key, index2, index1_last = MI_parse_args(self, args, allow_new=True)
exist_names = [index1]
item = [key]
try:
MI_check_index_name(index2)
exist_names.append(index2)
item.append(value)
except TypeError:
Nvalue, value = get_value_len(value)
if len(index2) != Nvalue:
raise ValueError(
'Number of keys (%s) based on argument %s does not match '
'number of values (%s)' % (len(index2), index2, Nvalue))
exist_names.extend(index2)
item.extend(value)
if index1_last:
exist_names = exist_names[1:] + exist_names[:1]
item = item[1:] + item[:1]
_MI_init(self, [item], exist_names)
return
index1, key, index2, item, old_value = MI_parse_args(self, args, allow_new=True)
names = force_list(indices.keys())
is_new_key = item is None
single = isinstance(index2, int)
if single:
index2_list = [index2]
value = [value]
old_value = [old_value]
else:
index2_list = index2
Nvalue, value = get_value_len(value)
if len(index2_list) != Nvalue:
raise ValueError('Number of keys (%s) based on argument %s does not match '
'number of values (%s)' % (len(index2_list), index2, Nvalue))
if is_new_key:
old_value = [None] * Nvalue # fake it
# remove duplicate in index2_list
index2_d = OrderedDict()
for e, index in enumerate(index2_list):
index2_d[index] = e # order of each unique index
if len(index2_d) < len(index2_list): # exist duplicate indices
idx = index2_d.values()
index2_list = mget_list(index2_list, idx)
value = mget_list(value, idx)
old_value = mget_list(old_value, idx)
# check duplicate values
for i, v, old_v in zip(index2_list, value, old_value):
        # index2_list may contain index1; do not allow duplicate values for index1 either
if v in indices[i]:
if is_new_key or v != old_v:
raise ValueExistsError(v, i, names[i])
if is_new_key:
if set(index2_list + [index1]) != set(range(N)):
raise ValueError('Indices of the new item do not match existing indices')
d = {}
d[index1] = key
# index2_list may also override index1
d.update(zip(index2_list, value))
values = [d[i] for i in range(N)] # reorder based on the indices
key = values[0]
val = values[1] if len(values) == 2 else values[1:]
super(MIMapping, self).__setitem__(key, val)
for i, v in zip(names[1:], values[1:]):
indices[i][v] = key
else: # not new key
# import pdb;pdb.set_trace()
key1 = item[0]
item2 = list(item) # copy item first
mset_list(item2, index2_list, value) # index2_list may also override index1
key2 = item2[0]
val = item2[1] if len(item2) == 2 else item2[1:]
if key1 == key2:
super(MIMapping, self).__setitem__(key1, val)
else:
od_replace_key(self, key1, key2, val)
for i, v_old, v_new in zip(names[1:], item[1:], item2[1:]):
od_replace_key(indices[i], v_old, v_new, key2)
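# Hedged sketch of the invariant _MI_setitem maintains: one item carries a
# value per named index, each index column stays unique, and every index maps
# its value back to the item's primary key. MIDemo is an illustrative toy,
# not the real MIMapping.
from collections import OrderedDict

class MIDemo(object):
    def __init__(self, names):
        self.indices = OrderedDict((n, {}) for n in names)

    def add(self, values):
        for name, v in zip(self.indices, values):
            if v in self.indices[name]:
                raise ValueError('%r already used in index %r' % (v, name))
        key = values[0]
        for name, v in zip(self.indices, values):
            self.indices[name][v] = key

d = MIDemo(['name', 'uid'])
d.add(['alice', 1001])
assert d.indices['uid'][1001] == 'alice'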
|
def function[_MI_setitem, parameter[self, args, value]]:
constant[Separate __setitem__ function of MIMapping]
variable[indices] assign[=] name[self].indices
variable[N] assign[=] call[name[len], parameter[name[indices]]]
variable[empty] assign[=] compare[name[N] equal[==] constant[0]]
if name[empty] begin[:]
<ast.Tuple object at 0x7da18f00e530> assign[=] call[name[MI_parse_args], parameter[name[self], name[args]]]
variable[exist_names] assign[=] list[[<ast.Name object at 0x7da18f00d1e0>]]
variable[item] assign[=] list[[<ast.Name object at 0x7da18f00e3b0>]]
<ast.Try object at 0x7da18f00fd30>
if name[index1_last] begin[:]
variable[exist_names] assign[=] binary_operation[call[name[exist_names]][<ast.Slice object at 0x7da18f00dfc0>] + call[name[exist_names]][<ast.Slice object at 0x7da18f00c730>]]
variable[item] assign[=] binary_operation[call[name[item]][<ast.Slice object at 0x7da18f00c4f0>] + call[name[item]][<ast.Slice object at 0x7da18f00f910>]]
call[name[_MI_init], parameter[name[self], list[[<ast.Name object at 0x7da18f00d420>]], name[exist_names]]]
return[None]
<ast.Tuple object at 0x7da18f00e6e0> assign[=] call[name[MI_parse_args], parameter[name[self], name[args]]]
variable[names] assign[=] call[name[force_list], parameter[call[name[indices].keys, parameter[]]]]
variable[is_new_key] assign[=] compare[name[item] is constant[None]]
variable[single] assign[=] call[name[isinstance], parameter[name[index2], name[int]]]
if name[single] begin[:]
variable[index2_list] assign[=] list[[<ast.Name object at 0x7da18f00fc10>]]
variable[value] assign[=] list[[<ast.Name object at 0x7da18f00dcc0>]]
variable[old_value] assign[=] list[[<ast.Name object at 0x7da18f00fd00>]]
variable[index2_d] assign[=] call[name[OrderedDict], parameter[]]
for taget[tuple[[<ast.Name object at 0x7da18f00cd60>, <ast.Name object at 0x7da18f00c1f0>]]] in starred[call[name[enumerate], parameter[name[index2_list]]]] begin[:]
call[name[index2_d]][name[index]] assign[=] name[e]
if compare[call[name[len], parameter[name[index2_d]]] less[<] call[name[len], parameter[name[index2_list]]]] begin[:]
variable[idx] assign[=] call[name[index2_d].values, parameter[]]
variable[index2_list] assign[=] call[name[mget_list], parameter[name[index2_list], name[idx]]]
variable[value] assign[=] call[name[mget_list], parameter[name[value], name[idx]]]
variable[old_value] assign[=] call[name[mget_list], parameter[name[old_value], name[idx]]]
for taget[tuple[[<ast.Name object at 0x7da18f00e740>, <ast.Name object at 0x7da18f00f010>, <ast.Name object at 0x7da18f00cbb0>]]] in starred[call[name[zip], parameter[name[index2_list], name[value], name[old_value]]]] begin[:]
if compare[name[v] in call[name[indices]][name[i]]] begin[:]
if <ast.BoolOp object at 0x7da18f00c0a0> begin[:]
<ast.Raise object at 0x7da18f00dc60>
if name[is_new_key] begin[:]
if compare[call[name[set], parameter[binary_operation[name[index2_list] + list[[<ast.Name object at 0x7da18f00f7c0>]]]]] not_equal[!=] call[name[set], parameter[call[name[range], parameter[name[N]]]]]] begin[:]
<ast.Raise object at 0x7da18c4cee90>
variable[d] assign[=] dictionary[[], []]
call[name[d]][name[index1]] assign[=] name[key]
call[name[d].update, parameter[call[name[zip], parameter[name[index2_list], name[value]]]]]
variable[values] assign[=] <ast.ListComp object at 0x7da18c4cf0a0>
variable[key] assign[=] call[name[values]][constant[0]]
variable[val] assign[=] <ast.IfExp object at 0x7da18c4cca60>
call[call[name[super], parameter[name[MIMapping], name[self]]].__setitem__, parameter[name[key], name[val]]]
for taget[tuple[[<ast.Name object at 0x7da18c4cc280>, <ast.Name object at 0x7da18c4ce8c0>]]] in starred[call[name[zip], parameter[call[name[names]][<ast.Slice object at 0x7da18c4cc5e0>], call[name[values]][<ast.Slice object at 0x7da18c4cdde0>]]]] begin[:]
call[call[name[indices]][name[i]]][name[v]] assign[=] name[key]
|
keyword[def] identifier[_MI_setitem] ( identifier[self] , identifier[args] , identifier[value] ):
literal[string]
identifier[indices] = identifier[self] . identifier[indices]
identifier[N] = identifier[len] ( identifier[indices] )
identifier[empty] = identifier[N] == literal[int]
keyword[if] identifier[empty] :
identifier[index1] , identifier[key] , identifier[index2] , identifier[index1_last] = identifier[MI_parse_args] ( identifier[self] , identifier[args] , identifier[allow_new] = keyword[True] )
identifier[exist_names] =[ identifier[index1] ]
identifier[item] =[ identifier[key] ]
keyword[try] :
identifier[MI_check_index_name] ( identifier[index2] )
identifier[exist_names] . identifier[append] ( identifier[index2] )
identifier[item] . identifier[append] ( identifier[value] )
keyword[except] identifier[TypeError] :
identifier[Nvalue] , identifier[value] = identifier[get_value_len] ( identifier[value] )
keyword[if] identifier[len] ( identifier[index2] )!= identifier[Nvalue] :
keyword[raise] identifier[ValueError] (
literal[string]
literal[string] %( identifier[len] ( identifier[index2] ), identifier[index2] , identifier[Nvalue] ))
identifier[exist_names] . identifier[extend] ( identifier[index2] )
identifier[item] . identifier[extend] ( identifier[value] )
keyword[if] identifier[index1_last] :
identifier[exist_names] = identifier[exist_names] [ literal[int] :]+ identifier[exist_names] [: literal[int] ]
identifier[item] = identifier[item] [ literal[int] :]+ identifier[item] [: literal[int] ]
identifier[_MI_init] ( identifier[self] ,[ identifier[item] ], identifier[exist_names] )
keyword[return]
identifier[index1] , identifier[key] , identifier[index2] , identifier[item] , identifier[old_value] = identifier[MI_parse_args] ( identifier[self] , identifier[args] , identifier[allow_new] = keyword[True] )
identifier[names] = identifier[force_list] ( identifier[indices] . identifier[keys] ())
identifier[is_new_key] = identifier[item] keyword[is] keyword[None]
identifier[single] = identifier[isinstance] ( identifier[index2] , identifier[int] )
keyword[if] identifier[single] :
identifier[index2_list] =[ identifier[index2] ]
identifier[value] =[ identifier[value] ]
identifier[old_value] =[ identifier[old_value] ]
keyword[else] :
identifier[index2_list] = identifier[index2]
identifier[Nvalue] , identifier[value] = identifier[get_value_len] ( identifier[value] )
keyword[if] identifier[len] ( identifier[index2_list] )!= identifier[Nvalue] :
keyword[raise] identifier[ValueError] ( literal[string]
literal[string] %( identifier[len] ( identifier[index2_list] ), identifier[index2] , identifier[Nvalue] ))
keyword[if] identifier[is_new_key] :
identifier[old_value] =[ keyword[None] ]* identifier[Nvalue]
identifier[index2_d] = identifier[OrderedDict] ()
keyword[for] identifier[e] , identifier[index] keyword[in] identifier[enumerate] ( identifier[index2_list] ):
identifier[index2_d] [ identifier[index] ]= identifier[e]
keyword[if] identifier[len] ( identifier[index2_d] )< identifier[len] ( identifier[index2_list] ):
identifier[idx] = identifier[index2_d] . identifier[values] ()
identifier[index2_list] = identifier[mget_list] ( identifier[index2_list] , identifier[idx] )
identifier[value] = identifier[mget_list] ( identifier[value] , identifier[idx] )
identifier[old_value] = identifier[mget_list] ( identifier[old_value] , identifier[idx] )
keyword[for] identifier[i] , identifier[v] , identifier[old_v] keyword[in] identifier[zip] ( identifier[index2_list] , identifier[value] , identifier[old_value] ):
keyword[if] identifier[v] keyword[in] identifier[indices] [ identifier[i] ]:
keyword[if] identifier[is_new_key] keyword[or] identifier[v] != identifier[old_v] :
keyword[raise] identifier[ValueExistsError] ( identifier[v] , identifier[i] , identifier[names] [ identifier[i] ])
keyword[if] identifier[is_new_key] :
keyword[if] identifier[set] ( identifier[index2_list] +[ identifier[index1] ])!= identifier[set] ( identifier[range] ( identifier[N] )):
keyword[raise] identifier[ValueError] ( literal[string] )
identifier[d] ={}
identifier[d] [ identifier[index1] ]= identifier[key]
identifier[d] . identifier[update] ( identifier[zip] ( identifier[index2_list] , identifier[value] ))
identifier[values] =[ identifier[d] [ identifier[i] ] keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[N] )]
identifier[key] = identifier[values] [ literal[int] ]
identifier[val] = identifier[values] [ literal[int] ] keyword[if] identifier[len] ( identifier[values] )== literal[int] keyword[else] identifier[values] [ literal[int] :]
identifier[super] ( identifier[MIMapping] , identifier[self] ). identifier[__setitem__] ( identifier[key] , identifier[val] )
keyword[for] identifier[i] , identifier[v] keyword[in] identifier[zip] ( identifier[names] [ literal[int] :], identifier[values] [ literal[int] :]):
identifier[indices] [ identifier[i] ][ identifier[v] ]= identifier[key]
keyword[else] :
identifier[key1] = identifier[item] [ literal[int] ]
identifier[item2] = identifier[list] ( identifier[item] )
identifier[mset_list] ( identifier[item2] , identifier[index2_list] , identifier[value] )
identifier[key2] = identifier[item2] [ literal[int] ]
identifier[val] = identifier[item2] [ literal[int] ] keyword[if] identifier[len] ( identifier[item2] )== literal[int] keyword[else] identifier[item2] [ literal[int] :]
keyword[if] identifier[key1] == identifier[key2] :
identifier[super] ( identifier[MIMapping] , identifier[self] ). identifier[__setitem__] ( identifier[key1] , identifier[val] )
keyword[else] :
identifier[od_replace_key] ( identifier[self] , identifier[key1] , identifier[key2] , identifier[val] )
keyword[for] identifier[i] , identifier[v_old] , identifier[v_new] keyword[in] identifier[zip] ( identifier[names] [ literal[int] :], identifier[item] [ literal[int] :], identifier[item2] [ literal[int] :]):
identifier[od_replace_key] ( identifier[indices] [ identifier[i] ], identifier[v_old] , identifier[v_new] , identifier[key2] )
|
def _MI_setitem(self, args, value):
"""Separate __setitem__ function of MIMapping"""
indices = self.indices
N = len(indices)
empty = N == 0
if empty: # init the dict
(index1, key, index2, index1_last) = MI_parse_args(self, args, allow_new=True)
exist_names = [index1]
item = [key]
try:
MI_check_index_name(index2)
exist_names.append(index2)
item.append(value) # depends on [control=['try'], data=[]]
except TypeError:
(Nvalue, value) = get_value_len(value)
if len(index2) != Nvalue:
raise ValueError('Number of keys (%s) based on argument %s does not match number of values (%s)' % (len(index2), index2, Nvalue)) # depends on [control=['if'], data=['Nvalue']]
exist_names.extend(index2)
item.extend(value) # depends on [control=['except'], data=[]]
if index1_last:
exist_names = exist_names[1:] + exist_names[:1]
item = item[1:] + item[:1] # depends on [control=['if'], data=[]]
_MI_init(self, [item], exist_names)
return # depends on [control=['if'], data=[]]
(index1, key, index2, item, old_value) = MI_parse_args(self, args, allow_new=True)
names = force_list(indices.keys())
is_new_key = item is None
single = isinstance(index2, int)
if single:
index2_list = [index2]
value = [value]
old_value = [old_value] # depends on [control=['if'], data=[]]
else:
index2_list = index2
(Nvalue, value) = get_value_len(value)
if len(index2_list) != Nvalue:
raise ValueError('Number of keys (%s) based on argument %s does not match number of values (%s)' % (len(index2_list), index2, Nvalue)) # depends on [control=['if'], data=['Nvalue']]
if is_new_key:
old_value = [None] * Nvalue # fake it # depends on [control=['if'], data=[]]
# remove duplicate in index2_list
index2_d = OrderedDict()
for (e, index) in enumerate(index2_list):
index2_d[index] = e # order of each unique index # depends on [control=['for'], data=[]]
if len(index2_d) < len(index2_list): # exist duplicate indices
idx = index2_d.values()
index2_list = mget_list(index2_list, idx)
value = mget_list(value, idx)
old_value = mget_list(old_value, idx) # depends on [control=['if'], data=[]]
# check duplicate values
for (i, v, old_v) in zip(index2_list, value, old_value):
        # index2_list may contain index1; duplicate values are not allowed for index1 either
if v in indices[i]:
if is_new_key or v != old_v:
raise ValueExistsError(v, i, names[i]) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['v']] # depends on [control=['for'], data=[]]
if is_new_key:
if set(index2_list + [index1]) != set(range(N)):
raise ValueError('Indices of the new item do not match existing indices') # depends on [control=['if'], data=[]]
d = {}
d[index1] = key
# index2_list may also override index1
d.update(zip(index2_list, value))
values = [d[i] for i in range(N)] # reorder based on the indices
key = values[0]
val = values[1] if len(values) == 2 else values[1:]
super(MIMapping, self).__setitem__(key, val)
for (i, v) in zip(names[1:], values[1:]):
indices[i][v] = key # depends on [control=['for'], data=[]] # depends on [control=['if'], data=[]]
else: # not new key
# import pdb;pdb.set_trace()
key1 = item[0]
item2 = list(item) # copy item first
mset_list(item2, index2_list, value) # index2_list may also override index1
key2 = item2[0]
val = item2[1] if len(item2) == 2 else item2[1:]
if key1 == key2:
super(MIMapping, self).__setitem__(key1, val) # depends on [control=['if'], data=['key1']]
else:
od_replace_key(self, key1, key2, val)
for (i, v_old, v_new) in zip(names[1:], item[1:], item2[1:]):
od_replace_key(indices[i], v_old, v_new, key2) # depends on [control=['for'], data=[]]
|
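As a rough, self-contained sketch of the invariant _MI_setitem enforces: before writing, each new value is checked against the index it targets, and a value already owned by a different key is rejected. The names below (indices, names, check_no_duplicates) are illustrative stand-ins, not the MIMapping API.
# Hypothetical miniature of the duplicate-value check in _MI_setitem.
# indices maps index position -> {value: primary_key}.
indices = {1: {'alice': 'id1'}, 2: {'a@x.com': 'id1'}}
names = {1: 'name', 2: 'email'}

def check_no_duplicates(index2_list, values, old_values, is_new_key):
    for i, v, old_v in zip(index2_list, values, old_values):
        # a value may stay equal to its own old value; anything else must be unique
        if v in indices[i] and (is_new_key or v != old_v):
            raise ValueError('%r already exists in index %r' % (v, names[i]))

check_no_duplicates([1, 2], ['bob', 'b@x.com'], [None, None], True)  # passes
try:
    check_no_duplicates([1], ['alice'], [None], True)  # 'alice' is taken
except ValueError as exc:
    print(exc)  # 'alice' already exists in index 'name'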
def pyramid_synthesis(Gs, cap, pe, order=30, **kwargs):
r"""Synthesize a signal from its pyramid coefficients.
Parameters
----------
Gs : Array of Graphs
A multiresolution sequence of graph structures.
cap : ndarray
Coarsest approximation of the original signal.
pe : ndarray
Prediction error at each level.
use_exact : bool
To use exact graph spectral filtering instead of the Chebyshev approximation.
order : int
Degree of the Chebyshev approximation (default=30).
least_squares : bool
To use the least squares synthesis (default=False).
h_filters : ndarray
The filters used in the analysis operator.
These are required for least squares synthesis, but not for the direct synthesis method.
use_landweber : bool
To use the Landweber iteration approximation in the least squares synthesis.
reg_eps : float
Interpolation parameter.
landweber_its : int
Number of iterations in the Landweber approximation for least squares synthesis.
landweber_tau : float
Parameter for the Landweber iteration.
Returns
-------
reconstruction : ndarray
The reconstructed signal.
ca : ndarray
Coarse approximations at each level
"""
least_squares = bool(kwargs.pop('least_squares', False))
def_ul = Gs[0].N > 3000 or Gs[0]._e is None or Gs[0]._U is None
use_landweber = bool(kwargs.pop('use_landweber', def_ul))
reg_eps = float(kwargs.get('reg_eps', 0.005))
    h_filters = kwargs.pop('h_filters', None)
    if least_squares and h_filters is None:
        raise ValueError('h-filters not provided.')
    levels = len(Gs) - 1
    if len(pe) != levels:
        raise ValueError('Gs and pe have different shapes.')
ca = [cap]
# Reconstruct each level
for i in range(levels):
if not least_squares:
s_pred = interpolate(Gs[levels - i - 1], ca[i], Gs[levels - i].mr['idx'],
order=order, reg_eps=reg_eps, **kwargs)
ca.append(s_pred + pe[levels - i - 1])
else:
ca.append(_pyramid_single_interpolation(Gs[levels - i - 1], ca[i],
pe[levels - i - 1], h_filters[levels - i - 1],
use_landweber=use_landweber, **kwargs))
ca.reverse()
reconstruction = ca[0]
return reconstruction, ca
|
def function[pyramid_synthesis, parameter[Gs, cap, pe, order]]:
constant[Synthesize a signal from its pyramid coefficients.
Parameters
----------
Gs : Array of Graphs
A multiresolution sequence of graph structures.
cap : ndarray
Coarsest approximation of the original signal.
pe : ndarray
Prediction error at each level.
use_exact : bool
To use exact graph spectral filtering instead of the Chebyshev approximation.
order : int
Degree of the Chebyshev approximation (default=30).
least_squares : bool
To use the least squares synthesis (default=False).
h_filters : ndarray
The filters used in the analysis operator.
These are required for least squares synthesis, but not for the direct synthesis method.
use_landweber : bool
To use the Landweber iteration approximation in the least squares synthesis.
reg_eps : float
Interpolation parameter.
landweber_its : int
Number of iterations in the Landweber approximation for least squares synthesis.
landweber_tau : float
Parameter for the Landweber iteration.
Returns
-------
reconstruction : ndarray
The reconstructed signal.
ca : ndarray
Coarse approximations at each level
]
variable[least_squares] assign[=] call[name[bool], parameter[call[name[kwargs].pop, parameter[constant[least_squares], constant[False]]]]]
variable[def_ul] assign[=] <ast.BoolOp object at 0x7da2043445e0>
variable[use_landweber] assign[=] call[name[bool], parameter[call[name[kwargs].pop, parameter[constant[use_landweber], name[def_ul]]]]]
variable[reg_eps] assign[=] call[name[float], parameter[call[name[kwargs].get, parameter[constant[reg_eps], constant[0.005]]]]]
if <ast.BoolOp object at 0x7da2047ebe20> begin[:]
call[name[ValueError], parameter[constant[h-filters not provided.]]]
variable[levels] assign[=] binary_operation[call[name[len], parameter[name[Gs]]] - constant[1]]
if compare[call[name[len], parameter[name[pe]]] not_equal[!=] name[levels]] begin[:]
call[name[ValueError], parameter[constant[Gs and pe have different shapes.]]]
variable[ca] assign[=] list[[<ast.Name object at 0x7da20e9b3eb0>]]
for taget[name[i]] in starred[call[name[range], parameter[name[levels]]]] begin[:]
if <ast.UnaryOp object at 0x7da20e9b38b0> begin[:]
variable[s_pred] assign[=] call[name[interpolate], parameter[call[name[Gs]][binary_operation[binary_operation[name[levels] - name[i]] - constant[1]]], call[name[ca]][name[i]], call[call[name[Gs]][binary_operation[name[levels] - name[i]]].mr][constant[idx]]]]
call[name[ca].append, parameter[binary_operation[name[s_pred] + call[name[pe]][binary_operation[binary_operation[name[levels] - name[i]] - constant[1]]]]]]
call[name[ca].reverse, parameter[]]
variable[reconstruction] assign[=] call[name[ca]][constant[0]]
return[tuple[[<ast.Name object at 0x7da20e9b2590>, <ast.Name object at 0x7da20e9b1450>]]]
|
keyword[def] identifier[pyramid_synthesis] ( identifier[Gs] , identifier[cap] , identifier[pe] , identifier[order] = literal[int] ,** identifier[kwargs] ):
literal[string]
identifier[least_squares] = identifier[bool] ( identifier[kwargs] . identifier[pop] ( literal[string] , keyword[False] ))
identifier[def_ul] = identifier[Gs] [ literal[int] ]. identifier[N] > literal[int] keyword[or] identifier[Gs] [ literal[int] ]. identifier[_e] keyword[is] keyword[None] keyword[or] identifier[Gs] [ literal[int] ]. identifier[_U] keyword[is] keyword[None]
identifier[use_landweber] = identifier[bool] ( identifier[kwargs] . identifier[pop] ( literal[string] , identifier[def_ul] ))
identifier[reg_eps] = identifier[float] ( identifier[kwargs] . identifier[get] ( literal[string] , literal[int] ))
keyword[if] identifier[least_squares] keyword[and] literal[string] keyword[not] keyword[in] identifier[kwargs] :
identifier[ValueError] ( literal[string] )
identifier[levels] = identifier[len] ( identifier[Gs] )- literal[int]
keyword[if] identifier[len] ( identifier[pe] )!= identifier[levels] :
identifier[ValueError] ( literal[string] )
identifier[ca] =[ identifier[cap] ]
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[levels] ):
keyword[if] keyword[not] identifier[least_squares] :
identifier[s_pred] = identifier[interpolate] ( identifier[Gs] [ identifier[levels] - identifier[i] - literal[int] ], identifier[ca] [ identifier[i] ], identifier[Gs] [ identifier[levels] - identifier[i] ]. identifier[mr] [ literal[string] ],
identifier[order] = identifier[order] , identifier[reg_eps] = identifier[reg_eps] ,** identifier[kwargs] )
identifier[ca] . identifier[append] ( identifier[s_pred] + identifier[pe] [ identifier[levels] - identifier[i] - literal[int] ])
keyword[else] :
identifier[ca] . identifier[append] ( identifier[_pyramid_single_interpolation] ( identifier[Gs] [ identifier[levels] - identifier[i] - literal[int] ], identifier[ca] [ identifier[i] ],
identifier[pe] [ identifier[levels] - identifier[i] - literal[int] ], identifier[h_filters] [ identifier[levels] - identifier[i] - literal[int] ],
identifier[use_landweber] = identifier[use_landweber] ,** identifier[kwargs] ))
identifier[ca] . identifier[reverse] ()
identifier[reconstruction] = identifier[ca] [ literal[int] ]
keyword[return] identifier[reconstruction] , identifier[ca]
|
def pyramid_synthesis(Gs, cap, pe, order=30, **kwargs):
"""Synthesize a signal from its pyramid coefficients.
Parameters
----------
Gs : Array of Graphs
A multiresolution sequence of graph structures.
cap : ndarray
Coarsest approximation of the original signal.
pe : ndarray
Prediction error at each level.
use_exact : bool
To use exact graph spectral filtering instead of the Chebyshev approximation.
order : int
Degree of the Chebyshev approximation (default=30).
least_squares : bool
To use the least squares synthesis (default=False).
h_filters : ndarray
The filters used in the analysis operator.
These are required for least squares synthesis, but not for the direct synthesis method.
use_landweber : bool
To use the Landweber iteration approximation in the least squares synthesis.
reg_eps : float
Interpolation parameter.
landweber_its : int
Number of iterations in the Landweber approximation for least squares synthesis.
landweber_tau : float
Parameter for the Landweber iteration.
Returns
-------
reconstruction : ndarray
The reconstructed signal.
ca : ndarray
Coarse approximations at each level
"""
least_squares = bool(kwargs.pop('least_squares', False))
def_ul = Gs[0].N > 3000 or Gs[0]._e is None or Gs[0]._U is None
use_landweber = bool(kwargs.pop('use_landweber', def_ul))
reg_eps = float(kwargs.get('reg_eps', 0.005))
    h_filters = kwargs.pop('h_filters', None)
    if least_squares and h_filters is None:
        raise ValueError('h-filters not provided.') # depends on [control=['if'], data=[]]
    levels = len(Gs) - 1
    if len(pe) != levels:
        raise ValueError('Gs and pe have different shapes.') # depends on [control=['if'], data=[]]
ca = [cap]
# Reconstruct each level
for i in range(levels):
if not least_squares:
s_pred = interpolate(Gs[levels - i - 1], ca[i], Gs[levels - i].mr['idx'], order=order, reg_eps=reg_eps, **kwargs)
ca.append(s_pred + pe[levels - i - 1]) # depends on [control=['if'], data=[]]
else:
ca.append(_pyramid_single_interpolation(Gs[levels - i - 1], ca[i], pe[levels - i - 1], h_filters[levels - i - 1], use_landweber=use_landweber, **kwargs)) # depends on [control=['for'], data=['i']]
ca.reverse()
reconstruction = ca[0]
return (reconstruction, ca)
|
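The reconstruction loop above follows the generic pyramid-synthesis pattern: each finer level is an interpolation of the coarser approximation plus the stored prediction error. A hedged, self-contained 1-D analogue of the direct branch (numpy only; no graph structures, so analyze and synthesize here are illustrative stand-ins, not the library API):
import numpy as np

def analyze(signal, levels):
    """Build coarse approximations and prediction errors (1-D analogue)."""
    ca, pe = [signal], []
    for _ in range(levels):
        coarse = ca[-1][::2]                       # decimate by 2
        pred = np.repeat(coarse, 2)[:len(ca[-1])]  # crude nearest-neighbour interpolation
        pe.append(ca[-1] - pred)                   # prediction error at this level
        ca.append(coarse)
    return ca[-1], pe

def synthesize(cap, pe):
    """Mirror of the loop above: finer = interpolate(coarser) + prediction error."""
    rec = cap
    for err in reversed(pe):
        rec = np.repeat(rec, 2)[:len(err)] + err
    return rec

x = np.arange(8, dtype=float)
cap, pe = analyze(x, levels=2)
assert np.allclose(synthesize(cap, pe), x)  # perfect reconstruction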
def render_to_response(self, context, indent=None):
"Returns a JSON response containing 'context' as payload"
return self.get_json_response(self.convert_context_to_json(context, indent=indent))
|
def function[render_to_response, parameter[self, context, indent]]:
constant[Returns a JSON response containing 'context' as payload]
return[call[name[self].get_json_response, parameter[call[name[self].convert_context_to_json, parameter[name[context]]]]]]
|
keyword[def] identifier[render_to_response] ( identifier[self] , identifier[context] , identifier[indent] = keyword[None] ):
literal[string]
keyword[return] identifier[self] . identifier[get_json_response] ( identifier[self] . identifier[convert_context_to_json] ( identifier[context] , identifier[indent] = identifier[indent] ))
|
def render_to_response(self, context, indent=None):
"""Returns a JSON response containing 'context' as payload"""
return self.get_json_response(self.convert_context_to_json(context, indent=indent))
|
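A minimal, runnable stand-in for the mixin this method lives in; the class and the two helper methods are reconstructed here as assumptions for illustration, with a tuple standing in for a real HTTP response object:
import json

class JSONResponseMixin(object):
    """Hypothetical stand-in for the surrounding mixin class."""
    def convert_context_to_json(self, context, indent=None):
        return json.dumps(context, indent=indent)

    def get_json_response(self, payload):
        # A real mixin would wrap this in an HttpResponse with the JSON mimetype.
        return ('application/json', payload)

    def render_to_response(self, context, indent=None):
        return self.get_json_response(self.convert_context_to_json(context, indent=indent))

print(JSONResponseMixin().render_to_response({'ok': True}, indent=2))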
def format_message(self, evr_hist_data):
''' Format EVR message with EVR data
Given a byte array of EVR data, format the EVR's message attribute
printf format strings and split the byte array into appropriately
sized chunks. Supports most format strings containing length and type
fields.
Args:
evr_hist_data: A bytearray of EVR data. Bytes are expected to be in
MSB ordering.
Example formatting::
# This is the character '!', string 'Foo', and int '4279317316'
bytearray([0x21, 0x46, 0x6f, 0x6f, 0x00, 0xff, 0x11, 0x33, 0x44])
Returns:
The EVR's message string formatted with the EVR data or the
unformatted EVR message string if there are no valid format
strings present in it.
Raises:
ValueError: When the bytearray cannot be fully processed with the
specified format strings. This is usually a result of the
expected data length and the byte array length not matching.
'''
size_formatter_info = {
's' : -1,
'c' : 1,
'i' : 4,
'd' : 4,
'u' : 4,
'x' : 4,
'hh': 1,
'h' : 2,
'l' : 4,
'll': 8,
'f' : 8,
'g' : 8,
'e' : 8,
}
type_formatter_info = {
'c' : 'U{}',
'i' : 'MSB_I{}',
'd' : 'MSB_I{}',
'u' : 'MSB_U{}',
'f' : 'MSB_D{}',
'e' : 'MSB_D{}',
'g' : 'MSB_D{}',
'x' : 'MSB_U{}',
}
formatters = re.findall("%(?:\d+\$)?([cdieEfgGosuxXhlL]+)", self._message)
cur_byte_index = 0
data_chunks = []
for f in formatters:
# If the format string we found is > 1 character we know that a length
# field is included and we need to adjust our sizing accordingly.
f_size_char = f_type = f[-1]
if len(f) > 1:
f_size_char = f[:-1]
fsize = size_formatter_info[f_size_char.lower()]
try:
if f_type != 's':
end_index = cur_byte_index + fsize
fstr = type_formatter_info[f_type.lower()].format(fsize*8)
# Type formatting can give us incorrect format strings when
                    # a size formatter promotes a smaller data type. For instance,
# 'hhu' says we'll promote a char (1 byte) to an unsigned
# int for display. Here, the type format string would be
# incorrectly set to 'MSB_U8' if we didn't correct.
if fsize == 1 and 'MSB_' in fstr:
fstr = fstr[4:]
d = dtype.PrimitiveType(fstr).decode(
evr_hist_data[cur_byte_index:end_index]
)
# Some formatters have an undefined data size (such as strings)
# and require additional processing to determine the length of
# the data and decode data.
else:
end_index = str(evr_hist_data).index('\x00', cur_byte_index)
d = str(evr_hist_data[cur_byte_index:end_index])
data_chunks.append(d)
            except Exception:
msg = "Unable to format EVR Message with data {}".format(evr_hist_data)
log.error(msg)
raise ValueError(msg)
cur_byte_index = end_index
# If we were formatting a string we need to add another index offset
# to exclude the null terminator.
if f == 's':
cur_byte_index += 1
# Format and return the EVR message if formatters were present, otherwise
# just return the EVR message as is.
if len(formatters) == 0:
return self._message
else:
# Python format strings cannot handle size formatter information. So something
# such as %llu needs to be adjusted to be a valid identifier in python by
# removing the size formatter.
msg = self._message
for f in formatters:
if len(f) > 1:
msg = msg.replace('%{}'.format(f), '%{}'.format(f[-1]))
return msg % tuple(data_chunks)
|
def function[format_message, parameter[self, evr_hist_data]]:
constant[ Format EVR message with EVR data
Given a byte array of EVR data, format the EVR's message attribute
printf format strings and split the byte array into appropriately
sized chunks. Supports most format strings containing length and type
fields.
Args:
evr_hist_data: A bytearray of EVR data. Bytes are expected to be in
MSB ordering.
Example formatting::
# This is the character '!', string 'Foo', and int '4279317316'
bytearray([0x21, 0x46, 0x6f, 0x6f, 0x00, 0xff, 0x11, 0x33, 0x44])
Returns:
The EVR's message string formatted with the EVR data or the
unformatted EVR message string if there are no valid format
strings present in it.
Raises:
ValueError: When the bytearray cannot be fully processed with the
specified format strings. This is usually a result of the
expected data length and the byte array length not matching.
]
variable[size_formatter_info] assign[=] dictionary[[<ast.Constant object at 0x7da204620df0>, <ast.Constant object at 0x7da204622350>, <ast.Constant object at 0x7da204620160>, <ast.Constant object at 0x7da204622050>, <ast.Constant object at 0x7da204622ef0>, <ast.Constant object at 0x7da204620100>, <ast.Constant object at 0x7da204622ce0>, <ast.Constant object at 0x7da2046204f0>, <ast.Constant object at 0x7da2046238b0>, <ast.Constant object at 0x7da2046209a0>, <ast.Constant object at 0x7da204621f00>, <ast.Constant object at 0x7da204621d20>, <ast.Constant object at 0x7da204620d60>], [<ast.UnaryOp object at 0x7da204622b30>, <ast.Constant object at 0x7da204621990>, <ast.Constant object at 0x7da2046216f0>, <ast.Constant object at 0x7da204620490>, <ast.Constant object at 0x7da204623fa0>, <ast.Constant object at 0x7da204623d90>, <ast.Constant object at 0x7da204620a60>, <ast.Constant object at 0x7da204623b80>, <ast.Constant object at 0x7da204620bb0>, <ast.Constant object at 0x7da2046215d0>, <ast.Constant object at 0x7da204622140>, <ast.Constant object at 0x7da204623fd0>, <ast.Constant object at 0x7da204620a30>]]
variable[type_formatter_info] assign[=] dictionary[[<ast.Constant object at 0x7da2046236d0>, <ast.Constant object at 0x7da204623af0>, <ast.Constant object at 0x7da204620280>, <ast.Constant object at 0x7da204621840>, <ast.Constant object at 0x7da204622d40>, <ast.Constant object at 0x7da204623460>, <ast.Constant object at 0x7da204620a90>, <ast.Constant object at 0x7da204621bd0>], [<ast.Constant object at 0x7da204621180>, <ast.Constant object at 0x7da2046239d0>, <ast.Constant object at 0x7da204620760>, <ast.Constant object at 0x7da2046201f0>, <ast.Constant object at 0x7da204623640>, <ast.Constant object at 0x7da2046228f0>, <ast.Constant object at 0x7da2046231c0>, <ast.Constant object at 0x7da2046227a0>]]
variable[formatters] assign[=] call[name[re].findall, parameter[constant[%(?:\d+\$)?([cdieEfgGosuxXhlL]+)], name[self]._message]]
variable[cur_byte_index] assign[=] constant[0]
variable[data_chunks] assign[=] list[[]]
for taget[name[f]] in starred[name[formatters]] begin[:]
variable[f_size_char] assign[=] call[name[f]][<ast.UnaryOp object at 0x7da2046220b0>]
if compare[call[name[len], parameter[name[f]]] greater[>] constant[1]] begin[:]
variable[f_size_char] assign[=] call[name[f]][<ast.Slice object at 0x7da204620b80>]
variable[fsize] assign[=] call[name[size_formatter_info]][call[name[f_size_char].lower, parameter[]]]
<ast.Try object at 0x7da2046224d0>
variable[cur_byte_index] assign[=] name[end_index]
if compare[name[f] equal[==] constant[s]] begin[:]
<ast.AugAssign object at 0x7da207f99c90>
if compare[call[name[len], parameter[name[formatters]]] equal[==] constant[0]] begin[:]
return[name[self]._message]
|
keyword[def] identifier[format_message] ( identifier[self] , identifier[evr_hist_data] ):
literal[string]
identifier[size_formatter_info] ={
literal[string] :- literal[int] ,
literal[string] : literal[int] ,
literal[string] : literal[int] ,
literal[string] : literal[int] ,
literal[string] : literal[int] ,
literal[string] : literal[int] ,
literal[string] : literal[int] ,
literal[string] : literal[int] ,
literal[string] : literal[int] ,
literal[string] : literal[int] ,
literal[string] : literal[int] ,
literal[string] : literal[int] ,
literal[string] : literal[int] ,
}
identifier[type_formatter_info] ={
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[string] ,
}
identifier[formatters] = identifier[re] . identifier[findall] ( literal[string] , identifier[self] . identifier[_message] )
identifier[cur_byte_index] = literal[int]
identifier[data_chunks] =[]
keyword[for] identifier[f] keyword[in] identifier[formatters] :
identifier[f_size_char] = identifier[f_type] = identifier[f] [- literal[int] ]
keyword[if] identifier[len] ( identifier[f] )> literal[int] :
identifier[f_size_char] = identifier[f] [:- literal[int] ]
identifier[fsize] = identifier[size_formatter_info] [ identifier[f_size_char] . identifier[lower] ()]
keyword[try] :
keyword[if] identifier[f_type] != literal[string] :
identifier[end_index] = identifier[cur_byte_index] + identifier[fsize]
identifier[fstr] = identifier[type_formatter_info] [ identifier[f_type] . identifier[lower] ()]. identifier[format] ( identifier[fsize] * literal[int] )
keyword[if] identifier[fsize] == literal[int] keyword[and] literal[string] keyword[in] identifier[fstr] :
identifier[fstr] = identifier[fstr] [ literal[int] :]
identifier[d] = identifier[dtype] . identifier[PrimitiveType] ( identifier[fstr] ). identifier[decode] (
identifier[evr_hist_data] [ identifier[cur_byte_index] : identifier[end_index] ]
)
keyword[else] :
identifier[end_index] = identifier[str] ( identifier[evr_hist_data] ). identifier[index] ( literal[string] , identifier[cur_byte_index] )
identifier[d] = identifier[str] ( identifier[evr_hist_data] [ identifier[cur_byte_index] : identifier[end_index] ])
identifier[data_chunks] . identifier[append] ( identifier[d] )
keyword[except] :
identifier[msg] = literal[string] . identifier[format] ( identifier[evr_hist_data] )
identifier[log] . identifier[error] ( identifier[msg] )
keyword[raise] identifier[ValueError] ( identifier[msg] )
identifier[cur_byte_index] = identifier[end_index]
keyword[if] identifier[f] == literal[string] :
identifier[cur_byte_index] += literal[int]
keyword[if] identifier[len] ( identifier[formatters] )== literal[int] :
keyword[return] identifier[self] . identifier[_message]
keyword[else] :
identifier[msg] = identifier[self] . identifier[_message]
keyword[for] identifier[f] keyword[in] identifier[formatters] :
keyword[if] identifier[len] ( identifier[f] )> literal[int] :
identifier[msg] = identifier[msg] . identifier[replace] ( literal[string] . identifier[format] ( identifier[f] ), literal[string] . identifier[format] ( identifier[f] [- literal[int] ]))
keyword[return] identifier[msg] % identifier[tuple] ( identifier[data_chunks] )
|
def format_message(self, evr_hist_data):
""" Format EVR message with EVR data
Given a byte array of EVR data, format the EVR's message attribute
printf format strings and split the byte array into appropriately
sized chunks. Supports most format strings containing length and type
fields.
Args:
evr_hist_data: A bytearray of EVR data. Bytes are expected to be in
MSB ordering.
Example formatting::
# This is the character '!', string 'Foo', and int '4279317316'
bytearray([0x21, 0x46, 0x6f, 0x6f, 0x00, 0xff, 0x11, 0x33, 0x44])
Returns:
The EVR's message string formatted with the EVR data or the
unformatted EVR message string if there are no valid format
strings present in it.
Raises:
ValueError: When the bytearray cannot be fully processed with the
specified format strings. This is usually a result of the
expected data length and the byte array length not matching.
"""
size_formatter_info = {'s': -1, 'c': 1, 'i': 4, 'd': 4, 'u': 4, 'x': 4, 'hh': 1, 'h': 2, 'l': 4, 'll': 8, 'f': 8, 'g': 8, 'e': 8}
type_formatter_info = {'c': 'U{}', 'i': 'MSB_I{}', 'd': 'MSB_I{}', 'u': 'MSB_U{}', 'f': 'MSB_D{}', 'e': 'MSB_D{}', 'g': 'MSB_D{}', 'x': 'MSB_U{}'}
formatters = re.findall('%(?:\\d+\\$)?([cdieEfgGosuxXhlL]+)', self._message)
cur_byte_index = 0
data_chunks = []
for f in formatters:
# If the format string we found is > 1 character we know that a length
# field is included and we need to adjust our sizing accordingly.
f_size_char = f_type = f[-1]
if len(f) > 1:
f_size_char = f[:-1] # depends on [control=['if'], data=[]]
fsize = size_formatter_info[f_size_char.lower()]
try:
if f_type != 's':
end_index = cur_byte_index + fsize
fstr = type_formatter_info[f_type.lower()].format(fsize * 8)
# Type formatting can give us incorrect format strings when
                    # a size formatter promotes a smaller data type. For instance,
# 'hhu' says we'll promote a char (1 byte) to an unsigned
# int for display. Here, the type format string would be
# incorrectly set to 'MSB_U8' if we didn't correct.
if fsize == 1 and 'MSB_' in fstr:
fstr = fstr[4:] # depends on [control=['if'], data=[]]
d = dtype.PrimitiveType(fstr).decode(evr_hist_data[cur_byte_index:end_index]) # depends on [control=['if'], data=['f_type']]
else:
# Some formatters have an undefined data size (such as strings)
# and require additional processing to determine the length of
# the data and decode data.
end_index = str(evr_hist_data).index('\x00', cur_byte_index)
d = str(evr_hist_data[cur_byte_index:end_index])
data_chunks.append(d) # depends on [control=['try'], data=[]]
        except Exception:
msg = 'Unable to format EVR Message with data {}'.format(evr_hist_data)
log.error(msg)
raise ValueError(msg) # depends on [control=['except'], data=[]]
cur_byte_index = end_index
# If we were formatting a string we need to add another index offset
# to exclude the null terminator.
if f == 's':
cur_byte_index += 1 # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['f']]
# Format and return the EVR message if formatters were present, otherwise
# just return the EVR message as is.
if len(formatters) == 0:
return self._message # depends on [control=['if'], data=[]]
else:
# Python format strings cannot handle size formatter information. So something
# such as %llu needs to be adjusted to be a valid identifier in python by
# removing the size formatter.
msg = self._message
for f in formatters:
if len(f) > 1:
msg = msg.replace('%{}'.format(f), '%{}'.format(f[-1])) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['f']]
return msg % tuple(data_chunks)
|
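To make the chunking concrete, here is a standalone decode of the docstring's example bytes using plain struct in place of the dtype.PrimitiveType helper; the message string '%c %s %u' is an assumption chosen to pair one formatter with each chunk:
import re
import struct

# The docstring's example: '!', 'Foo\x00', and the MSB uint32 4279317316.
data = bytearray([0x21, 0x46, 0x6F, 0x6F, 0x00, 0xFF, 0x11, 0x33, 0x44])
message = '%c %s %u'

chunks, i = [], 0
for f in re.findall('%(?:\\d+\\$)?([cdieEfgGosuxXhlL]+)', message):
    if f[-1] == 's':
        end = data.index(0, i)           # strings run to the NUL terminator
        chunks.append(data[i:end].decode('ascii'))
        i = end + 1                      # skip the terminator
    elif f[-1] == 'c':
        chunks.append(chr(data[i]))
        i += 1
    else:
        chunks.append(struct.unpack('>I', bytes(data[i:i + 4]))[0])  # MSB_U32
        i += 4

print(message % tuple(chunks))  # ! Foo 4279317316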
def dump(self):
"""Return the object itself."""
return {
'title': self.title,
'issue_id': self.issue_id,
'reporter': self.reporter,
'assignee': self.assignee,
'status': self.status,
'product': self.product,
'component': self.component,
'created_at': self.created_at,
'updated_at': self.updated_at,
'closed_at': self.closed_at,
'status_code': self.status_code
}
|
def function[dump, parameter[self]]:
constant[Return the object itself.]
return[dictionary[[<ast.Constant object at 0x7da1b0fc78b0>, <ast.Constant object at 0x7da1b0fc6890>, <ast.Constant object at 0x7da1b0d3e1d0>, <ast.Constant object at 0x7da1b0d3ea40>, <ast.Constant object at 0x7da1b0d3c8b0>, <ast.Constant object at 0x7da1b0d3d5d0>, <ast.Constant object at 0x7da1b0d19d20>, <ast.Constant object at 0x7da1b0d1b5e0>, <ast.Constant object at 0x7da1b0d1af80>, <ast.Constant object at 0x7da1b0d1b340>, <ast.Constant object at 0x7da1b0d1afb0>], [<ast.Attribute object at 0x7da1b0d18820>, <ast.Attribute object at 0x7da1b0d1b0d0>, <ast.Attribute object at 0x7da1b0d1b610>, <ast.Attribute object at 0x7da1b0d1b880>, <ast.Attribute object at 0x7da1b0d18f10>, <ast.Attribute object at 0x7da1b0d18fd0>, <ast.Attribute object at 0x7da1b0d193f0>, <ast.Attribute object at 0x7da1b0d1b0a0>, <ast.Attribute object at 0x7da1b0d1ad40>, <ast.Attribute object at 0x7da1b0d19f00>, <ast.Attribute object at 0x7da1b0d18dc0>]]]
|
keyword[def] identifier[dump] ( identifier[self] ):
literal[string]
keyword[return] {
literal[string] : identifier[self] . identifier[title] ,
literal[string] : identifier[self] . identifier[issue_id] ,
literal[string] : identifier[self] . identifier[reporter] ,
literal[string] : identifier[self] . identifier[assignee] ,
literal[string] : identifier[self] . identifier[status] ,
literal[string] : identifier[self] . identifier[product] ,
literal[string] : identifier[self] . identifier[component] ,
literal[string] : identifier[self] . identifier[created_at] ,
literal[string] : identifier[self] . identifier[updated_at] ,
literal[string] : identifier[self] . identifier[closed_at] ,
literal[string] : identifier[self] . identifier[status_code]
}
|
def dump(self):
"""Return the object itself."""
return {'title': self.title, 'issue_id': self.issue_id, 'reporter': self.reporter, 'assignee': self.assignee, 'status': self.status, 'product': self.product, 'component': self.component, 'created_at': self.created_at, 'updated_at': self.updated_at, 'closed_at': self.closed_at, 'status_code': self.status_code}
|
def global_position_int_cov_encode(self, time_boot_ms, time_utc, estimator_type, lat, lon, alt, relative_alt, vx, vy, vz, covariance):
'''
The filtered global position (e.g. fused GPS and accelerometers). The
position is in GPS-frame (right-handed, Z-up). It is
designed as scaled integer message since the
resolution of float is not sufficient. NOTE: This
message is intended for onboard networks / companion
computers and higher-bandwidth links and optimized for
accuracy and completeness. Please use the
GLOBAL_POSITION_INT message for a minimal subset.
time_boot_ms : Timestamp (milliseconds since system boot) (uint32_t)
time_utc : Timestamp (microseconds since UNIX epoch) in UTC. 0 for unknown. Commonly filled by the precision time source of a GPS receiver. (uint64_t)
estimator_type : Class id of the estimator this estimate originated from. (uint8_t)
lat : Latitude, expressed as degrees * 1E7 (int32_t)
lon : Longitude, expressed as degrees * 1E7 (int32_t)
alt : Altitude in meters, expressed as * 1000 (millimeters), above MSL (int32_t)
relative_alt : Altitude above ground in meters, expressed as * 1000 (millimeters) (int32_t)
vx : Ground X Speed (Latitude), expressed as m/s (float)
vy : Ground Y Speed (Longitude), expressed as m/s (float)
vz : Ground Z Speed (Altitude), expressed as m/s (float)
covariance : Covariance matrix (first six entries are the first ROW, next six entries are the second row, etc.) (float)
'''
return MAVLink_global_position_int_cov_message(time_boot_ms, time_utc, estimator_type, lat, lon, alt, relative_alt, vx, vy, vz, covariance)
|
def function[global_position_int_cov_encode, parameter[self, time_boot_ms, time_utc, estimator_type, lat, lon, alt, relative_alt, vx, vy, vz, covariance]]:
constant[
The filtered global position (e.g. fused GPS and accelerometers). The
position is in GPS-frame (right-handed, Z-up). It is
designed as scaled integer message since the
resolution of float is not sufficient. NOTE: This
message is intended for onboard networks / companion
computers and higher-bandwidth links and optimized for
accuracy and completeness. Please use the
GLOBAL_POSITION_INT message for a minimal subset.
time_boot_ms : Timestamp (milliseconds since system boot) (uint32_t)
time_utc : Timestamp (microseconds since UNIX epoch) in UTC. 0 for unknown. Commonly filled by the precision time source of a GPS receiver. (uint64_t)
estimator_type : Class id of the estimator this estimate originated from. (uint8_t)
lat : Latitude, expressed as degrees * 1E7 (int32_t)
lon : Longitude, expressed as degrees * 1E7 (int32_t)
alt : Altitude in meters, expressed as * 1000 (millimeters), above MSL (int32_t)
relative_alt : Altitude above ground in meters, expressed as * 1000 (millimeters) (int32_t)
vx : Ground X Speed (Latitude), expressed as m/s (float)
vy : Ground Y Speed (Longitude), expressed as m/s (float)
vz : Ground Z Speed (Altitude), expressed as m/s (float)
covariance : Covariance matrix (first six entries are the first ROW, next six entries are the second row, etc.) (float)
]
return[call[name[MAVLink_global_position_int_cov_message], parameter[name[time_boot_ms], name[time_utc], name[estimator_type], name[lat], name[lon], name[alt], name[relative_alt], name[vx], name[vy], name[vz], name[covariance]]]]
|
keyword[def] identifier[global_position_int_cov_encode] ( identifier[self] , identifier[time_boot_ms] , identifier[time_utc] , identifier[estimator_type] , identifier[lat] , identifier[lon] , identifier[alt] , identifier[relative_alt] , identifier[vx] , identifier[vy] , identifier[vz] , identifier[covariance] ):
literal[string]
keyword[return] identifier[MAVLink_global_position_int_cov_message] ( identifier[time_boot_ms] , identifier[time_utc] , identifier[estimator_type] , identifier[lat] , identifier[lon] , identifier[alt] , identifier[relative_alt] , identifier[vx] , identifier[vy] , identifier[vz] , identifier[covariance] )
|
def global_position_int_cov_encode(self, time_boot_ms, time_utc, estimator_type, lat, lon, alt, relative_alt, vx, vy, vz, covariance):
"""
The filtered global position (e.g. fused GPS and accelerometers). The
position is in GPS-frame (right-handed, Z-up). It is
designed as scaled integer message since the
resolution of float is not sufficient. NOTE: This
message is intended for onboard networks / companion
computers and higher-bandwidth links and optimized for
accuracy and completeness. Please use the
GLOBAL_POSITION_INT message for a minimal subset.
time_boot_ms : Timestamp (milliseconds since system boot) (uint32_t)
time_utc : Timestamp (microseconds since UNIX epoch) in UTC. 0 for unknown. Commonly filled by the precision time source of a GPS receiver. (uint64_t)
estimator_type : Class id of the estimator this estimate originated from. (uint8_t)
lat : Latitude, expressed as degrees * 1E7 (int32_t)
lon : Longitude, expressed as degrees * 1E7 (int32_t)
alt : Altitude in meters, expressed as * 1000 (millimeters), above MSL (int32_t)
relative_alt : Altitude above ground in meters, expressed as * 1000 (millimeters) (int32_t)
vx : Ground X Speed (Latitude), expressed as m/s (float)
vy : Ground Y Speed (Longitude), expressed as m/s (float)
vz : Ground Z Speed (Altitude), expressed as m/s (float)
covariance : Covariance matrix (first six entries are the first ROW, next six entries are the second row, etc.) (float)
"""
return MAVLink_global_position_int_cov_message(time_boot_ms, time_utc, estimator_type, lat, lon, alt, relative_alt, vx, vy, vz, covariance)
|
def shards(self, add_shard=False):
"""Get a list of shards belonging to this instance.
:param bool add_shard: A boolean indicating whether to add a new shard to the specified
instance.
"""
url = self._service_url + 'shards/'
if add_shard:
response = requests.post(url, **self._instances._default_request_kwargs)
else:
response = requests.get(url, **self._instances._default_request_kwargs)
return response.json()
|
def function[shards, parameter[self, add_shard]]:
constant[Get a list of shards belonging to this instance.
:param bool add_shard: A boolean indicating whether to add a new shard to the specified
instance.
]
variable[url] assign[=] binary_operation[name[self]._service_url + constant[shards/]]
if name[add_shard] begin[:]
variable[response] assign[=] call[name[requests].post, parameter[name[url]]]
return[call[name[response].json, parameter[]]]
|
keyword[def] identifier[shards] ( identifier[self] , identifier[add_shard] = keyword[False] ):
literal[string]
identifier[url] = identifier[self] . identifier[_service_url] + literal[string]
keyword[if] identifier[add_shard] :
identifier[response] = identifier[requests] . identifier[post] ( identifier[url] ,** identifier[self] . identifier[_instances] . identifier[_default_request_kwargs] )
keyword[else] :
identifier[response] = identifier[requests] . identifier[get] ( identifier[url] ,** identifier[self] . identifier[_instances] . identifier[_default_request_kwargs] )
keyword[return] identifier[response] . identifier[json] ()
|
def shards(self, add_shard=False):
"""Get a list of shards belonging to this instance.
:param bool add_shard: A boolean indicating whether to add a new shard to the specified
instance.
"""
url = self._service_url + 'shards/'
if add_shard:
response = requests.post(url, **self._instances._default_request_kwargs) # depends on [control=['if'], data=[]]
else:
response = requests.get(url, **self._instances._default_request_kwargs)
return response.json()
|
def get_SCAT_box(slope, x_mean, y_mean, beta_threshold = .1):
"""
takes in data and returns information about SCAT box:
the largest possible x_value, the largest possible y_value,
and functions for the two bounding lines of the box
"""
# if beta_threshold is -999, that means null
if beta_threshold == -999:
beta_threshold = .1
slope_err_threshold = abs(slope) * beta_threshold
x, y = x_mean, y_mean
# get lines that pass through mass center, with opposite slope
slope1 = slope + (2* slope_err_threshold)
line1_y_int = y - (slope1 * x)
line1_x_int = -1 * (old_div(line1_y_int, slope1))
slope2 = slope - (2 * slope_err_threshold)
line2_y_int = y - (slope2 * x)
line2_x_int = -1 * (old_div(line2_y_int, slope2))
# l1_y_int and l2_x_int form the bottom line of the box
# l2_y_int and l1_x_int form the top line of the box
# print "_diagonal line1:", (0, line2_y_int), (line2_x_int, 0), (x, y)
# print "_diagonal line2:", (0, line1_y_int), (line1_x_int, 0), (x, y)
# print "_bottom line:", [(0, line1_y_int), (line2_x_int, 0)]
# print "_top line:", [(0, line2_y_int), (line1_x_int, 0)]
low_bound = [(0, line1_y_int), (line2_x_int, 0)]
high_bound = [(0, line2_y_int), (line1_x_int, 0)]
x_max = high_bound[1][0]#
y_max = high_bound[0][1]
# function for low_bound
low_slope = old_div((low_bound[0][1] - low_bound[1][1]), (low_bound[0][0] - low_bound[1][0])) #
low_y_int = low_bound[0][1]
def low_bound(x):
y = low_slope * x + low_y_int
return y
# function for high_bound
high_slope = old_div((high_bound[0][1] - high_bound[1][1]), (high_bound[0][0] - high_bound[1][0])) # y_0-y_1/x_0-x_1
high_y_int = high_bound[0][1]
def high_bound(x):
y = high_slope * x + high_y_int
return y
high_line = [high_y_int, high_slope]
low_line = [low_y_int, low_slope]
return low_bound, high_bound, x_max, y_max, low_line, high_line
|
def function[get_SCAT_box, parameter[slope, x_mean, y_mean, beta_threshold]]:
constant[
takes in data and returns information about SCAT box:
the largest possible x_value, the largest possible y_value,
and functions for the two bounding lines of the box
]
if compare[name[beta_threshold] equal[==] <ast.UnaryOp object at 0x7da204345990>] begin[:]
variable[beta_threshold] assign[=] constant[0.1]
variable[slope_err_threshold] assign[=] binary_operation[call[name[abs], parameter[name[slope]]] * name[beta_threshold]]
<ast.Tuple object at 0x7da204345000> assign[=] tuple[[<ast.Name object at 0x7da204347160>, <ast.Name object at 0x7da204346f20>]]
variable[slope1] assign[=] binary_operation[name[slope] + binary_operation[constant[2] * name[slope_err_threshold]]]
variable[line1_y_int] assign[=] binary_operation[name[y] - binary_operation[name[slope1] * name[x]]]
variable[line1_x_int] assign[=] binary_operation[<ast.UnaryOp object at 0x7da204346c20> * call[name[old_div], parameter[name[line1_y_int], name[slope1]]]]
variable[slope2] assign[=] binary_operation[name[slope] - binary_operation[constant[2] * name[slope_err_threshold]]]
variable[line2_y_int] assign[=] binary_operation[name[y] - binary_operation[name[slope2] * name[x]]]
variable[line2_x_int] assign[=] binary_operation[<ast.UnaryOp object at 0x7da204345e10> * call[name[old_div], parameter[name[line2_y_int], name[slope2]]]]
variable[low_bound] assign[=] list[[<ast.Tuple object at 0x7da204344be0>, <ast.Tuple object at 0x7da204344040>]]
variable[high_bound] assign[=] list[[<ast.Tuple object at 0x7da204347250>, <ast.Tuple object at 0x7da204344130>]]
variable[x_max] assign[=] call[call[name[high_bound]][constant[1]]][constant[0]]
variable[y_max] assign[=] call[call[name[high_bound]][constant[0]]][constant[1]]
variable[low_slope] assign[=] call[name[old_div], parameter[binary_operation[call[call[name[low_bound]][constant[0]]][constant[1]] - call[call[name[low_bound]][constant[1]]][constant[1]]], binary_operation[call[call[name[low_bound]][constant[0]]][constant[0]] - call[call[name[low_bound]][constant[1]]][constant[0]]]]]
variable[low_y_int] assign[=] call[call[name[low_bound]][constant[0]]][constant[1]]
def function[low_bound, parameter[x]]:
variable[y] assign[=] binary_operation[binary_operation[name[low_slope] * name[x]] + name[low_y_int]]
return[name[y]]
variable[high_slope] assign[=] call[name[old_div], parameter[binary_operation[call[call[name[high_bound]][constant[0]]][constant[1]] - call[call[name[high_bound]][constant[1]]][constant[1]]], binary_operation[call[call[name[high_bound]][constant[0]]][constant[0]] - call[call[name[high_bound]][constant[1]]][constant[0]]]]]
variable[high_y_int] assign[=] call[call[name[high_bound]][constant[0]]][constant[1]]
def function[high_bound, parameter[x]]:
variable[y] assign[=] binary_operation[binary_operation[name[high_slope] * name[x]] + name[high_y_int]]
return[name[y]]
variable[high_line] assign[=] list[[<ast.Name object at 0x7da18f00de70>, <ast.Name object at 0x7da18f00ea40>]]
variable[low_line] assign[=] list[[<ast.Name object at 0x7da18f00cb20>, <ast.Name object at 0x7da18f00f610>]]
return[tuple[[<ast.Name object at 0x7da18f00cee0>, <ast.Name object at 0x7da18f00d870>, <ast.Name object at 0x7da18f00feb0>, <ast.Name object at 0x7da18f00cd90>, <ast.Name object at 0x7da18f00f550>, <ast.Name object at 0x7da18f00f0d0>]]]
|
keyword[def] identifier[get_SCAT_box] ( identifier[slope] , identifier[x_mean] , identifier[y_mean] , identifier[beta_threshold] = literal[int] ):
literal[string]
keyword[if] identifier[beta_threshold] ==- literal[int] :
identifier[beta_threshold] = literal[int]
identifier[slope_err_threshold] = identifier[abs] ( identifier[slope] )* identifier[beta_threshold]
identifier[x] , identifier[y] = identifier[x_mean] , identifier[y_mean]
identifier[slope1] = identifier[slope] +( literal[int] * identifier[slope_err_threshold] )
identifier[line1_y_int] = identifier[y] -( identifier[slope1] * identifier[x] )
identifier[line1_x_int] =- literal[int] *( identifier[old_div] ( identifier[line1_y_int] , identifier[slope1] ))
identifier[slope2] = identifier[slope] -( literal[int] * identifier[slope_err_threshold] )
identifier[line2_y_int] = identifier[y] -( identifier[slope2] * identifier[x] )
identifier[line2_x_int] =- literal[int] *( identifier[old_div] ( identifier[line2_y_int] , identifier[slope2] ))
identifier[low_bound] =[( literal[int] , identifier[line1_y_int] ),( identifier[line2_x_int] , literal[int] )]
identifier[high_bound] =[( literal[int] , identifier[line2_y_int] ),( identifier[line1_x_int] , literal[int] )]
identifier[x_max] = identifier[high_bound] [ literal[int] ][ literal[int] ]
identifier[y_max] = identifier[high_bound] [ literal[int] ][ literal[int] ]
identifier[low_slope] = identifier[old_div] (( identifier[low_bound] [ literal[int] ][ literal[int] ]- identifier[low_bound] [ literal[int] ][ literal[int] ]),( identifier[low_bound] [ literal[int] ][ literal[int] ]- identifier[low_bound] [ literal[int] ][ literal[int] ]))
identifier[low_y_int] = identifier[low_bound] [ literal[int] ][ literal[int] ]
keyword[def] identifier[low_bound] ( identifier[x] ):
identifier[y] = identifier[low_slope] * identifier[x] + identifier[low_y_int]
keyword[return] identifier[y]
identifier[high_slope] = identifier[old_div] (( identifier[high_bound] [ literal[int] ][ literal[int] ]- identifier[high_bound] [ literal[int] ][ literal[int] ]),( identifier[high_bound] [ literal[int] ][ literal[int] ]- identifier[high_bound] [ literal[int] ][ literal[int] ]))
identifier[high_y_int] = identifier[high_bound] [ literal[int] ][ literal[int] ]
keyword[def] identifier[high_bound] ( identifier[x] ):
identifier[y] = identifier[high_slope] * identifier[x] + identifier[high_y_int]
keyword[return] identifier[y]
identifier[high_line] =[ identifier[high_y_int] , identifier[high_slope] ]
identifier[low_line] =[ identifier[low_y_int] , identifier[low_slope] ]
keyword[return] identifier[low_bound] , identifier[high_bound] , identifier[x_max] , identifier[y_max] , identifier[low_line] , identifier[high_line]
|
def get_SCAT_box(slope, x_mean, y_mean, beta_threshold=0.1):
"""
takes in data and returns information about SCAT box:
the largest possible x_value, the largest possible y_value,
and functions for the two bounding lines of the box
"""
# if beta_threshold is -999, that means null
if beta_threshold == -999:
beta_threshold = 0.1 # depends on [control=['if'], data=['beta_threshold']]
slope_err_threshold = abs(slope) * beta_threshold
(x, y) = (x_mean, y_mean)
# get lines that pass through mass center, with opposite slope
slope1 = slope + 2 * slope_err_threshold
line1_y_int = y - slope1 * x
line1_x_int = -1 * old_div(line1_y_int, slope1)
slope2 = slope - 2 * slope_err_threshold
line2_y_int = y - slope2 * x
line2_x_int = -1 * old_div(line2_y_int, slope2)
# l1_y_int and l2_x_int form the bottom line of the box
# l2_y_int and l1_x_int form the top line of the box
# print "_diagonal line1:", (0, line2_y_int), (line2_x_int, 0), (x, y)
# print "_diagonal line2:", (0, line1_y_int), (line1_x_int, 0), (x, y)
# print "_bottom line:", [(0, line1_y_int), (line2_x_int, 0)]
# print "_top line:", [(0, line2_y_int), (line1_x_int, 0)]
low_bound = [(0, line1_y_int), (line2_x_int, 0)]
high_bound = [(0, line2_y_int), (line1_x_int, 0)]
x_max = high_bound[1][0] #
y_max = high_bound[0][1]
# function for low_bound
low_slope = old_div(low_bound[0][1] - low_bound[1][1], low_bound[0][0] - low_bound[1][0]) #
low_y_int = low_bound[0][1]
def low_bound(x):
y = low_slope * x + low_y_int
return y
# function for high_bound
high_slope = old_div(high_bound[0][1] - high_bound[1][1], high_bound[0][0] - high_bound[1][0]) # y_0-y_1/x_0-x_1
high_y_int = high_bound[0][1]
def high_bound(x):
y = high_slope * x + high_y_int
return y
high_line = [high_y_int, high_slope]
low_line = [low_y_int, low_slope]
return (low_bound, high_bound, x_max, y_max, low_line, high_line)
|
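A quick worked example of the box geometry, substituting plain float division for old_div (equivalent here because every operand is a float); the input values are arbitrary:
# slope = -1 with mass center (0.5, 0.5) and the default beta_threshold of 0.1
slope, x, y = -1.0, 0.5, 0.5
err = abs(slope) * 0.1                              # slope_err_threshold = 0.1
slope1, slope2 = slope + 2 * err, slope - 2 * err   # -0.8 and -1.2
line1_y_int = y - slope1 * x                        # 0.9
line1_x_int = -line1_y_int / slope1                 # 1.125
line2_y_int = y - slope2 * x                        # 1.1
line2_x_int = -line2_y_int / slope2                 # ~0.9167
# x_max and y_max come from the top line: (0, line2_y_int) to (line1_x_int, 0)
print('x_max =', line1_x_int, 'y_max =', line2_y_int)  # x_max = 1.125 y_max = 1.1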
def urljoin(*fragments):
"""Concatenate multi part strings into urls."""
# Strip possible already existent final slashes of fragments except for the last one
parts = [fragment.rstrip('/') for fragment in fragments[:len(fragments) - 1]]
parts.append(fragments[-1])
return '/'.join(parts)
|
def function[urljoin, parameter[]]:
constant[Concatenate multi part strings into urls.]
variable[parts] assign[=] <ast.ListComp object at 0x7da1b1218f10>
call[name[parts].append, parameter[call[name[fragments]][<ast.UnaryOp object at 0x7da1b121a140>]]]
return[call[constant[/].join, parameter[name[parts]]]]
|
keyword[def] identifier[urljoin] (* identifier[fragments] ):
literal[string]
identifier[parts] =[ identifier[fragment] . identifier[rstrip] ( literal[string] ) keyword[for] identifier[fragment] keyword[in] identifier[fragments] [: identifier[len] ( identifier[fragments] )- literal[int] ]]
identifier[parts] . identifier[append] ( identifier[fragments] [- literal[int] ])
keyword[return] literal[string] . identifier[join] ( identifier[parts] )
|
def urljoin(*fragments):
"""Concatenate multi part strings into urls."""
# Strip possible already existent final slashes of fragments except for the last one
parts = [fragment.rstrip('/') for fragment in fragments[:len(fragments) - 1]]
parts.append(fragments[-1])
return '/'.join(parts)
|
def gain_to_loss_ratio(self):
"""Gain-to-loss ratio, ratio of positive to negative returns.
Formula:
(n pos. / n neg.) * (avg. up-month return / avg. down-month return)
[Source: CFA Institute]
Returns
-------
float
"""
gt = self > 0
lt = self < 0
return (nansum(gt) / nansum(lt)) * (self[gt].mean() / self[lt].mean())
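
# Standalone NumPy sketch of the same arithmetic (assumed equivalent; the
# original is a method on an array-like returns series):
import numpy as np
r = np.array([0.02, -0.01, 0.03, -0.02, 0.01])
gt, lt = r > 0, r < 0
print((np.nansum(gt) / np.nansum(lt)) * (r[gt].mean() / r[lt].mean()))
# (3 / 2) * (0.02 / -0.015) = -2.0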
|
def function[gain_to_loss_ratio, parameter[self]]:
constant[Gain-to-loss ratio, ratio of positive to negative returns.
Formula:
(n pos. / n neg.) * (avg. up-month return / avg. down-month return)
[Source: CFA Institute]
Returns
-------
float
]
variable[gt] assign[=] compare[name[self] greater[>] constant[0]]
variable[lt] assign[=] compare[name[self] less[<] constant[0]]
return[binary_operation[binary_operation[call[name[nansum], parameter[name[gt]]] / call[name[nansum], parameter[name[lt]]]] * binary_operation[call[call[name[self]][name[gt]].mean, parameter[]] / call[call[name[self]][name[lt]].mean, parameter[]]]]]
|
keyword[def] identifier[gain_to_loss_ratio] ( identifier[self] ):
literal[string]
identifier[gt] = identifier[self] > literal[int]
identifier[lt] = identifier[self] < literal[int]
keyword[return] ( identifier[nansum] ( identifier[gt] )/ identifier[nansum] ( identifier[lt] ))*( identifier[self] [ identifier[gt] ]. identifier[mean] ()/ identifier[self] [ identifier[lt] ]. identifier[mean] ())
|
def gain_to_loss_ratio(self):
"""Gain-to-loss ratio, ratio of positive to negative returns.
Formula:
(n pos. / n neg.) * (avg. up-month return / avg. down-month return)
[Source: CFA Institute]
Returns
-------
float
"""
gt = self > 0
lt = self < 0
return nansum(gt) / nansum(lt) * (self[gt].mean() / self[lt].mean())
|
def get_path_attribute(obj, path):
"""Given a path like `related_record.related_record2.id`, this method
will be able to pull the value of ID from that object, returning None
if it doesn't exist.
Args:
obj (fleaker.db.Model):
The object to attempt to pull the value from
path (str):
The path to follow to pull the value from
Returns:
(int|str|None):
The value at the end of the path. None if it doesn't exist at
any point in the path.
"""
# Strip out ignored keys passed in
path = path.replace('original.', '').replace('current_user.', '')
attr_parts = path.split('.')
res = obj
try:
for part in attr_parts:
try:
res = getattr(res, part)
except AttributeError:
res = getattr(res.get(), part)
except (peewee.DoesNotExist, AttributeError):
return None
return res
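
# Illustrative sketch with plain stand-in objects (hypothetical; real callers
# pass peewee model instances):
class _Stub(object):
    pass
rec, rel = _Stub(), _Stub()
rel.id = 42
rec.related_record = rel
print(get_path_attribute(rec, 'related_record.id'))       # 42
print(get_path_attribute(rec, 'related_record.missing'))  # None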
|
def function[get_path_attribute, parameter[obj, path]]:
constant[Given a path like `related_record.related_record2.id`, this method
will be able to pull the value of ID from that object, returning None
if it doesn't exist.
Args:
obj (fleaker.db.Model):
The object to attempt to pull the value from
path (str):
The path to follow to pull the value from
Returns:
(int|str|None):
The value at the end of the path. None if it doesn't exist at
any point in the path.
]
variable[path] assign[=] call[call[name[path].replace, parameter[constant[original.], constant[]]].replace, parameter[constant[current_user.], constant[]]]
variable[attr_parts] assign[=] call[name[path].split, parameter[constant[.]]]
variable[res] assign[=] name[obj]
<ast.Try object at 0x7da18bc728f0>
return[name[res]]
|
keyword[def] identifier[get_path_attribute] ( identifier[obj] , identifier[path] ):
literal[string]
identifier[path] = identifier[path] . identifier[replace] ( literal[string] , literal[string] ). identifier[replace] ( literal[string] , literal[string] )
identifier[attr_parts] = identifier[path] . identifier[split] ( literal[string] )
identifier[res] = identifier[obj]
keyword[try] :
keyword[for] identifier[part] keyword[in] identifier[attr_parts] :
keyword[try] :
identifier[res] = identifier[getattr] ( identifier[res] , identifier[part] )
keyword[except] identifier[AttributeError] :
identifier[res] = identifier[getattr] ( identifier[res] . identifier[get] (), identifier[part] )
keyword[except] ( identifier[peewee] . identifier[DoesNotExist] , identifier[AttributeError] ):
keyword[return] keyword[None]
keyword[return] identifier[res]
|
def get_path_attribute(obj, path):
"""Given a path like `related_record.related_record2.id`, this method
will be able to pull the value of ID from that object, returning None
if it doesn't exist.
Args:
obj (fleaker.db.Model):
The object to attempt to pull the value from
path (str):
The path to follow to pull the value from
Returns:
(int|str|None):
The value at the end of the path. None if it doesn't exist at
any point in the path.
"""
# Strip out ignored keys passed in
path = path.replace('original.', '').replace('current_user.', '')
attr_parts = path.split('.')
res = obj
try:
for part in attr_parts:
try:
res = getattr(res, part) # depends on [control=['try'], data=[]]
except AttributeError:
res = getattr(res.get(), part) # depends on [control=['except'], data=[]] # depends on [control=['for'], data=['part']] # depends on [control=['try'], data=[]]
except (peewee.DoesNotExist, AttributeError):
return None # depends on [control=['except'], data=[]]
return res
|
def initialize_dag(self,
targets: Optional[List[str]] = [],
nested: bool = False) -> SoS_DAG:
'''Create a DAG by analyzing sections statically.'''
self.reset_dict()
dag = SoS_DAG(name=self.md5)
targets = sos_targets(targets)
self.add_forward_workflow(dag, self.workflow.sections)
#
if self.resolve_dangling_targets(dag, targets) == 0:
if targets:
raise UnknownTarget(f'No step to generate target {targets}.')
# now, there should be no dangling targets, let us connect nodes
dag.build()
# dag.show_nodes()
# trim the DAG if targets are specified
if targets:
dag = dag.subgraph_from(targets)
# check error
cycle = dag.circular_dependencies()
if cycle:
raise RuntimeError(
f'Circular dependency detected {cycle}. It is likely a later step produces input of a previous step.'
)
dag.save(env.config['output_dag'])
return dag
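
# Conceptual sketch (an assumption, using networkx directly) of the cycle
# check that raises RuntimeError above: a later step feeding an earlier one.
import networkx as nx
g = nx.DiGraph([('step_10', 'step_20'), ('step_20', 'step_10')])
print(list(nx.simple_cycles(g)))  # e.g. [['step_10', 'step_20']] -> a cycle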
|
def function[initialize_dag, parameter[self, targets, nested]]:
constant[Create a DAG by analyzing sections statically.]
call[name[self].reset_dict, parameter[]]
variable[dag] assign[=] call[name[SoS_DAG], parameter[]]
variable[targets] assign[=] call[name[sos_targets], parameter[name[targets]]]
call[name[self].add_forward_workflow, parameter[name[dag], name[self].workflow.sections]]
if compare[call[name[self].resolve_dangling_targets, parameter[name[dag], name[targets]]] equal[==] constant[0]] begin[:]
if name[targets] begin[:]
<ast.Raise object at 0x7da1b11c60b0>
call[name[dag].build, parameter[]]
if name[targets] begin[:]
variable[dag] assign[=] call[name[dag].subgraph_from, parameter[name[targets]]]
variable[cycle] assign[=] call[name[dag].circular_dependencies, parameter[]]
if name[cycle] begin[:]
<ast.Raise object at 0x7da1b11c6680>
call[name[dag].save, parameter[call[name[env].config][constant[output_dag]]]]
return[name[dag]]
|
keyword[def] identifier[initialize_dag] ( identifier[self] ,
identifier[targets] : identifier[Optional] [ identifier[List] [ identifier[str] ]]=[],
identifier[nested] : identifier[bool] = keyword[False] )-> identifier[SoS_DAG] :
literal[string]
identifier[self] . identifier[reset_dict] ()
identifier[dag] = identifier[SoS_DAG] ( identifier[name] = identifier[self] . identifier[md5] )
identifier[targets] = identifier[sos_targets] ( identifier[targets] )
identifier[self] . identifier[add_forward_workflow] ( identifier[dag] , identifier[self] . identifier[workflow] . identifier[sections] )
keyword[if] identifier[self] . identifier[resolve_dangling_targets] ( identifier[dag] , identifier[targets] )== literal[int] :
keyword[if] identifier[targets] :
keyword[raise] identifier[UnknownTarget] ( literal[string] )
identifier[dag] . identifier[build] ()
keyword[if] identifier[targets] :
identifier[dag] = identifier[dag] . identifier[subgraph_from] ( identifier[targets] )
identifier[cycle] = identifier[dag] . identifier[circular_dependencies] ()
keyword[if] identifier[cycle] :
keyword[raise] identifier[RuntimeError] (
literal[string]
)
identifier[dag] . identifier[save] ( identifier[env] . identifier[config] [ literal[string] ])
keyword[return] identifier[dag]
|
def initialize_dag(self, targets: Optional[List[str]]=[], nested: bool=False) -> SoS_DAG:
"""Create a DAG by analyzing sections statically."""
self.reset_dict()
dag = SoS_DAG(name=self.md5)
targets = sos_targets(targets)
self.add_forward_workflow(dag, self.workflow.sections)
#
if self.resolve_dangling_targets(dag, targets) == 0:
if targets:
raise UnknownTarget(f'No step to generate target {targets}.') # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
# now, there should be no dangling targets, let us connect nodes
dag.build()
# dag.show_nodes()
# trim the DAG if targets are specified
if targets:
dag = dag.subgraph_from(targets) # depends on [control=['if'], data=[]]
# check error
cycle = dag.circular_dependencies()
if cycle:
raise RuntimeError(f'Circular dependency detected {cycle}. It is likely a later step produces input of a previous step.') # depends on [control=['if'], data=[]]
dag.save(env.config['output_dag'])
return dag
|
def last(conf):
"""How long you have kept signing in."""
try:
v2ex = V2ex(conf.config)
v2ex.login()
last_date = v2ex.get_last()
click.echo(last_date)
except KeyError:
        click.echo('KeyError, please check your config file.')
except IndexError:
click.echo('Please check your username and password.')
|
def function[last, parameter[conf]]:
constant[How long you have kept signing in.]
<ast.Try object at 0x7da1b2662f20>
|
keyword[def] identifier[last] ( identifier[conf] ):
literal[string]
keyword[try] :
identifier[v2ex] = identifier[V2ex] ( identifier[conf] . identifier[config] )
identifier[v2ex] . identifier[login] ()
identifier[last_date] = identifier[v2ex] . identifier[get_last] ()
identifier[click] . identifier[echo] ( identifier[last_date] )
keyword[except] identifier[KeyError] :
identifier[click] . identifier[echo] ( literal[string] )
keyword[except] identifier[IndexError] :
identifier[click] . identifier[echo] ( literal[string] )
|
def last(conf):
"""How long you have kept signing in."""
try:
v2ex = V2ex(conf.config)
v2ex.login()
last_date = v2ex.get_last()
click.echo(last_date) # depends on [control=['try'], data=[]]
except KeyError:
        click.echo('KeyError, please check your config file.') # depends on [control=['except'], data=[]]
except IndexError:
click.echo('Please check your username and password.') # depends on [control=['except'], data=[]]
|
def _add_cli_param(params, key, value):
'''
Adds key and value as a command line parameter to params.
'''
if value is not None:
params.append('--{0}={1}'.format(key, value))
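
# Minimal usage sketch:
params = []
_add_cli_param(params, 'replicas', 3)
_add_cli_param(params, 'zone', None)  # skipped: None values are ignored
print(params)  # ['--replicas=3']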
|
def function[_add_cli_param, parameter[params, key, value]]:
constant[
Adds key and value as a command line parameter to params.
]
if compare[name[value] is_not constant[None]] begin[:]
call[name[params].append, parameter[call[constant[--{0}={1}].format, parameter[name[key], name[value]]]]]
|
keyword[def] identifier[_add_cli_param] ( identifier[params] , identifier[key] , identifier[value] ):
literal[string]
keyword[if] identifier[value] keyword[is] keyword[not] keyword[None] :
identifier[params] . identifier[append] ( literal[string] . identifier[format] ( identifier[key] , identifier[value] ))
|
def _add_cli_param(params, key, value):
"""
Adds key and value as a command line parameter to params.
"""
if value is not None:
params.append('--{0}={1}'.format(key, value)) # depends on [control=['if'], data=['value']]
|
def usb(
ctx, enable, disable, list, enable_all, touch_eject, no_touch_eject,
autoeject_timeout, chalresp_timeout, lock_code, force):
"""
Enable or disable applications over USB.
"""
def ensure_not_all_disabled(ctx, usb_enabled):
for app in APPLICATION:
if app & usb_enabled:
return
ctx.fail('Can not disable all applications over USB.')
if not (list or
enable_all or
enable or
disable or
touch_eject or
no_touch_eject or
autoeject_timeout or
chalresp_timeout):
ctx.fail('No configuration options chosen.')
enable = APPLICATION.__members__.keys() if enable_all else enable
_ensure_not_invalid_options(ctx, enable, disable)
if touch_eject and no_touch_eject:
ctx.fail('Invalid options.')
dev = ctx.obj['dev']
usb_supported = dev.config.usb_supported
usb_enabled = dev.config.usb_enabled
flags = dev.config.device_flags
if not usb_supported:
ctx.fail('USB interface not supported.')
if list:
_list_apps(ctx, usb_enabled)
if touch_eject:
flags |= FLAGS.MODE_FLAG_EJECT
if no_touch_eject:
flags &= ~FLAGS.MODE_FLAG_EJECT
for app in enable:
if APPLICATION[app] & usb_supported:
usb_enabled |= APPLICATION[app]
else:
ctx.fail('{} not supported over USB.'.format(app))
for app in disable:
if APPLICATION[app] & usb_supported:
usb_enabled &= ~APPLICATION[app]
else:
ctx.fail('{} not supported over USB.'.format(app))
ensure_not_all_disabled(ctx, usb_enabled)
f_confirm = '{}{}{}{}{}{}Configure USB interface?'.format(
'Enable {}.\n'.format(
', '.join(
[str(APPLICATION[app]) for app in enable])) if enable else '',
'Disable {}.\n'.format(
', '.join(
[str(APPLICATION[app]) for app in disable])) if disable else '',
'Set touch eject.\n' if touch_eject else '',
'Disable touch eject.\n' if no_touch_eject else '',
'Set autoeject timeout to {}.\n'.format(
autoeject_timeout) if autoeject_timeout else '',
'Set challenge-response timeout to {}.\n'.format(
chalresp_timeout) if chalresp_timeout else '')
is_locked = dev.config.configuration_locked
if force and is_locked and not lock_code:
ctx.fail('Configuration is locked - please supply the --lock-code '
'option.')
if lock_code and not is_locked:
ctx.fail('Configuration is not locked - please remove the '
'--lock-code option.')
force or click.confirm(f_confirm, abort=True, err=True)
if is_locked and not lock_code:
lock_code = prompt_lock_code()
if lock_code:
lock_code = _parse_lock_code(ctx, lock_code)
try:
dev.write_config(
device_config(
usb_enabled=usb_enabled,
flags=flags,
auto_eject_timeout=autoeject_timeout,
chalresp_timeout=chalresp_timeout),
reboot=True,
lock_key=lock_code)
except Exception as e:
logger.error('Failed to write config', exc_info=e)
ctx.fail('Failed to configure USB applications.')
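
# Self-contained sketch of the bitmask enable/disable pattern used above
# (APP and its values are hypothetical; the real APPLICATION enum comes from
# the surrounding library):
from enum import IntFlag

class APP(IntFlag):
    OTP = 0x01
    U2F = 0x02
    PIV = 0x10

usb_enabled = APP.OTP | APP.PIV
usb_enabled |= APP.U2F   # enable an application
usb_enabled &= ~APP.OTP  # disable an application
print(bool(usb_enabled & APP.U2F), bool(usb_enabled & APP.OTP))  # True False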
|
def function[usb, parameter[ctx, enable, disable, list, enable_all, touch_eject, no_touch_eject, autoeject_timeout, chalresp_timeout, lock_code, force]]:
constant[
Enable or disable applications over USB.
]
def function[ensure_not_all_disabled, parameter[ctx, usb_enabled]]:
for taget[name[app]] in starred[name[APPLICATION]] begin[:]
if binary_operation[name[app] <ast.BitAnd object at 0x7da2590d6b60> name[usb_enabled]] begin[:]
return[None]
call[name[ctx].fail, parameter[constant[Can not disable all applications over USB.]]]
if <ast.UnaryOp object at 0x7da2044c16f0> begin[:]
call[name[ctx].fail, parameter[constant[No configuration options chosen.]]]
variable[enable] assign[=] <ast.IfExp object at 0x7da18bc73f70>
call[name[_ensure_not_invalid_options], parameter[name[ctx], name[enable], name[disable]]]
if <ast.BoolOp object at 0x7da18bc723b0> begin[:]
call[name[ctx].fail, parameter[constant[Invalid options.]]]
variable[dev] assign[=] call[name[ctx].obj][constant[dev]]
variable[usb_supported] assign[=] name[dev].config.usb_supported
variable[usb_enabled] assign[=] name[dev].config.usb_enabled
variable[flags] assign[=] name[dev].config.device_flags
if <ast.UnaryOp object at 0x7da20c6e7e20> begin[:]
call[name[ctx].fail, parameter[constant[USB interface not supported.]]]
if name[list] begin[:]
call[name[_list_apps], parameter[name[ctx], name[usb_enabled]]]
if name[touch_eject] begin[:]
<ast.AugAssign object at 0x7da20c6e45e0>
if name[no_touch_eject] begin[:]
<ast.AugAssign object at 0x7da20c6e7820>
for taget[name[app]] in starred[name[enable]] begin[:]
if binary_operation[call[name[APPLICATION]][name[app]] <ast.BitAnd object at 0x7da2590d6b60> name[usb_supported]] begin[:]
<ast.AugAssign object at 0x7da20c6e55a0>
for taget[name[app]] in starred[name[disable]] begin[:]
if binary_operation[call[name[APPLICATION]][name[app]] <ast.BitAnd object at 0x7da2590d6b60> name[usb_supported]] begin[:]
<ast.AugAssign object at 0x7da20c6e7b20>
call[name[ensure_not_all_disabled], parameter[name[ctx], name[usb_enabled]]]
variable[f_confirm] assign[=] call[constant[{}{}{}{}{}{}Configure USB interface?].format, parameter[<ast.IfExp object at 0x7da20c6e55d0>, <ast.IfExp object at 0x7da20c6e52a0>, <ast.IfExp object at 0x7da20c6e7850>, <ast.IfExp object at 0x7da20c6e6020>, <ast.IfExp object at 0x7da1b12cb850>, <ast.IfExp object at 0x7da1b12c8d90>]]
variable[is_locked] assign[=] name[dev].config.configuration_locked
if <ast.BoolOp object at 0x7da1b12c9bd0> begin[:]
call[name[ctx].fail, parameter[constant[Configuration is locked - please supply the --lock-code option.]]]
if <ast.BoolOp object at 0x7da1b12c9030> begin[:]
call[name[ctx].fail, parameter[constant[Configuration is not locked - please remove the --lock-code option.]]]
<ast.BoolOp object at 0x7da1b12c9120>
if <ast.BoolOp object at 0x7da1b12c8310> begin[:]
variable[lock_code] assign[=] call[name[prompt_lock_code], parameter[]]
if name[lock_code] begin[:]
variable[lock_code] assign[=] call[name[_parse_lock_code], parameter[name[ctx], name[lock_code]]]
<ast.Try object at 0x7da1b12c90c0>
|
keyword[def] identifier[usb] (
identifier[ctx] , identifier[enable] , identifier[disable] , identifier[list] , identifier[enable_all] , identifier[touch_eject] , identifier[no_touch_eject] ,
identifier[autoeject_timeout] , identifier[chalresp_timeout] , identifier[lock_code] , identifier[force] ):
literal[string]
keyword[def] identifier[ensure_not_all_disabled] ( identifier[ctx] , identifier[usb_enabled] ):
keyword[for] identifier[app] keyword[in] identifier[APPLICATION] :
keyword[if] identifier[app] & identifier[usb_enabled] :
keyword[return]
identifier[ctx] . identifier[fail] ( literal[string] )
keyword[if] keyword[not] ( identifier[list] keyword[or]
identifier[enable_all] keyword[or]
identifier[enable] keyword[or]
identifier[disable] keyword[or]
identifier[touch_eject] keyword[or]
identifier[no_touch_eject] keyword[or]
identifier[autoeject_timeout] keyword[or]
identifier[chalresp_timeout] ):
identifier[ctx] . identifier[fail] ( literal[string] )
identifier[enable] = identifier[APPLICATION] . identifier[__members__] . identifier[keys] () keyword[if] identifier[enable_all] keyword[else] identifier[enable]
identifier[_ensure_not_invalid_options] ( identifier[ctx] , identifier[enable] , identifier[disable] )
keyword[if] identifier[touch_eject] keyword[and] identifier[no_touch_eject] :
identifier[ctx] . identifier[fail] ( literal[string] )
identifier[dev] = identifier[ctx] . identifier[obj] [ literal[string] ]
identifier[usb_supported] = identifier[dev] . identifier[config] . identifier[usb_supported]
identifier[usb_enabled] = identifier[dev] . identifier[config] . identifier[usb_enabled]
identifier[flags] = identifier[dev] . identifier[config] . identifier[device_flags]
keyword[if] keyword[not] identifier[usb_supported] :
identifier[ctx] . identifier[fail] ( literal[string] )
keyword[if] identifier[list] :
identifier[_list_apps] ( identifier[ctx] , identifier[usb_enabled] )
keyword[if] identifier[touch_eject] :
identifier[flags] |= identifier[FLAGS] . identifier[MODE_FLAG_EJECT]
keyword[if] identifier[no_touch_eject] :
identifier[flags] &=~ identifier[FLAGS] . identifier[MODE_FLAG_EJECT]
keyword[for] identifier[app] keyword[in] identifier[enable] :
keyword[if] identifier[APPLICATION] [ identifier[app] ]& identifier[usb_supported] :
identifier[usb_enabled] |= identifier[APPLICATION] [ identifier[app] ]
keyword[else] :
identifier[ctx] . identifier[fail] ( literal[string] . identifier[format] ( identifier[app] ))
keyword[for] identifier[app] keyword[in] identifier[disable] :
keyword[if] identifier[APPLICATION] [ identifier[app] ]& identifier[usb_supported] :
identifier[usb_enabled] &=~ identifier[APPLICATION] [ identifier[app] ]
keyword[else] :
identifier[ctx] . identifier[fail] ( literal[string] . identifier[format] ( identifier[app] ))
identifier[ensure_not_all_disabled] ( identifier[ctx] , identifier[usb_enabled] )
identifier[f_confirm] = literal[string] . identifier[format] (
literal[string] . identifier[format] (
literal[string] . identifier[join] (
[ identifier[str] ( identifier[APPLICATION] [ identifier[app] ]) keyword[for] identifier[app] keyword[in] identifier[enable] ])) keyword[if] identifier[enable] keyword[else] literal[string] ,
literal[string] . identifier[format] (
literal[string] . identifier[join] (
[ identifier[str] ( identifier[APPLICATION] [ identifier[app] ]) keyword[for] identifier[app] keyword[in] identifier[disable] ])) keyword[if] identifier[disable] keyword[else] literal[string] ,
literal[string] keyword[if] identifier[touch_eject] keyword[else] literal[string] ,
literal[string] keyword[if] identifier[no_touch_eject] keyword[else] literal[string] ,
literal[string] . identifier[format] (
identifier[autoeject_timeout] ) keyword[if] identifier[autoeject_timeout] keyword[else] literal[string] ,
literal[string] . identifier[format] (
identifier[chalresp_timeout] ) keyword[if] identifier[chalresp_timeout] keyword[else] literal[string] )
identifier[is_locked] = identifier[dev] . identifier[config] . identifier[configuration_locked]
keyword[if] identifier[force] keyword[and] identifier[is_locked] keyword[and] keyword[not] identifier[lock_code] :
identifier[ctx] . identifier[fail] ( literal[string]
literal[string] )
keyword[if] identifier[lock_code] keyword[and] keyword[not] identifier[is_locked] :
identifier[ctx] . identifier[fail] ( literal[string]
literal[string] )
identifier[force] keyword[or] identifier[click] . identifier[confirm] ( identifier[f_confirm] , identifier[abort] = keyword[True] , identifier[err] = keyword[True] )
keyword[if] identifier[is_locked] keyword[and] keyword[not] identifier[lock_code] :
identifier[lock_code] = identifier[prompt_lock_code] ()
keyword[if] identifier[lock_code] :
identifier[lock_code] = identifier[_parse_lock_code] ( identifier[ctx] , identifier[lock_code] )
keyword[try] :
identifier[dev] . identifier[write_config] (
identifier[device_config] (
identifier[usb_enabled] = identifier[usb_enabled] ,
identifier[flags] = identifier[flags] ,
identifier[auto_eject_timeout] = identifier[autoeject_timeout] ,
identifier[chalresp_timeout] = identifier[chalresp_timeout] ),
identifier[reboot] = keyword[True] ,
identifier[lock_key] = identifier[lock_code] )
keyword[except] identifier[Exception] keyword[as] identifier[e] :
identifier[logger] . identifier[error] ( literal[string] , identifier[exc_info] = identifier[e] )
identifier[ctx] . identifier[fail] ( literal[string] )
|
def usb(ctx, enable, disable, list, enable_all, touch_eject, no_touch_eject, autoeject_timeout, chalresp_timeout, lock_code, force):
"""
Enable or disable applications over USB.
"""
def ensure_not_all_disabled(ctx, usb_enabled):
for app in APPLICATION:
if app & usb_enabled:
return # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['app']]
ctx.fail('Can not disable all applications over USB.')
if not (list or enable_all or enable or disable or touch_eject or no_touch_eject or autoeject_timeout or chalresp_timeout):
ctx.fail('No configuration options chosen.') # depends on [control=['if'], data=[]]
enable = APPLICATION.__members__.keys() if enable_all else enable
_ensure_not_invalid_options(ctx, enable, disable)
if touch_eject and no_touch_eject:
ctx.fail('Invalid options.') # depends on [control=['if'], data=[]]
dev = ctx.obj['dev']
usb_supported = dev.config.usb_supported
usb_enabled = dev.config.usb_enabled
flags = dev.config.device_flags
if not usb_supported:
ctx.fail('USB interface not supported.') # depends on [control=['if'], data=[]]
if list:
_list_apps(ctx, usb_enabled) # depends on [control=['if'], data=[]]
if touch_eject:
flags |= FLAGS.MODE_FLAG_EJECT # depends on [control=['if'], data=[]]
if no_touch_eject:
flags &= ~FLAGS.MODE_FLAG_EJECT # depends on [control=['if'], data=[]]
for app in enable:
if APPLICATION[app] & usb_supported:
usb_enabled |= APPLICATION[app] # depends on [control=['if'], data=[]]
else:
ctx.fail('{} not supported over USB.'.format(app)) # depends on [control=['for'], data=['app']]
for app in disable:
if APPLICATION[app] & usb_supported:
usb_enabled &= ~APPLICATION[app] # depends on [control=['if'], data=[]]
else:
ctx.fail('{} not supported over USB.'.format(app)) # depends on [control=['for'], data=['app']]
ensure_not_all_disabled(ctx, usb_enabled)
f_confirm = '{}{}{}{}{}{}Configure USB interface?'.format('Enable {}.\n'.format(', '.join([str(APPLICATION[app]) for app in enable])) if enable else '', 'Disable {}.\n'.format(', '.join([str(APPLICATION[app]) for app in disable])) if disable else '', 'Set touch eject.\n' if touch_eject else '', 'Disable touch eject.\n' if no_touch_eject else '', 'Set autoeject timeout to {}.\n'.format(autoeject_timeout) if autoeject_timeout else '', 'Set challenge-response timeout to {}.\n'.format(chalresp_timeout) if chalresp_timeout else '')
is_locked = dev.config.configuration_locked
if force and is_locked and (not lock_code):
ctx.fail('Configuration is locked - please supply the --lock-code option.') # depends on [control=['if'], data=[]]
if lock_code and (not is_locked):
ctx.fail('Configuration is not locked - please remove the --lock-code option.') # depends on [control=['if'], data=[]]
force or click.confirm(f_confirm, abort=True, err=True)
if is_locked and (not lock_code):
lock_code = prompt_lock_code() # depends on [control=['if'], data=[]]
if lock_code:
lock_code = _parse_lock_code(ctx, lock_code) # depends on [control=['if'], data=[]]
try:
dev.write_config(device_config(usb_enabled=usb_enabled, flags=flags, auto_eject_timeout=autoeject_timeout, chalresp_timeout=chalresp_timeout), reboot=True, lock_key=lock_code) # depends on [control=['try'], data=[]]
except Exception as e:
logger.error('Failed to write config', exc_info=e)
ctx.fail('Failed to configure USB applications.') # depends on [control=['except'], data=['e']]
|
def post(self, request):
"""
Retrieve video IDs that are missing HLS profiles. This endpoint supports 2 types of input data:
    1. If we want a batch of video ids which are missing an HLS profile irrespective of their courses, the request
       data should be in the following format:
{
'batch_size': 50,
'offset': 0
}
    And the response will be in the following format:
{
'videos': ['video_id1', 'video_id2', 'video_id3', ... , video_id50],
'total': 300,
'offset': 50,
'batch_size': 50
}
2. If we want all the videos which are missing HLS profiles in a set of specific courses, the request data
       should be in the following format:
{
'courses': [
'course_id1',
'course_id2',
...
]
}
    And the response will be in the following format:
{
'videos': ['video_id1', 'video_id2', 'video_id3', ...]
}
"""
courses = request.data.get('courses')
batch_size = request.data.get('batch_size', 50)
offset = request.data.get('offset', 0)
if courses:
videos = (CourseVideo.objects.select_related('video')
.prefetch_related('video__encoded_videos', 'video__encoded_videos__profile')
.filter(course_id__in=courses, video__status='file_complete')
.exclude(video__encoded_videos__profile__profile_name='hls')
.values_list('video__edx_video_id', flat=True)
.distinct())
response = Response({'videos': videos}, status=status.HTTP_200_OK)
else:
videos = (Video.objects.prefetch_related('encoded_videos', 'encoded_videos__profile')
.filter(status='file_complete')
.exclude(encoded_videos__profile__profile_name='hls')
.order_by('id')
.values_list('edx_video_id', flat=True)
.distinct())
response = Response(
{
'videos': videos[offset: offset+batch_size],
'total': videos.count(),
'offset': offset,
'batch_size': batch_size,
},
status=status.HTTP_200_OK
)
return response
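
# Sketch of the paging arithmetic in the batched branch (IDs are
# illustrative):
videos = ['video_id_%d' % i for i in range(120)]
offset, batch_size = 50, 50
page = videos[offset:offset + batch_size]
print(page[0], page[-1], len(videos))  # video_id_50 video_id_99 120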
|
def function[post, parameter[self, request]]:
constant[
Retrieve video IDs that are missing HLS profiles. This endpoint supports 2 types of input data:
    1. If we want a batch of video ids which are missing an HLS profile irrespective of their courses, the request
    data should be in the following format:
{
'batch_size': 50,
'offset': 0
}
    And the response will be in the following format:
{
'videos': ['video_id1', 'video_id2', 'video_id3', ... , video_id50],
'total': 300,
'offset': 50,
'batch_size': 50
}
2. If we want all the videos which are missing HLS profiles in a set of specific courses, the request data
    should be in the following format:
{
'courses': [
'course_id1',
'course_id2',
...
]
}
    And the response will be in the following format:
{
'videos': ['video_id1', 'video_id2', 'video_id3', ...]
}
]
variable[courses] assign[=] call[name[request].data.get, parameter[constant[courses]]]
variable[batch_size] assign[=] call[name[request].data.get, parameter[constant[batch_size], constant[50]]]
variable[offset] assign[=] call[name[request].data.get, parameter[constant[offset], constant[0]]]
if name[courses] begin[:]
variable[videos] assign[=] call[call[call[call[call[call[name[CourseVideo].objects.select_related, parameter[constant[video]]].prefetch_related, parameter[constant[video__encoded_videos], constant[video__encoded_videos__profile]]].filter, parameter[]].exclude, parameter[]].values_list, parameter[constant[video__edx_video_id]]].distinct, parameter[]]
variable[response] assign[=] call[name[Response], parameter[dictionary[[<ast.Constant object at 0x7da1b03a46d0>], [<ast.Name object at 0x7da1b03a51e0>]]]]
return[name[response]]
|
keyword[def] identifier[post] ( identifier[self] , identifier[request] ):
literal[string]
identifier[courses] = identifier[request] . identifier[data] . identifier[get] ( literal[string] )
identifier[batch_size] = identifier[request] . identifier[data] . identifier[get] ( literal[string] , literal[int] )
identifier[offset] = identifier[request] . identifier[data] . identifier[get] ( literal[string] , literal[int] )
keyword[if] identifier[courses] :
identifier[videos] =( identifier[CourseVideo] . identifier[objects] . identifier[select_related] ( literal[string] )
. identifier[prefetch_related] ( literal[string] , literal[string] )
. identifier[filter] ( identifier[course_id__in] = identifier[courses] , identifier[video__status] = literal[string] )
. identifier[exclude] ( identifier[video__encoded_videos__profile__profile_name] = literal[string] )
. identifier[values_list] ( literal[string] , identifier[flat] = keyword[True] )
. identifier[distinct] ())
identifier[response] = identifier[Response] ({ literal[string] : identifier[videos] }, identifier[status] = identifier[status] . identifier[HTTP_200_OK] )
keyword[else] :
identifier[videos] =( identifier[Video] . identifier[objects] . identifier[prefetch_related] ( literal[string] , literal[string] )
. identifier[filter] ( identifier[status] = literal[string] )
. identifier[exclude] ( identifier[encoded_videos__profile__profile_name] = literal[string] )
. identifier[order_by] ( literal[string] )
. identifier[values_list] ( literal[string] , identifier[flat] = keyword[True] )
. identifier[distinct] ())
identifier[response] = identifier[Response] (
{
literal[string] : identifier[videos] [ identifier[offset] : identifier[offset] + identifier[batch_size] ],
literal[string] : identifier[videos] . identifier[count] (),
literal[string] : identifier[offset] ,
literal[string] : identifier[batch_size] ,
},
identifier[status] = identifier[status] . identifier[HTTP_200_OK]
)
keyword[return] identifier[response]
|
def post(self, request):
"""
Retrieve video IDs that are missing HLS profiles. This endpoint supports 2 types of input data:
        1. If we want a batch of video ids which are missing an HLS profile irrespective of their courses, the request
        data should be in the following format:
{
'batch_size': 50,
'offset': 0
}
        And the response will be in the following format:
{
'videos': ['video_id1', 'video_id2', 'video_id3', ... , video_id50],
'total': 300,
'offset': 50,
'batch_size': 50
}
2. If we want all the videos which are missing HLS profiles in a set of specific courses, the request data
        should be in the following format:
{
'courses': [
'course_id1',
'course_id2',
...
]
}
        And the response will be in the following format:
{
'videos': ['video_id1', 'video_id2', 'video_id3', ...]
}
"""
courses = request.data.get('courses')
batch_size = request.data.get('batch_size', 50)
offset = request.data.get('offset', 0)
if courses:
videos = CourseVideo.objects.select_related('video').prefetch_related('video__encoded_videos', 'video__encoded_videos__profile').filter(course_id__in=courses, video__status='file_complete').exclude(video__encoded_videos__profile__profile_name='hls').values_list('video__edx_video_id', flat=True).distinct()
response = Response({'videos': videos}, status=status.HTTP_200_OK) # depends on [control=['if'], data=[]]
else:
videos = Video.objects.prefetch_related('encoded_videos', 'encoded_videos__profile').filter(status='file_complete').exclude(encoded_videos__profile__profile_name='hls').order_by('id').values_list('edx_video_id', flat=True).distinct()
response = Response({'videos': videos[offset:offset + batch_size], 'total': videos.count(), 'offset': offset, 'batch_size': batch_size}, status=status.HTTP_200_OK)
return response
|
def is_visible(self, x, y):
"""
Return whether the specified location is on the visible screen.
:param x: The column (x coord) for the location to check.
:param y: The line (y coord) for the location to check.
"""
return ((x >= 0) and
(x <= self.width) and
(y >= self._start_line) and
(y < self._start_line + self.height))
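
# Minimal bounds check with a hypothetical 80x24 screen state:
class _Screen(object):
    width, height, _start_line = 80, 24, 0
    is_visible = is_visible

print(_Screen().is_visible(0, 0), _Screen().is_visible(0, 24))  # True False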
|
def function[is_visible, parameter[self, x, y]]:
constant[
Return whether the specified location is on the visible screen.
:param x: The column (x coord) for the location to check.
:param y: The line (y coord) for the location to check.
]
return[<ast.BoolOp object at 0x7da1b1d4ddb0>]
|
keyword[def] identifier[is_visible] ( identifier[self] , identifier[x] , identifier[y] ):
literal[string]
keyword[return] (( identifier[x] >= literal[int] ) keyword[and]
( identifier[x] <= identifier[self] . identifier[width] ) keyword[and]
( identifier[y] >= identifier[self] . identifier[_start_line] ) keyword[and]
( identifier[y] < identifier[self] . identifier[_start_line] + identifier[self] . identifier[height] ))
|
def is_visible(self, x, y):
"""
Return whether the specified location is on the visible screen.
:param x: The column (x coord) for the location to check.
:param y: The line (y coord) for the location to check.
"""
return x >= 0 and x <= self.width and (y >= self._start_line) and (y < self._start_line + self.height)
|
def _diff_cache_cluster(current, desired):
'''
If you need to enhance what modify_cache_cluster() considers when deciding what is to be
(or can be) updated, add it to 'modifiable' below. It's a dict mapping the param as used
in modify_cache_cluster() to that in describe_cache_clusters(). Any data fiddlery that
needs to be done to make the mappings meaningful should be done in the munging section
below as well.
This function will ONLY touch settings that are explicitly called out in 'desired' - any
settings which might have previously been changed from their 'default' values will not be
changed back simply by leaving them out of 'desired'. This is both intentional, and
much, much easier to code :)
'''
    ### The data formats are annoyingly (and as far as I can tell, unnecessarily)
### different - we have to munge to a common format to compare...
if current.get('SecurityGroups') is not None:
current['SecurityGroupIds'] = [s['SecurityGroupId'] for s in current['SecurityGroups']]
if current.get('CacheSecurityGroups') is not None:
current['CacheSecurityGroupNames'] = [c['CacheSecurityGroupName'] for c in current['CacheSecurityGroups']]
if current.get('NotificationConfiguration') is not None:
current['NotificationTopicArn'] = current['NotificationConfiguration']['TopicArn']
current['NotificationTopicStatus'] = current['NotificationConfiguration']['TopicStatus']
if current.get('CacheParameterGroup') is not None:
current['CacheParameterGroupName'] = current['CacheParameterGroup']['CacheParameterGroupName']
modifiable = {
'AutoMinorVersionUpgrade': 'AutoMinorVersionUpgrade',
'AZMode': 'AZMode',
'CacheNodeType': 'CacheNodeType',
'CacheNodeIdsToRemove': None,
'CacheParameterGroupName': 'CacheParameterGroupName',
'CacheSecurityGroupNames': 'CacheSecurityGroupNames',
'EngineVersion': 'EngineVersion',
'NewAvailabilityZones': None,
'NotificationTopicArn': 'NotificationTopicArn',
'NotificationTopicStatus': 'NotificationTopicStatus',
'NumCacheNodes': 'NumCacheNodes',
'PreferredMaintenanceWindow': 'PreferredMaintenanceWindow',
'SecurityGroupIds': 'SecurityGroupIds',
'SnapshotRetentionLimit': 'SnapshotRetentionLimit',
'SnapshotWindow': 'SnapshotWindow'
}
need_update = {}
for m, o in modifiable.items():
if m in desired:
if not o:
# Always pass these through - let AWS do the math...
need_update[m] = desired[m]
else:
if m in current:
# Equivalence testing works fine for current simple type comparisons
# This might need enhancement if more complex structures enter the picture
if current[m] != desired[m]:
need_update[m] = desired[m]
return need_update
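
# Usage sketch with trimmed dicts (field values are illustrative):
current = {'CacheNodeType': 'cache.t2.micro', 'NumCacheNodes': 1}
desired = {'CacheNodeType': 'cache.t2.small', 'NumCacheNodes': 1,
           'CacheNodeIdsToRemove': ['0002']}
print(_diff_cache_cluster(current, desired))
# only the changed value and the always-passed-through key remain:
# {'CacheNodeType': 'cache.t2.small', 'CacheNodeIdsToRemove': ['0002']}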
|
def function[_diff_cache_cluster, parameter[current, desired]]:
constant[
If you need to enhance what modify_cache_cluster() considers when deciding what is to be
(or can be) updated, add it to 'modifiable' below. It's a dict mapping the param as used
in modify_cache_cluster() to that in describe_cache_clusters(). Any data fiddlery that
needs to be done to make the mappings meaningful should be done in the munging section
below as well.
This function will ONLY touch settings that are explicitly called out in 'desired' - any
settings which might have previously been changed from their 'default' values will not be
changed back simply by leaving them out of 'desired'. This is both intentional, and
much, much easier to code :)
]
if compare[call[name[current].get, parameter[constant[SecurityGroups]]] is_not constant[None]] begin[:]
call[name[current]][constant[SecurityGroupIds]] assign[=] <ast.ListComp object at 0x7da207f9bca0>
if compare[call[name[current].get, parameter[constant[CacheSecurityGroups]]] is_not constant[None]] begin[:]
call[name[current]][constant[CacheSecurityGroupNames]] assign[=] <ast.ListComp object at 0x7da207f98d60>
if compare[call[name[current].get, parameter[constant[NotificationConfiguration]]] is_not constant[None]] begin[:]
call[name[current]][constant[NotificationTopicArn]] assign[=] call[call[name[current]][constant[NotificationConfiguration]]][constant[TopicArn]]
call[name[current]][constant[NotificationTopicStatus]] assign[=] call[call[name[current]][constant[NotificationConfiguration]]][constant[TopicStatus]]
if compare[call[name[current].get, parameter[constant[CacheParameterGroup]]] is_not constant[None]] begin[:]
call[name[current]][constant[CacheParameterGroupName]] assign[=] call[call[name[current]][constant[CacheParameterGroup]]][constant[CacheParameterGroupName]]
variable[modifiable] assign[=] dictionary[[<ast.Constant object at 0x7da1b2001360>, <ast.Constant object at 0x7da1b2001f90>, <ast.Constant object at 0x7da1b20014b0>, <ast.Constant object at 0x7da1b2001540>, <ast.Constant object at 0x7da1b2001b40>, <ast.Constant object at 0x7da1b2001990>, <ast.Constant object at 0x7da1b2002440>, <ast.Constant object at 0x7da1b2000c40>, <ast.Constant object at 0x7da1b2002290>, <ast.Constant object at 0x7da1b2002410>, <ast.Constant object at 0x7da1b2000c70>, <ast.Constant object at 0x7da1b2002350>, <ast.Constant object at 0x7da1b2000ca0>, <ast.Constant object at 0x7da1b2003190>, <ast.Constant object at 0x7da1b20022c0>], [<ast.Constant object at 0x7da1b2000dc0>, <ast.Constant object at 0x7da1b2003070>, <ast.Constant object at 0x7da1b20035b0>, <ast.Constant object at 0x7da1b2001870>, <ast.Constant object at 0x7da1b2001480>, <ast.Constant object at 0x7da1b2001900>, <ast.Constant object at 0x7da1b2003580>, <ast.Constant object at 0x7da1b20005b0>, <ast.Constant object at 0x7da1b20032b0>, <ast.Constant object at 0x7da1b2003250>, <ast.Constant object at 0x7da1b2001420>, <ast.Constant object at 0x7da1b2003be0>, <ast.Constant object at 0x7da1b2003c70>, <ast.Constant object at 0x7da1b2003ca0>, <ast.Constant object at 0x7da1b2003c40>]]
variable[need_update] assign[=] dictionary[[], []]
for taget[tuple[[<ast.Name object at 0x7da1b2000220>, <ast.Name object at 0x7da1b2000280>]]] in starred[call[name[modifiable].items, parameter[]]] begin[:]
if compare[name[m] in name[desired]] begin[:]
if <ast.UnaryOp object at 0x7da1b2002a10> begin[:]
call[name[need_update]][name[m]] assign[=] call[name[desired]][name[m]]
return[name[need_update]]
|
keyword[def] identifier[_diff_cache_cluster] ( identifier[current] , identifier[desired] ):
literal[string]
keyword[if] identifier[current] . identifier[get] ( literal[string] ) keyword[is] keyword[not] keyword[None] :
identifier[current] [ literal[string] ]=[ identifier[s] [ literal[string] ] keyword[for] identifier[s] keyword[in] identifier[current] [ literal[string] ]]
keyword[if] identifier[current] . identifier[get] ( literal[string] ) keyword[is] keyword[not] keyword[None] :
identifier[current] [ literal[string] ]=[ identifier[c] [ literal[string] ] keyword[for] identifier[c] keyword[in] identifier[current] [ literal[string] ]]
keyword[if] identifier[current] . identifier[get] ( literal[string] ) keyword[is] keyword[not] keyword[None] :
identifier[current] [ literal[string] ]= identifier[current] [ literal[string] ][ literal[string] ]
identifier[current] [ literal[string] ]= identifier[current] [ literal[string] ][ literal[string] ]
keyword[if] identifier[current] . identifier[get] ( literal[string] ) keyword[is] keyword[not] keyword[None] :
identifier[current] [ literal[string] ]= identifier[current] [ literal[string] ][ literal[string] ]
identifier[modifiable] ={
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : keyword[None] ,
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : keyword[None] ,
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[string]
}
identifier[need_update] ={}
keyword[for] identifier[m] , identifier[o] keyword[in] identifier[modifiable] . identifier[items] ():
keyword[if] identifier[m] keyword[in] identifier[desired] :
keyword[if] keyword[not] identifier[o] :
identifier[need_update] [ identifier[m] ]= identifier[desired] [ identifier[m] ]
keyword[else] :
keyword[if] identifier[m] keyword[in] identifier[current] :
keyword[if] identifier[current] [ identifier[m] ]!= identifier[desired] [ identifier[m] ]:
identifier[need_update] [ identifier[m] ]= identifier[desired] [ identifier[m] ]
keyword[return] identifier[need_update]
|
def _diff_cache_cluster(current, desired):
"""
If you need to enhance what modify_cache_cluster() considers when deciding what is to be
(or can be) updated, add it to 'modifiable' below. It's a dict mapping the param as used
in modify_cache_cluster() to that in describe_cache_clusters(). Any data fiddlery that
needs to be done to make the mappings meaningful should be done in the munging section
below as well.
This function will ONLY touch settings that are explicitly called out in 'desired' - any
settings which might have previously been changed from their 'default' values will not be
changed back simply by leaving them out of 'desired'. This is both intentional, and
much, much easier to code :)
"""
    ### The data formats are annoyingly (and as far as I can tell, unnecessarily)
### different - we have to munge to a common format to compare...
if current.get('SecurityGroups') is not None:
current['SecurityGroupIds'] = [s['SecurityGroupId'] for s in current['SecurityGroups']] # depends on [control=['if'], data=[]]
if current.get('CacheSecurityGroups') is not None:
current['CacheSecurityGroupNames'] = [c['CacheSecurityGroupName'] for c in current['CacheSecurityGroups']] # depends on [control=['if'], data=[]]
if current.get('NotificationConfiguration') is not None:
current['NotificationTopicArn'] = current['NotificationConfiguration']['TopicArn']
current['NotificationTopicStatus'] = current['NotificationConfiguration']['TopicStatus'] # depends on [control=['if'], data=[]]
if current.get('CacheParameterGroup') is not None:
current['CacheParameterGroupName'] = current['CacheParameterGroup']['CacheParameterGroupName'] # depends on [control=['if'], data=[]]
modifiable = {'AutoMinorVersionUpgrade': 'AutoMinorVersionUpgrade', 'AZMode': 'AZMode', 'CacheNodeType': 'CacheNodeType', 'CacheNodeIdsToRemove': None, 'CacheParameterGroupName': 'CacheParameterGroupName', 'CacheSecurityGroupNames': 'CacheSecurityGroupNames', 'EngineVersion': 'EngineVersion', 'NewAvailabilityZones': None, 'NotificationTopicArn': 'NotificationTopicArn', 'NotificationTopicStatus': 'NotificationTopicStatus', 'NumCacheNodes': 'NumCacheNodes', 'PreferredMaintenanceWindow': 'PreferredMaintenanceWindow', 'SecurityGroupIds': 'SecurityGroupIds', 'SnapshotRetentionLimit': 'SnapshotRetentionLimit', 'SnapshotWindow': 'SnapshotWindow'}
need_update = {}
for (m, o) in modifiable.items():
if m in desired:
if not o:
# Always pass these through - let AWS do the math...
need_update[m] = desired[m] # depends on [control=['if'], data=[]]
elif m in current:
# Equivalence testing works fine for current simple type comparisons
# This might need enhancement if more complex structures enter the picture
if current[m] != desired[m]:
need_update[m] = desired[m] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['m', 'current']] # depends on [control=['if'], data=['m', 'desired']] # depends on [control=['for'], data=[]]
return need_update
|
def batchcancel_openOrders(self, acc_id, symbol=None, side=None, size=None, _async=False):
"""
    Batch-cancel open (unfilled) orders.
    :param acc_id: account ID
    :param symbol: trading pair
    :param side: order side
:param size:
:param _async:
:return:
"""
params = {}
path = '/v1/order/batchCancelOpenOrders'
params['account-id'] = acc_id
if symbol:
params['symbol'] = symbol
if side:
assert side in ['buy', 'sell']
params['side'] = side
if size:
params['size'] = size
return api_key_get(params, path, _async=_async)
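
# Hypothetical call sketch (account and symbol values are made up); the
# method would POST the params below to the path via api_key_get:
params = {'account-id': '123456', 'symbol': 'btcusdt', 'side': 'buy', 'size': 100}
path = '/v1/order/batchCancelOpenOrders'
# resp = client.batchcancel_openOrders('123456', symbol='btcusdt', side='buy', size=100)
print(path, params)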
|
def function[batchcancel_openOrders, parameter[self, acc_id, symbol, side, size, _async]]:
constant[
    Batch-cancel open (unfilled) orders.
    :param acc_id: account ID
    :param symbol: trading pair
    :param side: order side
:param size:
:param _async:
:return:
]
variable[params] assign[=] dictionary[[], []]
variable[path] assign[=] constant[/v1/order/batchCancelOpenOrders]
call[name[params]][constant[account-id]] assign[=] name[acc_id]
if name[symbol] begin[:]
call[name[params]][constant[symbol]] assign[=] name[symbol]
if name[side] begin[:]
assert[compare[name[side] in list[[<ast.Constant object at 0x7da18f09ebc0>, <ast.Constant object at 0x7da18f09f1f0>]]]]
call[name[params]][constant[side]] assign[=] name[side]
if name[size] begin[:]
call[name[params]][constant[size]] assign[=] name[size]
return[call[name[api_key_get], parameter[name[params], name[path]]]]
|
keyword[def] identifier[batchcancel_openOrders] ( identifier[self] , identifier[acc_id] , identifier[symbol] = keyword[None] , identifier[side] = keyword[None] , identifier[size] = keyword[None] , identifier[_async] = keyword[False] ):
literal[string]
identifier[params] ={}
identifier[path] = literal[string]
identifier[params] [ literal[string] ]= identifier[acc_id]
keyword[if] identifier[symbol] :
identifier[params] [ literal[string] ]= identifier[symbol]
keyword[if] identifier[side] :
keyword[assert] identifier[side] keyword[in] [ literal[string] , literal[string] ]
identifier[params] [ literal[string] ]= identifier[side]
keyword[if] identifier[size] :
identifier[params] [ literal[string] ]= identifier[size]
keyword[return] identifier[api_key_get] ( identifier[params] , identifier[path] , identifier[_async] = identifier[_async] )
|
def batchcancel_openOrders(self, acc_id, symbol=None, side=None, size=None, _async=False):
"""
    Batch-cancel open (unfilled) orders.
    :param acc_id: account ID
    :param symbol: trading pair
    :param side: order side
:param size:
:param _async:
:return:
"""
params = {}
path = '/v1/order/batchCancelOpenOrders'
params['account-id'] = acc_id
if symbol:
params['symbol'] = symbol # depends on [control=['if'], data=[]]
if side:
assert side in ['buy', 'sell']
params['side'] = side # depends on [control=['if'], data=[]]
if size:
params['size'] = size # depends on [control=['if'], data=[]]
return api_key_get(params, path, _async=_async)
|
def get_identity(user):
"""Create an identity for a given user instance.
Primarily useful for testing.
"""
identity = Identity(user.id)
if hasattr(user, 'id'):
identity.provides.add(UserNeed(user.id))
for role in getattr(user, 'roles', []):
identity.provides.add(RoleNeed(role.name))
identity.user = user
return identity
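
# Test-style sketch with a stub user (SimpleNamespace stands in for the
# application's real User/Role models):
from types import SimpleNamespace
user = SimpleNamespace(id=7, roles=[SimpleNamespace(name='admin')])
identity = get_identity(user)
print(sorted(need.method for need in identity.provides))  # ['id', 'role']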
|
def function[get_identity, parameter[user]]:
constant[Create an identity for a given user instance.
Primarily useful for testing.
]
variable[identity] assign[=] call[name[Identity], parameter[name[user].id]]
if call[name[hasattr], parameter[name[user], constant[id]]] begin[:]
call[name[identity].provides.add, parameter[call[name[UserNeed], parameter[name[user].id]]]]
for taget[name[role]] in starred[call[name[getattr], parameter[name[user], constant[roles], list[[]]]]] begin[:]
call[name[identity].provides.add, parameter[call[name[RoleNeed], parameter[name[role].name]]]]
name[identity].user assign[=] name[user]
return[name[identity]]
|
keyword[def] identifier[get_identity] ( identifier[user] ):
literal[string]
identifier[identity] = identifier[Identity] ( identifier[user] . identifier[id] )
keyword[if] identifier[hasattr] ( identifier[user] , literal[string] ):
identifier[identity] . identifier[provides] . identifier[add] ( identifier[UserNeed] ( identifier[user] . identifier[id] ))
keyword[for] identifier[role] keyword[in] identifier[getattr] ( identifier[user] , literal[string] ,[]):
identifier[identity] . identifier[provides] . identifier[add] ( identifier[RoleNeed] ( identifier[role] . identifier[name] ))
identifier[identity] . identifier[user] = identifier[user]
keyword[return] identifier[identity]
|
def get_identity(user):
"""Create an identity for a given user instance.
Primarily useful for testing.
"""
identity = Identity(user.id)
if hasattr(user, 'id'):
identity.provides.add(UserNeed(user.id)) # depends on [control=['if'], data=[]]
for role in getattr(user, 'roles', []):
identity.provides.add(RoleNeed(role.name)) # depends on [control=['for'], data=['role']]
identity.user = user
return identity
|
def samblaster_dedup_sort(data, tx_out_file, tx_sr_file, tx_disc_file):
"""Deduplicate and sort with samblaster, produces split read and discordant pair files.
"""
samblaster = config_utils.get_program("samblaster", data["config"])
samtools = config_utils.get_program("samtools", data["config"])
tmp_prefix = "%s-sorttmp" % utils.splitext_plus(tx_out_file)[0]
tobam_cmd = ("{samtools} sort {sort_opt} -@ {cores} -m {mem} -T {tmp_prefix}-{dext} {out_file} -")
# full BAM -- associate more memory and cores
cores, mem = _get_cores_memory(data, downscale=2)
# Potentially downsample to maximum coverage here if not splitting and whole genome sample
ds_cmd = None if data.get("align_split") else bam.get_maxcov_downsample_cl(data, "samtools")
sort_opt = "-n" if data.get("align_split") and dd.get_mark_duplicates(data) else ""
if ds_cmd:
dedup_cmd = "%s %s > %s" % (tobam_cmd.format(out_file="", dext="full", **locals()), ds_cmd, tx_out_file)
else:
dedup_cmd = tobam_cmd.format(out_file="-o %s" % tx_out_file, dext="full", **locals())
# split and discordant BAMs -- give less memory/cores since smaller files
sort_opt = ""
cores, mem = _get_cores_memory(data, downscale=4)
splitter_cmd = tobam_cmd.format(out_file="-o %s" % tx_sr_file, dext="spl", **locals())
discordant_cmd = tobam_cmd.format(out_file="-o %s" % tx_disc_file, dext="disc", **locals())
# samblaster 0.1.22 and better require the -M flag for compatibility with bwa-mem
cmd = ("{samblaster} --addMateTags -M --splitterFile >({splitter_cmd}) --discordantFile >({discordant_cmd}) "
"| {dedup_cmd}")
return cmd.format(**locals())
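
# Illustration of the format(**locals()) composition used above (paths and
# resources are placeholders, not a real pipeline run):
samtools, sort_opt, cores, mem = 'samtools', '-n', 4, '2G'
tmp_prefix, dext, out_file = '/tmp/x-sorttmp', 'full', '-o /tmp/x.bam'
tobam_cmd = '{samtools} sort {sort_opt} -@ {cores} -m {mem} -T {tmp_prefix}-{dext} {out_file} -'
print(tobam_cmd.format(**locals()))
# samtools sort -n -@ 4 -m 2G -T /tmp/x-sorttmp-full -o /tmp/x.bam -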
|
def function[samblaster_dedup_sort, parameter[data, tx_out_file, tx_sr_file, tx_disc_file]]:
    constant[Deduplicate and sort with samblaster, producing split read and discordant pair files.
]
variable[samblaster] assign[=] call[name[config_utils].get_program, parameter[constant[samblaster], call[name[data]][constant[config]]]]
variable[samtools] assign[=] call[name[config_utils].get_program, parameter[constant[samtools], call[name[data]][constant[config]]]]
variable[tmp_prefix] assign[=] binary_operation[constant[%s-sorttmp] <ast.Mod object at 0x7da2590d6920> call[call[name[utils].splitext_plus, parameter[name[tx_out_file]]]][constant[0]]]
variable[tobam_cmd] assign[=] constant[{samtools} sort {sort_opt} -@ {cores} -m {mem} -T {tmp_prefix}-{dext} {out_file} -]
<ast.Tuple object at 0x7da1b2347eb0> assign[=] call[name[_get_cores_memory], parameter[name[data]]]
variable[ds_cmd] assign[=] <ast.IfExp object at 0x7da1b19bbee0>
variable[sort_opt] assign[=] <ast.IfExp object at 0x7da1b19ba140>
if name[ds_cmd] begin[:]
variable[dedup_cmd] assign[=] binary_operation[constant[%s %s > %s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Call object at 0x7da1b19ba080>, <ast.Name object at 0x7da1b19bb9a0>, <ast.Name object at 0x7da1b19b8040>]]]
variable[sort_opt] assign[=] constant[]
<ast.Tuple object at 0x7da1b19bb280> assign[=] call[name[_get_cores_memory], parameter[name[data]]]
variable[splitter_cmd] assign[=] call[name[tobam_cmd].format, parameter[]]
variable[discordant_cmd] assign[=] call[name[tobam_cmd].format, parameter[]]
variable[cmd] assign[=] constant[{samblaster} --addMateTags -M --splitterFile >({splitter_cmd}) --discordantFile >({discordant_cmd}) | {dedup_cmd}]
return[call[name[cmd].format, parameter[]]]
|
keyword[def] identifier[samblaster_dedup_sort] ( identifier[data] , identifier[tx_out_file] , identifier[tx_sr_file] , identifier[tx_disc_file] ):
literal[string]
identifier[samblaster] = identifier[config_utils] . identifier[get_program] ( literal[string] , identifier[data] [ literal[string] ])
identifier[samtools] = identifier[config_utils] . identifier[get_program] ( literal[string] , identifier[data] [ literal[string] ])
identifier[tmp_prefix] = literal[string] % identifier[utils] . identifier[splitext_plus] ( identifier[tx_out_file] )[ literal[int] ]
identifier[tobam_cmd] =( literal[string] )
identifier[cores] , identifier[mem] = identifier[_get_cores_memory] ( identifier[data] , identifier[downscale] = literal[int] )
identifier[ds_cmd] = keyword[None] keyword[if] identifier[data] . identifier[get] ( literal[string] ) keyword[else] identifier[bam] . identifier[get_maxcov_downsample_cl] ( identifier[data] , literal[string] )
identifier[sort_opt] = literal[string] keyword[if] identifier[data] . identifier[get] ( literal[string] ) keyword[and] identifier[dd] . identifier[get_mark_duplicates] ( identifier[data] ) keyword[else] literal[string]
keyword[if] identifier[ds_cmd] :
identifier[dedup_cmd] = literal[string] %( identifier[tobam_cmd] . identifier[format] ( identifier[out_file] = literal[string] , identifier[dext] = literal[string] ,** identifier[locals] ()), identifier[ds_cmd] , identifier[tx_out_file] )
keyword[else] :
identifier[dedup_cmd] = identifier[tobam_cmd] . identifier[format] ( identifier[out_file] = literal[string] % identifier[tx_out_file] , identifier[dext] = literal[string] ,** identifier[locals] ())
identifier[sort_opt] = literal[string]
identifier[cores] , identifier[mem] = identifier[_get_cores_memory] ( identifier[data] , identifier[downscale] = literal[int] )
identifier[splitter_cmd] = identifier[tobam_cmd] . identifier[format] ( identifier[out_file] = literal[string] % identifier[tx_sr_file] , identifier[dext] = literal[string] ,** identifier[locals] ())
identifier[discordant_cmd] = identifier[tobam_cmd] . identifier[format] ( identifier[out_file] = literal[string] % identifier[tx_disc_file] , identifier[dext] = literal[string] ,** identifier[locals] ())
identifier[cmd] =( literal[string]
literal[string] )
keyword[return] identifier[cmd] . identifier[format] (** identifier[locals] ())
|
def samblaster_dedup_sort(data, tx_out_file, tx_sr_file, tx_disc_file):
"""Deduplicate and sort with samblaster, produces split read and discordant pair files.
"""
samblaster = config_utils.get_program('samblaster', data['config'])
samtools = config_utils.get_program('samtools', data['config'])
tmp_prefix = '%s-sorttmp' % utils.splitext_plus(tx_out_file)[0]
tobam_cmd = '{samtools} sort {sort_opt} -@ {cores} -m {mem} -T {tmp_prefix}-{dext} {out_file} -'
# full BAM -- associate more memory and cores
(cores, mem) = _get_cores_memory(data, downscale=2)
# Potentially downsample to maximum coverage here if not splitting and whole genome sample
ds_cmd = None if data.get('align_split') else bam.get_maxcov_downsample_cl(data, 'samtools')
sort_opt = '-n' if data.get('align_split') and dd.get_mark_duplicates(data) else ''
if ds_cmd:
dedup_cmd = '%s %s > %s' % (tobam_cmd.format(out_file='', dext='full', **locals()), ds_cmd, tx_out_file) # depends on [control=['if'], data=[]]
else:
dedup_cmd = tobam_cmd.format(out_file='-o %s' % tx_out_file, dext='full', **locals())
# split and discordant BAMs -- give less memory/cores since smaller files
sort_opt = ''
(cores, mem) = _get_cores_memory(data, downscale=4)
splitter_cmd = tobam_cmd.format(out_file='-o %s' % tx_sr_file, dext='spl', **locals())
discordant_cmd = tobam_cmd.format(out_file='-o %s' % tx_disc_file, dext='disc', **locals())
# samblaster 0.1.22 and better require the -M flag for compatibility with bwa-mem
cmd = '{samblaster} --addMateTags -M --splitterFile >({splitter_cmd}) --discordantFile >({discordant_cmd}) | {dedup_cmd}'
return cmd.format(**locals())
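
The pattern worth noting above is two-level templating: one shared samtools sort template is specialized three times with str.format(**locals()), and the pieces are then spliced into a single samblaster pipeline via bash process substitution. A minimal, self-contained sketch of that assembly (file names and resource values are invented, not bcbio's):

samblaster = "samblaster"
samtools = "samtools"
cores, mem = 4, "2G"
sort_opt = ""
tmp_prefix = "sample1-sorttmp"
tobam_cmd = "{samtools} sort {sort_opt} -@ {cores} -m {mem} -T {tmp_prefix}-{dext} {out_file} -"

# each sub-command fills the shared template with its own output file and tag
dedup_cmd = tobam_cmd.format(out_file="-o sample1.bam", dext="full", **locals())
splitter_cmd = tobam_cmd.format(out_file="-o sample1-sr.bam", dext="spl", **locals())
discordant_cmd = tobam_cmd.format(out_file="-o sample1-disc.bam", dext="disc", **locals())

# process substitution lets samblaster write three sorted BAMs in one pass
cmd = ("{samblaster} --addMateTags -M --splitterFile >({splitter_cmd}) "
       "--discordantFile >({discordant_cmd}) | {dedup_cmd}")
print(cmd.format(**locals()))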
|
def with_setup(setup=None, teardown=None):
"""Decorator to add setup and/or teardown methods to a test function::
@with_setup(setup, teardown)
def test_something():
" ... "
Note that `with_setup` is useful *only* for test functions, not for test
methods or inside of TestCase subclasses.
"""
def decorate(func, setup=setup, teardown=teardown):
if setup:
if hasattr(func, 'setup'):
_old_s = func.setup
def _s():
setup()
_old_s()
func.setup = _s
else:
func.setup = setup
if teardown:
if hasattr(func, 'teardown'):
_old_t = func.teardown
def _t():
_old_t()
teardown()
func.teardown = _t
else:
func.teardown = teardown
return func
return decorate
|
def function[with_setup, parameter[setup, teardown]]:
constant[Decorator to add setup and/or teardown methods to a test function::
@with_setup(setup, teardown)
def test_something():
" ... "
Note that `with_setup` is useful *only* for test functions, not for test
methods or inside of TestCase subclasses.
]
def function[decorate, parameter[func, setup, teardown]]:
if name[setup] begin[:]
if call[name[hasattr], parameter[name[func], constant[setup]]] begin[:]
variable[_old_s] assign[=] name[func].setup
def function[_s, parameter[]]:
call[name[setup], parameter[]]
call[name[_old_s], parameter[]]
name[func].setup assign[=] name[_s]
if name[teardown] begin[:]
if call[name[hasattr], parameter[name[func], constant[teardown]]] begin[:]
variable[_old_t] assign[=] name[func].teardown
def function[_t, parameter[]]:
call[name[_old_t], parameter[]]
call[name[teardown], parameter[]]
name[func].teardown assign[=] name[_t]
return[name[func]]
return[name[decorate]]
|
keyword[def] identifier[with_setup] ( identifier[setup] = keyword[None] , identifier[teardown] = keyword[None] ):
literal[string]
keyword[def] identifier[decorate] ( identifier[func] , identifier[setup] = identifier[setup] , identifier[teardown] = identifier[teardown] ):
keyword[if] identifier[setup] :
keyword[if] identifier[hasattr] ( identifier[func] , literal[string] ):
identifier[_old_s] = identifier[func] . identifier[setup]
keyword[def] identifier[_s] ():
identifier[setup] ()
identifier[_old_s] ()
identifier[func] . identifier[setup] = identifier[_s]
keyword[else] :
identifier[func] . identifier[setup] = identifier[setup]
keyword[if] identifier[teardown] :
keyword[if] identifier[hasattr] ( identifier[func] , literal[string] ):
identifier[_old_t] = identifier[func] . identifier[teardown]
keyword[def] identifier[_t] ():
identifier[_old_t] ()
identifier[teardown] ()
identifier[func] . identifier[teardown] = identifier[_t]
keyword[else] :
identifier[func] . identifier[teardown] = identifier[teardown]
keyword[return] identifier[func]
keyword[return] identifier[decorate]
|
def with_setup(setup=None, teardown=None):
"""Decorator to add setup and/or teardown methods to a test function::
@with_setup(setup, teardown)
def test_something():
" ... "
Note that `with_setup` is useful *only* for test functions, not for test
methods or inside of TestCase subclasses.
"""
def decorate(func, setup=setup, teardown=teardown):
if setup:
if hasattr(func, 'setup'):
_old_s = func.setup
def _s():
setup()
_old_s()
func.setup = _s # depends on [control=['if'], data=[]]
else:
func.setup = setup # depends on [control=['if'], data=[]]
if teardown:
if hasattr(func, 'teardown'):
_old_t = func.teardown
def _t():
_old_t()
teardown()
func.teardown = _t # depends on [control=['if'], data=[]]
else:
func.teardown = teardown # depends on [control=['if'], data=[]]
return func
return decorate
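
A minimal usage sketch for the decorator above; the resources dict and the manual hook calls stand in for a nose-style test runner (all names here are invented):

resources = {}

def connect():
    resources["db"] = "connected"

def disconnect():
    resources.clear()

@with_setup(connect, disconnect)
def test_something():
    assert resources["db"] == "connected"

# nose would invoke these hooks around the test; done by hand here
test_something.setup()
test_something()
test_something.teardown()
assert resources == {}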
|
def clear_threads(self):
"""
Clears the threads snapshot.
"""
for aThread in compat.itervalues(self.__threadDict):
aThread.clear()
self.__threadDict = dict()
|
def function[clear_threads, parameter[self]]:
constant[
Clears the threads snapshot.
]
for taget[name[aThread]] in starred[call[name[compat].itervalues, parameter[name[self].__threadDict]]] begin[:]
call[name[aThread].clear, parameter[]]
name[self].__threadDict assign[=] call[name[dict], parameter[]]
|
keyword[def] identifier[clear_threads] ( identifier[self] ):
literal[string]
keyword[for] identifier[aThread] keyword[in] identifier[compat] . identifier[itervalues] ( identifier[self] . identifier[__threadDict] ):
identifier[aThread] . identifier[clear] ()
identifier[self] . identifier[__threadDict] = identifier[dict] ()
|
def clear_threads(self):
"""
Clears the threads snapshot.
"""
for aThread in compat.itervalues(self.__threadDict):
aThread.clear() # depends on [control=['for'], data=['aThread']]
self.__threadDict = dict()
|
def create_or_update_detail(self, request):
"""
        Implements create/update of an object completely, given an id;
        maps to PUT /api/object/:id in REST semantics
:param request: rip.Request
:return: rip.Response
"""
pipeline = crud_pipeline_factory.create_or_update_detail_pipeline(
configuration=self.configuration)
return pipeline(request=request)
|
def function[create_or_update_detail, parameter[self, request]]:
constant[
    Implements create/update of an object completely, given an id;
    maps to PUT /api/object/:id in REST semantics
:param request: rip.Request
:return: rip.Response
]
variable[pipeline] assign[=] call[name[crud_pipeline_factory].create_or_update_detail_pipeline, parameter[]]
return[call[name[pipeline], parameter[]]]
|
keyword[def] identifier[create_or_update_detail] ( identifier[self] , identifier[request] ):
literal[string]
identifier[pipeline] = identifier[crud_pipeline_factory] . identifier[create_or_update_detail_pipeline] (
identifier[configuration] = identifier[self] . identifier[configuration] )
keyword[return] identifier[pipeline] ( identifier[request] = identifier[request] )
|
def create_or_update_detail(self, request):
"""
        Implements create/update of an object completely, given an id;
        maps to PUT /api/object/:id in REST semantics
:param request: rip.Request
:return: rip.Response
"""
pipeline = crud_pipeline_factory.create_or_update_detail_pipeline(configuration=self.configuration)
return pipeline(request=request)
|
def render(self, size, frame, drawqueue):
'''
        Calls the implementation to get a render context,
        passes it to the drawqueue's render function,
then calls self.rendering_finished
'''
r_context = self.create_rcontext(size, frame)
drawqueue.render(r_context)
self.rendering_finished(size, frame, r_context)
return r_context
|
def function[render, parameter[self, size, frame, drawqueue]]:
constant[
    Calls the implementation to get a render context,
    passes it to the drawqueue's render function,
then calls self.rendering_finished
]
variable[r_context] assign[=] call[name[self].create_rcontext, parameter[name[size], name[frame]]]
call[name[drawqueue].render, parameter[name[r_context]]]
call[name[self].rendering_finished, parameter[name[size], name[frame], name[r_context]]]
return[name[r_context]]
|
keyword[def] identifier[render] ( identifier[self] , identifier[size] , identifier[frame] , identifier[drawqueue] ):
literal[string]
identifier[r_context] = identifier[self] . identifier[create_rcontext] ( identifier[size] , identifier[frame] )
identifier[drawqueue] . identifier[render] ( identifier[r_context] )
identifier[self] . identifier[rendering_finished] ( identifier[size] , identifier[frame] , identifier[r_context] )
keyword[return] identifier[r_context]
|
def render(self, size, frame, drawqueue):
"""
        Calls the implementation to get a render context,
        passes it to the drawqueue's render function,
then calls self.rendering_finished
"""
r_context = self.create_rcontext(size, frame)
drawqueue.render(r_context)
self.rendering_finished(size, frame, r_context)
return r_context
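
render() is a template method: build a context, let the draw queue draw into it, then run the finished hook. A hedged sketch with invented stand-ins for the canvas and the draw queue:

class ListDrawQueue:
    def __init__(self, commands):
        self.commands = commands

    def render(self, ctx):
        # replay each queued drawing command against the context
        for command in self.commands:
            command(ctx)

class TextCanvas:
    def create_rcontext(self, size, frame):
        return {"size": size, "frame": frame, "ops": []}

    def rendering_finished(self, size, frame, r_context):
        print("frame %s: %d ops" % (frame, len(r_context["ops"])))

    def render(self, size, frame, drawqueue):
        r_context = self.create_rcontext(size, frame)
        drawqueue.render(r_context)
        self.rendering_finished(size, frame, r_context)
        return r_context

queue = ListDrawQueue([lambda ctx: ctx["ops"].append("rect")])
TextCanvas().render((100, 100), 1, queue)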
|
def degree(self, kind='out', weighted=True):
'''Returns an array of vertex degrees.
kind : either 'in' or 'out', useful for directed graphs
weighted : controls whether to count edges or sum their weights
'''
if kind == 'out':
axis = 1
adj = self.matrix('dense', 'csc')
else:
axis = 0
adj = self.matrix('dense', 'csr')
if not weighted and self.is_weighted():
# With recent numpy and a dense matrix, could do:
# d = np.count_nonzero(adj, axis=axis)
d = (adj!=0).sum(axis=axis)
else:
d = adj.sum(axis=axis)
return np.asarray(d).ravel()
|
def function[degree, parameter[self, kind, weighted]]:
constant[Returns an array of vertex degrees.
kind : either 'in' or 'out', useful for directed graphs
weighted : controls whether to count edges or sum their weights
]
if compare[name[kind] equal[==] constant[out]] begin[:]
variable[axis] assign[=] constant[1]
variable[adj] assign[=] call[name[self].matrix, parameter[constant[dense], constant[csc]]]
if <ast.BoolOp object at 0x7da18dc99660> begin[:]
variable[d] assign[=] call[compare[name[adj] not_equal[!=] constant[0]].sum, parameter[]]
return[call[call[name[np].asarray, parameter[name[d]]].ravel, parameter[]]]
|
keyword[def] identifier[degree] ( identifier[self] , identifier[kind] = literal[string] , identifier[weighted] = keyword[True] ):
literal[string]
keyword[if] identifier[kind] == literal[string] :
identifier[axis] = literal[int]
identifier[adj] = identifier[self] . identifier[matrix] ( literal[string] , literal[string] )
keyword[else] :
identifier[axis] = literal[int]
identifier[adj] = identifier[self] . identifier[matrix] ( literal[string] , literal[string] )
keyword[if] keyword[not] identifier[weighted] keyword[and] identifier[self] . identifier[is_weighted] ():
identifier[d] =( identifier[adj] != literal[int] ). identifier[sum] ( identifier[axis] = identifier[axis] )
keyword[else] :
identifier[d] = identifier[adj] . identifier[sum] ( identifier[axis] = identifier[axis] )
keyword[return] identifier[np] . identifier[asarray] ( identifier[d] ). identifier[ravel] ()
|
def degree(self, kind='out', weighted=True):
"""Returns an array of vertex degrees.
kind : either 'in' or 'out', useful for directed graphs
weighted : controls whether to count edges or sum their weights
"""
if kind == 'out':
axis = 1
adj = self.matrix('dense', 'csc') # depends on [control=['if'], data=[]]
else:
axis = 0
adj = self.matrix('dense', 'csr')
if not weighted and self.is_weighted():
# With recent numpy and a dense matrix, could do:
# d = np.count_nonzero(adj, axis=axis)
d = (adj != 0).sum(axis=axis) # depends on [control=['if'], data=[]]
else:
d = adj.sum(axis=axis)
return np.asarray(d).ravel()
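
The same degree computation on a raw scipy.sparse adjacency matrix, outside any graph class (a hedged sketch; the matrix is invented):

import numpy as np
import scipy.sparse as sps

adj = sps.csr_matrix(np.array([[0.0, 2.0, 0.0],
                               [0.0, 0.0, 1.0],
                               [0.0, 0.0, 0.0]]))
out_weighted = np.asarray(adj.sum(axis=1)).ravel()          # [2. 1. 0.]
in_unweighted = np.asarray((adj != 0).sum(axis=0)).ravel()  # [0 1 1]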
|
def check_sparsity(x, fraction=0.6):
'''
check_sparsity(x) yields either x or an array equivalent to x with a different sparsity based on
a heuristic: if x is a sparse array with more than 60% of its elements specified, it is made
dense; otherwise, it is left alone.
The optional argument fraction (default 0.6) specifies the fraction of elements that must be
specified in the array for it to be un-sparsified.
'''
if not sps.issparse(x): return x
n = numel(x)
if n == 0: return x
    if len(x.data) / float(n) > fraction: return x.toarray()
else: return x
|
def function[check_sparsity, parameter[x, fraction]]:
constant[
check_sparsity(x) yields either x or an array equivalent to x with a different sparsity based on
a heuristic: if x is a sparse array with more than 60% of its elements specified, it is made
dense; otherwise, it is left alone.
The optional argument fraction (default 0.6) specifies the fraction of elements that must be
specified in the array for it to be un-sparsified.
]
if <ast.UnaryOp object at 0x7da1b0b44070> begin[:]
return[name[x]]
variable[n] assign[=] call[name[numel], parameter[name[x]]]
if compare[name[n] equal[==] constant[0]] begin[:]
return[name[x]]
    if compare[binary_operation[call[name[len], parameter[name[x].data]] / call[name[float], parameter[name[n]]]] greater[>] name[fraction]] begin[:]
return[call[name[x].toarray, parameter[]]]
|
keyword[def] identifier[check_sparsity] ( identifier[x] , identifier[fraction] = literal[int] ):
literal[string]
keyword[if] keyword[not] identifier[sps] . identifier[issparse] ( identifier[x] ): keyword[return] identifier[x]
identifier[n] = identifier[numel] ( identifier[x] )
keyword[if] identifier[n] == literal[int] : keyword[return] identifier[x]
keyword[if] identifier[len] ( identifier[x] . identifier[data] )/ identifier[float] ( identifier[n] )> identifier[fraction] : keyword[return] identifier[x] . identifier[toarray] ()
keyword[else] : keyword[return] identifier[x]
|
def check_sparsity(x, fraction=0.6):
"""
check_sparsity(x) yields either x or an array equivalent to x with a different sparsity based on
a heuristic: if x is a sparse array with more than 60% of its elements specified, it is made
dense; otherwise, it is left alone.
The optional argument fraction (default 0.6) specifies the fraction of elements that must be
specified in the array for it to be un-sparsified.
"""
if not sps.issparse(x):
return x # depends on [control=['if'], data=[]]
n = numel(x)
if n == 0:
return x # depends on [control=['if'], data=[]]
    if len(x.data) / float(n) > fraction:
return x.toarray() # depends on [control=['if'], data=[]]
else:
return x
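
A hedged usage sketch; numel() is assumed to return the total element count, so a stand-in is defined for the example to run on its own:

import numpy as np
import scipy.sparse as sps

def numel(m):
    return int(np.prod(m.shape))  # stand-in for the library helper

dense_ish = sps.csr_matrix(np.arange(9).reshape(3, 3))  # 8 of 9 entries stored
sparse_ish = sps.eye(100, format="csr")                 # 1% of entries stored

print(type(check_sparsity(dense_ish)).__name__)   # ndarray -- densified
print(type(check_sparsity(sparse_ish)).__name__)  # csr_matrix -- left sparse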
|
def get_members_of_group(self, gname):
"""Get all members of a group which name is given in parameter
:param gname: name of the group
:type gname: str
:return: list of contacts in the group
:rtype: list[alignak.objects.contact.Contact]
"""
contactgroup = self.find_by_name(gname)
if contactgroup:
return contactgroup.get_contacts()
return []
|
def function[get_members_of_group, parameter[self, gname]]:
    constant[Get all members of the group whose name is given as a parameter
:param gname: name of the group
:type gname: str
:return: list of contacts in the group
:rtype: list[alignak.objects.contact.Contact]
]
variable[contactgroup] assign[=] call[name[self].find_by_name, parameter[name[gname]]]
if name[contactgroup] begin[:]
return[call[name[contactgroup].get_contacts, parameter[]]]
return[list[[]]]
|
keyword[def] identifier[get_members_of_group] ( identifier[self] , identifier[gname] ):
literal[string]
identifier[contactgroup] = identifier[self] . identifier[find_by_name] ( identifier[gname] )
keyword[if] identifier[contactgroup] :
keyword[return] identifier[contactgroup] . identifier[get_contacts] ()
keyword[return] []
|
def get_members_of_group(self, gname):
"""Get all members of a group which name is given in parameter
:param gname: name of the group
:type gname: str
:return: list of contacts in the group
:rtype: list[alignak.objects.contact.Contact]
"""
contactgroup = self.find_by_name(gname)
if contactgroup:
return contactgroup.get_contacts() # depends on [control=['if'], data=[]]
return []
|
def launch_modules_with_names(modules_with_names, module_args={}, kill_before_launch=True):
    '''launch each module's main() function in a separate process'''
processes = []
if kill_before_launch:
for module_name, name in modules_with_names:
kill_module(name)
for module_name, name in modules_with_names:
m = importlib.import_module(module_name)
args = {}
if module_name in module_args:
args = module_args[module_name]
p1 = Process(target=m.main, args=args)
p1.daemon = True
p1.start()
processes.append(p1)
with open(get_launched_module_pid_file(name), 'w') as f:
f.write('{}'.format(p1.pid))
return processes
|
def function[launch_modules_with_names, parameter[modules_with_names, module_args, kill_before_launch]]:
    constant[launch each module's main() function in a separate process]
variable[processes] assign[=] list[[]]
if name[kill_before_launch] begin[:]
for taget[tuple[[<ast.Name object at 0x7da207f00520>, <ast.Name object at 0x7da207f01750>]]] in starred[name[modules_with_names]] begin[:]
call[name[kill_module], parameter[name[name]]]
for taget[tuple[[<ast.Name object at 0x7da207f00c40>, <ast.Name object at 0x7da207f029b0>]]] in starred[name[modules_with_names]] begin[:]
variable[m] assign[=] call[name[importlib].import_module, parameter[name[module_name]]]
variable[args] assign[=] dictionary[[], []]
if compare[name[module_name] in name[module_args]] begin[:]
variable[args] assign[=] call[name[module_args]][name[module_name]]
variable[p1] assign[=] call[name[Process], parameter[]]
name[p1].daemon assign[=] constant[True]
call[name[p1].start, parameter[]]
call[name[processes].append, parameter[name[p1]]]
with call[name[open], parameter[call[name[get_launched_module_pid_file], parameter[name[name]]], constant[w]]] begin[:]
call[name[f].write, parameter[call[constant[{}].format, parameter[name[p1].pid]]]]
return[name[processes]]
|
keyword[def] identifier[launch_modules_with_names] ( identifier[modules_with_names] , identifier[module_args] ={}, identifier[kill_before_launch] = keyword[True] ):
literal[string]
identifier[processes] =[]
keyword[if] identifier[kill_before_launch] :
keyword[for] identifier[module_name] , identifier[name] keyword[in] identifier[modules_with_names] :
identifier[kill_module] ( identifier[name] )
keyword[for] identifier[module_name] , identifier[name] keyword[in] identifier[modules_with_names] :
identifier[m] = identifier[importlib] . identifier[import_module] ( identifier[module_name] )
identifier[args] ={}
keyword[if] identifier[module_name] keyword[in] identifier[module_args] :
identifier[args] = identifier[module_args] [ identifier[module_name] ]
identifier[p1] = identifier[Process] ( identifier[target] = identifier[m] . identifier[main] , identifier[args] = identifier[args] )
identifier[p1] . identifier[daemon] = keyword[True]
identifier[p1] . identifier[start] ()
identifier[processes] . identifier[append] ( identifier[p1] )
keyword[with] identifier[open] ( identifier[get_launched_module_pid_file] ( identifier[name] ), literal[string] ) keyword[as] identifier[f] :
identifier[f] . identifier[write] ( literal[string] . identifier[format] ( identifier[p1] . identifier[pid] ))
keyword[return] identifier[processes]
|
def launch_modules_with_names(modules_with_names, module_args={}, kill_before_launch=True):
"""launch module.main functions in another process"""
processes = []
if kill_before_launch:
for (module_name, name) in modules_with_names:
kill_module(name) # depends on [control=['for'], data=[]] # depends on [control=['if'], data=[]]
for (module_name, name) in modules_with_names:
m = importlib.import_module(module_name)
args = {}
if module_name in module_args:
args = module_args[module_name] # depends on [control=['if'], data=['module_name', 'module_args']]
p1 = Process(target=m.main, args=args)
p1.daemon = True
p1.start()
processes.append(p1)
with open(get_launched_module_pid_file(name), 'w') as f:
f.write('{}'.format(p1.pid)) # depends on [control=['with'], data=['f']] # depends on [control=['for'], data=[]]
return processes
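
A hedged usage sketch; the dotted module paths are invented, kill_module() and the pid-file helper are assumed to come from the surrounding package, and each module is expected to expose a main() accepting the positional args given (Process splats them as main(*args)):

if __name__ == "__main__":
    modules = [("myapp.workers.ingest", "ingest"),
               ("myapp.workers.publish", "publish")]
    args = {"myapp.workers.ingest": ("queue-a",)}  # positional args for main()
    procs = launch_modules_with_names(modules, module_args=args)
    for p in procs:
        p.join()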
|
def reference_creator_surnames(self, index):
"""Return as a list the surnames of the reference creators (locally defined)."""
# TODO Not true, ex: ISBN 978-1-4398-3778-8. Return all creator types?
# Academic books published as a collection of chapters contributed by
# different authors have editors but not authors at the level of the
# book (as opposed to the level of a chapter).
creators = self.reference_data(index)["creators"]
creator_types = [x["creatorType"] for x in creators]
# 'name' (not split) might be used instead of 'firstName' and 'lastName'.
try:
if "author" in creator_types:
return [x["lastName"] for x in creators if x["creatorType"] == "author"]
else:
return [x["lastName"] for x in creators]
except KeyError:
return []
|
def function[reference_creator_surnames, parameter[self, index]]:
constant[Return as a list the surnames of the reference creators (locally defined).]
variable[creators] assign[=] call[call[name[self].reference_data, parameter[name[index]]]][constant[creators]]
variable[creator_types] assign[=] <ast.ListComp object at 0x7da1b18b94e0>
<ast.Try object at 0x7da1b18bbbe0>
|
keyword[def] identifier[reference_creator_surnames] ( identifier[self] , identifier[index] ):
literal[string]
identifier[creators] = identifier[self] . identifier[reference_data] ( identifier[index] )[ literal[string] ]
identifier[creator_types] =[ identifier[x] [ literal[string] ] keyword[for] identifier[x] keyword[in] identifier[creators] ]
keyword[try] :
keyword[if] literal[string] keyword[in] identifier[creator_types] :
keyword[return] [ identifier[x] [ literal[string] ] keyword[for] identifier[x] keyword[in] identifier[creators] keyword[if] identifier[x] [ literal[string] ]== literal[string] ]
keyword[else] :
keyword[return] [ identifier[x] [ literal[string] ] keyword[for] identifier[x] keyword[in] identifier[creators] ]
keyword[except] identifier[KeyError] :
keyword[return] []
|
def reference_creator_surnames(self, index):
"""Return as a list the surnames of the reference creators (locally defined)."""
# TODO Not true, ex: ISBN 978-1-4398-3778-8. Return all creator types?
# Academic books published as a collection of chapters contributed by
# different authors have editors but not authors at the level of the
# book (as opposed to the level of a chapter).
creators = self.reference_data(index)['creators']
creator_types = [x['creatorType'] for x in creators]
# 'name' (not split) might be used instead of 'firstName' and 'lastName'.
try:
if 'author' in creator_types:
return [x['lastName'] for x in creators if x['creatorType'] == 'author'] # depends on [control=['if'], data=[]]
else:
return [x['lastName'] for x in creators] # depends on [control=['try'], data=[]]
except KeyError:
return [] # depends on [control=['except'], data=[]]
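
A hedged sketch of the Zotero-style creators structure this code walks; reference_data() is assumed to return a dict holding such a list:

creators = [
    {"creatorType": "author", "firstName": "Ada", "lastName": "Lovelace"},
    {"creatorType": "editor", "firstName": "Charles", "lastName": "Babbage"},
]
# when any author is present, only author surnames survive the filter
surnames = [c["lastName"] for c in creators if c["creatorType"] == "author"]
assert surnames == ["Lovelace"]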
|
def polygon(self, done=None):
'''return a polygon for the waypoints'''
indexes = self.view_indexes(done)
points = []
for idx in indexes:
w = self.wp(idx)
points.append((w.x, w.y))
return points
|
def function[polygon, parameter[self, done]]:
constant[return a polygon for the waypoints]
variable[indexes] assign[=] call[name[self].view_indexes, parameter[name[done]]]
variable[points] assign[=] list[[]]
for taget[name[idx]] in starred[name[indexes]] begin[:]
variable[w] assign[=] call[name[self].wp, parameter[name[idx]]]
call[name[points].append, parameter[tuple[[<ast.Attribute object at 0x7da20c76eb30>, <ast.Attribute object at 0x7da20c76d840>]]]]
return[name[points]]
|
keyword[def] identifier[polygon] ( identifier[self] , identifier[done] = keyword[None] ):
literal[string]
identifier[indexes] = identifier[self] . identifier[view_indexes] ( identifier[done] )
identifier[points] =[]
keyword[for] identifier[idx] keyword[in] identifier[indexes] :
identifier[w] = identifier[self] . identifier[wp] ( identifier[idx] )
identifier[points] . identifier[append] (( identifier[w] . identifier[x] , identifier[w] . identifier[y] ))
keyword[return] identifier[points]
|
def polygon(self, done=None):
"""return a polygon for the waypoints"""
indexes = self.view_indexes(done)
points = []
for idx in indexes:
w = self.wp(idx)
points.append((w.x, w.y)) # depends on [control=['for'], data=['idx']]
return points
|
def _collect_tfa_stats(task):
'''
This is a parallel worker to gather LC stats.
task[0] = lcfile
task[1] = lcformat
task[2] = lcformatdir
task[3] = timecols
task[4] = magcols
task[5] = errcols
task[6] = custom_bandpasses
'''
try:
(lcfile, lcformat, lcformatdir,
timecols, magcols, errcols,
custom_bandpasses) = task
try:
formatinfo = get_lcformat(lcformat,
use_lcformat_dir=lcformatdir)
if formatinfo:
(dfileglob, readerfunc,
dtimecols, dmagcols, derrcols,
magsarefluxes, normfunc) = formatinfo
else:
LOGERROR("can't figure out the light curve format")
return None
except Exception as e:
LOGEXCEPTION("can't figure out the light curve format")
return None
# override the default timecols, magcols, and errcols
# using the ones provided to the function
if timecols is None:
timecols = dtimecols
if magcols is None:
magcols = dmagcols
if errcols is None:
errcols = derrcols
# get the LC into a dict
lcdict = readerfunc(lcfile)
# this should handle lists/tuples being returned by readerfunc
# we assume that the first element is the actual lcdict
# FIXME: figure out how to not need this assumption
if ( (isinstance(lcdict, (list, tuple))) and
(isinstance(lcdict[0], dict)) ):
lcdict = lcdict[0]
#
# collect the necessary stats for this light curve
#
# 1. number of observations
# 2. median mag
# 3. eta_normal
# 4. MAD
# 5. objectid
# 6. get mags and colors from objectinfo if there's one in lcdict
if 'objectid' in lcdict:
objectid = lcdict['objectid']
elif 'objectinfo' in lcdict and 'objectid' in lcdict['objectinfo']:
objectid = lcdict['objectinfo']['objectid']
elif 'objectinfo' in lcdict and 'hatid' in lcdict['objectinfo']:
objectid = lcdict['objectinfo']['hatid']
else:
LOGERROR('no objectid present in lcdict for LC %s, '
'using filename prefix as objectid' % lcfile)
objectid = os.path.splitext(os.path.basename(lcfile))[0]
if 'objectinfo' in lcdict:
colorfeat = starfeatures.color_features(
lcdict['objectinfo'],
deredden=False,
custom_bandpasses=custom_bandpasses
)
else:
LOGERROR('no objectinfo dict in lcdict, '
'could not get magnitudes for LC %s, '
'cannot use for TFA template ensemble' %
lcfile)
return None
# this is the initial dict
resultdict = {'objectid':objectid,
'ra':lcdict['objectinfo']['ra'],
'decl':lcdict['objectinfo']['decl'],
'colorfeat':colorfeat,
'lcfpath':os.path.abspath(lcfile),
'lcformat':lcformat,
'lcformatdir':lcformatdir,
'timecols':timecols,
'magcols':magcols,
'errcols':errcols}
for tcol, mcol, ecol in zip(timecols, magcols, errcols):
try:
# dereference the columns and get them from the lcdict
if '.' in tcol:
tcolget = tcol.split('.')
else:
tcolget = [tcol]
times = _dict_get(lcdict, tcolget)
if '.' in mcol:
mcolget = mcol.split('.')
else:
mcolget = [mcol]
mags = _dict_get(lcdict, mcolget)
if '.' in ecol:
ecolget = ecol.split('.')
else:
ecolget = [ecol]
errs = _dict_get(lcdict, ecolget)
# normalize here if not using special normalization
if normfunc is None:
ntimes, nmags = normalize_magseries(
times, mags,
magsarefluxes=magsarefluxes
)
times, mags, errs = ntimes, nmags, errs
# get the variability features for this object
varfeat = varfeatures.all_nonperiodic_features(
times, mags, errs
)
resultdict[mcol] = varfeat
except Exception as e:
LOGEXCEPTION('%s, magcol: %s, probably ran into all-nans' %
(lcfile, mcol))
resultdict[mcol] = {'ndet':0,
'mad':np.nan,
'eta_normal':np.nan}
return resultdict
except Exception as e:
LOGEXCEPTION('could not execute get_tfa_stats for task: %s' %
repr(task))
return None
|
def function[_collect_tfa_stats, parameter[task]]:
constant[
This is a parallel worker to gather LC stats.
task[0] = lcfile
task[1] = lcformat
task[2] = lcformatdir
task[3] = timecols
task[4] = magcols
task[5] = errcols
task[6] = custom_bandpasses
]
<ast.Try object at 0x7da2041d86a0>
|
keyword[def] identifier[_collect_tfa_stats] ( identifier[task] ):
literal[string]
keyword[try] :
( identifier[lcfile] , identifier[lcformat] , identifier[lcformatdir] ,
identifier[timecols] , identifier[magcols] , identifier[errcols] ,
identifier[custom_bandpasses] )= identifier[task]
keyword[try] :
identifier[formatinfo] = identifier[get_lcformat] ( identifier[lcformat] ,
identifier[use_lcformat_dir] = identifier[lcformatdir] )
keyword[if] identifier[formatinfo] :
( identifier[dfileglob] , identifier[readerfunc] ,
identifier[dtimecols] , identifier[dmagcols] , identifier[derrcols] ,
identifier[magsarefluxes] , identifier[normfunc] )= identifier[formatinfo]
keyword[else] :
identifier[LOGERROR] ( literal[string] )
keyword[return] keyword[None]
keyword[except] identifier[Exception] keyword[as] identifier[e] :
identifier[LOGEXCEPTION] ( literal[string] )
keyword[return] keyword[None]
keyword[if] identifier[timecols] keyword[is] keyword[None] :
identifier[timecols] = identifier[dtimecols]
keyword[if] identifier[magcols] keyword[is] keyword[None] :
identifier[magcols] = identifier[dmagcols]
keyword[if] identifier[errcols] keyword[is] keyword[None] :
identifier[errcols] = identifier[derrcols]
identifier[lcdict] = identifier[readerfunc] ( identifier[lcfile] )
keyword[if] (( identifier[isinstance] ( identifier[lcdict] ,( identifier[list] , identifier[tuple] ))) keyword[and]
( identifier[isinstance] ( identifier[lcdict] [ literal[int] ], identifier[dict] ))):
identifier[lcdict] = identifier[lcdict] [ literal[int] ]
keyword[if] literal[string] keyword[in] identifier[lcdict] :
identifier[objectid] = identifier[lcdict] [ literal[string] ]
keyword[elif] literal[string] keyword[in] identifier[lcdict] keyword[and] literal[string] keyword[in] identifier[lcdict] [ literal[string] ]:
identifier[objectid] = identifier[lcdict] [ literal[string] ][ literal[string] ]
keyword[elif] literal[string] keyword[in] identifier[lcdict] keyword[and] literal[string] keyword[in] identifier[lcdict] [ literal[string] ]:
identifier[objectid] = identifier[lcdict] [ literal[string] ][ literal[string] ]
keyword[else] :
identifier[LOGERROR] ( literal[string]
literal[string] % identifier[lcfile] )
identifier[objectid] = identifier[os] . identifier[path] . identifier[splitext] ( identifier[os] . identifier[path] . identifier[basename] ( identifier[lcfile] ))[ literal[int] ]
keyword[if] literal[string] keyword[in] identifier[lcdict] :
identifier[colorfeat] = identifier[starfeatures] . identifier[color_features] (
identifier[lcdict] [ literal[string] ],
identifier[deredden] = keyword[False] ,
identifier[custom_bandpasses] = identifier[custom_bandpasses]
)
keyword[else] :
identifier[LOGERROR] ( literal[string]
literal[string]
literal[string] %
identifier[lcfile] )
keyword[return] keyword[None]
identifier[resultdict] ={ literal[string] : identifier[objectid] ,
literal[string] : identifier[lcdict] [ literal[string] ][ literal[string] ],
literal[string] : identifier[lcdict] [ literal[string] ][ literal[string] ],
literal[string] : identifier[colorfeat] ,
literal[string] : identifier[os] . identifier[path] . identifier[abspath] ( identifier[lcfile] ),
literal[string] : identifier[lcformat] ,
literal[string] : identifier[lcformatdir] ,
literal[string] : identifier[timecols] ,
literal[string] : identifier[magcols] ,
literal[string] : identifier[errcols] }
keyword[for] identifier[tcol] , identifier[mcol] , identifier[ecol] keyword[in] identifier[zip] ( identifier[timecols] , identifier[magcols] , identifier[errcols] ):
keyword[try] :
keyword[if] literal[string] keyword[in] identifier[tcol] :
identifier[tcolget] = identifier[tcol] . identifier[split] ( literal[string] )
keyword[else] :
identifier[tcolget] =[ identifier[tcol] ]
identifier[times] = identifier[_dict_get] ( identifier[lcdict] , identifier[tcolget] )
keyword[if] literal[string] keyword[in] identifier[mcol] :
identifier[mcolget] = identifier[mcol] . identifier[split] ( literal[string] )
keyword[else] :
identifier[mcolget] =[ identifier[mcol] ]
identifier[mags] = identifier[_dict_get] ( identifier[lcdict] , identifier[mcolget] )
keyword[if] literal[string] keyword[in] identifier[ecol] :
identifier[ecolget] = identifier[ecol] . identifier[split] ( literal[string] )
keyword[else] :
identifier[ecolget] =[ identifier[ecol] ]
identifier[errs] = identifier[_dict_get] ( identifier[lcdict] , identifier[ecolget] )
keyword[if] identifier[normfunc] keyword[is] keyword[None] :
identifier[ntimes] , identifier[nmags] = identifier[normalize_magseries] (
identifier[times] , identifier[mags] ,
identifier[magsarefluxes] = identifier[magsarefluxes]
)
identifier[times] , identifier[mags] , identifier[errs] = identifier[ntimes] , identifier[nmags] , identifier[errs]
identifier[varfeat] = identifier[varfeatures] . identifier[all_nonperiodic_features] (
identifier[times] , identifier[mags] , identifier[errs]
)
identifier[resultdict] [ identifier[mcol] ]= identifier[varfeat]
keyword[except] identifier[Exception] keyword[as] identifier[e] :
identifier[LOGEXCEPTION] ( literal[string] %
( identifier[lcfile] , identifier[mcol] ))
identifier[resultdict] [ identifier[mcol] ]={ literal[string] : literal[int] ,
literal[string] : identifier[np] . identifier[nan] ,
literal[string] : identifier[np] . identifier[nan] }
keyword[return] identifier[resultdict]
keyword[except] identifier[Exception] keyword[as] identifier[e] :
identifier[LOGEXCEPTION] ( literal[string] %
identifier[repr] ( identifier[task] ))
keyword[return] keyword[None]
|
def _collect_tfa_stats(task):
"""
This is a parallel worker to gather LC stats.
task[0] = lcfile
task[1] = lcformat
task[2] = lcformatdir
task[3] = timecols
task[4] = magcols
task[5] = errcols
task[6] = custom_bandpasses
"""
try:
(lcfile, lcformat, lcformatdir, timecols, magcols, errcols, custom_bandpasses) = task
try:
formatinfo = get_lcformat(lcformat, use_lcformat_dir=lcformatdir)
if formatinfo:
(dfileglob, readerfunc, dtimecols, dmagcols, derrcols, magsarefluxes, normfunc) = formatinfo # depends on [control=['if'], data=[]]
else:
LOGERROR("can't figure out the light curve format")
return None # depends on [control=['try'], data=[]]
except Exception as e:
LOGEXCEPTION("can't figure out the light curve format")
return None # depends on [control=['except'], data=[]]
# override the default timecols, magcols, and errcols
# using the ones provided to the function
if timecols is None:
timecols = dtimecols # depends on [control=['if'], data=['timecols']]
if magcols is None:
magcols = dmagcols # depends on [control=['if'], data=['magcols']]
if errcols is None:
errcols = derrcols # depends on [control=['if'], data=['errcols']]
# get the LC into a dict
lcdict = readerfunc(lcfile)
# this should handle lists/tuples being returned by readerfunc
# we assume that the first element is the actual lcdict
# FIXME: figure out how to not need this assumption
if isinstance(lcdict, (list, tuple)) and isinstance(lcdict[0], dict):
lcdict = lcdict[0] # depends on [control=['if'], data=[]]
#
# collect the necessary stats for this light curve
#
# 1. number of observations
# 2. median mag
# 3. eta_normal
# 4. MAD
# 5. objectid
# 6. get mags and colors from objectinfo if there's one in lcdict
if 'objectid' in lcdict:
objectid = lcdict['objectid'] # depends on [control=['if'], data=['lcdict']]
elif 'objectinfo' in lcdict and 'objectid' in lcdict['objectinfo']:
objectid = lcdict['objectinfo']['objectid'] # depends on [control=['if'], data=[]]
elif 'objectinfo' in lcdict and 'hatid' in lcdict['objectinfo']:
objectid = lcdict['objectinfo']['hatid'] # depends on [control=['if'], data=[]]
else:
LOGERROR('no objectid present in lcdict for LC %s, using filename prefix as objectid' % lcfile)
objectid = os.path.splitext(os.path.basename(lcfile))[0]
if 'objectinfo' in lcdict:
colorfeat = starfeatures.color_features(lcdict['objectinfo'], deredden=False, custom_bandpasses=custom_bandpasses) # depends on [control=['if'], data=['lcdict']]
else:
LOGERROR('no objectinfo dict in lcdict, could not get magnitudes for LC %s, cannot use for TFA template ensemble' % lcfile)
return None
# this is the initial dict
resultdict = {'objectid': objectid, 'ra': lcdict['objectinfo']['ra'], 'decl': lcdict['objectinfo']['decl'], 'colorfeat': colorfeat, 'lcfpath': os.path.abspath(lcfile), 'lcformat': lcformat, 'lcformatdir': lcformatdir, 'timecols': timecols, 'magcols': magcols, 'errcols': errcols}
for (tcol, mcol, ecol) in zip(timecols, magcols, errcols):
try:
# dereference the columns and get them from the lcdict
if '.' in tcol:
tcolget = tcol.split('.') # depends on [control=['if'], data=['tcol']]
else:
tcolget = [tcol]
times = _dict_get(lcdict, tcolget)
if '.' in mcol:
mcolget = mcol.split('.') # depends on [control=['if'], data=['mcol']]
else:
mcolget = [mcol]
mags = _dict_get(lcdict, mcolget)
if '.' in ecol:
ecolget = ecol.split('.') # depends on [control=['if'], data=['ecol']]
else:
ecolget = [ecol]
errs = _dict_get(lcdict, ecolget)
# normalize here if not using special normalization
if normfunc is None:
(ntimes, nmags) = normalize_magseries(times, mags, magsarefluxes=magsarefluxes)
(times, mags, errs) = (ntimes, nmags, errs) # depends on [control=['if'], data=[]]
# get the variability features for this object
varfeat = varfeatures.all_nonperiodic_features(times, mags, errs)
resultdict[mcol] = varfeat # depends on [control=['try'], data=[]]
except Exception as e:
LOGEXCEPTION('%s, magcol: %s, probably ran into all-nans' % (lcfile, mcol))
resultdict[mcol] = {'ndet': 0, 'mad': np.nan, 'eta_normal': np.nan} # depends on [control=['except'], data=[]] # depends on [control=['for'], data=[]]
return resultdict # depends on [control=['try'], data=[]]
except Exception as e:
LOGEXCEPTION('could not execute get_tfa_stats for task: %s' % repr(task))
return None # depends on [control=['except'], data=[]]
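
A hedged sketch of fanning this worker out with multiprocessing; the light curve paths and the "hat-sql" format name are invented, and the task tuple follows the layout documented in the docstring:

from multiprocessing import Pool

lcfiles = ["lcs/obj0001.pkl", "lcs/obj0002.pkl"]
tasks = [(lcf, "hat-sql", None, None, None, None, None) for lcf in lcfiles]

with Pool(4) as pool:
    results = [r for r in pool.map(_collect_tfa_stats, tasks) if r is not None]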
|
def ppj(json_data):
"""ppj
:param json_data: dictionary to print
"""
return str(json.dumps(
json_data,
sort_keys=True,
indent=4,
separators=(',', ': ')))
|
def function[ppj, parameter[json_data]]:
constant[ppj
:param json_data: dictionary to print
]
return[call[name[str], parameter[call[name[json].dumps, parameter[name[json_data]]]]]]
|
keyword[def] identifier[ppj] ( identifier[json_data] ):
literal[string]
keyword[return] identifier[str] ( identifier[json] . identifier[dumps] (
identifier[json_data] ,
identifier[sort_keys] = keyword[True] ,
identifier[indent] = literal[int] ,
identifier[separators] =( literal[string] , literal[string] )))
|
def ppj(json_data):
"""ppj
:param json_data: dictionary to print
"""
return str(json.dumps(json_data, sort_keys=True, indent=4, separators=(',', ': ')))
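
Usage sketch (sort_keys=True orders the keys, indent=4 pretty-prints):

payload = {"b": 2, "a": {"nested": True}}
print(ppj(payload))
# {
#     "a": {
#         "nested": true
#     },
#     "b": 2
# }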
|