code stringlengths 75 104k | code_sememe stringlengths 47 309k | token_type stringlengths 215 214k | code_dependency stringlengths 75 155k |
|---|---|---|---|
def parents(self):
    """Yield the parent topics of this topic.

    Note: no topic with many parent topics was found for testing,
    so this method may misbehave in some cases -- feedback welcome.

    :return: the parent topics of this topic, as a generator
    :rtype: Topic.Iterable
    """
    self._make_soup()
    parent_topic_tag = self.soup.find('div', class_='parent-topic')
    if parent_topic_tag is None:
        # NOTE(review): when there is no parent, this yields an empty *list*
        # as a single item rather than yielding nothing -- callers may
        # depend on this quirk, so it is kept as-is.
        yield []
    else:
        for topic_tag in parent_topic_tag.find_all('a'):
            yield Topic(Zhihu_URL + topic_tag['href'],
                        topic_tag.text.strip(),
                        session=self._session) | def function[parents, parameter[self]]:
constant[获取此话题的父话题。
注意:由于没找到有很多父话题的话题来测试,
所以本方法可能再某些时候出现问题,请不吝反馈。
:return: 此话题的父话题,返回生成器
:rtype: Topic.Iterable
]
call[name[self]._make_soup, parameter[]]
variable[parent_topic_tag] assign[=] call[name[self].soup.find, parameter[constant[div]]]
if compare[name[parent_topic_tag] is constant[None]] begin[:]
<ast.Yield object at 0x7da20e9b0670> | keyword[def] identifier[parents] ( identifier[self] ):
literal[string]
identifier[self] . identifier[_make_soup] ()
identifier[parent_topic_tag] = identifier[self] . identifier[soup] . identifier[find] ( literal[string] , identifier[class_] = literal[string] )
keyword[if] identifier[parent_topic_tag] keyword[is] keyword[None] :
keyword[yield] []
keyword[else] :
keyword[for] identifier[topic_tag] keyword[in] identifier[parent_topic_tag] . identifier[find_all] ( literal[string] ):
keyword[yield] identifier[Topic] ( identifier[Zhihu_URL] + identifier[topic_tag] [ literal[string] ],
identifier[topic_tag] . identifier[text] . identifier[strip] (),
identifier[session] = identifier[self] . identifier[_session] ) | def parents(self):
"""获取此话题的父话题。
注意:由于没找到有很多父话题的话题来测试,
所以本方法可能再某些时候出现问题,请不吝反馈。
:return: 此话题的父话题,返回生成器
:rtype: Topic.Iterable
"""
self._make_soup()
parent_topic_tag = self.soup.find('div', class_='parent-topic')
if parent_topic_tag is None:
yield [] # depends on [control=['if'], data=[]]
else:
for topic_tag in parent_topic_tag.find_all('a'):
yield Topic(Zhihu_URL + topic_tag['href'], topic_tag.text.strip(), session=self._session) # depends on [control=['for'], data=['topic_tag']] |
def _proxy(self):
    """
    Generate an instance context for the instance; the context is capable of
    performing various actions. All instance actions are proxied to the context.

    :returns: UserBindingContext for this UserBindingInstance
    :rtype: twilio.rest.chat.v2.service.user.user_binding.UserBindingContext
    """
    # Lazily build the context on first access, then reuse it.
    if self._context is not None:
        return self._context
    solution = self._solution
    self._context = UserBindingContext(
        self._version,
        service_sid=solution['service_sid'],
        user_sid=solution['user_sid'],
        sid=solution['sid'],
    )
    return self._context
constant[
Generate an instance context for the instance, the context is capable of
performing various actions. All instance actions are proxied to the context
:returns: UserBindingContext for this UserBindingInstance
:rtype: twilio.rest.chat.v2.service.user.user_binding.UserBindingContext
]
if compare[name[self]._context is constant[None]] begin[:]
name[self]._context assign[=] call[name[UserBindingContext], parameter[name[self]._version]]
return[name[self]._context] | keyword[def] identifier[_proxy] ( identifier[self] ):
literal[string]
keyword[if] identifier[self] . identifier[_context] keyword[is] keyword[None] :
identifier[self] . identifier[_context] = identifier[UserBindingContext] (
identifier[self] . identifier[_version] ,
identifier[service_sid] = identifier[self] . identifier[_solution] [ literal[string] ],
identifier[user_sid] = identifier[self] . identifier[_solution] [ literal[string] ],
identifier[sid] = identifier[self] . identifier[_solution] [ literal[string] ],
)
keyword[return] identifier[self] . identifier[_context] | def _proxy(self):
"""
Generate an instance context for the instance, the context is capable of
performing various actions. All instance actions are proxied to the context
:returns: UserBindingContext for this UserBindingInstance
:rtype: twilio.rest.chat.v2.service.user.user_binding.UserBindingContext
"""
if self._context is None:
self._context = UserBindingContext(self._version, service_sid=self._solution['service_sid'], user_sid=self._solution['user_sid'], sid=self._solution['sid']) # depends on [control=['if'], data=[]]
return self._context |
def _dummify_expr(expr, basename, symbs):
    """Replace each symbol in *symbs* with a numbered dummy symbol.

    Useful to robustify prior to e.g. regexp substitution of
    code strings.
    """
    # sympy's range syntax 'name:N' creates N symbols name0 .. name(N-1).
    placeholders = sympy.symbols('{0}:{1}'.format(basename, len(symbs)))
    for original, placeholder in zip(symbs, placeholders):
        expr = expr.subs({original: placeholder})
    return expr
constant[
Useful to robustify prior to e.g. regexp substitution of
code strings
]
variable[dummies] assign[=] call[name[sympy].symbols, parameter[binary_operation[binary_operation[name[basename] + constant[:]] + call[name[str], parameter[call[name[len], parameter[name[symbs]]]]]]]]
for taget[tuple[[<ast.Name object at 0x7da1b0ab9ff0>, <ast.Name object at 0x7da1b0abbb50>]]] in starred[call[name[enumerate], parameter[name[symbs]]]] begin[:]
variable[expr] assign[=] call[name[expr].subs, parameter[dictionary[[<ast.Name object at 0x7da1b0abb730>], [<ast.Subscript object at 0x7da1b0abbdf0>]]]]
return[name[expr]] | keyword[def] identifier[_dummify_expr] ( identifier[expr] , identifier[basename] , identifier[symbs] ):
literal[string]
identifier[dummies] = identifier[sympy] . identifier[symbols] ( identifier[basename] + literal[string] + identifier[str] ( identifier[len] ( identifier[symbs] )))
keyword[for] identifier[i] , identifier[s] keyword[in] identifier[enumerate] ( identifier[symbs] ):
identifier[expr] = identifier[expr] . identifier[subs] ({ identifier[s] : identifier[dummies] [ identifier[i] ]})
keyword[return] identifier[expr] | def _dummify_expr(expr, basename, symbs):
"""
Useful to robustify prior to e.g. regexp substitution of
code strings
"""
dummies = sympy.symbols(basename + ':' + str(len(symbs)))
for (i, s) in enumerate(symbs):
expr = expr.subs({s: dummies[i]}) # depends on [control=['for'], data=[]]
return expr |
def add_pipe(self, pipe, branch=None):
    """
    Adds a pipe (A, ..., N) which is an N-``tuple`` tuple of ``Pipers``
    instances. Adding a pipe means to add all the ``Pipers`` and connect
    them in the specified left to right order.

    The direction of the edges in the ``DictGraph`` is reversed compared to
    the left to right data-flow in a pipe.

    Arguments:
      - pipe(sequence) N-``tuple`` of ``Piper`` instances or objects which
        are valid ``add_piper`` arguments. See: ``Dagger.add_piper`` and
        ``Dagger.resolve``.
    """
    # TODO: Check if consume/spawn/produce is right!
    self.log.debug('%s adding pipe: %s' % (repr(self), repr(pipe)))
    for i in xrange(len(pipe) - 1):
        # Graph edges point downstream -> upstream, i.e. reversed relative
        # to the left-to-right data flow of the pipe.
        edge = (pipe[i + 1], pipe[i])
        edge = (self.add_piper(edge[0], create=True, branch=branch)[1],
                self.add_piper(edge[1], create=True, branch=branch)[1])
        if edge[0] in self.dfs(edge[1], []):
            # Build the message once; previously it was formatted twice
            # (for the log call and again for the exception).
            msg = '%s cannot add the %s>>>%s edge (introduces a cycle)' % \
                  (repr(self), edge[0], edge[1])
            self.log.error(msg)
            raise DaggerError(msg)
        self.add_edge(edge)
        self.clear_nodes()  # reset DFS discovery state
        self.log.debug('%s added the %s>>>%s edge' %
                       (repr(self), edge[0], edge[1]))
constant[
Adds a pipe (A, ..., N) which is an N-``tuple`` tuple of ``Pipers``
instances. Adding a pipe means to add all the ``Pipers`` and connect
them in the specified left to right order.
The direction of the edges in the ``DictGraph`` is reversed compared to
the left to right data-flow in a pipe.
Arguments:
- pipe(sequence) N-``tuple`` of ``Piper`` instances or objects which
are valid ``add_piper`` arguments. See: ``Dagger.add_piper`` and
``Dagger.resolve``.
]
call[name[self].log.debug, parameter[binary_operation[constant[%s adding pipe: %s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Call object at 0x7da1b257c6d0>, <ast.Call object at 0x7da1b257cf10>]]]]]
for taget[name[i]] in starred[call[name[xrange], parameter[binary_operation[call[name[len], parameter[name[pipe]]] - constant[1]]]]] begin[:]
variable[edge] assign[=] tuple[[<ast.Subscript object at 0x7da1b257cca0>, <ast.Subscript object at 0x7da1b257e3b0>]]
variable[edge] assign[=] tuple[[<ast.Subscript object at 0x7da1b257f3a0>, <ast.Subscript object at 0x7da1b257fd60>]]
if compare[call[name[edge]][constant[0]] in call[name[self].dfs, parameter[call[name[edge]][constant[1]], list[[]]]]] begin[:]
call[name[self].log.error, parameter[binary_operation[constant[%s cannot add the %s>>>%s edge (introduces a cycle)] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Call object at 0x7da1b257ccd0>, <ast.Subscript object at 0x7da1b257c6a0>, <ast.Subscript object at 0x7da1b257c310>]]]]]
<ast.Raise object at 0x7da1b257fd30>
call[name[self].add_edge, parameter[name[edge]]]
call[name[self].clear_nodes, parameter[]]
call[name[self].log.debug, parameter[binary_operation[constant[%s added the %s>>>%s edge] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Call object at 0x7da1b257f340>, <ast.Subscript object at 0x7da1b257e8f0>, <ast.Subscript object at 0x7da1b257e200>]]]]] | keyword[def] identifier[add_pipe] ( identifier[self] , identifier[pipe] , identifier[branch] = keyword[None] ):
literal[string]
identifier[self] . identifier[log] . identifier[debug] ( literal[string] %( identifier[repr] ( identifier[self] ), identifier[repr] ( identifier[pipe] )))
keyword[for] identifier[i] keyword[in] identifier[xrange] ( identifier[len] ( identifier[pipe] )- literal[int] ):
identifier[edge] =( identifier[pipe] [ identifier[i] + literal[int] ], identifier[pipe] [ identifier[i] ])
identifier[edge] =( identifier[self] . identifier[add_piper] ( identifier[edge] [ literal[int] ], identifier[create] = keyword[True] , identifier[branch] = identifier[branch] )[ literal[int] ], identifier[self] . identifier[add_piper] ( identifier[edge] [ literal[int] ], identifier[create] = keyword[True] , identifier[branch] = identifier[branch] )[ literal[int] ])
keyword[if] identifier[edge] [ literal[int] ] keyword[in] identifier[self] . identifier[dfs] ( identifier[edge] [ literal[int] ],[]):
identifier[self] . identifier[log] . identifier[error] ( literal[string] %( identifier[repr] ( identifier[self] ), identifier[edge] [ literal[int] ], identifier[edge] [ literal[int] ]))
keyword[raise] identifier[DaggerError] ( literal[string] %( identifier[repr] ( identifier[self] ), identifier[edge] [ literal[int] ], identifier[edge] [ literal[int] ]))
identifier[self] . identifier[add_edge] ( identifier[edge] )
identifier[self] . identifier[clear_nodes] ()
identifier[self] . identifier[log] . identifier[debug] ( literal[string] %( identifier[repr] ( identifier[self] ), identifier[edge] [ literal[int] ], identifier[edge] [ literal[int] ])) | def add_pipe(self, pipe, branch=None):
"""
Adds a pipe (A, ..., N) which is an N-``tuple`` tuple of ``Pipers``
instances. Adding a pipe means to add all the ``Pipers`` and connect
them in the specified left to right order.
The direction of the edges in the ``DictGraph`` is reversed compared to
the left to right data-flow in a pipe.
Arguments:
- pipe(sequence) N-``tuple`` of ``Piper`` instances or objects which
are valid ``add_piper`` arguments. See: ``Dagger.add_piper`` and
``Dagger.resolve``.
"""
#TODO: Check if consume/spawn/produce is right!
self.log.debug('%s adding pipe: %s' % (repr(self), repr(pipe)))
for i in xrange(len(pipe) - 1):
edge = (pipe[i + 1], pipe[i])
edge = (self.add_piper(edge[0], create=True, branch=branch)[1], self.add_piper(edge[1], create=True, branch=branch)[1])
if edge[0] in self.dfs(edge[1], []):
self.log.error('%s cannot add the %s>>>%s edge (introduces a cycle)' % (repr(self), edge[0], edge[1]))
raise DaggerError('%s cannot add the %s>>>%s edge (introduces a cycle)' % (repr(self), edge[0], edge[1])) # depends on [control=['if'], data=[]]
self.add_edge(edge)
self.clear_nodes() #dfs
self.log.debug('%s added the %s>>>%s edge' % (repr(self), edge[0], edge[1])) # depends on [control=['for'], data=['i']] |
def chunk_to_matrices(narr, mapcol, nmask):
    """
    numba compiled code to get matrix fast.

    *narr* is a 4 x N seq matrix converted to np.int8: the four bases
    A/T/G/C are mapped to their index 0-3; all other characters keep
    high codes (e.g. '-'==45, 'N'==78) -- assumed already resolved here.
    *mapcol* gives the locus each column belongs to; only the first
    unmasked column of each locus is counted. *nmask* flags columns to skip.

    Returns a (3, 16, 16) uint32 array: layer 0 is the 16x16 count matrix
    indexed by (4*a+b, 4*c+d); layers 1 and 2 hold its 4x4-tiled reshape
    and the transposed tiles, respectively.
    """
    # get seq alignment and create an empty array for filling
    mats = np.zeros((3, 16, 16), dtype=np.uint32)

    # Count one site per locus: only when the map location changes and the
    # column is not masked. NOTE: `range`, not the Python-2-only `xrange`,
    # so this also runs under Python 3 / numba nopython mode.
    last_loc = -1
    for idx in range(mapcol.shape[0]):
        if not nmask[idx]:
            if not mapcol[idx] == last_loc:
                i = narr[:, idx]
                mats[0, (4 * i[0]) + i[1], (4 * i[2]) + i[3]] += 1
                last_loc = mapcol[idx]

    # fill the alternates: tile layer 0's rows into 4x4 blocks (layer 1)
    # and their transposes (layer 2)
    x = np.uint8(0)
    for y in np.array([0, 4, 8, 12], dtype=np.uint8):
        for z in np.array([0, 4, 8, 12], dtype=np.uint8):
            mats[1, y:y + np.uint8(4), z:z + np.uint8(4)] = mats[0, x].reshape(4, 4)
            mats[2, y:y + np.uint8(4), z:z + np.uint8(4)] = mats[0, x].reshape(4, 4).T
            x += np.uint8(1)
    return mats
constant[
numba compiled code to get matrix fast.
arr is a 4 x N seq matrix converted to np.int8
I convert the numbers for ATGC into their respective index for the MAT
matrix, and leave all others as high numbers, i.e., -==45, N==78.
]
variable[mats] assign[=] call[name[np].zeros, parameter[tuple[[<ast.Constant object at 0x7da20c6c5f30>, <ast.Constant object at 0x7da20c6c6740>, <ast.Constant object at 0x7da20c6c66e0>]]]]
variable[last_loc] assign[=] <ast.UnaryOp object at 0x7da20c6c7a30>
for taget[name[idx]] in starred[call[name[xrange], parameter[call[name[mapcol].shape][constant[0]]]]] begin[:]
if <ast.UnaryOp object at 0x7da20c6c7ac0> begin[:]
if <ast.UnaryOp object at 0x7da20c6c5810> begin[:]
variable[i] assign[=] call[name[narr]][tuple[[<ast.Slice object at 0x7da20c6c4ac0>, <ast.Name object at 0x7da20c6c7a90>]]]
<ast.AugAssign object at 0x7da20c6c4af0>
variable[last_loc] assign[=] call[name[mapcol]][name[idx]]
variable[x] assign[=] call[name[np].uint8, parameter[constant[0]]]
for taget[name[y]] in starred[call[name[np].array, parameter[list[[<ast.Constant object at 0x7da20c6c7c70>, <ast.Constant object at 0x7da20c6c5270>, <ast.Constant object at 0x7da20c6c4e20>, <ast.Constant object at 0x7da20c6c40a0>]]]]] begin[:]
for taget[name[z]] in starred[call[name[np].array, parameter[list[[<ast.Constant object at 0x7da20c6c79d0>, <ast.Constant object at 0x7da20c6c67a0>, <ast.Constant object at 0x7da20c6c5b40>, <ast.Constant object at 0x7da20c6c5180>]]]]] begin[:]
call[name[mats]][tuple[[<ast.Constant object at 0x7da20c6c5960>, <ast.Slice object at 0x7da20c6c41c0>, <ast.Slice object at 0x7da20c6c7340>]]] assign[=] call[call[name[mats]][tuple[[<ast.Constant object at 0x7da20c6c7730>, <ast.Name object at 0x7da20c6c4100>]]].reshape, parameter[constant[4], constant[4]]]
call[name[mats]][tuple[[<ast.Constant object at 0x7da20c6c49d0>, <ast.Slice object at 0x7da20c6c5450>, <ast.Slice object at 0x7da20c6c6aa0>]]] assign[=] call[call[name[mats]][tuple[[<ast.Constant object at 0x7da20c6c40d0>, <ast.Name object at 0x7da20c6c4790>]]].reshape, parameter[constant[4], constant[4]]].T
<ast.AugAssign object at 0x7da20c6c4160>
return[name[mats]] | keyword[def] identifier[chunk_to_matrices] ( identifier[narr] , identifier[mapcol] , identifier[nmask] ):
literal[string]
identifier[mats] = identifier[np] . identifier[zeros] (( literal[int] , literal[int] , literal[int] ), identifier[dtype] = identifier[np] . identifier[uint32] )
identifier[last_loc] =- literal[int]
keyword[for] identifier[idx] keyword[in] identifier[xrange] ( identifier[mapcol] . identifier[shape] [ literal[int] ]):
keyword[if] keyword[not] identifier[nmask] [ identifier[idx] ]:
keyword[if] keyword[not] identifier[mapcol] [ identifier[idx] ]== identifier[last_loc] :
identifier[i] = identifier[narr] [:, identifier[idx] ]
identifier[mats] [ literal[int] ,( literal[int] * identifier[i] [ literal[int] ])+ identifier[i] [ literal[int] ],( literal[int] * identifier[i] [ literal[int] ])+ identifier[i] [ literal[int] ]]+= literal[int]
identifier[last_loc] = identifier[mapcol] [ identifier[idx] ]
identifier[x] = identifier[np] . identifier[uint8] ( literal[int] )
keyword[for] identifier[y] keyword[in] identifier[np] . identifier[array] ([ literal[int] , literal[int] , literal[int] , literal[int] ], identifier[dtype] = identifier[np] . identifier[uint8] ):
keyword[for] identifier[z] keyword[in] identifier[np] . identifier[array] ([ literal[int] , literal[int] , literal[int] , literal[int] ], identifier[dtype] = identifier[np] . identifier[uint8] ):
identifier[mats] [ literal[int] , identifier[y] : identifier[y] + identifier[np] . identifier[uint8] ( literal[int] ), identifier[z] : identifier[z] + identifier[np] . identifier[uint8] ( literal[int] )]= identifier[mats] [ literal[int] , identifier[x] ]. identifier[reshape] ( literal[int] , literal[int] )
identifier[mats] [ literal[int] , identifier[y] : identifier[y] + identifier[np] . identifier[uint8] ( literal[int] ), identifier[z] : identifier[z] + identifier[np] . identifier[uint8] ( literal[int] )]= identifier[mats] [ literal[int] , identifier[x] ]. identifier[reshape] ( literal[int] , literal[int] ). identifier[T]
identifier[x] += identifier[np] . identifier[uint8] ( literal[int] )
keyword[return] identifier[mats] | def chunk_to_matrices(narr, mapcol, nmask):
"""
numba compiled code to get matrix fast.
arr is a 4 x N seq matrix converted to np.int8
I convert the numbers for ATGC into their respective index for the MAT
matrix, and leave all others as high numbers, i.e., -==45, N==78.
"""
## get seq alignment and create an empty array for filling
mats = np.zeros((3, 16, 16), dtype=np.uint32) ## replace ints with small ints that index their place in the
## 16x16. This no longer checks for big ints to exclude, so resolve=True
## is now the default, TODO.
last_loc = -1
for idx in xrange(mapcol.shape[0]):
if not nmask[idx]:
if not mapcol[idx] == last_loc:
i = narr[:, idx]
mats[0, 4 * i[0] + i[1], 4 * i[2] + i[3]] += 1
last_loc = mapcol[idx] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['idx']]
## fill the alternates
x = np.uint8(0)
for y in np.array([0, 4, 8, 12], dtype=np.uint8):
for z in np.array([0, 4, 8, 12], dtype=np.uint8):
mats[1, y:y + np.uint8(4), z:z + np.uint8(4)] = mats[0, x].reshape(4, 4)
mats[2, y:y + np.uint8(4), z:z + np.uint8(4)] = mats[0, x].reshape(4, 4).T
x += np.uint8(1) # depends on [control=['for'], data=['z']] # depends on [control=['for'], data=['y']]
return mats |
def parse(cls, parser, token):
    """
    Parse the node syntax:

    .. code-block:: html+django

        {% page_placeholder parentobj slotname title="test" role="m" %}
    """
    bits, as_var = parse_as_var(parser, token)
    tag_name, args, kwargs = parse_token_kwargs(
        parser, bits,
        allowed_kwargs=cls.allowed_kwargs,
        compile_args=True,
        compile_kwargs=True,
    )

    # Interpret the positional arguments.
    if len(args) == 2:
        parent_expr, slot_expr = args
    elif len(args) == 1:
        # Default the parent to 'page'. Works with most CMSes,
        # including django-fluent-pages.
        parent_expr = Variable('page')
        slot_expr = args[0]
    else:
        raise TemplateSyntaxError(
            """{0} tag allows two arguments: 'parent object' 'slot name' and optionally: title=".." role="..".""".format(tag_name))

    cls.validate_args(tag_name, *args, **kwargs)
    return cls(
        tag_name=tag_name,
        as_var=as_var,
        parent_expr=parent_expr,
        slot_expr=slot_expr,
        **kwargs
    )
constant[
Parse the node syntax:
.. code-block:: html+django
{% page_placeholder parentobj slotname title="test" role="m" %}
]
<ast.Tuple object at 0x7da1b1175780> assign[=] call[name[parse_as_var], parameter[name[parser], name[token]]]
<ast.Tuple object at 0x7da1b1175240> assign[=] call[name[parse_token_kwargs], parameter[name[parser], name[bits]]]
if compare[call[name[len], parameter[name[args]]] equal[==] constant[2]] begin[:]
variable[parent_expr] assign[=] call[name[args]][constant[0]]
variable[slot_expr] assign[=] call[name[args]][constant[1]]
call[name[cls].validate_args, parameter[name[tag_name], <ast.Starred object at 0x7da1b1175870>]]
return[call[name[cls], parameter[]]] | keyword[def] identifier[parse] ( identifier[cls] , identifier[parser] , identifier[token] ):
literal[string]
identifier[bits] , identifier[as_var] = identifier[parse_as_var] ( identifier[parser] , identifier[token] )
identifier[tag_name] , identifier[args] , identifier[kwargs] = identifier[parse_token_kwargs] ( identifier[parser] , identifier[bits] , identifier[allowed_kwargs] = identifier[cls] . identifier[allowed_kwargs] , identifier[compile_args] = keyword[True] , identifier[compile_kwargs] = keyword[True] )
keyword[if] identifier[len] ( identifier[args] )== literal[int] :
identifier[parent_expr] = identifier[args] [ literal[int] ]
identifier[slot_expr] = identifier[args] [ literal[int] ]
keyword[elif] identifier[len] ( identifier[args] )== literal[int] :
identifier[parent_expr] = identifier[Variable] ( literal[string] )
identifier[slot_expr] = identifier[args] [ literal[int] ]
keyword[else] :
keyword[raise] identifier[TemplateSyntaxError] ( literal[string] . identifier[format] ( identifier[tag_name] ))
identifier[cls] . identifier[validate_args] ( identifier[tag_name] ,* identifier[args] ,** identifier[kwargs] )
keyword[return] identifier[cls] (
identifier[tag_name] = identifier[tag_name] ,
identifier[as_var] = identifier[as_var] ,
identifier[parent_expr] = identifier[parent_expr] ,
identifier[slot_expr] = identifier[slot_expr] ,
** identifier[kwargs]
) | def parse(cls, parser, token):
"""
Parse the node syntax:
.. code-block:: html+django
{% page_placeholder parentobj slotname title="test" role="m" %}
"""
(bits, as_var) = parse_as_var(parser, token)
(tag_name, args, kwargs) = parse_token_kwargs(parser, bits, allowed_kwargs=cls.allowed_kwargs, compile_args=True, compile_kwargs=True)
# Play with the arguments
if len(args) == 2:
parent_expr = args[0]
slot_expr = args[1] # depends on [control=['if'], data=[]]
elif len(args) == 1:
# Allow 'page' by default. Works with most CMS'es, including django-fluent-pages.
parent_expr = Variable('page')
slot_expr = args[0] # depends on [control=['if'], data=[]]
else:
raise TemplateSyntaxError('{0} tag allows two arguments: \'parent object\' \'slot name\' and optionally: title=".." role="..".'.format(tag_name))
cls.validate_args(tag_name, *args, **kwargs)
return cls(tag_name=tag_name, as_var=as_var, parent_expr=parent_expr, slot_expr=slot_expr, **kwargs) |
def locus(args):
    """
    %prog locus bamfile

    Extract selected locus from a list of TREDs for validation, and run lobSTR.
    """
    from jcvi.formats.sam import get_minibam
    # See `Format-lobSTR-database.ipynb` for a list of TREDs for validation
    INCLUDE = ["HD", "SBMA", "SCA1", "SCA2", "SCA8", "SCA17", "DM1", "DM2",
               "FXTAS"]
    db_choices = ("hg38", "hg19")

    p = OptionParser(locus.__doc__)
    p.add_option("--tred", choices=INCLUDE,
                 help="TRED name")
    p.add_option("--ref", choices=db_choices, default="hg38",
                 help="Reference genome")
    p.set_home("lobstr")
    opts, args = p.parse_args(args)

    if len(args) != 1:
        sys.exit(not p.print_help())

    bamfile, = args
    ref = opts.ref
    lhome = opts.lobstr_home
    tred = opts.tred

    # Look up the repeat location of the requested TRED from the metadata.
    tredsfile = datafile("TREDs.meta.csv")
    tf = pd.read_csv(tredsfile, index_col=0)
    # `.ix` was removed in pandas 1.0; `tred` is an index label, so `.loc`
    # is the direct replacement.
    row = tf.loc[tred]
    tag = "repeat_location"
    ldb = "TREDs"
    if ref == "hg19":
        tag += "." + ref
        ldb += "-" + ref
    seqid, start_end = row[tag].split(":")

    # Pad the repeat region so the mini-BAM includes flanking reads.
    PAD = 1000
    start, end = start_end.split('-')
    start, end = int(start) - PAD, int(end) + PAD
    region = "{}:{}-{}".format(seqid, start, end)
    minibamfile = get_minibam(bamfile, region)

    # Run lobSTR allelotyping on just this chromosome of the mini-BAM.
    c = seqid.replace("chr", "")
    cmd, vcf = allelotype_on_chr(minibamfile, c, lhome, ldb)
    sh(cmd)

    parser = LobSTRvcf(columnidsfile=None)
    parser.parse(vcf, filtered=False)
    items = parser.items()
    if not items:
        print("No entry found!", file=sys.stderr)
        return

    # Reuse the already-materialized items instead of calling items() again.
    k, v = items[0]
    print("{} => {}".format(tred, v.replace(',', '/')), file=sys.stderr)
constant[
%prog locus bamfile
Extract selected locus from a list of TREDs for validation, and run lobSTR.
]
from relative_module[jcvi.formats.sam] import module[get_minibam]
variable[INCLUDE] assign[=] list[[<ast.Constant object at 0x7da1b08d3820>, <ast.Constant object at 0x7da1b08d1390>, <ast.Constant object at 0x7da1b08d13c0>, <ast.Constant object at 0x7da1b08d12a0>, <ast.Constant object at 0x7da1b08d2f20>, <ast.Constant object at 0x7da1b08d2f80>, <ast.Constant object at 0x7da1b08d2fb0>, <ast.Constant object at 0x7da1b08d2f50>, <ast.Constant object at 0x7da1b08d2ef0>]]
variable[db_choices] assign[=] tuple[[<ast.Constant object at 0x7da1b08d1990>, <ast.Constant object at 0x7da1b08d19f0>]]
variable[p] assign[=] call[name[OptionParser], parameter[name[locus].__doc__]]
call[name[p].add_option, parameter[constant[--tred]]]
call[name[p].add_option, parameter[constant[--ref]]]
call[name[p].set_home, parameter[constant[lobstr]]]
<ast.Tuple object at 0x7da1b08d0bb0> assign[=] call[name[p].parse_args, parameter[name[args]]]
if compare[call[name[len], parameter[name[args]]] not_equal[!=] constant[1]] begin[:]
call[name[sys].exit, parameter[<ast.UnaryOp object at 0x7da1b08d1000>]]
<ast.Tuple object at 0x7da1b08d3f40> assign[=] name[args]
variable[ref] assign[=] name[opts].ref
variable[lhome] assign[=] name[opts].lobstr_home
variable[tred] assign[=] name[opts].tred
variable[tredsfile] assign[=] call[name[datafile], parameter[constant[TREDs.meta.csv]]]
variable[tf] assign[=] call[name[pd].read_csv, parameter[name[tredsfile]]]
variable[row] assign[=] call[name[tf].ix][name[tred]]
variable[tag] assign[=] constant[repeat_location]
variable[ldb] assign[=] constant[TREDs]
if compare[name[ref] equal[==] constant[hg19]] begin[:]
<ast.AugAssign object at 0x7da1b08d24d0>
<ast.AugAssign object at 0x7da1b08d2860>
<ast.Tuple object at 0x7da1b08d2a10> assign[=] call[call[name[row]][name[tag]].split, parameter[constant[:]]]
variable[PAD] assign[=] constant[1000]
<ast.Tuple object at 0x7da1b08d3040> assign[=] call[name[start_end].split, parameter[constant[-]]]
<ast.Tuple object at 0x7da1b08d1c60> assign[=] tuple[[<ast.BinOp object at 0x7da1b08d1c00>, <ast.BinOp object at 0x7da1b08d1e40>]]
variable[region] assign[=] call[constant[{}:{}-{}].format, parameter[name[seqid], name[start], name[end]]]
variable[minibamfile] assign[=] call[name[get_minibam], parameter[name[bamfile], name[region]]]
variable[c] assign[=] call[name[seqid].replace, parameter[constant[chr], constant[]]]
<ast.Tuple object at 0x7da1b08d34c0> assign[=] call[name[allelotype_on_chr], parameter[name[minibamfile], name[c], name[lhome], name[ldb]]]
call[name[sh], parameter[name[cmd]]]
variable[parser] assign[=] call[name[LobSTRvcf], parameter[]]
call[name[parser].parse, parameter[name[vcf]]]
variable[items] assign[=] call[name[parser].items, parameter[]]
if <ast.UnaryOp object at 0x7da1b08d37c0> begin[:]
call[name[print], parameter[constant[No entry found!]]]
return[None]
<ast.Tuple object at 0x7da1b08d2b90> assign[=] call[call[name[parser].items, parameter[]]][constant[0]]
call[name[print], parameter[call[constant[{} => {}].format, parameter[name[tred], call[name[v].replace, parameter[constant[,], constant[/]]]]]]] | keyword[def] identifier[locus] ( identifier[args] ):
literal[string]
keyword[from] identifier[jcvi] . identifier[formats] . identifier[sam] keyword[import] identifier[get_minibam]
identifier[INCLUDE] =[ literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] ,
literal[string] ]
identifier[db_choices] =( literal[string] , literal[string] )
identifier[p] = identifier[OptionParser] ( identifier[locus] . identifier[__doc__] )
identifier[p] . identifier[add_option] ( literal[string] , identifier[choices] = identifier[INCLUDE] ,
identifier[help] = literal[string] )
identifier[p] . identifier[add_option] ( literal[string] , identifier[choices] = identifier[db_choices] , identifier[default] = literal[string] ,
identifier[help] = literal[string] )
identifier[p] . identifier[set_home] ( literal[string] )
identifier[opts] , identifier[args] = identifier[p] . identifier[parse_args] ( identifier[args] )
keyword[if] identifier[len] ( identifier[args] )!= literal[int] :
identifier[sys] . identifier[exit] ( keyword[not] identifier[p] . identifier[print_help] ())
identifier[bamfile] ,= identifier[args]
identifier[ref] = identifier[opts] . identifier[ref]
identifier[lhome] = identifier[opts] . identifier[lobstr_home]
identifier[tred] = identifier[opts] . identifier[tred]
identifier[tredsfile] = identifier[datafile] ( literal[string] )
identifier[tf] = identifier[pd] . identifier[read_csv] ( identifier[tredsfile] , identifier[index_col] = literal[int] )
identifier[row] = identifier[tf] . identifier[ix] [ identifier[tred] ]
identifier[tag] = literal[string]
identifier[ldb] = literal[string]
keyword[if] identifier[ref] == literal[string] :
identifier[tag] += literal[string] + identifier[ref]
identifier[ldb] += literal[string] + identifier[ref]
identifier[seqid] , identifier[start_end] = identifier[row] [ identifier[tag] ]. identifier[split] ( literal[string] )
identifier[PAD] = literal[int]
identifier[start] , identifier[end] = identifier[start_end] . identifier[split] ( literal[string] )
identifier[start] , identifier[end] = identifier[int] ( identifier[start] )- identifier[PAD] , identifier[int] ( identifier[end] )+ identifier[PAD]
identifier[region] = literal[string] . identifier[format] ( identifier[seqid] , identifier[start] , identifier[end] )
identifier[minibamfile] = identifier[get_minibam] ( identifier[bamfile] , identifier[region] )
identifier[c] = identifier[seqid] . identifier[replace] ( literal[string] , literal[string] )
identifier[cmd] , identifier[vcf] = identifier[allelotype_on_chr] ( identifier[minibamfile] , identifier[c] , identifier[lhome] , identifier[ldb] )
identifier[sh] ( identifier[cmd] )
identifier[parser] = identifier[LobSTRvcf] ( identifier[columnidsfile] = keyword[None] )
identifier[parser] . identifier[parse] ( identifier[vcf] , identifier[filtered] = keyword[False] )
identifier[items] = identifier[parser] . identifier[items] ()
keyword[if] keyword[not] identifier[items] :
identifier[print] ( literal[string] , identifier[file] = identifier[sys] . identifier[stderr] )
keyword[return]
identifier[k] , identifier[v] = identifier[parser] . identifier[items] ()[ literal[int] ]
identifier[print] ( literal[string] . identifier[format] ( identifier[tred] , identifier[v] . identifier[replace] ( literal[string] , literal[string] )), identifier[file] = identifier[sys] . identifier[stderr] ) | def locus(args):
"""
%prog locus bamfile
Extract selected locus from a list of TREDs for validation, and run lobSTR.
"""
from jcvi.formats.sam import get_minibam
# See `Format-lobSTR-database.ipynb` for a list of TREDs for validation
INCLUDE = ['HD', 'SBMA', 'SCA1', 'SCA2', 'SCA8', 'SCA17', 'DM1', 'DM2', 'FXTAS']
db_choices = ('hg38', 'hg19')
p = OptionParser(locus.__doc__)
p.add_option('--tred', choices=INCLUDE, help='TRED name')
p.add_option('--ref', choices=db_choices, default='hg38', help='Reference genome')
p.set_home('lobstr')
(opts, args) = p.parse_args(args)
if len(args) != 1:
sys.exit(not p.print_help()) # depends on [control=['if'], data=[]]
(bamfile,) = args
ref = opts.ref
lhome = opts.lobstr_home
tred = opts.tred
tredsfile = datafile('TREDs.meta.csv')
tf = pd.read_csv(tredsfile, index_col=0)
row = tf.ix[tred]
tag = 'repeat_location'
ldb = 'TREDs'
if ref == 'hg19':
tag += '.' + ref
ldb += '-' + ref # depends on [control=['if'], data=['ref']]
(seqid, start_end) = row[tag].split(':')
PAD = 1000
(start, end) = start_end.split('-')
(start, end) = (int(start) - PAD, int(end) + PAD)
region = '{}:{}-{}'.format(seqid, start, end)
minibamfile = get_minibam(bamfile, region)
c = seqid.replace('chr', '')
(cmd, vcf) = allelotype_on_chr(minibamfile, c, lhome, ldb)
sh(cmd)
parser = LobSTRvcf(columnidsfile=None)
parser.parse(vcf, filtered=False)
items = parser.items()
if not items:
print('No entry found!', file=sys.stderr)
return # depends on [control=['if'], data=[]]
(k, v) = parser.items()[0]
print('{} => {}'.format(tred, v.replace(',', '/')), file=sys.stderr) |
def _is_valid_function(module_name, function):
    '''
    Determine if a function is valid for a module
    '''
    qualified_name = "{0}.{1}".format(module_name, function)
    try:
        available = __salt__['sys.list_functions'](module_name)
    except salt.exceptions.SaltException:
        # Lookup failed; substitute a placeholder list so the membership
        # test below simply evaluates to False.
        available = ["unable to look up functions"]
    return qualified_name in available
constant[
Determine if a function is valid for a module
]
<ast.Try object at 0x7da18dc9a200>
return[compare[call[constant[{0}.{1}].format, parameter[name[module_name], name[function]]] in name[functions]]] | keyword[def] identifier[_is_valid_function] ( identifier[module_name] , identifier[function] ):
literal[string]
keyword[try] :
identifier[functions] = identifier[__salt__] [ literal[string] ]( identifier[module_name] )
keyword[except] identifier[salt] . identifier[exceptions] . identifier[SaltException] :
identifier[functions] =[ literal[string] ]
keyword[return] literal[string] . identifier[format] ( identifier[module_name] , identifier[function] ) keyword[in] identifier[functions] | def _is_valid_function(module_name, function):
"""
Determine if a function is valid for a module
"""
try:
functions = __salt__['sys.list_functions'](module_name) # depends on [control=['try'], data=[]]
except salt.exceptions.SaltException:
functions = ['unable to look up functions'] # depends on [control=['except'], data=[]]
return '{0}.{1}'.format(module_name, function) in functions |
def get_immediate_parents(self):
    """
    Return all direct parents of this company. Excludes parents of parents
    """
    # Ownership rows where this company is the child give its direct parents.
    child_links = Ownership.objects.filter(child=self)
    return Company.objects.filter(parent__in=child_links).distinct()
constant[
Return all direct parents of this company. Excludes parents of parents
]
variable[ownership] assign[=] call[name[Ownership].objects.filter, parameter[]]
variable[parents] assign[=] call[call[name[Company].objects.filter, parameter[]].distinct, parameter[]]
return[name[parents]] | keyword[def] identifier[get_immediate_parents] ( identifier[self] ):
literal[string]
identifier[ownership] = identifier[Ownership] . identifier[objects] . identifier[filter] ( identifier[child] = identifier[self] )
identifier[parents] = identifier[Company] . identifier[objects] . identifier[filter] ( identifier[parent__in] = identifier[ownership] ). identifier[distinct] ()
keyword[return] identifier[parents] | def get_immediate_parents(self):
"""
Return all direct parents of this company. Excludes parents of parents
"""
ownership = Ownership.objects.filter(child=self)
parents = Company.objects.filter(parent__in=ownership).distinct()
return parents |
def folderitem(self, obj, item, index):
    """Service triggered each time an item is iterated in folderitems.
    The use of this service prevents the extra-loops in child objects.
    :obj: the instance of the class to be foldered
    :item: dict containing the properties of the object to be used by
        the template
    :index: current index of the item
    """
    title = obj.Title()
    url = obj.absolute_url()
    item["Title"] = title
    item["replace"]["Title"] = get_link(url, value=title)

    # Instrument type: plain-text column plus a linked replacement
    instrument_type = obj.getInstrumentType()
    if instrument_type:
        url = instrument_type.absolute_url()
        title = instrument_type.Title()
        # Reuse the computed title instead of calling Title() a second time
        item["Type"] = title
        item["replace"]["Type"] = get_link(url, value=title)
    else:
        item["Type"] = ""

    # Manufacturer ("Brand"): same pattern as the instrument type
    instrument_brand = obj.getManufacturer()
    if instrument_brand:
        url = instrument_brand.absolute_url()
        title = instrument_brand.Title()
        item["Brand"] = title
        item["replace"]["Brand"] = get_link(url, value=title)
    else:
        item["Brand"] = ""

    # Model is a plain string; normalize any falsy value to ""
    item["Model"] = obj.getModel() or ""

    # Certificate expiry: formatted date or a localized placeholder
    expiry_date = obj.getCertificateExpireDate()
    if expiry_date is None:
        item["ExpiryDate"] = _("No date set")
    else:
        item["ExpiryDate"] = expiry_date.asdatetime().strftime(
            self.date_format_short)

    if obj.isOutOfDate():
        item["WeeksToExpire"] = _("Out of date")
    else:
        weeks, days = obj.getWeeksToExpire()
        # NOTE(review): the message is formatted *before* being passed to
        # _(), so it cannot be translated as a whole sentence -- confirm
        # whether this should use a translatable template instead.
        item["WeeksToExpire"] = _("{} weeks and {} day(s)".format(
            str(weeks), str(days)))

    # Methods: render as a comma-separated list of links
    methods = obj.getMethods()
    if methods:
        links = [get_link(m.absolute_url(), value=m.Title(),
                          css_class="link")
                 for m in methods]
        item["replace"]["Methods"] = ", ".join(links)
    return item
constant[Service triggered each time an item is iterated in folderitems.
The use of this service prevents the extra-loops in child objects.
:obj: the instance of the class to be foldered
:item: dict containing the properties of the object to be used by
the template
:index: current index of the item
]
variable[title] assign[=] call[name[obj].Title, parameter[]]
variable[url] assign[=] call[name[obj].absolute_url, parameter[]]
call[name[item]][constant[Title]] assign[=] name[title]
call[call[name[item]][constant[replace]]][constant[Title]] assign[=] call[name[get_link], parameter[name[url]]]
variable[instrument_type] assign[=] call[name[obj].getInstrumentType, parameter[]]
if name[instrument_type] begin[:]
variable[url] assign[=] call[name[instrument_type].absolute_url, parameter[]]
variable[title] assign[=] call[name[instrument_type].Title, parameter[]]
call[name[item]][constant[Type]] assign[=] call[name[instrument_type].Title, parameter[]]
call[call[name[item]][constant[replace]]][constant[Type]] assign[=] call[name[get_link], parameter[name[url]]]
variable[instrument_brand] assign[=] call[name[obj].getManufacturer, parameter[]]
if name[instrument_brand] begin[:]
variable[url] assign[=] call[name[instrument_brand].absolute_url, parameter[]]
variable[title] assign[=] call[name[instrument_brand].Title, parameter[]]
call[name[item]][constant[Brand]] assign[=] call[name[instrument_brand].Title, parameter[]]
call[call[name[item]][constant[replace]]][constant[Brand]] assign[=] call[name[get_link], parameter[name[url]]]
variable[instrument_model] assign[=] call[name[obj].getModel, parameter[]]
if name[instrument_model] begin[:]
call[name[item]][constant[Model]] assign[=] name[instrument_model]
variable[expiry_date] assign[=] call[name[obj].getCertificateExpireDate, parameter[]]
if compare[name[expiry_date] is constant[None]] begin[:]
call[name[item]][constant[ExpiryDate]] assign[=] call[name[_], parameter[constant[No date set]]]
if call[name[obj].isOutOfDate, parameter[]] begin[:]
call[name[item]][constant[WeeksToExpire]] assign[=] call[name[_], parameter[constant[Out of date]]]
variable[methods] assign[=] call[name[obj].getMethods, parameter[]]
if name[methods] begin[:]
variable[links] assign[=] call[name[map], parameter[<ast.Lambda object at 0x7da1b1d660b0>, name[methods]]]
call[call[name[item]][constant[replace]]][constant[Methods]] assign[=] call[constant[, ].join, parameter[name[links]]]
return[name[item]] | keyword[def] identifier[folderitem] ( identifier[self] , identifier[obj] , identifier[item] , identifier[index] ):
literal[string]
identifier[title] = identifier[obj] . identifier[Title] ()
identifier[url] = identifier[obj] . identifier[absolute_url] ()
identifier[item] [ literal[string] ]= identifier[title]
identifier[item] [ literal[string] ][ literal[string] ]= identifier[get_link] ( identifier[url] , identifier[value] = identifier[title] )
identifier[instrument_type] = identifier[obj] . identifier[getInstrumentType] ()
keyword[if] identifier[instrument_type] :
identifier[url] = identifier[instrument_type] . identifier[absolute_url] ()
identifier[title] = identifier[instrument_type] . identifier[Title] ()
identifier[item] [ literal[string] ]= identifier[instrument_type] . identifier[Title] ()
identifier[item] [ literal[string] ][ literal[string] ]= identifier[get_link] ( identifier[url] , identifier[value] = identifier[title] )
keyword[else] :
identifier[item] [ literal[string] ]= literal[string]
identifier[instrument_brand] = identifier[obj] . identifier[getManufacturer] ()
keyword[if] identifier[instrument_brand] :
identifier[url] = identifier[instrument_brand] . identifier[absolute_url] ()
identifier[title] = identifier[instrument_brand] . identifier[Title] ()
identifier[item] [ literal[string] ]= identifier[instrument_brand] . identifier[Title] ()
identifier[item] [ literal[string] ][ literal[string] ]= identifier[get_link] ( identifier[url] , identifier[value] = identifier[title] )
keyword[else] :
identifier[item] [ literal[string] ]= literal[string]
identifier[instrument_model] = identifier[obj] . identifier[getModel] ()
keyword[if] identifier[instrument_model] :
identifier[item] [ literal[string] ]= identifier[instrument_model]
keyword[else] :
identifier[item] [ literal[string] ]= literal[string]
identifier[expiry_date] = identifier[obj] . identifier[getCertificateExpireDate] ()
keyword[if] identifier[expiry_date] keyword[is] keyword[None] :
identifier[item] [ literal[string] ]= identifier[_] ( literal[string] )
keyword[else] :
identifier[item] [ literal[string] ]= identifier[expiry_date] . identifier[asdatetime] (). identifier[strftime] (
identifier[self] . identifier[date_format_short] )
keyword[if] identifier[obj] . identifier[isOutOfDate] ():
identifier[item] [ literal[string] ]= identifier[_] ( literal[string] )
keyword[else] :
identifier[weeks] , identifier[days] = identifier[obj] . identifier[getWeeksToExpire] ()
identifier[weeks_to_expire] = identifier[_] ( literal[string] . identifier[format] (
identifier[str] ( identifier[weeks] ), identifier[str] ( identifier[days] )))
identifier[item] [ literal[string] ]= identifier[weeks_to_expire]
identifier[methods] = identifier[obj] . identifier[getMethods] ()
keyword[if] identifier[methods] :
identifier[links] = identifier[map] (
keyword[lambda] identifier[m] : identifier[get_link] ( identifier[m] . identifier[absolute_url] (),
identifier[value] = identifier[m] . identifier[Title] (),
identifier[css_class] = literal[string] ),
identifier[methods] )
identifier[item] [ literal[string] ][ literal[string] ]= literal[string] . identifier[join] ( identifier[links] )
keyword[return] identifier[item] | def folderitem(self, obj, item, index):
"""Service triggered each time an item is iterated in folderitems.
The use of this service prevents the extra-loops in child objects.
:obj: the instance of the class to be foldered
:item: dict containing the properties of the object to be used by
the template
:index: current index of the item
"""
title = obj.Title()
url = obj.absolute_url()
item['Title'] = title
item['replace']['Title'] = get_link(url, value=title)
instrument_type = obj.getInstrumentType()
if instrument_type:
url = instrument_type.absolute_url()
title = instrument_type.Title()
item['Type'] = instrument_type.Title()
item['replace']['Type'] = get_link(url, value=title) # depends on [control=['if'], data=[]]
else:
item['Type'] = ''
instrument_brand = obj.getManufacturer()
if instrument_brand:
url = instrument_brand.absolute_url()
title = instrument_brand.Title()
item['Brand'] = instrument_brand.Title()
item['replace']['Brand'] = get_link(url, value=title) # depends on [control=['if'], data=[]]
else:
item['Brand'] = ''
instrument_model = obj.getModel()
if instrument_model:
item['Model'] = instrument_model # depends on [control=['if'], data=[]]
else:
item['Model'] = ''
expiry_date = obj.getCertificateExpireDate()
if expiry_date is None:
item['ExpiryDate'] = _('No date set') # depends on [control=['if'], data=[]]
else:
item['ExpiryDate'] = expiry_date.asdatetime().strftime(self.date_format_short)
if obj.isOutOfDate():
item['WeeksToExpire'] = _('Out of date') # depends on [control=['if'], data=[]]
else:
(weeks, days) = obj.getWeeksToExpire()
weeks_to_expire = _('{} weeks and {} day(s)'.format(str(weeks), str(days)))
item['WeeksToExpire'] = weeks_to_expire
methods = obj.getMethods()
if methods:
links = map(lambda m: get_link(m.absolute_url(), value=m.Title(), css_class='link'), methods)
item['replace']['Methods'] = ', '.join(links) # depends on [control=['if'], data=[]]
return item |
def initialize(self, pid, ore_software_id=d1_common.const.ORE_SOFTWARE_ID):
    """Create the basic ORE document structure."""
    # Register readable prefixes for all known ORE namespaces.
    for prefix in list(d1_common.const.ORE_NAMESPACE_DICT.keys()):
        self.bind(prefix, d1_common.const.ORE_NAMESPACE_DICT[prefix])
    # The resource map node and its (initially empty) aggregation.
    resource_map_id = self._pid_to_id(pid)
    resource_map = rdflib.URIRef(resource_map_id)
    aggregation = rdflib.URIRef(resource_map_id + "#aggregation")
    # Add all triples in the same order as before: resource map metadata
    # first, then the aggregation it describes.
    triples = [
        (resource_map, rdflib.RDF.type, ORE.ResourceMap),
        (resource_map, DCTERMS.identifier, rdflib.term.Literal(pid)),
        (resource_map, DCTERMS.creator, rdflib.term.Literal(ore_software_id)),
        (resource_map, ORE.describes, aggregation),
        (aggregation, rdflib.RDF.type, ORE.Aggregation),
        (ORE.Aggregation, rdflib.RDFS.isDefinedBy, ORE.term("")),
        (ORE.Aggregation, rdflib.RDFS.label,
         rdflib.term.Literal("Aggregation")),
    ]
    for triple in triples:
        self.add(triple)
    self._ore_initialized = True
constant[Create the basic ORE document structure.]
for taget[name[k]] in starred[call[name[list], parameter[call[name[d1_common].const.ORE_NAMESPACE_DICT.keys, parameter[]]]]] begin[:]
call[name[self].bind, parameter[name[k], call[name[d1_common].const.ORE_NAMESPACE_DICT][name[k]]]]
variable[oid] assign[=] call[name[self]._pid_to_id, parameter[name[pid]]]
variable[ore] assign[=] call[name[rdflib].URIRef, parameter[name[oid]]]
call[name[self].add, parameter[tuple[[<ast.Name object at 0x7da18dc05390>, <ast.Attribute object at 0x7da18dc06890>, <ast.Attribute object at 0x7da1b1adc280>]]]]
call[name[self].add, parameter[tuple[[<ast.Name object at 0x7da1b1add360>, <ast.Attribute object at 0x7da1b1ade8f0>, <ast.Call object at 0x7da1b1add780>]]]]
call[name[self].add, parameter[tuple[[<ast.Name object at 0x7da1b1adc1c0>, <ast.Attribute object at 0x7da1b1ade0e0>, <ast.Call object at 0x7da1b1adcaf0>]]]]
variable[ag] assign[=] call[name[rdflib].URIRef, parameter[binary_operation[name[oid] + constant[#aggregation]]]]
call[name[self].add, parameter[tuple[[<ast.Name object at 0x7da1b1adf6a0>, <ast.Attribute object at 0x7da1b1adceb0>, <ast.Name object at 0x7da1b1adc820>]]]]
call[name[self].add, parameter[tuple[[<ast.Name object at 0x7da1b1ade050>, <ast.Attribute object at 0x7da1b1adedd0>, <ast.Attribute object at 0x7da1b1adde40>]]]]
call[name[self].add, parameter[tuple[[<ast.Attribute object at 0x7da1b1ade710>, <ast.Attribute object at 0x7da1b1adc6d0>, <ast.Call object at 0x7da1b1adec50>]]]]
call[name[self].add, parameter[tuple[[<ast.Attribute object at 0x7da1b1adcc10>, <ast.Attribute object at 0x7da1b1adf400>, <ast.Call object at 0x7da1b1addf30>]]]]
name[self]._ore_initialized assign[=] constant[True] | keyword[def] identifier[initialize] ( identifier[self] , identifier[pid] , identifier[ore_software_id] = identifier[d1_common] . identifier[const] . identifier[ORE_SOFTWARE_ID] ):
literal[string]
keyword[for] identifier[k] keyword[in] identifier[list] ( identifier[d1_common] . identifier[const] . identifier[ORE_NAMESPACE_DICT] . identifier[keys] ()):
identifier[self] . identifier[bind] ( identifier[k] , identifier[d1_common] . identifier[const] . identifier[ORE_NAMESPACE_DICT] [ identifier[k] ])
identifier[oid] = identifier[self] . identifier[_pid_to_id] ( identifier[pid] )
identifier[ore] = identifier[rdflib] . identifier[URIRef] ( identifier[oid] )
identifier[self] . identifier[add] (( identifier[ore] , identifier[rdflib] . identifier[RDF] . identifier[type] , identifier[ORE] . identifier[ResourceMap] ))
identifier[self] . identifier[add] (( identifier[ore] , identifier[DCTERMS] . identifier[identifier] , identifier[rdflib] . identifier[term] . identifier[Literal] ( identifier[pid] )))
identifier[self] . identifier[add] (( identifier[ore] , identifier[DCTERMS] . identifier[creator] , identifier[rdflib] . identifier[term] . identifier[Literal] ( identifier[ore_software_id] )))
identifier[ag] = identifier[rdflib] . identifier[URIRef] ( identifier[oid] + literal[string] )
identifier[self] . identifier[add] (( identifier[ore] , identifier[ORE] . identifier[describes] , identifier[ag] ))
identifier[self] . identifier[add] (( identifier[ag] , identifier[rdflib] . identifier[RDF] . identifier[type] , identifier[ORE] . identifier[Aggregation] ))
identifier[self] . identifier[add] (( identifier[ORE] . identifier[Aggregation] , identifier[rdflib] . identifier[RDFS] . identifier[isDefinedBy] , identifier[ORE] . identifier[term] ( literal[string] )))
identifier[self] . identifier[add] (
( identifier[ORE] . identifier[Aggregation] , identifier[rdflib] . identifier[RDFS] . identifier[label] , identifier[rdflib] . identifier[term] . identifier[Literal] ( literal[string] ))
)
identifier[self] . identifier[_ore_initialized] = keyword[True] | def initialize(self, pid, ore_software_id=d1_common.const.ORE_SOFTWARE_ID):
"""Create the basic ORE document structure."""
# Set nice prefixes for the namespaces
for k in list(d1_common.const.ORE_NAMESPACE_DICT.keys()):
self.bind(k, d1_common.const.ORE_NAMESPACE_DICT[k]) # depends on [control=['for'], data=['k']]
# Create the ORE entity
oid = self._pid_to_id(pid)
ore = rdflib.URIRef(oid)
self.add((ore, rdflib.RDF.type, ORE.ResourceMap))
self.add((ore, DCTERMS.identifier, rdflib.term.Literal(pid)))
self.add((ore, DCTERMS.creator, rdflib.term.Literal(ore_software_id)))
# Add an empty aggregation
ag = rdflib.URIRef(oid + '#aggregation')
self.add((ore, ORE.describes, ag))
self.add((ag, rdflib.RDF.type, ORE.Aggregation))
self.add((ORE.Aggregation, rdflib.RDFS.isDefinedBy, ORE.term('')))
self.add((ORE.Aggregation, rdflib.RDFS.label, rdflib.term.Literal('Aggregation')))
self._ore_initialized = True |
def ng_save(self, request, *args, **kwargs):
    """
    Called on $save()
    Use modelform to save new object or modify an existing one
    """
    form = self.get_form(self.get_form_class())
    # Guard clause: invalid input short-circuits with a validation error.
    if not form.is_valid():
        raise ValidationError(form.errors)
    return self.build_json_response(form.save())
constant[
Called on $save()
Use modelform to save new object or modify an existing one
]
variable[form] assign[=] call[name[self].get_form, parameter[call[name[self].get_form_class, parameter[]]]]
if call[name[form].is_valid, parameter[]] begin[:]
variable[obj] assign[=] call[name[form].save, parameter[]]
return[call[name[self].build_json_response, parameter[name[obj]]]]
<ast.Raise object at 0x7da18fe92fe0> | keyword[def] identifier[ng_save] ( identifier[self] , identifier[request] ,* identifier[args] ,** identifier[kwargs] ):
literal[string]
identifier[form] = identifier[self] . identifier[get_form] ( identifier[self] . identifier[get_form_class] ())
keyword[if] identifier[form] . identifier[is_valid] ():
identifier[obj] = identifier[form] . identifier[save] ()
keyword[return] identifier[self] . identifier[build_json_response] ( identifier[obj] )
keyword[raise] identifier[ValidationError] ( identifier[form] . identifier[errors] ) | def ng_save(self, request, *args, **kwargs):
"""
Called on $save()
Use modelform to save new object or modify an existing one
"""
form = self.get_form(self.get_form_class())
if form.is_valid():
obj = form.save()
return self.build_json_response(obj) # depends on [control=['if'], data=[]]
raise ValidationError(form.errors) |
def bq(line, cell=None):
    """Implements the bq cell magic for ipython notebooks.
    The supported syntax is:
    %%bq <command> [<args>]
    <cell>
    or:
    %bq <command> [<args>]
    Use %bq --help for a list of commands, or %bq <command> --help for help
    on a specific command.
    """
    # Delegate to the shared magic-line dispatcher with the bq parser.
    dispatch = google.datalab.utils.commands.handle_magic_line
    return dispatch(line, cell, _bigquery_parser)
constant[Implements the bq cell magic for ipython notebooks.
The supported syntax is:
%%bq <command> [<args>]
<cell>
or:
%bq <command> [<args>]
Use %bq --help for a list of commands, or %bq <command> --help for help
on a specific command.
]
return[call[name[google].datalab.utils.commands.handle_magic_line, parameter[name[line], name[cell], name[_bigquery_parser]]]] | keyword[def] identifier[bq] ( identifier[line] , identifier[cell] = keyword[None] ):
literal[string]
keyword[return] identifier[google] . identifier[datalab] . identifier[utils] . identifier[commands] . identifier[handle_magic_line] ( identifier[line] , identifier[cell] , identifier[_bigquery_parser] ) | def bq(line, cell=None):
"""Implements the bq cell magic for ipython notebooks.
The supported syntax is:
%%bq <command> [<args>]
<cell>
or:
%bq <command> [<args>]
Use %bq --help for a list of commands, or %bq <command> --help for help
on a specific command.
"""
return google.datalab.utils.commands.handle_magic_line(line, cell, _bigquery_parser) |
def get_or_create_instance(self, id=None, application=None, revision=None, environment=None, name=None, parameters=None, submodules=None,
                           destroyInterval=None):
    """ Get instance by id or name.
    If not found: create with given parameters
    """
    # The rename/ready calls stay inside the try block on purpose: a
    # NotFoundError raised anywhere in the lookup path falls through to
    # instance creation, exactly as before.
    try:
        existing = self.get_instance(id=id, name=name)
        if name and name != existing.name:
            existing.rename(name)
            existing.ready()
        return existing
    except exceptions.NotFoundError:
        return self.create_instance(application, revision, environment,
                                    name, parameters, submodules,
                                    destroyInterval)
constant[ Get instance by id or name.
If not found: create with given parameters
]
<ast.Try object at 0x7da1b1121420> | keyword[def] identifier[get_or_create_instance] ( identifier[self] , identifier[id] = keyword[None] , identifier[application] = keyword[None] , identifier[revision] = keyword[None] , identifier[environment] = keyword[None] , identifier[name] = keyword[None] , identifier[parameters] = keyword[None] , identifier[submodules] = keyword[None] ,
identifier[destroyInterval] = keyword[None] ):
literal[string]
keyword[try] :
identifier[instance] = identifier[self] . identifier[get_instance] ( identifier[id] = identifier[id] , identifier[name] = identifier[name] )
keyword[if] identifier[name] keyword[and] identifier[name] != identifier[instance] . identifier[name] :
identifier[instance] . identifier[rename] ( identifier[name] )
identifier[instance] . identifier[ready] ()
keyword[return] identifier[instance]
keyword[except] identifier[exceptions] . identifier[NotFoundError] :
keyword[return] identifier[self] . identifier[create_instance] ( identifier[application] , identifier[revision] , identifier[environment] , identifier[name] , identifier[parameters] , identifier[submodules] , identifier[destroyInterval] ) | def get_or_create_instance(self, id=None, application=None, revision=None, environment=None, name=None, parameters=None, submodules=None, destroyInterval=None):
""" Get instance by id or name.
If not found: create with given parameters
"""
try:
instance = self.get_instance(id=id, name=name)
if name and name != instance.name:
instance.rename(name)
instance.ready() # depends on [control=['if'], data=[]]
return instance # depends on [control=['try'], data=[]]
except exceptions.NotFoundError:
return self.create_instance(application, revision, environment, name, parameters, submodules, destroyInterval) # depends on [control=['except'], data=[]] |
def df_nc(self, x):
    '''
    Wrapper of the derivative of *f*: takes an input x with size of the not
    fixed dimensions expands it and evaluates the gradient of the entire function.
    '''
    point = np.atleast_2d(x)
    expanded = self.context_manager._expand_vector(point)
    # f_df returns (value, gradient); only the gradient is needed here.
    gradient = self.f_df(expanded)[1]
    # Keep only the columns for the non-fixed (non-context) dimensions.
    free_dims = np.array(self.context_manager.noncontext_index)
    return gradient[:, free_dims]
constant[
Wrapper of the derivative of *f*: takes an input x with size of the not
fixed dimensions expands it and evaluates the gradient of the entire function.
]
variable[x] assign[=] call[name[np].atleast_2d, parameter[name[x]]]
variable[xx] assign[=] call[name[self].context_manager._expand_vector, parameter[name[x]]]
<ast.Tuple object at 0x7da18c4cdd50> assign[=] call[name[self].f_df, parameter[name[xx]]]
variable[df_nocontext_xx] assign[=] call[name[df_nocontext_xx]][tuple[[<ast.Slice object at 0x7da18c4cea70>, <ast.Call object at 0x7da18c4cf100>]]]
return[name[df_nocontext_xx]] | keyword[def] identifier[df_nc] ( identifier[self] , identifier[x] ):
literal[string]
identifier[x] = identifier[np] . identifier[atleast_2d] ( identifier[x] )
identifier[xx] = identifier[self] . identifier[context_manager] . identifier[_expand_vector] ( identifier[x] )
identifier[_] , identifier[df_nocontext_xx] = identifier[self] . identifier[f_df] ( identifier[xx] )
identifier[df_nocontext_xx] = identifier[df_nocontext_xx] [:, identifier[np] . identifier[array] ( identifier[self] . identifier[context_manager] . identifier[noncontext_index] )]
keyword[return] identifier[df_nocontext_xx] | def df_nc(self, x):
"""
Wrapper of the derivative of *f*: takes an input x with size of the not
fixed dimensions expands it and evaluates the gradient of the entire function.
"""
x = np.atleast_2d(x)
xx = self.context_manager._expand_vector(x)
(_, df_nocontext_xx) = self.f_df(xx)
df_nocontext_xx = df_nocontext_xx[:, np.array(self.context_manager.noncontext_index)]
return df_nocontext_xx |
def transform(self, stims, validation='strict', *args, **kwargs):
    ''' Executes the transformation on the passed stim(s).
    Args:
        stims (str, Stim, list): One or more stimuli to process. Must be
            one of:
            - A string giving the path to a file that can be read in
              as a Stim (e.g., a .txt file, .jpg image, etc.)
            - A Stim instance of any type.
            - An iterable of stims, where each element is either a
              string or a Stim.
        validation (str): String specifying how validation errors should
            be handled. Must be one of:
            - 'strict': Raise an exception on any validation error
            - 'warn': Issue a warning for all validation errors
            - 'loose': Silently ignore all validation errors
        args: Optional positional arguments to pass onto the internal
            _transform call.
        kwargs: Optional positional arguments to pass onto the internal
            _transform call.
    '''
    if isinstance(stims, string_types):
        stims = load_stims(stims)
    # If stims is a CompoundStim and the Transformer is expecting a single
    # input type, extract all matching stims
    if isinstance(stims, CompoundStim) and not isinstance(self._input_type,
                                                          tuple):
        stims = stims.get_stim(self._input_type, return_all=True)
        if not stims:
            # BUG FIX: the two string fragments previously concatenated to
            # "providedCompoundStim" -- a separating space was missing.
            raise ValueError("No stims of class %s found in the provided "
                             "CompoundStim instance." % self._input_type)
    # If stims is an iterable, naively loop over elements, removing
    # invalid results if needed
    if isiterable(stims):
        # Positional *args now precede keyword arguments (conventional
        # call order; same arguments reach _iterate either way).
        iters = self._iterate(stims, *args, validation=validation, **kwargs)
        if config.get_option('drop_bad_extractor_results'):
            iters = (i for i in iters if i is not None)
        iters = progress_bar_wrapper(iters, desc='Stim')
        return set_iterable_type(iters)
    # Validate stim, and then either pass it directly to the Transformer
    # or, if a conversion occurred, recurse.
    else:
        try:
            validated_stim = self._validate(stims)
        except TypeError as err:
            if validation == 'strict':
                raise err
            elif validation == 'warn':
                # logging.warning is the non-deprecated spelling of warn()
                logging.warning(str(err))
                return
            elif validation == 'loose':
                return
        # If a conversion occurred during validation, we recurse
        if stims is not validated_stim:
            return self.transform(validated_stim, *args, **kwargs)
        else:
            result = self._transform(validated_stim, *args, **kwargs)
            result = _log_transformation(validated_stim, result, self)
            if isgenerator(result):
                result = list(result)
            self._propagate_context(validated_stim, result)
            return result
constant[ Executes the transformation on the passed stim(s).
Args:
stims (str, Stim, list): One or more stimuli to process. Must be
one of:
- A string giving the path to a file that can be read in
as a Stim (e.g., a .txt file, .jpg image, etc.)
- A Stim instance of any type.
- An iterable of stims, where each element is either a
string or a Stim.
validation (str): String specifying how validation errors should
be handled. Must be one of:
- 'strict': Raise an exception on any validation error
- 'warn': Issue a warning for all validation errors
- 'loose': Silently ignore all validation errors
args: Optional positional arguments to pass onto the internal
_transform call.
kwargs: Optional positional arguments to pass onto the internal
_transform call.
]
if call[name[isinstance], parameter[name[stims], name[string_types]]] begin[:]
variable[stims] assign[=] call[name[load_stims], parameter[name[stims]]]
if <ast.BoolOp object at 0x7da1b1528b80> begin[:]
variable[stims] assign[=] call[name[stims].get_stim, parameter[name[self]._input_type]]
if <ast.UnaryOp object at 0x7da1b1528cd0> begin[:]
<ast.Raise object at 0x7da1b1529fc0>
if call[name[isiterable], parameter[name[stims]]] begin[:]
variable[iters] assign[=] call[name[self]._iterate, parameter[name[stims], <ast.Starred object at 0x7da1b15286a0>]]
if call[name[config].get_option, parameter[constant[drop_bad_extractor_results]]] begin[:]
variable[iters] assign[=] <ast.GeneratorExp object at 0x7da1b152a920>
variable[iters] assign[=] call[name[progress_bar_wrapper], parameter[name[iters]]]
return[call[name[set_iterable_type], parameter[name[iters]]]] | keyword[def] identifier[transform] ( identifier[self] , identifier[stims] , identifier[validation] = literal[string] ,* identifier[args] ,** identifier[kwargs] ):
literal[string]
keyword[if] identifier[isinstance] ( identifier[stims] , identifier[string_types] ):
identifier[stims] = identifier[load_stims] ( identifier[stims] )
keyword[if] identifier[isinstance] ( identifier[stims] , identifier[CompoundStim] ) keyword[and] keyword[not] identifier[isinstance] ( identifier[self] . identifier[_input_type] , identifier[tuple] ):
identifier[stims] = identifier[stims] . identifier[get_stim] ( identifier[self] . identifier[_input_type] , identifier[return_all] = keyword[True] )
keyword[if] keyword[not] identifier[stims] :
keyword[raise] identifier[ValueError] ( literal[string]
literal[string] % identifier[self] . identifier[_input_type] )
keyword[if] identifier[isiterable] ( identifier[stims] ):
identifier[iters] = identifier[self] . identifier[_iterate] ( identifier[stims] , identifier[validation] = identifier[validation] ,* identifier[args] ,
** identifier[kwargs] )
keyword[if] identifier[config] . identifier[get_option] ( literal[string] ):
identifier[iters] =( identifier[i] keyword[for] identifier[i] keyword[in] identifier[iters] keyword[if] identifier[i] keyword[is] keyword[not] keyword[None] )
identifier[iters] = identifier[progress_bar_wrapper] ( identifier[iters] , identifier[desc] = literal[string] )
keyword[return] identifier[set_iterable_type] ( identifier[iters] )
keyword[else] :
keyword[try] :
identifier[validated_stim] = identifier[self] . identifier[_validate] ( identifier[stims] )
keyword[except] identifier[TypeError] keyword[as] identifier[err] :
keyword[if] identifier[validation] == literal[string] :
keyword[raise] identifier[err]
keyword[elif] identifier[validation] == literal[string] :
identifier[logging] . identifier[warn] ( identifier[str] ( identifier[err] ))
keyword[return]
keyword[elif] identifier[validation] == literal[string] :
keyword[return]
keyword[if] identifier[stims] keyword[is] keyword[not] identifier[validated_stim] :
keyword[return] identifier[self] . identifier[transform] ( identifier[validated_stim] ,* identifier[args] ,** identifier[kwargs] )
keyword[else] :
identifier[result] = identifier[self] . identifier[_transform] ( identifier[validated_stim] ,* identifier[args] ,** identifier[kwargs] )
identifier[result] = identifier[_log_transformation] ( identifier[validated_stim] , identifier[result] , identifier[self] )
keyword[if] identifier[isgenerator] ( identifier[result] ):
identifier[result] = identifier[list] ( identifier[result] )
identifier[self] . identifier[_propagate_context] ( identifier[validated_stim] , identifier[result] )
keyword[return] identifier[result] | def transform(self, stims, validation='strict', *args, **kwargs):
""" Executes the transformation on the passed stim(s).
Args:
stims (str, Stim, list): One or more stimuli to process. Must be
one of:
- A string giving the path to a file that can be read in
as a Stim (e.g., a .txt file, .jpg image, etc.)
- A Stim instance of any type.
- An iterable of stims, where each element is either a
string or a Stim.
validation (str): String specifying how validation errors should
be handled. Must be one of:
- 'strict': Raise an exception on any validation error
- 'warn': Issue a warning for all validation errors
- 'loose': Silently ignore all validation errors
args: Optional positional arguments to pass onto the internal
_transform call.
kwargs: Optional positional arguments to pass onto the internal
_transform call.
"""
if isinstance(stims, string_types):
stims = load_stims(stims) # depends on [control=['if'], data=[]]
# If stims is a CompoundStim and the Transformer is expecting a single
# input type, extract all matching stims
if isinstance(stims, CompoundStim) and (not isinstance(self._input_type, tuple)):
stims = stims.get_stim(self._input_type, return_all=True)
if not stims:
raise ValueError('No stims of class %s found in the providedCompoundStim instance.' % self._input_type) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
# If stims is an iterable, naively loop over elements, removing
# invalid results if needed
if isiterable(stims):
iters = self._iterate(stims, *args, validation=validation, **kwargs)
if config.get_option('drop_bad_extractor_results'):
iters = (i for i in iters if i is not None) # depends on [control=['if'], data=[]]
iters = progress_bar_wrapper(iters, desc='Stim')
return set_iterable_type(iters) # depends on [control=['if'], data=[]]
else:
# Validate stim, and then either pass it directly to the Transformer
# or, if a conversion occurred, recurse.
try:
validated_stim = self._validate(stims) # depends on [control=['try'], data=[]]
except TypeError as err:
if validation == 'strict':
raise err # depends on [control=['if'], data=[]]
elif validation == 'warn':
logging.warn(str(err))
return # depends on [control=['if'], data=[]]
elif validation == 'loose':
return # depends on [control=['if'], data=[]] # depends on [control=['except'], data=['err']]
# If a conversion occurred during validation, we recurse
if stims is not validated_stim:
return self.transform(validated_stim, *args, **kwargs) # depends on [control=['if'], data=['validated_stim']]
else:
result = self._transform(validated_stim, *args, **kwargs)
result = _log_transformation(validated_stim, result, self)
if isgenerator(result):
result = list(result) # depends on [control=['if'], data=[]]
self._propagate_context(validated_stim, result)
return result |
def list_presets(self, package_keyname, **kwargs):
    """Gets active presets for the given package.

    :param str package_keyname: The package for which to get presets
    :returns: A list of package presets that can be used for ordering
    """
    # Build the API call kwargs, defaulting to the standard preset mask.
    request_args = {'mask': kwargs.get('mask', PRESET_MASK)}
    if 'filter' in kwargs:
        request_args['filter'] = kwargs['filter']

    # Only the package id is needed to query presets.
    package_id = self.get_package_by_key(package_keyname, mask='id')['id']

    # Account-restricted presets are appended after the generally
    # available active presets.
    restricted = self.package_svc.getAccountRestrictedActivePresets(
        id=package_id, **request_args)
    active = self.package_svc.getActivePresets(id=package_id, **request_args)
    return active + restricted
constant[Gets active presets for the given package.
:param str package_keyname: The package for which to get presets
:returns: A list of package presets that can be used for ordering
]
variable[get_kwargs] assign[=] dictionary[[], []]
call[name[get_kwargs]][constant[mask]] assign[=] call[name[kwargs].get, parameter[constant[mask], name[PRESET_MASK]]]
if compare[constant[filter] in name[kwargs]] begin[:]
call[name[get_kwargs]][constant[filter]] assign[=] call[name[kwargs]][constant[filter]]
variable[package] assign[=] call[name[self].get_package_by_key, parameter[name[package_keyname]]]
variable[acc_presets] assign[=] call[name[self].package_svc.getAccountRestrictedActivePresets, parameter[]]
variable[active_presets] assign[=] call[name[self].package_svc.getActivePresets, parameter[]]
return[binary_operation[name[active_presets] + name[acc_presets]]] | keyword[def] identifier[list_presets] ( identifier[self] , identifier[package_keyname] ,** identifier[kwargs] ):
literal[string]
identifier[get_kwargs] ={}
identifier[get_kwargs] [ literal[string] ]= identifier[kwargs] . identifier[get] ( literal[string] , identifier[PRESET_MASK] )
keyword[if] literal[string] keyword[in] identifier[kwargs] :
identifier[get_kwargs] [ literal[string] ]= identifier[kwargs] [ literal[string] ]
identifier[package] = identifier[self] . identifier[get_package_by_key] ( identifier[package_keyname] , identifier[mask] = literal[string] )
identifier[acc_presets] = identifier[self] . identifier[package_svc] . identifier[getAccountRestrictedActivePresets] ( identifier[id] = identifier[package] [ literal[string] ],** identifier[get_kwargs] )
identifier[active_presets] = identifier[self] . identifier[package_svc] . identifier[getActivePresets] ( identifier[id] = identifier[package] [ literal[string] ],** identifier[get_kwargs] )
keyword[return] identifier[active_presets] + identifier[acc_presets] | def list_presets(self, package_keyname, **kwargs):
"""Gets active presets for the given package.
:param str package_keyname: The package for which to get presets
:returns: A list of package presets that can be used for ordering
"""
get_kwargs = {}
get_kwargs['mask'] = kwargs.get('mask', PRESET_MASK)
if 'filter' in kwargs:
get_kwargs['filter'] = kwargs['filter'] # depends on [control=['if'], data=['kwargs']]
package = self.get_package_by_key(package_keyname, mask='id')
acc_presets = self.package_svc.getAccountRestrictedActivePresets(id=package['id'], **get_kwargs)
active_presets = self.package_svc.getActivePresets(id=package['id'], **get_kwargs)
return active_presets + acc_presets |
def images(self, **kwargs):
    """
    Get the images (posters and backdrops) for a specific movie id.

    Args:
        language: (optional) ISO 639-1 code.
        append_to_response: (optional) Comma separated, any movie method.
        include_image_language: (optional) Comma separated, a valid
                                ISO 69-1.

    Returns:
        A dict representation of the JSON returned from the API.
    """
    # Fetch the images endpoint for this movie, then mirror the payload
    # onto instance attributes before returning it.
    response = self._GET(self._get_id_path('images'), kwargs)
    self._set_attrs_to_values(response)
    return response
constant[
Get the images (posters and backdrops) for a specific movie id.
Args:
language: (optional) ISO 639-1 code.
append_to_response: (optional) Comma separated, any movie method.
include_image_language: (optional) Comma separated, a valid
ISO 69-1.
Returns:
A dict representation of the JSON returned from the API.
]
variable[path] assign[=] call[name[self]._get_id_path, parameter[constant[images]]]
variable[response] assign[=] call[name[self]._GET, parameter[name[path], name[kwargs]]]
call[name[self]._set_attrs_to_values, parameter[name[response]]]
return[name[response]] | keyword[def] identifier[images] ( identifier[self] ,** identifier[kwargs] ):
literal[string]
identifier[path] = identifier[self] . identifier[_get_id_path] ( literal[string] )
identifier[response] = identifier[self] . identifier[_GET] ( identifier[path] , identifier[kwargs] )
identifier[self] . identifier[_set_attrs_to_values] ( identifier[response] )
keyword[return] identifier[response] | def images(self, **kwargs):
"""
Get the images (posters and backdrops) for a specific movie id.
Args:
language: (optional) ISO 639-1 code.
append_to_response: (optional) Comma separated, any movie method.
include_image_language: (optional) Comma separated, a valid
ISO 69-1.
Returns:
A dict representation of the JSON returned from the API.
"""
path = self._get_id_path('images')
response = self._GET(path, kwargs)
self._set_attrs_to_values(response)
return response |
def sort_fn_list(fn_list):
    """Return the input filename list ordered by each file's datetime."""
    timestamps = get_dt_list(fn_list)
    # Sorting (datetime, filename) tuples orders primarily by datetime,
    # with the filename itself breaking any ties.
    ordered_pairs = sorted(zip(timestamps, fn_list))
    return [name for _, name in ordered_pairs]
constant[Sort input filename list by datetime
]
variable[dt_list] assign[=] call[name[get_dt_list], parameter[name[fn_list]]]
variable[fn_list_sort] assign[=] <ast.ListComp object at 0x7da1b072f3d0>
return[name[fn_list_sort]] | keyword[def] identifier[sort_fn_list] ( identifier[fn_list] ):
literal[string]
identifier[dt_list] = identifier[get_dt_list] ( identifier[fn_list] )
identifier[fn_list_sort] =[ identifier[fn] keyword[for] ( identifier[dt] , identifier[fn] ) keyword[in] identifier[sorted] ( identifier[zip] ( identifier[dt_list] , identifier[fn_list] ))]
keyword[return] identifier[fn_list_sort] | def sort_fn_list(fn_list):
"""Sort input filename list by datetime
"""
dt_list = get_dt_list(fn_list)
fn_list_sort = [fn for (dt, fn) in sorted(zip(dt_list, fn_list))]
return fn_list_sort |
def do_kpl_on(self, args):
    """Turn on a KeypadLinc button.

    Usage:
        kpl_on address group

    :param args: command string containing the device address and the
        button/group number, separated by whitespace.
    """
    params = args.split()
    address = None
    group = None
    try:
        address = params[0]
        group = int(params[1])
    except IndexError:
        # Fewer than two tokens were supplied.
        _LOGGING.error("Address and group are required")
        self.do_help('kpl_on')
    except ValueError:
        # int() raises ValueError (not TypeError) for a malformed string;
        # the original caught TypeError and so crashed on bad input.
        _LOGGING.error("Group must be an integer")
        self.do_help('kpl_on')
    if address and group:
        self.tools.kpl_on(address, group)
constant[Turn on a KeypadLinc button.
Usage:
kpl_on address group
]
variable[params] assign[=] call[name[args].split, parameter[]]
variable[address] assign[=] constant[None]
variable[group] assign[=] constant[None]
<ast.Try object at 0x7da1b1a44460>
if <ast.BoolOp object at 0x7da1b1a46260> begin[:]
call[name[self].tools.kpl_on, parameter[name[address], name[group]]] | keyword[def] identifier[do_kpl_on] ( identifier[self] , identifier[args] ):
literal[string]
identifier[params] = identifier[args] . identifier[split] ()
identifier[address] = keyword[None]
identifier[group] = keyword[None]
keyword[try] :
identifier[address] = identifier[params] [ literal[int] ]
identifier[group] = identifier[int] ( identifier[params] [ literal[int] ])
keyword[except] identifier[IndexError] :
identifier[_LOGGING] . identifier[error] ( literal[string] )
identifier[self] . identifier[do_help] ( literal[string] )
keyword[except] identifier[TypeError] :
identifier[_LOGGING] . identifier[error] ( literal[string] )
identifier[self] . identifier[do_help] ( literal[string] )
keyword[if] identifier[address] keyword[and] identifier[group] :
identifier[self] . identifier[tools] . identifier[kpl_on] ( identifier[address] , identifier[group] ) | def do_kpl_on(self, args):
"""Turn on a KeypadLinc button.
Usage:
kpl_on address group
"""
params = args.split()
address = None
group = None
try:
address = params[0]
group = int(params[1]) # depends on [control=['try'], data=[]]
except IndexError:
_LOGGING.error('Address and group are regquired')
self.do_help('kpl_status') # depends on [control=['except'], data=[]]
except TypeError:
_LOGGING.error('Group must be an integer')
self.do_help('kpl_status') # depends on [control=['except'], data=[]]
if address and group:
self.tools.kpl_on(address, group) # depends on [control=['if'], data=[]] |
def pop(self, name):
    """Remove and return metadata about variable

    Parameters
    ----------
    name : str
        variable name

    Returns
    -------
    pandas.Series
        Series of metadata for variable
    """
    # Bail out early when the variable is unknown.
    if name not in self:
        raise KeyError('Key not present in metadata variables')

    # Resolve the stored (case-preserved) spelling of the name.
    resolved = self.var_case_name(name)
    if resolved in self.keys():
        # 1D variable: copy the row out, then drop it from the frame.
        removed = self[resolved]
        self.data.drop(resolved, inplace=True, axis=0)
        return removed
    # Higher-order (nD) variable lives in the separate ho_data store.
    return self.ho_data.pop(resolved)
constant[Remove and return metadata about variable
Parameters
----------
name : str
variable name
Returns
-------
pandas.Series
Series of metadata for variable
]
if compare[name[name] in name[self]] begin[:]
variable[new_name] assign[=] call[name[self].var_case_name, parameter[name[name]]]
if compare[name[new_name] in call[name[self].keys, parameter[]]] begin[:]
variable[output] assign[=] call[name[self]][name[new_name]]
call[name[self].data.drop, parameter[name[new_name]]]
return[name[output]] | keyword[def] identifier[pop] ( identifier[self] , identifier[name] ):
literal[string]
keyword[if] identifier[name] keyword[in] identifier[self] :
identifier[new_name] = identifier[self] . identifier[var_case_name] ( identifier[name] )
keyword[if] identifier[new_name] keyword[in] identifier[self] . identifier[keys] ():
identifier[output] = identifier[self] [ identifier[new_name] ]
identifier[self] . identifier[data] . identifier[drop] ( identifier[new_name] , identifier[inplace] = keyword[True] , identifier[axis] = literal[int] )
keyword[else] :
identifier[output] = identifier[self] . identifier[ho_data] . identifier[pop] ( identifier[new_name] )
keyword[return] identifier[output]
keyword[else] :
keyword[raise] identifier[KeyError] ( literal[string] ) | def pop(self, name):
"""Remove and return metadata about variable
Parameters
----------
name : str
variable name
Returns
-------
pandas.Series
Series of metadata for variable
"""
# check if present
if name in self:
# get case preserved name for variable
new_name = self.var_case_name(name)
# check if 1D or nD
if new_name in self.keys():
output = self[new_name]
self.data.drop(new_name, inplace=True, axis=0) # depends on [control=['if'], data=['new_name']]
else:
output = self.ho_data.pop(new_name)
return output # depends on [control=['if'], data=['name', 'self']]
else:
raise KeyError('Key not present in metadata variables') |
def compute(self, txns):
    """Compute the long/short live-to-date transaction level profit and loss.

    Uses an open average calculation.

    :param txns: transaction container exposing ``frame`` (transaction
        rows) and ``pricer`` (end-of-day market data source).
    :return: DataFrame with one row per transaction/market timestamp and
        the live-to-date p/l columns defined by ``TPL``.
    :raise Exception: if any day with an open position lacks a close price.
    """
    txndata = txns.frame
    mktdata = txns.pricer.get_eod_frame()
    if not isinstance(mktdata.index, pd.DatetimeIndex):
        # FIX: to_timestamp returns a new frame; the original discarded the
        # result, so the index conversion silently never took effect.
        mktdata = mktdata.to_timestamp(freq='B')
    # get the set of all txn dts and mkt data dts
    pl = pd.merge(txndata, mktdata.reset_index(), how='outer', on=TPL.DT)
    if pl[TC.PID].isnull().all():
        # No transactions at all: return an all-zero frame aligned to the
        # market-data dates.
        ltd_frame = pd.DataFrame(index=pl.index)
        ltd_frame[TPL.DT] = pl[PL.DT]
        ltd_frame[TPL.POS] = 0
        ltd_frame[TPL.PID] = 0
        ltd_frame[TPL.TID] = 0
        ltd_frame[TPL.TXN_QTY] = np.nan
        ltd_frame[TPL.TXN_PX] = np.nan
        ltd_frame[TPL.TXN_FEES] = 0
        ltd_frame[TPL.TXN_PREMIUM] = 0
        ltd_frame[TPL.TXN_INTENT] = 0
        ltd_frame[TPL.TXN_ACTION] = 0
        ltd_frame[TPL.CLOSE_PX] = pl[TPL.CLOSE_PX]
        ltd_frame[TPL.OPEN_VAL] = 0
        ltd_frame[TPL.MKT_VAL] = 0
        ltd_frame[TPL.TOT_VAL] = 0
        ltd_frame[TPL.DVDS] = 0
        ltd_frame[TPL.FEES] = 0
        ltd_frame[TPL.RPL_GROSS] = 0
        ltd_frame[TPL.RPL] = 0
        ltd_frame[TPL.UPL] = 0
        ltd_frame[TPL.PL] = 0
        return ltd_frame
    else:
        # FIX: DataFrame.sort was removed from pandas (0.20); sort_values
        # is the supported equivalent.
        pl.sort_values([TC.DT, TC.PID, TC.TID], inplace=True)
        pl.reset_index(inplace=True, drop=True)
        # check that all days can be priced
        has_position = pl[TC.PID] > 0
        missing_pxs = pl[MC.CLOSE].isnull()
        missing = pl[TC.DT][has_position & missing_pxs]
        if len(missing) > 0:
            msg = 'insufficient price data: {0} prices missing for dates {1}'
            mdates = ','.join([_.strftime('%Y-%m-%d') for _ in set(missing[:5])])
            if len(missing) > 5:
                mdates += '...'
            raise Exception(msg.format(len(missing), mdates))
        # Now there is a row for every timestamp. Compute the p/l and fill
        # in where missing data should be.
        cols = [TC.DT, TC.POS, TC.PID, TC.TID, TC.INTENT, TC.ACTION,
                TC.FEES, TC.QTY, TC.PX, TC.PREMIUM, TC.OPEN_VAL]
        (dts, pos_qtys, pids, tids, intents, sides, txn_fees, txn_qtys,
         txn_pxs, premiums, open_vals) = [pl[c] for c in cols]
        dvds, closing_pxs, mkt_vals = [pl[c] for c in
                                       [MC.DVDS, MC.CLOSE, MC.MKT_VAL]]
        # Ensure only end of day is kept for dividends (the join matches a
        # dvd to any transaction during the day).
        dvds = dvds.where(dts != dts.shift(-1), 0)
        # fill in pl dates
        open_vals.ffill(inplace=True)
        open_vals.fillna(0, inplace=True)
        pos_qtys.ffill(inplace=True)
        pos_qtys.fillna(0, inplace=True)
        # pid is the only tricky one: copy forward only while a position is
        # open, otherwise zero.
        inpos = intents.notnull() | (pos_qtys != 0)
        pids = np.where(inpos, pids.ffill(), 0)
        pl['pid'] = pids.astype(int)
        # Zero fill missing
        dvds.fillna(0, inplace=True)
        tids.fillna(0, inplace=True)
        tids = tids.astype(int)
        intents.fillna(0, inplace=True)
        intents = intents.astype(int)
        sides.fillna(0, inplace=True)
        sides = sides.astype(int)
        txn_fees.fillna(0, inplace=True)
        premiums.fillna(0, inplace=True)
        # LTD p/l calculation
        fees = txn_fees.cumsum()
        total_vals = premiums.cumsum()
        mkt_vals *= pos_qtys
        dvds = (dvds * pos_qtys).cumsum()
        rpl_gross = total_vals - open_vals
        rpl = rpl_gross + fees + dvds
        upl = mkt_vals + open_vals
        tpl = upl + rpl
        # build the result
        data = OrderedDict()
        data[TPL.DT] = dts
        data[TPL.POS] = pos_qtys
        data[TPL.PID] = pids
        data[TPL.TID] = tids
        data[TPL.TXN_QTY] = txn_qtys
        data[TPL.TXN_PX] = txn_pxs
        data[TPL.TXN_FEES] = txn_fees
        data[TPL.TXN_PREMIUM] = premiums
        data[TPL.TXN_INTENT] = intents
        data[TPL.TXN_ACTION] = sides
        data[TPL.CLOSE_PX] = closing_pxs
        data[TPL.OPEN_VAL] = open_vals
        data[TPL.MKT_VAL] = mkt_vals
        data[TPL.TOT_VAL] = total_vals
        data[TPL.DVDS] = dvds
        data[TPL.FEES] = fees
        data[TPL.RPL_GROSS] = rpl_gross
        data[TPL.RPL] = rpl
        data[TPL.UPL] = upl
        data[TPL.PL] = tpl
        ltd_frame = pd.DataFrame(data, columns=data.keys())
        return ltd_frame
constant[Compute the long/short live-to-date transaction level profit and loss. Uses an open average calculation]
variable[txndata] assign[=] name[txns].frame
variable[mktdata] assign[=] call[name[txns].pricer.get_eod_frame, parameter[]]
if <ast.UnaryOp object at 0x7da1b1e87c10> begin[:]
call[name[mktdata].to_timestamp, parameter[]]
variable[pl] assign[=] call[name[pd].merge, parameter[name[txndata], call[name[mktdata].reset_index, parameter[]]]]
if call[call[call[name[pl]][name[TC].PID].isnull, parameter[]].all, parameter[]] begin[:]
variable[ltd_frame] assign[=] call[name[pd].DataFrame, parameter[]]
call[name[ltd_frame]][name[TPL].DT] assign[=] call[name[pl]][name[PL].DT]
call[name[ltd_frame]][name[TPL].POS] assign[=] constant[0]
call[name[ltd_frame]][name[TPL].PID] assign[=] constant[0]
call[name[ltd_frame]][name[TPL].TID] assign[=] constant[0]
call[name[ltd_frame]][name[TPL].TXN_QTY] assign[=] name[np].nan
call[name[ltd_frame]][name[TPL].TXN_PX] assign[=] name[np].nan
call[name[ltd_frame]][name[TPL].TXN_FEES] assign[=] constant[0]
call[name[ltd_frame]][name[TPL].TXN_PREMIUM] assign[=] constant[0]
call[name[ltd_frame]][name[TPL].TXN_INTENT] assign[=] constant[0]
call[name[ltd_frame]][name[TPL].TXN_ACTION] assign[=] constant[0]
call[name[ltd_frame]][name[TPL].CLOSE_PX] assign[=] call[name[pl]][name[TPL].CLOSE_PX]
call[name[ltd_frame]][name[TPL].OPEN_VAL] assign[=] constant[0]
call[name[ltd_frame]][name[TPL].MKT_VAL] assign[=] constant[0]
call[name[ltd_frame]][name[TPL].TOT_VAL] assign[=] constant[0]
call[name[ltd_frame]][name[TPL].DVDS] assign[=] constant[0]
call[name[ltd_frame]][name[TPL].FEES] assign[=] constant[0]
call[name[ltd_frame]][name[TPL].RPL_GROSS] assign[=] constant[0]
call[name[ltd_frame]][name[TPL].RPL] assign[=] constant[0]
call[name[ltd_frame]][name[TPL].UPL] assign[=] constant[0]
call[name[ltd_frame]][name[TPL].PL] assign[=] constant[0]
return[name[ltd_frame]] | keyword[def] identifier[compute] ( identifier[self] , identifier[txns] ):
literal[string]
identifier[txndata] = identifier[txns] . identifier[frame]
identifier[mktdata] = identifier[txns] . identifier[pricer] . identifier[get_eod_frame] ()
keyword[if] keyword[not] identifier[isinstance] ( identifier[mktdata] . identifier[index] , identifier[pd] . identifier[DatetimeIndex] ):
identifier[mktdata] . identifier[to_timestamp] ( identifier[freq] = literal[string] )
identifier[pl] = identifier[pd] . identifier[merge] ( identifier[txndata] , identifier[mktdata] . identifier[reset_index] (), identifier[how] = literal[string] , identifier[on] = identifier[TPL] . identifier[DT] )
keyword[if] identifier[pl] [ identifier[TC] . identifier[PID] ]. identifier[isnull] (). identifier[all] ():
identifier[ltd_frame] = identifier[pd] . identifier[DataFrame] ( identifier[index] = identifier[pl] . identifier[index] )
identifier[ltd_frame] [ identifier[TPL] . identifier[DT] ]= identifier[pl] [ identifier[PL] . identifier[DT] ]
identifier[ltd_frame] [ identifier[TPL] . identifier[POS] ]= literal[int]
identifier[ltd_frame] [ identifier[TPL] . identifier[PID] ]= literal[int]
identifier[ltd_frame] [ identifier[TPL] . identifier[TID] ]= literal[int]
identifier[ltd_frame] [ identifier[TPL] . identifier[TXN_QTY] ]= identifier[np] . identifier[nan]
identifier[ltd_frame] [ identifier[TPL] . identifier[TXN_PX] ]= identifier[np] . identifier[nan]
identifier[ltd_frame] [ identifier[TPL] . identifier[TXN_FEES] ]= literal[int]
identifier[ltd_frame] [ identifier[TPL] . identifier[TXN_PREMIUM] ]= literal[int]
identifier[ltd_frame] [ identifier[TPL] . identifier[TXN_INTENT] ]= literal[int]
identifier[ltd_frame] [ identifier[TPL] . identifier[TXN_ACTION] ]= literal[int]
identifier[ltd_frame] [ identifier[TPL] . identifier[CLOSE_PX] ]= identifier[pl] [ identifier[TPL] . identifier[CLOSE_PX] ]
identifier[ltd_frame] [ identifier[TPL] . identifier[OPEN_VAL] ]= literal[int]
identifier[ltd_frame] [ identifier[TPL] . identifier[MKT_VAL] ]= literal[int]
identifier[ltd_frame] [ identifier[TPL] . identifier[TOT_VAL] ]= literal[int]
identifier[ltd_frame] [ identifier[TPL] . identifier[DVDS] ]= literal[int]
identifier[ltd_frame] [ identifier[TPL] . identifier[FEES] ]= literal[int]
identifier[ltd_frame] [ identifier[TPL] . identifier[RPL_GROSS] ]= literal[int]
identifier[ltd_frame] [ identifier[TPL] . identifier[RPL] ]= literal[int]
identifier[ltd_frame] [ identifier[TPL] . identifier[UPL] ]= literal[int]
identifier[ltd_frame] [ identifier[TPL] . identifier[PL] ]= literal[int]
keyword[return] identifier[ltd_frame]
keyword[else] :
identifier[pl] . identifier[sort] ([ identifier[TC] . identifier[DT] , identifier[TC] . identifier[PID] , identifier[TC] . identifier[TID] ], identifier[inplace] = literal[int] )
identifier[pl] . identifier[reset_index] ( identifier[inplace] = literal[int] , identifier[drop] = literal[int] )
identifier[has_position] = identifier[pl] [ identifier[TC] . identifier[PID] ]> literal[int]
identifier[missing_pxs] = identifier[pl] [ identifier[MC] . identifier[CLOSE] ]. identifier[isnull] ()
identifier[missing] = identifier[pl] [ identifier[TC] . identifier[DT] ][ identifier[has_position] & identifier[missing_pxs] ]
keyword[if] identifier[len] ( identifier[missing] )> literal[int] :
identifier[msg] = literal[string]
identifier[mdates] = literal[string] . identifier[join] ([ identifier[_] . identifier[strftime] ( literal[string] ) keyword[for] identifier[_] keyword[in] identifier[set] ( identifier[missing] [: literal[int] ])])
identifier[mdates] +=( identifier[len] ( identifier[missing] )> literal[int] keyword[and] literal[string] keyword[or] literal[string] )
keyword[raise] identifier[Exception] ( identifier[msg] . identifier[format] ( identifier[len] ( identifier[missing] ), identifier[mdates] ))
identifier[cols] =[ identifier[TC] . identifier[DT] , identifier[TC] . identifier[POS] , identifier[TC] . identifier[PID] , identifier[TC] . identifier[TID] , identifier[TC] . identifier[INTENT] , identifier[TC] . identifier[ACTION] , identifier[TC] . identifier[FEES] , identifier[TC] . identifier[QTY] , identifier[TC] . identifier[PX] , identifier[TC] . identifier[PREMIUM] ,
identifier[TC] . identifier[OPEN_VAL] ]
identifier[dts] , identifier[pos_qtys] , identifier[pids] , identifier[tids] , identifier[intents] , identifier[sides] , identifier[txn_fees] , identifier[txn_qtys] , identifier[txn_pxs] , identifier[premiums] , identifier[open_vals] =[ identifier[pl] [ identifier[c] ] keyword[for] identifier[c]
keyword[in]
identifier[cols] ]
identifier[dvds] , identifier[closing_pxs] , identifier[mkt_vals] =[ identifier[pl] [ identifier[c] ] keyword[for] identifier[c] keyword[in] [ identifier[MC] . identifier[DVDS] , identifier[MC] . identifier[CLOSE] , identifier[MC] . identifier[MKT_VAL] ]]
identifier[dvds] = identifier[dvds] . identifier[where] ( identifier[dts] != identifier[dts] . identifier[shift] (- literal[int] ), literal[int] )
identifier[open_vals] . identifier[ffill] ( identifier[inplace] = literal[int] )
identifier[open_vals] . identifier[fillna] ( literal[int] , identifier[inplace] = literal[int] )
identifier[pos_qtys] . identifier[ffill] ( identifier[inplace] = literal[int] )
identifier[pos_qtys] . identifier[fillna] ( literal[int] , identifier[inplace] = literal[int] )
identifier[inpos] = identifier[intents] . identifier[notnull] ()|( identifier[pos_qtys] != literal[int] )
identifier[pids] = identifier[np] . identifier[where] ( identifier[inpos] , identifier[pids] . identifier[ffill] (), literal[int] )
identifier[pl] [ literal[string] ]= identifier[pids] . identifier[astype] ( identifier[int] )
identifier[dvds] . identifier[fillna] ( literal[int] , identifier[inplace] = literal[int] )
identifier[tids] . identifier[fillna] ( literal[int] , identifier[inplace] = literal[int] )
identifier[tids] = identifier[tids] . identifier[astype] ( identifier[int] )
identifier[intents] . identifier[fillna] ( literal[int] , identifier[inplace] = literal[int] )
identifier[intents] = identifier[intents] . identifier[astype] ( identifier[int] )
identifier[sides] . identifier[fillna] ( literal[int] , identifier[inplace] = literal[int] )
identifier[sides] = identifier[sides] . identifier[astype] ( identifier[int] )
identifier[txn_fees] . identifier[fillna] ( literal[int] , identifier[inplace] = literal[int] )
identifier[premiums] . identifier[fillna] ( literal[int] , identifier[inplace] = literal[int] )
identifier[fees] = identifier[txn_fees] . identifier[cumsum] ()
identifier[total_vals] = identifier[premiums] . identifier[cumsum] ()
identifier[mkt_vals] *= identifier[pos_qtys]
identifier[dvds] =( identifier[dvds] * identifier[pos_qtys] ). identifier[cumsum] ()
identifier[rpl_gross] = identifier[total_vals] - identifier[open_vals]
identifier[rpl] = identifier[rpl_gross] + identifier[fees] + identifier[dvds]
identifier[upl] = identifier[mkt_vals] + identifier[open_vals]
identifier[tpl] = identifier[upl] + identifier[rpl]
identifier[data] = identifier[OrderedDict] ()
identifier[data] [ identifier[TPL] . identifier[DT] ]= identifier[dts]
identifier[data] [ identifier[TPL] . identifier[POS] ]= identifier[pos_qtys]
identifier[data] [ identifier[TPL] . identifier[PID] ]= identifier[pids]
identifier[data] [ identifier[TPL] . identifier[TID] ]= identifier[tids]
identifier[data] [ identifier[TPL] . identifier[TXN_QTY] ]= identifier[txn_qtys]
identifier[data] [ identifier[TPL] . identifier[TXN_PX] ]= identifier[txn_pxs]
identifier[data] [ identifier[TPL] . identifier[TXN_FEES] ]= identifier[txn_fees]
identifier[data] [ identifier[TPL] . identifier[TXN_PREMIUM] ]= identifier[premiums]
identifier[data] [ identifier[TPL] . identifier[TXN_INTENT] ]= identifier[intents]
identifier[data] [ identifier[TPL] . identifier[TXN_ACTION] ]= identifier[sides]
identifier[data] [ identifier[TPL] . identifier[CLOSE_PX] ]= identifier[closing_pxs]
identifier[data] [ identifier[TPL] . identifier[OPEN_VAL] ]= identifier[open_vals]
identifier[data] [ identifier[TPL] . identifier[MKT_VAL] ]= identifier[mkt_vals]
identifier[data] [ identifier[TPL] . identifier[TOT_VAL] ]= identifier[total_vals]
identifier[data] [ identifier[TPL] . identifier[DVDS] ]= identifier[dvds]
identifier[data] [ identifier[TPL] . identifier[FEES] ]= identifier[fees]
identifier[data] [ identifier[TPL] . identifier[RPL_GROSS] ]= identifier[rpl_gross]
identifier[data] [ identifier[TPL] . identifier[RPL] ]= identifier[rpl]
identifier[data] [ identifier[TPL] . identifier[UPL] ]= identifier[upl]
identifier[data] [ identifier[TPL] . identifier[PL] ]= identifier[tpl]
identifier[ltd_frame] = identifier[pd] . identifier[DataFrame] ( identifier[data] , identifier[columns] = identifier[data] . identifier[keys] ())
keyword[return] identifier[ltd_frame] | def compute(self, txns):
"""Compute the long/short live-to-date transaction level profit and loss. Uses an open average calculation"""
txndata = txns.frame
mktdata = txns.pricer.get_eod_frame()
if not isinstance(mktdata.index, pd.DatetimeIndex):
mktdata.to_timestamp(freq='B') # depends on [control=['if'], data=[]]
# get the set of all txn dts and mkt data dts
pl = pd.merge(txndata, mktdata.reset_index(), how='outer', on=TPL.DT)
if pl[TC.PID].isnull().all():
ltd_frame = pd.DataFrame(index=pl.index)
ltd_frame[TPL.DT] = pl[PL.DT]
ltd_frame[TPL.POS] = 0
ltd_frame[TPL.PID] = 0
ltd_frame[TPL.TID] = 0
ltd_frame[TPL.TXN_QTY] = np.nan
ltd_frame[TPL.TXN_PX] = np.nan
ltd_frame[TPL.TXN_FEES] = 0
ltd_frame[TPL.TXN_PREMIUM] = 0
ltd_frame[TPL.TXN_INTENT] = 0
ltd_frame[TPL.TXN_ACTION] = 0
ltd_frame[TPL.CLOSE_PX] = pl[TPL.CLOSE_PX]
ltd_frame[TPL.OPEN_VAL] = 0
ltd_frame[TPL.MKT_VAL] = 0
ltd_frame[TPL.TOT_VAL] = 0
ltd_frame[TPL.DVDS] = 0
ltd_frame[TPL.FEES] = 0
ltd_frame[TPL.RPL_GROSS] = 0
ltd_frame[TPL.RPL] = 0
ltd_frame[TPL.UPL] = 0
ltd_frame[TPL.PL] = 0
return ltd_frame # depends on [control=['if'], data=[]]
else:
pl.sort([TC.DT, TC.PID, TC.TID], inplace=1)
pl.reset_index(inplace=1, drop=1)
# check that all days can be priced
has_position = pl[TC.PID] > 0
missing_pxs = pl[MC.CLOSE].isnull()
missing = pl[TC.DT][has_position & missing_pxs]
if len(missing) > 0:
msg = 'insufficient price data: {0} prices missing for dates {1}'
mdates = ','.join([_.strftime('%Y-%m-%d') for _ in set(missing[:5])])
mdates += len(missing) > 5 and '...' or ''
raise Exception(msg.format(len(missing), mdates)) # depends on [control=['if'], data=[]]
# Now there is a row for every timestamp. Now compute the pl and fill in where missing data should be
cols = [TC.DT, TC.POS, TC.PID, TC.TID, TC.INTENT, TC.ACTION, TC.FEES, TC.QTY, TC.PX, TC.PREMIUM, TC.OPEN_VAL]
(dts, pos_qtys, pids, tids, intents, sides, txn_fees, txn_qtys, txn_pxs, premiums, open_vals) = [pl[c] for c in cols]
(dvds, closing_pxs, mkt_vals) = [pl[c] for c in [MC.DVDS, MC.CLOSE, MC.MKT_VAL]]
# Ensure only end of day is kept for dividends (join will match dvd to any transaction during day
dvds = dvds.where(dts != dts.shift(-1), 0)
# fill in pl dates
open_vals.ffill(inplace=1)
open_vals.fillna(0, inplace=1)
pos_qtys.ffill(inplace=1)
pos_qtys.fillna(0, inplace=1)
# pid is the only tricky one, copy only while position is open
inpos = intents.notnull() | (pos_qtys != 0)
pids = np.where(inpos, pids.ffill(), 0)
pl['pid'] = pids.astype(int)
# Zero fill missing
dvds.fillna(0, inplace=1)
tids.fillna(0, inplace=1)
tids = tids.astype(int)
intents.fillna(0, inplace=1)
intents = intents.astype(int)
sides.fillna(0, inplace=1)
sides = sides.astype(int)
txn_fees.fillna(0, inplace=1)
premiums.fillna(0, inplace=1)
# LTD p/l calculation
fees = txn_fees.cumsum()
total_vals = premiums.cumsum()
mkt_vals *= pos_qtys
dvds = (dvds * pos_qtys).cumsum()
rpl_gross = total_vals - open_vals
rpl = rpl_gross + fees + dvds
upl = mkt_vals + open_vals
tpl = upl + rpl
# build the result
data = OrderedDict()
data[TPL.DT] = dts
data[TPL.POS] = pos_qtys
data[TPL.PID] = pids
data[TPL.TID] = tids
data[TPL.TXN_QTY] = txn_qtys
data[TPL.TXN_PX] = txn_pxs
data[TPL.TXN_FEES] = txn_fees
data[TPL.TXN_PREMIUM] = premiums
data[TPL.TXN_INTENT] = intents
data[TPL.TXN_ACTION] = sides
data[TPL.CLOSE_PX] = closing_pxs
data[TPL.OPEN_VAL] = open_vals
data[TPL.MKT_VAL] = mkt_vals
data[TPL.TOT_VAL] = total_vals
data[TPL.DVDS] = dvds
data[TPL.FEES] = fees
data[TPL.RPL_GROSS] = rpl_gross
data[TPL.RPL] = rpl
data[TPL.UPL] = upl
data[TPL.PL] = tpl
ltd_frame = pd.DataFrame(data, columns=data.keys())
return ltd_frame |
def download_class(session, args, class_name):
    """
    Try to download on-demand class.

    @return: Tuple of (bool, bool), where the first bool indicates whether
        errors occurred while parsing syllabus, the second bool indicates
        whether the course appears to be completed.
    @rtype: (bool, bool)
    """
    # All current classes use the on-demand pipeline; delegate directly.
    logging.debug('Downloading new style (on demand) class %s', class_name)
    result = download_on_demand_class(session, args, class_name)
    return result
constant[
Try to download on-demand class.
@return: Tuple of (bool, bool), where the first bool indicates whether
errors occurred while parsing syllabus, the second bool indicates
whether the course appears to be completed.
@rtype: (bool, bool)
]
call[name[logging].debug, parameter[constant[Downloading new style (on demand) class %s], name[class_name]]]
return[call[name[download_on_demand_class], parameter[name[session], name[args], name[class_name]]]] | keyword[def] identifier[download_class] ( identifier[session] , identifier[args] , identifier[class_name] ):
literal[string]
identifier[logging] . identifier[debug] ( literal[string] , identifier[class_name] )
keyword[return] identifier[download_on_demand_class] ( identifier[session] , identifier[args] , identifier[class_name] ) | def download_class(session, args, class_name):
"""
Try to download on-demand class.
@return: Tuple of (bool, bool), where the first bool indicates whether
errors occurred while parsing syllabus, the second bool indicates
whether the course appears to be completed.
@rtype: (bool, bool)
"""
logging.debug('Downloading new style (on demand) class %s', class_name)
return download_on_demand_class(session, args, class_name) |
def _create_flags(self, kw):
"""
this clones the kw dict, adding a lower-case version of every key
(duplicated in circuit.py; consider putting in util?)
"""
flags = {}
for k in kw.keys():
flags[k] = kw[k]
flags[k.lower()] = flags[k]
return flags | def function[_create_flags, parameter[self, kw]]:
constant[
this clones the kw dict, adding a lower-case version of every key
(duplicated in circuit.py; consider putting in util?)
]
variable[flags] assign[=] dictionary[[], []]
for taget[name[k]] in starred[call[name[kw].keys, parameter[]]] begin[:]
call[name[flags]][name[k]] assign[=] call[name[kw]][name[k]]
call[name[flags]][call[name[k].lower, parameter[]]] assign[=] call[name[flags]][name[k]]
return[name[flags]] | keyword[def] identifier[_create_flags] ( identifier[self] , identifier[kw] ):
literal[string]
identifier[flags] ={}
keyword[for] identifier[k] keyword[in] identifier[kw] . identifier[keys] ():
identifier[flags] [ identifier[k] ]= identifier[kw] [ identifier[k] ]
identifier[flags] [ identifier[k] . identifier[lower] ()]= identifier[flags] [ identifier[k] ]
keyword[return] identifier[flags] | def _create_flags(self, kw):
"""
this clones the kw dict, adding a lower-case version of every key
(duplicated in circuit.py; consider putting in util?)
"""
flags = {}
for k in kw.keys():
flags[k] = kw[k]
flags[k.lower()] = flags[k] # depends on [control=['for'], data=['k']]
return flags |
def random_discrete_dp(num_states, num_actions, beta=None,
                       k=None, scale=1, sparse=False, sa_pair=False,
                       random_state=None):
    """
    Generate a DiscreteDP randomly. The reward values are drawn from the
    normal distribution with mean 0 and standard deviation `scale`.

    Parameters
    ----------
    num_states : scalar(int)
        Number of states.
    num_actions : scalar(int)
        Number of actions.
    beta : scalar(float), optional(default=None)
        Discount factor. Randomly chosen from [0, 1) if not specified.
    k : scalar(int), optional(default=None)
        Number of possible next states for each state-action pair. Equal
        to `num_states` if not specified.
    scale : scalar(float), optional(default=1)
        Standard deviation of the normal distribution for the reward
        values.
    sparse : bool, optional(default=False)
        Whether to store the transition probability array in sparse
        matrix form.
    sa_pair : bool, optional(default=False)
        Whether to represent the data in the state-action pairs
        formulation. (If `sparse=True`, automatically set `True`.)
    random_state : int or np.random.RandomState, optional
        Random seed (integer) or np.random.RandomState instance to set
        the initial state of the random number generator for
        reproducibility. If None, a randomly initialized RandomState is
        used.

    Returns
    -------
    ddp : DiscreteDP
        An instance of DiscreteDP.
    """
    if sparse:
        # Sparse transition matrices are only supported in the
        # state-action pairs formulation.
        sa_pair = True

    # Number of state-action pairs
    L = num_states * num_actions

    random_state = check_random_state(random_state)
    # NOTE: the draw order below (randn, stochastic matrix, then beta)
    # must not change, or a fixed seed would produce different output.
    R = scale * random_state.randn(L)
    Q = _random_stochastic_matrix(L, num_states, k=k,
                                  sparse=sparse, format='csr',
                                  random_state=random_state)
    if beta is None:
        beta = random_state.random_sample()

    if sa_pair:
        s_indices, a_indices = sa_indices(num_states, num_actions)
    else:
        s_indices, a_indices = None, None
        # BUG FIX: these two reshapes used to run unconditionally,
        # which broke the sa_pair path -- a sparse (CSR) Q cannot be
        # given a 3-D shape, and the state-action-pair formulation
        # expects 1-D R / 2-D Q. Only the dense (s, a) formulation
        # uses the reshaped arrays.
        R.shape = (num_states, num_actions)
        Q.shape = (num_states, num_actions, num_states)

    ddp = DiscreteDP(R, Q, beta, s_indices, a_indices)
    return ddp
constant[
Generate a DiscreteDP randomly. The reward values are drawn from the
normal distribution with mean 0 and standard deviation `scale`.
Parameters
----------
num_states : scalar(int)
Number of states.
num_actions : scalar(int)
Number of actions.
beta : scalar(float), optional(default=None)
Discount factor. Randomly chosen from [0, 1) if not specified.
k : scalar(int), optional(default=None)
Number of possible next states for each state-action pair. Equal
to `num_states` if not specified.
scale : scalar(float), optional(default=1)
Standard deviation of the normal distribution for the reward
values.
sparse : bool, optional(default=False)
Whether to store the transition probability array in sparse
matrix form.
sa_pair : bool, optional(default=False)
Whether to represent the data in the state-action pairs
formulation. (If `sparse=True`, automatically set `True`.)
random_state : int or np.random.RandomState, optional
Random seed (integer) or np.random.RandomState instance to set
the initial state of the random number generator for
reproducibility. If None, a randomly initialized RandomState is
used.
Returns
-------
ddp : DiscreteDP
An instance of DiscreteDP.
]
if name[sparse] begin[:]
variable[sa_pair] assign[=] constant[True]
variable[L] assign[=] binary_operation[name[num_states] * name[num_actions]]
variable[random_state] assign[=] call[name[check_random_state], parameter[name[random_state]]]
variable[R] assign[=] binary_operation[name[scale] * call[name[random_state].randn, parameter[name[L]]]]
variable[Q] assign[=] call[name[_random_stochastic_matrix], parameter[name[L], name[num_states]]]
if compare[name[beta] is constant[None]] begin[:]
variable[beta] assign[=] call[name[random_state].random_sample, parameter[]]
if name[sa_pair] begin[:]
<ast.Tuple object at 0x7da20c6c4be0> assign[=] call[name[sa_indices], parameter[name[num_states], name[num_actions]]]
variable[ddp] assign[=] call[name[DiscreteDP], parameter[name[R], name[Q], name[beta], name[s_indices], name[a_indices]]]
return[name[ddp]] | keyword[def] identifier[random_discrete_dp] ( identifier[num_states] , identifier[num_actions] , identifier[beta] = keyword[None] ,
identifier[k] = keyword[None] , identifier[scale] = literal[int] , identifier[sparse] = keyword[False] , identifier[sa_pair] = keyword[False] ,
identifier[random_state] = keyword[None] ):
literal[string]
keyword[if] identifier[sparse] :
identifier[sa_pair] = keyword[True]
identifier[L] = identifier[num_states] * identifier[num_actions]
identifier[random_state] = identifier[check_random_state] ( identifier[random_state] )
identifier[R] = identifier[scale] * identifier[random_state] . identifier[randn] ( identifier[L] )
identifier[Q] = identifier[_random_stochastic_matrix] ( identifier[L] , identifier[num_states] , identifier[k] = identifier[k] ,
identifier[sparse] = identifier[sparse] , identifier[format] = literal[string] ,
identifier[random_state] = identifier[random_state] )
keyword[if] identifier[beta] keyword[is] keyword[None] :
identifier[beta] = identifier[random_state] . identifier[random_sample] ()
keyword[if] identifier[sa_pair] :
identifier[s_indices] , identifier[a_indices] = identifier[sa_indices] ( identifier[num_states] , identifier[num_actions] )
keyword[else] :
identifier[s_indices] , identifier[a_indices] = keyword[None] , keyword[None]
identifier[R] . identifier[shape] =( identifier[num_states] , identifier[num_actions] )
identifier[Q] . identifier[shape] =( identifier[num_states] , identifier[num_actions] , identifier[num_states] )
identifier[ddp] = identifier[DiscreteDP] ( identifier[R] , identifier[Q] , identifier[beta] , identifier[s_indices] , identifier[a_indices] )
keyword[return] identifier[ddp] | def random_discrete_dp(num_states, num_actions, beta=None, k=None, scale=1, sparse=False, sa_pair=False, random_state=None):
"""
Generate a DiscreteDP randomly. The reward values are drawn from the
normal distribution with mean 0 and standard deviation `scale`.
Parameters
----------
num_states : scalar(int)
Number of states.
num_actions : scalar(int)
Number of actions.
beta : scalar(float), optional(default=None)
Discount factor. Randomly chosen from [0, 1) if not specified.
k : scalar(int), optional(default=None)
Number of possible next states for each state-action pair. Equal
to `num_states` if not specified.
scale : scalar(float), optional(default=1)
Standard deviation of the normal distribution for the reward
values.
sparse : bool, optional(default=False)
Whether to store the transition probability array in sparse
matrix form.
sa_pair : bool, optional(default=False)
Whether to represent the data in the state-action pairs
formulation. (If `sparse=True`, automatically set `True`.)
random_state : int or np.random.RandomState, optional
Random seed (integer) or np.random.RandomState instance to set
the initial state of the random number generator for
reproducibility. If None, a randomly initialized RandomState is
used.
Returns
-------
ddp : DiscreteDP
An instance of DiscreteDP.
"""
if sparse:
sa_pair = True # depends on [control=['if'], data=[]]
# Number of state-action pairs
L = num_states * num_actions
random_state = check_random_state(random_state)
R = scale * random_state.randn(L)
Q = _random_stochastic_matrix(L, num_states, k=k, sparse=sparse, format='csr', random_state=random_state)
if beta is None:
beta = random_state.random_sample() # depends on [control=['if'], data=['beta']]
if sa_pair:
(s_indices, a_indices) = sa_indices(num_states, num_actions) # depends on [control=['if'], data=[]]
else:
(s_indices, a_indices) = (None, None)
R.shape = (num_states, num_actions)
Q.shape = (num_states, num_actions, num_states)
ddp = DiscreteDP(R, Q, beta, s_indices, a_indices)
return ddp |
def register(self, token, regexp):
    """Add a token class and its pattern to this registry.

    Args:
        token (Token): the token class to register
        regexp (str): regular expression source matched by that token
    """
    compiled = re.compile(regexp)
    self._tokens.append((token, compiled))
constant[Register a token.
Args:
token (Token): the token class to register
regexp (str): the regexp for that token
]
call[name[self]._tokens.append, parameter[tuple[[<ast.Name object at 0x7da20c992d70>, <ast.Call object at 0x7da20c992800>]]]] | keyword[def] identifier[register] ( identifier[self] , identifier[token] , identifier[regexp] ):
literal[string]
identifier[self] . identifier[_tokens] . identifier[append] (( identifier[token] , identifier[re] . identifier[compile] ( identifier[regexp] ))) | def register(self, token, regexp):
"""Register a token.
Args:
token (Token): the token class to register
regexp (str): the regexp for that token
"""
self._tokens.append((token, re.compile(regexp))) |
def profile_match(adapter, profiles, hard_threshold=0.95, soft_threshold=0.9):
    """
    given a dict of profiles, searches through all the samples in the DB
    for a match. If a matching sample is found an exception is raised,
    and the variants will not be loaded into the database.

    Args:
        adapter (MongoAdapter): Adapter to mongodb
        profiles (dict(str)): The profiles (given as strings) for each sample in vcf.
        hard_threshold(float): Rejects load if similarity above this is found
        soft_threshold(float): Stores similar samples if similarity above this is found

    Returns:
        matches(dict(list)): list of similar samples ("<case_id>.<ind_id>")
            for each sample in vcf.

    Raises:
        ProfileError: if any stored individual is at least
            `hard_threshold` similar to one of the given samples.
    """
    matches = {sample: [] for sample in profiles}
    for case in adapter.cases():
        for individual in case['individuals']:
            # Hoisted out of the per-sample loop: the stored profile
            # does not depend on which vcf sample we compare against.
            ind_profile = individual.get('profile')
            if not ind_profile:
                continue
            for sample, sample_profile in profiles.items():
                similarity = compare_profiles(sample_profile, ind_profile)
                if similarity >= hard_threshold:
                    msg = (
                        f"individual {sample} has a {similarity} similarity "
                        f"with individual {individual['ind_id']} in case "
                        f"{case['case_id']}"
                    )
                    LOG.critical(msg)
                    # Abort the load: this sample already exists in the DB.
                    raise ProfileError
                if similarity >= soft_threshold:
                    match = f"{case['case_id']}.{individual['ind_id']}"
                    matches[sample].append(match)
    return matches
constant[
given a dict of profiles, searches through all the samples in the DB
for a match. If a matching sample is found an exception is raised,
and the variants will not be loaded into the database.
Args:
adapter (MongoAdapter): Adapter to mongodb
profiles (dict(str)): The profiles (given as strings) for each sample in vcf.
hard_threshold(float): Rejects load if hamming distance above this is found
soft_threshold(float): Stores similar samples if hamming distance above this is found
Returns:
matches(dict(list)): list of similar samples for each sample in vcf.
]
variable[matches] assign[=] <ast.DictComp object at 0x7da1b19069b0>
for taget[name[case]] in starred[call[name[adapter].cases, parameter[]]] begin[:]
for taget[name[individual]] in starred[call[name[case]][constant[individuals]]] begin[:]
for taget[name[sample]] in starred[call[name[profiles].keys, parameter[]]] begin[:]
if call[name[individual].get, parameter[constant[profile]]] begin[:]
variable[similarity] assign[=] call[name[compare_profiles], parameter[call[name[profiles]][name[sample]], call[name[individual]][constant[profile]]]]
if compare[name[similarity] greater_or_equal[>=] name[hard_threshold]] begin[:]
variable[msg] assign[=] <ast.JoinedStr object at 0x7da18fe902e0>
call[name[LOG].critical, parameter[name[msg]]]
<ast.Raise object at 0x7da18fe93bb0>
if compare[name[similarity] greater_or_equal[>=] name[soft_threshold]] begin[:]
variable[match] assign[=] <ast.JoinedStr object at 0x7da18fe93b50>
call[call[name[matches]][name[sample]].append, parameter[name[match]]]
return[name[matches]] | keyword[def] identifier[profile_match] ( identifier[adapter] , identifier[profiles] , identifier[hard_threshold] = literal[int] , identifier[soft_threshold] = literal[int] ):
literal[string]
identifier[matches] ={ identifier[sample] :[] keyword[for] identifier[sample] keyword[in] identifier[profiles] . identifier[keys] ()}
keyword[for] identifier[case] keyword[in] identifier[adapter] . identifier[cases] ():
keyword[for] identifier[individual] keyword[in] identifier[case] [ literal[string] ]:
keyword[for] identifier[sample] keyword[in] identifier[profiles] . identifier[keys] ():
keyword[if] identifier[individual] . identifier[get] ( literal[string] ):
identifier[similarity] = identifier[compare_profiles] (
identifier[profiles] [ identifier[sample] ], identifier[individual] [ literal[string] ]
)
keyword[if] identifier[similarity] >= identifier[hard_threshold] :
identifier[msg] =(
literal[string]
literal[string]
literal[string]
)
identifier[LOG] . identifier[critical] ( identifier[msg] )
keyword[raise] identifier[ProfileError]
keyword[if] identifier[similarity] >= identifier[soft_threshold] :
identifier[match] = literal[string]
identifier[matches] [ identifier[sample] ]. identifier[append] ( identifier[match] )
keyword[return] identifier[matches] | def profile_match(adapter, profiles, hard_threshold=0.95, soft_threshold=0.9):
"""
given a dict of profiles, searches through all the samples in the DB
for a match. If a matching sample is found an exception is raised,
and the variants will not be loaded into the database.
Args:
adapter (MongoAdapter): Adapter to mongodb
profiles (dict(str)): The profiles (given as strings) for each sample in vcf.
hard_threshold(float): Rejects load if hamming distance above this is found
soft_threshold(float): Stores similar samples if hamming distance above this is found
Returns:
matches(dict(list)): list of similar samples for each sample in vcf.
"""
matches = {sample: [] for sample in profiles.keys()}
for case in adapter.cases():
for individual in case['individuals']:
for sample in profiles.keys():
if individual.get('profile'):
similarity = compare_profiles(profiles[sample], individual['profile'])
if similarity >= hard_threshold:
msg = f"individual {sample} has a {similarity} similarity with individual {individual['ind_id']} in case {case['case_id']}"
LOG.critical(msg)
#Raise some exception
raise ProfileError # depends on [control=['if'], data=['similarity']]
if similarity >= soft_threshold:
match = f"{case['case_id']}.{individual['ind_id']}"
matches[sample].append(match) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['sample']] # depends on [control=['for'], data=['individual']] # depends on [control=['for'], data=['case']]
return matches |
def _check_configure_args(configure_args: Dict[str, Any]) -> Dict[str, Any]:
    """ Validate the arguments passed to configure.

    Raises ConfigureArgsError on any invalid argument. On success,
    returns the configure_args dict with any necessary mutations
    applied (hidden default, deduced securityType, checked eapConfig).
    """
    # An SSID string is always required.
    ssid = configure_args.get('ssid')
    if not ssid or not isinstance(ssid, str):
        raise ConfigureArgsError("SSID must be specified")

    # `hidden` defaults to False; when given it must be a bool.
    hidden = configure_args.get('hidden')
    if not hidden:
        configure_args['hidden'] = False
    elif not isinstance(hidden, bool):
        raise ConfigureArgsError('If specified, hidden must be a bool')

    security = _deduce_security(configure_args)
    configure_args['securityType'] = security

    if security == nmcli.SECURITY_TYPES.WPA_PSK:
        # wpa2-personal requires a pre-shared key.
        if not configure_args.get('psk'):
            raise ConfigureArgsError(
                'If securityType is wpa-psk, psk must be specified')
    elif security == nmcli.SECURITY_TYPES.WPA_EAP:
        # wpa2-enterprise requires an eap config, which must itself
        # be validated (and possibly normalized).
        if not configure_args.get('eapConfig'):
            raise ConfigureArgsError(
                'If securityType is wpa-eap, eapConfig must be specified')
        configure_args['eapConfig']\
            = _eap_check_config(configure_args['eapConfig'])

    # No security: nothing further to check.
    return configure_args
constant[ Check the arguments passed to configure.
Raises an exception on failure. On success, returns a dict of
configure_args with any necessary mutations.
]
if <ast.BoolOp object at 0x7da18eb54490> begin[:]
<ast.Raise object at 0x7da18eb57010>
if <ast.UnaryOp object at 0x7da18eb54970> begin[:]
call[name[configure_args]][constant[hidden]] assign[=] constant[False]
call[name[configure_args]][constant[securityType]] assign[=] call[name[_deduce_security], parameter[name[configure_args]]]
if compare[call[name[configure_args]][constant[securityType]] equal[==] name[nmcli].SECURITY_TYPES.WPA_PSK] begin[:]
if <ast.UnaryOp object at 0x7da18eb578e0> begin[:]
<ast.Raise object at 0x7da18eb56620>
return[name[configure_args]]
if compare[call[name[configure_args]][constant[securityType]] equal[==] name[nmcli].SECURITY_TYPES.WPA_EAP] begin[:]
if <ast.UnaryOp object at 0x7da2044c07f0> begin[:]
<ast.Raise object at 0x7da2044c1240>
call[name[configure_args]][constant[eapConfig]] assign[=] call[name[_eap_check_config], parameter[call[name[configure_args]][constant[eapConfig]]]]
return[name[configure_args]]
return[name[configure_args]] | keyword[def] identifier[_check_configure_args] ( identifier[configure_args] : identifier[Dict] [ identifier[str] , identifier[Any] ])-> identifier[Dict] [ identifier[str] , identifier[Any] ]:
literal[string]
keyword[if] keyword[not] identifier[configure_args] . identifier[get] ( literal[string] ) keyword[or] keyword[not] identifier[isinstance] ( identifier[configure_args] [ literal[string] ], identifier[str] ):
keyword[raise] identifier[ConfigureArgsError] ( literal[string] )
keyword[if] keyword[not] identifier[configure_args] . identifier[get] ( literal[string] ):
identifier[configure_args] [ literal[string] ]= keyword[False]
keyword[elif] keyword[not] identifier[isinstance] ( identifier[configure_args] [ literal[string] ], identifier[bool] ):
keyword[raise] identifier[ConfigureArgsError] ( literal[string] )
identifier[configure_args] [ literal[string] ]= identifier[_deduce_security] ( identifier[configure_args] )
keyword[if] identifier[configure_args] [ literal[string] ]== identifier[nmcli] . identifier[SECURITY_TYPES] . identifier[WPA_PSK] :
keyword[if] keyword[not] identifier[configure_args] . identifier[get] ( literal[string] ):
keyword[raise] identifier[ConfigureArgsError] (
literal[string] )
keyword[return] identifier[configure_args]
keyword[if] identifier[configure_args] [ literal[string] ]== identifier[nmcli] . identifier[SECURITY_TYPES] . identifier[WPA_EAP] :
keyword[if] keyword[not] identifier[configure_args] . identifier[get] ( literal[string] ):
keyword[raise] identifier[ConfigureArgsError] (
literal[string] )
identifier[configure_args] [ literal[string] ]= identifier[_eap_check_config] ( identifier[configure_args] [ literal[string] ])
keyword[return] identifier[configure_args]
keyword[return] identifier[configure_args] | def _check_configure_args(configure_args: Dict[str, Any]) -> Dict[str, Any]:
""" Check the arguments passed to configure.
Raises an exception on failure. On success, returns a dict of
configure_args with any necessary mutations.
"""
# SSID must always be present
if not configure_args.get('ssid') or not isinstance(configure_args['ssid'], str):
raise ConfigureArgsError('SSID must be specified') # depends on [control=['if'], data=[]]
# If specified, hidden must be a bool
if not configure_args.get('hidden'):
configure_args['hidden'] = False # depends on [control=['if'], data=[]]
elif not isinstance(configure_args['hidden'], bool):
raise ConfigureArgsError('If specified, hidden must be a bool') # depends on [control=['if'], data=[]]
configure_args['securityType'] = _deduce_security(configure_args)
# If we have wpa2-personal, we need a psk
if configure_args['securityType'] == nmcli.SECURITY_TYPES.WPA_PSK:
if not configure_args.get('psk'):
raise ConfigureArgsError('If securityType is wpa-psk, psk must be specified') # depends on [control=['if'], data=[]]
return configure_args # depends on [control=['if'], data=[]]
# If we have wpa2-enterprise, we need eap config, and we need to check
# it
if configure_args['securityType'] == nmcli.SECURITY_TYPES.WPA_EAP:
if not configure_args.get('eapConfig'):
raise ConfigureArgsError('If securityType is wpa-eap, eapConfig must be specified') # depends on [control=['if'], data=[]]
configure_args['eapConfig'] = _eap_check_config(configure_args['eapConfig'])
return configure_args # depends on [control=['if'], data=[]]
# If we’re still here we have no security and we’re done
return configure_args |
def wantDirectory(self, dirname):
    """Check if directory is eligible for test discovery.

    Returns False for directories listed in ``self.exclude_dirs`` so
    they are skipped; returns None otherwise, leaving the decision to
    other plugins (nose-style convention -- TODO confirm framework).
    """
    if dirname in self.exclude_dirs:
        # Lazy %-args: the message is only formatted when DEBUG
        # logging is actually enabled.
        log.debug("excluded: %s", dirname)
        return False
    else:
        return None
constant[Check if directory is eligible for test discovery]
if compare[name[dirname] in name[self].exclude_dirs] begin[:]
call[name[log].debug, parameter[binary_operation[constant[excluded: %s] <ast.Mod object at 0x7da2590d6920> name[dirname]]]]
return[constant[False]] | keyword[def] identifier[wantDirectory] ( identifier[self] , identifier[dirname] ):
literal[string]
keyword[if] identifier[dirname] keyword[in] identifier[self] . identifier[exclude_dirs] :
identifier[log] . identifier[debug] ( literal[string] % identifier[dirname] )
keyword[return] keyword[False]
keyword[else] :
keyword[return] keyword[None] | def wantDirectory(self, dirname):
"""Check if directory is eligible for test discovery"""
if dirname in self.exclude_dirs:
log.debug('excluded: %s' % dirname)
return False # depends on [control=['if'], data=['dirname']]
else:
return None |
def t_to_min(x):
    """
    Convert an XML 'xs:duration'-style string to decimal minutes, e.g.:
    t_to_min('PT1H2M30S') == 62.5
    """
    # Groups are (hours, minutes, seconds); any of them may be absent.
    parts = re.match('PT(?:(.*)H)?(?:(.*)M)?(?:(.*)S)?', x).groups()
    total = 0
    for pos, part in enumerate(parts):
        if part is not None:
            # hours scale by 60, minutes by 1, seconds by 1/60
            total += float(part) * 60. ** (1 - pos)
    return total
for i in range(3)) | def function[t_to_min, parameter[x]]:
constant[
Convert XML 'xs: duration type' to decimal minutes, e.g.:
t_to_min('PT1H2M30S') == 62.5
]
variable[g] assign[=] call[call[name[re].match, parameter[constant[PT(?:(.*)H)?(?:(.*)M)?(?:(.*)S)?], name[x]]].groups, parameter[]]
return[call[name[sum], parameter[<ast.GeneratorExp object at 0x7da2046216c0>]]] | keyword[def] identifier[t_to_min] ( identifier[x] ):
literal[string]
identifier[g] = identifier[re] . identifier[match] ( literal[string] , identifier[x] ). identifier[groups] ()
keyword[return] identifier[sum] ( literal[int] keyword[if] identifier[g] [ identifier[i] ] keyword[is] keyword[None] keyword[else] identifier[float] ( identifier[g] [ identifier[i] ])* literal[int] **( literal[int] - identifier[i] )
keyword[for] identifier[i] keyword[in] identifier[range] ( literal[int] )) | def t_to_min(x):
"""
Convert XML 'xs: duration type' to decimal minutes, e.g.:
t_to_min('PT1H2M30S') == 62.5
"""
g = re.match('PT(?:(.*)H)?(?:(.*)M)?(?:(.*)S)?', x).groups()
return sum((0 if g[i] is None else float(g[i]) * 60.0 ** (1 - i) for i in range(3))) |
def _converter(data):
    """Fallback serializer handed to the native `json.dumps`.

    Looks the instance's class up in the serializer `REGISTRY`; when a
    handler exists, the handler's output is wrapped together with the
    fully qualified class name so the object can be reconstructed
    later. Unknown types raise TypeError, as json.dumps itself would.
    """
    cls = data.__class__
    handler = REGISTRY.get(cls)
    if not handler:
        raise TypeError(repr(data) + " is not JSON serializable")
    full_name = '{}.{}'.format(cls.__module__, cls.__name__)
    return {
        '__class__': full_name,
        '__value__': handler(data),
    }
constant[Internal function that will be passed to the native `json.dumps`.
This function uses the `REGISTRY` of serializers and try to convert
a given instance to an object that json.dumps can understand.
]
variable[handler] assign[=] call[name[REGISTRY].get, parameter[name[data].__class__]]
if name[handler] begin[:]
variable[full_name] assign[=] call[constant[{}.{}].format, parameter[name[data].__class__.__module__, name[data].__class__.__name__]]
return[dictionary[[<ast.Constant object at 0x7da20eb29fc0>, <ast.Constant object at 0x7da20eb29a50>], [<ast.Name object at 0x7da20eb2ae60>, <ast.Call object at 0x7da20eb2ada0>]]]
<ast.Raise object at 0x7da18dc98940> | keyword[def] identifier[_converter] ( identifier[data] ):
literal[string]
identifier[handler] = identifier[REGISTRY] . identifier[get] ( identifier[data] . identifier[__class__] )
keyword[if] identifier[handler] :
identifier[full_name] = literal[string] . identifier[format] (
identifier[data] . identifier[__class__] . identifier[__module__] ,
identifier[data] . identifier[__class__] . identifier[__name__] )
keyword[return] {
literal[string] : identifier[full_name] ,
literal[string] : identifier[handler] ( identifier[data] ),
}
keyword[raise] identifier[TypeError] ( identifier[repr] ( identifier[data] )+ literal[string] ) | def _converter(data):
"""Internal function that will be passed to the native `json.dumps`.
This function uses the `REGISTRY` of serializers and try to convert
a given instance to an object that json.dumps can understand.
"""
handler = REGISTRY.get(data.__class__)
if handler:
full_name = '{}.{}'.format(data.__class__.__module__, data.__class__.__name__)
return {'__class__': full_name, '__value__': handler(data)} # depends on [control=['if'], data=[]]
raise TypeError(repr(data) + ' is not JSON serializable') |
def splitMeiUyir(uyirmei_char):
    """
    Split an uyirmei compound character into mei + uyir characters
    and return them as a tuple.

    Plain mei/uyir/ayudha letters are returned unchanged (not as a
    tuple). Non-Tamil input raises ValueError.
    Input : It must be unicode tamil char.
    Written By : Arulalan.T
    Date : 22.09.2014
    """
    # Only text input makes sense here. `PYTHON3 and str or unicode`
    # picks str on py3 and unicode on py2; the py2-only name `unicode`
    # is never evaluated when PYTHON3 is truthy.
    if not isinstance(uyirmei_char, PYTHON3 and str or unicode):
        raise ValueError("Passed input letter '%s' must be unicode, \
            not just string" % uyirmei_char)
    # Plain consonants (mei), vowels (uyir) and the ayudha letter are
    # not compounds -- hand them back as-is.
    if uyirmei_char in mei_letters or uyirmei_char in uyir_letters or uyirmei_char in ayudha_letter:
        return uyirmei_char
    if uyirmei_char not in grantha_uyirmei_letters:
        # The letter may merely be in a decomposed/unnormalized form;
        # normalize once and retry before declaring it non-Tamil.
        if not is_normalized( uyirmei_char ):
            norm_char = unicode_normalize(uyirmei_char)
            rval = splitMeiUyir( norm_char )
            return rval
        raise ValueError("Passed input letter '%s' is not tamil letter" % uyirmei_char)
    # grantha_uyirmei_letters appears to be laid out as 12 vowel
    # variants per consonant, so the index splits as
    # idx = meiidx * 12 + uyiridx.
    idx = grantha_uyirmei_letters.index(uyirmei_char)
    uyiridx = idx % 12
    meiidx = int((idx - uyiridx)/ 12)
    return (grantha_mei_letters[meiidx], uyir_letters[uyiridx])
constant[
This function split uyirmei compound character into mei + uyir characters
and returns in tuple.
Input : It must be unicode tamil char.
Written By : Arulalan.T
Date : 22.09.2014
]
if <ast.UnaryOp object at 0x7da1b069d720> begin[:]
<ast.Raise object at 0x7da1b069c0a0>
if <ast.BoolOp object at 0x7da1b069fb80> begin[:]
return[name[uyirmei_char]]
if compare[name[uyirmei_char] <ast.NotIn object at 0x7da2590d7190> name[grantha_uyirmei_letters]] begin[:]
if <ast.UnaryOp object at 0x7da1b069c4f0> begin[:]
variable[norm_char] assign[=] call[name[unicode_normalize], parameter[name[uyirmei_char]]]
variable[rval] assign[=] call[name[splitMeiUyir], parameter[name[norm_char]]]
return[name[rval]]
<ast.Raise object at 0x7da1b06273d0>
variable[idx] assign[=] call[name[grantha_uyirmei_letters].index, parameter[name[uyirmei_char]]]
variable[uyiridx] assign[=] binary_operation[name[idx] <ast.Mod object at 0x7da2590d6920> constant[12]]
variable[meiidx] assign[=] call[name[int], parameter[binary_operation[binary_operation[name[idx] - name[uyiridx]] / constant[12]]]]
return[tuple[[<ast.Subscript object at 0x7da1b06986a0>, <ast.Subscript object at 0x7da1b0698100>]]] | keyword[def] identifier[splitMeiUyir] ( identifier[uyirmei_char] ):
literal[string]
keyword[if] keyword[not] identifier[isinstance] ( identifier[uyirmei_char] , identifier[PYTHON3] keyword[and] identifier[str] keyword[or] identifier[unicode] ):
keyword[raise] identifier[ValueError] ( literal[string] % identifier[uyirmei_char] )
keyword[if] identifier[uyirmei_char] keyword[in] identifier[mei_letters] keyword[or] identifier[uyirmei_char] keyword[in] identifier[uyir_letters] keyword[or] identifier[uyirmei_char] keyword[in] identifier[ayudha_letter] :
keyword[return] identifier[uyirmei_char]
keyword[if] identifier[uyirmei_char] keyword[not] keyword[in] identifier[grantha_uyirmei_letters] :
keyword[if] keyword[not] identifier[is_normalized] ( identifier[uyirmei_char] ):
identifier[norm_char] = identifier[unicode_normalize] ( identifier[uyirmei_char] )
identifier[rval] = identifier[splitMeiUyir] ( identifier[norm_char] )
keyword[return] identifier[rval]
keyword[raise] identifier[ValueError] ( literal[string] % identifier[uyirmei_char] )
identifier[idx] = identifier[grantha_uyirmei_letters] . identifier[index] ( identifier[uyirmei_char] )
identifier[uyiridx] = identifier[idx] % literal[int]
identifier[meiidx] = identifier[int] (( identifier[idx] - identifier[uyiridx] )/ literal[int] )
keyword[return] ( identifier[grantha_mei_letters] [ identifier[meiidx] ], identifier[uyir_letters] [ identifier[uyiridx] ]) | def splitMeiUyir(uyirmei_char):
"""
This function split uyirmei compound character into mei + uyir characters
and returns in tuple.
Input : It must be unicode tamil char.
Written By : Arulalan.T
Date : 22.09.2014
"""
if not isinstance(uyirmei_char, PYTHON3 and str or unicode):
raise ValueError("Passed input letter '%s' must be unicode, not just string" % uyirmei_char) # depends on [control=['if'], data=[]]
if uyirmei_char in mei_letters or uyirmei_char in uyir_letters or uyirmei_char in ayudha_letter:
return uyirmei_char # depends on [control=['if'], data=[]]
if uyirmei_char not in grantha_uyirmei_letters:
if not is_normalized(uyirmei_char):
norm_char = unicode_normalize(uyirmei_char)
rval = splitMeiUyir(norm_char)
return rval # depends on [control=['if'], data=[]]
raise ValueError("Passed input letter '%s' is not tamil letter" % uyirmei_char) # depends on [control=['if'], data=['uyirmei_char']]
idx = grantha_uyirmei_letters.index(uyirmei_char)
uyiridx = idx % 12
meiidx = int((idx - uyiridx) / 12)
return (grantha_mei_letters[meiidx], uyir_letters[uyiridx]) |
def empty(self):
    """ Empty a queue by deleting its redis keys in one transaction. """
    with context.connections.redis.pipeline(transaction=True) as pipe:
        for key in (self.redis_key, self.redis_key_known_subqueues):
            pipe.delete(key)
        pipe.execute()
constant[ Empty a queue. ]
with call[name[context].connections.redis.pipeline, parameter[]] begin[:]
call[name[pipe].delete, parameter[name[self].redis_key]]
call[name[pipe].delete, parameter[name[self].redis_key_known_subqueues]]
call[name[pipe].execute, parameter[]] | keyword[def] identifier[empty] ( identifier[self] ):
literal[string]
keyword[with] identifier[context] . identifier[connections] . identifier[redis] . identifier[pipeline] ( identifier[transaction] = keyword[True] ) keyword[as] identifier[pipe] :
identifier[pipe] . identifier[delete] ( identifier[self] . identifier[redis_key] )
identifier[pipe] . identifier[delete] ( identifier[self] . identifier[redis_key_known_subqueues] )
identifier[pipe] . identifier[execute] () | def empty(self):
""" Empty a queue. """
with context.connections.redis.pipeline(transaction=True) as pipe:
pipe.delete(self.redis_key)
pipe.delete(self.redis_key_known_subqueues)
pipe.execute() # depends on [control=['with'], data=['pipe']] |
def y(self):
    """ Returns the scaled y positions of the points as doubles.

    Applies the header's ``y_scale`` and ``y_offset`` to the raw
    integer ``self.Y`` values via `scale_dimension` (presumably
    value = raw * scale + offset -- confirm against its definition).
    """
    return scale_dimension(self.Y, self.header.y_scale, self.header.y_offset)
constant[ Returns the scaled y positions of the points as doubles
]
return[call[name[scale_dimension], parameter[name[self].Y, name[self].header.y_scale, name[self].header.y_offset]]] | keyword[def] identifier[y] ( identifier[self] ):
literal[string]
keyword[return] identifier[scale_dimension] ( identifier[self] . identifier[Y] , identifier[self] . identifier[header] . identifier[y_scale] , identifier[self] . identifier[header] . identifier[y_offset] ) | def y(self):
""" Returns the scaled y positions of the points as doubles
"""
return scale_dimension(self.Y, self.header.y_scale, self.header.y_offset) |
def generate_big_urls_glove(bigurls=None):
""" Generate a dictionary of URLs for various combinations of GloVe training set sizes and dimensionality """
bigurls = bigurls or {}
for num_dim in (50, 100, 200, 300):
# not all of these dimensionality, and training set size combinations were trained by Stanford
for suffixes, num_words in zip(
('sm -sm _sm -small _small'.split(),
'med -med _med -medium _medium'.split(),
'lg -lg _lg -large _large'.split()),
(6, 42, 840)
):
for suf in suffixes[:-1]:
name = 'glove' + suf + str(num_dim)
dirname = 'glove.{num_words}B'.format(num_words=num_words)
# glove.42B.300d.w2v.txt
filename = dirname + '.{num_dim}d.w2v.txt'.format(num_dim=num_dim)
# seed the alias named URL with the URL for that training set size's canonical name
bigurl_tuple = BIG_URLS['glove' + suffixes[-1]]
bigurls[name] = list(bigurl_tuple[:2])
bigurls[name].append(os.path.join(dirname, filename))
bigurls[name].append(load_glove)
bigurls[name] = tuple(bigurls[name])
return bigurls | def function[generate_big_urls_glove, parameter[bigurls]]:
constant[ Generate a dictionary of URLs for various combinations of GloVe training set sizes and dimensionality ]
variable[bigurls] assign[=] <ast.BoolOp object at 0x7da20c6c5960>
for taget[name[num_dim]] in starred[tuple[[<ast.Constant object at 0x7da20c6c5a20>, <ast.Constant object at 0x7da20c6c5150>, <ast.Constant object at 0x7da20c6c7490>, <ast.Constant object at 0x7da20c6c63b0>]]] begin[:]
for taget[tuple[[<ast.Name object at 0x7da18f58f490>, <ast.Name object at 0x7da18f58fca0>]]] in starred[call[name[zip], parameter[tuple[[<ast.Call object at 0x7da18f58e890>, <ast.Call object at 0x7da18f58f0a0>, <ast.Call object at 0x7da18f58ccd0>]], tuple[[<ast.Constant object at 0x7da18f58ce20>, <ast.Constant object at 0x7da18f58f550>, <ast.Constant object at 0x7da18f58dc60>]]]]] begin[:]
for taget[name[suf]] in starred[call[name[suffixes]][<ast.Slice object at 0x7da20c6c79d0>]] begin[:]
variable[name] assign[=] binary_operation[binary_operation[constant[glove] + name[suf]] + call[name[str], parameter[name[num_dim]]]]
variable[dirname] assign[=] call[constant[glove.{num_words}B].format, parameter[]]
variable[filename] assign[=] binary_operation[name[dirname] + call[constant[.{num_dim}d.w2v.txt].format, parameter[]]]
variable[bigurl_tuple] assign[=] call[name[BIG_URLS]][binary_operation[constant[glove] + call[name[suffixes]][<ast.UnaryOp object at 0x7da20c6c7b50>]]]
call[name[bigurls]][name[name]] assign[=] call[name[list], parameter[call[name[bigurl_tuple]][<ast.Slice object at 0x7da20c6c73a0>]]]
call[call[name[bigurls]][name[name]].append, parameter[call[name[os].path.join, parameter[name[dirname], name[filename]]]]]
call[call[name[bigurls]][name[name]].append, parameter[name[load_glove]]]
call[name[bigurls]][name[name]] assign[=] call[name[tuple], parameter[call[name[bigurls]][name[name]]]]
return[name[bigurls]] | keyword[def] identifier[generate_big_urls_glove] ( identifier[bigurls] = keyword[None] ):
literal[string]
identifier[bigurls] = identifier[bigurls] keyword[or] {}
keyword[for] identifier[num_dim] keyword[in] ( literal[int] , literal[int] , literal[int] , literal[int] ):
keyword[for] identifier[suffixes] , identifier[num_words] keyword[in] identifier[zip] (
( literal[string] . identifier[split] (),
literal[string] . identifier[split] (),
literal[string] . identifier[split] ()),
( literal[int] , literal[int] , literal[int] )
):
keyword[for] identifier[suf] keyword[in] identifier[suffixes] [:- literal[int] ]:
identifier[name] = literal[string] + identifier[suf] + identifier[str] ( identifier[num_dim] )
identifier[dirname] = literal[string] . identifier[format] ( identifier[num_words] = identifier[num_words] )
identifier[filename] = identifier[dirname] + literal[string] . identifier[format] ( identifier[num_dim] = identifier[num_dim] )
identifier[bigurl_tuple] = identifier[BIG_URLS] [ literal[string] + identifier[suffixes] [- literal[int] ]]
identifier[bigurls] [ identifier[name] ]= identifier[list] ( identifier[bigurl_tuple] [: literal[int] ])
identifier[bigurls] [ identifier[name] ]. identifier[append] ( identifier[os] . identifier[path] . identifier[join] ( identifier[dirname] , identifier[filename] ))
identifier[bigurls] [ identifier[name] ]. identifier[append] ( identifier[load_glove] )
identifier[bigurls] [ identifier[name] ]= identifier[tuple] ( identifier[bigurls] [ identifier[name] ])
keyword[return] identifier[bigurls] | def generate_big_urls_glove(bigurls=None):
""" Generate a dictionary of URLs for various combinations of GloVe training set sizes and dimensionality """
bigurls = bigurls or {}
for num_dim in (50, 100, 200, 300):
# not all of these dimensionality, and training set size combinations were trained by Stanford
for (suffixes, num_words) in zip(('sm -sm _sm -small _small'.split(), 'med -med _med -medium _medium'.split(), 'lg -lg _lg -large _large'.split()), (6, 42, 840)):
for suf in suffixes[:-1]:
name = 'glove' + suf + str(num_dim)
dirname = 'glove.{num_words}B'.format(num_words=num_words)
# glove.42B.300d.w2v.txt
filename = dirname + '.{num_dim}d.w2v.txt'.format(num_dim=num_dim)
# seed the alias named URL with the URL for that training set size's canonical name
bigurl_tuple = BIG_URLS['glove' + suffixes[-1]]
bigurls[name] = list(bigurl_tuple[:2])
bigurls[name].append(os.path.join(dirname, filename))
bigurls[name].append(load_glove)
bigurls[name] = tuple(bigurls[name]) # depends on [control=['for'], data=['suf']] # depends on [control=['for'], data=[]] # depends on [control=['for'], data=['num_dim']]
return bigurls |
def enable_pointer_type(self):
"""
If a type is a pointer, a platform-independent POINTER_T type needs
to be in the generated code.
"""
# 2015-01 reactivating header templates
#log.warning('enable_pointer_type deprecated - replaced by generate_headers')
# return # FIXME ignore
self.enable_pointer_type = lambda: True
import pkgutil
headers = pkgutil.get_data('ctypeslib', 'data/pointer_type.tpl').decode()
import ctypes
from clang.cindex import TypeKind
# assuming a LONG also has the same sizeof than a pointer.
word_size = self.parser.get_ctypes_size(TypeKind.POINTER) // 8
word_type = self.parser.get_ctypes_name(TypeKind.ULONG)
# pylint: disable=protected-access
word_char = getattr(ctypes, word_type)._type_
# replacing template values
headers = headers.replace('__POINTER_SIZE__', str(word_size))
headers = headers.replace('__REPLACEMENT_TYPE__', word_type)
headers = headers.replace('__REPLACEMENT_TYPE_CHAR__', word_char)
print(headers, file=self.imports)
return | def function[enable_pointer_type, parameter[self]]:
constant[
If a type is a pointer, a platform-independent POINTER_T type needs
to be in the generated code.
]
name[self].enable_pointer_type assign[=] <ast.Lambda object at 0x7da20c6e5000>
import module[pkgutil]
variable[headers] assign[=] call[call[name[pkgutil].get_data, parameter[constant[ctypeslib], constant[data/pointer_type.tpl]]].decode, parameter[]]
import module[ctypes]
from relative_module[clang.cindex] import module[TypeKind]
variable[word_size] assign[=] binary_operation[call[name[self].parser.get_ctypes_size, parameter[name[TypeKind].POINTER]] <ast.FloorDiv object at 0x7da2590d6bc0> constant[8]]
variable[word_type] assign[=] call[name[self].parser.get_ctypes_name, parameter[name[TypeKind].ULONG]]
variable[word_char] assign[=] call[name[getattr], parameter[name[ctypes], name[word_type]]]._type_
variable[headers] assign[=] call[name[headers].replace, parameter[constant[__POINTER_SIZE__], call[name[str], parameter[name[word_size]]]]]
variable[headers] assign[=] call[name[headers].replace, parameter[constant[__REPLACEMENT_TYPE__], name[word_type]]]
variable[headers] assign[=] call[name[headers].replace, parameter[constant[__REPLACEMENT_TYPE_CHAR__], name[word_char]]]
call[name[print], parameter[name[headers]]]
return[None] | keyword[def] identifier[enable_pointer_type] ( identifier[self] ):
literal[string]
identifier[self] . identifier[enable_pointer_type] = keyword[lambda] : keyword[True]
keyword[import] identifier[pkgutil]
identifier[headers] = identifier[pkgutil] . identifier[get_data] ( literal[string] , literal[string] ). identifier[decode] ()
keyword[import] identifier[ctypes]
keyword[from] identifier[clang] . identifier[cindex] keyword[import] identifier[TypeKind]
identifier[word_size] = identifier[self] . identifier[parser] . identifier[get_ctypes_size] ( identifier[TypeKind] . identifier[POINTER] )// literal[int]
identifier[word_type] = identifier[self] . identifier[parser] . identifier[get_ctypes_name] ( identifier[TypeKind] . identifier[ULONG] )
identifier[word_char] = identifier[getattr] ( identifier[ctypes] , identifier[word_type] ). identifier[_type_]
identifier[headers] = identifier[headers] . identifier[replace] ( literal[string] , identifier[str] ( identifier[word_size] ))
identifier[headers] = identifier[headers] . identifier[replace] ( literal[string] , identifier[word_type] )
identifier[headers] = identifier[headers] . identifier[replace] ( literal[string] , identifier[word_char] )
identifier[print] ( identifier[headers] , identifier[file] = identifier[self] . identifier[imports] )
keyword[return] | def enable_pointer_type(self):
"""
If a type is a pointer, a platform-independent POINTER_T type needs
to be in the generated code.
"""
# 2015-01 reactivating header templates
#log.warning('enable_pointer_type deprecated - replaced by generate_headers')
# return # FIXME ignore
self.enable_pointer_type = lambda : True
import pkgutil
headers = pkgutil.get_data('ctypeslib', 'data/pointer_type.tpl').decode()
import ctypes
from clang.cindex import TypeKind
# assuming a LONG also has the same sizeof than a pointer.
word_size = self.parser.get_ctypes_size(TypeKind.POINTER) // 8
word_type = self.parser.get_ctypes_name(TypeKind.ULONG)
# pylint: disable=protected-access
word_char = getattr(ctypes, word_type)._type_
# replacing template values
headers = headers.replace('__POINTER_SIZE__', str(word_size))
headers = headers.replace('__REPLACEMENT_TYPE__', word_type)
headers = headers.replace('__REPLACEMENT_TYPE_CHAR__', word_char)
print(headers, file=self.imports)
return |
def setFixedHeight(self, height):
"""
Sets the maximum height value to the inputed height and emits the \
sizeConstraintChanged signal.
:param height | <int>
"""
super(XView, self).setFixedHeight(height)
if ( not self.signalsBlocked() ):
self.sizeConstraintChanged.emit() | def function[setFixedHeight, parameter[self, height]]:
constant[
Sets the maximum height value to the inputed height and emits the sizeConstraintChanged signal.
:param height | <int>
]
call[call[name[super], parameter[name[XView], name[self]]].setFixedHeight, parameter[name[height]]]
if <ast.UnaryOp object at 0x7da2041db010> begin[:]
call[name[self].sizeConstraintChanged.emit, parameter[]] | keyword[def] identifier[setFixedHeight] ( identifier[self] , identifier[height] ):
literal[string]
identifier[super] ( identifier[XView] , identifier[self] ). identifier[setFixedHeight] ( identifier[height] )
keyword[if] ( keyword[not] identifier[self] . identifier[signalsBlocked] ()):
identifier[self] . identifier[sizeConstraintChanged] . identifier[emit] () | def setFixedHeight(self, height):
"""
Sets the maximum height value to the inputed height and emits the sizeConstraintChanged signal.
:param height | <int>
"""
super(XView, self).setFixedHeight(height)
if not self.signalsBlocked():
self.sizeConstraintChanged.emit() # depends on [control=['if'], data=[]] |
def _sync_content_metadata(self, serialized_data):
"""
Create/update/delete content metadata records using the SuccessFactors OCN Course Import API endpoint.
Arguments:
serialized_data: Serialized JSON string representing a list of content metadata items.
Raises:
ClientError: If SuccessFactors API call fails.
"""
url = self.enterprise_configuration.sapsf_base_url + self.global_sap_config.course_api_path
try:
status_code, response_body = self._call_post_with_session(url, serialized_data)
except requests.exceptions.RequestException as exc:
raise ClientError(
'SAPSuccessFactorsAPIClient request failed: {error} {message}'.format(
error=exc.__class__.__name__,
message=str(exc)
)
)
if status_code >= 400:
raise ClientError(
'SAPSuccessFactorsAPIClient request failed with status {status_code}: {message}'.format(
status_code=status_code,
message=response_body
)
) | def function[_sync_content_metadata, parameter[self, serialized_data]]:
constant[
Create/update/delete content metadata records using the SuccessFactors OCN Course Import API endpoint.
Arguments:
serialized_data: Serialized JSON string representing a list of content metadata items.
Raises:
ClientError: If SuccessFactors API call fails.
]
variable[url] assign[=] binary_operation[name[self].enterprise_configuration.sapsf_base_url + name[self].global_sap_config.course_api_path]
<ast.Try object at 0x7da1b0050460>
if compare[name[status_code] greater_or_equal[>=] constant[400]] begin[:]
<ast.Raise object at 0x7da1b0053cd0> | keyword[def] identifier[_sync_content_metadata] ( identifier[self] , identifier[serialized_data] ):
literal[string]
identifier[url] = identifier[self] . identifier[enterprise_configuration] . identifier[sapsf_base_url] + identifier[self] . identifier[global_sap_config] . identifier[course_api_path]
keyword[try] :
identifier[status_code] , identifier[response_body] = identifier[self] . identifier[_call_post_with_session] ( identifier[url] , identifier[serialized_data] )
keyword[except] identifier[requests] . identifier[exceptions] . identifier[RequestException] keyword[as] identifier[exc] :
keyword[raise] identifier[ClientError] (
literal[string] . identifier[format] (
identifier[error] = identifier[exc] . identifier[__class__] . identifier[__name__] ,
identifier[message] = identifier[str] ( identifier[exc] )
)
)
keyword[if] identifier[status_code] >= literal[int] :
keyword[raise] identifier[ClientError] (
literal[string] . identifier[format] (
identifier[status_code] = identifier[status_code] ,
identifier[message] = identifier[response_body]
)
) | def _sync_content_metadata(self, serialized_data):
"""
Create/update/delete content metadata records using the SuccessFactors OCN Course Import API endpoint.
Arguments:
serialized_data: Serialized JSON string representing a list of content metadata items.
Raises:
ClientError: If SuccessFactors API call fails.
"""
url = self.enterprise_configuration.sapsf_base_url + self.global_sap_config.course_api_path
try:
(status_code, response_body) = self._call_post_with_session(url, serialized_data) # depends on [control=['try'], data=[]]
except requests.exceptions.RequestException as exc:
raise ClientError('SAPSuccessFactorsAPIClient request failed: {error} {message}'.format(error=exc.__class__.__name__, message=str(exc))) # depends on [control=['except'], data=['exc']]
if status_code >= 400:
raise ClientError('SAPSuccessFactorsAPIClient request failed with status {status_code}: {message}'.format(status_code=status_code, message=response_body)) # depends on [control=['if'], data=['status_code']] |
def pyramid(
input_raster,
output_dir,
pyramid_type=None,
output_format=None,
resampling_method=None,
scale_method=None,
zoom=None,
bounds=None,
overwrite=False,
debug=False
):
"""Create tile pyramid out of input raster."""
bounds = bounds if bounds else None
options = dict(
pyramid_type=pyramid_type,
scale_method=scale_method,
output_format=output_format,
resampling=resampling_method,
zoom=zoom,
bounds=bounds,
overwrite=overwrite
)
raster2pyramid(input_raster, output_dir, options) | def function[pyramid, parameter[input_raster, output_dir, pyramid_type, output_format, resampling_method, scale_method, zoom, bounds, overwrite, debug]]:
constant[Create tile pyramid out of input raster.]
variable[bounds] assign[=] <ast.IfExp object at 0x7da20c990400>
variable[options] assign[=] call[name[dict], parameter[]]
call[name[raster2pyramid], parameter[name[input_raster], name[output_dir], name[options]]] | keyword[def] identifier[pyramid] (
identifier[input_raster] ,
identifier[output_dir] ,
identifier[pyramid_type] = keyword[None] ,
identifier[output_format] = keyword[None] ,
identifier[resampling_method] = keyword[None] ,
identifier[scale_method] = keyword[None] ,
identifier[zoom] = keyword[None] ,
identifier[bounds] = keyword[None] ,
identifier[overwrite] = keyword[False] ,
identifier[debug] = keyword[False]
):
literal[string]
identifier[bounds] = identifier[bounds] keyword[if] identifier[bounds] keyword[else] keyword[None]
identifier[options] = identifier[dict] (
identifier[pyramid_type] = identifier[pyramid_type] ,
identifier[scale_method] = identifier[scale_method] ,
identifier[output_format] = identifier[output_format] ,
identifier[resampling] = identifier[resampling_method] ,
identifier[zoom] = identifier[zoom] ,
identifier[bounds] = identifier[bounds] ,
identifier[overwrite] = identifier[overwrite]
)
identifier[raster2pyramid] ( identifier[input_raster] , identifier[output_dir] , identifier[options] ) | def pyramid(input_raster, output_dir, pyramid_type=None, output_format=None, resampling_method=None, scale_method=None, zoom=None, bounds=None, overwrite=False, debug=False):
"""Create tile pyramid out of input raster."""
bounds = bounds if bounds else None
options = dict(pyramid_type=pyramid_type, scale_method=scale_method, output_format=output_format, resampling=resampling_method, zoom=zoom, bounds=bounds, overwrite=overwrite)
raster2pyramid(input_raster, output_dir, options) |
def get_symbol(num_classes=1000, **kwargs):
"""
VGG 16 layers network
This is a modified version, with fc6/fc7 layers replaced by conv layers
And the network is slightly smaller than original VGG 16 network
"""
data = mx.symbol.Variable(name="data")
label = mx.symbol.Variable(name="label")
# group 1
conv1_1 = mx.symbol.Convolution(
data=data, kernel=(3, 3), pad=(1, 1), num_filter=64, name="conv1_1")
relu1_1 = mx.symbol.Activation(data=conv1_1, act_type="relu", name="relu1_1")
conv1_2 = mx.symbol.Convolution(
data=relu1_1, kernel=(3, 3), pad=(1, 1), num_filter=64, name="conv1_2")
relu1_2 = mx.symbol.Activation(data=conv1_2, act_type="relu", name="relu1_2")
pool1 = mx.symbol.Pooling(
data=relu1_2, pool_type="max", kernel=(2, 2), stride=(2, 2), name="pool1")
# group 2
conv2_1 = mx.symbol.Convolution(
data=pool1, kernel=(3, 3), pad=(1, 1), num_filter=128, name="conv2_1")
relu2_1 = mx.symbol.Activation(data=conv2_1, act_type="relu", name="relu2_1")
conv2_2 = mx.symbol.Convolution(
data=relu2_1, kernel=(3, 3), pad=(1, 1), num_filter=128, name="conv2_2")
relu2_2 = mx.symbol.Activation(data=conv2_2, act_type="relu", name="relu2_2")
pool2 = mx.symbol.Pooling(
data=relu2_2, pool_type="max", kernel=(2, 2), stride=(2, 2), name="pool2")
# group 3
conv3_1 = mx.symbol.Convolution(
data=pool2, kernel=(3, 3), pad=(1, 1), num_filter=256, name="conv3_1")
relu3_1 = mx.symbol.Activation(data=conv3_1, act_type="relu", name="relu3_1")
conv3_2 = mx.symbol.Convolution(
data=relu3_1, kernel=(3, 3), pad=(1, 1), num_filter=256, name="conv3_2")
relu3_2 = mx.symbol.Activation(data=conv3_2, act_type="relu", name="relu3_2")
conv3_3 = mx.symbol.Convolution(
data=relu3_2, kernel=(3, 3), pad=(1, 1), num_filter=256, name="conv3_3")
relu3_3 = mx.symbol.Activation(data=conv3_3, act_type="relu", name="relu3_3")
pool3 = mx.symbol.Pooling(
data=relu3_3, pool_type="max", kernel=(2, 2), stride=(2, 2), \
pooling_convention="full", name="pool3")
# group 4
conv4_1 = mx.symbol.Convolution(
data=pool3, kernel=(3, 3), pad=(1, 1), num_filter=512, name="conv4_1")
relu4_1 = mx.symbol.Activation(data=conv4_1, act_type="relu", name="relu4_1")
conv4_2 = mx.symbol.Convolution(
data=relu4_1, kernel=(3, 3), pad=(1, 1), num_filter=512, name="conv4_2")
relu4_2 = mx.symbol.Activation(data=conv4_2, act_type="relu", name="relu4_2")
conv4_3 = mx.symbol.Convolution(
data=relu4_2, kernel=(3, 3), pad=(1, 1), num_filter=512, name="conv4_3")
relu4_3 = mx.symbol.Activation(data=conv4_3, act_type="relu", name="relu4_3")
pool4 = mx.symbol.Pooling(
data=relu4_3, pool_type="max", kernel=(2, 2), stride=(2, 2), name="pool4")
# group 5
conv5_1 = mx.symbol.Convolution(
data=pool4, kernel=(3, 3), pad=(1, 1), num_filter=512, name="conv5_1")
relu5_1 = mx.symbol.Activation(data=conv5_1, act_type="relu", name="relu5_1")
conv5_2 = mx.symbol.Convolution(
data=relu5_1, kernel=(3, 3), pad=(1, 1), num_filter=512, name="conv5_2")
relu5_2 = mx.symbol.Activation(data=conv5_2, act_type="relu", name="relu5_2")
conv5_3 = mx.symbol.Convolution(
data=relu5_2, kernel=(3, 3), pad=(1, 1), num_filter=512, name="conv5_3")
relu5_3 = mx.symbol.Activation(data=conv5_3, act_type="relu", name="relu5_3")
pool5 = mx.symbol.Pooling(
data=relu5_3, pool_type="max", kernel=(3, 3), stride=(1, 1),
pad=(1,1), name="pool5")
# group 6
conv6 = mx.symbol.Convolution(
data=pool5, kernel=(3, 3), pad=(6, 6), dilate=(6, 6),
num_filter=1024, name="fc6")
relu6 = mx.symbol.Activation(data=conv6, act_type="relu", name="relu6")
# drop6 = mx.symbol.Dropout(data=relu6, p=0.5, name="drop6")
# group 7
conv7 = mx.symbol.Convolution(
data=relu6, kernel=(1, 1), pad=(0, 0), num_filter=1024, name="fc7")
relu7 = mx.symbol.Activation(data=conv7, act_type="relu", name="relu7")
# drop7 = mx.symbol.Dropout(data=relu7, p=0.5, name="drop7")
gpool = mx.symbol.Pooling(data=relu7, pool_type='avg', kernel=(7, 7),
global_pool=True, name='global_pool')
conv8 = mx.symbol.Convolution(data=gpool, num_filter=num_classes, kernel=(1, 1),
name='fc8')
flat = mx.symbol.Flatten(data=conv8)
softmax = mx.symbol.SoftmaxOutput(data=flat, name='softmax')
return softmax | def function[get_symbol, parameter[num_classes]]:
constant[
VGG 16 layers network
This is a modified version, with fc6/fc7 layers replaced by conv layers
And the network is slightly smaller than original VGG 16 network
]
variable[data] assign[=] call[name[mx].symbol.Variable, parameter[]]
variable[label] assign[=] call[name[mx].symbol.Variable, parameter[]]
variable[conv1_1] assign[=] call[name[mx].symbol.Convolution, parameter[]]
variable[relu1_1] assign[=] call[name[mx].symbol.Activation, parameter[]]
variable[conv1_2] assign[=] call[name[mx].symbol.Convolution, parameter[]]
variable[relu1_2] assign[=] call[name[mx].symbol.Activation, parameter[]]
variable[pool1] assign[=] call[name[mx].symbol.Pooling, parameter[]]
variable[conv2_1] assign[=] call[name[mx].symbol.Convolution, parameter[]]
variable[relu2_1] assign[=] call[name[mx].symbol.Activation, parameter[]]
variable[conv2_2] assign[=] call[name[mx].symbol.Convolution, parameter[]]
variable[relu2_2] assign[=] call[name[mx].symbol.Activation, parameter[]]
variable[pool2] assign[=] call[name[mx].symbol.Pooling, parameter[]]
variable[conv3_1] assign[=] call[name[mx].symbol.Convolution, parameter[]]
variable[relu3_1] assign[=] call[name[mx].symbol.Activation, parameter[]]
variable[conv3_2] assign[=] call[name[mx].symbol.Convolution, parameter[]]
variable[relu3_2] assign[=] call[name[mx].symbol.Activation, parameter[]]
variable[conv3_3] assign[=] call[name[mx].symbol.Convolution, parameter[]]
variable[relu3_3] assign[=] call[name[mx].symbol.Activation, parameter[]]
variable[pool3] assign[=] call[name[mx].symbol.Pooling, parameter[]]
variable[conv4_1] assign[=] call[name[mx].symbol.Convolution, parameter[]]
variable[relu4_1] assign[=] call[name[mx].symbol.Activation, parameter[]]
variable[conv4_2] assign[=] call[name[mx].symbol.Convolution, parameter[]]
variable[relu4_2] assign[=] call[name[mx].symbol.Activation, parameter[]]
variable[conv4_3] assign[=] call[name[mx].symbol.Convolution, parameter[]]
variable[relu4_3] assign[=] call[name[mx].symbol.Activation, parameter[]]
variable[pool4] assign[=] call[name[mx].symbol.Pooling, parameter[]]
variable[conv5_1] assign[=] call[name[mx].symbol.Convolution, parameter[]]
variable[relu5_1] assign[=] call[name[mx].symbol.Activation, parameter[]]
variable[conv5_2] assign[=] call[name[mx].symbol.Convolution, parameter[]]
variable[relu5_2] assign[=] call[name[mx].symbol.Activation, parameter[]]
variable[conv5_3] assign[=] call[name[mx].symbol.Convolution, parameter[]]
variable[relu5_3] assign[=] call[name[mx].symbol.Activation, parameter[]]
variable[pool5] assign[=] call[name[mx].symbol.Pooling, parameter[]]
variable[conv6] assign[=] call[name[mx].symbol.Convolution, parameter[]]
variable[relu6] assign[=] call[name[mx].symbol.Activation, parameter[]]
variable[conv7] assign[=] call[name[mx].symbol.Convolution, parameter[]]
variable[relu7] assign[=] call[name[mx].symbol.Activation, parameter[]]
variable[gpool] assign[=] call[name[mx].symbol.Pooling, parameter[]]
variable[conv8] assign[=] call[name[mx].symbol.Convolution, parameter[]]
variable[flat] assign[=] call[name[mx].symbol.Flatten, parameter[]]
variable[softmax] assign[=] call[name[mx].symbol.SoftmaxOutput, parameter[]]
return[name[softmax]] | keyword[def] identifier[get_symbol] ( identifier[num_classes] = literal[int] ,** identifier[kwargs] ):
literal[string]
identifier[data] = identifier[mx] . identifier[symbol] . identifier[Variable] ( identifier[name] = literal[string] )
identifier[label] = identifier[mx] . identifier[symbol] . identifier[Variable] ( identifier[name] = literal[string] )
identifier[conv1_1] = identifier[mx] . identifier[symbol] . identifier[Convolution] (
identifier[data] = identifier[data] , identifier[kernel] =( literal[int] , literal[int] ), identifier[pad] =( literal[int] , literal[int] ), identifier[num_filter] = literal[int] , identifier[name] = literal[string] )
identifier[relu1_1] = identifier[mx] . identifier[symbol] . identifier[Activation] ( identifier[data] = identifier[conv1_1] , identifier[act_type] = literal[string] , identifier[name] = literal[string] )
identifier[conv1_2] = identifier[mx] . identifier[symbol] . identifier[Convolution] (
identifier[data] = identifier[relu1_1] , identifier[kernel] =( literal[int] , literal[int] ), identifier[pad] =( literal[int] , literal[int] ), identifier[num_filter] = literal[int] , identifier[name] = literal[string] )
identifier[relu1_2] = identifier[mx] . identifier[symbol] . identifier[Activation] ( identifier[data] = identifier[conv1_2] , identifier[act_type] = literal[string] , identifier[name] = literal[string] )
identifier[pool1] = identifier[mx] . identifier[symbol] . identifier[Pooling] (
identifier[data] = identifier[relu1_2] , identifier[pool_type] = literal[string] , identifier[kernel] =( literal[int] , literal[int] ), identifier[stride] =( literal[int] , literal[int] ), identifier[name] = literal[string] )
identifier[conv2_1] = identifier[mx] . identifier[symbol] . identifier[Convolution] (
identifier[data] = identifier[pool1] , identifier[kernel] =( literal[int] , literal[int] ), identifier[pad] =( literal[int] , literal[int] ), identifier[num_filter] = literal[int] , identifier[name] = literal[string] )
identifier[relu2_1] = identifier[mx] . identifier[symbol] . identifier[Activation] ( identifier[data] = identifier[conv2_1] , identifier[act_type] = literal[string] , identifier[name] = literal[string] )
identifier[conv2_2] = identifier[mx] . identifier[symbol] . identifier[Convolution] (
identifier[data] = identifier[relu2_1] , identifier[kernel] =( literal[int] , literal[int] ), identifier[pad] =( literal[int] , literal[int] ), identifier[num_filter] = literal[int] , identifier[name] = literal[string] )
identifier[relu2_2] = identifier[mx] . identifier[symbol] . identifier[Activation] ( identifier[data] = identifier[conv2_2] , identifier[act_type] = literal[string] , identifier[name] = literal[string] )
identifier[pool2] = identifier[mx] . identifier[symbol] . identifier[Pooling] (
identifier[data] = identifier[relu2_2] , identifier[pool_type] = literal[string] , identifier[kernel] =( literal[int] , literal[int] ), identifier[stride] =( literal[int] , literal[int] ), identifier[name] = literal[string] )
identifier[conv3_1] = identifier[mx] . identifier[symbol] . identifier[Convolution] (
identifier[data] = identifier[pool2] , identifier[kernel] =( literal[int] , literal[int] ), identifier[pad] =( literal[int] , literal[int] ), identifier[num_filter] = literal[int] , identifier[name] = literal[string] )
identifier[relu3_1] = identifier[mx] . identifier[symbol] . identifier[Activation] ( identifier[data] = identifier[conv3_1] , identifier[act_type] = literal[string] , identifier[name] = literal[string] )
identifier[conv3_2] = identifier[mx] . identifier[symbol] . identifier[Convolution] (
identifier[data] = identifier[relu3_1] , identifier[kernel] =( literal[int] , literal[int] ), identifier[pad] =( literal[int] , literal[int] ), identifier[num_filter] = literal[int] , identifier[name] = literal[string] )
identifier[relu3_2] = identifier[mx] . identifier[symbol] . identifier[Activation] ( identifier[data] = identifier[conv3_2] , identifier[act_type] = literal[string] , identifier[name] = literal[string] )
identifier[conv3_3] = identifier[mx] . identifier[symbol] . identifier[Convolution] (
identifier[data] = identifier[relu3_2] , identifier[kernel] =( literal[int] , literal[int] ), identifier[pad] =( literal[int] , literal[int] ), identifier[num_filter] = literal[int] , identifier[name] = literal[string] )
identifier[relu3_3] = identifier[mx] . identifier[symbol] . identifier[Activation] ( identifier[data] = identifier[conv3_3] , identifier[act_type] = literal[string] , identifier[name] = literal[string] )
identifier[pool3] = identifier[mx] . identifier[symbol] . identifier[Pooling] (
identifier[data] = identifier[relu3_3] , identifier[pool_type] = literal[string] , identifier[kernel] =( literal[int] , literal[int] ), identifier[stride] =( literal[int] , literal[int] ), identifier[pooling_convention] = literal[string] , identifier[name] = literal[string] )
identifier[conv4_1] = identifier[mx] . identifier[symbol] . identifier[Convolution] (
identifier[data] = identifier[pool3] , identifier[kernel] =( literal[int] , literal[int] ), identifier[pad] =( literal[int] , literal[int] ), identifier[num_filter] = literal[int] , identifier[name] = literal[string] )
identifier[relu4_1] = identifier[mx] . identifier[symbol] . identifier[Activation] ( identifier[data] = identifier[conv4_1] , identifier[act_type] = literal[string] , identifier[name] = literal[string] )
identifier[conv4_2] = identifier[mx] . identifier[symbol] . identifier[Convolution] (
identifier[data] = identifier[relu4_1] , identifier[kernel] =( literal[int] , literal[int] ), identifier[pad] =( literal[int] , literal[int] ), identifier[num_filter] = literal[int] , identifier[name] = literal[string] )
identifier[relu4_2] = identifier[mx] . identifier[symbol] . identifier[Activation] ( identifier[data] = identifier[conv4_2] , identifier[act_type] = literal[string] , identifier[name] = literal[string] )
identifier[conv4_3] = identifier[mx] . identifier[symbol] . identifier[Convolution] (
identifier[data] = identifier[relu4_2] , identifier[kernel] =( literal[int] , literal[int] ), identifier[pad] =( literal[int] , literal[int] ), identifier[num_filter] = literal[int] , identifier[name] = literal[string] )
identifier[relu4_3] = identifier[mx] . identifier[symbol] . identifier[Activation] ( identifier[data] = identifier[conv4_3] , identifier[act_type] = literal[string] , identifier[name] = literal[string] )
identifier[pool4] = identifier[mx] . identifier[symbol] . identifier[Pooling] (
identifier[data] = identifier[relu4_3] , identifier[pool_type] = literal[string] , identifier[kernel] =( literal[int] , literal[int] ), identifier[stride] =( literal[int] , literal[int] ), identifier[name] = literal[string] )
identifier[conv5_1] = identifier[mx] . identifier[symbol] . identifier[Convolution] (
identifier[data] = identifier[pool4] , identifier[kernel] =( literal[int] , literal[int] ), identifier[pad] =( literal[int] , literal[int] ), identifier[num_filter] = literal[int] , identifier[name] = literal[string] )
identifier[relu5_1] = identifier[mx] . identifier[symbol] . identifier[Activation] ( identifier[data] = identifier[conv5_1] , identifier[act_type] = literal[string] , identifier[name] = literal[string] )
identifier[conv5_2] = identifier[mx] . identifier[symbol] . identifier[Convolution] (
identifier[data] = identifier[relu5_1] , identifier[kernel] =( literal[int] , literal[int] ), identifier[pad] =( literal[int] , literal[int] ), identifier[num_filter] = literal[int] , identifier[name] = literal[string] )
identifier[relu5_2] = identifier[mx] . identifier[symbol] . identifier[Activation] ( identifier[data] = identifier[conv5_2] , identifier[act_type] = literal[string] , identifier[name] = literal[string] )
identifier[conv5_3] = identifier[mx] . identifier[symbol] . identifier[Convolution] (
identifier[data] = identifier[relu5_2] , identifier[kernel] =( literal[int] , literal[int] ), identifier[pad] =( literal[int] , literal[int] ), identifier[num_filter] = literal[int] , identifier[name] = literal[string] )
identifier[relu5_3] = identifier[mx] . identifier[symbol] . identifier[Activation] ( identifier[data] = identifier[conv5_3] , identifier[act_type] = literal[string] , identifier[name] = literal[string] )
identifier[pool5] = identifier[mx] . identifier[symbol] . identifier[Pooling] (
identifier[data] = identifier[relu5_3] , identifier[pool_type] = literal[string] , identifier[kernel] =( literal[int] , literal[int] ), identifier[stride] =( literal[int] , literal[int] ),
identifier[pad] =( literal[int] , literal[int] ), identifier[name] = literal[string] )
identifier[conv6] = identifier[mx] . identifier[symbol] . identifier[Convolution] (
identifier[data] = identifier[pool5] , identifier[kernel] =( literal[int] , literal[int] ), identifier[pad] =( literal[int] , literal[int] ), identifier[dilate] =( literal[int] , literal[int] ),
identifier[num_filter] = literal[int] , identifier[name] = literal[string] )
identifier[relu6] = identifier[mx] . identifier[symbol] . identifier[Activation] ( identifier[data] = identifier[conv6] , identifier[act_type] = literal[string] , identifier[name] = literal[string] )
identifier[conv7] = identifier[mx] . identifier[symbol] . identifier[Convolution] (
identifier[data] = identifier[relu6] , identifier[kernel] =( literal[int] , literal[int] ), identifier[pad] =( literal[int] , literal[int] ), identifier[num_filter] = literal[int] , identifier[name] = literal[string] )
identifier[relu7] = identifier[mx] . identifier[symbol] . identifier[Activation] ( identifier[data] = identifier[conv7] , identifier[act_type] = literal[string] , identifier[name] = literal[string] )
identifier[gpool] = identifier[mx] . identifier[symbol] . identifier[Pooling] ( identifier[data] = identifier[relu7] , identifier[pool_type] = literal[string] , identifier[kernel] =( literal[int] , literal[int] ),
identifier[global_pool] = keyword[True] , identifier[name] = literal[string] )
identifier[conv8] = identifier[mx] . identifier[symbol] . identifier[Convolution] ( identifier[data] = identifier[gpool] , identifier[num_filter] = identifier[num_classes] , identifier[kernel] =( literal[int] , literal[int] ),
identifier[name] = literal[string] )
identifier[flat] = identifier[mx] . identifier[symbol] . identifier[Flatten] ( identifier[data] = identifier[conv8] )
identifier[softmax] = identifier[mx] . identifier[symbol] . identifier[SoftmaxOutput] ( identifier[data] = identifier[flat] , identifier[name] = literal[string] )
keyword[return] identifier[softmax] | def get_symbol(num_classes=1000, **kwargs):
"""
VGG 16 layers network
This is a modified version, with fc6/fc7 layers replaced by conv layers
And the network is slightly smaller than original VGG 16 network
"""
data = mx.symbol.Variable(name='data')
label = mx.symbol.Variable(name='label')
# group 1
conv1_1 = mx.symbol.Convolution(data=data, kernel=(3, 3), pad=(1, 1), num_filter=64, name='conv1_1')
relu1_1 = mx.symbol.Activation(data=conv1_1, act_type='relu', name='relu1_1')
conv1_2 = mx.symbol.Convolution(data=relu1_1, kernel=(3, 3), pad=(1, 1), num_filter=64, name='conv1_2')
relu1_2 = mx.symbol.Activation(data=conv1_2, act_type='relu', name='relu1_2')
pool1 = mx.symbol.Pooling(data=relu1_2, pool_type='max', kernel=(2, 2), stride=(2, 2), name='pool1')
# group 2
conv2_1 = mx.symbol.Convolution(data=pool1, kernel=(3, 3), pad=(1, 1), num_filter=128, name='conv2_1')
relu2_1 = mx.symbol.Activation(data=conv2_1, act_type='relu', name='relu2_1')
conv2_2 = mx.symbol.Convolution(data=relu2_1, kernel=(3, 3), pad=(1, 1), num_filter=128, name='conv2_2')
relu2_2 = mx.symbol.Activation(data=conv2_2, act_type='relu', name='relu2_2')
pool2 = mx.symbol.Pooling(data=relu2_2, pool_type='max', kernel=(2, 2), stride=(2, 2), name='pool2')
# group 3
conv3_1 = mx.symbol.Convolution(data=pool2, kernel=(3, 3), pad=(1, 1), num_filter=256, name='conv3_1')
relu3_1 = mx.symbol.Activation(data=conv3_1, act_type='relu', name='relu3_1')
conv3_2 = mx.symbol.Convolution(data=relu3_1, kernel=(3, 3), pad=(1, 1), num_filter=256, name='conv3_2')
relu3_2 = mx.symbol.Activation(data=conv3_2, act_type='relu', name='relu3_2')
conv3_3 = mx.symbol.Convolution(data=relu3_2, kernel=(3, 3), pad=(1, 1), num_filter=256, name='conv3_3')
relu3_3 = mx.symbol.Activation(data=conv3_3, act_type='relu', name='relu3_3')
pool3 = mx.symbol.Pooling(data=relu3_3, pool_type='max', kernel=(2, 2), stride=(2, 2), pooling_convention='full', name='pool3')
# group 4
conv4_1 = mx.symbol.Convolution(data=pool3, kernel=(3, 3), pad=(1, 1), num_filter=512, name='conv4_1')
relu4_1 = mx.symbol.Activation(data=conv4_1, act_type='relu', name='relu4_1')
conv4_2 = mx.symbol.Convolution(data=relu4_1, kernel=(3, 3), pad=(1, 1), num_filter=512, name='conv4_2')
relu4_2 = mx.symbol.Activation(data=conv4_2, act_type='relu', name='relu4_2')
conv4_3 = mx.symbol.Convolution(data=relu4_2, kernel=(3, 3), pad=(1, 1), num_filter=512, name='conv4_3')
relu4_3 = mx.symbol.Activation(data=conv4_3, act_type='relu', name='relu4_3')
pool4 = mx.symbol.Pooling(data=relu4_3, pool_type='max', kernel=(2, 2), stride=(2, 2), name='pool4')
# group 5
conv5_1 = mx.symbol.Convolution(data=pool4, kernel=(3, 3), pad=(1, 1), num_filter=512, name='conv5_1')
relu5_1 = mx.symbol.Activation(data=conv5_1, act_type='relu', name='relu5_1')
conv5_2 = mx.symbol.Convolution(data=relu5_1, kernel=(3, 3), pad=(1, 1), num_filter=512, name='conv5_2')
relu5_2 = mx.symbol.Activation(data=conv5_2, act_type='relu', name='relu5_2')
conv5_3 = mx.symbol.Convolution(data=relu5_2, kernel=(3, 3), pad=(1, 1), num_filter=512, name='conv5_3')
relu5_3 = mx.symbol.Activation(data=conv5_3, act_type='relu', name='relu5_3')
pool5 = mx.symbol.Pooling(data=relu5_3, pool_type='max', kernel=(3, 3), stride=(1, 1), pad=(1, 1), name='pool5')
# group 6
conv6 = mx.symbol.Convolution(data=pool5, kernel=(3, 3), pad=(6, 6), dilate=(6, 6), num_filter=1024, name='fc6')
relu6 = mx.symbol.Activation(data=conv6, act_type='relu', name='relu6')
# drop6 = mx.symbol.Dropout(data=relu6, p=0.5, name="drop6")
# group 7
conv7 = mx.symbol.Convolution(data=relu6, kernel=(1, 1), pad=(0, 0), num_filter=1024, name='fc7')
relu7 = mx.symbol.Activation(data=conv7, act_type='relu', name='relu7')
# drop7 = mx.symbol.Dropout(data=relu7, p=0.5, name="drop7")
gpool = mx.symbol.Pooling(data=relu7, pool_type='avg', kernel=(7, 7), global_pool=True, name='global_pool')
conv8 = mx.symbol.Convolution(data=gpool, num_filter=num_classes, kernel=(1, 1), name='fc8')
flat = mx.symbol.Flatten(data=conv8)
softmax = mx.symbol.SoftmaxOutput(data=flat, name='softmax')
return softmax |
def close(self):
"""
This method will close the transport (serial port) and exit
:return: No return value, but sys.exit(0) is called.
"""
self._command_handler.system_reset()
self._command_handler.stop()
self.transport.stop()
self.transport.close()
if self.verbose:
print("PyMata close(): Calling sys.exit(0): Hope to see you soon!")
sys.exit(0) | def function[close, parameter[self]]:
constant[
This method will close the transport (serial port) and exit
:return: No return value, but sys.exit(0) is called.
]
call[name[self]._command_handler.system_reset, parameter[]]
call[name[self]._command_handler.stop, parameter[]]
call[name[self].transport.stop, parameter[]]
call[name[self].transport.close, parameter[]]
if name[self].verbose begin[:]
call[name[print], parameter[constant[PyMata close(): Calling sys.exit(0): Hope to see you soon!]]]
call[name[sys].exit, parameter[constant[0]]] | keyword[def] identifier[close] ( identifier[self] ):
literal[string]
identifier[self] . identifier[_command_handler] . identifier[system_reset] ()
identifier[self] . identifier[_command_handler] . identifier[stop] ()
identifier[self] . identifier[transport] . identifier[stop] ()
identifier[self] . identifier[transport] . identifier[close] ()
keyword[if] identifier[self] . identifier[verbose] :
identifier[print] ( literal[string] )
identifier[sys] . identifier[exit] ( literal[int] ) | def close(self):
"""
This method will close the transport (serial port) and exit
:return: No return value, but sys.exit(0) is called.
"""
self._command_handler.system_reset()
self._command_handler.stop()
self.transport.stop()
self.transport.close()
if self.verbose:
print('PyMata close(): Calling sys.exit(0): Hope to see you soon!') # depends on [control=['if'], data=[]]
sys.exit(0) |
def _is_locked(self):
'''
Checks to see if we are already pulling items from the queue
'''
if os.path.isfile(self._lck):
try:
import psutil
except ImportError:
return True #Lock file exists and no psutil
#If psutil is imported
with open(self._lck) as f:
pid = f.read()
return True if psutil.pid_exists(int(pid)) else False
else:
return False | def function[_is_locked, parameter[self]]:
constant[
Checks to see if we are already pulling items from the queue
]
if call[name[os].path.isfile, parameter[name[self]._lck]] begin[:]
<ast.Try object at 0x7da1b26ace80>
with call[name[open], parameter[name[self]._lck]] begin[:]
variable[pid] assign[=] call[name[f].read, parameter[]]
return[<ast.IfExp object at 0x7da1b26ae0e0>] | keyword[def] identifier[_is_locked] ( identifier[self] ):
literal[string]
keyword[if] identifier[os] . identifier[path] . identifier[isfile] ( identifier[self] . identifier[_lck] ):
keyword[try] :
keyword[import] identifier[psutil]
keyword[except] identifier[ImportError] :
keyword[return] keyword[True]
keyword[with] identifier[open] ( identifier[self] . identifier[_lck] ) keyword[as] identifier[f] :
identifier[pid] = identifier[f] . identifier[read] ()
keyword[return] keyword[True] keyword[if] identifier[psutil] . identifier[pid_exists] ( identifier[int] ( identifier[pid] )) keyword[else] keyword[False]
keyword[else] :
keyword[return] keyword[False] | def _is_locked(self):
"""
Checks to see if we are already pulling items from the queue
"""
if os.path.isfile(self._lck):
try:
import psutil # depends on [control=['try'], data=[]]
except ImportError:
return True #Lock file exists and no psutil # depends on [control=['except'], data=[]]
#If psutil is imported
with open(self._lck) as f:
pid = f.read() # depends on [control=['with'], data=['f']]
return True if psutil.pid_exists(int(pid)) else False # depends on [control=['if'], data=[]]
else:
return False |
def get_agent(self, reactor=None, contextFactory=None):
"""
Returns an IAgent that makes requests to this fake server.
"""
return ProxyAgentWithContext(
self.endpoint, reactor=reactor, contextFactory=contextFactory) | def function[get_agent, parameter[self, reactor, contextFactory]]:
constant[
Returns an IAgent that makes requests to this fake server.
]
return[call[name[ProxyAgentWithContext], parameter[name[self].endpoint]]] | keyword[def] identifier[get_agent] ( identifier[self] , identifier[reactor] = keyword[None] , identifier[contextFactory] = keyword[None] ):
literal[string]
keyword[return] identifier[ProxyAgentWithContext] (
identifier[self] . identifier[endpoint] , identifier[reactor] = identifier[reactor] , identifier[contextFactory] = identifier[contextFactory] ) | def get_agent(self, reactor=None, contextFactory=None):
"""
Returns an IAgent that makes requests to this fake server.
"""
return ProxyAgentWithContext(self.endpoint, reactor=reactor, contextFactory=contextFactory) |
def send_pending_requests(self):
"""Can block on network if request is larger than send_buffer_bytes"""
try:
with self._lock:
if not self._can_send_recv():
return Errors.NodeNotReadyError(str(self))
# In the future we might manage an internal write buffer
# and send bytes asynchronously. For now, just block
# sending each request payload
data = self._protocol.send_bytes()
total_bytes = self._send_bytes_blocking(data)
if self._sensors:
self._sensors.bytes_sent.record(total_bytes)
return total_bytes
except ConnectionError as e:
log.exception("Error sending request data to %s", self)
error = Errors.KafkaConnectionError("%s: %s" % (self, e))
self.close(error=error)
return error | def function[send_pending_requests, parameter[self]]:
constant[Can block on network if request is larger than send_buffer_bytes]
<ast.Try object at 0x7da1b1c9ba00> | keyword[def] identifier[send_pending_requests] ( identifier[self] ):
literal[string]
keyword[try] :
keyword[with] identifier[self] . identifier[_lock] :
keyword[if] keyword[not] identifier[self] . identifier[_can_send_recv] ():
keyword[return] identifier[Errors] . identifier[NodeNotReadyError] ( identifier[str] ( identifier[self] ))
identifier[data] = identifier[self] . identifier[_protocol] . identifier[send_bytes] ()
identifier[total_bytes] = identifier[self] . identifier[_send_bytes_blocking] ( identifier[data] )
keyword[if] identifier[self] . identifier[_sensors] :
identifier[self] . identifier[_sensors] . identifier[bytes_sent] . identifier[record] ( identifier[total_bytes] )
keyword[return] identifier[total_bytes]
keyword[except] identifier[ConnectionError] keyword[as] identifier[e] :
identifier[log] . identifier[exception] ( literal[string] , identifier[self] )
identifier[error] = identifier[Errors] . identifier[KafkaConnectionError] ( literal[string] %( identifier[self] , identifier[e] ))
identifier[self] . identifier[close] ( identifier[error] = identifier[error] )
keyword[return] identifier[error] | def send_pending_requests(self):
"""Can block on network if request is larger than send_buffer_bytes"""
try:
with self._lock:
if not self._can_send_recv():
return Errors.NodeNotReadyError(str(self)) # depends on [control=['if'], data=[]]
# In the future we might manage an internal write buffer
# and send bytes asynchronously. For now, just block
# sending each request payload
data = self._protocol.send_bytes()
total_bytes = self._send_bytes_blocking(data) # depends on [control=['with'], data=[]]
if self._sensors:
self._sensors.bytes_sent.record(total_bytes) # depends on [control=['if'], data=[]]
return total_bytes # depends on [control=['try'], data=[]]
except ConnectionError as e:
log.exception('Error sending request data to %s', self)
error = Errors.KafkaConnectionError('%s: %s' % (self, e))
self.close(error=error)
return error # depends on [control=['except'], data=['e']] |
def step(self, action):
"""Forward action to the wrapped environment.
Args:
action: Action to apply to the environment.
Raises:
ValueError: Invalid action.
Returns:
Converted observation, converted reward, done flag, and info object.
"""
observ, reward, done, info = self._env.step(action)
observ = self._convert_observ(observ)
reward = self._convert_reward(reward)
return observ, reward, done, info | def function[step, parameter[self, action]]:
constant[Forward action to the wrapped environment.
Args:
action: Action to apply to the environment.
Raises:
ValueError: Invalid action.
Returns:
Converted observation, converted reward, done flag, and info object.
]
<ast.Tuple object at 0x7da2054a5c00> assign[=] call[name[self]._env.step, parameter[name[action]]]
variable[observ] assign[=] call[name[self]._convert_observ, parameter[name[observ]]]
variable[reward] assign[=] call[name[self]._convert_reward, parameter[name[reward]]]
return[tuple[[<ast.Name object at 0x7da2054a69e0>, <ast.Name object at 0x7da2054a6290>, <ast.Name object at 0x7da2054a61d0>, <ast.Name object at 0x7da2054a75b0>]]] | keyword[def] identifier[step] ( identifier[self] , identifier[action] ):
literal[string]
identifier[observ] , identifier[reward] , identifier[done] , identifier[info] = identifier[self] . identifier[_env] . identifier[step] ( identifier[action] )
identifier[observ] = identifier[self] . identifier[_convert_observ] ( identifier[observ] )
identifier[reward] = identifier[self] . identifier[_convert_reward] ( identifier[reward] )
keyword[return] identifier[observ] , identifier[reward] , identifier[done] , identifier[info] | def step(self, action):
"""Forward action to the wrapped environment.
Args:
action: Action to apply to the environment.
Raises:
ValueError: Invalid action.
Returns:
Converted observation, converted reward, done flag, and info object.
"""
(observ, reward, done, info) = self._env.step(action)
observ = self._convert_observ(observ)
reward = self._convert_reward(reward)
return (observ, reward, done, info) |
def parse_matchdict(self, req, name, field):
"""Pull a value from the request's `matchdict`."""
return core.get_value(req.matchdict, name, field) | def function[parse_matchdict, parameter[self, req, name, field]]:
constant[Pull a value from the request's `matchdict`.]
return[call[name[core].get_value, parameter[name[req].matchdict, name[name], name[field]]]] | keyword[def] identifier[parse_matchdict] ( identifier[self] , identifier[req] , identifier[name] , identifier[field] ):
literal[string]
keyword[return] identifier[core] . identifier[get_value] ( identifier[req] . identifier[matchdict] , identifier[name] , identifier[field] ) | def parse_matchdict(self, req, name, field):
"""Pull a value from the request's `matchdict`."""
return core.get_value(req.matchdict, name, field) |
def rounded(self, image, geometry, options):
"""
Wrapper for ``_rounded``
"""
r = options['rounded']
if not r:
return image
return self._rounded(image, int(r)) | def function[rounded, parameter[self, image, geometry, options]]:
constant[
Wrapper for ``_rounded``
]
variable[r] assign[=] call[name[options]][constant[rounded]]
if <ast.UnaryOp object at 0x7da1b1d55c90> begin[:]
return[name[image]]
return[call[name[self]._rounded, parameter[name[image], call[name[int], parameter[name[r]]]]]] | keyword[def] identifier[rounded] ( identifier[self] , identifier[image] , identifier[geometry] , identifier[options] ):
literal[string]
identifier[r] = identifier[options] [ literal[string] ]
keyword[if] keyword[not] identifier[r] :
keyword[return] identifier[image]
keyword[return] identifier[self] . identifier[_rounded] ( identifier[image] , identifier[int] ( identifier[r] )) | def rounded(self, image, geometry, options):
"""
Wrapper for ``_rounded``
"""
r = options['rounded']
if not r:
return image # depends on [control=['if'], data=[]]
return self._rounded(image, int(r)) |
def get_val(self):
"""
Gets attribute's value.
@return: stored value.
@rtype: int
@raise IOError: if corresponding file in /proc/sys cannot be read.
"""
file_obj = file(os.path.join(self._base, self._attr), 'r')
try:
val = int(file_obj.readline())
finally:
file_obj.close()
return val | def function[get_val, parameter[self]]:
constant[
Gets attribute's value.
@return: stored value.
@rtype: int
@raise IOError: if corresponding file in /proc/sys cannot be read.
]
variable[file_obj] assign[=] call[name[file], parameter[call[name[os].path.join, parameter[name[self]._base, name[self]._attr]], constant[r]]]
<ast.Try object at 0x7da207f9ab90>
return[name[val]] | keyword[def] identifier[get_val] ( identifier[self] ):
literal[string]
identifier[file_obj] = identifier[file] ( identifier[os] . identifier[path] . identifier[join] ( identifier[self] . identifier[_base] , identifier[self] . identifier[_attr] ), literal[string] )
keyword[try] :
identifier[val] = identifier[int] ( identifier[file_obj] . identifier[readline] ())
keyword[finally] :
identifier[file_obj] . identifier[close] ()
keyword[return] identifier[val] | def get_val(self):
"""
Gets attribute's value.
@return: stored value.
@rtype: int
@raise IOError: if corresponding file in /proc/sys cannot be read.
"""
file_obj = file(os.path.join(self._base, self._attr), 'r')
try:
val = int(file_obj.readline()) # depends on [control=['try'], data=[]]
finally:
file_obj.close()
return val |
def findLorenzDistanceAtTargetKY(Economy,param_name,param_count,center_range,spread,dist_type):
'''
Finds the sum of squared distances between simulated and target Lorenz points in an economy when
a given parameter has heterogeneity according to some distribution. The class of distribution
and a measure of spread are given as inputs, but the measure of centrality such that the capital
to income ratio matches the target ratio must be found.
Parameters
----------
Economy : cstwMPCmarket
An object representing the entire economy, containing the various AgentTypes as an attribute.
param_name : string
The name of the parameter of interest that varies across the population.
param_count : int
The number of different values the parameter of interest will take on.
center_range : [float,float]
Bounding values for a measure of centrality for the distribution of the parameter of interest.
spread : float
A measure of spread or diffusion for the distribution of the parameter of interest.
dist_type : string
The type of distribution to be used. Can be "lognormal" or "uniform" (can expand).
Returns
-------
dist : float
Sum of squared distances between simulated and target Lorenz points for this economy (sqrt).
'''
# Define the function to search for the correct value of center, then find its zero
intermediateObjective = lambda center : getKYratioDifference(Economy = Economy,
param_name = param_name,
param_count = param_count,
center = center,
spread = spread,
dist_type = dist_type)
optimal_center = brentq(intermediateObjective,center_range[0],center_range[1],xtol=10**(-6))
Economy.center_save = optimal_center
# Get the sum of squared Lorenz distances given the correct distribution of the parameter
Economy(LorenzBool = True) # Make sure we actually calculate simulated Lorenz points
Economy.distributeParams(param_name,param_count,optimal_center,spread,dist_type) # Distribute parameters
Economy.solveAgents()
Economy.makeHistory()
dist = Economy.calcLorenzDistance()
Economy(LorenzBool = False)
print ('findLorenzDistanceAtTargetKY tried spread = ' + str(spread) + ' and got ' + str(dist))
return dist | def function[findLorenzDistanceAtTargetKY, parameter[Economy, param_name, param_count, center_range, spread, dist_type]]:
constant[
Finds the sum of squared distances between simulated and target Lorenz points in an economy when
a given parameter has heterogeneity according to some distribution. The class of distribution
and a measure of spread are given as inputs, but the measure of centrality such that the capital
to income ratio matches the target ratio must be found.
Parameters
----------
Economy : cstwMPCmarket
An object representing the entire economy, containing the various AgentTypes as an attribute.
param_name : string
The name of the parameter of interest that varies across the population.
param_count : int
The number of different values the parameter of interest will take on.
center_range : [float,float]
Bounding values for a measure of centrality for the distribution of the parameter of interest.
spread : float
A measure of spread or diffusion for the distribution of the parameter of interest.
dist_type : string
The type of distribution to be used. Can be "lognormal" or "uniform" (can expand).
Returns
-------
dist : float
Sum of squared distances between simulated and target Lorenz points for this economy (sqrt).
]
variable[intermediateObjective] assign[=] <ast.Lambda object at 0x7da1b074c3a0>
variable[optimal_center] assign[=] call[name[brentq], parameter[name[intermediateObjective], call[name[center_range]][constant[0]], call[name[center_range]][constant[1]]]]
name[Economy].center_save assign[=] name[optimal_center]
call[name[Economy], parameter[]]
call[name[Economy].distributeParams, parameter[name[param_name], name[param_count], name[optimal_center], name[spread], name[dist_type]]]
call[name[Economy].solveAgents, parameter[]]
call[name[Economy].makeHistory, parameter[]]
variable[dist] assign[=] call[name[Economy].calcLorenzDistance, parameter[]]
call[name[Economy], parameter[]]
call[name[print], parameter[binary_operation[binary_operation[binary_operation[constant[findLorenzDistanceAtTargetKY tried spread = ] + call[name[str], parameter[name[spread]]]] + constant[ and got ]] + call[name[str], parameter[name[dist]]]]]]
return[name[dist]] | keyword[def] identifier[findLorenzDistanceAtTargetKY] ( identifier[Economy] , identifier[param_name] , identifier[param_count] , identifier[center_range] , identifier[spread] , identifier[dist_type] ):
literal[string]
identifier[intermediateObjective] = keyword[lambda] identifier[center] : identifier[getKYratioDifference] ( identifier[Economy] = identifier[Economy] ,
identifier[param_name] = identifier[param_name] ,
identifier[param_count] = identifier[param_count] ,
identifier[center] = identifier[center] ,
identifier[spread] = identifier[spread] ,
identifier[dist_type] = identifier[dist_type] )
identifier[optimal_center] = identifier[brentq] ( identifier[intermediateObjective] , identifier[center_range] [ literal[int] ], identifier[center_range] [ literal[int] ], identifier[xtol] = literal[int] **(- literal[int] ))
identifier[Economy] . identifier[center_save] = identifier[optimal_center]
identifier[Economy] ( identifier[LorenzBool] = keyword[True] )
identifier[Economy] . identifier[distributeParams] ( identifier[param_name] , identifier[param_count] , identifier[optimal_center] , identifier[spread] , identifier[dist_type] )
identifier[Economy] . identifier[solveAgents] ()
identifier[Economy] . identifier[makeHistory] ()
identifier[dist] = identifier[Economy] . identifier[calcLorenzDistance] ()
identifier[Economy] ( identifier[LorenzBool] = keyword[False] )
identifier[print] ( literal[string] + identifier[str] ( identifier[spread] )+ literal[string] + identifier[str] ( identifier[dist] ))
keyword[return] identifier[dist] | def findLorenzDistanceAtTargetKY(Economy, param_name, param_count, center_range, spread, dist_type):
"""
Finds the sum of squared distances between simulated and target Lorenz points in an economy when
a given parameter has heterogeneity according to some distribution. The class of distribution
and a measure of spread are given as inputs, but the measure of centrality such that the capital
to income ratio matches the target ratio must be found.
Parameters
----------
Economy : cstwMPCmarket
An object representing the entire economy, containing the various AgentTypes as an attribute.
param_name : string
The name of the parameter of interest that varies across the population.
param_count : int
The number of different values the parameter of interest will take on.
center_range : [float,float]
Bounding values for a measure of centrality for the distribution of the parameter of interest.
spread : float
A measure of spread or diffusion for the distribution of the parameter of interest.
dist_type : string
The type of distribution to be used. Can be "lognormal" or "uniform" (can expand).
Returns
-------
dist : float
Sum of squared distances between simulated and target Lorenz points for this economy (sqrt).
"""
# Define the function to search for the correct value of center, then find its zero
intermediateObjective = lambda center: getKYratioDifference(Economy=Economy, param_name=param_name, param_count=param_count, center=center, spread=spread, dist_type=dist_type)
optimal_center = brentq(intermediateObjective, center_range[0], center_range[1], xtol=10 ** (-6))
Economy.center_save = optimal_center
# Get the sum of squared Lorenz distances given the correct distribution of the parameter
Economy(LorenzBool=True) # Make sure we actually calculate simulated Lorenz points
Economy.distributeParams(param_name, param_count, optimal_center, spread, dist_type) # Distribute parameters
Economy.solveAgents()
Economy.makeHistory()
dist = Economy.calcLorenzDistance()
Economy(LorenzBool=False)
print('findLorenzDistanceAtTargetKY tried spread = ' + str(spread) + ' and got ' + str(dist))
return dist |
def get_utm_epsg(longitude, latitude, crs=None):
"""Return epsg code of the utm zone according to X, Y coordinates.
By default, the CRS is EPSG:4326. If the CRS is provided, first X,Y will
be reprojected from the input CRS to WGS84.
The code is based on the code:
http://gis.stackexchange.com/questions/34401
:param longitude: The longitude.
:type longitude: float
:param latitude: The latitude.
:type latitude: float
:param crs: The coordinate reference system of the latitude, longitude.
:type crs: QgsCoordinateReferenceSystem
"""
if crs is None or crs.authid() == 'EPSG:4326':
epsg = 32600
if latitude < 0.0:
epsg += 100
epsg += get_utm_zone(longitude)
return epsg
else:
epsg_4326 = QgsCoordinateReferenceSystem('EPSG:4326')
transform = QgsCoordinateTransform(
crs, epsg_4326, QgsProject.instance())
geom = QgsGeometry.fromPointXY(QgsPointXY(longitude, latitude))
geom.transform(transform)
point = geom.asPoint()
# The point is now in 4326, we can call the function again.
return get_utm_epsg(point.x(), point.y()) | def function[get_utm_epsg, parameter[longitude, latitude, crs]]:
constant[Return epsg code of the utm zone according to X, Y coordinates.
By default, the CRS is EPSG:4326. If the CRS is provided, first X,Y will
be reprojected from the input CRS to WGS84.
The code is based on the code:
http://gis.stackexchange.com/questions/34401
:param longitude: The longitude.
:type longitude: float
:param latitude: The latitude.
:type latitude: float
:param crs: The coordinate reference system of the latitude, longitude.
:type crs: QgsCoordinateReferenceSystem
]
if <ast.BoolOp object at 0x7da20c6a8130> begin[:]
variable[epsg] assign[=] constant[32600]
if compare[name[latitude] less[<] constant[0.0]] begin[:]
<ast.AugAssign object at 0x7da20e957070>
<ast.AugAssign object at 0x7da20e955ab0>
return[name[epsg]] | keyword[def] identifier[get_utm_epsg] ( identifier[longitude] , identifier[latitude] , identifier[crs] = keyword[None] ):
literal[string]
keyword[if] identifier[crs] keyword[is] keyword[None] keyword[or] identifier[crs] . identifier[authid] ()== literal[string] :
identifier[epsg] = literal[int]
keyword[if] identifier[latitude] < literal[int] :
identifier[epsg] += literal[int]
identifier[epsg] += identifier[get_utm_zone] ( identifier[longitude] )
keyword[return] identifier[epsg]
keyword[else] :
identifier[epsg_4326] = identifier[QgsCoordinateReferenceSystem] ( literal[string] )
identifier[transform] = identifier[QgsCoordinateTransform] (
identifier[crs] , identifier[epsg_4326] , identifier[QgsProject] . identifier[instance] ())
identifier[geom] = identifier[QgsGeometry] . identifier[fromPointXY] ( identifier[QgsPointXY] ( identifier[longitude] , identifier[latitude] ))
identifier[geom] . identifier[transform] ( identifier[transform] )
identifier[point] = identifier[geom] . identifier[asPoint] ()
keyword[return] identifier[get_utm_epsg] ( identifier[point] . identifier[x] (), identifier[point] . identifier[y] ()) | def get_utm_epsg(longitude, latitude, crs=None):
"""Return epsg code of the utm zone according to X, Y coordinates.
By default, the CRS is EPSG:4326. If the CRS is provided, first X,Y will
be reprojected from the input CRS to WGS84.
The code is based on the code:
http://gis.stackexchange.com/questions/34401
:param longitude: The longitude.
:type longitude: float
:param latitude: The latitude.
:type latitude: float
:param crs: The coordinate reference system of the latitude, longitude.
:type crs: QgsCoordinateReferenceSystem
"""
if crs is None or crs.authid() == 'EPSG:4326':
epsg = 32600
if latitude < 0.0:
epsg += 100 # depends on [control=['if'], data=[]]
epsg += get_utm_zone(longitude)
return epsg # depends on [control=['if'], data=[]]
else:
epsg_4326 = QgsCoordinateReferenceSystem('EPSG:4326')
transform = QgsCoordinateTransform(crs, epsg_4326, QgsProject.instance())
geom = QgsGeometry.fromPointXY(QgsPointXY(longitude, latitude))
geom.transform(transform)
point = geom.asPoint()
# The point is now in 4326, we can call the function again.
return get_utm_epsg(point.x(), point.y()) |
def build_input_data_with_word2vec(sentences, labels, word2vec_list):
"""
Map sentences and labels to vectors based on a pretrained word2vec
"""
x_vec = []
for sent in sentences:
vec = []
for word in sent:
if word in word2vec_list:
vec.append(word2vec_list[word])
else:
vec.append(word2vec_list['</s>'])
x_vec.append(vec)
x_vec = np.array(x_vec)
y_vec = np.array(labels)
return [x_vec, y_vec] | def function[build_input_data_with_word2vec, parameter[sentences, labels, word2vec_list]]:
constant[
Map sentences and labels to vectors based on a pretrained word2vec
]
variable[x_vec] assign[=] list[[]]
for taget[name[sent]] in starred[name[sentences]] begin[:]
variable[vec] assign[=] list[[]]
for taget[name[word]] in starred[name[sent]] begin[:]
if compare[name[word] in name[word2vec_list]] begin[:]
call[name[vec].append, parameter[call[name[word2vec_list]][name[word]]]]
call[name[x_vec].append, parameter[name[vec]]]
variable[x_vec] assign[=] call[name[np].array, parameter[name[x_vec]]]
variable[y_vec] assign[=] call[name[np].array, parameter[name[labels]]]
return[list[[<ast.Name object at 0x7da1b1e16bf0>, <ast.Name object at 0x7da1b1e15c90>]]] | keyword[def] identifier[build_input_data_with_word2vec] ( identifier[sentences] , identifier[labels] , identifier[word2vec_list] ):
literal[string]
identifier[x_vec] =[]
keyword[for] identifier[sent] keyword[in] identifier[sentences] :
identifier[vec] =[]
keyword[for] identifier[word] keyword[in] identifier[sent] :
keyword[if] identifier[word] keyword[in] identifier[word2vec_list] :
identifier[vec] . identifier[append] ( identifier[word2vec_list] [ identifier[word] ])
keyword[else] :
identifier[vec] . identifier[append] ( identifier[word2vec_list] [ literal[string] ])
identifier[x_vec] . identifier[append] ( identifier[vec] )
identifier[x_vec] = identifier[np] . identifier[array] ( identifier[x_vec] )
identifier[y_vec] = identifier[np] . identifier[array] ( identifier[labels] )
keyword[return] [ identifier[x_vec] , identifier[y_vec] ] | def build_input_data_with_word2vec(sentences, labels, word2vec_list):
"""
Map sentences and labels to vectors based on a pretrained word2vec
"""
x_vec = []
for sent in sentences:
vec = []
for word in sent:
if word in word2vec_list:
vec.append(word2vec_list[word]) # depends on [control=['if'], data=['word', 'word2vec_list']]
else:
vec.append(word2vec_list['</s>']) # depends on [control=['for'], data=['word']]
x_vec.append(vec) # depends on [control=['for'], data=['sent']]
x_vec = np.array(x_vec)
y_vec = np.array(labels)
return [x_vec, y_vec] |
def move_forward(num_steps):
"""Moves the pen forward a few steps in the direction that its "turtle" is facing.
Arguments:
num_steps - a number like 20. A bigger number makes the pen move farther.
"""
assert int(num_steps) == num_steps, "move_forward() only accepts integers, but you gave it " + str(num_steps)
_make_cnc_request("move.forward./" + str(num_steps))
state['turtle'].forward(num_steps) | def function[move_forward, parameter[num_steps]]:
constant[Moves the pen forward a few steps in the direction that its "turtle" is facing.
Arguments:
num_steps - a number like 20. A bigger number makes the pen move farther.
]
assert[compare[call[name[int], parameter[name[num_steps]]] equal[==] name[num_steps]]]
call[name[_make_cnc_request], parameter[binary_operation[constant[move.forward./] + call[name[str], parameter[name[num_steps]]]]]]
call[call[name[state]][constant[turtle]].forward, parameter[name[num_steps]]] | keyword[def] identifier[move_forward] ( identifier[num_steps] ):
literal[string]
keyword[assert] identifier[int] ( identifier[num_steps] )== identifier[num_steps] , literal[string] + identifier[str] ( identifier[num_steps] )
identifier[_make_cnc_request] ( literal[string] + identifier[str] ( identifier[num_steps] ))
identifier[state] [ literal[string] ]. identifier[forward] ( identifier[num_steps] ) | def move_forward(num_steps):
"""Moves the pen forward a few steps in the direction that its "turtle" is facing.
Arguments:
num_steps - a number like 20. A bigger number makes the pen move farther.
"""
assert int(num_steps) == num_steps, 'move_forward() only accepts integers, but you gave it ' + str(num_steps)
_make_cnc_request('move.forward./' + str(num_steps))
state['turtle'].forward(num_steps) |
def executeTree(address=[]):
"""This function executes a tree. To limit the size of the arguments passed
to the function, the tree must be loaded in memory in every worker. To do
this, simply call "Tree = importTree(filename)" before using the startup
method of the parralisation library you are using"""
global nodeDone
# Get tree subsection
localTree = getTree(address)
# Execute tasks
localTree.intCalc()
localTree.floatCalc()
# Select next nodes to be executed
nextAddresses = [address + [i] for i in range(len(localTree.children))]
if len(localTree.children) == 0:
return 1
# Execute the children
res = sum(mapfunc(executeTree, nextAddresses))
assert res == localTree.leaves, (
"Test failed: res = {0}, leaves = {1}").format(res, localTree.leaves)
return res | def function[executeTree, parameter[address]]:
constant[This function executes a tree. To limit the size of the arguments passed
to the function, the tree must be loaded in memory in every worker. To do
this, simply call "Tree = importTree(filename)" before using the startup
method of the parralisation library you are using]
<ast.Global object at 0x7da18c4cef80>
variable[localTree] assign[=] call[name[getTree], parameter[name[address]]]
call[name[localTree].intCalc, parameter[]]
call[name[localTree].floatCalc, parameter[]]
variable[nextAddresses] assign[=] <ast.ListComp object at 0x7da1b2346c20>
if compare[call[name[len], parameter[name[localTree].children]] equal[==] constant[0]] begin[:]
return[constant[1]]
variable[res] assign[=] call[name[sum], parameter[call[name[mapfunc], parameter[name[executeTree], name[nextAddresses]]]]]
assert[compare[name[res] equal[==] name[localTree].leaves]]
return[name[res]] | keyword[def] identifier[executeTree] ( identifier[address] =[]):
literal[string]
keyword[global] identifier[nodeDone]
identifier[localTree] = identifier[getTree] ( identifier[address] )
identifier[localTree] . identifier[intCalc] ()
identifier[localTree] . identifier[floatCalc] ()
identifier[nextAddresses] =[ identifier[address] +[ identifier[i] ] keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[len] ( identifier[localTree] . identifier[children] ))]
keyword[if] identifier[len] ( identifier[localTree] . identifier[children] )== literal[int] :
keyword[return] literal[int]
identifier[res] = identifier[sum] ( identifier[mapfunc] ( identifier[executeTree] , identifier[nextAddresses] ))
keyword[assert] identifier[res] == identifier[localTree] . identifier[leaves] ,(
literal[string] ). identifier[format] ( identifier[res] , identifier[localTree] . identifier[leaves] )
keyword[return] identifier[res] | def executeTree(address=[]):
"""This function executes a tree. To limit the size of the arguments passed
to the function, the tree must be loaded in memory in every worker. To do
this, simply call "Tree = importTree(filename)" before using the startup
method of the parralisation library you are using"""
global nodeDone
# Get tree subsection
localTree = getTree(address)
# Execute tasks
localTree.intCalc()
localTree.floatCalc()
# Select next nodes to be executed
nextAddresses = [address + [i] for i in range(len(localTree.children))]
if len(localTree.children) == 0:
return 1 # depends on [control=['if'], data=[]]
# Execute the children
res = sum(mapfunc(executeTree, nextAddresses))
assert res == localTree.leaves, 'Test failed: res = {0}, leaves = {1}'.format(res, localTree.leaves)
return res |
def do_operation_update(self, info, an_op):
"""Updates an operation using the assigned update_op_func
Args:
info: (:class:`endpoints_management.control.report_request.Info`): the
info instance to update
an_op: (:class:`endpoints_management.control.report_request.Info`):
the info instance to update
Return:
`True` if desc is supported, otherwise `False`
"""
self.update_op_func(self.metric_name, info, an_op) | def function[do_operation_update, parameter[self, info, an_op]]:
constant[Updates an operation using the assigned update_op_func
Args:
info: (:class:`endpoints_management.control.report_request.Info`): the
info instance to update
an_op: (:class:`endpoints_management.control.report_request.Info`):
the info instance to update
Return:
`True` if desc is supported, otherwise `False`
]
call[name[self].update_op_func, parameter[name[self].metric_name, name[info], name[an_op]]] | keyword[def] identifier[do_operation_update] ( identifier[self] , identifier[info] , identifier[an_op] ):
literal[string]
identifier[self] . identifier[update_op_func] ( identifier[self] . identifier[metric_name] , identifier[info] , identifier[an_op] ) | def do_operation_update(self, info, an_op):
"""Updates an operation using the assigned update_op_func
Args:
info: (:class:`endpoints_management.control.report_request.Info`): the
info instance to update
an_op: (:class:`endpoints_management.control.report_request.Info`):
the info instance to update
Return:
`True` if desc is supported, otherwise `False`
"""
self.update_op_func(self.metric_name, info, an_op) |
def compile_higher_order_function(eqs, syms, params, order=2, funname='anonymous',
return_code=False, compile=False):
'''From a list of equations and variables, define a multivariate functions with higher order derivatives.'''
from dolang.symbolic import stringify, stringify_symbol
vars = [s[0] for s in syms]
# TEMP: compatibility fix when eqs is an Odict:
eqs = [eq for eq in eqs]
if isinstance(eqs[0], str):
# elif not isinstance(eqs[0], sympy.Basic):
# assume we have ASTs
eqs = list([ast.parse(eq).body[0] for eq in eqs])
eqs_std = list( [stringify_symbol(eq, variables=vars) for eq in eqs] )
eqs_sym = list( [ast_to_sympy(eq) for eq in eqs_std] )
else:
eqs_sym = eqs
symsd = list( [stringify_symbol((a,b)) for a,b in syms] )
paramsd = list( [stringify_symbol(a) for a in params] )
D = higher_order_diff(eqs_sym, symsd, order=order)
txt = """def {funname}(x, p, order=1):
import numpy
from numpy import log, exp, tan, sqrt
from numpy import pi as pi_
from numpy import inf as inf_
from scipy.special import erfc
""".format(funname=funname)
for i in range(len(syms)):
txt += " {} = x[{}]\n".format(symsd[i], i)
txt += "\n"
for i in range(len(params)):
txt += " {} = p[{}]\n".format(paramsd[i], i)
txt += "\n out = numpy.zeros({})".format(len(eqs))
for i in range(len(eqs)):
txt += "\n out[{}] = {}".format(i, D[0][i])
txt += """
if order == 0:
return out
"""
if order >= 1:
# Jacobian
txt += " out_1 = numpy.zeros(({},{}))\n".format(len(eqs), len(syms))
for i in range(len(eqs)):
for j in range(len(syms)):
val = D[1][i,j]
if val != 0:
txt += " out_1[{},{}] = {}\n".format(i,j,D[1][i,j])
txt += """
if order == 1:
return [out, out_1]
"""
if order >= 2:
# Hessian
txt += " out_2 = numpy.zeros(({},{},{}))\n".format(len(eqs), len(syms), len(syms))
for n in range(len(eqs)):
for i in range(len(syms)):
for j in range(len(syms)):
val = D[2][n,i,j]
if val is not None:
if val != 0:
txt += " out_2[{},{},{}] = {}\n".format(n,i,j,D[2][n,i,j])
else:
i1, j1 = sorted( (i,j) )
if D[2][n,i1,j1] != 0:
txt += " out_2[{},{},{}] = out_2[{},{},{}]\n".format(n,i,j,n,i1,j1)
txt += """
if order == 2:
return [out, out_1, out_2]
"""
if order >= 3:
# Hessian
txt += " out_3 = numpy.zeros(({},{},{},{}))\n".format(len(eqs), len(syms), len(syms), len(syms))
for n in range(len(eqs)):
for i in range(len(syms)):
for j in range(len(syms)):
for k in range(len(syms)):
val = D[3][n,i,j,k]
if val is not None:
if val != 0:
txt += " out_3[{},{},{},{}] = {}\n".format(n,i,j,k,D[3][n,i,j,k])
else:
i1, j1, k1 = sorted( (i,j,k) )
if D[3][n,i1,j1,k1] != 0:
txt += " out_3[{},{},{},{}] = out_3[{},{},{},{}]\n".format(n,i,j,k,n,i1,j1,k1)
txt += """
if order == 3:
return [out, out_1, out_2, out_3]
"""
if return_code:
return txt
else:
d = {}
exec(txt, d)
fun = d[funname]
if compile:
raise Exception("Not implemented.")
return fun | def function[compile_higher_order_function, parameter[eqs, syms, params, order, funname, return_code, compile]]:
constant[From a list of equations and variables, define a multivariate functions with higher order derivatives.]
from relative_module[dolang.symbolic] import module[stringify], module[stringify_symbol]
variable[vars] assign[=] <ast.ListComp object at 0x7da204567550>
variable[eqs] assign[=] <ast.ListComp object at 0x7da2045661d0>
if call[name[isinstance], parameter[call[name[eqs]][constant[0]], name[str]]] begin[:]
variable[eqs] assign[=] call[name[list], parameter[<ast.ListComp object at 0x7da204565990>]]
variable[eqs_std] assign[=] call[name[list], parameter[<ast.ListComp object at 0x7da2045668f0>]]
variable[eqs_sym] assign[=] call[name[list], parameter[<ast.ListComp object at 0x7da204566380>]]
variable[symsd] assign[=] call[name[list], parameter[<ast.ListComp object at 0x7da204565de0>]]
variable[paramsd] assign[=] call[name[list], parameter[<ast.ListComp object at 0x7da2045668c0>]]
variable[D] assign[=] call[name[higher_order_diff], parameter[name[eqs_sym], name[symsd]]]
variable[txt] assign[=] call[constant[def {funname}(x, p, order=1):
import numpy
from numpy import log, exp, tan, sqrt
from numpy import pi as pi_
from numpy import inf as inf_
from scipy.special import erfc
].format, parameter[]]
for taget[name[i]] in starred[call[name[range], parameter[call[name[len], parameter[name[syms]]]]]] begin[:]
<ast.AugAssign object at 0x7da204566e60>
<ast.AugAssign object at 0x7da2045648e0>
for taget[name[i]] in starred[call[name[range], parameter[call[name[len], parameter[name[params]]]]]] begin[:]
<ast.AugAssign object at 0x7da204564c10>
<ast.AugAssign object at 0x7da204567340>
for taget[name[i]] in starred[call[name[range], parameter[call[name[len], parameter[name[eqs]]]]]] begin[:]
<ast.AugAssign object at 0x7da2045655d0>
<ast.AugAssign object at 0x7da204565ba0>
if compare[name[order] greater_or_equal[>=] constant[1]] begin[:]
<ast.AugAssign object at 0x7da2045677f0>
for taget[name[i]] in starred[call[name[range], parameter[call[name[len], parameter[name[eqs]]]]]] begin[:]
for taget[name[j]] in starred[call[name[range], parameter[call[name[len], parameter[name[syms]]]]]] begin[:]
variable[val] assign[=] call[call[name[D]][constant[1]]][tuple[[<ast.Name object at 0x7da204566b90>, <ast.Name object at 0x7da204565c30>]]]
if compare[name[val] not_equal[!=] constant[0]] begin[:]
<ast.AugAssign object at 0x7da2045643d0>
<ast.AugAssign object at 0x7da204567220>
if compare[name[order] greater_or_equal[>=] constant[2]] begin[:]
<ast.AugAssign object at 0x7da204565090>
for taget[name[n]] in starred[call[name[range], parameter[call[name[len], parameter[name[eqs]]]]]] begin[:]
for taget[name[i]] in starred[call[name[range], parameter[call[name[len], parameter[name[syms]]]]]] begin[:]
for taget[name[j]] in starred[call[name[range], parameter[call[name[len], parameter[name[syms]]]]]] begin[:]
variable[val] assign[=] call[call[name[D]][constant[2]]][tuple[[<ast.Name object at 0x7da18f723d00>, <ast.Name object at 0x7da18f720790>, <ast.Name object at 0x7da18f723a00>]]]
if compare[name[val] is_not constant[None]] begin[:]
if compare[name[val] not_equal[!=] constant[0]] begin[:]
<ast.AugAssign object at 0x7da18f722f20>
<ast.AugAssign object at 0x7da18f7226e0>
if compare[name[order] greater_or_equal[>=] constant[3]] begin[:]
<ast.AugAssign object at 0x7da18f721a50>
for taget[name[n]] in starred[call[name[range], parameter[call[name[len], parameter[name[eqs]]]]]] begin[:]
for taget[name[i]] in starred[call[name[range], parameter[call[name[len], parameter[name[syms]]]]]] begin[:]
for taget[name[j]] in starred[call[name[range], parameter[call[name[len], parameter[name[syms]]]]]] begin[:]
for taget[name[k]] in starred[call[name[range], parameter[call[name[len], parameter[name[syms]]]]]] begin[:]
variable[val] assign[=] call[call[name[D]][constant[3]]][tuple[[<ast.Name object at 0x7da18f7209a0>, <ast.Name object at 0x7da18f722fe0>, <ast.Name object at 0x7da18f720190>, <ast.Name object at 0x7da18f721150>]]]
if compare[name[val] is_not constant[None]] begin[:]
if compare[name[val] not_equal[!=] constant[0]] begin[:]
<ast.AugAssign object at 0x7da18f721090>
<ast.AugAssign object at 0x7da18f7220b0>
if name[return_code] begin[:]
return[name[txt]] | keyword[def] identifier[compile_higher_order_function] ( identifier[eqs] , identifier[syms] , identifier[params] , identifier[order] = literal[int] , identifier[funname] = literal[string] ,
identifier[return_code] = keyword[False] , identifier[compile] = keyword[False] ):
literal[string]
keyword[from] identifier[dolang] . identifier[symbolic] keyword[import] identifier[stringify] , identifier[stringify_symbol]
identifier[vars] =[ identifier[s] [ literal[int] ] keyword[for] identifier[s] keyword[in] identifier[syms] ]
identifier[eqs] =[ identifier[eq] keyword[for] identifier[eq] keyword[in] identifier[eqs] ]
keyword[if] identifier[isinstance] ( identifier[eqs] [ literal[int] ], identifier[str] ):
identifier[eqs] = identifier[list] ([ identifier[ast] . identifier[parse] ( identifier[eq] ). identifier[body] [ literal[int] ] keyword[for] identifier[eq] keyword[in] identifier[eqs] ])
identifier[eqs_std] = identifier[list] ([ identifier[stringify_symbol] ( identifier[eq] , identifier[variables] = identifier[vars] ) keyword[for] identifier[eq] keyword[in] identifier[eqs] ])
identifier[eqs_sym] = identifier[list] ([ identifier[ast_to_sympy] ( identifier[eq] ) keyword[for] identifier[eq] keyword[in] identifier[eqs_std] ])
keyword[else] :
identifier[eqs_sym] = identifier[eqs]
identifier[symsd] = identifier[list] ([ identifier[stringify_symbol] (( identifier[a] , identifier[b] )) keyword[for] identifier[a] , identifier[b] keyword[in] identifier[syms] ])
identifier[paramsd] = identifier[list] ([ identifier[stringify_symbol] ( identifier[a] ) keyword[for] identifier[a] keyword[in] identifier[params] ])
identifier[D] = identifier[higher_order_diff] ( identifier[eqs_sym] , identifier[symsd] , identifier[order] = identifier[order] )
identifier[txt] = literal[string] . identifier[format] ( identifier[funname] = identifier[funname] )
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[len] ( identifier[syms] )):
identifier[txt] += literal[string] . identifier[format] ( identifier[symsd] [ identifier[i] ], identifier[i] )
identifier[txt] += literal[string]
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[len] ( identifier[params] )):
identifier[txt] += literal[string] . identifier[format] ( identifier[paramsd] [ identifier[i] ], identifier[i] )
identifier[txt] += literal[string] . identifier[format] ( identifier[len] ( identifier[eqs] ))
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[len] ( identifier[eqs] )):
identifier[txt] += literal[string] . identifier[format] ( identifier[i] , identifier[D] [ literal[int] ][ identifier[i] ])
identifier[txt] += literal[string]
keyword[if] identifier[order] >= literal[int] :
identifier[txt] += literal[string] . identifier[format] ( identifier[len] ( identifier[eqs] ), identifier[len] ( identifier[syms] ))
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[len] ( identifier[eqs] )):
keyword[for] identifier[j] keyword[in] identifier[range] ( identifier[len] ( identifier[syms] )):
identifier[val] = identifier[D] [ literal[int] ][ identifier[i] , identifier[j] ]
keyword[if] identifier[val] != literal[int] :
identifier[txt] += literal[string] . identifier[format] ( identifier[i] , identifier[j] , identifier[D] [ literal[int] ][ identifier[i] , identifier[j] ])
identifier[txt] += literal[string]
keyword[if] identifier[order] >= literal[int] :
identifier[txt] += literal[string] . identifier[format] ( identifier[len] ( identifier[eqs] ), identifier[len] ( identifier[syms] ), identifier[len] ( identifier[syms] ))
keyword[for] identifier[n] keyword[in] identifier[range] ( identifier[len] ( identifier[eqs] )):
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[len] ( identifier[syms] )):
keyword[for] identifier[j] keyword[in] identifier[range] ( identifier[len] ( identifier[syms] )):
identifier[val] = identifier[D] [ literal[int] ][ identifier[n] , identifier[i] , identifier[j] ]
keyword[if] identifier[val] keyword[is] keyword[not] keyword[None] :
keyword[if] identifier[val] != literal[int] :
identifier[txt] += literal[string] . identifier[format] ( identifier[n] , identifier[i] , identifier[j] , identifier[D] [ literal[int] ][ identifier[n] , identifier[i] , identifier[j] ])
keyword[else] :
identifier[i1] , identifier[j1] = identifier[sorted] (( identifier[i] , identifier[j] ))
keyword[if] identifier[D] [ literal[int] ][ identifier[n] , identifier[i1] , identifier[j1] ]!= literal[int] :
identifier[txt] += literal[string] . identifier[format] ( identifier[n] , identifier[i] , identifier[j] , identifier[n] , identifier[i1] , identifier[j1] )
identifier[txt] += literal[string]
keyword[if] identifier[order] >= literal[int] :
identifier[txt] += literal[string] . identifier[format] ( identifier[len] ( identifier[eqs] ), identifier[len] ( identifier[syms] ), identifier[len] ( identifier[syms] ), identifier[len] ( identifier[syms] ))
keyword[for] identifier[n] keyword[in] identifier[range] ( identifier[len] ( identifier[eqs] )):
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[len] ( identifier[syms] )):
keyword[for] identifier[j] keyword[in] identifier[range] ( identifier[len] ( identifier[syms] )):
keyword[for] identifier[k] keyword[in] identifier[range] ( identifier[len] ( identifier[syms] )):
identifier[val] = identifier[D] [ literal[int] ][ identifier[n] , identifier[i] , identifier[j] , identifier[k] ]
keyword[if] identifier[val] keyword[is] keyword[not] keyword[None] :
keyword[if] identifier[val] != literal[int] :
identifier[txt] += literal[string] . identifier[format] ( identifier[n] , identifier[i] , identifier[j] , identifier[k] , identifier[D] [ literal[int] ][ identifier[n] , identifier[i] , identifier[j] , identifier[k] ])
keyword[else] :
identifier[i1] , identifier[j1] , identifier[k1] = identifier[sorted] (( identifier[i] , identifier[j] , identifier[k] ))
keyword[if] identifier[D] [ literal[int] ][ identifier[n] , identifier[i1] , identifier[j1] , identifier[k1] ]!= literal[int] :
identifier[txt] += literal[string] . identifier[format] ( identifier[n] , identifier[i] , identifier[j] , identifier[k] , identifier[n] , identifier[i1] , identifier[j1] , identifier[k1] )
identifier[txt] += literal[string]
keyword[if] identifier[return_code] :
keyword[return] identifier[txt]
keyword[else] :
identifier[d] ={}
identifier[exec] ( identifier[txt] , identifier[d] )
identifier[fun] = identifier[d] [ identifier[funname] ]
keyword[if] identifier[compile] :
keyword[raise] identifier[Exception] ( literal[string] )
keyword[return] identifier[fun] | def compile_higher_order_function(eqs, syms, params, order=2, funname='anonymous', return_code=False, compile=False):
"""From a list of equations and variables, define a multivariate functions with higher order derivatives."""
from dolang.symbolic import stringify, stringify_symbol
vars = [s[0] for s in syms]
# TEMP: compatibility fix when eqs is an Odict:
eqs = [eq for eq in eqs]
if isinstance(eqs[0], str):
# elif not isinstance(eqs[0], sympy.Basic):
# assume we have ASTs
eqs = list([ast.parse(eq).body[0] for eq in eqs])
eqs_std = list([stringify_symbol(eq, variables=vars) for eq in eqs])
eqs_sym = list([ast_to_sympy(eq) for eq in eqs_std]) # depends on [control=['if'], data=[]]
else:
eqs_sym = eqs
symsd = list([stringify_symbol((a, b)) for (a, b) in syms])
paramsd = list([stringify_symbol(a) for a in params])
D = higher_order_diff(eqs_sym, symsd, order=order)
txt = 'def {funname}(x, p, order=1):\n\n import numpy\n from numpy import log, exp, tan, sqrt\n from numpy import pi as pi_\n from numpy import inf as inf_\n from scipy.special import erfc\n\n'.format(funname=funname)
for i in range(len(syms)):
txt += ' {} = x[{}]\n'.format(symsd[i], i) # depends on [control=['for'], data=['i']]
txt += '\n'
for i in range(len(params)):
txt += ' {} = p[{}]\n'.format(paramsd[i], i) # depends on [control=['for'], data=['i']]
txt += '\n out = numpy.zeros({})'.format(len(eqs))
for i in range(len(eqs)):
txt += '\n out[{}] = {}'.format(i, D[0][i]) # depends on [control=['for'], data=['i']]
txt += '\n\n if order == 0:\n return out\n\n'
if order >= 1:
# Jacobian
txt += ' out_1 = numpy.zeros(({},{}))\n'.format(len(eqs), len(syms))
for i in range(len(eqs)):
for j in range(len(syms)):
val = D[1][i, j]
if val != 0:
txt += ' out_1[{},{}] = {}\n'.format(i, j, D[1][i, j]) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['j']] # depends on [control=['for'], data=['i']]
txt += '\n\n if order == 1:\n return [out, out_1]\n\n' # depends on [control=['if'], data=[]]
if order >= 2:
# Hessian
txt += ' out_2 = numpy.zeros(({},{},{}))\n'.format(len(eqs), len(syms), len(syms))
for n in range(len(eqs)):
for i in range(len(syms)):
for j in range(len(syms)):
val = D[2][n, i, j]
if val is not None:
if val != 0:
txt += ' out_2[{},{},{}] = {}\n'.format(n, i, j, D[2][n, i, j]) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['val']]
else:
(i1, j1) = sorted((i, j))
if D[2][n, i1, j1] != 0:
txt += ' out_2[{},{},{}] = out_2[{},{},{}]\n'.format(n, i, j, n, i1, j1) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['j']] # depends on [control=['for'], data=['i']] # depends on [control=['for'], data=['n']]
txt += '\n\n if order == 2:\n return [out, out_1, out_2]\n\n' # depends on [control=['if'], data=[]]
if order >= 3:
# Hessian
txt += ' out_3 = numpy.zeros(({},{},{},{}))\n'.format(len(eqs), len(syms), len(syms), len(syms))
for n in range(len(eqs)):
for i in range(len(syms)):
for j in range(len(syms)):
for k in range(len(syms)):
val = D[3][n, i, j, k]
if val is not None:
if val != 0:
txt += ' out_3[{},{},{},{}] = {}\n'.format(n, i, j, k, D[3][n, i, j, k]) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['val']]
else:
(i1, j1, k1) = sorted((i, j, k))
if D[3][n, i1, j1, k1] != 0:
txt += ' out_3[{},{},{},{}] = out_3[{},{},{},{}]\n'.format(n, i, j, k, n, i1, j1, k1) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['k']] # depends on [control=['for'], data=['j']] # depends on [control=['for'], data=['i']] # depends on [control=['for'], data=['n']]
txt += '\n\n if order == 3:\n return [out, out_1, out_2, out_3]\n ' # depends on [control=['if'], data=[]]
if return_code:
return txt # depends on [control=['if'], data=[]]
else:
d = {}
exec(txt, d)
fun = d[funname]
if compile:
raise Exception('Not implemented.') # depends on [control=['if'], data=[]]
return fun |
def fft_bandpassfilter(data, fs, lowcut, highcut):
"""
http://www.swharden.com/blog/2009-01-21-signal-filtering-with-python/#comment-16801
"""
fft = np.fft.fft(data)
# n = len(data)
# timestep = 1.0 / fs
# freq = np.fft.fftfreq(n, d=timestep)
bp = fft.copy()
# Zero out fft coefficients
# bp[10:-10] = 0
# Normalise
# bp *= real(fft.dot(fft))/real(bp.dot(bp))
bp *= fft.dot(fft) / bp.dot(bp)
# must multipy by 2 to get the correct amplitude
ibp = 12 * np.fft.ifft(bp)
return ibp | def function[fft_bandpassfilter, parameter[data, fs, lowcut, highcut]]:
constant[
http://www.swharden.com/blog/2009-01-21-signal-filtering-with-python/#comment-16801
]
variable[fft] assign[=] call[name[np].fft.fft, parameter[name[data]]]
variable[bp] assign[=] call[name[fft].copy, parameter[]]
<ast.AugAssign object at 0x7da18dc04af0>
variable[ibp] assign[=] binary_operation[constant[12] * call[name[np].fft.ifft, parameter[name[bp]]]]
return[name[ibp]] | keyword[def] identifier[fft_bandpassfilter] ( identifier[data] , identifier[fs] , identifier[lowcut] , identifier[highcut] ):
literal[string]
identifier[fft] = identifier[np] . identifier[fft] . identifier[fft] ( identifier[data] )
identifier[bp] = identifier[fft] . identifier[copy] ()
identifier[bp] *= identifier[fft] . identifier[dot] ( identifier[fft] )/ identifier[bp] . identifier[dot] ( identifier[bp] )
identifier[ibp] = literal[int] * identifier[np] . identifier[fft] . identifier[ifft] ( identifier[bp] )
keyword[return] identifier[ibp] | def fft_bandpassfilter(data, fs, lowcut, highcut):
"""
http://www.swharden.com/blog/2009-01-21-signal-filtering-with-python/#comment-16801
"""
fft = np.fft.fft(data)
# n = len(data)
# timestep = 1.0 / fs
# freq = np.fft.fftfreq(n, d=timestep)
bp = fft.copy()
# Zero out fft coefficients
# bp[10:-10] = 0
# Normalise
# bp *= real(fft.dot(fft))/real(bp.dot(bp))
bp *= fft.dot(fft) / bp.dot(bp)
# must multipy by 2 to get the correct amplitude
ibp = 12 * np.fft.ifft(bp)
return ibp |
def _create_image_url(self, file_path, type_, target_size):
"""The the closest available size for specified image type.
Arguments:
file_path (:py:class:`str`): The image file path.
type_ (:py:class:`str`): The type of image to create a URL
for, (``'poster'`` or ``'profile'``).
target_size (:py:class:`int`): The size of image to aim for (used
as either width or height).
"""
if self.image_config is None:
logger.warning('no image configuration available')
return
return ''.join([
self.image_config['secure_base_url'],
self._image_size(self.image_config, type_, target_size),
file_path,
]) | def function[_create_image_url, parameter[self, file_path, type_, target_size]]:
constant[The the closest available size for specified image type.
Arguments:
file_path (:py:class:`str`): The image file path.
type_ (:py:class:`str`): The type of image to create a URL
for, (``'poster'`` or ``'profile'``).
target_size (:py:class:`int`): The size of image to aim for (used
as either width or height).
]
if compare[name[self].image_config is constant[None]] begin[:]
call[name[logger].warning, parameter[constant[no image configuration available]]]
return[None]
return[call[constant[].join, parameter[list[[<ast.Subscript object at 0x7da1b20d4070>, <ast.Call object at 0x7da1b20d4040>, <ast.Name object at 0x7da1b20d4910>]]]]] | keyword[def] identifier[_create_image_url] ( identifier[self] , identifier[file_path] , identifier[type_] , identifier[target_size] ):
literal[string]
keyword[if] identifier[self] . identifier[image_config] keyword[is] keyword[None] :
identifier[logger] . identifier[warning] ( literal[string] )
keyword[return]
keyword[return] literal[string] . identifier[join] ([
identifier[self] . identifier[image_config] [ literal[string] ],
identifier[self] . identifier[_image_size] ( identifier[self] . identifier[image_config] , identifier[type_] , identifier[target_size] ),
identifier[file_path] ,
]) | def _create_image_url(self, file_path, type_, target_size):
"""The the closest available size for specified image type.
Arguments:
file_path (:py:class:`str`): The image file path.
type_ (:py:class:`str`): The type of image to create a URL
for, (``'poster'`` or ``'profile'``).
target_size (:py:class:`int`): The size of image to aim for (used
as either width or height).
"""
if self.image_config is None:
logger.warning('no image configuration available')
return # depends on [control=['if'], data=[]]
return ''.join([self.image_config['secure_base_url'], self._image_size(self.image_config, type_, target_size), file_path]) |
def __snake_case(self, descriptor):
    """Convert the camelCase keys of *descriptor* to snake_case.

    Each uppercase character in a key is lowercased and, unless it is
    the first character of the key, prefixed with an underscore
    (e.g. ``maxSpeed`` -> ``max_speed``).  Values are copied unchanged.

    :param descriptor: the dictionary whose keys should be converted
    :return: a new dictionary with snake_case keys and the same values
    """
    newdict = {}
    # The original iterated with enumerate() twice but never used the
    # indices; plain iteration expresses the intent directly.
    for key, value in descriptor.items():
        newkey = ""
        for char in key:
            if char.isupper():
                if newkey:  # no leading underscore for an initial capital
                    newkey += "_"
                newkey += char.lower()
            else:
                newkey += char
        newdict[newkey] = value
    return newdict
constant[
Utility method to convert camelcase to snake
:param descriptor: The dictionary to convert
]
variable[newdict] assign[=] dictionary[[], []]
for taget[tuple[[<ast.Name object at 0x7da18bcc9060>, <ast.Tuple object at 0x7da18bcca980>]]] in starred[call[name[enumerate], parameter[call[name[descriptor].items, parameter[]]]]] begin[:]
variable[newkey] assign[=] constant[]
for taget[tuple[[<ast.Name object at 0x7da18c4cd210>, <ast.Name object at 0x7da18c4cc460>]]] in starred[call[name[enumerate], parameter[name[k]]]] begin[:]
if call[name[c].isupper, parameter[]] begin[:]
if compare[call[name[len], parameter[name[newkey]]] not_equal[!=] constant[0]] begin[:]
<ast.AugAssign object at 0x7da18c4cead0>
<ast.AugAssign object at 0x7da18c4cefe0>
call[name[newdict]][name[newkey]] assign[=] name[v]
return[name[newdict]] | keyword[def] identifier[__snake_case] ( identifier[self] , identifier[descriptor] ):
literal[string]
identifier[newdict] ={}
keyword[for] identifier[i] ,( identifier[k] , identifier[v] ) keyword[in] identifier[enumerate] ( identifier[descriptor] . identifier[items] ()):
identifier[newkey] = literal[string]
keyword[for] identifier[j] , identifier[c] keyword[in] identifier[enumerate] ( identifier[k] ):
keyword[if] identifier[c] . identifier[isupper] ():
keyword[if] identifier[len] ( identifier[newkey] )!= literal[int] :
identifier[newkey] += literal[string]
identifier[newkey] += identifier[c] . identifier[lower] ()
keyword[else] :
identifier[newkey] += identifier[c]
identifier[newdict] [ identifier[newkey] ]= identifier[v]
keyword[return] identifier[newdict] | def __snake_case(self, descriptor):
"""
Utility method to convert camelcase to snake
:param descriptor: The dictionary to convert
"""
newdict = {}
for (i, (k, v)) in enumerate(descriptor.items()):
newkey = ''
for (j, c) in enumerate(k):
if c.isupper():
if len(newkey) != 0:
newkey += '_' # depends on [control=['if'], data=[]]
newkey += c.lower() # depends on [control=['if'], data=[]]
else:
newkey += c # depends on [control=['for'], data=[]]
newdict[newkey] = v # depends on [control=['for'], data=[]]
return newdict |
def add(self, properties):
    """
    Add a faked Port resource.
    Parameters:
      properties (dict):
        Resource properties.
        Special handling and requirements for certain properties:
        * 'element-id' will be auto-generated with a unique value across
          all instances of this resource type, if not specified.
        * 'element-uri' will be auto-generated based upon the element ID,
          if not specified.
        * 'class' will be auto-generated to 'network-port' or
          'storage-port', if not specified.
    This method also updates the 'network-port-uris' or
    'storage-port-uris' property in the parent Adapter resource, by
    adding the URI for the faked Port resource.
    Returns:
      :class:`zhmcclient_mock.FakedPort`: The faked Port resource.
    """
    new_port = super(FakedPortManager, self).add(properties)
    adapter_props = self.parent.properties
    # The parent adapter carries exactly one of these URI-list
    # properties, depending on whether it is a network or a storage
    # adapter; register the new port with whichever list exists.
    for uris_prop in ('network-port-uris', 'storage-port-uris'):
        if uris_prop in adapter_props:
            adapter_props[uris_prop].append(new_port.uri)
    return new_port
constant[
Add a faked Port resource.
Parameters:
properties (dict):
Resource properties.
Special handling and requirements for certain properties:
* 'element-id' will be auto-generated with a unique value across
all instances of this resource type, if not specified.
* 'element-uri' will be auto-generated based upon the element ID,
if not specified.
* 'class' will be auto-generated to 'network-port' or
'storage-port', if not specified.
This method also updates the 'network-port-uris' or
'storage-port-uris' property in the parent Adapter resource, by
adding the URI for the faked Port resource.
Returns:
:class:`zhmcclient_mock.FakedPort`: The faked Port resource.
]
variable[new_port] assign[=] call[call[name[super], parameter[name[FakedPortManager], name[self]]].add, parameter[name[properties]]]
variable[adapter] assign[=] name[self].parent
if compare[constant[network-port-uris] in name[adapter].properties] begin[:]
call[call[name[adapter].properties][constant[network-port-uris]].append, parameter[name[new_port].uri]]
if compare[constant[storage-port-uris] in name[adapter].properties] begin[:]
call[call[name[adapter].properties][constant[storage-port-uris]].append, parameter[name[new_port].uri]]
return[name[new_port]] | keyword[def] identifier[add] ( identifier[self] , identifier[properties] ):
literal[string]
identifier[new_port] = identifier[super] ( identifier[FakedPortManager] , identifier[self] ). identifier[add] ( identifier[properties] )
identifier[adapter] = identifier[self] . identifier[parent]
keyword[if] literal[string] keyword[in] identifier[adapter] . identifier[properties] :
identifier[adapter] . identifier[properties] [ literal[string] ]. identifier[append] ( identifier[new_port] . identifier[uri] )
keyword[if] literal[string] keyword[in] identifier[adapter] . identifier[properties] :
identifier[adapter] . identifier[properties] [ literal[string] ]. identifier[append] ( identifier[new_port] . identifier[uri] )
keyword[return] identifier[new_port] | def add(self, properties):
"""
Add a faked Port resource.
Parameters:
properties (dict):
Resource properties.
Special handling and requirements for certain properties:
* 'element-id' will be auto-generated with a unique value across
all instances of this resource type, if not specified.
* 'element-uri' will be auto-generated based upon the element ID,
if not specified.
* 'class' will be auto-generated to 'network-port' or
'storage-port', if not specified.
This method also updates the 'network-port-uris' or
'storage-port-uris' property in the parent Adapter resource, by
adding the URI for the faked Port resource.
Returns:
:class:`zhmcclient_mock.FakedPort`: The faked Port resource.
"""
new_port = super(FakedPortManager, self).add(properties)
adapter = self.parent
if 'network-port-uris' in adapter.properties:
adapter.properties['network-port-uris'].append(new_port.uri) # depends on [control=['if'], data=[]]
if 'storage-port-uris' in adapter.properties:
adapter.properties['storage-port-uris'].append(new_port.uri) # depends on [control=['if'], data=[]]
return new_port |
def solveNetwork(self, Xgen, augYbus_solver, gbus):
    """ Solve the network equation for the bus voltages.

    Based on SolveNetwork.m from MatDyn by Stijn Cole, developed at
    Katholieke Universiteit Leuven. See U{http://www.esat.kuleuven.be/
    electa/teaching/matdyn/} for more information.

    @param Xgen: generator state matrix (column 0: rotor angle delta,
        column 2: Eq', column 3: Ed' for 4th-order machines)
    @param augYbus_solver: factorised augmented admittance matrix with
        a C{solve} method
    @param gbus: indices of the generator buses
    @rtype: array
    @return: Bus voltages.
    """
    generators = self.dyn_generators
    j = 0 + 1j
    ng = len(gbus)
    # BUGFIX: generator currents and injections are complex-valued.
    # zeros() without a dtype yields float64 arrays, into which the
    # complex assignments below would lose their imaginary part (or
    # raise a TypeError with recent numpy). Allocate complex arrays.
    Igen = zeros(ng, dtype=complex)
    s = len(augYbus_solver)
    Ig = zeros(s, dtype=complex)
    # Define generator types.
    typ1 = [g._i for g in generators if g.model == CLASSICAL]
    typ2 = [g._i for g in generators if g.model == FOURTH_ORDER]
    # Generator type 1: classical model
    delta = Xgen[typ1, 0]
    Eq_tr = Xgen[typ1, 2]
    xd_tr = array([g.xd_tr for g in generators])[typ1]
    # Calculate generator currents
    Igen[typ1] = (Eq_tr * exp(j * delta)) / (j * xd_tr)
    # Generator type 2: 4th order model
    delta = Xgen[typ2, 0]
    Eq_tr = Xgen[typ2, 2]
    Ed_tr = Xgen[typ2, 3]
    xd_tr = array([g.xd_tr for g in generators])[typ2]  # Pgen(type2,8)
    # Calculate generator currents. (Padiyar, p.417.)
    Igen[typ2] = (Eq_tr + j * Ed_tr) * exp(j * delta) / (j * xd_tr)
    # Calculations --------------------------------------------------------
    # Scatter the generator currents into the full injection vector.
    Ig[gbus] = Igen
    # Calculate network voltages: U = Y \ Ig
    U = augYbus_solver.solve(Ig)
    return U
constant[ Based on SolveNetwork.m from MatDyn by Stijn Cole, developed at
Katholieke Universiteit Leuven. See U{http://www.esat.kuleuven.be/
electa/teaching/matdyn/} for more information.
@rtype: array
@return: Bus voltages.
]
variable[generators] assign[=] name[self].dyn_generators
variable[j] assign[=] binary_operation[constant[0] + constant[1j]]
variable[ng] assign[=] call[name[len], parameter[name[gbus]]]
variable[Igen] assign[=] call[name[zeros], parameter[name[ng]]]
variable[s] assign[=] call[name[len], parameter[name[augYbus_solver]]]
variable[Ig] assign[=] call[name[zeros], parameter[name[s]]]
variable[typ1] assign[=] <ast.ListComp object at 0x7da20c6aac80>
variable[typ2] assign[=] <ast.ListComp object at 0x7da1b257d900>
variable[delta] assign[=] call[name[Xgen]][tuple[[<ast.Name object at 0x7da1b257d630>, <ast.Constant object at 0x7da1b257f190>]]]
variable[Eq_tr] assign[=] call[name[Xgen]][tuple[[<ast.Name object at 0x7da18ede44f0>, <ast.Constant object at 0x7da18ede5480>]]]
variable[xd_tr] assign[=] call[call[name[array], parameter[<ast.ListComp object at 0x7da18ede4d60>]]][name[typ1]]
call[name[Igen]][name[typ1]] assign[=] binary_operation[binary_operation[name[Eq_tr] * call[name[exp], parameter[binary_operation[name[j] * name[delta]]]]] / binary_operation[name[j] * name[xd_tr]]]
variable[delta] assign[=] call[name[Xgen]][tuple[[<ast.Name object at 0x7da1b2534430>, <ast.Constant object at 0x7da1b2534460>]]]
variable[Eq_tr] assign[=] call[name[Xgen]][tuple[[<ast.Name object at 0x7da1b25369e0>, <ast.Constant object at 0x7da1b25369b0>]]]
variable[Ed_tr] assign[=] call[name[Xgen]][tuple[[<ast.Name object at 0x7da1b2536890>, <ast.Constant object at 0x7da1b2536860>]]]
variable[xd_tr] assign[=] call[call[name[array], parameter[<ast.ListComp object at 0x7da1b2536740>]]][name[typ2]]
call[name[Igen]][name[typ2]] assign[=] binary_operation[binary_operation[binary_operation[name[Eq_tr] + binary_operation[name[j] * name[Ed_tr]]] * call[name[exp], parameter[binary_operation[name[j] * name[delta]]]]] / binary_operation[name[j] * name[xd_tr]]]
call[name[Ig]][name[gbus]] assign[=] name[Igen]
variable[U] assign[=] call[name[augYbus_solver].solve, parameter[name[Ig]]]
return[name[U]] | keyword[def] identifier[solveNetwork] ( identifier[self] , identifier[Xgen] , identifier[augYbus_solver] , identifier[gbus] ):
literal[string]
identifier[generators] = identifier[self] . identifier[dyn_generators]
identifier[j] = literal[int] + literal[int]
identifier[ng] = identifier[len] ( identifier[gbus] )
identifier[Igen] = identifier[zeros] ( identifier[ng] )
identifier[s] = identifier[len] ( identifier[augYbus_solver] )
identifier[Ig] = identifier[zeros] ( identifier[s] )
identifier[typ1] =[ identifier[g] . identifier[_i] keyword[for] identifier[g] keyword[in] identifier[generators] keyword[if] identifier[g] . identifier[model] == identifier[CLASSICAL] ]
identifier[typ2] =[ identifier[g] . identifier[_i] keyword[for] identifier[g] keyword[in] identifier[generators] keyword[if] identifier[g] . identifier[model] == identifier[FOURTH_ORDER] ]
identifier[delta] = identifier[Xgen] [ identifier[typ1] , literal[int] ]
identifier[Eq_tr] = identifier[Xgen] [ identifier[typ1] , literal[int] ]
identifier[xd_tr] = identifier[array] ([ identifier[g] . identifier[xd_tr] keyword[for] identifier[g] keyword[in] identifier[generators] ])[ identifier[typ1] ]
identifier[Igen] [ identifier[typ1] ]=( identifier[Eq_tr] * identifier[exp] ( identifier[j] * identifier[delta] ))/( identifier[j] * identifier[xd_tr] )
identifier[delta] = identifier[Xgen] [ identifier[typ2] , literal[int] ]
identifier[Eq_tr] = identifier[Xgen] [ identifier[typ2] , literal[int] ]
identifier[Ed_tr] = identifier[Xgen] [ identifier[typ2] , literal[int] ]
identifier[xd_tr] = identifier[array] ([ identifier[g] . identifier[xd_tr] keyword[for] identifier[g] keyword[in] identifier[generators] ])[ identifier[typ2] ]
identifier[Igen] [ identifier[typ2] ]=( identifier[Eq_tr] + identifier[j] * identifier[Ed_tr] )* identifier[exp] ( identifier[j] * identifier[delta] )/( identifier[j] * identifier[xd_tr] )
identifier[Ig] [ identifier[gbus] ]= identifier[Igen]
identifier[U] = identifier[augYbus_solver] . identifier[solve] ( identifier[Ig] )
keyword[return] identifier[U] | def solveNetwork(self, Xgen, augYbus_solver, gbus):
""" Based on SolveNetwork.m from MatDyn by Stijn Cole, developed at
Katholieke Universiteit Leuven. See U{http://www.esat.kuleuven.be/
electa/teaching/matdyn/} for more information.
@rtype: array
@return: Bus voltages.
"""
generators = self.dyn_generators
j = 0 + 1j
ng = len(gbus)
Igen = zeros(ng)
s = len(augYbus_solver)
Ig = zeros(s)
# Define generator types.
typ1 = [g._i for g in generators if g.model == CLASSICAL]
typ2 = [g._i for g in generators if g.model == FOURTH_ORDER]
# Generator type 1: classical model
delta = Xgen[typ1, 0]
Eq_tr = Xgen[typ1, 2]
xd_tr = array([g.xd_tr for g in generators])[typ1]
# Calculate generator currents
Igen[typ1] = Eq_tr * exp(j * delta) / (j * xd_tr)
# Generator type 2: 4th order model
delta = Xgen[typ2, 0]
Eq_tr = Xgen[typ2, 2]
Ed_tr = Xgen[typ2, 3]
xd_tr = array([g.xd_tr for g in generators])[typ2] # Pgen(type2,8)
# Calculate generator currents. (Padiyar, p.417.)
Igen[typ2] = (Eq_tr + j * Ed_tr) * exp(j * delta) / (j * xd_tr)
# Calculations --------------------------------------------------------
# Generator currents
Ig[gbus] = Igen
# Calculate network voltages: U = Y/Ig
U = augYbus_solver.solve(Ig)
return U |
def command_output(
    self, command, shell=False, capture_stderr=False, localized=False
):
    """
    Run a command and return its output as unicode.
    The command can either be supplied as a sequence or string.
    :param command: command to run can be a str or list
    :param shell: if `True` then command is run through the shell
    :param capture_stderr: if `True` then STDERR is piped to STDOUT
    :param localized: if `False` then command is forced to use its default (English) locale
    :return: the command's STDOUT (with STDERR merged in when
        ``capture_stderr`` is `True`), decoded to unicode
    A CommandError is raised if an error occurs
    """
    # make a pretty command for error loggings and...
    if isinstance(command, basestring):
        pretty_cmd = command
    else:
        pretty_cmd = " ".join(command)
    # convert the non-shell command to sequence if it is a string
    # (Popen without shell=True expects an argv-style sequence)
    if not shell and isinstance(command, basestring):
        command = shlex.split(command)
    # merging STDERR into STDOUT vs. capturing it separately
    stderr = STDOUT if capture_stderr else PIPE
    # force the English locale so output parsing is locale-independent,
    # unless the caller explicitly wants localized output
    env = self._english_env if not localized else None
    try:
        process = Popen(
            command,
            stdout=PIPE,
            stderr=stderr,
            close_fds=True,
            universal_newlines=True,
            shell=shell,
            env=env,
        )
    except Exception as e:
        # e.g. FileNotFoundError / OSError when the executable is missing
        msg = "Command `{cmd}` {error}".format(cmd=pretty_cmd, error=e)
        raise exceptions.CommandError(msg, error_code=e.errno)
    output, error = process.communicate()
    # On Python 2, communicate() may return bytes even with
    # universal_newlines; normalize to unicode.
    # NOTE(review): when capture_stderr is True, `error` is None here,
    # so `error.decode` would fail under Python 2 — confirm this path.
    if self._is_python_2 and isinstance(output, str):
        output = output.decode("utf-8")
        error = error.decode("utf-8")
    retcode = process.poll()
    if retcode:
        # under certain conditions a successfully run command may get a
        # return code of -15 even though correct output was returned see
        # #664. This issue seems to be related to arch linux but the
        # reason is not entirely clear.
        if retcode == -15:
            msg = "Command `{cmd}` returned SIGTERM (ignoring)"
            self.log(msg.format(cmd=pretty_cmd))
        else:
            msg = "Command `{cmd}` returned non-zero exit status {error}"
            # fold multi-line output into one line for the error message
            output_oneline = output.replace("\n", " ")
            if output_oneline:
                msg += " ({output})"
            msg = msg.format(cmd=pretty_cmd, error=retcode, output=output_oneline)
            raise exceptions.CommandError(
                msg, error_code=retcode, error=error, output=output
            )
    return output
constant[
Run a command and return its output as unicode.
The command can either be supplied as a sequence or string.
:param command: command to run can be a str or list
:param shell: if `True` then command is run through the shell
:param capture_stderr: if `True` then STDERR is piped to STDOUT
:param localized: if `False` then command is forced to use its default (English) locale
A CommandError is raised if an error occurs
]
if call[name[isinstance], parameter[name[command], name[basestring]]] begin[:]
variable[pretty_cmd] assign[=] name[command]
if <ast.BoolOp object at 0x7da18f8109a0> begin[:]
variable[command] assign[=] call[name[shlex].split, parameter[name[command]]]
variable[stderr] assign[=] <ast.IfExp object at 0x7da18f812b00>
variable[env] assign[=] <ast.IfExp object at 0x7da18f811150>
<ast.Try object at 0x7da18f813a90>
<ast.Tuple object at 0x7da1b1d0e320> assign[=] call[name[process].communicate, parameter[]]
if <ast.BoolOp object at 0x7da1b1d0e200> begin[:]
variable[output] assign[=] call[name[output].decode, parameter[constant[utf-8]]]
variable[error] assign[=] call[name[error].decode, parameter[constant[utf-8]]]
variable[retcode] assign[=] call[name[process].poll, parameter[]]
if name[retcode] begin[:]
if compare[name[retcode] equal[==] <ast.UnaryOp object at 0x7da1b1d0f940>] begin[:]
variable[msg] assign[=] constant[Command `{cmd}` returned SIGTERM (ignoring)]
call[name[self].log, parameter[call[name[msg].format, parameter[]]]]
return[name[output]] | keyword[def] identifier[command_output] (
identifier[self] , identifier[command] , identifier[shell] = keyword[False] , identifier[capture_stderr] = keyword[False] , identifier[localized] = keyword[False]
):
literal[string]
keyword[if] identifier[isinstance] ( identifier[command] , identifier[basestring] ):
identifier[pretty_cmd] = identifier[command]
keyword[else] :
identifier[pretty_cmd] = literal[string] . identifier[join] ( identifier[command] )
keyword[if] keyword[not] identifier[shell] keyword[and] identifier[isinstance] ( identifier[command] , identifier[basestring] ):
identifier[command] = identifier[shlex] . identifier[split] ( identifier[command] )
identifier[stderr] = identifier[STDOUT] keyword[if] identifier[capture_stderr] keyword[else] identifier[PIPE]
identifier[env] = identifier[self] . identifier[_english_env] keyword[if] keyword[not] identifier[localized] keyword[else] keyword[None]
keyword[try] :
identifier[process] = identifier[Popen] (
identifier[command] ,
identifier[stdout] = identifier[PIPE] ,
identifier[stderr] = identifier[stderr] ,
identifier[close_fds] = keyword[True] ,
identifier[universal_newlines] = keyword[True] ,
identifier[shell] = identifier[shell] ,
identifier[env] = identifier[env] ,
)
keyword[except] identifier[Exception] keyword[as] identifier[e] :
identifier[msg] = literal[string] . identifier[format] ( identifier[cmd] = identifier[pretty_cmd] , identifier[error] = identifier[e] )
keyword[raise] identifier[exceptions] . identifier[CommandError] ( identifier[msg] , identifier[error_code] = identifier[e] . identifier[errno] )
identifier[output] , identifier[error] = identifier[process] . identifier[communicate] ()
keyword[if] identifier[self] . identifier[_is_python_2] keyword[and] identifier[isinstance] ( identifier[output] , identifier[str] ):
identifier[output] = identifier[output] . identifier[decode] ( literal[string] )
identifier[error] = identifier[error] . identifier[decode] ( literal[string] )
identifier[retcode] = identifier[process] . identifier[poll] ()
keyword[if] identifier[retcode] :
keyword[if] identifier[retcode] ==- literal[int] :
identifier[msg] = literal[string]
identifier[self] . identifier[log] ( identifier[msg] . identifier[format] ( identifier[cmd] = identifier[pretty_cmd] ))
keyword[else] :
identifier[msg] = literal[string]
identifier[output_oneline] = identifier[output] . identifier[replace] ( literal[string] , literal[string] )
keyword[if] identifier[output_oneline] :
identifier[msg] += literal[string]
identifier[msg] = identifier[msg] . identifier[format] ( identifier[cmd] = identifier[pretty_cmd] , identifier[error] = identifier[retcode] , identifier[output] = identifier[output_oneline] )
keyword[raise] identifier[exceptions] . identifier[CommandError] (
identifier[msg] , identifier[error_code] = identifier[retcode] , identifier[error] = identifier[error] , identifier[output] = identifier[output]
)
keyword[return] identifier[output] | def command_output(self, command, shell=False, capture_stderr=False, localized=False):
"""
Run a command and return its output as unicode.
The command can either be supplied as a sequence or string.
:param command: command to run can be a str or list
:param shell: if `True` then command is run through the shell
:param capture_stderr: if `True` then STDERR is piped to STDOUT
:param localized: if `False` then command is forced to use its default (English) locale
A CommandError is raised if an error occurs
"""
# make a pretty command for error loggings and...
if isinstance(command, basestring):
pretty_cmd = command # depends on [control=['if'], data=[]]
else:
pretty_cmd = ' '.join(command)
# convert the non-shell command to sequence if it is a string
if not shell and isinstance(command, basestring):
command = shlex.split(command) # depends on [control=['if'], data=[]]
stderr = STDOUT if capture_stderr else PIPE
env = self._english_env if not localized else None
try:
process = Popen(command, stdout=PIPE, stderr=stderr, close_fds=True, universal_newlines=True, shell=shell, env=env) # depends on [control=['try'], data=[]]
except Exception as e:
msg = 'Command `{cmd}` {error}'.format(cmd=pretty_cmd, error=e)
raise exceptions.CommandError(msg, error_code=e.errno) # depends on [control=['except'], data=['e']]
(output, error) = process.communicate()
if self._is_python_2 and isinstance(output, str):
output = output.decode('utf-8')
error = error.decode('utf-8') # depends on [control=['if'], data=[]]
retcode = process.poll()
if retcode:
# under certain conditions a successfully run command may get a
# return code of -15 even though correct output was returned see
# #664. This issue seems to be related to arch linux but the
# reason is not entirely clear.
if retcode == -15:
msg = 'Command `{cmd}` returned SIGTERM (ignoring)'
self.log(msg.format(cmd=pretty_cmd)) # depends on [control=['if'], data=[]]
else:
msg = 'Command `{cmd}` returned non-zero exit status {error}'
output_oneline = output.replace('\n', ' ')
if output_oneline:
msg += ' ({output})' # depends on [control=['if'], data=[]]
msg = msg.format(cmd=pretty_cmd, error=retcode, output=output_oneline)
raise exceptions.CommandError(msg, error_code=retcode, error=error, output=output) # depends on [control=['if'], data=[]]
return output |
def get_nodes(self, request):
    """
    Generates the nodelist for the blog menu.

    Depending on the apphook config ``menu_structure``, the list
    contains category nodes, post nodes (parented to their first
    category when both are shown), or nothing at all.

    :param request: the current request (used for language, site and
        toolbar edit-mode detection)
    :return: list of NavigationNode instances
    """
    nodes = []
    language = get_language_from_request(request, check_path=True)
    current_site = get_current_site(request)
    # BUGFIX: only dereference self.instance after checking it is set;
    # the original read self.instance.node.site unconditionally, which
    # raises AttributeError when the menu is rendered without an
    # apphooked page instance.
    if self.instance and self.instance.node.site != current_site:
        return []
    categories_menu = False
    posts_menu = False
    config = False
    if self.instance:
        # cache the BlogConfig lookup per application namespace
        if not self._config.get(self.instance.application_namespace, False):
            self._config[self.instance.application_namespace] = BlogConfig.objects.get(
                namespace=self.instance.application_namespace
            )
        config = self._config[self.instance.application_namespace]
        # outside toolbar edit mode only the public page builds the menu;
        # in edit mode only the draft page does
        if not getattr(request, 'toolbar', False) or not request.toolbar.edit_mode_active:
            if self.instance == self.instance.get_draft_object():
                return []
        else:
            if self.instance == self.instance.get_public_object():
                return []
    if config and config.menu_structure in (MENU_TYPE_COMPLETE, MENU_TYPE_CATEGORIES):
        categories_menu = True
    if config and config.menu_structure in (MENU_TYPE_COMPLETE, MENU_TYPE_POSTS):
        posts_menu = True
    if config and config.menu_structure in (MENU_TYPE_NONE, ):
        return nodes
    used_categories = []
    if posts_menu:
        posts = Post.objects
        if hasattr(self, 'instance') and self.instance:
            posts = posts.namespace(self.instance.application_namespace).on_site()
        posts = posts.active_translations(language).distinct().\
            select_related('app_config').prefetch_related('translations', 'categories')
        for post in posts:
            post_id = None
            parent = None
            used_categories.extend(post.categories.values_list('pk', flat=True))
            if categories_menu:
                category = post.categories.first()
                if category:
                    # parent the post node under its first category node
                    parent = '{0}-{1}'.format(category.__class__.__name__, category.pk)
                    # BUGFIX: the original assignments ended with a stray
                    # trailing comma, turning post_id into a 1-tuple
                    # instead of the 'Post-<pk>' string and breaking
                    # node-id / parent-id matching.
                    post_id = '{0}-{1}'.format(post.__class__.__name__, post.pk)
            else:
                post_id = '{0}-{1}'.format(post.__class__.__name__, post.pk)
            if post_id:
                node = NavigationNode(
                    post.get_title(),
                    post.get_absolute_url(language),
                    post_id,
                    parent
                )
                nodes.append(node)
    if categories_menu:
        categories = BlogCategory.objects
        if config:
            categories = categories.namespace(self.instance.application_namespace)
        if config and not config.menu_empty_categories:
            # hide categories that have no posts in the current menu
            categories = categories.active_translations(language).filter(
                pk__in=used_categories
            ).distinct()
        else:
            categories = categories.active_translations(language).distinct()
        categories = categories.order_by('parent__id', 'translations__name').\
            select_related('app_config').prefetch_related('translations')
        added_categories = []
        for category in categories:
            # guard against duplicates produced by the translation joins
            if category.pk not in added_categories:
                node = NavigationNode(
                    category.name,
                    category.get_absolute_url(),
                    '{0}-{1}'.format(category.__class__.__name__, category.pk),
                    (
                        '{0}-{1}'.format(
                            category.__class__.__name__, category.parent.id
                        ) if category.parent else None
                    )
                )
                nodes.append(node)
                added_categories.append(category.pk)
    return nodes
constant[
Generates the nodelist
:param request:
:return: list of nodes
]
variable[nodes] assign[=] list[[]]
variable[language] assign[=] call[name[get_language_from_request], parameter[name[request]]]
variable[current_site] assign[=] call[name[get_current_site], parameter[name[request]]]
variable[page_site] assign[=] name[self].instance.node.site
if <ast.BoolOp object at 0x7da1b11a1630> begin[:]
return[list[[]]]
variable[categories_menu] assign[=] constant[False]
variable[posts_menu] assign[=] constant[False]
variable[config] assign[=] constant[False]
if name[self].instance begin[:]
if <ast.UnaryOp object at 0x7da1b101b4c0> begin[:]
call[name[self]._config][name[self].instance.application_namespace] assign[=] call[name[BlogConfig].objects.get, parameter[]]
variable[config] assign[=] call[name[self]._config][name[self].instance.application_namespace]
if <ast.BoolOp object at 0x7da1b101bc10> begin[:]
if compare[name[self].instance equal[==] call[name[self].instance.get_draft_object, parameter[]]] begin[:]
return[list[[]]]
if <ast.BoolOp object at 0x7da1b1019570> begin[:]
variable[categories_menu] assign[=] constant[True]
if <ast.BoolOp object at 0x7da1b101add0> begin[:]
variable[posts_menu] assign[=] constant[True]
if <ast.BoolOp object at 0x7da1b101a3b0> begin[:]
return[name[nodes]]
variable[used_categories] assign[=] list[[]]
if name[posts_menu] begin[:]
variable[posts] assign[=] name[Post].objects
if <ast.BoolOp object at 0x7da1b101b6a0> begin[:]
variable[posts] assign[=] call[call[name[posts].namespace, parameter[name[self].instance.application_namespace]].on_site, parameter[]]
variable[posts] assign[=] call[call[call[call[name[posts].active_translations, parameter[name[language]]].distinct, parameter[]].select_related, parameter[constant[app_config]]].prefetch_related, parameter[constant[translations], constant[categories]]]
for taget[name[post]] in starred[name[posts]] begin[:]
variable[post_id] assign[=] constant[None]
variable[parent] assign[=] constant[None]
call[name[used_categories].extend, parameter[call[name[post].categories.values_list, parameter[constant[pk]]]]]
if name[categories_menu] begin[:]
variable[category] assign[=] call[name[post].categories.first, parameter[]]
if name[category] begin[:]
variable[parent] assign[=] call[constant[{0}-{1}].format, parameter[name[category].__class__.__name__, name[category].pk]]
variable[post_id] assign[=] tuple[[<ast.Call object at 0x7da1b101bdc0>]]
if name[post_id] begin[:]
variable[node] assign[=] call[name[NavigationNode], parameter[call[name[post].get_title, parameter[]], call[name[post].get_absolute_url, parameter[name[language]]], name[post_id], name[parent]]]
call[name[nodes].append, parameter[name[node]]]
if name[categories_menu] begin[:]
variable[categories] assign[=] name[BlogCategory].objects
if name[config] begin[:]
variable[categories] assign[=] call[name[categories].namespace, parameter[name[self].instance.application_namespace]]
if <ast.BoolOp object at 0x7da1b106d150> begin[:]
variable[categories] assign[=] call[call[call[name[categories].active_translations, parameter[name[language]]].filter, parameter[]].distinct, parameter[]]
variable[categories] assign[=] call[call[call[name[categories].order_by, parameter[constant[parent__id], constant[translations__name]]].select_related, parameter[constant[app_config]]].prefetch_related, parameter[constant[translations]]]
variable[added_categories] assign[=] list[[]]
for taget[name[category]] in starred[name[categories]] begin[:]
if compare[name[category].pk <ast.NotIn object at 0x7da2590d7190> name[added_categories]] begin[:]
variable[node] assign[=] call[name[NavigationNode], parameter[name[category].name, call[name[category].get_absolute_url, parameter[]], call[constant[{0}-{1}].format, parameter[name[category].__class__.__name__, name[category].pk]], <ast.IfExp object at 0x7da1b106e5c0>]]
call[name[nodes].append, parameter[name[node]]]
call[name[added_categories].append, parameter[name[category].pk]]
return[name[nodes]] | keyword[def] identifier[get_nodes] ( identifier[self] , identifier[request] ):
literal[string]
identifier[nodes] =[]
identifier[language] = identifier[get_language_from_request] ( identifier[request] , identifier[check_path] = keyword[True] )
identifier[current_site] = identifier[get_current_site] ( identifier[request] )
identifier[page_site] = identifier[self] . identifier[instance] . identifier[node] . identifier[site]
keyword[if] identifier[self] . identifier[instance] keyword[and] identifier[page_site] != identifier[current_site] :
keyword[return] []
identifier[categories_menu] = keyword[False]
identifier[posts_menu] = keyword[False]
identifier[config] = keyword[False]
keyword[if] identifier[self] . identifier[instance] :
keyword[if] keyword[not] identifier[self] . identifier[_config] . identifier[get] ( identifier[self] . identifier[instance] . identifier[application_namespace] , keyword[False] ):
identifier[self] . identifier[_config] [ identifier[self] . identifier[instance] . identifier[application_namespace] ]= identifier[BlogConfig] . identifier[objects] . identifier[get] (
identifier[namespace] = identifier[self] . identifier[instance] . identifier[application_namespace]
)
identifier[config] = identifier[self] . identifier[_config] [ identifier[self] . identifier[instance] . identifier[application_namespace] ]
keyword[if] keyword[not] identifier[getattr] ( identifier[request] , literal[string] , keyword[False] ) keyword[or] keyword[not] identifier[request] . identifier[toolbar] . identifier[edit_mode_active] :
keyword[if] identifier[self] . identifier[instance] == identifier[self] . identifier[instance] . identifier[get_draft_object] ():
keyword[return] []
keyword[else] :
keyword[if] identifier[self] . identifier[instance] == identifier[self] . identifier[instance] . identifier[get_public_object] ():
keyword[return] []
keyword[if] identifier[config] keyword[and] identifier[config] . identifier[menu_structure] keyword[in] ( identifier[MENU_TYPE_COMPLETE] , identifier[MENU_TYPE_CATEGORIES] ):
identifier[categories_menu] = keyword[True]
keyword[if] identifier[config] keyword[and] identifier[config] . identifier[menu_structure] keyword[in] ( identifier[MENU_TYPE_COMPLETE] , identifier[MENU_TYPE_POSTS] ):
identifier[posts_menu] = keyword[True]
keyword[if] identifier[config] keyword[and] identifier[config] . identifier[menu_structure] keyword[in] ( identifier[MENU_TYPE_NONE] ,):
keyword[return] identifier[nodes]
identifier[used_categories] =[]
keyword[if] identifier[posts_menu] :
identifier[posts] = identifier[Post] . identifier[objects]
keyword[if] identifier[hasattr] ( identifier[self] , literal[string] ) keyword[and] identifier[self] . identifier[instance] :
identifier[posts] = identifier[posts] . identifier[namespace] ( identifier[self] . identifier[instance] . identifier[application_namespace] ). identifier[on_site] ()
identifier[posts] = identifier[posts] . identifier[active_translations] ( identifier[language] ). identifier[distinct] (). identifier[select_related] ( literal[string] ). identifier[prefetch_related] ( literal[string] , literal[string] )
keyword[for] identifier[post] keyword[in] identifier[posts] :
identifier[post_id] = keyword[None]
identifier[parent] = keyword[None]
identifier[used_categories] . identifier[extend] ( identifier[post] . identifier[categories] . identifier[values_list] ( literal[string] , identifier[flat] = keyword[True] ))
keyword[if] identifier[categories_menu] :
identifier[category] = identifier[post] . identifier[categories] . identifier[first] ()
keyword[if] identifier[category] :
identifier[parent] = literal[string] . identifier[format] ( identifier[category] . identifier[__class__] . identifier[__name__] , identifier[category] . identifier[pk] )
identifier[post_id] = literal[string] . identifier[format] ( identifier[post] . identifier[__class__] . identifier[__name__] , identifier[post] . identifier[pk] ),
keyword[else] :
identifier[post_id] = literal[string] . identifier[format] ( identifier[post] . identifier[__class__] . identifier[__name__] , identifier[post] . identifier[pk] ),
keyword[if] identifier[post_id] :
identifier[node] = identifier[NavigationNode] (
identifier[post] . identifier[get_title] (),
identifier[post] . identifier[get_absolute_url] ( identifier[language] ),
identifier[post_id] ,
identifier[parent]
)
identifier[nodes] . identifier[append] ( identifier[node] )
keyword[if] identifier[categories_menu] :
identifier[categories] = identifier[BlogCategory] . identifier[objects]
keyword[if] identifier[config] :
identifier[categories] = identifier[categories] . identifier[namespace] ( identifier[self] . identifier[instance] . identifier[application_namespace] )
keyword[if] identifier[config] keyword[and] keyword[not] identifier[config] . identifier[menu_empty_categories] :
identifier[categories] = identifier[categories] . identifier[active_translations] ( identifier[language] ). identifier[filter] (
identifier[pk__in] = identifier[used_categories]
). identifier[distinct] ()
keyword[else] :
identifier[categories] = identifier[categories] . identifier[active_translations] ( identifier[language] ). identifier[distinct] ()
identifier[categories] = identifier[categories] . identifier[order_by] ( literal[string] , literal[string] ). identifier[select_related] ( literal[string] ). identifier[prefetch_related] ( literal[string] )
identifier[added_categories] =[]
keyword[for] identifier[category] keyword[in] identifier[categories] :
keyword[if] identifier[category] . identifier[pk] keyword[not] keyword[in] identifier[added_categories] :
identifier[node] = identifier[NavigationNode] (
identifier[category] . identifier[name] ,
identifier[category] . identifier[get_absolute_url] (),
literal[string] . identifier[format] ( identifier[category] . identifier[__class__] . identifier[__name__] , identifier[category] . identifier[pk] ),
(
literal[string] . identifier[format] (
identifier[category] . identifier[__class__] . identifier[__name__] , identifier[category] . identifier[parent] . identifier[id]
) keyword[if] identifier[category] . identifier[parent] keyword[else] keyword[None]
)
)
identifier[nodes] . identifier[append] ( identifier[node] )
identifier[added_categories] . identifier[append] ( identifier[category] . identifier[pk] )
keyword[return] identifier[nodes] | def get_nodes(self, request):
"""
Generates the nodelist
:param request:
:return: list of nodes
"""
nodes = []
language = get_language_from_request(request, check_path=True)
current_site = get_current_site(request)
page_site = self.instance.node.site
if self.instance and page_site != current_site:
return [] # depends on [control=['if'], data=[]]
categories_menu = False
posts_menu = False
config = False
if self.instance:
if not self._config.get(self.instance.application_namespace, False):
self._config[self.instance.application_namespace] = BlogConfig.objects.get(namespace=self.instance.application_namespace) # depends on [control=['if'], data=[]]
config = self._config[self.instance.application_namespace]
if not getattr(request, 'toolbar', False) or not request.toolbar.edit_mode_active:
if self.instance == self.instance.get_draft_object():
return [] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
elif self.instance == self.instance.get_public_object():
return [] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
if config and config.menu_structure in (MENU_TYPE_COMPLETE, MENU_TYPE_CATEGORIES):
categories_menu = True # depends on [control=['if'], data=[]]
if config and config.menu_structure in (MENU_TYPE_COMPLETE, MENU_TYPE_POSTS):
posts_menu = True # depends on [control=['if'], data=[]]
if config and config.menu_structure in (MENU_TYPE_NONE,):
return nodes # depends on [control=['if'], data=[]]
used_categories = []
if posts_menu:
posts = Post.objects
if hasattr(self, 'instance') and self.instance:
posts = posts.namespace(self.instance.application_namespace).on_site() # depends on [control=['if'], data=[]]
posts = posts.active_translations(language).distinct().select_related('app_config').prefetch_related('translations', 'categories')
for post in posts:
post_id = None
parent = None
used_categories.extend(post.categories.values_list('pk', flat=True))
if categories_menu:
category = post.categories.first()
if category:
parent = '{0}-{1}'.format(category.__class__.__name__, category.pk)
post_id = ('{0}-{1}'.format(post.__class__.__name__, post.pk),) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
else:
post_id = ('{0}-{1}'.format(post.__class__.__name__, post.pk),)
if post_id:
node = NavigationNode(post.get_title(), post.get_absolute_url(language), post_id, parent)
nodes.append(node) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['post']] # depends on [control=['if'], data=[]]
if categories_menu:
categories = BlogCategory.objects
if config:
categories = categories.namespace(self.instance.application_namespace) # depends on [control=['if'], data=[]]
if config and (not config.menu_empty_categories):
categories = categories.active_translations(language).filter(pk__in=used_categories).distinct() # depends on [control=['if'], data=[]]
else:
categories = categories.active_translations(language).distinct()
categories = categories.order_by('parent__id', 'translations__name').select_related('app_config').prefetch_related('translations')
added_categories = []
for category in categories:
if category.pk not in added_categories:
node = NavigationNode(category.name, category.get_absolute_url(), '{0}-{1}'.format(category.__class__.__name__, category.pk), '{0}-{1}'.format(category.__class__.__name__, category.parent.id) if category.parent else None)
nodes.append(node)
added_categories.append(category.pk) # depends on [control=['if'], data=['added_categories']] # depends on [control=['for'], data=['category']] # depends on [control=['if'], data=[]]
return nodes |
def main_update(self):
    """
    Main function called by the updater thread.

    Loops forever: calls ``self.update()`` once per ``self.refresh``
    seconds, then commits the values. Any exception is stored in
    ``self.error`` and re-raised, terminating the thread.
    Direct call is unnecessary.
    """
    # Renice the updater thread to limit overload.
    try:
        os.nice(1)
    except AttributeError:
        # os.nice is not available on windows
        pass
    time.sleep(self.refresh)
    try:
        while True:
            # Take the timestamp first so the time spent inside update()
            # is accounted for when computing the next sleep.
            timestamp = time.time()
            # Update data with user's defined function.
            self.update()
            # We use this trick because we cannot use signals in backoffice
            # threads and alarm() messes up readline() in the main thread.
            delay = (timestamp + self.refresh) - time.time()
            if delay > 0:
                # Never sleep longer than one refresh period, even if the
                # clock jumped between the two time.time() calls.
                time.sleep(min(delay, self.refresh))
            # Commit changes exactly every 'refresh' seconds, however long
            # update() takes. Committed values are a bit old, but for RRD,
            # punctual values are better than
            # fresh-but-not-time-constant values.
            self.commit()
    except Exception as e:
        # Record the failure so other threads can see why the updater died.
        self.error = e
        raise
constant[
Main function called by the updater thread.
Direct call is unnecessary.
]
<ast.Try object at 0x7da20c796020>
call[name[time].sleep, parameter[name[self].refresh]]
<ast.Try object at 0x7da20c795570> | keyword[def] identifier[main_update] ( identifier[self] ):
literal[string]
keyword[try] :
identifier[os] . identifier[nice] ( literal[int] )
keyword[except] identifier[AttributeError] keyword[as] identifier[er] :
keyword[pass]
identifier[time] . identifier[sleep] ( identifier[self] . identifier[refresh] )
keyword[try] :
keyword[while] keyword[True] :
identifier[timestamp] = identifier[time] . identifier[time] ()
identifier[self] . identifier[update] ()
identifier[delay] =( identifier[timestamp] + identifier[self] . identifier[refresh] )- identifier[time] . identifier[time] ()
keyword[if] identifier[delay] > literal[int] :
keyword[if] identifier[delay] > identifier[self] . identifier[refresh] :
identifier[time] . identifier[sleep] ( identifier[self] . identifier[refresh] )
keyword[else] :
identifier[time] . identifier[sleep] ( identifier[delay] )
identifier[self] . identifier[commit] ()
keyword[except] identifier[Exception] keyword[as] identifier[e] :
identifier[self] . identifier[error] = identifier[e]
keyword[raise] | def main_update(self):
"""
Main function called by the updater thread.
Direct call is unnecessary.
""" # Renice updater thread to limit overload
try:
os.nice(1) # depends on [control=['try'], data=[]]
except AttributeError as er:
pass # os.nice is not available on windows # depends on [control=['except'], data=[]]
time.sleep(self.refresh)
try:
while True: # We pick a timestamp to take in account the time used by update()
timestamp = time.time() # Update data with user's defined function
self.update() # We use this trick because we cannot use signals in a backoffice threads
# and alarm() mess up with readline() in the main thread.
delay = timestamp + self.refresh - time.time()
if delay > 0:
if delay > self.refresh:
time.sleep(self.refresh) # depends on [control=['if'], data=[]]
else:
time.sleep(delay) # depends on [control=['if'], data=['delay']] # Commit change exactly every 'refresh' seconds, whatever update() takes long.
# Commited values are a bit old, but for RRD, punctuals values
# are better than fresh-but-not-time-constants values.
self.commit() # depends on [control=['while'], data=[]] # depends on [control=['try'], data=[]]
except Exception as e:
self.error = e
raise # depends on [control=['except'], data=['e']] |
def _SetAllFieldTypes(self, package, desc_proto, scope):
    """Sets all the descriptor's fields's types.

    This method also sets the containing types on any extensions, then
    recurses into every nested message type.

    Args:
      package: The current package of desc_proto.
      desc_proto: The message descriptor to update.
      scope: Enclosing scope of available types.
    """
    package = _PrefixWithDot(package)
    main_desc = self._GetTypeFromScope(package, desc_proto.name, scope)
    # Compute the fully-qualified package of the nested types, avoiding a
    # leading '..' when we are at the root package.
    nested_package = (_PrefixWithDot(desc_proto.name) if package == '.'
                      else '.'.join([package, desc_proto.name]))
    # Resolve the type of every regular field.
    for proto, desc in zip(desc_proto.field, main_desc.fields):
        self._SetFieldType(proto, desc, nested_package, scope)
    # Extensions additionally need their containing (extended) type set.
    for ext_proto, ext_desc in zip(desc_proto.extension, main_desc.extensions):
        ext_desc.containing_type = self._GetTypeFromScope(
            nested_package, ext_proto.extendee, scope)
        self._SetFieldType(ext_proto, ext_desc, nested_package, scope)
    # Recurse into nested message types.
    for child in desc_proto.nested_type:
        self._SetAllFieldTypes(nested_package, child, scope)
constant[Sets all the descriptor's fields's types.
This method also sets the containing types on any extensions.
Args:
package: The current package of desc_proto.
desc_proto: The message descriptor to update.
scope: Enclosing scope of available types.
]
variable[package] assign[=] call[name[_PrefixWithDot], parameter[name[package]]]
variable[main_desc] assign[=] call[name[self]._GetTypeFromScope, parameter[name[package], name[desc_proto].name, name[scope]]]
if compare[name[package] equal[==] constant[.]] begin[:]
variable[nested_package] assign[=] call[name[_PrefixWithDot], parameter[name[desc_proto].name]]
for taget[tuple[[<ast.Name object at 0x7da204960ca0>, <ast.Name object at 0x7da204960e50>]]] in starred[call[name[zip], parameter[name[desc_proto].field, name[main_desc].fields]]] begin[:]
call[name[self]._SetFieldType, parameter[name[field_proto], name[field_desc], name[nested_package], name[scope]]]
for taget[tuple[[<ast.Name object at 0x7da2049621a0>, <ast.Name object at 0x7da204963e20>]]] in starred[call[name[zip], parameter[name[desc_proto].extension, name[main_desc].extensions]]] begin[:]
name[extension_desc].containing_type assign[=] call[name[self]._GetTypeFromScope, parameter[name[nested_package], name[extension_proto].extendee, name[scope]]]
call[name[self]._SetFieldType, parameter[name[extension_proto], name[extension_desc], name[nested_package], name[scope]]]
for taget[name[nested_type]] in starred[name[desc_proto].nested_type] begin[:]
call[name[self]._SetAllFieldTypes, parameter[name[nested_package], name[nested_type], name[scope]]] | keyword[def] identifier[_SetAllFieldTypes] ( identifier[self] , identifier[package] , identifier[desc_proto] , identifier[scope] ):
literal[string]
identifier[package] = identifier[_PrefixWithDot] ( identifier[package] )
identifier[main_desc] = identifier[self] . identifier[_GetTypeFromScope] ( identifier[package] , identifier[desc_proto] . identifier[name] , identifier[scope] )
keyword[if] identifier[package] == literal[string] :
identifier[nested_package] = identifier[_PrefixWithDot] ( identifier[desc_proto] . identifier[name] )
keyword[else] :
identifier[nested_package] = literal[string] . identifier[join] ([ identifier[package] , identifier[desc_proto] . identifier[name] ])
keyword[for] identifier[field_proto] , identifier[field_desc] keyword[in] identifier[zip] ( identifier[desc_proto] . identifier[field] , identifier[main_desc] . identifier[fields] ):
identifier[self] . identifier[_SetFieldType] ( identifier[field_proto] , identifier[field_desc] , identifier[nested_package] , identifier[scope] )
keyword[for] identifier[extension_proto] , identifier[extension_desc] keyword[in] (
identifier[zip] ( identifier[desc_proto] . identifier[extension] , identifier[main_desc] . identifier[extensions] )):
identifier[extension_desc] . identifier[containing_type] = identifier[self] . identifier[_GetTypeFromScope] (
identifier[nested_package] , identifier[extension_proto] . identifier[extendee] , identifier[scope] )
identifier[self] . identifier[_SetFieldType] ( identifier[extension_proto] , identifier[extension_desc] , identifier[nested_package] , identifier[scope] )
keyword[for] identifier[nested_type] keyword[in] identifier[desc_proto] . identifier[nested_type] :
identifier[self] . identifier[_SetAllFieldTypes] ( identifier[nested_package] , identifier[nested_type] , identifier[scope] ) | def _SetAllFieldTypes(self, package, desc_proto, scope):
"""Sets all the descriptor's fields's types.
This method also sets the containing types on any extensions.
Args:
package: The current package of desc_proto.
desc_proto: The message descriptor to update.
scope: Enclosing scope of available types.
"""
package = _PrefixWithDot(package)
main_desc = self._GetTypeFromScope(package, desc_proto.name, scope)
if package == '.':
nested_package = _PrefixWithDot(desc_proto.name) # depends on [control=['if'], data=[]]
else:
nested_package = '.'.join([package, desc_proto.name])
for (field_proto, field_desc) in zip(desc_proto.field, main_desc.fields):
self._SetFieldType(field_proto, field_desc, nested_package, scope) # depends on [control=['for'], data=[]]
for (extension_proto, extension_desc) in zip(desc_proto.extension, main_desc.extensions):
extension_desc.containing_type = self._GetTypeFromScope(nested_package, extension_proto.extendee, scope)
self._SetFieldType(extension_proto, extension_desc, nested_package, scope) # depends on [control=['for'], data=[]]
for nested_type in desc_proto.nested_type:
self._SetAllFieldTypes(nested_package, nested_type, scope) # depends on [control=['for'], data=['nested_type']] |
def set_metric_ids(self, key, metric_ids):
    """
    Store the list of metric IDs we will want to collect for the given instance key
    """
    # Guard the shared mapping so concurrent readers/writers stay consistent.
    self._lock.acquire()
    try:
        self._metric_ids[key] = metric_ids
    finally:
        self._lock.release()
constant[
Store the list of metric IDs we will want to collect for the given instance key
]
with name[self]._lock begin[:]
call[name[self]._metric_ids][name[key]] assign[=] name[metric_ids] | keyword[def] identifier[set_metric_ids] ( identifier[self] , identifier[key] , identifier[metric_ids] ):
literal[string]
keyword[with] identifier[self] . identifier[_lock] :
identifier[self] . identifier[_metric_ids] [ identifier[key] ]= identifier[metric_ids] | def set_metric_ids(self, key, metric_ids):
"""
Store the list of metric IDs we will want to collect for the given instance key
"""
with self._lock:
self._metric_ids[key] = metric_ids # depends on [control=['with'], data=[]] |
def update(self, dict_name, mapping=None, priorities=None, expire=None,
           locks=None):
    '''Add mapping to a dictionary, replacing previous values

    Can be called with only dict_name and expire to refresh the
    expiration time.

    NB: locks are only enforced if present, so nothing prevents
    another caller from coming in an modifying data without using
    locks.

    :param mapping: a dict of keys and values to update in
    dict_name. Must be specified if priorities is specified.

    :param priorities: a dict with the same keys as those in
    mapping that provides a numerical value indicating the
    priority to assign to that key.  Default sets 0 for all keys.

    :param int expire: if specified, then dict_name will be set to
    expire in that many seconds.

    :param locks: a dict with the same keys as those in the
    mapping.  Before making any particular update, this function
    checks if a key is present in a 'locks' table for this dict,
    and if so, then its value must match the value provided in the
    input locks dict for that key.  If not, then the value
    provided in the locks dict is inserted into the 'locks' table.
    If the locks parameter is None, then no lock checking is
    performed.

    :raises ProgrammerError: if the session lock has not been acquired,
    or if expire is neither None nor an int.
    :raises EnvironmentError: if the session lock was lost, or if a
    per-key lock did not match.
    '''
    # The session lock must already be held; the Lua script re-checks the
    # lock identifier server-side before touching any data.
    if self._session_lock_identifier is None:
        raise ProgrammerError('must acquire lock first')
    if priorities is None:
        ## set all priorities to zero
        priorities = defaultdict(int)
    if locks is None:
        ## no lock checking: every key maps to the empty string, which the
        ## script encodes as 'j:""' and skips in the per-key lock check
        locks = defaultdict(lambda: '')
    if not (expire is None or isinstance(expire, int)):
        raise ProgrammerError('expire must be int or unspecified')
    conn = redis.Redis(connection_pool=self.pool)
    # Atomic server-side update. ARGV layout after the two fixed args
    # (session id, expire) is a 4-stride walk: key, value, priority, lock.
    # KEYS: [1] session lock name, [2] hash of key->value,
    #       [3] priority zset, [4] per-key locks hash.
    script = conn.register_script('''
    if redis.call("get", KEYS[1]) == ARGV[1]
    then
        for i = 3, #ARGV, 4  do
            if ARGV[i+3] ~= 'j:""' then
                local curr_lock = redis.call("hget", KEYS[4], ARGV[i])
                if curr_lock and curr_lock ~= ARGV[i+3] then
                    return {-1, ARGV[i], curr_lock, ARGV[i+3]}
                end
                redis.call("hset", KEYS[4], ARGV[i], ARGV[i+3])
            end
        end
        for i = 3, #ARGV, 4  do
            redis.call("hset", KEYS[2], ARGV[i], ARGV[i+1])
            redis.call("zadd", KEYS[3], ARGV[i+2], ARGV[i])
        end
        if tonumber(ARGV[2]) ~= nil then
            redis.call("expire", KEYS[2], ARGV[2])
            redis.call("expire", KEYS[3], ARGV[2])
        end
        return {1, 0}
    else
        -- ERROR: No longer own the lock
        return {0, 0}
    end
    ''')
    dict_name = self._namespace(dict_name)
    if mapping is None:
        mapping = {}
    items = []
    ## This flattens the dictionary into a list of
    ## (key, value, priority, lock) quadruples, matching the 4-stride
    ## walk in the Lua script above.
    for key, value in mapping.iteritems():
        items.append(self._encode(key))
        items.append(self._encode(value))
        items.append(priorities[key])
        items.append(self._encode(locks[key]))

    #logger.debug('update %r %r', dict_name, items)
    # NOTE(review): the priority zset is named dict_name + 'keys' with no
    # separator — presumably intentional, but verify against other readers.
    res = script(keys=[self._lock_name,
                       dict_name,
                       dict_name + 'keys',
                       dict_name + '_locks'],
                 args=[self._session_lock_identifier, expire] + items)
    if res[0] == 0:
        # Session lock identifier no longer matched on the server.
        raise EnvironmentError(
            'Unable to add items to %s in registry' % dict_name)
    elif res[0] == -1:
        # A per-key lock existed with a different owner.
        raise EnvironmentError(
            'lost lock on key=%r owned by %r not %r in %s'
            % (self._decode(res[1]), res[2], res[3], dict_name))
constant[Add mapping to a dictionary, replacing previous values
Can be called with only dict_name and expire to refresh the
expiration time.
NB: locks are only enforced if present, so nothing prevents
another caller from coming in an modifying data without using
locks.
:param mapping: a dict of keys and values to update in
dict_name. Must be specified if priorities is specified.
:param priorities: a dict with the same keys as those in
mapping that provides a numerical value indicating the
priority to assign to that key. Default sets 0 for all keys.
:param int expire: if specified, then dict_name will be set to
expire in that many seconds.
:param locks: a dict with the same keys as those in the
mapping. Before making any particular update, this function
checks if a key is present in a 'locks' table for this dict,
and if so, then its value must match the value provided in the
input locks dict for that key. If not, then the value
provided in the locks dict is inserted into the 'locks' table.
If the locks parameter is None, then no lock checking is
performed.
]
if compare[name[self]._session_lock_identifier is constant[None]] begin[:]
<ast.Raise object at 0x7da1b1437670>
if compare[name[priorities] is constant[None]] begin[:]
variable[priorities] assign[=] call[name[defaultdict], parameter[name[int]]]
if compare[name[locks] is constant[None]] begin[:]
variable[locks] assign[=] call[name[defaultdict], parameter[<ast.Lambda object at 0x7da1b1437280>]]
if <ast.UnaryOp object at 0x7da1b1436a10> begin[:]
<ast.Raise object at 0x7da1b1436980>
variable[conn] assign[=] call[name[redis].Redis, parameter[]]
variable[script] assign[=] call[name[conn].register_script, parameter[constant[
if redis.call("get", KEYS[1]) == ARGV[1]
then
for i = 3, #ARGV, 4 do
if ARGV[i+3] ~= 'j:""' then
local curr_lock = redis.call("hget", KEYS[4], ARGV[i])
if curr_lock and curr_lock ~= ARGV[i+3] then
return {-1, ARGV[i], curr_lock, ARGV[i+3]}
end
redis.call("hset", KEYS[4], ARGV[i], ARGV[i+3])
end
end
for i = 3, #ARGV, 4 do
redis.call("hset", KEYS[2], ARGV[i], ARGV[i+1])
redis.call("zadd", KEYS[3], ARGV[i+2], ARGV[i])
end
if tonumber(ARGV[2]) ~= nil then
redis.call("expire", KEYS[2], ARGV[2])
redis.call("expire", KEYS[3], ARGV[2])
end
return {1, 0}
else
-- ERROR: No longer own the lock
return {0, 0}
end
]]]
variable[dict_name] assign[=] call[name[self]._namespace, parameter[name[dict_name]]]
if compare[name[mapping] is constant[None]] begin[:]
variable[mapping] assign[=] dictionary[[], []]
variable[items] assign[=] list[[]]
for taget[tuple[[<ast.Name object at 0x7da1b1435d50>, <ast.Name object at 0x7da1b14342e0>]]] in starred[call[name[mapping].iteritems, parameter[]]] begin[:]
call[name[items].append, parameter[call[name[self]._encode, parameter[name[key]]]]]
call[name[items].append, parameter[call[name[self]._encode, parameter[name[value]]]]]
call[name[items].append, parameter[call[name[priorities]][name[key]]]]
call[name[items].append, parameter[call[name[self]._encode, parameter[call[name[locks]][name[key]]]]]]
variable[res] assign[=] call[name[script], parameter[]]
if compare[call[name[res]][constant[0]] equal[==] constant[0]] begin[:]
<ast.Raise object at 0x7da1b14365c0> | keyword[def] identifier[update] ( identifier[self] , identifier[dict_name] , identifier[mapping] = keyword[None] , identifier[priorities] = keyword[None] , identifier[expire] = keyword[None] ,
identifier[locks] = keyword[None] ):
literal[string]
keyword[if] identifier[self] . identifier[_session_lock_identifier] keyword[is] keyword[None] :
keyword[raise] identifier[ProgrammerError] ( literal[string] )
keyword[if] identifier[priorities] keyword[is] keyword[None] :
identifier[priorities] = identifier[defaultdict] ( identifier[int] )
keyword[if] identifier[locks] keyword[is] keyword[None] :
identifier[locks] = identifier[defaultdict] ( keyword[lambda] : literal[string] )
keyword[if] keyword[not] ( identifier[expire] keyword[is] keyword[None] keyword[or] identifier[isinstance] ( identifier[expire] , identifier[int] )):
keyword[raise] identifier[ProgrammerError] ( literal[string] )
identifier[conn] = identifier[redis] . identifier[Redis] ( identifier[connection_pool] = identifier[self] . identifier[pool] )
identifier[script] = identifier[conn] . identifier[register_script] ( literal[string] )
identifier[dict_name] = identifier[self] . identifier[_namespace] ( identifier[dict_name] )
keyword[if] identifier[mapping] keyword[is] keyword[None] :
identifier[mapping] ={}
identifier[items] =[]
keyword[for] identifier[key] , identifier[value] keyword[in] identifier[mapping] . identifier[iteritems] ():
identifier[items] . identifier[append] ( identifier[self] . identifier[_encode] ( identifier[key] ))
identifier[items] . identifier[append] ( identifier[self] . identifier[_encode] ( identifier[value] ))
identifier[items] . identifier[append] ( identifier[priorities] [ identifier[key] ])
identifier[items] . identifier[append] ( identifier[self] . identifier[_encode] ( identifier[locks] [ identifier[key] ]))
identifier[res] = identifier[script] ( identifier[keys] =[ identifier[self] . identifier[_lock_name] ,
identifier[dict_name] ,
identifier[dict_name] + literal[string] ,
identifier[dict_name] + literal[string] ],
identifier[args] =[ identifier[self] . identifier[_session_lock_identifier] , identifier[expire] ]+ identifier[items] )
keyword[if] identifier[res] [ literal[int] ]== literal[int] :
keyword[raise] identifier[EnvironmentError] (
literal[string] % identifier[dict_name] )
keyword[elif] identifier[res] [ literal[int] ]==- literal[int] :
keyword[raise] identifier[EnvironmentError] (
literal[string]
%( identifier[self] . identifier[_decode] ( identifier[res] [ literal[int] ]), identifier[res] [ literal[int] ], identifier[res] [ literal[int] ], identifier[dict_name] )) | def update(self, dict_name, mapping=None, priorities=None, expire=None, locks=None):
"""Add mapping to a dictionary, replacing previous values
Can be called with only dict_name and expire to refresh the
expiration time.
NB: locks are only enforced if present, so nothing prevents
another caller from coming in an modifying data without using
locks.
:param mapping: a dict of keys and values to update in
dict_name. Must be specified if priorities is specified.
:param priorities: a dict with the same keys as those in
mapping that provides a numerical value indicating the
priority to assign to that key. Default sets 0 for all keys.
:param int expire: if specified, then dict_name will be set to
expire in that many seconds.
:param locks: a dict with the same keys as those in the
mapping. Before making any particular update, this function
checks if a key is present in a 'locks' table for this dict,
and if so, then its value must match the value provided in the
input locks dict for that key. If not, then the value
provided in the locks dict is inserted into the 'locks' table.
If the locks parameter is None, then no lock checking is
performed.
"""
if self._session_lock_identifier is None:
raise ProgrammerError('must acquire lock first') # depends on [control=['if'], data=[]]
if priorities is None:
## set all priorities to zero
priorities = defaultdict(int) # depends on [control=['if'], data=['priorities']]
if locks is None:
## set all locks to None
locks = defaultdict(lambda : '') # depends on [control=['if'], data=['locks']]
if not (expire is None or isinstance(expire, int)):
raise ProgrammerError('expire must be int or unspecified') # depends on [control=['if'], data=[]]
conn = redis.Redis(connection_pool=self.pool)
script = conn.register_script('\n if redis.call("get", KEYS[1]) == ARGV[1]\n then\n for i = 3, #ARGV, 4 do\n if ARGV[i+3] ~= \'j:""\' then\n local curr_lock = redis.call("hget", KEYS[4], ARGV[i])\n if curr_lock and curr_lock ~= ARGV[i+3] then\n return {-1, ARGV[i], curr_lock, ARGV[i+3]}\n end\n redis.call("hset", KEYS[4], ARGV[i], ARGV[i+3])\n end\n end\n for i = 3, #ARGV, 4 do\n redis.call("hset", KEYS[2], ARGV[i], ARGV[i+1])\n redis.call("zadd", KEYS[3], ARGV[i+2], ARGV[i])\n end\n if tonumber(ARGV[2]) ~= nil then\n redis.call("expire", KEYS[2], ARGV[2])\n redis.call("expire", KEYS[3], ARGV[2])\n end\n return {1, 0}\n else\n -- ERROR: No longer own the lock\n return {0, 0}\n end\n ')
dict_name = self._namespace(dict_name)
if mapping is None:
mapping = {} # depends on [control=['if'], data=['mapping']]
items = []
## This flattens the dictionary into a list
for (key, value) in mapping.iteritems():
items.append(self._encode(key))
items.append(self._encode(value))
items.append(priorities[key])
items.append(self._encode(locks[key])) # depends on [control=['for'], data=[]]
#logger.debug('update %r %r', dict_name, items)
res = script(keys=[self._lock_name, dict_name, dict_name + 'keys', dict_name + '_locks'], args=[self._session_lock_identifier, expire] + items)
if res[0] == 0:
raise EnvironmentError('Unable to add items to %s in registry' % dict_name) # depends on [control=['if'], data=[]]
elif res[0] == -1:
raise EnvironmentError('lost lock on key=%r owned by %r not %r in %s' % (self._decode(res[1]), res[2], res[3], dict_name)) # depends on [control=['if'], data=[]] |
def _repr_html_(self):
"""Generates the HTML representation.
"""
parts = []
if self._class:
parts.append('<div id="hh_%s" class="%s">%s</div>' % (self._id, self._class, self._markup))
else:
parts.append('<div id="hh_%s">%s</div>' % (self._id, self._markup))
if len(self._script) != 0:
parts.append('<script>')
parts.append('require([')
parts.append(','.join(['"%s"' % d[0] for d in self._dependencies]))
parts.append('], function(')
parts.append(','.join([d[1] for d in self._dependencies]))
parts.append(') {')
parts.append(self._script)
parts.append('});')
parts.append('</script>')
return ''.join(parts) | def function[_repr_html_, parameter[self]]:
constant[Generates the HTML representation.
]
variable[parts] assign[=] list[[]]
if name[self]._class begin[:]
call[name[parts].append, parameter[binary_operation[constant[<div id="hh_%s" class="%s">%s</div>] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Attribute object at 0x7da20c9929e0>, <ast.Attribute object at 0x7da20c991b70>, <ast.Attribute object at 0x7da20c993070>]]]]]
if compare[call[name[len], parameter[name[self]._script]] not_equal[!=] constant[0]] begin[:]
call[name[parts].append, parameter[constant[<script>]]]
call[name[parts].append, parameter[constant[require([]]]
call[name[parts].append, parameter[call[constant[,].join, parameter[<ast.ListComp object at 0x7da20c993880>]]]]
call[name[parts].append, parameter[constant[], function(]]]
call[name[parts].append, parameter[call[constant[,].join, parameter[<ast.ListComp object at 0x7da20c990310>]]]]
call[name[parts].append, parameter[constant[) {]]]
call[name[parts].append, parameter[name[self]._script]]
call[name[parts].append, parameter[constant[});]]]
call[name[parts].append, parameter[constant[</script>]]]
return[call[constant[].join, parameter[name[parts]]]] | keyword[def] identifier[_repr_html_] ( identifier[self] ):
literal[string]
identifier[parts] =[]
keyword[if] identifier[self] . identifier[_class] :
identifier[parts] . identifier[append] ( literal[string] %( identifier[self] . identifier[_id] , identifier[self] . identifier[_class] , identifier[self] . identifier[_markup] ))
keyword[else] :
identifier[parts] . identifier[append] ( literal[string] %( identifier[self] . identifier[_id] , identifier[self] . identifier[_markup] ))
keyword[if] identifier[len] ( identifier[self] . identifier[_script] )!= literal[int] :
identifier[parts] . identifier[append] ( literal[string] )
identifier[parts] . identifier[append] ( literal[string] )
identifier[parts] . identifier[append] ( literal[string] . identifier[join] ([ literal[string] % identifier[d] [ literal[int] ] keyword[for] identifier[d] keyword[in] identifier[self] . identifier[_dependencies] ]))
identifier[parts] . identifier[append] ( literal[string] )
identifier[parts] . identifier[append] ( literal[string] . identifier[join] ([ identifier[d] [ literal[int] ] keyword[for] identifier[d] keyword[in] identifier[self] . identifier[_dependencies] ]))
identifier[parts] . identifier[append] ( literal[string] )
identifier[parts] . identifier[append] ( identifier[self] . identifier[_script] )
identifier[parts] . identifier[append] ( literal[string] )
identifier[parts] . identifier[append] ( literal[string] )
keyword[return] literal[string] . identifier[join] ( identifier[parts] ) | def _repr_html_(self):
"""Generates the HTML representation.
"""
parts = []
if self._class:
parts.append('<div id="hh_%s" class="%s">%s</div>' % (self._id, self._class, self._markup)) # depends on [control=['if'], data=[]]
else:
parts.append('<div id="hh_%s">%s</div>' % (self._id, self._markup))
if len(self._script) != 0:
parts.append('<script>')
parts.append('require([')
parts.append(','.join(['"%s"' % d[0] for d in self._dependencies]))
parts.append('], function(')
parts.append(','.join([d[1] for d in self._dependencies]))
parts.append(') {')
parts.append(self._script)
parts.append('});')
parts.append('</script>') # depends on [control=['if'], data=[]]
return ''.join(parts) |
def __update_display_items_model(self, display_items_model: ListModel.FilteredListModel, data_group: typing.Optional[DataGroup.DataGroup], filter_id: typing.Optional[str]) -> None:
"""Update the data item model with a new container, filter, and sorting.
This is called when the data item model is created or when the user changes
the data group or sorting settings.
"""
with display_items_model.changes(): # change filter and sort together
if data_group is not None:
display_items_model.container = data_group
display_items_model.filter = ListModel.Filter(True)
display_items_model.sort_key = None
display_items_model.filter_id = None
elif filter_id == "latest-session":
display_items_model.container = self.document_model
display_items_model.filter = ListModel.EqFilter("session_id", self.document_model.session_id)
display_items_model.sort_key = DataItem.sort_by_date_key
display_items_model.sort_reverse = True
display_items_model.filter_id = filter_id
elif filter_id == "temporary":
display_items_model.container = self.document_model
display_items_model.filter = ListModel.NotEqFilter("category", "persistent")
display_items_model.sort_key = DataItem.sort_by_date_key
display_items_model.sort_reverse = True
display_items_model.filter_id = filter_id
elif filter_id == "none": # not intended to be used directly
display_items_model.container = self.document_model
display_items_model.filter = ListModel.Filter(False)
display_items_model.sort_key = DataItem.sort_by_date_key
display_items_model.sort_reverse = True
display_items_model.filter_id = filter_id
else: # "all"
display_items_model.container = self.document_model
display_items_model.filter = ListModel.EqFilter("category", "persistent")
display_items_model.sort_key = DataItem.sort_by_date_key
display_items_model.sort_reverse = True
display_items_model.filter_id = None | def function[__update_display_items_model, parameter[self, display_items_model, data_group, filter_id]]:
constant[Update the data item model with a new container, filter, and sorting.
This is called when the data item model is created or when the user changes
the data group or sorting settings.
]
with call[name[display_items_model].changes, parameter[]] begin[:]
if compare[name[data_group] is_not constant[None]] begin[:]
name[display_items_model].container assign[=] name[data_group]
name[display_items_model].filter assign[=] call[name[ListModel].Filter, parameter[constant[True]]]
name[display_items_model].sort_key assign[=] constant[None]
name[display_items_model].filter_id assign[=] constant[None] | keyword[def] identifier[__update_display_items_model] ( identifier[self] , identifier[display_items_model] : identifier[ListModel] . identifier[FilteredListModel] , identifier[data_group] : identifier[typing] . identifier[Optional] [ identifier[DataGroup] . identifier[DataGroup] ], identifier[filter_id] : identifier[typing] . identifier[Optional] [ identifier[str] ])-> keyword[None] :
literal[string]
keyword[with] identifier[display_items_model] . identifier[changes] ():
keyword[if] identifier[data_group] keyword[is] keyword[not] keyword[None] :
identifier[display_items_model] . identifier[container] = identifier[data_group]
identifier[display_items_model] . identifier[filter] = identifier[ListModel] . identifier[Filter] ( keyword[True] )
identifier[display_items_model] . identifier[sort_key] = keyword[None]
identifier[display_items_model] . identifier[filter_id] = keyword[None]
keyword[elif] identifier[filter_id] == literal[string] :
identifier[display_items_model] . identifier[container] = identifier[self] . identifier[document_model]
identifier[display_items_model] . identifier[filter] = identifier[ListModel] . identifier[EqFilter] ( literal[string] , identifier[self] . identifier[document_model] . identifier[session_id] )
identifier[display_items_model] . identifier[sort_key] = identifier[DataItem] . identifier[sort_by_date_key]
identifier[display_items_model] . identifier[sort_reverse] = keyword[True]
identifier[display_items_model] . identifier[filter_id] = identifier[filter_id]
keyword[elif] identifier[filter_id] == literal[string] :
identifier[display_items_model] . identifier[container] = identifier[self] . identifier[document_model]
identifier[display_items_model] . identifier[filter] = identifier[ListModel] . identifier[NotEqFilter] ( literal[string] , literal[string] )
identifier[display_items_model] . identifier[sort_key] = identifier[DataItem] . identifier[sort_by_date_key]
identifier[display_items_model] . identifier[sort_reverse] = keyword[True]
identifier[display_items_model] . identifier[filter_id] = identifier[filter_id]
keyword[elif] identifier[filter_id] == literal[string] :
identifier[display_items_model] . identifier[container] = identifier[self] . identifier[document_model]
identifier[display_items_model] . identifier[filter] = identifier[ListModel] . identifier[Filter] ( keyword[False] )
identifier[display_items_model] . identifier[sort_key] = identifier[DataItem] . identifier[sort_by_date_key]
identifier[display_items_model] . identifier[sort_reverse] = keyword[True]
identifier[display_items_model] . identifier[filter_id] = identifier[filter_id]
keyword[else] :
identifier[display_items_model] . identifier[container] = identifier[self] . identifier[document_model]
identifier[display_items_model] . identifier[filter] = identifier[ListModel] . identifier[EqFilter] ( literal[string] , literal[string] )
identifier[display_items_model] . identifier[sort_key] = identifier[DataItem] . identifier[sort_by_date_key]
identifier[display_items_model] . identifier[sort_reverse] = keyword[True]
identifier[display_items_model] . identifier[filter_id] = keyword[None] | def __update_display_items_model(self, display_items_model: ListModel.FilteredListModel, data_group: typing.Optional[DataGroup.DataGroup], filter_id: typing.Optional[str]) -> None:
"""Update the data item model with a new container, filter, and sorting.
This is called when the data item model is created or when the user changes
the data group or sorting settings.
"""
with display_items_model.changes(): # change filter and sort together
if data_group is not None:
display_items_model.container = data_group
display_items_model.filter = ListModel.Filter(True)
display_items_model.sort_key = None
display_items_model.filter_id = None # depends on [control=['if'], data=['data_group']]
elif filter_id == 'latest-session':
display_items_model.container = self.document_model
display_items_model.filter = ListModel.EqFilter('session_id', self.document_model.session_id)
display_items_model.sort_key = DataItem.sort_by_date_key
display_items_model.sort_reverse = True
display_items_model.filter_id = filter_id # depends on [control=['if'], data=['filter_id']]
elif filter_id == 'temporary':
display_items_model.container = self.document_model
display_items_model.filter = ListModel.NotEqFilter('category', 'persistent')
display_items_model.sort_key = DataItem.sort_by_date_key
display_items_model.sort_reverse = True
display_items_model.filter_id = filter_id # depends on [control=['if'], data=['filter_id']]
elif filter_id == 'none': # not intended to be used directly
display_items_model.container = self.document_model
display_items_model.filter = ListModel.Filter(False)
display_items_model.sort_key = DataItem.sort_by_date_key
display_items_model.sort_reverse = True
display_items_model.filter_id = filter_id # depends on [control=['if'], data=['filter_id']]
else: # "all"
display_items_model.container = self.document_model
display_items_model.filter = ListModel.EqFilter('category', 'persistent')
display_items_model.sort_key = DataItem.sort_by_date_key
display_items_model.sort_reverse = True
display_items_model.filter_id = None # depends on [control=['with'], data=[]] |
def parse_fasta(infile, upper=False):
'''
parse a fasta-formatted file and returns header
can be a fasta file that contains multiple records.
'''
try:
fp = must_open(infile)
except:
fp = infile
# keep header
fa_iter = (x[1] for x in groupby(fp, lambda row: row[0] == '>'))
for header in fa_iter:
header = next(header)
if header[0] != '>':
continue
# drop '>'
header = header.strip()[1:]
# stitch the sequence lines together and make into upper case
seq = "".join(s.strip() for s in next(fa_iter))
if upper:
seq = seq.upper()
yield header, seq | def function[parse_fasta, parameter[infile, upper]]:
constant[
parse a fasta-formatted file and returns header
can be a fasta file that contains multiple records.
]
<ast.Try object at 0x7da1b08c9600>
variable[fa_iter] assign[=] <ast.GeneratorExp object at 0x7da1b08c9ff0>
for taget[name[header]] in starred[name[fa_iter]] begin[:]
variable[header] assign[=] call[name[next], parameter[name[header]]]
if compare[call[name[header]][constant[0]] not_equal[!=] constant[>]] begin[:]
continue
variable[header] assign[=] call[call[name[header].strip, parameter[]]][<ast.Slice object at 0x7da1b08c8f70>]
variable[seq] assign[=] call[constant[].join, parameter[<ast.GeneratorExp object at 0x7da1b08c9bd0>]]
if name[upper] begin[:]
variable[seq] assign[=] call[name[seq].upper, parameter[]]
<ast.Yield object at 0x7da1b09601f0> | keyword[def] identifier[parse_fasta] ( identifier[infile] , identifier[upper] = keyword[False] ):
literal[string]
keyword[try] :
identifier[fp] = identifier[must_open] ( identifier[infile] )
keyword[except] :
identifier[fp] = identifier[infile]
identifier[fa_iter] =( identifier[x] [ literal[int] ] keyword[for] identifier[x] keyword[in] identifier[groupby] ( identifier[fp] , keyword[lambda] identifier[row] : identifier[row] [ literal[int] ]== literal[string] ))
keyword[for] identifier[header] keyword[in] identifier[fa_iter] :
identifier[header] = identifier[next] ( identifier[header] )
keyword[if] identifier[header] [ literal[int] ]!= literal[string] :
keyword[continue]
identifier[header] = identifier[header] . identifier[strip] ()[ literal[int] :]
identifier[seq] = literal[string] . identifier[join] ( identifier[s] . identifier[strip] () keyword[for] identifier[s] keyword[in] identifier[next] ( identifier[fa_iter] ))
keyword[if] identifier[upper] :
identifier[seq] = identifier[seq] . identifier[upper] ()
keyword[yield] identifier[header] , identifier[seq] | def parse_fasta(infile, upper=False):
"""
parse a fasta-formatted file and returns header
can be a fasta file that contains multiple records.
"""
try:
fp = must_open(infile) # depends on [control=['try'], data=[]]
except:
fp = infile # depends on [control=['except'], data=[]]
# keep header
fa_iter = (x[1] for x in groupby(fp, lambda row: row[0] == '>'))
for header in fa_iter:
header = next(header)
if header[0] != '>':
continue # depends on [control=['if'], data=[]]
# drop '>'
header = header.strip()[1:]
# stitch the sequence lines together and make into upper case
seq = ''.join((s.strip() for s in next(fa_iter)))
if upper:
seq = seq.upper() # depends on [control=['if'], data=[]]
yield (header, seq) # depends on [control=['for'], data=['header']] |
def version_sort_key(version, digits=6):
"""
Produces a canonicalized version string for standard version strings
in the dotted-numeric-label format. Function appropriate for use as
the key function of sort().
The conversion removes a possible prefix and reformats each key element
as a long string of digits with leading zeros. The number of digits
in each element can be changed with "digits". The prefix is replaced
before the result is returned. Prefixes match the regex '^\w+[_-]'
where the next character is a digit.
Non-conforming input is returned with only those completely numeric
elements reformatted.
"""
m = re.match('^(\w+[_-])(\d.*)$', version)
if m:
prefix = m.group(1)
version = m.group(2)
else:
prefix = ''
key = []
for elem in version.split('.'):
try:
num = int(elem)
elem = ('%0'+str(digits)+'d') % num
except:
pass
key.append(elem)
return prefix + '.'.join(key) | def function[version_sort_key, parameter[version, digits]]:
constant[
Produces a canonicalized version string for standard version strings
in the dotted-numeric-label format. Function appropriate for use as
the key function of sort().
The conversion removes a possible prefix and reformats each key element
as a long string of digits with leading zeros. The number of digits
in each element can be changed with "digits". The prefix is replaced
before the result is returned. Prefixes match the regex '^\w+[_-]'
where the next character is a digit.
Non-conforming input is returned with only those completely numeric
elements reformatted.
]
variable[m] assign[=] call[name[re].match, parameter[constant[^(\w+[_-])(\d.*)$], name[version]]]
if name[m] begin[:]
variable[prefix] assign[=] call[name[m].group, parameter[constant[1]]]
variable[version] assign[=] call[name[m].group, parameter[constant[2]]]
variable[key] assign[=] list[[]]
for taget[name[elem]] in starred[call[name[version].split, parameter[constant[.]]]] begin[:]
<ast.Try object at 0x7da20c9907c0>
call[name[key].append, parameter[name[elem]]]
return[binary_operation[name[prefix] + call[constant[.].join, parameter[name[key]]]]] | keyword[def] identifier[version_sort_key] ( identifier[version] , identifier[digits] = literal[int] ):
literal[string]
identifier[m] = identifier[re] . identifier[match] ( literal[string] , identifier[version] )
keyword[if] identifier[m] :
identifier[prefix] = identifier[m] . identifier[group] ( literal[int] )
identifier[version] = identifier[m] . identifier[group] ( literal[int] )
keyword[else] :
identifier[prefix] = literal[string]
identifier[key] =[]
keyword[for] identifier[elem] keyword[in] identifier[version] . identifier[split] ( literal[string] ):
keyword[try] :
identifier[num] = identifier[int] ( identifier[elem] )
identifier[elem] =( literal[string] + identifier[str] ( identifier[digits] )+ literal[string] )% identifier[num]
keyword[except] :
keyword[pass]
identifier[key] . identifier[append] ( identifier[elem] )
keyword[return] identifier[prefix] + literal[string] . identifier[join] ( identifier[key] ) | def version_sort_key(version, digits=6):
"""
Produces a canonicalized version string for standard version strings
in the dotted-numeric-label format. Function appropriate for use as
the key function of sort().
The conversion removes a possible prefix and reformats each key element
as a long string of digits with leading zeros. The number of digits
in each element can be changed with "digits". The prefix is replaced
before the result is returned. Prefixes match the regex '^\\w+[_-]'
where the next character is a digit.
Non-conforming input is returned with only those completely numeric
elements reformatted.
"""
m = re.match('^(\\w+[_-])(\\d.*)$', version)
if m:
prefix = m.group(1)
version = m.group(2) # depends on [control=['if'], data=[]]
else:
prefix = ''
key = []
for elem in version.split('.'):
try:
num = int(elem)
elem = ('%0' + str(digits) + 'd') % num # depends on [control=['try'], data=[]]
except:
pass # depends on [control=['except'], data=[]]
key.append(elem) # depends on [control=['for'], data=['elem']]
return prefix + '.'.join(key) |
def Vc(CASRN, AvailableMethods=False, Method=None, IgnoreMethods=[SURF]):
r'''This function handles the retrieval of a chemical's critical
volume. Lookup is based on CASRNs. Will automatically select a data
source to use if no Method is provided; returns None if the data is not
available.
Prefered sources are 'IUPAC' for organic chemicals, and 'MATTHEWS' for
inorganic chemicals. Function has data for approximately 1000 chemicals.
Examples
--------
>>> Vc(CASRN='64-17-5')
0.000168
Parameters
----------
CASRN : string
CASRN [-]
Returns
-------
Vc : float
Critical volume, [m^3/mol]
methods : list, only returned if AvailableMethods == True
List of methods which can be used to obtain Vc with the given inputs
Other Parameters
----------------
Method : string, optional
The method name to use. Accepted methods are 'IUPAC', 'MATTHEWS',
'CRC', 'PSRK', 'YAWS', and 'SURF'. All valid values are also held
in the list `Vc_methods`.
AvailableMethods : bool, optional
If True, function will determine which methods can be used to obtain
Vc for the desired chemical, and will return methods instead of Vc
IgnoreMethods : list, optional
A list of methods to ignore in obtaining the full list of methods,
useful for for performance reasons and ignoring inaccurate methods
Notes
-----
A total of six sources are available for this function. They are:
* 'IUPAC', a series of critically evaluated
experimental datum for organic compounds in [1]_, [2]_, [3]_, [4]_,
[5]_, [6]_, [7]_, [8]_, [9]_, [10]_, [11]_, and [12]_.
* 'MATTHEWS', a series of critically
evaluated data for inorganic compounds in [13]_.
* 'CRC', a compillation of critically
evaluated data by the TRC as published in [14]_.
* 'PSRK', a compillation of experimental and
estimated data published in [15]_.
* 'YAWS', a large compillation of data from a
variety of sources; no data points are sourced in the work of [16]_.
* 'SURF', an estimation method using a
simple quadratic method for estimating Pc from Tc and Vc. This is
ignored and not returned as a method by default
References
----------
.. [1] Ambrose, Douglas, and Colin L. Young. "Vapor-Liquid Critical
Properties of Elements and Compounds. 1. An Introductory Survey."
Journal of Chemical & Engineering Data 41, no. 1 (January 1, 1996):
154-154. doi:10.1021/je950378q.
.. [2] Ambrose, Douglas, and Constantine Tsonopoulos. "Vapor-Liquid
Critical Properties of Elements and Compounds. 2. Normal Alkanes."
Journal of Chemical & Engineering Data 40, no. 3 (May 1, 1995): 531-46.
doi:10.1021/je00019a001.
.. [3] Tsonopoulos, Constantine, and Douglas Ambrose. "Vapor-Liquid
Critical Properties of Elements and Compounds. 3. Aromatic
Hydrocarbons." Journal of Chemical & Engineering Data 40, no. 3
(May 1, 1995): 547-58. doi:10.1021/je00019a002.
.. [4] Gude, Michael, and Amyn S. Teja. "Vapor-Liquid Critical Properties
of Elements and Compounds. 4. Aliphatic Alkanols." Journal of Chemical
& Engineering Data 40, no. 5 (September 1, 1995): 1025-36.
doi:10.1021/je00021a001.
.. [5] Daubert, Thomas E. "Vapor-Liquid Critical Properties of Elements
and Compounds. 5. Branched Alkanes and Cycloalkanes." Journal of
Chemical & Engineering Data 41, no. 3 (January 1, 1996): 365-72.
doi:10.1021/je9501548.
.. [6] Tsonopoulos, Constantine, and Douglas Ambrose. "Vapor-Liquid
Critical Properties of Elements and Compounds. 6. Unsaturated Aliphatic
Hydrocarbons." Journal of Chemical & Engineering Data 41, no. 4
(January 1, 1996): 645-56. doi:10.1021/je9501999.
.. [7] Kudchadker, Arvind P., Douglas Ambrose, and Constantine Tsonopoulos.
"Vapor-Liquid Critical Properties of Elements and Compounds. 7. Oxygen
Compounds Other Than Alkanols and Cycloalkanols." Journal of Chemical &
Engineering Data 46, no. 3 (May 1, 2001): 457-79. doi:10.1021/je0001680.
.. [8] Tsonopoulos, Constantine, and Douglas Ambrose. "Vapor-Liquid
Critical Properties of Elements and Compounds. 8. Organic Sulfur,
Silicon, and Tin Compounds (C + H + S, Si, and Sn)." Journal of Chemical
& Engineering Data 46, no. 3 (May 1, 2001): 480-85.
doi:10.1021/je000210r.
.. [9] Marsh, Kenneth N., Colin L. Young, David W. Morton, Douglas Ambrose,
and Constantine Tsonopoulos. "Vapor-Liquid Critical Properties of
Elements and Compounds. 9. Organic Compounds Containing Nitrogen."
Journal of Chemical & Engineering Data 51, no. 2 (March 1, 2006):
305-14. doi:10.1021/je050221q.
.. [10] Marsh, Kenneth N., Alan Abramson, Douglas Ambrose, David W. Morton,
Eugene Nikitin, Constantine Tsonopoulos, and Colin L. Young.
"Vapor-Liquid Critical Properties of Elements and Compounds. 10. Organic
Compounds Containing Halogens." Journal of Chemical & Engineering Data
52, no. 5 (September 1, 2007): 1509-38. doi:10.1021/je700336g.
.. [11] Ambrose, Douglas, Constantine Tsonopoulos, and Eugene D. Nikitin.
"Vapor-Liquid Critical Properties of Elements and Compounds. 11. Organic
Compounds Containing B + O; Halogens + N, + O, + O + S, + S, + Si;
N + O; and O + S, + Si." Journal of Chemical & Engineering Data 54,
no. 3 (March 12, 2009): 669-89. doi:10.1021/je800580z.
.. [12] Ambrose, Douglas, Constantine Tsonopoulos, Eugene D. Nikitin, David
W. Morton, and Kenneth N. Marsh. "Vapor-Liquid Critical Properties of
Elements and Compounds. 12. Review of Recent Data for Hydrocarbons and
Non-Hydrocarbons." Journal of Chemical & Engineering Data, October 5,
2015, 151005081500002. doi:10.1021/acs.jced.5b00571.
.. [13] Mathews, Joseph F. "Critical Constants of Inorganic Substances."
Chemical Reviews 72, no. 1 (February 1, 1972): 71-100.
doi:10.1021/cr60275a004.
.. [14] Haynes, W.M., Thomas J. Bruno, and David R. Lide. CRC Handbook of
Chemistry and Physics, 95E. Boca Raton, FL: CRC press, 2014.
.. [15] Horstmann, Sven, Anna Jabłoniec, Jörg Krafczyk, Kai Fischer, and
Jürgen Gmehling. "PSRK Group Contribution Equation of State:
Comprehensive Revision and Extension IV, Including Critical Constants
and Α-Function Parameters for 1000 Components." Fluid Phase Equilibria
227, no. 2 (January 25, 2005): 157-64. doi:10.1016/j.fluid.2004.11.002.
.. [16] Yaws, Carl L. Thermophysical Properties of Chemicals and
Hydrocarbons, Second Edition. Amsterdam Boston: Gulf Professional
Publishing, 2014.
'''
def list_methods():
methods = []
if CASRN in _crit_IUPAC.index and not np.isnan(_crit_IUPAC.at[CASRN, 'Vc']):
methods.append(IUPAC)
if CASRN in _crit_Matthews.index and not np.isnan(_crit_Matthews.at[CASRN, 'Vc']):
methods.append(MATTHEWS)
if CASRN in _crit_CRC.index and not np.isnan(_crit_CRC.at[CASRN, 'Vc']):
methods.append(CRC)
if CASRN in _crit_PSRKR4.index and not np.isnan(_crit_PSRKR4.at[CASRN, 'Vc']):
methods.append(PSRK)
if CASRN in _crit_Yaws.index and not np.isnan(_crit_Yaws.at[CASRN, 'Vc']):
methods.append(YAWS)
if CASRN:
methods.append(SURF)
if IgnoreMethods:
for Method in IgnoreMethods:
if Method in methods:
methods.remove(Method)
methods.append(NONE)
return methods
if AvailableMethods:
return list_methods()
if not Method:
Method = list_methods()[0]
if Method == IUPAC:
_Vc = float(_crit_IUPAC.at[CASRN, 'Vc'])
elif Method == PSRK:
_Vc = float(_crit_PSRKR4.at[CASRN, 'Vc'])
elif Method == MATTHEWS:
_Vc = float(_crit_Matthews.at[CASRN, 'Vc'])
elif Method == CRC:
_Vc = float(_crit_CRC.at[CASRN, 'Vc'])
elif Method == YAWS:
_Vc = float(_crit_Yaws.at[CASRN, 'Vc'])
elif Method == SURF:
_Vc = third_property(CASRN=CASRN, V=True)
elif Method == NONE:
return None
else:
raise Exception('Failure in in function')
return _Vc | def function[Vc, parameter[CASRN, AvailableMethods, Method, IgnoreMethods]]:
constant[This function handles the retrieval of a chemical's critical
volume. Lookup is based on CASRNs. Will automatically select a data
source to use if no Method is provided; returns None if the data is not
available.
Prefered sources are 'IUPAC' for organic chemicals, and 'MATTHEWS' for
inorganic chemicals. Function has data for approximately 1000 chemicals.
Examples
--------
>>> Vc(CASRN='64-17-5')
0.000168
Parameters
----------
CASRN : string
CASRN [-]
Returns
-------
Vc : float
Critical volume, [m^3/mol]
methods : list, only returned if AvailableMethods == True
List of methods which can be used to obtain Vc with the given inputs
Other Parameters
----------------
Method : string, optional
The method name to use. Accepted methods are 'IUPAC', 'MATTHEWS',
'CRC', 'PSRK', 'YAWS', and 'SURF'. All valid values are also held
in the list `Vc_methods`.
AvailableMethods : bool, optional
If True, function will determine which methods can be used to obtain
Vc for the desired chemical, and will return methods instead of Vc
IgnoreMethods : list, optional
A list of methods to ignore in obtaining the full list of methods,
useful for for performance reasons and ignoring inaccurate methods
Notes
-----
A total of six sources are available for this function. They are:
* 'IUPAC', a series of critically evaluated
experimental datum for organic compounds in [1]_, [2]_, [3]_, [4]_,
[5]_, [6]_, [7]_, [8]_, [9]_, [10]_, [11]_, and [12]_.
* 'MATTHEWS', a series of critically
evaluated data for inorganic compounds in [13]_.
* 'CRC', a compillation of critically
evaluated data by the TRC as published in [14]_.
* 'PSRK', a compillation of experimental and
estimated data published in [15]_.
* 'YAWS', a large compillation of data from a
variety of sources; no data points are sourced in the work of [16]_.
* 'SURF', an estimation method using a
simple quadratic method for estimating Pc from Tc and Vc. This is
ignored and not returned as a method by default
References
----------
.. [1] Ambrose, Douglas, and Colin L. Young. "Vapor-Liquid Critical
Properties of Elements and Compounds. 1. An Introductory Survey."
Journal of Chemical & Engineering Data 41, no. 1 (January 1, 1996):
154-154. doi:10.1021/je950378q.
.. [2] Ambrose, Douglas, and Constantine Tsonopoulos. "Vapor-Liquid
Critical Properties of Elements and Compounds. 2. Normal Alkanes."
Journal of Chemical & Engineering Data 40, no. 3 (May 1, 1995): 531-46.
doi:10.1021/je00019a001.
.. [3] Tsonopoulos, Constantine, and Douglas Ambrose. "Vapor-Liquid
Critical Properties of Elements and Compounds. 3. Aromatic
Hydrocarbons." Journal of Chemical & Engineering Data 40, no. 3
(May 1, 1995): 547-58. doi:10.1021/je00019a002.
.. [4] Gude, Michael, and Amyn S. Teja. "Vapor-Liquid Critical Properties
of Elements and Compounds. 4. Aliphatic Alkanols." Journal of Chemical
& Engineering Data 40, no. 5 (September 1, 1995): 1025-36.
doi:10.1021/je00021a001.
.. [5] Daubert, Thomas E. "Vapor-Liquid Critical Properties of Elements
and Compounds. 5. Branched Alkanes and Cycloalkanes." Journal of
Chemical & Engineering Data 41, no. 3 (January 1, 1996): 365-72.
doi:10.1021/je9501548.
.. [6] Tsonopoulos, Constantine, and Douglas Ambrose. "Vapor-Liquid
Critical Properties of Elements and Compounds. 6. Unsaturated Aliphatic
Hydrocarbons." Journal of Chemical & Engineering Data 41, no. 4
(January 1, 1996): 645-56. doi:10.1021/je9501999.
.. [7] Kudchadker, Arvind P., Douglas Ambrose, and Constantine Tsonopoulos.
"Vapor-Liquid Critical Properties of Elements and Compounds. 7. Oxygen
Compounds Other Than Alkanols and Cycloalkanols." Journal of Chemical &
Engineering Data 46, no. 3 (May 1, 2001): 457-79. doi:10.1021/je0001680.
.. [8] Tsonopoulos, Constantine, and Douglas Ambrose. "Vapor-Liquid
Critical Properties of Elements and Compounds. 8. Organic Sulfur,
Silicon, and Tin Compounds (C + H + S, Si, and Sn)." Journal of Chemical
& Engineering Data 46, no. 3 (May 1, 2001): 480-85.
doi:10.1021/je000210r.
.. [9] Marsh, Kenneth N., Colin L. Young, David W. Morton, Douglas Ambrose,
and Constantine Tsonopoulos. "Vapor-Liquid Critical Properties of
Elements and Compounds. 9. Organic Compounds Containing Nitrogen."
Journal of Chemical & Engineering Data 51, no. 2 (March 1, 2006):
305-14. doi:10.1021/je050221q.
.. [10] Marsh, Kenneth N., Alan Abramson, Douglas Ambrose, David W. Morton,
Eugene Nikitin, Constantine Tsonopoulos, and Colin L. Young.
"Vapor-Liquid Critical Properties of Elements and Compounds. 10. Organic
Compounds Containing Halogens." Journal of Chemical & Engineering Data
52, no. 5 (September 1, 2007): 1509-38. doi:10.1021/je700336g.
.. [11] Ambrose, Douglas, Constantine Tsonopoulos, and Eugene D. Nikitin.
"Vapor-Liquid Critical Properties of Elements and Compounds. 11. Organic
Compounds Containing B + O; Halogens + N, + O, + O + S, + S, + Si;
N + O; and O + S, + Si." Journal of Chemical & Engineering Data 54,
no. 3 (March 12, 2009): 669-89. doi:10.1021/je800580z.
.. [12] Ambrose, Douglas, Constantine Tsonopoulos, Eugene D. Nikitin, David
W. Morton, and Kenneth N. Marsh. "Vapor-Liquid Critical Properties of
Elements and Compounds. 12. Review of Recent Data for Hydrocarbons and
Non-Hydrocarbons." Journal of Chemical & Engineering Data, October 5,
2015, 151005081500002. doi:10.1021/acs.jced.5b00571.
.. [13] Mathews, Joseph F. "Critical Constants of Inorganic Substances."
Chemical Reviews 72, no. 1 (February 1, 1972): 71-100.
doi:10.1021/cr60275a004.
.. [14] Haynes, W.M., Thomas J. Bruno, and David R. Lide. CRC Handbook of
Chemistry and Physics, 95E. Boca Raton, FL: CRC press, 2014.
.. [15] Horstmann, Sven, Anna Jabłoniec, Jörg Krafczyk, Kai Fischer, and
Jürgen Gmehling. "PSRK Group Contribution Equation of State:
Comprehensive Revision and Extension IV, Including Critical Constants
and Α-Function Parameters for 1000 Components." Fluid Phase Equilibria
227, no. 2 (January 25, 2005): 157-64. doi:10.1016/j.fluid.2004.11.002.
.. [16] Yaws, Carl L. Thermophysical Properties of Chemicals and
Hydrocarbons, Second Edition. Amsterdam Boston: Gulf Professional
Publishing, 2014.
]
def function[list_methods, parameter[]]:
variable[methods] assign[=] list[[]]
if <ast.BoolOp object at 0x7da1b26af3d0> begin[:]
call[name[methods].append, parameter[name[IUPAC]]]
if <ast.BoolOp object at 0x7da1b26aea10> begin[:]
call[name[methods].append, parameter[name[MATTHEWS]]]
if <ast.BoolOp object at 0x7da1b26ac430> begin[:]
call[name[methods].append, parameter[name[CRC]]]
if <ast.BoolOp object at 0x7da1b26ad8d0> begin[:]
call[name[methods].append, parameter[name[PSRK]]]
if <ast.BoolOp object at 0x7da1b26aeaa0> begin[:]
call[name[methods].append, parameter[name[YAWS]]]
if name[CASRN] begin[:]
call[name[methods].append, parameter[name[SURF]]]
if name[IgnoreMethods] begin[:]
for taget[name[Method]] in starred[name[IgnoreMethods]] begin[:]
if compare[name[Method] in name[methods]] begin[:]
call[name[methods].remove, parameter[name[Method]]]
call[name[methods].append, parameter[name[NONE]]]
return[name[methods]]
if name[AvailableMethods] begin[:]
return[call[name[list_methods], parameter[]]]
if <ast.UnaryOp object at 0x7da1b26acd00> begin[:]
variable[Method] assign[=] call[call[name[list_methods], parameter[]]][constant[0]]
if compare[name[Method] equal[==] name[IUPAC]] begin[:]
variable[_Vc] assign[=] call[name[float], parameter[call[name[_crit_IUPAC].at][tuple[[<ast.Name object at 0x7da1b26afa60>, <ast.Constant object at 0x7da1b26add50>]]]]]
return[name[_Vc]] | keyword[def] identifier[Vc] ( identifier[CASRN] , identifier[AvailableMethods] = keyword[False] , identifier[Method] = keyword[None] , identifier[IgnoreMethods] =[ identifier[SURF] ]):
literal[string]
keyword[def] identifier[list_methods] ():
identifier[methods] =[]
keyword[if] identifier[CASRN] keyword[in] identifier[_crit_IUPAC] . identifier[index] keyword[and] keyword[not] identifier[np] . identifier[isnan] ( identifier[_crit_IUPAC] . identifier[at] [ identifier[CASRN] , literal[string] ]):
identifier[methods] . identifier[append] ( identifier[IUPAC] )
keyword[if] identifier[CASRN] keyword[in] identifier[_crit_Matthews] . identifier[index] keyword[and] keyword[not] identifier[np] . identifier[isnan] ( identifier[_crit_Matthews] . identifier[at] [ identifier[CASRN] , literal[string] ]):
identifier[methods] . identifier[append] ( identifier[MATTHEWS] )
keyword[if] identifier[CASRN] keyword[in] identifier[_crit_CRC] . identifier[index] keyword[and] keyword[not] identifier[np] . identifier[isnan] ( identifier[_crit_CRC] . identifier[at] [ identifier[CASRN] , literal[string] ]):
identifier[methods] . identifier[append] ( identifier[CRC] )
keyword[if] identifier[CASRN] keyword[in] identifier[_crit_PSRKR4] . identifier[index] keyword[and] keyword[not] identifier[np] . identifier[isnan] ( identifier[_crit_PSRKR4] . identifier[at] [ identifier[CASRN] , literal[string] ]):
identifier[methods] . identifier[append] ( identifier[PSRK] )
keyword[if] identifier[CASRN] keyword[in] identifier[_crit_Yaws] . identifier[index] keyword[and] keyword[not] identifier[np] . identifier[isnan] ( identifier[_crit_Yaws] . identifier[at] [ identifier[CASRN] , literal[string] ]):
identifier[methods] . identifier[append] ( identifier[YAWS] )
keyword[if] identifier[CASRN] :
identifier[methods] . identifier[append] ( identifier[SURF] )
keyword[if] identifier[IgnoreMethods] :
keyword[for] identifier[Method] keyword[in] identifier[IgnoreMethods] :
keyword[if] identifier[Method] keyword[in] identifier[methods] :
identifier[methods] . identifier[remove] ( identifier[Method] )
identifier[methods] . identifier[append] ( identifier[NONE] )
keyword[return] identifier[methods]
keyword[if] identifier[AvailableMethods] :
keyword[return] identifier[list_methods] ()
keyword[if] keyword[not] identifier[Method] :
identifier[Method] = identifier[list_methods] ()[ literal[int] ]
keyword[if] identifier[Method] == identifier[IUPAC] :
identifier[_Vc] = identifier[float] ( identifier[_crit_IUPAC] . identifier[at] [ identifier[CASRN] , literal[string] ])
keyword[elif] identifier[Method] == identifier[PSRK] :
identifier[_Vc] = identifier[float] ( identifier[_crit_PSRKR4] . identifier[at] [ identifier[CASRN] , literal[string] ])
keyword[elif] identifier[Method] == identifier[MATTHEWS] :
identifier[_Vc] = identifier[float] ( identifier[_crit_Matthews] . identifier[at] [ identifier[CASRN] , literal[string] ])
keyword[elif] identifier[Method] == identifier[CRC] :
identifier[_Vc] = identifier[float] ( identifier[_crit_CRC] . identifier[at] [ identifier[CASRN] , literal[string] ])
keyword[elif] identifier[Method] == identifier[YAWS] :
identifier[_Vc] = identifier[float] ( identifier[_crit_Yaws] . identifier[at] [ identifier[CASRN] , literal[string] ])
keyword[elif] identifier[Method] == identifier[SURF] :
identifier[_Vc] = identifier[third_property] ( identifier[CASRN] = identifier[CASRN] , identifier[V] = keyword[True] )
keyword[elif] identifier[Method] == identifier[NONE] :
keyword[return] keyword[None]
keyword[else] :
keyword[raise] identifier[Exception] ( literal[string] )
keyword[return] identifier[_Vc] | def Vc(CASRN, AvailableMethods=False, Method=None, IgnoreMethods=[SURF]):
"""This function handles the retrieval of a chemical's critical
volume. Lookup is based on CASRNs. Will automatically select a data
source to use if no Method is provided; returns None if the data is not
available.
Prefered sources are 'IUPAC' for organic chemicals, and 'MATTHEWS' for
inorganic chemicals. Function has data for approximately 1000 chemicals.
Examples
--------
>>> Vc(CASRN='64-17-5')
0.000168
Parameters
----------
CASRN : string
CASRN [-]
Returns
-------
Vc : float
Critical volume, [m^3/mol]
methods : list, only returned if AvailableMethods == True
List of methods which can be used to obtain Vc with the given inputs
Other Parameters
----------------
Method : string, optional
The method name to use. Accepted methods are 'IUPAC', 'MATTHEWS',
'CRC', 'PSRK', 'YAWS', and 'SURF'. All valid values are also held
in the list `Vc_methods`.
AvailableMethods : bool, optional
If True, function will determine which methods can be used to obtain
Vc for the desired chemical, and will return methods instead of Vc
IgnoreMethods : list, optional
A list of methods to ignore in obtaining the full list of methods,
useful for for performance reasons and ignoring inaccurate methods
Notes
-----
A total of six sources are available for this function. They are:
* 'IUPAC', a series of critically evaluated
experimental datum for organic compounds in [1]_, [2]_, [3]_, [4]_,
[5]_, [6]_, [7]_, [8]_, [9]_, [10]_, [11]_, and [12]_.
* 'MATTHEWS', a series of critically
evaluated data for inorganic compounds in [13]_.
* 'CRC', a compillation of critically
evaluated data by the TRC as published in [14]_.
* 'PSRK', a compillation of experimental and
estimated data published in [15]_.
* 'YAWS', a large compillation of data from a
variety of sources; no data points are sourced in the work of [16]_.
* 'SURF', an estimation method using a
simple quadratic method for estimating Pc from Tc and Vc. This is
ignored and not returned as a method by default
References
----------
.. [1] Ambrose, Douglas, and Colin L. Young. "Vapor-Liquid Critical
Properties of Elements and Compounds. 1. An Introductory Survey."
Journal of Chemical & Engineering Data 41, no. 1 (January 1, 1996):
154-154. doi:10.1021/je950378q.
.. [2] Ambrose, Douglas, and Constantine Tsonopoulos. "Vapor-Liquid
Critical Properties of Elements and Compounds. 2. Normal Alkanes."
Journal of Chemical & Engineering Data 40, no. 3 (May 1, 1995): 531-46.
doi:10.1021/je00019a001.
.. [3] Tsonopoulos, Constantine, and Douglas Ambrose. "Vapor-Liquid
Critical Properties of Elements and Compounds. 3. Aromatic
Hydrocarbons." Journal of Chemical & Engineering Data 40, no. 3
(May 1, 1995): 547-58. doi:10.1021/je00019a002.
.. [4] Gude, Michael, and Amyn S. Teja. "Vapor-Liquid Critical Properties
of Elements and Compounds. 4. Aliphatic Alkanols." Journal of Chemical
& Engineering Data 40, no. 5 (September 1, 1995): 1025-36.
doi:10.1021/je00021a001.
.. [5] Daubert, Thomas E. "Vapor-Liquid Critical Properties of Elements
and Compounds. 5. Branched Alkanes and Cycloalkanes." Journal of
Chemical & Engineering Data 41, no. 3 (January 1, 1996): 365-72.
doi:10.1021/je9501548.
.. [6] Tsonopoulos, Constantine, and Douglas Ambrose. "Vapor-Liquid
Critical Properties of Elements and Compounds. 6. Unsaturated Aliphatic
Hydrocarbons." Journal of Chemical & Engineering Data 41, no. 4
(January 1, 1996): 645-56. doi:10.1021/je9501999.
.. [7] Kudchadker, Arvind P., Douglas Ambrose, and Constantine Tsonopoulos.
"Vapor-Liquid Critical Properties of Elements and Compounds. 7. Oxygen
Compounds Other Than Alkanols and Cycloalkanols." Journal of Chemical &
Engineering Data 46, no. 3 (May 1, 2001): 457-79. doi:10.1021/je0001680.
.. [8] Tsonopoulos, Constantine, and Douglas Ambrose. "Vapor-Liquid
Critical Properties of Elements and Compounds. 8. Organic Sulfur,
Silicon, and Tin Compounds (C + H + S, Si, and Sn)." Journal of Chemical
& Engineering Data 46, no. 3 (May 1, 2001): 480-85.
doi:10.1021/je000210r.
.. [9] Marsh, Kenneth N., Colin L. Young, David W. Morton, Douglas Ambrose,
and Constantine Tsonopoulos. "Vapor-Liquid Critical Properties of
Elements and Compounds. 9. Organic Compounds Containing Nitrogen."
Journal of Chemical & Engineering Data 51, no. 2 (March 1, 2006):
305-14. doi:10.1021/je050221q.
.. [10] Marsh, Kenneth N., Alan Abramson, Douglas Ambrose, David W. Morton,
Eugene Nikitin, Constantine Tsonopoulos, and Colin L. Young.
"Vapor-Liquid Critical Properties of Elements and Compounds. 10. Organic
Compounds Containing Halogens." Journal of Chemical & Engineering Data
52, no. 5 (September 1, 2007): 1509-38. doi:10.1021/je700336g.
.. [11] Ambrose, Douglas, Constantine Tsonopoulos, and Eugene D. Nikitin.
"Vapor-Liquid Critical Properties of Elements and Compounds. 11. Organic
Compounds Containing B + O; Halogens + N, + O, + O + S, + S, + Si;
N + O; and O + S, + Si." Journal of Chemical & Engineering Data 54,
no. 3 (March 12, 2009): 669-89. doi:10.1021/je800580z.
.. [12] Ambrose, Douglas, Constantine Tsonopoulos, Eugene D. Nikitin, David
W. Morton, and Kenneth N. Marsh. "Vapor-Liquid Critical Properties of
Elements and Compounds. 12. Review of Recent Data for Hydrocarbons and
Non-Hydrocarbons." Journal of Chemical & Engineering Data, October 5,
2015, 151005081500002. doi:10.1021/acs.jced.5b00571.
.. [13] Mathews, Joseph F. "Critical Constants of Inorganic Substances."
Chemical Reviews 72, no. 1 (February 1, 1972): 71-100.
doi:10.1021/cr60275a004.
.. [14] Haynes, W.M., Thomas J. Bruno, and David R. Lide. CRC Handbook of
Chemistry and Physics, 95E. Boca Raton, FL: CRC press, 2014.
.. [15] Horstmann, Sven, Anna Jabłoniec, Jörg Krafczyk, Kai Fischer, and
Jürgen Gmehling. "PSRK Group Contribution Equation of State:
Comprehensive Revision and Extension IV, Including Critical Constants
and Α-Function Parameters for 1000 Components." Fluid Phase Equilibria
227, no. 2 (January 25, 2005): 157-64. doi:10.1016/j.fluid.2004.11.002.
.. [16] Yaws, Carl L. Thermophysical Properties of Chemicals and
Hydrocarbons, Second Edition. Amsterdam Boston: Gulf Professional
Publishing, 2014.
"""
def list_methods():
methods = []
if CASRN in _crit_IUPAC.index and (not np.isnan(_crit_IUPAC.at[CASRN, 'Vc'])):
methods.append(IUPAC) # depends on [control=['if'], data=[]]
if CASRN in _crit_Matthews.index and (not np.isnan(_crit_Matthews.at[CASRN, 'Vc'])):
methods.append(MATTHEWS) # depends on [control=['if'], data=[]]
if CASRN in _crit_CRC.index and (not np.isnan(_crit_CRC.at[CASRN, 'Vc'])):
methods.append(CRC) # depends on [control=['if'], data=[]]
if CASRN in _crit_PSRKR4.index and (not np.isnan(_crit_PSRKR4.at[CASRN, 'Vc'])):
methods.append(PSRK) # depends on [control=['if'], data=[]]
if CASRN in _crit_Yaws.index and (not np.isnan(_crit_Yaws.at[CASRN, 'Vc'])):
methods.append(YAWS) # depends on [control=['if'], data=[]]
if CASRN:
methods.append(SURF) # depends on [control=['if'], data=[]]
if IgnoreMethods:
for Method in IgnoreMethods:
if Method in methods:
methods.remove(Method) # depends on [control=['if'], data=['Method', 'methods']] # depends on [control=['for'], data=['Method']] # depends on [control=['if'], data=[]]
methods.append(NONE)
return methods
if AvailableMethods:
return list_methods() # depends on [control=['if'], data=[]]
if not Method:
Method = list_methods()[0] # depends on [control=['if'], data=[]]
if Method == IUPAC:
_Vc = float(_crit_IUPAC.at[CASRN, 'Vc']) # depends on [control=['if'], data=[]]
elif Method == PSRK:
_Vc = float(_crit_PSRKR4.at[CASRN, 'Vc']) # depends on [control=['if'], data=[]]
elif Method == MATTHEWS:
_Vc = float(_crit_Matthews.at[CASRN, 'Vc']) # depends on [control=['if'], data=[]]
elif Method == CRC:
_Vc = float(_crit_CRC.at[CASRN, 'Vc']) # depends on [control=['if'], data=[]]
elif Method == YAWS:
_Vc = float(_crit_Yaws.at[CASRN, 'Vc']) # depends on [control=['if'], data=[]]
elif Method == SURF:
_Vc = third_property(CASRN=CASRN, V=True) # depends on [control=['if'], data=[]]
elif Method == NONE:
return None # depends on [control=['if'], data=[]]
else:
raise Exception('Failure in in function')
return _Vc |
def _ping(self, peerid, callid):
    """
    Called from remote to ask if a call made to here is still in progress.

    :param peerid: identifier of the peer that originated the call; used
        together with *callid* as the key into ``self._remote_to_local``.
    :param callid: identifier of the call being checked on.
    :return: None -- the result of the lookup is only logged.
    """
    if (peerid, callid) not in self._remote_to_local:
        # Not necessarily an error: the call may have completed between the
        # peer sending the ping and this side receiving it, so only log it.
        # logger.warn() is a deprecated alias of warning(); lazy %-style
        # arguments avoid building the message when the level is disabled.
        logger.warning("No remote call %s from %s. Might just be unfortunate timing.",
                       callid, peerid)
constant[
Called from remote to ask if a call made to here is still in progress.
]
if <ast.UnaryOp object at 0x7da18fe92860> begin[:]
call[name[logger].warn, parameter[binary_operation[constant[No remote call %s from %s. Might just be unfoutunate timing.] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da18fe90bb0>, <ast.Name object at 0x7da18fe90af0>]]]]] | keyword[def] identifier[_ping] ( identifier[self] , identifier[peerid] , identifier[callid] ):
literal[string]
keyword[if] keyword[not] ( identifier[peerid] , identifier[callid] ) keyword[in] identifier[self] . identifier[_remote_to_local] :
identifier[logger] . identifier[warn] ( literal[string] %( identifier[callid] , identifier[peerid] )) | def _ping(self, peerid, callid):
"""
Called from remote to ask if a call made to here is still in progress.
"""
if not (peerid, callid) in self._remote_to_local:
logger.warn('No remote call %s from %s. Might just be unfoutunate timing.' % (callid, peerid)) # depends on [control=['if'], data=[]] |
def get_server_certificate(server_certificate, flags=FLAGS.BASE, **conn):
    """
    Orchestrates all the calls required to fully build out an IAM Server
    Certificate in the following format:
    {
        "Arn": ...,
        "ServerCertificateName": ...,
        "Path": ...,
        "ServerCertificateId": ...,
        "UploadDate": ...,  # str
        "Expiration": ...,  # str
        "CertificateBody": ...,
        "CertificateChain": ...,
        "_version": 1
    }
    :param server_certificate: dict MUST contain the ServerCertificateName and also a
        combination of either the ARN or the account_number.
    :param flags: flag set forwarded to registry.build_out to select which sections
        are built; defaults to FLAGS.BASE.
    :param conn: dict containing enough information to make a connection to the desired
        account. Must at least have 'assume_role' key.
    :raises MissingFieldException: if ServerCertificateName is missing or empty.
    :return: dict containing the fully built out Server Certificate.
    """
    if not server_certificate.get('ServerCertificateName'):
        raise MissingFieldException('Must include ServerCertificateName.')
    # Normalize the input dict to camelized keys before handing it off.
    server_certificate = modify(server_certificate, output='camelized')
    # NOTE(review): _conn_from_args appears to derive connection kwargs from the
    # ARN/account fields of the input -- confirm against its definition.
    _conn_from_args(server_certificate, conn)
return registry.build_out(flags, start_with=server_certificate, pass_datastructure=True, **conn) | def function[get_server_certificate, parameter[server_certificate, flags]]:
constant[
Orchestrates all the calls required to fully build out an IAM User in the following format:
{
"Arn": ...,
"ServerCertificateName": ...,
"Path": ...,
"ServerCertificateId": ...,
"UploadDate": ..., # str
"Expiration": ..., # str
"CertificateBody": ...,
"CertificateChain": ...,
"_version": 1
}
:param flags: By default, Users is disabled. This is somewhat expensive as it has to call the
`get_server_certificate` call multiple times.
:param server_certificate: dict MUST contain the ServerCertificateName and also a combination of
either the ARN or the account_number.
:param output: Determines whether keys should be returned camelized or underscored.
:param conn: dict containing enough information to make a connection to the desired account.
Must at least have 'assume_role' key.
:return: dict containing fully built out Server Certificate.
]
if <ast.UnaryOp object at 0x7da20e957e50> begin[:]
<ast.Raise object at 0x7da20e955450>
variable[server_certificate] assign[=] call[name[modify], parameter[name[server_certificate]]]
call[name[_conn_from_args], parameter[name[server_certificate], name[conn]]]
return[call[name[registry].build_out, parameter[name[flags]]]] | keyword[def] identifier[get_server_certificate] ( identifier[server_certificate] , identifier[flags] = identifier[FLAGS] . identifier[BASE] ,** identifier[conn] ):
literal[string]
keyword[if] keyword[not] identifier[server_certificate] . identifier[get] ( literal[string] ):
keyword[raise] identifier[MissingFieldException] ( literal[string] )
identifier[server_certificate] = identifier[modify] ( identifier[server_certificate] , identifier[output] = literal[string] )
identifier[_conn_from_args] ( identifier[server_certificate] , identifier[conn] )
keyword[return] identifier[registry] . identifier[build_out] ( identifier[flags] , identifier[start_with] = identifier[server_certificate] , identifier[pass_datastructure] = keyword[True] ,** identifier[conn] ) | def get_server_certificate(server_certificate, flags=FLAGS.BASE, **conn):
"""
Orchestrates all the calls required to fully build out an IAM User in the following format:
{
"Arn": ...,
"ServerCertificateName": ...,
"Path": ...,
"ServerCertificateId": ...,
"UploadDate": ..., # str
"Expiration": ..., # str
"CertificateBody": ...,
"CertificateChain": ...,
"_version": 1
}
:param flags: By default, Users is disabled. This is somewhat expensive as it has to call the
`get_server_certificate` call multiple times.
:param server_certificate: dict MUST contain the ServerCertificateName and also a combination of
either the ARN or the account_number.
:param output: Determines whether keys should be returned camelized or underscored.
:param conn: dict containing enough information to make a connection to the desired account.
Must at least have 'assume_role' key.
:return: dict containing fully built out Server Certificate.
"""
if not server_certificate.get('ServerCertificateName'):
raise MissingFieldException('Must include ServerCertificateName.') # depends on [control=['if'], data=[]]
server_certificate = modify(server_certificate, output='camelized')
_conn_from_args(server_certificate, conn)
return registry.build_out(flags, start_with=server_certificate, pass_datastructure=True, **conn) |
def chunked_iter(src, size, **kw):
    """Yield successive *size*-sized chunks taken from the iterable *src*.

    When the length of *src* is not evenly divisible by *size*, the final
    chunk is simply shorter -- unless the optional *fill* keyword argument
    is supplied, in which case the last chunk is padded up to *size* with
    that value. Note that ``fill=None`` really does pad with ``None``.

    >>> list(chunked_iter(range(10), 3))
    [[0, 1, 2], [3, 4, 5], [6, 7, 8], [9]]
    >>> list(chunked_iter(range(10), 3, fill=None))
    [[0, 1, 2], [3, 4, 5], [6, 7, 8], [9, None, None]]

    String inputs yield string chunks of the same type as *src*.
    """
    if not is_iterable(src):
        raise TypeError('expected an iterable')
    size = int(size)
    if size <= 0:
        raise ValueError('expected a positive integer chunk size')
    # Padding happens only when 'fill' was explicitly passed, so None
    # remains a perfectly usable fill value.
    try:
        pad_val = kw.pop('fill')
    except KeyError:
        padding = False
        pad_val = None
    else:
        padding = True
    if kw:
        raise ValueError('got unexpected keyword arguments: %r' % kw.keys())
    if not src:
        return
    if isinstance(src, basestring):
        # Re-join character chunks so string inputs yield string chunks.
        sep = type(src)()
        finish = lambda chunk: sep.join(chunk)
    else:
        finish = lambda chunk: chunk
    it = iter(src)
    for head in it:
        # Pull one item eagerly, then up to size - 1 more from the same
        # iterator; an exhausted iterator ends the loop naturally.
        chunk = [head]
        chunk.extend(itertools.islice(it, size - 1))
        if padding and len(chunk) < size:
            chunk.extend([pad_val] * (size - len(chunk)))
        yield finish(chunk)
constant[Generates *size*-sized chunks from *src* iterable. Unless the
optional *fill* keyword argument is provided, iterables not even
divisible by *size* will have a final chunk that is smaller than
*size*.
>>> list(chunked_iter(range(10), 3))
[[0, 1, 2], [3, 4, 5], [6, 7, 8], [9]]
>>> list(chunked_iter(range(10), 3, fill=None))
[[0, 1, 2], [3, 4, 5], [6, 7, 8], [9, None, None]]
Note that ``fill=None`` in fact uses ``None`` as the fill value.
]
if <ast.UnaryOp object at 0x7da18fe92fe0> begin[:]
<ast.Raise object at 0x7da18fe93f40>
variable[size] assign[=] call[name[int], parameter[name[size]]]
if compare[name[size] less_or_equal[<=] constant[0]] begin[:]
<ast.Raise object at 0x7da18fe91d50>
variable[do_fill] assign[=] constant[True]
<ast.Try object at 0x7da18fe90880>
if name[kw] begin[:]
<ast.Raise object at 0x7da18fe936d0>
if <ast.UnaryOp object at 0x7da18fe90970> begin[:]
return[None]
variable[postprocess] assign[=] <ast.Lambda object at 0x7da18fe922c0>
if call[name[isinstance], parameter[name[src], name[basestring]]] begin[:]
variable[postprocess] assign[=] <ast.Lambda object at 0x7da18fe91420>
variable[src_iter] assign[=] call[name[iter], parameter[name[src]]]
while constant[True] begin[:]
variable[cur_chunk] assign[=] call[name[list], parameter[call[name[itertools].islice, parameter[name[src_iter], name[size]]]]]
if <ast.UnaryOp object at 0x7da2054a7a90> begin[:]
break
variable[lc] assign[=] call[name[len], parameter[name[cur_chunk]]]
if <ast.BoolOp object at 0x7da2054a5240> begin[:]
call[name[cur_chunk]][<ast.Slice object at 0x7da2054a6e00>] assign[=] binary_operation[list[[<ast.Name object at 0x7da2054a5150>]] * binary_operation[name[size] - name[lc]]]
<ast.Yield object at 0x7da2054a4820>
return[None] | keyword[def] identifier[chunked_iter] ( identifier[src] , identifier[size] ,** identifier[kw] ):
literal[string]
keyword[if] keyword[not] identifier[is_iterable] ( identifier[src] ):
keyword[raise] identifier[TypeError] ( literal[string] )
identifier[size] = identifier[int] ( identifier[size] )
keyword[if] identifier[size] <= literal[int] :
keyword[raise] identifier[ValueError] ( literal[string] )
identifier[do_fill] = keyword[True]
keyword[try] :
identifier[fill_val] = identifier[kw] . identifier[pop] ( literal[string] )
keyword[except] identifier[KeyError] :
identifier[do_fill] = keyword[False]
identifier[fill_val] = keyword[None]
keyword[if] identifier[kw] :
keyword[raise] identifier[ValueError] ( literal[string] % identifier[kw] . identifier[keys] ())
keyword[if] keyword[not] identifier[src] :
keyword[return]
identifier[postprocess] = keyword[lambda] identifier[chk] : identifier[chk]
keyword[if] identifier[isinstance] ( identifier[src] , identifier[basestring] ):
identifier[postprocess] = keyword[lambda] identifier[chk] , identifier[_sep] = identifier[type] ( identifier[src] )(): identifier[_sep] . identifier[join] ( identifier[chk] )
identifier[src_iter] = identifier[iter] ( identifier[src] )
keyword[while] keyword[True] :
identifier[cur_chunk] = identifier[list] ( identifier[itertools] . identifier[islice] ( identifier[src_iter] , identifier[size] ))
keyword[if] keyword[not] identifier[cur_chunk] :
keyword[break]
identifier[lc] = identifier[len] ( identifier[cur_chunk] )
keyword[if] identifier[lc] < identifier[size] keyword[and] identifier[do_fill] :
identifier[cur_chunk] [ identifier[lc] :]=[ identifier[fill_val] ]*( identifier[size] - identifier[lc] )
keyword[yield] identifier[postprocess] ( identifier[cur_chunk] )
keyword[return] | def chunked_iter(src, size, **kw):
"""Generates *size*-sized chunks from *src* iterable. Unless the
optional *fill* keyword argument is provided, iterables not even
divisible by *size* will have a final chunk that is smaller than
*size*.
>>> list(chunked_iter(range(10), 3))
[[0, 1, 2], [3, 4, 5], [6, 7, 8], [9]]
>>> list(chunked_iter(range(10), 3, fill=None))
[[0, 1, 2], [3, 4, 5], [6, 7, 8], [9, None, None]]
Note that ``fill=None`` in fact uses ``None`` as the fill value.
"""
# TODO: add count kwarg?
if not is_iterable(src):
raise TypeError('expected an iterable') # depends on [control=['if'], data=[]]
size = int(size)
if size <= 0:
raise ValueError('expected a positive integer chunk size') # depends on [control=['if'], data=[]]
do_fill = True
try:
fill_val = kw.pop('fill') # depends on [control=['try'], data=[]]
except KeyError:
do_fill = False
fill_val = None # depends on [control=['except'], data=[]]
if kw:
raise ValueError('got unexpected keyword arguments: %r' % kw.keys()) # depends on [control=['if'], data=[]]
if not src:
return # depends on [control=['if'], data=[]]
postprocess = lambda chk: chk
if isinstance(src, basestring):
postprocess = lambda chk, _sep=type(src)(): _sep.join(chk) # depends on [control=['if'], data=[]]
src_iter = iter(src)
while True:
cur_chunk = list(itertools.islice(src_iter, size))
if not cur_chunk:
break # depends on [control=['if'], data=[]]
lc = len(cur_chunk)
if lc < size and do_fill:
cur_chunk[lc:] = [fill_val] * (size - lc) # depends on [control=['if'], data=[]]
yield postprocess(cur_chunk) # depends on [control=['while'], data=[]]
return |
def main(*args):
    r"""Bootstrap Python projects and libraries with virtualenv and pip.

    Also check system requirements before bootstrap and run the post
    bootstrap hook if any.

    :param \*args: Command line arguments list. When empty, arguments are
        read from ``sys.argv[1:]`` instead.
    :return: ``True`` when bootstrapping stopped early (``read_config``
        returned None, pre-requirements failed, virtualenv creation failed,
        or the install failed); ``False`` when everything succeeded -- the
        caller maps this to the process exit code (``False`` -> 0).
    """
    # Create parser, read arguments from direct input or command line
    with disable_error_handler():
        args = parse_args(args or sys.argv[1:])
    # Read current config from file and command line arguments;
    # read_config returning None aborts the bootstrap.
    config = read_config(args.config, args)
    if config is None:
        return True
    bootstrap = config[__script__]
    # Check pre-requirements
    if not check_pre_requirements(bootstrap['pre_requirements']):
        return True
    # Create virtual environment
    env_args = prepare_args(config['virtualenv'], bootstrap)
    if not create_env(
        bootstrap['env'],
        env_args,
        bootstrap['recreate'],
        bootstrap['ignore_activated'],
        bootstrap['quiet']
    ):
        # Exit if couldn't create virtual environment
        return True
    # And install library or project here
    pip_args = prepare_args(config['pip'], bootstrap)
    if not install(
        bootstrap['env'],
        bootstrap['requirements'],
        pip_args,
        bootstrap['ignore_activated'],
        bootstrap['install_dev_requirements'],
        bootstrap['quiet']
    ):
        # Exit if couldn't install requirements into venv
        return True
    # Run post-bootstrap hook
    run_hook(bootstrap['hook'], bootstrap, bootstrap['quiet'])
    # All OK!
    if not bootstrap['quiet']:
        print_message('All OK!')
    # False means everything went alright, exit code: 0
return False | def function[main, parameter[]]:
constant[Bootstrap Python projects and libraries with virtualenv and pip.
Also check system requirements before bootstrap and run post bootstrap
hook if any.
:param \*args: Command line arguments list.
]
with call[name[disable_error_handler], parameter[]] begin[:]
variable[args] assign[=] call[name[parse_args], parameter[<ast.BoolOp object at 0x7da1b004f820>]]
variable[config] assign[=] call[name[read_config], parameter[name[args].config, name[args]]]
if compare[name[config] is constant[None]] begin[:]
return[constant[True]]
variable[bootstrap] assign[=] call[name[config]][name[__script__]]
if <ast.UnaryOp object at 0x7da1b004ceb0> begin[:]
return[constant[True]]
variable[env_args] assign[=] call[name[prepare_args], parameter[call[name[config]][constant[virtualenv]], name[bootstrap]]]
if <ast.UnaryOp object at 0x7da1b004d300> begin[:]
return[constant[True]]
variable[pip_args] assign[=] call[name[prepare_args], parameter[call[name[config]][constant[pip]], name[bootstrap]]]
if <ast.UnaryOp object at 0x7da1b004c730> begin[:]
return[constant[True]]
call[name[run_hook], parameter[call[name[bootstrap]][constant[hook]], name[bootstrap], call[name[bootstrap]][constant[quiet]]]]
if <ast.UnaryOp object at 0x7da1b004f1c0> begin[:]
call[name[print_message], parameter[constant[All OK!]]]
return[constant[False]] | keyword[def] identifier[main] (* identifier[args] ):
literal[string]
keyword[with] identifier[disable_error_handler] ():
identifier[args] = identifier[parse_args] ( identifier[args] keyword[or] identifier[sys] . identifier[argv] [ literal[int] :])
identifier[config] = identifier[read_config] ( identifier[args] . identifier[config] , identifier[args] )
keyword[if] identifier[config] keyword[is] keyword[None] :
keyword[return] keyword[True]
identifier[bootstrap] = identifier[config] [ identifier[__script__] ]
keyword[if] keyword[not] identifier[check_pre_requirements] ( identifier[bootstrap] [ literal[string] ]):
keyword[return] keyword[True]
identifier[env_args] = identifier[prepare_args] ( identifier[config] [ literal[string] ], identifier[bootstrap] )
keyword[if] keyword[not] identifier[create_env] (
identifier[bootstrap] [ literal[string] ],
identifier[env_args] ,
identifier[bootstrap] [ literal[string] ],
identifier[bootstrap] [ literal[string] ],
identifier[bootstrap] [ literal[string] ]
):
keyword[return] keyword[True]
identifier[pip_args] = identifier[prepare_args] ( identifier[config] [ literal[string] ], identifier[bootstrap] )
keyword[if] keyword[not] identifier[install] (
identifier[bootstrap] [ literal[string] ],
identifier[bootstrap] [ literal[string] ],
identifier[pip_args] ,
identifier[bootstrap] [ literal[string] ],
identifier[bootstrap] [ literal[string] ],
identifier[bootstrap] [ literal[string] ]
):
keyword[return] keyword[True]
identifier[run_hook] ( identifier[bootstrap] [ literal[string] ], identifier[bootstrap] , identifier[bootstrap] [ literal[string] ])
keyword[if] keyword[not] identifier[bootstrap] [ literal[string] ]:
identifier[print_message] ( literal[string] )
keyword[return] keyword[False] | def main(*args):
"""Bootstrap Python projects and libraries with virtualenv and pip.
Also check system requirements before bootstrap and run post bootstrap
hook if any.
:param \\*args: Command line arguments list.
"""
# Create parser, read arguments from direct input or command line
with disable_error_handler():
args = parse_args(args or sys.argv[1:]) # depends on [control=['with'], data=[]]
# Read current config from file and command line arguments
config = read_config(args.config, args)
if config is None:
return True # depends on [control=['if'], data=[]]
bootstrap = config[__script__]
# Check pre-requirements
if not check_pre_requirements(bootstrap['pre_requirements']):
return True # depends on [control=['if'], data=[]]
# Create virtual environment
env_args = prepare_args(config['virtualenv'], bootstrap)
if not create_env(bootstrap['env'], env_args, bootstrap['recreate'], bootstrap['ignore_activated'], bootstrap['quiet']):
# Exit if couldn't create virtual environment
return True # depends on [control=['if'], data=[]]
# And install library or project here
pip_args = prepare_args(config['pip'], bootstrap)
if not install(bootstrap['env'], bootstrap['requirements'], pip_args, bootstrap['ignore_activated'], bootstrap['install_dev_requirements'], bootstrap['quiet']):
# Exist if couldn't install requirements into venv
return True # depends on [control=['if'], data=[]]
# Run post-bootstrap hook
run_hook(bootstrap['hook'], bootstrap, bootstrap['quiet'])
# All OK!
if not bootstrap['quiet']:
print_message('All OK!') # depends on [control=['if'], data=[]]
# False means everything went alright, exit code: 0
return False |
def add_custom_func(self, func, dim, *args, **kwargs):
    """Register a user-supplied function as a feature extractor.

    Parameters
    ----------
    func : function
        a user-defined function, which accepts mdtraj.Trajectory object as
        first parameter and as many optional and named arguments as desired.
        Has to return a numpy.ndarray ndim=2.
    dim : int
        output dimension of :py:obj:`function`
    description : str or None
        a message for the describe feature list.
    args : any number of positional arguments
        these have to be in the same order as :py:obj:`func` is expecting them
    kwargs : dictionary
        named arguments passed to func

    Notes
    -----
    You can pass a description list to describe the output of your function
    by element, by passing a list of strings with the same lengths as
    dimensions. Alternatively a single element list or str will be expanded
    to match the output dimension.
    """
    # 'description' is consumed here; every remaining kwarg is forwarded
    # verbatim to func at evaluation time.
    description = kwargs.pop('description', None)
    wrapped = CustomFeature(func, dim=dim, description=description,
                            fun_args=args, fun_kwargs=kwargs)
    self.add_custom_feature(wrapped)
constant[ adds a user defined function to extract features
Parameters
----------
func : function
a user-defined function, which accepts mdtraj.Trajectory object as
first parameter and as many optional and named arguments as desired.
Has to return a numpy.ndarray ndim=2.
dim : int
output dimension of :py:obj:`function`
description: str or None
a message for the describe feature list.
args : any number of positional arguments
these have to be in the same order as :py:obj:`func` is expecting them
kwargs : dictionary
named arguments passed to func
Notes
-----
You can pass a description list to describe the output of your function by element,
by passing a list of strings with the same lengths as dimensions.
Alternatively a single element list or str will be expanded to match the output dimension.
]
variable[description] assign[=] call[name[kwargs].pop, parameter[constant[description], constant[None]]]
variable[f] assign[=] call[name[CustomFeature], parameter[name[func]]]
call[name[self].add_custom_feature, parameter[name[f]]] | keyword[def] identifier[add_custom_func] ( identifier[self] , identifier[func] , identifier[dim] ,* identifier[args] ,** identifier[kwargs] ):
literal[string]
identifier[description] = identifier[kwargs] . identifier[pop] ( literal[string] , keyword[None] )
identifier[f] = identifier[CustomFeature] ( identifier[func] , identifier[dim] = identifier[dim] , identifier[description] = identifier[description] , identifier[fun_args] = identifier[args] , identifier[fun_kwargs] = identifier[kwargs] )
identifier[self] . identifier[add_custom_feature] ( identifier[f] ) | def add_custom_func(self, func, dim, *args, **kwargs):
""" adds a user defined function to extract features
Parameters
----------
func : function
a user-defined function, which accepts mdtraj.Trajectory object as
first parameter and as many optional and named arguments as desired.
Has to return a numpy.ndarray ndim=2.
dim : int
output dimension of :py:obj:`function`
description: str or None
a message for the describe feature list.
args : any number of positional arguments
these have to be in the same order as :py:obj:`func` is expecting them
kwargs : dictionary
named arguments passed to func
Notes
-----
You can pass a description list to describe the output of your function by element,
by passing a list of strings with the same lengths as dimensions.
Alternatively a single element list or str will be expanded to match the output dimension.
"""
description = kwargs.pop('description', None)
f = CustomFeature(func, dim=dim, description=description, fun_args=args, fun_kwargs=kwargs)
self.add_custom_feature(f) |
def diff(self, sym: Symbol, n: int = 1, expand_simplify: bool = True):
"""Differentiate by scalar parameter `sym`.
Args:
sym: What to differentiate by.
n: How often to differentiate
expand_simplify: Whether to simplify the result.
Returns:
The n-th derivative.
"""
if not isinstance(sym, sympy.Basic):
raise TypeError("%s needs to be a Sympy symbol" % sym)
if sym.free_symbols.issubset(self.free_symbols):
# QuantumDerivative.create delegates internally to _diff (the
# explicit non-trivial derivative). Using `create` gives us free
# caching
deriv = QuantumDerivative.create(self, derivs={sym: n}, vals=None)
if not deriv.is_zero and expand_simplify:
deriv = deriv.expand().simplify_scalar()
return deriv
else:
# the "issubset" of free symbols is a sufficient, but not a
# necessary condition; if `sym` is non-atomic, determining whether
# `self` depends on `sym` is not completely trivial (you'd have to
# substitute with a Dummy)
return self.__class__._zero | def function[diff, parameter[self, sym, n, expand_simplify]]:
constant[Differentiate by scalar parameter `sym`.
Args:
sym: What to differentiate by.
n: How often to differentiate
expand_simplify: Whether to simplify the result.
Returns:
The n-th derivative.
]
if <ast.UnaryOp object at 0x7da18bccbac0> begin[:]
<ast.Raise object at 0x7da18bccb070>
if call[name[sym].free_symbols.issubset, parameter[name[self].free_symbols]] begin[:]
variable[deriv] assign[=] call[name[QuantumDerivative].create, parameter[name[self]]]
if <ast.BoolOp object at 0x7da20cabeef0> begin[:]
variable[deriv] assign[=] call[call[name[deriv].expand, parameter[]].simplify_scalar, parameter[]]
return[name[deriv]] | keyword[def] identifier[diff] ( identifier[self] , identifier[sym] : identifier[Symbol] , identifier[n] : identifier[int] = literal[int] , identifier[expand_simplify] : identifier[bool] = keyword[True] ):
literal[string]
keyword[if] keyword[not] identifier[isinstance] ( identifier[sym] , identifier[sympy] . identifier[Basic] ):
keyword[raise] identifier[TypeError] ( literal[string] % identifier[sym] )
keyword[if] identifier[sym] . identifier[free_symbols] . identifier[issubset] ( identifier[self] . identifier[free_symbols] ):
identifier[deriv] = identifier[QuantumDerivative] . identifier[create] ( identifier[self] , identifier[derivs] ={ identifier[sym] : identifier[n] }, identifier[vals] = keyword[None] )
keyword[if] keyword[not] identifier[deriv] . identifier[is_zero] keyword[and] identifier[expand_simplify] :
identifier[deriv] = identifier[deriv] . identifier[expand] (). identifier[simplify_scalar] ()
keyword[return] identifier[deriv]
keyword[else] :
keyword[return] identifier[self] . identifier[__class__] . identifier[_zero] | def diff(self, sym: Symbol, n: int=1, expand_simplify: bool=True):
"""Differentiate by scalar parameter `sym`.
Args:
sym: What to differentiate by.
n: How often to differentiate
expand_simplify: Whether to simplify the result.
Returns:
The n-th derivative.
"""
if not isinstance(sym, sympy.Basic):
raise TypeError('%s needs to be a Sympy symbol' % sym) # depends on [control=['if'], data=[]]
if sym.free_symbols.issubset(self.free_symbols):
# QuantumDerivative.create delegates internally to _diff (the
# explicit non-trivial derivative). Using `create` gives us free
# caching
deriv = QuantumDerivative.create(self, derivs={sym: n}, vals=None)
if not deriv.is_zero and expand_simplify:
deriv = deriv.expand().simplify_scalar() # depends on [control=['if'], data=[]]
return deriv # depends on [control=['if'], data=[]]
else:
# the "issubset" of free symbols is a sufficient, but not a
# necessary condition; if `sym` is non-atomic, determining whether
# `self` depends on `sym` is not completely trivial (you'd have to
# substitute with a Dummy)
return self.__class__._zero |
def save(self, force_insert=False):
"""
Save the model and any related many-to-many fields.
:param force_insert: Should the save force an insert?
:return: Number of rows impacted, or False.
"""
delayed = {}
for field, value in self.data.items():
model_field = getattr(type(self.instance), field, None)
# If this is a many-to-many field, we cannot save it to the instance until the instance
# is saved to the database. Collect these fields and delay the setting until after
# the model instance is saved.
if isinstance(model_field, ManyToManyField):
if value is not None:
delayed[field] = value
continue
setattr(self.instance, field, value)
rv = self.instance.save(force_insert=force_insert)
for field, value in delayed.items():
setattr(self.instance, field, value)
return rv | def function[save, parameter[self, force_insert]]:
constant[
Save the model and any related many-to-many fields.
:param force_insert: Should the save force an insert?
:return: Number of rows impacted, or False.
]
variable[delayed] assign[=] dictionary[[], []]
for taget[tuple[[<ast.Name object at 0x7da1b1800d30>, <ast.Name object at 0x7da1b1802a10>]]] in starred[call[name[self].data.items, parameter[]]] begin[:]
variable[model_field] assign[=] call[name[getattr], parameter[call[name[type], parameter[name[self].instance]], name[field], constant[None]]]
if call[name[isinstance], parameter[name[model_field], name[ManyToManyField]]] begin[:]
if compare[name[value] is_not constant[None]] begin[:]
call[name[delayed]][name[field]] assign[=] name[value]
continue
call[name[setattr], parameter[name[self].instance, name[field], name[value]]]
variable[rv] assign[=] call[name[self].instance.save, parameter[]]
for taget[tuple[[<ast.Name object at 0x7da1b19d1180>, <ast.Name object at 0x7da1b19d01c0>]]] in starred[call[name[delayed].items, parameter[]]] begin[:]
call[name[setattr], parameter[name[self].instance, name[field], name[value]]]
return[name[rv]] | keyword[def] identifier[save] ( identifier[self] , identifier[force_insert] = keyword[False] ):
literal[string]
identifier[delayed] ={}
keyword[for] identifier[field] , identifier[value] keyword[in] identifier[self] . identifier[data] . identifier[items] ():
identifier[model_field] = identifier[getattr] ( identifier[type] ( identifier[self] . identifier[instance] ), identifier[field] , keyword[None] )
keyword[if] identifier[isinstance] ( identifier[model_field] , identifier[ManyToManyField] ):
keyword[if] identifier[value] keyword[is] keyword[not] keyword[None] :
identifier[delayed] [ identifier[field] ]= identifier[value]
keyword[continue]
identifier[setattr] ( identifier[self] . identifier[instance] , identifier[field] , identifier[value] )
identifier[rv] = identifier[self] . identifier[instance] . identifier[save] ( identifier[force_insert] = identifier[force_insert] )
keyword[for] identifier[field] , identifier[value] keyword[in] identifier[delayed] . identifier[items] ():
identifier[setattr] ( identifier[self] . identifier[instance] , identifier[field] , identifier[value] )
keyword[return] identifier[rv] | def save(self, force_insert=False):
"""
Save the model and any related many-to-many fields.
:param force_insert: Should the save force an insert?
:return: Number of rows impacted, or False.
"""
delayed = {}
for (field, value) in self.data.items():
model_field = getattr(type(self.instance), field, None)
# If this is a many-to-many field, we cannot save it to the instance until the instance
# is saved to the database. Collect these fields and delay the setting until after
# the model instance is saved.
if isinstance(model_field, ManyToManyField):
if value is not None:
delayed[field] = value # depends on [control=['if'], data=['value']]
continue # depends on [control=['if'], data=[]]
setattr(self.instance, field, value) # depends on [control=['for'], data=[]]
rv = self.instance.save(force_insert=force_insert)
for (field, value) in delayed.items():
setattr(self.instance, field, value) # depends on [control=['for'], data=[]]
return rv |
def get_ip_mac_arp_list(auth, url, devid=None, devip=None):
"""
function takes devid of specific device and issues a RESTFUL call to get the IP/MAC/ARP list
from the target device.
:param auth: requests auth object #usually auth.creds from auth pyhpeimc.auth.class
:param url: base url of IMC RS interface #usually auth.url from pyhpeimc.auth.authclass
:param devid: int or str value of the target device.
:param devip: str of ipv4 address of the target device
:return: list of dictionaries containing the IP/MAC/ARP list of the target device.
:rtype: list
>>> from pyhpeimc.auth import *
>>> from pyhpeimc.plat.termaccess import *
>>> auth = IMCAuth("http://", "10.101.0.203", "8080", "admin", "admin")
>>> ip_mac_list = get_ip_mac_arp_list( auth.creds, auth.url, devid='10')
>>> ip_mac_list = get_ip_mac_arp_list( auth.creds, auth.url, devip='10.101.0.221')
>>> assert type(ip_mac_list) is list
>>> assert 'deviceId' in ip_mac_list[0]
"""
if devip is not None:
dev_details = get_dev_details(devip, auth, url)
if isinstance(dev_details, str):
print("Device not found")
return 403
else:
devid = get_dev_details(devip, auth, url)['id']
f_url = url + "/imcrs/res/access/ipMacArp/" + str(devid)
response = requests.get(f_url, auth=auth, headers=HEADERS)
try:
if response.status_code == 200:
ipmacarplist = (json.loads(response.text))
if 'ipMacArp' in ipmacarplist:
return ipmacarplist['ipMacArp']
else:
return ['this function is unsupported']
except requests.exceptions.RequestException as error:
return "Error:\n" + str(error) + " get_ip_mac_arp_list: An Error has occured" | def function[get_ip_mac_arp_list, parameter[auth, url, devid, devip]]:
constant[
function takes devid of specific device and issues a RESTFUL call to get the IP/MAC/ARP list
from the target device.
:param auth: requests auth object #usually auth.creds from auth pyhpeimc.auth.class
:param url: base url of IMC RS interface #usually auth.url from pyhpeimc.auth.authclass
:param devid: int or str value of the target device.
:param devip: str of ipv4 address of the target device
:return: list of dictionaries containing the IP/MAC/ARP list of the target device.
:rtype: list
>>> from pyhpeimc.auth import *
>>> from pyhpeimc.plat.termaccess import *
>>> auth = IMCAuth("http://", "10.101.0.203", "8080", "admin", "admin")
>>> ip_mac_list = get_ip_mac_arp_list( auth.creds, auth.url, devid='10')
>>> ip_mac_list = get_ip_mac_arp_list( auth.creds, auth.url, devip='10.101.0.221')
>>> assert type(ip_mac_list) is list
>>> assert 'deviceId' in ip_mac_list[0]
]
if compare[name[devip] is_not constant[None]] begin[:]
variable[dev_details] assign[=] call[name[get_dev_details], parameter[name[devip], name[auth], name[url]]]
if call[name[isinstance], parameter[name[dev_details], name[str]]] begin[:]
call[name[print], parameter[constant[Device not found]]]
return[constant[403]]
variable[f_url] assign[=] binary_operation[binary_operation[name[url] + constant[/imcrs/res/access/ipMacArp/]] + call[name[str], parameter[name[devid]]]]
variable[response] assign[=] call[name[requests].get, parameter[name[f_url]]]
<ast.Try object at 0x7da18f09d900> | keyword[def] identifier[get_ip_mac_arp_list] ( identifier[auth] , identifier[url] , identifier[devid] = keyword[None] , identifier[devip] = keyword[None] ):
literal[string]
keyword[if] identifier[devip] keyword[is] keyword[not] keyword[None] :
identifier[dev_details] = identifier[get_dev_details] ( identifier[devip] , identifier[auth] , identifier[url] )
keyword[if] identifier[isinstance] ( identifier[dev_details] , identifier[str] ):
identifier[print] ( literal[string] )
keyword[return] literal[int]
keyword[else] :
identifier[devid] = identifier[get_dev_details] ( identifier[devip] , identifier[auth] , identifier[url] )[ literal[string] ]
identifier[f_url] = identifier[url] + literal[string] + identifier[str] ( identifier[devid] )
identifier[response] = identifier[requests] . identifier[get] ( identifier[f_url] , identifier[auth] = identifier[auth] , identifier[headers] = identifier[HEADERS] )
keyword[try] :
keyword[if] identifier[response] . identifier[status_code] == literal[int] :
identifier[ipmacarplist] =( identifier[json] . identifier[loads] ( identifier[response] . identifier[text] ))
keyword[if] literal[string] keyword[in] identifier[ipmacarplist] :
keyword[return] identifier[ipmacarplist] [ literal[string] ]
keyword[else] :
keyword[return] [ literal[string] ]
keyword[except] identifier[requests] . identifier[exceptions] . identifier[RequestException] keyword[as] identifier[error] :
keyword[return] literal[string] + identifier[str] ( identifier[error] )+ literal[string] | def get_ip_mac_arp_list(auth, url, devid=None, devip=None):
"""
function takes devid of specific device and issues a RESTFUL call to get the IP/MAC/ARP list
from the target device.
:param auth: requests auth object #usually auth.creds from auth pyhpeimc.auth.class
:param url: base url of IMC RS interface #usually auth.url from pyhpeimc.auth.authclass
:param devid: int or str value of the target device.
:param devip: str of ipv4 address of the target device
:return: list of dictionaries containing the IP/MAC/ARP list of the target device.
:rtype: list
>>> from pyhpeimc.auth import *
>>> from pyhpeimc.plat.termaccess import *
>>> auth = IMCAuth("http://", "10.101.0.203", "8080", "admin", "admin")
>>> ip_mac_list = get_ip_mac_arp_list( auth.creds, auth.url, devid='10')
>>> ip_mac_list = get_ip_mac_arp_list( auth.creds, auth.url, devip='10.101.0.221')
>>> assert type(ip_mac_list) is list
>>> assert 'deviceId' in ip_mac_list[0]
"""
if devip is not None:
dev_details = get_dev_details(devip, auth, url)
if isinstance(dev_details, str):
print('Device not found')
return 403 # depends on [control=['if'], data=[]]
else:
devid = get_dev_details(devip, auth, url)['id'] # depends on [control=['if'], data=['devip']]
f_url = url + '/imcrs/res/access/ipMacArp/' + str(devid)
response = requests.get(f_url, auth=auth, headers=HEADERS)
try:
if response.status_code == 200:
ipmacarplist = json.loads(response.text)
if 'ipMacArp' in ipmacarplist:
return ipmacarplist['ipMacArp'] # depends on [control=['if'], data=['ipmacarplist']]
else:
return ['this function is unsupported'] # depends on [control=['if'], data=[]] # depends on [control=['try'], data=[]]
except requests.exceptions.RequestException as error:
return 'Error:\n' + str(error) + ' get_ip_mac_arp_list: An Error has occured' # depends on [control=['except'], data=['error']] |
def decrypt(self, message):
"""Decrypts a string using our own private key object.
Args:
message (string): The string of the message to decrypt.
Returns:
The unencrypted string.
"""
# Unserialize the encrypted message
message = json.loads(message)
# Set up a list for the unencrypted lines of the message
unencrypted_msg = []
for line in message:
# Convert from ascii back to bytestring
enc_line = binascii.a2b_base64(line)
# Decrypt the line using our private key
unencrypted_line = rsa.decrypt(enc_line, self.private_key)
unencrypted_msg.append(unencrypted_line)
# Convert the message from a list back into a string
unencrypted_msg = "".join(unencrypted_msg)
return unencrypted_msg | def function[decrypt, parameter[self, message]]:
constant[Decrypts a string using our own private key object.
Args:
message (string): The string of the message to decrypt.
Returns:
The unencrypted string.
]
variable[message] assign[=] call[name[json].loads, parameter[name[message]]]
variable[unencrypted_msg] assign[=] list[[]]
for taget[name[line]] in starred[name[message]] begin[:]
variable[enc_line] assign[=] call[name[binascii].a2b_base64, parameter[name[line]]]
variable[unencrypted_line] assign[=] call[name[rsa].decrypt, parameter[name[enc_line], name[self].private_key]]
call[name[unencrypted_msg].append, parameter[name[unencrypted_line]]]
variable[unencrypted_msg] assign[=] call[constant[].join, parameter[name[unencrypted_msg]]]
return[name[unencrypted_msg]] | keyword[def] identifier[decrypt] ( identifier[self] , identifier[message] ):
literal[string]
identifier[message] = identifier[json] . identifier[loads] ( identifier[message] )
identifier[unencrypted_msg] =[]
keyword[for] identifier[line] keyword[in] identifier[message] :
identifier[enc_line] = identifier[binascii] . identifier[a2b_base64] ( identifier[line] )
identifier[unencrypted_line] = identifier[rsa] . identifier[decrypt] ( identifier[enc_line] , identifier[self] . identifier[private_key] )
identifier[unencrypted_msg] . identifier[append] ( identifier[unencrypted_line] )
identifier[unencrypted_msg] = literal[string] . identifier[join] ( identifier[unencrypted_msg] )
keyword[return] identifier[unencrypted_msg] | def decrypt(self, message):
"""Decrypts a string using our own private key object.
Args:
message (string): The string of the message to decrypt.
Returns:
The unencrypted string.
"""
# Unserialize the encrypted message
message = json.loads(message)
# Set up a list for the unencrypted lines of the message
unencrypted_msg = []
for line in message:
# Convert from ascii back to bytestring
enc_line = binascii.a2b_base64(line)
# Decrypt the line using our private key
unencrypted_line = rsa.decrypt(enc_line, self.private_key)
unencrypted_msg.append(unencrypted_line) # depends on [control=['for'], data=['line']]
# Convert the message from a list back into a string
unencrypted_msg = ''.join(unencrypted_msg)
return unencrypted_msg |
def sectionsWord(self,walkTrace=tuple(),case=None,element=None,doc=None):
"""Prepares section for word output.
"""
from docx.shared import Inches
from io import BytesIO
#p.add_run('italic.').italic = True
if case == 'sectionmain':
if self.settings['clearpage']: doc.add_page_break()
doc.add_heading(self.title, level = len(walkTrace))
for p in renewliner(self.p).split('\n'):
doc.add_paragraph(p)
if case == 'figure':
bf=BytesIO()
figtitle,fig = element
width = fig.get_size_inches()[0]
width = Inches(width if width < 6 else 6)
fig.savefig(bf)
doc.add_picture(bf, width=width)
doc.add_heading('Figure {}: {}'.format(
fig._leopardref,
figtitle),level=6)
if case == 'table':
caption,t = element
tableref = t._leopardref
t = pdSeriesToFrame(t) if type(t) == pd.Series else t
if self.settings['tablehead']:
t = t.head(self.settings['tablehead'])
if self.settings['tablecolumns']:
t = t[self.settings['tablecolumns']]
doc.add_heading('Table {}: {}'.format(
tableref,
caption),level=6)
table = doc.add_table(t.shape[0]+1,t.shape[1]+1)
for tcell,col in zip(table.rows[0].cells[1:],t.columns):
tcell.text = str(col)
for trow,rrow in zip(table.rows[1:],t.to_records()):
for tcell,rcell in zip(trow.cells,rrow):
tcell.text = str(rcell) | def function[sectionsWord, parameter[self, walkTrace, case, element, doc]]:
constant[Prepares section for word output.
]
from relative_module[docx.shared] import module[Inches]
from relative_module[io] import module[BytesIO]
if compare[name[case] equal[==] constant[sectionmain]] begin[:]
if call[name[self].settings][constant[clearpage]] begin[:]
call[name[doc].add_page_break, parameter[]]
call[name[doc].add_heading, parameter[name[self].title]]
for taget[name[p]] in starred[call[call[name[renewliner], parameter[name[self].p]].split, parameter[constant[
]]]] begin[:]
call[name[doc].add_paragraph, parameter[name[p]]]
if compare[name[case] equal[==] constant[figure]] begin[:]
variable[bf] assign[=] call[name[BytesIO], parameter[]]
<ast.Tuple object at 0x7da20c6a8e20> assign[=] name[element]
variable[width] assign[=] call[call[name[fig].get_size_inches, parameter[]]][constant[0]]
variable[width] assign[=] call[name[Inches], parameter[<ast.IfExp object at 0x7da20c6abc70>]]
call[name[fig].savefig, parameter[name[bf]]]
call[name[doc].add_picture, parameter[name[bf]]]
call[name[doc].add_heading, parameter[call[constant[Figure {}: {}].format, parameter[name[fig]._leopardref, name[figtitle]]]]]
if compare[name[case] equal[==] constant[table]] begin[:]
<ast.Tuple object at 0x7da1b16e2f80> assign[=] name[element]
variable[tableref] assign[=] name[t]._leopardref
variable[t] assign[=] <ast.IfExp object at 0x7da1b16e21d0>
if call[name[self].settings][constant[tablehead]] begin[:]
variable[t] assign[=] call[name[t].head, parameter[call[name[self].settings][constant[tablehead]]]]
if call[name[self].settings][constant[tablecolumns]] begin[:]
variable[t] assign[=] call[name[t]][call[name[self].settings][constant[tablecolumns]]]
call[name[doc].add_heading, parameter[call[constant[Table {}: {}].format, parameter[name[tableref], name[caption]]]]]
variable[table] assign[=] call[name[doc].add_table, parameter[binary_operation[call[name[t].shape][constant[0]] + constant[1]], binary_operation[call[name[t].shape][constant[1]] + constant[1]]]]
for taget[tuple[[<ast.Name object at 0x7da1b16e1420>, <ast.Name object at 0x7da1b16e0dc0>]]] in starred[call[name[zip], parameter[call[call[name[table].rows][constant[0]].cells][<ast.Slice object at 0x7da1b16e3880>], name[t].columns]]] begin[:]
name[tcell].text assign[=] call[name[str], parameter[name[col]]]
for taget[tuple[[<ast.Name object at 0x7da1b16e3f40>, <ast.Name object at 0x7da1b16e0790>]]] in starred[call[name[zip], parameter[call[name[table].rows][<ast.Slice object at 0x7da1b16e2650>], call[name[t].to_records, parameter[]]]]] begin[:]
for taget[tuple[[<ast.Name object at 0x7da1b16e3340>, <ast.Name object at 0x7da1b16e0520>]]] in starred[call[name[zip], parameter[name[trow].cells, name[rrow]]]] begin[:]
name[tcell].text assign[=] call[name[str], parameter[name[rcell]]] | keyword[def] identifier[sectionsWord] ( identifier[self] , identifier[walkTrace] = identifier[tuple] (), identifier[case] = keyword[None] , identifier[element] = keyword[None] , identifier[doc] = keyword[None] ):
literal[string]
keyword[from] identifier[docx] . identifier[shared] keyword[import] identifier[Inches]
keyword[from] identifier[io] keyword[import] identifier[BytesIO]
keyword[if] identifier[case] == literal[string] :
keyword[if] identifier[self] . identifier[settings] [ literal[string] ]: identifier[doc] . identifier[add_page_break] ()
identifier[doc] . identifier[add_heading] ( identifier[self] . identifier[title] , identifier[level] = identifier[len] ( identifier[walkTrace] ))
keyword[for] identifier[p] keyword[in] identifier[renewliner] ( identifier[self] . identifier[p] ). identifier[split] ( literal[string] ):
identifier[doc] . identifier[add_paragraph] ( identifier[p] )
keyword[if] identifier[case] == literal[string] :
identifier[bf] = identifier[BytesIO] ()
identifier[figtitle] , identifier[fig] = identifier[element]
identifier[width] = identifier[fig] . identifier[get_size_inches] ()[ literal[int] ]
identifier[width] = identifier[Inches] ( identifier[width] keyword[if] identifier[width] < literal[int] keyword[else] literal[int] )
identifier[fig] . identifier[savefig] ( identifier[bf] )
identifier[doc] . identifier[add_picture] ( identifier[bf] , identifier[width] = identifier[width] )
identifier[doc] . identifier[add_heading] ( literal[string] . identifier[format] (
identifier[fig] . identifier[_leopardref] ,
identifier[figtitle] ), identifier[level] = literal[int] )
keyword[if] identifier[case] == literal[string] :
identifier[caption] , identifier[t] = identifier[element]
identifier[tableref] = identifier[t] . identifier[_leopardref]
identifier[t] = identifier[pdSeriesToFrame] ( identifier[t] ) keyword[if] identifier[type] ( identifier[t] )== identifier[pd] . identifier[Series] keyword[else] identifier[t]
keyword[if] identifier[self] . identifier[settings] [ literal[string] ]:
identifier[t] = identifier[t] . identifier[head] ( identifier[self] . identifier[settings] [ literal[string] ])
keyword[if] identifier[self] . identifier[settings] [ literal[string] ]:
identifier[t] = identifier[t] [ identifier[self] . identifier[settings] [ literal[string] ]]
identifier[doc] . identifier[add_heading] ( literal[string] . identifier[format] (
identifier[tableref] ,
identifier[caption] ), identifier[level] = literal[int] )
identifier[table] = identifier[doc] . identifier[add_table] ( identifier[t] . identifier[shape] [ literal[int] ]+ literal[int] , identifier[t] . identifier[shape] [ literal[int] ]+ literal[int] )
keyword[for] identifier[tcell] , identifier[col] keyword[in] identifier[zip] ( identifier[table] . identifier[rows] [ literal[int] ]. identifier[cells] [ literal[int] :], identifier[t] . identifier[columns] ):
identifier[tcell] . identifier[text] = identifier[str] ( identifier[col] )
keyword[for] identifier[trow] , identifier[rrow] keyword[in] identifier[zip] ( identifier[table] . identifier[rows] [ literal[int] :], identifier[t] . identifier[to_records] ()):
keyword[for] identifier[tcell] , identifier[rcell] keyword[in] identifier[zip] ( identifier[trow] . identifier[cells] , identifier[rrow] ):
identifier[tcell] . identifier[text] = identifier[str] ( identifier[rcell] ) | def sectionsWord(self, walkTrace=tuple(), case=None, element=None, doc=None):
"""Prepares section for word output.
"""
from docx.shared import Inches
from io import BytesIO
#p.add_run('italic.').italic = True
if case == 'sectionmain':
if self.settings['clearpage']:
doc.add_page_break() # depends on [control=['if'], data=[]]
doc.add_heading(self.title, level=len(walkTrace))
for p in renewliner(self.p).split('\n'):
doc.add_paragraph(p) # depends on [control=['for'], data=['p']] # depends on [control=['if'], data=[]]
if case == 'figure':
bf = BytesIO()
(figtitle, fig) = element
width = fig.get_size_inches()[0]
width = Inches(width if width < 6 else 6)
fig.savefig(bf)
doc.add_picture(bf, width=width)
doc.add_heading('Figure {}: {}'.format(fig._leopardref, figtitle), level=6) # depends on [control=['if'], data=[]]
if case == 'table':
(caption, t) = element
tableref = t._leopardref
t = pdSeriesToFrame(t) if type(t) == pd.Series else t
if self.settings['tablehead']:
t = t.head(self.settings['tablehead']) # depends on [control=['if'], data=[]]
if self.settings['tablecolumns']:
t = t[self.settings['tablecolumns']] # depends on [control=['if'], data=[]]
doc.add_heading('Table {}: {}'.format(tableref, caption), level=6)
table = doc.add_table(t.shape[0] + 1, t.shape[1] + 1)
for (tcell, col) in zip(table.rows[0].cells[1:], t.columns):
tcell.text = str(col) # depends on [control=['for'], data=[]]
for (trow, rrow) in zip(table.rows[1:], t.to_records()):
for (tcell, rcell) in zip(trow.cells, rrow):
tcell.text = str(rcell) # depends on [control=['for'], data=[]] # depends on [control=['for'], data=[]] # depends on [control=['if'], data=[]] |
async def try_sending(self,msg,timeout_secs, max_attempts):
        """Coroutine used to send message to the device when a response or ack is needed.

        This coroutine will try to send up to max_attempts time the message, waiting timeout_secs
        for an answer. If no answer is received, it will consider that the device is no longer
        accessible and will unregister it.

            :param msg: The message to send
            :type msg: aiolifx.Message
            :param timeout_secs: Number of seconds to wait for a response or ack;
                falls back on self.timeout when None.
            :type timeout_secs: int
            :param max_attempts: Maximum number of send attempts; falls back on
                self.retry_count when None.
            :type max_attempts: int
            :returns: a coroutine to be scheduled
            :rtype: coroutine
        """
        # Fall back on the instance-level defaults when not given explicitly.
        if timeout_secs is None:
            timeout_secs = self.timeout
        if max_attempts is None:
            max_attempts = self.retry_count
        attempts = 0
        while attempts < max_attempts:
            # The bookkeeping entry may already have been removed elsewhere;
            # in that case there is nothing left to wait for.
            if msg.seq_num not in self.message: return
            event = aio.Event()
            # Slot 1 of the tracking entry stores the event; presumably set by
            # the response/ack handler for this seq_num — confirm in the
            # datagram-received path.
            self.message[msg.seq_num][1]= event
            attempts += 1
            if self.transport:
                self.transport.sendto(msg.packed_message)
            try:
                # Wait until the response handler sets the event, or time out.
                myresult = await aio.wait_for(event.wait(),timeout_secs)
                break
            except Exception as inst:
                # No answer in time; retry unless attempts are exhausted.
                if attempts >= max_attempts:
                    if msg.seq_num in self.message:
                        # Slot 2 holds the caller's callback: signal the
                        # failure by invoking it with None.
                        callb = self.message[msg.seq_num][2]
                        if callb:
                            callb(self, None)
                        del(self.message[msg.seq_num])
                    #It's dead Jim
                    self.unregister()
literal[string]
keyword[if] identifier[timeout_secs] keyword[is] keyword[None] :
identifier[timeout_secs] = identifier[self] . identifier[timeout]
keyword[if] identifier[max_attempts] keyword[is] keyword[None] :
identifier[max_attempts] = identifier[self] . identifier[retry_count]
identifier[attempts] = literal[int]
keyword[while] identifier[attempts] < identifier[max_attempts] :
keyword[if] identifier[msg] . identifier[seq_num] keyword[not] keyword[in] identifier[self] . identifier[message] : keyword[return]
identifier[event] = identifier[aio] . identifier[Event] ()
identifier[self] . identifier[message] [ identifier[msg] . identifier[seq_num] ][ literal[int] ]= identifier[event]
identifier[attempts] += literal[int]
keyword[if] identifier[self] . identifier[transport] :
identifier[self] . identifier[transport] . identifier[sendto] ( identifier[msg] . identifier[packed_message] )
keyword[try] :
identifier[myresult] = keyword[await] identifier[aio] . identifier[wait_for] ( identifier[event] . identifier[wait] (), identifier[timeout_secs] )
keyword[break]
keyword[except] identifier[Exception] keyword[as] identifier[inst] :
keyword[if] identifier[attempts] >= identifier[max_attempts] :
keyword[if] identifier[msg] . identifier[seq_num] keyword[in] identifier[self] . identifier[message] :
identifier[callb] = identifier[self] . identifier[message] [ identifier[msg] . identifier[seq_num] ][ literal[int] ]
keyword[if] identifier[callb] :
identifier[callb] ( identifier[self] , keyword[None] )
keyword[del] ( identifier[self] . identifier[message] [ identifier[msg] . identifier[seq_num] ])
identifier[self] . identifier[unregister] () | async def try_sending(self, msg, timeout_secs, max_attempts):
"""Coroutine used to send message to the device when a response or ack is needed.
This coroutine will try to send up to max_attempts time the message, waiting timeout_secs
for an answer. If no answer is received, it will consider that the device is no longer
accessible and will unregister it.
:param msg: The message to send
:type msg: aiolifx.Message
:param timeout_secs: Number of seconds to wait for a response or ack
:type timeout_secs: int
:param max_attempts: .
:type max_attempts: int
:returns: a coroutine to be scheduled
:rtype: coroutine
"""
if timeout_secs is None:
timeout_secs = self.timeout # depends on [control=['if'], data=['timeout_secs']]
if max_attempts is None:
max_attempts = self.retry_count # depends on [control=['if'], data=['max_attempts']]
attempts = 0
while attempts < max_attempts:
if msg.seq_num not in self.message:
return # depends on [control=['if'], data=[]]
event = aio.Event()
self.message[msg.seq_num][1] = event
attempts += 1
if self.transport:
self.transport.sendto(msg.packed_message) # depends on [control=['if'], data=[]]
try:
myresult = await aio.wait_for(event.wait(), timeout_secs)
break # depends on [control=['try'], data=[]]
except Exception as inst:
if attempts >= max_attempts:
if msg.seq_num in self.message:
callb = self.message[msg.seq_num][2]
if callb:
callb(self, None) # depends on [control=['if'], data=[]]
del self.message[msg.seq_num] # depends on [control=['if'], data=[]]
#It's dead Jim
self.unregister() # depends on [control=['if'], data=[]] # depends on [control=['except'], data=[]] # depends on [control=['while'], data=['attempts', 'max_attempts']] |
def _configure_manager(self):
    """
    Create the manager responsible for CloudDNS domain resources.
    """
    manager = CloudDNSManager(
        self,
        resource_class=CloudDNSDomain,
        response_key="domains",
        plural_response_key="domains",
        uri_base="domains",
    )
    self._manager = manager
constant[
Creates a manager to handle the instances, and another
to handle flavors.
]
name[self]._manager assign[=] call[name[CloudDNSManager], parameter[name[self]]] | keyword[def] identifier[_configure_manager] ( identifier[self] ):
literal[string]
identifier[self] . identifier[_manager] = identifier[CloudDNSManager] ( identifier[self] , identifier[resource_class] = identifier[CloudDNSDomain] ,
identifier[response_key] = literal[string] , identifier[plural_response_key] = literal[string] ,
identifier[uri_base] = literal[string] ) | def _configure_manager(self):
"""
Creates a manager to handle the instances, and another
to handle flavors.
"""
self._manager = CloudDNSManager(self, resource_class=CloudDNSDomain, response_key='domains', plural_response_key='domains', uri_base='domains') |
def get_current_channel(self):
    """Get the current tv channel.

    Returns the 'payload' field of the last response, or an empty dict
    when no response was received.
    """
    self.request(EP_GET_CURRENT_CHANNEL)
    response = self.last_response
    if response is None:
        return {}
    return response.get('payload')
constant[Get the current tv channel.]
call[name[self].request, parameter[name[EP_GET_CURRENT_CHANNEL]]]
return[<ast.IfExp object at 0x7da1b06262f0>] | keyword[def] identifier[get_current_channel] ( identifier[self] ):
literal[string]
identifier[self] . identifier[request] ( identifier[EP_GET_CURRENT_CHANNEL] )
keyword[return] {} keyword[if] identifier[self] . identifier[last_response] keyword[is] keyword[None] keyword[else] identifier[self] . identifier[last_response] . identifier[get] ( literal[string] ) | def get_current_channel(self):
"""Get the current tv channel."""
self.request(EP_GET_CURRENT_CHANNEL)
return {} if self.last_response is None else self.last_response.get('payload') |
def set_unit(self, unit):
    """Set the GPS step scale
    """
    # Core astropy time units (and None) are accepted as-is.
    if unit is None or (isinstance(unit, units.NamedUnit)
                        and unit.physical_type == 'time'):
        self._unit = unit
        return
    # A bare number means "this many seconds per step".
    if isinstance(unit, Number):
        unit = units.Unit(unit * units.second)
    # Anything else must parse as a unit, tolerating plural spellings
    # (e.g. 'seconds') by retrying without the trailing 's'.
    try:
        unit = units.Unit(unit)
    except ValueError as exc:
        try:
            unit = units.Unit(str(unit).rstrip('s'))
        except ValueError:
            raise exc
    # Decompose and make sure this is really a pure time unit.
    decomposed = unit.decompose()
    if decomposed.bases != [units.second]:
        raise ValueError("Cannot set GPS unit to %s" % unit)
    # Map onto the canonical equivalent from TIME_UNITS, if any.
    for candidate in TIME_UNITS:
        if candidate.decompose().scale == decomposed.scale:
            self._unit = candidate
            return
    raise ValueError("Unrecognised unit: %s" % unit)
constant[Set the GPS step scale
]
if <ast.BoolOp object at 0x7da204567bb0> begin[:]
name[self]._unit assign[=] name[unit]
return[None]
if call[name[isinstance], parameter[name[unit], name[Number]]] begin[:]
variable[unit] assign[=] call[name[units].Unit, parameter[binary_operation[name[unit] * name[units].second]]]
<ast.Try object at 0x7da204567340>
variable[dec] assign[=] call[name[unit].decompose, parameter[]]
if compare[name[dec].bases not_equal[!=] list[[<ast.Attribute object at 0x7da204565090>]]] begin[:]
<ast.Raise object at 0x7da204565840>
for taget[name[other]] in starred[name[TIME_UNITS]] begin[:]
if compare[call[name[other].decompose, parameter[]].scale equal[==] name[dec].scale] begin[:]
name[self]._unit assign[=] name[other]
return[None]
<ast.Raise object at 0x7da204567be0> | keyword[def] identifier[set_unit] ( identifier[self] , identifier[unit] ):
literal[string]
keyword[if] identifier[unit] keyword[is] keyword[None] keyword[or] ( identifier[isinstance] ( identifier[unit] , identifier[units] . identifier[NamedUnit] ) keyword[and]
identifier[unit] . identifier[physical_type] == literal[string] ):
identifier[self] . identifier[_unit] = identifier[unit]
keyword[return]
keyword[if] identifier[isinstance] ( identifier[unit] , identifier[Number] ):
identifier[unit] = identifier[units] . identifier[Unit] ( identifier[unit] * identifier[units] . identifier[second] )
keyword[try] :
identifier[unit] = identifier[units] . identifier[Unit] ( identifier[unit] )
keyword[except] identifier[ValueError] keyword[as] identifier[exc] :
keyword[try] :
identifier[unit] = identifier[units] . identifier[Unit] ( identifier[str] ( identifier[unit] ). identifier[rstrip] ( literal[string] ))
keyword[except] identifier[ValueError] :
keyword[raise] identifier[exc]
identifier[dec] = identifier[unit] . identifier[decompose] ()
keyword[if] identifier[dec] . identifier[bases] !=[ identifier[units] . identifier[second] ]:
keyword[raise] identifier[ValueError] ( literal[string] % identifier[unit] )
keyword[for] identifier[other] keyword[in] identifier[TIME_UNITS] :
keyword[if] identifier[other] . identifier[decompose] (). identifier[scale] == identifier[dec] . identifier[scale] :
identifier[self] . identifier[_unit] = identifier[other]
keyword[return]
keyword[raise] identifier[ValueError] ( literal[string] % identifier[unit] ) | def set_unit(self, unit):
"""Set the GPS step scale
"""
# accept all core time units
if unit is None or (isinstance(unit, units.NamedUnit) and unit.physical_type == 'time'):
self._unit = unit
return # depends on [control=['if'], data=[]]
# convert float to custom unit in seconds
if isinstance(unit, Number):
unit = units.Unit(unit * units.second) # depends on [control=['if'], data=[]]
# otherwise, should be able to convert to a time unit
try:
unit = units.Unit(unit) # depends on [control=['try'], data=[]]
except ValueError as exc:
# catch annoying plurals
try:
unit = units.Unit(str(unit).rstrip('s')) # depends on [control=['try'], data=[]]
except ValueError:
raise exc # depends on [control=['except'], data=[]] # depends on [control=['except'], data=['exc']]
# decompose and check that it's actually a time unit
dec = unit.decompose()
if dec.bases != [units.second]:
raise ValueError('Cannot set GPS unit to %s' % unit) # depends on [control=['if'], data=[]]
# check equivalent units
for other in TIME_UNITS:
if other.decompose().scale == dec.scale:
self._unit = other
return # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['other']]
raise ValueError('Unrecognised unit: %s' % unit) |
def AddFile(self, filepath):
    """Adds a file path as a source.

    Args:
      filepath: a string representing a path to the file.

    Returns:
      True if the file is not an already existing source.
    """
    if filepath in self._files:
        return False
    self._files.add(filepath)
    return True
constant[Adds a file path as a source.
Args:
filepath: a string representing a path to the file.
Returns:
True if the file is not an already existing source.
]
if compare[name[filepath] <ast.NotIn object at 0x7da2590d7190> name[self]._files] begin[:]
call[name[self]._files.add, parameter[name[filepath]]]
return[constant[True]]
return[constant[False]] | keyword[def] identifier[AddFile] ( identifier[self] , identifier[filepath] ):
literal[string]
keyword[if] identifier[filepath] keyword[not] keyword[in] identifier[self] . identifier[_files] :
identifier[self] . identifier[_files] . identifier[add] ( identifier[filepath] )
keyword[return] keyword[True]
keyword[return] keyword[False] | def AddFile(self, filepath):
"""Adds a file path as a source.
Args:
filepath: a string representing a path to the file.
Returns:
True if the file is not an already existing source.
"""
if filepath not in self._files:
self._files.add(filepath)
return True # depends on [control=['if'], data=['filepath']]
return False |
def _bind_socket(self, bindaddr):
"""Create a listening server socket."""
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
sock.setblocking(0)
try:
sock.bind(bindaddr)
except Exception:
self._logger.exception("Unable to bind to %s" % str(bindaddr))
raise
sock.listen(self.BACKLOG)
return sock | def function[_bind_socket, parameter[self, bindaddr]]:
constant[Create a listening server socket.]
variable[sock] assign[=] call[name[socket].socket, parameter[name[socket].AF_INET, name[socket].SOCK_STREAM]]
call[name[sock].setsockopt, parameter[name[socket].SOL_SOCKET, name[socket].SO_REUSEADDR, constant[1]]]
call[name[sock].setblocking, parameter[constant[0]]]
<ast.Try object at 0x7da1b052a0b0>
call[name[sock].listen, parameter[name[self].BACKLOG]]
return[name[sock]] | keyword[def] identifier[_bind_socket] ( identifier[self] , identifier[bindaddr] ):
literal[string]
identifier[sock] = identifier[socket] . identifier[socket] ( identifier[socket] . identifier[AF_INET] , identifier[socket] . identifier[SOCK_STREAM] )
identifier[sock] . identifier[setsockopt] ( identifier[socket] . identifier[SOL_SOCKET] , identifier[socket] . identifier[SO_REUSEADDR] , literal[int] )
identifier[sock] . identifier[setblocking] ( literal[int] )
keyword[try] :
identifier[sock] . identifier[bind] ( identifier[bindaddr] )
keyword[except] identifier[Exception] :
identifier[self] . identifier[_logger] . identifier[exception] ( literal[string] % identifier[str] ( identifier[bindaddr] ))
keyword[raise]
identifier[sock] . identifier[listen] ( identifier[self] . identifier[BACKLOG] )
keyword[return] identifier[sock] | def _bind_socket(self, bindaddr):
"""Create a listening server socket."""
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
sock.setblocking(0)
try:
sock.bind(bindaddr) # depends on [control=['try'], data=[]]
except Exception:
self._logger.exception('Unable to bind to %s' % str(bindaddr))
raise # depends on [control=['except'], data=[]]
sock.listen(self.BACKLOG)
return sock |
def open_file(self, path,
              mode="r",
              buff_size=0,
              replication=0,
              blocksize=0,
              encoding=None,
              errors=None):
    """
    Open an HDFS file.

    Supported opening modes are "r", "w", "a". In addition, a
    trailing "t" can be added to specify text mode (e.g., "rt" =
    open for reading text).

    Pass 0 as ``buff_size``, ``replication`` or ``blocksize`` to fall
    back on the "configured" values, i.e., the ones set in the Hadoop
    configuration files.

    :type path: str
    :param path: the full path to the file
    :type mode: str
    :param mode: opening mode
    :type buff_size: int
    :param buff_size: read/write buffer size in bytes
    :type replication: int
    :param replication: HDFS block replication
    :type blocksize: int
    :param blocksize: HDFS block size
    :rtype: :class:`~.file.hdfs_file`
    :return: handle to the open file
    """
    _complain_ifclosed(self.closed)
    if not path:
        raise ValueError("Empty path")
    base_mode, text_mode = common.parse_mode(mode)
    # With no host configured, operate on the local filesystem.
    if not self.host:
        handle = local_file(self, path, base_mode)
        if text_mode:
            buffer_cls = io.BufferedReader if base_mode == "r" else io.BufferedWriter
            handle = TextIOWrapper(buffer_cls(handle), encoding, errors)
        return handle
    raw = self.fs.open_file(path, base_mode, buff_size, replication, blocksize)
    wrapper_cls = FileIO if text_mode else hdfs_file
    return wrapper_cls(raw, self, mode)
constant[
Open an HDFS file.
Supported opening modes are "r", "w", "a". In addition, a
trailing "t" can be added to specify text mode (e.g., "rt" =
open for reading text).
Pass 0 as ``buff_size``, ``replication`` or ``blocksize`` if you want
to use the "configured" values, i.e., the ones set in the Hadoop
configuration files.
:type path: str
:param path: the full path to the file
:type mode: str
:param mode: opening mode
:type buff_size: int
:param buff_size: read/write buffer size in bytes
:type replication: int
:param replication: HDFS block replication
:type blocksize: int
:param blocksize: HDFS block size
:rtpye: :class:`~.file.hdfs_file`
:return: handle to the open file
]
call[name[_complain_ifclosed], parameter[name[self].closed]]
if <ast.UnaryOp object at 0x7da1b12f26e0> begin[:]
<ast.Raise object at 0x7da1b12f24a0>
<ast.Tuple object at 0x7da1b12f3700> assign[=] call[name[common].parse_mode, parameter[name[mode]]]
if <ast.UnaryOp object at 0x7da1b12f2b90> begin[:]
variable[fret] assign[=] call[name[local_file], parameter[name[self], name[path], name[m]]]
if name[is_text] begin[:]
variable[cls] assign[=] <ast.IfExp object at 0x7da1b12f2da0>
variable[fret] assign[=] call[name[TextIOWrapper], parameter[call[name[cls], parameter[name[fret]]], name[encoding], name[errors]]]
return[name[fret]]
variable[f] assign[=] call[name[self].fs.open_file, parameter[name[path], name[m], name[buff_size], name[replication], name[blocksize]]]
variable[cls] assign[=] <ast.IfExp object at 0x7da1b12f2e60>
variable[fret] assign[=] call[name[cls], parameter[name[f], name[self], name[mode]]]
return[name[fret]] | keyword[def] identifier[open_file] ( identifier[self] , identifier[path] ,
identifier[mode] = literal[string] ,
identifier[buff_size] = literal[int] ,
identifier[replication] = literal[int] ,
identifier[blocksize] = literal[int] ,
identifier[encoding] = keyword[None] ,
identifier[errors] = keyword[None] ):
literal[string]
identifier[_complain_ifclosed] ( identifier[self] . identifier[closed] )
keyword[if] keyword[not] identifier[path] :
keyword[raise] identifier[ValueError] ( literal[string] )
identifier[m] , identifier[is_text] = identifier[common] . identifier[parse_mode] ( identifier[mode] )
keyword[if] keyword[not] identifier[self] . identifier[host] :
identifier[fret] = identifier[local_file] ( identifier[self] , identifier[path] , identifier[m] )
keyword[if] identifier[is_text] :
identifier[cls] = identifier[io] . identifier[BufferedReader] keyword[if] identifier[m] == literal[string] keyword[else] identifier[io] . identifier[BufferedWriter]
identifier[fret] = identifier[TextIOWrapper] ( identifier[cls] ( identifier[fret] ), identifier[encoding] , identifier[errors] )
keyword[return] identifier[fret]
identifier[f] = identifier[self] . identifier[fs] . identifier[open_file] ( identifier[path] , identifier[m] , identifier[buff_size] , identifier[replication] , identifier[blocksize] )
identifier[cls] = identifier[FileIO] keyword[if] identifier[is_text] keyword[else] identifier[hdfs_file]
identifier[fret] = identifier[cls] ( identifier[f] , identifier[self] , identifier[mode] )
keyword[return] identifier[fret] | def open_file(self, path, mode='r', buff_size=0, replication=0, blocksize=0, encoding=None, errors=None):
"""
Open an HDFS file.
Supported opening modes are "r", "w", "a". In addition, a
trailing "t" can be added to specify text mode (e.g., "rt" =
open for reading text).
Pass 0 as ``buff_size``, ``replication`` or ``blocksize`` if you want
to use the "configured" values, i.e., the ones set in the Hadoop
configuration files.
:type path: str
:param path: the full path to the file
:type mode: str
:param mode: opening mode
:type buff_size: int
:param buff_size: read/write buffer size in bytes
:type replication: int
:param replication: HDFS block replication
:type blocksize: int
:param blocksize: HDFS block size
:rtpye: :class:`~.file.hdfs_file`
:return: handle to the open file
"""
_complain_ifclosed(self.closed)
if not path:
raise ValueError('Empty path') # depends on [control=['if'], data=[]]
(m, is_text) = common.parse_mode(mode)
if not self.host:
fret = local_file(self, path, m)
if is_text:
cls = io.BufferedReader if m == 'r' else io.BufferedWriter
fret = TextIOWrapper(cls(fret), encoding, errors) # depends on [control=['if'], data=[]]
return fret # depends on [control=['if'], data=[]]
f = self.fs.open_file(path, m, buff_size, replication, blocksize)
cls = FileIO if is_text else hdfs_file
fret = cls(f, self, mode)
return fret |
def save(self, file=CONFIG_FILE):
    """
    Save configuration to provided path as a yaml file

    :param file: destination path; any missing parent directories are
        created first.
    """
    parent = os.path.dirname(file)
    # os.makedirs("") raises FileNotFoundError, so only create a parent
    # directory when the path actually has one (bare filenames go to cwd).
    if parent:
        os.makedirs(parent, exist_ok=True)
    with open(file, "w") as f:
        # width=inf keeps long scalar values on a single line.
        yaml.dump(self._settings, f, Dumper=yaml.RoundTripDumper, width=float("inf"))
constant[
Save configuration to provided path as a yaml file
]
call[name[os].makedirs, parameter[call[name[os].path.dirname, parameter[name[file]]]]]
with call[name[open], parameter[name[file], constant[w]]] begin[:]
call[name[yaml].dump, parameter[name[self]._settings, name[f]]] | keyword[def] identifier[save] ( identifier[self] , identifier[file] = identifier[CONFIG_FILE] ):
literal[string]
identifier[os] . identifier[makedirs] ( identifier[os] . identifier[path] . identifier[dirname] ( identifier[file] ), identifier[exist_ok] = keyword[True] )
keyword[with] identifier[open] ( identifier[file] , literal[string] ) keyword[as] identifier[f] :
identifier[yaml] . identifier[dump] ( identifier[self] . identifier[_settings] , identifier[f] , identifier[Dumper] = identifier[yaml] . identifier[RoundTripDumper] , identifier[width] = identifier[float] ( literal[string] )) | def save(self, file=CONFIG_FILE):
"""
Save configuration to provided path as a yaml file
"""
os.makedirs(os.path.dirname(file), exist_ok=True)
with open(file, 'w') as f:
yaml.dump(self._settings, f, Dumper=yaml.RoundTripDumper, width=float('inf')) # depends on [control=['with'], data=['f']] |
def todoppler(self, rf, v0, rfq):
    """Convert a radialvelocity measure or a frequency measure to a
    doppler measure. In the case of a frequency, a rest frequency has
    to be specified. The type of doppler wanted (e.g. *RADIO*) has to be
    specified.

    :param rf: doppler reference code (see :meth:`doppler`)
    :param v0: a radialvelocity or frequency measure
    :param rfq: frequency measure or quantity

    Example::

        f = dm.frequency('lsrk','1410MHz')           # specify a frequency
        dm.todoppler('radio', f, dm.constants('HI')) # doppler using HI rest
    """
    # Coerce the rest frequency into a quantity if given as a measure
    # or a string.
    if is_measure(rfq) and rfq['type'] == 'frequency':
        rfq = dq.quantity(rfq['m0'])
    elif isinstance(rfq, str):
        rfq = dq.quantity(rfq)
    if not is_measure(v0):
        raise TypeError('Illegal Frequency specified')
    if v0['type'] == 'radialvelocity':
        # Radial velocities need no rest frequency.
        return self.todop(v0, dq.quantity(1., 'Hz'))
    if (v0['type'] == 'frequency' and dq.is_quantity(rfq)
            and rfq.conforms(dq.quantity('Hz'))):
        return self.todop(v0, rfq)
    raise TypeError('Illegal Doppler or rest frequency specified')
constant[Convert a radialvelocity measure or a frequency measure to a
doppler measure. In the case of a frequency, a rest frequency has
to be specified. The type of doppler wanted (e.g. *RADIO*) has to be
specified.
:param rf: doppler reference code (see :meth:`doppler`)
:param v0: a radialvelocity or frequency measure
:param rfq: frequency measure or quantity
Example::
f = dm.frequency('lsrk','1410MHz') # specify a frequency
dm.todoppler('radio', f, dm.constants('HI')) # give doppler, using HI rest
]
if <ast.BoolOp object at 0x7da18dc04df0> begin[:]
variable[rfq] assign[=] call[name[dq].quantity, parameter[call[name[rfq]][constant[m0]]]]
if call[name[is_measure], parameter[name[v0]]] begin[:]
if compare[call[name[v0]][constant[type]] equal[==] constant[radialvelocity]] begin[:]
return[call[name[self].todop, parameter[name[v0], call[name[dq].quantity, parameter[constant[1.0], constant[Hz]]]]]] | keyword[def] identifier[todoppler] ( identifier[self] , identifier[rf] , identifier[v0] , identifier[rfq] ):
literal[string]
keyword[if] identifier[is_measure] ( identifier[rfq] ) keyword[and] identifier[rfq] [ literal[string] ]== literal[string] :
identifier[rfq] = identifier[dq] . identifier[quantity] ( identifier[rfq] [ literal[string] ])
keyword[elif] identifier[isinstance] ( identifier[rfq] , identifier[str] ):
identifier[rfq] = identifier[dq] . identifier[quantity] ( identifier[rfq] )
keyword[if] identifier[is_measure] ( identifier[v0] ):
keyword[if] identifier[v0] [ literal[string] ]== literal[string] :
keyword[return] identifier[self] . identifier[todop] ( identifier[v0] , identifier[dq] . identifier[quantity] ( literal[int] , literal[string] ))
keyword[elif] identifier[v0] [ literal[string] ]== literal[string] keyword[and] identifier[dq] . identifier[is_quantity] ( identifier[rfq] ) keyword[and] identifier[rfq] . identifier[conforms] ( identifier[dq] . identifier[quantity] ( literal[string] )):
keyword[return] identifier[self] . identifier[todop] ( identifier[v0] , identifier[rfq] )
keyword[else] :
keyword[raise] identifier[TypeError] ( literal[string] )
keyword[else] :
keyword[raise] identifier[TypeError] ( literal[string] ) | def todoppler(self, rf, v0, rfq):
"""Convert a radialvelocity measure or a frequency measure to a
doppler measure. In the case of a frequency, a rest frequency has
to be specified. The type of doppler wanted (e.g. *RADIO*) has to be
specified.
:param rf: doppler reference code (see :meth:`doppler`)
:param v0: a radialvelocity or frequency measure
:param rfq: frequency measure or quantity
Example::
f = dm.frequency('lsrk','1410MHz') # specify a frequency
dm.todoppler('radio', f, dm.constants('HI')) # give doppler, using HI rest
"""
if is_measure(rfq) and rfq['type'] == 'frequency':
rfq = dq.quantity(rfq['m0']) # depends on [control=['if'], data=[]]
elif isinstance(rfq, str):
rfq = dq.quantity(rfq) # depends on [control=['if'], data=[]]
if is_measure(v0):
if v0['type'] == 'radialvelocity':
return self.todop(v0, dq.quantity(1.0, 'Hz')) # depends on [control=['if'], data=[]]
elif v0['type'] == 'frequency' and dq.is_quantity(rfq) and rfq.conforms(dq.quantity('Hz')):
return self.todop(v0, rfq) # depends on [control=['if'], data=[]]
else:
raise TypeError('Illegal Doppler or rest frequency specified') # depends on [control=['if'], data=[]]
else:
raise TypeError('Illegal Frequency specified') |
def build_trees(fclade_counts, namedict):
    """
    A subfunc of consensus_tree(). Build an unrooted consensus tree
    from filtered clade counts.

    :param fclade_counts: sequence of (clade, count) pairs, where each
        clade is an iterable of 0/1 membership flags over the tips.
    :param namedict: maps tip index -> tip name.
    :returns: (tree, subtrees) where tree is the unrooted consensus
        ete3.Tree and subtrees lists every remaining node.
    """
    ## storage
    nodes = {}
    idxarr = np.arange(len(fclade_counts[0][0]))
    queue = []

    ## create dict of clade counts and set keys
    countdict = defaultdict(int)
    for clade, count in fclade_counts:
        # use builtin `bool`: the `np.bool` alias was removed in NumPy >= 1.24
        mask = np.int_(list(clade)).astype(bool)
        ccx = idxarr[mask]
        queue.append((len(ccx), frozenset(ccx)))
        countdict[frozenset(ccx)] = count

    while queue:
        # process smallest clades first
        queue.sort()
        (clade_size, clade) = queue.pop(0)
        new_queue = []

        # search for ancestors of clade
        for (_, ancestor) in queue:
            if clade.issubset(ancestor):
                # update ancestor such that, in the following example:
                # ancestor == {1, 2, 3, 4}
                # clade == {2, 3}
                # new_ancestor == {1, {2, 3}, 4}
                new_ancestor = (ancestor - clade) | frozenset([clade])
                countdict[new_ancestor] = countdict.pop(ancestor)
                ancestor = new_ancestor
            new_queue.append((len(ancestor), ancestor))

        # if the clade is a tip, then we have a name
        if clade_size == 1:
            name = namedict[list(clade)[0]]
        else:
            name = None

        # the clade will not be in nodes if it is a tip
        children = [nodes.pop(c) for c in clade if c in nodes]
        node = ete3.Tree(name=name)
        for child in children:
            node.add_child(child)
        if not node.is_leaf():
            # store clade occurrence as a percentage on internal nodes
            node.dist = int(round(100 * countdict[clade]))
            node.support = int(round(100 * countdict[clade]))
        else:
            node.dist = 100
            node.support = 100

        nodes[clade] = node
        queue = new_queue

    # dict views are not indexable on Python 3 -- materialize first
    # (the original `nodes.values()[0]` raised TypeError there)
    tre = list(nodes.values())[0]
    tre.unroot()

    ## return the tree and other trees if present
    return tre, list(nodes.values())
constant[
A subfunc of consensus_tree(). Build an unrooted consensus tree
from filtered clade counts.
]
variable[nodes] assign[=] dictionary[[], []]
variable[idxarr] assign[=] call[name[np].arange, parameter[call[name[len], parameter[call[call[name[fclade_counts]][constant[0]]][constant[0]]]]]]
variable[queue] assign[=] list[[]]
variable[countdict] assign[=] call[name[defaultdict], parameter[name[int]]]
for taget[tuple[[<ast.Name object at 0x7da20c6c6950>, <ast.Name object at 0x7da20c6c7880>]]] in starred[name[fclade_counts]] begin[:]
variable[mask] assign[=] call[call[name[np].int_, parameter[call[name[list], parameter[name[clade]]]]].astype, parameter[name[np].bool]]
variable[ccx] assign[=] call[name[idxarr]][name[mask]]
call[name[queue].append, parameter[tuple[[<ast.Call object at 0x7da20c6c6bc0>, <ast.Call object at 0x7da20c6c6c80>]]]]
call[name[countdict]][call[name[frozenset], parameter[name[ccx]]]] assign[=] name[count]
while name[queue] begin[:]
call[name[queue].sort, parameter[]]
<ast.Tuple object at 0x7da20c6c73d0> assign[=] call[name[queue].pop, parameter[constant[0]]]
variable[new_queue] assign[=] list[[]]
for taget[tuple[[<ast.Name object at 0x7da20c6c5c00>, <ast.Name object at 0x7da20c6c4130>]]] in starred[name[queue]] begin[:]
if call[name[clade].issubset, parameter[name[ancestor]]] begin[:]
variable[new_ancestor] assign[=] binary_operation[binary_operation[name[ancestor] - name[clade]] <ast.BitOr object at 0x7da2590d6aa0> call[name[frozenset], parameter[list[[<ast.Name object at 0x7da20c6c7d90>]]]]]
call[name[countdict]][name[new_ancestor]] assign[=] call[name[countdict].pop, parameter[name[ancestor]]]
variable[ancestor] assign[=] name[new_ancestor]
call[name[new_queue].append, parameter[tuple[[<ast.Call object at 0x7da20c6c64a0>, <ast.Name object at 0x7da20c6c7c40>]]]]
if compare[name[clade_size] equal[==] constant[1]] begin[:]
variable[name] assign[=] call[call[name[list], parameter[name[clade]]]][constant[0]]
variable[name] assign[=] call[name[namedict]][name[name]]
variable[children] assign[=] <ast.ListComp object at 0x7da20c6c4430>
variable[node] assign[=] call[name[ete3].Tree, parameter[]]
for taget[name[child]] in starred[name[children]] begin[:]
call[name[node].add_child, parameter[name[child]]]
if <ast.UnaryOp object at 0x7da20c6c4ee0> begin[:]
name[node].dist assign[=] call[name[int], parameter[call[name[round], parameter[binary_operation[constant[100] * call[name[countdict]][name[clade]]]]]]]
name[node].support assign[=] call[name[int], parameter[call[name[round], parameter[binary_operation[constant[100] * call[name[countdict]][name[clade]]]]]]]
call[name[nodes]][name[clade]] assign[=] name[node]
variable[queue] assign[=] name[new_queue]
variable[tre] assign[=] call[call[name[nodes].values, parameter[]]][constant[0]]
call[name[tre].unroot, parameter[]]
return[tuple[[<ast.Name object at 0x7da18f00db40>, <ast.Call object at 0x7da18f00d840>]]] | keyword[def] identifier[build_trees] ( identifier[fclade_counts] , identifier[namedict] ):
literal[string]
identifier[nodes] ={}
identifier[idxarr] = identifier[np] . identifier[arange] ( identifier[len] ( identifier[fclade_counts] [ literal[int] ][ literal[int] ]))
identifier[queue] =[]
identifier[countdict] = identifier[defaultdict] ( identifier[int] )
keyword[for] identifier[clade] , identifier[count] keyword[in] identifier[fclade_counts] :
identifier[mask] = identifier[np] . identifier[int_] ( identifier[list] ( identifier[clade] )). identifier[astype] ( identifier[np] . identifier[bool] )
identifier[ccx] = identifier[idxarr] [ identifier[mask] ]
identifier[queue] . identifier[append] (( identifier[len] ( identifier[ccx] ), identifier[frozenset] ( identifier[ccx] )))
identifier[countdict] [ identifier[frozenset] ( identifier[ccx] )]= identifier[count]
keyword[while] identifier[queue] :
identifier[queue] . identifier[sort] ()
( identifier[clade_size] , identifier[clade] )= identifier[queue] . identifier[pop] ( literal[int] )
identifier[new_queue] =[]
keyword[for] ( identifier[_] , identifier[ancestor] ) keyword[in] identifier[queue] :
keyword[if] identifier[clade] . identifier[issubset] ( identifier[ancestor] ):
identifier[new_ancestor] =( identifier[ancestor] - identifier[clade] )| identifier[frozenset] ([ identifier[clade] ])
identifier[countdict] [ identifier[new_ancestor] ]= identifier[countdict] . identifier[pop] ( identifier[ancestor] )
identifier[ancestor] = identifier[new_ancestor]
identifier[new_queue] . identifier[append] (( identifier[len] ( identifier[ancestor] ), identifier[ancestor] ))
keyword[if] identifier[clade_size] == literal[int] :
identifier[name] = identifier[list] ( identifier[clade] )[ literal[int] ]
identifier[name] = identifier[namedict] [ identifier[name] ]
keyword[else] :
identifier[name] = keyword[None]
identifier[children] =[ identifier[nodes] . identifier[pop] ( identifier[c] ) keyword[for] identifier[c] keyword[in] identifier[clade] keyword[if] identifier[c] keyword[in] identifier[nodes] ]
identifier[node] = identifier[ete3] . identifier[Tree] ( identifier[name] = identifier[name] )
keyword[for] identifier[child] keyword[in] identifier[children] :
identifier[node] . identifier[add_child] ( identifier[child] )
keyword[if] keyword[not] identifier[node] . identifier[is_leaf] ():
identifier[node] . identifier[dist] = identifier[int] ( identifier[round] ( literal[int] * identifier[countdict] [ identifier[clade] ]))
identifier[node] . identifier[support] = identifier[int] ( identifier[round] ( literal[int] * identifier[countdict] [ identifier[clade] ]))
keyword[else] :
identifier[node] . identifier[dist] = identifier[int] ( literal[int] )
identifier[node] . identifier[support] = identifier[int] ( literal[int] )
identifier[nodes] [ identifier[clade] ]= identifier[node]
identifier[queue] = identifier[new_queue]
identifier[tre] = identifier[nodes] . identifier[values] ()[ literal[int] ]
identifier[tre] . identifier[unroot] ()
keyword[return] identifier[tre] , identifier[list] ( identifier[nodes] . identifier[values] ()) | def build_trees(fclade_counts, namedict):
"""
A subfunc of consensus_tree(). Build an unrooted consensus tree
from filtered clade counts.
"""
## storage
nodes = {}
idxarr = np.arange(len(fclade_counts[0][0]))
queue = []
## create dict of clade counts and set keys
countdict = defaultdict(int)
for (clade, count) in fclade_counts:
mask = np.int_(list(clade)).astype(np.bool)
ccx = idxarr[mask]
queue.append((len(ccx), frozenset(ccx)))
countdict[frozenset(ccx)] = count # depends on [control=['for'], data=[]]
while queue:
queue.sort()
(clade_size, clade) = queue.pop(0)
new_queue = []
# search for ancestors of clade
for (_, ancestor) in queue:
if clade.issubset(ancestor):
# update ancestor such that, in the following example:
# ancestor == {1, 2, 3, 4}
# clade == {2, 3}
# new_ancestor == {1, {2, 3}, 4}
new_ancestor = ancestor - clade | frozenset([clade])
countdict[new_ancestor] = countdict.pop(ancestor)
ancestor = new_ancestor # depends on [control=['if'], data=[]]
new_queue.append((len(ancestor), ancestor)) # depends on [control=['for'], data=[]]
# if the clade is a tip, then we have a name
if clade_size == 1:
name = list(clade)[0]
name = namedict[name] # depends on [control=['if'], data=[]]
else:
name = None
# the clade will not be in nodes if it is a tip
children = [nodes.pop(c) for c in clade if c in nodes]
node = ete3.Tree(name=name)
#node = toytree.tree(name=name).tree
for child in children:
node.add_child(child) # depends on [control=['for'], data=['child']]
if not node.is_leaf():
node.dist = int(round(100 * countdict[clade]))
node.support = int(round(100 * countdict[clade])) # depends on [control=['if'], data=[]]
else:
node.dist = int(100)
node.support = int(100)
nodes[clade] = node
queue = new_queue # depends on [control=['while'], data=[]]
tre = nodes.values()[0]
tre.unroot()
## return the tree and other trees if present
return (tre, list(nodes.values())) |
def msvd(m):
    """Modified singular value decomposition.

    Returns U, S, V where Udagger M V = diag(S) and the singular values
    are sorted in ascending order (small to large).
    """
    u_mat, sing_vals, v_dagger = np.linalg.svd(m)
    # np.linalg.svd yields singular values in descending order; permute
    # every factor consistently so they come out smallest-first instead.
    ascending = np.argsort(sing_vals)
    sing_vals = sing_vals[ascending]
    u_mat = u_mat[:, ascending]
    v_dagger = v_dagger[ascending]
    return u_mat, sing_vals, v_dagger.conj().T
constant[Modified singular value decomposition.
Returns U, S, V where Udagger M V = diag(S) and the singular values
are sorted in ascending order (small to large).
]
<ast.Tuple object at 0x7da2047e88b0> assign[=] call[name[np].linalg.svd, parameter[name[m]]]
variable[order] assign[=] call[name[s].argsort, parameter[]]
variable[s] assign[=] call[name[s]][name[order]]
variable[u] assign[=] call[name[u]][tuple[[<ast.Slice object at 0x7da20e957520>, <ast.Name object at 0x7da20e9561d0>]]]
variable[vdgr] assign[=] call[name[vdgr]][name[order]]
return[tuple[[<ast.Name object at 0x7da2047eb3a0>, <ast.Name object at 0x7da2047e97b0>, <ast.Attribute object at 0x7da2047e8400>]]] | keyword[def] identifier[msvd] ( identifier[m] ):
literal[string]
identifier[u] , identifier[s] , identifier[vdgr] = identifier[np] . identifier[linalg] . identifier[svd] ( identifier[m] )
identifier[order] = identifier[s] . identifier[argsort] ()
identifier[s] = identifier[s] [ identifier[order] ]
identifier[u] = identifier[u] [:, identifier[order] ]
identifier[vdgr] = identifier[vdgr] [ identifier[order] ]
keyword[return] identifier[u] , identifier[s] , identifier[vdgr] . identifier[conj] (). identifier[T] | def msvd(m):
"""Modified singular value decomposition.
Returns U, S, V where Udagger M V = diag(S) and the singular values
are sorted in ascending order (small to large).
"""
(u, s, vdgr) = np.linalg.svd(m)
order = s.argsort()
# reverse the n first columns of u
s = s[order]
u = u[:, order]
vdgr = vdgr[order]
return (u, s, vdgr.conj().T) |
def contains_cursor(self, cursor):
    """
    Checks if the textCursor is in the decoration.
    :param cursor: The text cursor to test
    :type cursor: QtGui.QTextCursor
    :returns: True if the cursor is over the selection
    """
    sel_start = self.cursor.selectionStart()
    sel_end = self.cursor.selectionEnd()
    # A cursor sitting at the end of its block counts as just past the
    # selection, so the inclusive upper bound shrinks by one.
    if cursor.atBlockEnd():
        sel_end -= 1
    return sel_start <= cursor.position() <= sel_end
constant[
Checks if the textCursor is in the decoration.
:param cursor: The text cursor to test
:type cursor: QtGui.QTextCursor
:returns: True if the cursor is over the selection
]
variable[start] assign[=] call[name[self].cursor.selectionStart, parameter[]]
variable[end] assign[=] call[name[self].cursor.selectionEnd, parameter[]]
if call[name[cursor].atBlockEnd, parameter[]] begin[:]
<ast.AugAssign object at 0x7da20c6c4e20>
return[compare[name[start] less_or_equal[<=] call[name[cursor].position, parameter[]]]] | keyword[def] identifier[contains_cursor] ( identifier[self] , identifier[cursor] ):
literal[string]
identifier[start] = identifier[self] . identifier[cursor] . identifier[selectionStart] ()
identifier[end] = identifier[self] . identifier[cursor] . identifier[selectionEnd] ()
keyword[if] identifier[cursor] . identifier[atBlockEnd] ():
identifier[end] -= literal[int]
keyword[return] identifier[start] <= identifier[cursor] . identifier[position] ()<= identifier[end] | def contains_cursor(self, cursor):
"""
Checks if the textCursor is in the decoration.
:param cursor: The text cursor to test
:type cursor: QtGui.QTextCursor
:returns: True if the cursor is over the selection
"""
start = self.cursor.selectionStart()
end = self.cursor.selectionEnd()
if cursor.atBlockEnd():
end -= 1 # depends on [control=['if'], data=[]]
return start <= cursor.position() <= end |
def snr_series_to_xml(snr_series, document, sngl_inspiral_id):
    """Save an SNR time series into an XML document, in a format compatible
    with BAYESTAR.
    """
    # Convert to a LAL series and tag it the way BAYESTAR expects.
    series = snr_series.lal()
    series.name = 'snr'
    series.sampleUnits = ''
    xml_elem = _build_series(
        series, (u'Time', u'Time,Real,Imaginary'), None, 'deltaT', 's')
    # Attach the series under the document root, then record which
    # sngl_inspiral row it belongs to.
    attached = document.childNodes[-1].appendChild(xml_elem)
    attached.appendChild(
        ligolw_param.Param.build(u'event_id', u'ilwd:char', sngl_inspiral_id))
constant[Save an SNR time series into an XML document, in a format compatible
with BAYESTAR.
]
variable[snr_lal] assign[=] call[name[snr_series].lal, parameter[]]
name[snr_lal].name assign[=] constant[snr]
name[snr_lal].sampleUnits assign[=] constant[]
variable[snr_xml] assign[=] call[name[_build_series], parameter[name[snr_lal], tuple[[<ast.Constant object at 0x7da18dc04730>, <ast.Constant object at 0x7da18dc05690>]], constant[None], constant[deltaT], constant[s]]]
variable[snr_node] assign[=] call[call[name[document].childNodes][<ast.UnaryOp object at 0x7da18dc063b0>].appendChild, parameter[name[snr_xml]]]
variable[eid_param] assign[=] call[name[ligolw_param].Param.build, parameter[constant[event_id], constant[ilwd:char], name[sngl_inspiral_id]]]
call[name[snr_node].appendChild, parameter[name[eid_param]]] | keyword[def] identifier[snr_series_to_xml] ( identifier[snr_series] , identifier[document] , identifier[sngl_inspiral_id] ):
literal[string]
identifier[snr_lal] = identifier[snr_series] . identifier[lal] ()
identifier[snr_lal] . identifier[name] = literal[string]
identifier[snr_lal] . identifier[sampleUnits] = literal[string]
identifier[snr_xml] = identifier[_build_series] ( identifier[snr_lal] ,( literal[string] , literal[string] ), keyword[None] ,
literal[string] , literal[string] )
identifier[snr_node] = identifier[document] . identifier[childNodes] [- literal[int] ]. identifier[appendChild] ( identifier[snr_xml] )
identifier[eid_param] = identifier[ligolw_param] . identifier[Param] . identifier[build] ( literal[string] , literal[string] ,
identifier[sngl_inspiral_id] )
identifier[snr_node] . identifier[appendChild] ( identifier[eid_param] ) | def snr_series_to_xml(snr_series, document, sngl_inspiral_id):
"""Save an SNR time series into an XML document, in a format compatible
with BAYESTAR.
"""
snr_lal = snr_series.lal()
snr_lal.name = 'snr'
snr_lal.sampleUnits = ''
snr_xml = _build_series(snr_lal, (u'Time', u'Time,Real,Imaginary'), None, 'deltaT', 's')
snr_node = document.childNodes[-1].appendChild(snr_xml)
eid_param = ligolw_param.Param.build(u'event_id', u'ilwd:char', sngl_inspiral_id)
snr_node.appendChild(eid_param) |
def _timestamp_query_param_from_json(value, field):
    """Coerce 'value' to a datetime, if set or not nullable.
    Args:
        value (str): The timestamp.
        field (.SchemaField): The field corresponding to the value.
    Returns:
        Optional[datetime.datetime]: The parsed datetime object from
        ``value`` if the ``field`` is not null (otherwise it is
        :data:`None`).
    """
    if not _not_null(value, field):
        return None
    # Canonical formats for timestamps in BigQuery are flexible. See:
    # g.co/cloud/bigquery/docs/reference/standard-sql/data-types#timestamp-type
    # Normalize: the date/time separator may be 'T' or ' ', and the UTC
    # zone may be spelled 'Z' or '+00:00'.
    normalized = value.replace(" ", "T", 1).replace("Z", "").replace("+00:00", "")
    # Fractional seconds decide between the two accepted layouts:
    # YYYY-MM-DDTHH:MM:SS.ffffff vs. YYYY-MM-DDTHH:MM:SS
    fmt = _RFC3339_MICROS_NO_ZULU if "." in normalized else _RFC3339_NO_FRACTION
    return datetime.datetime.strptime(normalized, fmt).replace(tzinfo=UTC)
constant[Coerce 'value' to a datetime, if set or not nullable.
Args:
value (str): The timestamp.
field (.SchemaField): The field corresponding to the value.
Returns:
Optional[datetime.datetime]: The parsed datetime object from
``value`` if the ``field`` is not null (otherwise it is
:data:`None`).
]
if call[name[_not_null], parameter[name[value], name[field]]] begin[:]
variable[value] assign[=] call[name[value].replace, parameter[constant[ ], constant[T], constant[1]]]
variable[value] assign[=] call[name[value].replace, parameter[constant[Z], constant[]]]
variable[value] assign[=] call[name[value].replace, parameter[constant[+00:00], constant[]]]
if compare[constant[.] in name[value]] begin[:]
return[call[call[name[datetime].datetime.strptime, parameter[name[value], name[_RFC3339_MICROS_NO_ZULU]]].replace, parameter[]]] | keyword[def] identifier[_timestamp_query_param_from_json] ( identifier[value] , identifier[field] ):
literal[string]
keyword[if] identifier[_not_null] ( identifier[value] , identifier[field] ):
identifier[value] = identifier[value] . identifier[replace] ( literal[string] , literal[string] , literal[int] )
identifier[value] = identifier[value] . identifier[replace] ( literal[string] , literal[string] )
identifier[value] = identifier[value] . identifier[replace] ( literal[string] , literal[string] )
keyword[if] literal[string] keyword[in] identifier[value] :
keyword[return] identifier[datetime] . identifier[datetime] . identifier[strptime] ( identifier[value] , identifier[_RFC3339_MICROS_NO_ZULU] ). identifier[replace] (
identifier[tzinfo] = identifier[UTC]
)
keyword[else] :
keyword[return] identifier[datetime] . identifier[datetime] . identifier[strptime] ( identifier[value] , identifier[_RFC3339_NO_FRACTION] ). identifier[replace] (
identifier[tzinfo] = identifier[UTC]
)
keyword[else] :
keyword[return] keyword[None] | def _timestamp_query_param_from_json(value, field):
"""Coerce 'value' to a datetime, if set or not nullable.
Args:
value (str): The timestamp.
field (.SchemaField): The field corresponding to the value.
Returns:
Optional[datetime.datetime]: The parsed datetime object from
``value`` if the ``field`` is not null (otherwise it is
:data:`None`).
"""
if _not_null(value, field):
# Canonical formats for timestamps in BigQuery are flexible. See:
# g.co/cloud/bigquery/docs/reference/standard-sql/data-types#timestamp-type
# The separator between the date and time can be 'T' or ' '.
value = value.replace(' ', 'T', 1)
# The UTC timezone may be formatted as Z or +00:00.
value = value.replace('Z', '')
value = value.replace('+00:00', '')
if '.' in value:
# YYYY-MM-DDTHH:MM:SS.ffffff
return datetime.datetime.strptime(value, _RFC3339_MICROS_NO_ZULU).replace(tzinfo=UTC) # depends on [control=['if'], data=['value']]
else:
# YYYY-MM-DDTHH:MM:SS
return datetime.datetime.strptime(value, _RFC3339_NO_FRACTION).replace(tzinfo=UTC) # depends on [control=['if'], data=[]]
else:
return None |
def connect(workbench):
    """Connection initialization routine.

    Builds a TLS context factory from the data path and chains the
    actual connection step onto the resulting Deferred.
    """
    deferred = _getContextFactory(getDataPath(), workbench)
    deferred.addCallback(_connectWithContextFactory, workbench)
    return deferred
constant[Connection inititalization routine.
]
variable[d] assign[=] call[name[_getContextFactory], parameter[call[name[getDataPath], parameter[]], name[workbench]]]
call[name[d].addCallback, parameter[name[_connectWithContextFactory], name[workbench]]]
return[name[d]] | keyword[def] identifier[connect] ( identifier[workbench] ):
literal[string]
identifier[d] = identifier[_getContextFactory] ( identifier[getDataPath] (), identifier[workbench] )
identifier[d] . identifier[addCallback] ( identifier[_connectWithContextFactory] , identifier[workbench] )
keyword[return] identifier[d] | def connect(workbench):
"""Connection inititalization routine.
"""
d = _getContextFactory(getDataPath(), workbench)
d.addCallback(_connectWithContextFactory, workbench)
return d |
async def webhook_handle(self, request):
    """
    aiohttp.web handle for processing web hooks
    :Example:
    >>> from aiohttp import web
    >>> app = web.Application()
    >>> app.router.add_route('/webhook')
    """
    # Decode the incoming Telegram update with the configured JSON
    # deserializer, dispatch it, and acknowledge with an empty 200.
    payload = await request.json(loads=self.json_deserialize)
    self._process_update(payload)
    return web.Response()
literal[string]
identifier[update] = keyword[await] identifier[request] . identifier[json] ( identifier[loads] = identifier[self] . identifier[json_deserialize] )
identifier[self] . identifier[_process_update] ( identifier[update] )
keyword[return] identifier[web] . identifier[Response] () | async def webhook_handle(self, request):
"""
aiohttp.web handle for processing web hooks
:Example:
>>> from aiohttp import web
>>> app = web.Application()
>>> app.router.add_route('/webhook')
"""
update = await request.json(loads=self.json_deserialize)
self._process_update(update)
return web.Response() |
def _sequenceArgs(self, store):
"""
Filter each element of the data using the attribute type being
tested for containment and hand back the resulting list.
"""
self._sequenceContainer(store) # Force _sequence to be valid
return [self.attribute.infilter(pyval, None, store) for pyval in self._sequence] | def function[_sequenceArgs, parameter[self, store]]:
constant[
Filter each element of the data using the attribute type being
tested for containment and hand back the resulting list.
]
call[name[self]._sequenceContainer, parameter[name[store]]]
return[<ast.ListComp object at 0x7da1b0d6a830>] | keyword[def] identifier[_sequenceArgs] ( identifier[self] , identifier[store] ):
literal[string]
identifier[self] . identifier[_sequenceContainer] ( identifier[store] )
keyword[return] [ identifier[self] . identifier[attribute] . identifier[infilter] ( identifier[pyval] , keyword[None] , identifier[store] ) keyword[for] identifier[pyval] keyword[in] identifier[self] . identifier[_sequence] ] | def _sequenceArgs(self, store):
"""
Filter each element of the data using the attribute type being
tested for containment and hand back the resulting list.
"""
self._sequenceContainer(store) # Force _sequence to be valid
return [self.attribute.infilter(pyval, None, store) for pyval in self._sequence] |
def get_global_option(checker, option, default=None):
    """ Retrieve an option defined by the given *checker* or
    by all known option providers.
    It will look in the list of all options providers
    until the given *option* will be found.
    If the option wasn't found, the *default* value will be returned.
    """
    # Option names use dashes on the command line but underscores as
    # config attributes; translate once up front.
    attr_name = option.replace("-", "_")
    # The checker's own config takes precedence when it knows the option.
    try:
        return getattr(checker.config, attr_name)
    except AttributeError:
        pass
    # Fall back to scanning every registered options provider.
    for provider in checker.linter.options_providers:
        if any(entry[0] == option for entry in provider.options):
            return getattr(provider.config, attr_name)
    return default
constant[ Retrieve an option defined by the given *checker* or
by all known option providers.
It will look in the list of all options providers
until the given *option* will be found.
If the option wasn't found, the *default* value will be returned.
]
<ast.Try object at 0x7da1b0286dd0>
for taget[name[provider]] in starred[name[checker].linter.options_providers] begin[:]
for taget[name[options]] in starred[name[provider].options] begin[:]
if compare[call[name[options]][constant[0]] equal[==] name[option]] begin[:]
return[call[name[getattr], parameter[name[provider].config, call[name[option].replace, parameter[constant[-], constant[_]]]]]]
return[name[default]] | keyword[def] identifier[get_global_option] ( identifier[checker] , identifier[option] , identifier[default] = keyword[None] ):
literal[string]
keyword[try] :
keyword[return] identifier[getattr] ( identifier[checker] . identifier[config] , identifier[option] . identifier[replace] ( literal[string] , literal[string] ))
keyword[except] identifier[AttributeError] :
keyword[pass]
keyword[for] identifier[provider] keyword[in] identifier[checker] . identifier[linter] . identifier[options_providers] :
keyword[for] identifier[options] keyword[in] identifier[provider] . identifier[options] :
keyword[if] identifier[options] [ literal[int] ]== identifier[option] :
keyword[return] identifier[getattr] ( identifier[provider] . identifier[config] , identifier[option] . identifier[replace] ( literal[string] , literal[string] ))
keyword[return] identifier[default] | def get_global_option(checker, option, default=None):
""" Retrieve an option defined by the given *checker* or
by all known option providers.
It will look in the list of all options providers
until the given *option* will be found.
If the option wasn't found, the *default* value will be returned.
"""
# First, try in the given checker's config.
# After that, look in the options providers.
try:
return getattr(checker.config, option.replace('-', '_')) # depends on [control=['try'], data=[]]
except AttributeError:
pass # depends on [control=['except'], data=[]]
for provider in checker.linter.options_providers:
for options in provider.options:
if options[0] == option:
return getattr(provider.config, option.replace('-', '_')) # depends on [control=['if'], data=['option']] # depends on [control=['for'], data=['options']] # depends on [control=['for'], data=['provider']]
return default |
def guess_version(pkg_key, default='?'):
    """Guess the version of a pkg when pip doesn't provide it
    :param str pkg_key: key of the package
    :param str default: default version to return if unable to find
    :returns: version
    :rtype: string
    """
    # Importing the module is the only way left to ask it; an unimportable
    # package simply yields the fallback value.
    try:
        module = import_module(pkg_key)
    except ImportError:
        return default
    return getattr(module, '__version__', default)
constant[Guess the version of a pkg when pip doesn't provide it
:param str pkg_key: key of the package
:param str default: default version to return if unable to find
:returns: version
:rtype: string
]
<ast.Try object at 0x7da1b1ea3490> | keyword[def] identifier[guess_version] ( identifier[pkg_key] , identifier[default] = literal[string] ):
literal[string]
keyword[try] :
identifier[m] = identifier[import_module] ( identifier[pkg_key] )
keyword[except] identifier[ImportError] :
keyword[return] identifier[default]
keyword[else] :
keyword[return] identifier[getattr] ( identifier[m] , literal[string] , identifier[default] ) | def guess_version(pkg_key, default='?'):
"""Guess the version of a pkg when pip doesn't provide it
:param str pkg_key: key of the package
:param str default: default version to return if unable to find
:returns: version
:rtype: string
"""
try:
m = import_module(pkg_key) # depends on [control=['try'], data=[]]
except ImportError:
return default # depends on [control=['except'], data=[]]
else:
return getattr(m, '__version__', default) |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.