code stringlengths 75 104k | code_sememe stringlengths 47 309k | token_type stringlengths 215 214k | code_dependency stringlengths 75 155k |
|---|---|---|---|
def guess_wxr_version(self, tree):
"""
We will try to guess the wxr version used
to complete the wordpress xml namespace name.
"""
for v in ('1.2', '1.1', '1.0'):
try:
tree.find('channel/{%s}wxr_version' % (WP_NS % v)).text
return v
except AttributeError:
pass
raise CommandError('Cannot resolve the wordpress namespace') | def function[guess_wxr_version, parameter[self, tree]]:
constant[
We will try to guess the wxr version used
to complete the wordpress xml namespace name.
]
for taget[name[v]] in starred[tuple[[<ast.Constant object at 0x7da1b2429bd0>, <ast.Constant object at 0x7da1b2429c00>, <ast.Constant object at 0x7da1b2429c30>]]] begin[:]
<ast.Try object at 0x7da1b2429330>
<ast.Raise object at 0x7da1b2428fa0> | keyword[def] identifier[guess_wxr_version] ( identifier[self] , identifier[tree] ):
literal[string]
keyword[for] identifier[v] keyword[in] ( literal[string] , literal[string] , literal[string] ):
keyword[try] :
identifier[tree] . identifier[find] ( literal[string] %( identifier[WP_NS] % identifier[v] )). identifier[text]
keyword[return] identifier[v]
keyword[except] identifier[AttributeError] :
keyword[pass]
keyword[raise] identifier[CommandError] ( literal[string] ) | def guess_wxr_version(self, tree):
"""
We will try to guess the wxr version used
to complete the wordpress xml namespace name.
"""
for v in ('1.2', '1.1', '1.0'):
try:
tree.find('channel/{%s}wxr_version' % (WP_NS % v)).text
return v # depends on [control=['try'], data=[]]
except AttributeError:
pass # depends on [control=['except'], data=[]] # depends on [control=['for'], data=['v']]
raise CommandError('Cannot resolve the wordpress namespace') |
def rgb2hex(rgb, force_long=False):
"""Transform RGB tuple to hex RGB representation
:param rgb: RGB 3-uple of float between 0 and 1
:rtype: 3 hex char or 6 hex char string representation
Usage
-----
>>> from colour import rgb2hex
>>> rgb2hex((0.0,1.0,0.0))
'#0f0'
Rounding try to be as natural as possible:
>>> rgb2hex((0.0,0.999999,1.0))
'#0ff'
And if not possible, the 6 hex char representation is used:
>>> rgb2hex((0.23,1.0,1.0))
'#3bffff'
>>> rgb2hex((0.0,0.999999,1.0), force_long=True)
'#00ffff'
"""
hx = ''.join(["%02x" % int(c * 255 + 0.5 - FLOAT_ERROR)
for c in rgb])
if not force_long and hx[0::2] == hx[1::2]:
hx = ''.join(hx[0::2])
return "#%s" % hx | def function[rgb2hex, parameter[rgb, force_long]]:
constant[Transform RGB tuple to hex RGB representation
:param rgb: RGB 3-uple of float between 0 and 1
:rtype: 3 hex char or 6 hex char string representation
Usage
-----
>>> from colour import rgb2hex
>>> rgb2hex((0.0,1.0,0.0))
'#0f0'
Rounding try to be as natural as possible:
>>> rgb2hex((0.0,0.999999,1.0))
'#0ff'
And if not possible, the 6 hex char representation is used:
>>> rgb2hex((0.23,1.0,1.0))
'#3bffff'
>>> rgb2hex((0.0,0.999999,1.0), force_long=True)
'#00ffff'
]
variable[hx] assign[=] call[constant[].join, parameter[<ast.ListComp object at 0x7da1b084f2b0>]]
if <ast.BoolOp object at 0x7da1b084cb80> begin[:]
variable[hx] assign[=] call[constant[].join, parameter[call[name[hx]][<ast.Slice object at 0x7da1b0778310>]]]
return[binary_operation[constant[#%s] <ast.Mod object at 0x7da2590d6920> name[hx]]] | keyword[def] identifier[rgb2hex] ( identifier[rgb] , identifier[force_long] = keyword[False] ):
literal[string]
identifier[hx] = literal[string] . identifier[join] ([ literal[string] % identifier[int] ( identifier[c] * literal[int] + literal[int] - identifier[FLOAT_ERROR] )
keyword[for] identifier[c] keyword[in] identifier[rgb] ])
keyword[if] keyword[not] identifier[force_long] keyword[and] identifier[hx] [ literal[int] :: literal[int] ]== identifier[hx] [ literal[int] :: literal[int] ]:
identifier[hx] = literal[string] . identifier[join] ( identifier[hx] [ literal[int] :: literal[int] ])
keyword[return] literal[string] % identifier[hx] | def rgb2hex(rgb, force_long=False):
"""Transform RGB tuple to hex RGB representation
:param rgb: RGB 3-uple of float between 0 and 1
:rtype: 3 hex char or 6 hex char string representation
Usage
-----
>>> from colour import rgb2hex
>>> rgb2hex((0.0,1.0,0.0))
'#0f0'
Rounding try to be as natural as possible:
>>> rgb2hex((0.0,0.999999,1.0))
'#0ff'
And if not possible, the 6 hex char representation is used:
>>> rgb2hex((0.23,1.0,1.0))
'#3bffff'
>>> rgb2hex((0.0,0.999999,1.0), force_long=True)
'#00ffff'
"""
hx = ''.join(['%02x' % int(c * 255 + 0.5 - FLOAT_ERROR) for c in rgb])
if not force_long and hx[0::2] == hx[1::2]:
hx = ''.join(hx[0::2]) # depends on [control=['if'], data=[]]
return '#%s' % hx |
def get_page_size_args():
"""
Get page size arguments, returns an int
{ <VIEW_NAME>: PAGE_NUMBER }
Arguments are passed: psize_<VIEW_NAME>=<PAGE_SIZE>
"""
page_sizes = {}
for arg in request.args:
re_match = re.findall("psize_(.*)", arg)
if re_match:
page_sizes[re_match[0]] = int(request.args.get(arg))
return page_sizes | def function[get_page_size_args, parameter[]]:
constant[
Get page size arguments, returns an int
{ <VIEW_NAME>: PAGE_NUMBER }
Arguments are passed: psize_<VIEW_NAME>=<PAGE_SIZE>
]
variable[page_sizes] assign[=] dictionary[[], []]
for taget[name[arg]] in starred[name[request].args] begin[:]
variable[re_match] assign[=] call[name[re].findall, parameter[constant[psize_(.*)], name[arg]]]
if name[re_match] begin[:]
call[name[page_sizes]][call[name[re_match]][constant[0]]] assign[=] call[name[int], parameter[call[name[request].args.get, parameter[name[arg]]]]]
return[name[page_sizes]] | keyword[def] identifier[get_page_size_args] ():
literal[string]
identifier[page_sizes] ={}
keyword[for] identifier[arg] keyword[in] identifier[request] . identifier[args] :
identifier[re_match] = identifier[re] . identifier[findall] ( literal[string] , identifier[arg] )
keyword[if] identifier[re_match] :
identifier[page_sizes] [ identifier[re_match] [ literal[int] ]]= identifier[int] ( identifier[request] . identifier[args] . identifier[get] ( identifier[arg] ))
keyword[return] identifier[page_sizes] | def get_page_size_args():
"""
Get page size arguments, returns an int
{ <VIEW_NAME>: PAGE_NUMBER }
Arguments are passed: psize_<VIEW_NAME>=<PAGE_SIZE>
"""
page_sizes = {}
for arg in request.args:
re_match = re.findall('psize_(.*)', arg)
if re_match:
page_sizes[re_match[0]] = int(request.args.get(arg)) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['arg']]
return page_sizes |
def from_stanford_dependencies(this_class, stream, trees,
include_erased=False, include_punct=True):
"""Construct a Corpus. stream is an iterable over strings where
each string is a line representing a Stanford Dependency as in
the output of the command line Stanford Dependency tool:
deprel(gov-index, dep-depindex)
Sentences are separated by blank lines. A corresponding list of
Penn Treebank formatted trees must be provided as well."""
stream = iter(stream)
corpus = this_class()
for tree in trees:
sentence = Sentence.from_stanford_dependencies(stream,
tree,
include_erased,
include_punct)
corpus.append(sentence)
return corpus | def function[from_stanford_dependencies, parameter[this_class, stream, trees, include_erased, include_punct]]:
constant[Construct a Corpus. stream is an iterable over strings where
each string is a line representing a Stanford Dependency as in
the output of the command line Stanford Dependency tool:
deprel(gov-index, dep-depindex)
Sentences are separated by blank lines. A corresponding list of
Penn Treebank formatted trees must be provided as well.]
variable[stream] assign[=] call[name[iter], parameter[name[stream]]]
variable[corpus] assign[=] call[name[this_class], parameter[]]
for taget[name[tree]] in starred[name[trees]] begin[:]
variable[sentence] assign[=] call[name[Sentence].from_stanford_dependencies, parameter[name[stream], name[tree], name[include_erased], name[include_punct]]]
call[name[corpus].append, parameter[name[sentence]]]
return[name[corpus]] | keyword[def] identifier[from_stanford_dependencies] ( identifier[this_class] , identifier[stream] , identifier[trees] ,
identifier[include_erased] = keyword[False] , identifier[include_punct] = keyword[True] ):
literal[string]
identifier[stream] = identifier[iter] ( identifier[stream] )
identifier[corpus] = identifier[this_class] ()
keyword[for] identifier[tree] keyword[in] identifier[trees] :
identifier[sentence] = identifier[Sentence] . identifier[from_stanford_dependencies] ( identifier[stream] ,
identifier[tree] ,
identifier[include_erased] ,
identifier[include_punct] )
identifier[corpus] . identifier[append] ( identifier[sentence] )
keyword[return] identifier[corpus] | def from_stanford_dependencies(this_class, stream, trees, include_erased=False, include_punct=True):
"""Construct a Corpus. stream is an iterable over strings where
each string is a line representing a Stanford Dependency as in
the output of the command line Stanford Dependency tool:
deprel(gov-index, dep-depindex)
Sentences are separated by blank lines. A corresponding list of
Penn Treebank formatted trees must be provided as well."""
stream = iter(stream)
corpus = this_class()
for tree in trees:
sentence = Sentence.from_stanford_dependencies(stream, tree, include_erased, include_punct)
corpus.append(sentence) # depends on [control=['for'], data=['tree']]
return corpus |
def calculate_A50(ctgsizes, cutoff=0, percent=50):
"""
Given an array of contig sizes, produce A50, N50, and L50 values
"""
ctgsizes = np.array(ctgsizes, dtype="int")
ctgsizes = np.sort(ctgsizes)[::-1]
ctgsizes = ctgsizes[ctgsizes >= cutoff]
a50 = np.cumsum(ctgsizes)
total = np.sum(ctgsizes)
idx = bisect(a50, total * percent / 100.)
l50 = ctgsizes[idx]
n50 = idx + 1
return a50, l50, n50 | def function[calculate_A50, parameter[ctgsizes, cutoff, percent]]:
constant[
Given an array of contig sizes, produce A50, N50, and L50 values
]
variable[ctgsizes] assign[=] call[name[np].array, parameter[name[ctgsizes]]]
variable[ctgsizes] assign[=] call[call[name[np].sort, parameter[name[ctgsizes]]]][<ast.Slice object at 0x7da1b08e8b80>]
variable[ctgsizes] assign[=] call[name[ctgsizes]][compare[name[ctgsizes] greater_or_equal[>=] name[cutoff]]]
variable[a50] assign[=] call[name[np].cumsum, parameter[name[ctgsizes]]]
variable[total] assign[=] call[name[np].sum, parameter[name[ctgsizes]]]
variable[idx] assign[=] call[name[bisect], parameter[name[a50], binary_operation[binary_operation[name[total] * name[percent]] / constant[100.0]]]]
variable[l50] assign[=] call[name[ctgsizes]][name[idx]]
variable[n50] assign[=] binary_operation[name[idx] + constant[1]]
return[tuple[[<ast.Name object at 0x7da1b0747eb0>, <ast.Name object at 0x7da1b0747e50>, <ast.Name object at 0x7da1b0747df0>]]] | keyword[def] identifier[calculate_A50] ( identifier[ctgsizes] , identifier[cutoff] = literal[int] , identifier[percent] = literal[int] ):
literal[string]
identifier[ctgsizes] = identifier[np] . identifier[array] ( identifier[ctgsizes] , identifier[dtype] = literal[string] )
identifier[ctgsizes] = identifier[np] . identifier[sort] ( identifier[ctgsizes] )[::- literal[int] ]
identifier[ctgsizes] = identifier[ctgsizes] [ identifier[ctgsizes] >= identifier[cutoff] ]
identifier[a50] = identifier[np] . identifier[cumsum] ( identifier[ctgsizes] )
identifier[total] = identifier[np] . identifier[sum] ( identifier[ctgsizes] )
identifier[idx] = identifier[bisect] ( identifier[a50] , identifier[total] * identifier[percent] / literal[int] )
identifier[l50] = identifier[ctgsizes] [ identifier[idx] ]
identifier[n50] = identifier[idx] + literal[int]
keyword[return] identifier[a50] , identifier[l50] , identifier[n50] | def calculate_A50(ctgsizes, cutoff=0, percent=50):
"""
Given an array of contig sizes, produce A50, N50, and L50 values
"""
ctgsizes = np.array(ctgsizes, dtype='int')
ctgsizes = np.sort(ctgsizes)[::-1]
ctgsizes = ctgsizes[ctgsizes >= cutoff]
a50 = np.cumsum(ctgsizes)
total = np.sum(ctgsizes)
idx = bisect(a50, total * percent / 100.0)
l50 = ctgsizes[idx]
n50 = idx + 1
return (a50, l50, n50) |
def ffconvert(fname, limit_states, ff, min_iml=1E-10):
"""
Convert a fragility function into a numpy array plus a bunch
of attributes.
:param fname: path to the fragility model file
:param limit_states: expected limit states
:param ff: fragility function node
:returns: a pair (array, dictionary)
"""
with context(fname, ff):
ffs = ff[1:]
imls = ff.imls
nodamage = imls.attrib.get('noDamageLimit')
if nodamage == 0:
# use a cutoff to avoid log(0) in GMPE.to_distribution_values
logging.warning('Found a noDamageLimit=0 in %s, line %s, '
'using %g instead', fname, ff.lineno, min_iml)
nodamage = min_iml
with context(fname, imls):
attrs = dict(format=ff['format'],
imt=imls['imt'],
id=ff['id'],
nodamage=nodamage)
LS = len(limit_states)
if LS != len(ffs):
with context(fname, ff):
raise InvalidFile('expected %d limit states, found %d' %
(LS, len(ffs)))
if ff['format'] == 'continuous':
minIML = float(imls['minIML'])
if minIML == 0:
# use a cutoff to avoid log(0) in GMPE.to_distribution_values
logging.warning('Found minIML=0 in %s, line %s, using %g instead',
fname, imls.lineno, min_iml)
minIML = min_iml
attrs['minIML'] = minIML
attrs['maxIML'] = float(imls['maxIML'])
array = numpy.zeros(LS, [('mean', F64), ('stddev', F64)])
for i, ls, node in zip(range(LS), limit_states, ff[1:]):
if ls != node['ls']:
with context(fname, node):
raise InvalidFile('expected %s, found' %
(ls, node['ls']))
array['mean'][i] = node['mean']
array['stddev'][i] = node['stddev']
elif ff['format'] == 'discrete':
attrs['imls'] = ~imls
valid.check_levels(attrs['imls'], attrs['imt'], min_iml)
num_poes = len(attrs['imls'])
array = numpy.zeros((LS, num_poes))
for i, ls, node in zip(range(LS), limit_states, ff[1:]):
with context(fname, node):
if ls != node['ls']:
raise InvalidFile('expected %s, found' %
(ls, node['ls']))
poes = (~node if isinstance(~node, list)
else valid.probabilities(~node))
if len(poes) != num_poes:
raise InvalidFile('expected %s, found' %
(num_poes, len(poes)))
array[i, :] = poes
# NB: the format is constrained in nrml.FragilityNode to be either
# discrete or continuous, there is no third option
return array, attrs | def function[ffconvert, parameter[fname, limit_states, ff, min_iml]]:
constant[
Convert a fragility function into a numpy array plus a bunch
of attributes.
:param fname: path to the fragility model file
:param limit_states: expected limit states
:param ff: fragility function node
:returns: a pair (array, dictionary)
]
with call[name[context], parameter[name[fname], name[ff]]] begin[:]
variable[ffs] assign[=] call[name[ff]][<ast.Slice object at 0x7da18bcc9a50>]
variable[imls] assign[=] name[ff].imls
variable[nodamage] assign[=] call[name[imls].attrib.get, parameter[constant[noDamageLimit]]]
if compare[name[nodamage] equal[==] constant[0]] begin[:]
call[name[logging].warning, parameter[constant[Found a noDamageLimit=0 in %s, line %s, using %g instead], name[fname], name[ff].lineno, name[min_iml]]]
variable[nodamage] assign[=] name[min_iml]
with call[name[context], parameter[name[fname], name[imls]]] begin[:]
variable[attrs] assign[=] call[name[dict], parameter[]]
variable[LS] assign[=] call[name[len], parameter[name[limit_states]]]
if compare[name[LS] not_equal[!=] call[name[len], parameter[name[ffs]]]] begin[:]
with call[name[context], parameter[name[fname], name[ff]]] begin[:]
<ast.Raise object at 0x7da18bccba90>
if compare[call[name[ff]][constant[format]] equal[==] constant[continuous]] begin[:]
variable[minIML] assign[=] call[name[float], parameter[call[name[imls]][constant[minIML]]]]
if compare[name[minIML] equal[==] constant[0]] begin[:]
call[name[logging].warning, parameter[constant[Found minIML=0 in %s, line %s, using %g instead], name[fname], name[imls].lineno, name[min_iml]]]
variable[minIML] assign[=] name[min_iml]
call[name[attrs]][constant[minIML]] assign[=] name[minIML]
call[name[attrs]][constant[maxIML]] assign[=] call[name[float], parameter[call[name[imls]][constant[maxIML]]]]
variable[array] assign[=] call[name[numpy].zeros, parameter[name[LS], list[[<ast.Tuple object at 0x7da18bcca2c0>, <ast.Tuple object at 0x7da18bcca3e0>]]]]
for taget[tuple[[<ast.Name object at 0x7da18bcc8dc0>, <ast.Name object at 0x7da18bccafb0>, <ast.Name object at 0x7da18bcc9660>]]] in starred[call[name[zip], parameter[call[name[range], parameter[name[LS]]], name[limit_states], call[name[ff]][<ast.Slice object at 0x7da18bcca4d0>]]]] begin[:]
if compare[name[ls] not_equal[!=] call[name[node]][constant[ls]]] begin[:]
with call[name[context], parameter[name[fname], name[node]]] begin[:]
<ast.Raise object at 0x7da18bccb100>
call[call[name[array]][constant[mean]]][name[i]] assign[=] call[name[node]][constant[mean]]
call[call[name[array]][constant[stddev]]][name[i]] assign[=] call[name[node]][constant[stddev]]
return[tuple[[<ast.Name object at 0x7da18dc054b0>, <ast.Name object at 0x7da18dc04100>]]] | keyword[def] identifier[ffconvert] ( identifier[fname] , identifier[limit_states] , identifier[ff] , identifier[min_iml] = literal[int] ):
literal[string]
keyword[with] identifier[context] ( identifier[fname] , identifier[ff] ):
identifier[ffs] = identifier[ff] [ literal[int] :]
identifier[imls] = identifier[ff] . identifier[imls]
identifier[nodamage] = identifier[imls] . identifier[attrib] . identifier[get] ( literal[string] )
keyword[if] identifier[nodamage] == literal[int] :
identifier[logging] . identifier[warning] ( literal[string]
literal[string] , identifier[fname] , identifier[ff] . identifier[lineno] , identifier[min_iml] )
identifier[nodamage] = identifier[min_iml]
keyword[with] identifier[context] ( identifier[fname] , identifier[imls] ):
identifier[attrs] = identifier[dict] ( identifier[format] = identifier[ff] [ literal[string] ],
identifier[imt] = identifier[imls] [ literal[string] ],
identifier[id] = identifier[ff] [ literal[string] ],
identifier[nodamage] = identifier[nodamage] )
identifier[LS] = identifier[len] ( identifier[limit_states] )
keyword[if] identifier[LS] != identifier[len] ( identifier[ffs] ):
keyword[with] identifier[context] ( identifier[fname] , identifier[ff] ):
keyword[raise] identifier[InvalidFile] ( literal[string] %
( identifier[LS] , identifier[len] ( identifier[ffs] )))
keyword[if] identifier[ff] [ literal[string] ]== literal[string] :
identifier[minIML] = identifier[float] ( identifier[imls] [ literal[string] ])
keyword[if] identifier[minIML] == literal[int] :
identifier[logging] . identifier[warning] ( literal[string] ,
identifier[fname] , identifier[imls] . identifier[lineno] , identifier[min_iml] )
identifier[minIML] = identifier[min_iml]
identifier[attrs] [ literal[string] ]= identifier[minIML]
identifier[attrs] [ literal[string] ]= identifier[float] ( identifier[imls] [ literal[string] ])
identifier[array] = identifier[numpy] . identifier[zeros] ( identifier[LS] ,[( literal[string] , identifier[F64] ),( literal[string] , identifier[F64] )])
keyword[for] identifier[i] , identifier[ls] , identifier[node] keyword[in] identifier[zip] ( identifier[range] ( identifier[LS] ), identifier[limit_states] , identifier[ff] [ literal[int] :]):
keyword[if] identifier[ls] != identifier[node] [ literal[string] ]:
keyword[with] identifier[context] ( identifier[fname] , identifier[node] ):
keyword[raise] identifier[InvalidFile] ( literal[string] %
( identifier[ls] , identifier[node] [ literal[string] ]))
identifier[array] [ literal[string] ][ identifier[i] ]= identifier[node] [ literal[string] ]
identifier[array] [ literal[string] ][ identifier[i] ]= identifier[node] [ literal[string] ]
keyword[elif] identifier[ff] [ literal[string] ]== literal[string] :
identifier[attrs] [ literal[string] ]=~ identifier[imls]
identifier[valid] . identifier[check_levels] ( identifier[attrs] [ literal[string] ], identifier[attrs] [ literal[string] ], identifier[min_iml] )
identifier[num_poes] = identifier[len] ( identifier[attrs] [ literal[string] ])
identifier[array] = identifier[numpy] . identifier[zeros] (( identifier[LS] , identifier[num_poes] ))
keyword[for] identifier[i] , identifier[ls] , identifier[node] keyword[in] identifier[zip] ( identifier[range] ( identifier[LS] ), identifier[limit_states] , identifier[ff] [ literal[int] :]):
keyword[with] identifier[context] ( identifier[fname] , identifier[node] ):
keyword[if] identifier[ls] != identifier[node] [ literal[string] ]:
keyword[raise] identifier[InvalidFile] ( literal[string] %
( identifier[ls] , identifier[node] [ literal[string] ]))
identifier[poes] =(~ identifier[node] keyword[if] identifier[isinstance] (~ identifier[node] , identifier[list] )
keyword[else] identifier[valid] . identifier[probabilities] (~ identifier[node] ))
keyword[if] identifier[len] ( identifier[poes] )!= identifier[num_poes] :
keyword[raise] identifier[InvalidFile] ( literal[string] %
( identifier[num_poes] , identifier[len] ( identifier[poes] )))
identifier[array] [ identifier[i] ,:]= identifier[poes]
keyword[return] identifier[array] , identifier[attrs] | def ffconvert(fname, limit_states, ff, min_iml=1e-10):
"""
Convert a fragility function into a numpy array plus a bunch
of attributes.
:param fname: path to the fragility model file
:param limit_states: expected limit states
:param ff: fragility function node
:returns: a pair (array, dictionary)
"""
with context(fname, ff):
ffs = ff[1:]
imls = ff.imls # depends on [control=['with'], data=[]]
nodamage = imls.attrib.get('noDamageLimit')
if nodamage == 0:
# use a cutoff to avoid log(0) in GMPE.to_distribution_values
logging.warning('Found a noDamageLimit=0 in %s, line %s, using %g instead', fname, ff.lineno, min_iml)
nodamage = min_iml # depends on [control=['if'], data=['nodamage']]
with context(fname, imls):
attrs = dict(format=ff['format'], imt=imls['imt'], id=ff['id'], nodamage=nodamage) # depends on [control=['with'], data=[]]
LS = len(limit_states)
if LS != len(ffs):
with context(fname, ff):
raise InvalidFile('expected %d limit states, found %d' % (LS, len(ffs))) # depends on [control=['with'], data=[]] # depends on [control=['if'], data=['LS']]
if ff['format'] == 'continuous':
minIML = float(imls['minIML'])
if minIML == 0:
# use a cutoff to avoid log(0) in GMPE.to_distribution_values
logging.warning('Found minIML=0 in %s, line %s, using %g instead', fname, imls.lineno, min_iml)
minIML = min_iml # depends on [control=['if'], data=['minIML']]
attrs['minIML'] = minIML
attrs['maxIML'] = float(imls['maxIML'])
array = numpy.zeros(LS, [('mean', F64), ('stddev', F64)])
for (i, ls, node) in zip(range(LS), limit_states, ff[1:]):
if ls != node['ls']:
with context(fname, node):
raise InvalidFile('expected %s, found' % (ls, node['ls'])) # depends on [control=['with'], data=[]] # depends on [control=['if'], data=['ls']]
array['mean'][i] = node['mean']
array['stddev'][i] = node['stddev'] # depends on [control=['for'], data=[]] # depends on [control=['if'], data=[]]
elif ff['format'] == 'discrete':
attrs['imls'] = ~imls
valid.check_levels(attrs['imls'], attrs['imt'], min_iml)
num_poes = len(attrs['imls'])
array = numpy.zeros((LS, num_poes))
for (i, ls, node) in zip(range(LS), limit_states, ff[1:]):
with context(fname, node):
if ls != node['ls']:
raise InvalidFile('expected %s, found' % (ls, node['ls'])) # depends on [control=['if'], data=['ls']]
poes = ~node if isinstance(~node, list) else valid.probabilities(~node)
if len(poes) != num_poes:
raise InvalidFile('expected %s, found' % (num_poes, len(poes))) # depends on [control=['if'], data=['num_poes']]
array[i, :] = poes # depends on [control=['with'], data=[]] # depends on [control=['for'], data=[]] # depends on [control=['if'], data=[]]
# NB: the format is constrained in nrml.FragilityNode to be either
# discrete or continuous, there is no third option
return (array, attrs) |
def ensure_annotations(resources, data):
"""Prepare any potentially missing annotations for downstream processing in a local directory.
"""
transcript_gff = tz.get_in(["rnaseq", "transcripts"], resources)
if transcript_gff and utils.file_exists(transcript_gff):
out_dir = os.path.join(tz.get_in(["dirs", "work"], data),
"inputs", "data", "annotations")
resources["rnaseq"]["gene_bed"] = gtf.gtf_to_bed(transcript_gff, out_dir)
return resources | def function[ensure_annotations, parameter[resources, data]]:
constant[Prepare any potentially missing annotations for downstream processing in a local directory.
]
variable[transcript_gff] assign[=] call[name[tz].get_in, parameter[list[[<ast.Constant object at 0x7da1b18bc550>, <ast.Constant object at 0x7da1b18be1a0>]], name[resources]]]
if <ast.BoolOp object at 0x7da1b19d9690> begin[:]
variable[out_dir] assign[=] call[name[os].path.join, parameter[call[name[tz].get_in, parameter[list[[<ast.Constant object at 0x7da1b18d2b00>, <ast.Constant object at 0x7da1b18d2920>]], name[data]]], constant[inputs], constant[data], constant[annotations]]]
call[call[name[resources]][constant[rnaseq]]][constant[gene_bed]] assign[=] call[name[gtf].gtf_to_bed, parameter[name[transcript_gff], name[out_dir]]]
return[name[resources]] | keyword[def] identifier[ensure_annotations] ( identifier[resources] , identifier[data] ):
literal[string]
identifier[transcript_gff] = identifier[tz] . identifier[get_in] ([ literal[string] , literal[string] ], identifier[resources] )
keyword[if] identifier[transcript_gff] keyword[and] identifier[utils] . identifier[file_exists] ( identifier[transcript_gff] ):
identifier[out_dir] = identifier[os] . identifier[path] . identifier[join] ( identifier[tz] . identifier[get_in] ([ literal[string] , literal[string] ], identifier[data] ),
literal[string] , literal[string] , literal[string] )
identifier[resources] [ literal[string] ][ literal[string] ]= identifier[gtf] . identifier[gtf_to_bed] ( identifier[transcript_gff] , identifier[out_dir] )
keyword[return] identifier[resources] | def ensure_annotations(resources, data):
"""Prepare any potentially missing annotations for downstream processing in a local directory.
"""
transcript_gff = tz.get_in(['rnaseq', 'transcripts'], resources)
if transcript_gff and utils.file_exists(transcript_gff):
out_dir = os.path.join(tz.get_in(['dirs', 'work'], data), 'inputs', 'data', 'annotations')
resources['rnaseq']['gene_bed'] = gtf.gtf_to_bed(transcript_gff, out_dir) # depends on [control=['if'], data=[]]
return resources |
def _lat_lons_from_geojson(s):
"""Return a latitude-longitude pairs from nested GeoJSON coordinates.
GeoJSON coordinates are always stored in (longitude, latitude) order.
"""
if len(s) >= 2 and isinstance(s[0], _number) and isinstance(s[0], _number):
lat, lon = s[1], s[0]
return [(lat, lon)]
else:
return [lat_lon for sub in s for lat_lon in _lat_lons_from_geojson(sub)] | def function[_lat_lons_from_geojson, parameter[s]]:
constant[Return a latitude-longitude pairs from nested GeoJSON coordinates.
GeoJSON coordinates are always stored in (longitude, latitude) order.
]
if <ast.BoolOp object at 0x7da1b07933d0> begin[:]
<ast.Tuple object at 0x7da1b0792530> assign[=] tuple[[<ast.Subscript object at 0x7da1b07909d0>, <ast.Subscript object at 0x7da1b0793c40>]]
return[list[[<ast.Tuple object at 0x7da1b0791ab0>]]] | keyword[def] identifier[_lat_lons_from_geojson] ( identifier[s] ):
literal[string]
keyword[if] identifier[len] ( identifier[s] )>= literal[int] keyword[and] identifier[isinstance] ( identifier[s] [ literal[int] ], identifier[_number] ) keyword[and] identifier[isinstance] ( identifier[s] [ literal[int] ], identifier[_number] ):
identifier[lat] , identifier[lon] = identifier[s] [ literal[int] ], identifier[s] [ literal[int] ]
keyword[return] [( identifier[lat] , identifier[lon] )]
keyword[else] :
keyword[return] [ identifier[lat_lon] keyword[for] identifier[sub] keyword[in] identifier[s] keyword[for] identifier[lat_lon] keyword[in] identifier[_lat_lons_from_geojson] ( identifier[sub] )] | def _lat_lons_from_geojson(s):
"""Return a latitude-longitude pairs from nested GeoJSON coordinates.
GeoJSON coordinates are always stored in (longitude, latitude) order.
"""
if len(s) >= 2 and isinstance(s[0], _number) and isinstance(s[0], _number):
(lat, lon) = (s[1], s[0])
return [(lat, lon)] # depends on [control=['if'], data=[]]
else:
return [lat_lon for sub in s for lat_lon in _lat_lons_from_geojson(sub)] |
def set(self, name, value):
"""
Sets the value of the field `name` to `value`, which is `True` or
`False`.
"""
flag = self.flags[name]
self._value = (self.value | flag) if value else (self.value & ~flag) | def function[set, parameter[self, name, value]]:
constant[
Sets the value of the field `name` to `value`, which is `True` or
`False`.
]
variable[flag] assign[=] call[name[self].flags][name[name]]
name[self]._value assign[=] <ast.IfExp object at 0x7da1b25d2f80> | keyword[def] identifier[set] ( identifier[self] , identifier[name] , identifier[value] ):
literal[string]
identifier[flag] = identifier[self] . identifier[flags] [ identifier[name] ]
identifier[self] . identifier[_value] =( identifier[self] . identifier[value] | identifier[flag] ) keyword[if] identifier[value] keyword[else] ( identifier[self] . identifier[value] &~ identifier[flag] ) | def set(self, name, value):
"""
Sets the value of the field `name` to `value`, which is `True` or
`False`.
"""
flag = self.flags[name]
self._value = self.value | flag if value else self.value & ~flag |
def qteAddWidget(self, widgetObj: QtGui.QWidget, isFocusable: bool=True,
widgetSignature: str=None, autoBind: bool=True):
"""
Augment the standard Qt ``widgetObj`` with Qtmacs specific fields.
Example: from a programmers perspective there is no difference
between::
wid = QtGui.QTextEdit(self)
and::
wid = self.qteAddWidget(QtGui.QTextEdit(self))
Both return a handle to a Qt widget (a ``QTextEdit`` in this
case). However, the ``qteAddWidget`` adds the following fields
to the object:
* ``_qteAdmin``: this is an instance of the ``QtmacsAdminStructure``
to tell Qtmacs how to treat the widget.
* ``qteSignature``: an attribute that returns the signature of the
widget and equals ``widgetSignature``. If no such signature was
specified it defaults to the Qt internal name as a string, eg.
for a push button this would be 'QPushButton'.
* ``qteSetKeyFilterPolicy``: this points directly to the equally
named method inside the _qteAdmin object. This is a convenience
shortcut to avoid using the _qteAdmin structure directly in
macro/applet code, because only Qtmacs itself should temper
with it.
|Args|
* ``widgetObj`` (**QWidget**): any widget from the QtGui library.
* ``isFocusable`` (**bool**): whether or not the widget can
receive the focus.
* ``widgetSignature`` (**str**): specify the widget signature
(defaults to class name)
* ``autoBind`` (**bool**): if **True** and ``widgetSignature``
is a recognisable name (eg. **QTextEdit**) then automatically
load the appropriate key-bindings for this widget.
|Returns|
* **QWidget**: handle to widget object (or **None** if it could
not be added).
|Raises|
* **QtmacsArgumentError** if at least one argument has an invalid type.
"""
# Add a Qtmacs data structure to the widget to allow their
# event administration. Note that, in all likelihood, the
# widget is an arbitrary Qt widget (eg. QLineEdit,
# QPushButton, etc).
widgetObj._qteAdmin = QtmacsAdminStructure(
self, isFocusable=isFocusable)
widgetObj._qteAdmin.appletID = self._qteAdmin.appletID
# Specify that this widget is not a QtmacsApplet.
widgetObj._qteAdmin.isQtmacsApplet = False
# Remember the signature of the applet containing this widget.
widgetObj._qteAdmin.appletSignature = self.qteAppletSignature()
# Set the widget signature. If none was specified, use the
# class name (eg. QLineEdit).
if widgetSignature is None:
widgetObj._qteAdmin.widgetSignature = widgetObj.__class__.__name__
else:
widgetObj._qteAdmin.widgetSignature = widgetSignature
# For convenience, as it is otherwise difficult for the macro
# programmer to determine the widget signature used by Qtmacs.
# Note: the "wo" is only a shorthand to avoid too long lines.
wo = widgetObj
wo.qteSignature = wo._qteAdmin.widgetSignature
wo.qteSetKeyFilterPolicy = wo._qteAdmin.qteSetKeyFilterPolicy
del wo
# Add the widget to the widgetList of this QtmacsApplet.
# Important: this MUST happen before macros and key-bindings are loaded
# and bound automatically (see code below) because the method to
# bind the keys will verify that the widget exists in ``widgetList``.
self._qteAdmin.widgetList.append(widgetObj)
# If a widget has a default key-bindings file then the global
# dictionary ``default_widget_keybindings`` will contain its
# name.
default_bind = qte_global.default_widget_keybindings
if autoBind and (widgetObj.qteSignature in default_bind):
# Shorthand.
module_name = default_bind[widgetObj.qteSignature]
# Import the module with the default key-bindings for the
# current widget type.
try:
mod = importlib.import_module(module_name)
except ImportError:
msg = ('Module <b>{}</b> could not be imported.'.format(
module_name))
self.qteLogger.exception(msg, stack_info=True)
return
if hasattr(mod, 'install_macros_and_bindings'):
# By convention, the module has an
# install_macros_and_bindings method. If an error
# occurs intercept it, but do not abort the method
# since the error only relates to a failed attempt to
# apply default key-bindings, not to register the
# widget (the main purpose of this method).
try:
mod.install_macros_and_bindings(widgetObj)
except Exception:
msg = ('<b>install_macros_and_bindings</b> function'
' in <b>{}</b> did not execute properly.')
msg = msg.format(module_name)
self.qteLogger.error(msg, stack_info=True)
else:
msg = ('Module <b>{}</b> has no '
'<b>install_macros_and_bindings</b>'
' method'.format(module_name))
self.qteLogger.error(msg)
return widgetObj | def function[qteAddWidget, parameter[self, widgetObj, isFocusable, widgetSignature, autoBind]]:
constant[
Augment the standard Qt ``widgetObj`` with Qtmacs specific fields.
Example: from a programmers perspective there is no difference
between::
wid = QtGui.QTextEdit(self)
and::
wid = self.qteAddWidget(QtGui.QTextEdit(self))
Both return a handle to a Qt widget (a ``QTextEdit`` in this
case). However, the ``qteAddWidget`` adds the following fields
to the object:
* ``_qteAdmin``: this is an instance of the ``QtmacsAdminStructure``
to tell Qtmacs how to treat the widget.
* ``qteSignature``: an attribute that returns the signature of the
widget and equals ``widgetSignature``. If no such signature was
specified it defaults to the Qt internal name as a string, eg.
for a push button this would be 'QPushButton'.
* ``qteSetKeyFilterPolicy``: this points directly to the equally
named method inside the _qteAdmin object. This is a convenience
shortcut to avoid using the _qteAdmin structure directly in
macro/applet code, because only Qtmacs itself should temper
with it.
|Args|
* ``widgetObj`` (**QWidget**): any widget from the QtGui library.
* ``isFocusable`` (**bool**): whether or not the widget can
receive the focus.
* ``widgetSignature`` (**str**): specify the widget signature
(defaults to class name)
* ``autoBind`` (**bool**): if **True** and ``widgetSignature``
is a recognisable name (eg. **QTextEdit**) then automatically
load the appropriate key-bindings for this widget.
|Returns|
* **QWidget**: handle to widget object (or **None** if it could
not be added).
|Raises|
* **QtmacsArgumentError** if at least one argument has an invalid type.
]
name[widgetObj]._qteAdmin assign[=] call[name[QtmacsAdminStructure], parameter[name[self]]]
name[widgetObj]._qteAdmin.appletID assign[=] name[self]._qteAdmin.appletID
name[widgetObj]._qteAdmin.isQtmacsApplet assign[=] constant[False]
name[widgetObj]._qteAdmin.appletSignature assign[=] call[name[self].qteAppletSignature, parameter[]]
if compare[name[widgetSignature] is constant[None]] begin[:]
name[widgetObj]._qteAdmin.widgetSignature assign[=] name[widgetObj].__class__.__name__
variable[wo] assign[=] name[widgetObj]
name[wo].qteSignature assign[=] name[wo]._qteAdmin.widgetSignature
name[wo].qteSetKeyFilterPolicy assign[=] name[wo]._qteAdmin.qteSetKeyFilterPolicy
<ast.Delete object at 0x7da204962b60>
call[name[self]._qteAdmin.widgetList.append, parameter[name[widgetObj]]]
variable[default_bind] assign[=] name[qte_global].default_widget_keybindings
if <ast.BoolOp object at 0x7da204961810> begin[:]
variable[module_name] assign[=] call[name[default_bind]][name[widgetObj].qteSignature]
<ast.Try object at 0x7da2044c3460>
if call[name[hasattr], parameter[name[mod], constant[install_macros_and_bindings]]] begin[:]
<ast.Try object at 0x7da2044c16c0>
return[name[widgetObj]] | keyword[def] identifier[qteAddWidget] ( identifier[self] , identifier[widgetObj] : identifier[QtGui] . identifier[QWidget] , identifier[isFocusable] : identifier[bool] = keyword[True] ,
identifier[widgetSignature] : identifier[str] = keyword[None] , identifier[autoBind] : identifier[bool] = keyword[True] ):
literal[string]
identifier[widgetObj] . identifier[_qteAdmin] = identifier[QtmacsAdminStructure] (
identifier[self] , identifier[isFocusable] = identifier[isFocusable] )
identifier[widgetObj] . identifier[_qteAdmin] . identifier[appletID] = identifier[self] . identifier[_qteAdmin] . identifier[appletID]
identifier[widgetObj] . identifier[_qteAdmin] . identifier[isQtmacsApplet] = keyword[False]
identifier[widgetObj] . identifier[_qteAdmin] . identifier[appletSignature] = identifier[self] . identifier[qteAppletSignature] ()
keyword[if] identifier[widgetSignature] keyword[is] keyword[None] :
identifier[widgetObj] . identifier[_qteAdmin] . identifier[widgetSignature] = identifier[widgetObj] . identifier[__class__] . identifier[__name__]
keyword[else] :
identifier[widgetObj] . identifier[_qteAdmin] . identifier[widgetSignature] = identifier[widgetSignature]
identifier[wo] = identifier[widgetObj]
identifier[wo] . identifier[qteSignature] = identifier[wo] . identifier[_qteAdmin] . identifier[widgetSignature]
identifier[wo] . identifier[qteSetKeyFilterPolicy] = identifier[wo] . identifier[_qteAdmin] . identifier[qteSetKeyFilterPolicy]
keyword[del] identifier[wo]
identifier[self] . identifier[_qteAdmin] . identifier[widgetList] . identifier[append] ( identifier[widgetObj] )
identifier[default_bind] = identifier[qte_global] . identifier[default_widget_keybindings]
keyword[if] identifier[autoBind] keyword[and] ( identifier[widgetObj] . identifier[qteSignature] keyword[in] identifier[default_bind] ):
identifier[module_name] = identifier[default_bind] [ identifier[widgetObj] . identifier[qteSignature] ]
keyword[try] :
identifier[mod] = identifier[importlib] . identifier[import_module] ( identifier[module_name] )
keyword[except] identifier[ImportError] :
identifier[msg] =( literal[string] . identifier[format] (
identifier[module_name] ))
identifier[self] . identifier[qteLogger] . identifier[exception] ( identifier[msg] , identifier[stack_info] = keyword[True] )
keyword[return]
keyword[if] identifier[hasattr] ( identifier[mod] , literal[string] ):
keyword[try] :
identifier[mod] . identifier[install_macros_and_bindings] ( identifier[widgetObj] )
keyword[except] identifier[Exception] :
identifier[msg] =( literal[string]
literal[string] )
identifier[msg] = identifier[msg] . identifier[format] ( identifier[module_name] )
identifier[self] . identifier[qteLogger] . identifier[error] ( identifier[msg] , identifier[stack_info] = keyword[True] )
keyword[else] :
identifier[msg] =( literal[string]
literal[string]
literal[string] . identifier[format] ( identifier[module_name] ))
identifier[self] . identifier[qteLogger] . identifier[error] ( identifier[msg] )
keyword[return] identifier[widgetObj] | def qteAddWidget(self, widgetObj: QtGui.QWidget, isFocusable: bool=True, widgetSignature: str=None, autoBind: bool=True):
"""
Augment the standard Qt ``widgetObj`` with Qtmacs specific fields.
Example: from a programmers perspective there is no difference
between::
wid = QtGui.QTextEdit(self)
and::
wid = self.qteAddWidget(QtGui.QTextEdit(self))
Both return a handle to a Qt widget (a ``QTextEdit`` in this
case). However, the ``qteAddWidget`` adds the following fields
to the object:
* ``_qteAdmin``: this is an instance of the ``QtmacsAdminStructure``
to tell Qtmacs how to treat the widget.
* ``qteSignature``: an attribute that returns the signature of the
widget and equals ``widgetSignature``. If no such signature was
specified it defaults to the Qt internal name as a string, eg.
for a push button this would be 'QPushButton'.
* ``qteSetKeyFilterPolicy``: this points directly to the equally
named method inside the _qteAdmin object. This is a convenience
shortcut to avoid using the _qteAdmin structure directly in
macro/applet code, because only Qtmacs itself should temper
with it.
|Args|
* ``widgetObj`` (**QWidget**): any widget from the QtGui library.
* ``isFocusable`` (**bool**): whether or not the widget can
receive the focus.
* ``widgetSignature`` (**str**): specify the widget signature
(defaults to class name)
* ``autoBind`` (**bool**): if **True** and ``widgetSignature``
is a recognisable name (eg. **QTextEdit**) then automatically
load the appropriate key-bindings for this widget.
|Returns|
* **QWidget**: handle to widget object (or **None** if it could
not be added).
|Raises|
* **QtmacsArgumentError** if at least one argument has an invalid type.
"""
# Add a Qtmacs data structure to the widget to allow their
# event administration. Note that, in all likelihood, the
# widget is an arbitrary Qt widget (eg. QLineEdit,
# QPushButton, etc).
widgetObj._qteAdmin = QtmacsAdminStructure(self, isFocusable=isFocusable)
widgetObj._qteAdmin.appletID = self._qteAdmin.appletID
# Specify that this widget is not a QtmacsApplet.
widgetObj._qteAdmin.isQtmacsApplet = False
# Remember the signature of the applet containing this widget.
widgetObj._qteAdmin.appletSignature = self.qteAppletSignature()
# Set the widget signature. If none was specified, use the
# class name (eg. QLineEdit).
if widgetSignature is None:
widgetObj._qteAdmin.widgetSignature = widgetObj.__class__.__name__ # depends on [control=['if'], data=[]]
else:
widgetObj._qteAdmin.widgetSignature = widgetSignature
# For convenience, as it is otherwise difficult for the macro
# programmer to determine the widget signature used by Qtmacs.
# Note: the "wo" is only a shorthand to avoid too long lines.
wo = widgetObj
wo.qteSignature = wo._qteAdmin.widgetSignature
wo.qteSetKeyFilterPolicy = wo._qteAdmin.qteSetKeyFilterPolicy
del wo
# Add the widget to the widgetList of this QtmacsApplet.
# Important: this MUST happen before macros and key-bindings are loaded
# and bound automatically (see code below) because the method to
# bind the keys will verify that the widget exists in ``widgetList``.
self._qteAdmin.widgetList.append(widgetObj)
# If a widget has a default key-bindings file then the global
# dictionary ``default_widget_keybindings`` will contain its
# name.
default_bind = qte_global.default_widget_keybindings
if autoBind and widgetObj.qteSignature in default_bind:
# Shorthand.
module_name = default_bind[widgetObj.qteSignature]
# Import the module with the default key-bindings for the
# current widget type.
try:
mod = importlib.import_module(module_name) # depends on [control=['try'], data=[]]
except ImportError:
msg = 'Module <b>{}</b> could not be imported.'.format(module_name)
self.qteLogger.exception(msg, stack_info=True)
return # depends on [control=['except'], data=[]]
if hasattr(mod, 'install_macros_and_bindings'):
# By convention, the module has an
# install_macros_and_bindings method. If an error
# occurs intercept it, but do not abort the method
# since the error only relates to a failed attempt to
# apply default key-bindings, not to register the
# widget (the main purpose of this method).
try:
mod.install_macros_and_bindings(widgetObj) # depends on [control=['try'], data=[]]
except Exception:
msg = '<b>install_macros_and_bindings</b> function in <b>{}</b> did not execute properly.'
msg = msg.format(module_name)
self.qteLogger.error(msg, stack_info=True) # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]]
else:
msg = 'Module <b>{}</b> has no <b>install_macros_and_bindings</b> method'.format(module_name)
self.qteLogger.error(msg) # depends on [control=['if'], data=[]]
return widgetObj |
def read_folder(folder, ext='*', uppercase=False, replace_dot='.', parent=''):
"""
This will read all of the files in the folder with the extension equal
to ext
:param folder: str of the folder name
:param ext: str of the extension
:param uppercase: bool if True will uppercase all the file names
:param replace_dot: str will replace "." in the filename
:param parent: str of the parent folder
:return: dict of basename with the value of the text in the file
"""
ret = {}
if os.path.exists(folder):
for file in os.listdir(folder):
if os.path.isdir(os.path.join(folder, file)):
child = read_folder(os.path.join(folder, file),
ext, uppercase, replace_dot,
parent=parent + file + '/')
ret.update(child)
else:
if ext == '*' or file.endswith(ext):
key = file.replace('.', replace_dot)
key = uppercase and key.upper() or key
ret[parent + key] = read_file(os.path.join(folder, file))
return ret | def function[read_folder, parameter[folder, ext, uppercase, replace_dot, parent]]:
constant[
This will read all of the files in the folder with the extension equal
to ext
:param folder: str of the folder name
:param ext: str of the extension
:param uppercase: bool if True will uppercase all the file names
:param replace_dot: str will replace "." in the filename
:param parent: str of the parent folder
:return: dict of basename with the value of the text in the file
]
variable[ret] assign[=] dictionary[[], []]
if call[name[os].path.exists, parameter[name[folder]]] begin[:]
for taget[name[file]] in starred[call[name[os].listdir, parameter[name[folder]]]] begin[:]
if call[name[os].path.isdir, parameter[call[name[os].path.join, parameter[name[folder], name[file]]]]] begin[:]
variable[child] assign[=] call[name[read_folder], parameter[call[name[os].path.join, parameter[name[folder], name[file]]], name[ext], name[uppercase], name[replace_dot]]]
call[name[ret].update, parameter[name[child]]]
return[name[ret]] | keyword[def] identifier[read_folder] ( identifier[folder] , identifier[ext] = literal[string] , identifier[uppercase] = keyword[False] , identifier[replace_dot] = literal[string] , identifier[parent] = literal[string] ):
literal[string]
identifier[ret] ={}
keyword[if] identifier[os] . identifier[path] . identifier[exists] ( identifier[folder] ):
keyword[for] identifier[file] keyword[in] identifier[os] . identifier[listdir] ( identifier[folder] ):
keyword[if] identifier[os] . identifier[path] . identifier[isdir] ( identifier[os] . identifier[path] . identifier[join] ( identifier[folder] , identifier[file] )):
identifier[child] = identifier[read_folder] ( identifier[os] . identifier[path] . identifier[join] ( identifier[folder] , identifier[file] ),
identifier[ext] , identifier[uppercase] , identifier[replace_dot] ,
identifier[parent] = identifier[parent] + identifier[file] + literal[string] )
identifier[ret] . identifier[update] ( identifier[child] )
keyword[else] :
keyword[if] identifier[ext] == literal[string] keyword[or] identifier[file] . identifier[endswith] ( identifier[ext] ):
identifier[key] = identifier[file] . identifier[replace] ( literal[string] , identifier[replace_dot] )
identifier[key] = identifier[uppercase] keyword[and] identifier[key] . identifier[upper] () keyword[or] identifier[key]
identifier[ret] [ identifier[parent] + identifier[key] ]= identifier[read_file] ( identifier[os] . identifier[path] . identifier[join] ( identifier[folder] , identifier[file] ))
keyword[return] identifier[ret] | def read_folder(folder, ext='*', uppercase=False, replace_dot='.', parent=''):
"""
This will read all of the files in the folder with the extension equal
to ext
:param folder: str of the folder name
:param ext: str of the extension
:param uppercase: bool if True will uppercase all the file names
:param replace_dot: str will replace "." in the filename
:param parent: str of the parent folder
:return: dict of basename with the value of the text in the file
"""
ret = {}
if os.path.exists(folder):
for file in os.listdir(folder):
if os.path.isdir(os.path.join(folder, file)):
child = read_folder(os.path.join(folder, file), ext, uppercase, replace_dot, parent=parent + file + '/')
ret.update(child) # depends on [control=['if'], data=[]]
elif ext == '*' or file.endswith(ext):
key = file.replace('.', replace_dot)
key = uppercase and key.upper() or key
ret[parent + key] = read_file(os.path.join(folder, file)) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['file']] # depends on [control=['if'], data=[]]
return ret |
def stereo_FM(x,fs=2.4e6,file_name='test.wav'):
"""
Stereo demod from complex baseband at sampling rate fs.
Assume fs is 2400 ksps
Mark Wickert July 2017
"""
N1 = 10
b = signal.firwin(64,2*200e3/float(fs))
# Filter and decimate (should be polyphase)
y = signal.lfilter(b,1,x)
z = ss.downsample(y,N1)
# Apply complex baseband discriminator
z_bb = discrim(z)
# Work with the (3) stereo multiplex signals:
# Begin by designing a lowpass filter for L+R and DSP demoded (L-R)
# (fc = 12 KHz)
b12 = signal.firwin(128,2*12e3/(float(fs)/N1))
# The L + R term is at baseband, we just lowpass filter to remove
# other terms above 12 kHz.
y_lpr = signal.lfilter(b12,1,z_bb)
b19 = signal.firwin(128,2*1e3*np.array([19-5,19+5])/(float(fs)/N1),
pass_zero=False);
z_bb19 = signal.lfilter(b19,1,z_bb)
# Lock PLL to 19 kHz pilot
# A type 2 loop with bandwidth Bn = 10 Hz and damping zeta = 0.707
# The VCO quiescent frequency is set to 19000 Hz.
theta, phi_error = pilot_PLL(z_bb19,19000,fs/N1,2,10,0.707)
# Coherently demodulate the L - R subcarrier at 38 kHz.
# theta is the PLL output phase at 19 kHz, so to double multiply
# by 2 and wrap with cos() or sin().
# First bandpass filter
b38 = signal.firwin(128,2*1e3*np.array([38-5,38+5])/(float(fs)/N1),
pass_zero=False);
x_lmr = signal.lfilter(b38,1,z_bb)
# Coherently demodulate using the PLL output phase
x_lmr = 2*np.sqrt(2)*np.cos(2*theta)*x_lmr
# Lowpass at 12 kHz to recover the desired DSB demod term
y_lmr = signal.lfilter(b12,1,x_lmr)
# Matrix the y_lmr and y_lpr for form right and left channels:
y_left = y_lpr + y_lmr
y_right = y_lpr - y_lmr
# Decimate by N2 (nominally 5)
N2 = 5
fs2 = float(fs)/(N1*N2) # (nominally 48 ksps)
y_left_DN2 = ss.downsample(y_left,N2)
y_right_DN2 = ss.downsample(y_right,N2)
# Deemphasize with 75 us time constant to 'undo' the preemphasis
# applied at the transmitter in broadcast FM.
# A 1-pole digital lowpass works well here.
a_de = np.exp(-2.1*1e3*2*np.pi/fs2)
z_left = signal.lfilter([1-a_de],[1, -a_de],y_left_DN2)
z_right = signal.lfilter([1-a_de],[1, -a_de],y_right_DN2)
# Place left and righ channels as side-by-side columns in a 2D array
z_out = np.hstack((np.array([z_left]).T,(np.array([z_right]).T)))
ss.to_wav(file_name, 48000, z_out/2)
print('Done!')
#return z_bb, z_out
return z_bb, theta, y_lpr, y_lmr, z_out | def function[stereo_FM, parameter[x, fs, file_name]]:
constant[
Stereo demod from complex baseband at sampling rate fs.
Assume fs is 2400 ksps
Mark Wickert July 2017
]
variable[N1] assign[=] constant[10]
variable[b] assign[=] call[name[signal].firwin, parameter[constant[64], binary_operation[binary_operation[constant[2] * constant[200000.0]] / call[name[float], parameter[name[fs]]]]]]
variable[y] assign[=] call[name[signal].lfilter, parameter[name[b], constant[1], name[x]]]
variable[z] assign[=] call[name[ss].downsample, parameter[name[y], name[N1]]]
variable[z_bb] assign[=] call[name[discrim], parameter[name[z]]]
variable[b12] assign[=] call[name[signal].firwin, parameter[constant[128], binary_operation[binary_operation[constant[2] * constant[12000.0]] / binary_operation[call[name[float], parameter[name[fs]]] / name[N1]]]]]
variable[y_lpr] assign[=] call[name[signal].lfilter, parameter[name[b12], constant[1], name[z_bb]]]
variable[b19] assign[=] call[name[signal].firwin, parameter[constant[128], binary_operation[binary_operation[binary_operation[constant[2] * constant[1000.0]] * call[name[np].array, parameter[list[[<ast.BinOp object at 0x7da18f58f520>, <ast.BinOp object at 0x7da18f58fac0>]]]]] / binary_operation[call[name[float], parameter[name[fs]]] / name[N1]]]]]
variable[z_bb19] assign[=] call[name[signal].lfilter, parameter[name[b19], constant[1], name[z_bb]]]
<ast.Tuple object at 0x7da18f58f3d0> assign[=] call[name[pilot_PLL], parameter[name[z_bb19], constant[19000], binary_operation[name[fs] / name[N1]], constant[2], constant[10], constant[0.707]]]
variable[b38] assign[=] call[name[signal].firwin, parameter[constant[128], binary_operation[binary_operation[binary_operation[constant[2] * constant[1000.0]] * call[name[np].array, parameter[list[[<ast.BinOp object at 0x7da18f58df30>, <ast.BinOp object at 0x7da18f58c8b0>]]]]] / binary_operation[call[name[float], parameter[name[fs]]] / name[N1]]]]]
variable[x_lmr] assign[=] call[name[signal].lfilter, parameter[name[b38], constant[1], name[z_bb]]]
variable[x_lmr] assign[=] binary_operation[binary_operation[binary_operation[constant[2] * call[name[np].sqrt, parameter[constant[2]]]] * call[name[np].cos, parameter[binary_operation[constant[2] * name[theta]]]]] * name[x_lmr]]
variable[y_lmr] assign[=] call[name[signal].lfilter, parameter[name[b12], constant[1], name[x_lmr]]]
variable[y_left] assign[=] binary_operation[name[y_lpr] + name[y_lmr]]
variable[y_right] assign[=] binary_operation[name[y_lpr] - name[y_lmr]]
variable[N2] assign[=] constant[5]
variable[fs2] assign[=] binary_operation[call[name[float], parameter[name[fs]]] / binary_operation[name[N1] * name[N2]]]
variable[y_left_DN2] assign[=] call[name[ss].downsample, parameter[name[y_left], name[N2]]]
variable[y_right_DN2] assign[=] call[name[ss].downsample, parameter[name[y_right], name[N2]]]
variable[a_de] assign[=] call[name[np].exp, parameter[binary_operation[binary_operation[binary_operation[binary_operation[<ast.UnaryOp object at 0x7da18f58f7f0> * constant[1000.0]] * constant[2]] * name[np].pi] / name[fs2]]]]
variable[z_left] assign[=] call[name[signal].lfilter, parameter[list[[<ast.BinOp object at 0x7da20c76fd90>]], list[[<ast.Constant object at 0x7da20c76fca0>, <ast.UnaryOp object at 0x7da20c76ee00>]], name[y_left_DN2]]]
variable[z_right] assign[=] call[name[signal].lfilter, parameter[list[[<ast.BinOp object at 0x7da20c76ef80>]], list[[<ast.Constant object at 0x7da20c76fe50>, <ast.UnaryOp object at 0x7da20c76f610>]], name[y_right_DN2]]]
variable[z_out] assign[=] call[name[np].hstack, parameter[tuple[[<ast.Attribute object at 0x7da20c76e8c0>, <ast.Attribute object at 0x7da20c76ea70>]]]]
call[name[ss].to_wav, parameter[name[file_name], constant[48000], binary_operation[name[z_out] / constant[2]]]]
call[name[print], parameter[constant[Done!]]]
return[tuple[[<ast.Name object at 0x7da20c76cbe0>, <ast.Name object at 0x7da20c76e6e0>, <ast.Name object at 0x7da20c76e050>, <ast.Name object at 0x7da20c76c700>, <ast.Name object at 0x7da20c76fe20>]]] | keyword[def] identifier[stereo_FM] ( identifier[x] , identifier[fs] = literal[int] , identifier[file_name] = literal[string] ):
literal[string]
identifier[N1] = literal[int]
identifier[b] = identifier[signal] . identifier[firwin] ( literal[int] , literal[int] * literal[int] / identifier[float] ( identifier[fs] ))
identifier[y] = identifier[signal] . identifier[lfilter] ( identifier[b] , literal[int] , identifier[x] )
identifier[z] = identifier[ss] . identifier[downsample] ( identifier[y] , identifier[N1] )
identifier[z_bb] = identifier[discrim] ( identifier[z] )
identifier[b12] = identifier[signal] . identifier[firwin] ( literal[int] , literal[int] * literal[int] /( identifier[float] ( identifier[fs] )/ identifier[N1] ))
identifier[y_lpr] = identifier[signal] . identifier[lfilter] ( identifier[b12] , literal[int] , identifier[z_bb] )
identifier[b19] = identifier[signal] . identifier[firwin] ( literal[int] , literal[int] * literal[int] * identifier[np] . identifier[array] ([ literal[int] - literal[int] , literal[int] + literal[int] ])/( identifier[float] ( identifier[fs] )/ identifier[N1] ),
identifier[pass_zero] = keyword[False] );
identifier[z_bb19] = identifier[signal] . identifier[lfilter] ( identifier[b19] , literal[int] , identifier[z_bb] )
identifier[theta] , identifier[phi_error] = identifier[pilot_PLL] ( identifier[z_bb19] , literal[int] , identifier[fs] / identifier[N1] , literal[int] , literal[int] , literal[int] )
identifier[b38] = identifier[signal] . identifier[firwin] ( literal[int] , literal[int] * literal[int] * identifier[np] . identifier[array] ([ literal[int] - literal[int] , literal[int] + literal[int] ])/( identifier[float] ( identifier[fs] )/ identifier[N1] ),
identifier[pass_zero] = keyword[False] );
identifier[x_lmr] = identifier[signal] . identifier[lfilter] ( identifier[b38] , literal[int] , identifier[z_bb] )
identifier[x_lmr] = literal[int] * identifier[np] . identifier[sqrt] ( literal[int] )* identifier[np] . identifier[cos] ( literal[int] * identifier[theta] )* identifier[x_lmr]
identifier[y_lmr] = identifier[signal] . identifier[lfilter] ( identifier[b12] , literal[int] , identifier[x_lmr] )
identifier[y_left] = identifier[y_lpr] + identifier[y_lmr]
identifier[y_right] = identifier[y_lpr] - identifier[y_lmr]
identifier[N2] = literal[int]
identifier[fs2] = identifier[float] ( identifier[fs] )/( identifier[N1] * identifier[N2] )
identifier[y_left_DN2] = identifier[ss] . identifier[downsample] ( identifier[y_left] , identifier[N2] )
identifier[y_right_DN2] = identifier[ss] . identifier[downsample] ( identifier[y_right] , identifier[N2] )
identifier[a_de] = identifier[np] . identifier[exp] (- literal[int] * literal[int] * literal[int] * identifier[np] . identifier[pi] / identifier[fs2] )
identifier[z_left] = identifier[signal] . identifier[lfilter] ([ literal[int] - identifier[a_de] ],[ literal[int] ,- identifier[a_de] ], identifier[y_left_DN2] )
identifier[z_right] = identifier[signal] . identifier[lfilter] ([ literal[int] - identifier[a_de] ],[ literal[int] ,- identifier[a_de] ], identifier[y_right_DN2] )
identifier[z_out] = identifier[np] . identifier[hstack] (( identifier[np] . identifier[array] ([ identifier[z_left] ]). identifier[T] ,( identifier[np] . identifier[array] ([ identifier[z_right] ]). identifier[T] )))
identifier[ss] . identifier[to_wav] ( identifier[file_name] , literal[int] , identifier[z_out] / literal[int] )
identifier[print] ( literal[string] )
keyword[return] identifier[z_bb] , identifier[theta] , identifier[y_lpr] , identifier[y_lmr] , identifier[z_out] | def stereo_FM(x, fs=2400000.0, file_name='test.wav'):
"""
Stereo demod from complex baseband at sampling rate fs.
Assume fs is 2400 ksps
Mark Wickert July 2017
"""
N1 = 10
b = signal.firwin(64, 2 * 200000.0 / float(fs)) # Filter and decimate (should be polyphase)
y = signal.lfilter(b, 1, x)
z = ss.downsample(y, N1) # Apply complex baseband discriminator
z_bb = discrim(z) # Work with the (3) stereo multiplex signals:
# Begin by designing a lowpass filter for L+R and DSP demoded (L-R)
# (fc = 12 KHz)
b12 = signal.firwin(128, 2 * 12000.0 / (float(fs) / N1)) # The L + R term is at baseband, we just lowpass filter to remove
# other terms above 12 kHz.
y_lpr = signal.lfilter(b12, 1, z_bb)
b19 = signal.firwin(128, 2 * 1000.0 * np.array([19 - 5, 19 + 5]) / (float(fs) / N1), pass_zero=False)
z_bb19 = signal.lfilter(b19, 1, z_bb) # Lock PLL to 19 kHz pilot
# A type 2 loop with bandwidth Bn = 10 Hz and damping zeta = 0.707
# The VCO quiescent frequency is set to 19000 Hz.
(theta, phi_error) = pilot_PLL(z_bb19, 19000, fs / N1, 2, 10, 0.707) # Coherently demodulate the L - R subcarrier at 38 kHz.
# theta is the PLL output phase at 19 kHz, so to double multiply
# by 2 and wrap with cos() or sin().
# First bandpass filter
b38 = signal.firwin(128, 2 * 1000.0 * np.array([38 - 5, 38 + 5]) / (float(fs) / N1), pass_zero=False)
x_lmr = signal.lfilter(b38, 1, z_bb) # Coherently demodulate using the PLL output phase
x_lmr = 2 * np.sqrt(2) * np.cos(2 * theta) * x_lmr # Lowpass at 12 kHz to recover the desired DSB demod term
y_lmr = signal.lfilter(b12, 1, x_lmr) # Matrix the y_lmr and y_lpr for form right and left channels:
y_left = y_lpr + y_lmr
y_right = y_lpr - y_lmr # Decimate by N2 (nominally 5)
N2 = 5
fs2 = float(fs) / (N1 * N2) # (nominally 48 ksps)
y_left_DN2 = ss.downsample(y_left, N2)
y_right_DN2 = ss.downsample(y_right, N2) # Deemphasize with 75 us time constant to 'undo' the preemphasis
# applied at the transmitter in broadcast FM.
# A 1-pole digital lowpass works well here.
a_de = np.exp(-2.1 * 1000.0 * 2 * np.pi / fs2)
z_left = signal.lfilter([1 - a_de], [1, -a_de], y_left_DN2)
z_right = signal.lfilter([1 - a_de], [1, -a_de], y_right_DN2) # Place left and righ channels as side-by-side columns in a 2D array
z_out = np.hstack((np.array([z_left]).T, np.array([z_right]).T))
ss.to_wav(file_name, 48000, z_out / 2)
print('Done!') #return z_bb, z_out
return (z_bb, theta, y_lpr, y_lmr, z_out) |
def remove_repo(self, rname):
    """Remove the repository named *rname*.

    :param rname: name of the repository to remove
    :type rname: str
    :return: the removed repository object
    :rtype: Repo
    """
    logger.debug("%s", rname)
    removed = Repo(name=rname)
    removed.remove()
    return removed
constant[todo: Docstring for remove_repo
:param repo: arg description
:type repo: type description
:return:
:rtype:
]
call[name[logger].debug, parameter[constant[%s], name[rname]]]
variable[repo] assign[=] call[name[Repo], parameter[]]
call[name[repo].remove, parameter[]]
return[name[repo]] | keyword[def] identifier[remove_repo] ( identifier[self] , identifier[rname] ):
literal[string]
identifier[logger] . identifier[debug] ( literal[string] , identifier[rname] )
identifier[repo] = identifier[Repo] ( identifier[name] = identifier[rname] )
identifier[repo] . identifier[remove] ()
keyword[return] identifier[repo] | def remove_repo(self, rname):
"""todo: Docstring for remove_repo
:param repo: arg description
:type repo: type description
:return:
:rtype:
"""
logger.debug('%s', rname)
repo = Repo(name=rname)
repo.remove()
return repo |
def dump_session_params(path):
    """Dump value of all TRAINABLE + MODEL variables to a dict, and save
    in npz format (loadable by :func:`sessinit.get_model_loader`).

    Args:
        path(str): the file name to save the parameters. Must end with npz.
    """
    # Collect variables that are GLOBAL, and either TRAINABLE or MODEL.
    to_dump = tf.get_collection(tf.GraphKeys.TRAINABLE_VARIABLES)
    to_dump.extend(tf.get_collection(tf.GraphKeys.MODEL_VARIABLES))
    # TODO dedup
    assert len(set(to_dump)) == len(to_dump), "TRAINABLE and MODEL variables have duplication!"
    # Keep only variables that are also GLOBAL.
    global_names = {g.name for g in tf.global_variables()}
    values = {v.name: v.eval() for v in to_dump if v.name in global_names}
    save_chkpt_vars(values, path)
constant[
Dump value of all TRAINABLE + MODEL variables to a dict, and save as
npz format (loadable by :func:`sessinit.get_model_loader`).
Args:
path(str): the file name to save the parameters. Must ends with npz.
]
variable[var] assign[=] call[name[tf].get_collection, parameter[name[tf].GraphKeys.TRAINABLE_VARIABLES]]
call[name[var].extend, parameter[call[name[tf].get_collection, parameter[name[tf].GraphKeys.MODEL_VARIABLES]]]]
assert[compare[call[name[len], parameter[call[name[set], parameter[name[var]]]]] equal[==] call[name[len], parameter[name[var]]]]]
variable[gvars] assign[=] call[name[set], parameter[<ast.ListComp object at 0x7da1b2346ec0>]]
variable[var] assign[=] <ast.ListComp object at 0x7da1b2346a40>
variable[result] assign[=] dictionary[[], []]
for taget[name[v]] in starred[name[var]] begin[:]
call[name[result]][name[v].name] assign[=] call[name[v].eval, parameter[]]
call[name[save_chkpt_vars], parameter[name[result], name[path]]] | keyword[def] identifier[dump_session_params] ( identifier[path] ):
literal[string]
identifier[var] = identifier[tf] . identifier[get_collection] ( identifier[tf] . identifier[GraphKeys] . identifier[TRAINABLE_VARIABLES] )
identifier[var] . identifier[extend] ( identifier[tf] . identifier[get_collection] ( identifier[tf] . identifier[GraphKeys] . identifier[MODEL_VARIABLES] ))
keyword[assert] identifier[len] ( identifier[set] ( identifier[var] ))== identifier[len] ( identifier[var] ), literal[string]
identifier[gvars] = identifier[set] ([ identifier[k] . identifier[name] keyword[for] identifier[k] keyword[in] identifier[tf] . identifier[global_variables] ()])
identifier[var] =[ identifier[v] keyword[for] identifier[v] keyword[in] identifier[var] keyword[if] identifier[v] . identifier[name] keyword[in] identifier[gvars] ]
identifier[result] ={}
keyword[for] identifier[v] keyword[in] identifier[var] :
identifier[result] [ identifier[v] . identifier[name] ]= identifier[v] . identifier[eval] ()
identifier[save_chkpt_vars] ( identifier[result] , identifier[path] ) | def dump_session_params(path):
"""
Dump value of all TRAINABLE + MODEL variables to a dict, and save as
npz format (loadable by :func:`sessinit.get_model_loader`).
Args:
path(str): the file name to save the parameters. Must ends with npz.
"""
# save variables that are GLOBAL, and either TRAINABLE or MODEL
var = tf.get_collection(tf.GraphKeys.TRAINABLE_VARIABLES)
var.extend(tf.get_collection(tf.GraphKeys.MODEL_VARIABLES))
# TODO dedup
assert len(set(var)) == len(var), 'TRAINABLE and MODEL variables have duplication!'
gvars = set([k.name for k in tf.global_variables()])
var = [v for v in var if v.name in gvars]
result = {}
for v in var:
result[v.name] = v.eval() # depends on [control=['for'], data=['v']]
save_chkpt_vars(result, path) |
def SDSS_spectra_query(self, cat_name, ra, dec, radius, group=True, **kwargs):
    """
    Use astroquery to search SDSS for sources within a search cone

    Parameters
    ----------
    cat_name: str
        A name for the imported catalog (e.g. '2MASS')
    ra: astropy.units.quantity.Quantity
        The RA of the center of the cone search
    dec: astropy.units.quantity.Quantity
        The Dec of the center of the cone search
    radius: astropy.units.quantity.Quantity
        The radius of the cone search
    group: bool
        Regroup all sources after ingesting the new catalog
    """
    # Verify the cat_name
    if self._catalog_check(cat_name):
        # Column names of the coordinates in the returned SDSS table.
        # NOTE(review): these were previously undefined names (NameError at
        # runtime); defaults follow the SDSS schema -- confirm against
        # ingest_data's expectations.
        ra_col = kwargs.pop('ra_col', 'ra')
        dec_col = kwargs.pop('dec_col', 'dec')

        # Cone search SDSS.
        # BUG FIX: the original format strings referenced an undefined
        # ``viz_cat`` and passed four arguments to three placeholders.
        print("Searching SDSS for sources within {} of ({}, {}). Please be patient...".format(radius, ra, dec))
        crds = coord.SkyCoord(ra=ra, dec=dec, frame='icrs')
        try:
            data = SDSS.query_region(crds, spectro=True, radius=radius)
        except Exception:
            # Narrowed from a bare ``except:`` so SystemExit and
            # KeyboardInterrupt are no longer swallowed.
            print("No data found in SDSS within {} of ({}, {}).".format(radius, ra, dec))
            return

        # Ingest the data (the unused ``tab`` local was removed)
        self.ingest_data(data, cat_name, 'id', ra_col=ra_col, dec_col=dec_col)

        # Regroup
        if len(self.catalogs) > 1 and group:
            self.group_sources(self.xmatch_radius)
constant[
Use astroquery to search SDSS for sources within a search cone
Parameters
----------
cat_name: str
A name for the imported catalog (e.g. '2MASS')
ra: astropy.units.quantity.Quantity
The RA of the center of the cone search
dec: astropy.units.quantity.Quantity
The Dec of the center of the cone search
radius: astropy.units.quantity.Quantity
The radius of the cone search
]
if call[name[self]._catalog_check, parameter[name[cat_name]]] begin[:]
variable[tab] assign[=] call[name[at].Table.from_pandas, parameter[name[self].catalog]]
call[name[print], parameter[call[constant[Searching SDSS for sources within {} of ({}, {}). Please be patient...].format, parameter[name[viz_cat], name[radius], name[ra], name[dec]]]]]
variable[crds] assign[=] call[name[coord].SkyCoord, parameter[]]
<ast.Try object at 0x7da1b0aa5690>
call[name[self].ingest_data, parameter[name[data], name[cat_name], constant[id]]]
if <ast.BoolOp object at 0x7da1b0aa6f20> begin[:]
call[name[self].group_sources, parameter[name[self].xmatch_radius]] | keyword[def] identifier[SDSS_spectra_query] ( identifier[self] , identifier[cat_name] , identifier[ra] , identifier[dec] , identifier[radius] , identifier[group] = keyword[True] ,** identifier[kwargs] ):
literal[string]
keyword[if] identifier[self] . identifier[_catalog_check] ( identifier[cat_name] ):
identifier[tab] = identifier[at] . identifier[Table] . identifier[from_pandas] ( identifier[self] . identifier[catalog] )
identifier[print] ( literal[string] . identifier[format] ( identifier[viz_cat] , identifier[radius] , identifier[ra] , identifier[dec] ))
identifier[crds] = identifier[coord] . identifier[SkyCoord] ( identifier[ra] = identifier[ra] , identifier[dec] = identifier[dec] , identifier[frame] = literal[string] )
keyword[try] :
identifier[data] = identifier[SDSS] . identifier[query_region] ( identifier[crds] , identifier[spectro] = keyword[True] , identifier[radius] = identifier[radius] )
keyword[except] :
identifier[print] ( literal[string] . identifier[format] ( identifier[viz_cat] , identifier[radius] , identifier[ra] , identifier[dec] ))
keyword[return]
identifier[self] . identifier[ingest_data] ( identifier[data] , identifier[cat_name] , literal[string] , identifier[ra_col] = identifier[ra_col] , identifier[dec_col] = identifier[dec_col] )
keyword[if] identifier[len] ( identifier[self] . identifier[catalogs] )> literal[int] keyword[and] identifier[group] :
identifier[self] . identifier[group_sources] ( identifier[self] . identifier[xmatch_radius] ) | def SDSS_spectra_query(self, cat_name, ra, dec, radius, group=True, **kwargs):
"""
Use astroquery to search SDSS for sources within a search cone
Parameters
----------
cat_name: str
A name for the imported catalog (e.g. '2MASS')
ra: astropy.units.quantity.Quantity
The RA of the center of the cone search
dec: astropy.units.quantity.Quantity
The Dec of the center of the cone search
radius: astropy.units.quantity.Quantity
The radius of the cone search
"""
# Verify the cat_name
if self._catalog_check(cat_name):
# Prep the current catalog as an astropy.QTable
tab = at.Table.from_pandas(self.catalog)
# Cone search Vizier
print('Searching SDSS for sources within {} of ({}, {}). Please be patient...'.format(viz_cat, radius, ra, dec))
crds = coord.SkyCoord(ra=ra, dec=dec, frame='icrs')
try:
data = SDSS.query_region(crds, spectro=True, radius=radius) # depends on [control=['try'], data=[]]
except:
print('No data found in SDSS within {} of ({}, {}).'.format(viz_cat, radius, ra, dec))
return # depends on [control=['except'], data=[]]
# Ingest the data
self.ingest_data(data, cat_name, 'id', ra_col=ra_col, dec_col=dec_col)
# Regroup
if len(self.catalogs) > 1 and group:
self.group_sources(self.xmatch_radius) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] |
def close_fds(self, skip_close_fds):  # pragma: no cover, not with unit tests...
    """Close all the process file descriptors.
    Skip the descriptors present in the skip_close_fds list

    :param skip_close_fds: list of file descriptor to preserve from closing
    :type skip_close_fds: list
    :return: None
    """
    # Determine how many descriptors may be open: use the hard
    # RLIMIT_NOFILE bound, with a sane fallback when it is unlimited.
    max_fds = resource.getrlimit(resource.RLIMIT_NOFILE)[1]
    if max_fds == resource.RLIM_INFINITY:
        max_fds = 1024
    self.pre_log.append(("DEBUG", "Maximum file descriptors: %d" % max_fds))
    # Close every descriptor except the preserved ones.
    for fd in range(max_fds):
        if fd in skip_close_fds:
            self.pre_log.append(("INFO", "Do not close fd: %s" % fd))
        else:
            try:
                os.close(fd)
            except OSError:
                # fd wasn't open to begin with (ignored)
                pass
constant[Close all the process file descriptors.
Skip the descriptors present in the skip_close_fds list
:param skip_close_fds: list of file descriptor to preserve from closing
:type skip_close_fds: list
:return: None
]
variable[max_fds] assign[=] call[call[name[resource].getrlimit, parameter[name[resource].RLIMIT_NOFILE]]][constant[1]]
if compare[name[max_fds] equal[==] name[resource].RLIM_INFINITY] begin[:]
variable[max_fds] assign[=] constant[1024]
call[name[self].pre_log.append, parameter[tuple[[<ast.Constant object at 0x7da207f01cf0>, <ast.BinOp object at 0x7da207f02b30>]]]]
for taget[name[file_d]] in starred[call[name[range], parameter[constant[0], name[max_fds]]]] begin[:]
if compare[name[file_d] in name[skip_close_fds]] begin[:]
call[name[self].pre_log.append, parameter[tuple[[<ast.Constant object at 0x7da207f00040>, <ast.BinOp object at 0x7da207f01e40>]]]]
continue
<ast.Try object at 0x7da207f03010> | keyword[def] identifier[close_fds] ( identifier[self] , identifier[skip_close_fds] ):
literal[string]
identifier[max_fds] = identifier[resource] . identifier[getrlimit] ( identifier[resource] . identifier[RLIMIT_NOFILE] )[ literal[int] ]
keyword[if] identifier[max_fds] == identifier[resource] . identifier[RLIM_INFINITY] :
identifier[max_fds] = literal[int]
identifier[self] . identifier[pre_log] . identifier[append] (( literal[string] , literal[string] % identifier[max_fds] ))
keyword[for] identifier[file_d] keyword[in] identifier[range] ( literal[int] , identifier[max_fds] ):
keyword[if] identifier[file_d] keyword[in] identifier[skip_close_fds] :
identifier[self] . identifier[pre_log] . identifier[append] (( literal[string] , literal[string] % identifier[file_d] ))
keyword[continue]
keyword[try] :
identifier[os] . identifier[close] ( identifier[file_d] )
keyword[except] identifier[OSError] :
keyword[pass] | def close_fds(self, skip_close_fds): # pragma: no cover, not with unit tests...
'Close all the process file descriptors.\n Skip the descriptors present in the skip_close_fds list\n\n :param skip_close_fds: list of file descriptor to preserve from closing\n :type skip_close_fds: list\n :return: None\n '
# First we manage the file descriptor, because debug file can be
# relative to pwd
max_fds = resource.getrlimit(resource.RLIMIT_NOFILE)[1]
if max_fds == resource.RLIM_INFINITY:
max_fds = 1024 # depends on [control=['if'], data=['max_fds']]
self.pre_log.append(('DEBUG', 'Maximum file descriptors: %d' % max_fds))
# Iterate through and close all file descriptors.
for file_d in range(0, max_fds):
if file_d in skip_close_fds:
self.pre_log.append(('INFO', 'Do not close fd: %s' % file_d))
continue # depends on [control=['if'], data=['file_d']]
try:
os.close(file_d) # depends on [control=['try'], data=[]]
except OSError: # ERROR, fd wasn't open to begin with (ignored)
pass # depends on [control=['except'], data=[]] # depends on [control=['for'], data=['file_d']] |
def scale_between(minval, maxval, numStops):
    """Scale a min and max value to an equal-interval domain with
    numStops discrete values.

    Parameters
    ----------
    minval : numeric
        Lower bound of the domain.
    maxval : numeric
        Upper bound of the domain; must be >= ``minval``.
    numStops : int
        Number of discrete stops to generate.

    Returns
    -------
    list
        ``[minval, maxval]`` when fewer than two stops are requested,
        otherwise ``numStops`` values starting at ``minval``, spaced by
        ``(maxval - minval) / numStops`` and rounded to 2 decimals
        (``maxval`` itself is not included).

    Raises
    ------
    ValueError
        If ``maxval`` is less than ``minval``.
    """
    if numStops < 2:
        return [minval, maxval]
    if maxval < minval:
        # BUG FIX: the original raised a bare ValueError() with no message.
        raise ValueError("maxval (%r) must be >= minval (%r)" % (maxval, minval))
    interval = float(maxval - minval) / float(numStops)
    return [round(minval + interval * i, 2) for i in range(numStops)]
constant[ Scale a min and max value to equal interval domain with
numStops discrete values
]
variable[scale] assign[=] list[[]]
if compare[name[numStops] less[<] constant[2]] begin[:]
return[list[[<ast.Name object at 0x7da1b18e5870>, <ast.Name object at 0x7da1b18e60b0>]]] | keyword[def] identifier[scale_between] ( identifier[minval] , identifier[maxval] , identifier[numStops] ):
literal[string]
identifier[scale] =[]
keyword[if] identifier[numStops] < literal[int] :
keyword[return] [ identifier[minval] , identifier[maxval] ]
keyword[elif] identifier[maxval] < identifier[minval] :
keyword[raise] identifier[ValueError] ()
keyword[else] :
identifier[domain] = identifier[maxval] - identifier[minval]
identifier[interval] = identifier[float] ( identifier[domain] )/ identifier[float] ( identifier[numStops] )
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[numStops] ):
identifier[scale] . identifier[append] ( identifier[round] ( identifier[minval] + identifier[interval] * identifier[i] , literal[int] ))
keyword[return] identifier[scale] | def scale_between(minval, maxval, numStops):
""" Scale a min and max value to equal interval domain with
numStops discrete values
"""
scale = []
if numStops < 2:
return [minval, maxval] # depends on [control=['if'], data=[]]
elif maxval < minval:
raise ValueError() # depends on [control=['if'], data=[]]
else:
domain = maxval - minval
interval = float(domain) / float(numStops)
for i in range(numStops):
scale.append(round(minval + interval * i, 2)) # depends on [control=['for'], data=['i']]
return scale |
def find_output_without_tag(self, tag):
    """
    Find all files who do not have tag in self.tags
    """
    # Tags are stored upper-case, so normalise before comparing.
    wanted = tag.upper()
    untagged = [f for f in self if wanted not in f.tags]
    return FileList(untagged)
constant[
Find all files who do not have tag in self.tags
]
variable[tag] assign[=] call[name[tag].upper, parameter[]]
return[call[name[FileList], parameter[<ast.ListComp object at 0x7da20c6abb20>]]] | keyword[def] identifier[find_output_without_tag] ( identifier[self] , identifier[tag] ):
literal[string]
identifier[tag] = identifier[tag] . identifier[upper] ()
keyword[return] identifier[FileList] ([ identifier[i] keyword[for] identifier[i] keyword[in] identifier[self] keyword[if] identifier[tag] keyword[not] keyword[in] identifier[i] . identifier[tags] ]) | def find_output_without_tag(self, tag):
"""
Find all files who do not have tag in self.tags
"""
# Enforce upper case
tag = tag.upper()
return FileList([i for i in self if tag not in i.tags]) |
def start_of_paragraph(self, count=1, before=False):
    """
    Return the start of the current paragraph. (Relative cursor position.)
    """
    def is_blank(text):
        # A paragraph boundary is an empty or whitespace-only line.
        return not text or text.isspace()

    line_index = self.find_previous_matching_line(match_func=is_blank, count=count)
    if not line_index:
        # No blank line above: the paragraph starts at the buffer start.
        return -self.cursor_position
    offset = self.get_cursor_up_position(count=-line_index)
    if not before:
        offset += 1
    return min(0, offset)
constant[
Return the start of the current paragraph. (Relative cursor position.)
]
def function[match_func, parameter[text]]:
return[<ast.BoolOp object at 0x7da204565300>]
variable[line_index] assign[=] call[name[self].find_previous_matching_line, parameter[]]
if name[line_index] begin[:]
variable[add] assign[=] <ast.IfExp object at 0x7da1b08a48e0>
return[call[name[min], parameter[constant[0], binary_operation[call[name[self].get_cursor_up_position, parameter[]] + name[add]]]]] | keyword[def] identifier[start_of_paragraph] ( identifier[self] , identifier[count] = literal[int] , identifier[before] = keyword[False] ):
literal[string]
keyword[def] identifier[match_func] ( identifier[text] ):
keyword[return] keyword[not] identifier[text] keyword[or] identifier[text] . identifier[isspace] ()
identifier[line_index] = identifier[self] . identifier[find_previous_matching_line] ( identifier[match_func] = identifier[match_func] , identifier[count] = identifier[count] )
keyword[if] identifier[line_index] :
identifier[add] = literal[int] keyword[if] identifier[before] keyword[else] literal[int]
keyword[return] identifier[min] ( literal[int] , identifier[self] . identifier[get_cursor_up_position] ( identifier[count] =- identifier[line_index] )+ identifier[add] )
keyword[else] :
keyword[return] - identifier[self] . identifier[cursor_position] | def start_of_paragraph(self, count=1, before=False):
"""
Return the start of the current paragraph. (Relative cursor position.)
"""
def match_func(text):
return not text or text.isspace()
line_index = self.find_previous_matching_line(match_func=match_func, count=count)
if line_index:
add = 0 if before else 1
return min(0, self.get_cursor_up_position(count=-line_index) + add) # depends on [control=['if'], data=[]]
else:
return -self.cursor_position |
def _serialize_data(self, data, options):
    """Serialize *data* with the algorithm selected in *options*.

    :param data: payload to serialize
    :param options: dict holding ``serialization_algorithm_id``, which must
        be a key of ``self.serialization_algorithms``
    :return: the encoded data
    :raises ValueError: if the algorithm id is unknown
    """
    serialization_algorithm_id = options['serialization_algorithm_id']
    if serialization_algorithm_id not in self.serialization_algorithms:
        # BUG FIX: raise ValueError (still caught by ``except Exception``
        # in any existing caller) instead of a bare Exception.
        raise ValueError('Unknown serialization algorithm id: %d'
                         % serialization_algorithm_id)
    serialization_algorithm = \
        self.serialization_algorithms[serialization_algorithm_id]
    algorithm = self._get_algorithm_info(serialization_algorithm)
    return self._encode(data, algorithm)
constant[Serialize data]
variable[serialization_algorithm_id] assign[=] call[name[options]][constant[serialization_algorithm_id]]
if compare[name[serialization_algorithm_id] <ast.NotIn object at 0x7da2590d7190> name[self].serialization_algorithms] begin[:]
<ast.Raise object at 0x7da20c6abe80>
variable[serialization_algorithm] assign[=] call[name[self].serialization_algorithms][name[serialization_algorithm_id]]
variable[algorithm] assign[=] call[name[self]._get_algorithm_info, parameter[name[serialization_algorithm]]]
variable[data] assign[=] call[name[self]._encode, parameter[name[data], name[algorithm]]]
return[name[data]] | keyword[def] identifier[_serialize_data] ( identifier[self] , identifier[data] , identifier[options] ):
literal[string]
identifier[serialization_algorithm_id] = identifier[options] [ literal[string] ]
keyword[if] identifier[serialization_algorithm_id] keyword[not] keyword[in] identifier[self] . identifier[serialization_algorithms] :
keyword[raise] identifier[Exception] ( literal[string]
% identifier[serialization_algorithm_id] )
identifier[serialization_algorithm] = identifier[self] . identifier[serialization_algorithms] [ identifier[serialization_algorithm_id] ]
identifier[algorithm] = identifier[self] . identifier[_get_algorithm_info] ( identifier[serialization_algorithm] )
identifier[data] = identifier[self] . identifier[_encode] ( identifier[data] , identifier[algorithm] )
keyword[return] identifier[data] | def _serialize_data(self, data, options):
"""Serialize data"""
serialization_algorithm_id = options['serialization_algorithm_id']
if serialization_algorithm_id not in self.serialization_algorithms:
raise Exception('Unknown serialization algorithm id: %d' % serialization_algorithm_id) # depends on [control=['if'], data=['serialization_algorithm_id']]
serialization_algorithm = self.serialization_algorithms[serialization_algorithm_id]
algorithm = self._get_algorithm_info(serialization_algorithm)
data = self._encode(data, algorithm)
return data |
def find_by(self, values={}, **kwargs):
    """
    Returns a single record matching the criteria in ``values`` found in the model's table in the replica database.

    :param values: Criteria to find the record.
    :type values: dict
    :returns: an instance of the model.
    """
    try:
        rows = self.select(where=values, limit=1, **kwargs).to_dict(orient='records')
        return self(**rows[0])
    except IndexError:
        # No row matched the criteria.
        return None
constant[
Returns a single record matching the criteria in ``values`` found in the model's table in the replica database.
:param values: Criteria to find the record.
:type values: dict
:returns: an instance of the model.
]
<ast.Try object at 0x7da20e9b3e20> | keyword[def] identifier[find_by] ( identifier[self] , identifier[values] ={},** identifier[kwargs] ):
literal[string]
keyword[try] :
keyword[return] identifier[self] (
** identifier[self] . identifier[select] (
identifier[where] = identifier[values] ,
identifier[limit] = literal[int] ,
** identifier[kwargs]
). identifier[to_dict] ( identifier[orient] = literal[string] )[ literal[int] ]
)
keyword[except] identifier[IndexError] :
keyword[return] keyword[None] | def find_by(self, values={}, **kwargs):
"""
Returns a single record matching the criteria in ``values`` found in the model's table in the replica database.
:param values: Criteria to find the record.
:type values: dict
:returns: an instance of the model.
"""
try:
return self(**self.select(where=values, limit=1, **kwargs).to_dict(orient='records')[0]) # depends on [control=['try'], data=[]]
except IndexError:
return None # depends on [control=['except'], data=[]] |
def get_context_from_cmdln(args, desc="Run scriptworker"):
    """Create a Context object from args.

    Args:
        args (list): the commandline args. Generally sys.argv

    Returns:
        tuple: ``scriptworker.context.Context`` with populated config, and
            credentials frozendict
    """
    context = Context()
    parser = argparse.ArgumentParser(description=desc)
    parser.add_argument(
        "config_path",
        type=str,
        nargs="?",
        default="scriptworker.yaml",
        help="the path to the config file",
    )
    parsed = parser.parse_args(args)
    # Populate the config from the given (or default) YAML file and
    # reconfigure logging to match it.
    context.config, credentials = create_config(config_path=parsed.config_path)
    update_logging_config(context)
    return context, credentials
return context, credentials | def function[get_context_from_cmdln, parameter[args, desc]]:
constant[Create a Context object from args.
Args:
args (list): the commandline args. Generally sys.argv
Returns:
tuple: ``scriptworker.context.Context`` with populated config, and
credentials frozendict
]
variable[context] assign[=] call[name[Context], parameter[]]
variable[parser] assign[=] call[name[argparse].ArgumentParser, parameter[]]
call[name[parser].add_argument, parameter[constant[config_path]]]
variable[parsed_args] assign[=] call[name[parser].parse_args, parameter[name[args]]]
<ast.Tuple object at 0x7da18dc9bb80> assign[=] call[name[create_config], parameter[]]
call[name[update_logging_config], parameter[name[context]]]
return[tuple[[<ast.Name object at 0x7da18dc98d60>, <ast.Name object at 0x7da18dc9ac20>]]] | keyword[def] identifier[get_context_from_cmdln] ( identifier[args] , identifier[desc] = literal[string] ):
literal[string]
identifier[context] = identifier[Context] ()
identifier[parser] = identifier[argparse] . identifier[ArgumentParser] ( identifier[description] = identifier[desc] )
identifier[parser] . identifier[add_argument] (
literal[string] , identifier[type] = identifier[str] , identifier[nargs] = literal[string] , identifier[default] = literal[string] ,
identifier[help] = literal[string]
)
identifier[parsed_args] = identifier[parser] . identifier[parse_args] ( identifier[args] )
identifier[context] . identifier[config] , identifier[credentials] = identifier[create_config] ( identifier[config_path] = identifier[parsed_args] . identifier[config_path] )
identifier[update_logging_config] ( identifier[context] )
keyword[return] identifier[context] , identifier[credentials] | def get_context_from_cmdln(args, desc='Run scriptworker'):
"""Create a Context object from args.
Args:
args (list): the commandline args. Generally sys.argv
Returns:
tuple: ``scriptworker.context.Context`` with populated config, and
credentials frozendict
"""
context = Context()
parser = argparse.ArgumentParser(description=desc)
parser.add_argument('config_path', type=str, nargs='?', default='scriptworker.yaml', help='the path to the config file')
parsed_args = parser.parse_args(args)
(context.config, credentials) = create_config(config_path=parsed_args.config_path)
update_logging_config(context)
return (context, credentials) |
def record_magic(dct, magic_kind, magic_name, func):
    """Utility function to store a function as a magic of a specific kind.

    Parameters
    ----------
    dct : dict
        A dictionary with 'line' and 'cell' subdicts.
    magic_kind : str
        Kind of magic to be stored.
    magic_name : str
        Key to store the magic as.
    func : function
        Callable object to store.
    """
    # 'line_cell' registers the magic under both sub-dicts at once.
    kinds = ('line', 'cell') if magic_kind == 'line_cell' else (magic_kind,)
    for kind in kinds:
        dct[kind][magic_name] = func
dct[magic_kind][magic_name] = func | def function[record_magic, parameter[dct, magic_kind, magic_name, func]]:
constant[Utility function to store a function as a magic of a specific kind.
Parameters
----------
dct : dict
A dictionary with 'line' and 'cell' subdicts.
magic_kind : str
Kind of magic to be stored.
magic_name : str
Key to store the magic as.
func : function
Callable object to store.
]
if compare[name[magic_kind] equal[==] constant[line_cell]] begin[:]
call[call[name[dct]][constant[line]]][name[magic_name]] assign[=] name[func] | keyword[def] identifier[record_magic] ( identifier[dct] , identifier[magic_kind] , identifier[magic_name] , identifier[func] ):
literal[string]
keyword[if] identifier[magic_kind] == literal[string] :
identifier[dct] [ literal[string] ][ identifier[magic_name] ]= identifier[dct] [ literal[string] ][ identifier[magic_name] ]= identifier[func]
keyword[else] :
identifier[dct] [ identifier[magic_kind] ][ identifier[magic_name] ]= identifier[func] | def record_magic(dct, magic_kind, magic_name, func):
"""Utility function to store a function as a magic of a specific kind.
Parameters
----------
dct : dict
A dictionary with 'line' and 'cell' subdicts.
magic_kind : str
Kind of magic to be stored.
magic_name : str
Key to store the magic as.
func : function
Callable object to store.
"""
if magic_kind == 'line_cell':
dct['line'][magic_name] = dct['cell'][magic_name] = func # depends on [control=['if'], data=[]]
else:
dct[magic_kind][magic_name] = func |
def c(*args):
    r"""Imitates the ``c`` function from R.

    Since this whole library is aimed at re-creating in Python what R has
    already done so well, the ``c`` function was created to wrap
    ``numpy.concatenate`` and mimic the R functionality. Similar to R, this
    works with scalars, iterables, and any mix therein.

    Note that using the ``c`` function on multi-nested lists or iterables
    will fail!

    Examples
    --------
    Using ``c`` with varargs will yield a single array:

    >>> c(1, 2, 3, 4)
    array([1, 2, 3, 4])

    Using ``c`` with nested lists and scalars will also yield a single array:

    >>> c([1, 2], 4, c(5, 4))
    array([1, 2, 4, 5, 4])

    However, using ``c`` with multi-level lists will fail!

    >>> c([1, 2, 3], [[1, 2]])  # doctest: +SKIP
    ValueError: all the input arrays must have same number of dimensions

    References
    ----------
    .. [1] https://stat.ethz.ch/R-manual/R-devel/library/base/html/c.html
    """
    # R returns NULL for an empty call
    if not args:
        return None

    # A single argument needs no concatenation: convert an iterable
    # directly, or wrap a scalar before converting.
    if len(args) == 1:
        only = args[0]
        return np.asarray(only) if is_iterable(only) else np.asarray([only])

    # Promote scalars to one-element lists, then concatenate everything.
    # np.concatenate was benchmarked against a recursive-yield variant and
    # was far faster, so we keep the straightforward form.
    pieces = [item if is_iterable(item) else [item] for item in args]
    return np.concatenate(pieces)
constant[Imitates the ``c`` function from R.
Since this whole library is aimed at re-creating in
Python what R has already done so well, the ``c`` function was created to
wrap ``numpy.concatenate`` and mimic the R functionality. Similar to R,
this works with scalars, iterables, and any mix therein.
Note that using the ``c`` function on multi-nested lists or iterables
will fail!
Examples
--------
Using ``c`` with varargs will yield a single array:
>>> c(1, 2, 3, 4)
array([1, 2, 3, 4])
Using ``c`` with nested lists and scalars will also yield a single array:
>>> c([1, 2], 4, c(5, 4))
array([1, 2, 4, 5, 4])
However, using ``c`` with multi-level lists will fail!
>>> c([1, 2, 3], [[1, 2]]) # doctest: +SKIP
ValueError: all the input arrays must have same number of dimensions
References
----------
.. [1] https://stat.ethz.ch/R-manual/R-devel/library/base/html/c.html
]
if <ast.UnaryOp object at 0x7da1b1eb4550> begin[:]
return[constant[None]]
if compare[call[name[len], parameter[name[args]]] equal[==] constant[1]] begin[:]
variable[element] assign[=] call[name[args]][constant[0]]
if call[name[is_iterable], parameter[name[element]]] begin[:]
return[call[name[np].asarray, parameter[name[element]]]]
return[call[name[np].asarray, parameter[list[[<ast.Name object at 0x7da1b1d3aec0>]]]]]
return[call[name[np].concatenate, parameter[<ast.ListComp object at 0x7da1b1d396c0>]]] | keyword[def] identifier[c] (* identifier[args] ):
literal[string]
keyword[if] keyword[not] identifier[args] :
keyword[return] keyword[None]
keyword[if] identifier[len] ( identifier[args] )== literal[int] :
identifier[element] = identifier[args] [ literal[int] ]
keyword[if] identifier[is_iterable] ( identifier[element] ):
keyword[return] identifier[np] . identifier[asarray] ( identifier[element] )
keyword[return] identifier[np] . identifier[asarray] ([ identifier[element] ])
keyword[return] identifier[np] . identifier[concatenate] ([ identifier[a] keyword[if] identifier[is_iterable] ( identifier[a] ) keyword[else] [ identifier[a] ] keyword[for] identifier[a] keyword[in] identifier[args] ]) | def c(*args):
"""Imitates the ``c`` function from R.
Since this whole library is aimed at re-creating in
Python what R has already done so well, the ``c`` function was created to
wrap ``numpy.concatenate`` and mimic the R functionality. Similar to R,
this works with scalars, iterables, and any mix therein.
Note that using the ``c`` function on multi-nested lists or iterables
will fail!
Examples
--------
Using ``c`` with varargs will yield a single array:
>>> c(1, 2, 3, 4)
array([1, 2, 3, 4])
Using ``c`` with nested lists and scalars will also yield a single array:
>>> c([1, 2], 4, c(5, 4))
array([1, 2, 4, 5, 4])
However, using ``c`` with multi-level lists will fail!
>>> c([1, 2, 3], [[1, 2]]) # doctest: +SKIP
ValueError: all the input arrays must have same number of dimensions
References
----------
.. [1] https://stat.ethz.ch/R-manual/R-devel/library/base/html/c.html
"""
# R returns NULL for this
if not args:
return None # depends on [control=['if'], data=[]]
# just an array of len 1
if len(args) == 1:
element = args[0]
# if it's iterable, make it an array
if is_iterable(element):
return np.asarray(element) # depends on [control=['if'], data=[]]
# otherwise it's not iterable, put it in an array
return np.asarray([element]) # depends on [control=['if'], data=[]]
# np.concat all. This can be slow, as noted by numerous threads on
# numpy concat efficiency, however an alternative using recursive
# yields was tested and performed far worse:
#
# >>> def timeit(func, ntimes, *args):
# ... times = []
# ... for i in range(ntimes):
# ... start = time.time()
# ... func(*args)
# ... times.append(time.time() - start)
# ... arr = np.asarray(times)
# ... print("%s (%i times) - Mean: %.5f sec, "
# ... "Min: %.5f sec, Max: %.5f" % (func.__name__, ntimes,
# ... arr.mean(), arr.min(),
# ... arr.max()))
# >>> y = [np.arange(10000), range(500), (1000,), 100, np.arange(50000)]
# >>> timeit(c1, 100, *y)
# c1 (100 times) - Mean: 0.00009 sec, Min: 0.00006 sec, Max: 0.00065
# >>> timeit(c2, 100, *y)
# c2 (100 times) - Mean: 0.08708 sec, Min: 0.08273 sec, Max: 0.10115
#
# So we stick with c1, which is this variant.
return np.concatenate([a if is_iterable(a) else [a] for a in args]) |
def from_file(cls, filename):
"""Read the configuration parameters from the Yaml file filename."""
try:
with open(filename, "r") as fh:
return cls.from_dict(yaml.safe_load(fh))
except Exception as exc:
print("Error while reading TaskManager parameters from %s\n" % filename)
raise | def function[from_file, parameter[cls, filename]]:
constant[Read the configuration parameters from the Yaml file filename.]
<ast.Try object at 0x7da18c4cf4f0> | keyword[def] identifier[from_file] ( identifier[cls] , identifier[filename] ):
literal[string]
keyword[try] :
keyword[with] identifier[open] ( identifier[filename] , literal[string] ) keyword[as] identifier[fh] :
keyword[return] identifier[cls] . identifier[from_dict] ( identifier[yaml] . identifier[safe_load] ( identifier[fh] ))
keyword[except] identifier[Exception] keyword[as] identifier[exc] :
identifier[print] ( literal[string] % identifier[filename] )
keyword[raise] | def from_file(cls, filename):
"""Read the configuration parameters from the Yaml file filename."""
try:
with open(filename, 'r') as fh:
return cls.from_dict(yaml.safe_load(fh)) # depends on [control=['with'], data=['fh']] # depends on [control=['try'], data=[]]
except Exception as exc:
print('Error while reading TaskManager parameters from %s\n' % filename)
raise # depends on [control=['except'], data=[]] |
def get_message(self, block=False, timeout=None):
"""Removes and returns a RTMMessage from self._inbox
Args:
block(bool): if True block until a RTMMessage is available,
else it will return None when self._inbox is empty
timeout(int): it blocks at most timeout seconds
Returns:
RTMMessage if self._inbox is not empty, else None
"""
try:
message = self._inbox.get(block=block, timeout=timeout)
return message
except Exception:
return None | def function[get_message, parameter[self, block, timeout]]:
constant[Removes and returns a RTMMessage from self._inbox
Args:
block(bool): if True block until a RTMMessage is available,
else it will return None when self._inbox is empty
timeout(int): it blocks at most timeout seconds
Returns:
RTMMessage if self._inbox is not empty, else None
]
<ast.Try object at 0x7da1b2349c00> | keyword[def] identifier[get_message] ( identifier[self] , identifier[block] = keyword[False] , identifier[timeout] = keyword[None] ):
literal[string]
keyword[try] :
identifier[message] = identifier[self] . identifier[_inbox] . identifier[get] ( identifier[block] = identifier[block] , identifier[timeout] = identifier[timeout] )
keyword[return] identifier[message]
keyword[except] identifier[Exception] :
keyword[return] keyword[None] | def get_message(self, block=False, timeout=None):
"""Removes and returns a RTMMessage from self._inbox
Args:
block(bool): if True block until a RTMMessage is available,
else it will return None when self._inbox is empty
timeout(int): it blocks at most timeout seconds
Returns:
RTMMessage if self._inbox is not empty, else None
"""
try:
message = self._inbox.get(block=block, timeout=timeout)
return message # depends on [control=['try'], data=[]]
except Exception:
return None # depends on [control=['except'], data=[]] |
def help(self, msg):
"""
help [command] - displays available commands, or help message for given command
"""
commands = {}
for handler in self.irc.handlers:
if isinstance(handler, CommandHandler):
for command in handler.triggers:
method = getattr(handler, command)
if hasattr(method, '__doc__') and method.__doc__:
commands[command] = method.__doc__.strip()
else:
commands[command] = 'No help available for command: %s' % command
if len(msg.args) == 2:
if msg.args[1] not in commands:
message = 'Unknown command: %s' % msg.args[1]
else:
message = commands[msg.args[1]]
else:
message = 'Available commands: %s' % ', '.join(commands.keys())
if msg.event == EVT_PUBLIC:
self.irc.msg(msg.dst, message)
elif msg.event == EVT_PRIVATE:
self.irc.msg(msg.src, message)
elif msg.event == EVT_NOTICE:
self.irc.notice(msg.src, message) | def function[help, parameter[self, msg]]:
constant[
help [command] - displays available commands, or help message for given command
]
variable[commands] assign[=] dictionary[[], []]
for taget[name[handler]] in starred[name[self].irc.handlers] begin[:]
if call[name[isinstance], parameter[name[handler], name[CommandHandler]]] begin[:]
for taget[name[command]] in starred[name[handler].triggers] begin[:]
variable[method] assign[=] call[name[getattr], parameter[name[handler], name[command]]]
if <ast.BoolOp object at 0x7da20c6aab00> begin[:]
call[name[commands]][name[command]] assign[=] call[name[method].__doc__.strip, parameter[]]
if compare[call[name[len], parameter[name[msg].args]] equal[==] constant[2]] begin[:]
if compare[call[name[msg].args][constant[1]] <ast.NotIn object at 0x7da2590d7190> name[commands]] begin[:]
variable[message] assign[=] binary_operation[constant[Unknown command: %s] <ast.Mod object at 0x7da2590d6920> call[name[msg].args][constant[1]]]
if compare[name[msg].event equal[==] name[EVT_PUBLIC]] begin[:]
call[name[self].irc.msg, parameter[name[msg].dst, name[message]]] | keyword[def] identifier[help] ( identifier[self] , identifier[msg] ):
literal[string]
identifier[commands] ={}
keyword[for] identifier[handler] keyword[in] identifier[self] . identifier[irc] . identifier[handlers] :
keyword[if] identifier[isinstance] ( identifier[handler] , identifier[CommandHandler] ):
keyword[for] identifier[command] keyword[in] identifier[handler] . identifier[triggers] :
identifier[method] = identifier[getattr] ( identifier[handler] , identifier[command] )
keyword[if] identifier[hasattr] ( identifier[method] , literal[string] ) keyword[and] identifier[method] . identifier[__doc__] :
identifier[commands] [ identifier[command] ]= identifier[method] . identifier[__doc__] . identifier[strip] ()
keyword[else] :
identifier[commands] [ identifier[command] ]= literal[string] % identifier[command]
keyword[if] identifier[len] ( identifier[msg] . identifier[args] )== literal[int] :
keyword[if] identifier[msg] . identifier[args] [ literal[int] ] keyword[not] keyword[in] identifier[commands] :
identifier[message] = literal[string] % identifier[msg] . identifier[args] [ literal[int] ]
keyword[else] :
identifier[message] = identifier[commands] [ identifier[msg] . identifier[args] [ literal[int] ]]
keyword[else] :
identifier[message] = literal[string] % literal[string] . identifier[join] ( identifier[commands] . identifier[keys] ())
keyword[if] identifier[msg] . identifier[event] == identifier[EVT_PUBLIC] :
identifier[self] . identifier[irc] . identifier[msg] ( identifier[msg] . identifier[dst] , identifier[message] )
keyword[elif] identifier[msg] . identifier[event] == identifier[EVT_PRIVATE] :
identifier[self] . identifier[irc] . identifier[msg] ( identifier[msg] . identifier[src] , identifier[message] )
keyword[elif] identifier[msg] . identifier[event] == identifier[EVT_NOTICE] :
identifier[self] . identifier[irc] . identifier[notice] ( identifier[msg] . identifier[src] , identifier[message] ) | def help(self, msg):
"""
help [command] - displays available commands, or help message for given command
"""
commands = {}
for handler in self.irc.handlers:
if isinstance(handler, CommandHandler):
for command in handler.triggers:
method = getattr(handler, command)
if hasattr(method, '__doc__') and method.__doc__:
commands[command] = method.__doc__.strip() # depends on [control=['if'], data=[]]
else:
commands[command] = 'No help available for command: %s' % command # depends on [control=['for'], data=['command']] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['handler']]
if len(msg.args) == 2:
if msg.args[1] not in commands:
message = 'Unknown command: %s' % msg.args[1] # depends on [control=['if'], data=[]]
else:
message = commands[msg.args[1]] # depends on [control=['if'], data=[]]
else:
message = 'Available commands: %s' % ', '.join(commands.keys())
if msg.event == EVT_PUBLIC:
self.irc.msg(msg.dst, message) # depends on [control=['if'], data=[]]
elif msg.event == EVT_PRIVATE:
self.irc.msg(msg.src, message) # depends on [control=['if'], data=[]]
elif msg.event == EVT_NOTICE:
self.irc.notice(msg.src, message) # depends on [control=['if'], data=[]] |
def get_address_by_id(cls, address_id, **kwargs):
"""Find Address
Return single instance of Address by its ID.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.get_address_by_id(address_id, async=True)
>>> result = thread.get()
:param async bool
:param str address_id: ID of address to return (required)
:return: Address
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return cls._get_address_by_id_with_http_info(address_id, **kwargs)
else:
(data) = cls._get_address_by_id_with_http_info(address_id, **kwargs)
return data | def function[get_address_by_id, parameter[cls, address_id]]:
constant[Find Address
Return single instance of Address by its ID.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.get_address_by_id(address_id, async=True)
>>> result = thread.get()
:param async bool
:param str address_id: ID of address to return (required)
:return: Address
If the method is called asynchronously,
returns the request thread.
]
call[name[kwargs]][constant[_return_http_data_only]] assign[=] constant[True]
if call[name[kwargs].get, parameter[constant[async]]] begin[:]
return[call[name[cls]._get_address_by_id_with_http_info, parameter[name[address_id]]]] | keyword[def] identifier[get_address_by_id] ( identifier[cls] , identifier[address_id] ,** identifier[kwargs] ):
literal[string]
identifier[kwargs] [ literal[string] ]= keyword[True]
keyword[if] identifier[kwargs] . identifier[get] ( literal[string] ):
keyword[return] identifier[cls] . identifier[_get_address_by_id_with_http_info] ( identifier[address_id] ,** identifier[kwargs] )
keyword[else] :
( identifier[data] )= identifier[cls] . identifier[_get_address_by_id_with_http_info] ( identifier[address_id] ,** identifier[kwargs] )
keyword[return] identifier[data] | def get_address_by_id(cls, address_id, **kwargs):
"""Find Address
Return single instance of Address by its ID.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.get_address_by_id(address_id, async=True)
>>> result = thread.get()
:param async bool
:param str address_id: ID of address to return (required)
:return: Address
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return cls._get_address_by_id_with_http_info(address_id, **kwargs) # depends on [control=['if'], data=[]]
else:
data = cls._get_address_by_id_with_http_info(address_id, **kwargs)
return data |
def set_value(dct, key, value):
"""Set a value in a dict. If `key` contains a '.', it is assumed
be a path (i.e. dot-delimited string) to the value's location.
::
>>> d = {}
>>> set_value(d, 'foo.bar', 42)
>>> d
{'foo': {'bar': 42}}
"""
if '.' in key:
head, rest = key.split('.', 1)
target = dct.setdefault(head, {})
if not isinstance(target, dict):
raise ValueError(
'Cannot set {key} in {head} '
'due to existing value: {target}'.format(key=key, head=head, target=target),
)
set_value(target, rest, value)
else:
dct[key] = value | def function[set_value, parameter[dct, key, value]]:
constant[Set a value in a dict. If `key` contains a '.', it is assumed
be a path (i.e. dot-delimited string) to the value's location.
::
>>> d = {}
>>> set_value(d, 'foo.bar', 42)
>>> d
{'foo': {'bar': 42}}
]
if compare[constant[.] in name[key]] begin[:]
<ast.Tuple object at 0x7da18f58f4f0> assign[=] call[name[key].split, parameter[constant[.], constant[1]]]
variable[target] assign[=] call[name[dct].setdefault, parameter[name[head], dictionary[[], []]]]
if <ast.UnaryOp object at 0x7da18f58cf40> begin[:]
<ast.Raise object at 0x7da18f58cd60>
call[name[set_value], parameter[name[target], name[rest], name[value]]] | keyword[def] identifier[set_value] ( identifier[dct] , identifier[key] , identifier[value] ):
literal[string]
keyword[if] literal[string] keyword[in] identifier[key] :
identifier[head] , identifier[rest] = identifier[key] . identifier[split] ( literal[string] , literal[int] )
identifier[target] = identifier[dct] . identifier[setdefault] ( identifier[head] ,{})
keyword[if] keyword[not] identifier[isinstance] ( identifier[target] , identifier[dict] ):
keyword[raise] identifier[ValueError] (
literal[string]
literal[string] . identifier[format] ( identifier[key] = identifier[key] , identifier[head] = identifier[head] , identifier[target] = identifier[target] ),
)
identifier[set_value] ( identifier[target] , identifier[rest] , identifier[value] )
keyword[else] :
identifier[dct] [ identifier[key] ]= identifier[value] | def set_value(dct, key, value):
"""Set a value in a dict. If `key` contains a '.', it is assumed
be a path (i.e. dot-delimited string) to the value's location.
::
>>> d = {}
>>> set_value(d, 'foo.bar', 42)
>>> d
{'foo': {'bar': 42}}
"""
if '.' in key:
(head, rest) = key.split('.', 1)
target = dct.setdefault(head, {})
if not isinstance(target, dict):
raise ValueError('Cannot set {key} in {head} due to existing value: {target}'.format(key=key, head=head, target=target)) # depends on [control=['if'], data=[]]
set_value(target, rest, value) # depends on [control=['if'], data=['key']]
else:
dct[key] = value |
def cli(id):
"""Analyse an OpenStreetMap changeset."""
ch = Analyse(id)
ch.full_analysis()
click.echo(
'Created: %s. Modified: %s. Deleted: %s' % (ch.create, ch.modify, ch.delete)
)
if ch.is_suspect:
click.echo('The changeset {} is suspect! Reasons: {}'.format(
id,
', '.join(ch.suspicion_reasons)
))
else:
click.echo('The changeset %s is not suspect!' % id) | def function[cli, parameter[id]]:
constant[Analyse an OpenStreetMap changeset.]
variable[ch] assign[=] call[name[Analyse], parameter[name[id]]]
call[name[ch].full_analysis, parameter[]]
call[name[click].echo, parameter[binary_operation[constant[Created: %s. Modified: %s. Deleted: %s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Attribute object at 0x7da20e954700>, <ast.Attribute object at 0x7da20e956dd0>, <ast.Attribute object at 0x7da20e9546d0>]]]]]
if name[ch].is_suspect begin[:]
call[name[click].echo, parameter[call[constant[The changeset {} is suspect! Reasons: {}].format, parameter[name[id], call[constant[, ].join, parameter[name[ch].suspicion_reasons]]]]]] | keyword[def] identifier[cli] ( identifier[id] ):
literal[string]
identifier[ch] = identifier[Analyse] ( identifier[id] )
identifier[ch] . identifier[full_analysis] ()
identifier[click] . identifier[echo] (
literal[string] %( identifier[ch] . identifier[create] , identifier[ch] . identifier[modify] , identifier[ch] . identifier[delete] )
)
keyword[if] identifier[ch] . identifier[is_suspect] :
identifier[click] . identifier[echo] ( literal[string] . identifier[format] (
identifier[id] ,
literal[string] . identifier[join] ( identifier[ch] . identifier[suspicion_reasons] )
))
keyword[else] :
identifier[click] . identifier[echo] ( literal[string] % identifier[id] ) | def cli(id):
"""Analyse an OpenStreetMap changeset."""
ch = Analyse(id)
ch.full_analysis()
click.echo('Created: %s. Modified: %s. Deleted: %s' % (ch.create, ch.modify, ch.delete))
if ch.is_suspect:
click.echo('The changeset {} is suspect! Reasons: {}'.format(id, ', '.join(ch.suspicion_reasons))) # depends on [control=['if'], data=[]]
else:
click.echo('The changeset %s is not suspect!' % id) |
def formalize(self):
'''
Formalize the association and expose referential attributes
on instances.
'''
source_class = self.source_link.to_metaclass
target_class = self.target_link.to_metaclass
source_class.referential_attributes |= set(self.source_keys)
target_class.identifying_attributes |= set(self.target_keys)
def fget(inst, ref_name, alt_prop):
other_inst = self.target_link.navigate_one(inst)
if other_inst is None and alt_prop:
return alt_prop.fget(inst)
return getattr(other_inst, ref_name, None)
def fset(inst, value, name, ref_name, alt_prop):
kind = get_metaclass(inst).kind
raise MetaException('%s.%s is a referential attribute '\
'and cannot be assigned directly'% (kind, name))
#other_inst = self.target_link.navigate_one(inst)
#if other_inst is None and alt_prop:
# return alt_prop.fset(inst, value)
#
#elif other_inst:
# return setattr(other_inst, ref_name, value)
for ref_key, primary_key in zip(self.source_keys, self.target_keys):
prop = getattr(source_class.clazz, ref_key, None)
prop = property(partial(fget, ref_name=primary_key, alt_prop=prop),
partial(fset, name=ref_key, ref_name=primary_key, alt_prop=prop))
setattr(source_class.clazz, ref_key, prop) | def function[formalize, parameter[self]]:
constant[
Formalize the association and expose referential attributes
on instances.
]
variable[source_class] assign[=] name[self].source_link.to_metaclass
variable[target_class] assign[=] name[self].target_link.to_metaclass
<ast.AugAssign object at 0x7da2054a6d70>
<ast.AugAssign object at 0x7da2054a5090>
def function[fget, parameter[inst, ref_name, alt_prop]]:
variable[other_inst] assign[=] call[name[self].target_link.navigate_one, parameter[name[inst]]]
if <ast.BoolOp object at 0x7da2054a5930> begin[:]
return[call[name[alt_prop].fget, parameter[name[inst]]]]
return[call[name[getattr], parameter[name[other_inst], name[ref_name], constant[None]]]]
def function[fset, parameter[inst, value, name, ref_name, alt_prop]]:
variable[kind] assign[=] call[name[get_metaclass], parameter[name[inst]]].kind
<ast.Raise object at 0x7da2054a62c0>
for taget[tuple[[<ast.Name object at 0x7da2054a41c0>, <ast.Name object at 0x7da2054a6770>]]] in starred[call[name[zip], parameter[name[self].source_keys, name[self].target_keys]]] begin[:]
variable[prop] assign[=] call[name[getattr], parameter[name[source_class].clazz, name[ref_key], constant[None]]]
variable[prop] assign[=] call[name[property], parameter[call[name[partial], parameter[name[fget]]], call[name[partial], parameter[name[fset]]]]]
call[name[setattr], parameter[name[source_class].clazz, name[ref_key], name[prop]]] | keyword[def] identifier[formalize] ( identifier[self] ):
literal[string]
identifier[source_class] = identifier[self] . identifier[source_link] . identifier[to_metaclass]
identifier[target_class] = identifier[self] . identifier[target_link] . identifier[to_metaclass]
identifier[source_class] . identifier[referential_attributes] |= identifier[set] ( identifier[self] . identifier[source_keys] )
identifier[target_class] . identifier[identifying_attributes] |= identifier[set] ( identifier[self] . identifier[target_keys] )
keyword[def] identifier[fget] ( identifier[inst] , identifier[ref_name] , identifier[alt_prop] ):
identifier[other_inst] = identifier[self] . identifier[target_link] . identifier[navigate_one] ( identifier[inst] )
keyword[if] identifier[other_inst] keyword[is] keyword[None] keyword[and] identifier[alt_prop] :
keyword[return] identifier[alt_prop] . identifier[fget] ( identifier[inst] )
keyword[return] identifier[getattr] ( identifier[other_inst] , identifier[ref_name] , keyword[None] )
keyword[def] identifier[fset] ( identifier[inst] , identifier[value] , identifier[name] , identifier[ref_name] , identifier[alt_prop] ):
identifier[kind] = identifier[get_metaclass] ( identifier[inst] ). identifier[kind]
keyword[raise] identifier[MetaException] ( literal[string] literal[string] %( identifier[kind] , identifier[name] ))
keyword[for] identifier[ref_key] , identifier[primary_key] keyword[in] identifier[zip] ( identifier[self] . identifier[source_keys] , identifier[self] . identifier[target_keys] ):
identifier[prop] = identifier[getattr] ( identifier[source_class] . identifier[clazz] , identifier[ref_key] , keyword[None] )
identifier[prop] = identifier[property] ( identifier[partial] ( identifier[fget] , identifier[ref_name] = identifier[primary_key] , identifier[alt_prop] = identifier[prop] ),
identifier[partial] ( identifier[fset] , identifier[name] = identifier[ref_key] , identifier[ref_name] = identifier[primary_key] , identifier[alt_prop] = identifier[prop] ))
identifier[setattr] ( identifier[source_class] . identifier[clazz] , identifier[ref_key] , identifier[prop] ) | def formalize(self):
"""
Formalize the association and expose referential attributes
on instances.
"""
source_class = self.source_link.to_metaclass
target_class = self.target_link.to_metaclass
source_class.referential_attributes |= set(self.source_keys)
target_class.identifying_attributes |= set(self.target_keys)
def fget(inst, ref_name, alt_prop):
other_inst = self.target_link.navigate_one(inst)
if other_inst is None and alt_prop:
return alt_prop.fget(inst) # depends on [control=['if'], data=[]]
return getattr(other_inst, ref_name, None)
def fset(inst, value, name, ref_name, alt_prop):
kind = get_metaclass(inst).kind
raise MetaException('%s.%s is a referential attribute and cannot be assigned directly' % (kind, name))
#other_inst = self.target_link.navigate_one(inst)
#if other_inst is None and alt_prop:
# return alt_prop.fset(inst, value)
#
#elif other_inst:
# return setattr(other_inst, ref_name, value)
for (ref_key, primary_key) in zip(self.source_keys, self.target_keys):
prop = getattr(source_class.clazz, ref_key, None)
prop = property(partial(fget, ref_name=primary_key, alt_prop=prop), partial(fset, name=ref_key, ref_name=primary_key, alt_prop=prop))
setattr(source_class.clazz, ref_key, prop) # depends on [control=['for'], data=[]] |
def _generate_non_lastnames_variations(non_lastnames):
"""Generate variations for all non-lastnames.
E.g. For 'John Richard', this method generates: [
'John', 'J', 'Richard', 'R', 'John Richard', 'John R', 'J Richard', 'J R',
]
"""
if not non_lastnames:
return []
# Generate name transformations in place for all non lastnames. Transformations include:
# 1. Drop non last name, 2. use initial, 3. use full non lastname
for idx, non_lastname in enumerate(non_lastnames):
non_lastnames[idx] = (u'', non_lastname[0], non_lastname)
# Generate the cartesian product of the transformed non lastnames and flatten them.
return [
(u' '.join(var_elem for var_elem in variation if var_elem)).strip()
for variation in product(*non_lastnames)
] | def function[_generate_non_lastnames_variations, parameter[non_lastnames]]:
constant[Generate variations for all non-lastnames.
E.g. For 'John Richard', this method generates: [
'John', 'J', 'Richard', 'R', 'John Richard', 'John R', 'J Richard', 'J R',
]
]
if <ast.UnaryOp object at 0x7da207f9a560> begin[:]
return[list[[]]]
for taget[tuple[[<ast.Name object at 0x7da207f99210>, <ast.Name object at 0x7da207f99f30>]]] in starred[call[name[enumerate], parameter[name[non_lastnames]]]] begin[:]
call[name[non_lastnames]][name[idx]] assign[=] tuple[[<ast.Constant object at 0x7da207f99540>, <ast.Subscript object at 0x7da207f98c70>, <ast.Name object at 0x7da1b2345b70>]]
return[<ast.ListComp object at 0x7da1b23476a0>] | keyword[def] identifier[_generate_non_lastnames_variations] ( identifier[non_lastnames] ):
literal[string]
keyword[if] keyword[not] identifier[non_lastnames] :
keyword[return] []
keyword[for] identifier[idx] , identifier[non_lastname] keyword[in] identifier[enumerate] ( identifier[non_lastnames] ):
identifier[non_lastnames] [ identifier[idx] ]=( literal[string] , identifier[non_lastname] [ literal[int] ], identifier[non_lastname] )
keyword[return] [
( literal[string] . identifier[join] ( identifier[var_elem] keyword[for] identifier[var_elem] keyword[in] identifier[variation] keyword[if] identifier[var_elem] )). identifier[strip] ()
keyword[for] identifier[variation] keyword[in] identifier[product] (* identifier[non_lastnames] )
] | def _generate_non_lastnames_variations(non_lastnames):
"""Generate variations for all non-lastnames.
E.g. For 'John Richard', this method generates: [
'John', 'J', 'Richard', 'R', 'John Richard', 'John R', 'J Richard', 'J R',
]
"""
if not non_lastnames:
return [] # depends on [control=['if'], data=[]]
# Generate name transformations in place for all non lastnames. Transformations include:
# 1. Drop non last name, 2. use initial, 3. use full non lastname
for (idx, non_lastname) in enumerate(non_lastnames):
non_lastnames[idx] = (u'', non_lastname[0], non_lastname) # depends on [control=['for'], data=[]]
# Generate the cartesian product of the transformed non lastnames and flatten them.
return [u' '.join((var_elem for var_elem in variation if var_elem)).strip() for variation in product(*non_lastnames)] |
def id_to_word(self, word_id):
"""Returns the word string of an integer word id."""
if word_id >= len(self.reverse_vocab):
return self.reverse_vocab[self.unk_id]
else:
return self.reverse_vocab[word_id] | def function[id_to_word, parameter[self, word_id]]:
constant[Returns the word string of an integer word id.]
if compare[name[word_id] greater_or_equal[>=] call[name[len], parameter[name[self].reverse_vocab]]] begin[:]
return[call[name[self].reverse_vocab][name[self].unk_id]] | keyword[def] identifier[id_to_word] ( identifier[self] , identifier[word_id] ):
literal[string]
keyword[if] identifier[word_id] >= identifier[len] ( identifier[self] . identifier[reverse_vocab] ):
keyword[return] identifier[self] . identifier[reverse_vocab] [ identifier[self] . identifier[unk_id] ]
keyword[else] :
keyword[return] identifier[self] . identifier[reverse_vocab] [ identifier[word_id] ] | def id_to_word(self, word_id):
"""Returns the word string of an integer word id."""
if word_id >= len(self.reverse_vocab):
return self.reverse_vocab[self.unk_id] # depends on [control=['if'], data=[]]
else:
return self.reverse_vocab[word_id] |
def load(self) -> None:
"""Checks existence of the model file, loads the model if the file exists"""
# Checks presence of the model files
if self.load_path.exists():
path = str(self.load_path.resolve())
log.info('[loading model from {}]'.format(path))
self._net.load(path) | def function[load, parameter[self]]:
constant[Checks existence of the model file, loads the model if the file exists]
if call[name[self].load_path.exists, parameter[]] begin[:]
variable[path] assign[=] call[name[str], parameter[call[name[self].load_path.resolve, parameter[]]]]
call[name[log].info, parameter[call[constant[[loading model from {}]].format, parameter[name[path]]]]]
call[name[self]._net.load, parameter[name[path]]] | keyword[def] identifier[load] ( identifier[self] )-> keyword[None] :
literal[string]
keyword[if] identifier[self] . identifier[load_path] . identifier[exists] ():
identifier[path] = identifier[str] ( identifier[self] . identifier[load_path] . identifier[resolve] ())
identifier[log] . identifier[info] ( literal[string] . identifier[format] ( identifier[path] ))
identifier[self] . identifier[_net] . identifier[load] ( identifier[path] ) | def load(self) -> None:
"""Checks existence of the model file, loads the model if the file exists"""
# Checks presence of the model files
if self.load_path.exists():
path = str(self.load_path.resolve())
log.info('[loading model from {}]'.format(path))
self._net.load(path) # depends on [control=['if'], data=[]] |
def get_lib_volume_mounts(base_lib_name, assembled_specs):
""" Returns a list of the formatted volume specs for a lib"""
volumes = [_get_lib_repo_volume_mount(assembled_specs['libs'][base_lib_name])]
volumes.append(get_command_files_volume_mount(base_lib_name, test=True))
for lib_name in assembled_specs['libs'][base_lib_name]['depends']['libs']:
lib_spec = assembled_specs['libs'][lib_name]
volumes.append(_get_lib_repo_volume_mount(lib_spec))
return volumes | def function[get_lib_volume_mounts, parameter[base_lib_name, assembled_specs]]:
constant[ Returns a list of the formatted volume specs for a lib]
variable[volumes] assign[=] list[[<ast.Call object at 0x7da20e962110>]]
call[name[volumes].append, parameter[call[name[get_command_files_volume_mount], parameter[name[base_lib_name]]]]]
for taget[name[lib_name]] in starred[call[call[call[call[name[assembled_specs]][constant[libs]]][name[base_lib_name]]][constant[depends]]][constant[libs]]] begin[:]
variable[lib_spec] assign[=] call[call[name[assembled_specs]][constant[libs]]][name[lib_name]]
call[name[volumes].append, parameter[call[name[_get_lib_repo_volume_mount], parameter[name[lib_spec]]]]]
return[name[volumes]] | keyword[def] identifier[get_lib_volume_mounts] ( identifier[base_lib_name] , identifier[assembled_specs] ):
literal[string]
identifier[volumes] =[ identifier[_get_lib_repo_volume_mount] ( identifier[assembled_specs] [ literal[string] ][ identifier[base_lib_name] ])]
identifier[volumes] . identifier[append] ( identifier[get_command_files_volume_mount] ( identifier[base_lib_name] , identifier[test] = keyword[True] ))
keyword[for] identifier[lib_name] keyword[in] identifier[assembled_specs] [ literal[string] ][ identifier[base_lib_name] ][ literal[string] ][ literal[string] ]:
identifier[lib_spec] = identifier[assembled_specs] [ literal[string] ][ identifier[lib_name] ]
identifier[volumes] . identifier[append] ( identifier[_get_lib_repo_volume_mount] ( identifier[lib_spec] ))
keyword[return] identifier[volumes] | def get_lib_volume_mounts(base_lib_name, assembled_specs):
""" Returns a list of the formatted volume specs for a lib"""
volumes = [_get_lib_repo_volume_mount(assembled_specs['libs'][base_lib_name])]
volumes.append(get_command_files_volume_mount(base_lib_name, test=True))
for lib_name in assembled_specs['libs'][base_lib_name]['depends']['libs']:
lib_spec = assembled_specs['libs'][lib_name]
volumes.append(_get_lib_repo_volume_mount(lib_spec)) # depends on [control=['for'], data=['lib_name']]
return volumes |
def transaction_error_code(self):
"""The machine-readable error code for a transaction error."""
error = self.response_doc.find('transaction_error')
if error is not None:
code = error.find('error_code')
if code is not None:
return code.text | def function[transaction_error_code, parameter[self]]:
constant[The machine-readable error code for a transaction error.]
variable[error] assign[=] call[name[self].response_doc.find, parameter[constant[transaction_error]]]
if compare[name[error] is_not constant[None]] begin[:]
variable[code] assign[=] call[name[error].find, parameter[constant[error_code]]]
if compare[name[code] is_not constant[None]] begin[:]
return[name[code].text] | keyword[def] identifier[transaction_error_code] ( identifier[self] ):
literal[string]
identifier[error] = identifier[self] . identifier[response_doc] . identifier[find] ( literal[string] )
keyword[if] identifier[error] keyword[is] keyword[not] keyword[None] :
identifier[code] = identifier[error] . identifier[find] ( literal[string] )
keyword[if] identifier[code] keyword[is] keyword[not] keyword[None] :
keyword[return] identifier[code] . identifier[text] | def transaction_error_code(self):
"""The machine-readable error code for a transaction error."""
error = self.response_doc.find('transaction_error')
if error is not None:
code = error.find('error_code')
if code is not None:
return code.text # depends on [control=['if'], data=['code']] # depends on [control=['if'], data=['error']] |
def create_shared_data(self, name=None, **kwargs):
"""Calls the corresponding function of the shared data item"""
if name is None:
item = self.f_get()
else:
item = self.f_get(name)
return item.create_shared_data(**kwargs) | def function[create_shared_data, parameter[self, name]]:
constant[Calls the corresponding function of the shared data item]
if compare[name[name] is constant[None]] begin[:]
variable[item] assign[=] call[name[self].f_get, parameter[]]
return[call[name[item].create_shared_data, parameter[]]] | keyword[def] identifier[create_shared_data] ( identifier[self] , identifier[name] = keyword[None] ,** identifier[kwargs] ):
literal[string]
keyword[if] identifier[name] keyword[is] keyword[None] :
identifier[item] = identifier[self] . identifier[f_get] ()
keyword[else] :
identifier[item] = identifier[self] . identifier[f_get] ( identifier[name] )
keyword[return] identifier[item] . identifier[create_shared_data] (** identifier[kwargs] ) | def create_shared_data(self, name=None, **kwargs):
"""Calls the corresponding function of the shared data item"""
if name is None:
item = self.f_get() # depends on [control=['if'], data=[]]
else:
item = self.f_get(name)
return item.create_shared_data(**kwargs) |
async def _inform_watchdog(self):
    """Inform the watchdog of activity.

    Resets the watchdog timer: the currently scheduled watchdog task is
    cancelled and, once the cancellation has taken effect, a fresh
    watchdog task is scheduled with the full ``self._watchdog_timeout``
    again.  All of this happens under ``self._wd_lock`` so concurrent
    resets and cancels cannot race on ``self._watchdog_task``.
    """
    async with self._wd_lock:
        if self._watchdog_task is None:
            # Check within the Lock to deal with external cancel_watchdog
            # calls with queued _inform_watchdog tasks.
            return
        self._watchdog_task.cancel()
        try:
            # Await the cancelled task so the old watchdog has fully
            # stopped before a replacement is armed; this keeps at most
            # one watchdog task alive at any time.
            await self._watchdog_task
        except asyncio.CancelledError:
            # Expected: the awaited task was cancelled just above.  Only
            # re-arm on this path — a task that ended some other way is
            # not replaced.
            self._watchdog_task = self.loop.create_task(self._watchdog(
                self._watchdog_timeout))
literal[string]
keyword[async] keyword[with] identifier[self] . identifier[_wd_lock] :
keyword[if] identifier[self] . identifier[_watchdog_task] keyword[is] keyword[None] :
keyword[return]
identifier[self] . identifier[_watchdog_task] . identifier[cancel] ()
keyword[try] :
keyword[await] identifier[self] . identifier[_watchdog_task]
keyword[except] identifier[asyncio] . identifier[CancelledError] :
identifier[self] . identifier[_watchdog_task] = identifier[self] . identifier[loop] . identifier[create_task] ( identifier[self] . identifier[_watchdog] (
identifier[self] . identifier[_watchdog_timeout] )) | async def _inform_watchdog(self):
"""Inform the watchdog of activity."""
async with self._wd_lock:
if self._watchdog_task is None:
# Check within the Lock to deal with external cancel_watchdog
# calls with queued _inform_watchdog tasks.
return # depends on [control=['if'], data=[]]
self._watchdog_task.cancel()
try:
await self._watchdog_task # depends on [control=['try'], data=[]]
except asyncio.CancelledError:
self._watchdog_task = self.loop.create_task(self._watchdog(self._watchdog_timeout)) # depends on [control=['except'], data=[]] |
def is_integer(dtype):
    """Returns whether this is a (non-quantized) integer type."""
    dtype = tf.as_dtype(dtype)
    try:
        # TF dtypes expose an ``is_integer`` property directly.
        return dtype.is_integer
    except AttributeError:
        # Fall back to NumPy's type lattice for dtypes lacking the property.
        return np.issubdtype(np.dtype(dtype), np.integer)
constant[Returns whether this is a (non-quantized) integer type.]
variable[dtype] assign[=] call[name[tf].as_dtype, parameter[name[dtype]]]
if call[name[hasattr], parameter[name[dtype], constant[is_integer]]] begin[:]
return[name[dtype].is_integer]
return[call[name[np].issubdtype, parameter[call[name[np].dtype, parameter[name[dtype]]], name[np].integer]]] | keyword[def] identifier[is_integer] ( identifier[dtype] ):
literal[string]
identifier[dtype] = identifier[tf] . identifier[as_dtype] ( identifier[dtype] )
keyword[if] identifier[hasattr] ( identifier[dtype] , literal[string] ):
keyword[return] identifier[dtype] . identifier[is_integer]
keyword[return] identifier[np] . identifier[issubdtype] ( identifier[np] . identifier[dtype] ( identifier[dtype] ), identifier[np] . identifier[integer] ) | def is_integer(dtype):
"""Returns whether this is a (non-quantized) integer type."""
dtype = tf.as_dtype(dtype)
if hasattr(dtype, 'is_integer'):
return dtype.is_integer # depends on [control=['if'], data=[]]
return np.issubdtype(np.dtype(dtype), np.integer) |
def fetch(url, binary, outfile, noprint, rendered):
    '''
    Fetch a specified URL's content, and output it to the console.
    '''
    # Drive a headless chrome instance to retrieve the page content.
    with chrome_context.ChromeContext(binary=binary) as cr:
        resp = cr.blocking_navigate_and_get_source(url)
        if rendered:
            # Replace the raw source with the DOM as rendered by the browser.
            resp['content'] = cr.get_rendered_page_source()
            resp['binary'] = False
            resp['mimie'] = 'text/html'
    if not noprint:
        if resp['binary'] is False:
            print(resp['content'])
        else:
            print("Response is a binary file")
            print("Cannot print!")
    if outfile:
        # Always write bytes; text content is UTF-8 encoded first.
        with open(outfile, "wb") as fp:
            payload = resp['content']
            if not resp['binary']:
                payload = payload.encode("UTF-8")
            fp.write(payload)
constant[
Fetch a specified URL's content, and output it to the console.
]
with call[name[chrome_context].ChromeContext, parameter[]] begin[:]
variable[resp] assign[=] call[name[cr].blocking_navigate_and_get_source, parameter[name[url]]]
if name[rendered] begin[:]
call[name[resp]][constant[content]] assign[=] call[name[cr].get_rendered_page_source, parameter[]]
call[name[resp]][constant[binary]] assign[=] constant[False]
call[name[resp]][constant[mimie]] assign[=] constant[text/html]
if <ast.UnaryOp object at 0x7da1b1028220> begin[:]
if compare[call[name[resp]][constant[binary]] is constant[False]] begin[:]
call[name[print], parameter[call[name[resp]][constant[content]]]]
if name[outfile] begin[:]
with call[name[open], parameter[name[outfile], constant[wb]]] begin[:]
if call[name[resp]][constant[binary]] begin[:]
call[name[fp].write, parameter[call[name[resp]][constant[content]]]] | keyword[def] identifier[fetch] ( identifier[url] , identifier[binary] , identifier[outfile] , identifier[noprint] , identifier[rendered] ):
literal[string]
keyword[with] identifier[chrome_context] . identifier[ChromeContext] ( identifier[binary] = identifier[binary] ) keyword[as] identifier[cr] :
identifier[resp] = identifier[cr] . identifier[blocking_navigate_and_get_source] ( identifier[url] )
keyword[if] identifier[rendered] :
identifier[resp] [ literal[string] ]= identifier[cr] . identifier[get_rendered_page_source] ()
identifier[resp] [ literal[string] ]= keyword[False]
identifier[resp] [ literal[string] ]= literal[string]
keyword[if] keyword[not] identifier[noprint] :
keyword[if] identifier[resp] [ literal[string] ] keyword[is] keyword[False] :
identifier[print] ( identifier[resp] [ literal[string] ])
keyword[else] :
identifier[print] ( literal[string] )
identifier[print] ( literal[string] )
keyword[if] identifier[outfile] :
keyword[with] identifier[open] ( identifier[outfile] , literal[string] ) keyword[as] identifier[fp] :
keyword[if] identifier[resp] [ literal[string] ]:
identifier[fp] . identifier[write] ( identifier[resp] [ literal[string] ])
keyword[else] :
identifier[fp] . identifier[write] ( identifier[resp] [ literal[string] ]. identifier[encode] ( literal[string] )) | def fetch(url, binary, outfile, noprint, rendered):
"""
Fetch a specified URL's content, and output it to the console.
"""
with chrome_context.ChromeContext(binary=binary) as cr:
resp = cr.blocking_navigate_and_get_source(url)
if rendered:
resp['content'] = cr.get_rendered_page_source()
resp['binary'] = False
resp['mimie'] = 'text/html' # depends on [control=['if'], data=[]] # depends on [control=['with'], data=['cr']]
if not noprint:
if resp['binary'] is False:
print(resp['content']) # depends on [control=['if'], data=[]]
else:
print('Response is a binary file')
print('Cannot print!') # depends on [control=['if'], data=[]]
if outfile:
with open(outfile, 'wb') as fp:
if resp['binary']:
fp.write(resp['content']) # depends on [control=['if'], data=[]]
else:
fp.write(resp['content'].encode('UTF-8')) # depends on [control=['with'], data=['fp']] # depends on [control=['if'], data=[]] |
def is_specified_directive(directive: GraphQLDirective):
    """Check whether the given directive is one of the specified directives."""
    # Values that are not directives can never match a spec-defined one.
    if not isinstance(directive, GraphQLDirective):
        return False
    # Match by name against the spec-defined directive set.
    return any(
        known.name == directive.name for known in specified_directives
    )
constant[Check whether the given directive is one of the specified directives.]
return[<ast.BoolOp object at 0x7da1b22eb7c0>] | keyword[def] identifier[is_specified_directive] ( identifier[directive] : identifier[GraphQLDirective] ):
literal[string]
keyword[return] identifier[isinstance] ( identifier[directive] , identifier[GraphQLDirective] ) keyword[and] identifier[any] (
identifier[specified_directive] . identifier[name] == identifier[directive] . identifier[name]
keyword[for] identifier[specified_directive] keyword[in] identifier[specified_directives]
) | def is_specified_directive(directive: GraphQLDirective):
"""Check whether the given directive is one of the specified directives."""
return isinstance(directive, GraphQLDirective) and any((specified_directive.name == directive.name for specified_directive in specified_directives)) |
def margin(self, left, right, top, bottom, lwidth=0, lheight=0):
    """
    Set margins for chart area

    args are of the form::

        <left margin>,
        <right margin>,
        <top margin>,
        <bottom margin>|
        <legend width>,
        <legend height>

    APIPARAM: chma
    """
    parts = ['%d,%d,%d,%d' % (left, right, top, bottom)]
    # Legend dimensions are appended after a '|' only when either is set.
    if lwidth or lheight:
        parts.append('%d,%d' % (lwidth, lheight))
    self['chma'] = '|'.join(parts)
    return self
constant[
Set margins for chart area
args are of the form::
<left margin>,
<right margin>,
<top margin>,
<bottom margin>|
<legend width>,
<legend height>
APIPARAM: chma
]
call[name[self]][constant[chma]] assign[=] binary_operation[constant[%d,%d,%d,%d] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da20c6e7df0>, <ast.Name object at 0x7da20c6e79a0>, <ast.Name object at 0x7da20c6e7f40>, <ast.Name object at 0x7da20c6e64d0>]]]
if <ast.BoolOp object at 0x7da20c6e70d0> begin[:]
<ast.AugAssign object at 0x7da20c6e7af0>
return[name[self]] | keyword[def] identifier[margin] ( identifier[self] , identifier[left] , identifier[right] , identifier[top] , identifier[bottom] , identifier[lwidth] = literal[int] , identifier[lheight] = literal[int] ):
literal[string]
identifier[self] [ literal[string] ]= literal[string] %( identifier[left] , identifier[right] , identifier[top] , identifier[bottom] )
keyword[if] identifier[lwidth] keyword[or] identifier[lheight] :
identifier[self] [ literal[string] ]+= literal[string] %( identifier[lwidth] , identifier[lheight] )
keyword[return] identifier[self] | def margin(self, left, right, top, bottom, lwidth=0, lheight=0):
"""
Set margins for chart area
args are of the form::
<left margin>,
<right margin>,
<top margin>,
<bottom margin>|
<legend width>,
<legend height>
APIPARAM: chma
"""
self['chma'] = '%d,%d,%d,%d' % (left, right, top, bottom)
if lwidth or lheight:
self['chma'] += '|%d,%d' % (lwidth, lheight) # depends on [control=['if'], data=[]]
return self |
def check_redis():
    """
    Redis checks the connection
    It displays on the screen whether or not you have a connection.
    """
    from pyoko.db.connection import cache
    from redis.exceptions import ConnectionError
    try:
        cache.ping()
        # Fill the colour placeholders via .format(); the previous plain
        # concatenation left the literal "{0}"/"{1}" markers in the output.
        print("{0}Redis is working{1}".format(CheckList.OKGREEN,
                                              CheckList.ENDC))
    except ConnectionError as e:
        # str(e) instead of e.message: the ``message`` attribute does not
        # exist on Python 3 exceptions.
        print(__(u"{0}Redis is not working{1} ").format(CheckList.FAIL,
                                                        CheckList.ENDC), str(e))
constant[
Redis checks the connection
It displays on the screen whether or not you have a connection.
]
from relative_module[pyoko.db.connection] import module[cache]
from relative_module[redis.exceptions] import module[ConnectionError]
<ast.Try object at 0x7da2041d80d0> | keyword[def] identifier[check_redis] ():
literal[string]
keyword[from] identifier[pyoko] . identifier[db] . identifier[connection] keyword[import] identifier[cache]
keyword[from] identifier[redis] . identifier[exceptions] keyword[import] identifier[ConnectionError]
keyword[try] :
identifier[cache] . identifier[ping] ()
identifier[print] ( identifier[CheckList] . identifier[OKGREEN] + literal[string] + identifier[CheckList] . identifier[ENDC] )
keyword[except] identifier[ConnectionError] keyword[as] identifier[e] :
identifier[print] ( identifier[__] ( literal[string] ). identifier[format] ( identifier[CheckList] . identifier[FAIL] ,
identifier[CheckList] . identifier[ENDC] ), identifier[e] . identifier[message] ) | def check_redis():
"""
Redis checks the connection
It displays on the screen whether or not you have a connection.
"""
from pyoko.db.connection import cache
from redis.exceptions import ConnectionError
try:
cache.ping()
print(CheckList.OKGREEN + '{0}Redis is working{1}' + CheckList.ENDC) # depends on [control=['try'], data=[]]
except ConnectionError as e:
print(__(u'{0}Redis is not working{1} ').format(CheckList.FAIL, CheckList.ENDC), e.message) # depends on [control=['except'], data=['e']] |
def _event_monitor_loop(region_name, vpc_id,
                        watcher_plugin, health_plugin,
                        iterations, sleep_time,
                        route_check_time_interval=30):
    """
    Monitor queues to receive updates about new route specs or any detected
    failed IPs.

    If any of those have updates, notify the health-monitor thread with a
    message on a special queue and also re-process the entire routing table.

    The 'iterations' argument allows us to limit the running time of the watch
    loop for test purposes. Not used during normal operation. Also, for faster
    tests, sleep_time can be set to values less than 1.

    The 'route_check_time_interval' arguments specifies the number of seconds
    we allow to elapse before forcing a re-check of the VPC routes. This is so
    that accidentally deleted routes or manually broken route tables can be
    fixed back up again on their own.

    :param region_name: AWS region name, passed through to vpc.handle_spec().
    :param vpc_id: ID of the VPC whose route tables are managed.
    :param watcher_plugin: plugin providing the route-spec queue.
    :param health_plugin: plugin providing the monitor/failed/questionable
        IP queues.
    :param iterations: optional loop-count limit (None = run until stopped).
    :param sleep_time: seconds to sleep per loop iteration.
    :param route_check_time_interval: seconds between forced route re-checks.
    """
    q_route_spec = watcher_plugin.get_route_spec_queue()
    q_monitor_ips, q_failed_ips, q_questionable_ips = \
        health_plugin.get_queues()
    time.sleep(sleep_time)  # Wait to allow monitor to report results
    current_route_spec = {}  # The last route spec we have seen
    all_ips = []  # Cache of IP addresses we currently know about
    # Occasionally we want to recheck VPC routes even without other updates.
    # That way, if a route is manually deleted by someone, it will be
    # re-created on its own.
    last_route_check_time = time.time()
    while not CURRENT_STATE._stop_all:
        try:
            # Get the latest messages from the route-spec monitor and the
            # health-check monitor. At system start the route-spec queue should
            # immediately have been initialized with a first message.
            failed_ips = utils.read_last_msg_from_queue(q_failed_ips)
            questnbl_ips = utils.read_last_msg_from_queue(q_questionable_ips)
            new_route_spec = utils.read_last_msg_from_queue(q_route_spec)
            if failed_ips:
                # Store the failed IPs in the shared state
                CURRENT_STATE.failed_ips = failed_ips
            if questnbl_ips:
                # Store the questionable IPs in the shared state
                # NOTE(review): attribute is spelled 'questionble_ips' (sic);
                # presumably readers of the shared state use the same spelling
                # -- confirm before renaming.
                CURRENT_STATE.questionble_ips = questnbl_ips
            if new_route_spec:
                # Store the new route spec in the shared state
                CURRENT_STATE.route_spec = new_route_spec
                current_route_spec = new_route_spec
                # Need to communicate a new set of IPs to the health
                # monitoring thread, in case the list changed. The list of
                # addresses is extracted from the route spec. Pass in the old
                # version of the address list, so that this function can
                # compare to see if there are any changes to the host list.
                all_ips = _update_health_monitor_with_new_ips(new_route_spec,
                                                             all_ips,
                                                             q_monitor_ips)
            # Spec or list of failed or questionable IPs changed? Update
            # routes...
            # We pass in the last route spec we have seen, since we are also
            # here in case we only have failed/questionable IPs, but no new
            # route spec. This is also called occasionally on its own, so that
            # we can repair any damaged route tables in VPC.
            now = time.time()
            time_for_regular_recheck = \
                (now - last_route_check_time) > route_check_time_interval
            if new_route_spec or failed_ips or questnbl_ips or \
                    time_for_regular_recheck:
                if not new_route_spec and not (failed_ips or questnbl_ips):
                    # Only reason we are here is due to expired timer.
                    logging.debug("Time for regular route check")
                last_route_check_time = now
                vpc.handle_spec(region_name, vpc_id, current_route_spec,
                                failed_ips if failed_ips else [],
                                questnbl_ips if questnbl_ips else [])
            # If iterations are provided, count down and exit
            if iterations is not None:
                iterations -= 1
                if iterations == 0:
                    break
            time.sleep(sleep_time)
        except KeyboardInterrupt:
            # Allow exit via keyboard interrupt, useful during development
            return
        except Exception as e:
            # Of course we should never get here, but if we do, better to log
            # it and keep operating best we can...
            import traceback
            traceback.print_exc()
            logging.error("*** Uncaught exception 1: %s" % str(e))
            return
    logging.debug("event_monitor_loop ended: Global stop")
constant[
Monitor queues to receive updates about new route specs or any detected
failed IPs.
If any of those have updates, notify the health-monitor thread with a
message on a special queue and also re-process the entire routing table.
The 'iterations' argument allows us to limit the running time of the watch
loop for test purposes. Not used during normal operation. Also, for faster
tests, sleep_time can be set to values less than 1.
The 'route_check_time_interval' arguments specifies the number of seconds
we allow to elapse before forcing a re-check of the VPC routes. This is so
that accidentally deleted routes or manually broken route tables can be
fixed back up again on their own.
]
variable[q_route_spec] assign[=] call[name[watcher_plugin].get_route_spec_queue, parameter[]]
<ast.Tuple object at 0x7da18f722bc0> assign[=] call[name[health_plugin].get_queues, parameter[]]
call[name[time].sleep, parameter[name[sleep_time]]]
variable[current_route_spec] assign[=] dictionary[[], []]
variable[all_ips] assign[=] list[[]]
variable[last_route_check_time] assign[=] call[name[time].time, parameter[]]
while <ast.UnaryOp object at 0x7da18f722b90> begin[:]
<ast.Try object at 0x7da18f722a10>
call[name[logging].debug, parameter[constant[event_monitor_loop ended: Global stop]]] | keyword[def] identifier[_event_monitor_loop] ( identifier[region_name] , identifier[vpc_id] ,
identifier[watcher_plugin] , identifier[health_plugin] ,
identifier[iterations] , identifier[sleep_time] ,
identifier[route_check_time_interval] = literal[int] ):
literal[string]
identifier[q_route_spec] = identifier[watcher_plugin] . identifier[get_route_spec_queue] ()
identifier[q_monitor_ips] , identifier[q_failed_ips] , identifier[q_questionable_ips] = identifier[health_plugin] . identifier[get_queues] ()
identifier[time] . identifier[sleep] ( identifier[sleep_time] )
identifier[current_route_spec] ={}
identifier[all_ips] =[]
identifier[last_route_check_time] = identifier[time] . identifier[time] ()
keyword[while] keyword[not] identifier[CURRENT_STATE] . identifier[_stop_all] :
keyword[try] :
identifier[failed_ips] = identifier[utils] . identifier[read_last_msg_from_queue] ( identifier[q_failed_ips] )
identifier[questnbl_ips] = identifier[utils] . identifier[read_last_msg_from_queue] ( identifier[q_questionable_ips] )
identifier[new_route_spec] = identifier[utils] . identifier[read_last_msg_from_queue] ( identifier[q_route_spec] )
keyword[if] identifier[failed_ips] :
identifier[CURRENT_STATE] . identifier[failed_ips] = identifier[failed_ips]
keyword[if] identifier[questnbl_ips] :
identifier[CURRENT_STATE] . identifier[questionble_ips] = identifier[questnbl_ips]
keyword[if] identifier[new_route_spec] :
identifier[CURRENT_STATE] . identifier[route_spec] = identifier[new_route_spec]
identifier[current_route_spec] = identifier[new_route_spec]
identifier[all_ips] = identifier[_update_health_monitor_with_new_ips] ( identifier[new_route_spec] ,
identifier[all_ips] ,
identifier[q_monitor_ips] )
identifier[now] = identifier[time] . identifier[time] ()
identifier[time_for_regular_recheck] =( identifier[now] - identifier[last_route_check_time] )> identifier[route_check_time_interval]
keyword[if] identifier[new_route_spec] keyword[or] identifier[failed_ips] keyword[or] identifier[questnbl_ips] keyword[or] identifier[time_for_regular_recheck] :
keyword[if] keyword[not] identifier[new_route_spec] keyword[and] keyword[not] ( identifier[failed_ips] keyword[or] identifier[questnbl_ips] ):
identifier[logging] . identifier[debug] ( literal[string] )
identifier[last_route_check_time] = identifier[now]
identifier[vpc] . identifier[handle_spec] ( identifier[region_name] , identifier[vpc_id] , identifier[current_route_spec] ,
identifier[failed_ips] keyword[if] identifier[failed_ips] keyword[else] [],
identifier[questnbl_ips] keyword[if] identifier[questnbl_ips] keyword[else] [])
keyword[if] identifier[iterations] keyword[is] keyword[not] keyword[None] :
identifier[iterations] -= literal[int]
keyword[if] identifier[iterations] == literal[int] :
keyword[break]
identifier[time] . identifier[sleep] ( identifier[sleep_time] )
keyword[except] identifier[KeyboardInterrupt] :
keyword[return]
keyword[except] identifier[Exception] keyword[as] identifier[e] :
keyword[import] identifier[traceback]
identifier[traceback] . identifier[print_exc] ()
identifier[logging] . identifier[error] ( literal[string] % identifier[str] ( identifier[e] ))
keyword[return]
identifier[logging] . identifier[debug] ( literal[string] ) | def _event_monitor_loop(region_name, vpc_id, watcher_plugin, health_plugin, iterations, sleep_time, route_check_time_interval=30):
"""
Monitor queues to receive updates about new route specs or any detected
failed IPs.
If any of those have updates, notify the health-monitor thread with a
message on a special queue and also re-process the entire routing table.
The 'iterations' argument allows us to limit the running time of the watch
loop for test purposes. Not used during normal operation. Also, for faster
tests, sleep_time can be set to values less than 1.
The 'route_check_time_interval' arguments specifies the number of seconds
we allow to elapse before forcing a re-check of the VPC routes. This is so
that accidentally deleted routes or manually broken route tables can be
fixed back up again on their own.
"""
q_route_spec = watcher_plugin.get_route_spec_queue()
(q_monitor_ips, q_failed_ips, q_questionable_ips) = health_plugin.get_queues()
time.sleep(sleep_time) # Wait to allow monitor to report results
current_route_spec = {} # The last route spec we have seen
all_ips = [] # Cache of IP addresses we currently know about
# Occasionally we want to recheck VPC routes even without other updates.
# That way, if a route is manually deleted by someone, it will be
# re-created on its own.
last_route_check_time = time.time()
while not CURRENT_STATE._stop_all:
try:
# Get the latest messages from the route-spec monitor and the
# health-check monitor. At system start the route-spec queue should
# immediately have been initialized with a first message.
failed_ips = utils.read_last_msg_from_queue(q_failed_ips)
questnbl_ips = utils.read_last_msg_from_queue(q_questionable_ips)
new_route_spec = utils.read_last_msg_from_queue(q_route_spec)
if failed_ips:
# Store the failed IPs in the shared state
CURRENT_STATE.failed_ips = failed_ips # depends on [control=['if'], data=[]]
if questnbl_ips:
# Store the questionable IPs in the shared state
CURRENT_STATE.questionble_ips = questnbl_ips # depends on [control=['if'], data=[]]
if new_route_spec:
# Store the new route spec in the shared state
CURRENT_STATE.route_spec = new_route_spec
current_route_spec = new_route_spec
# Need to communicate a new set of IPs to the health
# monitoring thread, in case the list changed. The list of
# addresses is extracted from the route spec. Pass in the old
# version of the address list, so that this function can
# compare to see if there are any changes to the host list.
all_ips = _update_health_monitor_with_new_ips(new_route_spec, all_ips, q_monitor_ips) # depends on [control=['if'], data=[]]
# Spec or list of failed or questionable IPs changed? Update
# routes...
# We pass in the last route spec we have seen, since we are also
# here in case we only have failed/questionable IPs, but no new
# route spec. This is also called occasionally on its own, so that
# we can repair any damaged route tables in VPC.
now = time.time()
time_for_regular_recheck = now - last_route_check_time > route_check_time_interval
if new_route_spec or failed_ips or questnbl_ips or time_for_regular_recheck:
if not new_route_spec and (not (failed_ips or questnbl_ips)):
# Only reason we are here is due to expired timer.
logging.debug('Time for regular route check') # depends on [control=['if'], data=[]]
last_route_check_time = now
vpc.handle_spec(region_name, vpc_id, current_route_spec, failed_ips if failed_ips else [], questnbl_ips if questnbl_ips else []) # depends on [control=['if'], data=[]]
# If iterations are provided, count down and exit
if iterations is not None:
iterations -= 1
if iterations == 0:
break # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['iterations']]
time.sleep(sleep_time) # depends on [control=['try'], data=[]]
except KeyboardInterrupt:
# Allow exit via keyboard interrupt, useful during development
return # depends on [control=['except'], data=[]]
except Exception as e:
# Of course we should never get here, but if we do, better to log
# it and keep operating best we can...
import traceback
traceback.print_exc()
logging.error('*** Uncaught exception 1: %s' % str(e))
return # depends on [control=['except'], data=['e']] # depends on [control=['while'], data=[]]
logging.debug('event_monitor_loop ended: Global stop') |
def list_objects_access(f):
    """Access to listObjects() controlled by settings.PUBLIC_OBJECT_LIST."""
    @functools.wraps(f)
    def wrapped_view(request, *args, **kwargs):
        # When the object list is not public, require a trusted caller
        # before delegating to the wrapped view.
        if not django.conf.settings.PUBLIC_OBJECT_LIST:
            trusted(request)
        return f(request, *args, **kwargs)
    return wrapped_view
constant[Access to listObjects() controlled by settings.PUBLIC_OBJECT_LIST.]
def function[wrapper, parameter[request]]:
if <ast.UnaryOp object at 0x7da18dc06b00> begin[:]
call[name[trusted], parameter[name[request]]]
return[call[name[f], parameter[name[request], <ast.Starred object at 0x7da18c4ce8f0>]]]
return[name[wrapper]] | keyword[def] identifier[list_objects_access] ( identifier[f] ):
literal[string]
@ identifier[functools] . identifier[wraps] ( identifier[f] )
keyword[def] identifier[wrapper] ( identifier[request] ,* identifier[args] ,** identifier[kwargs] ):
keyword[if] keyword[not] identifier[django] . identifier[conf] . identifier[settings] . identifier[PUBLIC_OBJECT_LIST] :
identifier[trusted] ( identifier[request] )
keyword[return] identifier[f] ( identifier[request] ,* identifier[args] ,** identifier[kwargs] )
keyword[return] identifier[wrapper] | def list_objects_access(f):
"""Access to listObjects() controlled by settings.PUBLIC_OBJECT_LIST."""
@functools.wraps(f)
def wrapper(request, *args, **kwargs):
if not django.conf.settings.PUBLIC_OBJECT_LIST:
trusted(request) # depends on [control=['if'], data=[]]
return f(request, *args, **kwargs)
return wrapper |
def FindExtensionByName(self, full_name):
    """Loads the named extension descriptor from the pool.

    Args:
      full_name: The full name of the extension descriptor to load.

    Returns:
      A FieldDescriptor, describing the named extension.
    """
    normalized = _NormalizeFullyQualifiedName(full_name)
    containing_name, _, extension_name = normalized.rpartition('.')
    try:
        # Most extensions are nested inside a message.
        scope = self.FindMessageTypeByName(containing_name)
    except KeyError:
        # Some extensions are defined at file scope.
        scope = self.FindFileContainingSymbol(normalized)
    return scope.extensions_by_name[extension_name]
constant[Loads the named extension descriptor from the pool.
Args:
full_name: The full name of the extension descriptor to load.
Returns:
A FieldDescriptor, describing the named extension.
]
variable[full_name] assign[=] call[name[_NormalizeFullyQualifiedName], parameter[name[full_name]]]
<ast.Tuple object at 0x7da2041dad70> assign[=] call[name[full_name].rpartition, parameter[constant[.]]]
<ast.Try object at 0x7da2041da410>
return[call[name[scope].extensions_by_name][name[extension_name]]] | keyword[def] identifier[FindExtensionByName] ( identifier[self] , identifier[full_name] ):
literal[string]
identifier[full_name] = identifier[_NormalizeFullyQualifiedName] ( identifier[full_name] )
identifier[message_name] , identifier[_] , identifier[extension_name] = identifier[full_name] . identifier[rpartition] ( literal[string] )
keyword[try] :
identifier[scope] = identifier[self] . identifier[FindMessageTypeByName] ( identifier[message_name] )
keyword[except] identifier[KeyError] :
identifier[scope] = identifier[self] . identifier[FindFileContainingSymbol] ( identifier[full_name] )
keyword[return] identifier[scope] . identifier[extensions_by_name] [ identifier[extension_name] ] | def FindExtensionByName(self, full_name):
"""Loads the named extension descriptor from the pool.
Args:
full_name: The full name of the extension descriptor to load.
Returns:
A FieldDescriptor, describing the named extension.
"""
full_name = _NormalizeFullyQualifiedName(full_name)
(message_name, _, extension_name) = full_name.rpartition('.')
try:
# Most extensions are nested inside a message.
scope = self.FindMessageTypeByName(message_name) # depends on [control=['try'], data=[]]
except KeyError:
# Some extensions are defined at file scope.
scope = self.FindFileContainingSymbol(full_name) # depends on [control=['except'], data=[]]
return scope.extensions_by_name[extension_name] |
def tables(subjects=None,
           pastDays=None,
           include_inactive=False,
           lang=DEFAULT_LANGUAGE):
    """Find tables placed under given subjects.

    The request is issued eagerly; the returned generator lazily wraps
    each raw record in a ``Table``.
    """
    req = Request('tables',
                  subjects=subjects,
                  pastDays=pastDays,
                  includeInactive=include_inactive,
                  lang=lang)
    return (Table(entry, lang=lang) for entry in req.json)
constant[Find tables placed under given subjects.
]
variable[request] assign[=] call[name[Request], parameter[constant[tables]]]
return[<ast.GeneratorExp object at 0x7da204347700>] | keyword[def] identifier[tables] ( identifier[subjects] = keyword[None] ,
identifier[pastDays] = keyword[None] ,
identifier[include_inactive] = keyword[False] ,
identifier[lang] = identifier[DEFAULT_LANGUAGE] ):
literal[string]
identifier[request] = identifier[Request] ( literal[string] ,
identifier[subjects] = identifier[subjects] ,
identifier[pastDays] = identifier[pastDays] ,
identifier[includeInactive] = identifier[include_inactive] ,
identifier[lang] = identifier[lang] )
keyword[return] ( identifier[Table] ( identifier[table] , identifier[lang] = identifier[lang] ) keyword[for] identifier[table] keyword[in] identifier[request] . identifier[json] ) | def tables(subjects=None, pastDays=None, include_inactive=False, lang=DEFAULT_LANGUAGE):
"""Find tables placed under given subjects.
"""
request = Request('tables', subjects=subjects, pastDays=pastDays, includeInactive=include_inactive, lang=lang)
return (Table(table, lang=lang) for table in request.json) |
def _load_config(self):
    """load configuration from config/"""
    logger.debug(
        'Listing per-account config subdirectories in %s', self._conf_dir
    )
    for entry in os.listdir(self._conf_dir):
        entry_path = os.path.join(self._conf_dir, entry)
        # Only numeric-named subdirectories are account configs.
        if not os.path.isdir(entry_path):
            continue
        if not self.acct_id_re.match(entry):
            continue
        # Directory name is the account ID; load its configuration.
        self._load_account(entry, entry_path)
    # Once all configuration is loaded, build a dict of Account Name to
    # Account ID (``self._acct_name_to_id``) for faster access to configs
    # by name.  Unnamed accounts are skipped.
    for acct_id, conf in self._config.items():
        acct_name = conf['name']
        if acct_name is not None:
            self._acct_name_to_id[acct_name] = acct_id
constant[load configuration from config/]
call[name[logger].debug, parameter[constant[Listing per-account config subdirectories in %s], name[self]._conf_dir]]
for taget[name[acct_id]] in starred[call[name[os].listdir, parameter[name[self]._conf_dir]]] begin[:]
variable[path] assign[=] call[name[os].path.join, parameter[name[self]._conf_dir, name[acct_id]]]
if <ast.UnaryOp object at 0x7da18f812800> begin[:]
continue
if <ast.UnaryOp object at 0x7da18f812bf0> begin[:]
continue
call[name[self]._load_account, parameter[name[acct_id], name[path]]]
for taget[tuple[[<ast.Name object at 0x7da18f8103a0>, <ast.Name object at 0x7da18f810ca0>]]] in starred[call[name[self]._config.items, parameter[]]] begin[:]
if compare[call[name[data]][constant[name]] is constant[None]] begin[:]
continue
call[name[self]._acct_name_to_id][call[name[data]][constant[name]]] assign[=] name[acct_id] | keyword[def] identifier[_load_config] ( identifier[self] ):
literal[string]
identifier[logger] . identifier[debug] (
literal[string] , identifier[self] . identifier[_conf_dir]
)
keyword[for] identifier[acct_id] keyword[in] identifier[os] . identifier[listdir] ( identifier[self] . identifier[_conf_dir] ):
identifier[path] = identifier[os] . identifier[path] . identifier[join] ( identifier[self] . identifier[_conf_dir] , identifier[acct_id] )
keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[isdir] ( identifier[path] ):
keyword[continue]
keyword[if] keyword[not] identifier[self] . identifier[acct_id_re] . identifier[match] ( identifier[acct_id] ):
keyword[continue]
identifier[self] . identifier[_load_account] ( identifier[acct_id] , identifier[path] )
keyword[for] identifier[acct_id] , identifier[data] keyword[in] identifier[self] . identifier[_config] . identifier[items] ():
keyword[if] identifier[data] [ literal[string] ] keyword[is] keyword[None] :
keyword[continue]
identifier[self] . identifier[_acct_name_to_id] [ identifier[data] [ literal[string] ]]= identifier[acct_id] | def _load_config(self):
"""load configuration from config/"""
logger.debug('Listing per-account config subdirectories in %s', self._conf_dir)
for acct_id in os.listdir(self._conf_dir):
path = os.path.join(self._conf_dir, acct_id)
# skip if not a directory
if not os.path.isdir(path):
continue # depends on [control=['if'], data=[]]
# skip if doesn't match ^[0-9]+$
if not self.acct_id_re.match(acct_id):
continue # depends on [control=['if'], data=[]]
# call _load_account specifying the directory name (acct ID) & path
self._load_account(acct_id, path) # depends on [control=['for'], data=['acct_id']]
# Once all configuration is loaded, build a dict of Account Name to
# Account ID (``self._acct_name_to_id``) for faster access to configs
# by name.
for (acct_id, data) in self._config.items():
if data['name'] is None:
continue # depends on [control=['if'], data=[]]
self._acct_name_to_id[data['name']] = acct_id # depends on [control=['for'], data=[]] |
def stop(self):
    """Mark the end of the interval.

    An interval can only be stopped once; calling ``stop`` on an
    already-stopped interval has no effect.

    :returns: the duration in whole milliseconds if this call actually
        stopped the interval, otherwise ``False``.
    :raises IntervalException: if the interval was never started.
    """
    if self._start_instant is None:
        raise IntervalException("Attempt to stop an interval that has not started.")
    if self._stop_instant is not None:
        # Already stopped by an earlier call -- report that nothing happened.
        return False
    now = instant()
    self._stop_instant = now
    # Duration is stored as an integer number of milliseconds.
    self._duration = int((now - self._start_instant) * 1000)
    return self._duration
constant[Mark the stop of the interval.
Calling stop on an already stopped interval has no effect.
An interval can only be stopped once.
:returns: the duration if the interval is truely stopped otherwise ``False``.
]
if compare[name[self]._start_instant is constant[None]] begin[:]
<ast.Raise object at 0x7da18f721660>
if compare[name[self]._stop_instant is constant[None]] begin[:]
name[self]._stop_instant assign[=] call[name[instant], parameter[]]
name[self]._duration assign[=] call[name[int], parameter[binary_operation[binary_operation[name[self]._stop_instant - name[self]._start_instant] * constant[1000]]]]
return[name[self]._duration]
return[constant[False]] | keyword[def] identifier[stop] ( identifier[self] ):
literal[string]
keyword[if] identifier[self] . identifier[_start_instant] keyword[is] keyword[None] :
keyword[raise] identifier[IntervalException] ( literal[string] )
keyword[if] identifier[self] . identifier[_stop_instant] keyword[is] keyword[None] :
identifier[self] . identifier[_stop_instant] = identifier[instant] ()
identifier[self] . identifier[_duration] = identifier[int] (( identifier[self] . identifier[_stop_instant] - identifier[self] . identifier[_start_instant] )* literal[int] )
keyword[return] identifier[self] . identifier[_duration]
keyword[return] keyword[False] | def stop(self):
"""Mark the stop of the interval.
Calling stop on an already stopped interval has no effect.
An interval can only be stopped once.
:returns: the duration if the interval is truely stopped otherwise ``False``.
"""
if self._start_instant is None:
raise IntervalException('Attempt to stop an interval that has not started.') # depends on [control=['if'], data=[]]
if self._stop_instant is None:
self._stop_instant = instant()
self._duration = int((self._stop_instant - self._start_instant) * 1000)
return self._duration # depends on [control=['if'], data=[]]
return False |
def APO(series, fast=12, slow=26, matype=0):
    """Absolute Price Oscillator (TA-Lib ``APO``).

    NOTE(review): the original docstring said "double exponential moving
    average", which describes DEMA, not APO -- corrected here.

    :param series: input price series.
    :param fast: fast moving-average period (default 12).
    :param slow: slow moving-average period (default 26).
    :param matype: TA-Lib moving-average type selector (0 is TA-Lib's
        default; see the TA-Lib MA type codes).
    :returns: result of applying ``talib.APO`` via ``_series_to_series``.
    """
    return _series_to_series(series, talib.APO, fast, slow, matype)
constant[double exponential moving average]
return[call[name[_series_to_series], parameter[name[series], name[talib].APO, name[fast], name[slow], name[matype]]]] | keyword[def] identifier[APO] ( identifier[series] , identifier[fast] = literal[int] , identifier[slow] = literal[int] , identifier[matype] = literal[int] ):
literal[string]
keyword[return] identifier[_series_to_series] ( identifier[series] , identifier[talib] . identifier[APO] , identifier[fast] , identifier[slow] , identifier[matype] ) | def APO(series, fast=12, slow=26, matype=0):
"""double exponential moving average"""
return _series_to_series(series, talib.APO, fast, slow, matype) |
def values_clear(self, range):
    """Clear a range of values by calling `spreadsheets.values.clear
    <https://developers.google.com/sheets/api/reference/rest/v4/spreadsheets.values/clear>`_
    directly.

    :param str range: The `A1 notation
        <https://developers.google.com/sheets/api/guides/concepts#a1_notation>`_
        of the values to clear.
    :returns: `Response body
        <https://developers.google.com/sheets/api/reference/rest/v4/spreadsheets.values/clear#response-body>`_.
    :rtype: dict

    .. versionadded:: 3.0
    """
    # The parameter is named ``range`` (shadowing the builtin) to mirror
    # the public API; the name is kept for backward compatibility.
    endpoint = SPREADSHEET_VALUES_CLEAR_URL % (self.id, quote(range))
    return self.client.request('post', endpoint).json()
constant[Lower-level method that directly calls `spreadsheets.values.clear <https://developers.google.com/sheets/api/reference/rest/v4/spreadsheets.values/clear>`_.
:param str range: The `A1 notation <https://developers.google.com/sheets/api/guides/concepts#a1_notation>`_ of the values to clear.
:returns: `Response body <https://developers.google.com/sheets/api/reference/rest/v4/spreadsheets.values/clear#response-body>`_.
:rtype: dict
.. versionadded:: 3.0
]
variable[url] assign[=] binary_operation[name[SPREADSHEET_VALUES_CLEAR_URL] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Attribute object at 0x7da20e956f20>, <ast.Call object at 0x7da20e956650>]]]
variable[r] assign[=] call[name[self].client.request, parameter[constant[post], name[url]]]
return[call[name[r].json, parameter[]]] | keyword[def] identifier[values_clear] ( identifier[self] , identifier[range] ):
literal[string]
identifier[url] = identifier[SPREADSHEET_VALUES_CLEAR_URL] %( identifier[self] . identifier[id] , identifier[quote] ( identifier[range] ))
identifier[r] = identifier[self] . identifier[client] . identifier[request] ( literal[string] , identifier[url] )
keyword[return] identifier[r] . identifier[json] () | def values_clear(self, range):
"""Lower-level method that directly calls `spreadsheets.values.clear <https://developers.google.com/sheets/api/reference/rest/v4/spreadsheets.values/clear>`_.
:param str range: The `A1 notation <https://developers.google.com/sheets/api/guides/concepts#a1_notation>`_ of the values to clear.
:returns: `Response body <https://developers.google.com/sheets/api/reference/rest/v4/spreadsheets.values/clear#response-body>`_.
:rtype: dict
.. versionadded:: 3.0
"""
url = SPREADSHEET_VALUES_CLEAR_URL % (self.id, quote(range))
r = self.client.request('post', url)
return r.json() |
def pnl(self, account='', modelCode='') -> List[PnL]:
    """
    List of subscribed :class:`.PnL` objects (profit and loss),
    optionally filtered by account and/or modelCode.

    The :class:`.PnL` objects are kept live updated.

    Args:
        account: If specified, filter for this account name.
        modelCode: If specified, filter for this account model.
    """
    selected = []
    for item in self.wrapper.pnls.values():
        # An empty filter string matches everything.
        if account and item.account != account:
            continue
        if modelCode and item.modelCode != modelCode:
            continue
        selected.append(item)
    return selected
constant[
List of subscribed :class:`.PnL` objects (profit and loss),
optionally filtered by account and/or modelCode.
The :class:`.PnL` objects are kept live updated.
Args:
account: If specified, filter for this account name.
modelCode: If specified, filter for this account model.
]
return[<ast.ListComp object at 0x7da20e954c40>] | keyword[def] identifier[pnl] ( identifier[self] , identifier[account] = literal[string] , identifier[modelCode] = literal[string] )-> identifier[List] [ identifier[PnL] ]:
literal[string]
keyword[return] [ identifier[v] keyword[for] identifier[v] keyword[in] identifier[self] . identifier[wrapper] . identifier[pnls] . identifier[values] () keyword[if]
( keyword[not] identifier[account] keyword[or] identifier[v] . identifier[account] == identifier[account] ) keyword[and]
( keyword[not] identifier[modelCode] keyword[or] identifier[v] . identifier[modelCode] == identifier[modelCode] )] | def pnl(self, account='', modelCode='') -> List[PnL]:
"""
List of subscribed :class:`.PnL` objects (profit and loss),
optionally filtered by account and/or modelCode.
The :class:`.PnL` objects are kept live updated.
Args:
account: If specified, filter for this account name.
modelCode: If specified, filter for this account model.
"""
return [v for v in self.wrapper.pnls.values() if (not account or v.account == account) and (not modelCode or v.modelCode == modelCode)] |
def register_shortcut(self, qaction_or_qshortcut, context, name,
                      add_sc_to_tip=False):
    """
    Register QAction or QShortcut to Spyder main application.

    Thin delegation to ``self.main.register_shortcut``; all the shortcut
    bookkeeping lives on the main window object.

    :param qaction_or_qshortcut: QAction or QShortcut instance to register.
    :param context: shortcut context identifier the name is scoped to.
    :param name: shortcut name within that context.
    :param add_sc_to_tip: if True, the shortcut is added to the
        action's tooltip.
    """
    self.main.register_shortcut(qaction_or_qshortcut, context,
                                name, add_sc_to_tip)
constant[
Register QAction or QShortcut to Spyder main application.
if add_sc_to_tip is True, the shortcut is added to the
action's tooltip
]
call[name[self].main.register_shortcut, parameter[name[qaction_or_qshortcut], name[context], name[name], name[add_sc_to_tip]]] | keyword[def] identifier[register_shortcut] ( identifier[self] , identifier[qaction_or_qshortcut] , identifier[context] , identifier[name] ,
identifier[add_sc_to_tip] = keyword[False] ):
literal[string]
identifier[self] . identifier[main] . identifier[register_shortcut] ( identifier[qaction_or_qshortcut] , identifier[context] ,
identifier[name] , identifier[add_sc_to_tip] ) | def register_shortcut(self, qaction_or_qshortcut, context, name, add_sc_to_tip=False):
"""
Register QAction or QShortcut to Spyder main application.
if add_sc_to_tip is True, the shortcut is added to the
action's tooltip
"""
self.main.register_shortcut(qaction_or_qshortcut, context, name, add_sc_to_tip) |
def StopHunt(hunt_id, reason=None):
  """Stops a hunt with a given id.

  Args:
    hunt_id: Id of the hunt to stop.
    reason: Optional human-readable reason; stored as the hunt state comment
      and used as the notification message.

  Returns:
    The freshly re-read, updated hunt object.

  Raises:
    OnlyStartedOrPausedHuntCanBeStoppedError: If the hunt is in any state
      other than STARTED or PAUSED.
  """
  hunt_obj = data_store.REL_DB.ReadHuntObject(hunt_id)
  # Only running or paused hunts may be stopped.
  state = hunt_obj.hunt_state
  if state != hunt_obj.HuntState.STARTED and state != hunt_obj.HuntState.PAUSED:
    raise OnlyStartedOrPausedHuntCanBeStoppedError(hunt_obj)

  data_store.REL_DB.UpdateHuntObject(
      hunt_id, hunt_state=hunt_obj.HuntState.STOPPED, hunt_state_comment=reason)
  data_store.REL_DB.RemoveForemanRule(hunt_id=hunt_obj.hunt_id)

  # Notify the creator, unless no reason was supplied or the creator is a
  # system user.
  should_notify = (
      reason is not None and
      hunt_obj.creator not in aff4_users.GRRUser.SYSTEM_USERS)
  if should_notify:
    hunt_ref = rdf_objects.HuntReference(hunt_id=hunt_obj.hunt_id)
    object_ref = rdf_objects.ObjectReference(
        reference_type=rdf_objects.ObjectReference.Type.HUNT, hunt=hunt_ref)
    notification.Notify(
        hunt_obj.creator,
        rdf_objects.UserNotification.Type.TYPE_HUNT_STOPPED,
        reason,
        object_ref)

  return data_store.REL_DB.ReadHuntObject(hunt_id)
constant[Stops a hunt with a given id.]
variable[hunt_obj] assign[=] call[name[data_store].REL_DB.ReadHuntObject, parameter[name[hunt_id]]]
if compare[name[hunt_obj].hunt_state <ast.NotIn object at 0x7da2590d7190> list[[<ast.Attribute object at 0x7da1b1b87070>, <ast.Attribute object at 0x7da1b1d90190>]]] begin[:]
<ast.Raise object at 0x7da1b1d908e0>
call[name[data_store].REL_DB.UpdateHuntObject, parameter[name[hunt_id]]]
call[name[data_store].REL_DB.RemoveForemanRule, parameter[]]
if <ast.BoolOp object at 0x7da1b1d921d0> begin[:]
call[name[notification].Notify, parameter[name[hunt_obj].creator, name[rdf_objects].UserNotification.Type.TYPE_HUNT_STOPPED, name[reason], call[name[rdf_objects].ObjectReference, parameter[]]]]
return[call[name[data_store].REL_DB.ReadHuntObject, parameter[name[hunt_id]]]] | keyword[def] identifier[StopHunt] ( identifier[hunt_id] , identifier[reason] = keyword[None] ):
literal[string]
identifier[hunt_obj] = identifier[data_store] . identifier[REL_DB] . identifier[ReadHuntObject] ( identifier[hunt_id] )
keyword[if] identifier[hunt_obj] . identifier[hunt_state] keyword[not] keyword[in] [
identifier[hunt_obj] . identifier[HuntState] . identifier[STARTED] , identifier[hunt_obj] . identifier[HuntState] . identifier[PAUSED]
]:
keyword[raise] identifier[OnlyStartedOrPausedHuntCanBeStoppedError] ( identifier[hunt_obj] )
identifier[data_store] . identifier[REL_DB] . identifier[UpdateHuntObject] (
identifier[hunt_id] , identifier[hunt_state] = identifier[hunt_obj] . identifier[HuntState] . identifier[STOPPED] , identifier[hunt_state_comment] = identifier[reason] )
identifier[data_store] . identifier[REL_DB] . identifier[RemoveForemanRule] ( identifier[hunt_id] = identifier[hunt_obj] . identifier[hunt_id] )
keyword[if] ( identifier[reason] keyword[is] keyword[not] keyword[None] keyword[and]
identifier[hunt_obj] . identifier[creator] keyword[not] keyword[in] identifier[aff4_users] . identifier[GRRUser] . identifier[SYSTEM_USERS] ):
identifier[notification] . identifier[Notify] (
identifier[hunt_obj] . identifier[creator] , identifier[rdf_objects] . identifier[UserNotification] . identifier[Type] . identifier[TYPE_HUNT_STOPPED] ,
identifier[reason] ,
identifier[rdf_objects] . identifier[ObjectReference] (
identifier[reference_type] = identifier[rdf_objects] . identifier[ObjectReference] . identifier[Type] . identifier[HUNT] ,
identifier[hunt] = identifier[rdf_objects] . identifier[HuntReference] ( identifier[hunt_id] = identifier[hunt_obj] . identifier[hunt_id] )))
keyword[return] identifier[data_store] . identifier[REL_DB] . identifier[ReadHuntObject] ( identifier[hunt_id] ) | def StopHunt(hunt_id, reason=None):
"""Stops a hunt with a given id."""
hunt_obj = data_store.REL_DB.ReadHuntObject(hunt_id)
if hunt_obj.hunt_state not in [hunt_obj.HuntState.STARTED, hunt_obj.HuntState.PAUSED]:
raise OnlyStartedOrPausedHuntCanBeStoppedError(hunt_obj) # depends on [control=['if'], data=[]]
data_store.REL_DB.UpdateHuntObject(hunt_id, hunt_state=hunt_obj.HuntState.STOPPED, hunt_state_comment=reason)
data_store.REL_DB.RemoveForemanRule(hunt_id=hunt_obj.hunt_id)
if reason is not None and hunt_obj.creator not in aff4_users.GRRUser.SYSTEM_USERS:
notification.Notify(hunt_obj.creator, rdf_objects.UserNotification.Type.TYPE_HUNT_STOPPED, reason, rdf_objects.ObjectReference(reference_type=rdf_objects.ObjectReference.Type.HUNT, hunt=rdf_objects.HuntReference(hunt_id=hunt_obj.hunt_id))) # depends on [control=['if'], data=[]]
return data_store.REL_DB.ReadHuntObject(hunt_id) |
def add(self, source, email=None, name=None, username=None, uuid=None,
        matching=None, interactive=False):
    """Add an identity to the registry.

    This method adds a new identity to the registry. By default, a new
    unique identity will also be added and associated to the new identity.
    When <uuid> parameter is set, it only creates a new identity that will be
    associated to a unique identity defined by <uuid>.

    The method will print the uuids associated to the new registered identity.

    Optionally, this method can look for possible identities that match with
    the new one to insert. If a match is found, that means both identities are
    likely the same. Therefore, both identities would be merged into one. The
    algorithm used to search for matches will be defined by <matching> parameter.
    Please take into account that both unique identities will be always merged
    into the one from the registry, not into the new one.

    When <interactive> parameter is set to True, the user will have to confirm
    whether these two identities should be merged into one. By default, the method
    is set to False.

    :param source: data source
    :param email: email of the identity
    :param name: full name of the identity
    :param username: user name used by the identity
    :param uuid: associates the new identity to the unique identity
        identified by this id
    :param matching: type of matching used to merge existing identities
    :param interactive: interactive mode for merging identities, only available
        when <matching> parameter is set
    :returns: CMD_SUCCESS on success, otherwise the error code of the
        exception that aborted the operation
    """
    matcher = None
    if matching:
        try:
            # Blacklisted entries must never take part in matching.
            blacklist = api.blacklist(self.db)
            matcher = create_identity_matcher(matching, blacklist)
        except MatcherNotSupportedError as e:
            # Unknown matching algorithm: report and abort with its code.
            self.error(str(e))
            return e.code
    try:
        new_uuid = api.add_identity(self.db, source, email, name, username, uuid)
        # If no unique identity was given, the new identity's id doubles
        # as the unique identity id it was auto-associated with.
        uuid = uuid or new_uuid
        self.display('add.tmpl', id=new_uuid, uuid=uuid)
        if matcher:
            # Merge any matching unique identities into the registry one.
            self.__merge_on_matching(uuid, matcher, interactive)
    except AlreadyExistsError as e:
        # The exact same identity (same source/email/name/username) exists.
        msg = "unique identity '%s' already exists in the registry" % e.eid
        self.error(msg)
        return e.code
    except (NotFoundError, InvalidValueError) as e:
        self.error(str(e))
        return e.code
    return CMD_SUCCESS
constant[Add an identity to the registry.
This method adds a new identity to the registry. By default, a new
unique identity will be also added an associated to the new identity.
When <uuid> parameter is set, it only creates a new identity that will be
associated to a unique identity defined by <uuid>.
The method will print the uuids associated to the new registered identity.
Optionally, this method can look for possible identities that match with
the new one to insert. If a match is found, that means both identities are
likely the same. Therefore, both identities would be merged into one. The
algorithm used to search for matches will be defined by <matching> parameter.
Please take into account that both unique identities will be always merged
into the one from the registry, not into the new one.
When <interactive> parameter is set to True, the user will have to confirm
whether these to identities should be merged into one. By default, the method
is set to False.
:param source: data source
:param email: email of the identity
:param name: full name of the identity
:param username: user name used by the identity
:param uuid: associates the new identity to the unique identity
identified by this id
:param matching: type of matching used to merge existing identities
:param interactive: interactive mode for merging identities, only available
when <matching> parameter is set
]
variable[matcher] assign[=] constant[None]
if name[matching] begin[:]
<ast.Try object at 0x7da1b0ef9330>
<ast.Try object at 0x7da1b0efa0b0>
return[name[CMD_SUCCESS]] | keyword[def] identifier[add] ( identifier[self] , identifier[source] , identifier[email] = keyword[None] , identifier[name] = keyword[None] , identifier[username] = keyword[None] , identifier[uuid] = keyword[None] ,
identifier[matching] = keyword[None] , identifier[interactive] = keyword[False] ):
literal[string]
identifier[matcher] = keyword[None]
keyword[if] identifier[matching] :
keyword[try] :
identifier[blacklist] = identifier[api] . identifier[blacklist] ( identifier[self] . identifier[db] )
identifier[matcher] = identifier[create_identity_matcher] ( identifier[matching] , identifier[blacklist] )
keyword[except] identifier[MatcherNotSupportedError] keyword[as] identifier[e] :
identifier[self] . identifier[error] ( identifier[str] ( identifier[e] ))
keyword[return] identifier[e] . identifier[code]
keyword[try] :
identifier[new_uuid] = identifier[api] . identifier[add_identity] ( identifier[self] . identifier[db] , identifier[source] , identifier[email] , identifier[name] , identifier[username] , identifier[uuid] )
identifier[uuid] = identifier[uuid] keyword[or] identifier[new_uuid]
identifier[self] . identifier[display] ( literal[string] , identifier[id] = identifier[new_uuid] , identifier[uuid] = identifier[uuid] )
keyword[if] identifier[matcher] :
identifier[self] . identifier[__merge_on_matching] ( identifier[uuid] , identifier[matcher] , identifier[interactive] )
keyword[except] identifier[AlreadyExistsError] keyword[as] identifier[e] :
identifier[msg] = literal[string] % identifier[e] . identifier[eid]
identifier[self] . identifier[error] ( identifier[msg] )
keyword[return] identifier[e] . identifier[code]
keyword[except] ( identifier[NotFoundError] , identifier[InvalidValueError] ) keyword[as] identifier[e] :
identifier[self] . identifier[error] ( identifier[str] ( identifier[e] ))
keyword[return] identifier[e] . identifier[code]
keyword[return] identifier[CMD_SUCCESS] | def add(self, source, email=None, name=None, username=None, uuid=None, matching=None, interactive=False):
"""Add an identity to the registry.
This method adds a new identity to the registry. By default, a new
unique identity will be also added an associated to the new identity.
When <uuid> parameter is set, it only creates a new identity that will be
associated to a unique identity defined by <uuid>.
The method will print the uuids associated to the new registered identity.
Optionally, this method can look for possible identities that match with
the new one to insert. If a match is found, that means both identities are
likely the same. Therefore, both identities would be merged into one. The
algorithm used to search for matches will be defined by <matching> parameter.
Please take into account that both unique identities will be always merged
into the one from the registry, not into the new one.
When <interactive> parameter is set to True, the user will have to confirm
whether these to identities should be merged into one. By default, the method
is set to False.
:param source: data source
:param email: email of the identity
:param name: full name of the identity
:param username: user name used by the identity
:param uuid: associates the new identity to the unique identity
identified by this id
:param matching: type of matching used to merge existing identities
:param interactive: interactive mode for merging identities, only available
when <matching> parameter is set
"""
matcher = None
if matching:
try:
blacklist = api.blacklist(self.db)
matcher = create_identity_matcher(matching, blacklist) # depends on [control=['try'], data=[]]
except MatcherNotSupportedError as e:
self.error(str(e))
return e.code # depends on [control=['except'], data=['e']] # depends on [control=['if'], data=[]]
try:
new_uuid = api.add_identity(self.db, source, email, name, username, uuid)
uuid = uuid or new_uuid
self.display('add.tmpl', id=new_uuid, uuid=uuid)
if matcher:
self.__merge_on_matching(uuid, matcher, interactive) # depends on [control=['if'], data=[]] # depends on [control=['try'], data=[]]
except AlreadyExistsError as e:
msg = "unique identity '%s' already exists in the registry" % e.eid
self.error(msg)
return e.code # depends on [control=['except'], data=['e']]
except (NotFoundError, InvalidValueError) as e:
self.error(str(e))
return e.code # depends on [control=['except'], data=['e']]
return CMD_SUCCESS |
def from_config(cls, cp, ifo, section):
    """Read a config file to get calibration options and transfer
    functions which will be used to initialize the model.

    Parameters
    ----------
    cp : WorkflowConfigParser
        An open config file.
    ifo : string
        The detector (H1, L1) for which the calibration model will
        be loaded.
    section : string
        The section name in the config file from which to retrieve
        the calibration options.

    Return
    ------
    instance
        An instance of the Recalibrate class.
    """
    # Load the four base transfer functions in a fixed order:
    # a-tst, a-pu, c, d.
    tfs = []
    for component in ("a-tst", "a-pu", "c", "d"):
        tag = "-".join([ifo, "transfer-function", component])
        tf_path = cp.get_opt_tag(section, tag, None)
        tfs.append(cls.tf_from_file(tf_path))
    tf_a_tst, tf_a_pu, tf_c, tf_d = tfs

    # Column 0 is the frequency grid, column 1 the transfer-function values.
    # NOTE(review): the frequency grid is taken from the a-tst file only --
    # this assumes all four files share the same grid; confirm upstream.
    freq = tf_a_tst[:, 0]
    a_tst0 = tf_a_tst[:, 1]
    a_pu0 = tf_a_pu[:, 1]
    c0 = tf_c[:, 1]
    d0 = tf_d[:, 1]

    # Optional upper-stage actuation is folded into a_pu0 when present.
    uim_tag = "-".join([ifo, "transfer-function-a-uim"])
    if cp.has_option(section, uim_tag):
        uim_path = cp.get_opt_tag(section, uim_tag, None)
        a_pu0 = a_pu0 + cls.tf_from_file(uim_path)[:, 1]

    # Remaining scalar calibration parameters, read straight from config.
    fc0 = cp.get_opt_tag(section, "-".join([ifo, "fc0"]), None)
    fs0 = cp.get_opt_tag(section, "-".join([ifo, "fs0"]), None)
    qinv0 = cp.get_opt_tag(section, "-".join([ifo, "qinv0"]), None)

    return cls(freq=freq, fc0=fc0, c0=c0, d0=d0, a_tst0=a_tst0,
               a_pu0=a_pu0, fs0=fs0, qinv0=qinv0)
constant[Read a config file to get calibration options and transfer
functions which will be used to intialize the model.
Parameters
----------
cp : WorkflowConfigParser
An open config file.
ifo : string
The detector (H1, L1) for which the calibration model will
be loaded.
section : string
The section name in the config file from which to retrieve
the calibration options.
Return
------
instance
An instance of the Recalibrate class.
]
variable[tfs] assign[=] list[[]]
variable[tf_names] assign[=] list[[<ast.Constant object at 0x7da2044c1d20>, <ast.Constant object at 0x7da2044c0400>, <ast.Constant object at 0x7da2044c1ab0>, <ast.Constant object at 0x7da2044c1330>]]
for taget[name[tag]] in starred[<ast.ListComp object at 0x7da2044c2fb0>] begin[:]
variable[tf_path] assign[=] call[name[cp].get_opt_tag, parameter[name[section], name[tag], constant[None]]]
call[name[tfs].append, parameter[call[name[cls].tf_from_file, parameter[name[tf_path]]]]]
variable[a_tst0] assign[=] call[call[name[tfs]][constant[0]]][tuple[[<ast.Slice object at 0x7da2044c2260>, <ast.Constant object at 0x7da2044c35b0>]]]
variable[a_pu0] assign[=] call[call[name[tfs]][constant[1]]][tuple[[<ast.Slice object at 0x7da2044c3e50>, <ast.Constant object at 0x7da2044c3340>]]]
variable[c0] assign[=] call[call[name[tfs]][constant[2]]][tuple[[<ast.Slice object at 0x7da2044c13f0>, <ast.Constant object at 0x7da2044c2200>]]]
variable[d0] assign[=] call[call[name[tfs]][constant[3]]][tuple[[<ast.Slice object at 0x7da2044c3640>, <ast.Constant object at 0x7da2044c3a00>]]]
variable[freq] assign[=] call[call[name[tfs]][constant[0]]][tuple[[<ast.Slice object at 0x7da2044c07f0>, <ast.Constant object at 0x7da2044c3df0>]]]
variable[uim_tag] assign[=] call[constant[-].join, parameter[list[[<ast.Name object at 0x7da20c6a9600>, <ast.Constant object at 0x7da20c6a9840>]]]]
if call[name[cp].has_option, parameter[name[section], name[uim_tag]]] begin[:]
variable[tf_path] assign[=] call[name[cp].get_opt_tag, parameter[name[section], name[uim_tag], constant[None]]]
<ast.AugAssign object at 0x7da20c6a8190>
variable[fc0] assign[=] call[name[cp].get_opt_tag, parameter[name[section], call[constant[-].join, parameter[list[[<ast.Name object at 0x7da20c6aac20>, <ast.Constant object at 0x7da20c6a8340>]]]], constant[None]]]
variable[fs0] assign[=] call[name[cp].get_opt_tag, parameter[name[section], call[constant[-].join, parameter[list[[<ast.Name object at 0x7da20c6a99f0>, <ast.Constant object at 0x7da20c6a8d00>]]]], constant[None]]]
variable[qinv0] assign[=] call[name[cp].get_opt_tag, parameter[name[section], call[constant[-].join, parameter[list[[<ast.Name object at 0x7da2041d9ae0>, <ast.Constant object at 0x7da2041dbdc0>]]]], constant[None]]]
return[call[name[cls], parameter[]]] | keyword[def] identifier[from_config] ( identifier[cls] , identifier[cp] , identifier[ifo] , identifier[section] ):
literal[string]
identifier[tfs] =[]
identifier[tf_names] =[ literal[string] , literal[string] , literal[string] , literal[string] ]
keyword[for] identifier[tag] keyword[in] [ literal[string] . identifier[join] ([ identifier[ifo] , literal[string] , identifier[name] ])
keyword[for] identifier[name] keyword[in] identifier[tf_names] ]:
identifier[tf_path] = identifier[cp] . identifier[get_opt_tag] ( identifier[section] , identifier[tag] , keyword[None] )
identifier[tfs] . identifier[append] ( identifier[cls] . identifier[tf_from_file] ( identifier[tf_path] ))
identifier[a_tst0] = identifier[tfs] [ literal[int] ][:, literal[int] ]
identifier[a_pu0] = identifier[tfs] [ literal[int] ][:, literal[int] ]
identifier[c0] = identifier[tfs] [ literal[int] ][:, literal[int] ]
identifier[d0] = identifier[tfs] [ literal[int] ][:, literal[int] ]
identifier[freq] = identifier[tfs] [ literal[int] ][:, literal[int] ]
identifier[uim_tag] = literal[string] . identifier[join] ([ identifier[ifo] , literal[string] ])
keyword[if] identifier[cp] . identifier[has_option] ( identifier[section] , identifier[uim_tag] ):
identifier[tf_path] = identifier[cp] . identifier[get_opt_tag] ( identifier[section] , identifier[uim_tag] , keyword[None] )
identifier[a_pu0] += identifier[cls] . identifier[tf_from_file] ( identifier[tf_path] )[:, literal[int] ]
identifier[fc0] = identifier[cp] . identifier[get_opt_tag] ( identifier[section] , literal[string] . identifier[join] ([ identifier[ifo] , literal[string] ]), keyword[None] )
identifier[fs0] = identifier[cp] . identifier[get_opt_tag] ( identifier[section] , literal[string] . identifier[join] ([ identifier[ifo] , literal[string] ]), keyword[None] )
identifier[qinv0] = identifier[cp] . identifier[get_opt_tag] ( identifier[section] , literal[string] . identifier[join] ([ identifier[ifo] , literal[string] ]), keyword[None] )
keyword[return] identifier[cls] ( identifier[freq] = identifier[freq] , identifier[fc0] = identifier[fc0] , identifier[c0] = identifier[c0] , identifier[d0] = identifier[d0] , identifier[a_tst0] = identifier[a_tst0] ,
identifier[a_pu0] = identifier[a_pu0] , identifier[fs0] = identifier[fs0] , identifier[qinv0] = identifier[qinv0] ) | def from_config(cls, cp, ifo, section):
"""Read a config file to get calibration options and transfer
functions which will be used to intialize the model.
Parameters
----------
cp : WorkflowConfigParser
An open config file.
ifo : string
The detector (H1, L1) for which the calibration model will
be loaded.
section : string
The section name in the config file from which to retrieve
the calibration options.
Return
------
instance
An instance of the Recalibrate class.
"""
# read transfer functions
tfs = []
tf_names = ['a-tst', 'a-pu', 'c', 'd']
for tag in ['-'.join([ifo, 'transfer-function', name]) for name in tf_names]:
tf_path = cp.get_opt_tag(section, tag, None)
tfs.append(cls.tf_from_file(tf_path)) # depends on [control=['for'], data=['tag']]
a_tst0 = tfs[0][:, 1]
a_pu0 = tfs[1][:, 1]
c0 = tfs[2][:, 1]
d0 = tfs[3][:, 1]
freq = tfs[0][:, 0]
# if upper stage actuation is included, read that in and add it
# to a_pu0
uim_tag = '-'.join([ifo, 'transfer-function-a-uim'])
if cp.has_option(section, uim_tag):
tf_path = cp.get_opt_tag(section, uim_tag, None)
a_pu0 += cls.tf_from_file(tf_path)[:, 1] # depends on [control=['if'], data=[]]
# read fc0, fs0, and qinv0
fc0 = cp.get_opt_tag(section, '-'.join([ifo, 'fc0']), None)
fs0 = cp.get_opt_tag(section, '-'.join([ifo, 'fs0']), None)
qinv0 = cp.get_opt_tag(section, '-'.join([ifo, 'qinv0']), None)
return cls(freq=freq, fc0=fc0, c0=c0, d0=d0, a_tst0=a_tst0, a_pu0=a_pu0, fs0=fs0, qinv0=qinv0) |
def write(self, file_name):
    """
    Write the chapter object's content to an xhtml file, UTF-8 encoded.

    Args:
        file_name (str): The full name of the xhtml file to save to.
            Must end with ``.xhtml``.

    Raises:
        ValueError: If ``file_name`` does not end with ``.xhtml``.
    """
    # The original used ``assert`` for validation (a no-op under ``-O``)
    # and caught an IndexError that slicing ``[-6:]`` can never raise;
    # an explicit check keeps validation active in optimized runs.
    if not file_name.endswith('.xhtml'):
        raise ValueError('filename must end with .xhtml')
    with open(file_name, 'wb') as f:
        f.write(self.content.encode('utf-8'))
constant[
Writes the chapter object to an xhtml file.
Args:
file_name (str): The full name of the xhtml file to save to.
]
<ast.Try object at 0x7da20c6e4d90>
with call[name[open], parameter[name[file_name], constant[wb]]] begin[:]
call[name[f].write, parameter[call[name[self].content.encode, parameter[constant[utf-8]]]]] | keyword[def] identifier[write] ( identifier[self] , identifier[file_name] ):
literal[string]
keyword[try] :
keyword[assert] identifier[file_name] [- literal[int] :]== literal[string]
keyword[except] ( identifier[AssertionError] , identifier[IndexError] ):
keyword[raise] identifier[ValueError] ( literal[string] )
keyword[with] identifier[open] ( identifier[file_name] , literal[string] ) keyword[as] identifier[f] :
identifier[f] . identifier[write] ( identifier[self] . identifier[content] . identifier[encode] ( literal[string] )) | def write(self, file_name):
"""
Writes the chapter object to an xhtml file.
Args:
file_name (str): The full name of the xhtml file to save to.
"""
try:
assert file_name[-6:] == '.xhtml' # depends on [control=['try'], data=[]]
except (AssertionError, IndexError):
raise ValueError('filename must end with .xhtml') # depends on [control=['except'], data=[]]
with open(file_name, 'wb') as f:
f.write(self.content.encode('utf-8')) # depends on [control=['with'], data=['f']] |
def on(self, source: mx.sym.Symbol, source_length: mx.sym.Symbol, source_seq_len: int) -> Callable:
    """
    Returns callable to be used for recurrent attention in a sequence decoder.
    The callable is a recurrent function of the form:
    AttentionState = attend(AttentionInput, AttentionState).

    :param source: Shape: (batch_size, seq_len, encoder_num_hidden).
    :param source_length: Shape: (batch_size,).
    :param source_seq_len: Maximum length of source sequences.
    :return: Attention callable.
    """
    # NOTE(review): this base implementation is an abstract template -- the
    # returned callable always raises. Concrete attention mechanisms
    # presumably override ``on`` (closing over ``source``/``source_length``
    # in their own ``attend``) -- confirm against subclasses.
    def attend(att_input: AttentionInput, att_state: AttentionState) -> AttentionState:
        """
        Returns updated attention state given attention input and current attention state.

        :param att_input: Attention input as returned by make_input().
        :param att_state: Current attention state
        :return: Updated attention state.
        """
        raise NotImplementedError()
    return attend
constant[
Returns callable to be used for recurrent attention in a sequence decoder.
The callable is a recurrent function of the form:
AttentionState = attend(AttentionInput, AttentionState).
:param source: Shape: (batch_size, seq_len, encoder_num_hidden).
:param source_length: Shape: (batch_size,).
:param source_seq_len: Maximum length of source sequences.
:return: Attention callable.
]
def function[attend, parameter[att_input, att_state]]:
constant[
Returns updated attention state given attention input and current attention state.
:param att_input: Attention input as returned by make_input().
:param att_state: Current attention state
:return: Updated attention state.
]
<ast.Raise object at 0x7da1b1d37040>
return[name[attend]] | keyword[def] identifier[on] ( identifier[self] , identifier[source] : identifier[mx] . identifier[sym] . identifier[Symbol] , identifier[source_length] : identifier[mx] . identifier[sym] . identifier[Symbol] , identifier[source_seq_len] : identifier[int] )-> identifier[Callable] :
literal[string]
keyword[def] identifier[attend] ( identifier[att_input] : identifier[AttentionInput] , identifier[att_state] : identifier[AttentionState] )-> identifier[AttentionState] :
literal[string]
keyword[raise] identifier[NotImplementedError] ()
keyword[return] identifier[attend] | def on(self, source: mx.sym.Symbol, source_length: mx.sym.Symbol, source_seq_len: int) -> Callable:
"""
Returns callable to be used for recurrent attention in a sequence decoder.
The callable is a recurrent function of the form:
AttentionState = attend(AttentionInput, AttentionState).
:param source: Shape: (batch_size, seq_len, encoder_num_hidden).
:param source_length: Shape: (batch_size,).
:param source_seq_len: Maximum length of source sequences.
:return: Attention callable.
"""
def attend(att_input: AttentionInput, att_state: AttentionState) -> AttentionState:
"""
Returns updated attention state given attention input and current attention state.
:param att_input: Attention input as returned by make_input().
:param att_state: Current attention state
:return: Updated attention state.
"""
raise NotImplementedError()
return attend |
def target_location(self, roi):
    """
    Point the gimbal at a specific region of interest (ROI).

    .. code-block:: python

        #Set the camera to track the current home location.
        vehicle.gimbal.target_location(vehicle.home_location)

    The target position must be defined in a :py:class:`LocationGlobalRelative`
    or :py:class:`LocationGlobal`.

    This function can be called in AUTO or GUIDED mode.

    In order to clear an ROI you can send a location with all zeros
    (e.g. ``LocationGlobalRelative(0,0,0)``).

    :param roi: Target location in global relative frame.
    :raises ValueError: if ``roi`` is neither LocationGlobal nor
        LocationGlobalRelative.
    """
    # Put the mount into GPS-point (targeting) mode before sending the ROI.
    msg = self._vehicle.message_factory.mount_configure_encode(
        0, 1,  # target system, target component
        mavutil.mavlink.MAV_MOUNT_MODE_GPS_POINT,  # mount_mode
        1,  # stabilize roll
        1,  # stabilize pitch
        1,  # stabilize yaw
    )
    self._vehicle.send_mavlink(msg)

    # Get altitude relative to home irrespective of Location object passed in.
    if isinstance(roi, LocationGlobalRelative):
        alt = roi.alt
    elif isinstance(roi, LocationGlobal):
        # BUGFIX: home_location and commands are attributes of the vehicle,
        # not of this gimbal object (which only holds self._vehicle); the
        # previous code raised AttributeError on this branch.
        if not self._vehicle.home_location:
            self._vehicle.commands.download()
            self._vehicle.commands.wait_ready()
        alt = roi.alt - self._vehicle.home_location.alt
    else:
        raise ValueError('Expecting location to be LocationGlobal or LocationGlobalRelative.')

    # set the ROI
    msg = self._vehicle.message_factory.command_long_encode(
        0, 1,  # target system, target component
        mavutil.mavlink.MAV_CMD_DO_SET_ROI,  # command
        0,  # confirmation
        0, 0, 0, 0,  # params 1-4
        roi.lat,
        roi.lon,
        alt
    )
    self._vehicle.send_mavlink(msg)
constant[
Point the gimbal at a specific region of interest (ROI).
.. code-block:: python
#Set the camera to track the current home location.
vehicle.gimbal.target_location(vehicle.home_location)
The target position must be defined in a :py:class:`LocationGlobalRelative` or :py:class:`LocationGlobal`.
This function can be called in AUTO or GUIDED mode.
In order to clear an ROI you can send a location with all zeros (e.g. ``LocationGlobalRelative(0,0,0)``).
:param roi: Target location in global relative frame.
]
variable[msg] assign[=] call[name[self]._vehicle.message_factory.mount_configure_encode, parameter[constant[0], constant[1], name[mavutil].mavlink.MAV_MOUNT_MODE_GPS_POINT, constant[1], constant[1], constant[1]]]
call[name[self]._vehicle.send_mavlink, parameter[name[msg]]]
if call[name[isinstance], parameter[name[roi], name[LocationGlobalRelative]]] begin[:]
variable[alt] assign[=] name[roi].alt
variable[msg] assign[=] call[name[self]._vehicle.message_factory.command_long_encode, parameter[constant[0], constant[1], name[mavutil].mavlink.MAV_CMD_DO_SET_ROI, constant[0], constant[0], constant[0], constant[0], constant[0], name[roi].lat, name[roi].lon, name[alt]]]
call[name[self]._vehicle.send_mavlink, parameter[name[msg]]] | keyword[def] identifier[target_location] ( identifier[self] , identifier[roi] ):
literal[string]
identifier[msg] = identifier[self] . identifier[_vehicle] . identifier[message_factory] . identifier[mount_configure_encode] (
literal[int] , literal[int] ,
identifier[mavutil] . identifier[mavlink] . identifier[MAV_MOUNT_MODE_GPS_POINT] ,
literal[int] ,
literal[int] ,
literal[int] ,
)
identifier[self] . identifier[_vehicle] . identifier[send_mavlink] ( identifier[msg] )
keyword[if] identifier[isinstance] ( identifier[roi] , identifier[LocationGlobalRelative] ):
identifier[alt] = identifier[roi] . identifier[alt]
keyword[elif] identifier[isinstance] ( identifier[roi] , identifier[LocationGlobal] ):
keyword[if] keyword[not] identifier[self] . identifier[home_location] :
identifier[self] . identifier[commands] . identifier[download] ()
identifier[self] . identifier[commands] . identifier[wait_ready] ()
identifier[alt] = identifier[roi] . identifier[alt] - identifier[self] . identifier[home_location] . identifier[alt]
keyword[else] :
keyword[raise] identifier[ValueError] ( literal[string] )
identifier[msg] = identifier[self] . identifier[_vehicle] . identifier[message_factory] . identifier[command_long_encode] (
literal[int] , literal[int] ,
identifier[mavutil] . identifier[mavlink] . identifier[MAV_CMD_DO_SET_ROI] ,
literal[int] ,
literal[int] , literal[int] , literal[int] , literal[int] ,
identifier[roi] . identifier[lat] ,
identifier[roi] . identifier[lon] ,
identifier[alt]
)
identifier[self] . identifier[_vehicle] . identifier[send_mavlink] ( identifier[msg] ) | def target_location(self, roi):
"""
Point the gimbal at a specific region of interest (ROI).
.. code-block:: python
#Set the camera to track the current home location.
vehicle.gimbal.target_location(vehicle.home_location)
The target position must be defined in a :py:class:`LocationGlobalRelative` or :py:class:`LocationGlobal`.
This function can be called in AUTO or GUIDED mode.
In order to clear an ROI you can send a location with all zeros (e.g. ``LocationGlobalRelative(0,0,0)``).
:param roi: Target location in global relative frame.
"""
# set gimbal to targeting mode
# target system, target component
# mount_mode
# stabilize roll
# stabilize pitch
# stabilize yaw
msg = self._vehicle.message_factory.mount_configure_encode(0, 1, mavutil.mavlink.MAV_MOUNT_MODE_GPS_POINT, 1, 1, 1)
self._vehicle.send_mavlink(msg)
# Get altitude relative to home irrespective of Location object passed in.
if isinstance(roi, LocationGlobalRelative):
alt = roi.alt # depends on [control=['if'], data=[]]
elif isinstance(roi, LocationGlobal):
if not self.home_location:
self.commands.download()
self.commands.wait_ready() # depends on [control=['if'], data=[]]
alt = roi.alt - self.home_location.alt # depends on [control=['if'], data=[]]
else:
raise ValueError('Expecting location to be LocationGlobal or LocationGlobalRelative.')
# set the ROI
# target system, target component
# command
# confirmation
# params 1-4
msg = self._vehicle.message_factory.command_long_encode(0, 1, mavutil.mavlink.MAV_CMD_DO_SET_ROI, 0, 0, 0, 0, 0, roi.lat, roi.lon, alt)
self._vehicle.send_mavlink(msg) |
def linkify_one_command_with_commands(self, commands, prop):
    """
    Replace the raw command string stored in ``prop`` on each item with a
    command call object (``check_command`` for example).

    :param commands: commands object
    :type commands: alignak.objects.command.Commands
    :param prop: property name
    :type prop: str
    :return: None
    """
    for item in self:
        raw_command = getattr(item, prop, '').strip()
        if not raw_command:
            # No defined command
            setattr(item, prop, None)
            continue
        setattr(item, prop, self.create_commandcall(item, commands, raw_command))
constant[
Link a command to a property (check_command for example)
:param commands: commands object
:type commands: alignak.objects.command.Commands
:param prop: property name
:type prop: str
:param default: default command to use if the property is not defined
:type default: str
:return: None
]
for taget[name[i]] in starred[name[self]] begin[:]
variable[command] assign[=] call[call[name[getattr], parameter[name[i], name[prop], constant[]]].strip, parameter[]]
if name[command] begin[:]
call[name[setattr], parameter[name[i], name[prop], call[name[self].create_commandcall, parameter[name[i], name[commands], name[command]]]]] | keyword[def] identifier[linkify_one_command_with_commands] ( identifier[self] , identifier[commands] , identifier[prop] ):
literal[string]
keyword[for] identifier[i] keyword[in] identifier[self] :
identifier[command] = identifier[getattr] ( identifier[i] , identifier[prop] , literal[string] ). identifier[strip] ()
keyword[if] identifier[command] :
identifier[setattr] ( identifier[i] , identifier[prop] , identifier[self] . identifier[create_commandcall] ( identifier[i] , identifier[commands] , identifier[command] ))
keyword[else] :
identifier[setattr] ( identifier[i] , identifier[prop] , keyword[None] ) | def linkify_one_command_with_commands(self, commands, prop):
"""
Link a command to a property (check_command for example)
:param commands: commands object
:type commands: alignak.objects.command.Commands
:param prop: property name
:type prop: str
:param default: default command to use if the property is not defined
:type default: str
:return: None
"""
for i in self:
command = getattr(i, prop, '').strip()
if command:
setattr(i, prop, self.create_commandcall(i, commands, command)) # depends on [control=['if'], data=[]]
else:
# No defined command
setattr(i, prop, None) # depends on [control=['for'], data=['i']] |
def _readable(self, watcher, events):
    """pyev callback fired when the listening socket becomes readable.

    Readability means either a new client connection is waiting to be
    accepted, or the socket has failed; on an IOError the whole server
    is shut down with the error.
    """
    protocol = self.factory.build(self.loop)
    try:
        client_sock, client_addr = self.sock.accept()
        conn = Connection(self.loop, client_sock, client_addr, protocol, self)
        self.connections.add(conn)
        conn.make_connection()
        logger.debug("added connection")
    except IOError as err:
        self.shutdown(err)
constant[Called by the pyev watcher (self.read_watcher) whenever the socket
is readable.
This means either the socket has been closed or there is a new
client connection waiting.
]
variable[protocol] assign[=] call[name[self].factory.build, parameter[name[self].loop]]
<ast.Try object at 0x7da1b09126e0> | keyword[def] identifier[_readable] ( identifier[self] , identifier[watcher] , identifier[events] ):
literal[string]
identifier[protocol] = identifier[self] . identifier[factory] . identifier[build] ( identifier[self] . identifier[loop] )
keyword[try] :
identifier[sock] , identifier[address] = identifier[self] . identifier[sock] . identifier[accept] ()
identifier[connection] = identifier[Connection] ( identifier[self] . identifier[loop] , identifier[sock] , identifier[address] , identifier[protocol] , identifier[self] )
identifier[self] . identifier[connections] . identifier[add] ( identifier[connection] )
identifier[connection] . identifier[make_connection] ()
identifier[logger] . identifier[debug] ( literal[string] )
keyword[except] identifier[IOError] keyword[as] identifier[e] :
identifier[self] . identifier[shutdown] ( identifier[e] ) | def _readable(self, watcher, events):
"""Called by the pyev watcher (self.read_watcher) whenever the socket
is readable.
This means either the socket has been closed or there is a new
client connection waiting.
"""
protocol = self.factory.build(self.loop)
try:
(sock, address) = self.sock.accept()
connection = Connection(self.loop, sock, address, protocol, self)
self.connections.add(connection)
connection.make_connection()
logger.debug('added connection') # depends on [control=['try'], data=[]]
except IOError as e:
self.shutdown(e) # depends on [control=['except'], data=['e']] |
def get_nn(self, structure, n):
    """
    Return the near-neighbor sites of the site with index ``n``.

    Args:
        structure (Structure): input structure.
        n (integer): index of the site in structure whose neighbors are
            determined.

    Returns:
        list of Site objects: the near neighbors.
    """
    neighbors = []
    for entry in self.get_nn_info(structure, n):
        neighbors.append(entry['site'])
    return neighbors
constant[
Get near neighbors of site with index n in structure.
Args:
structure (Structure): input structure.
n (integer): index of site in structure for which to determine
neighbors.
Returns:
sites (list of Site objects): near neighbors.
]
return[<ast.ListComp object at 0x7da20ec049a0>] | keyword[def] identifier[get_nn] ( identifier[self] , identifier[structure] , identifier[n] ):
literal[string]
keyword[return] [ identifier[e] [ literal[string] ] keyword[for] identifier[e] keyword[in] identifier[self] . identifier[get_nn_info] ( identifier[structure] , identifier[n] )] | def get_nn(self, structure, n):
"""
Get near neighbors of site with index n in structure.
Args:
structure (Structure): input structure.
n (integer): index of site in structure for which to determine
neighbors.
Returns:
sites (list of Site objects): near neighbors.
"""
return [e['site'] for e in self.get_nn_info(structure, n)] |
def scan(self, table_name, scan_filter=None,
         attributes_to_get=None, limit=None,
         count=False, exclusive_start_key=None,
         object_hook=None):
    """
    Perform a scan of DynamoDB.  This version is currently punting
    and expecting you to provide a full and correct JSON body
    which is passed as is to DynamoDB.

    :type table_name: str
    :param table_name: The name of the table to scan.

    :type scan_filter: dict
    :param scan_filter: A Python version of the ScanFilter data structure.

    :type attributes_to_get: list
    :param attributes_to_get: A list of attribute names.  If supplied,
        only the specified attribute names will be returned.  Otherwise,
        all attributes will be returned.

    :type limit: int
    :param limit: The maximum number of items to return.

    :type count: bool
    :param count: If True, Amazon DynamoDB returns a total number of
        items for the Scan operation, even if the operation has no
        matching items for the assigned filter.

    :type exclusive_start_key: list or tuple
    :param exclusive_start_key: Primary key of the item from which to
        continue an earlier query.  This would be provided as the
        LastEvaluatedKey in that query.
    """
    data = {'TableName': table_name}
    # Optional request fields, in the order the service payload expects
    # them to be serialized; falsy values are simply omitted.
    for field, value in (('ScanFilter', scan_filter),
                         ('AttributesToGet', attributes_to_get),
                         ('Limit', limit)):
        if value:
            data[field] = value
    if count:
        data['Count'] = True
    if exclusive_start_key:
        data['ExclusiveStartKey'] = exclusive_start_key
    return self.make_request('Scan', json.dumps(data),
                             object_hook=object_hook)
constant[
Perform a scan of DynamoDB. This version is currently punting
and expecting you to provide a full and correct JSON body
which is passed as is to DynamoDB.
:type table_name: str
:param table_name: The name of the table to scan.
:type scan_filter: dict
:param scan_filter: A Python version of the
ScanFilter data structure.
:type attributes_to_get: list
:param attributes_to_get: A list of attribute names.
If supplied, only the specified attribute names will
be returned. Otherwise, all attributes will be returned.
:type limit: int
:param limit: The maximum number of items to return.
:type count: bool
:param count: If True, Amazon DynamoDB returns a total
number of items for the Scan operation, even if the
operation has no matching items for the assigned filter.
:type exclusive_start_key: list or tuple
:param exclusive_start_key: Primary key of the item from
which to continue an earlier query. This would be
provided as the LastEvaluatedKey in that query.
]
variable[data] assign[=] dictionary[[<ast.Constant object at 0x7da1b2617580>], [<ast.Name object at 0x7da1b2614610>]]
if name[scan_filter] begin[:]
call[name[data]][constant[ScanFilter]] assign[=] name[scan_filter]
if name[attributes_to_get] begin[:]
call[name[data]][constant[AttributesToGet]] assign[=] name[attributes_to_get]
if name[limit] begin[:]
call[name[data]][constant[Limit]] assign[=] name[limit]
if name[count] begin[:]
call[name[data]][constant[Count]] assign[=] constant[True]
if name[exclusive_start_key] begin[:]
call[name[data]][constant[ExclusiveStartKey]] assign[=] name[exclusive_start_key]
variable[json_input] assign[=] call[name[json].dumps, parameter[name[data]]]
return[call[name[self].make_request, parameter[constant[Scan], name[json_input]]]] | keyword[def] identifier[scan] ( identifier[self] , identifier[table_name] , identifier[scan_filter] = keyword[None] ,
identifier[attributes_to_get] = keyword[None] , identifier[limit] = keyword[None] ,
identifier[count] = keyword[False] , identifier[exclusive_start_key] = keyword[None] ,
identifier[object_hook] = keyword[None] ):
literal[string]
identifier[data] ={ literal[string] : identifier[table_name] }
keyword[if] identifier[scan_filter] :
identifier[data] [ literal[string] ]= identifier[scan_filter]
keyword[if] identifier[attributes_to_get] :
identifier[data] [ literal[string] ]= identifier[attributes_to_get]
keyword[if] identifier[limit] :
identifier[data] [ literal[string] ]= identifier[limit]
keyword[if] identifier[count] :
identifier[data] [ literal[string] ]= keyword[True]
keyword[if] identifier[exclusive_start_key] :
identifier[data] [ literal[string] ]= identifier[exclusive_start_key]
identifier[json_input] = identifier[json] . identifier[dumps] ( identifier[data] )
keyword[return] identifier[self] . identifier[make_request] ( literal[string] , identifier[json_input] , identifier[object_hook] = identifier[object_hook] ) | def scan(self, table_name, scan_filter=None, attributes_to_get=None, limit=None, count=False, exclusive_start_key=None, object_hook=None):
"""
Perform a scan of DynamoDB. This version is currently punting
and expecting you to provide a full and correct JSON body
which is passed as is to DynamoDB.
:type table_name: str
:param table_name: The name of the table to scan.
:type scan_filter: dict
:param scan_filter: A Python version of the
ScanFilter data structure.
:type attributes_to_get: list
:param attributes_to_get: A list of attribute names.
If supplied, only the specified attribute names will
be returned. Otherwise, all attributes will be returned.
:type limit: int
:param limit: The maximum number of items to return.
:type count: bool
:param count: If True, Amazon DynamoDB returns a total
number of items for the Scan operation, even if the
operation has no matching items for the assigned filter.
:type exclusive_start_key: list or tuple
:param exclusive_start_key: Primary key of the item from
which to continue an earlier query. This would be
provided as the LastEvaluatedKey in that query.
"""
data = {'TableName': table_name}
if scan_filter:
data['ScanFilter'] = scan_filter # depends on [control=['if'], data=[]]
if attributes_to_get:
data['AttributesToGet'] = attributes_to_get # depends on [control=['if'], data=[]]
if limit:
data['Limit'] = limit # depends on [control=['if'], data=[]]
if count:
data['Count'] = True # depends on [control=['if'], data=[]]
if exclusive_start_key:
data['ExclusiveStartKey'] = exclusive_start_key # depends on [control=['if'], data=[]]
json_input = json.dumps(data)
return self.make_request('Scan', json_input, object_hook=object_hook) |
def count(self):
    """ Returns the number of rows matched by this query """
    if self._batch:
        # Counting is a read; reads are not allowed inside a batch.
        raise CQLEngineException("Only inserts, updates, and deletes are available in batch mode")
    if self._result_cache is not None:
        # Results already fetched: answer locally without another query.
        return len(self._result_cache)
    query = self._select_query()
    query.count = True
    return self._execute(query)[0]['count']
constant[ Returns the number of rows matched by this query ]
if name[self]._batch begin[:]
<ast.Raise object at 0x7da20c7cab90>
if compare[name[self]._result_cache is constant[None]] begin[:]
variable[query] assign[=] call[name[self]._select_query, parameter[]]
name[query].count assign[=] constant[True]
variable[result] assign[=] call[name[self]._execute, parameter[name[query]]]
return[call[call[name[result]][constant[0]]][constant[count]]] | keyword[def] identifier[count] ( identifier[self] ):
literal[string]
keyword[if] identifier[self] . identifier[_batch] :
keyword[raise] identifier[CQLEngineException] ( literal[string] )
keyword[if] identifier[self] . identifier[_result_cache] keyword[is] keyword[None] :
identifier[query] = identifier[self] . identifier[_select_query] ()
identifier[query] . identifier[count] = keyword[True]
identifier[result] = identifier[self] . identifier[_execute] ( identifier[query] )
keyword[return] identifier[result] [ literal[int] ][ literal[string] ]
keyword[else] :
keyword[return] identifier[len] ( identifier[self] . identifier[_result_cache] ) | def count(self):
""" Returns the number of rows matched by this query """
if self._batch:
raise CQLEngineException('Only inserts, updates, and deletes are available in batch mode') # depends on [control=['if'], data=[]]
if self._result_cache is None:
query = self._select_query()
query.count = True
result = self._execute(query)
return result[0]['count'] # depends on [control=['if'], data=[]]
else:
return len(self._result_cache) |
def get_keys_for(self, value, include_uncommitted=False):
    """Get keys for a given value.

    :param value: The value to look for
    :type value: object
    :param include_uncommitted: Include uncommitted values in results
    :type include_uncommitted: bool
    :return: The keys for the given value
    :rtype: list(str)
    """
    keys = super(TransactionalIndex, self).get_keys_for(value)
    if include_uncommitted:
        # Also include keys added in the current, not-yet-committed
        # transaction, looked up by the value's hash.
        keys += self._reverse_add_cache[self.get_hash_for(value)]
    return keys
constant[Get keys for a given value.
:param value: The value to look for
:type value: object
:param include_uncommitted: Include uncommitted values in results
:type include_uncommitted: bool
:return: The keys for the given value
:rtype: list(str)
]
if <ast.UnaryOp object at 0x7da1b18e4b50> begin[:]
return[call[call[name[super], parameter[name[TransactionalIndex], name[self]]].get_keys_for, parameter[name[value]]]] | keyword[def] identifier[get_keys_for] ( identifier[self] , identifier[value] , identifier[include_uncommitted] = keyword[False] ):
literal[string]
keyword[if] keyword[not] identifier[include_uncommitted] :
keyword[return] identifier[super] ( identifier[TransactionalIndex] , identifier[self] ). identifier[get_keys_for] ( identifier[value] )
keyword[else] :
identifier[keys] = identifier[super] ( identifier[TransactionalIndex] , identifier[self] ). identifier[get_keys_for] ( identifier[value] )
identifier[hash_value] = identifier[self] . identifier[get_hash_for] ( identifier[value] )
identifier[keys] += identifier[self] . identifier[_reverse_add_cache] [ identifier[hash_value] ]
keyword[return] identifier[keys] | def get_keys_for(self, value, include_uncommitted=False):
"""Get keys for a given value.
:param value: The value to look for
:type value: object
:param include_uncommitted: Include uncommitted values in results
:type include_uncommitted: bool
:return: The keys for the given value
:rtype: list(str)
"""
if not include_uncommitted:
return super(TransactionalIndex, self).get_keys_for(value) # depends on [control=['if'], data=[]]
else:
keys = super(TransactionalIndex, self).get_keys_for(value)
hash_value = self.get_hash_for(value)
keys += self._reverse_add_cache[hash_value]
return keys |
def generate_by_hash(hashcode):
    """Generate a PIL image avatar based on the given hash string.

    Acts as the main accessor to pagan.

    :param hashcode: hex digest string; must be at least 32 characters
        long and contain only ``0-9a-f``
    :raises FalseHashError: if the hash is too short or contains
        characters outside the allowed hex alphabet
    :return: the rendered avatar image (PIL Image)
    """
    img = Image.new(IMAGE_MODE, IMAGE_SIZE, BACKGROUND_COLOR)
    if len(hashcode) < 32:
        # Fixed "lenght" typo in the user-facing message.
        print("hashcode must have length >= 32, %s" % hashcode)
        raise FalseHashError
    allowed = "0123456789abcdef"
    # Short-circuit scan instead of building an intermediate bool list
    # and testing `False in` membership.
    if not all(c in allowed for c in hashcode):
        print("hashcode has not allowed structure %s" % hashcode)
        raise FalseHashError
    pixelmap = setup_pixelmap(hashcode)
    draw_image(pixelmap, img)
    return img
constant[Generates an PIL image avatar based on the given
hash String. Acts as the main accessor to pagan.]
variable[img] assign[=] call[name[Image].new, parameter[name[IMAGE_MODE], name[IMAGE_SIZE], name[BACKGROUND_COLOR]]]
if compare[call[name[len], parameter[name[hashcode]]] less[<] constant[32]] begin[:]
call[name[print], parameter[binary_operation[constant[hashcode must have lenght >= 32, %s] <ast.Mod object at 0x7da2590d6920> name[hashcode]]]]
<ast.Raise object at 0x7da20c6e4e20>
variable[allowed] assign[=] constant[0123456789abcdef]
variable[hashcheck] assign[=] <ast.ListComp object at 0x7da20c6e63b0>
if compare[constant[False] in name[hashcheck]] begin[:]
call[name[print], parameter[binary_operation[constant[hashcode has not allowed structure %s] <ast.Mod object at 0x7da2590d6920> name[hashcode]]]]
<ast.Raise object at 0x7da20c6e4fa0>
variable[pixelmap] assign[=] call[name[setup_pixelmap], parameter[name[hashcode]]]
call[name[draw_image], parameter[name[pixelmap], name[img]]]
return[name[img]] | keyword[def] identifier[generate_by_hash] ( identifier[hashcode] ):
literal[string]
identifier[img] = identifier[Image] . identifier[new] ( identifier[IMAGE_MODE] , identifier[IMAGE_SIZE] , identifier[BACKGROUND_COLOR] )
keyword[if] identifier[len] ( identifier[hashcode] )< literal[int] :
identifier[print] ( literal[string] % identifier[hashcode] )
keyword[raise] identifier[FalseHashError]
identifier[allowed] = literal[string]
identifier[hashcheck] =[ identifier[c] keyword[in] identifier[allowed] keyword[for] identifier[c] keyword[in] identifier[hashcode] ]
keyword[if] keyword[False] keyword[in] identifier[hashcheck] :
identifier[print] ( literal[string] % identifier[hashcode] )
keyword[raise] identifier[FalseHashError]
identifier[pixelmap] = identifier[setup_pixelmap] ( identifier[hashcode] )
identifier[draw_image] ( identifier[pixelmap] , identifier[img] )
keyword[return] identifier[img] | def generate_by_hash(hashcode):
"""Generates an PIL image avatar based on the given
hash String. Acts as the main accessor to pagan."""
img = Image.new(IMAGE_MODE, IMAGE_SIZE, BACKGROUND_COLOR)
if len(hashcode) < 32:
print('hashcode must have lenght >= 32, %s' % hashcode)
raise FalseHashError # depends on [control=['if'], data=[]]
allowed = '0123456789abcdef'
hashcheck = [c in allowed for c in hashcode]
if False in hashcheck:
print('hashcode has not allowed structure %s' % hashcode)
raise FalseHashError # depends on [control=['if'], data=[]]
pixelmap = setup_pixelmap(hashcode)
draw_image(pixelmap, img)
return img |
def p_extr_lic_id_1(self, p):
    """extr_lic_id : LICS_ID LINE"""
    # NOTE: the docstring above is the PLY grammar rule and must not change.
    # Decode the token value on Python 2 only; on Python 3 it is already str.
    value = p[2].decode(encoding='utf-8') if six.PY2 else p[2]
    try:
        self.builder.set_lic_id(self.document, value)
    except SPDXValueError:
        # Record the parse error and log it with the offending line number.
        self.error = True
        self.logger.log(ERROR_MESSAGES['LICS_ID_VALUE'].format(p.lineno(1)))
constant[extr_lic_id : LICS_ID LINE]
<ast.Try object at 0x7da1b020e770> | keyword[def] identifier[p_extr_lic_id_1] ( identifier[self] , identifier[p] ):
literal[string]
keyword[try] :
keyword[if] identifier[six] . identifier[PY2] :
identifier[value] = identifier[p] [ literal[int] ]. identifier[decode] ( identifier[encoding] = literal[string] )
keyword[else] :
identifier[value] = identifier[p] [ literal[int] ]
identifier[self] . identifier[builder] . identifier[set_lic_id] ( identifier[self] . identifier[document] , identifier[value] )
keyword[except] identifier[SPDXValueError] :
identifier[self] . identifier[error] = keyword[True]
identifier[msg] = identifier[ERROR_MESSAGES] [ literal[string] ]. identifier[format] ( identifier[p] . identifier[lineno] ( literal[int] ))
identifier[self] . identifier[logger] . identifier[log] ( identifier[msg] ) | def p_extr_lic_id_1(self, p):
"""extr_lic_id : LICS_ID LINE"""
try:
if six.PY2:
value = p[2].decode(encoding='utf-8') # depends on [control=['if'], data=[]]
else:
value = p[2]
self.builder.set_lic_id(self.document, value) # depends on [control=['try'], data=[]]
except SPDXValueError:
self.error = True
msg = ERROR_MESSAGES['LICS_ID_VALUE'].format(p.lineno(1))
self.logger.log(msg) # depends on [control=['except'], data=[]] |
def get(self, collection, doc_id, **kwargs):
    """
    Retrieve a document from Solr based on its ID. ::

        >>> solr.get('SolrClient_unittest','changeme')

    :param str collection: The name of the collection for the request
    :param str doc_id: ID of the document to be retrieved.
    :raises NotFoundError: if no document with that ID exists
    """
    resp, _conn_info = self.transport.send_request(method='GET',
                                                   endpoint='get',
                                                   collection=collection,
                                                   params={'id': doc_id},
                                                   **kwargs)
    doc = resp.get('doc')
    if doc:
        return doc
    raise NotFoundError
constant[
:param str collection: The name of the collection for the request
:param str doc_id: ID of the document to be retrieved.
Retrieve document from Solr based on the ID. ::
>>> solr.get('SolrClient_unittest','changeme')
]
<ast.Tuple object at 0x7da207f037f0> assign[=] call[name[self].transport.send_request, parameter[]]
if <ast.BoolOp object at 0x7da207f03430> begin[:]
return[call[name[resp]][constant[doc]]]
<ast.Raise object at 0x7da20c7ca140> | keyword[def] identifier[get] ( identifier[self] , identifier[collection] , identifier[doc_id] ,** identifier[kwargs] ):
literal[string]
identifier[resp] , identifier[con_inf] = identifier[self] . identifier[transport] . identifier[send_request] ( identifier[method] = literal[string] ,
identifier[endpoint] = literal[string] ,
identifier[collection] = identifier[collection] ,
identifier[params] ={ literal[string] : identifier[doc_id] },
** identifier[kwargs] )
keyword[if] literal[string] keyword[in] identifier[resp] keyword[and] identifier[resp] [ literal[string] ]:
keyword[return] identifier[resp] [ literal[string] ]
keyword[raise] identifier[NotFoundError] | def get(self, collection, doc_id, **kwargs):
"""
:param str collection: The name of the collection for the request
:param str doc_id: ID of the document to be retrieved.
Retrieve document from Solr based on the ID. ::
>>> solr.get('SolrClient_unittest','changeme')
"""
(resp, con_inf) = self.transport.send_request(method='GET', endpoint='get', collection=collection, params={'id': doc_id}, **kwargs)
if 'doc' in resp and resp['doc']:
return resp['doc'] # depends on [control=['if'], data=[]]
raise NotFoundError |
def iso_register(iso_code):
"""
Registers Calendar class as country or region in IsoRegistry.
Registered country must set class variables ``iso`` using this decorator.
>>> from workalendar.core import Calendar
>>> @iso_register('MC-MR')
>>> class MyRegion(Calendar):
>>> 'My Region'
Region calendar is then retrievable from registry:
>>> calendar = registry.get_calendar_class('MC-MR')
"""
def wrapper(cls):
registry.register(iso_code, cls)
return cls
return wrapper | def function[iso_register, parameter[iso_code]]:
constant[
Registers Calendar class as country or region in IsoRegistry.
Registered country must set class variables ``iso`` using this decorator.
>>> from workalendar.core import Calendar
>>> @iso_register('MC-MR')
>>> class MyRegion(Calendar):
>>> 'My Region'
Region calendar is then retrievable from registry:
>>> calendar = registry.get_calendar_class('MC-MR')
]
def function[wrapper, parameter[cls]]:
call[name[registry].register, parameter[name[iso_code], name[cls]]]
return[name[cls]]
return[name[wrapper]] | keyword[def] identifier[iso_register] ( identifier[iso_code] ):
literal[string]
keyword[def] identifier[wrapper] ( identifier[cls] ):
identifier[registry] . identifier[register] ( identifier[iso_code] , identifier[cls] )
keyword[return] identifier[cls]
keyword[return] identifier[wrapper] | def iso_register(iso_code):
"""
Registers Calendar class as country or region in IsoRegistry.
Registered country must set class variables ``iso`` using this decorator.
>>> from workalendar.core import Calendar
>>> @iso_register('MC-MR')
>>> class MyRegion(Calendar):
>>> 'My Region'
Region calendar is then retrievable from registry:
>>> calendar = registry.get_calendar_class('MC-MR')
"""
def wrapper(cls):
registry.register(iso_code, cls)
return cls
return wrapper |
def number_peaks(x, n):
"""
Calculates the number of peaks of at least support n in the time series x. A peak of support n is defined as a
subsequence of x where a value occurs, which is bigger than its n neighbours to the left and to the right.
Hence in the sequence
>>> x = [3, 0, 0, 4, 0, 0, 13]
4 is a peak of support 1 and 2 because in the subsequences
>>> [0, 4, 0]
>>> [0, 0, 4, 0, 0]
4 is still the highest value. Here, 4 is not a peak of support 3 because 13 is the 3th neighbour to the right of 4
and its bigger than 4.
:param x: the time series to calculate the feature of
:type x: numpy.ndarray
:param n: the support of the peak
:type n: int
:return: the value of this feature
:return type: float
"""
x_reduced = x[n:-n]
res = None
for i in range(1, n + 1):
result_first = (x_reduced > _roll(x, i)[n:-n])
if res is None:
res = result_first
else:
res &= result_first
res &= (x_reduced > _roll(x, -i)[n:-n])
return np.sum(res) | def function[number_peaks, parameter[x, n]]:
constant[
Calculates the number of peaks of at least support n in the time series x. A peak of support n is defined as a
subsequence of x where a value occurs, which is bigger than its n neighbours to the left and to the right.
Hence in the sequence
>>> x = [3, 0, 0, 4, 0, 0, 13]
4 is a peak of support 1 and 2 because in the subsequences
>>> [0, 4, 0]
>>> [0, 0, 4, 0, 0]
4 is still the highest value. Here, 4 is not a peak of support 3 because 13 is the 3th neighbour to the right of 4
and its bigger than 4.
:param x: the time series to calculate the feature of
:type x: numpy.ndarray
:param n: the support of the peak
:type n: int
:return: the value of this feature
:return type: float
]
variable[x_reduced] assign[=] call[name[x]][<ast.Slice object at 0x7da20c6aa1a0>]
variable[res] assign[=] constant[None]
for taget[name[i]] in starred[call[name[range], parameter[constant[1], binary_operation[name[n] + constant[1]]]]] begin[:]
variable[result_first] assign[=] compare[name[x_reduced] greater[>] call[call[name[_roll], parameter[name[x], name[i]]]][<ast.Slice object at 0x7da204623c10>]]
if compare[name[res] is constant[None]] begin[:]
variable[res] assign[=] name[result_first]
<ast.AugAssign object at 0x7da2046231c0>
return[call[name[np].sum, parameter[name[res]]]] | keyword[def] identifier[number_peaks] ( identifier[x] , identifier[n] ):
literal[string]
identifier[x_reduced] = identifier[x] [ identifier[n] :- identifier[n] ]
identifier[res] = keyword[None]
keyword[for] identifier[i] keyword[in] identifier[range] ( literal[int] , identifier[n] + literal[int] ):
identifier[result_first] =( identifier[x_reduced] > identifier[_roll] ( identifier[x] , identifier[i] )[ identifier[n] :- identifier[n] ])
keyword[if] identifier[res] keyword[is] keyword[None] :
identifier[res] = identifier[result_first]
keyword[else] :
identifier[res] &= identifier[result_first]
identifier[res] &=( identifier[x_reduced] > identifier[_roll] ( identifier[x] ,- identifier[i] )[ identifier[n] :- identifier[n] ])
keyword[return] identifier[np] . identifier[sum] ( identifier[res] ) | def number_peaks(x, n):
"""
Calculates the number of peaks of at least support n in the time series x. A peak of support n is defined as a
subsequence of x where a value occurs, which is bigger than its n neighbours to the left and to the right.
Hence in the sequence
>>> x = [3, 0, 0, 4, 0, 0, 13]
4 is a peak of support 1 and 2 because in the subsequences
>>> [0, 4, 0]
>>> [0, 0, 4, 0, 0]
4 is still the highest value. Here, 4 is not a peak of support 3 because 13 is the 3th neighbour to the right of 4
and its bigger than 4.
:param x: the time series to calculate the feature of
:type x: numpy.ndarray
:param n: the support of the peak
:type n: int
:return: the value of this feature
:return type: float
"""
x_reduced = x[n:-n]
res = None
for i in range(1, n + 1):
result_first = x_reduced > _roll(x, i)[n:-n]
if res is None:
res = result_first # depends on [control=['if'], data=['res']]
else:
res &= result_first
res &= x_reduced > _roll(x, -i)[n:-n] # depends on [control=['for'], data=['i']]
return np.sum(res) |
def plot(self,
legend=None,
tracks=None,
track_titles=None,
alias=None,
basis=None,
return_fig=False,
extents='td',
**kwargs):
"""
Plot multiple tracks.
Args:
legend (striplog.legend): A legend instance.
tracks (list): A list of strings and/or lists of strings. The
tracks you want to plot from ``data``. Optional, but you will
usually want to give it.
track_titles (list): Optional. A list of strings and/or lists of
strings. The names to give the tracks, if you don't want welly
to guess.
alias (dict): a dictionary mapping mnemonics to lists of mnemonics.
basis (ndarray): Optional. The basis of the plot, if you don't
want welly to guess (probably the best idea).
return_fig (bool): Whether to return the matplotlig figure. Default
False.
extents (str): What to use for the y limits:
'td' — plot 0 to TD.
'curves' — use a basis that accommodates all the curves.
'all' — use a basis that accommodates everything.
(tuple) — give the upper and lower explictly.
Returns:
None. The plot is a side-effect.
"""
# These will be treated differently.
depth_tracks = ['MD', 'TVD']
# Set tracks to 'all' if it's None.
tracks = tracks or list(self.data.keys())
track_titles = track_titles or tracks
# Figure out limits
if basis is None:
basis = self.survey_basis(keys=tracks)
if extents == 'curves':
upper, lower = basis[0], basis[-1]
elif extents == 'td':
try:
upper, lower = 0, self.location.td
except:
m = "Could not read self.location.td, try extents='curves'"
raise WellError(m)
if not lower:
lower = basis[-1]
elif extents == 'all':
raise NotImplementedError("You cannot do that yet.")
else:
try:
upper, lower = extents
except:
upper, lower = basis[0], basis[-1]
# Figure out widths because we can't us gs.update() for that.
widths = [0.4 if t in depth_tracks else 1.0 for t in tracks]
# Set up the figure.
ntracks = len(tracks)
fig = plt.figure(figsize=(2*ntracks, 12), facecolor='w')
fig.suptitle(self.header.name, size=16, zorder=100,
bbox=dict(facecolor='w', alpha=1.0, ec='none'))
gs = mpl.gridspec.GridSpec(1, ntracks, width_ratios=widths)
# Plot first axis.
# kwargs = {}
ax0 = fig.add_subplot(gs[0, 0])
ax0.depth_track = False
track = tracks[0]
if '.' in track:
track, kwargs['field'] = track.split('.')
if track in depth_tracks:
ax0 = self._plot_depth_track(ax=ax0, md=basis, kind=track)
else:
try: # ...treating as a plottable object.
ax0 = self.get_curve(track, alias=alias).plot(ax=ax0, legend=legend, **kwargs)
except AttributeError: # ...it's not there.
pass
except TypeError: # ...it's a list.
for t in track:
try:
ax0 = self.get_curve(t, alias=alias).plot(ax=ax0, legend=legend, **kwargs)
except AttributeError: # ...it's not there.
pass
tx = ax0.get_xticks()
ax0.set_xticks(tx[1:-1])
ax0.set_title(track_titles[0])
# Plot remaining axes.
for i, track in enumerate(tracks[1:]):
# kwargs = {}
ax = fig.add_subplot(gs[0, i+1])
ax.depth_track = False
if track in depth_tracks:
ax = self._plot_depth_track(ax=ax, md=basis, kind=track)
continue
if '.' in track:
track, kwargs['field'] = track.split('.')
plt.setp(ax.get_yticklabels(), visible=False)
try: # ...treating as a plottable object.
ax = self.get_curve(track, alias=alias).plot(ax=ax, legend=legend, **kwargs)
except AttributeError: # ...it's not there.
continue
except TypeError: # ...it's a list.
for j, t in enumerate(track):
if '.' in t:
track, kwargs['field'] = track.split('.')
try:
ax = self.get_curve(t, alias=alias).plot(ax=ax, legend=legend, **kwargs)
except AttributeError:
continue
except KeyError:
continue
tx = ax.get_xticks()
ax.set_xticks(tx[1:-1])
ax.set_title(track_titles[i+1])
# Set sharing.
axes = fig.get_axes()
utils.sharey(axes)
axes[0].set_ylim([lower, upper])
# Adjust the grid.
gs.update(wspace=0)
# Adjust spines and ticks for non-depth tracks.
for ax in axes:
if ax.depth_track:
pass
if not ax.depth_track:
ax.set(yticks=[])
ax.autoscale(False)
ax.yaxis.set_ticks_position('none')
ax.spines['top'].set_visible(True)
ax.spines['bottom'].set_visible(True)
for sp in ax.spines.values():
sp.set_color('gray')
if return_fig:
return fig
else:
return None | def function[plot, parameter[self, legend, tracks, track_titles, alias, basis, return_fig, extents]]:
constant[
Plot multiple tracks.
Args:
legend (striplog.legend): A legend instance.
tracks (list): A list of strings and/or lists of strings. The
tracks you want to plot from ``data``. Optional, but you will
usually want to give it.
track_titles (list): Optional. A list of strings and/or lists of
strings. The names to give the tracks, if you don't want welly
to guess.
alias (dict): a dictionary mapping mnemonics to lists of mnemonics.
basis (ndarray): Optional. The basis of the plot, if you don't
want welly to guess (probably the best idea).
return_fig (bool): Whether to return the matplotlig figure. Default
False.
extents (str): What to use for the y limits:
'td' — plot 0 to TD.
'curves' — use a basis that accommodates all the curves.
'all' — use a basis that accommodates everything.
(tuple) — give the upper and lower explictly.
Returns:
None. The plot is a side-effect.
]
variable[depth_tracks] assign[=] list[[<ast.Constant object at 0x7da1b23c6e90>, <ast.Constant object at 0x7da1b23c44f0>]]
variable[tracks] assign[=] <ast.BoolOp object at 0x7da1b23c5ab0>
variable[track_titles] assign[=] <ast.BoolOp object at 0x7da1b23c5480>
if compare[name[basis] is constant[None]] begin[:]
variable[basis] assign[=] call[name[self].survey_basis, parameter[]]
if compare[name[extents] equal[==] constant[curves]] begin[:]
<ast.Tuple object at 0x7da1b23802e0> assign[=] tuple[[<ast.Subscript object at 0x7da1b2382650>, <ast.Subscript object at 0x7da1b2383130>]]
variable[widths] assign[=] <ast.ListComp object at 0x7da1b2380130>
variable[ntracks] assign[=] call[name[len], parameter[name[tracks]]]
variable[fig] assign[=] call[name[plt].figure, parameter[]]
call[name[fig].suptitle, parameter[name[self].header.name]]
variable[gs] assign[=] call[name[mpl].gridspec.GridSpec, parameter[constant[1], name[ntracks]]]
variable[ax0] assign[=] call[name[fig].add_subplot, parameter[call[name[gs]][tuple[[<ast.Constant object at 0x7da1b22bfd90>, <ast.Constant object at 0x7da1b22bf790>]]]]]
name[ax0].depth_track assign[=] constant[False]
variable[track] assign[=] call[name[tracks]][constant[0]]
if compare[constant[.] in name[track]] begin[:]
<ast.Tuple object at 0x7da1b22bff10> assign[=] call[name[track].split, parameter[constant[.]]]
if compare[name[track] in name[depth_tracks]] begin[:]
variable[ax0] assign[=] call[name[self]._plot_depth_track, parameter[]]
variable[tx] assign[=] call[name[ax0].get_xticks, parameter[]]
call[name[ax0].set_xticks, parameter[call[name[tx]][<ast.Slice object at 0x7da1b1d4b730>]]]
call[name[ax0].set_title, parameter[call[name[track_titles]][constant[0]]]]
for taget[tuple[[<ast.Name object at 0x7da1b1d48880>, <ast.Name object at 0x7da1b1d49d80>]]] in starred[call[name[enumerate], parameter[call[name[tracks]][<ast.Slice object at 0x7da1b1d4a500>]]]] begin[:]
variable[ax] assign[=] call[name[fig].add_subplot, parameter[call[name[gs]][tuple[[<ast.Constant object at 0x7da1b1d48730>, <ast.BinOp object at 0x7da1b1d48a00>]]]]]
name[ax].depth_track assign[=] constant[False]
if compare[name[track] in name[depth_tracks]] begin[:]
variable[ax] assign[=] call[name[self]._plot_depth_track, parameter[]]
continue
if compare[constant[.] in name[track]] begin[:]
<ast.Tuple object at 0x7da1b1d4b220> assign[=] call[name[track].split, parameter[constant[.]]]
call[name[plt].setp, parameter[call[name[ax].get_yticklabels, parameter[]]]]
<ast.Try object at 0x7da1b1d4b1c0>
variable[tx] assign[=] call[name[ax].get_xticks, parameter[]]
call[name[ax].set_xticks, parameter[call[name[tx]][<ast.Slice object at 0x7da1b231f100>]]]
call[name[ax].set_title, parameter[call[name[track_titles]][binary_operation[name[i] + constant[1]]]]]
variable[axes] assign[=] call[name[fig].get_axes, parameter[]]
call[name[utils].sharey, parameter[name[axes]]]
call[call[name[axes]][constant[0]].set_ylim, parameter[list[[<ast.Name object at 0x7da1b231d690>, <ast.Name object at 0x7da1b231d000>]]]]
call[name[gs].update, parameter[]]
for taget[name[ax]] in starred[name[axes]] begin[:]
if name[ax].depth_track begin[:]
pass
if <ast.UnaryOp object at 0x7da1b2310eb0> begin[:]
call[name[ax].set, parameter[]]
call[name[ax].autoscale, parameter[constant[False]]]
call[name[ax].yaxis.set_ticks_position, parameter[constant[none]]]
call[call[name[ax].spines][constant[top]].set_visible, parameter[constant[True]]]
call[call[name[ax].spines][constant[bottom]].set_visible, parameter[constant[True]]]
for taget[name[sp]] in starred[call[name[ax].spines.values, parameter[]]] begin[:]
call[name[sp].set_color, parameter[constant[gray]]]
if name[return_fig] begin[:]
return[name[fig]] | keyword[def] identifier[plot] ( identifier[self] ,
identifier[legend] = keyword[None] ,
identifier[tracks] = keyword[None] ,
identifier[track_titles] = keyword[None] ,
identifier[alias] = keyword[None] ,
identifier[basis] = keyword[None] ,
identifier[return_fig] = keyword[False] ,
identifier[extents] = literal[string] ,
** identifier[kwargs] ):
literal[string]
identifier[depth_tracks] =[ literal[string] , literal[string] ]
identifier[tracks] = identifier[tracks] keyword[or] identifier[list] ( identifier[self] . identifier[data] . identifier[keys] ())
identifier[track_titles] = identifier[track_titles] keyword[or] identifier[tracks]
keyword[if] identifier[basis] keyword[is] keyword[None] :
identifier[basis] = identifier[self] . identifier[survey_basis] ( identifier[keys] = identifier[tracks] )
keyword[if] identifier[extents] == literal[string] :
identifier[upper] , identifier[lower] = identifier[basis] [ literal[int] ], identifier[basis] [- literal[int] ]
keyword[elif] identifier[extents] == literal[string] :
keyword[try] :
identifier[upper] , identifier[lower] = literal[int] , identifier[self] . identifier[location] . identifier[td]
keyword[except] :
identifier[m] = literal[string]
keyword[raise] identifier[WellError] ( identifier[m] )
keyword[if] keyword[not] identifier[lower] :
identifier[lower] = identifier[basis] [- literal[int] ]
keyword[elif] identifier[extents] == literal[string] :
keyword[raise] identifier[NotImplementedError] ( literal[string] )
keyword[else] :
keyword[try] :
identifier[upper] , identifier[lower] = identifier[extents]
keyword[except] :
identifier[upper] , identifier[lower] = identifier[basis] [ literal[int] ], identifier[basis] [- literal[int] ]
identifier[widths] =[ literal[int] keyword[if] identifier[t] keyword[in] identifier[depth_tracks] keyword[else] literal[int] keyword[for] identifier[t] keyword[in] identifier[tracks] ]
identifier[ntracks] = identifier[len] ( identifier[tracks] )
identifier[fig] = identifier[plt] . identifier[figure] ( identifier[figsize] =( literal[int] * identifier[ntracks] , literal[int] ), identifier[facecolor] = literal[string] )
identifier[fig] . identifier[suptitle] ( identifier[self] . identifier[header] . identifier[name] , identifier[size] = literal[int] , identifier[zorder] = literal[int] ,
identifier[bbox] = identifier[dict] ( identifier[facecolor] = literal[string] , identifier[alpha] = literal[int] , identifier[ec] = literal[string] ))
identifier[gs] = identifier[mpl] . identifier[gridspec] . identifier[GridSpec] ( literal[int] , identifier[ntracks] , identifier[width_ratios] = identifier[widths] )
identifier[ax0] = identifier[fig] . identifier[add_subplot] ( identifier[gs] [ literal[int] , literal[int] ])
identifier[ax0] . identifier[depth_track] = keyword[False]
identifier[track] = identifier[tracks] [ literal[int] ]
keyword[if] literal[string] keyword[in] identifier[track] :
identifier[track] , identifier[kwargs] [ literal[string] ]= identifier[track] . identifier[split] ( literal[string] )
keyword[if] identifier[track] keyword[in] identifier[depth_tracks] :
identifier[ax0] = identifier[self] . identifier[_plot_depth_track] ( identifier[ax] = identifier[ax0] , identifier[md] = identifier[basis] , identifier[kind] = identifier[track] )
keyword[else] :
keyword[try] :
identifier[ax0] = identifier[self] . identifier[get_curve] ( identifier[track] , identifier[alias] = identifier[alias] ). identifier[plot] ( identifier[ax] = identifier[ax0] , identifier[legend] = identifier[legend] ,** identifier[kwargs] )
keyword[except] identifier[AttributeError] :
keyword[pass]
keyword[except] identifier[TypeError] :
keyword[for] identifier[t] keyword[in] identifier[track] :
keyword[try] :
identifier[ax0] = identifier[self] . identifier[get_curve] ( identifier[t] , identifier[alias] = identifier[alias] ). identifier[plot] ( identifier[ax] = identifier[ax0] , identifier[legend] = identifier[legend] ,** identifier[kwargs] )
keyword[except] identifier[AttributeError] :
keyword[pass]
identifier[tx] = identifier[ax0] . identifier[get_xticks] ()
identifier[ax0] . identifier[set_xticks] ( identifier[tx] [ literal[int] :- literal[int] ])
identifier[ax0] . identifier[set_title] ( identifier[track_titles] [ literal[int] ])
keyword[for] identifier[i] , identifier[track] keyword[in] identifier[enumerate] ( identifier[tracks] [ literal[int] :]):
identifier[ax] = identifier[fig] . identifier[add_subplot] ( identifier[gs] [ literal[int] , identifier[i] + literal[int] ])
identifier[ax] . identifier[depth_track] = keyword[False]
keyword[if] identifier[track] keyword[in] identifier[depth_tracks] :
identifier[ax] = identifier[self] . identifier[_plot_depth_track] ( identifier[ax] = identifier[ax] , identifier[md] = identifier[basis] , identifier[kind] = identifier[track] )
keyword[continue]
keyword[if] literal[string] keyword[in] identifier[track] :
identifier[track] , identifier[kwargs] [ literal[string] ]= identifier[track] . identifier[split] ( literal[string] )
identifier[plt] . identifier[setp] ( identifier[ax] . identifier[get_yticklabels] (), identifier[visible] = keyword[False] )
keyword[try] :
identifier[ax] = identifier[self] . identifier[get_curve] ( identifier[track] , identifier[alias] = identifier[alias] ). identifier[plot] ( identifier[ax] = identifier[ax] , identifier[legend] = identifier[legend] ,** identifier[kwargs] )
keyword[except] identifier[AttributeError] :
keyword[continue]
keyword[except] identifier[TypeError] :
keyword[for] identifier[j] , identifier[t] keyword[in] identifier[enumerate] ( identifier[track] ):
keyword[if] literal[string] keyword[in] identifier[t] :
identifier[track] , identifier[kwargs] [ literal[string] ]= identifier[track] . identifier[split] ( literal[string] )
keyword[try] :
identifier[ax] = identifier[self] . identifier[get_curve] ( identifier[t] , identifier[alias] = identifier[alias] ). identifier[plot] ( identifier[ax] = identifier[ax] , identifier[legend] = identifier[legend] ,** identifier[kwargs] )
keyword[except] identifier[AttributeError] :
keyword[continue]
keyword[except] identifier[KeyError] :
keyword[continue]
identifier[tx] = identifier[ax] . identifier[get_xticks] ()
identifier[ax] . identifier[set_xticks] ( identifier[tx] [ literal[int] :- literal[int] ])
identifier[ax] . identifier[set_title] ( identifier[track_titles] [ identifier[i] + literal[int] ])
identifier[axes] = identifier[fig] . identifier[get_axes] ()
identifier[utils] . identifier[sharey] ( identifier[axes] )
identifier[axes] [ literal[int] ]. identifier[set_ylim] ([ identifier[lower] , identifier[upper] ])
identifier[gs] . identifier[update] ( identifier[wspace] = literal[int] )
keyword[for] identifier[ax] keyword[in] identifier[axes] :
keyword[if] identifier[ax] . identifier[depth_track] :
keyword[pass]
keyword[if] keyword[not] identifier[ax] . identifier[depth_track] :
identifier[ax] . identifier[set] ( identifier[yticks] =[])
identifier[ax] . identifier[autoscale] ( keyword[False] )
identifier[ax] . identifier[yaxis] . identifier[set_ticks_position] ( literal[string] )
identifier[ax] . identifier[spines] [ literal[string] ]. identifier[set_visible] ( keyword[True] )
identifier[ax] . identifier[spines] [ literal[string] ]. identifier[set_visible] ( keyword[True] )
keyword[for] identifier[sp] keyword[in] identifier[ax] . identifier[spines] . identifier[values] ():
identifier[sp] . identifier[set_color] ( literal[string] )
keyword[if] identifier[return_fig] :
keyword[return] identifier[fig]
keyword[else] :
keyword[return] keyword[None] | def plot(self, legend=None, tracks=None, track_titles=None, alias=None, basis=None, return_fig=False, extents='td', **kwargs):
"""
Plot multiple tracks.
Args:
legend (striplog.legend): A legend instance.
tracks (list): A list of strings and/or lists of strings. The
tracks you want to plot from ``data``. Optional, but you will
usually want to give it.
track_titles (list): Optional. A list of strings and/or lists of
strings. The names to give the tracks, if you don't want welly
to guess.
alias (dict): a dictionary mapping mnemonics to lists of mnemonics.
basis (ndarray): Optional. The basis of the plot, if you don't
want welly to guess (probably the best idea).
return_fig (bool): Whether to return the matplotlig figure. Default
False.
extents (str): What to use for the y limits:
'td' —\xa0plot 0 to TD.
'curves' —\xa0use a basis that accommodates all the curves.
'all' —\xa0use a basis that accommodates everything.
(tuple) —\xa0give the upper and lower explictly.
Returns:
None. The plot is a side-effect.
"""
# These will be treated differently.
depth_tracks = ['MD', 'TVD']
# Set tracks to 'all' if it's None.
tracks = tracks or list(self.data.keys())
track_titles = track_titles or tracks
# Figure out limits
if basis is None:
basis = self.survey_basis(keys=tracks) # depends on [control=['if'], data=['basis']]
if extents == 'curves':
(upper, lower) = (basis[0], basis[-1]) # depends on [control=['if'], data=[]]
elif extents == 'td':
try:
(upper, lower) = (0, self.location.td) # depends on [control=['try'], data=[]]
except:
m = "Could not read self.location.td, try extents='curves'"
raise WellError(m) # depends on [control=['except'], data=[]]
if not lower:
lower = basis[-1] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
elif extents == 'all':
raise NotImplementedError('You cannot do that yet.') # depends on [control=['if'], data=[]]
else:
try:
(upper, lower) = extents # depends on [control=['try'], data=[]]
except:
(upper, lower) = (basis[0], basis[-1]) # depends on [control=['except'], data=[]]
# Figure out widths because we can't us gs.update() for that.
widths = [0.4 if t in depth_tracks else 1.0 for t in tracks]
# Set up the figure.
ntracks = len(tracks)
fig = plt.figure(figsize=(2 * ntracks, 12), facecolor='w')
fig.suptitle(self.header.name, size=16, zorder=100, bbox=dict(facecolor='w', alpha=1.0, ec='none'))
gs = mpl.gridspec.GridSpec(1, ntracks, width_ratios=widths)
# Plot first axis.
# kwargs = {}
ax0 = fig.add_subplot(gs[0, 0])
ax0.depth_track = False
track = tracks[0]
if '.' in track:
(track, kwargs['field']) = track.split('.') # depends on [control=['if'], data=['track']]
if track in depth_tracks:
ax0 = self._plot_depth_track(ax=ax0, md=basis, kind=track) # depends on [control=['if'], data=['track']]
else:
try: # ...treating as a plottable object.
ax0 = self.get_curve(track, alias=alias).plot(ax=ax0, legend=legend, **kwargs) # depends on [control=['try'], data=[]]
except AttributeError: # ...it's not there.
pass # depends on [control=['except'], data=[]]
except TypeError: # ...it's a list.
for t in track:
try:
ax0 = self.get_curve(t, alias=alias).plot(ax=ax0, legend=legend, **kwargs) # depends on [control=['try'], data=[]]
except AttributeError: # ...it's not there.
pass # depends on [control=['except'], data=[]] # depends on [control=['for'], data=['t']] # depends on [control=['except'], data=[]]
tx = ax0.get_xticks()
ax0.set_xticks(tx[1:-1])
ax0.set_title(track_titles[0])
# Plot remaining axes.
for (i, track) in enumerate(tracks[1:]):
# kwargs = {}
ax = fig.add_subplot(gs[0, i + 1])
ax.depth_track = False
if track in depth_tracks:
ax = self._plot_depth_track(ax=ax, md=basis, kind=track)
continue # depends on [control=['if'], data=['track']]
if '.' in track:
(track, kwargs['field']) = track.split('.') # depends on [control=['if'], data=['track']]
plt.setp(ax.get_yticklabels(), visible=False)
try: # ...treating as a plottable object.
ax = self.get_curve(track, alias=alias).plot(ax=ax, legend=legend, **kwargs) # depends on [control=['try'], data=[]]
except AttributeError: # ...it's not there.
continue # depends on [control=['except'], data=[]]
except TypeError: # ...it's a list.
for (j, t) in enumerate(track):
if '.' in t:
(track, kwargs['field']) = track.split('.') # depends on [control=['if'], data=[]]
try:
ax = self.get_curve(t, alias=alias).plot(ax=ax, legend=legend, **kwargs) # depends on [control=['try'], data=[]]
except AttributeError:
continue # depends on [control=['except'], data=[]]
except KeyError:
continue # depends on [control=['except'], data=[]] # depends on [control=['for'], data=[]] # depends on [control=['except'], data=[]]
tx = ax.get_xticks()
ax.set_xticks(tx[1:-1])
ax.set_title(track_titles[i + 1]) # depends on [control=['for'], data=[]]
# Set sharing.
axes = fig.get_axes()
utils.sharey(axes)
axes[0].set_ylim([lower, upper])
# Adjust the grid.
gs.update(wspace=0)
# Adjust spines and ticks for non-depth tracks.
for ax in axes:
if ax.depth_track:
pass # depends on [control=['if'], data=[]]
if not ax.depth_track:
ax.set(yticks=[])
ax.autoscale(False)
ax.yaxis.set_ticks_position('none')
ax.spines['top'].set_visible(True)
ax.spines['bottom'].set_visible(True)
for sp in ax.spines.values():
sp.set_color('gray') # depends on [control=['for'], data=['sp']] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['ax']]
if return_fig:
return fig # depends on [control=['if'], data=[]]
else:
return None |
def try_one_generator (project, name, generator, target_type, properties, sources):
""" Checks if generator invocation can be pruned, because it's guaranteed
to fail. If so, quickly returns empty list. Otherwise, calls
try_one_generator_really.
"""
if __debug__:
from .targets import ProjectTarget
assert isinstance(project, ProjectTarget)
assert isinstance(name, basestring) or name is None
assert isinstance(generator, Generator)
assert isinstance(target_type, basestring)
assert isinstance(properties, property_set.PropertySet)
assert is_iterable_typed(sources, virtual_target.VirtualTarget)
source_types = []
for s in sources:
source_types.append (s.type ())
viable_source_types = viable_source_types_for_generator (generator)
if source_types and viable_source_types != ['*'] and\
not set_.intersection (source_types, viable_source_types):
if project.manager ().logger ().on ():
id = generator.id ()
project.manager ().logger ().log (__name__, "generator '%s' pruned" % id)
project.manager ().logger ().log (__name__, "source_types" '%s' % source_types)
project.manager ().logger ().log (__name__, "viable_source_types '%s'" % viable_source_types)
return []
else:
return try_one_generator_really (project, name, generator, target_type, properties, sources) | def function[try_one_generator, parameter[project, name, generator, target_type, properties, sources]]:
constant[ Checks if generator invocation can be pruned, because it's guaranteed
to fail. If so, quickly returns empty list. Otherwise, calls
try_one_generator_really.
]
if name[__debug__] begin[:]
from relative_module[targets] import module[ProjectTarget]
assert[call[name[isinstance], parameter[name[project], name[ProjectTarget]]]]
assert[<ast.BoolOp object at 0x7da1b208c7f0>]
assert[call[name[isinstance], parameter[name[generator], name[Generator]]]]
assert[call[name[isinstance], parameter[name[target_type], name[basestring]]]]
assert[call[name[isinstance], parameter[name[properties], name[property_set].PropertySet]]]
assert[call[name[is_iterable_typed], parameter[name[sources], name[virtual_target].VirtualTarget]]]
variable[source_types] assign[=] list[[]]
for taget[name[s]] in starred[name[sources]] begin[:]
call[name[source_types].append, parameter[call[name[s].type, parameter[]]]]
variable[viable_source_types] assign[=] call[name[viable_source_types_for_generator], parameter[name[generator]]]
if <ast.BoolOp object at 0x7da1b208cf10> begin[:]
if call[call[call[name[project].manager, parameter[]].logger, parameter[]].on, parameter[]] begin[:]
variable[id] assign[=] call[name[generator].id, parameter[]]
call[call[call[name[project].manager, parameter[]].logger, parameter[]].log, parameter[name[__name__], binary_operation[constant[generator '%s' pruned] <ast.Mod object at 0x7da2590d6920> name[id]]]]
call[call[call[name[project].manager, parameter[]].logger, parameter[]].log, parameter[name[__name__], binary_operation[constant[source_types%s] <ast.Mod object at 0x7da2590d6920> name[source_types]]]]
call[call[call[name[project].manager, parameter[]].logger, parameter[]].log, parameter[name[__name__], binary_operation[constant[viable_source_types '%s'] <ast.Mod object at 0x7da2590d6920> name[viable_source_types]]]]
return[list[[]]] | keyword[def] identifier[try_one_generator] ( identifier[project] , identifier[name] , identifier[generator] , identifier[target_type] , identifier[properties] , identifier[sources] ):
literal[string]
keyword[if] identifier[__debug__] :
keyword[from] . identifier[targets] keyword[import] identifier[ProjectTarget]
keyword[assert] identifier[isinstance] ( identifier[project] , identifier[ProjectTarget] )
keyword[assert] identifier[isinstance] ( identifier[name] , identifier[basestring] ) keyword[or] identifier[name] keyword[is] keyword[None]
keyword[assert] identifier[isinstance] ( identifier[generator] , identifier[Generator] )
keyword[assert] identifier[isinstance] ( identifier[target_type] , identifier[basestring] )
keyword[assert] identifier[isinstance] ( identifier[properties] , identifier[property_set] . identifier[PropertySet] )
keyword[assert] identifier[is_iterable_typed] ( identifier[sources] , identifier[virtual_target] . identifier[VirtualTarget] )
identifier[source_types] =[]
keyword[for] identifier[s] keyword[in] identifier[sources] :
identifier[source_types] . identifier[append] ( identifier[s] . identifier[type] ())
identifier[viable_source_types] = identifier[viable_source_types_for_generator] ( identifier[generator] )
keyword[if] identifier[source_types] keyword[and] identifier[viable_source_types] !=[ literal[string] ] keyword[and] keyword[not] identifier[set_] . identifier[intersection] ( identifier[source_types] , identifier[viable_source_types] ):
keyword[if] identifier[project] . identifier[manager] (). identifier[logger] (). identifier[on] ():
identifier[id] = identifier[generator] . identifier[id] ()
identifier[project] . identifier[manager] (). identifier[logger] (). identifier[log] ( identifier[__name__] , literal[string] % identifier[id] )
identifier[project] . identifier[manager] (). identifier[logger] (). identifier[log] ( identifier[__name__] , literal[string] literal[string] % identifier[source_types] )
identifier[project] . identifier[manager] (). identifier[logger] (). identifier[log] ( identifier[__name__] , literal[string] % identifier[viable_source_types] )
keyword[return] []
keyword[else] :
keyword[return] identifier[try_one_generator_really] ( identifier[project] , identifier[name] , identifier[generator] , identifier[target_type] , identifier[properties] , identifier[sources] ) | def try_one_generator(project, name, generator, target_type, properties, sources):
""" Checks if generator invocation can be pruned, because it's guaranteed
to fail. If so, quickly returns empty list. Otherwise, calls
try_one_generator_really.
"""
if __debug__:
from .targets import ProjectTarget
assert isinstance(project, ProjectTarget)
assert isinstance(name, basestring) or name is None
assert isinstance(generator, Generator)
assert isinstance(target_type, basestring)
assert isinstance(properties, property_set.PropertySet)
assert is_iterable_typed(sources, virtual_target.VirtualTarget) # depends on [control=['if'], data=[]]
source_types = []
for s in sources:
source_types.append(s.type()) # depends on [control=['for'], data=['s']]
viable_source_types = viable_source_types_for_generator(generator)
if source_types and viable_source_types != ['*'] and (not set_.intersection(source_types, viable_source_types)):
if project.manager().logger().on():
id = generator.id()
project.manager().logger().log(__name__, "generator '%s' pruned" % id)
project.manager().logger().log(__name__, 'source_types%s' % source_types)
project.manager().logger().log(__name__, "viable_source_types '%s'" % viable_source_types) # depends on [control=['if'], data=[]]
return [] # depends on [control=['if'], data=[]]
else:
return try_one_generator_really(project, name, generator, target_type, properties, sources) |
def summed(*values):
    """
    Yield the element-wise sum of all supplied source values.

    One or more *values* can be given; each source is normalised first
    (so plain iterables and GPIO Zero devices are both accepted) and the
    sources are then consumed in lockstep, yielding the sum of each
    aligned group of readings. For example, to light a
    :class:`~gpiozero.PWMLED` as the (scaled) sum of several
    potentiometers connected to an :class:`~gpiozero.MCP3008` ADC::

        from gpiozero import MCP3008, PWMLED
        from gpiozero.tools import summed, scaled
        from signal import pause

        pot1 = MCP3008(channel=0)
        pot2 = MCP3008(channel=1)
        pot3 = MCP3008(channel=2)
        led = PWMLED(4)
        led.source = scaled(summed(pot1, pot2, pot3), 0, 1, 0, 3)
        pause()
    """
    # Normalise every source once up front, then zip them together so one
    # reading is taken from each source per step.
    sources = [_normalize(source) for source in values]
    for group in zip(*sources):
        yield sum(group)
constant[
Returns the sum of all supplied values. One or more *values* can be
specified. For example, to light a :class:`~gpiozero.PWMLED` as the
(scaled) sum of several potentiometers connected to an
:class:`~gpiozero.MCP3008` ADC::
from gpiozero import MCP3008, PWMLED
from gpiozero.tools import summed, scaled
from signal import pause
pot1 = MCP3008(channel=0)
pot2 = MCP3008(channel=1)
pot3 = MCP3008(channel=2)
led = PWMLED(4)
led.source = scaled(summed(pot1, pot2, pot3), 0, 1, 0, 3)
pause()
]
variable[values] assign[=] <ast.ListComp object at 0x7da18f09d720>
for taget[name[v]] in starred[call[name[zip], parameter[<ast.Starred object at 0x7da18f09f940>]]] begin[:]
<ast.Yield object at 0x7da18f09e350> | keyword[def] identifier[summed] (* identifier[values] ):
literal[string]
identifier[values] =[ identifier[_normalize] ( identifier[v] ) keyword[for] identifier[v] keyword[in] identifier[values] ]
keyword[for] identifier[v] keyword[in] identifier[zip] (* identifier[values] ):
keyword[yield] identifier[sum] ( identifier[v] ) | def summed(*values):
"""
Returns the sum of all supplied values. One or more *values* can be
specified. For example, to light a :class:`~gpiozero.PWMLED` as the
(scaled) sum of several potentiometers connected to an
:class:`~gpiozero.MCP3008` ADC::
from gpiozero import MCP3008, PWMLED
from gpiozero.tools import summed, scaled
from signal import pause
pot1 = MCP3008(channel=0)
pot2 = MCP3008(channel=1)
pot3 = MCP3008(channel=2)
led = PWMLED(4)
led.source = scaled(summed(pot1, pot2, pot3), 0, 1, 0, 3)
pause()
"""
values = [_normalize(v) for v in values]
for v in zip(*values):
yield sum(v) # depends on [control=['for'], data=['v']] |
def describe_lcc_csv(lcdict, returndesc=False):
    '''Print (and optionally return) a human-readable description of an
    LCC CSV light curve dict.

    Parameters
    ----------
    lcdict : dict
        The light curve dict to parse for column and metadata info; it
        must have been read from an LCC CSV file.
    returndesc : bool
        If True, the description text is returned as a str in addition
        to being printed to stdout.

    Returns
    -------
    str or None
        The description text if `returndesc` is True, otherwise None.
    '''
    # Bail out early if this dict did not come from an LCC CSV file.
    if 'lcformat' not in lcdict or 'lcc-csv' not in lcdict['lcformat'].lower():
        LOGERROR("this lcdict is not from an LCC CSV, can't figure it out...")
        return None

    # One line per metadata key, formatted as "key | description".
    metadata = lcdict['metadata']
    metadata_lines = [
        '%20s | %s' % (metakey, metadata[metakey]['desc'])
        for metakey in lcdict['objectinfo'].keys()
    ]

    # One line per light curve column, formatted with its position,
    # name, dtype and description.
    coldefs = lcdict['coldefs']
    coldef_lines = [
        'column %02d | %8s | numpy dtype: %3s | %s' % (coldefs[colkey]['colnum'],
                                                       colkey,
                                                       coldefs[colkey]['dtype'],
                                                       coldefs[colkey]['desc'])
        for colkey in lcdict['columns']
    ]

    desc = LCC_CSVLC_DESCTEMPLATE.format(
        objectid=lcdict['objectid'],
        metadata_desc='\n'.join(metadata_lines),
        metadata=pformat(lcdict['objectinfo']),
        columndefs='\n'.join(coldef_lines)
    )
    print(desc)
    return desc if returndesc else None
return None | def function[describe_lcc_csv, parameter[lcdict, returndesc]]:
constant[
This describes the LCC CSV format light curve file.
Parameters
----------
lcdict : dict
The input lcdict to parse for column and metadata info.
returndesc : bool
If True, returns the description string as an str instead of just
printing it to stdout.
Returns
-------
str or None
If returndesc is True, returns the description lines as a str, otherwise
returns nothing.
]
variable[metadata_lines] assign[=] list[[]]
variable[coldef_lines] assign[=] list[[]]
if <ast.BoolOp object at 0x7da20c7967d0> begin[:]
variable[metadata] assign[=] call[name[lcdict]][constant[metadata]]
variable[metakeys] assign[=] call[call[name[lcdict]][constant[objectinfo]].keys, parameter[]]
variable[coldefs] assign[=] call[name[lcdict]][constant[coldefs]]
for taget[name[mk]] in starred[name[metakeys]] begin[:]
call[name[metadata_lines].append, parameter[binary_operation[constant[%20s | %s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da20c795270>, <ast.Subscript object at 0x7da20c795e70>]]]]]
for taget[name[ck]] in starred[call[name[lcdict]][constant[columns]]] begin[:]
call[name[coldef_lines].append, parameter[binary_operation[constant[column %02d | %8s | numpy dtype: %3s | %s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Subscript object at 0x7da20c795930>, <ast.Name object at 0x7da20c794a60>, <ast.Subscript object at 0x7da20c794100>, <ast.Subscript object at 0x7da20c794610>]]]]]
variable[desc] assign[=] call[name[LCC_CSVLC_DESCTEMPLATE].format, parameter[]]
call[name[print], parameter[name[desc]]]
if name[returndesc] begin[:]
return[name[desc]] | keyword[def] identifier[describe_lcc_csv] ( identifier[lcdict] , identifier[returndesc] = keyword[False] ):
literal[string]
identifier[metadata_lines] =[]
identifier[coldef_lines] =[]
keyword[if] literal[string] keyword[in] identifier[lcdict] keyword[and] literal[string] keyword[in] identifier[lcdict] [ literal[string] ]. identifier[lower] ():
identifier[metadata] = identifier[lcdict] [ literal[string] ]
identifier[metakeys] = identifier[lcdict] [ literal[string] ]. identifier[keys] ()
identifier[coldefs] = identifier[lcdict] [ literal[string] ]
keyword[for] identifier[mk] keyword[in] identifier[metakeys] :
identifier[metadata_lines] . identifier[append] (
literal[string] %(
identifier[mk] ,
identifier[metadata] [ identifier[mk] ][ literal[string] ]
)
)
keyword[for] identifier[ck] keyword[in] identifier[lcdict] [ literal[string] ]:
identifier[coldef_lines] . identifier[append] ( literal[string]
%( identifier[coldefs] [ identifier[ck] ][ literal[string] ],
identifier[ck] ,
identifier[coldefs] [ identifier[ck] ][ literal[string] ],
identifier[coldefs] [ identifier[ck] ][ literal[string] ]))
identifier[desc] = identifier[LCC_CSVLC_DESCTEMPLATE] . identifier[format] (
identifier[objectid] = identifier[lcdict] [ literal[string] ],
identifier[metadata_desc] = literal[string] . identifier[join] ( identifier[metadata_lines] ),
identifier[metadata] = identifier[pformat] ( identifier[lcdict] [ literal[string] ]),
identifier[columndefs] = literal[string] . identifier[join] ( identifier[coldef_lines] )
)
identifier[print] ( identifier[desc] )
keyword[if] identifier[returndesc] :
keyword[return] identifier[desc]
keyword[else] :
identifier[LOGERROR] ( literal[string] )
keyword[return] keyword[None] | def describe_lcc_csv(lcdict, returndesc=False):
"""
This describes the LCC CSV format light curve file.
Parameters
----------
lcdict : dict
The input lcdict to parse for column and metadata info.
returndesc : bool
If True, returns the description string as an str instead of just
printing it to stdout.
Returns
-------
str or None
If returndesc is True, returns the description lines as a str, otherwise
returns nothing.
"""
metadata_lines = []
coldef_lines = []
if 'lcformat' in lcdict and 'lcc-csv' in lcdict['lcformat'].lower():
metadata = lcdict['metadata']
metakeys = lcdict['objectinfo'].keys()
coldefs = lcdict['coldefs']
for mk in metakeys:
metadata_lines.append('%20s | %s' % (mk, metadata[mk]['desc'])) # depends on [control=['for'], data=['mk']]
for ck in lcdict['columns']:
coldef_lines.append('column %02d | %8s | numpy dtype: %3s | %s' % (coldefs[ck]['colnum'], ck, coldefs[ck]['dtype'], coldefs[ck]['desc'])) # depends on [control=['for'], data=['ck']]
desc = LCC_CSVLC_DESCTEMPLATE.format(objectid=lcdict['objectid'], metadata_desc='\n'.join(metadata_lines), metadata=pformat(lcdict['objectinfo']), columndefs='\n'.join(coldef_lines))
print(desc)
if returndesc:
return desc # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
else:
LOGERROR("this lcdict is not from an LCC CSV, can't figure it out...")
return None |
def disks(self):
    """Return the ``DisksOperations`` class matching the negotiated API version.

    Instance depends on the API version:

       * 2016-04-30-preview: :class:`DisksOperations<azure.mgmt.compute.v2016_04_30_preview.operations.DisksOperations>`
       * 2017-03-30: :class:`DisksOperations<azure.mgmt.compute.v2017_03_30.operations.DisksOperations>`
       * 2018-04-01: :class:`DisksOperations<azure.mgmt.compute.v2018_04_01.operations.DisksOperations>`
       * 2018-06-01: :class:`DisksOperations<azure.mgmt.compute.v2018_06_01.operations.DisksOperations>`
       * 2018-09-30: :class:`DisksOperations<azure.mgmt.compute.v2018_09_30.operations.DisksOperations>`
    """
    api_version = self._get_api_version('disks')
    # Import lazily, newest version first, so only the package for the
    # selected API version is actually loaded.
    if api_version == '2018-09-30':
        from .v2018_09_30.operations import DisksOperations as OperationClass
    elif api_version == '2018-06-01':
        from .v2018_06_01.operations import DisksOperations as OperationClass
    elif api_version == '2018-04-01':
        from .v2018_04_01.operations import DisksOperations as OperationClass
    elif api_version == '2017-03-30':
        from .v2017_03_30.operations import DisksOperations as OperationClass
    elif api_version == '2016-04-30-preview':
        from .v2016_04_30_preview.operations import DisksOperations as OperationClass
    else:
        raise NotImplementedError("APIVersion {} is not available".format(api_version))
    # Build the operations client with (de)serializers bound to this
    # API version's model classes.
    return OperationClass(self._client, self.config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version)))
constant[Instance depends on the API version:
* 2016-04-30-preview: :class:`DisksOperations<azure.mgmt.compute.v2016_04_30_preview.operations.DisksOperations>`
* 2017-03-30: :class:`DisksOperations<azure.mgmt.compute.v2017_03_30.operations.DisksOperations>`
* 2018-04-01: :class:`DisksOperations<azure.mgmt.compute.v2018_04_01.operations.DisksOperations>`
* 2018-06-01: :class:`DisksOperations<azure.mgmt.compute.v2018_06_01.operations.DisksOperations>`
* 2018-09-30: :class:`DisksOperations<azure.mgmt.compute.v2018_09_30.operations.DisksOperations>`
]
variable[api_version] assign[=] call[name[self]._get_api_version, parameter[constant[disks]]]
if compare[name[api_version] equal[==] constant[2016-04-30-preview]] begin[:]
from relative_module[v2016_04_30_preview.operations] import module[DisksOperations]
return[call[name[OperationClass], parameter[name[self]._client, name[self].config, call[name[Serializer], parameter[call[name[self]._models_dict, parameter[name[api_version]]]]], call[name[Deserializer], parameter[call[name[self]._models_dict, parameter[name[api_version]]]]]]]] | keyword[def] identifier[disks] ( identifier[self] ):
literal[string]
identifier[api_version] = identifier[self] . identifier[_get_api_version] ( literal[string] )
keyword[if] identifier[api_version] == literal[string] :
keyword[from] . identifier[v2016_04_30_preview] . identifier[operations] keyword[import] identifier[DisksOperations] keyword[as] identifier[OperationClass]
keyword[elif] identifier[api_version] == literal[string] :
keyword[from] . identifier[v2017_03_30] . identifier[operations] keyword[import] identifier[DisksOperations] keyword[as] identifier[OperationClass]
keyword[elif] identifier[api_version] == literal[string] :
keyword[from] . identifier[v2018_04_01] . identifier[operations] keyword[import] identifier[DisksOperations] keyword[as] identifier[OperationClass]
keyword[elif] identifier[api_version] == literal[string] :
keyword[from] . identifier[v2018_06_01] . identifier[operations] keyword[import] identifier[DisksOperations] keyword[as] identifier[OperationClass]
keyword[elif] identifier[api_version] == literal[string] :
keyword[from] . identifier[v2018_09_30] . identifier[operations] keyword[import] identifier[DisksOperations] keyword[as] identifier[OperationClass]
keyword[else] :
keyword[raise] identifier[NotImplementedError] ( literal[string] . identifier[format] ( identifier[api_version] ))
keyword[return] identifier[OperationClass] ( identifier[self] . identifier[_client] , identifier[self] . identifier[config] , identifier[Serializer] ( identifier[self] . identifier[_models_dict] ( identifier[api_version] )), identifier[Deserializer] ( identifier[self] . identifier[_models_dict] ( identifier[api_version] ))) | def disks(self):
"""Instance depends on the API version:
* 2016-04-30-preview: :class:`DisksOperations<azure.mgmt.compute.v2016_04_30_preview.operations.DisksOperations>`
* 2017-03-30: :class:`DisksOperations<azure.mgmt.compute.v2017_03_30.operations.DisksOperations>`
* 2018-04-01: :class:`DisksOperations<azure.mgmt.compute.v2018_04_01.operations.DisksOperations>`
* 2018-06-01: :class:`DisksOperations<azure.mgmt.compute.v2018_06_01.operations.DisksOperations>`
* 2018-09-30: :class:`DisksOperations<azure.mgmt.compute.v2018_09_30.operations.DisksOperations>`
"""
api_version = self._get_api_version('disks')
if api_version == '2016-04-30-preview':
from .v2016_04_30_preview.operations import DisksOperations as OperationClass # depends on [control=['if'], data=[]]
elif api_version == '2017-03-30':
from .v2017_03_30.operations import DisksOperations as OperationClass # depends on [control=['if'], data=[]]
elif api_version == '2018-04-01':
from .v2018_04_01.operations import DisksOperations as OperationClass # depends on [control=['if'], data=[]]
elif api_version == '2018-06-01':
from .v2018_06_01.operations import DisksOperations as OperationClass # depends on [control=['if'], data=[]]
elif api_version == '2018-09-30':
from .v2018_09_30.operations import DisksOperations as OperationClass # depends on [control=['if'], data=[]]
else:
raise NotImplementedError('APIVersion {} is not available'.format(api_version))
return OperationClass(self._client, self.config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version))) |
def _plot_table(matrix, text_format='{:.2f}', cmap=None, **kwargs):
    """
    Plot a numpy matrix as a table. Uses the current axis bounding box to decide on limits.
    text_format specifies the formatting to apply to the values.

    Parameters
    ----------
    matrix : ndarray
        2-D array of values to render; cell (row, col) is drawn at
        data coordinates x=col, y=row.
    text_format : str
        Indicates how to format the values
        text_format = {:.2} -> keeps all digits until the first 2 significant digits past the decimal
        text_format = {:.2f} -> keeps only 2 digits past the decimal
    cmap : None | colormap
        if a colormap is provided, this colormap will be used to choose the color of the text
        (unless an explicit truthy 'color' kwarg is given, which takes precedence).
    **kwargs : all other arguments passed to plt.text function

    Examples
    ----------
    plot_table(numpy.random.random((3,3)))
    plt.show()
    """
    # Only build a normalizer when a colormap actually drives the text
    # color; this also avoids scanning the matrix for min/max otherwise.
    if not kwargs.get('color', None) and cmap is not None:
        norm = matplotlib.colors.Normalize(vmin=numpy.nanmin(matrix),
                                           vmax=numpy.nanmax(matrix),
                                           clip=False)
    else:
        norm = None
    # gca() is loop-invariant: look up the data transform once.
    data_transform = plt.gca().transData
    for (row, col), value in numpy.ndenumerate(matrix):
        if norm is not None:
            kwargs['color'] = cmap(norm(value))
        # Cell (row, col) sits at x=col, y=row in data coordinates.
        plt.text(col, row, text_format.format(value),
                 horizontalalignment='center', verticalalignment='center',
                 transform=data_transform, **kwargs)
constant[
Plot a numpy matrix as a table. Uses the current axis bounding box to decide on limits.
text_format specifies the formatting to apply to the values.
Parameters
----------
matrix : ndarray
text_format : str
Indicates how to format the the values
text_format = {:.2} -> keeps all digits until the first 2 significant digits past the decimal
text_format = {:.2f} -> keeps only 2 digits past the decimal
cmap : None | colormap
if a colormap is provided, this colormap will be used to choose the color of the text.
**kwargs : all other arguments passed to plt.text function
Examples
----------
plot_table(numpy.random.random((3,3))
plt.show()
]
variable[shape] assign[=] name[matrix].shape
variable[xtick_pos] assign[=] call[name[numpy].arange, parameter[call[name[shape]][constant[1]]]]
variable[ytick_pos] assign[=] call[name[numpy].arange, parameter[call[name[shape]][constant[0]]]]
<ast.Tuple object at 0x7da18f58e2f0> assign[=] call[name[numpy].meshgrid, parameter[name[xtick_pos], name[ytick_pos]]]
variable[vmax] assign[=] call[name[numpy].nanmax, parameter[name[matrix]]]
variable[vmin] assign[=] call[name[numpy].nanmin, parameter[name[matrix]]]
if <ast.BoolOp object at 0x7da18f58c7c0> begin[:]
variable[use_cmap] assign[=] constant[True]
variable[norm] assign[=] call[name[matplotlib].colors.Normalize, parameter[]]
for taget[tuple[[<ast.Tuple object at 0x7da18f58c610>, <ast.Name object at 0x7da18f58d9c0>]]] in starred[call[name[numpy].ndenumerate, parameter[name[matrix]]]] begin[:]
variable[x] assign[=] call[name[xtick_grid]][tuple[[<ast.Name object at 0x7da18f58f2b0>, <ast.Name object at 0x7da18f58f5e0>]]]
variable[y] assign[=] call[name[ytick_grid]][tuple[[<ast.Name object at 0x7da18f58d570>, <ast.Name object at 0x7da18f58f490>]]]
if name[use_cmap] begin[:]
call[name[kwargs]][constant[color]] assign[=] call[name[cmap], parameter[call[name[norm], parameter[name[w]]]]]
call[name[plt].text, parameter[name[x], name[y], call[name[text_format].format, parameter[name[w]]]]] | keyword[def] identifier[_plot_table] ( identifier[matrix] , identifier[text_format] = literal[string] , identifier[cmap] = keyword[None] ,** identifier[kwargs] ):
literal[string]
identifier[shape] = identifier[matrix] . identifier[shape]
identifier[xtick_pos] = identifier[numpy] . identifier[arange] ( identifier[shape] [ literal[int] ])
identifier[ytick_pos] = identifier[numpy] . identifier[arange] ( identifier[shape] [ literal[int] ])
identifier[xtick_grid] , identifier[ytick_grid] = identifier[numpy] . identifier[meshgrid] ( identifier[xtick_pos] , identifier[ytick_pos] )
identifier[vmax] = identifier[numpy] . identifier[nanmax] ( identifier[matrix] )
identifier[vmin] = identifier[numpy] . identifier[nanmin] ( identifier[matrix] )
keyword[if] keyword[not] identifier[kwargs] . identifier[get] ( literal[string] , keyword[None] ) keyword[and] identifier[cmap] keyword[is] keyword[not] keyword[None] :
identifier[use_cmap] = keyword[True]
identifier[norm] = identifier[matplotlib] . identifier[colors] . identifier[Normalize] ( identifier[vmin] = identifier[vmin] , identifier[vmax] = identifier[vmax] , identifier[clip] = keyword[False] )
keyword[else] :
identifier[use_cmap] = keyword[False]
keyword[for] ( identifier[row] , identifier[col] ), identifier[w] keyword[in] identifier[numpy] . identifier[ndenumerate] ( identifier[matrix] ):
identifier[x] = identifier[xtick_grid] [ identifier[row] , identifier[col] ]
identifier[y] = identifier[ytick_grid] [ identifier[row] , identifier[col] ]
keyword[if] identifier[use_cmap] :
identifier[kwargs] [ literal[string] ]= identifier[cmap] ( identifier[norm] ( identifier[w] ))
identifier[plt] . identifier[text] ( identifier[x] , identifier[y] , identifier[text_format] . identifier[format] ( identifier[w] ), identifier[horizontalalignment] = literal[string] ,
identifier[verticalalignment] = literal[string] , identifier[transform] = identifier[plt] . identifier[gca] (). identifier[transData] ,** identifier[kwargs] ) | def _plot_table(matrix, text_format='{:.2f}', cmap=None, **kwargs):
"""
Plot a numpy matrix as a table. Uses the current axis bounding box to decide on limits.
text_format specifies the formatting to apply to the values.
Parameters
----------
matrix : ndarray
text_format : str
Indicates how to format the the values
text_format = {:.2} -> keeps all digits until the first 2 significant digits past the decimal
text_format = {:.2f} -> keeps only 2 digits past the decimal
cmap : None | colormap
if a colormap is provided, this colormap will be used to choose the color of the text.
**kwargs : all other arguments passed to plt.text function
Examples
----------
plot_table(numpy.random.random((3,3))
plt.show()
"""
shape = matrix.shape
xtick_pos = numpy.arange(shape[1])
ytick_pos = numpy.arange(shape[0])
(xtick_grid, ytick_grid) = numpy.meshgrid(xtick_pos, ytick_pos)
vmax = numpy.nanmax(matrix)
vmin = numpy.nanmin(matrix)
if not kwargs.get('color', None) and cmap is not None:
use_cmap = True
norm = matplotlib.colors.Normalize(vmin=vmin, vmax=vmax, clip=False) # depends on [control=['if'], data=[]]
else:
use_cmap = False
for ((row, col), w) in numpy.ndenumerate(matrix):
x = xtick_grid[row, col]
y = ytick_grid[row, col]
if use_cmap:
kwargs['color'] = cmap(norm(w)) # depends on [control=['if'], data=[]]
plt.text(x, y, text_format.format(w), horizontalalignment='center', verticalalignment='center', transform=plt.gca().transData, **kwargs) # depends on [control=['for'], data=[]] |
def write_logevidence(self, lnz, dlnz):
    """Store the log evidence and its error in the file's attrs.

    The values are saved to the ``log_evidence`` and ``dlog_evidence``
    attributes respectively.

    Parameters
    ----------
    lnz : float
        The log of the evidence.
    dlnz : float
        The error in the estimate of the log evidence.
    """
    for attr_name, attr_value in (('log_evidence', lnz),
                                  ('dlog_evidence', dlnz)):
        self.attrs[attr_name] = attr_value
constant[Writes the given log evidence and its error.
Results are saved to file's 'log_evidence' and 'dlog_evidence'
attributes.
Parameters
----------
lnz : float
The log of the evidence.
dlnz : float
The error in the estimate of the log evidence.
]
call[name[self].attrs][constant[log_evidence]] assign[=] name[lnz]
call[name[self].attrs][constant[dlog_evidence]] assign[=] name[dlnz] | keyword[def] identifier[write_logevidence] ( identifier[self] , identifier[lnz] , identifier[dlnz] ):
literal[string]
identifier[self] . identifier[attrs] [ literal[string] ]= identifier[lnz]
identifier[self] . identifier[attrs] [ literal[string] ]= identifier[dlnz] | def write_logevidence(self, lnz, dlnz):
"""Writes the given log evidence and its error.
Results are saved to file's 'log_evidence' and 'dlog_evidence'
attributes.
Parameters
----------
lnz : float
The log of the evidence.
dlnz : float
The error in the estimate of the log evidence.
"""
self.attrs['log_evidence'] = lnz
self.attrs['dlog_evidence'] = dlnz |
def __unset_binding(self, dependency, service, reference):
    # type: (Any, Any, ServiceReference) -> None
    """
    Removes an injected service from the component.

    :param dependency: The dependency handler owning the injected field
    :param service: The service object being removed
    :param reference: The ServiceReference of the service being removed
    """
    # Let the component react first: the field-specific unbind callback...
    self.__safe_field_callback(dependency.get_field(), constants.IPOPO_CALLBACK_UNBIND_FIELD, service, reference)
    # ...followed by the generic unbind callback.
    self.safe_callback(constants.IPOPO_CALLBACK_UNBIND, service, reference)
    # Re-inject the dependency's current value into the component field.
    setattr(self.instance, dependency.get_field(), dependency.get_value())
    # Finally, release the usage count on the service.
    self.bundle_context.unget_service(reference)
constant[
Removes a service from the component
:param dependency: The dependency handler
:param service: The injected service
:param reference: The reference of the injected service
]
call[name[self].__safe_field_callback, parameter[call[name[dependency].get_field, parameter[]], name[constants].IPOPO_CALLBACK_UNBIND_FIELD, name[service], name[reference]]]
call[name[self].safe_callback, parameter[name[constants].IPOPO_CALLBACK_UNBIND, name[service], name[reference]]]
call[name[setattr], parameter[name[self].instance, call[name[dependency].get_field, parameter[]], call[name[dependency].get_value, parameter[]]]]
call[name[self].bundle_context.unget_service, parameter[name[reference]]] | keyword[def] identifier[__unset_binding] ( identifier[self] , identifier[dependency] , identifier[service] , identifier[reference] ):
literal[string]
identifier[self] . identifier[__safe_field_callback] (
identifier[dependency] . identifier[get_field] (),
identifier[constants] . identifier[IPOPO_CALLBACK_UNBIND_FIELD] ,
identifier[service] ,
identifier[reference] ,
)
identifier[self] . identifier[safe_callback] ( identifier[constants] . identifier[IPOPO_CALLBACK_UNBIND] , identifier[service] , identifier[reference] )
identifier[setattr] ( identifier[self] . identifier[instance] , identifier[dependency] . identifier[get_field] (), identifier[dependency] . identifier[get_value] ())
identifier[self] . identifier[bundle_context] . identifier[unget_service] ( identifier[reference] ) | def __unset_binding(self, dependency, service, reference):
# type: (Any, Any, ServiceReference) -> None
'\n Removes a service from the component\n\n :param dependency: The dependency handler\n :param service: The injected service\n :param reference: The reference of the injected service\n '
# Call the component back
self.__safe_field_callback(dependency.get_field(), constants.IPOPO_CALLBACK_UNBIND_FIELD, service, reference)
self.safe_callback(constants.IPOPO_CALLBACK_UNBIND, service, reference)
# Update the injected field
setattr(self.instance, dependency.get_field(), dependency.get_value())
# Unget the service
self.bundle_context.unget_service(reference) |
def obfn_g(self, Y):
    r"""Compute :math:`g(\mathbf{y}) = g_0(\mathbf{y}_0) +
    g_1(\mathbf{y}_1)` component of ADMM objective function.
    """
    # Evaluate the two regularization terms separately, then combine.
    g0_term = self.obfn_g0(self.obfn_g0var())
    g1_term = self.obfn_g1(self.obfn_g1var())
    return g0_term + g1_term
constant[Compute :math:`g(\mathbf{y}) = g_0(\mathbf{y}_0) +
g_1(\mathbf{y}_1)` component of ADMM objective function.
]
return[binary_operation[call[name[self].obfn_g0, parameter[call[name[self].obfn_g0var, parameter[]]]] + call[name[self].obfn_g1, parameter[call[name[self].obfn_g1var, parameter[]]]]]] | keyword[def] identifier[obfn_g] ( identifier[self] , identifier[Y] ):
literal[string]
keyword[return] identifier[self] . identifier[obfn_g0] ( identifier[self] . identifier[obfn_g0var] ())+ identifier[self] . identifier[obfn_g1] ( identifier[self] . identifier[obfn_g1var] ()) | def obfn_g(self, Y):
"""Compute :math:`g(\\mathbf{y}) = g_0(\\mathbf{y}_0) +
g_1(\\mathbf{y}_1)` component of ADMM objective function.
"""
return self.obfn_g0(self.obfn_g0var()) + self.obfn_g1(self.obfn_g1var()) |
def init_requests_cache(refresh_cache=False):
    """
    Installs a persistent cache which the ``requests`` library will
    consult for responses before making network requests.

    :param refresh_cache: Whether previously cached responses should be
        cleared out after the cache is installed
    """
    # Cached data from external sources lives in this app's per-user
    # cache directory (used by some checks).
    dirs = AppDirs("stix2-validator", "OASIS")
    cache_dir = dirs.user_cache_dir
    # Create the cache dir; tolerate it already existing (py2-compatible
    # idiom, no exist_ok available there).
    try:
        os.makedirs(cache_dir)
    except OSError as err:
        if err.errno != errno.EEXIST:
            raise
    # Separate cache files per major Python version; entries expire
    # after one week.
    cache_name = os.path.join(cache_dir,
                              'py{}cache'.format(sys.version_info[0]))
    requests_cache.install_cache(cache_name=cache_name,
                                 expire_after=datetime.timedelta(weeks=1))
    if refresh_cache:
        clear_requests_cache()
constant[
Initializes a cache which the ``requests`` library will consult for
responses, before making network requests.
:param refresh_cache: Whether the cache should be cleared out
]
variable[dirs] assign[=] call[name[AppDirs], parameter[constant[stix2-validator], constant[OASIS]]]
<ast.Try object at 0x7da1b10e66b0>
call[name[requests_cache].install_cache, parameter[]]
if name[refresh_cache] begin[:]
call[name[clear_requests_cache], parameter[]] | keyword[def] identifier[init_requests_cache] ( identifier[refresh_cache] = keyword[False] ):
literal[string]
identifier[dirs] = identifier[AppDirs] ( literal[string] , literal[string] )
keyword[try] :
identifier[os] . identifier[makedirs] ( identifier[dirs] . identifier[user_cache_dir] )
keyword[except] identifier[OSError] keyword[as] identifier[e] :
keyword[if] identifier[e] . identifier[errno] != identifier[errno] . identifier[EEXIST] :
keyword[raise]
identifier[requests_cache] . identifier[install_cache] (
identifier[cache_name] = identifier[os] . identifier[path] . identifier[join] ( identifier[dirs] . identifier[user_cache_dir] , literal[string] . identifier[format] (
identifier[sys] . identifier[version_info] [ literal[int] ])),
identifier[expire_after] = identifier[datetime] . identifier[timedelta] ( identifier[weeks] = literal[int] ))
keyword[if] identifier[refresh_cache] :
identifier[clear_requests_cache] () | def init_requests_cache(refresh_cache=False):
"""
Initializes a cache which the ``requests`` library will consult for
responses, before making network requests.
:param refresh_cache: Whether the cache should be cleared out
"""
# Cache data from external sources; used in some checks
dirs = AppDirs('stix2-validator', 'OASIS')
# Create cache dir if doesn't exist
try:
os.makedirs(dirs.user_cache_dir) # depends on [control=['try'], data=[]]
except OSError as e:
if e.errno != errno.EEXIST:
raise # depends on [control=['if'], data=[]] # depends on [control=['except'], data=['e']]
requests_cache.install_cache(cache_name=os.path.join(dirs.user_cache_dir, 'py{}cache'.format(sys.version_info[0])), expire_after=datetime.timedelta(weeks=1))
if refresh_cache:
clear_requests_cache() # depends on [control=['if'], data=[]] |
def _check_required_group(self):
    """
    Returns True if the group requirement (AUTH_LDAP_REQUIRE_GROUP) is
    met. Always returns True if AUTH_LDAP_REQUIRE_GROUP is None.

    Raises AuthenticationFailed when the requirement is configured but
    the user does not satisfy it.
    """
    group_query = self.settings.REQUIRE_GROUP
    # No requirement configured: nothing to enforce.
    if group_query is None:
        return True
    # Accept either a plain DN or an already-built LDAPGroupQuery.
    if not isinstance(group_query, LDAPGroupQuery):
        group_query = LDAPGroupQuery(group_query)
    if not group_query.resolve(self):
        raise self.AuthenticationFailed(
            "user does not satisfy AUTH_LDAP_REQUIRE_GROUP"
        )
    return True
constant[
Returns True if the group requirement (AUTH_LDAP_REQUIRE_GROUP) is
met. Always returns True if AUTH_LDAP_REQUIRE_GROUP is None.
]
variable[required_group_dn] assign[=] name[self].settings.REQUIRE_GROUP
if compare[name[required_group_dn] is_not constant[None]] begin[:]
if <ast.UnaryOp object at 0x7da1b17f8d00> begin[:]
variable[required_group_dn] assign[=] call[name[LDAPGroupQuery], parameter[name[required_group_dn]]]
variable[result] assign[=] call[name[required_group_dn].resolve, parameter[name[self]]]
if <ast.UnaryOp object at 0x7da1b17f9720> begin[:]
<ast.Raise object at 0x7da1b17f9360>
return[constant[True]] | keyword[def] identifier[_check_required_group] ( identifier[self] ):
literal[string]
identifier[required_group_dn] = identifier[self] . identifier[settings] . identifier[REQUIRE_GROUP]
keyword[if] identifier[required_group_dn] keyword[is] keyword[not] keyword[None] :
keyword[if] keyword[not] identifier[isinstance] ( identifier[required_group_dn] , identifier[LDAPGroupQuery] ):
identifier[required_group_dn] = identifier[LDAPGroupQuery] ( identifier[required_group_dn] )
identifier[result] = identifier[required_group_dn] . identifier[resolve] ( identifier[self] )
keyword[if] keyword[not] identifier[result] :
keyword[raise] identifier[self] . identifier[AuthenticationFailed] (
literal[string]
)
keyword[return] keyword[True] | def _check_required_group(self):
"""
Returns True if the group requirement (AUTH_LDAP_REQUIRE_GROUP) is
met. Always returns True if AUTH_LDAP_REQUIRE_GROUP is None.
"""
required_group_dn = self.settings.REQUIRE_GROUP
if required_group_dn is not None:
if not isinstance(required_group_dn, LDAPGroupQuery):
required_group_dn = LDAPGroupQuery(required_group_dn) # depends on [control=['if'], data=[]]
result = required_group_dn.resolve(self)
if not result:
raise self.AuthenticationFailed('user does not satisfy AUTH_LDAP_REQUIRE_GROUP') # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['required_group_dn']]
return True |
def add_interaction(self, u, v, t=None, e=None):
        """Add an interaction between u and v at time t vanishing (optional) at time e.
        The nodes u and v will be automatically added if they are
        not already in the graph.
        Parameters
        ----------
        u, v : nodes
            Nodes can be, for example, strings or numbers.
            Nodes must be hashable (and not None) Python objects.
        t : appearance snapshot id, mandatory
        e : vanishing snapshot id, optional (default=None)
        See Also
        --------
        add_edges_from : add a collection of interaction at time t
        Notes
        -----
        Adding an interaction that already exists but with different snapshot id updates the interaction data.
        Examples
        --------
        The following all add the interaction e=(1,2, 0) to graph G:
        >>> G = dn.DynGraph()
        >>> G.add_interaction(1, 2, 0) # explicit two-node form
        >>> G.add_interaction( [(1,2)], t=0 ) # add interaction from iterable container
        Specify the vanishing of the interaction
        >>>> G.add_interaction(1, 3, t=1, e=10)
        will produce an interaction present in snapshots [0, 9]
        """
        if t is None:
            raise nx.NetworkXError(
                "The t argument must be specified.")
        # Auto-register both endpoints (mirrors networkx's add_edge behavior).
        if u not in self._node:
            self._adj[u] = self.adjlist_inner_dict_factory()
            self._node[u] = {}
        if v not in self._node:
            self._adj[v] = self.adjlist_inner_dict_factory()
            self._node[v] = {}
        # Normalize t to an inclusive [start, end] snapshot interval.
        if type(t) != list:
            t = [t, t]
        # Record the "+" (appearance) event for the start snapshot in
        # time_to_edge, unless the edge already exists and edge removal is
        # not being tracked (in which case nothing new can happen).
        for idt in [t[0]]:
            if self.has_edge(u, v) and not self.edge_removal:
                continue
            else:
                if idt not in self.time_to_edge:
                    self.time_to_edge[idt] = {(u, v, "+"): None}
                else:
                    if (u, v, "+") not in self.time_to_edge[idt]:
                        self.time_to_edge[idt][(u, v, "+")] = None
        # When a vanishing snapshot e is given (and removals are tracked),
        # close the presence interval at e-1 and record the "-" event at e.
        if e is not None and self.edge_removal:
            t[1] = e - 1
            if e not in self.time_to_edge:
                self.time_to_edge[e] = {(u, v, "-"): None}
            else:
                self.time_to_edge[e][(u, v, "-")] = None
        # add the interaction: presence intervals live in datadict['t'] as a
        # list of [start, end] pairs, kept in chronological order.
        datadict = self._adj[u].get(v, self.edge_attr_dict_factory())
        if 't' in datadict:
            app = datadict['t']
            max_end = app[-1][1]
            if max_end == app[-1][0] and t[0] == app[-1][0] + 1:
                # Last interval is a single snapshot immediately preceding t:
                # extend it in place and drop the now-redundant "+" event.
                app[-1] = [app[-1][0], t[1]]
                if app[-1][0] + 1 in self.time_to_edge and (u, v, "+") in self.time_to_edge[app[-1][0] + 1]:
                    del self.time_to_edge[app[-1][0] + 1][(u, v, "+")]
            else:
                if t[0] < app[-1][0]:
                    raise ValueError("The specified interaction extension is broader than "
                                     "the ones already present for the given nodes.")
                if t[0] <= max_end < t[1]:
                    # New interval overlaps the tail of the last one: merge by
                    # extending the end, and drop the stale "-"/"+" events.
                    app[-1][1] = t[1]
                    if max_end + 1 in self.time_to_edge:
                        if self.edge_removal:
                            del self.time_to_edge[max_end + 1][(u, v, "-")]
                        del self.time_to_edge[t[0]][(u, v, "+")]
                elif max_end == t[0] - 1:
                    # New interval starts right after the last one ends:
                    # coalesce them and move the "-" event to t[1]+1.
                    if max_end + 1 in self.time_to_edge and (u, v, "+") in self.time_to_edge[max_end + 1]:
                        del self.time_to_edge[max_end + 1][(u, v, "+")]
                        if self.edge_removal:
                            if max_end + 1 in self.time_to_edge and (u, v, '-') in self.time_to_edge[max_end + 1]:
                                del self.time_to_edge[max_end + 1][(u, v, '-')]
                            if t[1] + 1 in self.time_to_edge:
                                self.time_to_edge[t[1] + 1][(u, v, "-")] = None
                            else:
                                self.time_to_edge[t[1] + 1] = {(u, v, "-"): None}
                    app[-1][1] = t[1]
                else:
                    # Disjoint, later interval: append as a new presence span.
                    app.append(t)
        else:
            datadict['t'] = [t]
        # Update the per-snapshot interaction counters over the covered span.
        if e is not None:
            span = range(t[0], t[1] + 1)
            for idt in span:
                if idt not in self.snapshots:
                    self.snapshots[idt] = 1
                else:
                    self.snapshots[idt] += 1
        else:
            # No explicit vanishing: only count the interval endpoints.
            for idt in t:
                if idt is not None:
                    if idt not in self.snapshots:
                        self.snapshots[idt] = 1
                    else:
                        self.snapshots[idt] += 1
        # Store the shared attribute dict on both directions of the edge.
        self._adj[u][v] = datadict
        self._adj[v][u] = datadict
constant[Add an interaction between u and v at time t vanishing (optional) at time e.
The nodes u and v will be automatically added if they are
not already in the graph.
Parameters
----------
u, v : nodes
Nodes can be, for example, strings or numbers.
Nodes must be hashable (and not None) Python objects.
t : appearance snapshot id, mandatory
e : vanishing snapshot id, optional (default=None)
See Also
--------
add_edges_from : add a collection of interaction at time t
Notes
-----
Adding an interaction that already exists but with different snapshot id updates the interaction data.
Examples
--------
The following all add the interaction e=(1,2, 0) to graph G:
>>> G = dn.DynGraph()
>>> G.add_interaction(1, 2, 0) # explicit two-node form
>>> G.add_interaction( [(1,2)], t=0 ) # add interaction from iterable container
Specify the vanishing of the interaction
>>>> G.add_interaction(1, 3, t=1, e=10)
will produce an interaction present in snapshots [0, 9]
]
if compare[name[t] is constant[None]] begin[:]
<ast.Raise object at 0x7da1b04c96c0>
if compare[name[u] <ast.NotIn object at 0x7da2590d7190> name[self]._node] begin[:]
call[name[self]._adj][name[u]] assign[=] call[name[self].adjlist_inner_dict_factory, parameter[]]
call[name[self]._node][name[u]] assign[=] dictionary[[], []]
if compare[name[v] <ast.NotIn object at 0x7da2590d7190> name[self]._node] begin[:]
call[name[self]._adj][name[v]] assign[=] call[name[self].adjlist_inner_dict_factory, parameter[]]
call[name[self]._node][name[v]] assign[=] dictionary[[], []]
if compare[call[name[type], parameter[name[t]]] not_equal[!=] name[list]] begin[:]
variable[t] assign[=] list[[<ast.Name object at 0x7da1b04c8f40>, <ast.Name object at 0x7da1b04c8d60>]]
for taget[name[idt]] in starred[list[[<ast.Subscript object at 0x7da1b04c9180>]]] begin[:]
if <ast.BoolOp object at 0x7da1b04c95a0> begin[:]
continue
if <ast.BoolOp object at 0x7da1b04c9a20> begin[:]
call[name[t]][constant[1]] assign[=] binary_operation[name[e] - constant[1]]
if compare[name[e] <ast.NotIn object at 0x7da2590d7190> name[self].time_to_edge] begin[:]
call[name[self].time_to_edge][name[e]] assign[=] dictionary[[<ast.Tuple object at 0x7da1b04cb760>], [<ast.Constant object at 0x7da1b04cb940>]]
variable[datadict] assign[=] call[call[name[self]._adj][name[u]].get, parameter[name[v], call[name[self].edge_attr_dict_factory, parameter[]]]]
if compare[constant[t] in name[datadict]] begin[:]
variable[app] assign[=] call[name[datadict]][constant[t]]
variable[max_end] assign[=] call[call[name[app]][<ast.UnaryOp object at 0x7da1b04cb250>]][constant[1]]
if <ast.BoolOp object at 0x7da1b04c8760> begin[:]
call[name[app]][<ast.UnaryOp object at 0x7da1b04ca470>] assign[=] list[[<ast.Subscript object at 0x7da1b04cbd90>, <ast.Subscript object at 0x7da1b04ca290>]]
if <ast.BoolOp object at 0x7da1b04c9fc0> begin[:]
<ast.Delete object at 0x7da1b04c9870>
if compare[name[e] is_not constant[None]] begin[:]
variable[span] assign[=] call[name[range], parameter[call[name[t]][constant[0]], binary_operation[call[name[t]][constant[1]] + constant[1]]]]
for taget[name[idt]] in starred[name[span]] begin[:]
if compare[name[idt] <ast.NotIn object at 0x7da2590d7190> name[self].snapshots] begin[:]
call[name[self].snapshots][name[idt]] assign[=] constant[1]
call[call[name[self]._adj][name[u]]][name[v]] assign[=] name[datadict]
call[call[name[self]._adj][name[v]]][name[u]] assign[=] name[datadict] | keyword[def] identifier[add_interaction] ( identifier[self] , identifier[u] , identifier[v] , identifier[t] = keyword[None] , identifier[e] = keyword[None] ):
literal[string]
keyword[if] identifier[t] keyword[is] keyword[None] :
keyword[raise] identifier[nx] . identifier[NetworkXError] (
literal[string] )
keyword[if] identifier[u] keyword[not] keyword[in] identifier[self] . identifier[_node] :
identifier[self] . identifier[_adj] [ identifier[u] ]= identifier[self] . identifier[adjlist_inner_dict_factory] ()
identifier[self] . identifier[_node] [ identifier[u] ]={}
keyword[if] identifier[v] keyword[not] keyword[in] identifier[self] . identifier[_node] :
identifier[self] . identifier[_adj] [ identifier[v] ]= identifier[self] . identifier[adjlist_inner_dict_factory] ()
identifier[self] . identifier[_node] [ identifier[v] ]={}
keyword[if] identifier[type] ( identifier[t] )!= identifier[list] :
identifier[t] =[ identifier[t] , identifier[t] ]
keyword[for] identifier[idt] keyword[in] [ identifier[t] [ literal[int] ]]:
keyword[if] identifier[self] . identifier[has_edge] ( identifier[u] , identifier[v] ) keyword[and] keyword[not] identifier[self] . identifier[edge_removal] :
keyword[continue]
keyword[else] :
keyword[if] identifier[idt] keyword[not] keyword[in] identifier[self] . identifier[time_to_edge] :
identifier[self] . identifier[time_to_edge] [ identifier[idt] ]={( identifier[u] , identifier[v] , literal[string] ): keyword[None] }
keyword[else] :
keyword[if] ( identifier[u] , identifier[v] , literal[string] ) keyword[not] keyword[in] identifier[self] . identifier[time_to_edge] [ identifier[idt] ]:
identifier[self] . identifier[time_to_edge] [ identifier[idt] ][( identifier[u] , identifier[v] , literal[string] )]= keyword[None]
keyword[if] identifier[e] keyword[is] keyword[not] keyword[None] keyword[and] identifier[self] . identifier[edge_removal] :
identifier[t] [ literal[int] ]= identifier[e] - literal[int]
keyword[if] identifier[e] keyword[not] keyword[in] identifier[self] . identifier[time_to_edge] :
identifier[self] . identifier[time_to_edge] [ identifier[e] ]={( identifier[u] , identifier[v] , literal[string] ): keyword[None] }
keyword[else] :
identifier[self] . identifier[time_to_edge] [ identifier[e] ][( identifier[u] , identifier[v] , literal[string] )]= keyword[None]
identifier[datadict] = identifier[self] . identifier[_adj] [ identifier[u] ]. identifier[get] ( identifier[v] , identifier[self] . identifier[edge_attr_dict_factory] ())
keyword[if] literal[string] keyword[in] identifier[datadict] :
identifier[app] = identifier[datadict] [ literal[string] ]
identifier[max_end] = identifier[app] [- literal[int] ][ literal[int] ]
keyword[if] identifier[max_end] == identifier[app] [- literal[int] ][ literal[int] ] keyword[and] identifier[t] [ literal[int] ]== identifier[app] [- literal[int] ][ literal[int] ]+ literal[int] :
identifier[app] [- literal[int] ]=[ identifier[app] [- literal[int] ][ literal[int] ], identifier[t] [ literal[int] ]]
keyword[if] identifier[app] [- literal[int] ][ literal[int] ]+ literal[int] keyword[in] identifier[self] . identifier[time_to_edge] keyword[and] ( identifier[u] , identifier[v] , literal[string] ) keyword[in] identifier[self] . identifier[time_to_edge] [ identifier[app] [- literal[int] ][ literal[int] ]+ literal[int] ]:
keyword[del] identifier[self] . identifier[time_to_edge] [ identifier[app] [- literal[int] ][ literal[int] ]+ literal[int] ][( identifier[u] , identifier[v] , literal[string] )]
keyword[else] :
keyword[if] identifier[t] [ literal[int] ]< identifier[app] [- literal[int] ][ literal[int] ]:
keyword[raise] identifier[ValueError] ( literal[string]
literal[string] )
keyword[if] identifier[t] [ literal[int] ]<= identifier[max_end] < identifier[t] [ literal[int] ]:
identifier[app] [- literal[int] ][ literal[int] ]= identifier[t] [ literal[int] ]
keyword[if] identifier[max_end] + literal[int] keyword[in] identifier[self] . identifier[time_to_edge] :
keyword[if] identifier[self] . identifier[edge_removal] :
keyword[del] identifier[self] . identifier[time_to_edge] [ identifier[max_end] + literal[int] ][( identifier[u] , identifier[v] , literal[string] )]
keyword[del] identifier[self] . identifier[time_to_edge] [ identifier[t] [ literal[int] ]][( identifier[u] , identifier[v] , literal[string] )]
keyword[elif] identifier[max_end] == identifier[t] [ literal[int] ]- literal[int] :
keyword[if] identifier[max_end] + literal[int] keyword[in] identifier[self] . identifier[time_to_edge] keyword[and] ( identifier[u] , identifier[v] , literal[string] ) keyword[in] identifier[self] . identifier[time_to_edge] [ identifier[max_end] + literal[int] ]:
keyword[del] identifier[self] . identifier[time_to_edge] [ identifier[max_end] + literal[int] ][( identifier[u] , identifier[v] , literal[string] )]
keyword[if] identifier[self] . identifier[edge_removal] :
keyword[if] identifier[max_end] + literal[int] keyword[in] identifier[self] . identifier[time_to_edge] keyword[and] ( identifier[u] , identifier[v] , literal[string] ) keyword[in] identifier[self] . identifier[time_to_edge] [ identifier[max_end] + literal[int] ]:
keyword[del] identifier[self] . identifier[time_to_edge] [ identifier[max_end] + literal[int] ][( identifier[u] , identifier[v] , literal[string] )]
keyword[if] identifier[t] [ literal[int] ]+ literal[int] keyword[in] identifier[self] . identifier[time_to_edge] :
identifier[self] . identifier[time_to_edge] [ identifier[t] [ literal[int] ]+ literal[int] ][( identifier[u] , identifier[v] , literal[string] )]= keyword[None]
keyword[else] :
identifier[self] . identifier[time_to_edge] [ identifier[t] [ literal[int] ]+ literal[int] ]={( identifier[u] , identifier[v] , literal[string] ): keyword[None] }
identifier[app] [- literal[int] ][ literal[int] ]= identifier[t] [ literal[int] ]
keyword[else] :
identifier[app] . identifier[append] ( identifier[t] )
keyword[else] :
identifier[datadict] [ literal[string] ]=[ identifier[t] ]
keyword[if] identifier[e] keyword[is] keyword[not] keyword[None] :
identifier[span] = identifier[range] ( identifier[t] [ literal[int] ], identifier[t] [ literal[int] ]+ literal[int] )
keyword[for] identifier[idt] keyword[in] identifier[span] :
keyword[if] identifier[idt] keyword[not] keyword[in] identifier[self] . identifier[snapshots] :
identifier[self] . identifier[snapshots] [ identifier[idt] ]= literal[int]
keyword[else] :
identifier[self] . identifier[snapshots] [ identifier[idt] ]+= literal[int]
keyword[else] :
keyword[for] identifier[idt] keyword[in] identifier[t] :
keyword[if] identifier[idt] keyword[is] keyword[not] keyword[None] :
keyword[if] identifier[idt] keyword[not] keyword[in] identifier[self] . identifier[snapshots] :
identifier[self] . identifier[snapshots] [ identifier[idt] ]= literal[int]
keyword[else] :
identifier[self] . identifier[snapshots] [ identifier[idt] ]+= literal[int]
identifier[self] . identifier[_adj] [ identifier[u] ][ identifier[v] ]= identifier[datadict]
identifier[self] . identifier[_adj] [ identifier[v] ][ identifier[u] ]= identifier[datadict] | def add_interaction(self, u, v, t=None, e=None):
"""Add an interaction between u and v at time t vanishing (optional) at time e.
The nodes u and v will be automatically added if they are
not already in the graph.
Parameters
----------
u, v : nodes
Nodes can be, for example, strings or numbers.
Nodes must be hashable (and not None) Python objects.
t : appearance snapshot id, mandatory
e : vanishing snapshot id, optional (default=None)
See Also
--------
add_edges_from : add a collection of interaction at time t
Notes
-----
Adding an interaction that already exists but with different snapshot id updates the interaction data.
Examples
--------
The following all add the interaction e=(1,2, 0) to graph G:
>>> G = dn.DynGraph()
>>> G.add_interaction(1, 2, 0) # explicit two-node form
>>> G.add_interaction( [(1,2)], t=0 ) # add interaction from iterable container
Specify the vanishing of the interaction
>>>> G.add_interaction(1, 3, t=1, e=10)
will produce an interaction present in snapshots [0, 9]
"""
if t is None:
raise nx.NetworkXError('The t argument must be specified.') # depends on [control=['if'], data=[]]
if u not in self._node:
self._adj[u] = self.adjlist_inner_dict_factory()
self._node[u] = {} # depends on [control=['if'], data=['u']]
if v not in self._node:
self._adj[v] = self.adjlist_inner_dict_factory()
self._node[v] = {} # depends on [control=['if'], data=['v']]
if type(t) != list:
t = [t, t] # depends on [control=['if'], data=[]]
for idt in [t[0]]:
if self.has_edge(u, v) and (not self.edge_removal):
continue # depends on [control=['if'], data=[]]
elif idt not in self.time_to_edge:
self.time_to_edge[idt] = {(u, v, '+'): None} # depends on [control=['if'], data=['idt']]
elif (u, v, '+') not in self.time_to_edge[idt]:
self.time_to_edge[idt][u, v, '+'] = None # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['idt']]
if e is not None and self.edge_removal:
t[1] = e - 1
if e not in self.time_to_edge:
self.time_to_edge[e] = {(u, v, '-'): None} # depends on [control=['if'], data=['e']]
else:
self.time_to_edge[e][u, v, '-'] = None # depends on [control=['if'], data=[]]
# add the interaction
datadict = self._adj[u].get(v, self.edge_attr_dict_factory())
if 't' in datadict:
app = datadict['t']
max_end = app[-1][1]
if max_end == app[-1][0] and t[0] == app[-1][0] + 1:
app[-1] = [app[-1][0], t[1]]
if app[-1][0] + 1 in self.time_to_edge and (u, v, '+') in self.time_to_edge[app[-1][0] + 1]:
del self.time_to_edge[app[-1][0] + 1][u, v, '+'] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
else:
if t[0] < app[-1][0]:
raise ValueError('The specified interaction extension is broader than the ones already present for the given nodes.') # depends on [control=['if'], data=[]]
if t[0] <= max_end < t[1]:
app[-1][1] = t[1]
if max_end + 1 in self.time_to_edge:
if self.edge_removal:
del self.time_to_edge[max_end + 1][u, v, '-'] # depends on [control=['if'], data=[]]
del self.time_to_edge[t[0]][u, v, '+'] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['max_end']]
elif max_end == t[0] - 1:
if max_end + 1 in self.time_to_edge and (u, v, '+') in self.time_to_edge[max_end + 1]:
del self.time_to_edge[max_end + 1][u, v, '+']
if self.edge_removal:
if max_end + 1 in self.time_to_edge and (u, v, '-') in self.time_to_edge[max_end + 1]:
del self.time_to_edge[max_end + 1][u, v, '-'] # depends on [control=['if'], data=[]]
if t[1] + 1 in self.time_to_edge:
self.time_to_edge[t[1] + 1][u, v, '-'] = None # depends on [control=['if'], data=[]]
else:
self.time_to_edge[t[1] + 1] = {(u, v, '-'): None} # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
app[-1][1] = t[1] # depends on [control=['if'], data=['max_end']]
else:
app.append(t) # depends on [control=['if'], data=['datadict']]
else:
datadict['t'] = [t]
if e is not None:
span = range(t[0], t[1] + 1)
for idt in span:
if idt not in self.snapshots:
self.snapshots[idt] = 1 # depends on [control=['if'], data=['idt']]
else:
self.snapshots[idt] += 1 # depends on [control=['for'], data=['idt']] # depends on [control=['if'], data=[]]
else:
for idt in t:
if idt is not None:
if idt not in self.snapshots:
self.snapshots[idt] = 1 # depends on [control=['if'], data=['idt']]
else:
self.snapshots[idt] += 1 # depends on [control=['if'], data=['idt']] # depends on [control=['for'], data=['idt']]
self._adj[u][v] = datadict
self._adj[v][u] = datadict |
def clean_undefined(obj):
    """
    Recursively convert Undefined entries to None (null).

    Dict values are cleaned in place (as before); list items that are
    Undefined become None, and nested lists/dicts inside lists are now
    cleaned recursively as well, matching the dict branch's behavior.
    Scalars and unrecognized types are returned unchanged.

    :param obj: value to clean (list, dict, or scalar)
    :return: the cleaned value (a new list for list input, the same
        object mutated in place for dict input)
    """
    if isinstance(obj, list):
        # Null out Undefined items; recurse into anything else so nested
        # containers are cleaned consistently with the dict branch.
        return [
            None if isinstance(item, Undefined) else clean_undefined(item)
            for item in obj
        ]
    if isinstance(obj, dict):
        for key in obj:
            obj[key] = clean_undefined(obj[key])
    return obj
constant[
Convert Undefined array entries to None (null)
]
if call[name[isinstance], parameter[name[obj], name[list]]] begin[:]
return[<ast.ListComp object at 0x7da204346320>]
if call[name[isinstance], parameter[name[obj], name[dict]]] begin[:]
for taget[name[key]] in starred[name[obj]] begin[:]
call[name[obj]][name[key]] assign[=] call[name[clean_undefined], parameter[call[name[obj]][name[key]]]]
return[name[obj]] | keyword[def] identifier[clean_undefined] ( identifier[obj] ):
literal[string]
keyword[if] identifier[isinstance] ( identifier[obj] , identifier[list] ):
keyword[return] [
keyword[None] keyword[if] identifier[isinstance] ( identifier[item] , identifier[Undefined] ) keyword[else] identifier[item]
keyword[for] identifier[item] keyword[in] identifier[obj]
]
keyword[if] identifier[isinstance] ( identifier[obj] , identifier[dict] ):
keyword[for] identifier[key] keyword[in] identifier[obj] :
identifier[obj] [ identifier[key] ]= identifier[clean_undefined] ( identifier[obj] [ identifier[key] ])
keyword[return] identifier[obj] | def clean_undefined(obj):
"""
Convert Undefined array entries to None (null)
"""
if isinstance(obj, list):
return [None if isinstance(item, Undefined) else item for item in obj] # depends on [control=['if'], data=[]]
if isinstance(obj, dict):
for key in obj:
obj[key] = clean_undefined(obj[key]) # depends on [control=['for'], data=['key']] # depends on [control=['if'], data=[]]
return obj |
def add_inote(self, msg, idx, off=None):
        """
        Add a message (instruction note) to a specific instruction by using
        (default) the index of the address if specified.

        Silently does nothing when no code object is attached.

        :param msg: the message
        :type msg: string
        :param idx: index of the instruction (the position in the list of the instruction)
        :type idx: int
        :param off: address of the instruction
        :type off: int
        """
        # Identity check with `is not None` instead of `!= None`
        # (avoids invoking a custom __ne__ and follows PEP 8).
        if self.code is not None:
            self.code.add_inote(msg, idx, off)
constant[
Add a message to a specific instruction by using (default) the index of the address if specified
:param msg: the message
:type msg: string
:param idx: index of the instruction (the position in the list of the instruction)
:type idx: int
:param off: address of the instruction
:type off: int
]
if compare[name[self].code not_equal[!=] constant[None]] begin[:]
call[name[self].code.add_inote, parameter[name[msg], name[idx], name[off]]] | keyword[def] identifier[add_inote] ( identifier[self] , identifier[msg] , identifier[idx] , identifier[off] = keyword[None] ):
literal[string]
keyword[if] identifier[self] . identifier[code] != keyword[None] :
identifier[self] . identifier[code] . identifier[add_inote] ( identifier[msg] , identifier[idx] , identifier[off] ) | def add_inote(self, msg, idx, off=None):
"""
Add a message to a specific instruction by using (default) the index of the address if specified
:param msg: the message
:type msg: string
:param idx: index of the instruction (the position in the list of the instruction)
:type idx: int
:param off: address of the instruction
:type off: int
"""
if self.code != None:
self.code.add_inote(msg, idx, off) # depends on [control=['if'], data=[]] |
def has_segment_tables(xmldoc, name=None):
    """
    Check whether the document carries a complete set of segment tables
    (segment definer, segment, and segment summary).  When name is given
    and not None, additionally require that the segment tables define a
    segment list with that name.  Returns True on success, False if any
    table is missing.
    """
    try:
        defined_names = lsctables.SegmentDefTable.get_table(xmldoc).getColumnByName("name")
        lsctables.SegmentTable.get_table(xmldoc)
        lsctables.SegmentSumTable.get_table(xmldoc)
    except (ValueError, KeyError):
        # At least one of the three required tables is absent.
        return False
    if name is None:
        return True
    return name in defined_names
constant[
Return True if the document contains a complete set of segment
tables. Returns False otherwise. If name is given and not None
then the return value is True only if the document's segment
tables, if present, contain a segment list by that name.
]
<ast.Try object at 0x7da20c7cb2e0>
return[<ast.BoolOp object at 0x7da18f720610>] | keyword[def] identifier[has_segment_tables] ( identifier[xmldoc] , identifier[name] = keyword[None] ):
literal[string]
keyword[try] :
identifier[names] = identifier[lsctables] . identifier[SegmentDefTable] . identifier[get_table] ( identifier[xmldoc] ). identifier[getColumnByName] ( literal[string] )
identifier[lsctables] . identifier[SegmentTable] . identifier[get_table] ( identifier[xmldoc] )
identifier[lsctables] . identifier[SegmentSumTable] . identifier[get_table] ( identifier[xmldoc] )
keyword[except] ( identifier[ValueError] , identifier[KeyError] ):
keyword[return] keyword[False]
keyword[return] identifier[name] keyword[is] keyword[None] keyword[or] identifier[name] keyword[in] identifier[names] | def has_segment_tables(xmldoc, name=None):
"""
Return True if the document contains a complete set of segment
tables. Returns False otherwise. If name is given and not None
then the return value is True only if the document's segment
tables, if present, contain a segment list by that name.
"""
try:
names = lsctables.SegmentDefTable.get_table(xmldoc).getColumnByName('name')
lsctables.SegmentTable.get_table(xmldoc)
lsctables.SegmentSumTable.get_table(xmldoc) # depends on [control=['try'], data=[]]
except (ValueError, KeyError):
return False # depends on [control=['except'], data=[]]
return name is None or name in names |
def coerce_annotation(ann, namespace):
    '''Coerce an annotation to a target namespace and validate it.

    The annotation is converted (automatically, if its namespace differs)
    to the requested namespace and then strictly schema-validated.

    Parameters
    ----------
    ann : jams.Annotation
        The annotation object in question
    namespace : str
        The namespace pattern to match `ann` against

    Returns
    -------
    ann_coerced: jams.Annotation
        The annotation coerced to the target namespace

    Raises
    ------
    NamespaceError
        If `ann` does not match the proper namespace
    SchemaError
        If `ann` fails schema validation

    See Also
    --------
    jams.nsconvert.convert
    '''
    coerced = convert(ann, namespace)
    coerced.validate(strict=True)
    return coerced
constant[Validate that the annotation has the correct namespace,
and is well-formed.
If the annotation is not of the correct namespace, automatic conversion
is attempted.
Parameters
----------
ann : jams.Annotation
The annotation object in question
namespace : str
The namespace pattern to match `ann` against
Returns
-------
ann_coerced: jams.Annotation
The annotation coerced to the target namespace
Raises
------
NamespaceError
If `ann` does not match the proper namespace
SchemaError
If `ann` fails schema validation
See Also
--------
jams.nsconvert.convert
]
variable[ann] assign[=] call[name[convert], parameter[name[ann], name[namespace]]]
call[name[ann].validate, parameter[]]
return[name[ann]] | keyword[def] identifier[coerce_annotation] ( identifier[ann] , identifier[namespace] ):
literal[string]
identifier[ann] = identifier[convert] ( identifier[ann] , identifier[namespace] )
identifier[ann] . identifier[validate] ( identifier[strict] = keyword[True] )
keyword[return] identifier[ann] | def coerce_annotation(ann, namespace):
"""Validate that the annotation has the correct namespace,
and is well-formed.
If the annotation is not of the correct namespace, automatic conversion
is attempted.
Parameters
----------
ann : jams.Annotation
The annotation object in question
namespace : str
The namespace pattern to match `ann` against
Returns
-------
ann_coerced: jams.Annotation
The annotation coerced to the target namespace
Raises
------
NamespaceError
If `ann` does not match the proper namespace
SchemaError
If `ann` fails schema validation
See Also
--------
jams.nsconvert.convert
"""
ann = convert(ann, namespace)
ann.validate(strict=True)
return ann |
def apply_transformation(self, structure):
        """
        Returns a copy of structure with lattice parameters
        and sites scaled to the same degree as the relaxed_structure.

        Arg:
            structure (Structure): A structurally similar structure in
                regards to crystal and site positions.

        Returns:
            Structure: new structure on the relaxed sites with the scaled
                lattice.
        """
        # Resolve the species mapping: either supplied by the caller, or the
        # best anonymous (electronegativity-ordered) mapping between the
        # unrelaxed reference structure and the input structure.
        # NOTE: identity check `is None` replaces the `== None` comparison.
        if self.species_map is None:
            match = StructureMatcher()
            s_map = match.get_best_electronegativity_anonymous_mapping(
                self.unrelaxed_structure, structure)
        else:
            s_map = self.species_map
        # Scale (a, b, c, alpha, beta, gamma) by the stored relaxation ratios.
        params = list(structure.lattice.abc) + list(structure.lattice.angles)
        new_lattice = Lattice.from_parameters(
            *[p * self.params_percent_change[i] for i, p in enumerate(params)])
        # Rebuild sites from the relaxed structure, translating species through
        # the mapping while keeping the relaxed fractional coordinates.
        species = [s_map[site.specie] for site in self.relaxed_structure]
        frac_coords = [site.frac_coords for site in self.relaxed_structure]
        return Structure(new_lattice, species, frac_coords)
constant[
Returns a copy of structure with lattice parameters
and sites scaled to the same degree as the relaxed_structure.
Arg:
structure (Structure): A structurally similar structure in
regards to crystal and site positions.
]
if compare[name[self].species_map equal[==] constant[None]] begin[:]
variable[match] assign[=] call[name[StructureMatcher], parameter[]]
variable[s_map] assign[=] call[name[match].get_best_electronegativity_anonymous_mapping, parameter[name[self].unrelaxed_structure, name[structure]]]
variable[params] assign[=] call[name[list], parameter[name[structure].lattice.abc]]
call[name[params].extend, parameter[name[structure].lattice.angles]]
variable[new_lattice] assign[=] call[name[Lattice].from_parameters, parameter[<ast.Starred object at 0x7da18ede4100>]]
<ast.Tuple object at 0x7da18ede7e80> assign[=] tuple[[<ast.List object at 0x7da18ede6260>, <ast.List object at 0x7da18ede7fd0>]]
for taget[name[site]] in starred[name[self].relaxed_structure] begin[:]
call[name[species].append, parameter[call[name[s_map]][name[site].specie]]]
call[name[frac_coords].append, parameter[name[site].frac_coords]]
return[call[name[Structure], parameter[name[new_lattice], name[species], name[frac_coords]]]] | keyword[def] identifier[apply_transformation] ( identifier[self] , identifier[structure] ):
literal[string]
keyword[if] identifier[self] . identifier[species_map] == keyword[None] :
identifier[match] = identifier[StructureMatcher] ()
identifier[s_map] = identifier[match] . identifier[get_best_electronegativity_anonymous_mapping] ( identifier[self] . identifier[unrelaxed_structure] ,
identifier[structure] )
keyword[else] :
identifier[s_map] = identifier[self] . identifier[species_map]
identifier[params] = identifier[list] ( identifier[structure] . identifier[lattice] . identifier[abc] )
identifier[params] . identifier[extend] ( identifier[structure] . identifier[lattice] . identifier[angles] )
identifier[new_lattice] = identifier[Lattice] . identifier[from_parameters] (*[ identifier[p] * identifier[self] . identifier[params_percent_change] [ identifier[i] ] keyword[for] identifier[i] , identifier[p] keyword[in] identifier[enumerate] ( identifier[params] )])
identifier[species] , identifier[frac_coords] =[],[]
keyword[for] identifier[site] keyword[in] identifier[self] . identifier[relaxed_structure] :
identifier[species] . identifier[append] ( identifier[s_map] [ identifier[site] . identifier[specie] ])
identifier[frac_coords] . identifier[append] ( identifier[site] . identifier[frac_coords] )
keyword[return] identifier[Structure] ( identifier[new_lattice] , identifier[species] , identifier[frac_coords] ) | def apply_transformation(self, structure):
"""
Returns a copy of structure with lattice parameters
and sites scaled to the same degree as the relaxed_structure.
Arg:
structure (Structure): A structurally similar structure in
regards to crystal and site positions.
"""
if self.species_map == None:
match = StructureMatcher()
s_map = match.get_best_electronegativity_anonymous_mapping(self.unrelaxed_structure, structure) # depends on [control=['if'], data=[]]
else:
s_map = self.species_map
params = list(structure.lattice.abc)
params.extend(structure.lattice.angles)
new_lattice = Lattice.from_parameters(*[p * self.params_percent_change[i] for (i, p) in enumerate(params)])
(species, frac_coords) = ([], [])
for site in self.relaxed_structure:
species.append(s_map[site.specie])
frac_coords.append(site.frac_coords) # depends on [control=['for'], data=['site']]
return Structure(new_lattice, species, frac_coords) |
def abu_chartMulti(self, cyclist, mass_range=None, ilabel=True,
                   imlabel=True, imlabel_fontsize=8, imagic=False,
                   boxstable=True, lbound=20, plotaxis=[0,0,0,0],
                   color_map='jet', pdf=False, title=None, path=None):
    '''
    Plot an abundance chart for each cycle in ``cyclist`` and save
    each figure to a .png file (or .pdf, see ``pdf``).

    Parameters
    ----------
    cyclist : list
        The list of cycles we are plotting.
    mass_range : list, optional
        A 1x2 array containing the lower and upper mass range. If
        this is an instance of abu_vector this will only plot
        isotopes that have an atomic mass within this range. This
        will throw an error if this range does not make sense, i.e.
        [45,2]. If None, it will plot over the entire range. The
        default is None.
    ilabel : boolean, optional
        Elemental labels off/on. The default is True.
    imlabel : boolean, optional
        Label for isotopic masses off/on. The default is True.
    imlabel_fontsize : integer, optional
        Fontsize for isotopic mass labels. The default is 8.
    imagic : boolean, optional
        Turn lines for magic numbers off/on. The default is False.
    boxstable : boolean, optional
        Plot the black boxes around the stable elements. The
        default is True.
    lbound : tuple, optional
        Boundaries for colour spectrum plotted. The default is 20.
    plotaxis : list, optional
        Set axis limit: if [0,0,0,0] the complete range in (N,Z)
        will be plotted. The default is [0,0,0,0].
    color_map : string, optional
        Color map according to choices in matplotlib
        (e.g. www.scipy.org/Cookbook/Matplotlib/Show_colormaps).
        The default is 'jet'.
    pdf : boolean, optional
        Save the figures as pdf (True) or png (False). The default
        is False.
    title : string, optional
        The title of the plots and the base name of the saved
        images. The default is None (images are named 'AbuChart').
    path : string, optional
        Directory the images are written to. The default is None
        (current working directory).
    '''
    if self._which('dvipng') is None:
        print("This method may need the third party program dvipng to operate")
        print('It is located at http://sourceforge.net/projects/dvipng/')
    # Widest cycle number determines the zero-padding width of file names.
    max_num = max(cyclist)
    for cycle in cyclist:
        self.abu_chart(cycle, mass_range, ilabel, imlabel,
                       imlabel_fontsize, imagic, boxstable, lbound,
                       plotaxis, False, color_map)
        if title is not None:
            pl.title(title)
            # BUG FIX: `name` used to stay undefined on this branch,
            # raising NameError at savefig when a title was supplied.
            name = title
        else:
            name = 'AbuChart'
        if path is not None:
            name = os.path.join(path, name)
        number_str = _padding_model_number(cycle, max_num)
        # dpi differs per format to keep output file sizes reasonable
        if pdf:
            pl.savefig(name + number_str + '.pdf', dpi=200)
        else:
            pl.savefig(name + number_str + '.png', dpi=100)
        pl.close()
    return None
constant[
Method that plots abundence chart and saves those figures to a
.png file (by default). Plots a figure for each cycle in the
argument cycle.
Parameters
----------
cyclist : list
The list of cycles we are plotting.
mass_range : list, optional
A 1x2 array containing the lower and upper mass range. If
this is an instance of abu_vector this will only plot
isotopes that have an atomic mass within this range. This
will throw an error if this range does not make sence ie
[45,2] if None, it will plot over the entire range. The
default is None.
ilabel : boolean, optional
Elemental labels off/on. The default is True.
imlabel : boolean, optional
Label for isotopic masses off/on. The efault is True.
imlabel_fontsize : intager, optional
Fontsize for isotopic mass labels. The default is 8.
imagic : boolean, optional
Turn lines for magic numbers off/on. The default is False.
boxstable : boolean, optional
Plot the black boxes around the stable elements. The
default is True.
lbound : tuple, optional
Boundaries for colour spectrum ploted. The defaults is 20.
plotaxis : list, optional
Set axis limit: If [0,0,0,0] the complete range in (N,Z)
will be plotted. The default is [0,0,0,0].
color_map : string, optional
Color map according to choices in matplotlib
(e.g. www.scipy.org/Cookbook/Matplotlib/Show_colormaps).
The default is 'jet'.
pdf : boolean, optional
What format will this be saved in pdf/png. The default is
True.
title : string, optional
The title of the plots and the saved images. The default is
None.
]
if compare[call[name[self]._which, parameter[constant[dvipng]]] equal[==] constant[None]] begin[:]
call[name[print], parameter[constant[This method may need the third party program dvipng to operate]]]
call[name[print], parameter[constant[It is located at http://sourceforge.net/projects/dvipng/]]]
variable[max_num] assign[=] call[name[max], parameter[name[cyclist]]]
for taget[name[i]] in starred[call[name[range], parameter[call[name[len], parameter[name[cyclist]]]]]] begin[:]
call[name[self].abu_chart, parameter[call[name[cyclist]][name[i]], name[mass_range], name[ilabel], name[imlabel], name[imlabel_fontsize], name[imagic], name[boxstable], name[lbound], name[plotaxis], constant[False], name[color_map]]]
if compare[name[title] not_equal[!=] constant[None]] begin[:]
call[name[pl].title, parameter[name[title]]]
if compare[name[path] is_not constant[None]] begin[:]
variable[name] assign[=] call[name[os].path.join, parameter[name[path], name[name]]]
variable[number_str] assign[=] call[name[_padding_model_number], parameter[call[name[cyclist]][name[i]], name[max_num]]]
if <ast.UnaryOp object at 0x7da1b1bab790> begin[:]
call[name[pl].savefig, parameter[binary_operation[binary_operation[name[name] + name[number_str]] + constant[.png]]]]
call[name[pl].close, parameter[]]
return[constant[None]] | keyword[def] identifier[abu_chartMulti] ( identifier[self] , identifier[cyclist] , identifier[mass_range] = keyword[None] , identifier[ilabel] = keyword[True] ,
identifier[imlabel] = keyword[True] , identifier[imlabel_fontsize] = literal[int] , identifier[imagic] = keyword[False] ,
identifier[boxstable] = keyword[True] , identifier[lbound] = literal[int] , identifier[plotaxis] =[ literal[int] , literal[int] , literal[int] , literal[int] ],
identifier[color_map] = literal[string] , identifier[pdf] = keyword[False] , identifier[title] = keyword[None] , identifier[path] = keyword[None] ):
literal[string]
keyword[if] identifier[self] . identifier[_which] ( literal[string] )== keyword[None] :
identifier[print] ( literal[string] )
identifier[print] ( literal[string] )
identifier[max_num] = identifier[max] ( identifier[cyclist] )
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[len] ( identifier[cyclist] )):
identifier[self] . identifier[abu_chart] ( identifier[cyclist] [ identifier[i] ], identifier[mass_range] , identifier[ilabel] , identifier[imlabel] , identifier[imlabel_fontsize] , identifier[imagic] , identifier[boxstable] , identifier[lbound] , identifier[plotaxis] , keyword[False] , identifier[color_map] )
keyword[if] identifier[title] != keyword[None] :
identifier[pl] . identifier[title] ( identifier[title] )
keyword[else] :
identifier[name] = literal[string]
keyword[if] identifier[path] keyword[is] keyword[not] keyword[None] :
identifier[name] = identifier[os] . identifier[path] . identifier[join] ( identifier[path] , identifier[name] )
identifier[number_str] = identifier[_padding_model_number] ( identifier[cyclist] [ identifier[i] ], identifier[max_num] )
keyword[if] keyword[not] identifier[pdf] :
identifier[pl] . identifier[savefig] ( identifier[name] + identifier[number_str] + literal[string] , identifier[dpi] = literal[int] )
keyword[else] :
identifier[pl] . identifier[savefig] ( identifier[name] + identifier[number_str] + literal[string] , identifier[dpi] = literal[int] )
identifier[pl] . identifier[close] ()
keyword[return] keyword[None] | def abu_chartMulti(self, cyclist, mass_range=None, ilabel=True, imlabel=True, imlabel_fontsize=8, imagic=False, boxstable=True, lbound=20, plotaxis=[0, 0, 0, 0], color_map='jet', pdf=False, title=None, path=None):
"""
Method that plots abundence chart and saves those figures to a
.png file (by default). Plots a figure for each cycle in the
argument cycle.
Parameters
----------
cyclist : list
The list of cycles we are plotting.
mass_range : list, optional
A 1x2 array containing the lower and upper mass range. If
this is an instance of abu_vector this will only plot
isotopes that have an atomic mass within this range. This
will throw an error if this range does not make sence ie
[45,2] if None, it will plot over the entire range. The
default is None.
ilabel : boolean, optional
Elemental labels off/on. The default is True.
imlabel : boolean, optional
Label for isotopic masses off/on. The efault is True.
imlabel_fontsize : intager, optional
Fontsize for isotopic mass labels. The default is 8.
imagic : boolean, optional
Turn lines for magic numbers off/on. The default is False.
boxstable : boolean, optional
Plot the black boxes around the stable elements. The
default is True.
lbound : tuple, optional
Boundaries for colour spectrum ploted. The defaults is 20.
plotaxis : list, optional
Set axis limit: If [0,0,0,0] the complete range in (N,Z)
will be plotted. The default is [0,0,0,0].
color_map : string, optional
Color map according to choices in matplotlib
(e.g. www.scipy.org/Cookbook/Matplotlib/Show_colormaps).
The default is 'jet'.
pdf : boolean, optional
What format will this be saved in pdf/png. The default is
True.
title : string, optional
The title of the plots and the saved images. The default is
None.
"""
if self._which('dvipng') == None:
print('This method may need the third party program dvipng to operate')
print('It is located at http://sourceforge.net/projects/dvipng/') # depends on [control=['if'], data=[]]
max_num = max(cyclist)
for i in range(len(cyclist)):
self.abu_chart(cyclist[i], mass_range, ilabel, imlabel, imlabel_fontsize, imagic, boxstable, lbound, plotaxis, False, color_map)
if title != None:
pl.title(title) # depends on [control=['if'], data=['title']]
else:
name = 'AbuChart'
if path is not None:
name = os.path.join(path, name) # depends on [control=['if'], data=['path']]
number_str = _padding_model_number(cyclist[i], max_num)
if not pdf:
pl.savefig(name + number_str + '.png', dpi=100) # depends on [control=['if'], data=[]]
else:
pl.savefig(name + number_str + '.pdf', dpi=200)
pl.close() # depends on [control=['for'], data=['i']]
return None |
def get_smart_contract_event_by_tx_hash(self, tx_hash: str, is_full: bool = False) -> dict:
    """
    Fetch the smart contract event associated with a transaction hash.

    :param tx_hash: a hexadecimal transaction hash value.
    :param is_full: when True, return the raw JSON-RPC response instead
        of only its 'result' field.
    :return: the information of the smart contract event in dictionary form.
    """
    request = self.generate_json_rpc_payload(RpcMethod.GET_SMART_CONTRACT_EVENT, [tx_hash, 1])
    reply = self.__post(self.__url, request)
    return reply if is_full else reply['result']
constant[
This interface is used to get the corresponding smart contract event based on the height of block.
:param tx_hash: a hexadecimal hash value.
:param is_full:
:return: the information of smart contract event in dictionary form.
]
variable[payload] assign[=] call[name[self].generate_json_rpc_payload, parameter[name[RpcMethod].GET_SMART_CONTRACT_EVENT, list[[<ast.Name object at 0x7da207f03250>, <ast.Constant object at 0x7da207f01270>]]]]
variable[response] assign[=] call[name[self].__post, parameter[name[self].__url, name[payload]]]
if name[is_full] begin[:]
return[name[response]]
return[call[name[response]][constant[result]]] | keyword[def] identifier[get_smart_contract_event_by_tx_hash] ( identifier[self] , identifier[tx_hash] : identifier[str] , identifier[is_full] : identifier[bool] = keyword[False] )-> identifier[dict] :
literal[string]
identifier[payload] = identifier[self] . identifier[generate_json_rpc_payload] ( identifier[RpcMethod] . identifier[GET_SMART_CONTRACT_EVENT] ,[ identifier[tx_hash] , literal[int] ])
identifier[response] = identifier[self] . identifier[__post] ( identifier[self] . identifier[__url] , identifier[payload] )
keyword[if] identifier[is_full] :
keyword[return] identifier[response]
keyword[return] identifier[response] [ literal[string] ] | def get_smart_contract_event_by_tx_hash(self, tx_hash: str, is_full: bool=False) -> dict:
"""
This interface is used to get the corresponding smart contract event based on the height of block.
:param tx_hash: a hexadecimal hash value.
:param is_full:
:return: the information of smart contract event in dictionary form.
"""
payload = self.generate_json_rpc_payload(RpcMethod.GET_SMART_CONTRACT_EVENT, [tx_hash, 1])
response = self.__post(self.__url, payload)
if is_full:
return response # depends on [control=['if'], data=[]]
return response['result'] |
def _mongodump_exec(mongodump, address, port, user, passwd, db,
                    out_dir, auth_db, dry_run):
    """
    Run mongodump on a database.

    :param mongodump: path to the mongodump executable
    :param address: server host name or IP address
    :param port: server port
    :param user: user name
    :param passwd: password
    :param db: database name
    :param out_dir: output directory
    :param auth_db: authentication database
    :param dry_run: dry run mode (log the command but do not execute it)
    :raises OSError: if mongodump process returns error
    """
    # Record exactly what we are about to run (password intentionally
    # omitted from the log line).
    log.msg(
        "mongodump [{mongodump}] db={db} auth_db={auth_db}"
        " mongodb://{user}@{host}:{port} > {output}".format(
            mongodump=mongodump, user=user, host=address,
            port=port, db=db, auth_db=auth_db, output=out_dir))
    # Assemble the mongodump command-line arguments.
    args = (
        "--host {host}:{port} -d {db} -u {user} -p {passwd} "
        "--authenticationDatabase {auth_db} -o {output}".format(
            host=address, port=port, db=db, user=user, passwd=passwd,
            auth_db=auth_db, output=out_dir))
    if dry_run:
        return
    # Make the actual call to mongodump.
    shell.run(mongodump, args=args)
constant[
Run mongodump on a database
:param address: server host name or IP address
:param port: server port
:param user: user name
:param passwd: password
:param db: database name
:param out_dir: output directory
:param auth_db: authentication database
:param dry_run: dry run mode
:raises OSError: if mongodump process returns error
]
call[name[log].msg, parameter[call[constant[mongodump [{mongodump}] db={db} auth_db={auth_db} mongodb://{user}@{host}:{port} > {output}].format, parameter[]]]]
variable[args] assign[=] call[constant[--host {host}:{port} -d {db} -u {user} -p {passwd} --authenticationDatabase {auth_db} -o {output}].format, parameter[]]
if <ast.UnaryOp object at 0x7da1b0a80df0> begin[:]
call[name[shell].run, parameter[name[mongodump]]] | keyword[def] identifier[_mongodump_exec] ( identifier[mongodump] , identifier[address] , identifier[port] , identifier[user] , identifier[passwd] , identifier[db] ,
identifier[out_dir] , identifier[auth_db] , identifier[dry_run] ):
literal[string]
identifier[log] . identifier[msg] ( literal[string] literal[string]
. identifier[format] ( identifier[mongodump] = identifier[mongodump] , identifier[user] = identifier[user] , identifier[host] = identifier[address] ,
identifier[port] = identifier[port] , identifier[db] = identifier[db] , identifier[auth_db] = identifier[auth_db] , identifier[output] = identifier[out_dir] ))
identifier[args] = literal[string] literal[string] . identifier[format] ( identifier[host] = identifier[address] , identifier[port] = identifier[port] , identifier[db] = identifier[db] , identifier[user] = identifier[user] , identifier[passwd] = identifier[passwd] ,
identifier[auth_db] = identifier[auth_db] , identifier[output] = identifier[out_dir] )
keyword[if] keyword[not] identifier[dry_run] :
identifier[shell] . identifier[run] ( identifier[mongodump] , identifier[args] = identifier[args] ) | def _mongodump_exec(mongodump, address, port, user, passwd, db, out_dir, auth_db, dry_run):
"""
Run mongodump on a database
:param address: server host name or IP address
:param port: server port
:param user: user name
:param passwd: password
:param db: database name
:param out_dir: output directory
:param auth_db: authentication database
:param dry_run: dry run mode
:raises OSError: if mongodump process returns error
"""
# Log the call
log.msg('mongodump [{mongodump}] db={db} auth_db={auth_db} mongodb://{user}@{host}:{port} > {output}'.format(mongodump=mongodump, user=user, host=address, port=port, db=db, auth_db=auth_db, output=out_dir))
# Prepare the call
args = '--host {host}:{port} -d {db} -u {user} -p {passwd} --authenticationDatabase {auth_db} -o {output}'.format(host=address, port=port, db=db, user=user, passwd=passwd, auth_db=auth_db, output=out_dir)
if not dry_run:
# Make the actual call to mongodump
shell.run(mongodump, args=args) # depends on [control=['if'], data=[]] |
def jsonrpc_request(method, identifier, params=None):
    """Serialize a JSONRPC 2.0 request and terminate it with CRLF.

    :param method: name of the remote method to invoke
    :param identifier: request id echoed back by the server
    :param params: optional parameters object; an empty dict is sent
        when falsy
    :return: the encoded request as bytes
    """
    body = json.dumps({
        'id': identifier,
        'method': method,
        'params': params or {},
        'jsonrpc': '2.0',
    })
    return (body + '\r\n').encode()
constant[Produce a JSONRPC request.]
return[call[call[constant[{}
].format, parameter[call[name[json].dumps, parameter[dictionary[[<ast.Constant object at 0x7da1b02e4280>, <ast.Constant object at 0x7da1b02e4b50>, <ast.Constant object at 0x7da1b02e73d0>, <ast.Constant object at 0x7da1b02e5f00>], [<ast.Name object at 0x7da1b02e6b30>, <ast.Name object at 0x7da1b02e4d60>, <ast.BoolOp object at 0x7da1b02e54b0>, <ast.Constant object at 0x7da1b02e5870>]]]]]].encode, parameter[]]] | keyword[def] identifier[jsonrpc_request] ( identifier[method] , identifier[identifier] , identifier[params] = keyword[None] ):
literal[string]
keyword[return] literal[string] . identifier[format] ( identifier[json] . identifier[dumps] ({
literal[string] : identifier[identifier] ,
literal[string] : identifier[method] ,
literal[string] : identifier[params] keyword[or] {},
literal[string] : literal[string]
})). identifier[encode] () | def jsonrpc_request(method, identifier, params=None):
"""Produce a JSONRPC request."""
return '{}\r\n'.format(json.dumps({'id': identifier, 'method': method, 'params': params or {}, 'jsonrpc': '2.0'})).encode() |
def _to_pywintypes(row):
"""convert values in a row to types accepted by excel"""
def _pywintype(x):
if isinstance(x, dt.date):
return dt.datetime(x.year, x.month, x.day, tzinfo=dt.timezone.utc)
elif isinstance(x, (dt.datetime, pa.Timestamp)):
if x.tzinfo is None:
return x.replace(tzinfo=dt.timezone.utc)
elif isinstance(x, str):
if re.match("^\d{4}-\d{2}-\d{2}$", x):
return "'" + x
return x
elif isinstance(x, np.integer):
return int(x)
elif isinstance(x, np.floating):
return float(x)
elif x is not None and not isinstance(x, (str, int, float, bool)):
return str(x)
return x
return [_pywintype(x) for x in row] | def function[_to_pywintypes, parameter[row]]:
constant[convert values in a row to types accepted by excel]
def function[_pywintype, parameter[x]]:
if call[name[isinstance], parameter[name[x], name[dt].date]] begin[:]
return[call[name[dt].datetime, parameter[name[x].year, name[x].month, name[x].day]]]
return[name[x]]
return[<ast.ListComp object at 0x7da1b237f8b0>] | keyword[def] identifier[_to_pywintypes] ( identifier[row] ):
literal[string]
keyword[def] identifier[_pywintype] ( identifier[x] ):
keyword[if] identifier[isinstance] ( identifier[x] , identifier[dt] . identifier[date] ):
keyword[return] identifier[dt] . identifier[datetime] ( identifier[x] . identifier[year] , identifier[x] . identifier[month] , identifier[x] . identifier[day] , identifier[tzinfo] = identifier[dt] . identifier[timezone] . identifier[utc] )
keyword[elif] identifier[isinstance] ( identifier[x] ,( identifier[dt] . identifier[datetime] , identifier[pa] . identifier[Timestamp] )):
keyword[if] identifier[x] . identifier[tzinfo] keyword[is] keyword[None] :
keyword[return] identifier[x] . identifier[replace] ( identifier[tzinfo] = identifier[dt] . identifier[timezone] . identifier[utc] )
keyword[elif] identifier[isinstance] ( identifier[x] , identifier[str] ):
keyword[if] identifier[re] . identifier[match] ( literal[string] , identifier[x] ):
keyword[return] literal[string] + identifier[x]
keyword[return] identifier[x]
keyword[elif] identifier[isinstance] ( identifier[x] , identifier[np] . identifier[integer] ):
keyword[return] identifier[int] ( identifier[x] )
keyword[elif] identifier[isinstance] ( identifier[x] , identifier[np] . identifier[floating] ):
keyword[return] identifier[float] ( identifier[x] )
keyword[elif] identifier[x] keyword[is] keyword[not] keyword[None] keyword[and] keyword[not] identifier[isinstance] ( identifier[x] ,( identifier[str] , identifier[int] , identifier[float] , identifier[bool] )):
keyword[return] identifier[str] ( identifier[x] )
keyword[return] identifier[x]
keyword[return] [ identifier[_pywintype] ( identifier[x] ) keyword[for] identifier[x] keyword[in] identifier[row] ] | def _to_pywintypes(row):
"""convert values in a row to types accepted by excel"""
def _pywintype(x):
if isinstance(x, dt.date):
return dt.datetime(x.year, x.month, x.day, tzinfo=dt.timezone.utc) # depends on [control=['if'], data=[]]
elif isinstance(x, (dt.datetime, pa.Timestamp)):
if x.tzinfo is None:
return x.replace(tzinfo=dt.timezone.utc) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
elif isinstance(x, str):
if re.match('^\\d{4}-\\d{2}-\\d{2}$', x):
return "'" + x # depends on [control=['if'], data=[]]
return x # depends on [control=['if'], data=[]]
elif isinstance(x, np.integer):
return int(x) # depends on [control=['if'], data=[]]
elif isinstance(x, np.floating):
return float(x) # depends on [control=['if'], data=[]]
elif x is not None and (not isinstance(x, (str, int, float, bool))):
return str(x) # depends on [control=['if'], data=[]]
return x
return [_pywintype(x) for x in row] |
def copy_script(self, filename, id_=-1):
    """Copy a script to the repo's Script subdirectory.

    Scripts are copied as files to a path, or, on a "migrated" JSS,
    are POSTed to the JSS (pass an id if you wish to associate
    the script with an existing Script object).

    Args:
        filename: Path for file to copy.
        id_: Int ID, used _only_ for migrated repos. Default is -1,
            which creates a new Script.
    """
    # Membership test directly on the dict; calling .keys() is redundant.
    if "jss" in self.connection and self.connection["jss"].jss_migrated:
        self._copy_script_migrated(filename, id_, SCRIPT_FILE_TYPE)
    else:
        basename = os.path.basename(filename)
        self._copy(filename,
                   os.path.join(self.connection["mount_point"],
                                "Scripts", basename))
constant[Copy a script to the repo's Script subdirectory.
Scripts are copied as files to a path, or, on a "migrated" JSS,
are POSTed to the JSS (pass an id if you wish to associate
the script with an existing Script object).
Args:
filename: Path for file to copy.
id_: Int ID, used _only_ for migrated repos. Default is -1,
which creates a new Script.
]
if <ast.BoolOp object at 0x7da18bc73ac0> begin[:]
call[name[self]._copy_script_migrated, parameter[name[filename], name[id_], name[SCRIPT_FILE_TYPE]]] | keyword[def] identifier[copy_script] ( identifier[self] , identifier[filename] , identifier[id_] =- literal[int] ):
literal[string]
keyword[if] ( literal[string] keyword[in] identifier[self] . identifier[connection] . identifier[keys] () keyword[and]
identifier[self] . identifier[connection] [ literal[string] ]. identifier[jss_migrated] ):
identifier[self] . identifier[_copy_script_migrated] ( identifier[filename] , identifier[id_] , identifier[SCRIPT_FILE_TYPE] )
keyword[else] :
identifier[basename] = identifier[os] . identifier[path] . identifier[basename] ( identifier[filename] )
identifier[self] . identifier[_copy] ( identifier[filename] , identifier[os] . identifier[path] . identifier[join] ( identifier[self] . identifier[connection] [ literal[string] ],
literal[string] , identifier[basename] )) | def copy_script(self, filename, id_=-1):
"""Copy a script to the repo's Script subdirectory.
Scripts are copied as files to a path, or, on a "migrated" JSS,
are POSTed to the JSS (pass an id if you wish to associate
the script with an existing Script object).
Args:
filename: Path for file to copy.
id_: Int ID, used _only_ for migrated repos. Default is -1,
which creates a new Script.
"""
if 'jss' in self.connection.keys() and self.connection['jss'].jss_migrated:
self._copy_script_migrated(filename, id_, SCRIPT_FILE_TYPE) # depends on [control=['if'], data=[]]
else:
basename = os.path.basename(filename)
self._copy(filename, os.path.join(self.connection['mount_point'], 'Scripts', basename)) |
def valid_totp(
    token,
    secret,
    digest_method=hashlib.sha1,
    token_length=6,
    interval_length=30,
    clock=None,
    window=0,
):
    """Check whether the given token is a valid time-based one-time
    password for the given secret.

    :param token: token which is being checked
    :type token: int or str
    :param secret: secret for which the token is being checked
    :type secret: str
    :param digest_method: method of generating digest (hashlib.sha1 by default)
    :type digest_method: callable
    :param token_length: length of the token (6 by default)
    :type token_length: int
    :param interval_length: length of TOTP interval (30 seconds by default)
    :type interval_length: int
    :param clock: time in epoch seconds to generate totp for, default is now
    :type clock: int
    :param window: compensate for clock skew, number of intervals to check on
        each side of the current time. (default is 0 - only check the current
        clock time)
    :type window: int (positive)
    :return: True, if is valid token, False otherwise
    :rtype: bool

    >>> secret = b'MFRGGZDFMZTWQ2LK'
    >>> token = get_totp(secret)
    >>> valid_totp(token, secret)
    True
    >>> valid_totp(token+1, secret)
    False
    >>> token = get_totp(secret, as_string=True)
    >>> valid_totp(token, secret)
    True
    >>> valid_totp(token + b'1', secret)
    False
    """
    # Reject tokens that cannot possibly be valid before doing any crypto.
    if not _is_possible_token(token, token_length=token_length):
        return False
    if clock is None:
        clock = time.time()
    # Accept the token if it matches the TOTP of any interval inside the
    # skew window [-window, +window] around the reference clock.
    return any(
        int(token) == get_totp(
            secret,
            digest_method=digest_method,
            token_length=token_length,
            interval_length=interval_length,
            clock=int(clock) + offset * interval_length,
        )
        for offset in range(-window, window + 1)
    )
return False | def function[valid_totp, parameter[token, secret, digest_method, token_length, interval_length, clock, window]]:
constant[Check if given token is valid time-based one-time password for given
secret.
:param token: token which is being checked
:type token: int or str
:param secret: secret for which the token is being checked
:type secret: str
:param digest_method: method of generating digest (hashlib.sha1 by default)
:type digest_method: callable
:param token_length: length of the token (6 by default)
:type token_length: int
:param interval_length: length of TOTP interval (30 seconds by default)
:type interval_length: int
:param clock: time in epoch seconds to generate totp for, default is now
:type clock: int
:param window: compensate for clock skew, number of intervals to check on
each side of the current time. (default is 0 - only check the current
clock time)
:type window: int (positive)
:return: True, if is valid token, False otherwise
:rtype: bool
>>> secret = b'MFRGGZDFMZTWQ2LK'
>>> token = get_totp(secret)
>>> valid_totp(token, secret)
True
>>> valid_totp(token+1, secret)
False
>>> token = get_totp(secret, as_string=True)
>>> valid_totp(token, secret)
True
>>> valid_totp(token + b'1', secret)
False
]
if call[name[_is_possible_token], parameter[name[token]]] begin[:]
if compare[name[clock] is constant[None]] begin[:]
variable[clock] assign[=] call[name[time].time, parameter[]]
for taget[name[w]] in starred[call[name[range], parameter[<ast.UnaryOp object at 0x7da1b0bcab00>, binary_operation[name[window] + constant[1]]]]] begin[:]
if compare[call[name[int], parameter[name[token]]] equal[==] call[name[get_totp], parameter[name[secret]]]] begin[:]
return[constant[True]]
return[constant[False]] | keyword[def] identifier[valid_totp] (
identifier[token] ,
identifier[secret] ,
identifier[digest_method] = identifier[hashlib] . identifier[sha1] ,
identifier[token_length] = literal[int] ,
identifier[interval_length] = literal[int] ,
identifier[clock] = keyword[None] ,
identifier[window] = literal[int] ,
):
literal[string]
keyword[if] identifier[_is_possible_token] ( identifier[token] , identifier[token_length] = identifier[token_length] ):
keyword[if] identifier[clock] keyword[is] keyword[None] :
identifier[clock] = identifier[time] . identifier[time] ()
keyword[for] identifier[w] keyword[in] identifier[range] (- identifier[window] , identifier[window] + literal[int] ):
keyword[if] identifier[int] ( identifier[token] )== identifier[get_totp] (
identifier[secret] ,
identifier[digest_method] = identifier[digest_method] ,
identifier[token_length] = identifier[token_length] ,
identifier[interval_length] = identifier[interval_length] ,
identifier[clock] = identifier[int] ( identifier[clock] )+( identifier[w] * identifier[interval_length] )
):
keyword[return] keyword[True]
keyword[return] keyword[False] | def valid_totp(token, secret, digest_method=hashlib.sha1, token_length=6, interval_length=30, clock=None, window=0):
"""Check if given token is valid time-based one-time password for given
secret.
:param token: token which is being checked
:type token: int or str
:param secret: secret for which the token is being checked
:type secret: str
:param digest_method: method of generating digest (hashlib.sha1 by default)
:type digest_method: callable
:param token_length: length of the token (6 by default)
:type token_length: int
:param interval_length: length of TOTP interval (30 seconds by default)
:type interval_length: int
:param clock: time in epoch seconds to generate totp for, default is now
:type clock: int
:param window: compensate for clock skew, number of intervals to check on
each side of the current time. (default is 0 - only check the current
clock time)
:type window: int (positive)
:return: True, if is valid token, False otherwise
:rtype: bool
>>> secret = b'MFRGGZDFMZTWQ2LK'
>>> token = get_totp(secret)
>>> valid_totp(token, secret)
True
>>> valid_totp(token+1, secret)
False
>>> token = get_totp(secret, as_string=True)
>>> valid_totp(token, secret)
True
>>> valid_totp(token + b'1', secret)
False
"""
if _is_possible_token(token, token_length=token_length):
if clock is None:
clock = time.time() # depends on [control=['if'], data=['clock']]
for w in range(-window, window + 1):
if int(token) == get_totp(secret, digest_method=digest_method, token_length=token_length, interval_length=interval_length, clock=int(clock) + w * interval_length):
return True # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['w']] # depends on [control=['if'], data=[]]
return False |
def _form_output(self):
    """Assemble ``self.output`` from the formatted body and, when
    external inner XML is used, the DIDL fragments wrapped in a dummy
    root tag so the combined text is valid XML."""
    pieces = []
    if self.external_inner_xml:
        pieces.append(u'<Dummy_tag_to_create_valid_xml_on_external_inner_xml>\n')
    pieces.append(u'<!-- BODY -->\n{0}'.format(self.body_formatted))
    if self.external_inner_xml:
        for idx, didl in enumerate(self.inner_xml):
            pieces.append(u'\n<!-- DIDL_{0} -->\n{1}'.format(
                idx, etree.tostring(didl, pretty_print=True)))
        pieces.append(u'</Dummy_tag_to_create_valid_xml_on_external_inner_xml>')
    self.output = u''.join(pieces)
constant[ Form the output ]
name[self].output assign[=] constant[]
if name[self].external_inner_xml begin[:]
<ast.AugAssign object at 0x7da1b180cd60>
<ast.AugAssign object at 0x7da1b180db10>
if name[self].external_inner_xml begin[:]
for taget[tuple[[<ast.Name object at 0x7da1b180ceb0>, <ast.Name object at 0x7da1b180ec50>]]] in starred[call[name[enumerate], parameter[name[self].inner_xml]]] begin[:]
<ast.AugAssign object at 0x7da1b180f730>
<ast.AugAssign object at 0x7da1b180d210> | keyword[def] identifier[_form_output] ( identifier[self] ):
literal[string]
identifier[self] . identifier[output] = literal[string]
keyword[if] identifier[self] . identifier[external_inner_xml] :
identifier[self] . identifier[output] += literal[string] literal[string]
identifier[self] . identifier[output] += literal[string] . identifier[format] ( identifier[self] . identifier[body_formatted] )
keyword[if] identifier[self] . identifier[external_inner_xml] :
keyword[for] identifier[number] , identifier[didl] keyword[in] identifier[enumerate] ( identifier[self] . identifier[inner_xml] ):
identifier[self] . identifier[output] += literal[string] . identifier[format] ( identifier[number] , identifier[etree] . identifier[tostring] ( identifier[didl] , identifier[pretty_print] = keyword[True] ))
identifier[self] . identifier[output] += literal[string] literal[string] | def _form_output(self):
""" Form the output """
self.output = u''
if self.external_inner_xml:
self.output += u'<Dummy_tag_to_create_valid_xml_on_external_inner_xml>\n' # depends on [control=['if'], data=[]]
self.output += u'<!-- BODY -->\n{0}'.format(self.body_formatted)
if self.external_inner_xml:
for (number, didl) in enumerate(self.inner_xml):
self.output += u'\n<!-- DIDL_{0} -->\n{1}'.format(number, etree.tostring(didl, pretty_print=True)) # depends on [control=['for'], data=[]]
self.output += u'</Dummy_tag_to_create_valid_xml_on_external_inner_xml>' # depends on [control=['if'], data=[]] |
def rounder(input_number, digit=5):
    """
    Round input number and convert to str.

    :param input_number: input number
    :type input_number : anything
    :param digit: scale (the number of digits to the right of the decimal point in a number.)
    :type digit : int
    :return: round number as str
    """
    if isinstance(input_number, tuple):
        # Round each element that parses as a float; render the tuple as "(a,b,...)".
        parts = [str(numpy.around(i, digit)) if isfloat(i) else str(i)
                 for i in input_number]
        return "(" + ",".join(parts) + ")"
    if isfloat(input_number):
        return str(numpy.around(input_number, digit))
    # Non-numeric scalars pass through unchanged (as their string form).
    return str(input_number)
constant[
Round input number and convert to str.
:param input_number: input number
:type input_number : anything
:param digit: scale (the number of digits to the right of the decimal point in a number.)
:type digit : int
:return: round number as str
]
if call[name[isinstance], parameter[name[input_number], name[tuple]]] begin[:]
variable[tuple_list] assign[=] call[name[list], parameter[name[input_number]]]
variable[tuple_str] assign[=] list[[]]
for taget[name[i]] in starred[name[tuple_list]] begin[:]
if call[name[isfloat], parameter[name[i]]] begin[:]
call[name[tuple_str].append, parameter[call[name[str], parameter[call[name[numpy].around, parameter[name[i], name[digit]]]]]]]
return[binary_operation[binary_operation[constant[(] + call[constant[,].join, parameter[name[tuple_str]]]] + constant[)]]]
if call[name[isfloat], parameter[name[input_number]]] begin[:]
return[call[name[str], parameter[call[name[numpy].around, parameter[name[input_number], name[digit]]]]]]
return[call[name[str], parameter[name[input_number]]]] | keyword[def] identifier[rounder] ( identifier[input_number] , identifier[digit] = literal[int] ):
literal[string]
keyword[if] identifier[isinstance] ( identifier[input_number] , identifier[tuple] ):
identifier[tuple_list] = identifier[list] ( identifier[input_number] )
identifier[tuple_str] =[]
keyword[for] identifier[i] keyword[in] identifier[tuple_list] :
keyword[if] identifier[isfloat] ( identifier[i] ):
identifier[tuple_str] . identifier[append] ( identifier[str] ( identifier[numpy] . identifier[around] ( identifier[i] , identifier[digit] )))
keyword[else] :
identifier[tuple_str] . identifier[append] ( identifier[str] ( identifier[i] ))
keyword[return] literal[string] + literal[string] . identifier[join] ( identifier[tuple_str] )+ literal[string]
keyword[if] identifier[isfloat] ( identifier[input_number] ):
keyword[return] identifier[str] ( identifier[numpy] . identifier[around] ( identifier[input_number] , identifier[digit] ))
keyword[return] identifier[str] ( identifier[input_number] ) | def rounder(input_number, digit=5):
"""
Round input number and convert to str.
:param input_number: input number
:type input_number : anything
:param digit: scale (the number of digits to the right of the decimal point in a number.)
:type digit : int
:return: round number as str
"""
if isinstance(input_number, tuple):
tuple_list = list(input_number)
tuple_str = []
for i in tuple_list:
if isfloat(i):
tuple_str.append(str(numpy.around(i, digit))) # depends on [control=['if'], data=[]]
else:
tuple_str.append(str(i)) # depends on [control=['for'], data=['i']]
return '(' + ','.join(tuple_str) + ')' # depends on [control=['if'], data=[]]
if isfloat(input_number):
return str(numpy.around(input_number, digit)) # depends on [control=['if'], data=[]]
return str(input_number) |
def shell_command(class_path):
    """Drop into a debugging shell."""
    # Pre-build the local namespace handed to the interactive shell.
    namespace = {
        'ClassFile': ClassFile,
        'loader': ClassLoader(*class_path),
        'constants': importlib.import_module('jawa.constants'),
    }
    shell.start_shell(local_ns=namespace)
constant[Drop into a debugging shell.]
variable[loader] assign[=] call[name[ClassLoader], parameter[<ast.Starred object at 0x7da1b25e8e80>]]
call[name[shell].start_shell, parameter[]] | keyword[def] identifier[shell_command] ( identifier[class_path] ):
literal[string]
identifier[loader] = identifier[ClassLoader] (* identifier[class_path] )
identifier[shell] . identifier[start_shell] ( identifier[local_ns] ={
literal[string] : identifier[ClassFile] ,
literal[string] : identifier[loader] ,
literal[string] : identifier[importlib] . identifier[import_module] ( literal[string] ),
}) | def shell_command(class_path):
"""Drop into a debugging shell."""
loader = ClassLoader(*class_path)
shell.start_shell(local_ns={'ClassFile': ClassFile, 'loader': loader, 'constants': importlib.import_module('jawa.constants')}) |
def windows_process_priority_format(instance):
    """Ensure the 'priority' property of windows-process-ext ends in '_CLASS'.
    """
    # Pattern requires at least one character before the '_CLASS' suffix.
    priority_pattern = re.compile(r'.+_CLASS$')
    for obj_id, obj in instance['objects'].items():
        if obj.get('type') != 'process':
            continue
        try:
            priority = obj['extensions']['windows-process-ext']['priority']
        except KeyError:
            # Object has no windows-process-ext priority; nothing to check.
            continue
        if priority_pattern.match(priority) is None:
            yield JSONError("The 'priority' property of object '%s' should"
                            " end in '_CLASS'." % obj_id, instance['id'],
                            'windows-process-priority-format')
constant[Ensure the 'priority' property of windows-process-ext ends in '_CLASS'.
]
variable[class_suffix_re] assign[=] call[name[re].compile, parameter[constant[.+_CLASS$]]]
for taget[tuple[[<ast.Name object at 0x7da1b0fd75b0>, <ast.Name object at 0x7da1b0fd4610>]]] in starred[call[call[name[instance]][constant[objects]].items, parameter[]]] begin[:]
if <ast.BoolOp object at 0x7da1b0fd66b0> begin[:]
<ast.Try object at 0x7da1b0fd5d80>
if <ast.UnaryOp object at 0x7da1b1043760> begin[:]
<ast.Yield object at 0x7da1b1043190> | keyword[def] identifier[windows_process_priority_format] ( identifier[instance] ):
literal[string]
identifier[class_suffix_re] = identifier[re] . identifier[compile] ( literal[string] )
keyword[for] identifier[key] , identifier[obj] keyword[in] identifier[instance] [ literal[string] ]. identifier[items] ():
keyword[if] literal[string] keyword[in] identifier[obj] keyword[and] identifier[obj] [ literal[string] ]== literal[string] :
keyword[try] :
identifier[priority] = identifier[obj] [ literal[string] ][ literal[string] ][ literal[string] ]
keyword[except] identifier[KeyError] :
keyword[continue]
keyword[if] keyword[not] identifier[class_suffix_re] . identifier[match] ( identifier[priority] ):
keyword[yield] identifier[JSONError] ( literal[string]
literal[string] % identifier[key] , identifier[instance] [ literal[string] ],
literal[string] ) | def windows_process_priority_format(instance):
"""Ensure the 'priority' property of windows-process-ext ends in '_CLASS'.
"""
class_suffix_re = re.compile('.+_CLASS$')
for (key, obj) in instance['objects'].items():
if 'type' in obj and obj['type'] == 'process':
try:
priority = obj['extensions']['windows-process-ext']['priority'] # depends on [control=['try'], data=[]]
except KeyError:
continue # depends on [control=['except'], data=[]]
if not class_suffix_re.match(priority):
yield JSONError("The 'priority' property of object '%s' should end in '_CLASS'." % key, instance['id'], 'windows-process-priority-format') # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]] |
def abs_error(predictions, labels, weights_fn=None):
  """Computes mean(abs(preds-target))."""
  del weights_fn  # unused; kept for metric-function signature compatibility
  squeezed_targets = tf.squeeze(labels, axis=[2, 3])
  abs_diff = tf.abs(predictions - squeezed_targets)
  # Uniform weights: every element contributes equally to the mean.
  weights = tf.ones(tf.shape(abs_diff), dtype=tf.float32)
  return abs_diff, weights
constant[Computes mean(abs(preds-target)).]
<ast.Delete object at 0x7da207f00d60>
variable[targets] assign[=] call[name[tf].squeeze, parameter[name[labels]]]
variable[batch_abs_error] assign[=] call[name[tf].abs, parameter[binary_operation[name[predictions] - name[targets]]]]
variable[den] assign[=] call[name[tf].ones, parameter[call[name[tf].shape, parameter[name[batch_abs_error]]]]]
return[tuple[[<ast.Name object at 0x7da204620460>, <ast.Name object at 0x7da207f026e0>]]] | keyword[def] identifier[abs_error] ( identifier[predictions] , identifier[labels] , identifier[weights_fn] = keyword[None] ):
literal[string]
keyword[del] identifier[weights_fn]
identifier[targets] = identifier[tf] . identifier[squeeze] ( identifier[labels] , identifier[axis] =[ literal[int] , literal[int] ])
identifier[batch_abs_error] = identifier[tf] . identifier[abs] ( identifier[predictions] - identifier[targets] )
identifier[den] = identifier[tf] . identifier[ones] ( identifier[tf] . identifier[shape] ( identifier[batch_abs_error] ), identifier[dtype] = identifier[tf] . identifier[float32] )
keyword[return] ( identifier[batch_abs_error] , identifier[den] ) | def abs_error(predictions, labels, weights_fn=None):
"""Computes mean(abs(preds-target))."""
del weights_fn # Unused
targets = tf.squeeze(labels, axis=[2, 3])
batch_abs_error = tf.abs(predictions - targets)
den = tf.ones(tf.shape(batch_abs_error), dtype=tf.float32)
return (batch_abs_error, den) |
def select(self, node):
    """
    Translate a select node into SQLQuery.
    :param node: a treebrd node
    :return: a SQLQuery object for the tree rooted at node
    """
    query = self.translate(node.child)
    # AND the new conditions with any filter already on the child query.
    if query.where_block:
        query.where_block = '({0}) AND ({1})'.format(query.where_block,
                                                     node.conditions)
    else:
        query.where_block = node.conditions
    if not query.select_block:
        query.select_block = str(node.attributes)
    return query
constant[
Translate a select node into SQLQuery.
:param node: a treebrd node
:return: a SQLQuery object for the tree rooted at node
]
variable[child_object] assign[=] call[name[self].translate, parameter[name[node].child]]
variable[where_block] assign[=] name[node].conditions
if name[child_object].where_block begin[:]
variable[where_block] assign[=] call[constant[({0}) AND ({1})].format, parameter[name[child_object].where_block, name[node].conditions]]
name[child_object].where_block assign[=] name[where_block]
if <ast.UnaryOp object at 0x7da204567a30> begin[:]
name[child_object].select_block assign[=] call[name[str], parameter[name[node].attributes]]
return[name[child_object]] | keyword[def] identifier[select] ( identifier[self] , identifier[node] ):
literal[string]
identifier[child_object] = identifier[self] . identifier[translate] ( identifier[node] . identifier[child] )
identifier[where_block] = identifier[node] . identifier[conditions]
keyword[if] identifier[child_object] . identifier[where_block] :
identifier[where_block] = literal[string] . identifier[format] ( identifier[child_object] . identifier[where_block] , identifier[node] . identifier[conditions] )
identifier[child_object] . identifier[where_block] = identifier[where_block]
keyword[if] keyword[not] identifier[child_object] . identifier[select_block] :
identifier[child_object] . identifier[select_block] = identifier[str] ( identifier[node] . identifier[attributes] )
keyword[return] identifier[child_object] | def select(self, node):
"""
Translate a select node into SQLQuery.
:param node: a treebrd node
:return: a SQLQuery object for the tree rooted at node
"""
child_object = self.translate(node.child)
where_block = node.conditions
if child_object.where_block:
where_block = '({0}) AND ({1})'.format(child_object.where_block, node.conditions) # depends on [control=['if'], data=[]]
child_object.where_block = where_block
if not child_object.select_block:
child_object.select_block = str(node.attributes) # depends on [control=['if'], data=[]]
return child_object |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.